Compare commits


4 Commits

Author SHA1 Message Date
Matthew af57c97961 Format code 5 months ago
Matthew 12e8cb37e6 RTMPSuck Initial Commit 5 months ago
Matthew c7f98851ca Change notification bar icon 5 months ago
Matthew e27435e9a4 Change notification bar icon 5 months ago

@ -5,7 +5,7 @@ plugins {
// 10,00,000 major-minor-build // 10,00,000 major-minor-build
def AppMajorVersion = 1 def AppMajorVersion = 1
def AppMinorVersion = 3 def AppMinorVersion = 3
def AppBuildNumber = 196 def AppBuildNumber = 4
def AppVersionName = AppMajorVersion + "." + AppMinorVersion + "." + AppBuildNumber def AppVersionName = AppMajorVersion + "." + AppMinorVersion + "." + AppBuildNumber
def AppVersionCode = AppMajorVersion * 100000 + AppMinorVersion * 1000 + AppBuildNumber def AppVersionCode = AppMajorVersion * 100000 + AppMinorVersion * 1000 + AppBuildNumber
@ -24,9 +24,15 @@ android {
defaultConfig { defaultConfig {
applicationId "com.xypower.mpapp" applicationId "com.xypower.mpapp"
if (com.android.build.OutputFile.ABI.equalsIgnoreCase('arm64-v8a')) {
minSdk COMPILE_MIN_SDK_VERSION as int minSdk COMPILE_MIN_SDK_VERSION as int
//noinspection ExpiredTargetSdkVersion //noinspection ExpiredTargetSdkVersion
targetSdk TARGET_SDK_VERSION as int targetSdk TARGET_SDK_VERSION as int
} else {
minSdk COMPILE_MIN_SDK_VERSION_N938 as int
//noinspection ExpiredTargetSdkVersion
targetSdk TARGET_SDK_VERSION_N938 as int
}
versionCode AppVersionCode versionCode AppVersionCode
versionName AppVersionName versionName AppVersionName
@ -82,7 +88,6 @@ android {
enable isReleaseTask enable isReleaseTask
reset() reset()
include "armeabi-v7a", "arm64-v8a" include "armeabi-v7a", "arm64-v8a"
// include "arm64-v8a"
universalApk false universalApk false
} }
} }
@ -98,7 +103,7 @@ android {
def abi = output.getFilter(com.android.build.OutputFile.ABI) def abi = output.getFilter(com.android.build.OutputFile.ABI)
if (abi == null) abi = "all" if (abi == null) abi = "all"
if (abi.contains("v7a")) prevFileName = "N938" if (abi.contains("v7a")) prevFileName = "N938"
def fileName = "${prevFileName}_v${defaultConfig.versionName}_${buildTypeFlag}_${new Date(System.currentTimeMillis()).format("yyyyMMdd")}.apk" def fileName = "${prevFileName}_v${defaultConfig.versionName}_${buildTypeFlag}_${new Date(System.currentTimeMillis()).format("yyyyMMdd")}_${abi}.apk"
outputFileName = fileName outputFileName = fileName
} }
} }
@ -126,10 +131,10 @@ android {
dependencies { dependencies {
implementation 'androidx.legacy:legacy-support-v4:1.0.0'
implementation 'androidx.legacy:legacy-support-v13:1.0.0'
// implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version" // implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
implementation 'androidx.appcompat:appcompat:1.0.0' implementation 'androidx.appcompat:appcompat:1.0.0'
// implementation "androidx.core:core:1.10.0" // 使
implementation 'androidx.fragment:fragment:1.3.6'
implementation 'androidx.constraintlayout:constraintlayout:2.1.4' implementation 'androidx.constraintlayout:constraintlayout:2.1.4'
implementation 'com.google.android.material:material:1.8.0' implementation 'com.google.android.material:material:1.8.0'
implementation project(path: ':common') implementation project(path: ':common')

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

@ -1,8 +1,6 @@
<?xml version="1.0" encoding="utf-8"?> <?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android" <manifest xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools" xmlns:tools="http://schemas.android.com/tools">
android:sharedUserId="com.xypower.mp"
tools:ignore="Deprecated">
<uses-permission android:name="android.permission.ACCESS_FINE_LOCATION" /> <uses-permission android:name="android.permission.ACCESS_FINE_LOCATION" />
<uses-permission android:name="android.permission.ACCESS_COARSE_LOCATION" /> <uses-permission android:name="android.permission.ACCESS_COARSE_LOCATION" />
@ -12,10 +10,9 @@
<uses-permission android:name="android.permission.CHANGE_WIFI_STATE" /> <uses-permission android:name="android.permission.CHANGE_WIFI_STATE" />
<uses-permission android:name="android.permission.RECEIVE_BOOT_COMPLETED" /> <uses-permission android:name="android.permission.RECEIVE_BOOT_COMPLETED" />
<uses-permission android:name="android.permission.RECORD_AUDIO" /> <uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.ACCESS_COARSE_LOCATION" />
<uses-permission android:name="android.permission.CAMERA" /> <uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.INTERNET" /> <uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.MANAGE_NETWORK_POLICY"
tools:ignore="ProtectedPermissions" />
<uses-permission <uses-permission
android:name="android.permission.READ_PRIVILEGED_PHONE_STATE" android:name="android.permission.READ_PRIVILEGED_PHONE_STATE"
tools:ignore="ProtectedPermissions" /> tools:ignore="ProtectedPermissions" />
@ -58,7 +55,6 @@
<uses-permission android:name="android.permission.SYSTEM_ALERT_WINDOW" /> <uses-permission android:name="android.permission.SYSTEM_ALERT_WINDOW" />
<uses-permission android:name="android.permission.WAKE_LOCK" /> <uses-permission android:name="android.permission.WAKE_LOCK" />
<uses-permission android:name="android.permission.DISABLE_KEYGUARD" /> <uses-permission android:name="android.permission.DISABLE_KEYGUARD" />
<uses-permission android:name="android.permission.USB_PERMISSION" />
<uses-permission <uses-permission
android:name="android.permission.DEVICE_POWER" android:name="android.permission.DEVICE_POWER"
tools:ignore="ProtectedPermissions" /> tools:ignore="ProtectedPermissions" />
@ -67,23 +63,14 @@
tools:ignore="ProtectedPermissions" /> tools:ignore="ProtectedPermissions" />
<uses-permission <uses-permission
android:name="android.permission.START_ACTIVITIES_FROM_BACKGROUND" android:name="android.permission.START_ACTIVITIES_FROM_BACKGROUND"
tools:ignore="ProtectedPermissions" /> tools:ignore="ProtectedPermissions" /> <!-- WiFi AP startTethering -->
<uses-permission android:name="android.permission.KILL_BACKGROUND_PROCESSES" />
<uses-permission <uses-permission
android:name="android.permission.TETHER_PRIVILEGED" android:name="android.permission.TETHER_PRIVILEGED"
tools:ignore="ProtectedPermissions" /> tools:ignore="ProtectedPermissions" />
<uses-permission android:name="android.permission.CONNECTIVITY_INTERNAL"
tools:ignore="ProtectedPermissions" />
<uses-feature android:name="android.hardware.camera" /> <uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="com.mediatek.camera.feature.mfnr" />
<uses-permission android:name="android.hardware.usb.accessory" /> <uses-permission android:name="android.hardware.usb.accessory" />
<uses-feature android:name="android.hardware.usb.host" />
<uses-feature
android:name="android.hardware.telephony"
android:required="false" />
<queries> <queries>
<provider <provider
@ -99,10 +86,6 @@
<intent> <intent>
<action android:name="android.media.action.STILL_IMAGE_CAMERA" /> <action android:name="android.media.action.STILL_IMAGE_CAMERA" />
</intent> </intent>
<intent>
<action android:name="android.intent.action.TIME_CHANGED" />
</intent>
<package android:name="com.xypower.mplive" />
</queries> </queries>
<application <application
@ -116,6 +99,14 @@
android:supportsRtl="true" android:supportsRtl="true"
android:theme="@style/Theme.MicroPhoto" android:theme="@style/Theme.MicroPhoto"
tools:targetApi="28"> tools:targetApi="28">
<service
android:name=".RtmpService"
android:enabled="true"
android:exported="true"
>
</service>
<activity <activity
android:name=".LogActivity" android:name=".LogActivity"
android:exported="false" android:exported="false"
@ -174,10 +165,11 @@
<category android:name="android.intent.category.default" /> <category android:name="android.intent.category.default" />
</intent-filter> </intent-filter>
</service> </service>
<service android:name=".FloatingWindow" />
<receiver <receiver
android:name=".MicroPhotoService$AlarmReceiver" android:name=".MicroPhotoService$AlarmReceiver"
android:exported="true" > android:exported="true" />
</receiver>
<receiver <receiver
android:name=".BootBroadcastReceiver" android:name=".BootBroadcastReceiver"
android:enabled="true" android:enabled="true"
@ -191,7 +183,17 @@
</intent-filter> </intent-filter>
</receiver> </receiver>
<receiver android:name=".NetworkChangedReceiver" /> <receiver android:name=".NetworkChangedReceiver" />
<receiver
android:name=".ScreenActionReceiver"
android:exported="true">
<intent-filter android:priority="90000">
<action android:name="android.intent.action.USER_PRESENT" />
<action android:name="android.intent.action.BOOT_COMPLETED" />
<action android:name="android.intent.action.SCREEN_ON" />
<action android:name="android.intent.action.USER_PRESENT" />
<action android:name="android.intent.action.USER_UNLOCKED" />
</intent-filter>
</receiver>
<receiver <receiver
android:name="com.xypower.common.UpdateReceiver" android:name="com.xypower.common.UpdateReceiver"
android:enabled="true" android:enabled="true"
@ -204,17 +206,11 @@
<data android:scheme="package" /> <data android:scheme="package" />
</intent-filter> </intent-filter>
</receiver> </receiver>
<receiver
android:name=".HeartBeatResponseReceiver"
android:enabled="true"
android:exported="true">
<intent-filter >
<action android:name="com.systemui.ACTION_HEARTBEAT_RESPONSE" />
</intent-filter>
</receiver>
<activity <activity
android:name=".MainActivity" android:name=".MainActivity"
android:exported="true" android:exported="true"
android:launchMode="singleTop"
android:screenOrientation="landscape"> android:screenOrientation="landscape">
<intent-filter> <intent-filter>
<action android:name="android.intent.action.MAIN" /> <action android:name="android.intent.action.MAIN" />

@ -1,227 +0,0 @@
#!/system/bin/sh
# ==============================================
# Configuration parameters - modify as needed
# ==============================================
ETH_IP="192.168.68.91" # Ethernet IP address
ETH_NETMASK="24" # Subnet mask (CIDR format)
ETH_NETWORK="192.168.68.0" # Network address
ETH_BROADCAST="192.168.68.255" # Broadcast address
ETH_GATEWAY="192.168.68.1" # Default gateway
ROUTE_TABLE="20" # Routing table number
MAX_INIT_WAIT=150 # Maximum seconds to wait for ethernet interface
MAX_UP_WAIT=10 # Maximum seconds to wait for interface to come UP
MAX_ROUTE_WAIT=5 # Maximum seconds to wait for routing rules
# For debugging only - comment out in production
# set -x
ANDROID_VERSION=$(getprop ro.build.version.release 2>/dev/null | cut -d '.' -f1)
# Record script start time
SCRIPT_START=$(date +%s)
# Cleanup function - handles unexpected interruptions
cleanup() {
echo "Script interrupted, cleaning up..." >&2
# Add additional cleanup code here if needed
exit 1
}
trap cleanup INT TERM
# Get script directory for finding tools like ethtool
SCRIPT_PATH="$0"
# Ensure path is absolute
case "$SCRIPT_PATH" in
/*) ;; # Already absolute path
*) SCRIPT_PATH="$PWD/$SCRIPT_PATH" ;;
esac
SCRIPT_DIR=$(dirname "$SCRIPT_PATH")
echo "Script directory detected as: $SCRIPT_DIR"
# Only configure rp_filter for eth0 interface
echo 0 > /proc/sys/net/ipv4/conf/eth0/rp_filter 2>/dev/null || true
# Wait for eth0 interface to appear
WAITED=0
while [ $WAITED -lt $MAX_INIT_WAIT ]; do
if [ -d "/sys/class/net/eth0" ]; then
echo "eth0 found after $WAITED seconds"
break
fi
echo "Wait eth0... ($WAITED/$MAX_INIT_WAIT)"
sleep 0.1
WAITED=$((WAITED+1))
done
# Check if eth0 exists
if ! [ -d "/sys/class/net/eth0" ]; then
echo "Error: eth0 not exists" >&2
exit 1
fi
# Check physical connection status
if [ -f "/sys/class/net/eth0/carrier" ]; then
CARRIER=$(cat /sys/class/net/eth0/carrier)
echo "Physical connection status: $CARRIER (1=connected, 0=disconnected)"
if [ "$CARRIER" != "1" ]; then
echo "Warning: Ethernet physical connection may have issues, please check the cable" >&2
fi
fi
# Clear previous configuration
/system/bin/ip link set eth0 down
/system/bin/ip addr flush dev eth0
/system/bin/ip route flush dev eth0
/system/bin/ip route flush table $ROUTE_TABLE
/system/bin/ip rule del to $ETH_NETWORK/$ETH_NETMASK 2>/dev/null || true
# Configure physical layer with ethtool (while interface is DOWN)
if [ -x "$SCRIPT_DIR/ethtool" ]; then
echo "Using ethtool from script directory: $SCRIPT_DIR/ethtool"
"$SCRIPT_DIR/ethtool" -s eth0 speed 10 duplex full autoneg off
# Try alternative path next
elif [ -x "/data/data/com.xypower.mpapp/files/ethtool" ]; then
echo "Configuring eth0 to 10Mbps full duplex..."
/data/data/com.xypower.mpapp/files/ethtool -s eth0 speed 10 duplex full autoneg off
else
echo "Warning: ethtool not found, falling back to sysfs configuration" >&2
# Try sysfs configuration as fallback
if [ -f "/sys/class/net/eth0/speed" ]; then
echo "off" > /sys/class/net/eth0/autoneg 2>/dev/null || true
echo "10" > /sys/class/net/eth0/speed 2>/dev/null || true
echo "full" > /sys/class/net/eth0/duplex 2>/dev/null || true
fi
fi
# ====================================================
# MTK Android 9 IP configuration with loss prevention
# ====================================================
# Configure IP address first while interface is DOWN
echo "Setting IP address while interface is DOWN..."
/system/bin/ip addr add $ETH_IP/$ETH_NETMASK broadcast $ETH_BROADCAST dev eth0
PRE_UP_IP=$(/system/bin/ip addr show eth0 | grep -c "inet $ETH_IP")
echo "IP configuration before UP: $PRE_UP_IP (1=configured, 0=missing)"
# Enable interface and wait for UP
echo "Bringing up interface..."
/system/bin/ip link set eth0 up
if [ "$ANDROID_VERSION" = "9" ]; then
sleep 3
else
# Use standard configuration for other devices
sleep 1
fi
# Check if IP was lost after interface UP (common issue on MTK devices)
POST_UP_IP=$(/system/bin/ip addr show eth0 | grep -c "inet $ETH_IP")
echo "IP configuration after UP: $POST_UP_IP (1=retained, 0=lost)"
# IP address lost detection and recovery
if [ "$PRE_UP_IP" = "1" ] && [ "$POST_UP_IP" = "0" ]; then
echo "Warning: IP address was lost after bringing interface up - MTK issue detected"
echo "Reapplying IP configuration..."
/system/bin/ip addr add $ETH_IP/$ETH_NETMASK broadcast $ETH_BROADCAST dev eth0
# Check if reapplied configuration worked
FIXED_IP=$(/system/bin/ip addr show eth0 | grep -c "inet $ETH_IP")
echo "IP reapplication result: $FIXED_IP (1=success, 0=still missing)"
# If standard method fails, try MTK-specific approaches
if [ "$FIXED_IP" = "0" ]; then
echo "Standard IP configuration failed, trying MTK-specific methods"
# Try ifconfig if available (works better on some MTK devices)
if command -v ifconfig >/dev/null 2>&1; then
echo "Using ifconfig method..."
ifconfig eth0 $ETH_IP netmask 255.255.255.0 up
sleep 1
fi
# Try Android's netd service if available
if [ -x "/system/bin/ndc" ]; then
echo "Using MTK netd service..."
/system/bin/ndc network interface setcfg eth0 $ETH_IP 255.255.255.0 up
sleep 1
fi
fi
fi
# Use loop to wait for interface UP instead of fixed sleep
WAITED=0
while [ $WAITED -lt $MAX_UP_WAIT ]; do
# Check both link status and IP configuration
IF_STATUS=$(/system/bin/ip link show eth0 | grep -c ",UP")
IP_STATUS=$(/system/bin/ip addr show eth0 | grep -c "inet $ETH_IP")
if [ "$IF_STATUS" = "1" ] && [ "$IP_STATUS" = "1" ]; then
echo "Interface is UP with correct IP after $WAITED seconds"
break
fi
echo "Waiting for interface UP with IP... ($WAITED/$MAX_UP_WAIT)"
# If interface is UP but IP is missing, reapply IP
if [ "$IF_STATUS" = "1" ] && [ "$IP_STATUS" = "0" ]; then
echo "Interface UP but IP missing, reapplying IP..."
/system/bin/ip addr add $ETH_IP/$ETH_NETMASK broadcast $ETH_BROADCAST dev eth0
fi
sleep 0.5
WAITED=$((WAITED+1))
done
# Final status check
FINAL_IF_STATUS=$(/system/bin/ip link show eth0 | grep -c ",UP")
FINAL_IP_STATUS=$(/system/bin/ip addr show eth0 | grep -c "inet $ETH_IP")
if [ "$FINAL_IF_STATUS" != "1" ] || [ "$FINAL_IP_STATUS" != "1" ]; then
echo "Warning: Failed to achieve stable interface state with IP" >&2
echo "Final interface status: $FINAL_IF_STATUS (1=UP, 0=DOWN)"
echo "Final IP status: $FINAL_IP_STATUS (1=configured, 0=missing)"
/system/bin/ip addr show eth0
else
echo "Successfully configured eth0 with IP $ETH_IP"
fi
# First add to main routing table
/system/bin/ip route add $ETH_NETWORK/$ETH_NETMASK dev eth0 proto static scope link
# Then add to specified routing table
/system/bin/ip route add $ETH_NETWORK/$ETH_NETMASK dev eth0 proto static scope link table $ROUTE_TABLE
ADD_ROUTE_STATUS=$?
if [ $ADD_ROUTE_STATUS -eq 0 ]; then
echo "Add route successfully"
else
echo "Failed to add route: $ADD_ROUTE_STATUS" >&2
fi
# Only clear ARP and neighbor cache for eth0
/system/bin/ip neigh flush dev eth0
# Add routing rules - only flush cache once after rule is added
/system/bin/ip rule add from all to $ETH_NETWORK/$ETH_NETMASK lookup $ROUTE_TABLE prio 1000
/system/bin/ip route flush cache dev eth0
# Only enable forwarding for eth0 interface
echo 1 > /proc/sys/net/ipv4/conf/eth0/forwarding 2>/dev/null || true
# Wait for routing rules to take effect - using loop check instead of fixed wait
WAITED=0
while [ $WAITED -lt $MAX_ROUTE_WAIT ]; do
if /system/bin/ip rule | grep -q "$ETH_NETWORK/$ETH_NETMASK"; then
echo "Routing rules are now effective after $WAITED seconds"
break
fi
echo "Waiting for routing rules to take effect... ($WAITED/$MAX_ROUTE_WAIT)"
sleep 0.5
WAITED=$((WAITED+1))
done
# Display execution time
SCRIPT_END=$(date +%s)
TOTAL_TIME=$((SCRIPT_END - SCRIPT_START))
echo "Total script execution time: $TOTAL_TIME seconds"
exit 0

Binary file not shown.

Binary file not shown.

@ -14,27 +14,6 @@ set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -ffunction-sections -fdata-sections -Wformat
set(CMAKE_CXX_FLAGS "${CMAKE_C_FLAGS}") set(CMAKE_CXX_FLAGS "${CMAKE_C_FLAGS}")
# SET_TARGET_PROPERTIES(microphoto PROPERTIES LINK_FLAGS "-Wl,-s,--gc-sections") # SET_TARGET_PROPERTIES(microphoto PROPERTIES LINK_FLAGS "-Wl,-s,--gc-sections")
add_definitions(-DUSING_ETHERNET)
if(ANDROID_ABI STREQUAL "armeabi-v7a")
add_definitions(-DUSING_N938)
elseif(ANDROID_ABI STREQUAL "arm64-v8a")
# add_definitions(-DUSING_N938)
# add_definitions(-DUSING_PTZ)
endif()
# OUTPUT_DBG_INFO:
add_definitions(-DOUTPUT_DBG_INFO)
# OUTPUT_SOCKET_DBG_INFO Depends ON OUTPUT_DBG_INFO
# TerminalService.cpp
# add_definitions(-DOUTPUT_SOCKET_DBG_INFO)
# OUTPUT_DB_DBG_INFO Depends ON OUTPUT_DBG_INFO
# Database.cpp
# add_definitions(-DOUTPUT_DB_DBG_INFO)
add_definitions(-DUSING_FFMPEG)
IF (CMAKE_BUILD_TYPE STREQUAL Debug) IF (CMAKE_BUILD_TYPE STREQUAL Debug)
ADD_DEFINITIONS(-D_DEBUG) ADD_DEFINITIONS(-D_DEBUG)
ELSE() ELSE()
@ -57,14 +36,23 @@ add_definitions(-DHAVE_STRING_H) # for memcpy in md5.c
# add_definitions(-DUSING_NRSEC_VPN) # add_definitions(-DUSING_NRSEC_VPN)
# add_definitions(-DUSING_CERT) # add_definitions(-DUSING_CERT)
# add_definitions(-DUSING_DOWSE) # add_definitions(-DUSING_DOWSE)
# OUTPUT_CAMERA_DBG_INFO: CARERA
# add_definitions(-DOUTPUT_CAMERA_DBG_INFO)
add_definitions(-DALIGN_HB_TIMER_TO_PHOTO) add_definitions(-DALIGN_HB_TIMER_TO_PHOTO)
add_definitions(-DENABLE_3V3_ALWAYS) add_definitions(-DENABLE_3V3_ALWAYS)
add_definitions(-DCURL_STATICLIB) add_definitions(-DCURL_STATICLIB)
add_definitions(-DUSING_HDRPLUS) add_definitions(-DUSING_HDRPLUS)
add_definitions(-DUSING_EXEC_HDRP=0) add_definitions(-DUSING_EXEC_HDRP=1)
#set(USING_EXEC_HDRP 1) set(USING_EXEC_HDRP 1)
if(ANDROID_ABI STREQUAL "armeabi-v7a")
add_definitions(-DUSING_N938)
elseif(ANDROID_ABI STREQUAL "arm64-v8a")
# add_definitions(-DUSING_N938)
add_definitions(-DUSING_PLZ)
endif()
# include_directories(${OpenCV_DIR}/include) # include_directories(${OpenCV_DIR}/include)
# add_library( lib_opencv SHARED IMPORTED ) # add_library( lib_opencv SHARED IMPORTED )
@ -95,8 +83,6 @@ set(ncnn_DIR ${NCNN_ROOT}/${ANDROID_ABI}/lib/cmake/ncnn)
find_package(ncnn REQUIRED) find_package(ncnn REQUIRED)
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/breakpad)
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/libcutils/include) include_directories(${CMAKE_CURRENT_SOURCE_DIR}/libcutils/include)
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/libutils/include) include_directories(${CMAKE_CURRENT_SOURCE_DIR}/libutils/include)
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/img_utils/include) include_directories(${CMAKE_CURRENT_SOURCE_DIR}/img_utils/include)
@ -150,19 +136,9 @@ include_directories(hdrplus2/${ANDROID_ABI})
include_directories(${HALIDE_ROOT}/${ANDROID_ABI}/include) include_directories(${HALIDE_ROOT}/${ANDROID_ABI}/include)
SET(ZLMEDIAKIT_LIBS "") SET(HDRPLUS_LIBS raw exiv2 exiv2-xmp expat lcms2 OpenMP::OpenMP_CXX)
SET(STREAMING_SRCS "")
add_definitions(-DDISABLE_RTTI)
# include_directories( ${HDRPLUS_ROOT}/${ANDROID_ABI}/include/ZLMediaKit )
# include_directories( ${HDRPLUS_ROOT}/${ANDROID_ABI}/include/ZLToolKit/src/ )
# SET(ZLMEDIAKIT_LIBS ${ZLMEDIAKIT_LIBS} zlmediakit zltoolkit)
SET(STREAMING_SRCS media/RTSPToMP4.cpp media/RTSPRecorder.cpp media/Streaming.cpp )
#SET(HDRPLUS_LIBS raw exiv2 exiv2-xmp expat lcms2 OpenMP::OpenMP_CXX) SET(HDRPLUS2_LIBS raw raw_r lcms2 tiff tiffxx jpeg hdrplus_pipeline)
#SET(HDRPLUS2_LIBS raw raw_r lcms2 tiff tiffxx jpeg hdrplus_pipeline)
SET(HDRPLUS_SOURCES SET(HDRPLUS_SOURCES
hdrplus/src/align.cpp hdrplus/src/align.cpp
@ -180,7 +156,6 @@ SET(HDRPLUS2_SOURCES
hdrplus2/src/InputSource.cpp hdrplus2/src/InputSource.cpp
hdrplus2/src/LibRaw2DngConverter.cpp hdrplus2/src/LibRaw2DngConverter.cpp
hdrplus2/${ANDROID_ABI}/hdrplus_pipeline.registration.cpp) hdrplus2/${ANDROID_ABI}/hdrplus_pipeline.registration.cpp)
SET(HDRPLUS2_SOURCES )
SET(YAMC_INC_DIR ${CMAKE_SOURCE_DIR}) SET(YAMC_INC_DIR ${CMAKE_SOURCE_DIR})
@ -190,6 +165,11 @@ SET(YAMC_INC_DIR ${CMAKE_SOURCE_DIR})
SET(JSONCPP_SRC_DIR ${CMAKE_CURRENT_SOURCE_DIR}/jsoncpp) SET(JSONCPP_SRC_DIR ${CMAKE_CURRENT_SOURCE_DIR}/jsoncpp)
SET(JSONCPP_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/jsoncpp/include) SET(JSONCPP_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/jsoncpp/include)
SET(SQLITE_SRC_DIR ${CMAKE_CURRENT_SOURCE_DIR}/sqlite)
SET(SQLITE_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/sqlite)
SET(BREAKPAD_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/breakpad)
SET(CAMERA2_ROOT_DIR ${CMAKE_CURRENT_SOURCE_DIR}/camera2) SET(CAMERA2_ROOT_DIR ${CMAKE_CURRENT_SOURCE_DIR}/camera2)
SET(FREETYPE_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/freetype) SET(FREETYPE_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/freetype)
@ -197,12 +177,66 @@ SET(FREETYPE_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/freetype)
# SET(EVPP_SRC_DIR ${EVPP_ROOT}/evpp) # SET(EVPP_SRC_DIR ${EVPP_ROOT}/evpp)
include_directories(${YAMC_INC_DIR}) include_directories(${YAMC_INC_DIR})
include_directories(${BREAKPAD_ROOT} ${BREAKPAD_ROOT}/common/android/include)
include_directories(${ASIO_ROOT}/include) include_directories(${ASIO_ROOT}/include)
# SET(SQLITE_SRC_DIR ${CMAKE_CURRENT_SOURCE_DIR}/sqlite) add_library( # Sets the name of the library.
# SET(SQLITE_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/sqlite) sqlite3
# add_library(sqlite3 STATIC ${SQLITE_SRC_DIR}/sqlite3.c )
# INCLUDE_DIRECTORIES(${SQLITE_INCLUDE_DIR}) # Sets the library as a shared library.
STATIC
# Provides a relative path to your source file(s).
${SQLITE_SRC_DIR}/sqlite3.c
)
INCLUDE_DIRECTORIES(${SQLITE_INCLUDE_DIR})
file(GLOB BREAKPAD_SOURCES_COMMON
native-lib.cpp
${BREAKPAD_ROOT}/client/linux/crash_generation/crash_generation_client.cc
${BREAKPAD_ROOT}/client/linux/dump_writer_common/thread_info.cc
${BREAKPAD_ROOT}/client/linux/dump_writer_common/ucontext_reader.cc
${BREAKPAD_ROOT}/client/linux/handler/exception_handler.cc
${BREAKPAD_ROOT}/client/linux/handler/minidump_descriptor.cc
${BREAKPAD_ROOT}/client/linux/log/log.cc
${BREAKPAD_ROOT}/client/linux/microdump_writer/microdump_writer.cc
${BREAKPAD_ROOT}/client/linux/minidump_writer/linux_dumper.cc
${BREAKPAD_ROOT}/client/linux/minidump_writer/linux_ptrace_dumper.cc
${BREAKPAD_ROOT}/client/linux/minidump_writer/minidump_writer.cc
${BREAKPAD_ROOT}/client/linux/minidump_writer/pe_file.cc
${BREAKPAD_ROOT}/client/minidump_file_writer.cc
${BREAKPAD_ROOT}/common/convert_UTF.cc
${BREAKPAD_ROOT}/common/md5.cc
${BREAKPAD_ROOT}/common/string_conversion.cc
${BREAKPAD_ROOT}/common/linux/elfutils.cc
${BREAKPAD_ROOT}/common/linux/file_id.cc
${BREAKPAD_ROOT}/common/linux/guid_creator.cc
${BREAKPAD_ROOT}/common/linux/linux_libc_support.cc
${BREAKPAD_ROOT}/common/linux/memory_mapped_file.cc
${BREAKPAD_ROOT}/common/linux/safe_readlink.cc
)
file(GLOB BREAKPAD_ASM_SOURCE ${BREAKPAD_ROOT}/common/linux/breakpad_getcontext.S)
set_property(SOURCE ${BREAKPAD_ROOT}/common/linux/breakpad_getcontext.S PROPERTY LANGUAGE C)
# set_source_files_properties(${BREAKPAD_ASM_SOURCE} PROPERTIES LANGUAGE C)
# Creates and names a library, sets it as either STATIC
# or SHARED, and provides the relative paths to its source code.
# You can define multiple libraries, and CMake builds them for you.
# Gradle automatically packages shared libraries with your APK.
add_library( # Sets the name of the library.
breakpad
# Sets the library as a shared library.
STATIC
# Provides a relative path to your source file(s).
${BREAKPAD_SOURCES_COMMON}
${BREAKPAD_ASM_SOURCE}
)
INCLUDE_DIRECTORIES(${JSONCPP_INCLUDE_DIR}) INCLUDE_DIRECTORIES(${JSONCPP_INCLUDE_DIR})
@ -296,8 +330,10 @@ include_directories(${TERM_CORE_ROOT})
add_library( # Sets the name of the library. add_library( # Sets the name of the library.
jsoncpp jsoncpp
# Sets the library as a shared library. # Sets the library as a shared library.
STATIC STATIC
# Provides a relative path to your source file(s). # Provides a relative path to your source file(s).
${JSONCPP_SOURCES} ${JSONCPP_SOURCES}
) )
@ -339,7 +375,6 @@ add_library( # Sets the name of the library.
GPIOControl.cpp GPIOControl.cpp
MicroPhoto.cpp MicroPhoto.cpp
PhoneDevice.cpp PhoneDevice.cpp
PtzController.cpp
# PhoneDevice2.cpp # PhoneDevice2.cpp
Camera.cpp Camera.cpp
Camera2Reader.cpp Camera2Reader.cpp
@ -353,12 +388,6 @@ add_library( # Sets the name of the library.
ncnn/yolov5ncnn.cpp ncnn/yolov5ncnn.cpp
netcamera/httpclient.cpp netcamera/httpclient.cpp
netcamera/VendorCtrl.cpp
netcamera/YuShiCtrl.cpp
netcamera/HangYuCtrl.cpp
netcamera/HikonCtrl.cpp
${STREAMING_SRCS}
#serial/WeatherComm.cpp #serial/WeatherComm.cpp
@ -378,11 +407,9 @@ add_library( # Sets the name of the library.
${TERM_CORE_ROOT}/SpecData_I1_JS.cpp ${TERM_CORE_ROOT}/SpecData_I1_JS.cpp
${TERM_CORE_ROOT}/SpecData_I1_HN.cpp ${TERM_CORE_ROOT}/SpecData_I1_HN.cpp
${TERM_CORE_ROOT}/SpecData_I1_HEN.cpp ${TERM_CORE_ROOT}/SpecData_I1_HEN.cpp
${TERM_CORE_ROOT}/SpecData_I1_HEN_TY.cpp
${TERM_CORE_ROOT}/SpecData_I1_HENZZ.cpp ${TERM_CORE_ROOT}/SpecData_I1_HENZZ.cpp
${TERM_CORE_ROOT}/SpecData_I1_SHX.cpp ${TERM_CORE_ROOT}/SpecData_I1_SHX.cpp
${TERM_CORE_ROOT}/SpecData_I1_NX.cpp ${TERM_CORE_ROOT}/SpecData_I1_NX.cpp
${TERM_CORE_ROOT}/SpecData_I1_SX_ZY.cpp
${TERM_CORE_ROOT}/SpecData_XY.cpp ${TERM_CORE_ROOT}/SpecData_XY.cpp
${TERM_CORE_ROOT}/SpecData_ZJ.cpp ${TERM_CORE_ROOT}/SpecData_ZJ.cpp
${TERM_CORE_ROOT}/SpecData_NW.cpp ${TERM_CORE_ROOT}/SpecData_NW.cpp
@ -400,18 +427,14 @@ add_library( # Sets the name of the library.
${TERM_CORE_ROOT}/Client/Terminal_AH.cpp ${TERM_CORE_ROOT}/Client/Terminal_AH.cpp
${TERM_CORE_ROOT}/Client/Terminal_HEN_ZZ.cpp ${TERM_CORE_ROOT}/Client/Terminal_HEN_ZZ.cpp
${TERM_CORE_ROOT}/Client/Terminal_HEN.cpp ${TERM_CORE_ROOT}/Client/Terminal_HEN.cpp
${TERM_CORE_ROOT}/Client/Terminal_HEN_TY.cpp
${TERM_CORE_ROOT}/Client/Terminal_SHX.cpp ${TERM_CORE_ROOT}/Client/Terminal_SHX.cpp
${TERM_CORE_ROOT}/Client/Terminal_JS.cpp ${TERM_CORE_ROOT}/Client/Terminal_JS.cpp
${TERM_CORE_ROOT}/Client/Terminal_NX.cpp ${TERM_CORE_ROOT}/Client/Terminal_NX.cpp
${TERM_CORE_ROOT}/Client/Terminal_SX_ZY.cpp
${TERM_CORE_ROOT}/Client/Terminal_ZJ.cpp ${TERM_CORE_ROOT}/Client/Terminal_ZJ.cpp
${TERM_CORE_ROOT}/Client/Terminal_NW.cpp ${TERM_CORE_ROOT}/Client/Terminal_NW.cpp
${TERM_CORE_ROOT}/Client/DataController.cpp
${TERM_CORE_ROOT}/Client/UpgradeReceiver.cpp ${TERM_CORE_ROOT}/Client/UpgradeReceiver.cpp
${TERM_CORE_ROOT}/Client/Database.cpp ${TERM_CORE_ROOT}/Client/Database.cpp
# ${TERM_CORE_ROOT}/Client/SimulatorDevice.cpp ${TERM_CORE_ROOT}/Client/SimulatorDevice.cpp
${TERM_CORE_ROOT}/Client/DataController.cpp
) )
@ -434,15 +457,53 @@ find_library( # Sets the name of the path variable.
target_link_libraries( # Specifies the target library. target_link_libraries( # Specifies the target library.
${PROJECT_NAME} ${PROJECT_NAME}
jsoncpp jsoncpp
freetype freetype
breakpad # breakpad
# Links the target library to the log library # Links the target library to the log library
# included in the NDK. # included in the NDK.
avcodec avfilter avformat avutil swresample swscale x264
${log-lib} ${log-lib}
android camera2ndk mediandk z curl android camera2ndk mediandk z curl
ncnn ${OpenCV_LIBS} sqlite3 ${HDRPLUS_LIBS_EMBED} ${ZLMEDIAKIT_LIBS}
ncnn ${OpenCV_LIBS} sqlite3 ${HDRPLUS_LIBS_EMBED}
) )
# set_target_properties(${PROJECT_NAME} PROPERTIES LINK_FLAGS_RELEASE "-strip-all") # set_target_properties(${PROJECT_NAME} PROPERTIES LINK_FLAGS_RELEASE "-strip-all")
####################################################################################
### RtmpSuck
####################################################################################
add_definitions(-DRTMPDUMP_VERSION=\"1.0\")
add_library( # Sets the name of the library.
rtmpdump
# Sets the library as a shared library.
SHARED
# Provides a relative path to your source file(s).
RtmpService.cpp
rtmp/thread.c
rtmp/rtmpsuck.c
)
target_link_libraries( # Specifies the target library.
rtmpdump
PUBLIC -fopenmp -static-openmp
rtmp
${log-lib}
android z
)
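For orientation: the CMakeLists.txt hunks above compile Google Breakpad's Linux client sources (exception_handler.cc, minidump_writer.cc, etc.) into a static `breakpad` library. A minimal sketch of how such an in-process crash handler is typically installed from native code, assuming the upstream Breakpad client API; the dump directory and the `InstallBreakpad` helper name are illustrative, not taken from this repository:

```cpp
#include "client/linux/handler/exception_handler.h"
#include "client/linux/handler/minidump_descriptor.h"

// Called by Breakpad after a crash dump has been written (or attempted).
static bool DumpCallback(const google_breakpad::MinidumpDescriptor& descriptor,
                         void* /*context*/, bool succeeded)
{
    // descriptor.path() is the .dmp file just written; log or upload it later.
    return succeeded;
}

// Install the in-process handler once at startup (e.g. from JNI_OnLoad).
// The dump directory below is an assumption for illustration only.
static google_breakpad::ExceptionHandler* InstallBreakpad()
{
    google_breakpad::MinidumpDescriptor descriptor("/sdcard/com.xypower.mpapp/tmp");
    return new google_breakpad::ExceptionHandler(descriptor,
                                                 /*filter=*/nullptr,
                                                 DumpCallback,
                                                 /*callback_context=*/nullptr,
                                                 /*install_handler=*/true,
                                                 /*server_fd=*/-1);
}
```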

@ -2509,8 +2509,8 @@ void DngCreator::writeInputStream(std::vector<uint8_t>& outStream,
uint64_t uOffset = static_cast<uint32_t>(offset); uint64_t uOffset = static_cast<uint32_t>(offset);
ALOGV("%s: nativeWriteInputStream called with: width=%u, height=%u, " ALOGV("%s: nativeWriteInputStream called with: width=%u, height=%u, "
"rowStride=%d, pixStride=%d, offset=%lld", __FUNCTION__, uWidth, "rowStride=%d, pixStride=%d, offset=%" PRId64, __FUNCTION__, uWidth,
uHeight, rowStride, pixStride, (int64_t)offset); uHeight, rowStride, pixStride, offset);
ByteVectorOutput out(outStream); ByteVectorOutput out(outStream);
// std::vector<uint8_t>& out = outStream; // std::vector<uint8_t>& out = outStream;
@ -2578,8 +2578,8 @@ void DngCreator::writeInputBuffer(std::vector<uint8_t>& outStream,
uint64_t uOffset = static_cast<uint32_t>(offset); uint64_t uOffset = static_cast<uint32_t>(offset);
ALOGV("%s: nativeWriteInputStream called with: width=%u, height=%u, " ALOGV("%s: nativeWriteInputStream called with: width=%u, height=%u, "
"rowStride=%d, pixStride=%d, offset=%lld", __FUNCTION__, uWidth, "rowStride=%d, pixStride=%d, offset=%" PRId64, __FUNCTION__, uWidth,
uHeight, rowStride, pixStride, (int64_t)offset); uHeight, rowStride, pixStride, offset);
ByteVectorOutput out(outStream); ByteVectorOutput out(outStream);
// std::vector<uint8_t>& out = outStream; // std::vector<uint8_t>& out = outStream;
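The DngCreator.cpp hunks above replace the "%lld" specifier (plus an int64_t cast) with the portable PRId64 macro. A minimal sketch of that idiom, assuming only the standard &lt;cinttypes&gt; header:

```cpp
#include <cinttypes>
#include <cstdio>

int main() {
    int64_t offset = 123456789012345LL;
    // PRId64 expands to the correct printf conversion for int64_t on this platform.
    std::printf("offset=%" PRId64 "\n", offset);
    return 0;
}
```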

@ -23,67 +23,30 @@
#define IOT_PARAM_READ 0xAF #define IOT_PARAM_READ 0xAF
std::mutex GpioControl::m_locker; std::mutex GpioControl::m_locker;
std::mutex GpioControl::m_gpioLocker; CSemaphore GpioControl::m_semaphore;
std::vector<GpioControl::ITEM> GpioControl::m_items; std::vector<GpioControl::ITEM> GpioControl::m_items;
std::thread GpioControl::m_thread;
bool GpioControl::m_exitSignal = false;
bool GpioControl::m_cameraPowerStatus = false; bool GpioControl::m_cameraPowerStatus = false;
#define ENABLE_GPIO_TRACING
#ifdef ENABLE_GPIO_TRACING
class GpioDebugLogger
{
public:
GpioDebugLogger(int cmd, int value)
{
m_startTime = GetMicroTimeStamp();
m_path = std::string("/sdcard/com.xypower.mpapp/tmp/") + std::to_string(cmd) + std::string("_") + std::to_string(m_startTime) + "_val." + std::to_string(value);
CreateEmptyFile(m_path + ".enter");
}
GpioDebugLogger(int cmd)
{
m_startTime = GetMicroTimeStamp();
m_path = std::string("/sdcard/com.xypower.mpapp/tmp/") + std::to_string(cmd) + std::string("_") + std::to_string(m_startTime) + "_get";
CreateEmptyFile(m_path + ".enter");
}
~GpioDebugLogger()
{
uint64_t ts = (GetMicroTimeStamp() - m_startTime);
if (ts > 1000)
{
CreateEmptyFile(m_path + ".leave." + std::to_string(ts));
}
else
{
std::string path = m_path + ".enter";
std::remove(path.c_str());
}
}
private:
std::string m_path;
uint64_t m_startTime;
};
#endif
size_t GpioControl::turnOnImpl(const IOT_PARAM& param) size_t GpioControl::turnOnImpl(const IOT_PARAM& param)
{ {
size_t oldRef = 0;
size_t references = 1; size_t references = 1;
std::vector<ITEM>::iterator it; std::vector<ITEM>::iterator it;
int res = 0; int res = 0;
int fd = -1; int fd = -1;
time_t now = time(NULL); time_t now = time(NULL);
// check res??? fd = open(GPIO_NODE_MP, O_RDONLY);
if( fd > 0 )
{
res = ioctl(fd, IOT_PARAM_WRITE, &param);
close(fd);
// check res???
for (it = m_items.begin(); it != m_items.end(); ++it) for (it = m_items.begin(); it != m_items.end(); ++it)
{ {
if (it->cmd == param.cmd) if (it->cmd == param.cmd)
{ {
oldRef = it->references;
it->references++; it->references++;
// it->closeTime = 0; // it->closeTime = 0;
references = it->references; references = it->references;
@ -95,36 +58,15 @@ size_t GpioControl::turnOnImpl(const IOT_PARAM& param)
} }
if (it == m_items.end()) if (it == m_items.end())
{ {
oldRef = 0; ITEM item = {param.cmd, references, 0, 0, now};
ITEM item = {param.cmd, references, now};
m_items.push_back(item); m_items.push_back(item);
SetCamerastatus(param.cmd, true); SetCamerastatus(param.cmd, true);
} }
if (oldRef == 0/* || param.cmd != CMD_SET_3V3_PWR_EN*/)
{
#ifdef ENABLE_GPIO_TRACING
GpioDebugLogger logger(param.cmd, param.value);
#endif
m_gpioLocker.lock();
fd = open(GPIO_NODE_MP, O_RDONLY);
if( fd > 0 )
{
res = ioctl(fd, IOT_PARAM_WRITE, &param);
close(fd);
#ifdef OUTPUT_DBG_INFO
// int realVal = getInt(param.cmd);
// XYLOG(XYLOG_SEVERITY_INFO, "setInt cmd=%d,value=%d,result=%d RealVal=%d",param.cmd, param.value, param.result/*, realVal*/);
XYLOG(XYLOG_SEVERITY_DEBUG, "setInt cmd=%d,value=%d,result=%d",param.cmd, param.value, param.result);
#endif
} }
m_gpioLocker.unlock();
#ifdef _DEBUG #ifdef _DEBUG
ALOGI("PWR TurnOn cmd=%d,result=%d ref=%u\r\n",param.cmd, param.result, (uint32_t)references); ALOGI("PWR TurnOn cmd=%d,result=%d ref=%u\r\n",param.cmd, param.result, (uint32_t)references);
#endif #endif
std::this_thread::sleep_for(std::chrono::milliseconds(100)); std::this_thread::sleep_for(std::chrono::milliseconds(100));
}
return references; return references;
} }
@ -134,30 +76,19 @@ void GpioControl::setInt(int cmd, int value)
// param.cmd = cmd; // param.cmd = cmd;
// param.value = value; // param.value = value;
#ifdef ENABLE_GPIO_TRACING
GpioDebugLogger logger(cmd, value);
#endif
m_gpioLocker.lock();
int fd = open(GPIO_NODE_MP, O_RDONLY); int fd = open(GPIO_NODE_MP, O_RDONLY);
if (fd > 0) if (fd > 0)
{ {
int res = ioctl(fd, IOT_PARAM_WRITE, &param); int res = ioctl(fd, IOT_PARAM_WRITE, &param);
close(fd); #ifdef _DEBUG
#ifdef OUTPUT_DBG_INFO ALOGI("setInt cmd=%d,value=%d,result=%d\r\n",param.cmd, param.value, param.result);
// int realVal = getInt(param.cmd);
// XYLOG(XYLOG_SEVERITY_INFO, "setInt cmd=%d,value=%d,result=%d RealVal=%d",param.cmd, value, param.result/*, realVal*/);
XYLOG(XYLOG_SEVERITY_DEBUG, "setInt cmd=%d,value=%d,result=%d",param.cmd, value, param.result);
#endif #endif
close(fd);
} }
m_gpioLocker.unlock();
} }
int GpioControl::getInt(int cmd) int GpioControl::getInt(int cmd)
{ {
#ifdef ENABLE_GPIO_TRACING
GpioDebugLogger logger(cmd);
#endif
m_gpioLocker.lock();
int fd = open(GPIO_NODE_MP, O_RDONLY); int fd = open(GPIO_NODE_MP, O_RDONLY);
// LOGE("get_int fd=%d,cmd=%d\r\n",fd, cmd); // LOGE("get_int fd=%d,cmd=%d\r\n",fd, cmd);
if( fd > 0 ) if( fd > 0 )
@ -169,34 +100,29 @@ int GpioControl::getInt(int cmd)
ALOGI("getInt cmd=%d,value=%d,result=%d",param.cmd, param.value, param.result); ALOGI("getInt cmd=%d,value=%d,result=%d",param.cmd, param.value, param.result);
#endif #endif
close(fd); close(fd);
m_gpioLocker.unlock();
return param.value; return param.value;
} }
m_gpioLocker.unlock();
return -1; return -1;
} }
void GpioControl::setLong(int cmd, long value) void GpioControl::setLong(int cmd, long value)
{ {
int fd = open(GPIO_NODE_MP, O_RDONLY);
IOT_PARAM param; IOT_PARAM param;
param.cmd = cmd; param.cmd = cmd;
param.value2 = value; param.value2 = value;
// LOGE("set_long fd=%d,cmd=%d,value2=%ld\r\n",fd, param.cmd, param.value2); // LOGE("set_long fd=%d,cmd=%d,value2=%ld\r\n",fd, param.cmd, param.value2);
m_gpioLocker.lock();
int fd = open(GPIO_NODE_MP, O_RDONLY);
if( fd > 0 ) if( fd > 0 )
{ {
ioctl(fd, IOT_PARAM_WRITE, &param); ioctl(fd, IOT_PARAM_WRITE, &param);
// LOGE("set_long22 cmd=%d,value2=%ld,result=%d\r\n",param.cmd, param.value2, param.result); // LOGE("set_long22 cmd=%d,value2=%ld,result=%d\r\n",param.cmd, param.value2, param.result);
close(fd); close(fd);
} }
m_gpioLocker.unlock();
} }
long GpioControl::getLong(int cmd) long GpioControl::getLong(int cmd)
{ {
m_gpioLocker.lock();
int fd = open(GPIO_NODE_MP, O_RDONLY); int fd = open(GPIO_NODE_MP, O_RDONLY);
// LOGE("get_long fd=%d,cmd=%d\r\n",fd, cmd); // LOGE("get_long fd=%d,cmd=%d\r\n",fd, cmd);
if( fd > 0 ) if( fd > 0 )
@ -206,37 +132,32 @@ long GpioControl::getLong(int cmd)
ioctl(fd, IOT_PARAM_READ, &param); ioctl(fd, IOT_PARAM_READ, &param);
// LOGE("get_long22 cmd=%d,value2=%ld,result=%d\r\n",param.cmd, param.value2, param.result); // LOGE("get_long22 cmd=%d,value2=%ld,result=%d\r\n",param.cmd, param.value2, param.result);
close(fd); close(fd);
m_gpioLocker.unlock();
return param.value2; return param.value2;
} }
m_gpioLocker.unlock();
return -1; return -1;
} }
void GpioControl::setString(int cmd, const std::string& value) void GpioControl::setString(int cmd, const std::string& value)
{ {
IOT_PARAM param; IOT_PARAM param;
int fd = open(GPIO_NODE_MP, O_RDONLY);
int len = MAX_STRING_LEN < value.size() ? MAX_STRING_LEN : value.size();
param.cmd = cmd; param.cmd = cmd;
memset(param.str, 0, MAX_STRING_LEN); memset(param.str, 0, MAX_STRING_LEN);
int len = MAX_STRING_LEN < value.size() ? MAX_STRING_LEN : value.size();
memcpy(param.str, value.c_str(), len); memcpy(param.str, value.c_str(), len);
// LOGE("set_string fd=%d,cmd=%d,str=%s\r\n",fd, param.cmd, param.str); // LOGE("set_string fd=%d,cmd=%d,str=%s\r\n",fd, param.cmd, param.str);
m_gpioLocker.lock();
int fd = open(GPIO_NODE_MP, O_RDONLY);
if( fd > 0 ) if( fd > 0 )
{ {
ioctl(fd, IOT_PARAM_WRITE, &param); ioctl(fd, IOT_PARAM_WRITE, &param);
// LOGE("set_string22 cmd=%d,str=%s,result=%d\r\n",param.cmd, param.str, param.result); // LOGE("set_string22 cmd=%d,str=%s,result=%d\r\n",param.cmd, param.str, param.result);
close(fd); close(fd);
} }
m_gpioLocker.unlock();
return; return;
} }
std::string GpioControl::getString(int cmd) std::string GpioControl::getString(int cmd)
{ {
m_gpioLocker.lock();
int fd = open(GPIO_NODE_MP, O_RDONLY); int fd = open(GPIO_NODE_MP, O_RDONLY);
// LOGE("get_string fd=%d,cmd=%d\r\n",fd, cmd); // LOGE("get_string fd=%d,cmd=%d\r\n",fd, cmd);
if( fd > 0 ) if( fd > 0 )
@ -246,10 +167,8 @@ std::string GpioControl::getString(int cmd)
ioctl(fd, IOT_PARAM_READ, &param); ioctl(fd, IOT_PARAM_READ, &param);
// LOGE("get_string22 cmd=%d,str=%s,result=%d\r\n",param.cmd, param.str, param.result); // LOGE("get_string22 cmd=%d,str=%s,result=%d\r\n",param.cmd, param.str, param.result);
close(fd); close(fd);
m_gpioLocker.unlock();
return std::string(param.str); return std::string(param.str);
} }
m_gpioLocker.unlock();
return ""; return "";
} }
@ -277,10 +196,6 @@ size_t GpioControl::TurnOn(const std::vector<int>& cmds)
m_locker.lock(); m_locker.lock();
for (it = cmds.cbegin(); it != cmds.cend(); ++it) for (it = cmds.cbegin(); it != cmds.cend(); ++it)
{ {
if (*it == 0)
{
continue;
}
param.cmd = *it; param.cmd = *it;
turnOnImpl(param); turnOnImpl(param);
} }
@ -299,17 +214,14 @@ size_t GpioControl::TurnOffImmediately(int cmd)
{ {
if (it->cmd == cmd) if (it->cmd == cmd)
{ {
if (it->references > 0) ref = it->references;
{ it->closeCmds++;
it->references = 0; it->closeTime = ts;
SetCamerastatus(cmd, false);
setInt(it->cmd, 0);
it->openTime = 0;
}
break; break;
} }
} }
m_locker.unlock(); m_locker.unlock();
m_semaphore.release();
#ifdef _DEBUG #ifdef _DEBUG
ALOGI("PWR TurnOffNow cmd=%d ref=%u", cmd, (uint32_t)ref); ALOGI("PWR TurnOffNow cmd=%d ref=%u", cmd, (uint32_t)ref);
#endif #endif
@ -325,36 +237,22 @@ size_t GpioControl::TurnOff(int cmd, uint32_t delayedCloseTime/* = 0*/)
} }
size_t ref = 0; size_t ref = 0;
std::vector<ITEM>::iterator it; std::vector<ITEM>::iterator it;
if (delayedCloseTime > 0)
{
std::shared_ptr<PowerControl> powerCtrl = std::make_shared<PowerControl>(cmd);
std::thread th([delayedCloseTime, powerCtrl]() mutable {
std::this_thread::sleep_for(std::chrono::seconds(delayedCloseTime));
powerCtrl.reset();
});
th.detach();
}
m_locker.lock(); m_locker.lock();
for (it = m_items.begin(); it != m_items.end(); ++it) for (it = m_items.begin(); it != m_items.end(); ++it)
{ {
if (it->cmd == cmd) if (it->cmd == cmd)
{ {
if (it->references > 0) ref = it->references;
it->closeCmds++;
if (ts > it->closeTime)
{ {
it->references--; it->closeTime = ts;
if (it->references == 0)
{
SetCamerastatus(cmd, false);
setInt(it->cmd, 0);
it->openTime = 0;
}
} }
break; break;
} }
} }
m_locker.unlock(); m_locker.unlock();
m_semaphore.release();
#ifdef _DEBUG #ifdef _DEBUG
ALOGI("PWR TurnOff cmd=%d ref=%u", cmd, (uint32_t)ref); ALOGI("PWR TurnOff cmd=%d ref=%u", cmd, (uint32_t)ref);
#endif #endif
@ -370,17 +268,6 @@ size_t GpioControl::TurnOff(const std::vector<int>& cmds, uint32_t delayedCloseT
} }
std::vector<ITEM>::iterator it; std::vector<ITEM>::iterator it;
std::vector<int>::const_reverse_iterator itCmd; std::vector<int>::const_reverse_iterator itCmd;
if (delayedCloseTime > 0)
{
std::shared_ptr<PowerControl> powerCtrl = std::make_shared<PowerControl>(cmds);
std::thread th([delayedCloseTime, powerCtrl]() mutable {
std::this_thread::sleep_for(std::chrono::seconds(delayedCloseTime));
powerCtrl.reset();
});
th.detach();
}
m_locker.lock(); m_locker.lock();
// turnOnImpl(param); // turnOnImpl(param);
for (itCmd = cmds.crbegin(); itCmd != cmds.crend(); ++itCmd) for (itCmd = cmds.crbegin(); itCmd != cmds.crend(); ++itCmd)
@ -389,41 +276,25 @@ size_t GpioControl::TurnOff(const std::vector<int>& cmds, uint32_t delayedCloseT
{ {
if (it->cmd == *itCmd) if (it->cmd == *itCmd)
{ {
if (it->references > 0) it->closeCmds++;
if (ts > it->closeTime)
{ {
it->references--; it->closeTime = ts;
if (it->references == 0)
{
SetCamerastatus(it->cmd, false);
setInt(it->cmd, 0);
it->openTime = 0;
}
} }
break; break;
} }
} }
} }
m_locker.unlock(); m_locker.unlock();
m_semaphore.release();
return 0; return 0;
} }
size_t GpioControl::TurnOff(const std::vector<std::pair<int, uint32_t> >& cmds) size_t GpioControl::TurnOff(const std::vector<std::pair<int, uint32_t> >& cmds)
{ {
for (auto itCmd = cmds.cbegin(); itCmd != cmds.end(); ++itCmd) time_t ts = time(NULL);
{ time_t ts2;
if (itCmd->second > 0)
{
uint32_t delayedCloseTime = itCmd->second;
std::shared_ptr<PowerControl> powerCtrl = std::make_shared<PowerControl>(itCmd->first);
std::thread th([delayedCloseTime, powerCtrl]() mutable {
std::this_thread::sleep_for(std::chrono::seconds(delayedCloseTime));
powerCtrl.reset();
});
th.detach();
}
}
std::vector<ITEM>::iterator it; std::vector<ITEM>::iterator it;
std::vector<std::pair<int, uint32_t> >::const_iterator itCmd; std::vector<std::pair<int, uint32_t> >::const_iterator itCmd;
m_locker.lock(); m_locker.lock();
@ -433,14 +304,14 @@ size_t GpioControl::TurnOff(const std::vector<std::pair<int, uint32_t> >& cmds)
{ {
if (it->cmd == itCmd->first) if (it->cmd == itCmd->first)
{ {
if (it->references > 0)
it->closeCmds++;
if (itCmd->second != 0)
{ {
it->references--; ts2 = itCmd->second + ts;
if (it->references == 0) if (ts2 > it->closeTime)
{ {
SetCamerastatus(it->cmd, false); it->closeTime = ts2;
setInt(it->cmd, 0);
it->openTime = 0;
} }
} }
break; break;
@ -448,6 +319,7 @@ size_t GpioControl::TurnOff(const std::vector<std::pair<int, uint32_t> >& cmds)
} }
} }
m_locker.unlock(); m_locker.unlock();
m_semaphore.release();
return 0; return 0;
} }
@ -457,7 +329,7 @@ bool GpioControl::SetCamerastatus(int cmd, bool status)
if(cmd == CMD_SET_PIC1_POWER) if(cmd == CMD_SET_PIC1_POWER)
m_cameraPowerStatus = status; m_cameraPowerStatus = status;
#endif #endif
#ifdef USING_PTZ #ifdef USING_PLZ
if(cmd == CMD_SET_PTZ_PWR_ENABLE) if(cmd == CMD_SET_PTZ_PWR_ENABLE)
{ {
m_cameraPowerStatus = status; m_cameraPowerStatus = status;
@ -473,18 +345,18 @@ bool GpioControl::GetCamerastatus()
bool GpioControl::GetSelftestStatus(time_t wait_time) bool GpioControl::GetSelftestStatus(time_t wait_time)
{ {
int cmd = 0; int cmd;
#ifdef USING_N938 #ifdef USING_N938
cmd = CMD_SET_PIC1_POWER; cmd = CMD_SET_PIC1_POWER;
#endif #endif
#ifdef USING_PTZ #ifdef USING_PLZ
cmd = CMD_SET_PTZ_PWR_ENABLE; cmd = CMD_SET_PTZ_PWR_ENABLE;
#endif #endif
time_t now = time(NULL); time_t now = time(NULL);
std::vector<ITEM>::iterator it; std::vector<ITEM>::iterator it;
for (it = m_items.begin(); it != m_items.end(); ++it) for (it = m_items.begin(); it != m_items.end(); ++it)
{ {
if (it->cmd == cmd && it->references > 0 && it->openTime!=0 && (now - it->openTime >= wait_time)) if (it->cmd == cmd && it->openTime!=0 && (now - it->openTime >= wait_time))
{ {
return true; // self-test complete return true; // self-test complete
} }
@ -493,26 +365,115 @@ bool GpioControl::GetSelftestStatus(time_t wait_time)
} }
time_t GpioControl::GetSelfTestRemain(time_t wait_time) void GpioControl::PowerControlThreadProc()
{ {
int cmd = 0; time_t ts = 0;
#ifdef USING_N938
cmd = CMD_SET_PIC1_POWER;
#endif
#ifdef USING_PTZ
cmd = CMD_SET_PTZ_PWR_ENABLE;
#endif
time_t now = time(NULL);
std::vector<ITEM>::iterator it; std::vector<ITEM>::iterator it;
std::vector<int> items;
time_t minDelayTime = 0;
time_t delayTime = 0;
int fd = -1;
int res = -1;
m_cameraPowerStatus = 0;
while(1)
{
// Check if there is close cmd
ts = time(NULL);
minDelayTime = std::numeric_limits<time_t>::max();
m_locker.lock();
for (it = m_items.begin(); it != m_items.end(); ++it) for (it = m_items.begin(); it != m_items.end(); ++it)
{ {
if (it->cmd == cmd && it->references > 0) if (it->references == 0 && it->closeCmds == 0 && it->closeTime == 0)
{ {
time_t remaintime = (now - it->openTime); #ifdef _DEBUG
remaintime = (wait_time > remaintime) ? (wait_time - remaintime) : 0; ALOGI("PWR THREAD cmd=%d ref=%u closeCmds=%u", it->cmd, (uint32_t)it->references, (uint32_t)it->closeCmds);
return remaintime; // self-test complete #endif
continue;
}
if (it->closeCmds > 0)
{
if (it->references <= it->closeCmds)
{
it->references = 0;
}
else
{
it->references -= it->closeCmds;
if(it->references < 0)
it->references = 0;
}
it->closeCmds = 0;
}
if (it->references == 0)
{
// Should turn off the power
if ((it->closeTime == 0) || (it->closeTime <= ts))
{
// close it directly
setInt(it->cmd, 0);
it->closeTime = 0;
it->openTime = 0;
#ifdef _DEBUG
ALOGI("PWR THREAD DO TurnOff cmd=%d", it->cmd);
#endif
SetCamerastatus(it->cmd, false);
}
else
{
// Check Time
delayTime = ts - it->closeTime;
if (delayTime < minDelayTime)
{
minDelayTime = delayTime;
}
}
}
#ifdef _DEBUG
ALOGI("PWR THREAD cmd=%d ref=%u closeCmds=%u", it->cmd, (uint32_t)it->references, (uint32_t)it->closeCmds);
#endif
}
m_locker.unlock();
if (minDelayTime < std::numeric_limits<time_t>::max())
{
m_semaphore.try_acquire_for(std::chrono::seconds(1));
}
else
{
m_semaphore.acquire();
}
if (m_exitSignal)
{
break;
} }
} }
return 0;
} }
bool GpioControl::Startup()
{
// if (m_thread.)
m_exitSignal = false;
m_thread = std::thread(PowerControlThreadProc);
#ifdef _DEBUG
pthread_t nativeHandle = m_thread.native_handle();
pthread_setname_np(nativeHandle, "gpioclose");
#endif
return true;
}
void GpioControl::Stop()
{
// Notify
m_exitSignal = true;
m_semaphore.release();
m_thread.detach();
}
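The GpioControl.cpp changes above drop the per-call delay threads and the m_gpioLocker serialization in favour of a single worker: TurnOff() now only records closeCmds/closeTime and releases a semaphore, and PowerControlThreadProc() drains those requests and switches a rail off once its delay has expired. Below is a compilable sketch of that pattern, simplified from the diff: it uses std::condition_variable in place of the project's CSemaphore, printf in place of the GPIO ioctl done by setInt(), and the names PowerQueue/Item are illustrative only.

```cpp
#include <chrono>
#include <condition_variable>
#include <cstdio>
#include <ctime>
#include <mutex>
#include <thread>
#include <vector>

// Simplified stand-in for GpioControl::ITEM: one power rail per command id.
struct Item {
    int    cmd;
    size_t references;  // users currently holding the rail on
    size_t closeCmds;   // pending TurnOff requests, folded in by the worker
    time_t closeTime;   // earliest time the rail may actually be switched off
};

class PowerQueue {
public:
    void turnOn(int cmd) {
        std::lock_guard<std::mutex> lk(m_lock);
        for (auto &it : m_items) {
            if (it.cmd == cmd) { ++it.references; return; }
        }
        m_items.push_back(Item{cmd, 1, 0, 0});
        std::printf("power ON  cmd=%d\n", cmd);       // stand-in for setInt(cmd, 1)
    }

    // Ask for a power-off; the worker applies it once delaySeconds have passed.
    void turnOff(int cmd, uint32_t delaySeconds = 0) {
        {
            std::lock_guard<std::mutex> lk(m_lock);
            for (auto &it : m_items) {
                if (it.cmd == cmd) {
                    ++it.closeCmds;
                    time_t ts = time(nullptr) + static_cast<time_t>(delaySeconds);
                    if (ts > it.closeTime) it.closeTime = ts;
                    break;
                }
            }
        }
        m_wake.notify_one();                           // plays the role of m_semaphore.release()
    }

    void start() { m_exit = false; m_thread = std::thread(&PowerQueue::worker, this); }
    void stop() {
        { std::lock_guard<std::mutex> lk(m_lock); m_exit = true; }
        m_wake.notify_one();
        m_thread.join();
    }

private:
    void worker() {
        std::unique_lock<std::mutex> lk(m_lock);
        while (!m_exit) {
            bool pending = false;
            time_t now = time(nullptr);
            for (auto &it : m_items) {
                // Fold queued close requests into the reference count.
                if (it.closeCmds > 0) {
                    it.references = (it.references > it.closeCmds) ? it.references - it.closeCmds : 0;
                    it.closeCmds = 0;
                }
                if (it.references == 0 && it.closeTime != 0) {
                    if (it.closeTime <= now) {
                        std::printf("power OFF cmd=%d\n", it.cmd);  // stand-in for setInt(cmd, 0)
                        it.closeTime = 0;
                    } else {
                        pending = true;                // a delayed close is still in the future
                    }
                }
            }
            if (pending)
                m_wake.wait_for(lk, std::chrono::seconds(1));      // re-check once a second
            else
                m_wake.wait(lk);                                   // sleep until the next request
        }
    }

    std::mutex m_lock;
    std::condition_variable m_wake;
    std::vector<Item> m_items;
    std::thread m_thread;
    bool m_exit = false;
};

int main() {
    PowerQueue q;
    q.start();
    q.turnOn(132);        // e.g. a 3V3 rail
    q.turnOff(132, 2);    // request power-off roughly 2 seconds from now
    std::this_thread::sleep_for(std::chrono::seconds(3));
    q.stop();
}
```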

@ -13,11 +13,10 @@
#include <utility> #include <utility>
#include <SemaphoreEx.h> #include <SemaphoreEx.h>
#include <LogThread.h>
#ifndef USING_N938 #ifndef USING_N938
#ifndef USING_PTZ // MicroPhoto #ifndef USING_PLZ // MicroPhoto
#define CMD_GET_LIGHT_ADC 101 #define CMD_GET_LIGHT_ADC 101
#define CMD_SET_LIGHT_ADC 102 #define CMD_SET_LIGHT_ADC 102
@ -44,7 +43,6 @@
#define CMD_SET_PWM_BEE_STATE 126 // Removed #define CMD_SET_PWM_BEE_STATE 126 // Removed
#define CMD_SET_ALM_MODE 128 // Removed #define CMD_SET_ALM_MODE 128 // Removed
#define CMD_SET_SYSTEM_RESET 202 #define CMD_SET_SYSTEM_RESET 202
#define CMD_SET_SYSTEM_RESET2 203
#define CMD_SET_485_EN_STATE 131 #define CMD_SET_485_EN_STATE 131
#define CMD_SET_12V_EN_STATE 133 #define CMD_SET_12V_EN_STATE 133
#if 1 #if 1
@ -52,28 +50,15 @@
#define CMD_SET_3V3_PWR_EN 132 #define CMD_SET_3V3_PWR_EN 132
#endif #endif
#define CMD_GET_CAMERA_STATUS 310 #else // defined(USING_PLZ)
#define CMD_SET_MADA_INIT_STATUS 312
#define CMD_SET_MADA_CLOSE_STATUS 313
#define CMD_SET_MADA_REG 314
#define CMD_GET_MADA_REG 315
#define CMD_SET_INIT_STATUS 401
#define CMD_SET_5V_PWR_ENABLE 517
#define CMD_SET_NEW_OTG_STATE 507
#else // defined(USING_PTZ)
#define CMD_SET_OTG_STATE 107 #define CMD_SET_OTG_STATE 107
#define CMD_GET_OTG_STATE 108 #define CMD_GET_OTG_STATE 108
#define CMD_SET_SPI_POWER 129 #define CMD_SET_SPI_POWER 129
#define CMD_SET_MADA_MOVE_STATUS 311
#define CMD_SET_12V_EN_STATE 0 // TO BE ADDED #define CMD_SET_12V_EN_STATE 0 // TO BE ADDED
#define CMD_SET_SYSTEM_RESET 202 #define CMD_SET_SYSTEM_RESET 202
#define CMD_SET_SYSTEM_RESET2 203
#define CMD_GET_LIGHT_ADC 101 #define CMD_GET_LIGHT_ADC 101
#define CMD_SET_LIGHT_ADC 102 #define CMD_SET_LIGHT_ADC 102
#define CMD_GET_CHARGING_BUS_VOLTAGE_STATE 112 #define CMD_GET_CHARGING_BUS_VOLTAGE_STATE 112
@ -82,9 +67,9 @@
#define CMD_SET_SPI_BITS_PER_WORD 0 // TO BE ADDED #define CMD_SET_SPI_BITS_PER_WORD 0 // TO BE ADDED
#define CMD_SET_SPI_MAXSPEEDHZ 0 // TO BE ADDED #define CMD_SET_SPI_MAXSPEEDHZ 0 // TO BE ADDED
#define CMD_SET_485_ENABLE 131 #define CMD_SET_485_ENABLE 512
#define CMD_SET_3V3_PWR_EN 132 #define CMD_SET_3V3_PWR_EN 516
// #define CMD_SET_5V_PWR_ENABLE 517 #define CMD_SET_5V_PWR_ENABLE 517
#define CMD_SET_SENSOR_ENABLE 504 #define CMD_SET_SENSOR_ENABLE 504
#define CMD_SET_SENSOR_PWR_ENABLE 505 #define CMD_SET_SENSOR_PWR_ENABLE 505
#define CMD_SET_SENSOR2_ENABLE 506 #define CMD_SET_SENSOR2_ENABLE 506
@ -111,31 +96,20 @@
#define CMD_SET_LIGHT1_RESISTOR_ENABLE 524 #define CMD_SET_LIGHT1_RESISTOR_ENABLE 524
#define CMD_SET_100M_RESET 526 #define CMD_SET_100M_RESET 526
#define CMD_GET_CAMERA_STATUS 310 #endif // USING_PLZ
#define CMD_SET_MADA_MOVE_STATUS 311
#define CMD_SET_MADA_INIT_STATUS 312
#define CMD_SET_MADA_CLOSE_STATUS 313
#define CMD_SET_MADA_REG 314
#define CMD_GET_MADA_REG 315
#define CMD_SET_INIT_STATUS 401
#endif // USING_PTZ
#else // defined(USING_N938) #else // defined(USING_N938)
#define CMD_SET_SYSTEM_RESET 202 #define CMD_SET_SYSTEM_RESET 202
#define CMD_SET_SYSTEM_RESET2 203
#define CMD_SET_485_EN1 302 #define CMD_SET_485_EN1 302
#define CMD_SET_3V3_PWR_EN 132 #define CMD_SET_3V3_PWR_EN 360
#define CMD_SET_UART0_EN 361 #define CMD_SET_UART0_EN 361
#define CMD_SET_485_EN0 301 #define CMD_SET_485_EN0 301
#define CMD_SET_NETWORK_POWER_EN 362 #define CMD_SET_NETWORK_POWER_EN 362
#define CMD_SET_485_EN3 304 #define CMD_SET_485_EN3 304
#define CMD_SET_485_EN2 303 #define CMD_SET_485_EN2 303
#define CMD_SET_SPI_POWER 129 #define CMD_SET_SPI_POWER 129
// #define CMD_SET_5V_EN 363 #define CMD_SET_5V_EN 363
#define CMD_SDCARD_DETECT_EN 364 #define CMD_SDCARD_DETECT_EN 364
#define CMD_SET_PIC1_POWER 494 #define CMD_SET_PIC1_POWER 494
#define CMD_SET_OTHER_POWER 493 #define CMD_SET_OTHER_POWER 493
@ -156,8 +130,6 @@
#define CMD_GET_CHARGING_BUS_VOLTAGE_STATE 112 #define CMD_GET_CHARGING_BUS_VOLTAGE_STATE 112
#define CMD_GET_BAT_BUS_VOLTAGE_STATE 117 #define CMD_GET_BAT_BUS_VOLTAGE_STATE 117
#define CMD_SET_INIT_STATUS 0 // 401
#endif // USING_N938 #endif // USING_N938
@ -180,16 +152,19 @@ public:
{ {
int cmd; int cmd;
size_t references; size_t references;
size_t closeCmds;
time_t closeTime;
time_t openTime; time_t openTime;
}; };
private: private:
static std::mutex m_locker; static std::mutex m_locker;
static CSemaphore m_semaphore;
static std::vector<ITEM> m_items; static std::vector<ITEM> m_items;
static bool m_exitSignal;
static std::thread m_thread;
static bool m_cameraPowerStatus; static bool m_cameraPowerStatus;
static std::mutex m_gpioLocker;
protected: protected:
static size_t turnOnImpl(const IOT_PARAM& param); static size_t turnOnImpl(const IOT_PARAM& param);
static size_t turnOffImpl(const IOT_PARAM& param); static size_t turnOffImpl(const IOT_PARAM& param);
@ -206,7 +181,11 @@ public:
static bool SetCamerastatus(int cmd, bool status); static bool SetCamerastatus(int cmd, bool status);
static bool GetCamerastatus(); static bool GetCamerastatus();
static bool GetSelftestStatus(time_t wait_time); static bool GetSelftestStatus(time_t wait_time);
static time_t GetSelfTestRemain(time_t wait_time);
static void PowerControlThreadProc();
static bool Startup();
static void Stop();
public: public:
static void setInt(int cmd, int value); static void setInt(int cmd, int value);
@ -232,7 +211,7 @@ public:
static void setCam3V3Enable(bool enabled, uint32_t delayedCloseTime) static void setCam3V3Enable(bool enabled, uint32_t delayedCloseTime)
{ {
#ifdef USING_PTZ #ifdef USING_PLZ
enabled ? TurnOn(CMD_SET_3V3_PWR_EN) : TurnOff(CMD_SET_3V3_PWR_EN, delayedCloseTime); enabled ? TurnOn(CMD_SET_3V3_PWR_EN) : TurnOff(CMD_SET_3V3_PWR_EN, delayedCloseTime);
#else #else
enabled ? TurnOn(CMD_SET_3V3_PWR_EN) : TurnOff(CMD_SET_3V3_PWR_EN, delayedCloseTime); enabled ? TurnOn(CMD_SET_3V3_PWR_EN) : TurnOff(CMD_SET_3V3_PWR_EN, delayedCloseTime);
@ -241,7 +220,7 @@ public:
static void setCam3V3Enable(bool enabled) static void setCam3V3Enable(bool enabled)
{ {
#ifdef USING_PTZ #ifdef USING_PLZ
enabled ? TurnOn(CMD_SET_3V3_PWR_EN) : TurnOff(CMD_SET_3V3_PWR_EN); enabled ? TurnOn(CMD_SET_3V3_PWR_EN) : TurnOff(CMD_SET_3V3_PWR_EN);
#else #else
enabled ? TurnOn(CMD_SET_3V3_PWR_EN) : TurnOff(CMD_SET_3V3_PWR_EN); enabled ? TurnOn(CMD_SET_3V3_PWR_EN) : TurnOff(CMD_SET_3V3_PWR_EN);
@ -251,7 +230,7 @@ public:
static void setBeeOn(bool z) static void setBeeOn(bool z)
{ {
#ifndef USING_N938 #ifndef USING_N938
#ifndef USING_PTZ #ifndef USING_PLZ
z ? TurnOn(CMD_SET_PWM_BEE_STATE) : TurnOff(CMD_SET_PWM_BEE_STATE); z ? TurnOn(CMD_SET_PWM_BEE_STATE) : TurnOff(CMD_SET_PWM_BEE_STATE);
#endif #endif
#endif #endif
@ -259,7 +238,7 @@ public:
static void setJidianqiState(bool z) { static void setJidianqiState(bool z) {
#ifndef USING_N938 #ifndef USING_N938
#ifndef USING_PTZ #ifndef USING_PLZ
z ? TurnOn(CMD_SET_ALM_MODE) : TurnOff(CMD_SET_ALM_MODE); z ? TurnOn(CMD_SET_ALM_MODE) : TurnOff(CMD_SET_ALM_MODE);
#endif #endif
#endif #endif
@ -276,7 +255,7 @@ public:
static void setRS485Enable(bool z, uint32_t delayedCloseTime) static void setRS485Enable(bool z, uint32_t delayedCloseTime)
{ {
#ifndef USING_N938 #ifndef USING_N938
#ifdef USING_PTZ #ifdef USING_PLZ
z ? TurnOn(CMD_SET_485_ENABLE) : TurnOff(CMD_SET_485_ENABLE, delayedCloseTime); z ? TurnOn(CMD_SET_485_ENABLE) : TurnOff(CMD_SET_485_ENABLE, delayedCloseTime);
#else #else
z ? TurnOn(CMD_SET_485_EN_STATE) : TurnOff(CMD_SET_485_EN_STATE, delayedCloseTime); z ? TurnOn(CMD_SET_485_EN_STATE) : TurnOff(CMD_SET_485_EN_STATE, delayedCloseTime);
@ -294,7 +273,7 @@ public:
static void setRS485Enable(bool z) static void setRS485Enable(bool z)
{ {
#ifndef USING_N938 #ifndef USING_N938
#ifdef USING_PTZ #ifdef USING_PLZ
z ? TurnOn(CMD_SET_485_ENABLE) : TurnOff(CMD_SET_485_ENABLE); z ? TurnOn(CMD_SET_485_ENABLE) : TurnOff(CMD_SET_485_ENABLE);
#else #else
z ? TurnOn(CMD_SET_485_EN_STATE) : TurnOff(CMD_SET_485_EN_STATE); z ? TurnOn(CMD_SET_485_EN_STATE) : TurnOff(CMD_SET_485_EN_STATE);
@ -314,15 +293,10 @@ public:
setInt(CMD_SET_SYSTEM_RESET, 1); setInt(CMD_SET_SYSTEM_RESET, 1);
} }
static void reboot2()
{
setInt(CMD_SET_SYSTEM_RESET2, 1);
}
static void setLightAdc(int i) static void setLightAdc(int i)
{ {
#ifndef USING_N938 #ifndef USING_N938
#ifdef USING_PTZ #ifdef USING_PLZ
setInt(CMD_SET_LIGHT1_RESISTOR_ENABLE, i); setInt(CMD_SET_LIGHT1_RESISTOR_ENABLE, i);
#else #else
setInt(CMD_SET_LIGHT_ADC, i); setInt(CMD_SET_LIGHT_ADC, i);
@ -333,7 +307,7 @@ public:
static int getLightAdc() static int getLightAdc()
{ {
#ifndef USING_N938 #ifndef USING_N938
#ifdef USING_PTZ #ifdef USING_PLZ
return getInt(CMD_SET_LIGHT1_RESISTOR_ENABLE); return getInt(CMD_SET_LIGHT1_RESISTOR_ENABLE);
#else #else
return getInt(CMD_GET_LIGHT_ADC); return getInt(CMD_GET_LIGHT_ADC);
@ -364,7 +338,11 @@ public:
#endif #endif
static int getChargingBusVoltage() { static int getChargingBusVoltage() {
#ifndef USING_N938
return getInt(CMD_GET_CHARGING_BUS_VOLTAGE_STATE); return getInt(CMD_GET_CHARGING_BUS_VOLTAGE_STATE);
#else
return -1;
#endif
} }
#if 0 #if 0
@ -462,12 +440,6 @@ public:
m_cmds.resize(1, cmd1); m_cmds.resize(1, cmd1);
TurnOn(); TurnOn();
} }
PowerControl(const std::vector<int>& cmds) : m_delayCloseTime(0)
{
m_cmds = cmds;
TurnOn();
}
PowerControl(int cmd1, uint32_t closeDelayTime) : m_delayCloseTime(closeDelayTime) PowerControl(int cmd1, uint32_t closeDelayTime) : m_delayCloseTime(closeDelayTime)
{ {
m_cmds.resize(1, cmd1); m_cmds.resize(1, cmd1);
@ -526,52 +498,14 @@ public:
TurnOn(); TurnOn();
} }
PowerControl(int cmd1, int cmd2, int cmd3, int cmd4, int cmd5, int cmd6, int cmd7, int cmd8, uint32_t closeDelayTime) : m_delayCloseTime(closeDelayTime)
{
m_cmds.resize(8, cmd1);
m_cmds[1] = cmd2;
m_cmds[2] = cmd3;
m_cmds[3] = cmd4;
m_cmds[4] = cmd5;
m_cmds[5] = cmd6;
m_cmds[6] = cmd7;
m_cmds[7] = cmd8;
TurnOn();
}
virtual ~PowerControl() virtual ~PowerControl()
{ {
GpioControl::TurnOff(m_cmds, m_delayCloseTime); GpioControl::TurnOff(m_cmds, m_delayCloseTime);
#if !defined(NDEBUG) && defined(OUTPUT_DBG_INFO)
std::string status = GetStatus();
XYLOG(XYLOG_SEVERITY_INFO, "PWR After TurnOff %s, DelayCloseTime=%u", status.c_str(), m_delayCloseTime);
#endif
}
#if !defined(NDEBUG) && defined(OUTPUT_DBG_INFO)
std::string GetStatus()
{
std::string result;
for (auto it = m_cmds.cbegin(); it != m_cmds.cend(); ++it)
{
if (*it == 0)
{
continue;
}
result += std::to_string(*it) + "=" + std::to_string(GpioControl::getInt(*it)) + " ";
} }
return result;
}
#endif // #if !defined(NDEBUG) && defined(OUTPUT_DBG_INFO)
protected: protected:
void TurnOn() void TurnOn()
{ {
#if !defined(NDEBUG) && defined(OUTPUT_DBG_INFO)
// std::string status = GetStatus();
// XYLOG(XYLOG_SEVERITY_INFO, "PWR Before TurnOn %s", status.c_str());
#endif
GpioControl::TurnOn(m_cmds); GpioControl::TurnOn(m_cmds);
} }
@ -587,11 +521,11 @@ public:
#ifdef USING_N938 #ifdef USING_N938
PowerControl(0, closeDelayTime) PowerControl(0, closeDelayTime)
#else // USING_N938 #else // USING_N938
#ifdef USING_PTZ #ifdef USING_PLZ
PowerControl(CMD_SET_3V3_PWR_EN, closeDelayTime) PowerControl(CMD_SET_3V3_PWR_EN, closeDelayTime)
#else // USING_PTZ #else // USING_PLZ
PowerControl(CMD_SET_3V3_PWR_EN, closeDelayTime) PowerControl(CMD_SET_3V3_PWR_EN, closeDelayTime)
#endif // USING_PTZ #endif // USING_PLZ
#endif // USING_N938 #endif // USING_N938
{ {
} }
@ -602,14 +536,13 @@ class NetCameraPowerCtrl : public PowerControl
public: public:
NetCameraPowerCtrl(uint32_t closeDelayTime) : NetCameraPowerCtrl(uint32_t closeDelayTime) :
#ifdef USING_N938 #ifdef USING_N938
PowerControl(CMD_SET_PIC1_POWER, CMD_SET_485_EN_STATE, closeDelayTime) PowerControl(CMD_SET_OTG_STATE, CMD_SET_NETWORK_POWER_EN, CMD_SET_PIC1_POWER, CMD_SET_485_EN_STATE, closeDelayTime)
#else // USING_N938 #else // USING_N938
#ifdef USING_PTZ #ifdef USING_PLZ
PowerControl(CMD_SET_12V_EN_STATE, closeDelayTime) PowerControl(CMD_SET_OTG_STATE, CMD_SET_100M_ENABLE, CMD_SET_100M_SWITCH_PWR_ENABLE, CMD_SET_12V_EN_STATE, closeDelayTime)
#else // USING_PTZ #else // USING_PLZ
// MicroPhoto PowerControl(CMD_SET_OTG_STATE, CMD_SET_12V_EN_STATE, CMD_SET_485_EN_STATE, closeDelayTime)
PowerControl(CMD_SET_12V_EN_STATE, CMD_SET_485_EN_STATE, closeDelayTime) #endif // USING_PLZ
#endif // USING_PTZ
#endif // USING_N938 #endif // USING_N938
{ {
} }
@ -620,32 +553,13 @@ class PlzCameraPowerCtrl : public PowerControl
public: public:
PlzCameraPowerCtrl(uint32_t closeDelayTime) : PlzCameraPowerCtrl(uint32_t closeDelayTime) :
#ifdef USING_N938 #ifdef USING_N938
PowerControl(CMD_SET_PIC1_POWER, CMD_SET_485_EN_STATE, closeDelayTime) PowerControl(CMD_SET_OTG_STATE, CMD_SET_NETWORK_POWER_EN, CMD_SET_PIC1_POWER, CMD_SET_485_EN_STATE, closeDelayTime)
#else // USING_N938 #else // USING_N938
#ifdef USING_PTZ #ifdef USING_PLZ
PowerControl(CMD_SET_3V3_PWR_EN, CMD_SET_OTG_STATE, CMD_SET_485_ENABLE, CMD_SET_PTZ_PWR_ENABLE, CMD_SET_12V_EN_STATE, CMD_SET_100M_SWITCH_PWR_ENABLE, closeDelayTime) PowerControl(CMD_SET_3V3_PWR_EN, CMD_SET_OTG_STATE, CMD_SET_PTZ_PWR_ENABLE, CMD_SET_100M_ENABLE, CMD_SET_100M_SWITCH_PWR_ENABLE, CMD_SET_485_ENABLE, CMD_SET_12V_EN_STATE, closeDelayTime)
#else // USING_PTZ #else // USING_PLZ
PowerControl(CMD_SET_OTG_STATE, CMD_SET_12V_EN_STATE, closeDelayTime) PowerControl(CMD_SET_OTG_STATE, CMD_SET_12V_EN_STATE, closeDelayTime)
#endif // USING_PTZ #endif // USING_PLZ
#endif // USING_N938
{
}
};
class EthernetPowerCtrl : public PowerControl
{
public:
EthernetPowerCtrl(uint32_t closeDelayTime) :
#ifdef USING_N938
PowerControl(CMD_SET_OTG_STATE, CMD_SET_NETWORK_POWER_EN, closeDelayTime)
#else // USING_N938
#ifdef USING_PTZ
// PowerControl(CMD_SET_3V3_PWR_EN, CMD_SET_OTG_STATE, CMD_SET_5V_PWR_ENABLE, CMD_SET_100M_ENABLE, CMD_SET_100M_SWITCH_PWR_ENABLE, closeDelayTime)
PowerControl(CMD_SET_3V3_PWR_EN, CMD_SET_OTG_STATE, CMD_SET_100M_ENABLE, closeDelayTime)
#else // USING_PTZ
// Micro Photo
PowerControl(CMD_SET_OTG_STATE, CMD_SET_485_EN_STATE/* Only for wp6*/, closeDelayTime)
#endif // USING_PTZ
#endif // USING_N938 #endif // USING_N938
{ {
} }
@ -658,11 +572,11 @@ public:
#ifdef USING_N938 #ifdef USING_N938
PowerControl(CMD_SET_OTG_STATE, CMD_SET_NETWORK_POWER_EN, CMD_SET_PIC1_POWER, CMD_SET_485_EN_STATE, closeDelayTime) PowerControl(CMD_SET_OTG_STATE, CMD_SET_NETWORK_POWER_EN, CMD_SET_PIC1_POWER, CMD_SET_485_EN_STATE, closeDelayTime)
#else // USING_N938 #else // USING_N938
#ifdef USING_PTZ #ifdef USING_PLZ
PowerControl(CMD_SET_PTZ_PWR_ENABLE, CMD_SET_100M_ENABLE, CMD_SET_100M_SWITCH_PWR_ENABLE, CMD_SET_12V_EN_STATE, closeDelayTime) PowerControl(CMD_SET_PTZ_PWR_ENABLE, CMD_SET_100M_ENABLE, CMD_SET_100M_SWITCH_PWR_ENABLE, CMD_SET_12V_EN_STATE, closeDelayTime)
#else // USING_PTZ #else // USING_PLZ
PowerControl(CMD_SET_OTG_STATE, CMD_SET_12V_EN_STATE, closeDelayTime) PowerControl(CMD_SET_OTG_STATE, CMD_SET_12V_EN_STATE, closeDelayTime)
#endif // USING_PTZ #endif // USING_PLZ
#endif // USING_N938 #endif // USING_N938
{ {
} }
@ -675,11 +589,11 @@ public:
#ifdef USING_N938 #ifdef USING_N938
PowerControl(CMD_SET_SPI_POWER, CMD_SPI2SERIAL_POWER_EN, CMD_RS485_3V3_EN, CMD_SET_PIC1_POWER, CMD_SET_485_EN4, closeDelayTime) PowerControl(CMD_SET_SPI_POWER, CMD_SPI2SERIAL_POWER_EN, CMD_RS485_3V3_EN, CMD_SET_PIC1_POWER, CMD_SET_485_EN4, closeDelayTime)
#else // USING_N938 #else // USING_N938
#ifdef USING_PTZ #ifdef USING_PLZ
PowerControl(CMD_SET_12V_EN_STATE, CMD_SET_485_ENABLE, CMD_SET_3V3_PWR_EN, CMD_SET_SPI_POWER, CMD_SET_PTZ_PWR_ENABLE, closeDelayTime) PowerControl(CMD_SET_12V_EN_STATE, CMD_SET_485_ENABLE, CMD_SET_3V3_PWR_EN, CMD_SET_SPI_POWER, CMD_SET_PTZ_PWR_ENABLE, closeDelayTime)
#else // USING_PTZ #else // USING_PLZ
PowerControl(CMD_SET_12V_EN_STATE, CMD_SET_3V3_PWR_EN, CMD_SET_SPI_POWER, CMD_SET_485_EN_STATE, closeDelayTime) PowerControl(CMD_SET_12V_EN_STATE, CMD_SET_3V3_PWR_EN, CMD_SET_SPI_POWER, CMD_SET_485_EN_STATE, closeDelayTime)
#endif // USING_PTZ #endif // USING_PLZ
#endif // USING_N938 #endif // USING_N938
{ {
} }
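For context, the PowerControl classes above are RAII guards over GpioControl: each constructor stores one or more CMD_SET_* codes and calls TurnOn(), and the destructor hands the same list back to GpioControl::TurnOff() together with m_delayCloseTime. A minimal usage sketch (the 3-second delay and the surrounding scope are illustrative, not taken from this repository):

{
    // Bring up the PTZ camera rails; the exact CMD_SET_* list is picked by the
    // USING_N938 / USING_PLZ branches shown in the constructors above.
    PlzCameraPowerCtrl cameraPower(3); // keep the rails on for 3 s after the guard is destroyed

    // ... drive the camera while cameraPower is alive ...

}   // ~PowerControl() runs here and queues GpioControl::TurnOff(m_cmds, 3)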

@ -1,7 +1,6 @@
#include <jni.h> #include <jni.h>
#include <string> #include <string>
#include <thread> #include <thread>
#include <chrono>
#include <Factory.h> #include <Factory.h>
#include <Client/Terminal.h> #include <Client/Terminal.h>
#include "PhoneDevice.h" #include "PhoneDevice.h"
@ -20,18 +19,8 @@
#endif #endif
#ifdef USING_BREAK_PAD #ifdef USING_BREAK_PAD
#include <client/linux/handler/exception_handler.h> #include "client/linux/handler/exception_handler.h"
#include <client/linux/handler/minidump_descriptor.h> #include "client/linux/handler/minidump_descriptor.h"
#endif
#ifdef USING_MQTT
#include <mosquitto.h>
#endif
#ifdef USING_FFMPEG
extern "C" {
#include <libavformat/avformat.h>
}
#endif #endif
#include <android/native_window.h> #include <android/native_window.h>
@ -41,7 +30,6 @@ extern "C" {
#include "GPIOControl.h" #include "GPIOControl.h"
#ifdef USING_BREAK_PAD #ifdef USING_BREAK_PAD
static google_breakpad::ExceptionHandler* g_breakpad_handler = nullptr;
bool DumpCallback(const google_breakpad::MinidumpDescriptor& descriptor, bool DumpCallback(const google_breakpad::MinidumpDescriptor& descriptor,
void* context, void* context,
bool succeeded) { bool succeeded) {
@ -173,27 +161,11 @@ void Runner::RequestCapture(CTerminal* pTerminal, unsigned int channel, unsigned
pTerminal->RequestCapture(channel, preset, type, scheduleTime); pTerminal->RequestCapture(channel, preset, type, scheduleTime);
} }
#include <signal.h>
#include <android/log.h>
#if 0
void sighandler(int sig) {
__android_log_print(ANDROID_LOG_ERROR, "NativeCrash", "Caught signal %d", sig);
exit(1);
}
#endif
jint JNI_OnLoad(JavaVM* vm, void* reserved) jint JNI_OnLoad(JavaVM* vm, void* reserved)
{ {
JNIEnv* env = NULL; JNIEnv* env = NULL;
jint result = -1; jint result = -1;
// Register in JNI_OnLoad or another initialization function
#if 0
signal(SIGSEGV, sighandler);
#endif
#if defined(JNI_VERSION_1_6) #if defined(JNI_VERSION_1_6)
if (result==-1 && vm->GetEnv((void**)&env, JNI_VERSION_1_6) == JNI_OK) if (result==-1 && vm->GetEnv((void**)&env, JNI_VERSION_1_6) == JNI_OK)
{ {
@ -219,15 +191,8 @@ jint JNI_OnLoad(JavaVM* vm, void* reserved)
} }
#ifdef USING_BREAK_PAD #ifdef USING_BREAK_PAD
google_breakpad::MinidumpDescriptor descriptor("/sdcard/com.xypower.mpapp/logs/"); google_breakpad::MinidumpDescriptor descriptor("/sdcard/Android/data/com.xypower.mpapp/files/logs/");
g_breakpad_handler = new google_breakpad::ExceptionHandler( google_breakpad::ExceptionHandler eh(descriptor, NULL, DumpCallback, NULL, true, -1);
descriptor,
nullptr, // Filter callback
DumpCallback, // Minidump callback
nullptr, // Context
true, // Install handlers
-1 // Server FD (not used)
);
#endif #endif
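Both sides of this hunk wire up Breakpad the same way and differ only in where the handler object lives (heap vs. stack) and in the dump directory. A minimal sketch of the pattern, with the dump directory left as a placeholder:

#ifdef USING_BREAK_PAD
#include <client/linux/handler/exception_handler.h>
#include <client/linux/handler/minidump_descriptor.h>

static bool DumpCallback(const google_breakpad::MinidumpDescriptor& descriptor,
                         void* context, bool succeeded) {
    // The minidump has already been written to descriptor.path() when this runs.
    return succeeded;
}

static google_breakpad::ExceptionHandler* g_breakpadHandler = nullptr;

static void InitBreakpad(const char* dumpDir /* e.g. the app's log directory */) {
    google_breakpad::MinidumpDescriptor descriptor(dumpDir);
    // Keep the handler alive for the whole process so native crashes are captured.
    g_breakpadHandler = new google_breakpad::ExceptionHandler(
        descriptor, nullptr /* filter */, DumpCallback, nullptr /* context */,
        true /* install signal handlers */, -1 /* no out-of-process server */);
}
#endif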
#if 0 #if 0
@ -262,40 +227,9 @@ jint JNI_OnLoad(JavaVM* vm, void* reserved)
curl_global_init(CURL_GLOBAL_ALL); curl_global_init(CURL_GLOBAL_ALL);
#ifdef USING_MQTT
mosquitto_lib_init();
#endif
#ifdef USING_FFMPEG
// av_register_all();
avformat_network_init();
#endif
return result; return result;
} }
JNIEXPORT void JNICALL JNI_OnUnload(JavaVM* vm, void* reserved)
{
#ifdef USING_MQTT
mosquitto_lib_cleanup();
#endif
curl_global_cleanup();
#ifdef USING_FFMPEG
// av_register_all();
avformat_network_deinit();
#endif
#ifdef USING_BREAKPAD
// Clean up breakpad handler
if (g_breakpad_handler) {
delete g_breakpad_handler;
g_breakpad_handler = nullptr;
}
#endif
}
bool GetJniEnv(JavaVM *vm, JNIEnv **env, bool& didAttachThread) bool GetJniEnv(JavaVM *vm, JNIEnv **env, bool& didAttachThread)
{ {
didAttachThread = false; didAttachThread = false;
@ -331,10 +265,12 @@ Java_com_xypower_mpapp_MainActivity_takePhoto(
unsigned char id = (unsigned char)channel - 1; unsigned char id = (unsigned char)channel - 1;
Camera2Reader *camera = new Camera2Reader(id); Camera2Reader *camera = new Camera2Reader(id);
std::string pathStr = jstring2string(env, path); const char *pathStr = env->GetStringUTFChars(path, 0);
std::string fileNameStr = jstring2string(env, fileName); const char *fileNameStr = env->GetStringUTFChars(fileName, 0);
camera->Open(pathStr.c_str(), fileNameStr.c_str()); camera->Open(pathStr, fileNameStr);
env->ReleaseStringUTFChars(fileName, fileNameStr);
env->ReleaseStringUTFChars(path, pathStr);
camera->start(); camera->start();
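The left-hand side above converts jstring arguments through a jstring2string() helper instead of pairing GetStringUTFChars/ReleaseStringUTFChars by hand. The helper itself is not shown in this diff; a typical sketch of such a function would be:

#include <jni.h>
#include <string>

// Copies a jstring into a std::string so the JNI buffer can be released
// immediately; returns an empty string for a null input.
static std::string jstring2string(JNIEnv* env, jstring js)
{
    std::string result;
    if (js == NULL)
        return result;
    const char* utf = env->GetStringUTFChars(js, NULL);
    if (utf != NULL)
    {
        result = utf;
        env->ReleaseStringUTFChars(js, utf);
    }
    return result;
}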
@ -365,12 +301,13 @@ Java_com_xypower_mpapp_MicroPhotoService_init(
jstring modelName = env->NewStringUTF(model); jstring modelName = env->NewStringUTF(model);
env->SetObjectField(pThis, fieldId, modelName); env->SetObjectField(pThis, fieldId, modelName);
std::string appPathStr = jstring2string(env, appPath); bool udpOrTcp = (networkProtocol != 0); // 0: tcp
std::string ipStr = jstring2string(env, ip); const char *appPathStr = appPath == NULL ? NULL : env->GetStringUTFChars(appPath, 0);
std::string cmdidStr = jstring2string(env, cmdid); const char *ipStr = ip == NULL ? NULL : env->GetStringUTFChars(ip, 0);
std::string simcardStr = jstring2string(env, simcard); const char *cmdidStr = cmdid == NULL ? NULL : env->GetStringUTFChars(cmdid, 0);
std::string tfCardPathStr = jstring2string(env, tfCardPath); const char *simcardStr = simcard == NULL ? NULL : env->GetStringUTFChars(simcard, 0);
std::string nativeLibraryDirStr = jstring2string(env, nativeLibraryDir); const char *tfCardPathStr = tfCardPath == NULL ? NULL : env->GetStringUTFChars(tfCardPath, 0);
const char *nativeLibraryDirStr = nativeLibraryDir == NULL ? NULL : env->GetStringUTFChars(nativeLibraryDir, 0);
JavaVM* vm = NULL; JavaVM* vm = NULL;
jint ret = env->GetJavaVM(&vm); jint ret = env->GetJavaVM(&vm);
@ -381,14 +318,14 @@ Java_com_xypower_mpapp_MicroPhotoService_init(
CTerminal* pTerminal = NewTerminal(protocol); CTerminal* pTerminal = NewTerminal(protocol);
CPhoneDevice* device = new CPhoneDevice(vm, pThis, appPathStr, (uint64_t)netHandle, versionCode, nativeLibraryDirStr); CPhoneDevice* device = new CPhoneDevice(vm, pThis, MakeString(appPathStr), NETID_UNSET, versionCode, MakeString(nativeLibraryDirStr));
device->SetListener(pTerminal); device->SetListener(pTerminal);
device->UpdateSignalLevel(signalLevel); device->UpdateSignalLevel(signalLevel);
device->SetBuildTime(buildTime / 1000); device->SetBuildTime(buildTime / 1000);
device->UpdateSimcard(simcardStr); device->UpdateSimcard(MakeString(simcardStr));
device->UpdateTfCardPath(tfCardPathStr); device->UpdateTfCardPath(MakeString(tfCardPathStr));
pTerminal->InitServerInfo(appPathStr, cmdidStr, ipStr, port, networkProtocol, encryptData); pTerminal->InitServerInfo(MakeString(appPathStr), MakeString(cmdidStr), MakeString(ipStr), port, udpOrTcp, encryptData);
// pTerminal->SetPacketSize(1 * 1024); // 1K // pTerminal->SetPacketSize(1 * 1024); // 1K
#if defined(USING_NRSEC) && !defined(USING_NRSEC_VPN) #if defined(USING_NRSEC) && !defined(USING_NRSEC_VPN)
pTerminal->InitEncryptionInfo(simcardStr, "/dev/spidev0.0", ""); pTerminal->InitEncryptionInfo(simcardStr, "/dev/spidev0.0", "");
@ -400,6 +337,12 @@ Java_com_xypower_mpapp_MicroPhotoService_init(
#ifdef _DEBUG #ifdef _DEBUG
ALOGD("Finish Startup"); ALOGD("Finish Startup");
#endif #endif
if (appPathStr != NULL) env->ReleaseStringUTFChars(appPath, appPathStr);
if (ipStr != NULL) env->ReleaseStringUTFChars(ip, ipStr);
if (cmdidStr != NULL) env->ReleaseStringUTFChars(cmdid, cmdidStr);
if (simcardStr != NULL) env->ReleaseStringUTFChars(simcard, simcardStr);
if (tfCardPathStr != NULL) env->ReleaseStringUTFChars(tfCardPath, tfCardPathStr);
if (nativeLibraryDirStr != NULL) env->ReleaseStringUTFChars(nativeLibraryDir, nativeLibraryDirStr);
if (!res) if (!res)
{ {
@ -413,7 +356,7 @@ Java_com_xypower_mpapp_MicroPhotoService_init(
extern "C" JNIEXPORT jboolean JNICALL extern "C" JNIEXPORT jboolean JNICALL
Java_com_xypower_mpapp_MicroPhotoService_notifyToTakePhoto( Java_com_xypower_mpapp_MicroPhotoService_notifyToTakePhoto(
JNIEnv* env, JNIEnv* env,
jobject pThis, jlong handler, jint channel, jint preset, jlong scheduleTime, jstring url, jint mediaType) { jobject pThis, jlong handler, jint channel, jint preset, jlong scheduleTime, jboolean photoOrVideo) {
if (channel < 0 || channel > 0xFFFF) if (channel < 0 || channel > 0xFFFF)
{ {
@ -425,44 +368,24 @@ Java_com_xypower_mpapp_MicroPhotoService_notifyToTakePhoto(
return JNI_FALSE; return JNI_FALSE;
} }
uint8_t type = (uint8_t)mediaType; unsigned char type = photoOrVideo ? 0 : 1;
// std::thread th(&Runner::RequestCapture, pTerminal, (unsigned int)channel, (unsigned int)preset, type, (uint64_t)scheduleTime, 0, true); // std::thread th(&Runner::RequestCapture, pTerminal, (unsigned int)channel, (unsigned int)preset, type, (uint64_t)scheduleTime, 0, true);
// th.detach(); // th.detach();
if (channel == 0x200) if (channel < 0x100)
{ {
// Heartbeat pTerminal->RequestCapture((uint32_t)channel, (unsigned int)preset, type, (uint64_t)scheduleTime, 0, true);
} }
else if (channel >= 0x100) else
{ {
uint32_t packetType = channel; uint32_t packetType = channel;
packetType &= 0xFF; packetType &= 0xFF;
pTerminal->RequestSampling(packetType, (uint64_t)scheduleTime, 0); pTerminal->RequestSampling(packetType, (uint64_t)scheduleTime, 0);
} }
else
{
if (mediaType == XY_MEDIA_TYPE_PHOTO || mediaType == XY_MEDIA_TYPE_VIDEO)
{
pTerminal->RequestCapture((uint32_t)channel, (unsigned int)preset, type, (uint64_t)scheduleTime, 0, true);
}
else if (mediaType == XY_MEDIA_TYPE_STREAM)
{
// virtual bool StartStream(unsigned char channel, unsigned char preset, const std::string& url, uint32_t* photoId = NULL);
// virtual bool StopStream(unsigned char channel, unsigned char preset, uint32_t photoId);
uint32_t photoId = 0;
std::string urlStr = jstring2string(env, url);
pTerminal->StartStream(channel, preset, urlStr, &photoId);
}
else if (mediaType == XY_MEDIA_TYPE_STREAM_OFF)
{
pTerminal->StopStream(channel, preset, 0);
}
}
return JNI_TRUE; return JNI_TRUE;
} }
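In the left-hand notifyToTakePhoto() the channel argument is overloaded: 0x200 marks a heartbeat request, values of 0x100 and above ask for a sampling packet whose type is the low byte, and smaller values are real capture channels whose behaviour is chosen by mediaType. A compact restatement of that dispatch (the helper name is illustrative; the CTerminal methods and XY_MEDIA_TYPE_* constants are the ones used above):

static void DispatchCaptureRequest(CTerminal* pTerminal, jint channel, jint preset,
                                   jlong scheduleTime, jint mediaType, const std::string& url)
{
    if (channel == 0x200)
    {
        // Heartbeat placeholder: nothing to capture.
    }
    else if (channel >= 0x100)
    {
        // Sensor sampling: the low byte selects the packet type.
        pTerminal->RequestSampling((uint32_t)channel & 0xFF, (uint64_t)scheduleTime, 0);
    }
    else if (mediaType == XY_MEDIA_TYPE_PHOTO || mediaType == XY_MEDIA_TYPE_VIDEO)
    {
        pTerminal->RequestCapture((uint32_t)channel, (unsigned int)preset,
                                  (uint8_t)mediaType, (uint64_t)scheduleTime, 0, true);
    }
    else if (mediaType == XY_MEDIA_TYPE_STREAM)
    {
        uint32_t photoId = 0;
        pTerminal->StartStream((unsigned char)channel, (unsigned char)preset, url, &photoId);
    }
    else if (mediaType == XY_MEDIA_TYPE_STREAM_OFF)
    {
        pTerminal->StopStream((unsigned char)channel, (unsigned char)preset, 0);
    }
}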
extern "C" JNIEXPORT jlong JNICALL extern "C" JNIEXPORT jlong JNICALL
Java_com_xypower_mpapp_MicroPhotoService_takePhoto( Java_com_xypower_mpapp_MicroPhotoService_takePhoto(
JNIEnv* env, JNIEnv* env,
@ -506,8 +429,11 @@ Java_com_xypower_mpapp_MicroPhotoService_takePhoto(
osds[2].text = cfg.osd.rightBottom; osds[2].text = cfg.osd.rightBottom;
osds[3].text = cfg.osd.leftBottom; osds[3].text = cfg.osd.leftBottom;
std::string pathStr = jstring2string(env, path); const char* pathStr = env->GetStringUTFChars(path, 0);
device->TakePhoto(photoInfo, osds, pathStr);
device->TakePhoto(photoInfo, osds, MakeString(pathStr));
env->ReleaseStringUTFChars(path, pathStr);
// device->TurnOffCameraPower(NULL); // device->TurnOffCameraPower(NULL);
// if (photoInfo.usbCamera) // if (photoInfo.usbCamera)
@ -533,7 +459,7 @@ extern "C" JNIEXPORT jboolean JNICALL
Java_com_xypower_mpapp_MicroPhotoService_sendHeartbeat( Java_com_xypower_mpapp_MicroPhotoService_sendHeartbeat(
JNIEnv* env, JNIEnv* env,
jobject pThis, jobject pThis,
jlong handler, jint signalLevel, jboolean scheduled) { jlong handler, jint signalLevel) {
CTerminal* pTerminal = reinterpret_cast<CTerminal *>(handler); CTerminal* pTerminal = reinterpret_cast<CTerminal *>(handler);
if (pTerminal == NULL) if (pTerminal == NULL)
@ -547,25 +473,7 @@ Java_com_xypower_mpapp_MicroPhotoService_sendHeartbeat(
device->UpdateSignalLevel(signalLevel); device->UpdateSignalLevel(signalLevel);
} }
pTerminal->SendHeartbeat(scheduled != JNI_FALSE); pTerminal->SendHeartbeat();
#ifdef OUTPUT_DBG_INFO
#if 0
std::thread t([]()
{
time_t ts = time(NULL);
int ldr = GpioControl::getLightAdc();
char buf[64] = { 0 };
snprintf(buf, sizeof(buf), "%s %d\r\n", FormatLocalDateTime(ts).c_str(), ldr);
appendFile("/sdcard/com.xypower.mpapp/tmp/ldr.txt", (const unsigned char* )buf, strlen(buf));
});
t.detach();
#endif
#endif
return JNI_TRUE; return JNI_TRUE;
} }
@ -759,7 +667,9 @@ Java_com_xypower_mpapp_MicroPhotoService_recoganizePicture(
JNIEnv* env, JNIEnv* env,
jclass cls, jstring paramPath, jstring binPath, jstring blobName8, jstring blobName16, jstring blobName32, jstring picPath) { jclass cls, jstring paramPath, jstring binPath, jstring blobName8, jstring blobName16, jstring blobName32, jstring picPath) {
std::string paramPathStr = jstring2string(env, paramPath); const char* pParamPathStr = env->GetStringUTFChars(paramPath, 0);
std::string paramPathStr = MakeString(pParamPathStr);
env->ReleaseStringUTFChars(paramPath, pParamPathStr);
const char* pBinPathStr = env->GetStringUTFChars(binPath, 0); const char* pBinPathStr = env->GetStringUTFChars(binPath, 0);
std::string binPathStr = MakeString(pBinPathStr); std::string binPathStr = MakeString(pBinPathStr);
@ -967,21 +877,19 @@ Java_com_xypower_mpapp_MicroPhotoService_recordingFinished(
extern "C" JNIEXPORT jboolean JNICALL extern "C" JNIEXPORT jboolean JNICALL
Java_com_xypower_mpapp_MicroPhotoService_reloadConfigs( Java_com_xypower_mpapp_MicroPhotoService_reloadConfigs(
JNIEnv* env, JNIEnv* env,
jobject pThis, jlong handler, jint channelToClean) { jobject pThis, jlong handler) {
CTerminal* pTerminal = reinterpret_cast<CTerminal *>(handler); CTerminal* pTerminal = reinterpret_cast<CTerminal *>(handler);
if (pTerminal == NULL) if (pTerminal == NULL)
{ {
return JNI_FALSE; return JNI_FALSE;
} }
if (channelToClean != -1)
{ bool res = pTerminal->LoadAppConfigs();
pTerminal->CleanCaptureSchedules((uint32_t)((int)channelToClean));
}
bool res = pTerminal->LoadAppConfigs(true);
return res ? JNI_TRUE : JNI_FALSE; return res ? JNI_TRUE : JNI_FALSE;
} }
extern "C" JNIEXPORT jboolean JNICALL extern "C" JNIEXPORT jboolean JNICALL
Java_com_xypower_mpapp_MicroPhotoService_sendExternalPhoto( Java_com_xypower_mpapp_MicroPhotoService_sendExternalPhoto(
JNIEnv* env, jclass cls, jlong handler, jstring path, jlong photoInfo) { JNIEnv* env, jclass cls, jlong handler, jstring path, jlong photoInfo) {
@ -1028,18 +936,9 @@ Java_com_xypower_mpapp_MicroPhotoService_infoLog(
return; return;
} }
std::string str = jstring2string(env, msg); const char *msgStr = env->GetStringUTFChars(msg, 0);
XYLOG(XYLOG_SEVERITY_INFO, str.c_str()); XYLOG(XYLOG_SEVERITY_INFO, msgStr);
} env->ReleaseStringUTFChars(msg, msgStr);
extern "C" JNIEXPORT jboolean JNICALL
Java_com_xypower_mpapp_MicroPhotoService_usingEthernet(
JNIEnv* env, jclass cls) {
#ifdef USING_ETHERNET
return JNI_TRUE;
#else
return JNI_FALSE;
#endif
} }
extern "C" JNIEXPORT void JNICALL extern "C" JNIEXPORT void JNICALL
@ -1437,142 +1336,8 @@ Java_com_xypower_mpapp_MicroPhotoService_updateEhernet(
CPhoneDevice* device = (CPhoneDevice*)pTerminal->GetDevice(); CPhoneDevice* device = (CPhoneDevice*)pTerminal->GetDevice();
if (device != NULL) if (device != NULL)
{ {
bool changed = false; device->UpdateEthernet(static_cast<net_handle_t>(networkHandle), available != JNI_FALSE);
device->UpdateNetwork(static_cast<net_handle_t>(networkHandle), available != JNI_FALSE, false, changed);
if (changed)
{
XYLOG(XYLOG_SEVERITY_DEBUG, "Ethernet Changed and Check socket connection");
pTerminal->ResetNetwork();
}
else
{
XYLOG(XYLOG_SEVERITY_DEBUG, "Ethernet Changing Not Cause Socket Disconnection");
}
}
return JNI_TRUE;
}
extern "C" JNIEXPORT jboolean JNICALL
Java_com_xypower_mpapp_MicroPhotoService_updateActiveNetwork(
JNIEnv* env, jobject pThis, jlong handle, jlong networkHandle, jboolean available) {
CTerminal* pTerminal = reinterpret_cast<CTerminal *>(handle);
if (pTerminal == NULL)
{
return JNI_FALSE;
}
CPhoneDevice* device = (CPhoneDevice*)pTerminal->GetDevice();
if (device != NULL)
{
bool changed = false;
device->UpdateNetwork(static_cast<net_handle_t>(networkHandle), available != JNI_FALSE, true, changed);
}
return JNI_TRUE;
}
extern "C" JNIEXPORT jlong JNICALL
Java_com_xypower_mpapp_MicroPhotoService_requestPowerControl(
JNIEnv* env, jclass cls, jint type) {
if (type == 1) // Net
{
NetCameraPowerCtrl* powerControl = new NetCameraPowerCtrl(2);
return reinterpret_cast<jlong>(powerControl);
}
return 0L;
}
extern "C" JNIEXPORT jboolean JNICALL
Java_com_xypower_mpapp_MicroPhotoService_releasePowerControl(
JNIEnv* env, jclass cls, jlong powerControlHandle) {
PowerControl* powerControl = reinterpret_cast<PowerControl*>(powerControlHandle);
delete powerControl;
return JNI_TRUE;
}
extern "C"
JNIEXPORT jint JNICALL
Java_com_xypower_mpapp_MicroPhotoService_getCustomAppId(JNIEnv *env, jobject thiz) {
#ifdef USING_N938
return 2;
#elif defined(USING_PTZ)
return 1;
#else
return 0;
#endif
}
extern "C" JNIEXPORT void JNICALL
Java_com_xypower_mpapp_MicroPhotoService_sendCameraCtrl(
JNIEnv* env, jobject pThis, jlong handle, jint channel, jint preset, jint cmd) {
CTerminal* pTerminal = reinterpret_cast<CTerminal *>(handle);
if (pTerminal == NULL)
{
return;
} }
pTerminal->SendCameraCtrl(channel, preset, cmd);
}
extern "C" JNIEXPORT void JNICALL
Java_com_xypower_mpapp_MicroPhotoService_notifyTimeUpdated(
JNIEnv* env, jobject pThis, jlong handle) {
CTerminal* pTerminal = reinterpret_cast<CTerminal *>(handle);
if (pTerminal == NULL)
{
return;
}
std::thread t([pTerminal]()
{
pTerminal->OnTimeUpdated();
});
t.detach();
}
extern "C"
JNIEXPORT jboolean JNICALL
Java_com_xypower_mpapp_MicroPhotoService_sendBasicInfo(JNIEnv *env, jobject thiz, jlong handler) {
// TODO: implement sendBasicInfo()
CTerminal* pTerminal = reinterpret_cast<CTerminal *>(handler);
if (pTerminal == NULL)
{
return JNI_FALSE;
}
pTerminal->SendBasicInfo();
return JNI_TRUE;
}
extern "C"
JNIEXPORT jboolean JNICALL
Java_com_xypower_mpapp_MicroPhotoService_sendWorkStatus(JNIEnv *env, jobject thiz, jlong handler) {
// TODO: implement sendWorkStatus()
CTerminal* pTerminal = reinterpret_cast<CTerminal *>(handler);
if (pTerminal == NULL)
{
return JNI_FALSE;
}
pTerminal->SendWorkStatus();
return JNI_TRUE;
}
extern "C"
JNIEXPORT jboolean JNICALL
Java_com_xypower_mpapp_MicroPhotoService_sendFault(JNIEnv *env, jobject thiz, jlong handler, jstring faultCode, jstring faultInfo) {
// TODO: implement sendFault()
CTerminal* pTerminal = reinterpret_cast<CTerminal *>(handler);
if (pTerminal == NULL)
{
return JNI_FALSE;
}
std::string faultInfoStr = jstring2string(env, faultInfo);
pTerminal->SendFaultInfo(faultInfoStr);
return JNI_TRUE; return JNI_TRUE;
} }
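Throughout this file the jlong handler/handle argument carries the CTerminal pointer returned from init(); every entry point recovers it with reinterpret_cast and bails out on null. A small illustrative guard (the helper name does not exist in the source):

// Recovers the CTerminal* packed into the jlong handle passed from Java;
// returns nullptr when Java handed over 0 so callers can return early.
static inline CTerminal* TerminalFromHandle(jlong handler)
{
    return (handler == 0) ? nullptr : reinterpret_cast<CTerminal*>(handler);
}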

File diff suppressed because it is too large

@ -31,8 +31,6 @@
#include <android/multinetwork.h> #include <android/multinetwork.h>
#include "SensorsProtocol.h" #include "SensorsProtocol.h"
#include "PtzController.h"
#define LOGE(...) ((void)__android_log_print(ANDROID_LOG_ERROR, "error", __VA_ARGS__)) #define LOGE(...) ((void)__android_log_print(ANDROID_LOG_ERROR, "error", __VA_ARGS__))
#define LOGD(...) ((void)__android_log_print(ANDROID_LOG_DEBUG, "debug", __VA_ARGS__)) #define LOGD(...) ((void)__android_log_print(ANDROID_LOG_DEBUG, "debug", __VA_ARGS__))
@ -156,22 +154,11 @@ void MatToBitmap(JNIEnv *env, cv::Mat& mat, jobject& bitmap) {
#endif #endif
class PowerControl; class PowerControl;
class VendorCtrl;
class Streaming;
struct STREAMING_CONTEXT
{
std::shared_ptr<Streaming> stream;
std::shared_ptr<PowerControl> powerCtrl;
std::shared_ptr<PowerControl> ethernetPowerCtrl;
};
class CPhoneDevice : public IDevice class CPhoneDevice : public IDevice
{ {
public: public:
friend PtzController;
struct NETWORK struct NETWORK
{ {
std::string iface; std::string iface;
@ -185,7 +172,7 @@ public:
public: public:
CPhoneCamera(CPhoneDevice* dev, int32_t width, int32_t height, const NdkCamera::CAMERA_PARAMS& params); CPhoneCamera(CPhoneDevice* dev, int32_t width, int32_t height, const NdkCamera::CAMERA_PARAMS& params);
virtual ~CPhoneCamera(); virtual ~CPhoneCamera();
virtual bool on_image(cv::Mat rgb); virtual bool on_image(cv::Mat& rgb);
virtual void on_error(const std::string& msg); virtual void on_error(const std::string& msg);
virtual void onDisconnected(ACameraDevice* device); virtual void onDisconnected(ACameraDevice* device);
virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, uint32_t duration, std::vector<std::vector<uint8_t> >& frames); virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, uint32_t duration, std::vector<std::vector<uint8_t> >& frames);
@ -221,7 +208,7 @@ public:
uint64_t uid; uint64_t uid;
}; };
CPhoneDevice(JavaVM* vm, jobject service, const std::string& appPath, uint64_t activeNetHandle, unsigned int versionCode, const std::string& nativeLibDir); CPhoneDevice(JavaVM* vm, jobject service, const std::string& appPath, unsigned int netId, unsigned int versionCode, const std::string& nativeLibDir);
virtual ~CPhoneDevice(); virtual ~CPhoneDevice();
virtual void SetListener(IListener* listener); virtual void SetListener(IListener* listener);
@ -232,28 +219,25 @@ public:
virtual bool UpdateSchedules(); virtual bool UpdateSchedules();
virtual bool QuerySystemProperties(map<string, string>& properties); virtual bool QuerySystemProperties(map<string, string>& properties);
virtual bool InstallAPP(const std::string& path, unsigned int delayedTime); virtual bool InstallAPP(const std::string& path, unsigned int delayedTime);
virtual bool Reboot(int resetType, bool manually, const std::string& reason, uint32_t timeout = 1000); virtual bool Reboot(int resetType, bool manually, const std::string& reason);
virtual bool EnableGPS(bool enabled); virtual bool EnableGPS(bool enabled);
virtual int QueryBattaryVoltage(int timesForAvg, int* isCharging); virtual float QueryBattaryVoltage(int timesForAvg, bool* isCharging);
virtual uint32_t QueryLdr();
virtual bool RequestPosition(); virtual bool RequestPosition();
virtual timer_uid_t RegisterHeartbeat(unsigned int timerType, unsigned int timeout, time_t tsForNextPhoto); virtual timer_uid_t RegisterHeartbeat(unsigned int timerType, unsigned int timeout, time_t tsForNextPhoto);
virtual bool TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector<OSD_INFO>& osds, const std::string& path); virtual bool TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector<OSD_INFO>& osds, const std::string& path);
virtual bool CloseCamera(); virtual bool CloseCamera();
virtual timer_uid_t RegisterTimer(unsigned int timerType, unsigned int timeout, void* data, uint64_t times = 1); virtual timer_uid_t RegisterTimer(unsigned int timerType, unsigned int timeout, void* data, uint64_t times = 0);
virtual bool UnregisterTimer(timer_uid_t uid); virtual bool UnregisterTimer(timer_uid_t uid);
virtual uint64_t RequestWakelock(uint64_t timeout); virtual uint64_t RequestWakelock(uint64_t timeout);
virtual bool ReleaseWakelock(uint64_t wakelock); virtual bool ReleaseWakelock(uint64_t wakelock);
virtual std::string GetVersion() const;
virtual int GetWData(WEATHER_INFO *weatherInfo, D_SENSOR_PARAM *sensorParam); virtual int GetWData(WEATHER_INFO *weatherInfo, D_SENSOR_PARAM *sensorParam);
virtual int GetIceData(ICE_INFO *iceInfo, ICE_TAIL *icetail, D_SENSOR_PARAM *sensorParam); virtual int GetIceData(ICE_INFO *iceInfo, ICE_TAIL *icetail, D_SENSOR_PARAM *sensorParam);
virtual bool OpenSensors(int sensortype); virtual bool OpenSensors(int sensortype);
virtual bool CloseSensors(int sensortype, uint32_t delayedCloseTime); virtual bool CloseSensors(int sensortype, uint32_t delayedCloseTime);
virtual bool OpenPTZSensors(uint32_t sec); virtual bool OpenPTZSensors(uint32_t sec);
virtual bool ClosePTZSensors(uint32_t delayedCloseTime); virtual bool ClosePTZSensors(uint32_t delayedCloseTime);
virtual bool GetPTZSensorsStatus(time_t waittime); virtual bool GetPTZSensorsStatus();
virtual bool GetCameraStatus(); virtual bool GetCameraStatus();
virtual void CameraCtrl(unsigned short waitTime, unsigned short delayTime, unsigned char channel, int cmdidx, unsigned char presetno, const char *serfile, unsigned int baud, int addr); virtual void CameraCtrl(unsigned short waitTime, unsigned short delayTime, unsigned char channel, int cmdidx, unsigned char presetno, const char *serfile, unsigned int baud, int addr);
virtual int GetSerialPhoto(int devno, D_IMAGE_DEF *photo); virtual int GetSerialPhoto(int devno, D_IMAGE_DEF *photo);
@ -264,7 +248,7 @@ public:
void UpdatePosition(double lon, double lat, double radius, time_t ts); void UpdatePosition(double lon, double lat, double radius, time_t ts);
bool OnVideoReady(bool photoOrVideo, bool result, const char* path, unsigned int photoId); bool OnVideoReady(bool photoOrVideo, bool result, const char* path, unsigned int photoId);
bool OnCaptureReady(bool photoOrVideo, bool result, cv::Mat mat, unsigned int photoId); bool OnCaptureReady(bool photoOrVideo, bool result, cv::Mat& mat, unsigned int photoId);
void UpdateSignalLevel(int signalLevel); void UpdateSignalLevel(int signalLevel);
void UpdateTfCardPath(const std::string& tfCardPath) void UpdateTfCardPath(const std::string& tfCardPath)
@ -276,23 +260,20 @@ public:
mBuildTime = buildTime; mBuildTime = buildTime;
} }
void UpdateSimcard(const std::string& simcard); void UpdateSimcard(const std::string& simcard);
void UpdateNetwork(net_handle_t nethandle, bool available, bool defaultOrEthernet, bool& changed); void UpdateEthernet(net_handle_t nethandle, bool available);
net_handle_t GetEthnetHandle() const; net_handle_t GetNetHandle() const;
VendorCtrl* MakeVendorCtrl(int vendor, uint8_t channel, const std::string& ip, const std::string& userName, const std::string& password, net_handle_t netHandle, bool syncTime);
protected: protected:
std::string GetFileName() const; std::string GetFileName() const;
std::string GetVersion() const;
bool SendBroadcastMessage(std::string action, int value); bool SendBroadcastMessage(std::string action, int value);
// bool MatchCaptureSizeRequest(ACameraManager *cameraManager, const char *selectedCameraId, unsigned int width, unsigned int height, uint32_t cameraOrientation_, // bool MatchCaptureSizeRequest(ACameraManager *cameraManager, const char *selectedCameraId, unsigned int width, unsigned int height, uint32_t cameraOrientation_,
bool TakePhotoWithNetCamera(const IDevice::PHOTO_INFO& localPhotoInfo, const std::string& path, const std::vector<IDevice::OSD_INFO>& osds, std::shared_ptr<PowerControl> powerCtrlPtr); bool TakePhotoWithNetCamera(IDevice::PHOTO_INFO& localPhotoInfo, const std::string& path, std::vector<IDevice::OSD_INFO>& osds, std::shared_ptr<PowerControl> powerCtrlPtr);
bool TakeVideoWithNetCamera(const IDevice::PHOTO_INFO& localPhotoInfo, const std::string& path, const std::vector<IDevice::OSD_INFO>& osds, std::shared_ptr<PowerControl> powerCtrlPtr); bool PostProcessPhoto(const PHOTO_INFO& photoInfo, const vector<IDevice::OSD_INFO>& osds, const std::string& path, const std::string& cameraInfo, cv::Mat& mat);
bool StartPushStreaming(const IDevice::PHOTO_INFO& localPhotoInfo, const std::string& url, const std::vector<IDevice::OSD_INFO>& osds, std::shared_ptr<PowerControl> powerCtrlPtr);
bool PostProcessPhoto(const PHOTO_INFO& photoInfo, const vector<IDevice::OSD_INFO>& osds, const std::string& path, const std::string& cameraInfo, cv::Mat mat, time_t takingTime);
inline bool TakePhotoCb(int res, const IDevice::PHOTO_INFO& photoInfo, const string& path, time_t photoTime, const std::vector<IDevice::RECOG_OBJECT>& objects) const inline bool TakePhotoCb(int res, const IDevice::PHOTO_INFO& photoInfo, const string& path, time_t photoTime, const std::vector<IDevice::RECOG_OBJECT>& objects) const
{ {
if (m_listener != NULL) if (m_listener != NULL)
@ -334,10 +315,10 @@ protected:
return false; return false;
} }
void QueryFlowInfo(std::map<std::string, std::string>& powerInfo); void QueryPowerInfo(std::map<std::string, std::string>& powerInfo);
std::string QueryCpuTemperature(); std::string QueryCpuTemperature();
bool OnImageReady(cv::Mat mat); bool OnImageReady(cv::Mat& mat);
bool onOneCapture(std::shared_ptr<ACameraMetadata> characteristics, std::shared_ptr<ACameraMetadata> results, uint32_t ldr, uint32_t duration, cv::Mat rgb); bool onOneCapture(std::shared_ptr<ACameraMetadata> characteristics, std::shared_ptr<ACameraMetadata> results, uint32_t ldr, uint32_t duration, cv::Mat rgb);
bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, uint32_t duration, std::vector<std::vector<uint8_t> >& frames); bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, uint32_t duration, std::vector<std::vector<uint8_t> >& frames);
bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, uint32_t duration, std::vector<std::shared_ptr<AImage> >& frames); bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, uint32_t duration, std::vector<std::shared_ptr<AImage> >& frames);
@ -361,10 +342,6 @@ protected:
void SetStaticIp(const std::string& iface, const std::string& ip, const std::string& netmask, const std::string& gateway); void SetStaticIp(const std::string& iface, const std::string& ip, const std::string& netmask, const std::string& gateway);
void ConvertDngToPng(const std::string& dngPath, const std::string& pngPath); void ConvertDngToPng(const std::string& dngPath, const std::string& pngPath);
void SetStaticIp(); void SetStaticIp();
void ShutdownEthernet();
int ExecuteCommand(const std::string& cmd);
static std::string BuildCaptureResultInfo(ACameraMetadata* result, uint32_t ldr, uint32_t duration, bool burst);
protected: protected:
@ -377,8 +354,7 @@ protected:
std::string m_nativeLibraryDir; std::string m_nativeLibraryDir;
NETWORK* m_network; NETWORK* m_network;
net_handle_t m_defNetHandle; net_handle_t m_netHandle;
net_handle_t m_ethnetHandle;
jmethodID mRegisterHeartbeatMid; jmethodID mRegisterHeartbeatMid;
jmethodID mUpdateCaptureScheduleMid; jmethodID mUpdateCaptureScheduleMid;
@ -388,7 +364,7 @@ protected:
jmethodID mRequestWakelockMid; jmethodID mRequestWakelockMid;
jmethodID mReleaseWakelockMid; jmethodID mReleaseWakelockMid;
jmethodID mGetFlowInfoMid; jmethodID mGetSystemInfoMid;
jmethodID mRebootMid; jmethodID mRebootMid;
jmethodID mInstallAppMid; jmethodID mInstallAppMid;
@ -396,7 +372,6 @@ protected:
jmethodID mRequestPositionMid; jmethodID mRequestPositionMid;
jmethodID mExecHdrplusMid; jmethodID mExecHdrplusMid;
jmethodID mSetStaticIpMid; jmethodID mSetStaticIpMid;
jmethodID mExecuteCmdMid;
jmethodID mConvertDngToPngMid; jmethodID mConvertDngToPngMid;
@ -408,6 +383,7 @@ protected:
IListener* m_listener; IListener* m_listener;
const CFG_RECOGNIZATION* m_pRecognizationCfg; const CFG_RECOGNIZATION* m_pRecognizationCfg;
bool mAIInitialized; bool mAIInitialized;
unsigned int mNetId;
unsigned int mVersionCode; unsigned int mVersionCode;
time_t mBuildTime; time_t mBuildTime;
@ -424,17 +400,16 @@ protected:
std::thread m_threadClose; std::thread m_threadClose;
std::shared_ptr<PowerControl> m_powerCtrlPtr; std::shared_ptr<PowerControl> m_powerCtrlPtr;
uint32_t m_ethernetFailures;
int m_signalLevel; int m_signalLevel;
time_t m_signalLevelUpdateTime; time_t m_signalLevelUpdateTime;
std::string m_simcard; std::string m_simcard;
mutable std::mutex m_cameraLocker; mutable std::mutex m_cameraLocker;
bool m_cameraStatus;
bool m_sensorsStatus;
time_t m_lastTime; time_t m_lastTime;
std::atomic<bool> m_shouldStopWaiting; std::atomic<bool> m_shouldStopWaiting;
std::atomic<bool> m_isSelfTesting{false};
IDevice::ICE_TAIL m_tempData; IDevice::ICE_TAIL m_tempData;
mutable std::mutex m_dataLocker; mutable std::mutex m_dataLocker;
@ -444,10 +419,6 @@ protected:
std::atomic<bool> m_collecting; std::atomic<bool> m_collecting;
unsigned long long localDelayTime; unsigned long long localDelayTime;
std::map<uint8_t, STREAMING_CONTEXT > m_streamings;
PtzController* m_ptzController;
}; };
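The removed STREAMING_CONTEXT above ties a live stream to the power rails it depends on: while an entry sits in m_streamings, its shared_ptr<PowerControl> members keep the camera and Ethernet supplies on, and erasing the entry lets the RAII destructors schedule the delayed power-off. A hedged sketch of that lifecycle (the free functions and the zero delays are illustrative; the types are the ones declared above):

#include <map>
#include <memory>

void StartChannelStream(std::map<uint8_t, STREAMING_CONTEXT>& streamings,
                        uint8_t channel, std::shared_ptr<Streaming> stream)
{
    STREAMING_CONTEXT ctx;
    ctx.stream = stream;
    ctx.powerCtrl = std::make_shared<PlzCameraPowerCtrl>(0);        // camera rails stay on
    ctx.ethernetPowerCtrl = std::make_shared<EthernetPowerCtrl>(0); // network stays on
    streamings[channel] = ctx;
}

void StopChannelStream(std::map<uint8_t, STREAMING_CONTEXT>& streamings, uint8_t channel)
{
    // Dropping the entry releases the shared_ptrs; the PowerControl destructors
    // then queue the delayed GPIO turn-off for both supplies.
    streamings.erase(channel);
}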

@ -1,462 +0,0 @@
//
// Created by Matthew on 2025/3/5.
//
#include "PtzController.h"
#include "SensorsProtocol.h"
#include "GPIOControl.h"
#include "PhoneDevice.h"
#include "time.h"
#include <memory>
PtzController::PtzController(CPhoneDevice* pPhoneDevice) : m_pPhoneDevice(pPhoneDevice)
{
m_exit = false;
}
void PtzController::Startup()
{
m_thread = std::thread(PtzThreadProc, this);
}
void PtzController::PtzThreadProc(PtzController* pThis)
{
pThis->PtzProc();
}
void PtzController::AddCommand(uint8_t channel, int cmdidx, uint8_t bImageSize, uint8_t preset, const char *serfile, uint32_t baud, int addr)
{
SERIAL_CMD cmd = { 0 };
cmd.channel = channel;
cmd.preset = preset;
cmd.cmdidx = cmdidx;
cmd.bImageSize = bImageSize;
strcpy(cmd.serfile, serfile);
cmd.baud = baud;
cmd.addr = addr;
cmd.ts = time(NULL);
m_locker.lock();
m_cmds.push_back(cmd);
m_locker.unlock();
m_sem.release();
}
void PtzController::AddPhotoCommand(IDevice::PHOTO_INFO& photoInfo, const std::string& path, const std::vector<IDevice::OSD_INFO>& osds)
{
IDevice::SerialsPhotoParam param = { "", 0, 0 };
m_pPhoneDevice->GetPhotoSerialsParamCb(param);
SERIAL_CMD cmdPreset = { 0 };
time_t ts = time(NULL);
#if 1
// if (photoInfo.preset != 0 && photoInfo.preset != 0xFF)
{
cmdPreset.ts = photoInfo.selfTestingTime;
cmdPreset.delayTime = photoInfo.closeDelayTime;
cmdPreset.channel = photoInfo.channel;
cmdPreset.preset = photoInfo.preset;
cmdPreset.cmdidx = PHOTO_OPEN_POWER;
strcpy(cmdPreset.serfile, param.serfile);
cmdPreset.baud = param.baud;
cmdPreset.addr = param.addr;
}
#endif
SERIAL_CMD cmd = { 0 };
cmd.ts = ts;
cmd.delayTime = photoInfo.closeDelayTime;
cmd.channel = photoInfo.channel;
cmd.preset = photoInfo.preset;
cmd.cmdidx = Take_Photo;
cmd.bImageSize = photoInfo.resolution;
strcpy(cmd.serfile, param.serfile);
cmd.baud = param.baud;
cmd.addr = param.addr;
PtzPhotoParams* ppp = new PtzPhotoParams(photoInfo, path, osds);
cmd.photoParams.reset(ppp);
// cmd.delayTime;
// uint8_t bImageSize;
// char serfile[128];
// uint32_t baud;
// int addr;
m_locker.lock();
#if 1
if (cmdPreset.cmdidx != 0)
{
m_cmds.push_back(cmdPreset);
}
#endif
m_cmds.push_back(cmd);
m_locker.unlock();
m_sem.release();
m_sem.release();
}
void PtzController::ExitAndWait()
{
m_exit = true;
m_sem.release();
if (m_thread.joinable())
{
m_thread.join();
}
}
void PtzController::PtzProc()
{
PROC_PTZ_STATE state = PTZS_POWER_OFF;
SERIAL_CMD cmd;
PTZ_STATE ptz_state;
bool hasCmd = false;
int i=0;
int closecmd=0;
std::shared_ptr<PowerControl> powerCtrl;
time_t selfTestingStartTime = 0;
time_t selfTestingWaitTime = 0;
time_t PTZ_preset_start_time = 0;
time_t PTZ_preset_wait_time = 0;
time_t close_delay_time = CAMERA_CLOSE_DELAYTIME;
time_t start_delay_time = 0;
time_t auto_delay_time = 0;
time_t auto_wait_time = WAIT_TIME_AUTO_CLOSE;
time_t photo_move_preset_time = 0;
int iwaitime = 0;
while(true)
{
m_sem.acquire();
if (m_exit)
{
break;
}
hasCmd = false;
m_locker.lock();
for (auto it = m_cmds.begin(); it != m_cmds.end(); ++it)
{
if ((state == PTZS_SELF_TESTING) || (PTZS_PHOTO_SELF_TESTING == state))
{
// find first non-taking-photo cmd
if (it->cmdidx != Take_Photo)
{
cmd = *it;
m_cmds.erase(it);
hasCmd = true;
break;
}
}
else
{
cmd = *it;
m_cmds.erase(it);
hasCmd = true;
break;
}
}
m_locker.unlock();
if (!hasCmd)
{
if ((state == PTZS_SELF_TESTING) || (PTZS_PHOTO_SELF_TESTING == state))
{
time_t timeout = time(NULL) - selfTestingStartTime;
if(timeout < 0)
selfTestingStartTime = time(NULL);
if (timeout >= selfTestingWaitTime)
{
XYLOG(XYLOG_SEVERITY_INFO, "Timed out (%u s) waiting for the PTZ self-test completion reply; state changed to idle!", (uint32_t)timeout);
state = PTZS_IDLE;
m_sem.release();
continue;
}
else
{
//if(timeout >= CAMERA_SELF_TEST_TIME)
{
#ifndef NDEBUG
if (timeout == 1 || ((timeout % 10) == 0))
#endif
{
XYLOG(XYLOG_SEVERITY_INFO, "Start querying PTZ self-test status, timeout=%u s", (uint32_t)timeout);
}
if(0 == QueryPtzState(&ptz_state, QUERY_PTZ_STATE, cmd.serfile, cmd.baud, cmd.addr))
{
if(0 == ptz_state.ptz_status)
{
XYLOG(XYLOG_SEVERITY_INFO, "Received PTZ self-test completion reply; state changed to idle, timeout=%u s", (uint32_t)timeout);
state = PTZS_IDLE;
m_sem.release();
continue;
}
}
}
}
std::this_thread::sleep_for(std::chrono::milliseconds(1000));
m_sem.release();
continue;
}
if(0 == start_delay_time)
{
if(0 == iwaitime)
{
auto_delay_time = time(NULL);
iwaitime += 1;
m_sem.release();
continue;
}
else
{
if(time(NULL) - auto_delay_time < 0)
{
auto_delay_time = time(NULL);
}
if(time(NULL) - auto_delay_time >= auto_wait_time)
{
iwaitime = 0;
XYLOG(XYLOG_SEVERITY_INFO, "Camera auto power-on delay exceeded %u s; preparing to power off the camera", (uint32_t)auto_wait_time);
}
else
{
m_sem.release();
continue;
}
}
}
else
{
if(time(NULL) - start_delay_time < 0)
{/* Guard against another thread adjusting the system time while we wait to power off, which could otherwise keep the camera powered for a very long time */
start_delay_time = time(NULL);
}
if(time(NULL) - start_delay_time >= close_delay_time)
{
XYLOG(XYLOG_SEVERITY_INFO, "Camera idle time exceeded %u s; preparing to power off the camera", (uint32_t)close_delay_time);
}
else
{
m_sem.release();
continue;
}
}
if (state == PTZS_POWER_OFF)
{
closecmd = 0;
XYLOG(XYLOG_SEVERITY_INFO, "Auto power-off triggered, but the camera is already powered off!");
// Do Nothing
}
else
{
XYLOG(XYLOG_SEVERITY_INFO, "Auto power-off triggered; notifying the PTZ to prepare for shutdown, state=%d", state);
for(i=0; i<3; i++)
{
if(0 == QueryPtzState(&ptz_state, NOTIFY_PTZ_CLOSE, cmd.serfile, cmd.baud, cmd.addr))
break;
}
powerCtrl.reset();
closecmd = 0;
state = PTZS_POWER_OFF;
XYLOG(XYLOG_SEVERITY_INFO, "Auto power-off: PTZ power turned off, state=%d", state);
}
start_delay_time = 0;
continue;
}
switch (cmd.cmdidx)
{
case Take_Photo:
{
if (state == PTZS_POWER_OFF)
{
if (!powerCtrl)
{
//powerCtrl = std::make_shared<PlzCameraPowerCtrl>(cmd.photoParams->mPhotoInfo.closeDelayTime);
powerCtrl = std::make_shared<PlzCameraPowerCtrl>(0);
selfTestingStartTime = time(NULL);
selfTestingWaitTime = cmd.photoParams->mPhotoInfo.selfTestingTime;
state = PTZS_PHOTO_SELF_TESTING;
XYLOG(XYLOG_SEVERITY_INFO, "1. Received photo command; camera state changed from powered-off to self-testing");
m_locker.lock();
m_cmds.insert(m_cmds.begin(), cmd);
m_locker.unlock();
m_sem.release();
continue;
}
}
if(cmd.photoParams->mPhotoInfo.scheduleTime == 0)
{
if(1 == closecmd)
{
XYLOG(XYLOG_SEVERITY_INFO, "3. Received manual photo command, but a power-off command followed; waiting for the photo to finish before powering off. state=%d", state);
}
else
{
start_delay_time = time(NULL);
XYLOG(XYLOG_SEVERITY_INFO, "3. Received manual photo command, state=%d", state);
}
}
else
XYLOG(XYLOG_SEVERITY_INFO, "2. Received scheduled photo command, state=%d", state);
state = PTZS_TAKING_PHOTO;
if (cmd.preset != 0 && cmd.preset != 0xFF)
{
CameraPhotoCmd(0, cmd.channel, MOVE_PRESETNO, 0, cmd.preset, cmd.serfile, cmd.baud, cmd.addr);
#if 0
if(START_ONCE_SELF == cmd.preset)
{
selfTestingStartTime = time(NULL);
selfTestingWaitTime = CAMERA_SELF_TEST_TIME;
state = PTZS_SELF_TESTING;
m_sem.release();
XYLOG(XYLOG_SEVERITY_INFO, "Photo command calls preset 200: camera starts a one-shot self-test, state changed from taking-photo to self-testing and the photo is cancelled; self-test wait time set to %u s", (uint32_t)selfTestingWaitTime);
break;
}
#endif
PTZ_preset_start_time = time(NULL);
if(START_ONCE_SELF == cmd.preset)
PTZ_preset_wait_time = CAMERA_SELF_TEST_TIME;
else
PTZ_preset_wait_time = MOVE_PRESET_WAIT_TIME;
XYLOG(XYLOG_SEVERITY_INFO, "Moving to preset %u before taking the photo, state=%d", (uint32_t)cmd.preset, state);
for(;;)
{
if(0 == QueryPtzState(&ptz_state, QUERY_PTZ_STATE, cmd.serfile, cmd.baud, cmd.addr))
{
if(0 == ptz_state.ptz_status)
{
XYLOG(XYLOG_SEVERITY_INFO, "Moving to preset %u before taking the photo: received move-finished reply, move took %d s, state=%d", (uint32_t)cmd.preset, (uint32_t)(time(NULL)-PTZ_preset_start_time), state);
break;
}
}
if(time(NULL) - PTZ_preset_start_time < 0)
{/* Guard against another thread adjusting the system time while we wait, which could otherwise make us wait a very long time for the camera to reach the preset */
PTZ_preset_start_time = time(NULL);
}
if(time(NULL) - PTZ_preset_start_time >= PTZ_preset_wait_time)
{
XYLOG(XYLOG_SEVERITY_INFO, "Moving to preset %u before taking the photo: no move-finished reply from the camera within %u s, state=%d", (uint32_t)cmd.preset, (uint32_t)PTZ_preset_wait_time, state);
break;
}
std::this_thread::sleep_for(std::chrono::milliseconds(10));
photo_move_preset_time = time(NULL);
}
}
if(cmd.photoParams->mPhotoInfo.mediaType == 1)
m_pPhoneDevice->TakeVideoWithNetCamera(cmd.photoParams->mPhotoInfo, cmd.photoParams->mPath, cmd.photoParams->mOsds, powerCtrl);
else if ((cmd.photoParams->mPhotoInfo.mediaType == XY_MEDIA_TYPE_STREAM || cmd.photoParams->mPhotoInfo.mediaType == XY_MEDIA_TYPE_STREAM_OFF))
{
m_pPhoneDevice->StartPushStreaming(cmd.photoParams->mPhotoInfo, cmd.photoParams->mPath, cmd.photoParams->mOsds, powerCtrl);
}
else
m_pPhoneDevice->TakePhotoWithNetCamera(cmd.photoParams->mPhotoInfo, cmd.photoParams->mPath, cmd.photoParams->mOsds, powerCtrl);
state = PTZS_IDLE;
}
break;
case PHOTO_OPEN_POWER:
if (state == PTZS_POWER_OFF)
{
if (!powerCtrl)
{
powerCtrl = std::make_shared<PlzCameraPowerCtrl>(0);
selfTestingStartTime = time(NULL);
selfTestingWaitTime = CAMERA_SELF_TEST_TIME;
state = PTZS_PHOTO_SELF_TESTING;
m_sem.release();
XYLOG(XYLOG_SEVERITY_INFO, "Received photo power-on command; camera state changed from powered-off to self-testing! Self-test wait time set to %u s", (uint32_t)selfTestingWaitTime);
}
}
else
{
XYLOG(XYLOG_SEVERITY_INFO, "Received photo power-on command; camera is in state=%d", state);
}
break;
case OPEN_TOTAL:
if (state == PTZS_POWER_OFF)
{
if (!powerCtrl)
{
powerCtrl = std::make_shared<PlzCameraPowerCtrl>(0);
selfTestingStartTime = time(NULL);
selfTestingWaitTime = CAMERA_SELF_TEST_TIME;
state = PTZS_SELF_TESTING;
m_sem.release();
XYLOG(XYLOG_SEVERITY_INFO, "Received manual power-on command; camera state changed from powered-off to self-testing! Self-test wait time set to %u s", (uint32_t)selfTestingWaitTime);
}
}
else
{
XYLOG(XYLOG_SEVERITY_INFO, "Received manual power-on command; camera is in state=%d", state);
}
closecmd = 0;
start_delay_time = time(NULL);
XYLOG(XYLOG_SEVERITY_INFO, "Received manual camera power-on command; power-off timer reset, state=%d", state);
break;
case CLOSE_TOTAL:
if (state == PTZS_POWER_OFF)
{
closecmd = 0;
XYLOG(XYLOG_SEVERITY_INFO, "Received power-off command, but the camera is already powered off!");
// Do Nothing
}
else if(PTZS_PHOTO_SELF_TESTING == state)
{
closecmd = 1;
XYLOG(XYLOG_SEVERITY_INFO, "Received power-off command during the photo self-test; delayed power-off cancelled, falling through to auto power-off handling, state=%d", state);
}
else
{
XYLOG(XYLOG_SEVERITY_INFO, "Received power-off command; notifying the PTZ to prepare for shutdown, state=%d", state);
for(i=0; i<3; i++)
{
if(0 == QueryPtzState(&ptz_state, NOTIFY_PTZ_CLOSE, cmd.serfile, cmd.baud, cmd.addr))
break;
}
closecmd = 0;
powerCtrl.reset();
state = PTZS_POWER_OFF;
XYLOG(XYLOG_SEVERITY_INFO, "PTZ power turned off, state=%d", state);
}
start_delay_time = 0;
break;
default:
{
if (state == PTZS_POWER_OFF)
{
XYLOG(XYLOG_SEVERITY_INFO, "Received manual camera control command, but the camera is powered off and the command cannot be executed!");
CameraPhotoCmd(cmd.ts, cmd.channel, cmd.cmdidx, 0, cmd.preset, cmd.serfile, cmd.baud, cmd.addr);
break;
}
start_delay_time = time(NULL);
XYLOG(XYLOG_SEVERITY_INFO, "Received manual camera control command; power-off timer reset, state=%d", state);
if(cmd.ts <= photo_move_preset_time)
{
XYLOG(XYLOG_SEVERITY_INFO, "Discarding a PTZ control command received while moving to the photo preset, command time " FMT_TIME_T ", photo time " FMT_TIME_T ", state=%d", cmd.ts, photo_move_preset_time, state);
}
else
{
if((MOVE_PRESETNO == cmd.cmdidx) && (START_ONCE_SELF == cmd.preset))
{
selfTestingStartTime = time(NULL);
selfTestingWaitTime = CAMERA_SELF_TEST_TIME;
state = PTZS_SELF_TESTING;
m_sem.release();
XYLOG(XYLOG_SEVERITY_INFO, "Received call-preset-200 command: camera starts a one-shot self-test, state changed to self-testing; self-test wait time set to %u s", (uint32_t)selfTestingWaitTime);
}
CameraPhotoCmd(cmd.ts, cmd.channel, cmd.cmdidx, 0, cmd.preset, cmd.serfile, cmd.baud, cmd.addr);
}
}
break;
}
}
}

@ -1,100 +0,0 @@
//
// Created by Matthew on 2025/3/5.
//
#ifndef MICROPHOTO_PTZCONTROLLER_H
#define MICROPHOTO_PTZCONTROLLER_H
#include <Buffer.h>
#include <thread>
#include <vector>
#include <memory>
#include <string>
#include <mutex>
#include <SemaphoreEx.h>
#include <Client/Device.h>
enum PROC_PTZ_STATE
{
PTZS_POWER_OFF = 0,
PTZS_IDLE = 1,
PTZS_SELF_TESTING = 2,
PTZS_MOVING = 3,
PTZS_TAKING_PHOTO = 4,
PTZS_PHOTO_SELF_TESTING = 5,
};
#define CAMERA_SELF_TEST_TIME 150 /* Camera self-test time (excluding PTZ self-test)*/
#define MOVE_PRESET_WAIT_TIME 20 /* Maximum time to wait for the PTZ to reach the preset position*/
#define CAMERA_CLOSE_DELAYTIME 360 /* Delay before the camera is automatically powered off after a manual power-on*/
#define PHOTO_OPEN_POWER 16000
#define WAIT_TIME_AUTO_CLOSE 2 /* Short delay so that several preset images scheduled at the same time can be captured in one power cycle, instead of the camera self-testing before every photo.*/
class PtzPhotoParams
{
public:
PtzPhotoParams(const IDevice::PHOTO_INFO& photoInfo, const std::string& path, const std::vector<IDevice::OSD_INFO>& osds) :
mPhotoInfo(photoInfo), mPath(path), mOsds(osds)
{
}
~PtzPhotoParams()
{
}
IDevice::PHOTO_INFO mPhotoInfo;
std::string mPath;
std::vector<IDevice::OSD_INFO> mOsds;
};
struct SERIAL_CMD
{
uint8_t channel;
uint8_t preset;
time_t ts;
int cmdidx;
uint32_t delayTime;
uint8_t bImageSize;
char serfile[128];
uint32_t baud;
int addr;
std::shared_ptr<PtzPhotoParams> photoParams;
};
class CPhoneDevice;
class PtzController
{
public:
PtzController(CPhoneDevice* pPhoneDevice);
void Startup();
// ();
void AddCommand(uint8_t channel, int cmdidx, uint8_t bImageSize, uint8_t preset, const char *serfile, uint32_t baud, int addr);
void AddPhotoCommand(IDevice::PHOTO_INFO& photoInfo, const std::string& path, const std::vector<IDevice::OSD_INFO>& osds);
void ExitAndWait();
protected:
static void PtzThreadProc(PtzController* pThis);
void PtzProc();
protected:
protected:
std::mutex m_locker;
std::vector<SERIAL_CMD> m_cmds;
CSemaphore m_sem;
bool m_exit;
std::thread m_thread;
CPhoneDevice* m_pPhoneDevice;
};
#endif //MICROPHOTO_PTZCONTROLLER_H
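PtzController is a single-consumer command queue: producers push SERIAL_CMD entries under m_locker and release m_sem, and the worker thread started by Startup() drains them in PtzProc(). A hedged usage sketch from the device side (the local variables photoInfo, path, osds and param are illustrative; the methods are the ones declared above):

PtzController* ptz = new PtzController(this);   // inside CPhoneDevice
ptz->Startup();                                 // spawns the PtzProc() worker thread

// Queue a photo: the worker powers the PTZ, waits for the self-test,
// moves to the preset and finally calls back into CPhoneDevice.
ptz->AddPhotoCommand(photoInfo, path, osds);

// Queue a raw PTZ command, e.g. move channel 1 to preset 5.
ptz->AddCommand(1 /*channel*/, MOVE_PRESETNO, 0 /*image size*/, 5 /*preset*/,
                param.serfile, param.baud, param.addr);

// On shutdown: signal the worker and join it.
ptz->ExitAndWait();
delete ptz;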

@ -0,0 +1,30 @@
//
// Created by Matthew on 2025/1/27.
//
#include <jni.h>
#include <string>
#include <thread>
#include <stdlib.h>
extern "C" {
#include "rtmp/rtmpsuck.h"
}
extern STREAMING_SERVER *rtmpServer;
extern "C"
JNIEXPORT jlong JNICALL
Java_com_xypower_mpapp_RtmpService_startService(JNIEnv *env, jobject thiz) {
// TODO: implement startService()
RtmpSuckMain(0);
return (jlong)rtmpServer;
}
extern "C"
JNIEXPORT void JNICALL
Java_com_xypower_mpapp_RtmpService_stopService(JNIEnv *env, jobject thiz, jlong native_handle) {
stopStreaming(rtmpServer);
// free(rtmpServer);
}

File diff suppressed because it is too large

@ -27,7 +27,6 @@
#define IOT_PARAM_WRITE 0xAE #define IOT_PARAM_WRITE 0xAE
#define IOT_PARAM_READ 0xAF #define IOT_PARAM_READ 0xAF
#define MAX_FIELDS_NUM 20 /* Maximum number of data fields in one BD_NMEA0183 sentence*/
#define MAX_SERIAL_DEV_NUM 25 /* Maximum number of serial sensors that can be connected*/ #define MAX_SERIAL_DEV_NUM 25 /* Maximum number of serial sensors that can be connected*/
#define MAX_SERIAL_PORT_NUM 5 #define MAX_SERIAL_PORT_NUM 5
#define MAX_DEV_VALUE_NUM 12 /* Maximum number of sampled values for one device*/ #define MAX_DEV_VALUE_NUM 12 /* Maximum number of sampled values for one device*/
@ -40,7 +39,6 @@
#define PELCO_D_PROTOCOL 6 /* Camera Pelco-D protocol index*/ #define PELCO_D_PROTOCOL 6 /* Camera Pelco-D protocol index*/
#define SERIALCAMERA_PROTOCOL 8 /* Serial camera protocol index*/ #define SERIALCAMERA_PROTOCOL 8 /* Serial camera protocol index*/
#define MUTIWEATHER_PROTOCOL 9 /* All-in-one weather sensor*/ #define MUTIWEATHER_PROTOCOL 9 /* All-in-one weather sensor*/
#define NMEA0183_PROTOCOL 10 /* Standalone BeiDou NMEA0183 standard protocol*/
#define RESERVE2_PROTOCOL 17 /* Reserved protocol index 2*/ #define RESERVE2_PROTOCOL 17 /* Reserved protocol index 2*/
#define RESERVE4_PROTOCOL 19 /* Reserved protocol index 4*/ #define RESERVE4_PROTOCOL 19 /* Reserved protocol index 4*/
#define RESERVE5_PROTOCOL 20 /* Reserved protocol index 5*/ #define RESERVE5_PROTOCOL 20 /* Reserved protocol index 5*/
@ -108,10 +106,10 @@
#define D_OPEN_MODULE_POWER 0x0009000C /* Turn on camera core power (1 = active)*/ #define D_OPEN_MODULE_POWER 0x0009000C /* Turn on camera core power (1 = active)*/
/* Macro definitions for commands sent down to the camera*/ /* Macro definitions for commands sent down to the camera*/
#define TAKE_PHOTO 20000 /* Take photo*/ #define Take_Photo 0 /* Take photo*/
#define SET_BAUD 10000 /* Set dome camera baud rate*/ #define Stop_Baud 10000 /* Set dome camera baud rate*/
#define STOP_CMD 10005 /* Cancel or stop command*/ #define Stop_Cmd 10005 /* Cancel or stop command*/
#define AUTO_SCAN 10006 /* Auto-scan control (1/0 = enable/disable)*/ #define Auto_Scan 10006 /* Auto-scan control (1/0 = enable/disable)*/
#define IRIS_CLOSE 10007 /* Close iris (1 = active)*/ #define IRIS_CLOSE 10007 /* Close iris (1 = active)*/
#define IRIS_OPEN 10008 /* Open iris (1 = active)*/ #define IRIS_OPEN 10008 /* Open iris (1 = active)*/
#define FOCUS_NEAR 10009 /* Focus near (1 = active)*/ #define FOCUS_NEAR 10009 /* Focus near (1 = active)*/
@ -126,14 +124,9 @@
#define SAVE_PRESETNO 10018 // Save preset #define SAVE_PRESETNO 10018 // Save preset
#define OPEN_TOTAL 10019 /* Turn on main power (1 = active)*/ #define OPEN_TOTAL 10019 /* Turn on main power (1 = active)*/
#define OPEN_MODULE_POWER 10020 /* Turn on camera core power (1 = active)*/ #define OPEN_MODULE_POWER 10020 /* Turn on camera core power (1 = active)*/
#define NOTIFY_PTZ_CLOSE 10021 // Notify the PTZ to shut down
#define QUERY_PTZ_STATE 10022 // Query PTZ status
#define CLOSE_TOTAL 10040 /* Turn off main power*/
#define SPEED_DOME_CAMERA 0 /* Speed dome camera*/ #define SPEED_DOME_CAMERA 0 /* Speed dome camera*/
#define SERIAL_CAMERA 2 /* Serial camera*/ #define SERIAL_CAMERA 2 /* Serial camera*/
#define START_ONCE_SELF 200 /* Preset 200, used to trigger a one-shot self-test*/
#define COLLECT_DATA 0 /* For debugging*/ #define COLLECT_DATA 0 /* For debugging*/
#define HexCharToInt( c ) (((c) >= '0') && ((c) <= '9') ? (c) - '0' : ((c) >= 'a') && ((c) <= 'f') ? (c) - 'a' + 10 :((c) >= 'A') && ((c) <= 'F') ? (c) - 'A' + 10 : 0 ) #define HexCharToInt( c ) (((c) >= '0') && ((c) <= '9') ? (c) - '0' : ((c) >= 'a') && ((c) <= 'f') ? (c) - 'a' + 10 :((c) >= 'A') && ((c) <= 'F') ? (c) - 'A' + 10 : 0 )
@ -221,48 +214,6 @@ typedef struct
uint8_t Phase; /* Phase where the sensor is installed (for tension and tilt sensors; 11 means A1, ...)*/ uint8_t Phase; /* Phase where the sensor is installed (for tension and tilt sensors; 11 means A1, ...)*/
} SERIAL_PARAM; } SERIAL_PARAM;
// PTZ status data
typedef struct
{
uint8_t ptz_process; /* Current PTZ stage (1: self-test; 2: moving to a preset; 3: normal)*/
uint8_t ptz_status; /* Current PTZ state (0: stopped; 1: moving; 2: camera core not powered; other: other error)*/
int presetno; /* Preset number the PTZ is currently at*/
float x_coordinate; /* Horizontal (pan) coordinate of the current PTZ position*/
float y_coordinate; /* Vertical (tilt) coordinate of the current PTZ position*/
} PTZ_STATE;
/*
1 $--RMC sentence ID (RMC: recommended minimum positioning data)
2 UTCtime hhmmss.ss UTC time
3 status data status: V = invalid, A = valid
4 lat ddmm.mmmmm latitude (first 2 characters are degrees, the rest are minutes)
5 uLat N - north, S - south
6 lon dddmm.mmmm longitude (first 3 characters are degrees, the rest are minutes)
7 uLon E - east, W - west
8 spd speed over ground
9 cog course over ground
10 date ddmmyy day dd, month mm, year yy
11 mv magnetic variation
12 mvE magnetic variation direction, E - east, W - west
13 mode mode indicator [1]
14 navStatus navigation status, V (NMEA 4.1 and later)
15 CS checksum 16 end of sentence*/
// BeiDou satellite data
typedef struct
{
struct tm UTC_time; /* UTC time*/
int ms_time; /* Milliseconds*/
double lat; /* Latitude; raw value is ddmm.mmmmm (first 2 chars degrees, rest minutes), stored converted to degrees */
char uLat; /* Latitude hemisphere: 'N' - north, 'S' - south*/
double lon; /* Longitude; raw value is dddmm.mmmm (first 3 chars degrees, rest minutes), stored converted to degrees*/
char uLon; /* Longitude hemisphere: 'E' - east, 'W' - west*/
char status; /* 'A' = data valid; any other character = data invalid*/
} BD_GNSS_DATA;
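The lat/lon comments above describe the usual NMEA packing (degrees followed by minutes), so converting a raw RMC value to the decimal degrees stored in BD_GNSS_DATA is a short calculation. A small sketch, assuming the raw field has already been read as a double such as 3958.1234 (39° 58.1234'):

#include <math.h>

/* Converts an NMEA ddmm.mmmm (or dddmm.mmmm) value to decimal degrees. */
static double NmeaToDegrees(double raw)
{
    double degrees = floor(raw / 100.0);    /* 3958.1234 -> 39       */
    double minutes = raw - degrees * 100.0; /* 3958.1234 -> 58.1234  */
    return degrees + minutes / 60.0;        /* -> 39.96872...        */
}

/* Example: latitude fields of BD_GNSS_DATA from RMC fields 4 and 5:  */
/*   data.lat  = NmeaToDegrees(3958.1234);   about 39.9687            */
/*   data.uLat = 'N';                                                 */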
typedef struct typedef struct
{ {
int m_iRevStatus; /* */ int m_iRevStatus; /* */
@ -292,19 +243,8 @@ typedef struct
使*/ 使*/
PHOTO_DEF image; /* Temporary storage for image data*/ PHOTO_DEF image; /* Temporary storage for image data*/
int64_t FirstCmdTimeCnt; /* Start time of the serial data read*/ int64_t FirstCmdTimeCnt; /* Start time of the serial data read*/
PTZ_STATE ptz_state;
int sendptzstatecmd; // Limits how many status query commands are sent
BD_GNSS_DATA bd_data;
} SIO_PARAM_SERIAL_DEF; } SIO_PARAM_SERIAL_DEF;
typedef const struct
{
//char *account; // Command description
char *cmd_name; // Command name
int (*recv_process)(SIO_PARAM_SERIAL_DEF *); /* URC data processing*/
}BD_NMEA0183_PROC_FUNC;
// Central definition of all parameters for serial-connected devices // Central definition of all parameters for serial-connected devices
typedef struct typedef struct
{ {
@ -392,11 +332,11 @@ void Gm_OpenSerialPort(int devidx);
// Close serial communication // Close serial communication
void Gm_CloseSerialPort(); void Gm_CloseSerialPort();
void DBG_LOG(int commid, char flag, const char* format, ...); void DebugLog(int commid, char *szbuf, char flag);
int SaveLogTofile(int commid, const char *szbuf); int SaveLogTofile(int commid, char *szbuf);
// Function: send data over the serial port; returns the number of bytes actually sent // Function: send data over the serial port; returns the number of bytes actually sent
int GM_SerialComSend(const unsigned char * cSendBuf, size_t nSendLen, int commid); int GM_SerialComSend(const unsigned char * cSendBuf, size_t nSendLen, int commid);
void Gm_InitSerialComm(SENSOR_PARAM *sensorParam, const char *filedir,const char *log); void Gm_InitSerialComm(SENSOR_PARAM *sensorParam, char *filedir,const char *log);
// Start serial communication // Start serial communication
void GM_StartSerialComm(); void GM_StartSerialComm();
// Start taking photos over the serial port // Start taking photos over the serial port
@ -532,26 +472,6 @@ int GM_IsCloseCamera(SIO_PARAM_SERIAL_DEF *pPortParam);
int GM_CameraSerialTimer(SIO_PARAM_SERIAL_DEF *pPortParam); int GM_CameraSerialTimer(SIO_PARAM_SERIAL_DEF *pPortParam);
int QueryPtzState(PTZ_STATE *ptz_state, int cmdidx, const char *serfile, unsigned int baud, int addr);
void MakePtzStateQueryCommand(SIO_PARAM_SERIAL_DEF *pPortParam, uint8_t cmdidx);
int Query_BDGNSS_Data(BD_GNSS_DATA *BD_data, int samptime, const char *serfile, unsigned int baud);
int GM_BdSerialTimer(SIO_PARAM_SERIAL_DEF *pPortParam);
void GM_BdSerialComRecv(SIO_PARAM_SERIAL_DEF *pPortParam);
void BdRecvData(SIO_PARAM_SERIAL_DEF *pPortParam, u_char *buf, int len);
unsigned char BDXorCheck(unsigned char *msg, int len);
void BD_NMEA0183_PortDataProcess(SIO_PARAM_SERIAL_DEF *curserial);
char** BD_NMEA0183_SplitString(char *str, int *total_fields);
int BD_get_BDRMC_data(SIO_PARAM_SERIAL_DEF *curserial);
#endif // __SENSOR_PROTOCOL_H__ #endif // __SENSOR_PROTOCOL_H__
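For context, this change also removes the PTZ-state and BeiDou query helpers declared above. A minimal sketch of how they might have been driven, going only by the declared signatures (the serial device path, baud rate, PTZ address, command index and sample time below are hypothetical, and a zero return is assumed to mean success purely for illustration):

PTZ_STATE state = { 0 };
BD_GNSS_DATA fix = { 0 };
// Hypothetical parameters: device node /dev/ttyS1, 9600 baud, PTZ address 1, command index QUERY_PTZ_STATE.
if (QueryPtzState(&state, QUERY_PTZ_STATE, "/dev/ttyS1", 9600, 1) == 0) {
    // state.ptz_status / state.presetno now describe the gimbal.
}
// Hypothetical parameters: 5-second sample window on the same port.
if (Query_BDGNSS_Data(&fix, 5, "/dev/ttyS1", 9600) == 0 && fix.status == 'A') {
    // fix.lat / fix.lon hold the converted position in degrees.
}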

File diff suppressed because it is too large

@ -1,724 +0,0 @@
/* Copyright Statement:
*
* This software/firmware and related documentation ("MediaTek Software") are
* protected under relevant copyright laws. The information contained herein is
* confidential and proprietary to MediaTek Inc. and/or its licensors. Without
* the prior written permission of MediaTek inc. and/or its licensors, any
* reproduction, modification, use or disclosure of MediaTek Software, and
* information contained herein, in whole or in part, shall be strictly
* prohibited.
*
* MediaTek Inc. (C) 2010. All rights reserved.
*
* BY OPENING THIS FILE, RECEIVER HEREBY UNEQUIVOCALLY ACKNOWLEDGES AND AGREES
* THAT THE SOFTWARE/FIRMWARE AND ITS DOCUMENTATIONS ("MEDIATEK SOFTWARE")
* RECEIVED FROM MEDIATEK AND/OR ITS REPRESENTATIVES ARE PROVIDED TO RECEIVER
* ON AN "AS-IS" BASIS ONLY. MEDIATEK EXPRESSLY DISCLAIMS ANY AND ALL
* WARRANTIES, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED
* WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE OR
* NONINFRINGEMENT. NEITHER DOES MEDIATEK PROVIDE ANY WARRANTY WHATSOEVER WITH
* RESPECT TO THE SOFTWARE OF ANY THIRD PARTY WHICH MAY BE USED BY,
* INCORPORATED IN, OR SUPPLIED WITH THE MEDIATEK SOFTWARE, AND RECEIVER AGREES
* TO LOOK ONLY TO SUCH THIRD PARTY FOR ANY WARRANTY CLAIM RELATING THERETO.
* RECEIVER EXPRESSLY ACKNOWLEDGES THAT IT IS RECEIVER'S SOLE RESPONSIBILITY TO
* OBTAIN FROM ANY THIRD PARTY ALL PROPER LICENSES CONTAINED IN MEDIATEK
* SOFTWARE. MEDIATEK SHALL ALSO NOT BE RESPONSIBLE FOR ANY MEDIATEK SOFTWARE
* RELEASES MADE TO RECEIVER'S SPECIFICATION OR TO CONFORM TO A PARTICULAR
* STANDARD OR OPEN FORUM. RECEIVER'S SOLE AND EXCLUSIVE REMEDY AND MEDIATEK'S
* ENTIRE AND CUMULATIVE LIABILITY WITH RESPECT TO THE MEDIATEK SOFTWARE
* RELEASED HEREUNDER WILL BE, AT MEDIATEK'S OPTION, TO REVISE OR REPLACE THE
* MEDIATEK SOFTWARE AT ISSUE, OR REFUND ANY SOFTWARE LICENSE FEES OR SERVICE
* CHARGE PAID BY RECEIVER TO MEDIATEK FOR SUCH MEDIATEK SOFTWARE AT ISSUE.
*
* The following software/firmware and/or related documentation ("MediaTek
* Software") have been modified by MediaTek Inc. All revisions are subject to
* any receiver's applicable license agreements with MediaTek Inc.
*/
#ifndef _MTK_HARDWARE_MTKCAM_INCLUDE_MTKCAM_UTILS_METADATA_HAL_MTKPLATFORMMETADATATAG_H_
#define _MTK_HARDWARE_MTKCAM_INCLUDE_MTKCAM_UTILS_METADATA_HAL_MTKPLATFORMMETADATATAG_H_
/******************************************************************************
*
******************************************************************************/
typedef enum mtk_platform_metadata_section {
MTK_HAL_REQUEST = 0xC000, // MTK HAL internal metadata starts from 0xC000 0000
MTK_P1NODE,
MTK_P2NODE,
MTK_3A_TUNINING,
MTK_3A_EXIF,
MTK_MF_EXIF,
MTK_EIS,
MTK_STEREO,
MTK_FRAMESYNC,
MTK_VHDR,
MTK_PIPELINE,
MTK_NR,
MTK_PLUGIN,
MTK_DUALZOOM,
MTK_FEATUREPIPE,
MTK_POSTPROC,
MTK_FEATURE,
MTK_FSC,
} mtk_platform_metadata_section_t;
/******************************************************************************
*
******************************************************************************/
typedef enum mtk_platform_metadata_section_start {
MTK_HAL_REQUEST_START = MTK_HAL_REQUEST << 16,
MTK_P1NODE_START = MTK_P1NODE << 16,
MTK_P2NODE_START = MTK_P2NODE << 16,
MTK_3A_TUNINING_START = MTK_3A_TUNINING << 16,
MTK_3A_EXIF_START = MTK_3A_EXIF << 16,
MTK_EIS_START = MTK_EIS << 16,
MTK_STEREO_START = MTK_STEREO << 16,
MTK_FRAMESYNC_START = MTK_FRAMESYNC << 16,
MTK_VHDR_START = MTK_VHDR << 16,
MTK_PIPELINE_START = MTK_PIPELINE << 16,
MTK_NR_START = MTK_NR << 16,
MTK_PLUGIN_START = MTK_PLUGIN << 16,
MTK_DUALZOOM_START = MTK_DUALZOOM << 16,
MTK_FEATUREPIPE_START = MTK_FEATUREPIPE << 16,
MTK_POSTPROC_START = MTK_POSTPROC << 16,
MTK_FEATURE_START = MTK_FEATURE << 16,
MTK_FSC_START = MTK_FSC << 16,
} mtk_platform_metadata_section_start_t;
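/* Worked example (added for clarity, not part of the original header): MTK_P1NODE is
 * 0xC001, so MTK_P1NODE_START = 0xC001 << 16 = 0xC0010000, and the first tag defined in
 * that section below (MTK_P1NODE_SCALAR_CROP_REGION) takes exactly this value; every
 * following tag in the same section simply increments by one. */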
/******************************************************************************
*
******************************************************************************/
typedef enum mtk_platform_metadata_tag {
MTK_HAL_REQUEST_REQUIRE_EXIF = MTK_HAL_REQUEST_START, //MUINT8
MTK_HAL_REQUEST_DUMP_EXIF, //MUINT8
MTK_HAL_REQUEST_REPEAT, //MUINT8
MTK_HAL_REQUEST_DUMMY, //MUINT8
MTK_HAL_REQUEST_SENSOR_SIZE, //MSize
MTK_HAL_REQUEST_SENSOR_ID, //MINT32
MTK_HAL_REQUEST_DEVICE_ID, //MINT32
MTK_HAL_REQUEST_HIGH_QUALITY_CAP, //MUINT8
MTK_HAL_REQUEST_ISO_SPEED, //MINT32
MTK_HAL_REQUEST_BRIGHTNESS_MODE, //MINT32
MTK_HAL_REQUEST_CONTRAST_MODE, //MINT32
MTK_HAL_REQUEST_HUE_MODE, //MINT32
MTK_HAL_REQUEST_SATURATION_MODE, //MINT32
MTK_HAL_REQUEST_EDGE_MODE, //MINT32
MTK_HAL_REQUEST_PASS1_DISABLE, //MINT32
MTK_HAL_REQUEST_ERROR_FRAME, // used for error handling //MUINT8
MTK_HAL_REQUEST_PRECAPTURE_START, // 4cell //MUINT8
MTK_HAL_REQUEST_AF_TRIGGER_START, // 4cell //MUINT8
MTK_HAL_REQUEST_IMG_IMGO_FORMAT, //MINT32
MTK_HAL_REQUEST_IMG_RRZO_FORMAT, //MINT32
MTK_HAL_REQUEST_INDEX, //MINT32
MTK_HAL_REQUEST_COUNT, //MINT32
MTK_HAL_REQUEST_SMVR_FPS, //MUINT8 // 0: NOT batch request
MTK_HAL_REQUEST_REMOSAIC_ENABLE, //MUINT8 // 0: preview mode 1: capture mode
MTK_HAL_REQUEST_INDEX_BSS, //MINT32
MTK_HAL_REQUEST_ZSD_CAPTURE_INTENT, //MUINT8
MTK_HAL_REQUEST_REAL_CAPTURE_SIZE, //MSize
MTK_HAL_REQUEST_VIDEO_SIZE, //MSize
MTK_HAL_REQUEST_RAW_IMAGE_INFO, //MINT32 // index[0]: raw fmt, index[1]: raw stride, index[2]: raw size(width), index[3]: raw size(height)
MTK_HAL_REQUEST_ISP_PIPELINE_MODE, //MINT32
MTK_P1NODE_SCALAR_CROP_REGION = MTK_P1NODE_START, //MRect
MTK_P1NODE_BIN_CROP_REGION, //MRect
MTK_P1NODE_DMA_CROP_REGION, //MRect
MTK_P1NODE_BIN_SIZE, //MSize
MTK_P1NODE_RESIZER_SIZE, //MSize
MTK_P1NODE_RESIZER_SET_SIZE, //MSize
MTK_P1NODE_CTRL_RESIZE_FLUSH, //MBOOL
MTK_P1NODE_CTRL_READOUT_FLUSH, //MBOOL
MTK_P1NODE_CTRL_RECONFIG_SENSOR_SETTING, //MBOOL
MTK_P1NODE_PROCESSOR_MAGICNUM, //MINT32
MTK_P1NODE_MIN_FRM_DURATION, //MINT64
MTK_P1NODE_RAW_TYPE, //MINT32
MTK_P1NODE_SENSOR_CROP_REGION, //MRect
MTK_P1NODE_YUV_RESIZER1_CROP_REGION, //MRect
MTK_P1NODE_YUV_RESIZER2_CROP_REGION, //MRect
MTK_P1NODE_YUV_RESIZER1_SIZE, //MSize
MTK_P1NODE_SENSOR_MODE, //MINT32
MTK_P1NODE_SENSOR_VHDR_MODE, //MINT32
MTK_P1NODE_METADATA_TAG_INDEX, //MINT32
MTK_P1NODE_RSS_SIZE, //MSize
MTK_P1NODE_SENSOR_STATUS, //MINT32
MTK_P1NODE_SENSOR_RAW_ORDER, //MINT32
MTK_P1NODE_TWIN_SWITCH, //MINT32
MTK_P1NODE_TWIN_STATUS, //MINT32
MTK_P1NODE_RESIZE_QUALITY_SWITCH, //MINT32
MTK_P1NODE_RESIZE_QUALITY_STATUS, //MINT32
MTK_P1NODE_RESIZE_QUALITY_LEVEL, //MINT32
MTK_P1NODE_RESIZE_QUALITY_SWITCHING, //MBOOL
MTK_P1NODE_RESUME_SHUTTER_TIME_US, //MINT32
MTK_P1NODE_FRAME_START_TIMESTAMP, //MINT64
MTK_P1NODE_FRAME_START_TIMESTAMP_BOOT, //MINT64
MTK_P1NODE_REQUEST_PROCESSED_WITHOUT_WB, //MBOOL
MTK_P1NODE_ISNEED_GMV, //MBOOL
MTK_P2NODE_HIGH_SPEED_VDO_FPS = MTK_P2NODE_START, //MINT32
MTK_P2NODE_HIGH_SPEED_VDO_SIZE, //MSize
MTK_P2NODE_CTRL_CALTM_ENABLE, //MBOOL
MTK_P2NODE_FD_CROP_REGION, //MRect
MTK_P2NODE_CROP_REGION, //MRect // for removing black edge
MTK_P2NODE_DSDN_ENABLE, //MBOOL // for DSDN on/off controled by Policy
MTK_P2NODE_SENSOR_CROP_REGION, //MRect
MTK_3A_AE_HIGH_ISO_BINNING, //MBOOL // for 3HDR high iso binning mode
MTK_SENSOR_SCALER_CROP_REGION, //MRect
MTK_PROCESSOR_CAMINFO = MTK_3A_TUNINING_START, //IMemory
MTK_ISP_ATMS_MAPPING_INFO, //IMemory
MTK_3A_ISP_PROFILE, //MUINT8
MTK_3A_ISP_P1_PROFILE, //MUINT8
MTK_CAMINFO_LCSOUT_INFO, //IMemory
MTK_3A_ISP_BYPASS_LCE, //MBOOL
MTK_3A_ISP_DISABLE_NR, //MBOOL
MTK_3A_ISP_NR3D_SW_PARAMS, //MINT32[14] //GMVX, GMVY, confX, confY, MAX_GMV, frameReset, GMV_Status,ISO_cutoff
MTK_3A_ISP_NR3D_HW_PARAMS, //IMemory
MTK_3A_ISP_LCE_GAIN, //MINT32, bits[0:15]: LCE gain, bits[16:31]: LCE gain confidence ratio (0-100)
MTK_3A_ISP_FUS_NUM, //MINT32
MTK_3A_AE_CAP_PARAM, //IMemory
MTK_3A_AE_CAP_SINGLE_FRAME_HDR, //MUINT8
MTK_3A_AE_BV_TRIGGER, //MBOOL
MTK_3A_AF_LENS_POSITION, //MINT32
MTK_3A_FLICKER_RESULT, //MINT32
MTK_3A_DUMMY_BEFORE_REQUEST_FRAME, //MBOOL // Dummy frame before capture; only for capture intent, not used for preview
MTK_3A_DUMMY_AFTER_REQUEST_FRAME, //MBOOL // Dummy frame after capture; only for capture intent, not used for preview
MTK_3A_MANUAL_AWB_COLORTEMPERATURE_MAX, //MINT32
MTK_3A_MANUAL_AWB_COLORTEMPERATURE_MIN, //MINT32
MTK_3A_MANUAL_AWB_COLORTEMPERATURE, //MINT32
MTK_3A_HDR_MODE, //MUINT8
MTK_3A_AE_HDR_MIXED_ISO, //MUINT32
MTK_3A_AE_ZSL_STABLE, //MINT32 ( MBOOL )
MTK_3A_PGN_ENABLE, //MUINT8
MTK_3A_SKIP_HIGH_QUALITY_CAPTURE, //MUINT8
MTK_3A_AI_SHUTTER, //MBOOL
MTK_3A_FEATURE_AE_EXPOSURE_LEVEL, //MINT32
MTK_3A_FEATURE_AE_TARGET_MODE, //MINT32
MTK_3A_OPEN_ID, //MINT32
MTK_LSC_TBL_DATA, //IMemory
MTK_LSC_TSF_DATA, //IMemory
MTK_LSC_TSF_DUMP_NO, //IMemory
MTK_ISP_P2_ORIGINAL_SIZE, //MSize
MTK_ISP_P2_CROP_REGION, //MRect
MTK_ISP_P2_RESIZER_SIZE, //MSize
MTK_ISP_P2_IN_IMG_FMT, //MINT32, 0 or not exist: RAW->YUV, 1: YUV->YUV
MTK_ISP_P2_TUNING_UPDATE_MODE, //MUINT8, [0 or not exist]: as default; [1]: keep existed parameters but some parts will be updated; [2]: keep all existed parameters (force mode) [3] LPCNR Pass1 [4] LPCNR Pass2
MTK_ISP_P2_IN_IMG_RES_REVISED, //MINT32, describes the revised P2 input image resolution. bit[0:15] width in pixels, bit[16:31] height in pixels. May not exist.
MTK_ISP_APP_TARGET_SIZE, //MINT32, describes the APP target resolution. bit[0:15] width in pixels, bit[16:31] height in pixels. May not exist.
MTK_MSF_SCALE_INDEX, //MINT32, which scale stage index, would only exist with scaling flow
MTK_MSF_FRAME_NUM, //MINT32, After BSS which frame number is this stage using
MTK_TOTAL_MULTI_FRAME_NUM, //MINT32, the MSYUV function uses this input to know the frame number
MTK_TOTAL_MULTI_FRAME_NUM_CAPTURED, //MINT32, MSF function used
MTK_SW_DSDN_VERSION, //MINT32, distinguish different dsdn version
MTK_ISP_COLOR_SPACE, //MINT32
MTK_ISP_DRC_CURVE, //IMemory
MTK_ISP_DRC_CURVE_SIZE, //MINT32
MTK_ISP_FEO_DATA, //IMemory
MTK_ISP_FEO_ENABLE, //MINT32
MTK_ISP_FEO_INFO, //IMemory
MTK_ISP_HLR_RATIO, //MINT32, which is a HDR ratio applied in HLR
MTK_ISP_STAGE, //MINT32
MTK_FOCUS_AREA_POSITION, //MINT32
MTK_FOCUS_AREA_SIZE, //MSize
MTK_FOCUS_AREA_RESULT, //MUINT8
MTK_FOCUS_PAUSE, //MUINT8
MTK_FOCUS_MZ_ON, //MUINT8
MTK_3A_AF_FOCUS_VALUE, //MINT64
MTK_3A_PRV_CROP_REGION, //MRect
MTK_3A_ISP_MDP_TARGET_SIZE, //MSize
MTK_3A_REPEAT_RESULT, //MUINT8
MTK_3A_SKIP_PRECAPTURE, //MBOOL //if CUST_ENABLE_FLASH_DURING_TOUCH is true, MW can skip precapture
MTK_3A_SKIP_BAD_FRAME, //MBOOL
MTK_3A_FLARE_IN_MANUAL_CTRL_ENABLE, //MBOOL
MTK_3A_DYNAMIC_SUBSAMPLE_COUNT, //MINT32 30fps = 1, 60fps = 2, ... , 120fps = 4
MTK_3A_AE_LV_VALUE, //MINT32
MTK_APP_CONTROL, //MINT32
MTK_3A_CUST_PARAMS, //IMemory
MTK_3A_SETTING_CUST_PARAMS, //IMemory
MTK_3A_PERFRAME_INFO, //IMemory
MTK_SENSOR_MODE_INFO_ACTIVE_ARRAY_CROP_REGION, //MRect
MTK_3A_AE_BV, //MINT32
MTK_3A_AE_CWV, //MINT32
MTK_ISP_P2_PROCESSED_RAW, //MINT32
MTK_3A_EXIF_METADATA = MTK_3A_EXIF_START, //IMetadata
MTK_EIS_REGION = MTK_EIS_START, //MINT32
MTK_EIS_INFO, //MINT64
MTK_EIS_VIDEO_SIZE, //MRect
MTK_EIS_NEED_OVERRIDE_TIMESTAMP, //MBOOL
MTK_EIS_LMV_DATA, //IMemory
MTK_STEREO_JPS_MAIN1_CROP = MTK_STEREO_START, //MRect
MTK_STEREO_JPS_MAIN2_CROP, //MRect
MTK_STEREO_SYNC2A_MODE, //MINT32
MTK_STEREO_SYNCAF_MODE, //MINT32
MTK_STEREO_HW_FRM_SYNC_MODE, //MINT32
MTK_STEREO_NOTIFY, //MINT32
MTK_STEREO_SYNC2A_MASTER_SLAVE, //MINT32[2]
MTK_STEREO_SYNC2A_STATUS, //IMemory
MTK_JPG_ENCODE_TYPE, //MINT8
MTK_CONVERGENCE_DEPTH_OFFSET, //MFLOAT
MTK_N3D_WARPING_MATRIX_SIZE, //MUINT32
MTK_P1NODE_MAIN2_HAL_META, //IMetadata
MTK_P2NODE_BOKEH_ISP_PROFILE, //MUINT8
MTK_STEREO_FEATURE_DENOISE_MODE, //MINT32
MTK_STEREO_FEATURE_SENSOR_PROFILE, //MINT32
MTK_P1NODE_MAIN2_APP_META, //IMetadata
MTK_STEREO_FEATURE_OPEN_ID, //MINT32
MTK_STEREO_FRAME_PER_CAPTURE, //MINT32
MTK_STEREO_ENABLE_MFB, //MINT32
MTK_STEREO_BSS_RESULT, //MINT32
MTK_STEREO_FEATURE_FOV_CROP_REGION, //MINT32[6] // p.x, p.y, p.w, p.h, srcW, srcH
MTK_STEREO_DCMF_FEATURE_MODE, //MINT32 // mtk_platform_metadata_enum_dcmf_feature_mode
MTK_STEREO_HDR_EV, //MINT32
MTK_STEREO_DELAY_FRAME_COUNT, //MINT32
MTK_STEREO_DCMF_DEPTHMAP_SIZE, //MSize
MTK_STEREO_WITH_CAMSV, //MBOOL
MTK_FRAMESYNC_ID = MTK_FRAMESYNC_START, //MINT32
MTK_FRAMESYNC_TOLERANCE, //MINT64
MTK_FRAMESYNC_FAILHANDLE, //MINT32
MTK_FRAMESYNC_RESULT, //MINT64
MTK_FRAMESYNC_TYPE, //MINT32
MTK_FRAMESYNC_MODE, //MUINT8
MTK_VHDR_LCEI_DATA = MTK_VHDR_START, //Memory
MTK_VHDR_IMGO_3A_ISP_PROFILE, //MUINT8
MTK_HDR_FEATURE_HDR_HAL_MODE,
MTK_3A_FEATURE_AE_VALID_EXPOSURE_NUM,
MTK_VHDR_MULTIFRAME_TIMESTAMP, //MINT64
MTK_VHDR_MULTIFRAME_EXPOSURE_TIME, //MINT64
MTK_PIPELINE_UNIQUE_KEY = MTK_PIPELINE_START, //MINT32
MTK_PIPELINE_FRAME_NUMBER, //MINT32
MTK_PIPELINE_REQUEST_NUMBER, //MINT32
MTK_PIPELINE_EV_VALUE, //MINT32
MTK_PIPELINE_DUMP_UNIQUE_KEY, //MINT32
MTK_PIPELINE_DUMP_FRAME_NUMBER, //MINT32
MTK_PIPELINE_DUMP_REQUEST_NUMBER, //MINT32
MTK_PIPELINE_VIDEO_RECORD, //MINT32
MTK_NR_MODE = MTK_NR_START, //MINT32
MTK_NR_MNR_THRESHOLD_ISO, //MINT32
MTK_NR_SWNR_THRESHOLD_ISO, //MINT32
MTK_REAL_LV, //MINT32
MTK_ANALOG_GAIN, //MUINT32
MTK_AWB_RGAIN, //MINT32
MTK_AWB_GGAIN, //MINT32
MTK_AWB_BGAIN, //MINT32
MTK_PLUGIN_MODE = MTK_PLUGIN_START, //MINT64
MTK_PLUGIN_COMBINATION_KEY, //MINT64
MTK_PLUGIN_P2_COMBINATION, //MINT64
MTK_PLUGIN_PROCESSED_FRAME_COUNT, //MINT32
MTK_PLUGIN_CUSTOM_HINT, //MINT32
MTK_PLUGIN_DETACT_JOB_SYNC_TOKEN, //MINT64, may not exist.
MTK_PLUGIN_UNIQUEKEY,
MTK_DUALZOOM_DROP_REQ = MTK_DUALZOOM_START, //MINT32
MTK_DUALZOOM_FORCE_ENABLE_P2, //MINT32
MTK_DUALZOOM_DO_FRAME_SYNC, //MINT32
MTK_DUALZOOM_ZOOM_FACTOR, //MINT32
MTK_DUALZOOM_DO_FOV, //MINT32
MTK_DUALZOOM_FOV_RECT_INFO, //MINT32
MTK_DUALZOOM_FOV_CALB_INFO, //MINT32
MTK_DUALZOOM_FOV_MARGIN_PIXEL, //MSize
MTK_DUALCAM_AF_STATE, //MUINT8
MTK_DUALCAM_LENS_STATE, //MUINT8
MTK_DUALCAM_TIMESTAMP, //MINT64
MTK_DUALZOOM_3DNR_MODE, //MINT32
MTK_DUALZOOM_ZOOMRATIO, //MINT32
MTK_DUALZOOM_CENTER_SHIFT, //MINT32
MTK_DUALZOOM_FOV_RATIO, //MFLOAT
MTK_DUALZOOM_REAL_MASTER, //MINT32
MTK_DUALZOOM_FD_TARGET_MASTER, //MINT32
MTK_DUALZOOM_FD_REAL_MASTER, //MINT32 // maybe not set
MTK_LMV_SEND_SWITCH_OUT, //MINT32
MTK_LMV_SWITCH_OUT_RESULT, //MINT32
MTK_LMV_VALIDITY, //MINT32
MTK_VSDOF_P1_MAIN1_ISO, //MINT32
MTK_DUALZOOM_IS_STANDBY, //MBOOL
MTK_DUALZOOM_CAP_CROP, //MRect
MTK_DUALZOOM_MASTER_UPDATE_MODE, //MBOOL
MTK_DUALZOOM_STREAMING_NR, //MINT32
MTK_FEATUREPIPE_APP_MODE = MTK_FEATUREPIPE_START, //MINT32
MTK_POSTPROC_TYPE = MTK_POSTPROC_START, //MINT32
MTK_FEATURE_STREAMING = MTK_FEATURE_START, //MINT64
MTK_FEATURE_CAPTURE, //MINT64
MTK_FEATURE_CAPTURE_PHYSICAL, //MINT64
MTK_FEATURE_FREE_MEMORY_MBYTE, //MINT32
MTK_FEATURE_MFNR_NVRAM_QUERY_INDEX, //MINT32
MTK_FEATURE_MFNR_NVRAM_DECISION_ISO, //MINT32
MTK_FEATURE_MFNR_TUNING_INDEX_HINT, //MINT64
MTK_FEATURE_MFNR_FINAL_EXP, //MINT32
MTK_FEATURE_MFNR_OPEN_ID, //MINT32
MTK_FEATURE_AINR_MDLA_MODE, //MINT32
MTK_ISP_AINR_MDLA_MODE, //MINT32
MTK_ISP_LTM_BIT_MODE, //MINT32
MTK_FEATURE_BSS_SELECTED_FRAME_COUNT, //MINT32
MTK_FEATURE_BSS_FORCE_DROP_NUM, //MINT32
MTK_FEATURE_BSS_FIXED_LSC_TBL_DATA, //MUINT8
MTK_FEATURE_BSS_PROCESS, //MINT32
MTK_FEATURE_BSS_ISGOLDEN, //MBOOL
MTK_FEATURE_BSS_REORDER, //MBOOL
MTK_FEATURE_BSS_MANUAL_ORDER, //MUINT8
MTK_FEATURE_BSS_RRZO_DATA, //MUINT8
MTK_FEATURE_BSS_DOWNSAMPLE, //MBOOL
MTK_FEATURE_PACK_RRZO, //MUINT8
MTK_FEATURE_FACE_RECTANGLES, //MRect array
MTK_FEATURE_FACE_POSE_ORIENTATIONS, //MINT32[n*3] array, each entry includes: xAxis, yAxis, zAxis
MTK_FEATURE_CAP_YUV_PROCESSING, //MUINT8
MTK_FEATURE_CAP_PIPE_DCE_CONTROL, //MUINT8
MTK_FEATURE_MULTIFRAMENODE_BYPASSED, //MUINT8
MTK_FEATURE_FACE_APPLIED_GAMMA, //MINT32
MTK_FEATURE_CAP_PQ_USERID, //MINT64
MTK_FEATURE_FLIP_IN_P2A, //MINT32
MTK_FSC_CROP_DATA = MTK_FSC_START, //IMemory
MTK_FSC_WARP_DATA, //IMemory
MTK_STAGGER_ME_META, //IMetadata
MTK_STAGGER_SE_META, //IMetadata
MTK_STAGGER_BLOB_IMGO_ORDER //MUINT8
} mtk_platform_metadata_tag_t;
/******************************************************************************
*
******************************************************************************/
typedef enum mtk_platform_3a_exif_metadata_tag {
MTK_3A_EXIF_FNUMBER, //MINT32
MTK_3A_EXIF_FOCAL_LENGTH, //MINT32
MTK_3A_EXIF_FOCAL_LENGTH_35MM, //MINT32
MTK_3A_EXIF_SCENE_MODE, //MINT32
MTK_3A_EXIF_AWB_MODE, //MINT32
MTK_3A_EXIF_LIGHT_SOURCE, //MINT32
MTK_3A_EXIF_EXP_PROGRAM, //MINT32
MTK_3A_EXIF_SCENE_CAP_TYPE, //MINT32
MTK_3A_EXIF_FLASH_LIGHT_TIME_US, //MINT32
MTK_3A_EXIF_AE_METER_MODE, //MINT32
MTK_3A_EXIF_AE_EXP_BIAS, //MINT32
MTK_3A_EXIF_CAP_EXPOSURE_TIME, //MINT32
MTK_3A_EXIF_AE_ISO_SPEED, //MINT32
MTK_3A_EXIF_REAL_ISO_VALUE, //MINT32
MTK_3A_EXIF_AE_BRIGHTNESS_VALUE, //MINT32
MTK_3A_EXIF_FLASH_FIRING_STATUS, //MINT32
MTK_3A_EXIF_FLASH_RETURN_DETECTION, //MINT32
MTK_3A_EXIF_FLASH_MODE, //MINT32
MTK_3A_EXIF_FLASH_FUNCTION, //MINT32
MTK_3A_EXIF_FLASH_REDEYE, //MINT32
MTK_3A_EXIF_DEBUGINFO_BEGIN, // debug info begin
// key: MINT32
MTK_3A_EXIF_DBGINFO_AAA_KEY = MTK_3A_EXIF_DEBUGINFO_BEGIN, //MINT32
MTK_3A_EXIF_DBGINFO_AAA_DATA,
MTK_3A_EXIF_DBGINFO_SDINFO_KEY,
MTK_3A_EXIF_DBGINFO_SDINFO_DATA,
MTK_3A_EXIF_DBGINFO_ISP_KEY,
MTK_3A_EXIF_DBGINFO_ISP_DATA,
//
MTK_CMN_EXIF_DBGINFO_KEY,
MTK_CMN_EXIF_DBGINFO_DATA,
//
MTK_MF_EXIF_DBGINFO_MF_KEY,
MTK_MF_EXIF_DBGINFO_MF_DATA,
//
MTK_N3D_EXIF_DBGINFO_KEY,
MTK_N3D_EXIF_DBGINFO_DATA,
//
MTK_POSTNR_EXIF_DBGINFO_NR_KEY,
MTK_POSTNR_EXIF_DBGINFO_NR_DATA,
//
MTK_RESVB_EXIF_DBGINFO_KEY,
MTK_RESVB_EXIF_DBGINFO_DATA,
//
MTK_RESVC_EXIF_DBGINFO_KEY,
MTK_RESVC_EXIF_DBGINFO_DATA,
// data: Memory
MTK_3A_EXIF_DEBUGINFO_END, // debug info end
} mtk_platform_3a_exif_metadata_tag_t;
// MTK_3A_FEATURE_AE_EXPOSURE_LEVEL
typedef enum mtk_camera_metadata_enum_ae_exposure_level {
MTK_3A_FEATURE_AE_EXPOSURE_LEVEL_NONE = 0,
MTK_3A_FEATURE_AE_EXPOSURE_LEVEL_SHORT,
MTK_3A_FEATURE_AE_EXPOSURE_LEVEL_NORMAL,
MTK_3A_FEATURE_AE_EXPOSURE_LEVEL_LONG,
} mtk_camera_metadata_enum_ae_exposure_level_t;
// MTK_3A_FEATURE_AE_TARGET_MODE
typedef enum mtk_camera_metadata_enum_ae_target_mode {
MTK_3A_FEATURE_AE_TARGET_MODE_NORMAL = 0,
MTK_3A_FEATURE_AE_TARGET_MODE_IVHDR,
MTK_3A_FEATURE_AE_TARGET_MODE_MVHDR,
MTK_3A_FEATURE_AE_TARGET_MODE_ZVHDR,
MTK_3A_FEATURE_AE_TARGET_MODE_LE_FIX,
MTK_3A_FEATURE_AE_TARGET_MODE_SE_FIX,
MTK_3A_FEATURE_AE_TARGET_MODE_4CELL_MVHDR,
MTK_3A_FEATURE_AE_TARGET_MODE_MSTREAM_VHDR,
MTK_3A_FEATURE_AE_TARGET_MODE_MSTREAM_VHDR_RTO1X,
MTK_3A_FEATURE_AE_TARGET_MODE_STAGGER_2EXP,
MTK_3A_FEATURE_AE_TARGET_MODE_STAGGER_3EXP,
} mtk_camera_metadata_enum_ae_target_mode_t;
//MTK_3A_FEATURE_AE_VALID_EXPOSURE_NUM
typedef enum mtk_camera_metadata_enum_stagger_valid_exposure_num {
MTK_STAGGER_VALID_EXPOSURE_NON = 0,
MTK_STAGGER_VALID_EXPOSURE_1 = 1,
MTK_STAGGER_VALID_EXPOSURE_2 = 2,
MTK_STAGGER_VALID_EXPOSURE_3 = 3
} mtk_camera_metadata_enum_stagger_valid_exposure_num_t;
//MTK_3A_ISP_FUS_NUM
typedef enum mtk_camera_metadata_enum_3a_isp_fus_num {
MTK_3A_ISP_FUS_NUM_NON = 0,
MTK_3A_ISP_FUS_NUM_1 = 1,
MTK_3A_ISP_FUS_NUM_2 = 2,
MTK_3A_ISP_FUS_NUM_3 = 3,
} mtk_camera_metadata_enum_3a_isp_fus_num_t;
/******************************************************************************
*
******************************************************************************/
typedef enum mtk_platform_metadata_enum_nr_mode {
MTK_NR_MODE_OFF = 0,
MTK_NR_MODE_MNR,
MTK_NR_MODE_SWNR,
MTK_NR_MODE_AUTO
} mtk_platform_metadata_enum_nr_mode_t;
typedef enum mtk_platform_metadata_enum_mfb_mode {
MTK_MFB_MODE_OFF = 0,
MTK_MFB_MODE_MFLL,
MTK_MFB_MODE_AIS,
MTK_MFB_MODE_NUM,
} mtk_platform_metadata_enum_mfb_mode_t;
typedef enum mtk_platform_metadata_enum_custom_hint {
MTK_CUSTOM_HINT_0 = 0,
MTK_CUSTOM_HINT_1,
MTK_CUSTOM_HINT_2,
MTK_CUSTOM_HINT_3,
MTK_CUSTOM_HINT_4,
MTK_CUSTOM_HINT_NUM,
} mtk_platform_metadata_enum_custom_hint_t;
typedef enum mtk_platform_metadata_enum_plugin_mode {
MTK_PLUGIN_MODE_COMBINATION = 1 << 0,
MTK_PLUGIN_MODE_NR = 1 << 1,
MTK_PLUGIN_MODE_HDR = 1 << 2,
MTK_PLUGIN_MODE_MFNR = 1 << 3,
MTK_PLUGIN_MODE_COPY = 1 << 4,
MTK_PLUGIN_MODE_TEST_PRV = 1 << 5,
MTK_PLUGIN_MODE_BMDN = 1 << 6,
MTK_PLUGIN_MODE_MFHR = 1 << 7,
MTK_PLUGIN_MODE_BMDN_3rdParty = 1 << 8,
MTK_PLUGIN_MODE_MFHR_3rdParty = 1 << 9,
MTK_PLUGIN_MODE_FUSION_3rdParty = 1 << 10,
MTK_PLUGIN_MODE_VSDOF_3rdParty = 1 << 11,
MTK_PLUGIN_MODE_COLLECT = 1 << 12,
MTK_PLUGIN_MODE_HDR_3RD_PARTY = 1 << 13,
MTK_PLUGIN_MODE_MFNR_3RD_PARTY = 1 << 14,
MTK_PLUGIN_MODE_BOKEH_3RD_PARTY = 1 << 15,
MTK_PLUGIN_MODE_DCMF_3RD_PARTY = 1 << 16,
} mtk_platform_metadata_enum_plugin_mode_t;
typedef enum mtk_platform_metadata_enum_p2_plugin_combination {
MTK_P2_RAW_PROCESSOR = 1 << 0,
MTK_P2_ISP_PROCESSOR = 1 << 1,
MTK_P2_YUV_PROCESSOR = 1 << 2,
MTK_P2_MDP_PROCESSOR = 1 << 3,
MTK_P2_CAPTURE_REQUEST = 1 << 4,
MTK_P2_PREVIEW_REQUEST = 1 << 5
} mtk_platform_metadata_enum_p2_plugin_combination;
typedef enum mtk_platform_metadata_enum_isp_color_space {
MTK_ISP_COLOR_SPACE_SRGB = 0 ,
MTK_ISP_COLOR_SPACE_DISPLAY_P3 = 1 ,
MTK_ISP_COLOR_SPACE_CUSTOM_1 = 2
} mtk_platform_metadata_enum_isp_color_space;
typedef enum mtk_platform_metadata_enum_dualzoom_drop_req {
MTK_DUALZOOM_DROP_NEVER_DROP = 0,
MTK_DUALZOOM_DROP_NONE = 1,
MTK_DUALZOOM_DROP_DIRECTLY = 2,
MTK_DUALZOOM_DROP_NEED_P1,
MTK_DUALZOOM_DROP_NEED_SYNCMGR,
MTK_DUALZOOM_DROP_NEED_SYNCMGR_NEED_STREAM_F_PIPE,
} mtk_platform_metadata_enum_dualzoom_drop_req_t;
typedef enum mtk_platform_metadata_enum_p1_sensor_status {
MTK_P1_SENSOR_STATUS_NONE = 0,
MTK_P1_SENSOR_STATUS_STREAMING = 1,
MTK_P1_SENSOR_STATUS_SW_STANDBY = 2,
MTK_P1_SENSOR_STATUS_HW_STANDBY = 3,
} mtk_platform_metadata_enum_p1_sensor_status_t;
typedef enum mtk_platform_metadata_enum_p1_twin_switch {
MTK_P1_TWIN_SWITCH_NONE = 0,
MTK_P1_TWIN_SWITCH_ONE_TG = 1,
MTK_P1_TWIN_SWITCH_TWO_TG = 2
} mtk_platform_metadata_enum_p1_twin_switch_t;
typedef enum mtk_platform_metadata_enum_p1_twin_status {
MTK_P1_TWIN_STATUS_NONE = 0,
MTK_P1_TWIN_STATUS_TG_MODE_1 = 1,
MTK_P1_TWIN_STATUS_TG_MODE_2 = 2,
MTK_P1_TWIN_STATUS_TG_MODE_3 = 3,
} mtk_platform_metadata_enum_p1_twin_status_t;
typedef enum mtk_platform_metadata_enum_p1_resize_quality_switch {
MTK_P1_RESIZE_QUALITY_SWITCH_NONE = 0,
MTK_P1_RESIZE_QUALITY_SWITCH_L_L = 1,
MTK_P1_RESIZE_QUALITY_SWITCH_L_H = 2,
MTK_P1_RESIZE_QUALITY_SWITCH_H_L = 3,
MTK_P1_RESIZE_QUALITY_SWITCH_H_H = 4,
} mtk_platform_metadata_enum_p1_resize_quality_switch_t;
typedef enum mtk_platform_metadata_enum_p1_resize_quality_status {
MTK_P1_RESIZE_QUALITY_STATUS_NONE = 0,
MTK_P1_RESIZE_QUALITY_STATUS_ACCEPT = 1,
MTK_P1_RESIZE_QUALITY_STATUS_IGNORE = 2,
MTK_P1_RESIZE_QUALITY_STATUS_REJECT = 3,
MTK_P1_RESIZE_QUALITY_STATUS_ILLEGAL = 4,
} mtk_platform_metadata_enum_p1_resize_quality_status_t;
typedef enum mtk_platform_metadata_enum_p1_resize_quality_level {
MTK_P1_RESIZE_QUALITY_LEVEL_UNKNOWN = 0,
MTK_P1_RESIZE_QUALITY_LEVEL_L = 1,
MTK_P1_RESIZE_QUALITY_LEVEL_H = 2,
} mtk_platform_metadata_enum_p1_resize_quality_level_t;
typedef enum mtk_platform_metadata_enum_lmv_result {
MTK_LMV_RESULT_OK = 0,
MTK_LMV_RESULT_FAILED,
MTK_LMV_RESULT_SWITCHING
} mtk_platform_metadata_enum_lmv_result_t;
typedef enum mtk_platform_metadata_enum_featurepipe_app_mode {
MTK_FEATUREPIPE_PHOTO_PREVIEW = 0,
MTK_FEATUREPIPE_VIDEO_PREVIEW = 1,
MTK_FEATUREPIPE_VIDEO_RECORD = 2,
MTK_FEATUREPIPE_VIDEO_STOP = 3,
} mtk_platform_metadata_enum_featurepipe_app_mode_t;
typedef enum mtk_platform_metadata_enum_dcmf_feature_mode {
MTK_DCMF_FEATURE_BOKEH = 0,
MTK_DCMF_FEATURE_MFNR_BOKEH = 1,
MTK_DCMF_FEATURE_HDR_BOKEH = 2,
} mtk_platform_metadata_enum_dcmf_feature_mode_t;
typedef enum mtk_platform_metadata_enum_smvr_fps {
MTK_SMVR_FPS_30 = 0,
MTK_SMVR_FPS_120 = 1,
MTK_SMVR_FPS_240 = 2,
MTK_SMVR_FPS_480 = 3,
MTK_SMVR_FPS_960 = 4,
} mtk_platform_metadata_enum_smvr_fps_t;
//MTK_FRAMESYNC_FAILHANDLE
typedef enum mtk_platform_metadata_enum_fremesync_failhandle {
MTK_FRAMESYNC_FAILHANDLE_CONTINUE,
MTK_FRAMESYNC_FAILHANDLE_DROP,
} mtk_platform_metadata_enum_fremesync_failhandle_t;
//MTK_FRAMESYNC_RESULT
typedef enum mtk_platform_metadata_enum_fremesync_result {
MTK_FRAMESYNC_RESULT_PASS,
MTK_FRAMESYNC_RESULT_FAIL_CONTINUE,
MTK_FRAMESYNC_RESULT_FAIL_DROP,
} mtk_platform_metadata_enum_fremesync_result_t;
//MTK_FRAMESYNC_MODE
typedef enum mtk_platform_metadata_enum_fremesync_mode {
MTK_FRAMESYNC_MODE_VSYNC_ALIGNMENT,
MTK_FRAMESYNC_MODE_READOUT_CENTER_ALIGNMENT,
} mtk_platform_metadata_enum_fremesync_mode_t;
//MTK_FEATURE_MULTIFRAMENODE_BYPASSED
typedef enum mtk_platform_metadata_enum_multiframenode_bypassed {
MTK_FEATURE_MULTIFRAMENODE_NOT_BYPASSED = 0,
MTK_FEATURE_MULTIFRAMENODE_TO_BE_BYPASSED = 1
} mtk_platform_metadata_enum_mfllnode_bypassed_t;
//MTK_FEATURE_BSS_PROCESS
typedef enum mtk_platform_metadata_enum_bss_processing {
MTK_FEATURE_BSS_PROCESS_ENABLE = 0,
MTK_FEATURE_BSS_PROCESS_DISABLE = 1
} mtk_platform_metadata_enum_bss_processing_t;
//MTK_FEATURE_BSS_MANUAL_ORDER
typedef enum mtk_platform_metadata_enum_bss_manual_order {
MTK_FEATURE_BSS_MANUAL_ORDER_OFF = 0,
MTK_FEATURE_BSS_MANUAL_ORDER_GOLDEN = 1
} mtk_platform_metadata_enum_bss_manual_order_t;
//MTK_FEATURE_CAP_YUV_PROCESSING
typedef enum mtk_platform_metadata_enum_cap_yuv_processing {
MTK_FEATURE_CAP_YUV_PROCESSING_NOT_NEEDED = 0,
MTK_FEATURE_CAP_YUV_PROCESSING_NEEDED = 1
} mtk_platform_metadata_enum_cap_yuv_processing_t;
//MTK_FEATURE_CAP_PIPE_DCE_CONTROL
typedef enum mtk_platform_metadata_enum_cap_pipe_control {
MTK_FEATURE_CAP_PIPE_DCE_ENABLE_BUT_NOT_APPLY = 2,
MTK_FEATURE_CAP_PIPE_DCE_MANUAL_DISABLE = 1,
MTK_FEATURE_CAP_PIPE_DCE_DEFAULT_APPLY = 0
} mtk_platform_metadata_enum_cap_pipe_dce_control_t;
// MTK_FEATURE_AINR_MDLA_MODE, MTK_ISP_AINR_MDLA_MODE
typedef enum mtk_platform_metadata_enum_ainr_mdla_mode {
MTK_FEATURE_AINR_MDLA_MODE_NONE = 0,
MTK_FEATURE_AINR_MDLA_MODE_DRCOUT_16BIT = 1,
MTK_FEATURE_AINR_MDLA_MODE_NNOUT_12BIT = 2,
MTK_FEATURE_AINR_MDLA_MODE_NNOUT_16BIT = 3,
} mtk_platform_metadata_enum_ainr_mdla_mode_t;
//MTK_ISP_P2_PROCESSED_RAW
typedef enum mtk_platform_metadata_enum_p2_processed_raw {
MTK_ISP_P2_PROCESSED_RAW_NOT_NEEDED = 0,
MTK_ISP_P2_PROCESSED_RAW_NEEDED = 1
} mtk_platform_metadata_enum_p2_processed_raw_t;
//MTK_DUALZOOM_STREAMING_NR
typedef enum mtk_platform_metadata_enum_dualzoom_streaming_nr {
MTK_DUALZOOM_STREAMING_NR_AUTO = 0,
MTK_DUALZOOM_STREAMING_NR_OFF = 1
} mtk_platform_metadata_enum_dualzoom_streaming_nr_t;
//MTK_STAGGER_BLOB_IMGO_ORDER
typedef enum mtk_platform_metadata_enum_stagger_blob_imgo_order {
MTK_STAGGER_IMGO_NONE = 0,
MTK_STAGGER_IMGO_NE = 1,
MTK_STAGGER_IMGO_ME = 2,
MTK_STAGGER_IMGO_SE = 3
} mtk_platform_metadata_enum_stagger_blob_imgo_order_t;
//MTK_3A_EXIF_FLASH_FIRING_STATUS
typedef enum mtk_platform_metadata_enum_3a_exif_flash_firing_status_t {
MTK_3A_EXIF_FLASH_FIRING_STATUS_NOT_FIRED = 0,
MTK_3A_EXIF_FLASH_FIRING_STATUS_FIRED = 1,
} mtk_platform_metadata_enum_3a_exif_flash_firing_status_t;
//MTK_3A_EXIF_FLASH_RETURN_DETECTION
typedef enum mtk_platform_metadata_enum_3a_exif_flash_return_detection_t {
MTK_3A_EXIF_FLASH_RETURN_DETECTION_NOT_SUPPORT = 0,
MTK_3A_EXIF_FLASH_RETURN_DETECTION_RESERVED = 1,
MTK_3A_EXIF_FLASH_RETURN_DETECTION_STROBE_NOT_DETECTED = 2,
MTK_3A_EXIF_FLASH_RETURN_DETECTION_STROBE_DETECTED = 3,
} mtk_platform_metadata_enum_3a_exif_flash_return_detection_t;
//MTK_3A_EXIF_FLASH_MODE
typedef enum mtk_platform_metadata_enum_3a_exif_flash_mode_t {
MTK_3A_EXIF_FLASH_MODE_UNKNOWN = 0,
MTK_3A_EXIF_FLASH_MODE_COMPULSORY_FIRING = 1,
MTK_3A_EXIF_FLASH_MODE_COMPULSORY_SUPPRESSION = 2,
MTK_3A_EXIF_FLASH_MODE_AUTO = 3,
} mtk_platform_metadata_enum_3a_exif_flash_mode_t;
//MTK_3A_EXIF_FLASH_FUNCTION
typedef enum mtk_platform_metadata_enum_3a_exif_flash_function_t {
MTK_3A_EXIF_FLASH_FUNCTION_SUPPORT = 0,
MTK_3A_EXIF_FLASH_FUNCTION_NOT_SUPPORT = 1,
} mtk_platform_metadata_enum_3a_exif_flash_function_t;
//MTK_3A_EXIF_FLASH_REDEYE
typedef enum mtk_platform_metadata_enum_3a_exif_flash_redeye_t {
MTK_3A_EXIF_FLASH_REDEYE_NOT_SUPPORT = 0,
MTK_3A_EXIF_FLASH_REDEYE_SUPPORT = 1,
} mtk_platform_metadata_enum_3a_exif_flash_redeye_t;
//MTK_FEATURE_ABF
typedef enum mtk_platform_metadata_enum_abf_mode {
MTK_ABF_MODE_OFF = 0,
MTK_ABF_MODE_ON,
} mtk_platform_metadata_enum_abf_mode_t;
#endif

File diff suppressed because it is too large

@ -23,7 +23,6 @@
#include <opencv2/core/core.hpp> #include <opencv2/core/core.hpp>
#include "Camera2Helper.h" #include "Camera2Helper.h"
#include <mutex> #include <mutex>
#include <map>
#include <set> #include <set>
/** /**
@ -40,9 +39,6 @@ static const uint64_t kMaxExposureTime = static_cast<uint64_t>(250000000);
#define WAIT_AF_LOCKED 4 #define WAIT_AF_LOCKED 4
#define PREVIEW_REQUEST_IDX 0 #define PREVIEW_REQUEST_IDX 0
#define CAPTURE_REQUEST_IDX 1
#define DEFAULT_WARMUP_TIME 250 // 250ms
class CameraManager class CameraManager
{ {
@ -85,11 +81,10 @@ public:
unsigned int orientation:3; unsigned int orientation:3;
unsigned int zoom : 1; unsigned int zoom : 1;
unsigned int wait3ALocked : 3; unsigned int wait3ALocked : 3;
unsigned int burstRawCapture : 3; unsigned int burstRawCapture : 2;
unsigned int customHdr : 1; unsigned int customHdr : 1;
unsigned int hdrStep : 3; unsigned int hdrStep : 3;
unsigned int minFps : 4; unsigned int reserved : 12;
unsigned int reserved : 7;
int64_t exposureTime; int64_t exposureTime;
unsigned int sensitivity; unsigned int sensitivity;
int compensation; int compensation;
@ -165,12 +160,12 @@ public:
void CreateSession(ANativeWindow* previewWindow, ANativeWindow* jpgWindow, bool manaulPreview, int32_t imageRotation, int32_t width, int32_t height); void CreateSession(ANativeWindow* previewWindow, ANativeWindow* jpgWindow, bool manaulPreview, int32_t imageRotation, int32_t width, int32_t height);
void CreateSession(ANativeWindow* previewWindow); void CreateSession(ANativeWindow* previewWindow);
CaptureRequest* CreateRequest(bool isPreviewRequest, int32_t sensitivity = -1); CaptureRequest* CreateRequest(bool isPreviewRequest);
void DestroyRequest(CaptureRequest* request); void DestroyRequest(CaptureRequest* request);
void DestroySession(); void DestroySession();
virtual bool on_image(cv::Mat rgb); virtual bool on_image(cv::Mat& rgb);
virtual void on_error(const std::string& msg); virtual void on_error(const std::string& msg);
virtual void on_image(const unsigned char* nv21, int nv21_width, int nv21_height); virtual void on_image(const unsigned char* nv21, int nv21_width, int nv21_height);
virtual void onDisconnected(ACameraDevice* device); virtual void onDisconnected(ACameraDevice* device);
@ -188,7 +183,6 @@ public:
void CopyPreviewRequest(ACaptureRequest* request, const ACameraMetadata* previewResult); void CopyPreviewRequest(ACaptureRequest* request, const ACameraMetadata* previewResult);
void FireBurstCapture(); void FireBurstCapture();
void FireOneCapture(uint64_t ts);
uint32_t GetLdr() const uint32_t GetLdr() const
{ {
@ -201,17 +195,10 @@ public:
} }
bool IsCameraAvailable(const std::string& cameraId); bool IsCameraAvailable(const std::string& cameraId);
int64_t GetTimestamp(const ACameraMetadata* result);
static bool convertAImageToNv21(AImage* image, uint8_t** nv21, int32_t& width, int32_t& height); static bool convertAImageToNv21(AImage* image, uint8_t** nv21, int32_t& width, int32_t& height);
static void EnumCameraResult(ACameraMetadata* result, CAPTURE_RESULT& captureResult); static void EnumCameraResult(ACameraMetadata* result, CAPTURE_RESULT& captureResult);
protected:
void SetupMFNR(ACameraMetadata* characteristics, ACaptureRequest* request, bool ais, int32_t sensitivity);
void Setup3DNR(ACameraMetadata* characteristics, ACaptureRequest* request, int32_t sensitivity);
void SetupHDR(ACameraMetadata* characteristics, ACaptureRequest* request, int32_t sensitivity);
bool SetupTonemapCurve(ACameraMetadata* characteristics, ACaptureRequest* request);
protected: protected:
std::mutex m_locker; std::mutex m_locker;
std::set<std::string> m_availableCameras; std::set<std::string> m_availableCameras;
@ -249,11 +236,9 @@ protected:
bool mCaptureTriggered; bool mCaptureTriggered;
bool mFocusTriggered; bool mFocusTriggered;
bool mCaptureDispatched;
uint32_t mStableFrameCount;
CAPTURE_RESULT mResult; CAPTURE_RESULT mResult;
uint64_t m_startTime; unsigned long long m_startTime;
protected: protected:
@ -274,24 +259,32 @@ protected:
ACameraOutputTarget* mOutputTarget; ACameraOutputTarget* mOutputTarget;
ACaptureSessionOutput* mSessionOutput; ACaptureSessionOutput* mSessionOutput;
AImageReader* mImageReader2;
ANativeWindow* mImageWindow2;
ACameraOutputTarget* mOutputTarget2;
ACaptureSessionOutput* mSessionOutput2;
std::shared_ptr<ACameraMetadata> mCharacteristics; std::shared_ptr<ACameraMetadata> mCharacteristics;
std::vector<CaptureRequest*> mCaptureRequests; std::vector<CaptureRequest*> mCaptureRequests;
ACameraCaptureSession* capture_session;
std::shared_ptr<ACameraMetadata> mPreviewResults; std::shared_ptr<ACameraMetadata> mPreviewResults;
std::vector<std::shared_ptr<ACameraMetadata> > mCaptureResults; std::vector<std::shared_ptr<ACameraMetadata> > mCaptureResults;
std::map<int64_t, std::shared_ptr<ACameraMetadata> > mCaptureResultMap;
uint32_t mLdr; uint32_t mLdr;
uint32_t mFinalLdr; uint32_t mFinalLdr;
uint32_t mFinalBurstCaptures; uint32_t mFinalBurstCaptures;
int32_t mFinalOutputFormat; int32_t mFinalOutputFormat;
std::vector<std::shared_ptr<AImage> > mCaptureFrames; std::vector<std::shared_ptr<AImage> > mCaptureFrames;
// cv::Mat mOneFrame; cv::Mat mOneFrame;
std::vector<std::pair<int64_t, cv::Mat> > mOneFrame;
std::vector<std::vector<uint8_t> > mRawFrames; std::vector<std::vector<uint8_t> > mRawFrames;
int64_t m_minTimestamp;
ACameraCaptureSession* capture_session;
// AImageReader* image_reader;
// ANativeWindow* image_reader_surface;
// ACameraOutputTarget* image_reader_target;
// ACaptureRequest* capture_request;
// ACaptureSessionOutput* capture_session_output;
}; };

@ -1,428 +0,0 @@
//
// Created by Matthew on 2025/3/1.
//
#include "RTSPRecorder.h"
#include <chrono>
#include <thread>
#include <android/log.h>
#include <errno.h>
extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libavutil/avutil.h>
#include <libavutil/opt.h>
#include <libavutil/time.h>
}
#define LOG_TAG "libcurl"
#define LOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, LOG_TAG, __VA_ARGS__)
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
#include <libavutil/log.h>
#include <android/log.h>
void ffmpeg_log_callback(void *ptr, int level, const char *fmt, va_list vl) {
// Map FFmpeg log levels to Android log levels
int android_log_level;
switch (level) {
case AV_LOG_PANIC:
case AV_LOG_FATAL:
android_log_level = ANDROID_LOG_FATAL;
break;
case AV_LOG_ERROR:
android_log_level = ANDROID_LOG_ERROR;
break;
case AV_LOG_WARNING:
android_log_level = ANDROID_LOG_WARN;
break;
case AV_LOG_INFO:
android_log_level = ANDROID_LOG_INFO;
break;
case AV_LOG_VERBOSE:
android_log_level = ANDROID_LOG_VERBOSE;
break;
case AV_LOG_DEBUG:
case AV_LOG_TRACE:
android_log_level = ANDROID_LOG_DEBUG;
break;
default:
android_log_level = ANDROID_LOG_INFO;
break;
}
// Format the log message
char log_message[1024];
vsnprintf(log_message, sizeof(log_message), fmt, vl);
// Send the log message to logcat
__android_log_print(android_log_level, "FFmpeg", "%s", log_message);
}
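The callback above only takes effect once it is registered with FFmpeg; as a usage note, the registration is the same pair of calls that dumpRtspToMp4 performs in its debug-only block further down:

av_log_set_callback(ffmpeg_log_callback);   // route all FFmpeg log output through logcat
av_log_set_level(AV_LOG_WARNING);           // and drop anything less severe than warnings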
int setup_output_streams(AVFormatContext *input_ctx, AVFormatContext *output_ctx) {
// Copy streams and fix time_base
for (unsigned int i = 0; i < input_ctx->nb_streams; i++) {
AVStream *in_stream = input_ctx->streams[i];
AVStream *out_stream = avformat_new_stream(output_ctx, NULL);
if (!out_stream) {
return AVERROR_UNKNOWN;
}
// Copy codec parameters
int ret = avcodec_parameters_copy(out_stream->codecpar, in_stream->codecpar);
if (ret < 0) {
return ret;
}
// Fix time base
out_stream->time_base = in_stream->time_base;
// Clear any existing flags
out_stream->codecpar->codec_tag = 0;
}
return 0;
}
int write_mp4_header(AVFormatContext *output_ctx) {
AVDictionary *opts = NULL;
// MP4 specific options
av_dict_set(&opts, "movflags", "faststart+frag_keyframe", 0);
av_dict_set(&opts, "brand", "mp42", 0);
// Write header
int ret = avformat_write_header(output_ctx, &opts);
if (ret < 0) {
char errbuf[AV_ERROR_MAX_STRING_SIZE];
av_strerror(ret, errbuf, AV_ERROR_MAX_STRING_SIZE);
fprintf(stderr, "Header write failed: %s (code: %d)\n", errbuf, ret);
}
av_dict_free(&opts);
return ret;
}
void dumpRtmpToMp4(const char* rtmpUrl, const char* outputPath, uint32_t duration, net_handle_t netHandle)
{
AVFormatContext* inputFormatContext = nullptr;
AVFormatContext* outputFormatContext = nullptr;
AVPacket packet;
// Open input RTMP stream
if (avformat_open_input(&inputFormatContext, rtmpUrl, nullptr, nullptr) != 0) {
fprintf(stderr, "Could not open input file '%s'\n", rtmpUrl);
return;
}
// Retrieve input stream information
if (avformat_find_stream_info(inputFormatContext, nullptr) < 0) {
fprintf(stderr, "Could not find stream information\n");
avformat_close_input(&inputFormatContext);
return;
}
// Open output MP4 file
if (avformat_alloc_output_context2(&outputFormatContext, nullptr, "mp4", outputPath) < 0) {
fprintf(stderr, "Could not create output context\n");
avformat_close_input(&inputFormatContext);
return;
}
// Copy stream information from input to output
for (unsigned int i = 0; i < inputFormatContext->nb_streams; i++) {
AVStream* inStream = inputFormatContext->streams[i];
AVStream* outStream = avformat_new_stream(outputFormatContext, nullptr);
if (!outStream) {
fprintf(stderr, "Failed to allocate output stream\n");
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
return;
}
if (avcodec_parameters_copy(outStream->codecpar, inStream->codecpar) < 0) {
fprintf(stderr, "Failed to copy codec parameters\n");
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
return;
}
outStream->codecpar->codec_tag = 0;
}
// Open output file
if (!(outputFormatContext->oformat->flags & AVFMT_NOFILE)) {
if (avio_open(&outputFormatContext->pb, outputPath, AVIO_FLAG_WRITE) < 0) {
fprintf(stderr, "Could not open output file '%s'\n", outputPath);
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
return;
}
}
// Write output file header
if (avformat_write_header(outputFormatContext, nullptr) < 0) {
fprintf(stderr, "Error occurred when writing header to output file\n");
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
return;
}
// Start a thread to stop the streaming after the specified duration
std::thread stop_thread([&]() {
std::this_thread::sleep_for(std::chrono::milliseconds(duration));
av_read_pause(inputFormatContext);
});
// Read packets from input and write them to output
while (av_read_frame(inputFormatContext, &packet) >= 0) {
AVStream* inStream = inputFormatContext->streams[packet.stream_index];
AVStream* outStream = outputFormatContext->streams[packet.stream_index];
packet.pts = av_rescale_q_rnd(packet.pts, inStream->time_base, outStream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
packet.dts = av_rescale_q_rnd(packet.dts, inStream->time_base, outStream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
packet.duration = av_rescale_q(packet.duration, inStream->time_base, outStream->time_base);
packet.pos = -1;
if (av_interleaved_write_frame(outputFormatContext, &packet) < 0) {
fprintf(stderr, "Error muxing packet\n");
break;
}
av_packet_unref(&packet);
}
stop_thread.join();
// Write output file trailer
av_write_trailer(outputFormatContext);
// Clean up
avformat_close_input(&inputFormatContext);
if (outputFormatContext && !(outputFormatContext->oformat->flags & AVFMT_NOFILE)) {
avio_closep(&outputFormatContext->pb);
}
avformat_free_context(outputFormatContext);
}
void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duration, const std::string& userName, const std::string& password, net_handle_t netHandle)
{
AVFormatContext* inputFormatContext = nullptr;
AVFormatContext* outputFormatContext = nullptr;
AVPacket packet;
#ifndef NDEBUG
// Set the custom log callback
av_log_set_callback(ffmpeg_log_callback);
av_log_set_level(AV_LOG_WARNING);
#endif
std::string url = rtspUrl;
AVDictionary* options = NULL;
av_dict_set(&options, "rtsp_transport", "tcp", 0);
av_dict_set(&options, "stimeout", "5000000", 0);
if (!userName.empty())
{
av_dict_set(&options, "username", userName.c_str(), 0); // Replace with actual username
av_dict_set(&options, "password", password.c_str(), 0); // Replace with actual password
char auth[512] = { 0 };
snprintf(auth, sizeof(auth), "%s:%s@", userName.c_str(), password.c_str());
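// The insert position 7 assumes the URL begins with the 7-character "rtsp://" scheme prefix.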
url.insert(url.begin() + 7, auth, auth + strlen(auth));
}
// Open input RTSP stream
int res = avformat_open_input(&inputFormatContext, url.c_str(), nullptr, &options);
av_dict_free(&options);
if (res != 0) {
char errbuf[AV_ERROR_MAX_STRING_SIZE];
av_strerror(res, errbuf, AV_ERROR_MAX_STRING_SIZE);
fprintf(stderr, "Could not open input: %s (error code: %d)\n", errbuf, res);
// fprintf(stderr, "Could not open input file '%s'\n", rtspUrl);
return;
}
// Retrieve input stream information
if (avformat_find_stream_info(inputFormatContext, nullptr) < 0) {
// fprintf(stderr, "Could not find stream information\n");
avformat_close_input(&inputFormatContext);
return;
}
// Open output MP4 file
if (avformat_alloc_output_context2(&outputFormatContext, nullptr, "mp4", outputPath) < 0) {
fprintf(stderr, "Could not create output context\n");
avformat_close_input(&inputFormatContext);
return;
}
// Copy stream information from input to output
for (unsigned int i = 0; i < inputFormatContext->nb_streams; i++) {
AVStream* inStream = inputFormatContext->streams[i];
const AVCodecParameters *in_codecpar = inStream->codecpar;
// Skip audio streams
if (inStream->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
continue;
}
if (in_codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
// Copy video stream as-is
const AVCodec *codec = avcodec_find_decoder(in_codecpar->codec_id);
AVStream *out_stream = avformat_new_stream(outputFormatContext, codec);
if (!out_stream) {
return;
}
avcodec_parameters_copy(out_stream->codecpar, in_codecpar);
out_stream->codecpar->codec_tag = 0;
out_stream->time_base = (AVRational){1, 90000};
out_stream->avg_frame_rate = inStream->avg_frame_rate;
}
else if (in_codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
// Setup AAC audio stream
const AVCodec *aac_encoder = avcodec_find_encoder(AV_CODEC_ID_AAC);
if (!aac_encoder) {
fprintf(stderr, "AAC encoder not found\n");
return;
}
AVStream *out_stream = avformat_new_stream(outputFormatContext, aac_encoder);
if (!out_stream) {
return;
}
// Set AAC parameters
out_stream->codecpar->codec_type = AVMEDIA_TYPE_AUDIO;
out_stream->codecpar->codec_id = AV_CODEC_ID_AAC;
out_stream->codecpar->sample_rate = in_codecpar->sample_rate;
out_stream->codecpar->format = AV_SAMPLE_FMT_FLTP;
out_stream->codecpar->channels = in_codecpar->channels;
out_stream->codecpar->channel_layout = av_get_default_channel_layout(in_codecpar->channels);
out_stream->codecpar->bit_rate = 128000;
out_stream->codecpar->frame_size = 1024; // AAC frame size
out_stream->time_base = (AVRational){1, in_codecpar->sample_rate};
}
}
// Open output file
if (!(outputFormatContext->oformat->flags & AVFMT_NOFILE)) {
if (avio_open(&outputFormatContext->pb, outputPath, AVIO_FLAG_WRITE) < 0) {
fprintf(stderr, "Could not open output file '%s'\n", outputPath);
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
return;
}
}
AVDictionary *opts = NULL;
// Set output format options
av_dict_set(&opts, "movflags", "faststart+frag_keyframe", 0);
av_dict_set(&opts, "brand", "mp42", 0);
// Write output file header
res = avformat_write_header(outputFormatContext, &opts);
av_dict_free(&opts);
if (res < 0) {
char errbuf[AV_ERROR_MAX_STRING_SIZE] = { 0 };
av_strerror(res, errbuf, AV_ERROR_MAX_STRING_SIZE);
fprintf(stderr, "Error occurred when writing header to output file: %s (error code: %d)\n", errbuf, res);
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
return;
}
#if 0
// Start a thread to stop the streaming after the specified duration
std::thread stop_thread([&]() {
std::this_thread::sleep_for(std::chrono::milliseconds(duration));
av_read_pause(inputFormatContext);
});
#endif
uint32_t framesToSkip = 16;
uint32_t framesSkipped = 0;
// Skip initial frames
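// Note: only packets from stream index 0 (assumed to be the video stream) count toward framesToSkip; other packets are read and discarded.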
while (framesSkipped < framesToSkip) {
if (av_read_frame(inputFormatContext, &packet) < 0)
break;
if (packet.stream_index == 0) { // Video stream
framesSkipped++;
}
av_packet_unref(&packet);
}
auto startTime = av_gettime();
// int64_t durationNs = (int64_t)duration * 1000000;
// av_gettime() returns microseconds, so convert the millisecond duration (plus a small margin) to microseconds.
int64_t durationUs = (int64_t)(duration + 32) * 1000;
// Read packets from input and write them to output
while (1) {
if ((av_gettime() - startTime) >= durationUs) {
// printf("Duration limit reached (%d seconds)\n", ctx->duration_secs);
break;
}
#if 0
AVStream* inStream = inputFormatContext->streams[packet.stream_index];
AVStream* outStream = outputFormatContext->streams[packet.stream_index];
packet.pts = av_rescale_q_rnd(packet.pts, inStream->time_base, outStream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
packet.dts = av_rescale_q_rnd(packet.dts, inStream->time_base, outStream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
packet.duration = av_rescale_q(packet.duration, inStream->time_base, outStream->time_base);
packet.pos = -1;
if (av_interleaved_write_frame(outputFormatContext, &packet) < 0) {
fprintf(stderr, "Error muxing packet\n");
break;
}
#endif
if (av_read_frame(inputFormatContext, &packet) < 0) break;
// Skip audio packets
if (inputFormatContext->streams[packet.stream_index]->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
{
av_packet_unref(&packet);
continue;
}
// Adjust packet timebase
AVStream *in_stream = inputFormatContext->streams[packet.stream_index];
AVStream *out_stream = outputFormatContext->streams[packet.stream_index];
av_packet_rescale_ts(&packet, in_stream->time_base, out_stream->time_base);
packet.pos = -1;
res = av_write_frame(outputFormatContext, &packet);
av_packet_unref(&packet);
if (res < 0)
{
break;
}
}
// stop_thread.join();
// Write output file trailer
av_write_trailer(outputFormatContext);
// Clean up
avformat_close_input(&inputFormatContext);
if (outputFormatContext && !(outputFormatContext->oformat->flags & AVFMT_NOFILE)) {
avio_closep(&outputFormatContext->pb);
}
avformat_free_context(outputFormatContext);
}

@ -1,20 +0,0 @@
//
// Created by Matthew on 2025/3/1.
//
#ifndef MICROPHOTO_RTSPRECORDER_H
#define MICROPHOTO_RTSPRECORDER_H
#include <string>
#include <android/multinetwork.h>
// void dumpRtspToMp4(const std::string &rtspUrl, const std::string &outputPath, uint32_t durationInMs);
void dumpRtmpToMp4(const char* rtmpUrl, const char* outputPath, uint32_t duration, net_handle_t netHandle);
void dumpRtspToMp4(const char* rtspUrl, const char* outputPath, uint32_t duration, const std::string& userName, const std::string& password, net_handle_t netHandle);
class RTSPRecorder {
};
#endif //MICROPHOTO_RTSPRECORDER_H
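For reference, a minimal call into the recorder declared above might look like the following; the URL, credentials, output path and 15-second duration are hypothetical values chosen only for illustration, and NETWORK_UNSPECIFIED is the default net_handle_t from android/multinetwork.h:

dumpRtspToMp4("rtsp://192.168.1.64:554/ch1", "/sdcard/DCIM/capture.mp4",
              15000 /* duration in ms */, "admin", "secret", NETWORK_UNSPECIFIED);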

@ -1,186 +0,0 @@
//
// Created by Matthew on 2025/2/28.
//
#include "RTSPToMP4.h"
#include <android/native_window.h>
#include <android/native_window_jni.h>
#include <jni.h>
#include <stdio.h>
#include <stdlib.h>
#include <fcntl.h>
#include <unistd.h>
#include <cstring>
#include <limits>
int32_t getMaxInputSize(AMediaExtractor* extractor, size_t trackIndex)
{
AMediaFormat* format = AMediaExtractor_getTrackFormat(extractor, trackIndex);
int32_t maxInputSize = 0;
if (AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_MAX_INPUT_SIZE, &maxInputSize)) {
// LOGI("Max input size for track %zu: %d", trackIndex, maxInputSize);
} else {
// LOGE("Failed to get max input size for track %zu", trackIndex);
}
AMediaFormat_delete(format);
return maxInputSize;
}
RTSPToMP4::RTSPToMP4(const char* rtspUrl, const char* outputPath, uint64_t durationInMs/* = 0*/)
: fd(-1), codec(nullptr), extractor(nullptr), muxer(nullptr), videoTrackIndex(-1), durationInMs(durationInMs), running(false) {
initExtractor(rtspUrl);
initCodec("video/avc");
initMuxer(outputPath);
}
RTSPToMP4::~RTSPToMP4() {
if (codec) AMediaCodec_delete(codec);
if (extractor) AMediaExtractor_delete(extractor);
if (muxer) AMediaMuxer_delete(muxer);
if (fd != -1)
{
fdatasync(fd);
close(fd);
fd = -1;
}
}
void RTSPToMP4::initCodec(const char* mime) {
codec = AMediaCodec_createDecoderByType(mime);
AMediaFormat* format = AMediaFormat_new();
AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, mime);
// Set other format parameters as needed
// ...
AMediaCodec_configure(codec, format, nullptr, nullptr, 0);
AMediaFormat_delete(format);
}
void RTSPToMP4::initExtractor(const char* rtspUrl) {
extractor = AMediaExtractor_new();
media_status_t status = AMediaExtractor_setDataSource(extractor, rtspUrl);
if (status != AMEDIA_OK) {
// Handle error
// ...
}
}
void RTSPToMP4::initMuxer(const char* outputPath) {
fd = open(outputPath, O_CREAT | O_WRONLY, 0644);
muxer = AMediaMuxer_new(fd, AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4);
int numTracks = AMediaExtractor_getTrackCount(extractor);
if (numTracks <= 0) {
// LOGE("No tracks found in RTSP stream");
AMediaExtractor_delete(extractor);
return;
}
for (int i = 0; i < numTracks; ++i) {
AMediaFormat* format = AMediaExtractor_getTrackFormat(extractor, i);
const char* mime;
if (AMediaFormat_getString(format, AMEDIAFORMAT_KEY_MIME, &mime) && strncmp(mime, "video/", 6) == 0) {
videoTrackIndex = AMediaMuxer_addTrack(muxer, format);
AMediaExtractor_selectTrack(extractor, i);
}
AMediaFormat_delete(format);
}
if (videoTrackIndex == -1) {
// LOGE("No video track found in RTSP stream");
AMediaExtractor_delete(extractor);
AMediaMuxer_delete(muxer);
return;
}
int32_t maxInputSize = getMaxInputSize(extractor, videoTrackIndex);
if (maxInputSize <= 0) {
// LOGE("Invalid max input size");
// releaseMediaExtractor(extractor);
sampleData.resize(1920 * 1080 * 4, 0);
return;
}
sampleData.resize(maxInputSize, 0);
}
void RTSPToMP4::startDecodingAndMuxing() {
AMediaCodec_start(codec);
size_t bufferSize = sampleData.size();
uint8_t* buffer = &sampleData[0];
int64_t sampleTime = 0;
int64_t startTime = 0;
bool firstSampleData = true;
int64_t durationTime = (durationInMs == 0) ? std::numeric_limits<int64_t>::max() : (int64_t)durationInMs * 1000;
while (running) {
// Extract data from RTSP stream
ssize_t sampleSize = AMediaExtractor_readSampleData(extractor, buffer, bufferSize);
if (sampleSize < 0) {
break; // End of stream
}
sampleTime = AMediaExtractor_getSampleTime(extractor);
if (firstSampleData)
{
startTime = sampleTime;
firstSampleData = false;
}
sampleTime -= startTime;
// Feed data to codec: dequeue an input buffer first, then fill and queue it
ssize_t inputBufferIndex = AMediaCodec_dequeueInputBuffer(codec, 2000 /* timeout in us */);
if (inputBufferIndex >= 0) {
size_t inputBufferSize = 0;
uint8_t* inputBuffer = AMediaCodec_getInputBuffer(codec, (size_t)inputBufferIndex, &inputBufferSize);
memcpy(inputBuffer, buffer, sampleSize);
AMediaCodec_queueInputBuffer(codec, inputBufferIndex, 0, sampleSize, sampleTime, 0);
}
// Retrieve decoded frames and write to muxer
AMediaCodecBufferInfo bufferInfo;
ssize_t outputBufferIndex = AMediaCodec_dequeueOutputBuffer(codec, &bufferInfo, 0);
if (outputBufferIndex >= 0) {
bufferInfo.offset = 0;
bufferInfo.size = sampleSize;
bufferInfo.presentationTimeUs = sampleTime;
bufferInfo.flags = AMediaExtractor_getSampleFlags(extractor);
uint8_t* outputBuffer = AMediaCodec_getOutputBuffer(codec, outputBufferIndex, &bufferSize);
AMediaMuxer_writeSampleData(muxer, videoTrackIndex, outputBuffer, &bufferInfo);
AMediaCodec_releaseOutputBuffer(codec, outputBufferIndex, false);
}
AMediaExtractor_advance(extractor);
if (sampleTime > durationTime)
{
break;
}
}
AMediaCodec_stop(codec);
AMediaMuxer_stop(muxer);
if (fd != -1)
{
fdatasync(fd);
close(fd);
fd = -1;
}
}
void RTSPToMP4::start() {
// Add video track to muxer
AMediaFormat* format = AMediaExtractor_getTrackFormat(extractor, 0);
videoTrackIndex = AMediaMuxer_addTrack(muxer, format);
running = true;
AMediaMuxer_start(muxer);
startDecodingAndMuxing();
}
void RTSPToMP4::stop() {
running = false;
}

@ -1,38 +0,0 @@
//
// Created by Matthew on 2025/2/28.
//
#ifndef MICROPHOTO_RTSPTOMP4_H
#define MICROPHOTO_RTSPTOMP4_H
#include <media/NdkMediaCodec.h>
#include <media/NdkMediaExtractor.h>
#include <media/NdkMediaMuxer.h>
#include <vector>
class RTSPToMP4 {
public:
RTSPToMP4(const char* rtspUrl, const char* outputPath, uint64_t durationInMs = 0);
~RTSPToMP4();
void start();
void stop();
private:
void initCodec(const char* mime);
void initExtractor(const char* rtspUrl);
void initMuxer(const char* outputPath);
void startDecodingAndMuxing();
int fd;
AMediaCodec* codec;
AMediaExtractor* extractor;
AMediaMuxer* muxer;
int videoTrackIndex;
uint64_t durationInMs;
bool running;
std::vector<uint8_t> sampleData;
};
#endif //MICROPHOTO_RTSPTOMP4_H
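As with the recorder above, a minimal and purely illustrative use of this class, assuming a hypothetical camera URL and output path, would be:

RTSPToMP4 converter("rtsp://192.168.1.64:554/ch1", "/sdcard/DCIM/stream.mp4", 10000 /* ms */);
converter.start();   // blocks while decoding and muxing until the duration elapses
// ... later, from another thread:
converter.stop();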

@ -1,547 +0,0 @@
//
// Created by Matthew on 2025/3/11.
//
#include "Streaming.h"
#include <iostream>
#include <string>
#include <thread>
#include <atomic>
#include <android/api-level.h>
#include <android/log.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/inet.h>
extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libavutil/avutil.h>
#include <libavutil/opt.h>
#include <libavutil/time.h>
}
extern void ffmpeg_log_callback(void *ptr, int level, const char *fmt, va_list vl);
#if 0
StreamForwarder::~StreamForwarder() {
stop();
if (inputCtx) {
avformat_close_input(&inputCtx);
}
if (outputCtx) {
if (outputCtx->pb) {
avio_closep(&outputCtx->pb);
}
avformat_free_context(outputCtx);
}
}
bool StreamForwarder::initialize(const std::string& inputUrl, const std::string& outputUrl) {
if (!openInput(inputUrl)) {
return false;
}
if (!openOutput(outputUrl)) {
return false;
}
return true;
}
bool StreamForwarder::openInput(const std::string& inputUrl) {
inputCtx = avformat_alloc_context();
if (!inputCtx) {
return false;
}
if (avformat_open_input(&inputCtx, inputUrl.c_str(), nullptr, nullptr) < 0) {
return false;
}
if (avformat_find_stream_info(inputCtx, nullptr) < 0) {
return false;
}
return true;
}
bool StreamForwarder::openOutput(const std::string& outputUrl) {
int ret = avformat_alloc_output_context2(&outputCtx, nullptr, "flv", outputUrl.c_str());
if (ret < 0) {
return false;
}
// Copy streams from input to output
for (unsigned int i = 0; i < inputCtx->nb_streams; i++) {
AVStream* inStream = inputCtx->streams[i];
AVStream* outStream = avformat_new_stream(outputCtx, inStream->codec->codec);
if (!outStream) {
return false;
}
ret = avcodec_copy_context(outStream->codec, inStream->codec);
if (ret < 0) {
return false;
}
}
// Open output file
if (!(outputCtx->oformat->flags & AVFMT_NOFILE)) {
ret = avio_open(&outputCtx->pb, outputUrl.c_str(), AVIO_FLAG_WRITE);
if (ret < 0) {
return false;
}
}
// Write header
ret = avformat_write_header(outputCtx, nullptr);
if (ret < 0) {
return false;
}
return true;
}
void StreamForwarder::setFrameCallback(std::function<void(uint8_t*, int, int, int)> callback) {
frameCallback = callback;
}
void StreamForwarder::start() {
isRunning = true;
forwardPackets();
}
void StreamForwarder::stop() {
isRunning = false;
}
void StreamForwarder::forwardPackets() {
AVPacket packet;
AVFrame* frame = av_frame_alloc();
while (isRunning) {
if (av_read_frame(inputCtx, &packet) < 0) {
break;
}
// Process video frames if callback is set
if (frameCallback && packet.stream_index == 0) { // Assuming video is stream 0
AVCodecContext* codecCtx = inputCtx->streams[packet.stream_index]->codec;
int ret = avcodec_send_packet(codecCtx, &packet);
if (ret < 0) {
continue;
}
while (ret >= 0) {
ret = avcodec_receive_frame(codecCtx, frame);
if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
break;
} else if (ret < 0) {
goto end;
}
processFrame(frame);
}
}
// Forward packet
av_packet_rescale_ts(&packet,
inputCtx->streams[packet.stream_index]->time_base,
outputCtx->streams[packet.stream_index]->time_base);
int ret = av_interleaved_write_frame(outputCtx, &packet);
if (ret < 0) {
break;
}
av_packet_unref(&packet);
}
end:
av_frame_free(&frame);
av_write_trailer(outputCtx);
}
void StreamForwarder::processFrame(AVFrame* frame) {
if (frameCallback) {
frameCallback(frame->data[0], frame->linesize[0],
frame->width, frame->height);
}
}
#endif
RtspForwarder::RtspForwarder(const std::string& input, const std::string& output)
: inputUrl(input), outputUrl(output), isRunning(false)
{
}
bool RtspForwarder::isStreaming() const
{
return isRunning;
}
bool RtspForwarder::start()
{
run();
return true;
}
bool RtspForwarder::stop()
{
isRunning = false;
return true;
}
int RtspForwarder::run()
{
#ifndef NDEBUG
// Set the custom log callback
av_log_set_callback(ffmpeg_log_callback);
av_log_set_level(AV_LOG_DEBUG);
#endif
isRunning = true;
AVFormatContext* inputFormatContext = nullptr;
AVFormatContext* outputFormatContext = nullptr;
int ret;
int videoStreamIndex = -1;
int64_t startTime = AV_NOPTS_VALUE;
AVBSFContext* bsf_ctx = nullptr;
std::string url = inputUrl;
if (!m_userName.empty())
{
char auth[512] = { 0 };
snprintf(auth, sizeof(auth), "%s:%s@", m_userName.c_str(), m_password.c_str());
url.insert(url.begin() + 7, auth, auth + strlen(auth));
}
// Input options
AVDictionary* inputOptions = nullptr;
av_dict_set(&inputOptions, "rtsp_transport", "tcp", 0);
av_dict_set(&inputOptions, "stimeout", "5000000", 0); // 5 second timeout
// av_dict_set(&inputOptions, "buffer_size", "1024000", 0); // 1MB buffer
std::cout << "Opening input: " << url << std::endl;
// Open input
ret = avformat_open_input(&inputFormatContext, url.c_str(), nullptr, &inputOptions);
av_dict_free(&inputOptions);
if (ret < 0) {
std::cerr << "Could not open input: " << av_err2str(ret) << std::endl;
return ret;
}
// Get stream info
ret = avformat_find_stream_info(inputFormatContext, nullptr);
if (ret < 0) {
// std::cerr << "Failed to get stream info: " << av_err2str(ret) << std::endl;
avformat_close_input(&inputFormatContext);
return ret;
}
// Find video stream
for (unsigned i = 0; i < inputFormatContext->nb_streams; i++) {
if (inputFormatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
videoStreamIndex = i;
break;
}
}
if (videoStreamIndex == -1) {
// std::cerr << "No video stream found" << std::endl;
avformat_close_input(&inputFormatContext);
return -1;
}
// Create stream mapping
std::vector<int> streamMapping(inputFormatContext->nb_streams, -1);
int outputStreamIdx = 0;
// Allocate output context
ret = avformat_alloc_output_context2(&outputFormatContext, nullptr, "rtsp", outputUrl.c_str());
if ((ret < 0) || !outputFormatContext) {
std::cerr << "Could not create output context" << std::endl;
avformat_close_input(&inputFormatContext);
return false;
}
// FIXED VERSION - remove the redundant stream creation
for (unsigned i = 0; i < inputFormatContext->nb_streams; i++) {
AVStream* inStream = inputFormatContext->streams[i];
const AVCodecParameters *in_codecpar = inStream->codecpar;
// Skip non-video streams if needed
if (in_codecpar->codec_type != AVMEDIA_TYPE_VIDEO) {
streamMapping[i] = -1;
continue;
}
// Create only ONE stream per input stream
const AVCodec *codec = avcodec_find_decoder(in_codecpar->codec_id);
AVStream *outStream = avformat_new_stream(outputFormatContext, codec);
if (!outStream) {
return false;
}
ret = avcodec_parameters_copy(outStream->codecpar, in_codecpar);
outStream->codecpar->codec_tag = 0;
outStream->time_base = (AVRational){1, 90000};
outStream->avg_frame_rate = inStream->avg_frame_rate;
// Map input stream to output stream
streamMapping[i] = outputStreamIdx++;
}
const AVBitStreamFilter* filter = av_bsf_get_by_name("h264_mp4toannexb");
if (filter)
{
for (unsigned i = 0; i < outputFormatContext->nb_streams; i++) {
AVStream* stream = outputFormatContext->streams[i];
if (stream->codecpar->codec_id == AV_CODEC_ID_H264) {
ret = av_bsf_alloc(filter, &bsf_ctx);
if (ret < 0) {
std::cerr << "Failed to allocate bitstream filter context: " << av_err2str(ret) << std::endl;
return false;
}
// Copy parameters from input to bsf
ret = avcodec_parameters_copy(bsf_ctx->par_in, stream->codecpar);
if (ret < 0) {
std::cerr << "Failed to copy parameters to bsf: " << av_err2str(ret) << std::endl;
return false;
}
// Initialize the bsf context
ret = av_bsf_init(bsf_ctx);
if (ret < 0) {
std::cerr << "Failed to initialize bitstream filter: " << av_err2str(ret) << std::endl;
return false;
}
// Update output parameters
ret = avcodec_parameters_copy(stream->codecpar, bsf_ctx->par_out);
if (ret < 0) {
std::cerr << "Failed to copy parameters from bsf: " << av_err2str(ret) << std::endl;
return false;
}
break; // Only apply to the first H.264 stream
}
}
}
AVDictionary* outputOptions = nullptr;
av_dict_set(&outputOptions, "rtsp_transport", "tcp", 0);
av_dict_set(&outputOptions, "rtsp_flags", "filter_src", 0);
av_dict_set(&outputOptions, "timeout", "5000000", 0);
av_dict_set(&outputOptions, "allowed_media_types", "video", 0);
av_dict_set(&outputOptions, "buffer_size", "1024000", 0); // 1MB buffer
av_dict_set(&outputOptions, "fflags", "nobuffer", 0); // Reduce latency
av_dict_set(&outputOptions, "muxdelay", "0.1", 0); // Reduce delay
av_dict_set(&outputOptions, "max_delay", "500000", 0);
av_dict_set(&outputOptions, "preset", "ultrafast", 0);
av_dict_set(&outputOptions, "tune", "zerolatency", 0);
av_dict_set(&outputOptions, "rtsp_flags", "prefer_tcp", 0);
// Open output
if (!(outputFormatContext->oformat->flags & AVFMT_NOFILE)) {
// Output options
// ret = avio_open(&outputFormatContext->pb, outputUrl.c_str(), AVIO_FLAG_WRITE);
ret = avio_open2(&outputFormatContext->pb, outputFormatContext->url, AVIO_FLAG_WRITE, NULL, &outputOptions);
if (ret < 0) {
char errbuf[AV_ERROR_MAX_STRING_SIZE] = { 0 };
av_strerror(ret, errbuf, AV_ERROR_MAX_STRING_SIZE);
std::cerr << "Could not open output URL: " << errbuf << std::endl;
avformat_close_input(&inputFormatContext);
avformat_free_context(outputFormatContext);
av_dict_free(&outputOptions);
return ret;
}
}
// Write header
ret = avformat_write_header(outputFormatContext, &outputOptions);
av_dict_free(&outputOptions);
if (ret < 0) {
char errbuf[AV_ERROR_MAX_STRING_SIZE] = { 0 };
av_strerror(ret, errbuf, AV_ERROR_MAX_STRING_SIZE);
std::cerr << "Error writing header: " << errbuf << std::endl;
avformat_close_input(&inputFormatContext);
if (!(outputFormatContext->oformat->flags & AVFMT_NOFILE))
avio_closep(&outputFormatContext->pb);
avformat_free_context(outputFormatContext);
return ret;
}
// Main loop - read and write packets
AVPacket packet;
AVMediaType mediaType;
while (isRunning) {
ret = av_read_frame(inputFormatContext, &packet);
if (ret < 0) {
if (ret == AVERROR_EOF || ret == AVERROR(EAGAIN)) {
std::cerr << "End of stream or timeout, reconnecting in "
<< reconnectDelayMs << "ms" << std::endl;
std::this_thread::sleep_for(std::chrono::milliseconds(reconnectDelayMs));
avformat_close_input(&inputFormatContext);
ret = avformat_open_input(&inputFormatContext, url.c_str(), nullptr, &inputOptions); // reopen with the URL that already carries credentials
if (ret < 0) continue;
ret = avformat_find_stream_info(inputFormatContext, nullptr);
if (ret < 0) continue;
continue;
}
break;
}
// Later when writing packets:
int original_stream_index = packet.stream_index;
if (streamMapping[original_stream_index] >= 0) {
packet.stream_index = streamMapping[original_stream_index];
// Write packet...
} else {
// Skip this packet
av_packet_unref(&packet);
continue;
}
// Skip audio and data packets
mediaType = inputFormatContext->streams[original_stream_index]->codecpar->codec_type;
if (mediaType == AVMEDIA_TYPE_AUDIO || mediaType == AVMEDIA_TYPE_DATA)
{
av_packet_unref(&packet);
continue;
}
#if 0
// Fix timestamps if enabled
if (fixTimestamps) {
// Handle timestamp issues similar to FFmpeg warning
AVStream* inStream = inputFormatContext->streams[packet.stream_index];
AVStream* outStream = outputFormatContext->streams[packet.stream_index];
if (packet.pts == AV_NOPTS_VALUE) {
// Generate PTS if missing
if (startTime == AV_NOPTS_VALUE) {
startTime = av_gettime();
}
packet.pts = av_rescale_q(av_gettime() - startTime,
AV_TIME_BASE_Q,
inStream->time_base);
packet.dts = packet.pts;
}
// Rescale timestamps to output timebase
packet.pts = av_rescale_q_rnd(packet.pts,
inStream->time_base,
outStream->time_base,
static_cast<AVRounding>(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
packet.dts = av_rescale_q_rnd(packet.dts,
inStream->time_base,
outStream->time_base,
static_cast<AVRounding>(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
packet.duration = av_rescale_q(packet.duration,
inStream->time_base,
outStream->time_base);
}
// Write packet to output
ret = av_interleaved_write_frame(outputFormatContext, &packet);
av_packet_unref(&packet);
if (ret < 0) {
std::cerr << "Error writing frame: " << av_err2str(ret) << std::endl;
break;
}
#endif
AVStream *in_stream = inputFormatContext->streams[original_stream_index];
AVStream *out_stream = outputFormatContext->streams[packet.stream_index];
av_packet_rescale_ts(&packet, in_stream->time_base, out_stream->time_base);
// CRITICAL: Fix timestamp issues
if (packet.dts != AV_NOPTS_VALUE && packet.pts != AV_NOPTS_VALUE && packet.dts > packet.pts) {
packet.dts = packet.pts;
}
// Handle missing timestamps
if (packet.pts == AV_NOPTS_VALUE) {
if (startTime == AV_NOPTS_VALUE) {
startTime = av_gettime();
}
packet.pts = av_rescale_q(av_gettime() - startTime,
AV_TIME_BASE_Q,
out_stream->time_base);
packet.dts = packet.pts;
}
packet.pos = -1;
// Apply bitstream filter if it's H.264
if (bsf_ctx && out_stream->codecpar->codec_id == AV_CODEC_ID_H264) {
ret = av_bsf_send_packet(bsf_ctx, &packet);
if (ret < 0) {
std::cerr << "Error sending packet to bitstream filter: " << av_err2str(ret) << std::endl;
break;
}
while (ret >= 0) {
ret = av_bsf_receive_packet(bsf_ctx, &packet);
if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
// Need more input or end of file
break;
} else if (ret < 0) {
std::cerr << "Error receiving packet from bitstream filter: " << av_err2str(ret) << std::endl;
break;
}
// Write the filtered packet
ret = av_interleaved_write_frame(outputFormatContext, &packet);
av_packet_unref(&packet);
if (ret < 0) {
char errbuf[AV_ERROR_MAX_STRING_SIZE] = { 0 };
av_strerror(ret, errbuf, AV_ERROR_MAX_STRING_SIZE);
std::cerr << "Error writing frame: " << errbuf << std::endl;
break;
}
}
} else {
// Write the packet without filtering
ret = av_interleaved_write_frame(outputFormatContext, &packet);
av_packet_unref(&packet);
if (ret < 0) {
char errbuf[AV_ERROR_MAX_STRING_SIZE] = { 0 };
av_strerror(ret, errbuf, AV_ERROR_MAX_STRING_SIZE);
std::cerr << "Error writing frame: " << errbuf << std::endl;
break;
}
}
}
cleanup:
// Free the bitstream filter context
if (bsf_ctx) {
av_bsf_free(&bsf_ctx);
}
// Write trailer
av_write_trailer(outputFormatContext);
// Cleanup
avformat_close_input(&inputFormatContext);
if (outputFormatContext && !(outputFormatContext->oformat->flags & AVFMT_NOFILE))
avio_closep(&outputFormatContext->pb);
avformat_free_context(outputFormatContext);
return ret;
}

@ -1,90 +0,0 @@
//
// Created by Matthew on 2025/3/11.
//
#ifndef MICROPHOTO_STREAMING_H
#define MICROPHOTO_STREAMING_H
#include <string>
#include <memory>
#include <functional>
#include <iostream>
#include <thread>
#include <atomic>
#include <android/multinetwork.h>
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/avutil.h>
#include <libswscale/swscale.h>
}
class Streaming
{
public:
virtual ~Streaming() {}
virtual bool start() { return false; }
virtual bool stop() { return false; }
virtual bool isStreaming() const { return false; }
void setAuth(const std::string& userName, const std::string& password)
{
m_userName = userName;
m_password = password;
}
protected:
std::string m_userName;
std::string m_password;
};
#if 0
class StreamForwarder : public Streaming
{
private:
AVFormatContext* inputCtx = nullptr;
AVFormatContext* outputCtx = nullptr;
bool isRunning = false;
public:
StreamForwarder() = default;
virtual ~StreamForwarder();
bool initialize(const std::string& inputUrl, const std::string& outputUrl);
virtual void start();
virtual void stop();
private:
bool openInput(const std::string& inputUrl);
bool openOutput(const std::string& outputUrl);
void forwardPackets();
void setFrameCallback(std::function<void(uint8_t*, int, int, int)> callback);
};
#endif
class RtspForwarder : public Streaming {
private:
std::string inputUrl;
std::string outputUrl;
std::atomic<bool> isRunning;
// Options
int reconnectDelayMs = 5000;
bool fixTimestamps = true;
public:
RtspForwarder(const std::string& input, const std::string& output);
virtual bool start();
virtual bool stop();
virtual bool isStreaming() const;
int run();
};
#endif //MICROPHOTO_STREAMING_H
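A hedged usage sketch for the RtspForwarder declared above. The URLs, credentials and the std::thread wrapper are assumptions for illustration; start() simply calls the blocking run() loop, and stop() clears isRunning so that loop exits.
#include "Streaming.h"
#include <thread>
void forwardCamera()
{
    RtspForwarder fwd("rtsp://192.168.50.2:554/live/main", "rtsp://127.0.0.1:8554/relay");
    fwd.setAuth("admin", "password");              // credentials are spliced into the input URL by run()
    std::thread worker([&fwd]() { fwd.start(); }); // start() blocks, so run it off the caller's thread
    // ... stream is being forwarded ...
    fwd.stop();                                    // isRunning = false; run() returns after the current packet
    worker.join();
}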

@ -1,330 +0,0 @@
//
// Created by Matthew on 2025/3/4.
//
#include "HangYuCtrl.h"
#include "netcamera.h"
#include "httpclient.h"
#include <LogThread.h>
#include <SpecData_JSON.h>
#include <cstring>
#include <algorithm>
HangYuCtrl::~HangYuCtrl()
{
}
bool HangYuCtrl::SetResolution(uint8_t channel, uint8_t streamID, uint32_t resX, uint32_t resY)
{
//Stream type ranges from 1 to 4; 1 is the main stream
char url[128] = { 0 };
snprintf(url, sizeof(url), "http://%s/Streams/%u/1", m_ip.c_str(), (uint32_t)channel);
std::vector<uint8_t> resData;
int res = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, resData);
if (res != 0 || resData.empty())
{
return 0;
}
std::string xmlString(resData.begin(), resData.end());
size_t widthStart = xmlString.find("<ResolutionWidth>");
size_t widthEnd = xmlString.find("</ResolutionWidth>");
if (widthStart != std::string::npos && widthEnd != std::string::npos) {
widthStart += std::string("<ResolutionWidth>").length();
xmlString.replace(widthStart, widthEnd - widthStart, std::to_string(resX));
}
size_t heightStart = xmlString.find("<ResolutionHeigth>");
size_t heightEnd = xmlString.find("</ResolutionHeigth>");
if (heightStart != std::string::npos && heightEnd != std::string::npos) {
heightStart += std::string("<ResolutionHeigth>").length();
xmlString.replace(heightStart, heightEnd - heightStart, std::to_string(resY));
}
res = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, xmlString.c_str(), resData);
if (res != 0)
{
return 0;
}
return true;
}
bool HangYuCtrl::SetOsd(uint8_t channel, std::string osdstring, uint8_t pos)
{
// /LAPI/V1.0/Channels/<ID>/Media/OSDs/Contents
//Top-left OSD
bool hasDateTime = (osdstring.find("$$DATETIME$$") != std::string::npos);
size_t posi = osdstring.find("$$DATETIME$$");
if (posi != std::string::npos) {
size_t endPos = posi + 12;
while (endPos < osdstring.size() && (osdstring[endPos] == ' ' || osdstring[endPos] == '\n')) {
endPos++;
}
osdstring.erase(posi, endPos - posi);
}
char url[128] = { 0 };
snprintf(url, sizeof(url), "http://%s/Pictures/%u/MultiOSDV2", m_ip.c_str(), (uint32_t)channel);
std::vector<uint8_t> resData;
std::replace(osdstring.begin(), osdstring.end(), '\n', '^');
string xmlString = "<?xml version=\"1.0\" encoding=\"UTF-8\" ?><MultiLineOSD><DisplayTime><Enable>" + string(hasDateTime ? "true" : "false") + "</Enable><PosX>8</PosX><PosY>0</PosY></DisplayTime><OSD><ID>1</ID><Enable>false</Enable><Text>"+ osdstring+ "</Text><x>8</x><y>" + string(hasDateTime ? "24" : "0") + "</y></MultiLineOSD>";
int res = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, xmlString.c_str(), resData);
return res == 0;
}
void HangYuCtrl::EnableOsd(bool enable, uint8_t channel)
{
//HangYu devices can only display the time plus a single OSD text block
char url[128] = { 0 };
snprintf(url, sizeof(url), "http://%s/Pictures/%u/MultiOSDV2", m_ip.c_str(), (uint32_t)channel);
std::vector<uint8_t> resData;
int res = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, resData);
if (res != 0 || resData.empty())
{
return;
}
std::string xmlString(resData.begin(), resData.end());
std::string enableStartTag = "<Enable>";
std::string enableEndTag = "</Enable>";
size_t pos = 0;
while ((pos = xmlString.find(enableStartTag, pos)) != std::string::npos) {
size_t startPos = pos + enableStartTag.length();
size_t endPos = xmlString.find(enableEndTag, startPos);
if (endPos == std::string::npos) {
break;
}
std::string newValue = enable ? "true" : "false";
xmlString.replace(startPos, endPos - startPos, newValue);
pos = endPos + enableEndTag.length();
}
res = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, xmlString.c_str(), resData);
if (res != 0)
{
// return;
}
}
std::string HangYuCtrl::GetStreamingUrl(uint8_t channel)
{
// /LAPI/V1.0/Channels/<ID>/Media/Video/Streams/<ID>/LiveStreamURL?TransType=<Tran
// sType>&TransProtocol=<TransProtocol>
char url[128] = { 0 };
snprintf(url, sizeof(url), "http://%s/Streams/%u/1/Transport", m_ip.c_str(), (uint32_t)channel);
std::vector<uint8_t> resData;
int res = 0;
for (int idx = 0; idx < 10; idx++)
{
res = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, resData);
if (res == 0 && !resData.empty())
{
break;
}
}
if (res != 0 || resData.empty())
{
return "";
}
resData.push_back(0);
const char* start = strstr((const char*)&resData[0], "<RTSPURI>");
if (start == NULL)
{
return "";
}
start += 9;
const char* end = strstr(start, "</RTSPURI>");
if (end == NULL)
{
return "";
}
return std::string(start, end);
}
bool HangYuCtrl::UpdateTime(time_t ts)
{
// /LAPI/V1.0/System/Time
// <?xml version="1.0" encoding="utf-8"?>
//<Time>
//<DateTimeFormat>
//<!--req,string,YYYYMMDDWhhmmss,YYYYMMDDhhmmss,MMDDYYYYWhhmmss,MMD
// DYYYYhhmmss,DDMMYYYYWhhmmss,DDMMYYYYhhmmss-->
//</DateTimeFormat>
//<TimeFormat><!--req,xs:string,12hour,24hour--></TimeFormat>
//<SystemTime><!--req,xs:datetime,” 20040503T173008+08”--></SystemTime>
//<SyncNTPFlag><!--req,xs:string,"Sync,NoSync"--></SyncNTPFlag>
//</Time>
std::string reqData = "<?xml version=\"1.0\" encoding=\"utf-8\"?><Time><SystemTime>"
+ FormatLocalDateTime("%d%02d%02dT%02d%02d%02d") + "+08</SystemTime></Time>";
std::string url = "http://" + m_ip + "/System/Time";
std::vector<uint8_t> resData;
int res = DoPutRequest(url.c_str(), HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, reqData.c_str(), resData);
if (res != 0)
{
return false;
}
return true;
}
bool HangYuCtrl::TakePhoto(uint8_t streamID, std::vector<uint8_t>& img)
{
// /Snapshot/%u/1/RemoteImageCaptureV2?ImageFormat=jpg
char url[128] = { 0 };
snprintf(url, sizeof(url), "http://%s/Snapshot/%u/1/RemoteImageCaptureV2?ImageFormat=jpg", m_ip.c_str(), (uint32_t)streamID);
int nRet = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, img, &m_lastErrorCode);
if (0 == nRet)
{
bool qualityDowngraded = false;
std::string originalConfig;
if (img.size() < 1000)
{
qualityDowngraded = DowngradeQuality(originalConfig);
XYLOG(XYLOG_SEVERITY_INFO,"Reduce Img Quality");
}
nRet = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, img, &m_lastErrorCode);
if (!originalConfig.empty())
{
UpdateQuality(originalConfig);
}
std::vector<uint8_t> header = {0xFF, 0xD8, 0xFF, 0xE0}; // JPEG
std::vector<uint8_t>::iterator it = std::search(img.begin(), img.end(), header.begin(), header.end());
if (it != img.end() && it != img.begin())
{
img.erase(img.begin(), it);
#ifndef NDEBUG
int aa = 0;
#endif
}
}
return nRet == 0;
}
bool HangYuCtrl::DowngradeQuality(std::string& originalConfig)
{
bool res = false;
char url[64] = { 0 };
snprintf(url, sizeof(url), "http://%s/Snapshot/Config", m_ip.c_str());
std::vector<uint8_t> data;
int nRet = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, data);
if (0 == nRet)
{
std::string str = ByteArrayToString(&data[0], data.size());
originalConfig = str;
if (replaceAll(str, "<Quality>middle</Quality>", "<Quality>low</Quality>") == 0)
{
res = (replaceAll(str, "<Quality>high</Quality>", "<Quality>middle</Quality>") != 0);
}
else
{
res = true;
}
if (!res)
{
return res;
}
data.clear();
if (res)
{
nRet = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, str.c_str(), data);
return 0 == nRet;
}
}
return false;
}
bool HangYuCtrl::UpdateQuality(const std::string& originalConfig)
{
std::vector<uint8_t> data;
char url[64] = { 0 };
snprintf(url, sizeof(url), "http://%s/Snapshot/Config", m_ip.c_str());
int nRet = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, originalConfig.c_str(), data);
return 0 == nRet;
}
bool HangYuCtrl::UpgradeQuality()
{
bool res = false;
char url[64] = { 0 };
snprintf(url, sizeof(url), "http://%s/Snapshot/Config", m_ip.c_str());
std::vector<uint8_t> data;
int nRet = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, data);
if (0 == nRet)
{
std::string str = ByteArrayToString(&data[0], data.size());
if (replaceAll(str, "<Quality>low</Quality>", "<Quality>middle</Quality>") == 0)
{
res = (replaceAll(str, "<Quality>middle</Quality>", "<Quality>high</Quality>") != 0);
}
else
{
res = true;
}
if (!res)
{
return res;
}
data.clear();
if (res)
{
nRet = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, str.c_str(), data);
return 0 == nRet;
}
}
return false;
}
bool HangYuCtrl::QueryQuality(std::string& qualityContents)
{
char url[64] = { 0 };
snprintf(url, sizeof(url), "http://%s/Snapshot/Config", m_ip.c_str());
std::vector<uint8_t> data;
int nRet = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, data);
if (0 == nRet && !data.empty())
{
qualityContents = ByteArrayToString(&data[0], data.size());
}
return (0 == nRet);
}
bool HangYuCtrl::TakeVideo(uint8_t streamID, uint32_t duration, std::string path)
{
return false;
}

@ -1,34 +0,0 @@
//
// Created by Matthew on 2025/3/4.
//
#ifndef __MICROPHOTO_HANGYUCTRL_H__
#define __MICROPHOTO_HANGYUCTRL_H__
#include "VendorCtrl.h"
class HangYuCtrl : public VendorCtrl
{
public:
using VendorCtrl::VendorCtrl;
virtual ~HangYuCtrl();
virtual bool SetOsd(uint8_t channel, std::string osd, uint8_t pos);
virtual void EnableOsd(bool enable, uint8_t channel);
virtual std::string GetStreamingUrl(uint8_t channel);
virtual bool UpdateTime(time_t ts);
virtual bool TakePhoto(uint8_t streamID, std::vector<uint8_t>& img);
virtual bool TakeVideo(uint8_t streamID, uint32_t duration, std::string path);
virtual bool HasAuthOnStreaming() const { return true; }
virtual bool SetResolution(uint8_t channel, uint8_t streamID, uint32_t resX, uint32_t resY);
private:
bool QueryQuality(std::string& qualityContents);
bool DowngradeQuality(std::string& originalConfig);
bool UpdateQuality(const std::string& originalConfig);
bool UpgradeQuality();
};
#endif //__MICROPHOTO_HANGYUCTRL_H__

@ -1,204 +0,0 @@
//
// Created by Matthew on 2025/3/4.
//
#include "HikonCtrl.h"
#include "netcamera.h"
#include "httpclient.h"
#include <LogThread.h>
#include <SpecData_JSON.h>
#include <cstring>
#include <algorithm>
HikonCtrl::~HikonCtrl()
{
}
bool HikonCtrl::SetResolution(uint8_t channel, uint8_t streamID, uint32_t resX, uint32_t resY)
{
//Stream type ranges from 1 to 4; 1 is the main stream
char url[128] = { 0 };
snprintf(url, sizeof(url), "http://%s/Streams/%u/1", m_ip.c_str(), (uint32_t)channel);
std::vector<uint8_t> resData;
int res = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, resData);
if (res != 0 || resData.empty())
{
return 0;
}
std::string xmlString(resData.begin(), resData.end());
size_t widthStart = xmlString.find("<ResolutionWidth>");
size_t widthEnd = xmlString.find("</ResolutionWidth>");
if (widthStart != std::string::npos && widthEnd != std::string::npos) {
widthStart += std::string("<ResolutionWidth>").length();
xmlString.replace(widthStart, widthEnd - widthStart, std::to_string(resX));
}
size_t heightStart = xmlString.find("<ResolutionHeigth>");
size_t heightEnd = xmlString.find("</ResolutionHeigth>");
if (heightStart != std::string::npos && heightEnd != std::string::npos) {
heightStart += std::string("<ResolutionHeigth>").length();
xmlString.replace(heightStart, heightEnd - heightStart, std::to_string(resY));
}
res = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, xmlString.c_str(), resData);
if (res != 0)
{
return 0;
}
return true;
}
bool HikonCtrl::SetOsd(uint8_t channel, std::string osdstring, uint8_t pos)
{
// /LAPI/V1.0/Channels/<ID>/Media/OSDs/Contents
//Top-left OSD
bool hasDateTime = (osdstring.find("$$DATETIME$$") != std::string::npos);
size_t posi = osdstring.find("$$DATETIME$$");
if (posi != std::string::npos) {
size_t endPos = posi + 12;
while (endPos < osdstring.size() && (osdstring[endPos] == ' ' || osdstring[endPos] == '\n')) {
endPos++;
}
osdstring.erase(posi, endPos - posi);
}
char url[128] = { 0 };
snprintf(url, sizeof(url), "http://%s/Pictures/%u/MultiOSDV2", m_ip.c_str(), (uint32_t)channel);
std::vector<uint8_t> resData;
std::replace(osdstring.begin(), osdstring.end(), '\n', '^');
string xmlString = "<?xml version=\"1.0\" encoding=\"UTF-8\" ?><MultiLineOSD><DisplayTime><Enable>" + string(hasDateTime ? "true" : "false") + "</Enable><PosX>8</PosX><PosY>0</PosY></DisplayTime><OSD><ID>1</ID><Enable>false</Enable><Text>"+ osdstring+ "</Text><x>8</x><y>" + string(hasDateTime ? "24" : "0") + "</y></MultiLineOSD>";
int res = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, xmlString.c_str(), resData);
return res == 0;
}
void HikonCtrl::EnableOsd(bool enable, uint8_t channel)
{
//HangYu devices can only display the time plus a single OSD text block
char url[128] = { 0 };
snprintf(url, sizeof(url), "http://%s/Pictures/%u/MultiOSDV2", m_ip.c_str(), (uint32_t)channel);
std::vector<uint8_t> resData;
int res = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, resData);
if (res != 0 || resData.empty())
{
return;
}
std::string xmlString(resData.begin(), resData.end());
std::string enableStartTag = "<Enable>";
std::string enableEndTag = "</Enable>";
size_t pos = 0;
while ((pos = xmlString.find(enableStartTag, pos)) != std::string::npos) {
size_t startPos = pos + enableStartTag.length();
size_t endPos = xmlString.find(enableEndTag, startPos);
if (endPos == std::string::npos) {
break;
}
std::string newValue = enable ? "true" : "false";
xmlString.replace(startPos, endPos - startPos, newValue);
pos = endPos + enableEndTag.length();
}
res = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, xmlString.c_str(), resData);
if (res != 0)
{
// return;
}
}
std::string HikonCtrl::GetStreamingUrl(uint8_t channel)
{
// /LAPI/V1.0/Channels/<ID>/Media/Video/Streams/<ID>/LiveStreamURL?TransType=<Tran
// sType>&TransProtocol=<TransProtocol>
char url[128] = { 0 };
snprintf(url, sizeof(url), "http://%s/Streams/%u/1/Transport", m_ip.c_str(), (uint32_t)channel);
std::vector<uint8_t> resData;
int res = 0;
for (int idx = 0; idx < 10; idx++)
{
res = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, resData);
if (res == 0 && !resData.empty())
{
break;
}
}
if (res != 0 || resData.empty())
{
return "";
}
resData.push_back(0);
const char* start = strstr((const char*)&resData[0], "<RTSPURI>");
if (start == NULL)
{
return "";
}
start += 9;
const char* end = strstr(start, "</RTSPURI>");
if (end == NULL)
{
return "";
}
return std::string(start, end);
}
bool HikonCtrl::UpdateTime(time_t ts)
{
// /LAPI/V1.0/System/Time
// <?xml version="1.0" encoding="utf-8"?>
//<Time>
//<DateTimeFormat>
//<!--req,string,YYYYMMDDWhhmmss,YYYYMMDDhhmmss,MMDDYYYYWhhmmss,MMD
// DYYYYhhmmss,DDMMYYYYWhhmmss,DDMMYYYYhhmmss-->
//</DateTimeFormat>
//<TimeFormat><!--req,xs:string,12hour,24hour--></TimeFormat>
//<SystemTime><!--req,xs:datetime,” 20040503T173008+08”--></SystemTime>
//<SyncNTPFlag><!--req,xs:string,"Sync,NoSync"--></SyncNTPFlag>
//</Time>
std::string reqData = "<?xml version=\"1.0\" encoding=\"utf-8\"?><Time><SystemTime>"
+ FormatLocalDateTime("%d%02d%02dT%02d%02d%02d") + "+08</SystemTime></Time>";
std::string url = "http://" + m_ip + "/System/Time";
std::vector<uint8_t> resData;
int res = DoPutRequest(url.c_str(), HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, reqData.c_str(), resData);
if (res != 0)
{
return false;
}
return true;
}
bool HikonCtrl::TakePhoto(uint8_t streamID, std::vector<uint8_t>& img)
{
char url[128] = { 0 };
snprintf(url, sizeof(url), "http://%s/ISAPI/Streaming/channels/1/picture?", m_ip.c_str());
int nRet = DoGetRequest(url, HTTP_AUTH_TYPE_DIGEST, m_userName.c_str(), m_password.c_str(), m_netHandle, img, &m_lastErrorCode);
return nRet == 0;
}
bool HikonCtrl::TakeVideo(uint8_t streamID, uint32_t duration, std::string path)
{
return false;
}

@ -1,34 +0,0 @@
//
// Created by Matthew on 2025/3/4.
//
#ifndef __MICROPHOTO_HIKONCTRL_H__
#define __MICROPHOTO_HIKONCTRL_H__
#include "VendorCtrl.h"
class HikonCtrl : public VendorCtrl
{
public:
using VendorCtrl::VendorCtrl;
virtual ~HikonCtrl();
virtual bool SetOsd(uint8_t channel, std::string osd, uint8_t pos);
virtual void EnableOsd(bool enable, uint8_t channel);
virtual std::string GetStreamingUrl(uint8_t channel);
virtual bool UpdateTime(time_t ts);
virtual bool TakePhoto(uint8_t streamID, std::vector<uint8_t>& img);
virtual bool TakeVideo(uint8_t streamID, uint32_t duration, std::string path);
virtual bool HasAuthOnStreaming() const { return true; }
virtual bool SetResolution(uint8_t channel, uint8_t streamID, uint32_t resX, uint32_t resY);
private:
bool QueryQuality(std::string& qualityContents);
bool DowngradeQuality(std::string& originalConfig);
bool UpdateQuality(const std::string& originalConfig);
bool UpgradeQuality();
};
#endif //__MICROPHOTO_HIKONCTRL_H__

@ -1,27 +0,0 @@
//
// Created by Matthew on 2025/3/4.
//
#include "VendorCtrl.h"
#include <curl/curl.h>
VendorCtrl::VendorCtrl(const std::string& ip, const std::string& userName, const std::string& password, uint8_t channel, net_handle_t netHandle, bool syncTime/* = true*/) :
    m_ip(ip), m_userName(userName), m_password(password), m_channel(channel), m_netHandle(netHandle), m_lastErrorCode(0)
{
}
std::string VendorCtrl::CvtJSONToString(const Json::Value& data)
{
Json::StreamWriterBuilder builder;
#ifndef NDEBUG
builder["indentation"] = "\t"; // assume default for comments is None
builder["emitUTF8"] = true;
#else
builder["indentation"] = "";
#endif
return Json::writeString(builder, data);
}
bool VendorCtrl::IsTimeout() const
{
return m_lastErrorCode == CURLE_OPERATION_TIMEDOUT;
}

@ -1,50 +0,0 @@
//
// Created by Matthew on 2025/3/4.
//
#ifndef MICROPHOTO_VENDORCTRL_H
#define MICROPHOTO_VENDORCTRL_H
#include <string>
#include <json/json.h>
#include <android/multinetwork.h>
#define LEFT_TOP 0
#define RIGHT_TOP 1
#define LEFT_BOTTOM 2
#define RIGHT_BOTTOM 3
class VendorCtrl {
public:
VendorCtrl(const std::string& ip, const std::string& userName, const std::string& password, uint8_t channel, net_handle_t netHandle, bool syncTime = true);
virtual ~VendorCtrl() {}
virtual bool SetOsd(uint8_t channel, std::string osd, uint8_t pos) = 0;
virtual void EnableOsd(bool enable, uint8_t channel) = 0;
virtual std::string GetStreamingUrl(uint8_t channel) = 0;
virtual bool UpdateTime(time_t ts) = 0;
virtual bool TakePhoto(uint8_t streamID, std::vector<uint8_t>& img) = 0;
virtual bool TakeVideo(uint8_t streamID, uint32_t duration, std::string path) = 0;
virtual bool HasAuthOnStreaming() const { return false; }
virtual bool SetResolution(uint8_t channel, uint8_t streamID, uint32_t resX, uint32_t resY) = 0;
void UpdateNetHandle(net_handle_t netHandle) { m_netHandle = netHandle; }
int GetLastError() const { return m_lastErrorCode; }
bool IsTimeout() const;
protected:
std::string CvtJSONToString(const Json::Value& data);
protected:
std::string m_ip;
std::string m_userName;
std::string m_password;
uint8_t m_channel;
net_handle_t m_netHandle;
int m_lastErrorCode;
};
#endif //MICROPHOTO_VENDORCTRL_H
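An illustrative sketch of consuming the VendorCtrl interface polymorphically. The concrete YuShiCtrl choice, IP address, credentials and channel number are all assumptions; any of the vendor subclasses in this changeset could be substituted.
#include "YuShiCtrl.h"
#include <ctime>
#include <memory>
#include <vector>
std::vector<uint8_t> captureSnapshot(net_handle_t netHandle)
{
    std::unique_ptr<VendorCtrl> ctrl(
        new YuShiCtrl("192.168.50.2", "admin", "password", /*channel*/ 1, netHandle));
    ctrl->UpdateTime(time(NULL));          // push the device clock before capturing
    std::vector<uint8_t> jpeg;
    if (!ctrl->TakePhoto(/*streamID*/ 1, jpeg))
        jpeg.clear();                      // GetLastError()/IsTimeout() expose the failure reason
    return jpeg;
}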

@ -1,237 +0,0 @@
//
// Created by Matthew on 2025/3/4.
//
#include "YuShiCtrl.h"
#include "httpclient.h"
#include "netcamera.h"
#include <json/json.h>
YuShiCtrl::~YuShiCtrl()
{
}
bool YuShiCtrl::SetResolution(uint8_t channel, uint8_t streamID, uint32_t resX, uint32_t resY)
{
return false;
}
bool YuShiCtrl::SetOsd(uint8_t channel, std::string osd, uint8_t pos)
{
// /LAPI/V1.0/Channels/<ID>/Media/OSDs/Contents
char url[128] = { 0 };
snprintf(url, sizeof(url), "http://%s/LAPI/V1.0/Channels/%u/Media/OSDs/Contents", m_ip.c_str(), (uint32_t)channel);
std::vector<uint8_t> resData;
string jsonstring;
switch (pos) {
case LEFT_TOP:
{
OSDJson(0, 1, osd, 0, 0, true, jsonstring);
break;
}
case RIGHT_TOP:
{
OSDJson(1, 1, osd, 9900, 0, false, jsonstring);
break;
}
case LEFT_BOTTOM:
{
OSDJson(2, 1, osd, 0, 9900, false, jsonstring);
break;
}
case RIGHT_BOTTOM:
{
OSDJson(3, 1, osd, 9900, 9900, false, jsonstring);
break;
}
}
int res = DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, jsonstring.c_str(), resData);
return res == 0;
}
void YuShiCtrl::EnableOsd(bool enable, uint8_t channel)
{
char url[128] = { 0 };
snprintf(url, sizeof(url), "http://%s/LAPI/V1.0/Channels/%u/Media/OSDs/Contents", m_ip.c_str(), (uint32_t)channel);
std::vector<uint8_t> resData;
int res = DoGetRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, resData);
if (res != 0 || resData.empty())
{
    return;
}
std::string jsonString(resData.begin(), resData.end());
Json::CharReaderBuilder reader;
Json::Value root;
std::string errors;
std::istringstream s(jsonString);
if (!Json::parseFromStream(reader, s, &root, &errors)) {
XYLOG(XYLOG_SEVERITY_ERROR, "Failed to parse JSON:%s", errors.c_str());
return;
}
Json::Value& data = root["Response"]["Data"];
if (data.isNull()) {
XYLOG(XYLOG_SEVERITY_ERROR,"Data not found in JSON");
return;
}
Json::Value& contentList = data["ContentList"];
for (auto& content : contentList) {
content["Enabled"] = enable ? 1 : 0;
}
Json::StreamWriterBuilder writer;
std::string putJsonString = Json::writeString(writer, data);
DoPutRequest(url, HTTP_AUTH_TYPE_BASIC, m_userName.c_str(), m_password.c_str(), m_netHandle, putJsonString.c_str(), resData);
}
std::string YuShiCtrl::GetStreamingUrl(uint8_t channel)
{
// /LAPI/V1.0/Channels/<ID>/Media/Video/Streams/<ID>/LiveStreamURL?TransType=<Tran
// sType>&TransProtocol=<TransProtocol>
char url[128] = { 0 };
snprintf(url, sizeof(url), "http://%s/LAPI/V1.0/Channels/%u/Media/Video/Streams/0/LiveStreamURL", m_ip.c_str(), (uint32_t)channel);
std::vector<uint8_t> resData;
int res = DoGetRequest(url, HTTP_AUTH_TYPE_DIGEST, m_userName.c_str(), m_password.c_str(), m_netHandle, resData);
if (res != 0 || resData.empty())
{
return "";
}
resData.push_back(0);
Json::CharReaderBuilder builder;
std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
Json::Value json;
const char* doc = (const char*)&(resData[0]);
if (reader->parse(doc, doc + resData.size() - 1, &json, NULL))
{
if (json.isMember("Response"))
{
Json::Value& jsonRes = json["Response"];
if (jsonRes.isMember("Data"))
{
Json::Value& jsonData = jsonRes["Data"];
if (jsonData.isMember("URL"))
{
return std::string(jsonData["URL"].asCString());
}
}
}
}
return "";
}
bool YuShiCtrl::UpdateTime(time_t ts)
{
// /LAPI/V1.0/System/Time
#if 0
Json::Value jsonData(Json::objectValue);
jsonData["TimeZone"] = "GMT+08:00";
jsonData["DeviceTime"] = (int64_t)ts;
jsonData["DateFormat"] = 0; // YYYY-MM-DD
jsonData["HourFormat"] = 1; // 24H
#endif
std::string contents = "{\"TimeZone\":\"GMT+08:00\",\"DateFormat\":0,\"HourFormat\":1,\"DeviceTime\":" + std::to_string(ts) + "}";
std::string url = "http://" + m_ip + "/LAPI/V1.0/System/Time";
std::vector<uint8_t> resData;
int res = DoPutRequest(url.c_str(), HTTP_AUTH_TYPE_DIGEST, m_userName.c_str(), m_password.c_str(), m_netHandle, contents.c_str(), resData);
if (res != 0)
{
return false;
}
return true;
}
bool YuShiCtrl::TakePhoto(uint8_t streamID, std::vector<uint8_t>& img)
{
// Yu Shi
char url[128] = { 0 };
int streamSid = 0; // should put into config
snprintf(url, sizeof(url), "http://%s/LAPI/V1.0/Channels/%u/Media/Video/Streams/%d/Snapshot", m_ip.c_str(), (uint32_t)streamID, streamSid);
int nRet = DoGetRequest(url, HTTP_AUTH_TYPE_DIGEST, m_userName.c_str(), m_password.c_str(), m_netHandle, img, &m_lastErrorCode);
return nRet == 0;
}
bool YuShiCtrl::TakeVideo(uint8_t streamID, uint32_t duration, std::string path) {
return false;
}
void YuShiCtrl::OSDJson(int id, bool enabled, std::string osdString, int x, int y, bool timeOn, std::string& jsonString)
{
Json::Value root;
root["Num"] = 1;
Json::Value contentList(Json::arrayValue);
Json::Value content;
content["ID"] = id;
content["Enabled"] = enabled;
int row = 1;
for (char ch : osdString) {
if (ch == '\n') {
row++;
}
}
content["Num"] = row;
Json::Value contentInfo(Json::arrayValue);
size_t start = 0;
size_t end = osdString.find('\n');
if(timeOn)
{
//If the time is to be displayed at this position
Json::Value info;
info["ContentType"] = 2;
info["Value"] = "";
contentInfo.append(info);
}
for (int i = 0; i < row; i++)
{
std::string line;
if (end == std::string::npos) {
line = osdString.substr(start);
} else {
line = osdString.substr(start, end - start);
start = end + 1;
end = osdString.find('\n', start);
}
Json::Value info;
info["ContentType"] = 1;
info["Value"] = line;
contentInfo.append(info);
}
content["ContentInfo"] = contentInfo;
Json::Value area;
Json::Value topLeft;
topLeft["X"] = x; //9900
topLeft["Y"] = y;
area["TopLeft"] = topLeft;
content["Area"] = area;
contentList.append(content);
root["ContentList"] = contentList;
Json::StreamWriterBuilder writer;
jsonString = Json::writeString(writer, root);
}

@ -1,30 +0,0 @@
//
// Created by Matthew on 2025/3/4.
//
#ifndef MICROPHOTO_YUSHICTRL_H
#define MICROPHOTO_YUSHICTRL_H
#include "VendorCtrl.h"
class YuShiCtrl : public VendorCtrl
{
public:
using VendorCtrl::VendorCtrl;
virtual ~YuShiCtrl();
virtual bool SetOsd(uint8_t channel, std::string osd, uint8_t pos);
virtual void EnableOsd(bool enable, uint8_t channel);
virtual std::string GetStreamingUrl(uint8_t streamID);
virtual bool UpdateTime(time_t ts);
virtual bool TakePhoto(uint8_t streamID, std::vector<uint8_t>& img);
virtual bool TakeVideo(uint8_t streamID, uint32_t duration, std::string path);
virtual bool SetResolution(uint8_t channel, uint8_t streamID, uint32_t resX, uint32_t resY);
private:
void OSDJson(int id, bool enabled, std::string osdString, int x, int y, bool timeOn, std::string& jsonString);
};
#endif //MICROPHOTO_YUSHICTRL_H

@ -10,7 +10,6 @@ static size_t OnWriteData(void* buffer, size_t size, size_t nmemb, void* lpVoid)
std::vector<uint8_t>* data = (std::vector<uint8_t>*)lpVoid; std::vector<uint8_t>* data = (std::vector<uint8_t>*)lpVoid;
if( NULL == data || NULL == buffer ) if( NULL == data || NULL == buffer )
{ {
XYLOG(XYLOG_SEVERITY_ERROR,"OnWriteData callback -1");
return -1; return -1;
} }
uint8_t* begin = (uint8_t *)buffer; uint8_t* begin = (uint8_t *)buffer;
@ -28,12 +27,11 @@ static int SockOptCallback(void *clientp, curl_socket_t curlfd, curlsocktype pur
{ {
int errcode = errno; int errcode = errno;
printf("android_setsocknetwork errno=%d", errcode); printf("android_setsocknetwork errno=%d", errcode);
XYLOG(XYLOG_SEVERITY_ERROR,"setsocknetwork -1, errcode=%d",errcode);
} }
return res == 0 ? CURL_SOCKOPT_OK : CURL_SOCKOPT_ERROR; return res == 0 ? CURL_SOCKOPT_OK : CURL_SOCKOPT_ERROR;
} }
int DoGetRequest(const char* url, int authType, const char* userName, const char* password, net_handle_t netHandle, std::vector<uint8_t>& data, int* curlResVal/* = NULL*/) int DoGetRequest(const char* url, int authType, const char* userName, const char* password, net_handle_t netHandle, std::vector<uint8_t>& data)
{ {
CURLcode nRet; CURLcode nRet;
std::string auth; std::string auth;
@ -63,10 +61,8 @@ int DoGetRequest(const char* url, int authType, const char* userName, const char
if (netHandle != NETWORK_UNSPECIFIED) if (netHandle != NETWORK_UNSPECIFIED)
{ {
#if 0
curl_easy_setopt(curl, CURLOPT_SOCKOPTFUNCTION, SockOptCallback); curl_easy_setopt(curl, CURLOPT_SOCKOPTFUNCTION, SockOptCallback);
curl_easy_setopt(curl, CURLOPT_SOCKOPTDATA, &netHandle); curl_easy_setopt(curl, CURLOPT_SOCKOPTDATA, &netHandle);
#endif
} }
curl_easy_setopt(curl, CURLOPT_FAILONERROR, 1); curl_easy_setopt(curl, CURLOPT_FAILONERROR, 1);
@ -87,10 +83,6 @@ int DoGetRequest(const char* url, int authType, const char* userName, const char
curl_easy_setopt(curl, CURLOPT_CONNECTTIMEOUT, 10); curl_easy_setopt(curl, CURLOPT_CONNECTTIMEOUT, 10);
nRet = curl_easy_perform(curl); nRet = curl_easy_perform(curl);
if (curlResVal != NULL)
{
*curlResVal = nRet;
}
long responseCode = 0; long responseCode = 0;
if (CURLE_OK == nRet) if (CURLE_OK == nRet)
@ -116,8 +108,7 @@ int DoGetRequest(const char* url, int authType, const char* userName, const char
} }
else else
{ {
curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &responseCode); XYLOG(XYLOG_SEVERITY_WARNING, "Net Photo failure, nRet=%d", (int)nRet);
XYLOG(XYLOG_SEVERITY_WARNING, "Net Photo failure, nRet=%d, code=%d", (int)nRet, (int)responseCode);
// printf("GET err=%d", nRet); // printf("GET err=%d", nRet);
} }
curl_easy_cleanup(curl); curl_easy_cleanup(curl);
@ -125,7 +116,7 @@ int DoGetRequest(const char* url, int authType, const char* userName, const char
return ((0 == nRet) && (responseCode == 200)) ? 0 : 1; return ((0 == nRet) && (responseCode == 200)) ? 0 : 1;
} }
int DoPutRequest(const char* url, int authType, const char* userName, const char* password, net_handle_t netHandle, const char* contents, std::vector<uint8_t>& data, int* curlResVal/* = NULL*/) int DoPutRequest(const char* url, int authType, const char* userName, const char* password, net_handle_t netHandle, const char* contents, char* data)
{ {
std::string auth; std::string auth;
@ -154,10 +145,8 @@ int DoPutRequest(const char* url, int authType, const char* userName, const char
if (netHandle != NETWORK_UNSPECIFIED) if (netHandle != NETWORK_UNSPECIFIED)
{ {
#if 0
curl_easy_setopt(curl, CURLOPT_SOCKOPTFUNCTION, SockOptCallback); curl_easy_setopt(curl, CURLOPT_SOCKOPTFUNCTION, SockOptCallback);
curl_easy_setopt(curl, CURLOPT_SOCKOPTDATA, &netHandle); curl_easy_setopt(curl, CURLOPT_SOCKOPTDATA, &netHandle);
#endif
} }
if(contents != NULL) if(contents != NULL)
@ -174,10 +163,6 @@ int DoPutRequest(const char* url, int authType, const char* userName, const char
curl_easy_setopt(curl, CURLOPT_CONNECTTIMEOUT, 10); curl_easy_setopt(curl, CURLOPT_CONNECTTIMEOUT, 10);
CURLcode nRet = curl_easy_perform(curl); CURLcode nRet = curl_easy_perform(curl);
if (curlResVal != NULL)
{
*curlResVal = nRet;
}
if (CURLE_OK != nRet) if (CURLE_OK != nRet)
{ {
printf("GET err=%d", nRet); printf("GET err=%d", nRet);
@ -212,7 +197,6 @@ bool requestCapture(uint8_t channel, uint8_t preset, const NET_PHOTO_INFO& photo
if (fp != NULL) if (fp != NULL)
{ {
fwrite(&data[0], data.size(), 1, fp); fwrite(&data[0], data.size(), 1, fp);
fdatasync(fileno(fp));
fclose(fp); fclose(fp);
res = true; res = true;
} }
@ -251,11 +235,10 @@ int UniviewResolutionSet(const NET_PHOTO_INFO& photoInfo, int channel, unsigned
Json::StreamWriterBuilder writer; Json::StreamWriterBuilder writer;
std::string sendbuf = Json::writeString(writer, outdata); std::string sendbuf = Json::writeString(writer, outdata);
std::vector<uint8_t> respContent; char respContent[1024];
DoPutRequest(path.c_str(), photoInfo.authType, photoInfo.userName, photoInfo.password, photoInfo.netHandle, sendbuf.c_str(), respContent); DoPutRequest(path.c_str(), photoInfo.authType, photoInfo.userName, photoInfo.password, photoInfo.netHandle, sendbuf.c_str(), respContent);
// respContent.push_back(0); XYLOG(XYLOG_SEVERITY_INFO, "sendlen= %zu, respContent=%s", sendbuf.size(), respContent);
// XYLOG(XYLOG_SEVERITY_DEBUG, "Sendlen= %zu, respContent=%s", sendbuf.size(), (const char*)&respContent[0]);
return 0; return 0;
} }

@ -18,7 +18,7 @@
bool setIPAddress(const char *if_name, const char *ip_addr, const char *net_mask, const char *gateway_addr); bool setIPAddress(const char *if_name, const char *ip_addr, const char *net_mask, const char *gateway_addr);
int DoGetRequest(const char* url, int authType, const char* userName, const char* password, net_handle_t netHandle, std::vector<uint8_t>& data, int* curlResVal = NULL); int DoGetRequest(const char* url, int authType, const char* userName, const char* password, net_handle_t netHandle, std::vector<uint8_t>& data);
int DoPutRequest(const char* url, int authType, const char* userName, const char* password, net_handle_t netHandle, const char* contents, std::vector<uint8_t>& data, int* curlResVal = NULL); int DoPutRequest(const char* url, int authType, const char* userName, const char* password, net_handle_t netHandle, const char* contents, char* data);
#endif // __HTTP_CLIENT__ #endif // __HTTP_CLIENT__

File diff suppressed because it is too large

@ -0,0 +1,51 @@
//
// Created by Matthew on 2025/1/27.
//
#ifndef MICROPHOTO_RTMPSUCK_H
#define MICROPHOTO_RTMPSUCK_H
#include <librtmp/rtmp_sys.h>
#include <librtmp/log.h>
enum
{
STREAMING_ACCEPTING,
STREAMING_IN_PROGRESS,
STREAMING_STOPPING,
STREAMING_STOPPED
};
typedef struct Flist
{
struct Flist *f_next;
FILE *f_file;
AVal f_path;
} Flist;
typedef struct Plist
{
struct Plist *p_next;
RTMPPacket p_pkt;
} Plist;
typedef struct
{
int sockfd;
int state;
uint32_t stamp;
RTMP rs;
RTMP rc;
Plist *rs_pkt[2]; /* head, tail */
Plist *rc_pkt[2]; /* head, tail */
Flist *f_head, *f_tail;
Flist *f_cur;
} STREAMING_SERVER;
STREAMING_SERVER *startStreaming(const char *address, int port);
void stopStreaming(STREAMING_SERVER * server);
int RtmpSuckMain(int logAll);
#endif //MICROPHOTO_RTMPSUCK_H
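A short, hedged sketch of driving the proxy declared above; the bind address, port and shutdown condition are placeholders, and RtmpSuckMain(int logAll) appears to be a self-contained entry point that wraps the same start/stop pair.
#include "RtmpSuck.h"
void runProxy()
{
    // Listen on the conventional RTMP port; both values are assumptions.
    STREAMING_SERVER *server = startStreaming("0.0.0.0", 1935);
    if (server == NULL)
        return;
    // ... serve clients until some external signal asks us to shut down ...
    stopStreaming(server);
}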

@ -0,0 +1,58 @@
/* Thread compatibility glue
* Copyright (C) 2009 Howard Chu
*
* This Program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2, or (at your option)
* any later version.
*
* This Program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with RTMPDump; see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/gpl.html
*
*/
#include "thread.h"
#include <librtmp/log.h>
#ifdef WIN32
#include <errno.h>
HANDLE
ThreadCreate(thrfunc *routine, void *args)
{
HANDLE thd;
thd = (HANDLE) _beginthread(routine, 0, args);
if (thd == -1L)
RTMP_LogPrintf("%s, _beginthread failed with %d\n", __FUNCTION__, errno);
return thd;
}
#else
pthread_t
ThreadCreate(thrfunc *routine, void *args)
{
pthread_t id = 0;
pthread_attr_t attributes;
int ret;
pthread_attr_init(&attributes);
pthread_attr_setdetachstate(&attributes, PTHREAD_CREATE_DETACHED);
ret =
pthread_create(&id, &attributes, routine, args);
if (ret != 0)
RTMP_LogPrintf("%s, pthread_create failed with %d\n", __FUNCTION__, ret);
return id;
}
#endif

@ -0,0 +1,40 @@
/* Thread compatibility glue
* Copyright (C) 2009 Howard Chu
*
* This Program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2, or (at your option)
* any later version.
*
* This Program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with RTMPDump; see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/gpl.html
*
*/
#ifndef __THREAD_H__
#define __THREAD_H__ 1
#ifdef WIN32
#include <windows.h>
#include <process.h>
#define TFTYPE void
#define TFRET()
#define THANDLE HANDLE
#else
#include <pthread.h>
#define TFTYPE void *
#define TFRET() return 0
#define THANDLE pthread_t
#endif
typedef TFTYPE (thrfunc)(void *arg);
THANDLE ThreadCreate(thrfunc *routine, void *args);
#endif /* __THREAD_H__ */
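A small sketch of the portability glue above; the worker body is illustrative. TFTYPE/TFRET smooth over the void vs. void* return difference, and ThreadCreate yields a detached pthread (or a _beginthread handle on WIN32).
#include "thread.h"
static TFTYPE worker(void *arg)
{
    (void)arg;   // background work would go here
    TFRET();     // expands to "return 0" on pthreads and to nothing on WIN32
}
void spawnWorker(void)
{
    THANDLE h = ThreadCreate(worker, NULL);
    (void)h;     // the thread is created detached, so the handle is informational only
}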

@ -112,6 +112,37 @@ int set_port_attr (int fd, int baudrate, int databit, const char *stopbit, char
return (tcsetattr (fd, TCSANOW, &opt)); return (tcsetattr (fd, TCSANOW, &opt));
} }
static void setInt(int cmd, int value)
{
int fd = open("/dev/mtkgpioctrl", O_RDONLY);
IOT_PARAM param;
param.cmd = cmd;
param.value = value;
// LOGE("set_int fd=%d,cmd=%d,value=%d\r\n",fd, cmd, value);
if( fd > 0 )
{
int res = ioctl(fd, IOT_PARAM_WRITE, &param);
// LOGE("set_int22 cmd=%d,value=%d,result=%d\r\n",param.cmd, param.value, param.result);
close(fd);
}
return;
}
static void setRS485Enable(bool z) {
setInt(CMD_SET_485_EN_STATE, z ? 1 : 0);
}
static void set485WriteMode() {
setInt(CMD_SET_485_STATE, 1);
}
static void set485ReadMode() {
setInt(CMD_SET_485_STATE, 0);
}
static void set12VEnable(bool z) {
setInt(CMD_SET_12V_EN_STATE, z ? 1 : 0);
}
/********************************************************************************* /*********************************************************************************
* * * *
**********************************************************************************/ **********************************************************************************/

@ -8,6 +8,10 @@
#include <string> #include <string>
#include "GPIOControl.h" #include "GPIOControl.h"
#define MAX_STRING_LEN 32
#define IOT_PARAM_WRITE 0xAE
#define IOT_PARAM_READ 0xAF
#define LOGE(fmt, args...) __android_log_print(ANDROID_LOG_ERROR, "serial_port_comm", fmt, ##args) #define LOGE(fmt, args...) __android_log_print(ANDROID_LOG_ERROR, "serial_port_comm", fmt, ##args)
// Serial port parameters // Serial port parameters
@ -30,6 +34,14 @@ typedef struct
unsigned char m_au8RecvBuf[128];/* */ unsigned char m_au8RecvBuf[128];/* */
} SIO_PARAM_SERIAL_DEF; } SIO_PARAM_SERIAL_DEF;
typedef struct
{
int cmd;
int value;
int result;
long value2;
char str[MAX_STRING_LEN];
}IOT_PARAM;
void PortDataProcess( void ); void PortDataProcess( void );
int serial_port_comm(); int serial_port_comm();

@ -46,9 +46,6 @@ public class BridgeProvider extends ContentProvider {
private final static String PATH_RECOG_PIC = "/recogPic"; private final static String PATH_RECOG_PIC = "/recogPic";
private final static String PATH_REQUEST_PWR_CTRL = "/requestPwrCtrl";
private final static String PATH_RELEASE_PWR_CTRL = "/releasePwrCtrl";
public BridgeProvider() { public BridgeProvider() {
Log.i(TAG, "BridgeProvider"); Log.i(TAG, "BridgeProvider");
} }
@ -88,9 +85,6 @@ public class BridgeProvider extends ContentProvider {
matcher.addURI(AUTHORITY, PATH_QUERY_SEC_VERSION, 1); matcher.addURI(AUTHORITY, PATH_QUERY_SEC_VERSION, 1);
matcher.addURI(AUTHORITY, PATH_QUERY_BATTERY_VOLTAGE, 2); matcher.addURI(AUTHORITY, PATH_QUERY_BATTERY_VOLTAGE, 2);
matcher.addURI(AUTHORITY, PATH_RECOG_PIC, 3); matcher.addURI(AUTHORITY, PATH_RECOG_PIC, 3);
matcher.addURI(AUTHORITY, PATH_REQUEST_PWR_CTRL, 4);
matcher.addURI(AUTHORITY, PATH_RELEASE_PWR_CTRL, 5);
Cursor cursor = null; Cursor cursor = null;
int matched = matcher.match(uri); int matched = matcher.match(uri);
@ -104,12 +98,6 @@ public class BridgeProvider extends ContentProvider {
case 3: case 3:
cursor = recoganizePicture(uri, selection, selectionArgs); cursor = recoganizePicture(uri, selection, selectionArgs);
break; break;
case 4:
cursor = requestPowerControl(uri, selection, selectionArgs);
break;
case 5:
cursor = recoganizePicture(uri, selection, selectionArgs);
break;
default: default:
break; break;
} }
@ -181,48 +169,6 @@ public class BridgeProvider extends ContentProvider {
return matrixCursor; return matrixCursor;
} }
private Cursor requestPowerControl(Uri uri, String selection, String[] selectionArgs) {
String decodedSelection = stringFromBase64(selection);
int type = 0;
if (!TextUtils.isEmpty(decodedSelection)) {
Uri u = Uri.parse("http://a.com/?" + decodedSelection);
String val = u.getQueryParameter("type");
try {
type = Integer.parseInt(val);
} catch (Exception ex) {
ex.printStackTrace();
}
}
long nativeHandle = MicroPhotoService.requestPowerControl(type);
String[] columns = { "pwrCtrl" };
MatrixCursor matrixCursor = new MatrixCursor(columns, 1);
matrixCursor.addRow(new Object[] { Long.valueOf(nativeHandle) });
return matrixCursor;
}
private Cursor releasePowerControl(Uri uri, String selection, String[] selectionArgs) {
String decodedSelection = stringFromBase64(selection);
long nativeHandle = 0;
if (!TextUtils.isEmpty(decodedSelection)) {
Uri u = Uri.parse("http://a.com/?" + decodedSelection);
String val = u.getQueryParameter("handle");
try {
nativeHandle = Long.parseLong(val);
} catch (Exception ex) {
ex.printStackTrace();
}
}
boolean res = MicroPhotoService.releasePowerControl(nativeHandle);
String[] columns = { "result" };
MatrixCursor matrixCursor = new MatrixCursor(columns, 1);
matrixCursor.addRow(new Object[] { Integer.valueOf(res ? 1 : 0) });
return matrixCursor;
}
private Cursor recoganizePicture(Uri uri, String selection, String[] selectionArgs) { private Cursor recoganizePicture(Uri uri, String selection, String[] selectionArgs) {
String decodedSelection = stringFromBase64(selection); String decodedSelection = stringFromBase64(selection);

@ -0,0 +1,222 @@
package com.xypower.mpapp;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.graphics.Rect;
import android.os.IBinder;
import android.text.Editable;
import android.text.TextWatcher;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.WindowManager;
import android.view.inputmethod.InputMethodManager;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
public class FloatingWindow extends Service {
private Context mContext;
private WindowManager mWindowManager;
private View mView;
@Override
public IBinder onBind(Intent intent) {
return null;
}
@Override
public void onCreate() {
super.onCreate();
mContext = this;
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
mWindowManager = (WindowManager) getSystemService(WINDOW_SERVICE);
allAboutLayout(intent);
moveView();
return super.onStartCommand(intent, flags, startId);
}
@Override
public void onDestroy() {
try {
if (mView != null) {
mWindowManager.removeView(mView);
}
} catch (Exception ex) {
// ex.printStackTrace();
Log.e("FW", "Exception " + ex.getMessage());
}
super.onDestroy();
}
WindowManager.LayoutParams mWindowsParams;
private void moveView() {
/*
DisplayMetrics metrics = mContext.getResources().getDisplayMetrics();
int width = (int) (metrics.widthPixels * 1f);
int height = (int) (metrics.heightPixels * 1f);
mWindowsParams = new WindowManager.LayoutParams(
width,//WindowManager.LayoutParams.WRAP_CONTENT,
height,//WindowManager.LayoutParams.WRAP_CONTENT,
//WindowManager.LayoutParams.TYPE_SYSTEM_ALERT,
(Build.VERSION.SDK_INT <= 25) ? WindowManager.LayoutParams.TYPE_PHONE : WindowManager.LayoutParams.TYPE_APPLICATION_OVERLAY
,
//WindowManager.LayoutParams.FLAG_NOT_TOUCH_MODAL,
WindowManager.LayoutParams.FLAG_NOT_TOUCH_MODAL
| WindowManager.LayoutParams.FLAG_LAYOUT_IN_SCREEN // Not displaying keyboard on bg activity's EditText
| WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON
| WindowManager.LayoutParams.FLAG_DISMISS_KEYGUARD
| WindowManager.LayoutParams.FLAG_SHOW_WHEN_LOCKED
| WindowManager.LayoutParams.FLAG_TURN_SCREEN_ON,
//WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE, //Not work with EditText on keyboard
PixelFormat.TRANSLUCENT);
mWindowsParams.gravity = Gravity.TOP | Gravity.LEFT;
//params.x = 0;
mWindowsParams.y = 100;
mWindowManager.addView(mView, mWindowsParams);
mView.setOnTouchListener(new View.OnTouchListener() {
private int initialX;
private int initialY;
private float initialTouchX;
private float initialTouchY;
long startTime = System.currentTimeMillis();
@Override
public boolean onTouch(View v, MotionEvent event) {
if (System.currentTimeMillis() - startTime <= 300) {
return false;
}
if (isViewInBounds(mView, (int) (event.getRawX()), (int) (event.getRawY()))) {
editTextReceiveFocus();
} else {
editTextDontReceiveFocus();
}
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN:
initialX = mWindowsParams.x;
initialY = mWindowsParams.y;
initialTouchX = event.getRawX();
initialTouchY = event.getRawY();
break;
case MotionEvent.ACTION_UP:
break;
case MotionEvent.ACTION_MOVE:
mWindowsParams.x = initialX + (int) (event.getRawX() - initialTouchX);
mWindowsParams.y = initialY + (int) (event.getRawY() - initialTouchY);
mWindowManager.updateViewLayout(mView, mWindowsParams);
break;
}
return false;
}
});
*/
}
private boolean isViewInBounds(View view, int x, int y) {
Rect outRect = new Rect();
int[] location = new int[2];
view.getDrawingRect(outRect);
view.getLocationOnScreen(location);
outRect.offset(location[0], location[1]);
return outRect.contains(x, y);
}
private void editTextReceiveFocus() {
if (!wasInFocus) {
mWindowsParams.flags = WindowManager.LayoutParams.FLAG_NOT_TOUCH_MODAL | WindowManager.LayoutParams.FLAG_WATCH_OUTSIDE_TOUCH;
mWindowManager.updateViewLayout(mView, mWindowsParams);
wasInFocus = true;
}
}
private void editTextDontReceiveFocus() {
if (wasInFocus) {
mWindowsParams.flags = WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE | WindowManager.LayoutParams.FLAG_WATCH_OUTSIDE_TOUCH;
mWindowManager.updateViewLayout(mView, mWindowsParams);
wasInFocus = false;
hideKeyboard(mContext, edt1);
}
}
private boolean wasInFocus = true;
private EditText edt1;
private void allAboutLayout(Intent intent) {
LayoutInflater layoutInflater = (LayoutInflater) mContext.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
mView = layoutInflater.inflate(R.layout.ovelay_window, null);
edt1 = (EditText) mView.findViewById(R.id.edt1);
final TextView tvValue = (TextView) mView.findViewById(R.id.tvValue);
Button btnClose = (Button) mView.findViewById(R.id.btnClose);
edt1.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
mWindowsParams.flags = WindowManager.LayoutParams.FLAG_NOT_TOUCH_MODAL | WindowManager.LayoutParams.FLAG_WATCH_OUTSIDE_TOUCH;
// mWindowsParams.softInputMode = WindowManager.LayoutParams.SOFT_INPUT_STATE_VISIBLE;
mWindowManager.updateViewLayout(mView, mWindowsParams);
wasInFocus = true;
showSoftKeyboard(v);
}
});
edt1.addTextChangedListener(new TextWatcher() {
@Override
public void beforeTextChanged(CharSequence charSequence, int i, int i1, int i2) {
}
@Override
public void onTextChanged(CharSequence charSequence, int i, int i1, int i2) {
tvValue.setText(edt1.getText());
}
@Override
public void afterTextChanged(Editable editable) {
}
});
btnClose.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
stopSelf();
}
});
}
private void hideKeyboard(Context context, View view) {
if (view != null) {
InputMethodManager imm = (InputMethodManager) context.getSystemService(Context.INPUT_METHOD_SERVICE);
imm.hideSoftInputFromWindow(view.getWindowToken(), 0);
}
}
public void showSoftKeyboard(View view) {
if (view.requestFocus()) {
InputMethodManager imm = (InputMethodManager)
getSystemService(Context.INPUT_METHOD_SERVICE);
imm.showSoftInput(view, InputMethodManager.SHOW_IMPLICIT);
}
}
}
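For reference, the commented-out moveView() body above targets TYPE_APPLICATION_OVERLAY on API 26+, which only displays if the user has granted the "draw over other apps" permission. A minimal sketch of that check before starting the service, assuming it is invoked from an Activity; OverlayPermissionHelper and REQUEST_OVERLAY are illustrative names that do not appear in this compare:

import android.app.Activity;
import android.content.Intent;
import android.net.Uri;
import android.os.Build;
import android.provider.Settings;

public final class OverlayPermissionHelper {
    // Arbitrary request code used only by this example.
    private static final int REQUEST_OVERLAY = 1001;

    public static void startFloatingWindowIfAllowed(Activity activity) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && !Settings.canDrawOverlays(activity)) {
            // Send the user to the "display over other apps" screen for this package first.
            Intent intent = new Intent(Settings.ACTION_MANAGE_OVERLAY_PERMISSION,
                    Uri.parse("package:" + activity.getPackageName()));
            activity.startActivityForResult(intent, REQUEST_OVERLAY);
            return;
        }
        activity.startService(new Intent(activity, FloatingWindow.class));
    }
}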

@ -1,19 +0,0 @@
package com.xypower.mpapp;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.util.Log;
public class HeartBeatResponseReceiver extends BroadcastReceiver {
@Override
public void onReceive(Context context, Intent intent) {
String action = intent.getAction();
if ("com.systemui.ACTION_HEARTBEAT_RESPONSE".equals(action)) {
long timestamp = intent.getLongExtra("timestamp", 0);
Log.d("MpApp","system broadcast listener, timestamp:"+timestamp);
MicroPhotoService.infoLog("received heartbeat broadcast, timestamp:" + timestamp);
}
}
}

@ -1,20 +1,15 @@
package com.xypower.mpapp; package com.xypower.mpapp;
import android.Manifest; import android.Manifest;
import android.app.Activity;
import android.app.AlarmManager;
import android.app.PendingIntent;
import android.content.Context; import android.content.Context;
import android.content.DialogInterface; import android.content.DialogInterface;
import android.content.Intent; import android.content.Intent;
import android.content.pm.PackageManager; import android.content.pm.PackageManager;
import android.location.Location; import android.location.Location;
import android.location.LocationListener; import android.location.LocationListener;
import android.net.Uri;
import android.os.Build; import android.os.Build;
import android.os.Handler; import android.os.Handler;
import android.os.Messenger; import android.os.Messenger;
import android.os.PowerManager;
import android.os.StrictMode; import android.os.StrictMode;
import androidx.appcompat.app.ActionBar; import androidx.appcompat.app.ActionBar;
@ -23,8 +18,6 @@ import androidx.core.app.ActivityCompat;
import androidx.appcompat.app.AppCompatActivity; import androidx.appcompat.app.AppCompatActivity;
import android.os.Bundle; import android.os.Bundle;
import android.os.SystemClock;
import android.provider.Settings;
import android.telephony.SubscriptionManager; import android.telephony.SubscriptionManager;
import android.text.TextUtils; import android.text.TextUtils;
import android.util.Log; import android.util.Log;
@ -35,11 +28,11 @@ import android.widget.Toast;
import com.dev.devapi.api.SysApi; import com.dev.devapi.api.SysApi;
import com.xypower.common.CameraUtils; import com.xypower.common.CameraUtils;
import com.xypower.common.FilesUtils;
import com.xypower.common.MicroPhotoContext; import com.xypower.common.MicroPhotoContext;
import com.xypower.mpapp.databinding.ActivityMainBinding; import com.xypower.mpapp.databinding.ActivityMainBinding;
import com.xypower.mpapp.utils.LocationUtil; import com.xypower.mpapp.utils.LocationUtil;
import java.io.File; import java.io.File;
import java.lang.reflect.Method; import java.lang.reflect.Method;
import java.text.SimpleDateFormat; import java.text.SimpleDateFormat;
@ -55,20 +48,10 @@ public class MainActivity extends AppCompatActivity {
private Messenger mMessenger = null; private Messenger mMessenger = null;
private long mConfigModificationTime = 0;
@Override @Override
protected void onCreate(Bundle savedInstanceState) { protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState); super.onCreate(savedInstanceState);
Log.d(TAG, "Start inflate");
binding = ActivityMainBinding.inflate(getLayoutInflater());
Log.d(TAG, "Finish inflate");
setContentView(binding.getRoot());
// getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_HIDDEN);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_ALT_FOCUSABLE_IM);
try {
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.R) { if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.R) {
int activeSubId = SubscriptionManager.getActiveDataSubscriptionId(); int activeSubId = SubscriptionManager.getActiveDataSubscriptionId();
if (activeSubId == -1) { if (activeSubId == -1) {
@ -76,25 +59,18 @@ public class MainActivity extends AppCompatActivity {
} }
} }
Log.d(TAG, "Start inflate");
binding = ActivityMainBinding.inflate(getLayoutInflater());
Log.d(TAG, "Finish inflate");
setContentView(binding.getRoot());
// getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_HIDDEN);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_ALT_FOCUSABLE_IM);
ActionBar actionBar = getSupportActionBar(); ActionBar actionBar = getSupportActionBar();
Date date = new Date(BuildConfig.BUILD_TIMESTAMP); Date date = new Date(BuildConfig.BUILD_TIMESTAMP);
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm"); SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm");
String caption = "MP"; actionBar.setTitle(actionBar.getTitle().toString() + " v" + MicroPhotoContext.getVersionName(getApplicationContext()) + " " + sdf.format(date));
switch (MicroPhotoService.getCustomAppId()) {
case 1:
caption = "RP";
break;
case 2:
caption = "N938";
break;
default:
break;
}
caption += " v" + MicroPhotoContext.getVersionName(getApplicationContext()) + " " + sdf.format(date);
sdf = new SimpleDateFormat("MM-dd HH:mm:ss");
caption += " / " + sdf.format(new Date());
actionBar.setTitle(caption);
StrictMode.ThreadPolicy policy = new StrictMode.ThreadPolicy.Builder().permitAll().build(); StrictMode.ThreadPolicy policy = new StrictMode.ThreadPolicy.Builder().permitAll().build();
StrictMode.setThreadPolicy(policy); StrictMode.setThreadPolicy(policy);
@ -114,7 +90,7 @@ public class MainActivity extends AppCompatActivity {
if (!MicroPhotoContext.hasMpAppConfig(appContext)) { if (!MicroPhotoContext.hasMpAppConfig(appContext)) {
String mstPath = MicroPhotoContext.buildMpResAppDir(appContext); String mstPath = MicroPhotoContext.buildMasterAppDir(appContext);
File mstPathFile = new File(mstPath); File mstPathFile = new File(mstPath);
File mpdataFile = new File(mstPathFile, "mpdata"); File mpdataFile = new File(mstPathFile, "mpdata");
@ -122,7 +98,7 @@ public class MainActivity extends AppCompatActivity {
File dataFile = new File(appPathFile, "data"); File dataFile = new File(appPathFile, "data");
if (dataFile.exists()) { if (dataFile.exists()) {
try { try {
FilesUtils.delete(dataFile); dataFile.delete();
} catch (Exception ex) { } catch (Exception ex) {
ex.printStackTrace(); ex.printStackTrace();
} }
@ -134,18 +110,6 @@ public class MainActivity extends AppCompatActivity {
ex.printStackTrace(); ex.printStackTrace();
} }
} }
else {
Intent resIntent = getPackageManager().getLaunchIntentForPackage(MicroPhotoContext.PACKAGE_NAME_MPRES);
if (resIntent != null) {
resIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
resIntent.putExtra("initres", 1);
String sn = MicroPhotoService.getSerialNumber();
if (!TextUtils.isEmpty(sn)) {
resIntent.putExtra("sn", sn);
}
startActivity(resIntent);
}
}
} }
Intent intent = getIntent(); Intent intent = getIntent();
@ -161,7 +125,38 @@ public class MainActivity extends AppCompatActivity {
Log.d(TAG, "MainActivity: reboot=" + rebootFlag + " noDelay=" + noDelay); Log.d(TAG, "MainActivity: reboot=" + rebootFlag + " noDelay=" + noDelay);
MicroPhotoContext.AppConfig appConfig = loadConfigInfo();
final MicroPhotoContext.AppConfig appConfig = MicroPhotoContext.getMpAppConfig(appContext);
if (TextUtils.isEmpty(appConfig.cmdid)) {
appConfig.cmdid = MicroPhotoService.getSerialNumber();
binding.cmdid.setText(appConfig.cmdid);
} else {
binding.cmdid.setText(appConfig.cmdid);
}
binding.server.setText(appConfig.server);
binding.port.setText(appConfig.port != 0 ? Integer.toString(appConfig.port) : "");
String protocolStr = appConfig.protocol + "-";
for (int idx = 0; idx < binding.protocol.getCount(); idx++) {
String item = binding.protocol.getItemAtPosition(idx).toString();
if (item.startsWith(protocolStr)) {
binding.protocol.setSelection(idx);
break;
}
}
if (appConfig.networkProtocol < binding.networkProtocol.getCount()) {
binding.networkProtocol.setSelection(appConfig.networkProtocol);
}
if (appConfig.encryption < binding.encryptions.getCount()) {
binding.encryptions.setSelection(appConfig.encryption);
}
binding.heartbeat.setText((appConfig.heartbeat > 0) ? Integer.toString(appConfig.heartbeat) : "");
binding.packetSize.setText((appConfig.packetSize > 0) ? Integer.toString(appConfig.packetSize) : "");
if (appConfig.network < binding.network.getCount()) {
binding.network.setSelection(appConfig.network);
}
binding.btnStartServ.setEnabled(!MicroPhotoService.isRunning); binding.btnStartServ.setEnabled(!MicroPhotoService.isRunning);
binding.btnStopServ.setEnabled(MicroPhotoService.isRunning); binding.btnStopServ.setEnabled(MicroPhotoService.isRunning);
@ -169,37 +164,29 @@ public class MainActivity extends AppCompatActivity {
if (MicroPhotoService.isRunning) { if (MicroPhotoService.isRunning) {
Intent intent2 = new Intent(MainActivity.this, MicroPhotoService.class); Intent intent2 = new Intent(MainActivity.this, MicroPhotoService.class);
try { try {
// stopService(intent2); stopService(intent2);
} catch (Exception ex) { } catch (Exception ex) {
ex.printStackTrace(); ex.printStackTrace();
} }
} }
if (MicroPhotoContext.hasMpAppConfig(appContext)) { if (MicroPhotoContext.hasMpAppConfig(appContext)) {
final Runnable runnable = new Runnable() { Runnable runnable = new Runnable() {
@Override @Override
public void run() { public void run() {
if (!MicroPhotoService.isRunning && !TextUtils.isEmpty(appConfig.cmdid) && !TextUtils.isEmpty(appConfig.server) && appConfig.port != 0) { if (!MicroPhotoService.isRunning && !TextUtils.isEmpty(appConfig.cmdid) && !TextUtils.isEmpty(appConfig.server) && appConfig.port != 0) {
if (binding.btnStartServ.isEnabled()) { if (binding.btnStartServ.isEnabled()) {
Log.i(TAG, "Perform AutoStart");
binding.btnStartServ.performClick(); binding.btnStartServ.performClick();
} }
} }
} }
}; };
long timeout = 500;
if (SystemClock.elapsedRealtime() < 180000) {
// In 3 minutes
timeout = 10000; // in 10 seconds
}
Handler handler = new Handler(); Handler handler = new Handler();
handler.postDelayed(runnable, timeout); handler.postDelayed(runnable, 500);
Log.i(TAG, "Set AutoStart after " + Long.toString(timeout) + "ms");
}
} catch (Exception ex) {
ex.printStackTrace();
} }
} }
@ -208,63 +195,6 @@ public class MainActivity extends AppCompatActivity {
super.onDestroy(); super.onDestroy();
} }
@Override
protected void onResume() {
super.onResume();
try {
File file = MicroPhotoContext.getMpAppConfigFile(getApplicationContext());
if (file.lastModified() > mConfigModificationTime) {
loadConfigInfo();
}
} catch (Exception ex) {
ex.printStackTrace();
}
}
protected MicroPhotoContext.AppConfig loadConfigInfo() {
final MicroPhotoContext.AppConfig appConfig = MicroPhotoContext.getMpAppConfig(getApplicationContext());
mConfigModificationTime = appConfig.modificationTime;
if (TextUtils.isEmpty(appConfig.cmdid)) {
appConfig.cmdid = MicroPhotoService.getSerialNumber();
binding.cmdid.setText(appConfig.cmdid);
} else {
binding.cmdid.setText(appConfig.cmdid);
}
binding.server.setText(appConfig.server);
binding.port.setText(appConfig.port != 0 ? Integer.toString(appConfig.port) : "");
String protocolStr = appConfig.protocol + "-";
for (int idx = 0; idx < binding.protocol.getCount(); idx++) {
String item = binding.protocol.getItemAtPosition(idx).toString();
if (item.startsWith(protocolStr)) {
binding.protocol.setSelection(idx);
break;
}
}
protocolStr = appConfig.networkProtocol + "-";
for (int idx = 0; idx < binding.networkProtocol.getCount(); idx++) {
String item = binding.networkProtocol.getItemAtPosition(idx).toString();
if (item.startsWith(protocolStr)) {
binding.networkProtocol.setSelection(idx);
break;
}
}
if (appConfig.encryption < binding.encryptions.getCount()) {
binding.encryptions.setSelection(appConfig.encryption);
}
binding.heartbeat.setText((appConfig.heartbeat > 0) ? Integer.toString(appConfig.heartbeat) : "");
binding.packetSize.setText((appConfig.packetSize > 0) ? Integer.toString(appConfig.packetSize) : "");
if (appConfig.network < binding.network.getCount()) {
binding.network.setSelection(appConfig.network);
}
return appConfig;
}
protected void initListener() { protected void initListener() {
this.binding.btnStartServ.setOnClickListener(new View.OnClickListener() { this.binding.btnStartServ.setOnClickListener(new View.OnClickListener() {
@ -296,7 +226,6 @@ public class MainActivity extends AppCompatActivity {
startMicroPhotoService(appContext, curAppConfig, mMessenger); startMicroPhotoService(appContext, curAppConfig, mMessenger);
Log.i(TAG, "Service auto-started");
binding.btnStartServ.setEnabled(false); binding.btnStartServ.setEnabled(false);
binding.btnStopServ.setEnabled(true); binding.btnStopServ.setEnabled(true);
} }
@ -400,7 +329,6 @@ public class MainActivity extends AppCompatActivity {
@Override @Override
public void onClick(View view) { public void onClick(View view) {
MicroPhotoService.infoLog("Call stopTerminalService Manually");
MicroPhotoService.stopTerminalService(getApplicationContext()); MicroPhotoService.stopTerminalService(getApplicationContext());
binding.btnStartServ.setEnabled(true); binding.btnStartServ.setEnabled(true);
@ -434,53 +362,8 @@ public class MainActivity extends AppCompatActivity {
binding.btnRestartApp.setOnClickListener(new View.OnClickListener() { binding.btnRestartApp.setOnClickListener(new View.OnClickListener() {
@Override @Override
public void onClick(View v) { public void onClick(View v) {
restartSelfWithStartActivity(); Context context = v.getContext().getApplicationContext();
// restartSelfWithAlarmManager(); MicroPhotoService.restartApp(context, context.getPackageName(), "Manual Restart From MainActivity");
}
private void restartSelfWithStartActivity() {
final Context context = getApplicationContext();
Intent intent = new Intent(context, MainActivity.class);
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_NEW_TASK);
int noDelay = 1;
intent.putExtra("noDelay", noDelay);
intent.putExtra("reason", "Manual Restart From MainActivity");
context.startActivity(intent);
final Handler handler = new Handler();
finish();
handler.postDelayed(new Runnable() {
@Override
public void run() {
System.exit(0);
}
}, 0);
}
private void restartSelfWithAlarmManager() {
Intent intent = new Intent(MainActivity.this, MainActivity.class);
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_NEW_TASK);
int noDelay = 1;
intent.putExtra("noDelay", noDelay);
intent.putExtra("reason", "Manual Restart From MainActivity");
// Create PendingIntent
PendingIntent pendingIntent = PendingIntent.getActivity(
MainActivity.this, 12312, intent, PendingIntent.FLAG_UPDATE_CURRENT/* | PendingIntent.FLAG_IMMUTABLE*/);
AlarmManager alarmManager = (AlarmManager) MainActivity.this.getSystemService(Context.ALARM_SERVICE);
if (alarmManager != null) {
alarmManager.set(AlarmManager.ELAPSED_REALTIME_WAKEUP, SystemClock.elapsedRealtime() + 200, pendingIntent);
}
MainActivity.this.finish();
System.exit(0);
} }
}); });
@ -512,6 +395,8 @@ public class MainActivity extends AppCompatActivity {
binding.btnCameraInfo.setOnClickListener(new View.OnClickListener() { binding.btnCameraInfo.setOnClickListener(new View.OnClickListener() {
@Override @Override
public void onClick(View view) { public void onClick(View view) {
MicroPhotoService.setOtgState(true);
MicroPhotoService.setCam3V3Enable(true); MicroPhotoService.setCam3V3Enable(true);
Runnable runnable = new Runnable() { Runnable runnable = new Runnable() {
@ -522,6 +407,7 @@ public class MainActivity extends AppCompatActivity {
Log.d(TAG, cameraInfo); Log.d(TAG, cameraInfo);
MicroPhotoService.setCam3V3Enable(false); MicroPhotoService.setCam3V3Enable(false);
MicroPhotoService.setOtgState(false);
MicroPhotoService.infoLog(cameraInfo); MicroPhotoService.infoLog(cameraInfo);
Toast.makeText(view.getContext(), cameraInfo, Toast.LENGTH_LONG).show(); Toast.makeText(view.getContext(), cameraInfo, Toast.LENGTH_LONG).show();
@ -533,6 +419,28 @@ public class MainActivity extends AppCompatActivity {
} }
}); });
this.binding.btnStartRtmp.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Context appContext = getApplicationContext();
startRtmpSuckService(appContext);
binding.btnStartRtmp.setEnabled(false);
binding.btnStopRtmp.setEnabled(true);
}
});
this.binding.btnStopRtmp.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
RtmpService.stopRtmpService(getApplicationContext());
binding.btnStartRtmp.setEnabled(true);
binding.btnStopRtmp.setEnabled(false);
}
});
} }
public static void startMicroPhotoService(Context context, MicroPhotoContext.AppConfig curAppConfig, Messenger messenger) { public static void startMicroPhotoService(Context context, MicroPhotoContext.AppConfig curAppConfig, Messenger messenger) {
@ -560,6 +468,19 @@ public class MainActivity extends AppCompatActivity {
} }
} }
public static void startRtmpSuckService(Context context) {
Intent intent = new Intent(context, RtmpService.class);
intent.setAction(RtmpService.ACTION_START);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
context.startForegroundService(intent);
} else {
context.startService(intent);
}
}
private void takePhoto(int channel, int preset, boolean photoOrVideo) { private void takePhoto(int channel, int preset, boolean photoOrVideo) {
if (binding.btnStartServ.isEnabled()) { if (binding.btnStartServ.isEnabled()) {
String appPath = MicroPhotoContext.buildMpAppDir(getApplicationContext()); String appPath = MicroPhotoContext.buildMpAppDir(getApplicationContext());
@ -698,6 +619,4 @@ public class MainActivity extends AppCompatActivity {
} }
} }

File diff suppressed because it is too large

@ -0,0 +1,230 @@
package com.xypower.mpapp;
import android.app.AlarmManager;
import android.app.Notification;
import android.app.NotificationChannel;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.os.Handler;
import android.os.IBinder;
import android.os.SystemClock;
import android.util.Log;
import android.widget.RemoteViews;
import android.widget.Toast;
import androidx.core.app.NotificationCompat;
import java.util.concurrent.atomic.AtomicInteger;
public class RtmpService extends Service {
public RtmpService() {
}
static {
System.loadLibrary("rtmpdump");
}
public static final String TAG = "RTMP";
public static final String ACTION_START = "com.xypower.mprtmp.ACT_START";
public static final String ACTION_STOP = "com.xypower.mprtmp.ACT_STOP";
public static final String ACTION_MAIN = "com.xypower.mprtmp.ACT_MAIN";
public static final int NOTIFICATION_ID_FOREGROUND_SERVICE = 8466603;
private static final String FOREGROUND_CHANNEL_ID = "fg_rtmp";
public static class STATE_SERVICE {
public static final int CONNECTED = 10;
public static final int NOT_CONNECTED = 0;
}
private static int mStateService = STATE_SERVICE.NOT_CONNECTED;
private NotificationManager mNotificationManager;
private ScreenActionReceiver mScreenActionReceiver = null;
private Handler mHandler = null;
private Thread mServiceThread;
private long mNativeHandle = 0;
static AtomicInteger reqCode = new AtomicInteger(100);
private native long startService();
private native void stopService(long nativeHandle);
@Override
public IBinder onBind(Intent intent) {
// TODO: Return the communication channel to the service.
throw new UnsupportedOperationException("Not yet implemented");
}
@Override
public void onCreate() {
super.onCreate();
mHandler = new Handler();
mNotificationManager = (NotificationManager) getSystemService(NOTIFICATION_SERVICE);
mStateService = STATE_SERVICE.NOT_CONNECTED;
mScreenActionReceiver = new ScreenActionReceiver();
}
@Override
public void onDestroy() {
mStateService = STATE_SERVICE.NOT_CONNECTED;
unregisterReceiver(mScreenActionReceiver);
super.onDestroy();
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
if (intent == null) {
stopForeground(true);
stopSelf();
return START_NOT_STICKY;
}
// if user starts the service
switch (intent.getAction()) {
case ACTION_START:
Log.d(TAG, "Received user starts foreground intent");
startForeground(NOTIFICATION_ID_FOREGROUND_SERVICE, prepareNotification());
connect();
registerReceiver(mScreenActionReceiver, mScreenActionReceiver.getFilter());
if (mServiceThread == null) {
mServiceThread = new Thread(new Runnable() {
@Override
public void run() {
mNativeHandle = startService();
Log.d(TAG, "RTMP service finishes");
}
});
mServiceThread.start();
}
break;
case ACTION_STOP:
unregisterReceiver(mScreenActionReceiver);
stopForeground(true);
stopSelf();
break;
default:
stopForeground(true);
stopSelf();
}
return START_NOT_STICKY;
}
private void connect() {
// after 10 seconds it's marked as connected
mHandler.postDelayed(
new Runnable() {
public void run() {
// Log.d(TAG, "Bluetooth Low Energy device is connected!!");
Toast.makeText(getApplicationContext(), "RTMP Connected!", Toast.LENGTH_SHORT).show();
mStateService = STATE_SERVICE.CONNECTED;
startForeground(NOTIFICATION_ID_FOREGROUND_SERVICE, prepareNotification());
}
}, 10000);
}
private Notification prepareNotification() {
// handle build version above android oreo
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.O &&
mNotificationManager.getNotificationChannel(FOREGROUND_CHANNEL_ID) == null) {
CharSequence name = getString(R.string.text_name_notification);
int importance = NotificationManager.IMPORTANCE_DEFAULT;
NotificationChannel channel = new NotificationChannel(FOREGROUND_CHANNEL_ID, name, importance);
channel.enableVibration(false);
mNotificationManager.createNotificationChannel(channel);
}
Intent notificationIntent = new Intent(this, MainActivity.class);
notificationIntent.setAction(ACTION_MAIN);
notificationIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TASK);
// if min sdk goes below honeycomb
/*if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.HONEYCOMB) {
notificationIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TASK);
} else {
notificationIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
}*/
int uniqueReqCode = reqCode.getAndIncrement();
PendingIntent pendingIntent = PendingIntent.getActivity(this, uniqueReqCode, notificationIntent, PendingIntent.FLAG_UPDATE_CURRENT);
// make a stop intent
Intent stopIntent = new Intent(this, RtmpService.class);
stopIntent.setAction(ACTION_STOP);
uniqueReqCode = reqCode.getAndIncrement();
PendingIntent pendingStopIntent = PendingIntent.getService(this, uniqueReqCode, stopIntent, PendingIntent.FLAG_UPDATE_CURRENT);
RemoteViews remoteViews = new RemoteViews(getPackageName(), R.layout.notification);
remoteViews.setOnClickPendingIntent(R.id.btn_stop, pendingStopIntent);
// if it is connected
switch (mStateService) {
case STATE_SERVICE.NOT_CONNECTED:
remoteViews.setTextViewText(R.id.tv_state, "DISCONNECTED");
break;
case STATE_SERVICE.CONNECTED:
remoteViews.setTextViewText(R.id.tv_state, "CONNECTED");
break;
}
// notification builder
NotificationCompat.Builder notificationBuilder;
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.O) {
notificationBuilder = new NotificationCompat.Builder(this, FOREGROUND_CHANNEL_ID);
} else {
notificationBuilder = new NotificationCompat.Builder(this);
}
notificationBuilder
.setContent(remoteViews)
.setSmallIcon(R.drawable.ic_rtmpsuck)
.setCategory(NotificationCompat.CATEGORY_SERVICE)
.setOnlyAlertOnce(true)
.setOngoing(true)
.setAutoCancel(true)
.setContentIntent(pendingIntent);
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) {
notificationBuilder.setVisibility(NotificationCompat.VISIBILITY_PUBLIC);
}
return notificationBuilder.build();
}
public static void stopRtmpService(Context context) {
Intent alarmIntent = new Intent();
alarmIntent.setPackage(context.getPackageName());
alarmIntent.setAction(ACTION_STOP);
int uniqueReqCode = reqCode.getAndIncrement();
PendingIntent pendingIntent = PendingIntent.getBroadcast(context.getApplicationContext(), uniqueReqCode, alarmIntent, 0);
AlarmManager alarmManager = (AlarmManager) context.getApplicationContext().getSystemService(ALARM_SERVICE);
alarmManager.setExactAndAllowWhileIdle(AlarmManager.ELAPSED_REALTIME_WAKEUP, SystemClock.elapsedRealtime() + 100, pendingIntent);
}
}
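A usage sketch for the service above, for reference only. onStartCommand() already handles both ACTION_START and ACTION_STOP, so the service can also be stopped by delivering ACTION_STOP directly; the AlarmManager-based stopRtmpService() above sends a broadcast, which only reaches the service if a receiver for that action is declared elsewhere (not visible in this compare). RtmpServiceLauncher is an illustrative name, and the stop() path assumes the app is in the foreground when it is called:

import android.content.Context;
import android.content.Intent;
import android.os.Build;

public final class RtmpServiceLauncher {
    public static void start(Context context) {
        Intent intent = new Intent(context, RtmpService.class);
        intent.setAction(RtmpService.ACTION_START);
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
            // Must be promoted to the foreground quickly; onStartCommand() calls startForeground() for ACTION_START.
            context.startForegroundService(intent);
        } else {
            context.startService(intent);
        }
    }

    public static void stop(Context context) {
        Intent intent = new Intent(context, RtmpService.class);
        intent.setAction(RtmpService.ACTION_STOP);
        // onStartCommand() handles ACTION_STOP by calling stopForeground() and stopSelf().
        context.startService(intent);
    }
}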

@ -0,0 +1,76 @@
package com.xypower.mpapp;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.os.Build;
import android.util.Log;
import android.widget.Toast;
public class ScreenActionReceiver extends BroadcastReceiver {
private String TAG = "ScreenActionReceiver";
@Override
public void onReceive(Context context, Intent intent) {
//LOG
StringBuilder sb = new StringBuilder();
sb.append("Action: " + intent.getAction() + "\n");
// sb.append("URI: " + intent.toUri(Intent.URI_INTENT_SCHEME).toString() + "\n");
String log = sb.toString();
Log.d(TAG, log);
Toast.makeText(context, log, Toast.LENGTH_SHORT).show();
String action = intent.getAction();
try {
if (Intent.ACTION_SCREEN_ON.equals(action)) {
Log.d(TAG, "screen is on...");
Toast.makeText(context, "screen ON", Toast.LENGTH_SHORT).show();
//Run the locker
context.startService(new Intent(context, FloatingWindow.class));
} else if (Intent.ACTION_SCREEN_OFF.equals(action)) {
Log.d(TAG, "screen is off...");
Toast.makeText(context, "screen OFF", Toast.LENGTH_SHORT).show();
} else if (Intent.ACTION_USER_PRESENT.equals(action)) {
Log.d(TAG, "screen is unlocked...");
Toast.makeText(context, "screen UNLOCK", Toast.LENGTH_SHORT).show();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
context.startForegroundService(new Intent(context, FloatingWindow.class));
} else {
context.startService(new Intent(context, FloatingWindow.class));
}
} else if (Intent.ACTION_BOOT_COMPLETED.equals(action)) {
Log.d(TAG, "boot completed...");
Toast.makeText(context, "BOOTED..", Toast.LENGTH_SHORT).show();
//Run the locker
/* Intent i = new Intent(context, FloatingWindow.class);
context.startService(i);
*/
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
// context.startForegroundService(new Intent(context, FloatingWindow.class));
} else {
// context.startService(new Intent(context, FloatingWindow.class));
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
public IntentFilter getFilter(){
final IntentFilter filter = new IntentFilter();
filter.addAction(Intent.ACTION_SCREEN_OFF);
filter.addAction(Intent.ACTION_SCREEN_ON);
return filter;
}
}
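Note that getFilter() above only registers ACTION_SCREEN_ON and ACTION_SCREEN_OFF, while onReceive() also branches on ACTION_USER_PRESENT and ACTION_BOOT_COMPLETED; those branches only run if the receiver is registered for them somewhere else. Below is a sketch of the filter extended for the unlock case (ACTION_BOOT_COMPLETED still has to come from a manifest-declared receiver holding RECEIVE_BOOT_COMPLETED); this is a suggestion, not what the commit does:

// Sketch: extend the runtime filter so the USER_PRESENT branch can actually fire.
public IntentFilter getFilter() {
    final IntentFilter filter = new IntentFilter();
    filter.addAction(Intent.ACTION_SCREEN_OFF);
    filter.addAction(Intent.ACTION_SCREEN_ON);
    filter.addAction(Intent.ACTION_USER_PRESENT); // delivered on unlock to context-registered receivers
    return filter;
}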

@ -769,10 +769,7 @@ public class Camera2VideoActivity extends AppCompatActivity {
@Override @Override
public void run() { public void run() {
Log.i("OSD", "Record Stop " + Long.toString(mDuration)); Log.i("OSD", "Record Stop " + Long.toString(mDuration));
if (mGPUCameraRecorder != null) {
mGPUCameraRecorder.stop(); mGPUCameraRecorder.stop();
}
int aa = 0; int aa = 0;
} }
@ -812,7 +809,6 @@ public class Camera2VideoActivity extends AppCompatActivity {
.cameraId(Integer.toString(mCameraId)) .cameraId(Integer.toString(mCameraId))
.mute(true) .mute(true)
.duration(mDuration * 1000) .duration(mDuration * 1000)
.rotation(mOrientation)
.build(); .build();
Log.i("OSD", "mGPUCameraRecorder created"); Log.i("OSD", "mGPUCameraRecorder created");

@ -3,6 +3,7 @@ package com.xypower.mpapp.video;
import android.Manifest; import android.Manifest;
import android.app.Activity; import android.app.Activity;
import android.app.Dialog; import android.app.Dialog;
import android.content.ComponentName;
import android.content.Context; import android.content.Context;
import android.content.DialogInterface; import android.content.DialogInterface;
import android.content.Intent; import android.content.Intent;
@ -17,6 +18,7 @@ import android.graphics.PorterDuff;
import android.graphics.Rect; import android.graphics.Rect;
import android.graphics.RectF; import android.graphics.RectF;
import android.graphics.SurfaceTexture; import android.graphics.SurfaceTexture;
import android.graphics.drawable.BitmapDrawable;
import android.hardware.camera2.CameraAccessException; import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession; import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics; import android.hardware.camera2.CameraCharacteristics;
@ -34,6 +36,8 @@ import androidx.appcompat.app.AlertDialog;
import androidx.core.app.ActivityCompat; import androidx.core.app.ActivityCompat;
import androidx.fragment.app.DialogFragment; import androidx.fragment.app.DialogFragment;
import androidx.fragment.app.Fragment; import androidx.fragment.app.Fragment;
import androidx.legacy.app.FragmentCompat;
import androidx.localbroadcastmanager.content.LocalBroadcastManager; import androidx.localbroadcastmanager.content.LocalBroadcastManager;
import android.os.Environment; import android.os.Environment;
@ -50,6 +54,7 @@ import android.view.TextureView;
import android.view.View; import android.view.View;
import android.view.ViewGroup; import android.view.ViewGroup;
import android.widget.Button; import android.widget.Button;
import android.widget.Toast;
import com.xypower.mpapp.MicroPhotoService; import com.xypower.mpapp.MicroPhotoService;
import com.xypower.mpapp.R; import com.xypower.mpapp.R;
@ -71,7 +76,7 @@ import java.util.concurrent.TimeUnit;
* Use the {@link VideoFragment#newInstance} factory method to * Use the {@link VideoFragment#newInstance} factory method to
* create an instance of this fragment. * create an instance of this fragment.
*/ */
public class VideoFragment extends Fragment implements View.OnClickListener, MediaRecorder.OnInfoListener { public class VideoFragment extends Fragment implements View.OnClickListener, MediaRecorder.OnInfoListener, FragmentCompat.OnRequestPermissionsResultCallback {
public static final String ACTION_FINISH = "com.xypower.mvapp.ACT_FINISH"; public static final String ACTION_FINISH = "com.xypower.mvapp.ACT_FINISH";
public static final String ACTION_MP_VIDEO_FINISHED = "com.xypower.mpapp.ACT_V_FINISHED"; public static final String ACTION_MP_VIDEO_FINISHED = "com.xypower.mpapp.ACT_V_FINISHED";

@ -0,0 +1,5 @@
<vector android:height="24dp" android:tint="#07EEB8"
android:viewportHeight="24" android:viewportWidth="24"
android:width="24dp" xmlns:android="http://schemas.android.com/apk/res/android">
<path android:fillColor="@android:color/white" android:pathData="M12,4l-1.41,1.41L16.17,11H4v2h12.17l-5.58,5.59L12,20l8,-8z"/>
</vector>

@ -205,13 +205,13 @@
app:layout_constraintTop_toTopOf="@+id/btnStartServ" /> app:layout_constraintTop_toTopOf="@+id/btnStartServ" />
<Button <Button
android:id="@+id/btnSendHb" android:id="@+id/btnLogs"
android:layout_width="wrap_content" android:layout_width="wrap_content"
android:layout_height="wrap_content" android:layout_height="wrap_content"
android:text="@string/main_send_hb"
android:layout_marginStart="@dimen/activity_horizontal_margin" android:layout_marginStart="@dimen/activity_horizontal_margin"
android:minWidth="@dimen/activity_btn_min_width" android:minWidth="@dimen/activity_btn_min_width"
android:minHeight="@dimen/activity_btn_min_height" android:minHeight="@dimen/activity_btn_min_height"
android:text="日志"
app:layout_constraintStart_toEndOf="@+id/btnChannels" app:layout_constraintStart_toEndOf="@+id/btnChannels"
app:layout_constraintTop_toTopOf="@+id/btnStartServ" /> app:layout_constraintTop_toTopOf="@+id/btnStartServ" />
@ -261,15 +261,15 @@
app:layout_constraintTop_toTopOf="@+id/btnTakePhoto" /> app:layout_constraintTop_toTopOf="@+id/btnTakePhoto" />
<Button <Button
android:id="@+id/btnSendWs" android:id="@+id/btnStartRtmp"
android:layout_width="wrap_content" android:layout_width="wrap_content"
android:layout_height="wrap_content" android:layout_height="wrap_content"
android:text="@string/main_send_ws"
android:layout_marginStart="@dimen/activity_horizontal_margin" android:layout_marginStart="@dimen/activity_horizontal_margin"
android:minWidth="@dimen/activity_btn_min_width" android:minWidth="@dimen/activity_btn_min_width"
android:minHeight="@dimen/activity_btn_min_height" android:minHeight="@dimen/activity_btn_min_height"
android:text="RTMP转发"
app:layout_constraintStart_toEndOf="@+id/btnTakePhoto4" app:layout_constraintStart_toEndOf="@+id/btnTakePhoto4"
app:layout_constraintTop_toTopOf="@+id/btnTakePhoto" /> app:layout_constraintTop_toTopOf="@+id/btnTakePhoto4" />
<Button <Button
android:id="@+id/takeVideoBtn" android:id="@+id/takeVideoBtn"
@ -317,25 +317,25 @@
app:layout_constraintTop_toTopOf="@+id/takeVideoBtn" /> app:layout_constraintTop_toTopOf="@+id/takeVideoBtn" />
<Button <Button
android:id="@+id/btnSendBi" android:id="@+id/btnStopRtmp"
android:layout_width="wrap_content" android:layout_width="wrap_content"
android:layout_height="wrap_content" android:layout_height="wrap_content"
android:text="@string/main_send_bi"
android:layout_marginStart="@dimen/activity_horizontal_margin" android:layout_marginStart="@dimen/activity_horizontal_margin"
android:minWidth="@dimen/activity_btn_min_width" android:minWidth="@dimen/activity_btn_min_width"
android:minHeight="@dimen/activity_btn_min_height" android:minHeight="@dimen/activity_btn_min_height"
android:text="停止RTMP"
app:layout_constraintStart_toEndOf="@+id/takeVideoBtn4" app:layout_constraintStart_toEndOf="@+id/takeVideoBtn4"
app:layout_constraintTop_toTopOf="@+id/takeVideoBtn" /> app:layout_constraintTop_toTopOf="@+id/takeVideoBtn4" />
<Button <Button
android:id="@+id/btnLogs" android:id="@+id/btnSendHb"
android:layout_width="wrap_content" android:layout_width="wrap_content"
android:layout_height="wrap_content" android:layout_height="wrap_content"
android:text="@string/main_send_hb"
android:layout_marginStart="@dimen/activity_horizontal_margin" android:layout_marginStart="@dimen/activity_horizontal_margin"
android:layout_marginTop="@dimen/activity_vertical_margin_small" android:layout_marginTop="@dimen/activity_vertical_spacing_small"
android:minWidth="@dimen/activity_btn_min_width" android:minWidth="@dimen/activity_btn_min_width"
android:minHeight="@dimen/activity_btn_min_height" android:minHeight="@dimen/activity_btn_min_height"
android:text="日志"
app:layout_constraintStart_toStartOf="parent" app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toBottomOf="@+id/takeVideoBtn" /> app:layout_constraintTop_toBottomOf="@+id/takeVideoBtn" />
@ -347,8 +347,8 @@
android:layout_marginStart="@dimen/activity_horizontal_margin" android:layout_marginStart="@dimen/activity_horizontal_margin"
android:minWidth="@dimen/activity_btn_min_width" android:minWidth="@dimen/activity_btn_min_width"
android:minHeight="@dimen/activity_btn_min_height" android:minHeight="@dimen/activity_btn_min_height"
app:layout_constraintStart_toEndOf="@+id/btnLogs" app:layout_constraintStart_toEndOf="@+id/btnSendHb"
app:layout_constraintTop_toTopOf="@+id/btnLogs" /> app:layout_constraintTop_toTopOf="@+id/btnSendHb" />
<Button <Button
android:id="@+id/btnReboot" android:id="@+id/btnReboot"
@ -359,7 +359,7 @@
android:minWidth="@dimen/activity_btn_min_width" android:minWidth="@dimen/activity_btn_min_width"
android:minHeight="@dimen/activity_btn_min_height" android:minHeight="@dimen/activity_btn_min_height"
app:layout_constraintStart_toEndOf="@+id/btnRestartApp" app:layout_constraintStart_toEndOf="@+id/btnRestartApp"
app:layout_constraintTop_toTopOf="@+id/btnLogs" /> app:layout_constraintTop_toTopOf="@+id/btnSendHb" />
<Button <Button
android:id="@+id/btnCameraInfo" android:id="@+id/btnCameraInfo"
@ -370,18 +370,7 @@
android:minWidth="@dimen/activity_btn_min_width" android:minWidth="@dimen/activity_btn_min_width"
android:minHeight="@dimen/activity_btn_min_height" android:minHeight="@dimen/activity_btn_min_height"
app:layout_constraintStart_toEndOf="@+id/btnReboot" app:layout_constraintStart_toEndOf="@+id/btnReboot"
app:layout_constraintTop_toTopOf="@+id/btnLogs" /> app:layout_constraintTop_toTopOf="@+id/btnSendHb" />
<Button
android:id="@+id/btnSendFault"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/main_send_fault"
android:layout_marginStart="@dimen/activity_horizontal_margin"
android:minWidth="@dimen/activity_btn_min_width"
android:minHeight="@dimen/activity_btn_min_height"
app:layout_constraintStart_toEndOf="@+id/btnCameraInfo"
app:layout_constraintTop_toTopOf="@+id/btnLogs" />
<Button <Button
android:id="@+id/btnDowseCamera" android:id="@+id/btnDowseCamera"

@ -263,6 +263,17 @@
app:layout_constraintStart_toEndOf="@+id/btnTakePhoto3" app:layout_constraintStart_toEndOf="@+id/btnTakePhoto3"
app:layout_constraintTop_toBottomOf="@+id/simchange" /> app:layout_constraintTop_toBottomOf="@+id/simchange" />
<Button
android:id="@+id/btnStartRtmp"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginStart="@dimen/activity_horizontal_margin"
android:minWidth="@dimen/activity_btn_min_width"
android:minHeight="@dimen/activity_btn_min_height"
android:text="RTMP转发"
app:layout_constraintStart_toEndOf="@+id/btnTakePhoto4"
app:layout_constraintTop_toTopOf="@+id/btnTakePhoto4" />
<Button <Button
android:id="@+id/takeVideoBtn" android:id="@+id/takeVideoBtn"
android:layout_width="wrap_content" android:layout_width="wrap_content"
@ -304,6 +315,17 @@
app:layout_constraintStart_toEndOf="@+id/takeVideoBtn3" app:layout_constraintStart_toEndOf="@+id/takeVideoBtn3"
app:layout_constraintTop_toTopOf="@+id/takeVideoBtn" /> app:layout_constraintTop_toTopOf="@+id/takeVideoBtn" />
<Button
android:id="@+id/btnStopRtmp"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginStart="@dimen/activity_horizontal_margin"
android:minWidth="@dimen/activity_btn_min_width"
android:minHeight="@dimen/activity_btn_min_height"
android:text="停止RTMP"
app:layout_constraintStart_toEndOf="@+id/takeVideoBtn4"
app:layout_constraintTop_toTopOf="@+id/takeVideoBtn4" />
<Button <Button
android:id="@+id/btnSendHb" android:id="@+id/btnSendHb"
android:layout_width="wrap_content" android:layout_width="wrap_content"

@ -1,8 +1,7 @@
<?xml version="1.0" encoding="utf-8"?> <?xml version="1.0" encoding="utf-8"?>
<resources> <resources>
<string-array name="networkProtocols"> <string-array name="networkProtocols">
<item>0-TCP</item> <item>TCP</item>
<item>1-UDP</item> <item>UDP</item>
<item>10-MQTT</item>
</string-array> </string-array>
</resources> </resources>

@ -6,10 +6,8 @@
<item>65282-江苏</item> <item>65282-江苏</item>
<item>65283-湖南</item> <item>65283-湖南</item>
<item>65284-浙江</item> <item>65284-浙江</item>
<item>65285-河南统一</item> <item>65285-河南</item>
<item>65286-郑州</item> <item>65286-郑州</item>
<item>65290-河南全景</item>
<item>65298-宁夏</item> <item>65298-宁夏</item>
<item>65310-山西智洋</item>
</string-array> </string-array>
</resources> </resources>

@ -7,9 +7,6 @@
<string name="main_packet_size_default">默认2K</string> <string name="main_packet_size_default">默认2K</string>
<string name="main_server">支持域名自动转IP</string> <string name="main_server">支持域名自动转IP</string>
<string name="main_send_hb">心跳</string> <string name="main_send_hb">心跳</string>
<string name="main_send_ws">工作状态</string>
<string name="main_send_bi">基本信息</string>
<string name="main_send_fault">故障</string>
<string name="main_restart_app">重启APP</string> <string name="main_restart_app">重启APP</string>
<string name="main_reboot">重启设备</string> <string name="main_reboot">重启设备</string>
<string name="main_camera_info">摄像头</string> <string name="main_camera_info">摄像头</string>

@ -30,10 +30,9 @@ android {
dependencies { dependencies {
implementation 'androidx.core:core:1.6.0' implementation 'androidx.appcompat:appcompat:1.3.0'
// implementation 'androidx.appcompat:appcompat:1.3.0'
implementation 'com.google.android.material:material:1.4.0' implementation 'com.google.android.material:material:1.4.0'
// implementation 'com.linkedin.dexmaker:dexmaker:2.28.3' implementation 'com.linkedin.dexmaker:dexmaker:2.28.3'
testImplementation 'junit:junit:4.13.2' testImplementation 'junit:junit:4.13.2'
androidTestImplementation 'androidx.test.ext:junit:1.1.3' androidTestImplementation 'androidx.test.ext:junit:1.1.3'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.4.0' androidTestImplementation 'androidx.test.espresso:espresso-core:3.4.0'

@ -6,6 +6,7 @@ import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager; import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata; import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.params.StreamConfigurationMap; import android.hardware.camera2.params.StreamConfigurationMap;
import android.text.TextUtils;
import android.util.Log; import android.util.Log;
import android.util.Size; import android.util.Size;
@ -44,16 +45,6 @@ public class CameraUtils {
Integer orientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION); Integer orientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
builder.append(orientation == null ? "" : orientation.toString()); builder.append(orientation == null ? "" : orientation.toString());
int[] capabilities = cameraCharacteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
boolean hasRaw = false;
for (int capability : capabilities) {
if (capability == CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW) {
hasRaw = true;
break;
}
}
builder.append(" raw=" + (hasRaw ? "1" : "0"));
StreamConfigurationMap map = cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); StreamConfigurationMap map = cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
Size[] sizes = map.getOutputSizes(ImageFormat.YUV_420_888); Size[] sizes = map.getOutputSizes(ImageFormat.YUV_420_888);

@ -33,23 +33,19 @@ public class FileDownloader {
URL url = new URL(urlString); URL url = new URL(urlString);
connection = (HttpURLConnection) url.openConnection(); connection = (HttpURLConnection) url.openConnection();
connection.setRequestProperty("Accept-Encoding", "gzip"); connection.setRequestProperty("Accept-Encoding", "gzip");
connection.setConnectTimeout(10000); connection.setConnectTimeout(5000);
connection.setReadTimeout(30000); connection.setReadTimeout(120000);
connection.setDoInput(true); connection.setDoInput(true);
connection.connect(); connection.connect();
final File temp = new File(filePath); final File temp = new File(filePath);
if (temp.exists()) { if (temp.exists())
long fileSize = temp.length(); temp.delete();
connection.setRequestProperty("Range", "bytes=" + Long.toString(fileSize) + "-"); temp.createNewFile();
}
// if (temp.exists())
// temp.delete();
// temp.createNewFile();
temp.setReadable(true, false); temp.setReadable(true, false);
temp.setWritable(true, false); temp.setWritable(true, false);
downloadFile = temp; downloadFile = temp;
Log.d("download", "url " + urlString + "\n save to " + temp); Log.d("download", "url " + urlString + "\n save to " + temp);
os = new FileOutputStream(temp, true); os = new FileOutputStream(temp);
String encoding = connection.getContentEncoding(); String encoding = connection.getContentEncoding();
is = connection.getInputStream(); is = connection.getInputStream();
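A note on the resumable variant in this hunk (the side that sends a Range header and opens the FileOutputStream in append mode): java.net.HttpURLConnection throws IllegalStateException if setRequestProperty() is called after connect(), and appending is only safe when the server honors the range and answers 206 Partial Content. A standalone sketch of that pattern, with illustrative names and without the gzip handling used above:

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;

public final class ResumableDownloadSketch {
    static void download(String urlString, String filePath) throws IOException {
        HttpURLConnection connection = (HttpURLConnection) new URL(urlString).openConnection();
        File temp = new File(filePath);
        long existing = temp.exists() ? temp.length() : 0;
        if (existing > 0) {
            // Must be set before connect(); afterwards setRequestProperty() throws IllegalStateException.
            connection.setRequestProperty("Range", "bytes=" + existing + "-");
        }
        connection.connect();
        // Append only if the server honored the range (206); a plain 200 means start over from byte 0.
        boolean resumed = connection.getResponseCode() == HttpURLConnection.HTTP_PARTIAL;
        try (InputStream is = connection.getInputStream();
             OutputStream os = new FileOutputStream(temp, resumed)) {
            byte[] buffer = new byte[8192];
            int len;
            while ((len = is.read(buffer)) != -1) {
                os.write(buffer, 0, len);
            }
        } finally {
            connection.disconnect();
        }
    }
}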

@ -6,7 +6,6 @@ import java.io.BufferedInputStream;
import java.io.BufferedReader; import java.io.BufferedReader;
import java.io.DataOutputStream; import java.io.DataOutputStream;
import java.io.File; import java.io.File;
import java.io.FileInputStream;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.io.InputStreamReader; import java.io.InputStreamReader;
@ -93,22 +92,8 @@ public class FileUploader {
} }
request.writeBytes(this.CRLF); request.writeBytes(this.CRLF);
FileInputStream fis = null; byte[] bytes = Files.readAllBytes(uploadFile.toPath());
try { request.write(bytes);
fis = new FileInputStream(uploadFile);
int bufferSize = 1024;
byte[] buffer = new byte[bufferSize];
int length = -1;
while ((length = fis.read(buffer)) != -1) {
request.write(buffer, 0, length);
}
} catch (Exception ex) {
ex.printStackTrace();
} finally {
FilesUtils.closeFriendly(fis);
}
// byte[] bytes = Files.readAllBytes(uploadFile.toPath());
// request.write(bytes);
} }
/** /**

@ -1,14 +1,10 @@
package com.xypower.common; package com.xypower.common;
import android.content.Context;
import android.content.res.AssetManager;
import android.text.TextUtils; import android.text.TextUtils;
import android.util.Log;
import org.w3c.dom.Text; import org.w3c.dom.Text;
import java.io.BufferedReader; import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.Closeable; import java.io.Closeable;
import java.io.File; import java.io.File;
import java.io.FileInputStream; import java.io.FileInputStream;
@ -17,7 +13,6 @@ import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.io.InputStreamReader; import java.io.InputStreamReader;
import java.io.OutputStream; import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.UnsupportedEncodingException; import java.io.UnsupportedEncodingException;
import java.nio.channels.Channels; import java.nio.channels.Channels;
import java.nio.channels.SeekableByteChannel; import java.nio.channels.SeekableByteChannel;
@ -256,217 +251,4 @@ public class FilesUtils {
public static byte[] readAllBytes(String file) { public static byte[] readAllBytes(String file) {
return readAllBytes(new File(file)); return readAllBytes(new File(file));
} }
public static boolean delete(File file) {
if (!file.exists()) {
return false;
} else {
if (file.isFile())
return deleteSingleFile(file);
else
return deleteDirectory(file.getAbsolutePath());
}
}
private static boolean deleteSingleFile(File file) {
// If the file at the given path exists and is a regular file, delete it directly
if (file.exists() && file.isFile()) {
if (file.delete()) {
// Log.e("--Method--", "Copy_Delete.deleteSingleFile: deleted file " + filePath$Name + " successfully");
return true;
} else {
return false;
}
} else {
return false;
}
}
/**
* @param filePath absolute path of the directory to delete
* @return true if the directory was deleted successfully, false otherwise
*/
private static boolean deleteDirectory(String filePath) {
// If the path does not end with the file separator, append one
if (!filePath.endsWith(File.separator))
filePath = filePath + File.separator;
File dirFile = new File(filePath);
// If the path does not exist or is not a directory, bail out
if ((!dirFile.exists()) || (!dirFile.isDirectory())) {
return false;
}
boolean flag = true;
// Delete all files in the folder, including subdirectories
File[] files = dirFile.listFiles();
for (File file : files) {
// Delete a child file
if (file.isFile()) {
flag = deleteSingleFile(file);
if (!flag)
break;
}
// Delete a child directory
else if (file.isDirectory()) {
flag = deleteDirectory(file
.getAbsolutePath());
if (!flag)
break;
}
}
if (!flag) {
return false;
}
// Finally delete the directory itself
if (dirFile.delete()) {
// Log.e("--Method--", "Copy_Delete.deleteDirectory: deleted directory " + filePath + " successfully");
return true;
} else {
return false;
}
}
public static void copyAssetsDir(Context context, String directory, String destPath) {
try {
AssetManager assetManager = context.getAssets();
String[] fileList = assetManager.list(directory);
if (fileList != null && fileList.length > 0) {
File file = new File(destPath);
if (!file.exists()) {
file.mkdirs();
}
if (!directory.endsWith(File.separator)) {
directory += File.separator;
}
if (!destPath.endsWith(File.separator)) {
destPath += File.separator;
}
for (String fileName : fileList) {
copyAssetsDir(context, directory + fileName, destPath + fileName);
}
} else {
// Not a directory entry; try to copy it as a single file
copyAssetsFile(context, directory, destPath);
}
} catch (Exception e) {
e.printStackTrace();
}
// else {// if it is a single file
// InputStream inputStream=context.getAssets().open(filePath);
// File file=new File(context.getFilesDir().getAbsolutePath()+ File.separator+filePath);
// Log.i("copyAssets2Phone","file:"+file);
// if(!file.exists() || file.length()==0) {
// FileOutputStream fos=new FileOutputStream(file);
// int len=-1;
// byte[] buffer=new byte[1024];
// while ((len=inputStream.read(buffer))!=-1){
// fos.write(buffer,0,len);
// }
// fos.flush();
// inputStream.close();
// fos.close();
// showToast(context,"model file copied");
// } else {
// showToast(context,"model file already exists, no need to copy");
// }
// }
}
public static void copyAssetsFile(Context context, String fileName, String destPath) {
InputStream inputStream = null;
FileOutputStream fos = null;
try {
inputStream = context.getAssets().open(fileName);
//getFilesDir() returns the app's private files directory: /data/data/<package name>/files
File file = new File(destPath);
if (file.exists()) {
file.delete();
}
File parentDir = file.getParentFile();
if (parentDir != null && !parentDir.exists()) {
parentDir.mkdirs();
}
if (parentDir != null && !parentDir.canWrite()) {
Log.e("FilesUtils", "No write permission to directory: " + parentDir.getAbsolutePath());
return;
}
fos = new FileOutputStream(file);
int len = -1;
byte[] buffer = new byte[1024];
while ((len = inputStream.read(buffer)) != -1) {
try {
fos.write(buffer, 0, len);
} catch (Exception ex) {
ex.printStackTrace();
}
}
fos.flush();
} catch (Exception e) {
e.printStackTrace();
} finally {
FilesUtils.closeFriendly(inputStream);
FilesUtils.closeFriendly(fos);
}
}
/**
* Copies a text file from the app assets and normalizes line endings (CRLF to LF).
*
* @param context
* @param fileName name of the source file inside assets
* @param destPath destination path for the normalized copy
*/
public static void copyAndNormalizeTextAssetsFile(Context context, String fileName, String destPath) {
InputStream inputStream = null;
BufferedReader reader = null;
BufferedWriter writer = null;
try {
inputStream = context.getAssets().open(fileName);
reader = new BufferedReader(new InputStreamReader(inputStream));
// Create the destination file
File file = new File(destPath);
if (file.exists()) {
file.delete();
}
File parentDir = file.getParentFile();
if (parentDir != null && !parentDir.exists()) {
parentDir.mkdirs();
}
if (parentDir != null && !parentDir.canWrite()) {
Log.e("FilesUtils", "No write permission to directory: " + parentDir.getAbsolutePath());
return;
}
// Write with a BufferedWriter, normalizing line endings as we go
writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file)));
String line;
// Read line by line and write out; BufferedWriter supplies the line terminator
while ((line = reader.readLine()) != null) {
writer.write(line);
writer.newLine(); // uses the platform default line separator (LF on Android)
}
writer.flush();
Log.d("FilesUtils", "File normalized and copied successfully: " + destPath);
} catch (Exception e) {
Log.e("FilesUtils", "Error normalizing file: " + e.getMessage(), e);
} finally {
closeFriendly(reader);
closeFriendly(writer);
closeFriendly(inputStream);
}
}
} }
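The removed FilesUtils.delete() above was the recursive variant, and the MainActivity hunk earlier in this compare switches its call site to File.delete(), which returns false on a non-empty directory. If recursive deletion is still needed somewhere, a plain java.io sketch follows; RecursiveDelete is an illustrative name, not part of this change:

import java.io.File;

public final class RecursiveDelete {
    public static boolean delete(File target) {
        if (target == null || !target.exists()) {
            return false;
        }
        if (target.isDirectory()) {
            File[] children = target.listFiles();
            if (children != null) {
                for (File child : children) {
                    if (!delete(child)) {
                        return false; // stop on the first failure, like the removed deleteDirectory()
                    }
                }
            }
        }
        return target.delete();
    }
}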

@ -0,0 +1,248 @@
package com.xypower.common;
import android.content.Context;
import android.net.ConnectivityManager;
import android.net.wifi.WifiConfiguration;
import android.net.wifi.WifiManager;
import android.os.Build;
import android.os.Handler;
import androidx.annotation.RequiresApi;
import android.util.Log;
import com.android.dx.stock.ProxyBuilder;
import java.io.File;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
public class HotspotManager {
@RequiresApi(api = Build.VERSION_CODES.O)
public static class OreoWifiManager {
private static final String TAG = OreoWifiManager.class.getSimpleName();
private Context mContext;
private WifiManager mWifiManager;
private ConnectivityManager mConnectivityManager;
public OreoWifiManager(Context c) {
mContext = c;
mWifiManager = (WifiManager) mContext.getSystemService(Context.WIFI_SERVICE);
mConnectivityManager = (ConnectivityManager) mContext.getSystemService(ConnectivityManager.class);
}
/**
* Sets the Wifi AP SSID and password.
* Call this before {@code startTethering} if the app is a system/privileged app.
* Requires android.permission.TETHER_PRIVILEGED, which is only granted to system apps.
*/
public void configureHotspot(String name, String password) {
WifiConfiguration apConfig = new WifiConfiguration();
apConfig.SSID = name;
apConfig.preSharedKey = password;
apConfig.allowedKeyManagement.set(WifiConfiguration.KeyMgmt.WPA_PSK);
try {
Method setConfigMethod = mWifiManager.getClass().getMethod("setWifiApConfiguration", WifiConfiguration.class);
boolean status = (boolean) setConfigMethod.invoke(mWifiManager, apConfig);
Log.d(TAG, "setWifiApConfiguration - success? " + status);
} catch (Exception e) {
Log.e(TAG, "Error in configureHotspot");
e.printStackTrace();
}
}
/**
* Checks whether tethering is on.
* This is determined by the getTetheredIfaces() method,
* which returns an empty array if no devices are tethered.
*
* @return true if a tethered device is found, false otherwise
*/
/*public boolean isTetherActive() {
try {
Method method = mConnectivityManager.getClass().getDeclaredMethod("getTetheredIfaces");
if (method == null) {
Log.e(TAG, "getTetheredIfaces is null");
} else {
String res[] = (String[]) method.invoke(mConnectivityManager, null);
Log.d(TAG, "getTetheredIfaces invoked");
Log.d(TAG, Arrays.toString(res));
if (res.length > 0) {
return true;
}
}
} catch (Exception e) {
Log.e(TAG, "Error in getTetheredIfaces");
e.printStackTrace();
}
return false;
}
*/
/**
* Enables tethering using the SSID/password defined in the Settings app under Hotspot & tethering.
* Does not require the app to have system/privileged access.
* Credit: Vishal Sharma - https://stackoverflow.com/a/52219887
*/
public boolean startTethering(final OnStartTetheringCallback callback) {
// On Pie if we try to start tethering while it is already on, it will
// be disabled. This is needed when startTethering() is called programmatically.
/*if (isTetherActive()) {
Log.d(TAG, "Tether already active, returning");
return false;
}*/
File outputDir = mContext.getCodeCacheDir();
Object proxy;
try {
proxy = ProxyBuilder.forClass(OnStartTetheringCallbackClass())
.dexCache(outputDir).handler(new InvocationHandler() {
@Override
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
switch (method.getName()) {
case "onTetheringStarted":
callback.onTetheringStarted();
break;
case "onTetheringFailed":
callback.onTetheringFailed();
break;
default:
ProxyBuilder.callSuper(proxy, method, args);
}
return null;
}
}).build();
} catch (Exception e) {
Log.e(TAG, "Error in enableTethering ProxyBuilder");
e.printStackTrace();
return false;
}
Method method = null;
try {
method = mConnectivityManager.getClass().getDeclaredMethod("startTethering", int.class, boolean.class, OnStartTetheringCallbackClass(), Handler.class);
if (method == null) {
Log.e(TAG, "startTetheringMethod is null");
} else {
method.invoke(mConnectivityManager, ConnectivityManager.TYPE_MOBILE, false, proxy, null);
Log.d(TAG, "startTethering invoked");
}
return true;
} catch (Exception e) {
Log.e(TAG, "Error in enableTethering");
e.printStackTrace();
}
return false;
}
public void stopTethering() {
try {
Method method = mConnectivityManager.getClass().getDeclaredMethod("stopTethering", int.class);
if (method == null) {
Log.e(TAG, "stopTetheringMethod is null");
} else {
method.invoke(mConnectivityManager, ConnectivityManager.TYPE_MOBILE);
Log.d(TAG, "stopTethering invoked");
}
} catch (Exception e) {
Log.e(TAG, "stopTethering error: " + e.toString());
e.printStackTrace();
}
}
private Class OnStartTetheringCallbackClass() {
try {
return Class.forName("android.net.ConnectivityManager$OnStartTetheringCallback");
} catch (ClassNotFoundException e) {
Log.e(TAG, "OnStartTetheringCallbackClass error: " + e.toString());
e.printStackTrace();
}
return null;
}
}
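// Hypothetical usage sketch (not part of the original source): shows how the
// OreoWifiManager above might be driven from a Context. The method name
// "demoStartHotspot" and the SSID/password literals are illustrative assumptions.
@RequiresApi(api = Build.VERSION_CODES.O)
public static void demoStartHotspot(Context context) {
OreoWifiManager wifiManager = new OreoWifiManager(context);
// configureHotspot needs android.permission.TETHER_PRIVILEGED (system/privileged apps only);
// a non-privileged app would skip this and rely on the SSID/password already set in Settings.
wifiManager.configureHotspot("DemoSsid", "DemoPassword123");
boolean requested = wifiManager.startTethering(new OnStartTetheringCallback() {
@Override
public void onTetheringStarted() {
Log.d("HotspotDemo", "tethering started");
}
@Override
public void onTetheringFailed() {
Log.e("HotspotDemo", "tethering failed");
}
});
Log.d("HotspotDemo", "startTethering requested: " + requested);
}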
public static abstract class OnStartTetheringCallback {
/**
* Called when tethering has been successfully started.
*/
public abstract void onTetheringStarted();
/**
* Called when starting tethering failed.
*/
public abstract void onTetheringFailed();
}
@RequiresApi(api = Build.VERSION_CODES.O)
private static void setHotspotOnPhone(Context mContext, boolean isEnable) {
OreoWifiManager mTestOreoWifiManager = new OreoWifiManager(mContext);
if (isEnable) {
OnStartTetheringCallback callback = new OnStartTetheringCallback() {
@Override
public void onTetheringStarted() {
}
@Override
public void onTetheringFailed() {
}
};
mTestOreoWifiManager.startTethering(callback);
} else {
mTestOreoWifiManager.stopTethering();
}
}
/*
public static void setWiFiApEnable(Context context, boolean isEnable) {
ConnectivityManager mConnectivityManager= (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
if (isEnable) {
mConnectivityManager.startTethering(ConnectivityManager.TETHERING_WIFI, false, new ConnectivityManager.OnStartTetheringCallback() {
@Override
public void onTetheringStarted() {
Log.d(TAG, "onTetheringStarted");
// Don't fire a callback here, instead wait for the next update from wifi.
}
@Override
public void onTetheringFailed() {
Log.d(TAG, "onTetheringFailed");
// TODO: Show error.
}
});
} else {
mConnectivityManager.stopTethering(ConnectivityManager.TETHERING_WIFI);
}
}
*/
public static void enableHotspot(Context context, boolean isEnable) {
// R: Android 11
// O: Android 8
if (android.os.Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
// Android 8 (O) and above
setHotspotOnPhone(context, isEnable);
}/* else if (android.os.Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
// Android 8
}
*/
}
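// Hypothetical caller sketch (not part of the original source): wraps enableHotspot
// with the same SDK guard it relies on, logging instead of silently returning on pre-O devices.
public static void demoToggleHotspot(Context context, boolean enable) {
if (android.os.Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
enableHotspot(context, enable);
} else {
Log.w("HotspotDemo", "Hotspot control requires Android 8.0 (O) or later");
}
}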
}

@ -2,7 +2,6 @@ package com.xypower.common;
import android.content.Context; import android.content.Context;
import android.os.Environment; import android.os.Environment;
import android.text.TextUtils;
import org.json.JSONArray; import org.json.JSONArray;
import org.json.JSONException; import org.json.JSONException;
@ -108,32 +107,13 @@ public class JSONUtils {
return false; return false;
} }
public static JSONObject getConfigFile(String path, String fileName) {
JSONObject jsonObject = null;
File configFile = new File(Environment.getExternalStorageDirectory(), path);
if (!configFile.exists()) {
configFile.mkdirs();
}
configFile = new File(configFile, fileName);
if (!configFile.exists()) {
return jsonObject;
}
jsonObject = JSONUtils.loadJson(configFile.getAbsolutePath());
if (jsonObject == null) {
jsonObject = new JSONObject();
}
return jsonObject;
}
public static boolean updateConfigFile(String path, String fileName, String name, int fieldType, Object val) { public static boolean updateConfigFile(String path, String fileName, String name, int fieldType, Object val) {
if (name == null) { if (name == null) {
return false; return false;
} }
File configFile = new File(path.trim()); File configFile = new File(Environment.getExternalStorageDirectory(), path);
if (!configFile.exists()) { if (!configFile.exists()) {
if (val == null) { if (val == null) {
// Should delete the config field // Should delete the config field

@ -1,8 +1,5 @@
package com.xypower.common; package com.xypower.common;
import android.content.Context;
import java.io.FileInputStream;
import java.security.MessageDigest; import java.security.MessageDigest;
/* loaded from: ds_base_2.0.9_23030112.aar:classes.jar:com/dowse/base/util/MD5Util.class */ /* loaded from: ds_base_2.0.9_23030112.aar:classes.jar:com/dowse/base/util/MD5Util.class */
@ -32,25 +29,4 @@ public class MD5Util {
} }
return r.toString(); return r.toString();
} }
public static String getFileMd5(String filePath) {
try (FileInputStream fis = new FileInputStream(filePath)) {
MessageDigest md = MessageDigest.getInstance("MD5");
byte[] buffer = new byte[8192]; // use a large buffer to improve performance
int len;
while ((len = fis.read(buffer)) != -1) {
md.update(buffer, 0, len);
}
byte[] digest = md.digest();
StringBuilder sb = new StringBuilder();
for (byte b : digest) {
sb.append(String.format("%02x", b & 0xff)); // convert each byte to hex
}
return sb.toString();
} catch (Exception e) {
e.printStackTrace();
return "";
}
}
} }

@ -30,13 +30,7 @@ public class MicroPhotoContext {
public static final String PACKAGE_NAME_MPAPP = "com.xypower.mpapp"; public static final String PACKAGE_NAME_MPAPP = "com.xypower.mpapp";
public static final String PACKAGE_NAME_MPMASTER = "com.xypower.mpmaster"; public static final String PACKAGE_NAME_MPMASTER = "com.xypower.mpmaster";
public static final String PACKAGE_NAME_MPRES = "com.xypower.mpres";
public static final String SERVICE_NAME_MPSERVICE = PACKAGE_NAME_MPAPP + ".MicroPhotoService";
public static final String SERVICE_NAME_MPMASTER = PACKAGE_NAME_MPMASTER + ".MpMasterService";
public static final String ACTION_HEARTBEAT_MP = "com.xypower.mpapp.ACT_HB"; public static final String ACTION_HEARTBEAT_MP = "com.xypower.mpapp.ACT_HB";
public static final String ACTION_TAKEPHOTO_MP = "com.xypower.mpapp.ACT_TP";
public static final String ACTION_RESTART_MP = "com.xypower.mpapp.ACT_RESTART"; public static final String ACTION_RESTART_MP = "com.xypower.mpapp.ACT_RESTART";
public static final String ACTION_UPDATE_CONFIGS_MP = "com.xypower.mpapp.ACT_UPD_CFG"; public static final String ACTION_UPDATE_CONFIGS_MP = "com.xypower.mpapp.ACT_UPD_CFG";
@ -50,8 +44,6 @@ public class MicroPhotoContext {
public final static int DEFAULT_HEARTBEAT_FOR_SHARED_NW = 10; // minutes public final static int DEFAULT_HEARTBEAT_FOR_SHARED_NW = 10; // minutes
public final static int DEFAULT_QUICK_HEARTBEAT = 60; // second public final static int DEFAULT_QUICK_HEARTBEAT = 60; // second
public static final long BUILD_TIME_WO_SID_20250418 = 1744905600000L;
public static class AppConfig { public static class AppConfig {
public String cmdid; public String cmdid;
public String server; public String server;
@ -63,7 +55,6 @@ public class MicroPhotoContext {
public int packetSize; public int packetSize;
public int encryption; // 0: no encryption, 1: plaintext, 2: encrypted public int encryption; // 0: no encryption, 1: plaintext, 2: encrypted
public int channels; // number of camera channels public int channels; // number of camera channels
public long modificationTime = 0;
} }
public static class MasterConfig { public static class MasterConfig {
@ -92,7 +83,7 @@ public class MicroPhotoContext {
public static String getPrimaryStoragePath(Context context) { public static String getPrimaryStoragePath(Context context) {
try { try {
StorageManager sm = (StorageManager) context.getSystemService(Context.STORAGE_SERVICE); StorageManager sm = (StorageManager) context.getSystemService(Context.STORAGE_SERVICE);
Method getVolumePathsMethod = StorageManager.class.getMethod("getVolumePaths", (Class<?>[]) null); Method getVolumePathsMethod = StorageManager.class.getMethod("getVolumePaths", (Class<?>[])null);
Object[] args = null; Object[] args = null;
String[] paths = (String[]) getVolumePathsMethod.invoke(sm, args); String[] paths = (String[]) getVolumePathsMethod.invoke(sm, args);
// first element in paths[] is primary storage path // first element in paths[] is primary storage path
@ -108,7 +99,7 @@ public class MicroPhotoContext {
public static String getSecondaryStoragePath(Context context) { public static String getSecondaryStoragePath(Context context) {
try { try {
StorageManager sm = (StorageManager) context.getSystemService(Context.STORAGE_SERVICE); StorageManager sm = (StorageManager) context.getSystemService(Context.STORAGE_SERVICE);
Method getVolumePathsMethod = StorageManager.class.getMethod("getVolumePaths", (Class<?>[]) null); Method getVolumePathsMethod = StorageManager.class.getMethod("getVolumePaths", (Class<?>[])null);
Object[] args = null; Object[] args = null;
String[] paths = (String[]) getVolumePathsMethod.invoke(sm, args); String[] paths = (String[]) getVolumePathsMethod.invoke(sm, args);
// second element in paths[] is secondary storage path // second element in paths[] is secondary storage path
@ -123,7 +114,7 @@ public class MicroPhotoContext {
public String getStorageState(Context context, String path) { public String getStorageState(Context context, String path) {
try { try {
StorageManager sm = (StorageManager) context.getSystemService(Context.STORAGE_SERVICE); StorageManager sm = (StorageManager) context.getSystemService(Context.STORAGE_SERVICE);
Method getVolumeStateMethod = StorageManager.class.getMethod("getVolumeState", new Class[]{String.class}); Method getVolumeStateMethod = StorageManager.class.getMethod("getVolumeState", new Class[] {String.class});
String state = (String) getVolumeStateMethod.invoke(sm, path); String state = (String) getVolumeStateMethod.invoke(sm, path);
return state; return state;
} catch (Exception e) { } catch (Exception e) {
@ -149,13 +140,13 @@ public class MicroPhotoContext {
return str; return str;
} }
public static boolean isAppAlive(Context context, String packageName, String serviceClassName) { public static boolean isAppAlive(Context context, String packageName) {
ActivityManager am = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE); ActivityManager am = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
List<ActivityManager.RunningServiceInfo> services = am.getRunningServices(Integer.MAX_VALUE); List<ActivityManager.RunningServiceInfo> services = am.getRunningServices(Integer.MAX_VALUE);
boolean isRunning = false; boolean isRunning = false;
for (ActivityManager.RunningServiceInfo rsi : services) { for (ActivityManager.RunningServiceInfo rsi : services) {
if (packageName.equalsIgnoreCase(rsi.service.getPackageName()) && TextUtils.equals(serviceClassName, rsi.service.getClassName())) { if (packageName.equalsIgnoreCase(rsi.service.getPackageName())) {
isRunning = true; isRunning = true;
break; break;
} }
@ -164,21 +155,6 @@ public class MicroPhotoContext {
return isRunning; return isRunning;
} }
public static int getProcessIdOfService(Context context, String packageName, String serviceClassName) {
ActivityManager am = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
List<ActivityManager.RunningServiceInfo> services = am.getRunningServices(Integer.MAX_VALUE);
int pid = 0;
for (ActivityManager.RunningServiceInfo rsi : services) {
if (packageName.equalsIgnoreCase(rsi.service.getPackageName()) && TextUtils.equals(serviceClassName, rsi.service.getClassName())) {
pid = rsi.pid;
break;
}
}
return pid;
}
public static String buildAppDir(Context contxt) { public static String buildAppDir(Context contxt) {
String path = Environment.getExternalStorageDirectory().getAbsolutePath(); String path = Environment.getExternalStorageDirectory().getAbsolutePath();
@ -250,22 +226,6 @@ public class MicroPhotoContext {
return path; return path;
} }
public static String buildMpResAppDir(Context contxt) {
String path = Environment.getExternalStorageDirectory().getAbsolutePath();
if (!path.endsWith(File.separator)) {
path += File.separator;
}
path += PACKAGE_NAME_MPRES + File.separator;
File pathFile = new File(path);
if (!pathFile.exists() && !pathFile.mkdirs()) {
return null;
}
return path;
}
public static boolean hasMpAppConfig(Context context) { public static boolean hasMpAppConfig(Context context) {
boolean existed = true; boolean existed = true;
String appPath = MicroPhotoContext.buildMpAppDir(context); String appPath = MicroPhotoContext.buildMpAppDir(context);
@ -281,26 +241,15 @@ public class MicroPhotoContext {
return getMpAppConfig(context, appPath + "data/App.json"); return getMpAppConfig(context, appPath + "data/App.json");
} }
public static File getMpAppConfigFile(Context context) {
String appPath = buildMpAppDir(context);
return new File(appPath + "data/App.json");
}
public static AppConfig getMpAppConfig(Context context, String path) { public static AppConfig getMpAppConfig(Context context, String path) {
AppConfig appConfig = new AppConfig(); AppConfig appConfig = new AppConfig();
File file = new File(path);
try { try {
if (file.exists()) {
appConfig.modificationTime = file.lastModified();
String content = FilesUtils.readTextFile(path); String content = FilesUtils.readTextFile(path);
JSONObject jsonObject = TextUtils.isEmpty(content) ? new JSONObject() : new JSONObject(content); JSONObject jsonObject = TextUtils.isEmpty(content) ? new JSONObject() : new JSONObject(content);
appConfig.cmdid = jsonObject.optString(jsonObject.has("CMDID") ? "CMDID" : "cmdid", ""); appConfig.cmdid = jsonObject.optString(jsonObject.has("cmdid") ? "cmdid" : "CMDID", "");
appConfig.server = jsonObject.optString(jsonObject.has("server") ? "server" : "Server", ""); appConfig.server = jsonObject.optString(jsonObject.has("server") ? "server" : "Server", "");
appConfig.port = jsonObject.optInt(jsonObject.has("port") ? "port" : "Port", 0); appConfig.port = jsonObject.optInt(jsonObject.has("port") ? "port" : "Port", 0);
appConfig.protocol = jsonObject.optInt(jsonObject.has("protocol") ? "protocol" : "Protocol", DEFAULT_PROTOCOL); appConfig.protocol = jsonObject.optInt(jsonObject.has("protocol") ? "protocol" : "Protocol", DEFAULT_PROTOCOL);
@ -314,7 +263,6 @@ public class MicroPhotoContext {
if (appConfig.protocol == 0) { if (appConfig.protocol == 0) {
appConfig.protocol = DEFAULT_PROTOCOL; appConfig.protocol = DEFAULT_PROTOCOL;
} }
}
} catch (JSONException e) { } catch (JSONException e) {
e.printStackTrace(); e.printStackTrace();
} }
@ -424,22 +372,22 @@ public class MicroPhotoContext {
} }
} }
// public static void restartMpApp(Context context, String reason) { public static void restartMpApp(Context context, String reason) {
// /* /*
// Context context = MicroPhotoService.this.getApplicationContext(); Context context = MicroPhotoService.this.getApplicationContext();
// Intent intent = getPackageManager().getLaunchIntentForPackage(context.getPackageName()); Intent intent = getPackageManager().getLaunchIntentForPackage(context.getPackageName());
//
// int noDelay = 1; int noDelay = 1;
// intent.putExtra("noDelay", noDelay); intent.putExtra("noDelay", noDelay);
// PendingIntent restartIntent = PendingIntent.getActivity(context, 0, intent, 0); PendingIntent restartIntent = PendingIntent.getActivity(context, 0, intent, 0);
// AlarmManager mgr = (AlarmManager)getSystemService(Context.ALARM_SERVICE); AlarmManager mgr = (AlarmManager)getSystemService(Context.ALARM_SERVICE);
// mgr.set(AlarmManager.RTC, System.currentTimeMillis() + 1000, restartIntent); // restart the app after 1 second mgr.set(AlarmManager.RTC, System.currentTimeMillis() + 1000, restartIntent); // restart the app after 1 second
// System.exit(0); System.exit(0);
//
// */ */
//
// restartApp(context, PACKAGE_NAME_MPAPP, reason); restartApp(context, PACKAGE_NAME_MPAPP, reason);
// } }
public static void restartMpApp(Context context, String reason, long delayedTimeMs) { public static void restartMpApp(Context context, String reason, long delayedTimeMs) {
Intent intent = context.getPackageManager().getLaunchIntentForPackage(PACKAGE_NAME_MPAPP); Intent intent = context.getPackageManager().getLaunchIntentForPackage(PACKAGE_NAME_MPAPP);
@ -450,9 +398,9 @@ public class MicroPhotoContext {
intent.putExtra("reason", reason); intent.putExtra("reason", reason);
} }
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP); intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
PendingIntent restartIntent = PendingIntent.getActivity(context, 100, intent, PendingIntent.FLAG_UPDATE_CURRENT); PendingIntent restartIntent = PendingIntent.getActivity(context, 0, intent, 0);
AlarmManager mgr = (AlarmManager) context.getSystemService(Context.ALARM_SERVICE); AlarmManager mgr = (AlarmManager)context.getSystemService(Context.ALARM_SERVICE);
mgr.set(AlarmManager.RTC_WAKEUP, System.currentTimeMillis() + ((delayedTimeMs > 0) ? delayedTimeMs : 10), restartIntent); mgr.set(AlarmManager.RTC_WAKEUP, System.currentTimeMillis() + delayedTimeMs, restartIntent);
} }
public static void restartApp(Context context, String packageName, String reason) { public static void restartApp(Context context, String packageName, String reason) {
@ -469,36 +417,27 @@ public class MicroPhotoContext {
*/ */
// try {
// Intent intent = context.getPackageManager().getLaunchIntentForPackage(packageName);
// if (intent != null) {
// intent.putExtra("noDelay", 1);
// if (!TextUtils.isEmpty(reason)) {
// intent.putExtra("reason", reason);
// }
// intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_NEW_TASK);
// context.startActivity(intent);
// }
// } catch (Exception e) {
// e.printStackTrace();
// }
SysApi.forceStopApp(context,packageName);
try { try {
Thread.sleep(100); if (TextUtils.equals(packageName, PACKAGE_NAME_MPAPP)) {
} catch (InterruptedException e) {
throw new RuntimeException(e); Intent intent = new Intent(ACTION_RESTART_MP);
intent.putExtra("noDelay", 1);
if (!TextUtils.isEmpty(reason)) {
intent.putExtra("reason", reason);
}
intent.setPackage(PACKAGE_NAME_MPAPP);
context.sendBroadcast(intent);
} else {
SysApi.forceStopApp(context, packageName);
} }
//// then launch the target app
try {
Intent intent = context.getPackageManager().getLaunchIntentForPackage(packageName); Intent intent = context.getPackageManager().getLaunchIntentForPackage(packageName);
if (intent != null) { if (intent != null) {
intent.putExtra("noDelay", 1); intent.putExtra("noDelay", 1);
if (!TextUtils.isEmpty(reason)) { if (!TextUtils.isEmpty(reason)) {
intent.putExtra("reason", reason); intent.putExtra("reason", reason);
} }
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_NEW_TASK); intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
context.startActivity(intent); context.startActivity(intent);
} }
} catch (Exception e) { } catch (Exception e) {

@ -1,28 +1,19 @@
package com.xypower.common; package com.xypower.common;
import android.annotation.SuppressLint; import android.annotation.SuppressLint;
import android.app.usage.NetworkStats;
import android.app.usage.NetworkStatsManager;
import android.content.ContentResolver; import android.content.ContentResolver;
import android.content.ContentValues; import android.content.ContentValues;
import android.content.Context; import android.content.Context;
import android.database.Cursor; import android.database.Cursor;
import android.net.ConnectivityManager; import android.net.ConnectivityManager;
import android.net.LinkProperties;
import android.net.Network;
import android.net.NetworkCapabilities;
import android.net.NetworkInfo; import android.net.NetworkInfo;
import android.net.Uri; import android.net.Uri;
import android.net.wifi.WifiManager;
import android.os.RemoteException;
import android.telephony.TelephonyManager; import android.telephony.TelephonyManager;
import android.text.TextUtils; import android.text.TextUtils;
import android.text.format.Formatter;
import java.net.Inet4Address; import java.net.Inet4Address;
import java.net.InetAddress; import java.net.InetAddress;
import java.net.NetworkInterface; import java.net.NetworkInterface;
import java.net.SocketException;
import java.net.URI; import java.net.URI;
import java.util.Enumeration; import java.util.Enumeration;
import java.util.regex.Pattern; import java.util.regex.Pattern;
@ -64,7 +55,7 @@ public class NetworkUtils {
} }
public static String getMobileNetworkIp(Context context) { public static String getMobileNetworkIp(Context context) {
ConnectivityManager connectivityManager = (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE); ConnectivityManager connectivityManager = (ConnectivityManager)context.getSystemService(Context.CONNECTIVITY_SERVICE);
@SuppressLint("MissingPermission") NetworkInfo[] networkInfos = connectivityManager.getAllNetworkInfo(); @SuppressLint("MissingPermission") NetworkInfo[] networkInfos = connectivityManager.getAllNetworkInfo();
if (networkInfos == null || networkInfos.length == 0) { if (networkInfos == null || networkInfos.length == 0) {
@ -98,29 +89,6 @@ public class NetworkUtils {
} }
public static String getMobileIPAddress() {
try {
Enumeration<NetworkInterface> interfaces = NetworkInterface.getNetworkInterfaces();
while (interfaces.hasMoreElements()) {
NetworkInterface networkInterface = interfaces.nextElement();
if (networkInterface.isUp() && !networkInterface.isLoopback()) {
if (networkInterface.getName() != null && !networkInterface.getName().contains("ap")) {
Enumeration<InetAddress> addresses = networkInterface.getInetAddresses();
while (addresses.hasMoreElements()) {
InetAddress address = addresses.nextElement();
if (!address.isLoopbackAddress() && address.getAddress().length == 4) { // IPv4
return address.getHostAddress();
}
}
}
}
}
} catch (SocketException e) {
e.printStackTrace();
}
return null;
}
public static int addAPN(Context context, String name, String desc, String numeric, String user, String pwd) { public static int addAPN(Context context, String name, String desc, String numeric, String user, String pwd) {
int id = -1; int id = -1;
String NUMERIC = getSIMInfo(context); String NUMERIC = getSIMInfo(context);
@ -196,40 +164,4 @@ public class NetworkUtils {
*/ */
public static class Usage {
public long mobleRxBytes; // mobile data: bytes downloaded
public long mobleTxBytes; // mobile data: bytes uploaded
public String uid; // package name
}
/**
*
* @param context
* @param startTime
* @param endTime
* @param uid uid
*/
public static Usage getApplicationQuerySummary(Context context, long startTime, long endTime, int uid) {
Usage usage = new Usage();
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.M) {
NetworkStatsManager nsm = (NetworkStatsManager) context.getSystemService(Context.NETWORK_STATS_SERVICE);
assert nsm != null;
try {
NetworkStats mobile = nsm.querySummary(ConnectivityManager.TYPE_MOBILE, null, startTime, endTime);
do {
NetworkStats.Bucket bucket = new NetworkStats.Bucket();
mobile.getNextBucket(bucket);
if(bucket.getUid() == uid) {
usage.mobleRxBytes += bucket.getRxBytes();
usage.mobleTxBytes += bucket.getTxBytes();
}
} while (mobile.hasNextBucket());
} catch (RemoteException e) {
e.printStackTrace();
}
}
return usage;
}
} }

@ -6,9 +6,6 @@ import java.io.FileNotFoundException;
import java.io.FileOutputStream; import java.io.FileOutputStream;
import java.io.FilenameFilter; import java.io.FilenameFilter;
import java.io.IOException; import java.io.IOException;
import java.nio.channels.Channels;
import java.nio.channels.FileChannel;
import java.nio.channels.WritableByteChannel;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.zip.ZipEntry; import java.util.zip.ZipEntry;
@ -18,7 +15,6 @@ public class ZipUtils {
public static void ZipFolder(File srcDirectory, File zipFile, FilenameFilter filter) { public static void ZipFolder(File srcDirectory, File zipFile, FilenameFilter filter) {
ZipOutputStream outZip = null; ZipOutputStream outZip = null;
WritableByteChannel writableByteChannel = null;
FileInputStream inputStream = null; FileInputStream inputStream = null;
FileOutputStream fileOutputStream = null; FileOutputStream fileOutputStream = null;
@ -26,8 +22,8 @@ public class ZipUtils {
fileOutputStream = new FileOutputStream(zipFile); fileOutputStream = new FileOutputStream(zipFile);
outZip = new ZipOutputStream(fileOutputStream); outZip = new ZipOutputStream(fileOutputStream);
writableByteChannel = Channels.newChannel(outZip); int len;
byte[] buffer = new byte[1024 * 256];
ZipEntry zipEntry = null; ZipEntry zipEntry = null;
File[] subFiles = srcDirectory.listFiles(filter); File[] subFiles = srcDirectory.listFiles(filter);
@ -38,11 +34,8 @@ public class ZipUtils {
inputStream = new FileInputStream(subFile); inputStream = new FileInputStream(subFile);
FileChannel fileChannel = inputStream.getChannel(); while ((len = inputStream.read(buffer)) != -1) {
try { outZip.write(buffer, 0, len);
fileChannel.transferTo(0, fileChannel.size(), writableByteChannel);
} finally {
FilesUtils.closeFriendly(fileChannel);
} }
FilesUtils.closeFriendly(inputStream); FilesUtils.closeFriendly(inputStream);
@ -59,22 +52,20 @@ public class ZipUtils {
ex.printStackTrace(); ex.printStackTrace();
} }
FilesUtils.closeFriendly(fileOutputStream); FilesUtils.closeFriendly(fileOutputStream);
FilesUtils.closeFriendly(writableByteChannel);
FilesUtils.closeFriendly(outZip); FilesUtils.closeFriendly(outZip);
} }
} }
public static void ZipFolders(Map<String, File> srcDirectories, File zipFile, FilenameFilter filter) { public static void ZipFolders(Map<String, File> srcDirectories, File zipFile, FilenameFilter filter) {
ZipOutputStream outZip = null; ZipOutputStream outZip = null;
WritableByteChannel writableByteChannel = null;
FileInputStream inputStream = null; FileInputStream inputStream = null;
FileOutputStream fileOutputStream = null; FileOutputStream fileOutputStream = null;
try { try {
fileOutputStream = new FileOutputStream(zipFile); fileOutputStream = new FileOutputStream(zipFile);
outZip = new ZipOutputStream(fileOutputStream); outZip = new ZipOutputStream(fileOutputStream);
writableByteChannel = Channels.newChannel(outZip); int len;
byte[] buffer = new byte[1024 * 256];
ZipEntry zipEntry = null; ZipEntry zipEntry = null;
for (Map.Entry<String, File> srcDirectory : srcDirectories.entrySet()) { for (Map.Entry<String, File> srcDirectory : srcDirectories.entrySet()) {
@ -88,11 +79,8 @@ public class ZipUtils {
inputStream = new FileInputStream(subFile); inputStream = new FileInputStream(subFile);
FileChannel fileChannel = inputStream.getChannel(); while ((len = inputStream.read(buffer)) != -1) {
try { outZip.write(buffer, 0, len);
fileChannel.transferTo(0, fileChannel.size(), writableByteChannel);
} finally {
FilesUtils.closeFriendly(fileChannel);
} }
FilesUtils.closeFriendly(inputStream); FilesUtils.closeFriendly(inputStream);
@ -109,9 +97,7 @@ public class ZipUtils {
} catch (Exception ex) { } catch (Exception ex) {
ex.printStackTrace(); ex.printStackTrace();
} }
FilesUtils.closeFriendly(fileOutputStream); FilesUtils.closeFriendly(fileOutputStream);
FilesUtils.closeFriendly(writableByteChannel);
FilesUtils.closeFriendly(outZip); FilesUtils.closeFriendly(outZip);
} }
} }
@ -119,13 +105,13 @@ public class ZipUtils {
public static void ZipFiles(List<String> srcFiles, File zipFile) { public static void ZipFiles(List<String> srcFiles, File zipFile) {
ZipOutputStream outZip = null; ZipOutputStream outZip = null;
WritableByteChannel writableByteChannel = null;
FileInputStream inputStream = null; FileInputStream inputStream = null;
FileOutputStream fileOutputStream = null; FileOutputStream fileOutputStream = null;
try { try {
fileOutputStream = new FileOutputStream(zipFile); fileOutputStream = new FileOutputStream(zipFile);
outZip = new ZipOutputStream(fileOutputStream); outZip = new ZipOutputStream(fileOutputStream);
writableByteChannel = Channels.newChannel(outZip); int len = 0;
byte[] buffer = new byte[1024 * 256];
for (String path : srcFiles) { for (String path : srcFiles) {
File file = new File(path); File file = new File(path);
@ -133,16 +119,12 @@ public class ZipUtils {
continue; continue;
} }
ZipEntry zipEntry = new ZipEntry(srcFiles.size() > 1 ? path.substring(1) : file.getName()); ZipEntry zipEntry = new ZipEntry(srcFiles.size() > 1 ? path.substring(1) : file.getName());
outZip.putNextEntry(zipEntry);
inputStream = new FileInputStream(file); inputStream = new FileInputStream(file);
FileChannel fileChannel = inputStream.getChannel();
try {
fileChannel.transferTo(0, fileChannel.size(), writableByteChannel);
} finally {
FilesUtils.closeFriendly(fileChannel);
}
outZip.putNextEntry(zipEntry);
while ((len = inputStream.read(buffer)) != -1) {
outZip.write(buffer, 0, len);
}
outZip.closeEntry(); outZip.closeEntry();
FilesUtils.closeFriendly(inputStream); FilesUtils.closeFriendly(inputStream);
} }
@ -159,7 +141,6 @@ public class ZipUtils {
} }
} }
FilesUtils.closeFriendly(fileOutputStream); FilesUtils.closeFriendly(fileOutputStream);
FilesUtils.closeFriendly(writableByteChannel);
FilesUtils.closeFriendly(outZip); FilesUtils.closeFriendly(outZip);
} }
} }

@ -1 +0,0 @@
{"absHeartbeats":[33420,85808],"heartbeat":10,"mntnMode":0,"mpappMonitorTimeout":1800000,"port":40101,"quickHbMode":0,"quickHeartbeat":60,"separateNetwork":1,"server":"61.169.135.150","timeForKeepingLogs":15,"usingAbsHbTime":1}

@ -1 +0,0 @@
{"autoExposure":1,"autoFocus":1,"awbMode":1,"cameraType":0,"compensation":0,"customHdr":0,"exposureTime":0,"hdrStep":0,"ldrEnabled":0,"orientation":0,"osd":{"leftTop":"%%DATETIME%% CH:%%CH%%\r\n\u4fe1\u53f7:%%SL%% %%BV%%V"},"quality":80,"recognization":0,"requestTemplate":2,"resolutionCX":5376,"resolutionCY":3024,"sceneMode":0,"sensitivity":0,"usbCamera":0,"usingRawFormat":0,"usingSysCamera":0,"vendor":0,"videoCX":1280,"videoCY":720,"videoDuration":5,"wait3ALocked":0,"zoom":0,"zoomRatio":1}

@ -1 +0,0 @@
{"autoExposure":1,"autoFocus":1,"awbMode":1,"burstCaptures":4,"cameraType":0,"compensation":0,"customHdr":0,"exposureTime":0,"hdrStep":0,"ldrEnabled":0,"orientation":3,"osd":{"leftTop":"%%DATETIME%% CH:%%CH%%\r\n\u4fe1\u53f7:%%SL%% %%BV%%V"},"quality":80,"recognization":0,"requestTemplate":2,"resolutionCX":1920,"resolutionCY":1080,"sceneMode":0,"sensitivity":0,"usbCamera":0,"usingRawFormat":2,"usingSysCamera":0,"vendor":0,"videoCX":1280,"videoCY":720,"videoDuration":5,"wait3ALocked":0,"zoom":0,"zoomRatio":1}

@ -1 +0,0 @@
{"autoExposure":1,"autoFocus":1,"awbMode":1,"cameraType":0,"compensation":0,"customHdr":0,"hdrStep":0,"ldrEnabled":0,"orientation":4,"osd":{"leftTop":"%%DATETIME%% CH:%%CH%%\r\n\u4fe1\u53f7:%%SL%% %%BV%%V"},"recognization":0,"requestTemplate":1,"resolutionCX":3264,"resolutionCY":2448,"sceneMode":0,"usbCamera":0,"usingRawFormat":0,"usingSysCamera":0,"vendor":0,"videoCX":1280,"videoCY":720,"videoDuration":5,"wait3ALocked":0}

@ -1 +0,0 @@
{"absHeartbeats":[33420,85808],"heartbeat":10,"mntnMode":0,"mpappMonitorTimeout":1800000,"port":40101,"quickHbMode":0,"quickHeartbeat":60,"separateNetwork":1,"server":"61.169.135.150","timeForKeepingLogs":15,"usingAbsHbTime":1}

@ -1 +0,0 @@
{"bsManufacturer":"\u4e0a\u6d77\u6b23\u5f71\u7535\u529b\u79d1\u6280\u80a1\u4efd\u6709\u9650\u516c\u53f8","channels":3,"encryption":0,"equipName":"\u56fe\u50cf\u5728\u7ebf\u76d1\u6d4b","heartbeat":10,"imgQuality":80,"model":"MSRDT-1-WP","network":0,"networkProtocol":0,"outputDbgInfo":0,"packetBase":1,"packetSize":32768,"port":6891,"postDataPaused":0,"productionDate":1717200000,"protocol":65298,"quality":80,"reportFault":0,"server":"61.169.135.146","timeForKeepingLogs":1296000,"timeForKeepingPhotos":1296000,"upgradePacketBase":1,"workStatusTimes":3}

@ -1 +0,0 @@
{"absHeartbeats":[33420,85808],"heartbeat":10,"mntnMode":0,"mpappMonitorTimeout":1800000,"port":40101,"quickHbMode":0,"quickHeartbeat":60,"separateNetwork":1,"server":"61.169.135.150","timeForKeepingLogs":15,"usingAbsHbTime":1}

@ -1,100 +0,0 @@
[{"v":6015, "c":1},
{"v":6283, "c":2},
{"v":6442, "c":3},
{"v":6553, "c":4},
{"v":6641, "c":5},
{"v":6708, "c":6},
{"v":6735, "c":7},
{"v":6742, "c":8},
{"v":6746, "c":9},
{"v":6751, "c":10},
{"v":6757, "c":11},
{"v":6765, "c":12},
{"v":6774, "c":13},
{"v":6785, "c":14},
{"v":6797, "c":15},
{"v":6811, "c":16},
{"v":6822, "c":17},
{"v":6833, "c":18},
{"v":6844, "c":19},
{"v":6853, "c":20},
{"v":6863, "c":21},
{"v":6871, "c":22},
{"v":6878, "c":23},
{"v":6883, "c":24},
{"v":6891, "c":25},
{"v":6896, "c":26},
{"v":6897, "c":27},
{"v":6901, "c":28},
{"v":6903, "c":29},
{"v":6904, "c":30},
{"v":6906, "c":31},
{"v":6907, "c":32},
{"v":6908, "c":33},
{"v":6910, "c":34},
{"v":6911, "c":35},
{"v":6911, "c":36},
{"v":6913, "c":37},
{"v":6914, "c":38},
{"v":6914, "c":39},
{"v":6915, "c":40},
{"v":6917, "c":41},
{"v":6918, "c":42},
{"v":6918, "c":43},
{"v":6921, "c":44},
{"v":6922, "c":45},
{"v":6924, "c":46},
{"v":6926, "c":47},
{"v":6927, "c":48},
{"v":6929, "c":49},
{"v":6931, "c":50},
{"v":6934, "c":51},
{"v":6938, "c":52},
{"v":6941, "c":53},
{"v":6946, "c":54},
{"v":6948, "c":55},
{"v":6952, "c":56},
{"v":6954, "c":57},
{"v":6957, "c":58},
{"v":6959, "c":59},
{"v":6961, "c":60},
{"v":6963, "c":61},
{"v":6965, "c":62},
{"v":6967, "c":63},
{"v":6971, "c":64},
{"v":6973, "c":65},
{"v":6976, "c":66},
{"v":6978, "c":67},
{"v":6980, "c":68},
{"v":6982, "c":69},
{"v":6984, "c":70},
{"v":6986, "c":71},
{"v":6988, "c":72},
{"v":6989, "c":73},
{"v":6991, "c":74},
{"v":6992, "c":75},
{"v":6993, "c":76},
{"v":6995, "c":77},
{"v":6997, "c":78},
{"v":6998, "c":79},
{"v":7000, "c":80},
{"v":7003, "c":81},
{"v":7004, "c":82},
{"v":7006, "c":83},
{"v":7008, "c":84},
{"v":7011, "c":85},
{"v":7014, "c":86},
{"v":7018, "c":87},
{"v":7021, "c":88},
{"v":7024, "c":89},
{"v":7029, "c":90},
{"v":7033, "c":91},
{"v":7039, "c":92},
{"v":7044, "c":93},
{"v":7052, "c":94},
{"v":7062, "c":95},
{"v":7073, "c":96},
{"v":7087, "c":97},
{"v":7104, "c":98},
{"v":7122, "c":99},
{"v":7142, "c":100}]

@ -1 +0,0 @@
{"autoExposure":1,"autoFocus":1,"awbMode":1,"cameraType":0,"compensation":0,"customHdr":0,"exposureTime":0,"hdrStep":0,"ldrEnabled":0,"orientation":0,"osd":{"leftTop":"%%DATETIME%% CH:%%CH%%\r\n\u4fe1\u53f7:%%SL%% %%BV%%V"},"quality":80,"recognization":0,"requestTemplate":2,"resolutionCX":5376,"resolutionCY":3024,"sceneMode":0,"sensitivity":0,"usbCamera":0,"usingRawFormat":0,"usingSysCamera":0,"vendor":0,"videoCX":1280,"videoCY":720,"videoDuration":5,"wait3ALocked":0,"zoom":0,"zoomRatio":1}

@ -1 +0,0 @@
{"autoExposure":1,"autoFocus":1,"awbMode":1,"burstCaptures":4,"cameraType":0,"compensation":0,"customHdr":0,"exposureTime":0,"hdrStep":0,"ldrEnabled":0,"orientation":3,"osd":{"leftTop":"%%DATETIME%% CH:%%CH%%\r\n\u4fe1\u53f7:%%SL%% %%BV%%V"},"quality":80,"recognization":0,"requestTemplate":2,"resolutionCX":1920,"resolutionCY":1080,"sceneMode":0,"sensitivity":0,"usbCamera":0,"usingRawFormat":2,"usingSysCamera":0,"vendor":0,"videoCX":1280,"videoCY":720,"videoDuration":5,"wait3ALocked":0,"zoom":0,"zoomRatio":1}

@ -1 +0,0 @@
{"autoExposure":1,"autoFocus":1,"awbMode":1,"cameraType":0,"compensation":0,"customHdr":0,"hdrStep":0,"ldrEnabled":0,"orientation":4,"osd":{"leftTop":"%%DATETIME%% CH:%%CH%%\r\n\u4fe1\u53f7:%%SL%% %%BV%%V"},"recognization":0,"requestTemplate":1,"resolutionCX":3264,"resolutionCY":2448,"sceneMode":0,"usbCamera":0,"usingRawFormat":0,"usingSysCamera":0,"vendor":0,"videoCX":1280,"videoCY":720,"videoDuration":5,"wait3ALocked":0}

@ -1 +0,0 @@
{"blobName16":"354","blobName32":"366","blobName8":"output","borderColor":16776960,"enabled":0,"items":[{"enabled":1,"iid":0,"name":"\u6316\u6398\u673a","prob":0.5,"subType":5,"type":1},{"enabled":1,"iid":1,"name":"\u540a\u5854","prob":0.5,"subType":2,"type":1},{"enabled":1,"iid":2,"name":"\u540a\u8f66","prob":0.5,"subType":1,"type":1},{"enabled":1,"iid":3,"name":"\u6c34\u6ce5\u6cf5\u8f66","prob":0.5,"subType":4,"type":1},{"enabled":1,"iid":4,"name":"\u5c71\u706b","prob":0.5,"subType":40,"type":4},{"enabled":1,"iid":5,"name":"\u70df\u96fe","prob":0.5,"subType":41,"type":4},{"enabled":1,"iid":6,"name":"\u63a8\u571f\u673a","prob":0.5,"subType":3,"type":1},{"enabled":1,"iid":7,"name":"\u7ffb\u6597\u8f66","prob":0.5,"subType":10,"type":1},{"enabled":1,"iid":8,"name":"\u5bfc\u7ebf\u5f02\u7269","prob":0.5,"subType":1,"type":3},{"enabled":1,"iid":9,"name":"\u9632\u5c18\u7f51","prob":1.0099999904632568,"subType":2,"type":3},{"enabled":1,"iid":10,"name":"\u538b\u8def\u673a","prob":1.0099999904632568,"subType":2,"type":3},{"enabled":1,"iid":11,"name":"\u6405\u62cc\u8f66","prob":1.0099999904632568,"subType":2,"type":3},{"enabled":1,"iid":12,"name":"\u6869\u673a","prob":1.0099999904632568,"subType":2,"type":3},{"enabled":1,"iid":13,"name":"\u56f4\u6321","prob":1.0099999904632568,"subType":2,"type":3},{"enabled":1,"iid":14,"name":"\u6c34\u9a6c","prob":1.0099999904632568,"subType":2,"type":3},{"enabled":1,"iid":15,"name":"\u5b89\u5168\u5e3d","prob":1.0099999904632568,"subType":2,"type":3},{"enabled":1,"iid":16,"name":"\u4e95\u76d6\u7f3a\u5931","prob":1.0099999904632568,"subType":2,"type":3}],"textColor":16776960,"thickness":4,"version":"2024-12-30"}

Some files were not shown because too many files have changed in this diff
