liuguijing 1 year ago
commit 8b190fdb1a

@@ -5,7 +5,7 @@ plugins {
 // 10,00,000 major-minor-build
 def AppMajorVersion = 1
 def AppMinorVersion = 0
-def AppBuildNumber = 133
+def AppBuildNumber = 138
 def AppVersionName = AppMajorVersion + "." + AppMinorVersion + "." + AppBuildNumber
 def AppVersionCode = AppMajorVersion * 100000 + AppMinorVersion * 1000 + AppBuildNumber
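
Note: the versionCode produced by this script packs major/minor/build as major * 100000 + minor * 1000 + build, and the native side below unpacks it the same way in CPhoneDevice::QuerySystemProperties. A minimal C++ sketch of that round trip, for illustration only (values taken from this commit):

    #include <cstdio>

    int main() {
        unsigned int major = 1, minor = 0, build = 138;
        // Same packing as the Gradle script: 1.0.138 -> 100138
        unsigned int versionCode = major * 100000 + minor * 1000 + build;
        // Same unpacking as QuerySystemProperties later in this commit
        unsigned int decodedMajor = versionCode / 100000;
        unsigned int decodedMinor = (versionCode % 100000) / 1000;
        unsigned int decodedBuild = versionCode % 1000;
        std::printf("%u -> %u.%u.%u\n", versionCode, decodedMajor, decodedMinor, decodedBuild);
        return 0;
    }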

@@ -273,7 +273,8 @@ Java_com_xypower_mpapp_MicroPhotoService_init(
         JNIEnv* env,
         jobject pThis, jstring appPath,
         jstring ip, jint port, jstring cmdid, jint protocol,
-        jint networkProtocol, jint encryptData, jlong netHandle, jint signalLevel, jint versionCode, jstring simcard) {
+        jint networkProtocol, jint encryptData, jlong netHandle, jint signalLevel,
+        jint versionCode, jlong buildTime, jstring simcard) {
 /*
     google_breakpad::MinidumpDescriptor descriptor(".");
@@ -315,6 +316,7 @@ Java_com_xypower_mpapp_MicroPhotoService_init(
     CPhoneDevice* device = new CPhoneDevice(vm, pThis, appPathStr, NETID_UNSET, versionCode);
     device->SetListener(pTerminal);
     device->UpdateSignalLevel(signalLevel);
+    device->SetBuildTime(buildTime / 1000);
     device->UpdateSimcard(simcardStr);
     pTerminal->InitServerInfo(appPathStr, cmdidStr, ipStr, port, udpOrTcp, encryptData);
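
Note: buildTime arrives from Java as a jlong holding epoch milliseconds (BuildConfig.BUILD_TIMESTAMP in the service change below) and is stored as epoch seconds via SetBuildTime(buildTime / 1000). FormatLocalTime, which renders it in QuerySystemProperties, is not part of this diff; the sketch below assumes a strftime-based helper with the same intent:

    #include <ctime>
    #include <string>

    // Hypothetical stand-in for FormatLocalTime(); the real implementation is not in this commit.
    static std::string FormatLocalTimeSketch(time_t t) {
        struct tm tmLocal = {};
        localtime_r(&t, &tmLocal);                 // thread-safe local-time conversion
        char buf[32] = { 0 };
        strftime(buf, sizeof(buf), "%Y-%m-%d %H:%M:%S", &tmLocal);
        return std::string(buf);
    }

    // Usage mirroring the call site above: the jlong is in milliseconds.
    // device->SetBuildTime(buildTime / 1000);     // seconds are stored for later formatting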

@@ -175,6 +175,7 @@ CPhoneDevice::CPhoneDevice(JavaVM* vm, jobject service, const std::string& appPa
     m_signalLevel = 0;
     m_signalLevelUpdateTime = time(NULL);
+    mBuildTime = 0;
     RegisterHandlerForSignal(SIGUSR2);
@@ -367,6 +368,9 @@ bool CPhoneDevice::QuerySystemProperties(std::map<std::string, std::string>& pro
             version += std::to_string((mVersionCode % 100000) / 1000);
             version += ".";
             version += std::to_string(mVersionCode % 1000);
+#if 0
+            version += " " + FormatLocalTime(mBuildTime);
+#endif
             it->second = version;
         }
         else if (it->first == PROP_PROD_DATE)
@@ -904,6 +908,11 @@ IDevice::timer_uid_t CPhoneDevice::RegisterHeartbeat(unsigned int timerType, uns
 bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector<OSD_INFO>& osds, const std::string& path)
 {
+    if (photoInfo.width == 0 || photoInfo.height == 0)
+    {
+        XYLOG(XYLOG_SEVERITY_ERROR, "TP: Invalid Size: (%u-%u) PHOTOID=%u", (unsigned int)photoInfo.width, (unsigned int)photoInfo.height, photoInfo.photoId);
+        return false;
+    }
     if (m_threadClose.joinable())
     {
         XYLOG(XYLOG_SEVERITY_INFO, "TP: Wait Prev Thread CH=%u PR=%X PHOTOID=%u", (unsigned int)photoInfo.channel, (unsigned int)photoInfo.preset, photoInfo.photoId);
@@ -931,19 +940,19 @@ bool CPhoneDevice::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector<
     params.autoExposure = mPhotoInfo.autoExposure;
     params.focusTimeout = mPhotoInfo.focusTimeout * 1000;
     params.exposureTime = mPhotoInfo.exposureTime;
-    params.sensibility = mPhotoInfo.sensibility;
+    params.sensitivity = mPhotoInfo.sensitivity;
     params.compensation = mPhotoInfo.compensation;
     params.orientation = mPhotoInfo.orientation;
     params.zoom = mPhotoInfo.zoom;
     params.zoomRatio = mPhotoInfo.zoomRatio;
-    if (photoInfo.channel == 2 || photoInfo.channel == 3)
+    if (photoInfo.ldrEnabled)
     {
         if (GpioControl::getLightAdc() > 1400)
         {
             params.autoExposure = 0;
             params.exposureTime = 1200;
-            params.sensibility = 1200;
+            params.sensitivity = 1200;
         }
     }
@@ -1215,13 +1224,13 @@ bool CPhoneDevice::OnImageReady(cv::Mat& mat)
                 pt.x = it->x + it->w - textSize.width;
             }
-#ifdef _DEBUG
+#ifdef OUTPUT_CAMERA_DBG_INFO
             char buf[128];
-            snprintf(buf, sizeof(buf), "Draw Label: %d=%s (%f,%f)-(%f,%f) Text:(%d,%d)-(%d,%d)",
+            snprintf(buf, sizeof(buf), "AI: %d=%s (%f,%f)-(%f,%f) Text:(%d,%d)-(%d,%d)",
                      it->label, item.name.c_str(), it->x, it->y, it->w, it->h, pt.x, pt.y, textSize.width, textSize.height);
-            ALOGI(buf);
+            XYLOG(XYLOG_SEVERITY_DEBUG, buf);
 #endif
-            ft2->putText(mat, item.name + std::to_string((int)(it->prob * 100.0)), pt, fontSize, textColor, thickness, cv::LINE_AA, false, true);
+            ft2->putText(mat, item.name + std::to_string((int)(it->prob * 100.0)) + "%", pt, fontSize, textColor, thickness, cv::LINE_AA, false, true);
         }
         ++it;
     }
@@ -1233,16 +1242,68 @@ bool CPhoneDevice::OnImageReady(cv::Mat& mat)
     cv::Scalar scalarRed(0, 0, 255); // red
     NdkCamera::CAPTURE_RESULT captureResult = mCamera->getCaptureResult();
+    if (captureResult.avgY < 25 && mPhotoInfo.autoExposure != 0)
+    {
+        // Take another photo
+        CPhoneDevice* pThis = this;
+        std::string path = mPath;
+        IDevice::PHOTO_INFO photoInfo = mPhotoInfo;
+        std::vector<IDevice::OSD_INFO> osds = mOsds;
+        photoInfo.photoId += 1;
+        photoInfo.autoExposure = 0;
+        if (captureResult.avgY == 0)
+        {
+            photoInfo.exposureTime = 600;
+            photoInfo.sensitivity = 2500;
+        }
+        else if (captureResult.avgY <= 6)
+        {
+            photoInfo.exposureTime = captureResult.exposureTime / 1000000 * 150 / captureResult.avgY;
+            photoInfo.sensitivity = photoInfo.sensitivity * 80 / captureResult.avgY;
+            if (photoInfo.sensitivity < captureResult.sensitivity)
+            {
+                photoInfo.sensitivity = captureResult.sensitivity;
+            }
+            else if (photoInfo.sensitivity > 3000)
+            {
+                photoInfo.sensitivity = 3000;
+            }
+        }
+        else
+        {
+            photoInfo.exposureTime = captureResult.exposureTime / 1000000 * 120 / captureResult.avgY;
+            photoInfo.sensitivity = photoInfo.sensitivity * 60 / captureResult.avgY;
+            if (photoInfo.sensitivity < captureResult.sensitivity)
+            {
+                photoInfo.sensitivity = captureResult.sensitivity;
+            }
+            else if (photoInfo.sensitivity > 3000)
+            {
+                photoInfo.sensitivity = 3000;
+            }
+        }
+        std::thread t([=]
+        {
+            std::this_thread::sleep_for(std::chrono::milliseconds(5000));
+            pThis->TakePhoto(photoInfo, osds, path);
+        });
+        t.detach();
+    }
     char extimeunit[4] = { 0 };
     unsigned int extime = (captureResult.exposureTime >= 1000000) ? ((unsigned int)(captureResult.exposureTime / 1000000)) : ((unsigned int)(captureResult.exposureTime / 1000));
     strcpy(extimeunit, (captureResult.exposureTime >= 1000000) ? "ms" : "ns");
     char str[128] = { 0 };
-    snprintf(str, sizeof(str), "AE=%u EXPS=%u%s(%d) ISO=%d AF=%u LDR=%d AFS=%u AES=%u SCENE=%d AWB=%u %0.1fx", captureResult.autoExposure,
+    snprintf(str, sizeof(str), "AE=%u EXPS=%u%s(%d) ISO=%d AF=%u LDR=%d(%u) AFS=%u AES=%u SCENE=%d AWB=%u %0.1fx", captureResult.autoExposure,
              extime, extimeunit, captureResult.compensation,
-             captureResult.sensitibity,
+             captureResult.sensitivity,
             captureResult.autoFocus,
             // isnan(captureResult.FocusDistance) ? 0 : captureResult.FocusDistance,
-            GpioControl::getLightAdc(),
+            GpioControl::getLightAdc(), (unsigned int)captureResult.avgY,
            (unsigned int)captureResult.afState,
            (unsigned int)captureResult.aeState,
            captureResult.sceneMode,
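
Note: the block added to OnImageReady retries a capture when the frame is too dark (captureResult.avgY below 25 with auto exposure on), waiting 5 seconds and re-shooting with manual exposure scaled from the previous capture result. A compact sketch of just that scaling, with the thresholds and factors taken from the diff (the struct is a simplified stand-in, not the real IDevice::PHOTO_INFO):

    #include <cstdint>

    struct RetryExposure { uint32_t exposureTimeMs; uint32_t sensitivity; };

    // avgY: average luma of the dark frame; lastExposureNs/lastSensitivity: what the sensor
    // actually used (CAPTURE_RESULT); requestedSensitivity: the ISO configured for the shot.
    static RetryExposure ScaleForDarkFrame(uint8_t avgY, int64_t lastExposureNs,
                                           int32_t lastSensitivity, uint32_t requestedSensitivity)
    {
        RetryExposure r;
        if (avgY == 0) {
            r.exposureTimeMs = 600;                            // fixed fallback for a black frame
            r.sensitivity = 2500;
            return r;
        }
        const uint32_t expFactor = (avgY <= 6) ? 150 : 120;    // stronger boost when very dark
        const uint32_t isoFactor = (avgY <= 6) ? 80 : 60;
        r.exposureTimeMs = (uint32_t)(lastExposureNs / 1000000) * expFactor / avgY;
        r.sensitivity = requestedSensitivity * isoFactor / avgY;
        if (r.sensitivity < (uint32_t)lastSensitivity) {
            r.sensitivity = (uint32_t)lastSensitivity;         // never below what was actually used
        } else if (r.sensitivity > 3000) {
            r.sensitivity = 3000;                              // hard ISO cap
        }
        return r;
    }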

@@ -202,6 +202,10 @@ public:
     void UpdatePosition(double lon, double lat, double radius, time_t ts);
     bool OnVideoReady(bool result, const char* path, unsigned int photoId);
     void UpdateSignalLevel(int signalLevel);
+    void SetBuildTime(time_t buildTime)
+    {
+        mBuildTime = buildTime;
+    }
     void UpdateSimcard(const std::string& simcard);
 protected:
@@ -282,6 +286,7 @@ protected:
     const CFG_RECOGNIZATION* m_pRecognizationCfg;
     unsigned int mNetId;
     unsigned int mVersionCode;
+    time_t mBuildTime;
     atomic_ulong m_timerUidFeed;
     atomic_ulong m_wakelockIdFeed;

@@ -16,6 +16,7 @@
 #include <string>
 #include <thread>
+#include <numeric>
 #include <android/log.h>
 #include <opencv2/opencv.hpp>
 #include <opencv2/core/core.hpp>
@@ -122,6 +123,13 @@ NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARA
     sceneModeSupported = false;
+    activeArraySize[0] = 0;
+    activeArraySize[1] = 0;
+    maxRegions[0] = 0;
+    maxRegions[1] = 0;
+    maxRegions[2] = 0;
     camera_manager_cb.context = this;
     camera_manager_cb.onCameraAvailable = ::onAvailabilityCallback;
     camera_manager_cb.onCameraUnavailable = ::onUnavailabilityCallback;
@@ -136,6 +144,8 @@ NdkCamera::NdkCamera(int32_t width, int32_t height, const NdkCamera::CAMERA_PARA
     capture_session = 0;
     captureSequenceId = 0;
+    lightDetected = false;
     mResult = { 0 };
 }
@@ -393,6 +403,27 @@ int NdkCamera::open(const std::string& cameraId) {
         }
     }
+    {
+        ACameraMetadata_const_entry val = {0};
+        status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, &val);
+        if (status == ACAMERA_OK)
+        {
+            activeArraySize[0] = val.data.i32[2];
+            activeArraySize[1] = val.data.i32[3];
+        }
+    }
+    {
+        ACameraMetadata_const_entry val = {0};
+        status = ACameraMetadata_getConstEntry(camera_metadata, ACAMERA_CONTROL_MAX_REGIONS, &val);
+        if (status == ACAMERA_OK)
+        {
+            maxRegions[0] = val.data.i32[0];
+            maxRegions[1] = val.data.i32[1];
+            maxRegions[2] = val.data.i32[2];
+        }
+    }
     {
         ACameraMetadata_const_entry e = {0};
         status = ACameraMetadata_getConstEntry(camera_metadata,ACAMERA_CONTROL_AVAILABLE_SCENE_MODES, &e);
@@ -455,18 +486,39 @@ int NdkCamera::open(const std::string& cameraId) {
             : TEMPLATE_STILL_CAPTURE;
         status = ACameraDevice_createCaptureRequest(camera_device, templateId, &capture_request);
-        int32_t fpsRange[2] = {1,1};
+        int32_t fpsRange[2] = {1,10};
         status = ACaptureRequest_setEntry_i32(capture_request, ACAMERA_CONTROL_AE_TARGET_FPS_RANGE,2,fpsRange);
     }
     if (afSupported && m_params.autoFocus) {
+        if (!m_params.zoom)
+        {
+            if (maxRegions[2] > 0)
+            {
+                int32_t centerX = activeArraySize[0] >> 1;
+                int32_t centerY = activeArraySize[1] >> 1;
+                int32_t sizeX = activeArraySize[0] >> 4;
+                int32_t sizeY = activeArraySize[1] >> 4;
+                int32_t afRegions[] = {centerX - sizeX, centerY - sizeY, centerX + sizeX, centerY + sizeY, 1000};
+                status = ACaptureRequest_setEntry_i32(capture_request, ACAMERA_CONTROL_AF_REGIONS, 5, afRegions);
+                if (status == ACAMERA_OK)
+                {
+                    // m_imagesCaptured = ~0;
+#ifdef _DEBUG
+                    int aa = 0;
+#endif
+                }
+            }
+        }
         // uint8_t afMode = ACAMERA_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
         uint8_t afMode = ACAMERA_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
         // uint8_t afMode = ACAMERA_CONTROL_AF_MODE_AUTO;
         status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AF_MODE, 1, &afMode);
+        if (!m_params.zoom)
+        {
         uint8_t trig = ACAMERA_CONTROL_AF_TRIGGER_CANCEL;
         // status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AF_TRIGGER, 1, &trig);
@@ -519,6 +571,19 @@ int NdkCamera::open(const std::string& cameraId) {
             }
         }
+        if (maxRegions[0] > 0)
+        {
+            int32_t aeRegions[] = {0, 0, activeArraySize[0] - 1, activeArraySize[1] - 1, 1000};
+            status = ACaptureRequest_setEntry_i32(capture_request, ACAMERA_CONTROL_AE_REGIONS, 5, aeRegions);
+            if (status == ACAMERA_OK)
+            {
+                // m_imagesCaptured = ~0;
+#ifdef _DEBUG
+                int aa = 0;
+#endif
+            }
+        }
         uint8_t aePrecatureTrigger = ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER_START;
         status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER, 1, &aePrecatureTrigger);
         if (status == ACAMERA_OK)
@@ -534,9 +599,9 @@ int NdkCamera::open(const std::string& cameraId) {
         uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_OFF;
         status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode);
-        if (m_params.sensibility > 0)
+        if (m_params.sensitivity > 0)
         {
-            int32_t sensitivity = m_params.sensibility;
+            int32_t sensitivity = m_params.sensitivity;
             status = ACaptureRequest_setEntry_i32(capture_request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity);
         }
         if (m_params.exposureTime > 0)
@@ -551,6 +616,14 @@ int NdkCamera::open(const std::string& cameraId) {
     uint8_t awbMode = ACAMERA_CONTROL_AWB_MODE_AUTO;
     status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AWB_MODE, 1, &awbMode);
+#if 0
+    uint8_t antiBandingMode = ACAMERA_CONTROL_AE_ANTIBANDING_MODE_60HZ;
+    status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_ANTIBANDING_MODE, 1, &antiBandingMode);
+    uint8_t flicker = ACAMERA_STATISTICS_SCENE_FLICKER_60HZ;
+    status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_STATISTICS_SCENE_FLICKER, 1, &flicker);
+#endif
     status = ACameraOutputTarget_create(image_reader_surface, &image_reader_target);
     status = ACaptureRequest_addTarget(capture_request, image_reader_target);
 }
@@ -687,14 +760,54 @@ void NdkCamera::onImageAvailable(AImageReader* reader)
     ALOGD("onImageAvailable %p", reader);
     AImage* image = 0;
-    media_status_t status = AImageReader_acquireLatestImage(reader, &image);
-    if (status != AMEDIA_OK)
+    media_status_t mstatus = AImageReader_acquireLatestImage(reader, &image);
+    if (mstatus != AMEDIA_OK)
     {
         // error
-        XYLOG(XYLOG_SEVERITY_ERROR, "AImageReader_acquireLatestImage error: %d", status);
+        XYLOG(XYLOG_SEVERITY_ERROR, "AImageReader_acquireLatestImage error: %d", mstatus);
         return;
     }
+    uint8_t* y_data = 0;
+    int y_len = 0;
+#if 0
+    if (!lightDetected)
+    {
+        AImage_getPlaneData(image, 0, &y_data, &y_len);
+        lightDetected = true;
+#if __cplusplus >= 201703L
+        uint64_t avgY = std::reduce(y_data, y_data + y_len, 0);
+#else
+        uint64_t avgY = std::accumulate(y_data, y_data + y_len, 0);
+#endif
+        avgY = avgY / (uint64_t)y_len;
+        mResult.avgY = avgY;
+#if 1
+        if (avgY < 50)
+        {
+            if (m_params.autoExposure)
+            {
+                uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_OFF;
+                camera_status_t status = ACaptureRequest_setEntry_u8(capture_request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode);
+                int32_t sensitivity = (avgY < 5) ? 2000 : (mResult.sensitivity * 60.0 / avgY);
+                status = ACaptureRequest_setEntry_i32(capture_request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity);
+                int64_t exposureTime = (avgY < 5) ? 200 * 1000000 : (mResult.exposureTime * 120.0 / avgY);
+                status = ACaptureRequest_setEntry_i64(capture_request, ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exposureTime);
+                XYLOG(XYLOG_SEVERITY_WARNING, "YUV Light: %u EXPO:%lld => %lld ISO: %u => %u", (uint32_t)avgY,
+                      mResult.exposureTime, exposureTime, mResult.sensitivity, sensitivity);
+            }
+            AImage_delete(image);
+            return;
+        }
+#endif
+    }
+#endif
     if (m_imagesCaptured == ~0 || m_imagesCaptured >= 1)
     {
@@ -728,16 +841,25 @@ void NdkCamera::onImageAvailable(AImageReader* reader)
     AImage_getPlaneRowStride(image, 1, &u_rowStride);
     AImage_getPlaneRowStride(image, 2, &v_rowStride);
-    uint8_t* y_data = 0;
+    // uint8_t* y_data = 0;
     uint8_t* u_data = 0;
     uint8_t* v_data = 0;
-    int y_len = 0;
+    // int y_len = 0;
     int u_len = 0;
     int v_len = 0;
     AImage_getPlaneData(image, 0, &y_data, &y_len);
     AImage_getPlaneData(image, 1, &u_data, &u_len);
     AImage_getPlaneData(image, 2, &v_data, &v_len);
+#if 1
+#if __cplusplus >= 201703L
+    uint64_t avgY = std::reduce(y_data, y_data + y_len, 0);
+#else
+    uint64_t avgY = std::accumulate(y_data, y_data + y_len, 0);
+#endif
+    mResult.avgY = avgY / y_len;
+#endif
     if (u_data == v_data + 1 && v_data == y_data + width * height && y_pixelStride == 1 && u_pixelStride == 2 && v_pixelStride == 2 && y_rowStride == width && u_rowStride == width && v_rowStride == width)
     {
         // already nv21 :)
@@ -966,6 +1088,18 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque
     status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_AE_STATE, &val);
     mResult.aeState = (status == ACAMERA_OK) ? *(val.data.u8) : ACAMERA_CONTROL_AE_STATE_INACTIVE;
+    if (!lightDetected)
+    {
+        val = { 0 };
+        status = ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_EXPOSURE_TIME, &val);
+        int64_t exTime = (status == ACAMERA_OK) ? val.data.i64[0] : -1;
+        mResult.exposureTime = exTime;
+        val = {0};
+        status = ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_SENSITIVITY, &val);
+        mResult.sensitivity = *(val.data.i32);
+    }
     if (afSupported && (m_params.autoFocus != 0))
     {
         val = { 0 };
@@ -1084,7 +1218,7 @@ void NdkCamera::onCaptureCompleted(ACameraCaptureSession* session, ACaptureReque
     val = {0};
     status = ACameraMetadata_getConstEntry(result, ACAMERA_SENSOR_SENSITIVITY, &val);
-    mResult.sensitibity = *(val.data.i32);
+    mResult.sensitivity = *(val.data.i32);
     val = {0};
     status = ACameraMetadata_getConstEntry(result, ACAMERA_CONTROL_SCENE_MODE, &val);
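
Note: the average-luma value now stored in mResult.avgY is a straight mean over the Y plane. One caveat: std::accumulate/std::reduce take their accumulator type from the initial value, so the literal 0 used above sums into an int, which can overflow for large bright Y planes (e.g. 4000x3000 pixels of value 255 exceeds INT_MAX). A sketch of the same computation with a 64-bit initial value, under the same inputs:

    #include <cstdint>
    #include <numeric>

    static uint8_t AverageLuma(const uint8_t* y_data, int y_len)
    {
        if (y_len <= 0) return 0;
    #if __cplusplus >= 201703L
        uint64_t sum = std::reduce(y_data, y_data + y_len, (uint64_t)0);
    #else
        uint64_t sum = std::accumulate(y_data, y_data + y_len, (uint64_t)0);
    #endif
        return (uint8_t)(sum / (uint64_t)y_len);   // the average is always <= 255
    }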

@@ -77,7 +77,7 @@ public:
         unsigned int zoom;
         unsigned int reserved : 7;
         unsigned int exposureTime; // ms
-        unsigned int sensibility;
+        unsigned int sensitivity;
         int compensation;
         float zoomRatio;
     };
@@ -91,10 +91,11 @@ public:
         uint8_t awbState;
         int64_t exposureTime;
         float FocusDistance;
-        int32_t sensitibity;
+        int32_t sensitivity;
         int32_t compensation;
         uint8_t sceneMode;
         float zoomRatio;
+        uint8_t avgY;
     };
     NdkCamera(int32_t width, int32_t height, const CAMERA_PARAMS& params);
@@ -142,12 +143,16 @@ protected:
     bool aeLockAvailable;
     bool awbLockAvailable;
+    bool lightDetected;
     // int64_t exposureTime_;
     RangeValue<int64_t> exposureRange;
     // int32_t sensitivity_;
     RangeValue<int32_t> sensitivityRange;
     RangeValue<int32_t> aeCompensationRange;
     ACameraMetadata_rational aeCompensationStep;
+    int32_t activeArraySize[2];
+    int32_t maxRegions[3];
     unsigned int m_imagesCaptured;
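
Note: the new activeArraySize/maxRegions members back the metering rectangles set in open(): when AF regions are supported (maxRegions[2] > 0) and zoom is off, the request gets a region centered on the sensor, extending 1/16 of the active array in each direction, with weight 1000. A sketch of that computation using the values from this diff (the xmin, ymin, xmax, ymax, weight ordering is how ACAMERA_CONTROL_AF_REGIONS is filled here):

    #include <cstdint>

    struct MeteringRegion { int32_t xmin, ymin, xmax, ymax, weight; };

    static MeteringRegion CenteredAfRegion(const int32_t activeArraySize[2])
    {
        int32_t centerX = activeArraySize[0] >> 1;
        int32_t centerY = activeArraySize[1] >> 1;
        int32_t sizeX = activeArraySize[0] >> 4;   // 1/16 of the active width
        int32_t sizeY = activeArraySize[1] >> 4;
        return MeteringRegion{ centerX - sizeX, centerY - sizeY,
                               centerX + sizeX, centerY + sizeY, 1000 };
    }
    // The five int32 values are then passed via
    // ACaptureRequest_setEntry_i32(capture_request, ACAMERA_CONTROL_AF_REGIONS, 5, ...);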

@@ -150,6 +150,7 @@ public class ChannelActivity extends AppCompatActivity {
             binding.btnUsbCamera.setChecked(jsonObject.optInt("usbCamera", 0) == 1);
             binding.btnAutoExplosure.setChecked(jsonObject.optInt("autoExposure", 1) == 1);
             binding.btnAutoFocus.setChecked(jsonObject.optInt("autoFocus", 1) == 1);
+            binding.ldrEnabled.setChecked(jsonObject.optInt("ldrEnabled", 0) == 1);
             // binding.btnHdrMode.setChecked(jsonObject.optInt("hdrMode", 0) == 1);
             // binding.btnNightMode.setChecked(jsonObject.optInt("nightMode", 0) == 1);
             int sceneMode = jsonObject.optInt("sceneMode", 0);
@@ -162,7 +163,7 @@
                 }
             }
             binding.exposuretime.setText(Integer.toString(jsonObject.optInt("exposureTime", 0)));
-            binding.sensitivity.setText(Integer.toString(jsonObject.optInt("sensibility", 0)));
+            binding.sensitivity.setText(Integer.toString(jsonObject.optInt("sensitivity", 0)));
             binding.btnZoom.setChecked(jsonObject.optInt("zoom", 0) == 1);
             if (jsonObject.has("compensation")) {
                 binding.compensation.setText(Integer.toString(jsonObject.optInt("compensation", 0)));
@@ -254,6 +255,7 @@
             jsonObject.put("usbCamera", binding.btnUsbCamera.isChecked() ? 1 : 0);
             jsonObject.put("autoExposure", binding.btnAutoExplosure.isChecked() ? 1 : 0);
             jsonObject.put("autoFocus", binding.btnAutoFocus.isChecked() ? 1 : 0);
+            jsonObject.put("ldrEnabled", binding.ldrEnabled.isChecked() ? 1 : 0);
             // jsonObject.put("hdrMode", binding.btnHdrMode.isChecked() ? 1 : 0);
             // jsonObject.put("nightMode", binding.btnNightMode.isChecked() ? 1 : 0);
             int sceneMode = 0;
@@ -262,7 +264,7 @@
                 sceneMode = Integer.parseInt(sceneModeText);
             jsonObject.put("sceneMode", sceneMode);
             jsonObject.put("exposureTime", Integer.parseInt(binding.exposuretime.getText().toString()));
-            jsonObject.put("sensibility", Integer.parseInt(binding.sensitivity.getText().toString()));
+            jsonObject.put("sensitivity", Integer.parseInt(binding.sensitivity.getText().toString()));
             jsonObject.put("zoom", binding.btnZoom.isChecked() ? 1 : 0);
             jsonObject.put("orientation", binding.orientations.getSelectedItemPosition());
             jsonObject.put("recognization", binding.recognization.getSelectedItemPosition());

@@ -675,7 +675,7 @@ public class MicroPhotoService extends Service {
                 simcard = "";
             }
-            service.mNativeHandle = init(appPath, server, port, cmdid, protocol, networkProtocol, encryptData, 0, service.getSignalLevel(), versionCode, simcard);
+            service.mNativeHandle = init(appPath, server, port, cmdid, protocol, networkProtocol, encryptData, 0, service.getSignalLevel(), versionCode, BuildConfig.BUILD_TIMESTAMP, simcard);
             if (service.mNativeHandle != 0) {
                 isRunning = true;
@@ -1127,7 +1127,7 @@ CellSignalStrengthGsm cellSignalStrengthGsm = cellInfoGsm.getCellSignalStrength(
 cellSignalStrengthGsm.getDbm();
     */
-    protected native long init(String appPath, String ip, int port, String cmdid, int protocol, int networkProtocl, int encryptData, long netHandle, int signalLevel, int versionCode, String simcard);
+    protected native long init(String appPath, String ip, int port, String cmdid, int protocol, int networkProtocl, int encryptData, long netHandle, int signalLevel, int versionCode, long buildTime, String simcard);
     protected native long getHeartbeatDuration(long handler);
     protected native long[] getPhotoTimeData(long handler, long startTime);
     protected native long[] getPhotoTimeData2(long handler);

@@ -8,7 +8,9 @@ import android.graphics.Canvas;
 import android.graphics.Color;
 import android.graphics.Insets;
 import android.graphics.Paint;
+import android.graphics.Point;
 import android.graphics.PorterDuff;
+import android.graphics.PorterDuffXfermode;
 import android.graphics.Rect;
 import android.net.Uri;
 import android.opengl.GLException;
@@ -47,7 +49,9 @@ import java.io.FileOutputStream;
 import java.io.IOException;
 import java.nio.IntBuffer;
 import java.text.SimpleDateFormat;
+import java.util.ArrayList;
 import java.util.Date;
+import java.util.List;
 public class Camera2VideoActivity extends AppCompatActivity {
@@ -91,6 +95,16 @@ public class Camera2VideoActivity extends AppCompatActivity {
     private int mTimeMask = 0;
     private int mStatusBarHeight = -1;
+    private static class OSD_ITEM
+    {
+        String text;
+        int mask;
+        Point origin;
+        Rect previousRect;
+    }
+
+    private List<OSD_ITEM> mOSDItems = new ArrayList<>();
+
     private final static int TIME_MASK_LT_TS = 1;
     private final static int TIME_MASK_LT_DT = 2;
     private final static int TIME_MASK_LT_ML = 4;
@@ -119,8 +133,8 @@
                 ms = 0;
             }
-            updateOSD(ts);
+            // updateOSD(ts);
+            initOSD(ts);
             mHandler.postDelayed(this, 1000 - ms);
         }
     };
@@ -160,19 +174,14 @@
         });
     }
     public static int px2dip(Context context, float pxValue) {
         final float scale = context.getResources().getDisplayMetrics().density;
         return (int) (pxValue / scale + 0.5f);
     }
     public int getStatusBarHeight(Context context) {
         if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {
             WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
             WindowMetrics windowMetrics = wm.getCurrentWindowMetrics();
             WindowInsets windowInsets = windowMetrics.getWindowInsets();
@@ -187,7 +196,6 @@
         return statusBarHeight;
     }
     protected void onCreateActivity() {
         //
@@ -300,7 +308,7 @@
         releaseCamera();
     }
-    private void updateOSD(long ts) {
+    private void initOSD(long ts) {
         if (mStatusBarHeight == -1) {
             mStatusBarHeight = getStatusBarHeight(this);
@@ -310,64 +318,248 @@
             int bmWidth = mBitmap.getWidth();
             int bmHeight = mBitmap.getHeight();
             int margin = mOSDMargin;
+            int x = 0;
+            int y = 0;
             // mOSDFilter.
             Canvas canvas = new Canvas(mBitmap);
+            Rect textBounds = new Rect();
             canvas.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR);
             if (!TextUtils.isEmpty(mOSDLeftTop)) {
-                String osd = ((mTimeMask | TIME_MASK_LT) == 0) ? mOSDLeftTop : updateOSDTime(mOSDLeftTop, ts);
-                // mPaint.setTextAlign(Paint.Align.LEFT);
-                int x = margin;
-                int y = margin + statusHeight;
-                canvas.drawText(osd, x, y, mPaint);
-                canvas.drawText(osd, x, y, mPaintStroker);
+                String[] items = mOSDLeftTop.split("\n");
+                Point origin = new Point(margin, margin + statusHeight);
+                for (String item : items) {
+                    int mask = 0;
+                    if (item.indexOf(TIME_MICRO_TS) != 0) {
+                        mask |= TIME_MASK_LT_TS;
+                    }
+                    if (item.indexOf(TIME_MICRO_DT) != 0) {
+                        mask |= TIME_MASK_LT_DT;
+                    }
+                    OSD_ITEM osdItem = new OSD_ITEM();
+                    osdItem.text = item;
+                    osdItem.mask = mask;
+                    osdItem.origin = new Point(origin);
+                    if (mask == 0) {
+                        canvas.drawText(item, origin.x, origin.y, mPaint);
+                        canvas.drawText(item, origin.x, origin.y, mPaintStroker);
+                        mPaintStroker.getTextBounds(item, 0, item.length(), textBounds);
+                    } else {
+                        String newText = updateOSDTime(item, ts);
+                        Log.d("OSD", "INIT OSD x=" + origin.x + " y=" + origin.y);
+                        canvas.drawText(newText, origin.x, origin.y, mPaint);
+                        canvas.drawText(newText, origin.x, origin.y, mPaintStroker);
+                        mPaintStroker.getTextBounds(newText, 0, item.length(), textBounds);
+                    }
+                    osdItem.previousRect = new Rect(origin.x, origin.y, origin.x + textBounds.width(), origin.y + textBounds.height());
+                    mOSDItems.add(osdItem);
+                    origin.y += (textBounds.height() * 5) >> 2;
+                }
             }
             if (!TextUtils.isEmpty(mOSDLeftBottom)) {
-                String osd = ((mTimeMask | TIME_MASK_LB) == 0) ? mOSDLeftBottom : updateOSDTime(mOSDLeftBottom, ts);
-                // mPaint.setTextAlign(Paint.Align.LEFT);
-                Rect textBounds = new Rect();
-                mPaint.getTextBounds(osd, 0, osd.length(), textBounds);
-                float y = bmHeight - margin - textBounds.height();
-                canvas.drawText(osd, margin, y, mPaint);
-                canvas.drawText(osd, margin, y, mPaintStroker);
+                String[] items = mOSDLeftBottom.split("\n");
+                Point origin = new Point(margin, bmHeight - margin);
+                for(int idx = items.length-1; idx >= 0; idx--) {
+                    int mask = 0;
+                    String item = items[idx];
+                    if (item.indexOf(TIME_MICRO_TS) != 0) {
+                        mask |= TIME_MASK_LB_TS;
+                    }
+                    if (item.indexOf(TIME_MICRO_DT) != 0) {
+                        mask |= TIME_MASK_LB_DT;
+                    }
+                    OSD_ITEM osdItem = new OSD_ITEM();
+                    osdItem.text = item;
+                    osdItem.mask = mask;
+                    osdItem.origin = new Point(origin);
+                    if (mask == 0) {
+                        mPaintStroker.getTextBounds(item, 0, item.length(), textBounds);
+                        y = origin.y - textBounds.height();
+                        canvas.drawText(item, origin.x, y, mPaint);
+                        canvas.drawText(item, origin.x, y, mPaintStroker);
+                    } else {
+                        String newText = updateOSDTime(item, ts);
+                        mPaintStroker.getTextBounds(newText, 0, newText.length(), textBounds);
+                        y = origin.y - textBounds.height();
+                        canvas.drawText(newText, origin.x, y, mPaint);
+                        canvas.drawText(newText, origin.x, y, mPaintStroker);
+                    }
+                    osdItem.previousRect = new Rect(origin.x, y, origin.x + textBounds.width(), y + textBounds.height());
+                    mOSDItems.add(osdItem);
+                    origin.y -= (textBounds.height() * 5) >> 2;
+                }
             }
             if (!TextUtils.isEmpty(mOSDRightTop)) {
-                String osd = ((mTimeMask | TIME_MASK_RT) == 0) ? mOSDRightTop : updateOSDTime(mOSDRightTop, ts);
-                // mPaint.setTextAlign(Paint.Align.RIGHT);
-                Rect textBounds = new Rect();
-                mPaint.getTextBounds(osd, 0, osd.length(), textBounds);
-                float x = bmWidth - margin - textBounds.width();
-                int y = margin + statusHeight;
-                canvas.drawText(osd, x, y, mPaint);
-                canvas.drawText(osd, x, y, mPaintStroker);
+                String[] items = mOSDRightTop.split("\n");
+                Point origin = new Point(bmWidth - margin, margin + statusHeight);
+                for (String item : items) {
+                    int mask = 0;
+                    if (item.indexOf(TIME_MICRO_TS) != 0) {
+                        mask |= TIME_MASK_RT_TS;
+                    }
+                    if (item.indexOf(TIME_MICRO_DT) != 0) {
+                        mask |= TIME_MASK_RT_DT;
+                    }
+                    OSD_ITEM osdItem = new OSD_ITEM();
+                    osdItem.text = item;
+                    osdItem.origin = new Point(origin);
+                    osdItem.mask = mask;
+                    if (mask == 0) {
+                        mPaintStroker.getTextBounds(item, 0, item.length(), textBounds);
+                        canvas.drawText(item, origin.x - textBounds.width(), origin.y, mPaint);
+                        canvas.drawText(item, origin.x - textBounds.width(), origin.y, mPaintStroker);
+                    } else {
+                        String newText = updateOSDTime(item, ts);
+                        mPaintStroker.getTextBounds(newText, 0, item.length(), textBounds);
+                        canvas.drawText(newText, origin.x - textBounds.width(), origin.y, mPaint);
+                        canvas.drawText(newText, origin.x - textBounds.width(), origin.y, mPaintStroker);
+                    }
+                    osdItem.previousRect = new Rect(origin.x - textBounds.width(), origin.y, origin.x, origin.y + textBounds.height());
+                    mOSDItems.add(osdItem);
+                    origin.y += (textBounds.height() * 5) >> 2;
+                }
             }
             if (!TextUtils.isEmpty(mOSDRightBottom)) {
-                String osd = ((mTimeMask | TIME_MASK_RB) == 0) ? mOSDRightBottom : updateOSDTime(mOSDRightBottom, ts);
-                // mPaint.setTextAlign(Paint.Align.RIGHT);
-                Rect textBounds = new Rect();
-                mPaint.getTextBounds(osd, 0, osd.length(), textBounds);
-                float x = bmWidth - margin - textBounds.width();
-                float y = bmHeight - margin - textBounds.height();
-                canvas.drawText(osd, x, y, mPaint);
-                canvas.drawText(osd, x, y, mPaintStroker);
+                String[] items = mOSDRightBottom.split("\n");
+                Point origin = new Point(bmWidth - margin, bmHeight - margin);
+                for(int idx = items.length-1; idx >= 0; idx--) {
+                    int mask = 0;
+                    String item = items[idx];
+                    if (item.indexOf(TIME_MICRO_TS) != 0) {
+                        mask |= TIME_MASK_RB_TS;
+                    }
+                    if (item.indexOf(TIME_MICRO_DT) != 0) {
+                        mask |= TIME_MASK_RB_DT;
+                    }
+                    OSD_ITEM osdItem = new OSD_ITEM();
+                    osdItem.text = item;
+                    osdItem.origin = new Point(origin);
+                    osdItem.mask = mask;
+                    if (mask == 0) {
+                        mPaintStroker.getTextBounds(item, 0, item.length(), textBounds);
+                        canvas.drawText(item, origin.x - textBounds.width(), origin.y - textBounds.height(), mPaint);
+                        canvas.drawText(item, origin.x - textBounds.width(), origin.y - textBounds.height(), mPaintStroker);
+                    } else {
+                        String newText = updateOSDTime(item, ts);
+                        mPaintStroker.getTextBounds(newText, 0, item.length(), textBounds);
+                        canvas.drawText(newText, origin.x - textBounds.width(), origin.y - textBounds.height(), mPaint);
+                        canvas.drawText(newText, origin.x - textBounds.width(), origin.y - textBounds.height(), mPaintStroker);
+                    }
+                    osdItem.previousRect = new Rect(origin.x - textBounds.width(), origin.y - textBounds.height(), origin.x, origin.y);
+                    mOSDItems.add(osdItem);
+                    origin.y -= (textBounds.height() * 5) >> 2;
+                }
             }
         }
-/*
-        SurfaceHolder surfaceHolder = sampleGLView.getHolder();
-        Canvas surfaceCanvas = surfaceHolder.lockCanvas();
-        if (surfaceCanvas != null) {
-            surfaceCanvas.drawBitmap(mBitmap, 0, 0, null);
-            surfaceHolder.unlockCanvasAndPost(surfaceCanvas);
-        }
-*/
     }
+    private void updateOSD(long ts) {
+        if (mStatusBarHeight == -1) {
+            mStatusBarHeight = getStatusBarHeight(this);
+        }
+        int statusHeight = mStatusBarHeight;
+        synchronized (mBitmap) {
+            int bmWidth = mBitmap.getWidth();
+            int bmHeight = mBitmap.getHeight();
+            int margin = mOSDMargin;
+            Canvas canvas = new Canvas(mBitmap);
+            boolean aa = canvas.isHardwareAccelerated();
+            Rect textBounds = new Rect();
+            // mBitmap.eraseColor(Color.argb(0, 0, 0, 0));
+            // bitmap.eraseColor(Color.argb(0, 0, 0, 0));
+            canvas.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR);
+            for (OSD_ITEM osdItem : mOSDItems) {
+                String text = updateOSDTime(osdItem.text, ts);
+                int x = osdItem.previousRect.left;
+                int y = osdItem.previousRect.top;
+                if ((osdItem.mask & TIME_MASK_LT) != 0) {
+                    // canvas.drawRect(osdItem.previousRect, mEmptyPaint);
+                    mPaintStroker.getTextBounds(text, 0, text.length(), textBounds);
+                    Log.d("OSD", "UPD OSD x=" + x + " y=" + y);
+                    canvas.drawText(text, x, y, mPaint);
+                    canvas.drawText(text, x, y, mPaintStroker);
+                    osdItem.previousRect.set(x, y, x + textBounds.width(), y + textBounds.height());
+                } else if ((osdItem.mask & TIME_MASK_LB) != 0) {
+                    // canvas.drawRect(osdItem.previousRect, mEmptyPaint);
+                    mPaintStroker.getTextBounds(text, 0, text.length(), textBounds);
+                    y = osdItem.origin.y - textBounds.height();
+                    canvas.drawText(text, x, y, mPaint);
+                    canvas.drawText(text, x, y, mPaintStroker);
+                    osdItem.previousRect.set(x, y, x + textBounds.width(), y + textBounds.height());
+                } else if ((osdItem.mask & TIME_MASK_RT) != 0) {
+                    // canvas.drawRect(osdItem.previousRect, mEmptyPaint);
+                    mPaintStroker.getTextBounds(text, 0, text.length(), textBounds);
+                    x = osdItem.origin.x - textBounds.width();
+                    canvas.drawText(text, x, osdItem.origin.y, mPaint);
+                    canvas.drawText(text, x, osdItem.origin.y, mPaintStroker);
+                    osdItem.previousRect.set(x, osdItem.origin.y, x + textBounds.width(), osdItem.origin.y + textBounds.height());
+                } else if ((osdItem.mask & TIME_MASK_RB) != 0) {
+                    // canvas.drawRect(osdItem.previousRect, mEmptyPaint);
+                    mPaintStroker.getTextBounds(text, 0, text.length(), textBounds);
+                    x = osdItem.origin.x - textBounds.width();
+                    y = osdItem.origin.y - textBounds.height();
+                    canvas.drawText(text, x, y, mPaint);
+                    canvas.drawText(text, x, y, mPaintStroker);
+                    osdItem.previousRect.set(x, y, x + textBounds.width(), y + textBounds.height());
+                } else {
+                    canvas.drawText(text, x, y, mPaint);
+                    canvas.drawText(text, x, y, mPaintStroker);
+                }
+            }
+        }
+    }
     private String updateOSDTime(String osd, long ts) {
@@ -404,7 +596,6 @@
         }
     }
     private void setUpCameraView() {
         runOnUiThread(() -> {
@@ -480,7 +671,7 @@
         if (mTimeMask != 0) {
             long ts = System.currentTimeMillis();
             long ms = ts % 1000;
-            updateOSD(ts - ms);
+            initOSD(ts - ms);
             mHandler.postDelayed(mTimerRunnable, 1000 - ms);
         }

@@ -267,6 +267,16 @@
         app:layout_constraintTop_toTopOf="@+id/textViewCompensation"
         app:layout_constraintBottom_toBottomOf="@+id/textViewCompensation" />
+    <Switch
+        android:id="@+id/ldrEnabled"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content"
+        android:layout_marginStart="@dimen/activity_horizontal_margin"
+        android:text="@string/channel_cfg_ldr"
+        app:layout_constraintStart_toEndOf="@+id/compensation"
+        app:layout_constraintTop_toTopOf="@+id/compensation" />
+
     <EditText
         android:id="@+id/osdLeftTop"
         android:layout_width="match_parent"

@@ -47,6 +47,7 @@
     <string name="channel_cfg_usb_camera">USB Camera</string>
     <string name="channel_cfg_video_duration">短视频时长(秒)</string>
     <string name="channel_cfg_compensation">曝光补偿</string>
+    <string name="channel_cfg_ldr">光敏控制</string>
     <!-- TODO: Remove or change this placeholder text -->
     <string name="hello_blank_fragment">Hello blank fragment</string>
     <string name="record">Record</string>

@@ -3,6 +3,7 @@ package com.xypower.gpuv.egl.filter;
 import android.graphics.Bitmap;
 import android.graphics.Canvas;
 import android.graphics.Color;
+import android.graphics.PorterDuff;
 import android.opengl.GLES20;
 import android.opengl.GLUtils;
 import android.util.Size;
@@ -46,6 +47,7 @@ public abstract class GlOverlayFilter extends GlFilter {
     private void createBitmap() {
         releaseBitmap(bitmap);
         bitmap = Bitmap.createBitmap(inputResolution.getWidth(), inputResolution.getHeight(), Bitmap.Config.ARGB_8888);
+        bitmap.eraseColor(Color.argb(0, 0, 0, 0));
     }
     @Override
@@ -71,7 +73,7 @@
             createBitmap();
         }
-        bitmap.eraseColor(Color.argb(0, 0, 0, 0));
+        // bitmap.eraseColor(Color.argb(0, 0, 0, 0));
         Canvas bitmapCanvas = new Canvas(bitmap);
         bitmapCanvas.scale(1, -1, bitmapCanvas.getWidth() / 2, bitmapCanvas.getHeight() / 2);
         drawCanvas(bitmapCanvas);

@@ -2,7 +2,10 @@ package com.xypower.gpuv.egl.filter;
 import android.graphics.Bitmap;
 import android.graphics.Canvas;
+import android.graphics.Color;
+import android.graphics.PorterDuff;
 import android.graphics.Rect;
+import android.util.Log;
 public class GlWatermarkFilter extends GlOverlayFilter {
@@ -22,6 +25,8 @@
     @Override
     protected void drawCanvas(Canvas canvas) {
         synchronized (bitmap) {
+            Log.d("OSD", "drawCanvas");
+            canvas.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR);
             canvas.drawBitmap(bitmap, null, canvas.getClipBounds(), null);
         }

@@ -4,7 +4,7 @@ plugins {
 def AppMajorVersion = 1
 def AppMinorVersion = 0
-def AppBuildNumber = 33
+def AppBuildNumber = 34
 def AppVersionName = AppMajorVersion + "." + AppMinorVersion + "." + AppBuildNumber
 def AppVersionCode = AppMajorVersion * 100000 + AppMinorVersion * 1000 + AppBuildNumber

@@ -353,12 +353,14 @@ public class MpMasterService extends Service {
                 return;
             }
-            File mpappDb = new File(appPath + "data/App.db");
-            long modifiedTimeOfDb = 0;
-            if (mpappDb.exists()) {
-                modifiedTimeOfDb = mpappDb.lastModified();
-            }
-            if ((ts - modifiedTimeOfDb) > mTimeOfMpAppAlive) {
+            File mpappHb = new File(appPath + "data/alive/hb");
+            long modifiedTimeOfHb = getFileModificationTime(appPath + "data/alive/hb");
+            long modifiedTimeOfPhoto = getFileModificationTime(appPath + "data/alive/taking");
+            long modifiedTimeOfUpload = getFileModificationTime(appPath + "data/alive/upload");
+            if (((ts - modifiedTimeOfHb) > mTimeOfMpAppAlive) ||
+                    ((ts - modifiedTimeOfPhoto) > mTimeOfMpAppAlive * 4) ||
+                    ((ts - modifiedTimeOfUpload) > mTimeOfMpAppAlive * 4)) {
                 // greater than 30m
                 logger.warning("Start MpAPP as it is NOT running");
                 MicroPhotoContext.restartMpApp(context);
@@ -369,6 +371,16 @@
         }
     }
+    long getFileModificationTime(String path) {
+        File file = new File(path);
+        long mt = 0;
+        if (file.exists()) {
+            mt = file.lastModified();
+        }
+        return mt;
+    }
+
     public String getMpAppVersion() {
         if (TextUtils.isEmpty(mMpAppVersion)) {
             PackageManager packageManager = getPackageManager();
