Compare commits

...

1235 Commits

Author SHA1 Message Date
Matthew dcd1d20a75 先移除fps的设置 21 hours ago
Matthew f9b79e67fd Update version to 1.3.196
Based Core Version to 1.4.106
5 days ago
Matthew 024d7f73b8 Update MpMaster version to 1.1.36 5 days ago
Matthew 476fa0ab7c 优化代码 5 days ago
Matthew 9ea9043a0e 优化代码 5 days ago
Matthew 8acd884797 改成常量 5 days ago
Matthew bbcd2691ed 增加flag 5 days ago
Matthew c65ffab9dd 加注释 5 days ago
Matthew 99d08c30ef 增加异常捕获 5 days ago
Matthew a38e3801ae 对于旧版本,直接注册重启设备的延迟广播来保活 5 days ago
Matthew a9ac0743f7 优化服务的返回值 5 days ago
Matthew a7c4356d93 移除不需要的代码和功能 5 days ago
Matthew 2baf96fe67 移除不需要的代码和功能 5 days ago
Matthew 913747e6c3 Update version to 1.3.195
Based Core Version to 1.4.105
6 days ago
Matthew 0b1aa8a13a Update version to 1.3.194
Based Core Version to 1.4.104
6 days ago
Matthew b5aec7ae17 增加清除缓存的拍照计划功能 6 days ago
Matthew 6f3cae9d02 根据新的接口调整实现 7 days ago
Matthew 7a67a142cb Merge branch 'main' of http://61.169.135.146:41230/xymp/TermApp 7 days ago
XI.CHEN 0a6850d92b 调整名字 7 days ago
Matthew 42dab55bc4 Merge branch 'main' of http://61.169.135.146:41230/xymp/TermApp 1 week ago
Matthew 662d3307b0 Update version to 1.3.193
Based Core Version to 1.4.103
1 week ago
XI.CHEN 2b9c1feae5 增加查询当日流量 1 week ago
Matthew aa04fca845 Update version to 1.3.192
Based Core Version to 1.4.101
1 week ago
Matthew a72118a5a6 优化回车换行的处理 1 week ago
Matthew d50b21d0d6 Update version to 1.3.191
Based Core Version to 1.4.101
1 week ago
Matthew 6255fa932c 增加最小fps控制 1 week ago
Matthew 8aec5f30ef 处理资源未释放的问题 2 weeks ago
Matthew c23f30ce71 增加山西智洋主站的规约实现 2 weeks ago
Matthew 118ffdc638 Update version to 1.3.190
Based Core Version to 1.4.100
2 weeks ago
Matthew 058e4b2f46 增加几个报文的手动触发 2 weeks ago
Matthew 48d61baf6f 调整配置目录结构 2 weeks ago
Matthew 9ff07b8c12 Update version to 1.3.189
Based Core Version to 1.4.99
2 weeks ago
Matthew 69958ba1d2 对于需要曝光补偿的场景,设置更明亮模式 2 weeks ago
Matthew 3979e03882 预览request上设置算法 2 weeks ago
Matthew b96fe6bbfd Update version to 1.3.188
Based Core Version to 1.4.98
3 weeks ago
Matthew 94edf1324f Update version to 1.3.187
Based Core Version to 1.4.97
3 weeks ago
Matthew 18efb1e439 错误处理 3 weeks ago
Matthew b2a1a5cde3 Update MpRes version to 1.0.14 3 weeks ago
Matthew cdf664b448 更新配置 3 weeks ago
Matthew e549126563 Update MpRes version to 1.0.13 3 weeks ago
Matthew 1322ddd8b2 更改默认配置 3 weeks ago
Matthew 139a6a5fde Update MpMaster version to 1.1.35 3 weeks ago
Matthew 9721eced21 网络信号信息优化 3 weeks ago
Matthew b425713f63 Update version to 1.3.186
Based Core Version to 1.4.96
3 weeks ago
Matthew 7e3ff1b741 Update version to 1.3.185
Based Core Version to 1.4.95
3 weeks ago
Matthew 284f61a86e Update version to 1.3.184
Based Core Version to 1.4.95
3 weeks ago
Matthew aa4ca70d07 优化脚本和日志 3 weeks ago
Matthew 6e1655813d Update version to 1.3.183
Based Core Version to 1.4.94
3 weeks ago
Matthew 8468978f19 以太网启用之后,多等一段时间,避免未生效 3 weeks ago
Matthew ce2b36422c Update version to 1.3.182
Based Core Version to 1.4.94
3 weeks ago
Matthew 38bfd44bcd 优化脚本,避免938下面ip丢失问题
更好的方法是固件优化
3 weeks ago
Matthew 5ac7838839 优化脚本,网卡启用之后,先等待1s 3 weeks ago
Matthew 86b7e63849 Update version to 1.3.181
Based Core Version to 1.4.94
3 weeks ago
Matthew adb24119ef 优化以太网网络设置 3 weeks ago
Matthew 436058fefa Update version to 1.3.180
Based Core Version to 1.4.94
3 weeks ago
Matthew 7a6497ee40 调整函数的使用 3 weeks ago
Matthew 03110ac486 优化以太网处理 3 weeks ago
Matthew 8612c66ed3 优化脚本 3 weeks ago
Matthew 60989b5c40 优化信号获取的实现 4 weeks ago
Matthew 4df8d72c42 调整移动信号的获取 4 weeks ago
Matthew f3e5cda1da Update version to 1.3.179
Based Core Version to 1.4.93
4 weeks ago
Matthew 6d68a33224 优化脚本 4 weeks ago
Matthew 9d0f93b6ff Merge branch 'main' of http://61.169.135.146:41230/xymp/TermApp 4 weeks ago
Matthew 789547b503 调整以太网络的设置 4 weeks ago
Matthew 514fc054a7 增加函数 4 weeks ago
XI.CHEN afc69b9dff Merge branch 'main' of http://61.169.135.146:41230/xymp/TermApp 4 weeks ago
XI.CHEN d29fb0280c 修复微拍网络拍照显示网络不存在问题 4 weeks ago
Matthew eca28cd6f7 Update version to 1.3.178
Based Core Version to 1.4.93
4 weeks ago
Matthew 9efab780cf Merge branch 'main' of http://61.169.135.146:41230/xymp/TermApp 4 weeks ago
Matthew c60587661b 修复bug 4 weeks ago
liuguijing 1b40738b71 修复微拍副机拍照缺少路由的bug 4 weeks ago
Matthew 8bff5f0741 获取信号强度并存储 4 weeks ago
Matthew 3c58e63c6a Update MpRes version to 1.0.12 4 weeks ago
Matthew 450556375e 根据全景平台调整packet size 4 weeks ago
Matthew 4cb359d0fd 增加河南统一 4 weeks ago
Matthew a39614c581 调整规约
65290名称改成河南全景 65285增加为河南统一
4 weeks ago
XI.CHEN 8c702f86c6 Merge branch 'main' of http://61.169.135.146:41230/xymp/TermApp 4 weeks ago
XI.CHEN 81d4cd700c vendorctrl保护 4 weeks ago
Matthew 15873ca510 Update version to 1.3.177
Based Core Version to 1.4.92
4 weeks ago
Matthew f06a7dfcff Update MpMaster version to 1.1.34 4 weeks ago
XI.CHEN 1fbb9ffe51 Revert "网络拍照在子类构造函数中调用updatetime"
This reverts commit 1772027e9e.
4 weeks ago
XI.CHEN 1772027e9e 网络拍照在子类构造函数中调用updatetime 4 weeks ago
XI.CHEN 8ff8a4bb1e 移除updatetime,云台视频流使用云台线程控制 4 weeks ago
XI.CHEN c071d8ec71 云台短视频录制 4 weeks ago
XI.CHEN e47acced9b 修复编译错误 4 weeks ago
XI.CHEN 4bc9005ebb 云台拍摄短视频、推流网络处理 4 weeks ago
XI.CHEN 5eb8e630c6 云台问题修复 4 weeks ago
Matthew 02e66332ea 优先用大写的名字 4 weeks ago
XI.CHEN bd3451f47c Merge branch 'main' of http://61.169.135.146:41230/xymp/TermApp 4 weeks ago
XI.CHEN 812688ae68 云台ip地址设置问题 4 weeks ago
Matthew 5de3b6e73e Update version to 1.3.176
Based Core Version to 1.4.92
4 weeks ago
Matthew c34a649fa5 允许和运维同时启动mpres。并增加异常处理 4 weeks ago
Matthew cf557a599a Update MpMaster version to 1.1.33 4 weeks ago
Matthew fd15553e70 通过MpRes来初始化配置 4 weeks ago
Matthew deb8b06820 Update MpRes version to 1.0.11 4 weeks ago
Matthew 740dc120f6 增加异常处理 4 weeks ago
Matthew f66955ba99 延迟操作 4 weeks ago
Matthew fed87bc421 主动退出 4 weeks ago
Matthew 3bec3c924b 加日志 4 weeks ago
Matthew 0d97a04da0 MpApp和MpMaster各自初始化 4 weeks ago
Matthew 0a88e9b918 初始化MpAPP和MpMaster 4 weeks ago
Matthew e5fd96efce 修复路径错误 4 weeks ago
Matthew 621bbb9c78 Update MpMaster version to 1.1.32 4 weeks ago
Matthew c7351b8000 改为2分钟之后再发送广播 4 weeks ago
Matthew d27321974f 显式指定参数值 4 weeks ago
Matthew 75864122c5 增加日志 4 weeks ago
Matthew 4eb14e755c 增加一种保活机制
心跳时设置一个系统服务延迟启动运维app的闹钟,并在下一次心跳时重置闹钟时间。
4 weeks ago
Matthew a22f41d537 增加日志 4 weeks ago
Matthew a58544eab1 Update version to 1.3.175
Based Core Version to 1.4.92
1 month ago
Matthew ebf8312f6f 移除调试日志 1 month ago
Matthew 46ec0a4f39 修复电压获取的错误处理 1 month ago
Matthew 189ebe3f41 Update version to 1.3.174
Based Core Version to 1.4.91
1 month ago
Matthew ba42b72744 优化938网络的处理 1 month ago
Matthew 92ce03d2e8 优化代码 1 month ago
Matthew 553bae38d9 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 1 month ago
Matthew d3b493efbc 编译条件写反 1 month ago
Matthew 90df33d1f7 调试时全部打印 1 month ago
liuguijing 38edbefcda 修复短信带su的时候,部分命令不执行的bug,只要短信中出现su,就切换到su模式 1 month ago
liuguijing 707aa9bd94 修复短信带su的时候,部分命令不执行的bug 1 month ago
Matthew 6239916828 Update version to 1.3.173
Based Core Version to 1.4.91
1 month ago
Matthew 3f4531ecd6 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 1 month ago
liuguijing 5945fc1d1c 修复短信带su的时候,部分命令不执行的bug 1 month ago
Matthew 0f54876da8 通过命令行的方式重启系统 1 month ago
Matthew f433903745 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 1 month ago
Matthew f216640381 统一日志函数,方便问题排查 1 month ago
Matthew afae4794d3 优化网络电控 1 month ago
Matthew d96f337162 优化权限 1 month ago
liuguijing ba500120b4 修改运维删除文件时 文件夹不删除的问题 1 month ago
Matthew 6b8e477ed1 Update version to 1.3.172
Based Core Version to 1.4.90
1 month ago
Matthew f7ae9d42d2 Update MpRes version to 1.0.10 1 month ago
Matthew 31d65ea586 使用SN作为CMDID 1 month ago
Matthew 911a39a22f 把序列号传给MpRes 1 month ago
Matthew 1f07b07004 Update version to 1.3.171
Based Core Version to 1.4.90
1 month ago
Matthew 6bc8849d3c N938需要设置ip和路由 1 month ago
Matthew c67bebed88 优化云台相关的代码 1 month ago
Matthew f7912c8882 Update MpRes version to 1.0.9 1 month ago
Matthew 35b31577b2 更新拍照时间表 1 month ago
Matthew f8e627abc5 Update version to 1.3.170
Based Core Version to 1.4.90
1 month ago
Matthew 92412cf7d7 修复编译错误 1 month ago
Matthew 4222c6ff8a Update MpRes version to 1.0.8 1 month ago
Matthew 2bba3867e9 更新默认配置 1 month ago
Matthew 5df43d28d6 Update MpRes version to 1.0.7
HeNan 2024
1 month ago
Matthew 724db375ca Update MpMaster version to 1.1.31 1 month ago
Matthew 2b416c3f0d Update version to 1.3.169
Based Core Version to 1.4.90
1 month ago
Matthew 713f69048f Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 1 month ago
Matthew c85a5b9371 增加新的重启方式 1 month ago
Matthew 031dc97548 增加新的重启方式 1 month ago
liuguijing 456268c682 默认切换到sh,收到su的时候切换到su 1 month ago
Matthew cc55b5aa4d Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 1 month ago
liuguijing 7558b79c5e 删除之前测试的日志
删除MpMaster中的读取电压的代码
1 month ago
liuguijing 0d2090153f 修复短信adb命令执行的bug 1 month ago
liuguijing 024cca3104 测试getInt锁住的问题 1 month ago
Matthew be9f65d3a3 Update version to 1.3.168
Based Core Version to 1.4.89
1 month ago
Matthew 792a694f66 移除这个调用,直接关电,避免系统卡死 1 month ago
Matthew b916e63a00 修复命令错误以及优化判断规则 1 month ago
Matthew eb5af62ad3 换一种命令 1 month ago
Matthew 67479e0acc Update version to 1.3.167
Based Core Version to 1.4.89
1 month ago
Matthew 05b9a7a5e5 网络可用时直接设置路由 1 month ago
Matthew c1a06203e3 Update version to 1.3.166
Based Core Version to 1.4.89
1 month ago
Matthew 02fc11fc22 修正错误的判断条件
Based Core Version to 1.4.89
1 month ago
Matthew 026bd1a022 Update version to 1.3.165
Based Core Version to 1.4.89
1 month ago
Matthew 200131ec67 当网络异常时,通过直接关电来关闭网络 1 month ago
Matthew 0b08c08d81 Update version to 1.3.164
Based Core Version to 1.4.89
1 month ago
Matthew 75f0fdc73c 增加日志 1 month ago
Matthew a7f4866ac8 Update version to 1.3.163
Based Core Version to 1.4.89
1 month ago
Matthew bc823212aa Update MpMaster version to 1.1.30 1 month ago
Matthew 7e23d09814 修复运维修改配置文件的bug 1 month ago
Matthew 5e869d0f92 优化下载参数 1 month ago
Matthew c6dbc278da 调整下载参数 1 month ago
Matthew a248c9e297 调整下载参数 1 month ago
Matthew 185ed0abff 换一种实现 1 month ago
Matthew 3a0ae0ab75 电压调整精度 1 month ago
Matthew 69bc6dced9 Update version to 1.3.162
Based Core Version to 1.4.88
1 month ago
Matthew 6e1625b58d 优化日志 1 month ago
Matthew 115a22dd47 Update version to 1.3.161
Based Core Version to 1.4.87
1 month ago
Matthew c104f26f7d 优化网络设置 1 month ago
Matthew d36d0d916d 优化网络实现 1 month ago
Matthew 0c57bca352 修复网络问题 1 month ago
Matthew 8abeaf9e04 Update MpRes version to 1.0.6 1 month ago
Matthew f0b82e235e Update version to 1.3.160
Based Core Version to 1.4.87
1 month ago
Matthew 3db1ae1c08 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 1 month ago
Matthew 43148b5962 更新充电电压和电量的的对应关系 1 month ago
Matthew a58e86b519 以太网启动之后主动设置ip和路由 1 month ago
XI.CHEN a6bfbdcea2 修复宇视url错误 1 month ago
Matthew 752b5fe318 Update version to 1.3.159
Based Core Version to 1.4.86
1 month ago
Matthew 4ca40f0424 修复URL拼接错误 1 month ago
Matthew d75ad0949e Update version to 1.3.158
Based Core Version to 1.4.86
1 month ago
Matthew e3388d77c3 Update MpMaster version to 1.1.29 1 month ago
Matthew 4067f2d6ed 优化重启的实现 1 month ago
Matthew aeb8e49b81 调整电池电压信息的获取方式 1 month ago
Matthew 3bc6775f84 调整运维获取电压的方式 1 month ago
Matthew dc17a15210 调整电池电压接口和实现 1 month ago
Matthew e4e174a083 调整gpio的处理方式 1 month ago
Matthew 44d780a0ce Update MpRes version to 1.0.5 1 month ago
Matthew 0097f11b69 移除调试代码 1 month ago
Matthew b533cd645f Update version to 1.3.157
Based Core Version to 1.4.85
1 month ago
Matthew e0063691f0 Update MpRes version to 1.0.4 1 month ago
Matthew 31039c213d 更换删除目录的方法 1 month ago
Matthew 86f22853ca 更换删除目录的方法 1 month ago
Matthew b3024a934b 增加删除目录的方法 1 month ago
Matthew b1b2b0d550 Update version to 1.3.156
Based Core Version to 1.4.85
1 month ago
Matthew 0c1143d0d0 修复编译错误 1 month ago
Matthew 362a23f58d Update MpMaster version to 1.1.28 1 month ago
Matthew ce762add59 Update MpRes version to 1.0.3 1 month ago
Matthew 8e44768f7e 更新配置 1 month ago
Matthew fc7fc294eb Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 1 month ago
Matthew ee0a2b632e 增加权限 1 month ago
Matthew d94124aa3c 增加权限 1 month ago
Matthew 22b81f8c46 优化重启运维的实现 1 month ago
Matthew 2811c694b7 增加函数
获取进程id
1 month ago
Matthew 06f38f3ff0 优化杀进程的方式 1 month ago
liuguijing f8ccfbba76 增量更新功能优化,增加安装成功之后,删除老的apk包和增量更新包。只保留最新的安装包 1 month ago
liuguijing 96b07fa223 增量更新功能优化 1 month ago
liuguijing ae6d49ed99 增量更新功能优化 1 month ago
Matthew 5d57a099cd Update version to 1.3.155
Based Core Version to 1.4.83
1 month ago
Matthew c8f34e9319 触发心跳动作 1 month ago
Matthew e01b32e166 Update MpMaster version to 1.1.27 1 month ago
Matthew db2ff686c6 Update version to 1.3.154
Based Core Version to 1.4.83
1 month ago
Matthew e022748f71 实现一个闹钟覆盖心跳、拍照、采样 1 month ago
Matthew c6405cf287 增加常量定义 1 month ago
Matthew 38fe69d93b 增加对拍照闹钟的处理(其中包含心跳) 1 month ago
Matthew e75fe1648c 优化日志 1 month ago
Matthew ab438f68b8 如果getInt正常,则删除日志文件 1 month ago
Matthew ba37692f73 重启设备前刷新日志内容进flash 1 month ago
Matthew 97c8cc3038 如果http访问是超时错误,则减少重试次数
避免耗电太高
1 month ago
Matthew 5872857f8d Update MpRes version to 1.0.2 1 month ago
Matthew 9c7be4fe96 Update MpMaster version to 1.1.26 1 month ago
Matthew 4e5b1b7b2a Update version to 1.3.153
Based Core Version to 1.4.82
1 month ago
Matthew cbf60b12f6 减小package size 1 month ago
Matthew 9a88a2dcd2 优化package size 1 month ago
Matthew 00a8de5e54 优化package size 1 month ago
Matthew 1fcc231ac1 Update MpRes version to 1.0.1 2 months ago
Matthew 667ebb60c8 Update version to 1.3.152
Based Core Version to 1.4.82
2 months ago
Matthew dcf3dffe02 Update MpMaster version to 1.1.25 2 months ago
Matthew 20c5eebd20 Update version to 1.3.151
Based Core Version to 1.4.81
2 months ago
Matthew eeabd15b87 优化日志 2 months ago
Matthew d930f65393 优化 2 months ago
Matthew 3974564746 优化代码 2 months ago
Matthew 3c5f49a563 移除不必要的依赖,降低apk size 2 months ago
Matthew b1b70395c7 消除警告 2 months ago
Matthew 21aac05206 调整名字 2 months ago
Matthew e2c4e51550 启动时如果需要初始化mpapp配置,则从assets中拷贝到特定目录 2 months ago
Matthew 75a7c17d62 增加mpres相关的定义和函数 2 months ago
Matthew d9b75e2c2f 如果缺少配置文件,调用mpres app初始化 2 months ago
Matthew f90cbd0446 移除MpAPP的配置文件 2 months ago
Matthew a90617698b 移除MpApp的配置初始化 2 months ago
Matthew 738f4f230a 加回5V5的gpio值备用 2 months ago
Matthew 55bda9770e 配置文件转到新app中 2 months ago
liuguijing e580737761 运维新增 下载增量更新功能 2 months ago
Matthew c0e0b3874f Update version to 1.3.150
Based Core Version to 1.4.81
2 months ago
Matthew 31391c2bab Refactor 2 months ago
Matthew 025c9fb07f Update version to 1.3.149
Based Core Version to 1.4.80
2 months ago
Matthew 07f7744ad8 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 2 months ago
Matthew 58d8c614ab 优化网络访问代码 2 months ago
Matthew 82e8212027 移除绑定网络的代码
通过路由来控制
2 months ago
Matthew 32a1fbbeb1 网络会不定时变化,增加更新网络handle函数 2 months ago
liuguijing 2dd6cb8f31 运维新增增量更新 完善增量更新时 计算差分代码 2 months ago
liuguijing 0d8bb73f17 运维新增增量更新 调试可用 待完善 2 months ago
XI.CHEN 89b263e4de Merge branch 'main' of http://dev.xinyingpower.com:8081/git/xymp/TermApp 2 months ago
Matthew 2d600c18e4 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 2 months ago
XI.CHEN 424bd6ee88 Merge branch 'main' of http://dev.xinyingpower.com:8081/git/xymp/TermApp 2 months ago
XI.CHEN 987d2e8053 Merge branch 'main' of http://dev.xinyingpower.com:8081/git/xymp/TermApp 2 months ago
Matthew 387673bf99 修复编译错误 2 months ago
liuguijing 8bb1c53bc2 Merge remote-tracking branch 'origin/main' 2 months ago
Matthew 7ae8505809 Update version to 1.3.148
Based Core Version to 1.4.80
2 months ago
Matthew d2881112f2 修复编译错误 2 months ago
liuguijing 40d36e1577 新增系统心跳调试的日志 2 months ago
Matthew 47ff714ae5 Update version to 1.3.147
Based Core Version to 1.4.80
2 months ago
Matthew f8784c5b26 Update version to 1.3.146
Based Core Version to 1.4.80
2 months ago
Matthew ee492d0e60 修复编译错误 2 months ago
Matthew 8fd275a6e7 增加日志 2 months ago
Matthew a9fd536a8a Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 2 months ago
Matthew 142fca3dd0 实现海康接口 2 months ago
Matthew ed35d74c94 修复编译错误 2 months ago
liuguijing cd520ebefe 新增测试HeartBeat的广播 2 months ago
liuguijing 9ff79bbe1b 新增测试HeartBeat的广播 2 months ago
Matthew 6598b45d3e 修复编译错误 2 months ago
Matthew 62de685e4e Update version to 1.3.145
Based Core Version to 1.4.80
2 months ago
Matthew a0c4651e97 集成bspatch 2 months ago
Matthew 883e9f94e4 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 2 months ago
Matthew 44a5a32741 Update version to 1.3.144
Based Core Version to 1.4.79
2 months ago
jxjajs 31e33b065b 生成准确读取云台位置指令使用独立函数实现 2 months ago
Matthew 5a8be5883b 调整gpio监控 2 months ago
Matthew b734a9cc2a 优化文件名 2 months ago
Matthew be7b992bdd Update version to 1.3.143
Based Core Version to 1.4.78
2 months ago
Matthew 2b06803252 重启两个api都调用作双重保护 2 months ago
Matthew 396d20ea3f 移除HDRPlus相关的代码 2 months ago
Matthew fde6514d16 Update version to 1.3.142
Based Core Version to 1.4.77
2 months ago
Matthew f6d3ccbfa1 移除日志 2 months ago
Matthew 754d3bebb7 移除解决粉色的临时补丁 2 months ago
Matthew a04db636c6 Update version to 1.3.141
Based Core Version to 1.4.76
2 months ago
Matthew 6e379548e6 优化航煜拍照实现
如果照片太大出错,则降低图像质量后重试
2 months ago
Matthew 38a8498dbc 优化网络打开时间 2 months ago
Matthew 5dae67b03b 网络延迟打开 2 months ago
Matthew b821156ca1 Update MpMaster version to 1.1.24 2 months ago
Matthew 870cffab5c Update version to 1.3.140
Based Core Version to 1.4.76
2 months ago
Matthew 55c3776ac8 通过附件编译时间来选择不同的判断运维程序是否运行的方法 2 months ago
Matthew 126c1f5e1b 通过附件编译时间来选择不同的判断程序是否运行的方法 2 months ago
liuguijing 28be0c4c80 Update MpMaster version to 1.1.23 2 months ago
liuguijing 8c3103eef6 运维的MCU重启命令修改 2 months ago
liuguijing e59f6fbbea 运维修改重启方式 2 months ago
Matthew b216ad7546 Update version to 1.3.139
Based Core Version to 1.4.76
2 months ago
Matthew c3e4ef0847 微拍暂时不重置电源 2 months ago
Matthew 101d2b945a 微拍移除重置电源的代码 2 months ago
Matthew 7ea3b47abb Update version to 1.3.138
Based Core Version to 1.4.76
2 months ago
Matthew cf6c5c8158 网络设备的osd也临时关掉 2 months ago
Matthew b9fe6ce048 优化网络摄像头供应商的实现
移除航煜对时的实现
2 months ago
Matthew 138040baa8 增加算法的实现 2 months ago
Matthew e21e409712 实现3DNR 2 months ago
Matthew b3f3703c74 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 2 months ago
Matthew f445f5f6c8 Update version to 1.3.137
Based Core Version to 1.4.74
2 months ago
XI.CHEN cafa30c728 暂时去除航煜视频除时间外水印 2 months ago
XI.CHEN 167878de1a 暂时去除航煜视频除时间外水印 2 months ago
XI.CHEN c51fb6924b Merge branch 'main' of http://dev.xinyingpower.com:8081/git/xymp/TermApp 2 months ago
XI.CHEN ec6a27df28 航煜OSD和分辨率设置 2 months ago
Matthew 11e5b98a90 充电电压如果小于3V,就直接设置为0 2 months ago
Matthew d4c5986943 Update version to 1.3.136
Based Core Version to 1.4.74
2 months ago
Matthew 5a91bd8dcb 移除调试的功能 2 months ago
Matthew 4c42820cea 移除调试的功能 2 months ago
Matthew db78ce7728 优化http请求错误的处理 2 months ago
Matthew b5a188a5b4 Update version to 1.3.135
Based Core Version to 1.4.74
2 months ago
Matthew 90465c4ac1 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 2 months ago
Matthew 7ef6086f50 优化前台服务实现 2 months ago
Matthew 5151f9291a 优化前台服务实现 2 months ago
Matthew 7037eab930 调整id防止冲突 2 months ago
XI.CHEN 2d67c94bc9 航煜视频水印根据时间是否设置改变 2 months ago
Matthew 39b94cee90 Update version to 1.3.134
Based Core Version to 1.4.72
2 months ago
Matthew 801a9e98ba 静态抓拍强制改为预览的范围从150开始 2 months ago
Matthew 868fbebb6b Update version to 1.3.133
Based Core Version to 1.4.72
2 months ago
Matthew 1806c36148 增加更多拍照日志 2 months ago
Matthew f2f29482a0 输出照片日志信息 2 months ago
Matthew a52255e9d9 增加日志 2 months ago
Matthew 2cc92025fc Update MpMaster version to 1.1.22 2 months ago
Matthew 744057c407 Update version to 1.3.132
Based Core Version to 1.4.72
2 months ago
Matthew d23116aa57 增加长时间无心跳重启app的机制,并优化运维日志 2 months ago
Matthew 3fab6d4542 保活时判断service是否存在 2 months ago
Matthew 21c789155c 优化日志 2 months ago
Matthew 11dbea7170 Update MpMaster version to 1.1.21 2 months ago
Matthew 17ae352c31 更新默认配置 2 months ago
liuguijing 31c0d81e13 Update version to 1.3.131 2 months ago
liuguijing 18ecc420ed mpapp:修复mpapp摄像头拍视频时视角变小的bug 2 months ago
liuguijing b58969cc5e Update version to 1.3.130 2 months ago
liuguijing 9bb9b6848c 修改短视频预览摄像头不正的bug 2 months ago
liuguijing 7a557ac8fd 修改短视频预览摄像头不正的bug 2 months ago
Matthew e0a23a6c3b Update version to 1.3.129
Based Core Version to 1.4.72
2 months ago
liuguijing 2d0fe6609b 为修复宁夏短视频拍照翻转的bug 临时修改方法 2 months ago
Matthew 7e57c3cc08 Update version to 1.3.128
Based Core Version to 1.4.71
2 months ago
Matthew 0fc7ff436e 统一GPIO
N938:
#define CMD_SET_3V3_PWR_EN                      360  // 改为132

云台:
#define CMD_SET_485_ENABLE                      512 // 改成131
#define CMD_SET_3V3_PWR_EN                      516 // 改成 132

固件从20250419的版本开始
2 months ago
Matthew 2009c6d472 调整hdr的代码 2 months ago
Matthew 24333d1882 优化逻辑 2 months ago
Matthew 04abdbeb61 Update version to 1.3.127
Based Core Version to 1.4.71
2 months ago
Matthew 4106ce8cb3 优化拍照时间的注册处理,避免始时钟丢失校时成功之后的错误 2 months ago
Matthew 7ad52b042c 增加HDR的支持 2 months ago
Matthew 6925da78b1 Update version to 1.3.126
Based Core Version to 1.4.71
2 months ago
Matthew 9f00a81b8e Update MpMaster version to 1.1.20 2 months ago
Matthew 1c8372d5a2 优化调试信息的实现 2 months ago
Matthew 1782269938 Update version to 1.3.125
Based Core Version to 1.4.71
2 months ago
Matthew 8398ae8246 增加时间发生变化后的更新处理 2 months ago
Matthew 49aff958d0 移除无用代码 2 months ago
Matthew b6932b116e 移除文件锁的实现 2 months ago
Matthew 4976337878 利用共享uid的方式来判断mpapp是否正在运行
加入时间变化监控的一些定义
2 months ago
Matthew 897b53e44e 利用共享uid的方式来判断运维app是否正在运行
加入时间变化监控的一些定义
2 months ago
Matthew 621cf4f3ba Update version to 1.3.124
Based Core Version to 1.4.70
2 months ago
Matthew 476ebbd5e0 强制走预览模式的范围改成200-400 2 months ago
Matthew c54cecacf0 设置共享uid 2 months ago
Matthew 9fedc2532c Update MpMaster version to 1.1.19 2 months ago
Matthew 8e9597eaf4 Update version to 1.3.123
Based Core Version to 1.4.70
2 months ago
Matthew b94039cbd6 设置共享uid 2 months ago
Matthew 6d2f4c7535 设置共享uid 2 months ago
Matthew 252e2575f5 调整回预览+静态抓拍模式
针对iso为200-300之间的粉色问题,做了补丁,强制走预览模式,临时绕过这个问题
2 months ago
Matthew 6587298604 Update version to 1.3.122
Based Core Version to 1.4.70
2 months ago
Matthew eb96c9c6c7 实现预览到静态抓拍的主动切换 2 months ago
Matthew d1d34ebcfc Update version to 1.3.121
Based Core Version to 1.4.70
2 months ago
Matthew 3a0a8b71a0 重启设备前记录日志 2 months ago
Matthew 030ca9c7d7 修复拍照设置的bug 2 months ago
Matthew a9535f2df7 增加对AIS的支持 2 months ago
Matthew 1ba1b9d56a Update version to 1.3.120
Based Core Version to 1.4.70
2 months ago
Matthew 7a51b96100 调整拍照的实现,改成一个request实现测光和拍照 2 months ago
Matthew 8a7d571e0e Update version to 1.3.119
Based Core Version to 1.4.70
2 months ago
Matthew 2336758523 Update version to 1.3.118
Based Core Version to 1.4.69
2 months ago
Matthew 95213a4ecd 网络摄像头实现自检等待 2 months ago
Matthew 8f49d82dec 优化日志 2 months ago
Matthew 0170e21b1d 优化网络重试次数和日志 2 months ago
Matthew 94ca4f77a4 Update version to 1.3.117
Based Core Version to 1.4.69
2 months ago
jxjajs 2af459d57a 解决查询云台位置命令不下发BUg 2 months ago
jxjajs 1f3943b34b 增加log,调试 2 months ago
Matthew ec5799f167 Update version to 1.3.116
Based Core Version to 1.4.69
2 months ago
Matthew 9f23994521 预览和拍照的模板改成同一个 2 months ago
Matthew e3aa7d04eb 移除ISP的设置
参考系统相机
2 months ago
Matthew 5049677133 Update version to 1.3.115
Based Core Version to 1.4.68
2 months ago
Matthew eb5aa8396a 移除全局变量的设置 2 months ago
Matthew d35bf3d71c 优化代码 2 months ago
Matthew 17e2ae7fd9 使用mtk内置扩展MFNR算法 2 months ago
Matthew 8108bf4c7c 修复系统相机bug 2 months ago
Matthew 996cb9109f Update version to 1.3.114
Based Core Version to 1.4.68
2 months ago
Matthew 164b14697c 优化返回值 2 months ago
XI.CHEN 74f42bcfd3 增加网络拍照失败时的gpio日志信息 2 months ago
jxjajs 3ea1fbde25 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 2 months ago
jxjajs 2c53f2fc0a 增加云台200号预置点临时一次性自检处理
1、单独调用,直接置云台为自检状态;
2、拍照带200号预置点,等待调预置点时间使用设置的自检时间。
2 months ago
Matthew a74db09f67 Update version to 1.3.113
Based Core Version to 1.4.67
2 months ago
Matthew ae213c7a9b 增加权限 2 months ago
Matthew 13cb6722f3 使用mtk扩展的多帧降噪 2 months ago
Matthew 820158e650 breakpad的集成准备 2 months ago
Matthew 78cacaeba1 增加网络协议的处理 2 months ago
Matthew 40e9533d31 Update version to 1.3.112
Based Core Version to 1.4.66
2 months ago
jxjajs 357c58112f Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 2 months ago
jxjajs 4afdbdae62 修改Bug,手动拍照过程中,收到关机指令,拍完照不关机 2 months ago
Matthew 79dc4e8ec7 Update version to 1.3.111
Based Core Version to 1.4.65
2 months ago
Matthew 6388002df2 优化网络拍照逻辑 2 months ago
Matthew ebe52ca252 Update version to 1.3.110
Based Core Version to 1.4.65
2 months ago
Matthew ce81c8dd4f 禁用HDR模式 2 months ago
jxjajs 7033ad7e0b Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 2 months ago
jxjajs 9397d53107 增加自动拍照等待时间,防止同一时间ptz拍多张图片时,中间会关电 2 months ago
Matthew 48856f0c0b 空指针保护 2 months ago
Matthew 2d5cc27c0e Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 2 months ago
Matthew e3c74db577 优化返回值 2 months ago
jxjajs dada0e876f 修改合并产生错误 2 months ago
XI.CHEN 9170d2bec0 增加照片长度有误时的日志 2 months ago
Matthew 06842bca13 Update version to 1.3.109
Based Core Version to 1.4.64
2 months ago
Matthew 410ded43f1 自定义HDR实现改为手动长曝光增亮 2 months ago
Matthew 1d92d624f8 优化代码 2 months ago
Matthew 5a26bd6353 Update version to 1.3.108
Based Core Version to 1.4.63

支持通过光敏自动切换日夜镜头
2 months ago
Matthew f247a6f4e0 实现获取光敏值的接口 2 months ago
Matthew 991fc0a449 Update version to 1.3.107
Based Core Version to 1.4.62
3 months ago
Matthew ac357883ac Merge branch 'PtzNew' 3 months ago
Matthew 604819a8a1 调整timer注册的默认值和内部判断逻辑 3 months ago
Matthew 95c0fb4415 移除不合理代码 3 months ago
Matthew 4e10f77ce3 Update version to 1.3.106
Based Core Version to 1.4.61
3 months ago
Matthew 3575377767 Update version to 1.3.105
Based Core Version to 1.4.60
3 months ago
Matthew f577a1be3d HTTP访问出错也计入网络出错
超过3次重启设备
3 months ago
Matthew 0b8c29eb91 直接注释代码 3 months ago
Matthew 4b09ea6428 Update version to 1.3.104
Based Core Version to 1.4.59
3 months ago
jxjajs 2b5916c9ce 云台控制逻辑优化 3 months ago
Matthew 0b5e19541e Update version to 1.3.103
Based Core Version to 1.4.58
3 months ago
Matthew bae16b9427 修复编译错误 3 months ago
Matthew 22bc6af87c Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 3 months ago
Matthew 8951b8cdaf 每次心跳增加光敏值的采集,供后面规则设置 3 months ago
liuguijing ae9ea20948 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 3 months ago
liuguijing 963ac6bdaf Update MpMaster version to 1.1.18 3 months ago
Matthew a2aef51df6 减少getInt的调用 3 months ago
Matthew 182c592f98 清理注释的代码 3 months ago
liuguijing c09ed76819 对电池电压新增单线程执行,优化修该,节省线程开销 3 months ago
liuguijing 3b492e7ea3 对电池电压新增单线程执行,优化修改 3 months ago
liuguijing bd7d4132e2 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 3 months ago
liuguijing 9c2346befd 对电池电压新增单线程执行 3 months ago
Matthew 385c456401 MPAPP对运维保活 3 months ago
Matthew c5d9e19aff Update version to 1.3.102
Based Core Version to 1.4.57
3 months ago
Matthew 9a227b149e 移动预置位等待15秒 3 months ago
liuguijing 49b614b7b9 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 3 months ago
liuguijing 4033f978a5 Update MpMaster version to 1.1.17 3 months ago
liuguijing 3c7e35b5ee 修复938下无法执行shell命令的bug 3 months ago
jxjajs e6228f7dfa 修改电源控制 3 months ago
jxjajs 8381d6a0b9 增加云台控制时间精准控制
增加云台控制时间精准控制
3 months ago
Matthew a42bf5390c 修复编译错误 3 months ago
Matthew 44aeadbe2d 云台单线程实现 3 months ago
Matthew d7a9212d90 Update version to 1.3.101
Based Core Version to 1.4.57
3 months ago
Matthew c106ae42fd 用更安全的方式访问jstring 3 months ago
Matthew 5f4af242be Update version to 1.3.100
Based Core Version to 1.4.57
3 months ago
Matthew 7212994a5f 修复编译错误和警告 3 months ago
Matthew 09dae14725 修复编译错误或者报警 3 months ago
Matthew 7b0c32e7b9 Update version to 1.3.99
Based Core Version to 1.4.56
3 months ago
Matthew b0c9e5f412 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 3 months ago
Matthew 180b2efe0b 938禁用AI识别
移除部分日志
3 months ago
XI.CHEN bd81fd52d0 Update version to 1.3.98
Based Core Version to 1.4.55
3 months ago
XI.CHEN eaf7bfb875 关电时开电时间清零 3 months ago
liuguijing 82ebd92ad2 Update MpMaster version to 1.1.16 3 months ago
liuguijing ae3f25b14a Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 3 months ago
liuguijing 9017c2bd7d 运维新增支持adb shell命令
修复sim卡2 在连接不上运维主站时  无法切换到sim卡1的情况
3 months ago
Matthew b1071b52c5 Update version to 1.3.97
Based Core Version to 1.4.54
3 months ago
Matthew 89443efa26 优化日志 3 months ago
Matthew d8defe3b63 移除5V的控制(固件层自动控制) 3 months ago
Matthew 496a6fbdce APP启动时重置所有电源 3 months ago
Matthew 3a9b85c868 增加马达相关的控制 3 months ago
Matthew e280e76015 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 3 months ago
Matthew 41208a587c 增加马达相关的控制 3 months ago
Matthew b381fa562f 增加马达相关的控制定义 3 months ago
XI.CHEN 0375b13c67 电源关闭不对称 3 months ago
XI.CHEN 864a8dc0dd 增加关电时相机状态变更 3 months ago
Matthew a506598b42 Update version to 1.3.96
Based Core Version to 1.4.54
3 months ago
Matthew 441cdc40b1 以太网三次查询不到则重启设备 3 months ago
Matthew ab91aa6cef Update version to 1.3.95
Based Core Version to 1.4.54
3 months ago
Matthew b40f77076d 修复误删代码 3 months ago
Matthew 3fdf189af5 Update version to 1.3.94
Based Core Version to 1.4.54
3 months ago
Matthew 447f9999eb 修复误删代码 3 months ago
Matthew 0c33cc68c4 Update version to 1.3.93
Based Core Version to 1.4.54
3 months ago
Matthew e999149187 移除可能会导致粉色的测光操作 3 months ago
Matthew 0aabc2af52 Update version to 1.3.92
Based Core Version to 1.4.54
3 months ago
Matthew c17dc6a83e Update version to 1.3.91
Based Core Version to 1.4.53
3 months ago
Matthew 026508615d 删除无用代码 3 months ago
Matthew deaf5ec7e1 MIPI延迟关电 3 months ago
Matthew c8610406ef 重新实现关电控制
现在通过线程的方式来实现延迟关电,性能不是很好
3 months ago
Matthew 3902d69997 调整实现 3 months ago
Matthew 22e81844ac 修改云台宏 3 months ago
Matthew 4c426cf5ff 增加摄像头控制命令 3 months ago
Matthew 8f9fa84028 修复编译错误 3 months ago
Matthew 1925e923e2 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 3 months ago
Matthew 9aa25b6d66 规约相关的源文件移到Core/Client目录 3 months ago
XI.CHEN 0cb2927b13 航煜、宇视OSD开关和设置,航煜分辨率设置 3 months ago
Matthew 5578b2e40f Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 3 months ago
Matthew f0c6738887 实现云台视频流 3 months ago
jxjajs 9f8e92030a Merge remote-tracking branch 'origin/main' 3 months ago
jxjajs f5c38b5127 提交南网mqtt(s938)代码 3 months ago
Matthew b18b9f54e4 修复误改的值 3 months ago
Matthew 808271c7f1 优化日志 3 months ago
Matthew 21fd57061c Update version to 1.3.90
Based Core Version to 1.4.51
3 months ago
Matthew 32698886d0 仍然绑定网络,确保当前不同版本的固件没有问题 3 months ago
Matthew 3dbb79e94c Update version to 1.3.89
Based Core Version to 1.4.51
3 months ago
Matthew ca7e8e5acf 修复编译错误 3 months ago
Matthew c8966009b8 实现视频流 3 months ago
Matthew ccc0e0e334 参数名格式一致 3 months ago
Matthew 5d312ed1f0 实现视频流 3 months ago
Matthew 120d7fdde7 优化代码 3 months ago
Matthew 238f0aeb4f 优化代码 3 months ago
Matthew 69a3997805 增加rtsp 认证的支持 3 months ago
Matthew ebcd0c4dca 扩展流媒体访问的接口 3 months ago
Matthew e74c0185a5 调整流转发的实现 3 months ago
Matthew b08adad0a4 Update version to 1.3.88
Based Core Version to 1.4.50
3 months ago
Matthew 955efa5389 RTSP录制视频 3 months ago
Matthew 2c0b32fe64 Update version to 1.3.87
Based Core Version to 1.4.50
3 months ago
Matthew 28315925ee 保存yuv原始数据 3 months ago
Matthew 1fd8c79f58 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 3 months ago
Matthew b4e7cf8fab 实现网络摄像机拍摄短视频 3 months ago
liuguijing 56d42d8774 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 3 months ago
XI.CHEN fe97b3c5bc Update version to 1.3.86
Based Core Version to 1.4.50
3 months ago
XI.CHEN 1583dc6a46 增加日志 3 months ago
XI.CHEN 44dc885717 增加工作状态报日志 3 months ago
Matthew fb42203056 Update version to 1.3.85
Based Core Version to 1.4.49
3 months ago
Matthew 5cf252b12f 改为单个请求连拍4帧 3 months ago
Matthew 6a80e97a43 增加不拍照的监控 3 months ago
Matthew 4d5020803c 增加MQTT选项,并优化UI逻辑 3 months ago
Matthew e05bcedb6e Update version to 1.3.84
Based Core Version to 1.4.49
3 months ago
Matthew 296858be26 优化日志 3 months ago
Matthew 34a1c4377b 以太网超过3次找不到,则重启设备(30分钟内不重复重启) 3 months ago
Matthew 620af9bb26 Update version to 1.3.82
Based Core Version to 1.4.49
3 months ago
Matthew 7624c2b249 常规拍照也曝光多张,挑最后一张上传 3 months ago
Matthew d939b8089e Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 3 months ago
XI.CHEN 99784847e3 Update version to 1.3.81
Based Core Version to 1.4.49
3 months ago
XI.CHEN eba500f734 Update version to 1.3.80
Based Core Version to 1.4.48
3 months ago
Matthew 454c5d6cc6 Update version to 1.3.80
Based Core Version to 1.4.47
3 months ago
Matthew c07508a711 修复bug 3 months ago
Matthew d51ff00473 修复bug 3 months ago
XI.CHEN b23224415e Update version to 1.3.79
Based Core Version to 1.4.47
3 months ago
XI.CHEN 4e80dced8b Update version to 1.3.78
Based Core Version to 1.4.46
3 months ago
Matthew b579a0062d Update version to 1.3.77
Based Core Version to 1.4.45
3 months ago
Matthew 2ae0368468 云台的5V和100M由固件控制 3 months ago
Matthew 9ad980f666 增加流量信息查询 3 months ago
Matthew 0cfd48ff92 电源打开和关闭成对 3 months ago
XI.CHEN 0957cc3a12 Update version to 1.3.76
Based Core Version to 1.4.43
3 months ago
liuguijing 6dee0b0ad4 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 3 months ago
XI.CHEN d37d4e0c13 Merge branch 'main' of http://dev.xinyingpower.com:8081/git/xymp/TermApp 3 months ago
XI.CHEN c308feff90 Update version to 1.3.75
Based Core Version to 1.4.42
3 months ago
Matthew 0f50e62000 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 3 months ago
Matthew 24321ae011 APP启动时间显示秒 3 months ago
XI.CHEN bdfe3e2bd7 Merge branch 'main' of http://dev.xinyingpower.com:8081/git/xymp/TermApp 3 months ago
XI.CHEN 485301e714 Update version to 1.3.74
Based Core Version to 1.4.41
3 months ago
liuguijing 64316cedf3 Update MpMaster version to 1.1.12 3 months ago
liuguijing b5eb21d063 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 3 months ago
liuguijing feb4981377 Update MpMaster version to 1.1.11 3 months ago
liuguijing 4109741628 修改单卡模式下短信的bug 3 months ago
liuguijing fedae228b1 修改运维重启系统失败的bug 3 months ago
XI.CHEN 0d92c10c60 Update version to 1.3.73
Based Core Version to 1.4.40
3 months ago
Matthew 8b51cfdefb Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 3 months ago
Matthew a7d3266624 938使用gpio机制重启 3 months ago
jxjajs b8416b8e1b 添加严格区分温湿度和风速风向及多要素气象传感器 3 months ago
jxjajs 2e12a1ac84 Merge remote-tracking branch 'origin/main' 3 months ago
jxjajs 3453e2d221 添加严格区分温湿度和风速风向及多要素气象传感器 3 months ago
liuguijing 12197fa28d Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 3 months ago
liuguijing a1601cd71c 修复配置重启不生效的bug,修改重启 3 months ago
XI.CHEN a14c76c4ed Update version to 1.3.72
Based Core Version to 1.4.39
3 months ago
Matthew 4cae89014d Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 3 months ago
Matthew 89fe772caa 注册下一个拍照时间以当前拍照时间为参考
避免系统提早触发闹钟
3 months ago
XI.CHEN 216a053086 Update version to 1.3.71
Based Core Version to 1.4.39
3 months ago
XI.CHEN 52608b8324 Merge branch 'main' of http://dev.xinyingpower.com:8081/git/xymp/TermApp 3 months ago
XI.CHEN 5734762c07 修改串口拍照指令宏定义 3 months ago
huangfeng 3a8ef64e2f Revert "perf: 调整registerHeartbeatTimer方法"
This reverts commit b82fe124d8.
3 months ago
Matthew 4ba4be427e 航煜网络相关调整 3 months ago
Matthew 5c7f6abb71 Update MpMaster version to 1.1.9 3 months ago
Matthew 02663dd08b 修复bug 3 months ago
Matthew fd84d84157 调整Toast位置 3 months ago
Matthew 70b795fe27 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 3 months ago
Matthew d206a59d9a 修复编译错误 3 months ago
陈曦 ee93791683 覆冰气象线程修改 3 months ago
陈曦 53578065f1 覆冰气象线程队列上传 3 months ago
Matthew 6f56bf0fe3 调整网络短视频实现 3 months ago
Matthew d1298663f3 调整网络短视频实现 3 months ago
huangfeng b82fe124d8 perf: 调整registerHeartbeatTimer方法 3 months ago
XI.CHEN 83d9e4e6c9 Merge branch 'main' of http://dev.xinyingpower.com:8081/git/xymp/TermApp 3 months ago
XI.CHEN 79e8a8ff71 增加信号日志 3 months ago
Matthew d9f6c6e8ac Update version to 1.3.70
Based Core Version to 1.4.38
3 months ago
Matthew c912c70572 调整APP标题 3 months ago
Matthew bbb78f44b9 调整APP标题 3 months ago
Matthew df1fc220d1 Revert "调整编译脚本"
This reverts commit 341045651c.
3 months ago
Matthew 4de7693aaa Revert "调整顺序"
This reverts commit d908a36d3b.
3 months ago
Matthew 79c828324d Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 3 months ago
Matthew a34e66c7ff 优化实现,避免大文件崩溃 3 months ago
XI.CHEN f2a965caf0 Merge branch 'main' of http://dev.xinyingpower.com:8081/git/xymp/TermApp 3 months ago
XI.CHEN 1bdc190a1b 修正拍照指令 3 months ago
Matthew 127b06de77 调整APP标题 3 months ago
Matthew d908a36d3b 调整顺序 3 months ago
Matthew 9eafc3d2f3 如果线程已经执行,则跳过 3 months ago
Matthew 2167afa1d8 优化代码 3 months ago
Matthew 341045651c 调整编译脚本 3 months ago
Matthew b6e58239d3 Update version to 1.3.66
Based Core Version to 1.4.37
3 months ago
Matthew b996c663a9 Update version to 1.3.65
Based Core Version to 1.4.36
3 months ago
Matthew 55a01c47b6 修改命令值 3 months ago
Matthew 87f9559cfd 调整接口 3 months ago
XI.CHEN cdd0fcc361 Update version to 1.3.64
Based Core Version to 1.4.35
3 months ago
jxjajs 6d84ed0ab7 Merge remote-tracking branch 'origin/main' 3 months ago
jxjajs ed08d8099c 控制下发云台命令次数 3 months ago
XI.CHEN 8ddc778cc4 修复格式错误 3 months ago
XI.CHEN 63339c199d 删除无用代码 3 months ago
jxjajs b15f12bf5f Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 3 months ago
liuguijing f08871da64 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 3 months ago
liuguijing 691acaf67e 修复获取短信获取sim卡信息时,热点开启的情况下,ip返回热点的bug 3 months ago
Matthew 964fa2a91b Update version to 1.3.63
Based Core Version to 1.4.34
3 months ago
XI.CHEN aa434414ed 删除读取电压的条件编译 3 months ago
jxjajs e1721d52d7 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 3 months ago
jxjajs 7376d4cb0f 优化控制云台反应速度,添加精准云台位置控制
优化控制云台反应速度,添加精准云台位置控制
3 months ago
XI.CHEN 614b0d4500 底层修正云台电压的两个GPIO 3 months ago
Matthew ba223f90bd 增加MQTT相关的类和实现 3 months ago
Matthew 0d7d3a69b6 Update version to 1.3.62
Based Core Version to 1.4.34
3 months ago
Matthew 4c03d73752 Update MpMaster version to 1.1.7 3 months ago
Matthew e88c2ddfcf 通过线程执行,避免ANR 3 months ago
Matthew a6bf1e117c 新增控制口 3 months ago
Matthew 1f6276860e Update version to 1.3.61
Based Core Version to 1.4.34
3 months ago
Matthew 15fd93a73a 停止拍照后等待一段时间,确保session停止 3 months ago
Matthew ce96fcb46c 检查是否支持RAW 3 months ago
Matthew 995d9cc127 空指针保护 3 months ago
Matthew 6d1f0832c6 增加日志 3 months ago
Matthew 3033c8a68d 修复bug 3 months ago
Matthew cd0a6befd6 优化代码 3 months ago
Matthew d9c1cbac2f 优化名字 3 months ago
Matthew f8bbdc58cf Update version to 1.3.60
Based Core Version to 1.4.34
3 months ago
Matthew 05048bbf53 修复错误 3 months ago
Matthew a17f6a4278 Update version to 1.3.59
Based Core Version to 1.4.34
3 months ago
Matthew a9145e8871 优化日志 3 months ago
Matthew 295aadfc16 Update version to 1.3.58
Based Core Version to 1.4.33
3 months ago
Matthew 9a89445183 关闭sql日志 3 months ago
Matthew 8f595e415e Update MpMaster version to 1.1.7 3 months ago
Matthew 22741fe3e2 修复错误 3 months ago
Matthew 567a4b81fd Update MpMaster version to 1.1.6 3 months ago
Matthew c97342abc7 简化实现 3 months ago
Matthew 6db86856e7 Update version to 1.3.57
Based Core Version to 1.4.32
3 months ago
Matthew d841200500 优化日志信息 3 months ago
Matthew fd88feeacd 不设置静态IP 3 months ago
Matthew d0288551fc 移除无用的代码 3 months ago
Matthew b1d1e3f65c 运维展示优化
cmdid未设置时,显示序列号
3 months ago
Matthew f3bd23680d Update version to 1.3.56
Based Core Version to 1.4.32
3 months ago
Matthew 2b818f920c 打开网络摄像头也同时打开网络 3 months ago
Matthew 3d6a3b6559 Update version to 1.3.55
Based Core Version to 1.4.32
3 months ago
Matthew 878bae9961 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 3 months ago
Matthew a6e928c2d2 修复编译错误 3 months ago
Matthew 03a2888369 Update version to 1.3.54
Based Core Version to 1.4.32
3 months ago
Matthew fc39cd0eb1 云台打开时把交换机也打开 3 months ago
Matthew fa6d7929ff Update version to 1.3.53
Based Core Version to 1.4.32
3 months ago
Matthew 9e76c216f7 打开云台时同时打开网络 3 months ago
Matthew b8dad4ed13 Update version to 1.3.52
Based Core Version to 1.4.32
3 months ago
Matthew 06843dff13 Update version to 1.3.51
Based Core Version to 1.4.31
3 months ago
Matthew 9b0f50c8b3 修正电控 3 months ago
Matthew 3ec9d6e711 Update version to 1.3.50
Based Core Version to 1.4.31
3 months ago
Matthew 2baecd5289 Update version to 1.3.49
Based Core Version to 1.4.30
3 months ago
Matthew 05d265d559 Update version to 1.3.48
Based Core Version to 1.4.29
3 months ago
Matthew 369f4d7c87 云台网络的调整 3 months ago
Matthew f2023d130d Update version to 1.3.47
Based Core Version to 1.4.29
3 months ago
Matthew 293431c608 对于非3V3的电控,每次打开都实际执行打开动作 3 months ago
Matthew 65e56000cb Update version to 1.3.46
Based Core Version to 1.4.29
3 months ago
Matthew 5001581438 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 3 months ago
Matthew 0f2b29a61e 修复编译错误 3 months ago
Matthew f9ca260bcc 增加日志 3 months ago
XI.CHEN 3633fda081 增加拍照日志 3 months ago
Matthew 4de735fb19 增加照片对象的有效性判断 3 months ago
Matthew e5c25f084e Update version to 1.3.45
Based Core Version to 1.4.29
3 months ago
Matthew 8fc0354f0c 增加日志 3 months ago
Matthew 20aaed0bb2 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 3 months ago
Matthew 12035d2990 供应商接口封装 3 months ago
Matthew ee68539dde 流媒体实现 3 months ago
Hydromel 8d31b18917 Update MpMaster version to 1.1.5 3 months ago
Hydromel 92e5dabee8 短信:修复短信丢失的bug,修复重启mpapp崩溃的bug 3 months ago
Matthew a38f473ce8 Update version to 1.3.44
Based Core Version to 1.4.29
3 months ago
Matthew 1b6c7ef372 Update MpMaster version to 1.1.4 3 months ago
Matthew 04b8ce7796 减小系统重启后第一次启动的延迟,避免运维重启mpapp 3 months ago
Matthew 8a14578952 系统刚启动时,加大检查文件锁次数,确保mpapp已经启动
设备重启时,mpapp可能延迟启动
3 months ago
Matthew e01c19e9f8 Update MpMaster version to 1.1.3 3 months ago
Hydromel ace0d5babb Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 3 months ago
Hydromel 003193d95a 短信:短信接收拍照时间表功能修改,短信控制拍照修改 3 months ago
Matthew 853e556d8a Update version to 1.3.43
Based Core Version to 1.4.29
3 months ago
Matthew 2c46504b36 切回卡1时,主动发起一次心跳 3 months ago
Matthew 515864c93d 心跳广播处理 3 months ago
Matthew 148ce5cb57 统一使用总线电压 3 months ago
Matthew e66667eb11 Update version to 1.3.42
Based Core Version to 1.4.29
3 months ago
Matthew 3db0d0387b Update version to 1.3.41
Based Core Version to 1.4.28
4 months ago
Matthew 876ab5ab05 网络拍照重试次数增加,避免网络不稳定导致拍照失败 4 months ago
Matthew d8ab5050b3 优化注释 4 months ago
Matthew 78101816a4 6mipi 版本的网络需要打开 131 4 months ago
Matthew 291d72b135 调整河南统一编号 4 months ago
Matthew 1a36d83b29 避免两次调用 4 months ago
Matthew 9ae985965c Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 4 months ago
Matthew 226932ae02 Update version to 1.3.40
Based Core Version to 1.4.28
4 months ago
Matthew 4e16d4ff74 增加流媒体接口定义 4 months ago
Matthew 20736f0c56 增加马达定义 4 months ago
XI.CHEN 8f45e88547 增加拍照调试日志 4 months ago
Matthew 160dffd274 优化日志 4 months ago
Matthew 2d6db87e11 Update version to 1.3.39
Based Core Version to 1.4.27
4 months ago
Matthew 54e35bce2c 优化网络摄像头电源控制 4 months ago
Matthew 2898295236 Update version to 1.3.38
Based Core Version to 1.4.27
4 months ago
Matthew 8a3da517b4 Update version to 1.3.37
Based Core Version to 1.4.26
4 months ago
Matthew bba91ff3a6 显示app启动时间 4 months ago
Matthew 1801ceb965 Update version to 1.3.36
Based Core Version to 1.4.25
4 months ago
Matthew 4371f39872 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 4 months ago
Matthew a8c945be2b 文件写完后立即要求系统刷新缓存 4 months ago
XI.CHEN 4d39dda520 Merge branch 'main' of http://dev.xinyingpower.com:8081/git/xymp/TermApp 4 months ago
XI.CHEN d69c7d257a Update version to 1.3.35
Based Core Version to 1.4.24
4 months ago
Matthew bf91569905 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 4 months ago
Matthew 5e93009f25 优化日志 4 months ago
XI.CHEN 95bd8ff7b3 统一云台控制和拍照的电源状态判断 4 months ago
Matthew 84597d0108 设备重启后的启动,延迟进行 4 months ago
Matthew 3d4aa720d9 设备重启后的启动,延迟进行 4 months ago
Matthew 238d572f1e Update MpMaster version to 1.1.1 4 months ago
Matthew 80e8c5e7c7 临时移除重启MpApp 4 months ago
Matthew 63259f11c8 主分支运维APP版本升级为 1.1.0 4 months ago
Matthew 6fdfe988a7 Update version to 1.3.34
Based Core Version to 1.4.23
4 months ago
Matthew 97eb35e169 移除重复的锁 4 months ago
Matthew cff24f65ae Update version to 1.3.33
Based Core Version to 1.4.23
4 months ago
Matthew 856c1a8b10 打开电源的操作也仅在没有电源时执行 4 months ago
Matthew a98dc5126c 临时移除自动重启设备 4 months ago
Matthew 5a4840dcbf 增加日志 4 months ago
Matthew 3234185c8b Update version to 1.3.32
Based Core Version to 1.4.22
4 months ago
Matthew 486805b469 加大重启的延迟时间 4 months ago
Matthew 70ff5e7ea2 增加错误处理 4 months ago
Matthew e0d1cbb60a Update version to 1.3.31
Based Core Version to 1.4.22
4 months ago
Matthew a205ee22f9 优化照片写入文件的实现,增加日志 4 months ago
Matthew c64cc8ef44 Update version to 1.3.30
Based Core Version to 1.4.21
4 months ago
XI.CHEN d8ecf26722 Merge branch 'main' of http://dev.xinyingpower.com:8081/git/xymp/TermApp 4 months ago
XI.CHEN 522d0b96e3 增加拍照临时调试信息 4 months ago
Matthew ef8efd48b0 底层把云台电压的两个GPIO设反 4 months ago
Matthew 4ca8e83188 增加日志 4 months ago
Matthew 52e2b5f064 空图片的保护处理 4 months ago
Matthew 7fbffb2930 增加错误保护 4 months ago
Matthew 033e632e3b 网卡上电之后等待1秒 4 months ago
Matthew dabff33bbc Update version to 1.3.29
Based Core Version to 1.4.20
4 months ago
Matthew fa3bb289bf 启动时不判断服务是否运行 4 months ago
Matthew cae5b6996b 优化自启动 4 months ago
Matthew e39f42c28a Update version to 1.3.28
Based Core Version to 1.4.20
4 months ago
Matthew 6129d10749 空指针保护 4 months ago
Matthew 2be8a53a67 优化代码 4 months ago
Matthew 76e63a8808 Update version to 1.3.27
Based Core Version to 1.4.20
4 months ago
Matthew c186ab9098 优化重启的机制 4 months ago
Matthew ac43db4e4a 网络摄像机拍照的拍照时间不人为调整 4 months ago
Matthew 600c46f024 优化日志控制方式 4 months ago
Matthew 46bc13a608 绑定网络暂时去除 4 months ago
Matthew b91432cc4d Update version to 1.3.26
Based Core Version to 1.4.20
4 months ago
Matthew e27cd3e5fe 优化代码 4 months ago
Matthew d6dc5729fe 优化照片保存处理 4 months ago
Matthew 2d90f18220 增加Flags Mask 4 months ago
Matthew 5a74b57eb1 调整云台的电源控制 4 months ago
XI.CHEN cb36acfbd6 Update version to 1.3.25
Based Core Version to 1.4.18
4 months ago
XI.CHEN dc448adc34 增加日志和销毁mImageReader前释放资源 4 months ago
Matthew 79aa0134a5 Update version to 1.3.24
Based Core Version to 1.4.17
4 months ago
Matthew 0001c9dc5a Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 4 months ago
Matthew 6162c220b4 修改参数传递方式,避免内存提前被释放 4 months ago
XI.CHEN 5275d0c272 删除拍照测试日志 4 months ago
Matthew 4686003763 Update version to 1.3.23
Based Core Version to 1.4.17
4 months ago
Matthew a81dfe5bcf Update version to 1.3.22
Based Core Version to 1.4.16
4 months ago
Matthew 50ee50f3d9 优化代码 4 months ago
Matthew e20f442512 增加日志 4 months ago
Matthew 1e71375144 Update version to 1.3.21
Based Core Version to 1.4.15
4 months ago
XI.CHEN e1c99fa228 Update version to 1.3.20
Based Core Version to 1.4.14
4 months ago
XI.CHEN b751429199 增加拍照临时测试日志 4 months ago
XI.CHEN 99b0d9ef70 Merge branch 'main' of http://dev.xinyingpower.com:8081/git/xymp/TermApp 4 months ago
XI.CHEN 50adc38110 增加拍照失败日志 4 months ago
Matthew 2fe94152e5 Update version to 1.3.19
Based Core Version to 1.4.14
4 months ago
Matthew 46b1c48c5c 处理以太网变化标记 4 months ago
Matthew 28d58199bf 增加日志 4 months ago
Matthew 09cd545777 Update version to 1.3.18
Based Core Version to 1.4.13
4 months ago
Matthew dc692d05b1 优化日志 4 months ago
Matthew 6be75f4a8e 云台电源和网络电源分开控制 4 months ago
Matthew 9b59489969 Update version to 1.3.17
Based Core Version to 1.4.13
4 months ago
Matthew 31954a3f17 增加以太网变化时检测网络是否断连 4 months ago
Matthew b7f6631672 启动服务之后再监控以太网 4 months ago
Matthew c4d8bd7f92 修复bug 4 months ago
Matthew 672159ddc5 Update version to 1.3.16
Based Core Version to 1.4.12
4 months ago
Matthew 727e397efe Update MpMaster version to 1.0.96 4 months ago
Matthew 9fa531b98d Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 4 months ago
Matthew 3c3cb9b130 增加网络信息的设置
重启MPApp使用临时方案,通过MpMaster来完成,解决自身不能重启的问题
4 months ago
Matthew bcda473b40 调整重启MpAPP的菜单 4 months ago
liuguijing 011bd762ab Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 4 months ago
liuguijing fe174f800d 短信通用版本修改中:新增旧版本短信支持 4 months ago
Matthew 101c8f41a4 修改通知栏图标 4 months ago
Matthew 49cdb9d3ab 修改通知栏图标 4 months ago
Matthew 53df77733d DNS 设置为 0.0.0.0 4 months ago
Matthew 3d3a02158e 优化函数名称 4 months ago
Matthew e0ed5615c3 优化变量名字 4 months ago
Matthew 85a3d99f21 Update MpMaster version to 1.0.95 4 months ago
Matthew 018ccea1f8 修正目录 4 months ago
Matthew 4f1b8c3b49 Update version to 1.3.15
Based Core Version to 1.4.10
4 months ago
Matthew db208237d6 Update version to 1.3.14
Based Core Version to 1.4.9
4 months ago
Matthew 7adebb3019 增加重启延时控制 4 months ago
Matthew b7bfefe3e6 Refactor 4 months ago
Matthew af9ace5183 Update version to 1.3.13
Based Core Version to 1.4.8
4 months ago
XI.CHEN 87e8853737 Update version to 1.3.12
Based Core Version to 1.4.7
4 months ago
Matthew 384d999ead Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 4 months ago
Matthew fc3f5fafa1 移除USB摄像头的处理 4 months ago
XI.CHEN b597e1f4eb Update version to 1.3.11
Based Core Version to 1.4.7
4 months ago
XI.CHEN b9ba8135cd 错误处理优化 4 months ago
XI.CHEN 63264f8e1c 延长云台预置位调用等待时间 4 months ago
liuguijing 2ba7429b56 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 4 months ago
liuguijing ae75a59f51 短信通用版本修改中:清除第二次短信进来时,上一次发送的短信内容
增加jsonarray类型的判断
4 months ago
liuguijing de466ba891 短信通用版本修改中:修改配置文件参数设置数组时全是字符串类型的bug,修复发送短信空指针的bug 4 months ago
liuguijing 874f7db0cd 短信通用版本修改中:修改文件路径存储错误的bug 4 months ago
liuguijing 30e4562f1d 短信通用版本修改中:新增文件替换,文件获取,文件参数修改 4 months ago
XI.CHEN 04504794a0 Merge branch 'main' of http://dev.xinyingpower.com:8081/git/xymp/TermApp 4 months ago
XI.CHEN 7d7234037f 串口log文件名后缀改为txt 4 months ago
Matthew f2db516071 补拍的时间处理保护
补拍应该不会存在拍照计划时间为0的情况,这儿这样处理,是为了外部特殊处理,故意把拍照计划时间置为0
4 months ago
liuguijing becda30777 短信通用版本修改中:对于文件路径的判断和文件绝对路径判断的修改 4 months ago
liuguijing 6915001d02 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 4 months ago
liuguijing c680c83015 短信通用版本修改 4 months ago
Matthew c08b8a03b1 Update version to 1.3.9
Based Core Version to 1.4.6
4 months ago
Matthew 5bfd1f58ea 优化日志 4 months ago
Matthew 772690d57e Update version to 1.3.8
Based Core Version to 1.4.5
4 months ago
Matthew 5000fdecef Update version to 1.3.7
Based Core Version to 1.4.4
4 months ago
liuguijing c91c97e5d0 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 4 months ago
liuguijing 30360e9a05 短信修改成通用版本,新增具体参数修改 4 months ago
liuguijing 511852597c 短信修改成通用版本 4 months ago
Matthew 2b0f921157 Update version to 1.3.6
Based Core Version to 1.4.3
4 months ago
liuguijing 650765e87d 短信
运维时间表不返回修复
设置CMD_ID不应答修复
设置主站IP端口返回的网络方式和加密方式不一致修复
水印查询和修改修复
设置心跳及连接协议修复
4 months ago
XI.CHEN 345f9cc09d Update version to 1.3.5
Based Core Version to 1.4.2
4 months ago
XI.CHEN 055cd6cc7f 放宽温度传感器取值范围限制 4 months ago
Matthew a8c97d93e2 Update MpMaster version to 1.0.94 4 months ago
Matthew b5684256fe 运维如果没有获取到cmdid,则使用SN 4 months ago
Matthew e762e2b53c Update MpMaster version to 1.0.93 4 months ago
Matthew a716856a4f 优化日志压缩和上传 4 months ago
Matthew 0bfbe32077 Update MpMaster version to 1.0.92 4 months ago
Matthew 72c936c3dd 增加日志文件过滤的后缀名 4 months ago
Matthew 278703deed Update MpMaster version to 1.0.91 5 months ago
Matthew eba83ab37d 修正判断时间 5 months ago
Matthew da75bd8519 增加运维程序的标识宏 5 months ago
Matthew c307b7e56e cmdid未设置的异常保护 5 months ago
XI.CHEN 51a744cc51 Update version to 1.3.4
Based Core Version to 1.4.2
5 months ago
jxjajs 72c2ebebc6 修改海康球机网络抓拍 5 months ago
jxjajs 378bc2d0fb 读取图片返回错误,包号不增加 5 months ago
jxjajs 5a8d4fdf9e Merge branch 'N938' of http://61.169.135.146:8081/git/xymp/TermApp 5 months ago
Matthew bbb4d8b4a3 Update version to 1.3.3
Based Core Version to 1.4.1
5 months ago
Matthew 6a7295d9c3 调整网络拍照延时关闭时间 5 months ago
Matthew f1c6153852 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 5 months ago
Matthew 26725d8bdc Update version to 1.3.2
Based Core Version to 1.4.1
5 months ago
Matthew dfac4eeba6 如果IP已经是设置的IP,则跳过 5 months ago
XI.CHEN ac3e4627e9 电源控制 修正自检判断条件 5 months ago
XI.CHEN 697fe83f5e 云台 在调用预置位前等待自检 5 months ago
XI.CHEN cdfb158437 Update version to 1.3.1
Based Core Version to 1.4.01
5 months ago
XI.CHEN f3b2f01f8b 串口log文件名统一 5 months ago
Matthew d4a561f3c2 Update version to 1.1.55
Based Core Version to 1.2.36
5 months ago
Matthew 417d5c5083 网络打开时需要同时打开485 5 months ago
XI.CHEN 2c35ca4b3d Update version to 1.1.54
Based Core Version to 1.2.36
5 months ago
XI.CHEN c408fa2e68 去除双重水印 5 months ago
XI.CHEN 34f0c173cb 修复938凌晨重启问题,串口log路径统一 5 months ago
XI.CHEN 5a8b7427c4 修改网络摄像头分辨率设置参数错误 5 months ago
XI.CHEN a76b654593 修复通过app和配置工具手动拍照时不延时关电 5 months ago
XI.CHEN 35ad7d8d25 Update version to 1.1.53
Based Core Version to 1.2.35
5 months ago
XI.CHEN 950e520fb2 修复编译错误 5 months ago
XI.CHEN 5cc05c373d 增加宇视分辨率设置 5 months ago
XI.CHEN de288c860f 增加文件锁日志 5 months ago
Matthew 7b240fd01b Update version to 1.1.52
Based Core Version to 1.2.34
5 months ago
Matthew dc2b8c1504 libcurl增加错误处理 5 months ago
Matthew 4e4904241c Update version to 1.1.51
Based Core Version to 1.2.33
5 months ago
Matthew b71fcebdf2 修复bug 5 months ago
Matthew 7baaae19d8 Update version to 1.1.50
Based Core Version to 1.2.33
5 months ago
Matthew 128a0a0003 改一下网络电源控制的时间 5 months ago
Matthew 26144c195b Update version to 1.1.49
Based Core Version to 1.2.33
5 months ago
Matthew dfd970d1a9 优化错误处理 5 months ago
XI.CHEN 5d318b62c8 增加网络拍照失败日志 5 months ago
XI.CHEN 9417980864 Update version to 1.1.48
Based Core Version to 1.2.32
5 months ago
Matthew a29b471633 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 5 months ago
Matthew 680b763ff6 优化配置文件的初始化
把日志显示移到独立的activity
5 months ago
jxjajs eb26477067 Merge remote-tracking branch 'origin/main' 5 months ago
jxjajs 8dbe00c8f9 读取图片返回错误,包号不增加 5 months ago
XI.CHEN 6e337a986f 心跳周期小于10min时,运维580秒未收到心跳才重启 5 months ago
Matthew 25e465df3b Update version to 1.1.47
Based Core Version to 1.2.31
5 months ago
XI.CHEN a423c07820 Merge branch 'main' of http://dev.xinyingpower.com:8081/git/xymp/TermApp 5 months ago
XI.CHEN e4b9245bed 网络连续拍照不重复自检 5 months ago
Matthew 0d9b6a1736 优化日志 5 months ago
Matthew 5007405469 Update version to 1.1.46
Based Core Version to 1.2.31
5 months ago
Matthew f910187a48 优化对焦 5 months ago
Matthew 8b79de9208 修复警告 5 months ago
Matthew 7eac44e6ce 移除异常处理 5 months ago
Matthew c7c91af66d 调整日志 5 months ago
XI.CHEN a3599ccfc1 Update version to 1.1.45
Based Core Version to 1.2.30
6 months ago
XI.CHEN dd35758e59 修复云台控制开启条件判断 6 months ago
Matthew ac5905f9fc Update version to 1.1.44
Based Core Version to 1.2.29
6 months ago
Matthew 9231d205ca Fix bug 6 months ago
Matthew 0243d9b7d4 修复gpio bug和优化 6 months ago
Matthew a59144c97b Update version to 1.1.43
Based Core Version to 1.2.29
6 months ago
Matthew 69a634edab 网络摄像机的通道号需要独立设置
复用CameraId字段
6 months ago
Matthew 03628c4a70 调整配置 6 months ago
Matthew 65f9e617e8 Revert "多渠道打包"
This reverts commit 43fde7df76.
6 months ago
Matthew 25ae7ec464 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 6 months ago
Matthew fa7b1d2bfa 使用IPv4 6 months ago
XI.CHEN 2b6db343be Update version to 1.1.42
Based Core Version to 1.2.29
6 months ago
XI.CHEN d5b46fb343 右上OSD换行位置 6 months ago
Matthew 8c1492052d 修复手动拍照各数据组合的错误 6 months ago
liuguijing 8dabf9aecc Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 6 months ago
liuguijing 43fde7df76 多渠道打包 6 months ago
XI.CHEN 2354403941 Update version to 1.1.41
Based Core Version to 1.2.28
6 months ago
XI.CHEN 3b03dce9e4 修复数据缓冲区太小导致报错 6 months ago
jxjajs 4967f47036 Merge remote-tracking branch 'origin/main' 6 months ago
jxjajs b482a184a0 修改气象数据雨量、日照数据处理,及数据处理日志信息 6 months ago
jxjajs ef1a061fad 气象数据打印错误 6 months ago
XI.CHEN b73e58bfe7 Update version to 1.1.40
Based Core Version to 1.2.28
6 months ago
XI.CHEN 09073b86b0 增加多合一气象传感器处理 6 months ago
XI.CHEN c2d4c3d296 Update version to 1.1.39
Based Core Version to 1.2.27
6 months ago
XI.CHEN a6d4e065b3 网络拍照规约上送时间与水印时间一致 6 months ago
XI.CHEN 4cca774a09 串口规约上送时间与水印时间一致 6 months ago
XI.CHEN 2f176a0a33 网络定时拍照电源关闭不延时 6 months ago
XI.CHEN e07de45f59 气象采集函数修改 6 months ago
jxjajs a6134b860d 气象数据打印错误 6 months ago
Matthew ebe573f0f3 Update version to 1.1.39
Based Core Version to 1.2.25
6 months ago
XI.CHEN d4e64008bc 增加释放唤醒锁 6 months ago
XI.CHEN af0de16a67 增加释放唤醒锁 6 months ago
XI.CHEN b5413de417 Update version to 1.1.38
Based Core Version to 1.2.26
6 months ago
XI.CHEN a24345e01f 禁用传感器后不导致重采 6 months ago
XI.CHEN 816881fd89 修改定时拍照不延时 6 months ago
XI.CHEN ce4f7358b4 Update version to 1.1.37
Based Core Version to 1.2.25
6 months ago
XI.CHEN c410e1e6a9 定时拍照不会延时关闭摄像机电源 6 months ago
XI.CHEN bf9ee3aea8 心跳周期小于10min时,10分钟收不到心跳才会重启 6 months ago
XI.CHEN f0ed4adea3 Update version to 1.1.36
Based Core Version to 1.2.25
6 months ago
XI.CHEN e0d926efb1 未开启电源时,控制云台不会打开电源 6 months ago
XI.CHEN 92781524d7 Update version to 1.1.35
Based Core Version to 1.2.25
6 months ago
XI.CHEN 907836d3d6 增加电源控制关闭计数判断条件 6 months ago
XI.CHEN 23877165ab 增加串口配置系数和偏移值 6 months ago
XI.CHEN 470d2be42a 修改摄像机手动电源控制 6 months ago
Matthew 61402ee8cb Update MpMaster version to 1.0.90 6 months ago
Matthew 23f2ab7527 暂时禁用运维保活检测 6 months ago
Matthew 56ceb40f6d 优化代码 6 months ago
Matthew eebf9b5a03 增加云台摄像机的拍照处理 6 months ago
XI.CHEN 9facbf3c55 Update version to 1.1.34
Based Core Version to 1.2.24
6 months ago
XI.CHEN c501b689de 增加立即关闭摄像机电源函数,实现手动关闭 6 months ago
XI.CHEN 162d7624af 调整时间类型长度 6 months ago
XI.CHEN 99efe9c91f 加长拍照时间表更新的时间类型长度 6 months ago
XI.CHEN a3291e2dd2 Update version to 1.1.33
Based Core Version to 1.2.23
6 months ago
XI.CHEN ddff8bdaca 串口拍照调用预置位BUG修复 6 months ago
XI.CHEN 1195bb01a1 Update version to 1.1.32
Based Core Version to 1.2.22
6 months ago
XI.CHEN cd1e7b8a8c Merge branch 'main' of http://dev.xinyingpower.com:8081/git/xymp/TermApp 6 months ago
XI.CHEN ebe0def611 加长云台时间控制类型 6 months ago
Matthew a10f029175 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 6 months ago
XI.CHEN 6940c89caf Update version to 1.1.31 6 months ago
XI.CHEN 7cc3de572b 网络摄像机规约上传时间与水印一致 6 months ago
Matthew 48d37afa1e Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 6 months ago
Matthew 2b26a090a0 航煜新款摄像头的接口实现 6 months ago
XI.CHEN c3f7d99a0b 调整云台电源关闭逻辑 6 months ago
XI.CHEN d671565f0b Update version to 1.1.30 6 months ago
XI.CHEN a4deb30423 云台控制增加唤醒锁,调整判断条件 6 months ago
Matthew 7a2acd5f97 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 6 months ago
Matthew 3a98b0a451 优化日志 6 months ago
XI.CHEN 4af9c92440 PendingIntent请求码改回固定值 6 months ago
XI.CHEN 7aa553b1fa 修复规约上传时间与水印时间不一致问题 6 months ago
Matthew fd8b9c093e Update version to 1.1.29
Based Core Version to 1.2.20
6 months ago
Matthew 77aa962521 调整闹钟的实现 6 months ago
Matthew 7ae11e40fe 移除无用代码 6 months ago
Matthew 2ddb82a345 Update version to 1.1.28
Based Core Version to 1.2.20
6 months ago
Matthew 8311ad8ba9 优化日志 6 months ago
Matthew b73c395357 Update version to 1.1.27
Based Core Version to 1.2.20
6 months ago
Matthew fc7a00d10c 优化心跳定时器的代码 6 months ago
Matthew d80ae9c119 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 6 months ago
Matthew d92bc67606 增加日志 6 months ago
XI.CHEN 7d8c5d2342 Update MpMaster version to 1.0.89 6 months ago
XI.CHEN e2455cfd5a 运维取消无拍照重启(临时) 6 months ago
Matthew bc01722f5d 优化心跳周期控制 6 months ago
Matthew feadee8ae4 优化日志 6 months ago
Matthew 224659c163 优化日志 6 months ago
Matthew fa08064340 优化心跳时间和相关日志 6 months ago
Matthew 6f8772d6a3 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 6 months ago
Matthew 09c7af6947 增加日志 6 months ago
XI.CHEN bf3cb6416f Update version to 1.1.26
Based Core Version to 1.2.20
6 months ago
XI.CHEN d6392eb1d3 修正云台控制传入文件地址错误 6 months ago
XI.CHEN b2d2100dfc 修复云台控制阻塞问题 6 months ago
XI.CHEN ab9ba337dc 云台命令映射关系调整 6 months ago
Matthew 0b69dc75bc Update version to 1.1.25
Based Core Version to 1.2.19
6 months ago
Matthew 653812fa56 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 6 months ago
Matthew c2687cc87e 增加航煜新接口 6 months ago
XI.CHEN fcd78f02a6 云台控制线程 6 months ago
Matthew 99c8e158c2 优化日志 6 months ago
XI.CHEN 56c1394bce Update version to 1.1.24
Based Core Version to 1.2.19
6 months ago
Matthew bec5b74a4f 优化日志 6 months ago
Matthew dc69600477 有效性检查 6 months ago
XI.CHEN 20f93c20bf 修正采样周期设置实现 6 months ago
XI.CHEN 467d042a7b 修复重启后多拍问题 6 months ago
Matthew 99ee66139b 完善采样周期设置的实现 6 months ago
Matthew b24798af08 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 6 months ago
Matthew 8d05947395 修正文件名 6 months ago
XI.CHEN 93864e30b9 Update version to 1.1.23
Based Core Version to 1.2.18
6 months ago
XI.CHEN 1633412d17 延长预置位转动等待时间 6 months ago
Matthew 30d69e378a Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 6 months ago
Matthew d435bab3c2 传感器采样复用拍照的定时器机制 6 months ago
XI.CHEN 2445315c5d Update version to 1.1.22
Based Core Version to 1.2.17
6 months ago
XI.CHEN da14135af3 串口拍照增加水印 6 months ago
XI.CHEN 06b8185a0a Update version to 1.1.21
Based Core Version to 1.2.16
6 months ago
Matthew 5f48449da6 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 6 months ago
Matthew 6908711c3a 增加HDR曝光增益的控制 6 months ago
jxjajs 0fef92ddaf get photo state error 6 months ago
Matthew 4ba68b4d82 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 6 months ago
Matthew f4a93cae9a 调整TARGET_SDK 6 months ago
XI.CHEN 96a1221a53 Update version to 1.1.20
Based Core Version to 1.2.15
6 months ago
XI.CHEN 76e6b91e7e minSDKVersion改回28 6 months ago
XI.CHEN e664a5f766 日照降雨量等未配置传255 6 months ago
Matthew a07e656a56 调整实现 6 months ago
Matthew 66a4fa7f00 优化电源管理 6 months ago
Matthew 77184e796d HDR特殊处理 6 months ago
Matthew 6bd0ebf5b1 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 6 months ago
Matthew ae02d3b366 增加OpenCV HDR的支持 6 months ago
XI.CHEN e943c79efc 修改等值覆冰厚度等计算 6 months ago
XI.CHEN dc4b2fbeed 等值覆冰厚度等计算 6 months ago
XI.CHEN bccff01bbd sensorprotocol.h移回termapp 6 months ago
XI.CHEN 8332dd98ef 运维增加LOG,修改PendingIntent请求码 6 months ago
XI.CHEN fed405d585 修改时间参数类型,拍照增加唤醒 6 months ago
Matthew 601c47b11b Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 6 months ago
Matthew 7a35404099 不判断认证类型 6 months ago
Matthew 24856c5fb4 更新云台GPIO定义 6 months ago
XI.CHEN a5603100f8 Update version to 1.1.19
Based Core Version to 1.2.14
6 months ago
XI.CHEN 3ccd293f4a 实现摄像机控制延时关闭 6 months ago
Matthew 973000aa6b 移除日志 6 months ago
Matthew af475d2795 修复对焦处理 6 months ago
Matthew 70f6f69fc3 修改参数类型 6 months ago
Matthew bca5c93430 修类参数类型 6 months ago
Matthew ce5e543f6b 增加日志 6 months ago
XI.CHEN a282111810 串口拍照完毕后自动延时关闭电源 6 months ago
jxjajs 9767036d54 格式修改 6 months ago
jxjajs 30b57fcce5 格式修改 6 months ago
jxjajs 8122341f79 Merge branch 'main' of http://dev.xinyingpower.com:8081/git/xymp/TermApp 6 months ago
jxjajs a43965639a 修改标志 6 months ago
XI.CHEN 30a1706af5 增加电源延时 6 months ago
jxjajs 821dd34c18 修改风速风向标志位错误标识 6 months ago
XI.CHEN c2aa754b4b 未接传感器不作为采集失败处理 6 months ago
jxjajs 915259ba2f 修改串口读取时间延长等问题 6 months ago
Matthew dfcb3623a9 Update version to 1.1.17
Based Core Version to 1.2.12
6 months ago
XI.CHEN 78ceadc24e 增加打开电源延时 6 months ago
jxjajs a7b7623c93 串口通讯加锁 6 months ago
XI.CHEN d3a6903f5e 增加数据采集判断条件 6 months ago
XI.CHEN 101f136036 修改多线程采集数据 6 months ago
jxjajs 517323a28e 修改串口拍照方式 6 months ago
XI.CHEN c2c6a2a10f 完善气象覆冰逻辑 6 months ago
XI.CHEN 5f1f788694 修改覆冰气象数据采集重试逻辑 6 months ago
Matthew 2e3cd76f01 Update version to 1.1.16
Based Core Version to 1.2.11
6 months ago
Matthew bd125e73fb Fix Typo 6 months ago
Matthew c98472d1fb 云台网络摄像机的电源控制调整 6 months ago
Matthew 9cb881ce0a 938加密芯片对应的设备文件名调整 6 months ago
Matthew 856dc1c239 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 6 months ago
Matthew 76e6770cac 阻止程序发起的半小时内的设备重启 6 months ago
jxjajs cf9e7b8264 Merge branch 'main' of http://dev.xinyingpower.com:8081/git/xymp/TermApp 6 months ago
jxjajs 1fe9648704 增加串口持续接收数据时间 6 months ago
Matthew 7a784a89e0 Update version to 1.1.15
Based Core Version to 1.2.10
6 months ago
Matthew 1b96064578 938强制调用设置静态ip的函数,以监听网络变化
后面需要优化
6 months ago
Matthew b236eb40d2 修复编译警告 6 months ago
XI.CHEN 58b01573ad 串口拍照调用函数修改 6 months ago
jxjajs e59497e159 修改摄像机串口接收 6 months ago
jxjajs 187d715cd5 Merge branch 'main' of http://dev.xinyingpower.com:8081/git/xymp/TermApp 6 months ago
jxjajs e7a0a3b290 修改波特率设置 6 months ago
Matthew 11bab5a631 修复错误的合并 6 months ago
Matthew 87eac6bd08 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 6 months ago
jxjajs dbbfd96de2 摄像机串口独立 6 months ago
Matthew 4f77f847b1 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 6 months ago
XI.CHEN 3e26f35c65 串口重试时间延长 6 months ago
XI.CHEN 98c99e016c 串口拍照启用线程 6 months ago
XI.CHEN ceb89a0c70 延长预置位调用等待时间 6 months ago
XI.CHEN bd706d1cf4 删除反复读取传感器波特率 6 months ago
jxjajs 385ae48185 Merge branch 'main' of http://dev.xinyingpower.com:8081/git/xymp/TermApp 6 months ago
jxjajs f1cfe67f72 恢复串口拍照从1开始取包 6 months ago
XI.CHEN 2fead34087 Merge branch 'main' of http://dev.xinyingpower.com:8081/git/xymp/TermApp 6 months ago
XI.CHEN 7e48c76d33 Merge branch 'main' of http://dev.xinyingpower.com:8081/git/xymp/TermApp 6 months ago
jxjajs 8c9ff7f2ba 修改错包 6 months ago
XI.CHEN ef3cc6d726 覆冰打印信息变更 6 months ago
jxjajs da6074e2ec Merge branch 'main' of http://dev.xinyingpower.com:8081/git/xymp/TermApp 6 months ago
Matthew 01a056da4b Update version to 1.1.14
Based Core Version to 1.2.10
6 months ago
jxjajs f10c33a5f9 修改读取图片序号 6 months ago
XI.CHEN 879801088d 增加覆冰传感器打印地址 6 months ago
Matthew f5590a708a 调整设置静态ip的时序 7 months ago
Matthew 2694aec84a 修复手机app拍照总是通道1的bug
修复停止服务时crash bug
7 months ago
Matthew e90e9a3d4e Update MpMaster version to 1.0.88 7 months ago
Matthew 7cf1055452 判断mpapp是否运行两次,确保结果正确 7 months ago
Matthew 3fa3a53507 Update version to 1.1.13
Based Core Version to 1.2.9
7 months ago
Matthew 57c028c877 移除注释的代码 7 months ago
Matthew f8b02ee28c 确保目录存在 7 months ago
Matthew 11f1dcd2f6 Update version to 1.1.12
Based Core Version to 1.2.9
7 months ago
Matthew 4bddca2cd2 优化代码 7 months ago
Matthew 251e120676 调整OTG的电源控制 7 months ago
Matthew 096c253dbf 修复错误的合并 7 months ago
Matthew 90e11f8763 Update version to 1.1.11
Based Core Version to 1.2.9
7 months ago
Matthew c271753dec 938直接使用系统默认IP 7 months ago
Matthew 2a8a9e6074 修复错误的合并 7 months ago
jxjajs 6b4076785f Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 7 months ago
jxjajs 25e459faea 格式变化 7 months ago
Matthew 01c8767a29 增加日志。关闭加密芯片 7 months ago
Matthew 5d844da1c4 增加日志 7 months ago
Matthew b7a7221e31 使用新实现控制电源 7 months ago
Matthew 7c16733906 Update MpMaster version to 1.0.87 7 months ago
Matthew 9ebbe19c4f 修复重启MpAPP最小间隔时间的错误 7 months ago
Matthew 0ba0901619 Update MpMaster version to 1.0.86 7 months ago
Matthew ce39427eda Update version to 1.1.10
Based Core Version to 1.2.9
7 months ago
Matthew 05208d202f 优化代码 7 months ago
Matthew fd2b54aef1 优化代码 7 months ago
Matthew 39493ec75b 处理锁的释放 7 months ago
Matthew cd220aa256 启动电源控制线程 7 months ago
Matthew 36750d6e85 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 7 months ago
Matthew 36d2188ce9 优化电源控制机制 7 months ago
Matthew eb13dea4fa 优化日志 7 months ago
陈曦 56b53eb8d4 根据配置修改分辨率和自检时间 7 months ago
陈曦 0b8fc17c22 延长缩放等待时间 7 months ago
jxjajs 1728e08f7a Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 7 months ago
jxjajs 1e47fe04e6 修改数据格式 7 months ago
Matthew 8cdcbc690e Update version to 1.1.9
Based Core Version to 1.2.8
7 months ago
陈曦 30e9987c9e 气象覆冰数据未读取到的值统一为0xff 7 months ago
陈曦 d1571fad00 串口拍照失败处理;重新拍照前未开启电源则开电 7 months ago
Matthew 02037ac34f Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 7 months ago
陈曦 e7da756eec 优化气象覆冰内存安全 7 months ago
陈曦 fefa90b01f 减少电源关闭延迟 7 months ago
陈曦 c6eee57115 拍照增加调用预置位,手动关电源不等待自检 7 months ago
Matthew 80c9ed157a 移除不必要的线程绑定 7 months ago
Matthew 64b75e994d 网络摄像机拍照的优化 7 months ago
陈曦 140726808f 手动拍照电源自启,拍照电源移动至phoneDevice 7 months ago
Matthew 9617743566 Update version to 1.1.8
Based Core Version to 1.2.6
7 months ago
Matthew 1e72f8e4c4 修复编译错误 7 months ago
Matthew e2c0cff2e3 网络摄像头用户名和密码的设置 7 months ago
Matthew e06b9178bf 网络摄像头拍照的实现 7 months ago
Matthew 83dfd4fb7c 增加批量处理GPIO的接口 7 months ago
Matthew 70075a0ecc 网络摄像机ip地址的属性设置 7 months ago
Matthew 7bdde7b70d 本地网络的设置 7 months ago
Matthew 69536dc45d 实现网络摄像机的操作 7 months ago
陈曦 c098420f1e Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 7 months ago
陈曦 a71841c725 修改result类型 7 months ago
Matthew 53cd58bbe1 Update version to 1.1.7
Based Core Version to 1.2.5
7 months ago
Matthew 002cb79223 移除调试代码 7 months ago
陈曦 a15a0f6ed6 拍照移到PhoneDevice 7 months ago
Matthew f6d5f38bfc Update version to 1.1.6
Based Core Version to 1.2.4
7 months ago
Matthew 2653290682 回退 9e260d2b93 7 months ago
Matthew a7e4fabe13 修复编译错误和警告 7 months ago
Matthew 9e260d2b93 GPIO控制的调整 7 months ago
jxjajs 494b26d92d 增加云台机测试代码 7 months ago
Matthew 1b0d0f421f Update version to 1.1.5
Based Core Version to 1.2.2
7 months ago
Matthew 7cfe23f078 优化 7 months ago
Matthew 7cd424953a 调整938相关的GPIO控制 7 months ago
陈曦 1f18a20b14 GPIO口更改 7 months ago
Matthew 99644acba9 增加调试代码 7 months ago
Matthew 539b701460 增加拍照错误处理 7 months ago
Matthew 7978dd32a7 Update version to 1.1.4
Based Core Version to 1.2.2
7 months ago
Matthew 8a91ac73ce 修复编译错误 7 months ago
Matthew 6dceb6f70f GPIO通过计数进行控制 7 months ago
Matthew 3bdd0ce344 优化代码 7 months ago
Matthew 4bad6887f3 调整编译参数 7 months ago
Matthew 14b3777128 优化 7 months ago
Matthew 92e5473ca3 优化 7 months ago
Matthew 35aa8bf2b0 简化app UI 7 months ago
陈曦 b70a6a2999 删除N938宏 7 months ago
Matthew cc6a91d739 Update version to 1.1.3
Based Core Version to 1.2.2
7 months ago
陈曦 36b02a4cf1 云台控制 7 months ago
陈曦 8376c1b1e3 传感器开启更改 7 months ago
陈曦 4db0d5e092 938GPIO口更新 7 months ago
Matthew 9a3edcd087 增加异常保护 7 months ago
Matthew 78b00b42e9 实现另外一种hdrplus 7 months ago
陈曦 5213b007d6 938电源控制 8 months ago
陈曦 edda95c35d 调整采样条件和GPIO控制 8 months ago
陈曦 d142253bfa 增加覆冰气象读数据状态判断 8 months ago
Matthew 63db3248e2 更新默认配置 8 months ago
Matthew 6ffdb1bd22 优化系统相机拍照 8 months ago
Matthew 9dc4c8ebfb 优化系统相机拍照 8 months ago
Matthew 94a2c4a480 修改禁用RAW格式时的拍照数量设置的错误
优化日志
8 months ago
Matthew 7bd68ea40f 修改IMX214默认配置 8 months ago
Matthew 961ce1d768 优化调试信息 8 months ago
Matthew 2961c8d39e Update version to 1.1.2
Based Core Version to 1.2.1
8 months ago
Matthew 48fee82987 修复编译错误 8 months ago
Matthew e168cf27de 调整ImageReader buffer数量
优化日志
8 months ago
Matthew 1292427410 实现独立进度处理raw照片
避免持续内存泄漏
8 months ago
Matthew 2bbb51184a 实现独立进程执行HDR的方式 8 months ago
Matthew 3fcdc8b7d7 Merge branch 'main' of http://61.169.135.146:8081/git/xymp/TermApp 8 months ago
Matthew 32573af5b0 重启MpAPP逻辑优化 8 months ago
陈曦 73fd656cb6 细化传感器电源控制 8 months ago
Matthew e3150e8f1a 调整低内存报警处置登等级 8 months ago
Matthew 3aa81b4ed1 通过文件锁来检测app是否在运行 8 months ago
Matthew b7c329548e 修复重启原因相关的错误 8 months ago
Matthew 827890deed 增加重启原因 8 months ago
Matthew 091dfc86f0 优化自动切换的代码 8 months ago
Matthew ec09fc8dff 实现RAW自动切换 8 months ago
Matthew 275961e581 优化HDRplus实现 8 months ago
Matthew 9f4bf01718 优化拍照代码 8 months ago
jxjajs 3c65c5d5a9 格式修改 8 months ago
Matthew 823ad5a992 RAW格式打印第一帧的拍照参数信息 8 months ago
Matthew cf0f3f52d3 优化拍照 8 months ago
Matthew 358505aeb0 优化RAW合并处理
直接从memory加载
8 months ago
Matthew 370e89f802 修复编译错误 8 months ago
Matthew 0dd32ce9d3 优化代码 8 months ago
Matthew 83d3376f72 移除错误的代码 8 months ago
Matthew cbf3dce87e NDK实现RAW格式拍照 8 months ago
Matthew 0779d47b36 实现DngCreator 8 months ago
Matthew 6d421a6cc1 优化缓存的使用 9 months ago
Matthew f95a79ba89 调整拍照的实现
利用双Request来实现预览和拍摄。后期可以支持Burst Capture
9 months ago
Matthew a068cf6815 支持宁夏默认配置在初始化时自动部署 9 months ago
Matthew 3fee94e03e 实现独立的AI识别 9 months ago
Matthew 562d49382c AI识别增加最小尺寸限制的控制 9 months ago
Matthew 54ddc6f9d6 调整照片方向 9 months ago
Matthew 21018ae898 增加对xmp的支持 9 months ago
Matthew 0c7fa1ab70 修改拍照的临时目录 9 months ago
Matthew 43c6fe8ec1 移除JPG照片 9 months ago
Matthew 8dab92b04d GPS状态的自动监测
超过10分钟,自动关闭
9 months ago
Matthew 72f595b7a3 实现RawBurstCapture 9 months ago
Matthew 9c9a122034 修复参数传递的bug 9 months ago
Matthew cf77fb46da 增加HDRPlus的依赖设置 9 months ago
Matthew 33030ca2b7 记录历史最大充电电压并上报 9 months ago
Matthew 160b599c6e 优化版本更新的逻辑,避免上传空版本号 9 months ago
Matthew 6ceeba91cf 部分上报数据做一些数量的控制 9 months ago
Matthew be1834e132 调整实现 9 months ago
Matthew 4396e9c4e9 调整照片方向 9 months ago
Matthew 9116ca8bc9 优化RAW格式拍摄的实现 9 months ago
Matthew b073ab05b3 修正bug
避免线程中创建handler
9 months ago
Matthew 3b0a857a52 清除临时照片 9 months ago
Matthew 81f6dede64 修复GPS位置调整的bug 9 months ago
Matthew 93fcd7c49e 定位坐标进行转换 9 months ago
Matthew 6bbf1b6195 夜视优化 9 months ago
Matthew 1fdbc1701e 优化RAW格式照片的处理 9 months ago
Matthew 965c3dbd5e 优化短视频的处理 9 months ago
Matthew c381da7947 调整短视频录制结束后的通信方式 9 months ago
Matthew 0cd8952824 参数通过base64编码传递 9 months ago
Matthew 116af6163d 增加日志 9 months ago
Matthew 835ca6ae72 调整反馈信息 9 months ago
Matthew 23c3a482ee 优化RAW格式拍照 9 months ago
Matthew 7a9824b4a6 还原目标版本 9 months ago
Matthew b52b2985fe 优化实现 9 months ago
Matthew 7c9c0a14cb 增加读取超时时间 9 months ago
Matthew 54f89afc4f 实现RAW格式拍照 9 months ago
Matthew bba323a08a RAW格式拍照的相关实现 9 months ago
Matthew 4a154984f4 优化格式 9 months ago
Matthew eee22756a3 如果已经拍照成功,则不上报错误,避免引起冲突 9 months ago
Matthew c80c9e90f2 加注释 9 months ago
Matthew 65d7ec6ff7 优化实现 9 months ago
Matthew f8039312c5 对齐的心跳不传心跳周期 9 months ago
Matthew e3eccd603f 优化日志 9 months ago
Matthew 9871b4d8fb 临时移除重拍机制 9 months ago
Matthew 5b9b14555a 优化重启相关的逻辑 9 months ago
Matthew a1c4739f6d 全部设置为重复拍摄 9 months ago

@ -4,8 +4,8 @@ plugins {
// 10,00,000 major-minor-build
def AppMajorVersion = 1
def AppMinorVersion = 1
def AppBuildNumber = 1
def AppMinorVersion = 3
def AppBuildNumber = 196
def AppVersionName = AppMajorVersion + "." + AppMinorVersion + "." + AppBuildNumber
def AppVersionCode = AppMajorVersion * 100000 + AppMinorVersion * 1000 + AppBuildNumber
@ -36,11 +36,11 @@ android {
externalNativeBuild {
cmake {
// cppFlags '-std=c++17 -frtti -fexceptions -Wno-error=format-security'
cppFlags '-std=c++17 -fexceptions -Wno-error=format-security'
cppFlags '-std=c++17 -fexceptions -Wno-error=format-security -fopenmp'
// cppFlags '-std=c++17 -Wno-error=format-security'
// arguments "-DANDROID_STL=c++_shared"
arguments "-DNCNN_DISABLE_EXCEPTION=OFF", "-DTERM_CORE_ROOT=" + coreroot, "-DOpenCV_DIR=" + opencvsdk + "/sdk/native/jni", "-DASIO_ROOT=" + asioroot, "-DEVPP_ROOT=" + evpproot, "-DNCNN_ROOT=" + ncnnroot
// abiFilters 'arm64-v8a', 'armeabi-v7a'
arguments "-DNCNN_DISABLE_EXCEPTION=OFF", "-DTERM_CORE_ROOT=" + coreroot, "-DOpenCV_DIR=" + opencvsdk + "/sdk/native/jni", "-DHDRPLUS_ROOT=" + hdrplusroot, "-DNCNN_ROOT=" + ncnnroot, "-DHALIDE_ROOT=" + halideroot
abiFilters 'arm64-v8a', 'armeabi-v7a'
// setAbiFilters(['arm64-v8a'])
}
}
@ -52,6 +52,7 @@ android {
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
debug {
minifyEnabled false
jniDebuggable true
testCoverageEnabled false
}
@ -81,6 +82,7 @@ android {
enable isReleaseTask
reset()
include "armeabi-v7a", "arm64-v8a"
// include "arm64-v8a"
universalApk false
}
}
@ -89,12 +91,14 @@ android {
variant.outputs.all { output ->
if (outputFileName.endsWith('.apk')) {
def buildTypeFlag = "dbg"
def prevFileName = "mpapp"
if(variant.buildType.name.equals('release')) {
buildTypeFlag = "rel"
}
def abi = output.getFilter(com.android.build.OutputFile.ABI)
if (abi == null) abi = "all"
def fileName = "mpapp_v${defaultConfig.versionName}_${buildTypeFlag}_${new Date(System.currentTimeMillis()).format("yyyyMMdd")}_${abi}.apk"
if (abi.contains("v7a")) prevFileName = "N938"
def fileName = "${prevFileName}_v${defaultConfig.versionName}_${buildTypeFlag}_${new Date(System.currentTimeMillis()).format("yyyyMMdd")}.apk"
outputFileName = fileName
}
}
@ -113,15 +117,19 @@ android {
exclude 'META-INF/LICENSE-notice.md'
exclude 'META-INF/LICENSE.md'
jniLibs {
useLegacyPackaging true
}
}
}
dependencies {
implementation 'androidx.legacy:legacy-support-v4:1.0.0'
implementation 'androidx.legacy:legacy-support-v13:1.0.0'
// implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
implementation 'androidx.appcompat:appcompat:1.0.0'
// implementation "androidx.core:core:1.10.0" // 使
implementation 'androidx.fragment:fragment:1.3.6'
implementation 'androidx.constraintlayout:constraintlayout:2.1.4'
implementation 'com.google.android.material:material:1.8.0'
implementation project(path: ':common')

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

@ -1,6 +1,8 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools">
xmlns:tools="http://schemas.android.com/tools"
android:sharedUserId="com.xypower.mp"
tools:ignore="Deprecated">
<uses-permission android:name="android.permission.ACCESS_FINE_LOCATION" />
<uses-permission android:name="android.permission.ACCESS_COARSE_LOCATION" />
@ -10,9 +12,10 @@
<uses-permission android:name="android.permission.CHANGE_WIFI_STATE" />
<uses-permission android:name="android.permission.RECEIVE_BOOT_COMPLETED" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.ACCESS_COARSE_LOCATION" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.MANAGE_NETWORK_POLICY"
tools:ignore="ProtectedPermissions" />
<uses-permission
android:name="android.permission.READ_PRIVILEGED_PHONE_STATE"
tools:ignore="ProtectedPermissions" />
@ -55,6 +58,7 @@
<uses-permission android:name="android.permission.SYSTEM_ALERT_WINDOW" />
<uses-permission android:name="android.permission.WAKE_LOCK" />
<uses-permission android:name="android.permission.DISABLE_KEYGUARD" />
<uses-permission android:name="android.permission.USB_PERMISSION" />
<uses-permission
android:name="android.permission.DEVICE_POWER"
tools:ignore="ProtectedPermissions" />
@ -63,14 +67,23 @@
tools:ignore="ProtectedPermissions" />
<uses-permission
android:name="android.permission.START_ACTIVITIES_FROM_BACKGROUND"
tools:ignore="ProtectedPermissions" /> <!-- WiFi AP startTethering -->
tools:ignore="ProtectedPermissions" />
<uses-permission android:name="android.permission.KILL_BACKGROUND_PROCESSES" />
<uses-permission
android:name="android.permission.TETHER_PRIVILEGED"
tools:ignore="ProtectedPermissions" />
<uses-permission android:name="android.permission.CONNECTIVITY_INTERNAL"
tools:ignore="ProtectedPermissions" />
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="com.mediatek.camera.feature.mfnr" />
<uses-permission android:name="android.hardware.usb.accessory" />
<uses-feature android:name="android.hardware.usb.host" />
<uses-feature
android:name="android.hardware.telephony"
android:required="false" />
<queries>
<provider
@ -79,6 +92,17 @@
android:enabled="true"
android:exported="false"
android:grantUriPermissions="true" />
<intent>
<action android:name="android.media.action.IMAGE_CAPTURE" />
</intent>
<intent>
<action android:name="android.media.action.STILL_IMAGE_CAMERA" />
</intent>
<intent>
<action android:name="android.intent.action.TIME_CHANGED" />
</intent>
<package android:name="com.xypower.mplive" />
</queries>
<application
@ -92,6 +116,14 @@
android:supportsRtl="true"
android:theme="@style/Theme.MicroPhoto"
tools:targetApi="28">
<activity
android:name=".LogActivity"
android:exported="false"
android:screenOrientation="landscape" />
<activity
android:name=".video.RawActivity"
android:exported="false"
android:screenOrientation="landscape" />
<activity
android:name=".StreamActivity"
android:exported="false"
@ -142,11 +174,10 @@
<category android:name="android.intent.category.default" />
</intent-filter>
</service>
<service android:name=".FloatingWindow" />
<receiver
android:name=".MicroPhotoService$AlarmReceiver"
android:exported="true" />
android:exported="true" >
</receiver>
<receiver
android:name=".BootBroadcastReceiver"
android:enabled="true"
@ -160,17 +191,7 @@
</intent-filter>
</receiver>
<receiver android:name=".NetworkChangedReceiver" />
<receiver
android:name=".ScreenActionReceiver"
android:exported="true">
<intent-filter android:priority="90000">
<action android:name="android.intent.action.USER_PRESENT" />
<action android:name="android.intent.action.BOOT_COMPLETED" />
<action android:name="android.intent.action.SCREEN_ON" />
<action android:name="android.intent.action.USER_PRESENT" />
<action android:name="android.intent.action.USER_UNLOCKED" />
</intent-filter>
</receiver>
<receiver
android:name="com.xypower.common.UpdateReceiver"
android:enabled="true"
@ -183,11 +204,17 @@
<data android:scheme="package" />
</intent-filter>
</receiver>
<receiver
android:name=".HeartBeatResponseReceiver"
android:enabled="true"
android:exported="true">
<intent-filter >
<action android:name="com.systemui.ACTION_HEARTBEAT_RESPONSE" />
</intent-filter>
</receiver>
<activity
android:name=".MainActivity"
android:exported="true"
android:launchMode="singleTop"
android:screenOrientation="landscape">
<intent-filter>
<action android:name="android.intent.action.MAIN" />

@ -0,0 +1,227 @@
#!/system/bin/sh
# ==============================================
# Configuration parameters - modify as needed
# ==============================================
ETH_IP="192.168.68.91" # Ethernet IP address
ETH_NETMASK="24" # Subnet mask (CIDR format)
ETH_NETWORK="192.168.68.0" # Network address
ETH_BROADCAST="192.168.68.255" # Broadcast address
ETH_GATEWAY="192.168.68.1" # Default gateway
ROUTE_TABLE="20" # Routing table number
MAX_INIT_WAIT=150 # Maximum seconds to wait for ethernet interface
MAX_UP_WAIT=10 # Maximum seconds to wait for interface to come UP
MAX_ROUTE_WAIT=5 # Maximum seconds to wait for routing rules
# For debugging only - comment out in production
# set -x
ANDROID_VERSION=$(getprop ro.build.version.release 2>/dev/null | cut -d '.' -f1)
# Record script start time
SCRIPT_START=$(date +%s)
# Cleanup function - handles unexpected interruptions
cleanup() {
echo "Script interrupted, cleaning up..." >&2
# Add additional cleanup code here if needed
exit 1
}
trap cleanup INT TERM
# Get script directory for finding tools like ethtool
SCRIPT_PATH="$0"
# Ensure path is absolute
case "$SCRIPT_PATH" in
/*) ;; # Already absolute path
*) SCRIPT_PATH="$PWD/$SCRIPT_PATH" ;;
esac
SCRIPT_DIR=$(dirname "$SCRIPT_PATH")
echo "Script directory detected as: $SCRIPT_DIR"
# Only configure rp_filter for eth0 interface
echo 0 > /proc/sys/net/ipv4/conf/eth0/rp_filter 2>/dev/null || true
# Wait for eth0 interface to appear
WAITED=0
while [ $WAITED -lt $MAX_INIT_WAIT ]; do
if [ -d "/sys/class/net/eth0" ]; then
echo "eth0 found after $WAITED seconds"
break
fi
echo "Wait eth0... ($WAITED/$MAX_INIT_WAIT)"
sleep 0.1
WAITED=$((WAITED+1))
done
# Check if eth0 exists
if ! [ -d "/sys/class/net/eth0" ]; then
echo "Error: eth0 not exists" >&2
exit 1
fi
# Check physical connection status
if [ -f "/sys/class/net/eth0/carrier" ]; then
CARRIER=$(cat /sys/class/net/eth0/carrier)
echo "Physical connection status: $CARRIER (1=connected, 0=disconnected)"
if [ "$CARRIER" != "1" ]; then
echo "Warning: Ethernet physical connection may have issues, please check the cable" >&2
fi
fi
# Clear previous configuration
/system/bin/ip link set eth0 down
/system/bin/ip addr flush dev eth0
/system/bin/ip route flush dev eth0
/system/bin/ip route flush table $ROUTE_TABLE
/system/bin/ip rule del to $ETH_NETWORK/$ETH_NETMASK 2>/dev/null || true
# Configure physical layer with ethtool (while interface is DOWN)
if [ -x "$SCRIPT_DIR/ethtool" ]; then
echo "Using ethtool from script directory: $SCRIPT_DIR/ethtool"
"$SCRIPT_DIR/ethtool" -s eth0 speed 10 duplex full autoneg off
# Try alternative path next
elif [ -x "/data/data/com.xypower.mpapp/files/ethtool" ]; then
echo "Configuring eth0 to 10Mbps full duplex..."
/data/data/com.xypower.mpapp/files/ethtool -s eth0 speed 10 duplex full autoneg off
else
echo "Warning: ethtool not found, falling back to sysfs configuration" >&2
# Try sysfs configuration as fallback
if [ -f "/sys/class/net/eth0/speed" ]; then
echo "off" > /sys/class/net/eth0/autoneg 2>/dev/null || true
echo "10" > /sys/class/net/eth0/speed 2>/dev/null || true
echo "full" > /sys/class/net/eth0/duplex 2>/dev/null || true
fi
fi
# ====================================================
# MTK Android 9 IP configuration with loss prevention
# ====================================================
# Configure IP address first while interface is DOWN
echo "Setting IP address while interface is DOWN..."
/system/bin/ip addr add $ETH_IP/$ETH_NETMASK broadcast $ETH_BROADCAST dev eth0
PRE_UP_IP=$(/system/bin/ip addr show eth0 | grep -c "inet $ETH_IP")
echo "IP configuration before UP: $PRE_UP_IP (1=configured, 0=missing)"
# Enable interface and wait for UP
echo "Bringing up interface..."
/system/bin/ip link set eth0 up
if [ "$ANDROID_VERSION" = "9" ]; then
sleep 3
else
# Use standard configuration for other devices
sleep 1
fi
# Check if IP was lost after interface UP (common issue on MTK devices)
POST_UP_IP=$(/system/bin/ip addr show eth0 | grep -c "inet $ETH_IP")
echo "IP configuration after UP: $POST_UP_IP (1=retained, 0=lost)"
# IP address lost detection and recovery
if [ "$PRE_UP_IP" = "1" ] && [ "$POST_UP_IP" = "0" ]; then
echo "Warning: IP address was lost after bringing interface up - MTK issue detected"
echo "Reapplying IP configuration..."
/system/bin/ip addr add $ETH_IP/$ETH_NETMASK broadcast $ETH_BROADCAST dev eth0
# Check if reapplied configuration worked
FIXED_IP=$(/system/bin/ip addr show eth0 | grep -c "inet $ETH_IP")
echo "IP reapplication result: $FIXED_IP (1=success, 0=still missing)"
# If standard method fails, try MTK-specific approaches
if [ "$FIXED_IP" = "0" ]; then
echo "Standard IP configuration failed, trying MTK-specific methods"
# Try ifconfig if available (works better on some MTK devices)
if command -v ifconfig >/dev/null 2>&1; then
echo "Using ifconfig method..."
ifconfig eth0 $ETH_IP netmask 255.255.255.0 up
sleep 1
fi
# Try Android's netd service if available
if [ -x "/system/bin/ndc" ]; then
echo "Using MTK netd service..."
/system/bin/ndc network interface setcfg eth0 $ETH_IP 255.255.255.0 up
sleep 1
fi
fi
fi
# Use loop to wait for interface UP instead of fixed sleep
WAITED=0
while [ $WAITED -lt $MAX_UP_WAIT ]; do
# Check both link status and IP configuration
IF_STATUS=$(/system/bin/ip link show eth0 | grep -c ",UP")
IP_STATUS=$(/system/bin/ip addr show eth0 | grep -c "inet $ETH_IP")
if [ "$IF_STATUS" = "1" ] && [ "$IP_STATUS" = "1" ]; then
echo "Interface is UP with correct IP after $WAITED seconds"
break
fi
echo "Waiting for interface UP with IP... ($WAITED/$MAX_UP_WAIT)"
# If interface is UP but IP is missing, reapply IP
if [ "$IF_STATUS" = "1" ] && [ "$IP_STATUS" = "0" ]; then
echo "Interface UP but IP missing, reapplying IP..."
/system/bin/ip addr add $ETH_IP/$ETH_NETMASK broadcast $ETH_BROADCAST dev eth0
fi
sleep 0.5
WAITED=$((WAITED+1))
done
# Final status check
FINAL_IF_STATUS=$(/system/bin/ip link show eth0 | grep -c ",UP")
FINAL_IP_STATUS=$(/system/bin/ip addr show eth0 | grep -c "inet $ETH_IP")
if [ "$FINAL_IF_STATUS" != "1" ] || [ "$FINAL_IP_STATUS" != "1" ]; then
echo "Warning: Failed to achieve stable interface state with IP" >&2
echo "Final interface status: $FINAL_IF_STATUS (1=UP, 0=DOWN)"
echo "Final IP status: $FINAL_IP_STATUS (1=configured, 0=missing)"
/system/bin/ip addr show eth0
else
echo "Successfully configured eth0 with IP $ETH_IP"
fi
# First add to main routing table
/system/bin/ip route add $ETH_NETWORK/$ETH_NETMASK dev eth0 proto static scope link
# Then add to specified routing table
/system/bin/ip route add $ETH_NETWORK/$ETH_NETMASK dev eth0 proto static scope link table $ROUTE_TABLE
ADD_ROUTE_STATUS=$?
if [ $ADD_ROUTE_STATUS -eq 0 ]; then
echo "Add route successfully"
else
echo "Failed to add route: $ADD_ROUTE_STATUS" >&2
fi
# Only clear ARP and neighbor cache for eth0
/system/bin/ip neigh flush dev eth0
# Add routing rules - only flush cache once after rule is added
/system/bin/ip rule add from all to $ETH_NETWORK/$ETH_NETMASK lookup $ROUTE_TABLE prio 1000
/system/bin/ip route flush cache dev eth0
# Only enable forwarding for eth0 interface
echo 1 > /proc/sys/net/ipv4/conf/eth0/forwarding 2>/dev/null || true
# Wait for routing rules to take effect - using loop check instead of fixed wait
WAITED=0
while [ $WAITED -lt $MAX_ROUTE_WAIT ]; do
if /system/bin/ip rule | grep -q "$ETH_NETWORK/$ETH_NETMASK"; then
echo "Routing rules are now effective after $WAITED seconds"
break
fi
echo "Waiting for routing rules to take effect... ($WAITED/$MAX_ROUTE_WAIT)"
sleep 0.5
WAITED=$((WAITED+1))
done
# Display execution time
SCRIPT_END=$(date +%s)
TOTAL_TIME=$((SCRIPT_END - SCRIPT_START))
echo "Total script execution time: $TOTAL_TIME seconds"
exit 0

Binary file not shown.

Binary file not shown.

@ -14,6 +14,27 @@ set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -ffunction-sections -fdata-sections -Wformat
set(CMAKE_CXX_FLAGS "${CMAKE_C_FLAGS}")
# SET_TARGET_PROPERTIES(microphoto PROPERTIES LINK_FLAGS "-Wl,-s,--gc-sections")
add_definitions(-DUSING_ETHERNET)
if(ANDROID_ABI STREQUAL "armeabi-v7a")
add_definitions(-DUSING_N938)
elseif(ANDROID_ABI STREQUAL "arm64-v8a")
# add_definitions(-DUSING_N938)
# add_definitions(-DUSING_PTZ)
endif()
# OUTPUT_DBG_INFO:
add_definitions(-DOUTPUT_DBG_INFO)
# OUTPUT_SOCKET_DBG_INFO Depends ON OUTPUT_DBG_INFO
# TerminalService.cpp
# add_definitions(-DOUTPUT_SOCKET_DBG_INFO)
# OUTPUT_DB_DBG_INFO Depends ON OUTPUT_DBG_INFO
# Database.cpp
# add_definitions(-DOUTPUT_DB_DBG_INFO)
add_definitions(-DUSING_FFMPEG)
IF (CMAKE_BUILD_TYPE STREQUAL Debug)
ADD_DEFINITIONS(-D_DEBUG)
ELSE()
@ -28,17 +49,22 @@ add_definitions(-DASIO_STANDALONE)
add_definitions(-DUSING_XY_EXTENSION)
# add_definitions(-DUSING_BREAK_PAD)
add_definitions(-DSQLITE_THREADSAFE=1)
add_definitions(-DLIBRAW_NO_MEMPOOL_CHECK=1)
# add_definitions(-DHDRPLUS_NO_DETAILED_OUTPUT=1)
add_definitions(-DHAVE_STRING_H) # for memcpy in md5.c
add_definitions(-DUSING_NRSEC)
add_definitions(-DUSING_NRSEC_VPN)
# add_definitions(-DUSING_NRSEC)
# add_definitions(-DUSING_NRSEC_VPN)
# add_definitions(-DUSING_CERT)
# add_definitions(-DUSING_DOWSE)
# OUTPUT_CAMERA_DBG_INFO: CARERA
# add_definitions(-DOUTPUT_CAMERA_DBG_INFO)
add_definitions(-DALIGN_HB_TIMER_TO_PHOTO)
add_definitions(-DENABLE_3V3_ALWAYS)
add_definitions(-DCURL_STATICLIB)
#add_definitions(-DUSING_N938)
add_definitions(-DUSING_HDRPLUS)
add_definitions(-DUSING_EXEC_HDRP=0)
#set(USING_EXEC_HDRP 1)
# include_directories(${OpenCV_DIR}/include)
# add_library( lib_opencv SHARED IMPORTED )
@ -48,7 +74,7 @@ add_definitions(-DENABLE_3V3_ALWAYS)
project("microphoto")
find_package(OpenCV REQUIRED core imgproc highgui)
find_package(OpenCV REQUIRED core imgproc highgui photo)
# find_package(OpenCV REQUIRED core imgproc)
if(OpenCV_FOUND)
include_directories(${OpenCV_INCLUDE_DIRS})
@ -68,88 +94,115 @@ endif(OpenCV_FOUND)
set(ncnn_DIR ${NCNN_ROOT}/${ANDROID_ABI}/lib/cmake/ncnn)
find_package(ncnn REQUIRED)
# include(mars/src/CMakeUtils.txt)
SET(YAMC_INC_DIR ${CMAKE_SOURCE_DIR})
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/breakpad)
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/libcutils/include)
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/libutils/include)
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/img_utils/include)
SET( IMG_UTILS_SRCS
"img_utils/src/EndianUtils.cpp"
#"img_utils/src/FileInput.cpp"
#"img_utils/src/FileOutput.cpp"
#"img_utils/src/SortedEntryVector.cpp"
"img_utils/src/Input.cpp"
"img_utils/src/Output.cpp"
"img_utils/src/Orderable.cpp"
"img_utils/src/TiffIfd.cpp"
"img_utils/src/TiffWritable.cpp"
"img_utils/src/TiffWriter.cpp"
"img_utils/src/TiffEntry.cpp"
"img_utils/src/TiffEntryImpl.cpp"
"img_utils/src/ByteArrayOutput.cpp"
"img_utils/src/DngUtils.cpp"
"img_utils/src/StripSource.cpp"
libutils/SharedBuffer.cpp
libutils/StrongPointer.cpp
DngCreator.cpp
)
# SET(TERM_CORE_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/../../../../../xymp/Core)
#SET(TERM_CORE_ROOT D:/shxy/xymp/Core)
message(WARNING "include_directories ${HDRPLUS_ROOT}/${ANDROID_ABI}/include")
SET(JSONCPP_SRC_DIR ${CMAKE_CURRENT_SOURCE_DIR}/jsoncpp)
SET(JSONCPP_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/jsoncpp/include)
include_directories(${HDRPLUS_ROOT}/${ANDROID_ABI}/include)
link_directories(${HDRPLUS_ROOT}/${ANDROID_ABI}/lib)
SET(SQLITE_SRC_DIR ${CMAKE_CURRENT_SOURCE_DIR}/sqlite)
SET(SQLITE_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/sqlite)
# message(WARNING "exiv2_DIR=${HDRPLUS_ROOT}/${ANDROID_ABI}/lib/cmake/exiv2")
# SET(exiv2_DIR ${HDRPLUS_ROOT}/${ANDROID_ABI}/lib/cmake/exiv2)
# list(APPEND CMAKE_PREFIX_PATH ${HDRPLUS_ROOT}/${ANDROID_ABI}/lib/cmake/exiv2)
SET(BREAKPAD_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/breakpad)
# find_package(exiv2 REQUIRED CONFIG NAMES exiv2)
# message(STATUS "Found Exiv2 and linked")
SET(CAMERA2_ROOT_DIR ${CMAKE_CURRENT_SOURCE_DIR}/camera2)
# OpenMP
find_package(OpenMP REQUIRED)
SET(FREETYPE_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/freetype)
# SET(EVPP_SRC_DIR ${EVPP_ROOT}/evpp)
# library
include_directories( ${CMAKE_CURRENT_SOURCE_DIR}/hdrplus/include )
include_directories(${YAMC_INC_DIR})
include_directories(${BREAKPAD_ROOT} ${BREAKPAD_ROOT}/common/android/include)
include_directories(${ASIO_ROOT}/include)
add_library( # Sets the name of the library.
sqlite3
# include_directories(${HDRPLUS_ROOT}/${ANDROID_ABI}/include)
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/hdrplus2)
include_directories(hdrplus2/${ANDROID_ABI})
include_directories(${HALIDE_ROOT}/${ANDROID_ABI}/include)
# Sets the library as a shared library.
STATIC
# Provides a relative path to your source file(s).
${SQLITE_SRC_DIR}/sqlite3.c
)
SET(ZLMEDIAKIT_LIBS "")
SET(STREAMING_SRCS "")
add_definitions(-DDISABLE_RTTI)
# include_directories( ${HDRPLUS_ROOT}/${ANDROID_ABI}/include/ZLMediaKit )
# include_directories( ${HDRPLUS_ROOT}/${ANDROID_ABI}/include/ZLToolKit/src/ )
# SET(ZLMEDIAKIT_LIBS ${ZLMEDIAKIT_LIBS} zlmediakit zltoolkit)
SET(STREAMING_SRCS media/RTSPToMP4.cpp media/RTSPRecorder.cpp media/Streaming.cpp )
#SET(HDRPLUS_LIBS raw exiv2 exiv2-xmp expat lcms2 OpenMP::OpenMP_CXX)
#SET(HDRPLUS2_LIBS raw raw_r lcms2 tiff tiffxx jpeg hdrplus_pipeline)
INCLUDE_DIRECTORIES(${SQLITE_INCLUDE_DIR})
file(GLOB BREAKPAD_SOURCES_COMMON
native-lib.cpp
${BREAKPAD_ROOT}/client/linux/crash_generation/crash_generation_client.cc
${BREAKPAD_ROOT}/client/linux/dump_writer_common/thread_info.cc
${BREAKPAD_ROOT}/client/linux/dump_writer_common/ucontext_reader.cc
${BREAKPAD_ROOT}/client/linux/handler/exception_handler.cc
${BREAKPAD_ROOT}/client/linux/handler/minidump_descriptor.cc
${BREAKPAD_ROOT}/client/linux/log/log.cc
${BREAKPAD_ROOT}/client/linux/microdump_writer/microdump_writer.cc
${BREAKPAD_ROOT}/client/linux/minidump_writer/linux_dumper.cc
${BREAKPAD_ROOT}/client/linux/minidump_writer/linux_ptrace_dumper.cc
${BREAKPAD_ROOT}/client/linux/minidump_writer/minidump_writer.cc
${BREAKPAD_ROOT}/client/linux/minidump_writer/pe_file.cc
${BREAKPAD_ROOT}/client/minidump_file_writer.cc
${BREAKPAD_ROOT}/common/convert_UTF.cc
${BREAKPAD_ROOT}/common/md5.cc
${BREAKPAD_ROOT}/common/string_conversion.cc
${BREAKPAD_ROOT}/common/linux/elfutils.cc
${BREAKPAD_ROOT}/common/linux/file_id.cc
${BREAKPAD_ROOT}/common/linux/guid_creator.cc
${BREAKPAD_ROOT}/common/linux/linux_libc_support.cc
${BREAKPAD_ROOT}/common/linux/memory_mapped_file.cc
${BREAKPAD_ROOT}/common/linux/safe_readlink.cc
SET(HDRPLUS_SOURCES
hdrplus/src/align.cpp
hdrplus/src/bayer_image.cpp
hdrplus/src/burst.cpp
hdrplus/src/finish.cpp
hdrplus/src/hdrplus_pipeline.cpp
hdrplus/src/merge.cpp
hdrplus/src/params.cpp
)
file(GLOB BREAKPAD_ASM_SOURCE ${BREAKPAD_ROOT}/common/linux/breakpad_getcontext.S)
set_property(SOURCE ${BREAKPAD_ROOT}/common/linux/breakpad_getcontext.S PROPERTY LANGUAGE C)
# set_source_files_properties(${BREAKPAD_ASM_SOURCE} PROPERTIES LANGUAGE C)
SET(HDRPLUS2_SOURCES
hdrplus2/src/HDRPlus.cpp
hdrplus2/src/Burst.cpp
hdrplus2/src/InputSource.cpp
hdrplus2/src/LibRaw2DngConverter.cpp
hdrplus2/${ANDROID_ABI}/hdrplus_pipeline.registration.cpp)
SET(HDRPLUS2_SOURCES )
# Creates and names a library, sets it as either STATIC
# or SHARED, and provides the relative paths to its source code.
# You can define multiple libraries, and CMake builds them for you.
# Gradle automatically packages shared libraries with your APK.
SET(YAMC_INC_DIR ${CMAKE_SOURCE_DIR})
add_library( # Sets the name of the library.
breakpad
# SET(TERM_CORE_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/../../../../../xymp/Core)
#SET(TERM_CORE_ROOT D:/shxy/xymp/Core)
# Sets the library as a shared library.
STATIC
SET(JSONCPP_SRC_DIR ${CMAKE_CURRENT_SOURCE_DIR}/jsoncpp)
SET(JSONCPP_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/jsoncpp/include)
# Provides a relative path to your source file(s).
${BREAKPAD_SOURCES_COMMON}
${BREAKPAD_ASM_SOURCE}
)
SET(CAMERA2_ROOT_DIR ${CMAKE_CURRENT_SOURCE_DIR}/camera2)
SET(FREETYPE_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/freetype)
# SET(EVPP_SRC_DIR ${EVPP_ROOT}/evpp)
include_directories(${YAMC_INC_DIR})
include_directories(${ASIO_ROOT}/include)
# SET(SQLITE_SRC_DIR ${CMAKE_CURRENT_SOURCE_DIR}/sqlite)
# SET(SQLITE_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/sqlite)
# add_library(sqlite3 STATIC ${SQLITE_SRC_DIR}/sqlite3.c )
# INCLUDE_DIRECTORIES(${SQLITE_INCLUDE_DIR})
INCLUDE_DIRECTORIES(${JSONCPP_INCLUDE_DIR})
@ -243,10 +296,8 @@ include_directories(${TERM_CORE_ROOT})
add_library( # Sets the name of the library.
jsoncpp
# Sets the library as a shared library.
STATIC
# Provides a relative path to your source file(s).
${JSONCPP_SOURCES}
)
@ -259,6 +310,25 @@ add_library(
${FREETYPE_SRC_FILES}
)
if(USING_EXEC_HDRP)
message(WARNING "HDRP Compiled")
add_executable( libhdrp.so
${HDRPLUS_SOURCES}
hdrplus/bin/hdrplus.cpp )
target_link_libraries( libhdrp.so PUBLIC -fopenmp -static-openmp
android z
${OpenCV_LIBS}
# ${LIBRAW_LIBRARY}
${HDRPLUS_LIBS}
)
else(USING_EXEC_HDRP)
endif()
SET(HDRPLUS_SOURCES_EMBED ${HDRPLUS2_SOURCES} )
SET(HDRPLUS_LIBS_EMBED ${HDRPLUS2_LIBS} )
add_library( # Sets the name of the library.
microphoto
@ -268,8 +338,8 @@ add_library( # Sets the name of the library.
# Provides a relative path to your source file(s).
GPIOControl.cpp
MicroPhoto.cpp
TerminalDevice.cpp
PhoneDevice.cpp
PtzController.cpp
# PhoneDevice2.cpp
Camera.cpp
Camera2Reader.cpp
@ -282,12 +352,23 @@ add_library( # Sets the name of the library.
ncnn/yolov5ncnn.cpp
netcamera/httpclient.cpp
netcamera/VendorCtrl.cpp
netcamera/YuShiCtrl.cpp
netcamera/HangYuCtrl.cpp
netcamera/HikonCtrl.cpp
${STREAMING_SRCS}
#serial/WeatherComm.cpp
# camera2/OpenCVFont.cpp
${HDRPLUS_SOURCES_EMBED}
${CAMERA2_SOURCES}
${IMG_UTILS_SRCS}
${TERM_CORE_ROOT}/Factory.cpp
${TERM_CORE_ROOT}/FilePoster.cpp
${TERM_CORE_ROOT}/LogThread.cpp
@ -297,9 +378,11 @@ add_library( # Sets the name of the library.
${TERM_CORE_ROOT}/SpecData_I1_JS.cpp
${TERM_CORE_ROOT}/SpecData_I1_HN.cpp
${TERM_CORE_ROOT}/SpecData_I1_HEN.cpp
${TERM_CORE_ROOT}/SpecData_I1_HEN_TY.cpp
${TERM_CORE_ROOT}/SpecData_I1_HENZZ.cpp
${TERM_CORE_ROOT}/SpecData_I1_SHX.cpp
${TERM_CORE_ROOT}/SpecData_I1_NX.cpp
${TERM_CORE_ROOT}/SpecData_I1_SX_ZY.cpp
${TERM_CORE_ROOT}/SpecData_XY.cpp
${TERM_CORE_ROOT}/SpecData_ZJ.cpp
${TERM_CORE_ROOT}/SpecData_NW.cpp
@ -317,14 +400,18 @@ add_library( # Sets the name of the library.
${TERM_CORE_ROOT}/Client/Terminal_AH.cpp
${TERM_CORE_ROOT}/Client/Terminal_HEN_ZZ.cpp
${TERM_CORE_ROOT}/Client/Terminal_HEN.cpp
${TERM_CORE_ROOT}/Client/Terminal_HEN_TY.cpp
${TERM_CORE_ROOT}/Client/Terminal_SHX.cpp
${TERM_CORE_ROOT}/Client/Terminal_JS.cpp
${TERM_CORE_ROOT}/Client/Terminal_NX.cpp
${TERM_CORE_ROOT}/Client/Terminal_SX_ZY.cpp
${TERM_CORE_ROOT}/Client/Terminal_ZJ.cpp
${TERM_CORE_ROOT}/Client/Terminal_NW.cpp
${TERM_CORE_ROOT}/Client/DataController.cpp
${TERM_CORE_ROOT}/Client/UpgradeReceiver.cpp
${TERM_CORE_ROOT}/Client/Database.cpp
${TERM_CORE_ROOT}/Client/SimulatorDevice.cpp
# ${TERM_CORE_ROOT}/Client/SimulatorDevice.cpp
${TERM_CORE_ROOT}/Client/DataController.cpp
)
@ -345,23 +432,17 @@ find_library( # Sets the name of the path variable.
# can link multiple libraries, such as libraries you define in this
# build script, prebuilt third-party libraries, or system libraries.
target_link_libraries( # Specifies the target library.
microphoto
${PROJECT_NAME}
jsoncpp
freetype
# breakpad
breakpad
# Links the target library to the log library
# included in the NDK.
avcodec avfilter avformat avutil swresample swscale x264
${log-lib}
android camera2ndk mediandk z
ncnn ${OpenCV_LIBS} sqlite3
android camera2ndk mediandk z curl
ncnn ${OpenCV_LIBS} sqlite3 ${HDRPLUS_LIBS_EMBED} ${ZLMEDIAKIT_LIBS}
)
# set_target_properties(${PROJECT_NAME} PROPERTIES LINK_FLAGS_RELEASE "-strip-all")

@ -1,4 +1,3 @@
#include "TerminalDevice.h"
/*
* Copyright 2018 The Android Open Source Project
*

@ -371,7 +371,7 @@ namespace cv {
delete userData;
#if defined(USING_HB)
hb_buffer_destroy(hb_buffer);
#endif 0
#endif // 0
}
// https://freetype.org/freetype2/docs/tutorial/example2.cpp
@ -630,7 +630,7 @@ namespace cv {
#if defined(USING_HB)
hb_buffer_destroy(hb_buffer);
#endif 0
#endif // 0
}
Size FreeType2Impl::getTextSize(

File diff suppressed because it is too large Load Diff

@ -0,0 +1,332 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "DngCreator_JNI"
#include <inttypes.h>
#include <string.h>
#include <algorithm>
#include <array>
#include <memory>
#include <vector>
#include <cmath>
#include <algorithm>
#include <camera/NdkCameraMetadata.h>
#include <img_utils/DngUtils.h>
#include <img_utils/TagDefinitions.h>
#include <img_utils/TiffIfd.h>
#include <img_utils/TiffWriter.h>
#include <img_utils/Output.h>
#include <img_utils/Input.h>
#include <img_utils/StripSource.h>
#include <sys/system_properties.h>
// #include "core_jni_helpers.h"
// #include "android_runtime/AndroidRuntime.h"
// #include "android_runtime/android_hardware_camera2_CameraMetadata.h"
#include <jni.h>
// #include <nativehelper/JNIHelp.h>
using namespace android;
using namespace img_utils;
// using android::base::GetProperty;
/**
* Max width or height dimension for thumbnails.
*/
// max pixel dimension for TIFF/EP
#define MAX_THUMBNAIL_DIMENSION 256
// bytes per sample
#define DEFAULT_PIXEL_STRIDE 2
// byts per pixel
#define BYTES_PER_RGB_PIX 3
#define GPS_LAT_REF_NORTH "N"
#define GPS_LAT_REF_SOUTH "S"
#define GPS_LONG_REF_EAST "E"
#define GPS_LONG_REF_WEST "W"
#define GPS_DATE_FORMAT_STR "yyyy:MM:dd"
#define TIFF_DATETIME_FORMAT "yyyy:MM:dd kk:mm:ss"
class ByteVectorOutput : public Output {
public:
ByteVectorOutput(std::vector<uint8_t>& buf);
virtual ~ByteVectorOutput();
virtual status_t open();
virtual status_t write(const uint8_t* buf, size_t offset, size_t count);
virtual status_t close();
protected:
std::vector<uint8_t>& m_buf;
};
class ByteVectorInput : public Input {
public:
ByteVectorInput(const std::vector<uint8_t>& buf);
virtual ~ByteVectorInput();
/**
* Open this Input.
*
* Returns OK on success, or a negative error code.
*/
status_t open();
/**
* Read bytes into the given buffer. At most, the number of bytes given in the
* count argument will be read. Bytes will be written into the given buffer starting
* at the index given in the offset argument.
*
* Returns the number of bytes read, or NOT_ENOUGH_DATA if at the end of the file. If an
* error has occurred, this will return a negative error code other than NOT_ENOUGH_DATA.
*/
ssize_t read(uint8_t* buf, size_t offset, size_t count);
/**
* Skips bytes in the input.
*
* Returns the number of bytes skipped, or NOT_ENOUGH_DATA if at the end of the file. If an
* error has occurred, this will return a negative error code other than NOT_ENOUGH_DATA.
*/
ssize_t skip(size_t count);
/**
* Close the Input. It is not valid to call open on a previously closed Input.
*
* Returns OK on success, or a negative error code.
*/
status_t close();
protected:
const std::vector<uint8_t>& m_buf;
size_t m_offset;
};
class ByteBufferInput : public Input {
public:
ByteBufferInput(const uint8_t* buf, size_t len);
virtual ~ByteBufferInput();
/**
* Open this Input.
*
* Returns OK on success, or a negative error code.
*/
status_t open();
/**
* Read bytes into the given buffer. At most, the number of bytes given in the
* count argument will be read. Bytes will be written into the given buffer starting
* at the index given in the offset argument.
*
* Returns the number of bytes read, or NOT_ENOUGH_DATA if at the end of the file. If an
* error has occurred, this will return a negative error code other than NOT_ENOUGH_DATA.
*/
ssize_t read(uint8_t* buf, size_t offset, size_t count);
/**
* Skips bytes in the input.
*
* Returns the number of bytes skipped, or NOT_ENOUGH_DATA if at the end of the file. If an
* error has occurred, this will return a negative error code other than NOT_ENOUGH_DATA.
*/
ssize_t skip(size_t count);
/**
* Close the Input. It is not valid to call open on a previously closed Input.
*
* Returns OK on success, or a negative error code.
*/
status_t close();
protected:
const uint8_t* m_buf;
size_t m_len;
size_t m_offset;
};
struct SIZE
{
int width;
int height;
};
#define BAIL_IF_INVALID_RET_BOOL(expr, jnienv, tagId, writer) \
if ((expr) != OK) { \
return false; \
}
#define BAIL_IF_INVALID_RET_NULL_SP(expr, jnienv, tagId, writer) \
if ((expr) != OK) { \
return nullptr; \
}
#define BAIL_IF_INVALID_R(expr, jnienv, tagId, writer) \
if ((expr) != OK) { \
return -1; \
}
#define BAIL_IF_EMPTY_RET_NULL_SP(entry, jnienv, tagId, writer) \
if ((entry).count == 0) { \
return nullptr; \
}
#define BAIL_IF_EXPR_RET_NULL_SP(expr, jnienv, tagId, writer) \
if (expr) { \
return nullptr; \
}
#define ANDROID_DNGCREATOR_CTX_JNI_ID "mNativeContext"
enum {
BITS_PER_SAMPLE = 16,
BYTES_PER_SAMPLE = 2,
BYTES_PER_RGB_PIXEL = 3,
BITS_PER_RGB_SAMPLE = 8,
BYTES_PER_RGB_SAMPLE = 1,
SAMPLES_PER_RGB_PIXEL = 3,
SAMPLES_PER_RAW_PIXEL = 1,
TIFF_IFD_0 = 0,
TIFF_IFD_SUB1 = 1,
TIFF_IFD_GPSINFO = 2,
};
/**
* POD container class for GPS tag data.
*/
class GpsData {
public:
enum {
GPS_VALUE_LENGTH = 6,
GPS_REF_LENGTH = 2,
GPS_DATE_LENGTH = 11,
};
uint32_t mLatitude[GPS_VALUE_LENGTH];
uint32_t mLongitude[GPS_VALUE_LENGTH];
uint32_t mTimestamp[GPS_VALUE_LENGTH];
uint8_t mLatitudeRef[GPS_REF_LENGTH];
uint8_t mLongitudeRef[GPS_REF_LENGTH];
uint8_t mDate[GPS_DATE_LENGTH];
};
// ----------------------------------------------------------------------------
/**
* Container class for the persistent native context.
*/
class NativeContext : public LightRefBase<NativeContext> {
public:
enum {
DATETIME_COUNT = 20,
};
NativeContext(ACameraMetadata* characteristics, ACameraMetadata* result);
virtual ~NativeContext();
TiffWriter* getWriter();
ACameraMetadata* getCharacteristics() const;
ACameraMetadata* getResult() const;
uint32_t getThumbnailWidth() const;
uint32_t getThumbnailHeight() const;
const uint8_t* getThumbnail() const;
bool hasThumbnail() const;
bool setThumbnail(const std::vector<uint8_t>& buffer, uint32_t width, uint32_t height);
void setOrientation(uint16_t orientation);
uint16_t getOrientation() const;
void setDescription(const std::string& desc);
std::string getDescription() const;
bool hasDescription() const;
void setGpsData(const GpsData& data);
GpsData getGpsData() const;
bool hasGpsData() const;
void setCaptureTime(const std::string& formattedCaptureTime);
std::string getCaptureTime() const;
bool hasCaptureTime() const;
protected:
std::vector<uint8_t> mCurrentThumbnail;
TiffWriter mWriter;
ACameraMetadata* mCharacteristics;
ACameraMetadata* mResult;
uint32_t mThumbnailWidth;
uint32_t mThumbnailHeight;
uint16_t mOrientation;
bool mThumbnailSet;
bool mGpsSet;
bool mDescriptionSet;
bool mCaptureTimeSet;
std::string mDescription;
GpsData mGpsData;
std::string mFormattedCaptureTime;
};
class DngCreator : public NativeContext
{
public:
DngCreator(ACameraMetadata* characteristics, ACameraMetadata* result);
#if 0
void setLocation(Location location);
#endif
void writeInputStream(std::vector<uint8_t>& dngOutput, SIZE size, const std::vector<uint8_t>& pixels, long offset);
void writeByteBuffer(std::vector<uint8_t>& dngOutput, SIZE size, const std::vector<uint8_t>& pixels, long offset);
#if 0
void writeImage(OutputStream& dngOutput, AImage& pixels);
#endif
void close();
// private static final DateFormat sExifGPSDateStamp = new SimpleDateFormat(GPS_DATE_FORMAT_STR);
// private static final DateFormat sDateTimeStampFormat = new SimpleDateFormat(TIFF_DATETIME_FORMAT);
#if 0
static {
sDateTimeStampFormat.setTimeZone(TimeZone.getDefault());
sExifGPSDateStamp.setTimeZone(TimeZone.getTimeZone("UTC"));
}
#endif
/**
* Offset, rowStride, and pixelStride are given in bytes. Height and width are given in pixels.
*/
void writeByteBuffer(int width, int height, const std::vector<uint8_t>& pixels, std::vector<uint8_t>& dngOutput, int pixelStride, int rowStride, long offset);
/**
* Generate a direct RGB {@link ByteBuffer} from a {@link Bitmap}.
*/
/**
* Convert coordinate to EXIF GPS tag format.
*/
void toExifLatLong(double value, int data[6]);
void init(ACameraMetadata* characteristics, ACameraMetadata* result, const std::string& captureTime);
sp<TiffWriter> setup(uint32_t imageWidth, uint32_t imageHeight);
void destroy();
void setGpsTags(const std::vector<int>& latTag, const std::string& latRef, const std::vector<int>& longTag, const std::string& longRef, const std::string& dateTag, const std::vector<int>& timeTag);
void writeImage(std::vector<uint8_t>& out, uint32_t width, uint32_t height, const std::vector<uint8_t>& rawBuffer, int rowStride, int pixStride, uint64_t offset, bool isDirect);
void writeInputStream(std::vector<uint8_t>& out, const std::vector<uint8_t>& rawStream, uint32_t width, uint32_t height, long offset);
void writeInputBuffer(std::vector<uint8_t>& out, const uint8_t* rawBuffer, size_t bufferLen, uint32_t width, uint32_t height, long offset);
};

@ -11,6 +11,7 @@
#include <sys/mman.h>
#include <unistd.h>
#include <climits>
#include "GPIOControl.h"
@ -20,35 +21,143 @@
#define IOT_PARAM_WRITE 0xAE
#define IOT_PARAM_READ 0xAF
#define MAX_STRING_LEN 32
typedef struct
std::mutex GpioControl::m_locker;
std::mutex GpioControl::m_gpioLocker;
std::vector<GpioControl::ITEM> GpioControl::m_items;
bool GpioControl::m_cameraPowerStatus = false;
#define ENABLE_GPIO_TRACING
#ifdef ENABLE_GPIO_TRACING
class GpioDebugLogger
{
public:
GpioDebugLogger(int cmd, int value)
{
m_startTime = GetMicroTimeStamp();
m_path = std::string("/sdcard/com.xypower.mpapp/tmp/") + std::to_string(cmd) + std::string("_") + std::to_string(m_startTime) + "_val." + std::to_string(value);
CreateEmptyFile(m_path + ".enter");
}
GpioDebugLogger(int cmd)
{
m_startTime = GetMicroTimeStamp();
m_path = std::string("/sdcard/com.xypower.mpapp/tmp/") + std::to_string(cmd) + std::string("_") + std::to_string(m_startTime) + "_get";
CreateEmptyFile(m_path + ".enter");
}
~GpioDebugLogger()
{
uint64_t ts = (GetMicroTimeStamp() - m_startTime);
if (ts > 1000)
{
CreateEmptyFile(m_path + ".leave." + std::to_string(ts));
}
else
{
std::string path = m_path + ".enter";
std::remove(path.c_str());
}
}
private:
std::string m_path;
uint64_t m_startTime;
};
#endif
size_t GpioControl::turnOnImpl(const IOT_PARAM& param)
{
int cmd;
int value;
int result;
long value2;
char str[MAX_STRING_LEN];
}IOT_PARAM;
size_t oldRef = 0;
size_t references = 1;
std::vector<ITEM>::iterator it;
int res = 0;
int fd = -1;
time_t now = time(NULL);
// check res???
for (it = m_items.begin(); it != m_items.end(); ++it)
{
if (it->cmd == param.cmd)
{
oldRef = it->references;
it->references++;
// it->closeTime = 0;
references = it->references;
if(it->openTime == 0)
it->openTime = now;
SetCamerastatus(it->cmd, true);
break;
}
}
if (it == m_items.end())
{
oldRef = 0;
ITEM item = {param.cmd, references, now};
m_items.push_back(item);
SetCamerastatus(param.cmd, true);
}
if (oldRef == 0/* || param.cmd != CMD_SET_3V3_PWR_EN*/)
{
#ifdef ENABLE_GPIO_TRACING
GpioDebugLogger logger(param.cmd, param.value);
#endif
m_gpioLocker.lock();
fd = open(GPIO_NODE_MP, O_RDONLY);
if( fd > 0 )
{
res = ioctl(fd, IOT_PARAM_WRITE, &param);
close(fd);
#ifdef OUTPUT_DBG_INFO
// int realVal = getInt(param.cmd);
// XYLOG(XYLOG_SEVERITY_INFO, "setInt cmd=%d,value=%d,result=%d RealVal=%d",param.cmd, param.value, param.result/*, realVal*/);
XYLOG(XYLOG_SEVERITY_DEBUG, "setInt cmd=%d,value=%d,result=%d",param.cmd, param.value, param.result);
#endif
}
m_gpioLocker.unlock();
#ifdef _DEBUG
ALOGI("PWR TurnOn cmd=%d,result=%d ref=%u\r\n",param.cmd, param.result, (uint32_t)references);
#endif
std::this_thread::sleep_for(std::chrono::milliseconds(100));
}
return references;
}
void GpioControl::setInt(int cmd, int value)
{
IOT_PARAM param = { cmd, value, 0 };
// param.cmd = cmd;
// param.value = value;
#ifdef ENABLE_GPIO_TRACING
GpioDebugLogger logger(cmd, value);
#endif
m_gpioLocker.lock();
int fd = open(GPIO_NODE_MP, O_RDONLY);
IOT_PARAM param;
param.cmd = cmd;
param.value = value;
// LOGE("set_int fd=%d,cmd=%d,value=%d\r\n",fd, cmd, value);
if( fd > 0 )
if (fd > 0)
{
int res = ioctl(fd, IOT_PARAM_WRITE, &param);
// LOGE("set_int22 cmd=%d,value=%d,result=%d\r\n",param.cmd, param.value, param.result);
close(fd);
#ifdef OUTPUT_DBG_INFO
// int realVal = getInt(param.cmd);
// XYLOG(XYLOG_SEVERITY_INFO, "setInt cmd=%d,value=%d,result=%d RealVal=%d",param.cmd, value, param.result/*, realVal*/);
XYLOG(XYLOG_SEVERITY_DEBUG, "setInt cmd=%d,value=%d,result=%d",param.cmd, value, param.result);
#endif
}
return;
m_gpioLocker.unlock();
}
int GpioControl::getInt(int cmd)
{
#ifdef ENABLE_GPIO_TRACING
GpioDebugLogger logger(cmd);
#endif
m_gpioLocker.lock();
int fd = open(GPIO_NODE_MP, O_RDONLY);
// LOGE("get_int fd=%d,cmd=%d\r\n",fd, cmd);
if( fd > 0 )
@ -57,32 +166,37 @@ int GpioControl::getInt(int cmd)
param.cmd = cmd;
ioctl(fd, IOT_PARAM_READ, &param);
#ifdef _DEBUG
ALOGI("getInt cmd=%d,value=%d,result=%d\r\n",param.cmd, param.value, param.result);
ALOGI("getInt cmd=%d,value=%d,result=%d",param.cmd, param.value, param.result);
#endif
close(fd);
m_gpioLocker.unlock();
return param.value;
}
m_gpioLocker.unlock();
return -1;
}
void GpioControl::setLong(int cmd, long value)
{
int fd = open(GPIO_NODE_MP, O_RDONLY);
IOT_PARAM param;
param.cmd = cmd;
param.value2 = value;
// LOGE("set_long fd=%d,cmd=%d,value2=%ld\r\n",fd, param.cmd, param.value2);
m_gpioLocker.lock();
int fd = open(GPIO_NODE_MP, O_RDONLY);
if( fd > 0 )
{
ioctl(fd, IOT_PARAM_WRITE, &param);
// LOGE("set_long22 cmd=%d,value2=%ld,result=%d\r\n",param.cmd, param.value2, param.result);
close(fd);
}
m_gpioLocker.unlock();
}
long GpioControl::getLong(int cmd)
{
m_gpioLocker.lock();
int fd = open(GPIO_NODE_MP, O_RDONLY);
// LOGE("get_long fd=%d,cmd=%d\r\n",fd, cmd);
if( fd > 0 )
@ -92,32 +206,37 @@ long GpioControl::getLong(int cmd)
ioctl(fd, IOT_PARAM_READ, &param);
// LOGE("get_long22 cmd=%d,value2=%ld,result=%d\r\n",param.cmd, param.value2, param.result);
close(fd);
m_gpioLocker.unlock();
return param.value2;
}
m_gpioLocker.unlock();
return -1;
}
void GpioControl::setString(int cmd, const std::string& value)
{
IOT_PARAM param;
int fd = open(GPIO_NODE_MP, O_RDONLY);
int len = MAX_STRING_LEN < value.size() ? MAX_STRING_LEN : value.size();
param.cmd = cmd;
memset(param.str, 0, MAX_STRING_LEN);
int len = MAX_STRING_LEN < value.size() ? MAX_STRING_LEN : value.size();
memcpy(param.str, value.c_str(), len);
// LOGE("set_string fd=%d,cmd=%d,str=%s\r\n",fd, param.cmd, param.str);
m_gpioLocker.lock();
int fd = open(GPIO_NODE_MP, O_RDONLY);
if( fd > 0 )
{
ioctl(fd, IOT_PARAM_WRITE, &param);
// LOGE("set_string22 cmd=%d,str=%s,result=%d\r\n",param.cmd, param.str, param.result);
close(fd);
}
m_gpioLocker.unlock();
return;
}
std::string GpioControl::getString(int cmd)
{
m_gpioLocker.lock();
int fd = open(GPIO_NODE_MP, O_RDONLY);
// LOGE("get_string fd=%d,cmd=%d\r\n",fd, cmd);
if( fd > 0 )
@ -127,67 +246,273 @@ std::string GpioControl::getString(int cmd)
ioctl(fd, IOT_PARAM_READ, &param);
// LOGE("get_string22 cmd=%d,str=%s,result=%d\r\n",param.cmd, param.str, param.result);
close(fd);
m_gpioLocker.unlock();
return std::string(param.str);
}
m_gpioLocker.unlock();
return "";
}
#ifdef USING_N938
/////////////////////////// Power Control /////////////////////////////////
bool GpioControl::SetN938Cmd(int cmd, int val)
size_t GpioControl::TurnOn(int cmd)
{
char buf[32] = { 0 };
snprintf(buf, "out %d %d", cmd, val);
IOT_PARAM param = { cmd, 1, 0 };
// param.cmd = cmd;
// param.value = value;
IOT_PARAM param;
int len = MAX_STRING_LEN < strlen(buf) ? MAX_STRING_LEN : strlen(buf);
m_locker.lock();
size_t ref = turnOnImpl(param);
m_locker.unlock();
return ref;
}
param.cmd = cmd;
memset(param.str, 0, MAX_STRING_LEN);
memcpy(param.str, value.c_str(), len);
size_t GpioControl::TurnOn(const std::vector<int>& cmds)
{
IOT_PARAM param = { 0, 1, 0 };
// param.cmd = cmd;
// param.value = value;
int fd = open(GPIO_NODE_MP, O_RDONLY);
if( fd > 0 )
std::vector<int>::const_iterator it;
m_locker.lock();
for (it = cmds.cbegin(); it != cmds.cend(); ++it)
{
ioctl(fd, IOT_PARAM_WRITE, &param);
close(fd);
if (*it == 0)
{
continue;
}
param.cmd = *it;
turnOnImpl(param);
}
return;
m_locker.unlock();
return 0;
}
size_t GpioControl::TurnOffImmediately(int cmd)
{
time_t ts = time(NULL);
size_t ref = 0;
std::vector<ITEM>::iterator it;
m_locker.lock();
for (it = m_items.begin(); it != m_items.end(); ++it)
{
if (it->cmd == cmd)
{
if (it->references > 0)
{
it->references = 0;
SetCamerastatus(cmd, false);
setInt(it->cmd, 0);
it->openTime = 0;
}
break;
}
}
m_locker.unlock();
#ifdef _DEBUG
ALOGI("PWR TurnOffNow cmd=%d ref=%u", cmd, (uint32_t)ref);
#endif
return 0;
}
size_t GpioControl::TurnOff(int cmd, uint32_t delayedCloseTime/* = 0*/)
{
time_t ts = 0;
if (delayedCloseTime > 0)
{
ts = time(NULL) + delayedCloseTime;
}
size_t ref = 0;
std::vector<ITEM>::iterator it;
if (delayedCloseTime > 0)
{
std::shared_ptr<PowerControl> powerCtrl = std::make_shared<PowerControl>(cmd);
std::thread th([delayedCloseTime, powerCtrl]() mutable {
std::this_thread::sleep_for(std::chrono::seconds(delayedCloseTime));
powerCtrl.reset();
});
th.detach();
}
m_locker.lock();
for (it = m_items.begin(); it != m_items.end(); ++it)
{
if (it->cmd == cmd)
{
if (it->references > 0)
{
it->references--;
if (it->references == 0)
{
SetCamerastatus(cmd, false);
setInt(it->cmd, 0);
it->openTime = 0;
}
}
break;
}
}
m_locker.unlock();
#ifdef _DEBUG
ALOGI("PWR TurnOff cmd=%d ref=%u", cmd, (uint32_t)ref);
#endif
return 0;
}
size_t GpioControl::TurnOff(const std::vector<int>& cmds, uint32_t delayedCloseTime/* = 0*/)
{
time_t ts = 0;
if (delayedCloseTime > 0)
{
ts = time(NULL) + delayedCloseTime;
}
std::vector<ITEM>::iterator it;
std::vector<int>::const_reverse_iterator itCmd;
if (delayedCloseTime > 0)
{
std::shared_ptr<PowerControl> powerCtrl = std::make_shared<PowerControl>(cmds);
std::thread th([delayedCloseTime, powerCtrl]() mutable {
std::this_thread::sleep_for(std::chrono::seconds(delayedCloseTime));
powerCtrl.reset();
});
th.detach();
}
m_locker.lock();
// turnOnImpl(param);
for (itCmd = cmds.crbegin(); itCmd != cmds.crend(); ++itCmd)
{
for (it = m_items.begin(); it != m_items.end(); ++it)
{
if (it->cmd == *itCmd)
{
if (it->references > 0)
{
it->references--;
if (it->references == 0)
{
SetCamerastatus(it->cmd, false);
setInt(it->cmd, 0);
it->openTime = 0;
}
}
break;
}
}
}
m_locker.unlock();
return 0;
}
bool GpioControl::OpenSensors()
{
GpioControl::setCam3V3Enable(true);
GpioControl::setInt(CMD_SET_485_EN_STATE, true ? 1 : 0);
int igpio;
GpioControl::setInt(CMD_SET_WTH_POWER, 1);
GpioControl::setInt(CMD_SET_PULL_POWER, 1);
GpioControl::setInt(CMD_SET_ANGLE_POWER, 1);
GpioControl::setInt(CMD_SET_OTHER_POWER, 1);
GpioControl::setInt(CMD_SET_PIC1_POWER, 1);
igpio = GpioControl::getInt(CMD_SET_WTH_POWER);
igpio = GpioControl::getInt(CMD_SET_PULL_POWER);
igpio = GpioControl::getInt(CMD_SET_ANGLE_POWER);
igpio = GpioControl::getInt(CMD_SET_OTHER_POWER);
igpio = GpioControl::getInt(CMD_SET_PIC1_POWER);
GpioControl::setInt(CMD_SET_SPI_POWER, 1);
GpioControl::setInt(CMD_SET_485_en0, 1);
GpioControl::setInt(CMD_SET_485_en1, 1);
GpioControl::setInt(CMD_SET_485_en2, 1);
GpioControl::setInt(CMD_SET_485_en3, 1);
GpioControl::setInt(CMD_SET_485_en4, 1);
igpio = GpioControl::getInt(CMD_SET_SPI_POWER);
igpio = GpioControl::getInt(CMD_SET_485_en0);
igpio = GpioControl::getInt(CMD_SET_485_en1);
igpio = GpioControl::getInt(CMD_SET_485_en2);
igpio = GpioControl::getInt(CMD_SET_485_en3);
igpio = GpioControl::getInt(CMD_SET_485_en4);
size_t GpioControl::TurnOff(const std::vector<std::pair<int, uint32_t> >& cmds)
{
for (auto itCmd = cmds.cbegin(); itCmd != cmds.end(); ++itCmd)
{
if (itCmd->second > 0)
{
uint32_t delayedCloseTime = itCmd->second;
std::shared_ptr<PowerControl> powerCtrl = std::make_shared<PowerControl>(itCmd->first);
std::thread th([delayedCloseTime, powerCtrl]() mutable {
std::this_thread::sleep_for(std::chrono::seconds(delayedCloseTime));
powerCtrl.reset();
});
th.detach();
}
}
std::vector<ITEM>::iterator it;
std::vector<std::pair<int, uint32_t> >::const_iterator itCmd;
m_locker.lock();
for (itCmd = cmds.cbegin(); itCmd != cmds.end(); ++itCmd)
{
for (it = m_items.begin(); it != m_items.end(); ++it)
{
if (it->cmd == itCmd->first)
{
if (it->references > 0)
{
it->references--;
if (it->references == 0)
{
SetCamerastatus(it->cmd, false);
setInt(it->cmd, 0);
it->openTime = 0;
}
}
break;
}
}
}
m_locker.unlock();
return 0;
}
bool GpioControl::SetCamerastatus(int cmd, bool status)
{
#ifdef USING_N938
if(cmd == CMD_SET_PIC1_POWER)
m_cameraPowerStatus = status;
#endif
#ifdef USING_PTZ
if(cmd == CMD_SET_PTZ_PWR_ENABLE)
{
m_cameraPowerStatus = status;
}
#endif
return true;
}
bool GpioControl::GetCamerastatus()
{
return m_cameraPowerStatus;
}
bool GpioControl::GetSelftestStatus(time_t wait_time)
{
int cmd = 0;
#ifdef USING_N938
cmd = CMD_SET_PIC1_POWER;
#endif
#ifdef USING_PTZ
cmd = CMD_SET_PTZ_PWR_ENABLE;
#endif
time_t now = time(NULL);
std::vector<ITEM>::iterator it;
for (it = m_items.begin(); it != m_items.end(); ++it)
{
if (it->cmd == cmd && it->references > 0 && it->openTime!=0 && (now - it->openTime >= wait_time))
{
return true;//自检完成
}
}
return false;
}
time_t GpioControl::GetSelfTestRemain(time_t wait_time)
{
int cmd = 0;
#ifdef USING_N938
cmd = CMD_SET_PIC1_POWER;
#endif
#ifdef USING_PTZ
cmd = CMD_SET_PTZ_PWR_ENABLE;
#endif
time_t now = time(NULL);
std::vector<ITEM>::iterator it;
for (it = m_items.begin(); it != m_items.end(); ++it)
{
if (it->cmd == cmd && it->references > 0)
{
time_t remaintime = (now - it->openTime);
remaintime = (wait_time > remaintime) ? (wait_time - remaintime) : 0;
return remaintime;//自检完成
}
}
return 0;
}

@ -8,6 +8,16 @@
#include <string>
#include <chrono>
#include <thread>
#include <mutex>
#include <vector>
#include <utility>
#include <SemaphoreEx.h>
#include <LogThread.h>
#ifndef USING_N938
#ifndef USING_PTZ // MicroPhoto
#define CMD_GET_LIGHT_ADC 101
#define CMD_SET_LIGHT_ADC 102
@ -17,113 +27,188 @@
#define CMD_SET_NETWORK_STATE 106
#define CMD_SET_OTG_STATE 107
#define CMD_GET_OTG_STATE 108
#define CMD_GET_CHARGING_VOL_STATE 110
#define CMD_GET_CHARGING_SHUNT_VOLTAGE_STATE 111
//#define CMD_GET_CHARGING_VOL_STATE 110
//#define CMD_GET_CHARGING_SHUNT_VOLTAGE_STATE 111
#define CMD_GET_CHARGING_BUS_VOLTAGE_STATE 112
#define CMD_GET_CHARGING_POWER_STATE 113
#define CMD_GET_CHARGING_CURRENT_STATE 114
#define CMD_GET_BAT_VOL_STATE 115
#define CMD_GET_BAT_SHUNT_VOLTAGE_STATE 116
//#define CMD_GET_CHARGING_POWER_STATE 113
//#define CMD_GET_CHARGING_CURRENT_STATE 114
//#define CMD_GET_BAT_VOL_STATE 115
//#define CMD_GET_BAT_SHUNT_VOLTAGE_STATE 116
#define CMD_GET_BAT_BUS_VOLTAGE_STATE 117
#define CMD_GET_BAT_POWER_STATE 118
#define CMD_GET_BAT_CURRENT_STATE 119
//#define CMD_GET_BAT_POWER_STATE 118
//#define CMD_GET_BAT_CURRENT_STATE 119
#define CMD_SET_485_STATE 121
#define CMD_SET_SPI_MODE 123
#define CMD_SET_SPI_BITS_PER_WORD 124
#define CMD_SET_SPI_MAXSPEEDHZ 125
#define CMD_SET_PWM_BEE_STATE 126
#define CMD_SET_ALM_MODE 128
#define CMD_SET_SPI_POWER 129
#define CMD_SET_PWM_BEE_STATE 126 // Removed
#define CMD_SET_ALM_MODE 128 // Removed
#define CMD_SET_SYSTEM_RESET 202
#define CMD_SET_SYSTEM_RESET2 203
#define CMD_SET_485_EN_STATE 131
#define CMD_SET_CAM_3V3_EN_STATE 132
#define CMD_SET_12V_EN_STATE 133
#define CMD_SET_SYSTEM_RESET 202
#ifdef USING_N938
#if 1
#define CMD_SET_SPI_POWER 129
#define CMD_SET_3V3_PWR_EN 132
#endif
#define CMD_GET_CAMERA_STATUS 310
#define CMD_SET_MADA_INIT_STATUS 312
#define CMD_SET_MADA_CLOSE_STATUS 313
#define CMD_SET_MADA_REG 314
#define CMD_GET_MADA_REG 315
#define CMD_SET_485_EN_STATE 131
#define CMD_SET_CAM_3V3_EN_STATE 132
#define CMD_SET_12V_EN_STATE 133
#define CMD_SET_485_STATE 121
#define CMD_SET_SPI_MODE 123
#define CMD_SET_SPI_BITS_PER_WORD 124
#define CMD_SET_SPI_MAXSPEEDHZ 125
#define CMD_SET_SPI_POWER 129
#define CMD_SET_WTH_POWER 490
#define CMD_SET_PULL_POWER 491
#define CMD_SET_ANGLE_POWER 492
#define CMD_SET_OTHER_POWER 493
#define CMD_SET_PIC1_POWER 494
#define CMD_SET_GPIO157_POWER 510
#define CMD_SET_GPIO5_POWER 511
#define CMD_SET_PWM_BEE_STATE 126
#define CMD_SET_ALM_MODE 128
#define CMD_SET_485_en0 301
#define CMD_SET_485_en1 302
#define CMD_SET_485_en2 303
#define CMD_SET_485_en3 304
#define CMD_SET_485_en4 305
#define CMD_SET_OTG_STATE 107
#define CMD_GET_OTG_STATE 108
#define CMD_SET_INIT_STATUS 401
#if 0
#define CMD_SET_5V_PWR_ENABLE 517
#define CMD_SET_NEW_OTG_STATE 507
#define CMD_485_0_DE 156 // 485_0 DE信号
#define CMD_485_0_PWR_EN 157 // 485_0 电源使能
#define CMD_485_0_1_DE_EN 171 // 485_0&1DE电平转换芯片使能信号
#define CMD_485_1_DE 172 //
#else // defined(USING_PTZ)
#define CMD_SET_CAM_3V3_EN_STATE 72 // 整板3V3上电使能
#define CMD_3V3_SWITCH_EN 45 // 整板485_3V3信号电平转换电源使能
#define CMD_SET_OTG_STATE 107
#define CMD_GET_OTG_STATE 108
#define CMD_SET_SPI_POWER 129
#define CMD_SET_MADA_MOVE_STATUS 311
#define CMD_UART0_EN 73 // 预留UART0电平转换芯片使能
#define CMD_485_1_PWR_EN 5 // 485_1 电源使能
#define CMD_485_3_DE 6 // 485_3 DE信号
#define CMD_485_2_DE 7 // 485_2 DE信号
#define CMD_485_4_DE 13 // 485_4 DE信号
#define CMD_NETWORK_PWR_EN 94 // 100M网络电源使能
#define CMD_SET_12V_EN_STATE 0 // TO BE ADDED
#define CMD_SET_SYSTEM_RESET 202
#define CMD_SET_SYSTEM_RESET2 203
#define CMD_GET_LIGHT_ADC 101
#define CMD_SET_LIGHT_ADC 102
#define CMD_GET_CHARGING_BUS_VOLTAGE_STATE 112
#define CMD_GET_BAT_BUS_VOLTAGE_STATE 117
#define CMD_SET_SPI_MODE 0 // TO BE ADDED
#define CMD_SET_SPI_BITS_PER_WORD 0 // TO BE ADDED
#define CMD_SET_SPI_MAXSPEEDHZ 0 // TO BE ADDED
#define CMD_SET_485_ENABLE 131
#define CMD_SET_3V3_PWR_EN 132
// #define CMD_SET_5V_PWR_ENABLE 517
#define CMD_SET_SENSOR_ENABLE 504
#define CMD_SET_SENSOR_PWR_ENABLE 505
#define CMD_SET_SENSOR2_ENABLE 506
#define CMD_SET_SENSOR4_ENABLE 510
#define CMD_SET_SENSOR1_PWR_ENABLE 513
#define CMD_SET_SENSOR2_PWR_ENABLE 514
#define CMD_SET_SENSOR3_PWR_ENABLE 509
#define CMD_SET_SENSOR4_PWR_ENABLE 525
#define CMD_SET_PHOTO_IN 520
#define CMD_SET_PHOTO_OUT 515
#define CMD_SET_ADC_ENABLE 500
#define CMD_SET_MIPI_SWITCH 501
#define CMD_SET_CAM_RSTN1 502
#define CMD_SET_CAM_RSTN0 503
#define CMD_SET_SD_DECT 507
#define CMD_SET_PTZ_PWR_ENABLE 508
#define CMD_SET_RTC_ENABLE 511
#define CMD_SET_100M_ENABLE 518
#define CMD_SET_100M_SWITCH_PWR_ENABLE 519
#define CMD_SET_AM_POWER_ENABLE 521
#define CMD_SET_NRSEC_POWER_ENABLE 522
#define CMD_SET_AMP_ENABLE 523
#define CMD_SET_LIGHT1_RESISTOR_ENABLE 524
#define CMD_SET_100M_RESET 526
#define CMD_GET_CAMERA_STATUS 310
#define CMD_SET_MADA_MOVE_STATUS 311
#define CMD_SET_MADA_INIT_STATUS 312
#define CMD_SET_MADA_CLOSE_STATUS 313
#define CMD_SET_MADA_REG 314
#define CMD_GET_MADA_REG 315
#define CMD_SET_INIT_STATUS 401
#endif // USING_PTZ
#else // defined(USING_N938)
#define CMD_485_2_PWR_EN 92 // 485_2 电源使能
#define CMD_485_3_PWR_EN 91 // 485_3 电源使能
#define CMD_485_4_PWR_EN 90 // 485_4 电源使能
#define CMD_SET_SYSTEM_RESET 202
#define CMD_SET_SYSTEM_RESET2 203
#define CMD_SET_485_EN1 302
#define CMD_SET_3V3_PWR_EN 132
#define CMD_SET_UART0_EN 361
#define CMD_SET_485_EN0 301
#define CMD_SET_NETWORK_POWER_EN 362
#define CMD_SET_485_EN3 304
#define CMD_SET_485_EN2 303
#define CMD_SET_SPI_POWER 129
// #define CMD_SET_5V_EN 363
#define CMD_SDCARD_DETECT_EN 364
#define CMD_SET_PIC1_POWER 494
#define CMD_SET_OTHER_POWER 493
#define CMD_SET_ANGLE_POWER 492
#define CMD_SET_PULL_POWER 491
#define CMD_SET_WTH_POWER 490
#define CMD_SET_485_EN4 305
#define CMD_LED_CTRL 365
#define CMD_BD_EN 366
#define CMD_ADC_EN 367
#define CMD_SPI2SERIAL_POWER_EN 368
#define CMD_RS485_3V3_EN 369
// Others
#define CMD_SET_485_EN_STATE 131
#define CMD_SET_OTG_STATE 107
#define CMD_SEC_EN 27 // 加密芯片上电使能
#define CMD_GET_CHARGING_BUS_VOLTAGE_STATE 112
#define CMD_GET_BAT_BUS_VOLTAGE_STATE 117
#define CMD_485_2_3_DE_EN 26 // 485_2&3 DE电平转换芯片使能信号
#define CMD_SET_INIT_STATUS 0 // 401
#define CMD_5V_PWR_EN 14 // 整板5V0上电使能
#define CMD_SD_CARD_DECT 15 // SD CARD DECT
#define CMD_PIC1_EN 16
#endif // USING_N938
#define CMD_OTHER_EN 21
#define CMD_ANGLE_EN 22
#define CMD_PULL_EN 23
#define CMD_WEATHER_EN 24
#define CMD_LED_CTRL 46
#define CMD_BD_EN 47
#define CMD_ADC_EN 44
#define GPIO_NODE_MP "/dev/mtkgpioctrl"
#define CMD_SPI_PWR_EN 43 // SPI转串口电源使能
#define MAX_STRING_LEN 32
// Parameter block exchanged with the GPIO control node (GPIO_NODE_MP,
// "/dev/mtkgpioctrl") — presumably via ioctl; confirm against the driver.
typedef struct
{
int cmd; // command / pin id (one of the CMD_* defines above)
int value; // value to write, or integer result of a read
int result; // driver-reported status — verify exact semantics in the kernel driver
long value2; // wide integer payload (e.g. SPI max speed via setLong)
char str[MAX_STRING_LEN]; // optional string payload
}IOT_PARAM;
#endif
class GpioControl
{
public:
struct ITEM
{
int cmd;
size_t references;
time_t openTime;
};
#endif // USING_N938
private:
static std::mutex m_locker;
static std::vector<ITEM> m_items;
static bool m_cameraPowerStatus;
#ifdef USING_N938
#define GPIO_NODE_N938 "/sys/devices/platform/1000b000.pinctrl/mt_gpio"
#else
#define GPIO_NODE_MP "/dev/mtkgpioctrl"
#endif // USING_N938
static std::mutex m_gpioLocker;
protected:
static size_t turnOnImpl(const IOT_PARAM& param);
static size_t turnOffImpl(const IOT_PARAM& param);
class GpioControl
{
public:
// Power
static size_t TurnOn(int cmd);
static size_t TurnOn(const std::vector<int>& cmds);
static size_t TurnOff(int cmd, uint32_t delayedCloseTime = 0);
static size_t TurnOff(const std::vector<int>& cmds, uint32_t delayedCloseTime = 0);
static size_t TurnOff(const std::vector<std::pair<int, uint32_t> >& cmds);
static size_t TurnOffImmediately(int cmd);
static bool SetCamerastatus(int cmd, bool status);
static bool GetCamerastatus();
static bool GetSelftestStatus(time_t wait_time);
static time_t GetSelfTestRemain(time_t wait_time);
public:
static void setInt(int cmd, int value);
static int getInt(int cmd);
static void setLong(int cmd, long value);
@ -133,20 +218,94 @@ public:
static void setOtgState(bool on)
{
setInt(CMD_SET_OTG_STATE, on ? 1 : 0);
on ? TurnOn(CMD_SET_OTG_STATE) : TurnOff(CMD_SET_OTG_STATE);
}
static bool getOtgState()
{
return getInt(CMD_SET_OTG_STATE) != 0;
#ifndef USING_N938
return getInt(CMD_GET_OTG_STATE) != 0;
#else
return false;
#endif
}
// Switch the camera 3V3 rail; off goes through the delayed-close queue.
// Both branches currently issue the same command — the #ifdef is kept
// only for symmetry with the other per-variant setters.
static void setCam3V3Enable(bool enabled, uint32_t delayedCloseTime)
{
#ifdef USING_PTZ
enabled ? TurnOn(CMD_SET_3V3_PWR_EN) : TurnOff(CMD_SET_3V3_PWR_EN, delayedCloseTime);
#else
enabled ? TurnOn(CMD_SET_3V3_PWR_EN) : TurnOff(CMD_SET_3V3_PWR_EN, delayedCloseTime);
#endif
}
static void setCam3V3Enable(bool enabled)
{
#ifdef ENABLE_3V3_ALWAYS
setInt(CMD_SET_CAM_3V3_EN_STATE, 1);
#ifdef USING_PTZ
enabled ? TurnOn(CMD_SET_3V3_PWR_EN) : TurnOff(CMD_SET_3V3_PWR_EN);
#else
enabled ? TurnOn(CMD_SET_3V3_PWR_EN) : TurnOff(CMD_SET_3V3_PWR_EN);
#endif
}
// Drive the buzzer PWM line; only wired up on the plain MicroPhoto
// build (compiled out for both N938 and PTZ variants).
static void setBeeOn(bool z)
{
#ifndef USING_N938
#ifndef USING_PTZ
    if (z)
    {
        TurnOn(CMD_SET_PWM_BEE_STATE);
    }
    else
    {
        TurnOff(CMD_SET_PWM_BEE_STATE);
    }
#endif
#endif
}
// Toggle the relay ("jidianqi") alarm-mode line; only present on the
// plain MicroPhoto build (no-op for N938 and PTZ variants).
static void setJidianqiState(bool z) {
#ifndef USING_N938
#ifndef USING_PTZ
    if (z)
    {
        TurnOn(CMD_SET_ALM_MODE);
    }
    else
    {
        TurnOff(CMD_SET_ALM_MODE);
    }
#endif
#endif
}
// Power the SPI(-to-serial) rail. After power-up, wait 40 ms so the
// rail settles before the caller starts talking to the peripheral.
static void setSpiPower(bool on) {
    if (on)
    {
        TurnOn(CMD_SET_SPI_POWER);
        std::this_thread::sleep_for(std::chrono::milliseconds(40));
    }
    else
    {
        TurnOff(CMD_SET_SPI_POWER);
    }
}
// Enable/disable the RS-485 transceiver; turn-off may be deferred via
// the delayed-close queue. No-op on N938 boards, whose 485 ports use
// dedicated per-port power commands.
static void setRS485Enable(bool z, uint32_t delayedCloseTime)
{
#ifndef USING_N938
#ifdef USING_PTZ
    const int cmd = CMD_SET_485_ENABLE;
#else
    const int cmd = CMD_SET_485_EN_STATE;
#endif
    if (z)
    {
        TurnOn(cmd);
    }
    else
    {
        TurnOff(cmd, delayedCloseTime);
    }
#endif
}
// Switch the 12 V accessory rail; turn-off may be deferred via the
// delayed-close queue. No-op on N938 boards.
static void set12VEnable(bool z, uint32_t delayedCloseTime)
{
#ifndef USING_N938
z ? TurnOn(CMD_SET_12V_EN_STATE) : TurnOff(CMD_SET_12V_EN_STATE, delayedCloseTime);
#endif
}
// Enable/disable the RS-485 transceiver (immediate turn-off variant).
// No-op on N938 boards, whose 485 ports use dedicated power commands.
// FIX: removed a stray `setInt(CMD_SET_CAM_3V3_EN_STATE, enabled ? 1 : 0);`
// line that referenced the undefined identifier `enabled` — leftover from
// the old setCam3V3Enable body that would not compile here.
static void setRS485Enable(bool z)
{
#ifndef USING_N938
#ifdef USING_PTZ
    z ? TurnOn(CMD_SET_485_ENABLE) : TurnOff(CMD_SET_485_ENABLE);
#else
    z ? TurnOn(CMD_SET_485_EN_STATE) : TurnOff(CMD_SET_485_EN_STATE);
#endif
#endif
}
// Switch the 12 V accessory rail (immediate turn-off variant).
// No-op on N938 boards.
static void set12VEnable(bool z)
{
#ifndef USING_N938
z ? TurnOn(CMD_SET_12V_EN_STATE) : TurnOff(CMD_SET_12V_EN_STATE);
#endif
}
@ -155,110 +314,378 @@ public:
setInt(CMD_SET_SYSTEM_RESET, 1);
}
// Secondary system-reset path (CMD_SET_SYSTEM_RESET2); counterpart of
// reboot(), which uses CMD_SET_SYSTEM_RESET.
static void reboot2()
{
setInt(CMD_SET_SYSTEM_RESET2, 1);
}
// Configure the ambient-light ADC. PTZ builds route this through the
// light-resistor enable command; N938 has no light ADC (no-op).
static void setLightAdc(int i)
{
#ifndef USING_N938
#ifdef USING_PTZ
setInt(CMD_SET_LIGHT1_RESISTOR_ENABLE, i);
#else
setInt(CMD_SET_LIGHT_ADC, i);
#endif
#endif
}
// Read the ambient-light ADC value. PTZ builds reuse the light-resistor
// enable command id for the read; returns -1 on N938 (not supported).
static int getLightAdc()
{
#ifndef USING_N938
#ifdef USING_PTZ
return getInt(CMD_SET_LIGHT1_RESISTOR_ENABLE);
#else
return getInt(CMD_GET_LIGHT_ADC);
#endif
#else
return -1;
#endif
}
// Read the charging-circuit bus voltage; returns -1 on N938 (not supported).
// FIX: removed an unconditional `return getInt(CMD_GET_CHARGING_VOL_STATE);`
// that preceded the #ifndef block — it made the rest of the function
// unreachable and referenced the superseded legacy command (diff residue
// of the old implementation).
static int getChargingVoltage()
{
#ifndef USING_N938
    return getInt(CMD_GET_CHARGING_BUS_VOLTAGE_STATE);
#else
    return -1;
#endif
}
#if 0
static int getChargingShuntVoltage()
{
#ifndef USING_N938
return getInt(CMD_GET_CHARGING_SHUNT_VOLTAGE_STATE);
#else
return -1;
#endif
}
#endif
// Read the raw charging bus-voltage register (CMD_GET_CHARGING_BUS_VOLTAGE_STATE).
static int getChargingBusVoltage() {
return getInt(CMD_GET_CHARGING_BUS_VOLTAGE_STATE);
}
#if 0
static int getChargingPower() {
#ifndef USING_N938
return getInt(CMD_GET_CHARGING_POWER_STATE);
#else
return -1;
#endif
}
static int getChargingCurrent() {
#ifndef USING_N938
return getInt(CMD_GET_CHARGING_CURRENT_STATE);
#else
return -1;
#endif
}
#endif
// Read the battery bus voltage.
// FIX: removed an unconditional `return getInt(CMD_GET_BAT_VOL_STATE);`
// on the first line — it made the intended read below unreachable and
// referenced the superseded legacy command (diff residue of the old
// implementation).
static int getBatteryVoltage() {
    return getInt(CMD_GET_BAT_BUS_VOLTAGE_STATE);
}
#if 0
static int getBatteryShuntVoltage() {
#ifndef USING_N938
return getInt(CMD_GET_BAT_SHUNT_VOLTAGE_STATE);
#else
return -1;
#endif
}
#endif
// Read the raw battery bus-voltage register (CMD_GET_BAT_BUS_VOLTAGE_STATE).
static int getBatteryBusVoltage() {
return getInt(CMD_GET_BAT_BUS_VOLTAGE_STATE);
}
#if 0
static int getBatteryPower() {
#ifndef USING_N938
return getInt(CMD_GET_BAT_POWER_STATE);
#else
return -1;
#endif
}
static int getBatteryCurrent() {
#ifndef USING_N938
return getInt(CMD_GET_BAT_CURRENT_STATE);
#else
return -1;
#endif
}
#endif
// Put the half-duplex RS-485 transceiver into transmit mode. Currently
// compiled out (#if 0) — direction is presumably handled elsewhere; confirm.
static void set485WriteMode() {
#if 0
setInt(CMD_SET_485_STATE, 1);
#endif
}
// Put the half-duplex RS-485 transceiver into receive mode. Currently
// compiled out (#if 0), mirroring set485WriteMode.
static void set485ReadMode() {
#if 0
setInt(CMD_SET_485_STATE, 0);
#endif
}
// Set the SPI mode (clock polarity/phase); not available on N938.
static void setSpiMode(int i) {
#ifndef USING_N938
setInt(CMD_SET_SPI_MODE, i);
#endif
}
// Set the SPI word size in bits; not available on N938.
static void setSpiBitsPerWord(int i) {
#ifndef USING_N938
setInt(CMD_SET_SPI_BITS_PER_WORD, i);
#endif
}
// Set the SPI max clock speed in Hz (wide payload via setLong);
// not available on N938.
static void setSpiMaxSpeedHz(long j) {
#ifndef USING_N938
setLong(CMD_SET_SPI_MAXSPEEDHZ, j);
#endif
}
};
class PowerControl
{
public:
PowerControl(int cmd1) : m_delayCloseTime(0)
{
m_cmds.resize(1, cmd1);
TurnOn();
}
PowerControl(const std::vector<int>& cmds) : m_delayCloseTime(0)
{
m_cmds = cmds;
TurnOn();
}
PowerControl(int cmd1, uint32_t closeDelayTime) : m_delayCloseTime(closeDelayTime)
{
m_cmds.resize(1, cmd1);
TurnOn();
}
PowerControl(int cmd1, int cmd2, uint32_t closeDelayTime) : m_delayCloseTime(closeDelayTime)
{
m_cmds.resize(2, cmd1);
m_cmds[1] = cmd2;
TurnOn();
}
PowerControl(int cmd1, int cmd2, int cmd3, uint32_t closeDelayTime) : m_delayCloseTime(closeDelayTime)
{
m_cmds.resize(3, cmd1);
m_cmds[1] = cmd2;
m_cmds[2] = cmd3;
TurnOn();
}
PowerControl(int cmd1, int cmd2, int cmd3, int cmd4, uint32_t closeDelayTime) : m_delayCloseTime(closeDelayTime)
{
m_cmds.resize(4, cmd1);
m_cmds[1] = cmd2;
m_cmds[2] = cmd3;
m_cmds[3] = cmd4;
TurnOn();
}
PowerControl(int cmd1, int cmd2, int cmd3, int cmd4, int cmd5, uint32_t closeDelayTime) : m_delayCloseTime(closeDelayTime)
{
m_cmds.resize(5, cmd1);
m_cmds[1] = cmd2;
m_cmds[2] = cmd3;
m_cmds[3] = cmd4;
m_cmds[4] = cmd5;
TurnOn();
}
PowerControl(int cmd1, int cmd2, int cmd3, int cmd4, int cmd5, int cmd6, uint32_t closeDelayTime) : m_delayCloseTime(closeDelayTime)
{
m_cmds.resize(6, cmd1);
m_cmds[1] = cmd2;
m_cmds[2] = cmd3;
m_cmds[3] = cmd4;
m_cmds[4] = cmd5;
m_cmds[5] = cmd6;
TurnOn();
}
static void setBeeOn(bool z) {
setInt(CMD_SET_PWM_BEE_STATE, z ? 1 : 0);
PowerControl(int cmd1, int cmd2, int cmd3, int cmd4, int cmd5, int cmd6, int cmd7, uint32_t closeDelayTime) : m_delayCloseTime(closeDelayTime)
{
m_cmds.resize(7, cmd1);
m_cmds[1] = cmd2;
m_cmds[2] = cmd3;
m_cmds[3] = cmd4;
m_cmds[4] = cmd5;
m_cmds[5] = cmd6;
m_cmds[6] = cmd7;
TurnOn();
}
static void setJidianqiState(bool z) {
setInt(CMD_SET_ALM_MODE, z ? 1 : 0);
PowerControl(int cmd1, int cmd2, int cmd3, int cmd4, int cmd5, int cmd6, int cmd7, int cmd8, uint32_t closeDelayTime) : m_delayCloseTime(closeDelayTime)
{
m_cmds.resize(8, cmd1);
m_cmds[1] = cmd2;
m_cmds[2] = cmd3;
m_cmds[3] = cmd4;
m_cmds[4] = cmd5;
m_cmds[5] = cmd6;
m_cmds[6] = cmd7;
m_cmds[7] = cmd8;
TurnOn();
}
static void setSpiPower(bool on) {
setInt(CMD_SET_SPI_POWER, on ? 1 : 0);
if (on)
virtual ~PowerControl()
{
GpioControl::TurnOff(m_cmds, m_delayCloseTime);
#if !defined(NDEBUG) && defined(OUTPUT_DBG_INFO)
std::string status = GetStatus();
XYLOG(XYLOG_SEVERITY_INFO, "PWR After TurnOff %s, DelayCloseTime=%u", status.c_str(), m_delayCloseTime);
#endif
}
#if !defined(NDEBUG) && defined(OUTPUT_DBG_INFO)
std::string GetStatus()
{
std::string result;
for (auto it = m_cmds.cbegin(); it != m_cmds.cend(); ++it)
{
std::this_thread::sleep_for(std::chrono::milliseconds(40));
if (*it == 0)
{
continue;
}
result += std::to_string(*it) + "=" + std::to_string(GpioControl::getInt(*it)) + " ";
}
return result;
}
#endif // #if !defined(NDEBUG) && defined(OUTPUT_DBG_INFO)
static void setRS485Enable(bool z) {
setInt(CMD_SET_485_EN_STATE, z ? 1 : 0);
protected:
void TurnOn()
{
#if !defined(NDEBUG) && defined(OUTPUT_DBG_INFO)
// std::string status = GetStatus();
// XYLOG(XYLOG_SEVERITY_INFO, "PWR Before TurnOn %s", status.c_str());
#endif
GpioControl::TurnOn(m_cmds);
}
protected:
std::vector<int> m_cmds;
uint32_t m_delayCloseTime;
};
static void set12VEnable(bool z) {
setInt(CMD_SET_12V_EN_STATE, z ? 1 : 0);
// RAII power bundle for the on-board (MIPI) camera: powers the 3V3 rail
// on construction, releases it (optionally delayed) in ~PowerControl.
// On N938 the rail list is empty (cmd 0 is a placeholder).
class CameraPowerCtrl : public PowerControl
{
public:
CameraPowerCtrl(uint32_t closeDelayTime) :
#ifdef USING_N938
PowerControl(0, closeDelayTime)
#else // USING_N938
#ifdef USING_PTZ
PowerControl(CMD_SET_3V3_PWR_EN, closeDelayTime)
#else // USING_PTZ
PowerControl(CMD_SET_3V3_PWR_EN, closeDelayTime)
#endif // USING_PTZ
#endif // USING_N938
{
}
};
class NetCameraPowerCtrl : public PowerControl
{
public:
NetCameraPowerCtrl(uint32_t closeDelayTime) :
#ifdef USING_N938
static bool SetN938Cmd(int cmd, int val);
static bool OpenSensors();
static bool CloseSensors();
#endif
PowerControl(CMD_SET_PIC1_POWER, CMD_SET_485_EN_STATE, closeDelayTime)
#else // USING_N938
#ifdef USING_PTZ
PowerControl(CMD_SET_12V_EN_STATE, closeDelayTime)
#else // USING_PTZ
// MicroPhoto
PowerControl(CMD_SET_12V_EN_STATE, CMD_SET_485_EN_STATE, closeDelayTime)
#endif // USING_PTZ
#endif // USING_N938
{
}
};
// RAII power bundle for the pan-tilt-zoom (PLZ) camera: turns on every
// rail that build variant needs in one object; all rails are released
// (optionally delayed) by ~PowerControl in reverse of TurnOn.
class PlzCameraPowerCtrl : public PowerControl
{
public:
PlzCameraPowerCtrl(uint32_t closeDelayTime) :
#ifdef USING_N938
PowerControl(CMD_SET_PIC1_POWER, CMD_SET_485_EN_STATE, closeDelayTime)
#else // USING_N938
#ifdef USING_PTZ
PowerControl(CMD_SET_3V3_PWR_EN, CMD_SET_OTG_STATE, CMD_SET_485_ENABLE, CMD_SET_PTZ_PWR_ENABLE, CMD_SET_12V_EN_STATE, CMD_SET_100M_SWITCH_PWR_ENABLE, closeDelayTime)
#else // USING_PTZ
PowerControl(CMD_SET_OTG_STATE, CMD_SET_12V_EN_STATE, closeDelayTime)
#endif // USING_PTZ
#endif // USING_N938
{
}
};
// RAII power bundle for the wired Ethernet path (OTG adapter plus the
// 100M PHY/switch rails the variant requires).
class EthernetPowerCtrl : public PowerControl
{
public:
EthernetPowerCtrl(uint32_t closeDelayTime) :
#ifdef USING_N938
PowerControl(CMD_SET_OTG_STATE, CMD_SET_NETWORK_POWER_EN, closeDelayTime)
#else // USING_N938
#ifdef USING_PTZ
// PowerControl(CMD_SET_3V3_PWR_EN, CMD_SET_OTG_STATE, CMD_SET_5V_PWR_ENABLE, CMD_SET_100M_ENABLE, CMD_SET_100M_SWITCH_PWR_ENABLE, closeDelayTime)
PowerControl(CMD_SET_3V3_PWR_EN, CMD_SET_OTG_STATE, CMD_SET_100M_ENABLE, closeDelayTime)
#else // USING_PTZ
// Micro Photo
PowerControl(CMD_SET_OTG_STATE, CMD_SET_485_EN_STATE/* Only for wp6*/, closeDelayTime)
#endif // USING_PTZ
#endif // USING_N938
{
}
};
// RAII power bundle for a USB-attached camera (OTG plus the auxiliary
// rails the camera needs on each board variant).
class UsbCameraPowerCtrl : public PowerControl
{
public:
UsbCameraPowerCtrl(uint32_t closeDelayTime) :
#ifdef USING_N938
PowerControl(CMD_SET_OTG_STATE, CMD_SET_NETWORK_POWER_EN, CMD_SET_PIC1_POWER, CMD_SET_485_EN_STATE, closeDelayTime)
#else // USING_N938
#ifdef USING_PTZ
PowerControl(CMD_SET_PTZ_PWR_ENABLE, CMD_SET_100M_ENABLE, CMD_SET_100M_SWITCH_PWR_ENABLE, CMD_SET_12V_EN_STATE, closeDelayTime)
#else // USING_PTZ
PowerControl(CMD_SET_OTG_STATE, CMD_SET_12V_EN_STATE, closeDelayTime)
#endif // USING_PTZ
#endif // USING_N938
{
}
};
// RAII power bundle for a serial-attached camera: SPI/serial bridge,
// RS-485, and supply rails per board variant.
class SerialCameraPowerCtrl : public PowerControl
{
public:
SerialCameraPowerCtrl(uint32_t closeDelayTime) :
#ifdef USING_N938
PowerControl(CMD_SET_SPI_POWER, CMD_SPI2SERIAL_POWER_EN, CMD_RS485_3V3_EN, CMD_SET_PIC1_POWER, CMD_SET_485_EN4, closeDelayTime)
#else // USING_N938
#ifdef USING_PTZ
PowerControl(CMD_SET_12V_EN_STATE, CMD_SET_485_ENABLE, CMD_SET_3V3_PWR_EN, CMD_SET_SPI_POWER, CMD_SET_PTZ_PWR_ENABLE, closeDelayTime)
#else // USING_PTZ
PowerControl(CMD_SET_12V_EN_STATE, CMD_SET_3V3_PWR_EN, CMD_SET_SPI_POWER, CMD_SET_485_EN_STATE, closeDelayTime)
#endif // USING_PTZ
#endif // USING_N938
{
}
};
#endif //MICROPHOTO_GPIOCONTROL_H

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

@ -13,6 +13,7 @@
#include <atomic>
#include <filesystem>
#include <thread>
#include <memory>
#include <camera/NdkCameraManager.h>
#include <camera/NdkCameraError.h>
@ -27,6 +28,11 @@
#include <opencv2/opencv.hpp>
#include <android/bitmap.h>
#include <android/multinetwork.h>
#include "SensorsProtocol.h"
#include "PtzController.h"
#define LOGE(...) ((void)__android_log_print(ANDROID_LOG_ERROR, "error", __VA_ARGS__))
#define LOGD(...) ((void)__android_log_print(ANDROID_LOG_DEBUG, "debug", __VA_ARGS__))
@ -149,18 +155,42 @@ void MatToBitmap(JNIEnv *env, cv::Mat& mat, jobject& bitmap) {
}
#endif
class PowerControl;
class VendorCtrl;
class Streaming;
struct STREAMING_CONTEXT
{
std::shared_ptr<Streaming> stream;
std::shared_ptr<PowerControl> powerCtrl;
std::shared_ptr<PowerControl> ethernetPowerCtrl;
};
class CPhoneDevice : public IDevice
{
public:
friend PtzController;
struct NETWORK
{
std::string iface;
std::string ip;
std::string netmask;
std::string gateway;
};
class CPhoneCamera : public NdkCamera
{
public:
CPhoneCamera(CPhoneDevice* dev, int32_t width, int32_t height, const NdkCamera::CAMERA_PARAMS& params);
virtual ~CPhoneCamera();
virtual bool on_image(cv::Mat& rgb);
virtual bool on_image(cv::Mat rgb);
virtual void on_error(const std::string& msg);
virtual void onDisconnected(ACameraDevice* device);
virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, uint32_t duration, std::vector<std::vector<uint8_t> >& frames);
virtual bool onOneCapture(std::shared_ptr<ACameraMetadata> characteristics, std::shared_ptr<ACameraMetadata> results, uint32_t ldr, uint32_t duration, cv::Mat rgb);
virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, uint32_t duration, std::vector<std::shared_ptr<AImage> >& frames);
protected:
CPhoneDevice* m_dev;
@ -173,6 +203,9 @@ public:
virtual void onImageAvailable(AImageReader* reader);
virtual int32_t getOutputFormat() const;
virtual bool onOneCapture(std::shared_ptr<ACameraMetadata> characteristics, std::shared_ptr<ACameraMetadata> results, uint32_t ldr, uint32_t duration, cv::Mat rgb);
virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, uint32_t duration, std::vector<std::vector<uint8_t> >& frames);
virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, uint32_t duration, std::vector<std::shared_ptr<AImage> >& frames);
protected:
std::string m_path;
@ -182,13 +215,13 @@ public:
{
CPhoneDevice* device;
unsigned int timerType;
unsigned long times;
uint64_t times;
void* data;
unsigned long expectedTimes;
unsigned long uid;
uint64_t expectedTimes;
uint64_t uid;
};
CPhoneDevice(JavaVM* vm, jobject service, const std::string& appPath, unsigned int netId, unsigned int versionCode);
CPhoneDevice(JavaVM* vm, jobject service, const std::string& appPath, uint64_t activeNetHandle, unsigned int versionCode, const std::string& nativeLibDir);
virtual ~CPhoneDevice();
virtual void SetListener(IListener* listener);
@ -199,27 +232,40 @@ public:
virtual bool UpdateSchedules();
virtual bool QuerySystemProperties(map<string, string>& properties);
virtual bool InstallAPP(const std::string& path, unsigned int delayedTime);
virtual bool Reboot(int resetType);
virtual bool Reboot(int resetType, bool manually, const std::string& reason, uint32_t timeout = 1000);
virtual bool EnableGPS(bool enabled);
virtual float QueryBattaryVoltage(int timesForAvg, bool* isCharging);
virtual int QueryBattaryVoltage(int timesForAvg, int* isCharging);
virtual uint32_t QueryLdr();
virtual bool RequestPosition();
virtual timer_uid_t RegisterHeartbeat(unsigned int timerType, unsigned int timeout, time_t tsForNextPhoto);
virtual bool TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector<OSD_INFO>& osds, const std::string& path);
virtual bool CloseCamera();
virtual timer_uid_t RegisterTimer(unsigned int timerType, unsigned int timeout, void* data, unsigned long times = 0);
virtual timer_uid_t RegisterTimer(unsigned int timerType, unsigned int timeout, void* data, uint64_t times = 1);
virtual bool UnregisterTimer(timer_uid_t uid);
virtual unsigned long RequestWakelock(unsigned long timeout);
virtual bool ReleaseWakelock(unsigned long wakelock);
virtual int GetWData(WEATHER_INFO *weatherInfo);
virtual int GetIceData(ICE_INFO *iceInfo, ICE_TAIL *icetail, SENSOR_PARAM *sensorParam);
virtual bool OpenSensors();
virtual bool CloseSensors();
virtual uint64_t RequestWakelock(uint64_t timeout);
virtual bool ReleaseWakelock(uint64_t wakelock);
virtual std::string GetVersion() const;
virtual int GetWData(WEATHER_INFO *weatherInfo, D_SENSOR_PARAM *sensorParam);
virtual int GetIceData(ICE_INFO *iceInfo, ICE_TAIL *icetail, D_SENSOR_PARAM *sensorParam);
virtual bool OpenSensors(int sensortype);
virtual bool CloseSensors(int sensortype, uint32_t delayedCloseTime);
virtual bool OpenPTZSensors(uint32_t sec);
virtual bool ClosePTZSensors(uint32_t delayedCloseTime);
virtual bool GetPTZSensorsStatus(time_t waittime);
virtual bool GetCameraStatus();
virtual void CameraCtrl(unsigned short waitTime, unsigned short delayTime, unsigned char channel, int cmdidx, unsigned char presetno, const char *serfile, unsigned int baud, int addr);
virtual int GetSerialPhoto(int devno, D_IMAGE_DEF *photo);
virtual void InitSerialComm(D_SENSOR_PARAM *sensorParam, char *filedir,const char *logpath);
bool LoadNetworkInfo();
bool GetNextScheduleItem(uint32_t tsBasedZero, uint32_t scheduleTime, vector<uint32_t>& items);
void UpdatePosition(double lon, double lat, double radius, time_t ts);
bool OnVideoReady(bool result, const char* path, unsigned int photoId);
bool OnVideoReady(bool photoOrVideo, bool result, const char* path, unsigned int photoId);
bool OnCaptureReady(bool photoOrVideo, bool result, cv::Mat mat, unsigned int photoId);
void UpdateSignalLevel(int signalLevel);
void UpdateTfCardPath(const std::string& tfCardPath)
{
@ -230,22 +276,24 @@ public:
mBuildTime = buildTime;
}
void UpdateSimcard(const std::string& simcard);
void UpdateNetwork(net_handle_t nethandle, bool available, bool defaultOrEthernet, bool& changed);
static void TurnOnCameraPower(JNIEnv* env);
static void TurnOffCameraPower(JNIEnv* env);
net_handle_t GetEthnetHandle() const;
static void TurnOnOtg(JNIEnv* env);
static void TurnOffOtg(JNIEnv* env);
VendorCtrl* MakeVendorCtrl(int vendor, uint8_t channel, const std::string& ip, const std::string& userName, const std::string& password, net_handle_t netHandle, bool syncTime);
protected:
std::string GetFileName() const;
std::string GetVersion() const;
bool SendBroadcastMessage(std::string action, int value);
// bool MatchCaptureSizeRequest(ACameraManager *cameraManager, const char *selectedCameraId, unsigned int width, unsigned int height, uint32_t cameraOrientation_,
inline bool TakePhotoCb(bool res, const IDevice::PHOTO_INFO& photoInfo, const string& path, time_t photoTime, const std::vector<IDevice::RECOG_OBJECT>& objects) const
bool TakePhotoWithNetCamera(const IDevice::PHOTO_INFO& localPhotoInfo, const std::string& path, const std::vector<IDevice::OSD_INFO>& osds, std::shared_ptr<PowerControl> powerCtrlPtr);
bool TakeVideoWithNetCamera(const IDevice::PHOTO_INFO& localPhotoInfo, const std::string& path, const std::vector<IDevice::OSD_INFO>& osds, std::shared_ptr<PowerControl> powerCtrlPtr);
bool StartPushStreaming(const IDevice::PHOTO_INFO& localPhotoInfo, const std::string& url, const std::vector<IDevice::OSD_INFO>& osds, std::shared_ptr<PowerControl> powerCtrlPtr);
bool PostProcessPhoto(const PHOTO_INFO& photoInfo, const vector<IDevice::OSD_INFO>& osds, const std::string& path, const std::string& cameraInfo, cv::Mat mat, time_t takingTime);
inline bool TakePhotoCb(int res, const IDevice::PHOTO_INFO& photoInfo, const string& path, time_t photoTime, const std::vector<IDevice::RECOG_OBJECT>& objects) const
{
if (m_listener != NULL)
{
@ -254,26 +302,49 @@ protected:
return false;
}
inline bool TakePhotoCb(int result, const IDevice::PHOTO_INFO& photoInfo, const string& path, time_t photoTime) const
{
if (m_listener != NULL)
{
std::vector<IDevice::RECOG_OBJECT> objects;
return m_listener->OnPhotoTaken(result, photoInfo, path, photoTime, objects);
}
return false;
}
inline bool TakePhotoCb(bool res, const IDevice::PHOTO_INFO& photoInfo, const string& path, time_t photoTime) const
inline bool TakePTZPhotoCb(int result, const IDevice::PHOTO_INFO& photoInfo) const
{
if (m_listener != NULL)
{
std::vector<IDevice::RECOG_OBJECT> objects;
return m_listener->OnPhotoTaken(res, photoInfo, path, photoTime, objects);
return m_listener->OnPTZPhotoTaken(result, photoInfo);
}
return false;
}
void QueryPowerInfo(std::map<std::string, std::string>& powerInfo);
inline bool GetPhotoSerialsParamCb(SerialsPhotoParam &param) const
{
if (m_listener != NULL)
{
return m_listener->OnPhotoSerialsParamGet(param);
}
return false;
}
void QueryFlowInfo(std::map<std::string, std::string>& powerInfo);
std::string QueryCpuTemperature();
bool OnImageReady(cv::Mat& mat);
bool OnImageReady(cv::Mat mat);
bool onOneCapture(std::shared_ptr<ACameraMetadata> characteristics, std::shared_ptr<ACameraMetadata> results, uint32_t ldr, uint32_t duration, cv::Mat rgb);
bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, uint32_t duration, std::vector<std::vector<uint8_t> >& frames);
bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, uint32_t duration, std::vector<std::shared_ptr<AImage> >& frames);
void onError(const std::string& msg);
void onDisconnected(ACameraDevice* device);
void CloseCamera2(CPhoneCamera* camera, unsigned int photoId, bool turnOffOtg);
void CloseCamera2(CPhoneCamera* camera, unsigned int photoId, unsigned char cameraType);
static void handleSignal(int sig, siginfo_t *si, void *uc);
bool RegisterHandlerForSignal(int sig);
@ -281,18 +352,33 @@ protected:
void handleTimerImpl(TIMER_CONTEXT* context);
void static handleRebootTimer(union sigval v);
// void handleRebootTimerImpl();
void RestartApp(int rebootType, long timeout);
void RestartApp(int rebootType, long timeout, const std::string& reason);
int QueryBatteryVoltage(int retries);
int CallExecv(int rotation, int frontCamera, const std::string& outputPath, const std::vector<std::string>& images);
void SetStaticIp(const std::string& iface, const std::string& ip, const std::string& netmask, const std::string& gateway);
void ConvertDngToPng(const std::string& dngPath, const std::string& pngPath);
void SetStaticIp();
void ShutdownEthernet();
int ExecuteCommand(const std::string& cmd);
static std::string BuildCaptureResultInfo(ACameraMetadata* result, uint32_t ldr, uint32_t duration, bool burst);
protected:
std::mutex m_devLocker;
mutable std::mutex m_devLocker;
JavaVM* m_vm;
jobject m_javaService;
std::string m_appPath;
std::string m_tfCardPath;
std::string m_nativeLibraryDir;
NETWORK* m_network;
net_handle_t m_defNetHandle;
net_handle_t m_ethnetHandle;
jmethodID mRegisterHeartbeatMid;
jmethodID mUpdateCaptureScheduleMid;
@ -302,12 +388,19 @@ protected:
jmethodID mRequestWakelockMid;
jmethodID mReleaseWakelockMid;
jmethodID mGetSystemInfoMid;
jmethodID mGetFlowInfoMid;
jmethodID mRebootMid;
jmethodID mInstallAppMid;
jmethodID mEnableGpsMid;
jmethodID mRequestPositionMid;
jmethodID mExecHdrplusMid;
jmethodID mSetStaticIpMid;
jmethodID mExecuteCmdMid;
jmethodID mConvertDngToPngMid;
jmethodID mCallSysCameraMid;
std::string mPath;
IDevice::PHOTO_INFO mPhotoInfo;
@ -315,12 +408,12 @@ protected:
IListener* m_listener;
const CFG_RECOGNIZATION* m_pRecognizationCfg;
bool mAIInitialized;
unsigned int mNetId;
unsigned int mVersionCode;
time_t mBuildTime;
atomic_ulong m_timerUidFeed;
atomic_ulong m_wakelockIdFeed;
atomic_ullong m_timerUidFeed;
atomic_ullong m_wakelockIdFeed;
atomic_ullong m_uniqueIdFeed;
std::map<IDevice::timer_uid_t, TIMER_CONTEXT*> mTimers;
mutable CPhoneCamera* mCamera;
@ -328,16 +421,33 @@ protected:
time_t mHeartbeatStartTime;
unsigned int mHeartbeatDuration;
static std::mutex m_powerLocker;
static long mCameraPowerCount;
static long mOtgCount;
std::thread m_threadClose;
std::shared_ptr<PowerControl> m_powerCtrlPtr;
uint32_t m_ethernetFailures;
int m_signalLevel;
time_t m_signalLevelUpdateTime;
std::string m_simcard;
mutable std::mutex m_cameraLocker;
time_t m_lastTime;
std::atomic<bool> m_shouldStopWaiting;
std::atomic<bool> m_isSelfTesting{false};
IDevice::ICE_TAIL m_tempData;
mutable std::mutex m_dataLocker;
mutable std::mutex m_collectDataLocker;
std::condition_variable m_CollectDatacv;
std::atomic<bool> m_collecting;
unsigned long long localDelayTime;
std::map<uint8_t, STREAMING_CONTEXT > m_streamings;
PtzController* m_ptzController;
};

@ -1,912 +0,0 @@
#include "TerminalDevice.h"
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#define LOG_TAG "CameraTestHelpers"
#include "PhoneDevice2.h"
#include <opencv2/opencv.hpp>
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
// #include <opencv2/objdetect.hpp>
// #include <opencv2/features2d.hpp>
// #include <opencv2/core/types.hpp>
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <android/log.h>
#include <AndroidHelper.h>
extern bool GetJniEnv(JavaVM *vm, JNIEnv **env, bool& didAttachThread);
// This value is 2 ^ 18 - 1, and is used to clamp the RGB values before their
// ranges
// are normalized to eight bits.
// Upper clamp for the fixed-point channel values: 2^18 - 1.
static const int kMaxChannelValue = 262143;

// Fixed-point BT.601 YUV -> 0xAARRGGBB conversion (alpha forced to 0xFF).
// Coefficients are the usual float constants scaled by 1024 (10-bit
// fixed point): R = 1.164*Y' + 1.596*Cr, G = 1.164*Y' - 0.813*Cr - 0.391*Cb,
// B = 1.164*Y' + 2.018*Cb. Integer math is used because some Android
// devices lack hardware floating point.
static inline uint32_t YUV2RGB(int nY, int nU, int nV) {
    const int luma = (nY - 16 < 0) ? 0 : (nY - 16);
    const int cb = nU - 128;
    const int cr = nV - 128;

    int chan[3];
    chan[0] = 1192 * luma + 1634 * cr;              // R
    chan[1] = 1192 * luma - 833 * cr - 400 * cb;    // G
    chan[2] = 1192 * luma + 2066 * cb;              // B

    for (int i = 0; i < 3; ++i) {
        // Clamp to 18 bits, then normalise down to 8 bits.
        chan[i] = std::min(kMaxChannelValue, std::max(0, chan[i]));
        chan[i] = (chan[i] >> 10) & 0xff;
    }
    return 0xff000000 | (chan[0] << 16) | (chan[1] << 8) | chan[2];
}
// Caches a global JNI reference to the Java service object and resolves
// the method ids used for timer registration, heartbeat and time sync.
CPhoneDevice2::CPhoneDevice2(JavaVM* vm, jobject service)
{
m_vm = vm;
JNIEnv* env = NULL;
bool attached = false;
bool res = GetJniEnv(m_vm, &env, attached);
if (!res)
{
ALOGE("Failed to get JNI Env");
// NOTE(review): env may still be NULL here yet is dereferenced below —
// confirm GetJniEnv can only fail during process teardown.
}
m_javaService = env->NewGlobalRef(service);
jclass classService = env->GetObjectClass(m_javaService);
mRegisterTimerMid = env->GetMethodID(classService, "registerTimer", "(JI)Z");
mRegisterHeartbeatMid = env->GetMethodID(classService, "registerHeartbeatTimer", "(I)V");
mUnregisterTimerMid = env->GetMethodID(classService, "unregisterTimer", "(J)Z");
mUpdateTimeMid = env->GetMethodID(classService, "updateTime", "(J)Z");
env->DeleteLocalRef(classService);
if (attached)
{
// GetJniEnv attached this native thread; balance it here.
vm->DetachCurrentThread();
}
m_timerUidFeed = time(NULL); // seed uid counter with wall clock so uids differ across restarts
presentRotation_ = 0;
}
// Releases the global reference to the Java service; detaches the native
// thread again if GetJniEnv had to attach it.
CPhoneDevice2::~CPhoneDevice2()
{
JNIEnv* env = NULL;
bool attached = false;
bool res = GetJniEnv(m_vm, &env, attached);
if (!res)
{
ALOGE("Failed to get JNI Env");
}
env->DeleteGlobalRef(m_javaService);
if (attached)
{
m_vm->DetachCurrentThread();
}
m_javaService = NULL;
}
// Store the (non-owned) event listener; callbacks such as FireTimer
// forward to it. Pass NULL to silence callbacks.
void CPhoneDevice2::SetListener(IListener* listener)
{
m_listener = listener;
}
// Push a new wall-clock time (epoch seconds) to the Java side, which
// performs the actual system time update. Returns true on success.
bool CPhoneDevice2::UpdateTime(time_t ts)
{
JNIEnv* env = NULL;
jboolean ret = JNI_FALSE;
bool attached = false;
bool res = GetJniEnv(m_vm, &env, attached);
if (!res)
{
ALOGE("Failed to get JNI Env");
return false;
}
jlong timeInMillis = ((jlong)ts) * 1000; // Java API expects milliseconds
ret = env->CallBooleanMethod(m_javaService, mUpdateTimeMid, timeInMillis);
if (attached)
{
m_vm->DetachCurrentThread();
}
return (ret == JNI_TRUE);
}
// Reboot is not supported by this device implementation; always reports failure.
bool CPhoneDevice2::Reboot()
{
return false;
}
// Allocate a timer uid and ask the Java service to schedule it. On success
// the (uid -> timerType) pair is recorded in mTimers for FireTimer;
// returns 0 on failure.
IDevice::timer_uid_t CPhoneDevice2::RegisterTimer(unsigned int timerType, unsigned int timeout)
{
IDevice::timer_uid_t uid = m_timerUidFeed.fetch_add(1);
ALOGI("NDK RegTimer: uid=%lld Type=%u timeout=%u", uid, timerType, timeout);
JNIEnv* env = NULL;
jboolean ret = JNI_FALSE;
bool attached = false;
bool res = GetJniEnv(m_vm, &env, attached);
if (!res)
{
ALOGE("Failed to get JNI Env");
return 0;
}
ret = env->CallBooleanMethod(m_javaService, mRegisterTimerMid, (jlong)uid, (jint)timeout);
if (attached)
{
m_vm->DetachCurrentThread();
}
if (ret == JNI_TRUE)
{
// Low 32 bits hold the timer type; FireTimer packs a fire count into the high bits.
unsigned long val = timerType;
mTimers.insert(mTimers.end(), std::pair<IDevice::timer_uid_t, unsigned long>(uid, val));
return uid;
}
return 0;
}
// Cancel a timer on the Java side; on success the local bookkeeping entry
// is removed as well. Returns false if JNI fails or Java rejects the uid.
bool CPhoneDevice2::UnregisterTimer(IDevice::timer_uid_t uid)
{
JNIEnv* env = NULL;
jboolean ret = JNI_FALSE;
bool attached = false;
bool res = GetJniEnv(m_vm, &env, attached);
if (!res)
{
ALOGE("Failed to get JNI Env");
return false;
}
ret = env->CallBooleanMethod(m_javaService, mUnregisterTimerMid, (jlong)uid);
if (attached)
{
m_vm->DetachCurrentThread();
}
if (ret == JNI_TRUE)
{
mTimers.erase(uid);
return true;
}
return false;
}
// Dispatch a timer expiry to the listener. The mTimers value packs the
// timer type in the low 32 bits and the cumulative fire count in the
// high 32 bits; the count is incremented on every fire.
// FIX: removed dead debug leftover `if (timerType != 100) { int aa = 0; }`.
// NOTE(review): the 32/32 packing assumes `unsigned long` is 64-bit —
// on a 32-bit ABI the count is silently lost; confirm target ABI.
bool CPhoneDevice2::FireTimer(timer_uid_t uid)
{
    std::map<IDevice::timer_uid_t, unsigned long>::iterator it = mTimers.find(uid);
    if (it == mTimers.end())
    {
        // Unknown uid: timer was never registered or already unregistered.
        return false;
    }
    unsigned long timerType = it->second & 0xFFFFFFFF;
    unsigned long times = (it->second & 0xFFFFFFFF00000000) >> 32;
    times++;
    it->second = timerType | (times << 32);
    if (m_listener == NULL)
    {
        return false;
    }
    m_listener->OnTimeout(uid, timerType, NULL, times);
    return true;
}
// Ask the Java service to (re)arm the heartbeat timer. A fresh uid is
// allocated and returned, but — unlike RegisterTimer — no local
// bookkeeping entry is kept; timerType is currently unused.
// FIX: removed the unused `jboolean ret = JNI_FALSE;` local (the Java
// callback is void, so there is no return value to capture).
IDevice::timer_uid_t CPhoneDevice2::RegisterHeartbeat(unsigned int timerType, unsigned int timeout)
{
    IDevice::timer_uid_t uid = m_timerUidFeed.fetch_add(1);
    JNIEnv* env = NULL;
    bool attached = false;
    bool res = GetJniEnv(m_vm, &env, attached);
    if (!res)
    {
        ALOGE("Failed to get JNI Env");
        return 0;
    }
    env->CallVoidMethod(m_javaService, mRegisterHeartbeatMid, (jint)timeout);
    if (attached)
    {
        m_vm->DetachCurrentThread();
    }
    return uid;
}
// Take a still photo on the camera selected by photoInfo.channel (1-based)
// and write it to `path`. Reports failure to the listener via TakePhotoCb.
// FIX: the three early-return error paths used to leak the ACameraManager
// and the ACameraIdList; the AImageReader_getWindow failure used to leak
// the freshly created reader.
// NOTE(review): imageBuffer_ allocated here is presumably released by the
// destructor/next call — not freed on these error paths; confirm ownership.
bool CPhoneDevice2::TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector<OSD_INFO>& osds, const string& path)
{
    ALOGI("TAKE_PHOTO: CH=%u PR=%u\n", (unsigned int)photoInfo.channel, (unsigned int)photoInfo.preset);
    mPhotoInfo = photoInfo;
    mPath = path;
    mDisplayDimension = DisplayDimension(photoInfo.width, photoInfo.height);
    ALOGE("Image Buffer Size: %d", photoInfo.width * photoInfo.height * 4);
    imageBuffer_ = (uint8_t*)malloc(photoInfo.width * photoInfo.height * 4);
    AASSERT(imageBuffer_ != nullptr, "Failed to allocate imageBuffer_");
    // Channel is 1-based; the NDK camera id list is 0-based.
    int cameraId = (int)photoInfo.channel - 1;
    ACameraIdList *cameraIdList = NULL;
    ACameraMetadata *cameraMetadata = NULL;
    const char *selectedCameraId = NULL;
    camera_status_t camera_status = ACAMERA_OK;
    ACameraManager *cameraManager = ACameraManager_create();
    camera_status = ACameraManager_getCameraIdList(cameraManager, &cameraIdList);
    if (camera_status != ACAMERA_OK) {
        ALOGI("Failed to get camera id list (reason: %d)\n", camera_status);
        ACameraManager_delete(cameraManager);               // FIX: was leaked
        TakePhotoCb(false, photoInfo, path, 0);
        return false;
    }
    if (cameraIdList->numCameras < 1) {
        ALOGI("No camera device detected.\n");
        ACameraManager_deleteCameraIdList(cameraIdList);    // FIX: was leaked
        ACameraManager_delete(cameraManager);               // FIX: was leaked
        TakePhotoCb(false, photoInfo, path, 0);
        return false;
    }
    if (cameraIdList->numCameras <= cameraId) {
        ALOGI("No required camera device %d detected.\n", cameraId);
        ACameraManager_deleteCameraIdList(cameraIdList);    // FIX: was leaked
        ACameraManager_delete(cameraManager);               // FIX: was leaked
        TakePhotoCb(false, photoInfo, path, 0);
        return false;
    }
    selectedCameraId = cameraIdList->cameraIds[cameraId];
    ALOGI("Trying to open Camera2 (id: %s, num of camera : %d)\n", selectedCameraId,
          cameraIdList->numCameras);
    camera_status = ACameraManager_getCameraCharacteristics(cameraManager, selectedCameraId,
                                                            &cameraMetadata);
    if (camera_status != ACAMERA_OK) {
        ALOGI("Failed to get camera meta data of ID:%s\n", selectedCameraId);
    }
    // Facing and sensor orientation drive whether we must swap width/height.
    ACameraMetadata_const_entry face, orientation;
    camera_status = ACameraMetadata_getConstEntry(cameraMetadata, ACAMERA_LENS_FACING, &face);
    uint32_t cameraFacing_ = static_cast<int32_t>(face.data.u8[0]);
    camera_status = ACameraMetadata_getConstEntry(cameraMetadata, ACAMERA_SENSOR_ORIENTATION, &orientation);
    ALOGI("====Current SENSOR_ORIENTATION: %8d", orientation.data.i32[0]);
    uint32_t cameraOrientation_ = orientation.data.i32[0];
    if (cameraOrientation_ == 90 || cameraOrientation_ == 270)
    {
        mDisplayDimension.Flip();
    }
    // Pick the best capture resolution supported by the device for the
    // requested aspect ratio.
    ImageFormat resCap = {(int32_t)photoInfo.width, (int32_t)photoInfo.height, AIMAGE_FORMAT_YUV_420_888};
    MatchCaptureSizeRequest(cameraManager, selectedCameraId, photoInfo.width, photoInfo.height, cameraOrientation_, &resCap);
    deviceStateCallbacks.onDisconnected = camera_device_on_disconnected;
    deviceStateCallbacks.onError = camera_device_on_error;
    camera_status = ACameraManager_openCamera(cameraManager, selectedCameraId,
                                              &deviceStateCallbacks, &cameraDevice);
    if (camera_status != ACAMERA_OK) {
        ALOGI("Failed to open camera device (id: %s)\n", selectedCameraId);
    }
    camera_status = ACameraDevice_createCaptureRequest(cameraDevice, TEMPLATE_STILL_CAPTURE/*TEMPLATE_PREVIEW*/,
                                                       &captureRequest);
    if (camera_status != ACAMERA_OK) {
        ALOGI("Failed to create preview capture request (id: %s)\n", selectedCameraId);
    }
    ACaptureSessionOutputContainer_create(&captureSessionOutputContainer);
    captureSessionStateCallbacks.onReady = capture_session_on_ready;
    captureSessionStateCallbacks.onActive = capture_session_on_active;
    captureSessionStateCallbacks.onClosed = capture_session_on_closed;
    // Manager-level objects are no longer needed once the device is open.
    ACameraMetadata_free(cameraMetadata);
    ACameraManager_deleteCameraIdList(cameraIdList);
    ACameraManager_delete(cameraManager);
    media_status_t status;
    status = AImageReader_new(resCap.width, resCap.height, resCap.format, 5, &mAImageReader);
    if (status != AMEDIA_OK)
    {
        ALOGI("AImageReader_new error\n");
        TakePhotoCb(false, photoInfo, path, 0);
        return false;
    }
    AImageReader_ImageListener listener{
            .context = this,
            .onImageAvailable = OnImageCallback,
    };
    AImageReader_setImageListener(mAImageReader, &listener);
    status = AImageReader_getWindow(mAImageReader, &theNativeWindow);
    if (status != AMEDIA_OK)
    {
        ALOGI("AImageReader_getWindow error\n");
        AImageReader_delete(mAImageReader);                 // FIX: was leaked
        mAImageReader = NULL;
        TakePhotoCb(false, photoInfo, path, 0);
        return false;
    }
    ALOGI("Surface is prepared in %p.\n", theNativeWindow);
    // Wire the reader's surface as the single capture target and fire a
    // one-shot still capture; the result arrives in OnImageCallback.
    ACameraOutputTarget_create(theNativeWindow, &cameraOutputTarget);
    ACaptureRequest_addTarget(captureRequest, cameraOutputTarget);
    ACaptureSessionOutput_create(theNativeWindow, &sessionOutput);
    ACaptureSessionOutputContainer_add(captureSessionOutputContainer, sessionOutput);
    ACameraDevice_createCaptureSession(cameraDevice, captureSessionOutputContainer,
                                       &captureSessionStateCallbacks, &captureSession);
    ACameraCaptureSession_capture(captureSession, NULL, 1, &captureRequest, NULL);
    ALOGI("Surface is prepared in here.\n");
    return true;
}
// Return the shared capture-session state callbacks.
// FIX: the function-local static used to capture the FIRST caller's `this`
// forever; if that instance was destroyed, later sessions received a
// dangling context. Refresh the context on every call instead.
// NOTE: still a single shared struct — only one device instance can safely
// hold an active session at a time.
ACameraCaptureSession_stateCallbacks* CPhoneDevice2::GetSessionListener()
{
    static ACameraCaptureSession_stateCallbacks sessionListener = {
            .context = nullptr,
            .onClosed = CPhoneDevice2::capture_session_on_closed,
            .onReady = CPhoneDevice2::capture_session_on_ready,
            .onActive = CPhoneDevice2::capture_session_on_active,
    };
    sessionListener.context = this;
    return &sessionListener;
}
// Called when the AImageReader has a frame ready: convert YUV -> RGB, draw
// the OSD text, save the JPEG to mPath and notify the listener.
// FIX: the packed YUV staging buffer (`new uint8_t[...]`) was leaked on
// every frame; it is now released after the color conversion.
void CPhoneDevice2::ImageCallback(AImageReader *reader)
{
    bool res = false;
    AImage *image = nullptr;
    media_status_t status = AImageReader_acquireNextImage(reader, &image);
    if (status == AMEDIA_OK && image)
    {
        int32_t srcFormat = -1;
        AImage_getFormat(image, &srcFormat);
        AASSERT(AIMAGE_FORMAT_YUV_420_888 == srcFormat, "Failed to get format");
        int32_t srcPlanes = 0;
        AImage_getNumberOfPlanes(image, &srcPlanes);
        AASSERT(srcPlanes == 3, "Is not 3 planes");
        AImageCropRect srcRect;
        AImage_getCropRect(image, &srcRect);
        int32_t width = srcRect.right - srcRect.left;
        int32_t height = srcRect.bottom - srcRect.top;
        uint8_t *yPixel = nullptr;
        uint8_t *uPixel = nullptr;
        uint8_t *vPixel = nullptr;
        int32_t yLen = 0;
        int32_t uLen = 0;
        int32_t vLen = 0;
        AImage_getPlaneData(image, 0, &yPixel, &yLen);
        AImage_getPlaneData(image, 1, &uPixel, &uLen);
        AImage_getPlaneData(image, 2, &vPixel, &vLen);
        // Pack the planes as Y followed by V then U for the NV21 conversion.
        // NOTE(review): this assumes the V/U planes are laid out so that
        // appending them yields valid NV21 (interleaved VU) — true on devices
        // where the planes alias one interleaved buffer; verify colors on
        // other hardware.
        uint8_t* data = new uint8_t[yLen + vLen + uLen];
        memcpy(data, yPixel, yLen);
        memcpy(data + yLen, vPixel, vLen);
        memcpy(data + yLen + vLen, uPixel, uLen);
        cv::Mat mYUV = cv::Mat(((height * 3) >> 1), width, CV_8UC1, data);
        // cvtColor allocates the destination itself (CV_8UC3 for NV21->RGB).
        cv::Mat _yuv_rgb_img;
        cv::cvtColor(mYUV, _yuv_rgb_img, cv::COLOR_YUV2RGB_NV21, 3);
        delete[] data;   // FIX: staging buffer was leaked on every frame
        cv::rotate(_yuv_rgb_img, _yuv_rgb_img, cv::ROTATE_180);
        // Draw the OSD text twice (thick black under thin white) to get an
        // outlined, readable overlay on any background.
        const char *str = "OSD";
        putText(_yuv_rgb_img, str, cv::Point(50, 50), cv::FONT_HERSHEY_COMPLEX, 1, cv::Scalar(0, 0, 0), 4, cv::LINE_AA);
        putText(_yuv_rgb_img, str, cv::Point(50, 50), cv::FONT_HERSHEY_COMPLEX, 1, cv::Scalar(255, 255, 255), 2, cv::LINE_AA);
        vector<int> compression_params;
        compression_params.push_back(cv::IMWRITE_JPEG_QUALITY);
        compression_params.push_back(80);
        res = cv::imwrite(mPath.c_str(), _yuv_rgb_img, compression_params);
        AImage_delete(image);
        TakePhotoCb(res, mPhotoInfo, mPath, time(NULL));
    }
}
void CPhoneDevice2::OnImageCallback(void *ctx, AImageReader *reader)
{
CPhoneDevice2* pThis = reinterpret_cast<CPhoneDevice2*>(ctx);
if (pThis != NULL)
{
pThis->ImageCallback(reader);
}
}
// Dump a single-plane image (e.g. a JPEG-format AImage) verbatim to `path`.
// Returns true only when the plane data was written successfully.
bool CPhoneDevice2::WriteFile(AImage *image, const string& path)
{
    int planeCount = 0;
    media_status_t status = AImage_getNumberOfPlanes(image, &planeCount);
    ALOGI("Info: getNumberOfPlanes() planeCount = %d", planeCount);
    // Only single-plane (already-encoded) images are supported here.
    if (status != AMEDIA_OK || planeCount != 1)
    {
        ALOGE("Error: getNumberOfPlanes() planeCount = %d", planeCount);
        return false;
    }
    uint8_t *data = nullptr;
    int len = 0;
    AImage_getPlaneData(image, 0, &data, &len);
    bool res = false;
    FILE *file = fopen(path.c_str(), "wb");
    if (file != nullptr)
    {
        if (data != nullptr && len != 0)
        {
            fwrite(data, 1, len, file);
            ALOGI("Capture: %s", path.c_str());
            res = true;
        }
        fclose(file);
    }
    return res;
}
// Static convenience overload: write `image` to the device's current output
// path by delegating to the instance method.
bool CPhoneDevice2::WriteFile(CPhoneDevice2* pThis, AImage *image)
{
return pThis->WriteFile(image, pThis->GetFileName());
}
// Returns the output file path for the current/most recent capture (set by
// TakePhoto).
std::string CPhoneDevice2::GetFileName() const
{
return mPath;
}
/*
const char *selectedCameraId = NULL;
ACameraManager *cameraManager = ACameraManager_create();
*/
// Pick the smallest YUV_420_888 stream size that is strictly larger than the
// requested size and has the same aspect ratio; falls back to the requested
// size when nothing compatible is found. Returns true when a match was found.
// FIX: the ACameraMetadata handle was never freed (leaked per call), and the
// metadata pointer was used uninitialized when the characteristics query
// failed.
bool CPhoneDevice2::MatchCaptureSizeRequest(ACameraManager *cameraManager, const char *selectedCameraId, unsigned int width, unsigned int height, uint32_t cameraOrientation_,
                                            ImageFormat* resCap) {
    DisplayDimension disp(resCap->width, resCap->height);
    // Sensor mounted at 90/270 degrees: compare against the flipped ratio.
    if (cameraOrientation_ == 90 || cameraOrientation_ == 270) {
        disp.Flip();
    }
    ACameraMetadata* metadata = NULL;
    camera_status_t camera_status =
            ACameraManager_getCameraCharacteristics(cameraManager, selectedCameraId, &metadata);
    if (camera_status != ACAMERA_OK || metadata == NULL) {
        // FIX: previously fell through with an uninitialized metadata pointer.
        ALOGI("Failed to get camera characteristics of ID:%s\n", selectedCameraId);
        resCap->width = disp.org_width();
        resCap->height = disp.org_height();
        return false;
    }
    ACameraMetadata_const_entry entry;
    camera_status = ACameraMetadata_getConstEntry(metadata, ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, &entry);
    // Entry layout: groups of 4 int32 values — format, width, height, input.
    bool foundIt = false;
    DisplayDimension foundRes(16384, 16384);
    DisplayDimension maxJPG(0, 0);
    for (uint32_t i = 0; i < entry.count; i += 4) {
        int32_t input = entry.data.i32[i + 3];
        int32_t format = entry.data.i32[i + 0];
        if (input) continue;    // skip input (reprocessing) configurations
        if (format == AIMAGE_FORMAT_YUV_420_888 || format == AIMAGE_FORMAT_JPEG) {
            DisplayDimension res(entry.data.i32[i + 1], entry.data.i32[i + 2]);
            ALOGI("Camera Resolution: %d x %d fmt=%d", res.width(), res.height(), format);
            if (!disp.IsSameRatio(res)) continue;
            if (format == AIMAGE_FORMAT_YUV_420_888 && res > disp) {
                foundIt = true;
                foundRes = res;
            } else if (format == AIMAGE_FORMAT_JPEG && res > maxJPG) {
                maxJPG = res;
            }
        }
    }
    ACameraMetadata_free(metadata);   // FIX: was leaked
    if (foundIt) {
        resCap->width = foundRes.org_width();
        resCap->height = foundRes.org_height();
    } else {
        ALOGI("Did not find any compatible camera resolution, taking 640x480");
        resCap->width = disp.org_width();
        resCap->height = disp.org_height();
    }
    return foundIt;
}
/**
* Convert yuv image inside AImage into ANativeWindow_Buffer
* ANativeWindow_Buffer format is guaranteed to be
* WINDOW_FORMAT_RGBX_8888
* WINDOW_FORMAT_RGBA_8888
* @param buf a {@link ANativeWindow_Buffer } instance, destination of
* image conversion
* @param image a {@link AImage} instance, source of image conversion.
* it will be deleted via {@link AImage_delete}
*/
// Convert the YUV AImage into the RGBA/RGBX window buffer, applying the
// display rotation, then release the source image.
// The image is consumed (AImage_delete) on every path; always returns true.
bool CPhoneDevice2::DisplayImage(ANativeWindow_Buffer *buf, AImage *image) {
    // Destination must be a 32-bit-per-pixel window buffer.
    AASSERT(buf->format == WINDOW_FORMAT_RGBX_8888 ||
            buf->format == WINDOW_FORMAT_RGBA_8888,
            "Not supported buffer format");
    int32_t srcFormat = -1;
    AImage_getFormat(image, &srcFormat);
    AASSERT(AIMAGE_FORMAT_YUV_420_888 == srcFormat, "Failed to get format");
    int32_t planeCount = 0;
    AImage_getNumberOfPlanes(image, &planeCount);
    AASSERT(planeCount == 3, "Is not 3 planes");
    // Dispatch to the rotation-specific converter.
    if (presentRotation_ == 0) {
        PresentImage(buf, image);
    } else if (presentRotation_ == 90) {
        PresentImage90(buf, image);
    } else if (presentRotation_ == 180) {
        PresentImage180(buf, image);
    } else if (presentRotation_ == 270) {
        PresentImage270(buf, image);
    } else {
        AASSERT(0, "NOT recognized display rotation: %d", presentRotation_);
    }
    AImage_delete(image);
    image = nullptr;
    return true;
}
/*
* PresentImage()
* Converting yuv to RGB
* No rotation: (x,y) --> (x, y)
* Refer to:
* https://mathbits.com/MathBits/TISection/Geometry/Transformations2.htm
*/
// Convert the YUV image into the window buffer with no rotation:
// (x, y) -> (x, y). Writes one 32-bit pixel per source pixel via YUV2RGB.
void CPhoneDevice2::PresentImage(ANativeWindow_Buffer *buf, AImage *image) {
AImageCropRect srcRect;
AImage_getCropRect(image, &srcRect);
AImage_getPlaneRowStride(image, 0, &yStride);
AImage_getPlaneRowStride(image, 1, &uvStride);
// NOTE(review): the imageBuffer_ assignments below look redundant —
// AImage_getPlaneData overwrites the pointer with the plane's own storage;
// confirm imageBuffer_ is actually needed here.
yPixel = imageBuffer_;
AImage_getPlaneData(image, 0, &yPixel, &yLen);
vPixel = imageBuffer_ + yLen;
AImage_getPlaneData(image, 1, &vPixel, &vLen);
uPixel = imageBuffer_ + yLen + vLen;
AImage_getPlaneData(image, 2, &uPixel, &uLen);
AImage_getPlanePixelStride(image, 1, &uvPixelStride);
int32_t rowStride;
AImage_getPlaneRowStride(image, 0, &rowStride);
// Clamp the copy region to both the destination buffer and the crop rect.
int32_t height = std::min(buf->height, (srcRect.bottom - srcRect.top));
int32_t width = std::min(buf->width, (srcRect.right - srcRect.left));
uint32_t *out = static_cast<uint32_t *>(buf->bits);
for (int32_t y = 0; y < height; y++) {
// Row base pointers; chroma rows advance at half the luma rate (4:2:0).
const uint8_t *pY = yPixel + yStride * (y + srcRect.top) + srcRect.left;
int32_t uv_row_start = uvStride * ((y + srcRect.top) >> 1);
const uint8_t *pU = uPixel + uv_row_start + (srcRect.left >> 1);
const uint8_t *pV = vPixel + uv_row_start + (srcRect.left >> 1);
for (int32_t x = 0; x < width; x++) {
// Chroma is shared by each 2x2 block; uvPixelStride handles interleaving.
const int32_t uv_offset = (x >> 1) * uvPixelStride;
out[x] = YUV2RGB(pY[x], pU[uv_offset], pV[uv_offset]);
}
out += buf->stride;
}
}
/*
* PresentImage90()
* Converting YUV to RGB
* Rotation image anti-clockwise 90 degree -- (x, y) --> (-y, x)
*/
// Convert YUV to RGB while rotating 90 degrees anti-clockwise:
// (x, y) -> (-y, x). Output is written column-by-column.
void CPhoneDevice2::PresentImage90(ANativeWindow_Buffer *buf, AImage *image) {
AImageCropRect srcRect;
AImage_getCropRect(image, &srcRect);
AImage_getPlaneRowStride(image, 0, &yStride);
AImage_getPlaneRowStride(image, 1, &uvStride);
// NOTE(review): AImage_getPlaneData overwrites these pointers — the
// imageBuffer_ pre-assignments appear to have no effect; confirm.
yPixel = imageBuffer_;
AImage_getPlaneData(image, 0, &yPixel, &yLen);
vPixel = imageBuffer_ + yLen;
AImage_getPlaneData(image, 1, &vPixel, &vLen);
uPixel = imageBuffer_ + yLen + vLen;
AImage_getPlaneData(image, 2, &uPixel, &uLen);
AImage_getPlanePixelStride(image, 1, &uvPixelStride);
// Width/height are swapped relative to the destination buffer (rotation).
int32_t height = std::min(buf->width, (srcRect.bottom - srcRect.top));
int32_t width = std::min(buf->height, (srcRect.right - srcRect.left));
uint32_t *out = static_cast<uint32_t *>(buf->bits);
// Start at the last column; each source row fills one destination column.
out += height - 1;
for (int32_t y = 0; y < height; y++) {
const uint8_t *pY = yPixel + yStride * (y + srcRect.top) + srcRect.left;
int32_t uv_row_start = uvStride * ((y + srcRect.top) >> 1);
const uint8_t *pU = uPixel + uv_row_start + (srcRect.left >> 1);
const uint8_t *pV = vPixel + uv_row_start + (srcRect.left >> 1);
for (int32_t x = 0; x < width; x++) {
const int32_t uv_offset = (x >> 1) * uvPixelStride;
// [x, y]--> [-y, x]
int testb = pU[uv_offset];
int testc = pV[uv_offset];
int testA = pY[x];
out[x * buf->stride] = YUV2RGB(testA, testb, testc);
}
out -= 1; // move to the next column
}
}
/*
* PresentImage180()
* Converting yuv to RGB
* Rotate image 180 degree: (x, y) --> (-x, -y)
*/
// Convert YUV to RGB while rotating 180 degrees: (x, y) -> (-x, -y).
// Rows are written bottom-up and mirrored horizontally.
void CPhoneDevice2::PresentImage180(ANativeWindow_Buffer *buf, AImage *image) {
AImageCropRect srcRect;
AImage_getCropRect(image, &srcRect);
AImage_getPlaneRowStride(image, 0, &yStride);
AImage_getPlaneRowStride(image, 1, &uvStride);
// NOTE(review): AImage_getPlaneData overwrites these pointers — the
// imageBuffer_ pre-assignments appear to have no effect; confirm.
yPixel = imageBuffer_;
AImage_getPlaneData(image, 0, &yPixel, &yLen);
vPixel = imageBuffer_ + yLen;
AImage_getPlaneData(image, 1, &vPixel, &vLen);
uPixel = imageBuffer_ + yLen + vLen;
AImage_getPlaneData(image, 2, &uPixel, &uLen);
AImage_getPlanePixelStride(image, 1, &uvPixelStride);
int32_t height = std::min(buf->height, (srcRect.bottom - srcRect.top));
int32_t width = std::min(buf->width, (srcRect.right - srcRect.left));
uint32_t *out = static_cast<uint32_t *>(buf->bits);
// Start at the last destination row and walk upwards.
out += (height - 1) * buf->stride;
for (int32_t y = 0; y < height; y++) {
const uint8_t *pY = yPixel + yStride * (y + srcRect.top) + srcRect.left;
int32_t uv_row_start = uvStride * ((y + srcRect.top) >> 1);
const uint8_t *pU = uPixel + uv_row_start + (srcRect.left >> 1);
const uint8_t *pV = vPixel + uv_row_start + (srcRect.left >> 1);
for (int32_t x = 0; x < width; x++) {
const int32_t uv_offset = (x >> 1) * uvPixelStride;
// mirror image since we are using front camera
out[width - 1 - x] = YUV2RGB(pY[x], pU[uv_offset], pV[uv_offset]);
}
out -= buf->stride;
}
}
/*
* PresentImage270()
* Converting image from YUV to RGB
* Rotate Image counter-clockwise 270 degree: (x, y) --> (y, x)
*/
// Convert YUV to RGB while rotating 270 degrees anti-clockwise:
// (x, y) -> (y, x). Output is written column-by-column, left to right.
void CPhoneDevice2::PresentImage270(ANativeWindow_Buffer *buf, AImage *image) {
AImageCropRect srcRect;
AImage_getCropRect(image, &srcRect);
AImage_getPlaneRowStride(image, 0, &yStride);
AImage_getPlaneRowStride(image, 1, &uvStride);
// NOTE(review): AImage_getPlaneData overwrites these pointers — the
// imageBuffer_ pre-assignments appear to have no effect; confirm.
yPixel = imageBuffer_;
AImage_getPlaneData(image, 0, &yPixel, &yLen);
vPixel = imageBuffer_ + yLen;
AImage_getPlaneData(image, 1, &vPixel, &vLen);
uPixel = imageBuffer_ + yLen + vLen;
AImage_getPlaneData(image, 2, &uPixel, &uLen);
AImage_getPlanePixelStride(image, 1, &uvPixelStride);
// Width/height are swapped relative to the destination buffer (rotation).
int32_t height = std::min(buf->width, (srcRect.bottom - srcRect.top));
int32_t width = std::min(buf->height, (srcRect.right - srcRect.left));
uint32_t *out = static_cast<uint32_t *>(buf->bits);
for (int32_t y = 0; y < height; y++) {
const uint8_t *pY = yPixel + yStride * (y + srcRect.top) + srcRect.left;
int32_t uv_row_start = uvStride * ((y + srcRect.top) >> 1);
const uint8_t *pU = uPixel + uv_row_start + (srcRect.left >> 1);
const uint8_t *pV = vPixel + uv_row_start + (srcRect.left >> 1);
for (int32_t x = 0; x < width; x++) {
const int32_t uv_offset = (x >> 1) * uvPixelStride;
int testb = pU[uv_offset];
int testc = pV[uv_offset];
int testA = pY[x];
out[(width - 1 - x) * buf->stride] =
YUV2RGB(testA, testb, testc);
}
out += 1; // move to the next column
}
}
/*
bool CPhoneDevice2::SendBroadcastMessage(String16 action, int value)
{
TM_INFO_LOG("sendBroadcastMessage(): Action: %s, Value: %d ", action.string(), value);
sp <IServiceManager> sm = defaultServiceManager();
sp <IBinder> am = sm->getService(String16("activity"));
if (am != NULL) {
Parcel data, reply;
data.writeInterfaceToken(String16("android.app.IActivityManager"));
data.writeStrongBinder(NULL);
// intent begin
data.writeString16(action); // action
data.writeInt32(0); // URI data type
data.writeString16(NULL, 0); // type
data.writeInt32(0); // flags
data.writeString16(NULL, 0); // package name
data.writeString16(NULL, 0); // component name
data.writeInt32(0); // source bound - size
data.writeInt32(0); // categories - size
data.writeInt32(0); // selector - size
data.writeInt32(0); // clipData - size
data.writeInt32(-2); // contentUserHint: -2 -> UserHandle.USER_CURRENT
data.writeInt32(-1); // bundle extras length
data.writeInt32(0x4C444E42); // 'B' 'N' 'D' 'L'
int oldPos = data.dataPosition();
data.writeInt32(1); // size
// data.writeInt32(0); // VAL_STRING, need to remove because of analyze common intent
data.writeString16(String16("type"));
data.writeInt32(1); // VAL_INTEGER
data.writeInt32(value);
int newPos = data.dataPosition();
data.setDataPosition(oldPos - 8);
data.writeInt32(newPos - oldPos); // refill bundle extras length
data.setDataPosition(newPos);
// intent end
data.writeString16(NULL, 0); // resolvedType
data.writeStrongBinder(NULL); // resultTo
data.writeInt32(0); // resultCode
data.writeString16(NULL, 0); // resultData
data.writeInt32(-1); // resultExtras
data.writeString16(NULL, 0); // permission
data.writeInt32(0); // appOp
data.writeInt32(-1); // option
data.writeInt32(1); // serialized: != 0 -> ordered
data.writeInt32(0); // sticky
data.writeInt32(-2); // userId: -2 -> UserHandle.USER_CURRENT
status_t ret = am->transact(IBinder::FIRST_CALL_TRANSACTION + 13, data,
&reply); // BROADCAST_INTENT_TRANSACTION
if (ret == NO_ERROR) {
int exceptionCode = reply.readExceptionCode();
if (exceptionCode) {
TM_INFO_LOG("sendBroadcastMessage(%s) caught exception %d\n",
action.string(), exceptionCode);
return false;
}
} else {
return false;
}
} else {
TM_INFO_LOG("getService() couldn't find activity service!\n");
return false;
}
return true;
}
*/
// Device-disconnect callback: log only. The owning CPhoneDevice2 is
// intentionally not destroyed here (removed an unused cast of `context`
// left over from a commented-out delete).
void CPhoneDevice2::camera_device_on_disconnected(void *context, ACameraDevice *device)
{
    (void)context;
    ALOGI("Camera(id: %s) is diconnected.\n", ACameraDevice_getId(device));
}
// Device-error callback: log only; no recovery is attempted here.
void CPhoneDevice2::camera_device_on_error(void *context, ACameraDevice *device, int error)
{
ALOGI("Error(code: %d) on Camera(id: %s).\n", error, ACameraDevice_getId(device));
}
// Session state callback: session has no pending captures. Log only.
void CPhoneDevice2::capture_session_on_ready(void *context, ACameraCaptureSession *session)
{
ALOGI("Session is ready. %p\n", session);
}
// Session state callback: session started processing requests. Log only.
void CPhoneDevice2::capture_session_on_active(void *context, ACameraCaptureSession *session)
{
ALOGI("Session is activated. %p\n", session);
}
// Session state callback: session was closed. Log only.
void CPhoneDevice2::capture_session_on_closed(void *context, ACameraCaptureSession *session)
{
ALOGI("Session is closed. %p\n", session);
}

@ -1,124 +0,0 @@
#ifndef __PHONE_DEVICE2_H__
#define __PHONE_DEVICE2_H__
#include <stdio.h>
#include <fcntl.h>
#include <unistd.h>
#include <errno.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>
#include <map>
#include <atomic>
#include <camera/NdkCameraManager.h>
#include <camera/NdkCameraError.h>
#include <camera/NdkCameraDevice.h>
#include <camera/NdkCameraMetadataTags.h>
#include <media/NdkImageReader.h>
#include <Client/Device.h>
#include <string>
#include "camera2/Camera2Helper.h"
// IDevice implementation backed by a Java service (via JNI) for timers and
// heartbeats, and by the NDK Camera2 API for still capture.
class CPhoneDevice2 : public IDevice
{
public:
// vm/service: the JavaVM and the Java service object used for all JNI calls.
CPhoneDevice2(JavaVM* vm, jobject service);
virtual ~CPhoneDevice2();
virtual void SetListener(IListener* listener);
virtual bool UpdateTime(time_t ts);
virtual bool Reboot();
// Registers a heartbeat with the Java service; returns a locally generated uid.
virtual timer_uid_t RegisterHeartbeat(unsigned int timerType, unsigned int timeout);
// Captures a still photo on channel photoInfo.channel and writes it to path.
virtual bool TakePhoto(const IDevice::PHOTO_INFO& photoInfo, const vector<OSD_INFO>& osds, const string& path);
// Timer lifecycle: uids are generated locally; scheduling is done Java-side.
virtual timer_uid_t RegisterTimer(unsigned int timerType, unsigned int timeout);
virtual bool UnregisterTimer(timer_uid_t uid);
virtual bool FireTimer(timer_uid_t uid);
protected:
ACameraCaptureSession_stateCallbacks *GetSessionListener();
// Output path of the current capture (mPath).
std::string GetFileName() const;
bool SendBroadcastMessage(std::string action, int value);
// Picks the best supported stream size matching the requested aspect ratio.
bool MatchCaptureSizeRequest(ACameraManager *cameraManager, const char *selectedCameraId, unsigned int width, unsigned int height, uint32_t cameraOrientation_,
ImageFormat* resCap);
// YUV -> RGB conversion into a window buffer, per display rotation.
bool DisplayImage(ANativeWindow_Buffer* buf, AImage* image);
void PresentImage(ANativeWindow_Buffer* buf, AImage* image);
void PresentImage90(ANativeWindow_Buffer* buf, AImage* image);
void PresentImage180(ANativeWindow_Buffer* buf, AImage* image);
void PresentImage270(ANativeWindow_Buffer* buf, AImage* image);
// NDK camera callbacks (static trampolines; context carries the instance).
static void camera_device_on_disconnected(void *context, ACameraDevice *device);
static void camera_device_on_error(void *context, ACameraDevice *device, int error);
static void capture_session_on_ready(void *context, ACameraCaptureSession *session);
static void capture_session_on_active(void *context, ACameraCaptureSession *session);
static void capture_session_on_closed(void *context, ACameraCaptureSession *session);
// Frame-ready handler: converts, overlays OSD, saves JPEG, notifies listener.
void ImageCallback(AImageReader *reader);
static void OnImageCallback(void *ctx, AImageReader *reader);
// Writes a single-plane (encoded) image verbatim to disk.
bool WriteFile(AImage *image, const string& path);
static bool WriteFile(CPhoneDevice2* pThis, AImage *image);
// Forwards a capture result to the listener (empty recognition list).
inline bool TakePhotoCb(bool res, const IDevice::PHOTO_INFO& photoInfo, const string& path, time_t photoTime)
{
if (m_listener != NULL)
{
std::vector<IDevice::RECOG_OBJECT> objects;
return m_listener->OnPhotoTaken(res, photoInfo, path, photoTime, objects);
}
return false;
}
protected:
JavaVM* m_vm;
jobject m_javaService;
// Cached method ids on the Java service.
jmethodID mRegisterTimerMid;
jmethodID mRegisterHeartbeatMid;
jmethodID mUnregisterTimerMid;
jmethodID mUpdateTimeMid;
// Current capture output path and request info.
std::string mPath;
IDevice::PHOTO_INFO mPhotoInfo;
IListener* m_listener;
// Monotonic feed for timer/heartbeat uids.
atomic_ulong m_timerUidFeed;
// uid -> packed state (low 32 bits: timer type, high 32 bits: fire count).
std::map<IDevice::timer_uid_t, unsigned long> mTimers;
// NDK camera pipeline objects for the in-flight capture.
AImageReader *mAImageReader;
ANativeWindow *theNativeWindow;
ACameraDevice *cameraDevice;
ACaptureRequest *captureRequest;
ACameraOutputTarget *cameraOutputTarget;
ACaptureSessionOutput *sessionOutput;
ACaptureSessionOutputContainer *captureSessionOutputContainer;
ACameraCaptureSession *captureSession;
ACameraDevice_StateCallbacks deviceStateCallbacks;
ACameraCaptureSession_stateCallbacks captureSessionStateCallbacks;
DisplayDimension mDisplayDimension;
int32_t presentRotation_;
int32_t imageHeight_;
int32_t imageWidth_;
// Scratch buffer and cached plane pointers/strides for YUV conversion.
uint8_t* imageBuffer_;
int32_t yStride, uvStride;
uint8_t *yPixel, *uPixel, *vPixel;
int32_t yLen, uLen, vLen;
int32_t uvPixelStride;
};
#endif // __PHONE_DEVICE2_H__

@ -0,0 +1,57 @@
#ifndef __POSITION_HELPER_H__
#define __POSITION_HELPER_H__
#include <cmath>
#define _USE_MATH_DEFINES
// GCJ-02 latitude perturbation term for offset coordinates
// (x = lng - 105.0, y = lat - 35.0).
inline double transformLat(double x, double y)
{
    // Polynomial base term.
    double result = -100.0 + 2.0 * x + 3.0 * y + 0.2 * y * y + 0.1 * x * y + 0.2 * std::sqrt(std::abs(x));
    // Periodic corrections, accumulated in the same order as the reference
    // algorithm so floating-point results stay bit-identical.
    result += (20.0 * std::sin(6.0 * x * M_PI) + 20.0 * std::sin(2.0 * x * M_PI)) * 2.0 / 3.0;
    result += (20.0 * std::sin(y * M_PI) + 40.0 * std::sin(y / 3.0 * M_PI)) * 2.0 / 3.0;
    result += (160.0 * std::sin(y / 12.0 * M_PI) + 320 * std::sin(y * M_PI / 30.0)) * 2.0 / 3.0;
    return result;
}
// GCJ-02 longitude perturbation term for offset coordinates
// (x = lng - 105.0, y = lat - 35.0).
inline double transformLng(double x, double y)
{
    // Polynomial base term.
    double result = 300.0 + x + 2.0 * y + 0.1 * x * x + 0.1 * x * y + 0.1 * std::sqrt(std::abs(x));
    // Periodic corrections, accumulated in the same order as the reference
    // algorithm so floating-point results stay bit-identical.
    result += (20.0 * std::sin(6.0 * x * M_PI) + 20.0 * std::sin(2.0 * x * M_PI)) * 2.0 / 3.0;
    result += (20.0 * std::sin(x * M_PI) + 40.0 * std::sin(x / 3.0 * M_PI)) * 2.0 / 3.0;
    result += (150.0 * std::sin(x / 12.0 * M_PI) + 300.0 * std::sin(x / 30.0 * M_PI)) * 2.0 / 3.0;
    return result;
}
// Convert a WGS-84 position to GCJ-02 ("Mars coordinates") in place.
// FIX: the constants used to be function-body #defines, which leak into
// every translation unit including this header; they are now constexpr.
inline void transformPosition(double& lat, double& lng)
{
    // Semi-major axis of the projection reference ellipsoid.
    constexpr double kAxis = 6378245.0;
    // First eccentricity squared of the ellipsoid: (a^2 - b^2) / a^2.
    constexpr double kOffset = 0.00669342162296594323;
    double dLat = transformLat(lng - 105.0, lat - 35.0);
    double dLon = transformLng(lng - 105.0, lat - 35.0);
    double radLat = lat / 180.0 * M_PI;
    double magic = std::sin(radLat);
    magic = 1 - kOffset * magic * magic;
    double sqrtMagic = std::sqrt(magic);
    // Scale the perturbation by the local meridian/parallel curvature radii.
    dLat = (dLat * 180.0) / ((kAxis * (1 - kOffset)) / (magic * sqrtMagic) * M_PI);
    dLon = (dLon * 180.0) / (kAxis / sqrtMagic * std::cos(radLat) * M_PI);
    lat += dLat;
    lng += dLon;
}
// Returns true when (lat, lon) lies inside the bounding box of mainland
// China, i.e. where the WGS-84 -> GCJ-02 conversion should be applied.
inline bool shouldConvertPosition(double lat, double lon)
{
    const bool lonOutside = (lon < 72.004) || (lon > 137.8347);
    const bool latOutside = (lat < 0.8293) || (lat > 55.8271);
    return !(lonOutside || latOutside);
}
#endif // __POSITION_HELPER_H__

@ -0,0 +1,462 @@
//
// Created by Matthew on 2025/3/5.
//
#include "PtzController.h"
#include "SensorsProtocol.h"
#include "GPIOControl.h"
#include "PhoneDevice.h"
#include "time.h"
#include <memory>
// Bind the controller to its owning device; the worker thread is not
// started here (see Startup()).
PtzController::PtzController(CPhoneDevice* pPhoneDevice)
    : m_pPhoneDevice(pPhoneDevice)
{
    // Not exiting until ExitAndWait() is called.
    m_exit = false;
}
void PtzController::Startup()
{
m_thread = std::thread(PtzThreadProc, this);
}
// Static thread trampoline: forward to the instance's worker loop.
void PtzController::PtzThreadProc(PtzController* pThis)
{
pThis->PtzProc();
}
// Queue a raw PTZ serial command for the worker thread and wake it.
// FIX: the serial-device path used an unbounded strcpy into a fixed
// 128-byte field; it is now a bounded, always-terminated copy.
void PtzController::AddCommand(uint8_t channel, int cmdidx, uint8_t bImageSize, uint8_t preset, const char *serfile, uint32_t baud, int addr)
{
    SERIAL_CMD cmd = { 0 };
    cmd.channel = channel;
    cmd.preset = preset;
    cmd.cmdidx = cmdidx;
    cmd.bImageSize = bImageSize;
    // Bounded copy; cmd is zero-initialized so the terminator is guaranteed.
    strncpy(cmd.serfile, serfile, sizeof(cmd.serfile) - 1);
    cmd.baud = baud;
    cmd.addr = addr;
    cmd.ts = time(NULL);
    m_locker.lock();
    m_cmds.push_back(cmd);
    m_locker.unlock();
    // One release per queued command.
    m_sem.release();
}
// Queue a power-on command followed by a Take_Photo command for the worker
// thread, releasing the semaphore once per queued command.
// FIX: `cmdPreset.channel` was assigned twice (copy-paste) — the second
// assignment `cmdPreset.channel = photoInfo.preset;` clobbered the channel
// and left `cmdPreset.preset` unset; it now assigns the preset field.
// Also replaced unbounded strcpy with bounded copies.
void PtzController::AddPhotoCommand(IDevice::PHOTO_INFO& photoInfo, const std::string& path, const std::vector<IDevice::OSD_INFO>& osds)
{
    IDevice::SerialsPhotoParam param = { "", 0, 0 };
    m_pPhoneDevice->GetPhotoSerialsParamCb(param);
    SERIAL_CMD cmdPreset = { 0 };
    time_t ts = time(NULL);
#if 1
    // if (photoInfo.preset != 0 && photoInfo.preset != 0xFF)
    {
        cmdPreset.ts = photoInfo.selfTestingTime;
        cmdPreset.delayTime = photoInfo.closeDelayTime;
        cmdPreset.channel = photoInfo.channel;
        cmdPreset.preset = photoInfo.preset;   // FIX: was cmdPreset.channel
        cmdPreset.cmdidx = PHOTO_OPEN_POWER;
        strncpy(cmdPreset.serfile, param.serfile, sizeof(cmdPreset.serfile) - 1);
        cmdPreset.baud = param.baud;
        cmdPreset.addr = param.addr;
    }
#endif
    SERIAL_CMD cmd = { 0 };
    cmd.ts = ts;
    cmd.delayTime = photoInfo.closeDelayTime;
    cmd.channel = photoInfo.channel;
    cmd.preset = photoInfo.preset;
    cmd.cmdidx = Take_Photo;
    cmd.bImageSize = photoInfo.resolution;
    strncpy(cmd.serfile, param.serfile, sizeof(cmd.serfile) - 1);
    cmd.baud = param.baud;
    cmd.addr = param.addr;
    // The photo command carries the full capture parameters for the worker.
    PtzPhotoParams* ppp = new PtzPhotoParams(photoInfo, path, osds);
    cmd.photoParams.reset(ppp);
    m_locker.lock();
#if 1
    if (cmdPreset.cmdidx != 0)
    {
        m_cmds.push_back(cmdPreset);
    }
#endif
    m_cmds.push_back(cmd);
    m_locker.unlock();
    // One release per queued command (power-on + photo).
    m_sem.release();
    m_sem.release();
}
// Signal the worker loop to stop, wake it up, and wait for it to finish.
void PtzController::ExitAndWait()
{
    m_exit = true;
    // Wake the worker so it can observe m_exit.
    m_sem.release();
    if (!m_thread.joinable())
    {
        return;
    }
    m_thread.join();
}
void PtzController::PtzProc()
{
PROC_PTZ_STATE state = PTZS_POWER_OFF;
SERIAL_CMD cmd;
PTZ_STATE ptz_state;
bool hasCmd = false;
int i=0;
int closecmd=0;
std::shared_ptr<PowerControl> powerCtrl;
time_t selfTestingStartTime = 0;
time_t selfTestingWaitTime = 0;
time_t PTZ_preset_start_time = 0;
time_t PTZ_preset_wait_time = 0;
time_t close_delay_time = CAMERA_CLOSE_DELAYTIME;
time_t start_delay_time = 0;
time_t auto_delay_time = 0;
time_t auto_wait_time = WAIT_TIME_AUTO_CLOSE;
time_t photo_move_preset_time = 0;
int iwaitime = 0;
while(true)
{
m_sem.acquire();
if (m_exit)
{
break;
}
hasCmd = false;
m_locker.lock();
for (auto it = m_cmds.begin(); it != m_cmds.end(); ++it)
{
if ((state == PTZS_SELF_TESTING) || (PTZS_PHOTO_SELF_TESTING == state))
{
// find first non-taking-photo cmd
if (it->cmdidx != Take_Photo)
{
cmd = *it;
m_cmds.erase(it);
hasCmd = true;
break;
}
}
else
{
cmd = *it;
m_cmds.erase(it);
hasCmd = true;
break;
}
}
m_locker.unlock();
if (!hasCmd)
{
if ((state == PTZS_SELF_TESTING) || (PTZS_PHOTO_SELF_TESTING == state))
{
time_t timeout = time(NULL) - selfTestingStartTime;
if(timeout < 0)
selfTestingStartTime = time(NULL);
if (timeout >= selfTestingWaitTime)
{
XYLOG(XYLOG_SEVERITY_INFO, "超时(%u秒)未收到云台自检结束应答,状态改为空闲!", (uint32_t)timeout);
state = PTZS_IDLE;
m_sem.release();
continue;
}
else
{
//if(timeout >= CAMERA_SELF_TEST_TIME)
{
#ifndef NDEBUG
if (timeout == 1 || ((timeout % 10) == 0))
#endif
{
XYLOG(XYLOG_SEVERITY_INFO, "开始查询云台自检状态timeout=%u秒", (uint32_t)timeout);
}
if(0 == QueryPtzState(&ptz_state, QUERY_PTZ_STATE, cmd.serfile, cmd.baud, cmd.addr))
{
if(0 == ptz_state.ptz_status)
{
XYLOG(XYLOG_SEVERITY_INFO, "收到云台自检结束应答状态改为空闲timeout=%u秒", (uint32_t)timeout);
state = PTZS_IDLE;
m_sem.release();
continue;
}
}
}
}
std::this_thread::sleep_for(std::chrono::milliseconds(1000));
m_sem.release();
continue;
}
if(0 == start_delay_time)
{
if(0 == iwaitime)
{
auto_delay_time = time(NULL);
iwaitime += 1;
m_sem.release();
continue;
}
else
{
if(time(NULL) - auto_delay_time < 0)
{
auto_delay_time = time(NULL);
}
if(time(NULL) - auto_delay_time >= auto_wait_time)
{
iwaitime = 0;
XYLOG(XYLOG_SEVERITY_INFO, "摄像机自动上电延时时间超过%u秒准备关闭摄像机", (uint32_t)auto_wait_time);
}
else
{
m_sem.release();
continue;
}
}
}
else
{
if(time(NULL) - start_delay_time < 0)
{/* 防止等待关机期间,其他线程发生对时,改变了系统时间,导致长时间不会关摄像机电源*/
start_delay_time = time(NULL);
}
if(time(NULL) - start_delay_time >= close_delay_time)
{
XYLOG(XYLOG_SEVERITY_INFO, "摄像机空闲时间超过%u秒准备关闭摄像机", (uint32_t)close_delay_time);
}
else
{
m_sem.release();
continue;
}
}
if (state == PTZS_POWER_OFF)
{
closecmd = 0;
XYLOG(XYLOG_SEVERITY_INFO, "自动关机触发,摄像机本来就处于关机状态!");
// Do Nothing
}
else
{
XYLOG(XYLOG_SEVERITY_INFO, "自动关机触发通知云台准备关机state=%d", state);
for(i=0; i<3; i++)
{
if(0 == QueryPtzState(&ptz_state, NOTIFY_PTZ_CLOSE, cmd.serfile, cmd.baud, cmd.addr))
break;
}
powerCtrl.reset();
closecmd = 0;
state = PTZS_POWER_OFF;
XYLOG(XYLOG_SEVERITY_INFO, "自动触发关闭云台电源state=%d", state);
}
start_delay_time = 0;
continue;
}
switch (cmd.cmdidx)
{
case Take_Photo:
{
if (state == PTZS_POWER_OFF)
{
if (!powerCtrl)
{
//powerCtrl = std::make_shared<PlzCameraPowerCtrl>(cmd.photoParams->mPhotoInfo.closeDelayTime);
powerCtrl = std::make_shared<PlzCameraPowerCtrl>(0);
selfTestingStartTime = time(NULL);
selfTestingWaitTime = cmd.photoParams->mPhotoInfo.selfTestingTime;
state = PTZS_PHOTO_SELF_TESTING;
XYLOG(XYLOG_SEVERITY_INFO, "1、收到拍照指令摄像机从关机状态改为自检状态");
m_locker.lock();
m_cmds.insert(m_cmds.begin(), cmd);
m_locker.unlock();
m_sem.release();
continue;
}
}
if(cmd.photoParams->mPhotoInfo.scheduleTime == 0)
{
if(1 == closecmd)
{
XYLOG(XYLOG_SEVERITY_INFO, "3、收到手动拍照指令但同时后续收到关机指令等待拍完照片再关机。state=%d", state);
}
else
{
start_delay_time = time(NULL);
XYLOG(XYLOG_SEVERITY_INFO, "3、收到手动拍照指令state=%d", state);
}
}
else
XYLOG(XYLOG_SEVERITY_INFO, "2、收到自动拍照指令state=%d", state);
state = PTZS_TAKING_PHOTO;
if (cmd.preset != 0 && cmd.preset != 0xFF)
{
CameraPhotoCmd(0, cmd.channel, MOVE_PRESETNO, 0, cmd.preset, cmd.serfile, cmd.baud, cmd.addr);
#if 0
if(START_ONCE_SELF == cmd.preset)
{
selfTestingStartTime = time(NULL);
selfTestingWaitTime = CAMERA_SELF_TEST_TIME;
state = PTZS_SELF_TESTING;
m_sem.release();
XYLOG(XYLOG_SEVERITY_INFO, "拍照调用200号预置点指令摄像机启动一次性自检从拍照状态改为自检状态取消拍照动作设置的自检等待时间%u秒", (uint32_t)selfTestingWaitTime);
break;
}
#endif
PTZ_preset_start_time = time(NULL);
if(START_ONCE_SELF == cmd.preset)
PTZ_preset_wait_time = CAMERA_SELF_TEST_TIME;
else
PTZ_preset_wait_time = MOVE_PRESET_WAIT_TIME;
XYLOG(XYLOG_SEVERITY_INFO, "摄像机拍照前开始调用预置点%ustate=%d", (uint32_t)cmd.preset, state);
for(;;)
{
if(0 == QueryPtzState(&ptz_state, QUERY_PTZ_STATE, cmd.serfile, cmd.baud, cmd.addr))
{
if(0 == ptz_state.ptz_status)
{
XYLOG(XYLOG_SEVERITY_INFO, "摄像机拍照前调用预置点%u收到移动结束应答移动时长=%d秒 state=%d", (uint32_t)cmd.preset, (uint32_t)(time(NULL)-PTZ_preset_start_time), state);
break;
}
}
if(time(NULL) - PTZ_preset_start_time < 0)
{/* 防止等待关机期间,其他线程发生对时,改变了系统时间,导致长时间等待摄像机到达预置点*/
PTZ_preset_start_time = time(NULL);
}
if(time(NULL) - PTZ_preset_start_time >= PTZ_preset_wait_time)
{
XYLOG(XYLOG_SEVERITY_INFO, "摄像机拍照前调用预置点%u摄像机在%u秒内未收到调用预置点结束应答state=%d", (uint32_t)cmd.preset, (uint32_t)PTZ_preset_wait_time, state);
break;
}
std::this_thread::sleep_for(std::chrono::milliseconds(10));
photo_move_preset_time = time(NULL);
}
}
if(cmd.photoParams->mPhotoInfo.mediaType == 1)
m_pPhoneDevice->TakeVideoWithNetCamera(cmd.photoParams->mPhotoInfo, cmd.photoParams->mPath, cmd.photoParams->mOsds, powerCtrl);
else if ((cmd.photoParams->mPhotoInfo.mediaType == XY_MEDIA_TYPE_STREAM || cmd.photoParams->mPhotoInfo.mediaType == XY_MEDIA_TYPE_STREAM_OFF))
{
m_pPhoneDevice->StartPushStreaming(cmd.photoParams->mPhotoInfo, cmd.photoParams->mPath, cmd.photoParams->mOsds, powerCtrl);
}
else
m_pPhoneDevice->TakePhotoWithNetCamera(cmd.photoParams->mPhotoInfo, cmd.photoParams->mPath, cmd.photoParams->mOsds, powerCtrl);
state = PTZS_IDLE;
}
break;
case PHOTO_OPEN_POWER:
if (state == PTZS_POWER_OFF)
{
if (!powerCtrl)
{
powerCtrl = std::make_shared<PlzCameraPowerCtrl>(0);
selfTestingStartTime = time(NULL);
selfTestingWaitTime = CAMERA_SELF_TEST_TIME;
state = PTZS_PHOTO_SELF_TESTING;
m_sem.release();
XYLOG(XYLOG_SEVERITY_INFO, "收到拍照指令开机,摄像机从关机状态改为自检状态!设置的自检等待时间%u秒", (uint32_t)selfTestingWaitTime);
}
}
else
{
XYLOG(XYLOG_SEVERITY_INFO, "收到拍照指令开机摄像机处于state=%d", state);
}
break;
case OPEN_TOTAL:
if (state == PTZS_POWER_OFF)
{
if (!powerCtrl)
{
powerCtrl = std::make_shared<PlzCameraPowerCtrl>(0);
selfTestingStartTime = time(NULL);
selfTestingWaitTime = CAMERA_SELF_TEST_TIME;
state = PTZS_SELF_TESTING;
m_sem.release();
XYLOG(XYLOG_SEVERITY_INFO, "收到手动开机指令,摄像机从关机状态改为自检状态!设置的自检等待时间%u秒", (uint32_t)selfTestingWaitTime);
}
}
else
{
XYLOG(XYLOG_SEVERITY_INFO, "收到手动开机指令摄像机处于state=%d", state);
}
closecmd = 0;
start_delay_time = time(NULL);
XYLOG(XYLOG_SEVERITY_INFO, "收到手动打开摄像机指令刷新关机计时初始值state=%d", state);
break;
case CLOSE_TOTAL:
if (state == PTZS_POWER_OFF)
{
closecmd = 0;
XYLOG(XYLOG_SEVERITY_INFO, "收到关机指令,摄像机本来就处于关机状态!");
// Do Nothing
}
else if(PTZS_PHOTO_SELF_TESTING == state)
{
closecmd = 1;
XYLOG(XYLOG_SEVERITY_INFO, "在拍照自检过程中收到关机指令取消延时关机转到自动关机处理state=%d", state);
}
else
{
XYLOG(XYLOG_SEVERITY_INFO, "收到关机指令通知云台准备关机state=%d", state);
for(i=0; i<3; i++)
{
if(0 == QueryPtzState(&ptz_state, NOTIFY_PTZ_CLOSE, cmd.serfile, cmd.baud, cmd.addr))
break;
}
closecmd = 0;
powerCtrl.reset();
state = PTZS_POWER_OFF;
XYLOG(XYLOG_SEVERITY_INFO, "关闭云台电源state=%d", state);
}
start_delay_time = 0;
break;
default:
{
if (state == PTZS_POWER_OFF)
{
XYLOG(XYLOG_SEVERITY_INFO, "收到手动控制摄像机指令,摄像机处于关机状态,无法执行!");
CameraPhotoCmd(cmd.ts, cmd.channel, cmd.cmdidx, 0, cmd.preset, cmd.serfile, cmd.baud, cmd.addr);
break;
}
start_delay_time = time(NULL);
XYLOG(XYLOG_SEVERITY_INFO, "收到手动控制摄像机指令刷新关机计时初始值state=%d", state);
if(cmd.ts <= photo_move_preset_time)
{
XYLOG(XYLOG_SEVERITY_INFO, "丢弃拍照调预置点期间收到的控制云台指令,指令时间" FMT_TIME_T ",拍照时间" FMT_TIME_T "state=%d", cmd.ts, photo_move_preset_time, state);
}
else
{
if((MOVE_PRESETNO == cmd.cmdidx) && (START_ONCE_SELF == cmd.preset))
{
selfTestingStartTime = time(NULL);
selfTestingWaitTime = CAMERA_SELF_TEST_TIME;
state = PTZS_SELF_TESTING;
m_sem.release();
XYLOG(XYLOG_SEVERITY_INFO, "收到调用200号预置点指令摄像机启动一次性自检从当前状态改为自检状态设置的自检等待时间%u秒", (uint32_t)selfTestingWaitTime);
}
CameraPhotoCmd(cmd.ts, cmd.channel, cmd.cmdidx, 0, cmd.preset, cmd.serfile, cmd.baud, cmd.addr);
}
}
break;
}
}
}

@ -0,0 +1,100 @@
//
// Created by Matthew on 2025/3/5.
//
#ifndef MICROPHOTO_PTZCONTROLLER_H
#define MICROPHOTO_PTZCONTROLLER_H
#include <Buffer.h>
#include <thread>
#include <vector>
#include <memory>
#include <string>
#include <mutex>
#include <SemaphoreEx.h>
#include <Client/Device.h>
// States of the PTZ/camera worker state machine (driven by PtzController::PtzProc).
enum PROC_PTZ_STATE
{
    PTZS_POWER_OFF = 0,          // Camera power is off
    PTZS_IDLE = 1,               // Powered on, no command in progress
    PTZS_SELF_TESTING = 2,       // Self-test after manual power-on or preset-200 command
    PTZS_MOVING = 3,             // PTZ movement in progress
    PTZS_TAKING_PHOTO = 4,       // Executing a capture command
    PTZS_PHOTO_SELF_TESTING = 5, // Self-test triggered by a photo command arriving while powered off
};
#define CAMERA_SELF_TEST_TIME 150 /* Camera self-test time (excluding PTZ self-test)*/
#define MOVE_PRESET_WAIT_TIME 20 /* Waiting for the maximum time for the PTZ to move to the preset position*/
#define CAMERA_CLOSE_DELAYTIME 360 /* Auto Power-Off Timer Setting After Manual Power-On (for Camera)*/
#define PHOTO_OPEN_POWER 16000
#define WAIT_TIME_AUTO_CLOSE 2 /* In order to automatically capture multiple preset point images at the same time and prevent the camera from self checking every time it takes a picture.*/
// Snapshot of everything a queued capture command needs: capture parameters,
// destination path, and the OSD overlays to stamp onto the image.
// Carried through the PTZ worker queue via SERIAL_CMD::photoParams.
class PtzPhotoParams
{
public:
    PtzPhotoParams(const IDevice::PHOTO_INFO& photoInfo, const std::string& path, const std::vector<IDevice::OSD_INFO>& osds) :
        mPhotoInfo(photoInfo), mPath(path), mOsds(osds)
    {
    }

    // Rule of Zero: the previous empty user-declared ~PtzPhotoParams()
    // suppressed the implicitly-declared move constructor/assignment, so
    // moving a PtzPhotoParams deep-copied mPath and mOsds. With no special
    // members declared, copies and moves are generated correctly.

    IDevice::PHOTO_INFO mPhotoInfo;       // Capture settings from the upper layer
    std::string mPath;                    // Destination file path for the captured media
    std::vector<IDevice::OSD_INFO> mOsds; // On-screen-display overlay descriptors
};
// One queued command for the PTZ/serial-camera worker thread.
struct SERIAL_CMD
{
    uint8_t channel;     // Camera/video channel the command targets
    uint8_t preset;      // PTZ preset number (0 / 0xFF = none; 200 = one-shot self-test)
    time_t ts;           // Time the command was issued
    int cmdidx;          // Command code (TAKE_PHOTO, OPEN_TOTAL, CLOSE_TOTAL, MOVE_PRESETNO, ...)
    uint32_t delayTime;  // Delay associated with the command -- TODO confirm units (seconds?)
    uint8_t bImageSize;  // Requested image size code
    char serfile[128];   // Serial device path used to reach the camera/PTZ
    uint32_t baud;       // Serial baud rate
    int addr;            // Device (camera/PTZ) address on the serial bus
    std::shared_ptr<PtzPhotoParams> photoParams; // Capture parameters; only set for photo commands
};
class CPhoneDevice;
// Owns the single worker thread that serializes all PTZ / serial-camera
// commands (movement, power control, self-test, photo capture).
// Producers enqueue SERIAL_CMDs under m_locker and signal m_sem;
// PtzProc() drains the queue and drives the camera power state machine.
// (Removed a stray leftover "// ();" comment and a duplicated
// "protected:" access specifier from the original declaration.)
class PtzController
{
public:
    PtzController(CPhoneDevice* pPhoneDevice);

    // Spawns the worker thread (PtzThreadProc -> PtzProc).
    void Startup();
    // Enqueues a raw PTZ/serial command (move, preset, power, ...).
    void AddCommand(uint8_t channel, int cmdidx, uint8_t bImageSize, uint8_t preset, const char *serfile, uint32_t baud, int addr);
    // Enqueues a photo/video capture command together with its OSD overlays.
    void AddPhotoCommand(IDevice::PHOTO_INFO& photoInfo, const std::string& path, const std::vector<IDevice::OSD_INFO>& osds);
    // Asks the worker thread to stop and joins it.
    void ExitAndWait();

protected:
    static void PtzThreadProc(PtzController* pThis);
    void PtzProc();

protected:
    std::mutex m_locker;            // Guards m_cmds
    std::vector<SERIAL_CMD> m_cmds; // Pending command queue
    CSemaphore m_sem;               // Signaled whenever a command is queued
    bool m_exit;                    // Exit request flag for the worker thread
    std::thread m_thread;           // The worker thread
    CPhoneDevice* m_pPhoneDevice;   // Back-pointer to the owning device (not owned)
};
#endif //MICROPHOTO_PTZCONTROLLER_H

File diff suppressed because it is too large Load Diff

@ -0,0 +1,557 @@
//
// Created by hyz on 2024/6/5.
//
#ifndef __SENSOR_PROTOCOL_H__
#define __SENSOR_PROTOCOL_H__
#include <string>
#ifndef LOBYTE
#define LOBYTE(w) ((unsigned char)(w))
#endif
#ifndef HIBYTE
#define HIBYTE(w) ((unsigned char)(((unsigned short)(w) >> 8) & 0xFF))
#endif
#ifndef LOWORD
#define LOWORD(l) ((uint16_t)(l))
#endif
#ifndef HIWORD
#define HIWORD(l) ((uint16_t)((uint32_t)(l) >> 16))
#endif
#define MAX_STRING_LEN 32
#define IOT_PARAM_WRITE 0xAE
#define IOT_PARAM_READ 0xAF
#define MAX_FIELDS_NUM 20 /* BD_NMEA0183单组字符串数据内含数据最大数量*/
#define MAX_SERIAL_DEV_NUM 25 /* 最大接串口传感器数量*/
#define MAX_SERIAL_PORT_NUM 5
#define MAX_DEV_VALUE_NUM 12 /* 一台装置最大的采样值数量*/
#define WEATHER_PROTOCOL 1 /* 温湿度协议序号*/
#define WIND_PROTOCOL 2 /* 风速风向协议序号*/
#define SLANT_PROTOCOL 3 /* 倾斜角协议序号*/
#define RALLY_PROTOCOL 4 /* 拉力协议序号*/
#define PELCO_P_PROTOCOL 5 /* 摄像机Pelco_P协议序号*/
#define PELCO_D_PROTOCOL 6 /* 摄像机Pelco_D协议序号*/
#define SERIALCAMERA_PROTOCOL 8 /* 串口摄像机协议序号*/
#define MUTIWEATHER_PROTOCOL 9 /*多合一气象*/
#define NMEA0183_PROTOCOL 10 /* 单一北斗NMEA0183标准协议*/
#define RESERVE2_PROTOCOL 17 /* 备用2协议序号*/
#define RESERVE4_PROTOCOL 19 /* 备用4协议序号*/
#define RESERVE5_PROTOCOL 20 /* 备用5协议序号*/
#define INVALID_PROTOCOL 21 /* 无效协议序号*/
#define AirTempNo 0 /* 空气温度数据存储序号*/
#define HumidityNo 1 /* 相对湿度数据存储序号*/
#define WindSpeedNo 2 /* 风速数据存储序号*/
#define WindDirectionNo 3 /* 风向数据存储序号*/
#define RainfallNo 4 /* 雨量数据存储序号*/
#define AtmosNo 5 /* 大气压数据存储序号*/
#define OpticalRadiationNo 6 /* 日照(光辐射)数据存储序号*/
#define SER_IDLE 0 /* 传感器处于空闲状态,未启动采样*/
#define SER_SAMPLE 1 /* 正在采样过程中*/
#define SAMPLINGSUCCESS 2 /* 采样结束,正常读取到数据*/
#define SER_STARTSAMPLE 3 /* 启动采样*/
#define SER_SAMPLEFAIL -1 /* 采样失败,未采集到数据,传感器故障或未接*/
#define PHOTO_SAVE_SUCC 5 /* 图片保存成功*/
#define WEATHER_DATA_NUM 8 /* 气象数据最大数量(一般最多是6要素)*/
#define RALLY_DATA_NUM 2 /* 拉力数据最大数量(一般是1个)*/
#define SLANTANGLE_DATA_NUM 3 /* 倾角数据最大数量(一般只有X轴和Y轴值)*/
#define PTZ_MOVETIME 1 // 云台移动等待时间为1秒
#define MAX_CHANNEL_NUM 2 /* 视频通道最大通道*/
#define MAX_PHOTO_FRAME_LEN 1024 /* 图片数据一包最大长度*/
#define MAX_PHOTO_PACKET_NUM 1024 /* 图片最大包数图片最大定为1MB*/
#define RECVDATA_MAXLENTH 2048 /* 接收数据缓冲区最大值*/
#define TIMER_CNT 50 // Poll命令定时器时间 5 ms
#define SENDDATA_MAXLENTH RECVDATA_MAXLENTH /* 正常发送数据缓冲区最大值*/
// 摄像机控制命令宏定义
#define Cmd_Cancel 0x00000000 // 关闭功能
#define SET_PRESETNO 0x00030000 // 设置预置点
#define MOVE_TO_PRESETNO 0x00070000 // 调用预置点
/* 摄像机PELCO-P控制命令宏定义*/
#define P_Auto_Scan 0x20000000 /* 自动扫描功能控制(1/0 打开/关闭该功能)*/
#define P_IRIS_CLOSE 0x08000000 /* 光圈缩小(1 有效)*/
#define P_IRIS_OPEN 0x04000000 /* 光圈放大(1 有效)*/
#define P_FOCUS_NEAR 0x02000000 /* 近距离聚焦(1 有效)*/
#define P_FOCUS_FAR 0x01000000 /* 远距离聚焦(1 有效)*/
#define P_ZOOM_WIDE 0x00400000 /* 远离物体(1 有效)*/
#define P_ZOOM_TELE 0x00200000 /* 接近物体(1 有效)*/
#define P_MOVE_DOWN 0x0010001f /* 向下移动镜头(1 有效)*/
#define P_MOVE_UP 0x0008001f /* 向上移动镜头(1 有效)*/
#define P_MOVE_LEFT 0x00041f00 /* 向左移动镜头(1 有效)*/
#define P_MOVE_RIGHT 0x00021f00 /* 向右移动镜头(1 有效)*/
// 摄像机PELCO-D控制命令宏定义
#define D_Auto_Scan 0x10000000 /* 自动扫描功能控制(1/0 打开/关闭该功能)*/
#define D_IRIS_CLOSE 0x04000000 /* 光圈缩小(1 有效)*/
#define D_IRIS_OPEN 0x02000000 /* 光圈放大(1 有效)*/
#define D_FOCUS_NEAR 0x01000000 /* 近距离聚焦(1 有效)*/
#define D_FOCUS_FAR 0x00800000 /* 远距离聚焦(1 有效)*/
#define D_ZOOM_WIDE 0x00400000 /* 远离物体(1 有效)*/
#define D_ZOOM_TELE 0x00200000 /* 接近物体(1 有效)*/
#define D_MOVE_DOWN 0x0010002d /* 向下移动镜头(1 有效)*/
#define D_MOVE_UP 0x0008002d /* 向上移动镜头(1 有效)*/
#define D_MOVE_LEFT 0x00042d00 /* 向左移动镜头(1 有效)*/
#define D_MOVE_RIGHT 0x00022d00 /* 向右移动镜头(1 有效)*/
#define D_OPEN_TOTAL 0x0009000B /* 打开总电源(1 有效)*/
#define D_OPEN_MODULE_POWER 0x0009000C /* 打开机芯电源(1 有效)*/
/* 摄像机下发命令宏定义*/
#define TAKE_PHOTO 20000 /* 拍照*/
#define SET_BAUD 10000 /* 设置球机波特率*/
#define STOP_CMD 10005 /* 取消或停止指令*/
#define AUTO_SCAN 10006 /* 自动扫描功能控制(1/0 打开/关闭该功能)*/
#define IRIS_CLOSE 10007 /* 光圈缩小(1 有效)*/
#define IRIS_OPEN 10008 /* 光圈放大(1 有效)*/
#define FOCUS_NEAR 10009 /* 近距离聚焦(1 有效)*/
#define FOCUS_FAR 10010 /* 远距离聚焦(1 有效)*/
#define ZOOM_WIDE 10011 /* 远离物体(1 有效)*/
#define ZOOM_TELE 10012 /* 接近物体(1 有效)*/
#define MOVE_DOWN 10013 /* 向下移动镜头(1 有效)*/
#define MOVE_UP 10014 /* 向上移动镜头(1 有效)*/
#define MOVE_LEFT 10015 /* 向左移动镜头(1 有效)*/
#define MOVE_RIGHT 10016 /* 向右移动镜头(1 有效)*/
#define MOVE_PRESETNO 10017 // 调用预置点
#define SAVE_PRESETNO 10018 // 设置预置点
#define OPEN_TOTAL 10019 /* 打开总电源(1 有效)*/
#define OPEN_MODULE_POWER 10020 /* 打开机芯电源(1 有效)*/
#define NOTIFY_PTZ_CLOSE 10021 // 通知云台关闭
#define QUERY_PTZ_STATE 10022 // 查询云台状态
#define CLOSE_TOTAL 10040 /* 关闭总电源*/
#define SPEED_DOME_CAMERA 0 /* 球机摄像机*/
#define SERIAL_CAMERA 2 /* 串口摄像机a*/
#define START_ONCE_SELF 200 /* 一次性自检需要的调用的预置点200*/
#define COLLECT_DATA 0 /* 调试使用*/
#define HexCharToInt( c ) (((c) >= '0') && ((c) <= '9') ? (c) - '0' : ((c) >= 'a') && ((c) <= 'f') ? (c) - 'a' + 10 :((c) >= 'A') && ((c) <= 'F') ? (c) - 'A' + 10 : 0 )
// SDS packet: a raw message received from a serial port, tagged with its source.
typedef struct
{
    uint8_t PortIdx;   // Serial port index (original comment duplicated "message type" -- presumably a copy/paste slip)
    uint16_t MsgType;  // Message type
    int MsgLen;        // Payload length in bytes
    uint8_t MsgData[RECVDATA_MAXLENTH]; // Payload bytes
} RTUMSG;
// Scaling parameters applied to one raw analog sample.
typedef struct
{
    float fFactor;       // Multiplicative coefficient
    float EuValueDelta;  // Engineering-value offset
} AI_PARAM;
// One analog sample point: scaling configuration plus latest value and state.
typedef struct
{
    AI_PARAM AiParam; // Scaling configuration for this point
    int AiState;      // Sample state (-1: failed; 0: not sampled; 1: sampling; 2: done; 3: start sampling)
    float EuValue;    // Engineering value
} AI_DEF;
// Sample value handed back to upper layers (see GetWeatherData & friends).
typedef struct
{
    // Sample state (-1: failed; 0: not sampled; 1: sampling; 2: done; 3: start sampling).
    // NOTE(review): the documented -1 cannot be represented in uint8_t -- a failed
    // sample will read back as 255. Either callers must compare against 255, or
    // this field should be int as in AI_DEF. Confirm before relying on sign checks.
    uint8_t AiState;
    float EuValue; // Engineering value
} Data_DEF;
// Metadata of one captured image, as reported to upper layers.
typedef struct
{
    int imagelen;        // Total image size in bytes
    int phototime;       // Capture time
    uint8_t presetno;    // PTZ preset used for the shot
    char photoname[512]; // Full storage path/name of the image
    int state;           // -1: capture failed; 0: idle; 1: fetching data; 2: success; 3: capture started
} IMAGE_DEF;
// In-progress image transfer buffer for the serial camera.
// NOTE: the packet buffer alone is MAX_PHOTO_PACKET_NUM * MAX_PHOTO_FRAME_LEN
// = 1 MiB, so instances must live in static or heap storage, not on the stack.
typedef struct
{
    int imagelen;        // Total image size in bytes
    int imagenum;        // Total number of packets making up the image
    int phototime;       // Capture time
    uint8_t presetno;    // PTZ preset used for the shot
    char photoname[512]; // Full storage path/name of the image
    uint8_t buf[MAX_PHOTO_PACKET_NUM][MAX_PHOTO_FRAME_LEN]; // Per-packet image data
    int ilen[MAX_PHOTO_PACKET_NUM]; // Length of each received packet
    int state;           // -1: capture failed; 0: idle; 1: fetching data; 2: success; 3: capture started
} PHOTO_DEF;
// Sensor acquisition parameters passed in by the upper layer.
typedef struct SENSOR_PARAM
{
    unsigned int baudrate; /* Baud rate */
    int databit;           /* Data bits */
    float stopbit;         /* Stop bits */
    char parity;           /* Parity character */
    char pathname[64];     /* Serial device file name and path */
    //int commNo;          /* (disabled) conventional COM port number as shown on a PC, e.g. COM1 */
    uint8_t SensorsType;   /* Sensor type index, greater than 0 */
    int devaddr;           /* Address used by the device (sensor) */
    uint8_t IsNoInsta;     /* 1: present and working; 0: broken or not installed */
    uint8_t CameraChannel; /* Camera channel number */
    uint8_t Phase;         /* Installed phase for pull/tilt sensors (11 means A1, ...) */
    float multiple;        /* Scale factor */
    float offset;          /* Offset value */
} SENSOR_PARAM;
// Configuration of one serial-attached device.
typedef struct
{
    unsigned int baudrate; /* Baud rate */
    int databit;           /* Data bits */
    int stopbit;           /* Stop bits */
    char parity;           /* Parity character */
    char pathname[64];     /* Serial device file name and path */
    int commid;            /* Serial port index -- note: zero-based */
    uint8_t ProtocolIdx;   /* Protocol index, greater than 0 */
    int devaddr;           /* Address used by the device */
    uint8_t IsNoInsta;     /* 1: present and working; 0: broken or not installed */
    uint8_t CameraChannel; /* Camera channel number */
    uint8_t Phase;         /* Installed phase for pull/tilt sensors (11 means A1, ...) */
} SERIAL_PARAM;
// PTZ status data as reported by the camera.
typedef struct
{
    uint8_t ptz_process; /* Current phase (1: self-testing; 2: moving to preset; 3: normal) */
    uint8_t ptz_status;  /* Current state (0: stopped; 1: moving; 2: core not powered; other: error) */
    int presetno;        /* Preset the PTZ currently sits at */
    float x_coordinate;  /* Horizontal position of the PTZ */
    float y_coordinate;  /* Vertical position of the PTZ */
} PTZ_STATE;
/*
 $--RMC sentence (Recommended Minimum Navigation Data), fields:
   1  ID        "RMC" -- sentence identifier
   2  UTCtime   hhmmss.ss   UTC time of fix
   3  status    A = data valid, V = data invalid
   4  lat       ddmm.mmmmm  latitude (first 2 chars are degrees, rest are minutes)
   5  uLat      N = north, S = south
   6  lon       dddmm.mmmmm longitude (first 3 chars are degrees, rest are minutes)
   7  uLon      E = east, W = west
   8  spd       speed over ground
   9  cog       course over ground
  10  date      ddmmyy      day dd, month mm, year yy
  11  mv        magnetic variation
  12  mvE       magnetic variation direction, E = east, W = west
  13  mode      positioning mode indicator [1]
  14  navStatus navigation status, V (NMEA 4.1 and later)
  15  CS        checksum: '*' followed by two hex digits
*/
// BeiDou/GNSS fix data parsed from an RMC sentence.
typedef struct
{
    struct tm UTC_time; /* UTC time of the fix */
    int ms_time;        /* Milliseconds */
    double lat;         /* Latitude: raw ddmm.mmmmm (degrees + minutes) converted to degrees */
    char uLat;          /* 'N' = north, 'S' = south */
    double lon;         /* Longitude: raw dddmm.mmmm (degrees + minutes) converted to degrees */
    char uLon;          /* 'E' = east, 'W' = west */
    char status;        /* 'A' = data valid; any other character = invalid */
} BD_GNSS_DATA;
// Per-serial-port runtime state for the camera / BeiDou protocol engines.
typedef struct
{
    int m_iRevStatus;     /* Receive state-machine position -- semantics internal to the parser */
    int m_iRecvLen;       /* Bytes received so far */
    int m_iNeedRevLength; /* Bytes still expected */
    int iRecvTime;        /* Receive timing counter -- TODO confirm units */
    uint8_t m_au8RecvBuf[RECVDATA_MAXLENTH]; /* Receive buffer */
    int fd;               /* Open serial-port file descriptor */
    uint8_t PollCmd[SENDDATA_MAXLENTH]; /* Outgoing command buffer */
    int cmdlen;           // Length of the command currently in PollCmd
    //******************** Poll Cmd ****************************
    uint8_t Retry;        /* Maximum number of command retries */
    uint8_t RetryCnt;     /* Retry counter */
    int64_t RetryTime;    /* Retry interval */
    int64_t RetryTimeCnt; /* Retry interval counter */
    int64_t WaitTime;     /* Gap between commands */
    int64_t WaitTimeCnt;  /* Gap-between-commands counter */
    uint8_t ForceWaitFlag; /* Forced-wait flag */
    uint16_t ForceWaitCnt; /* Forced-wait counter */
    uint8_t ReSendCmdFlag; /* Resend-command flag */
    uint8_t SendCmdFlag;   /* Command-sent flag */
    uint8_t RevCmdFlag;    /* Reply-received flag */
    //**********************************************************
    int64_t lsendtime;    /* Absolute send time of the command, in milliseconds */
    int cameraaddr;       /* Camera address */
    int SerialCmdidx;     /* Index of the command currently on the wire (-1: none) */
    PHOTO_DEF image;      /* Staging buffer for image data */
    int64_t FirstCmdTimeCnt; /* Start time of the serial read */
    PTZ_STATE ptz_state;  // Last PTZ status reply
    int sendptzstatecmd;  // Limits the number of PTZ status query attempts
    BD_GNSS_DATA bd_data; // Last parsed BeiDou fix
} SIO_PARAM_SERIAL_DEF;
// Dispatch entry mapping an NMEA-0183 sentence name to its parser.
typedef const struct
{
    //char *account; // (disabled) command description
    char *cmd_name; // Sentence name the entry matches (e.g. "RMC")
    int (*recv_process)(SIO_PARAM_SERIAL_DEF *); /* Handler invoked for the sentence payload */
}BD_NMEA0183_PROC_FUNC;
// Per-device communication state for serial-attached sensors.
typedef struct
{
    //******************** basic port information ************************
    uint8_t IsNeedSerial;    /* Whether this device needs serial communication */
    int CmdWaitTime;         /* Unused */
    uint8_t UseSerialidx;    /* Index of the serial port used */
    int SerialCmdidx;        /* Index of the command currently being sent (-1: none) */
    int enrecvtime;          /* Time since a reply to the encrypted command was received */
    int64_t FirstCmdTimeCnt; /* Start time of the serial read */
    uint8_t nextcmd;         /* Second issue of the weather/rainfall read command */
    uint8_t SameTypeDevIdx;  /* Ordinal among devices of the same type (zero-based) */
    uint8_t uOpenPowerFlag;  /* Sensor power-on flag (0: leave off; 1: power on) */
    int recvdatacnt;         /* Count of valid data items received */
    PHOTO_DEF image;         /* Staging buffer for image data */
    AI_DEF aiValue[MAX_DEV_VALUE_NUM]; /* Sensor sample values */
} SERIAL_DEV_DEF;
// Global state for all serial ports, attached sensors and the serial camera.
typedef struct
{
    uint8_t clcyesampling;   /* Sampling in progress (0: no; 1: yes) */
    uint8_t camerauseserial; /* Which serial port the camera uses */
    uint32_t PtzCmdType;     /* PTZ command type */
    int usecameradevidx;     /* Device index with a pending camera command */
                             /* (-1: no command pending) */
    int SendStopPtzCmdTimeCnt; /* Timer for sending the PTZ stop command */
    uint8_t serialstatus[MAX_SERIAL_PORT_NUM]; /* Usability of serial ports 1, 2, 3, ... */
    SERIAL_DEV_DEF ms_dev[MAX_SERIAL_DEV_NUM]; /* Attached sensor devices */
    int UseingSerialdev[MAX_SERIAL_PORT_NUM];  /* Device currently using each port (-1: port idle) */
    int curdevidx[MAX_SERIAL_PORT_NUM];        /* Device currently communicating on each port (-1: none) */
    uint8_t IsReadWireTem;   /* Whether wire-temperature reading has started (0: no; 1: yes) */
    //int proruntime;        /* (disabled) program run time */
    int IsSleep;             /* Whether the program may sleep (1: no sleep; 2: sleep) */
    int tempsamplingstartime; /* Interval from temperature-measurement start to sampling start */
    int tempsamplingsucctime; /* Interval from temperature-measurement start to sampling success */
    int samplingtimeSec;     /* Second-level timing control for high-rate sampling */
    int SectimesamplingCnt[3]; /* High-rate per-second sample counts */
    int SunshineSensorsFault; /* Throttles sunshine-sensor fault reporting */
    int TempSensorsFault;    /* Throttles temperature-sensor fault reporting */
    int FirstSensorsFault;   /* First-time sensor-fault reporting control */
    int SensorsIsUse;        /* Whether the sensor is enabled, matching its self-test slot */
    int sequsampling;        /* Sequential-sampling device index (-1: no sampling; otherwise the device index) */
    int imagepacketnum;      /* Total packet count of the serial-camera photo */
    int historyimagenum[MAX_CHANNEL_NUM]; /* History image counts kept by the dome camera */
#if 1
    //int sendflag;          /* (disabled) temporary leakage-current upload flag */
    int sendphototime;       /* Temporary image-upload statistics */
    int sendphotocmdcnt;     /* Number of capture commands sent within one capture cycle */
    int photographtime;      /* Time the image was captured */
    int iLastGetPhotoNo;     /* Saved capture-command index while configuring the serial camera */
    uint8_t bImageSize;      /* Temporary store of the image size requested by the upper layer */
    uint8_t presetno;        /* Temporary store of the preset requested by the upper layer */
    char filedir[512];       /* Temporary directory for images after capture */
#endif
    uint8_t errorPhotoNoCnt; /* Wrong-packet-number replies during photo download (e.g. asked for packet 6, got packet 3) */
    uint8_t RephotographCnt; /* Re-capture count (only incremented on photo-data reply errors) */
} SRDT_DEF;
static void PortDataProcess( void );
static int64_t get_msec();
int serial_port_comm();
static int weather_comm(SERIAL_PARAM weatherport);
static void setRS485Enable(bool z);
static void set485WriteMode();
static void set485ReadMode();
static void set12VEnable(bool z);
static void setCam3V3Enable(bool enabled);
// 串口相关的所有函数定义
/* 打开串口电源*/
void Gm_OpenSerialPower();
uint8_t getdevtype(int devno);
// 打开传感器电源
void Gm_OpenSensorsPower();
// 关闭传感器电源
void Gm_CloseSensorsPower(int port);
// 打开串口通讯
void Gm_OpenSerialPort(int devidx);
// 关闭串口通讯
void Gm_CloseSerialPort();
void DBG_LOG(int commid, char flag, const char* format, ...);
int SaveLogTofile(int commid, const char *szbuf);
// 功能说明:串口发送数据 返回实际发送的字节数
int GM_SerialComSend(const unsigned char * cSendBuf, size_t nSendLen, int commid);
void Gm_InitSerialComm(SENSOR_PARAM *sensorParam, const char *filedir,const char *log);
// 启动串口通讯
void GM_StartSerialComm();
// 启动使用串口拍照
int GM_StartSerialCameraPhoto(int phototime, unsigned char channel, int cmdidx, unsigned char bImageSize, unsigned char presetno, const char *serfile, unsigned int baud, int addr);
void delete_old_files(const char *path, int days);
// 串口轮询通讯定时器
int GM_SerialTimer();
//轮询所有串口和传感器是否需要生成下发命令
void Gm_FindAllSensorsCommand();
//检查所有传感器是否采集完毕,采集完毕的关闭传感器电源
void GM_IsCloseSensors();
//检查所有串口是否有数据接收,有则启动接收
void GM_AllSerialComRecv();
//判断是否需要关闭定时器
int GM_CloseTimer();
void testComm();
void Gm_InitSerialComm_Test();
// 串口接收数据处理
void SerialDataProcess(int devidx, uint8_t *buf, int len);
void CameraRecvData(SIO_PARAM_SERIAL_DEF *pPortParam, uint8_t *buf, int len);
// 串口摄像机数据处理
void CameraPhotoPortDataProcess(SIO_PARAM_SERIAL_DEF *curserial);
// 发送命令
void SendCmdFormPollCmdBuf( int port );
// 清除发送命令的所有标识
void ClearCmdAllFlag(int commid);
// 下发串口拍照指令控制
int FindNextCameraPhotoCommand(SIO_PARAM_SERIAL_DEF *pPortParam);
// 生成 CameraPhoto命令
void MakeCameraPhotoCommand(SIO_PARAM_SERIAL_DEF *pPortParam, uint8_t cmdidx, int OneParam, uint16_t TwoParam, uint8_t Threep, int phototime);
// 清除命令缓冲区
void ClearCmdFormPollCmdBuf(int port);
// 准备发送云台指令
int Gm_CtrlPtzCmd(SIO_PARAM_SERIAL_DEF *pPortParam, uint32_t ptzcmd);
// 发送转动摄像机云台命令定时器
int Gm_Camera_Timer();
// 生成 PELCO_P 命令 *
void Gm_SendPelco_pCommand( uint32_t cmdtype);
// 计算Pelco_p校验
uint8_t Gm_Pelco_pXORCheck( uint8_t *msg, int len );
// 生成 PELCO_D 命令 *
void Gm_SendPelco_DCommand(SIO_PARAM_SERIAL_DEF *pPortParam, uint32_t cmdtype);
// 计算Pelco_D校验
uint8_t Gm_Pelco_DCheck( uint8_t *msg, int len );
// 查询传感器电源状态
char Gm_GetSensorsPowerState(int port);
// 通过传感器使用的航空头查找传感器使用的串口序号
void FindDevUseSerialCommNo();
// 寻找并生成下一条倾角命令
int FindNextShxyProtocolCommand( int devidx );
// 倾角命令校验码计算
unsigned char CalLpc(unsigned char *msg, int len);
// 读上海欣影传感器协议数据
void ShxyProtocolRecvData(int commid, uint8_t *buf, int len);
// 检查检验和是否正确
int CheckShxyProtocolLpcError( uint8_t* msg, int len );
// 把16进制和10进制ASCII字符串转换成int整数
int ATOI(char *buf);
//生成倾角命令
void MakeShxyProtocolPollCommand(int portno, uint8_t cmdidx);
// 上海欣影传感器协议数据处理
void ShxyProtocolDataProcess( int commid);
// 控制关闭传感器电源
//void Gm_CtrlCloseSensorsPower(int devidx);
// 检查传感器电源是否应该关闭或打开
//void Gm_CheckSensorsPower(void);
int SaveImageDataTofile(int devno);
void Collect_sensor_data();
int CameraPhotoCmd(int phototime, unsigned char channel, int cmdidx, unsigned char bImageSize, unsigned char presetno, const char *serfile, unsigned int baud, int addr);
/* 数据和图片采集数据返回函数 开始*/
int GetWeatherData(Data_DEF *data, int datano);
int GetAirTempData(Data_DEF *airt);
int GetHumidityData(Data_DEF *airt);
int GetWindSpeedData(Data_DEF *airt);
int GetWindDirectionData(Data_DEF *airt);
int GetRainfallData(Data_DEF *airt);
int GetAtmosData(Data_DEF *airt);
int GetOpticalRadiationData(Data_DEF *airt);
int GetPullValue(int devno, Data_DEF *data);
int GetAngleValue(int devno, Data_DEF *data, int Xy);
int GetImage(int devno, IMAGE_DEF *photo);
/* 数据和图片采集数据返回函数 结束*/
// 生成一个随机整数
int GeneratingRandomNumber();
int Gm_SetSerialPortParam(int commid);
void ClearCameraCmdAllFlag(SIO_PARAM_SERIAL_DEF *pPortParam);
void ClearCameraCmdFormPollCmdBuf(SIO_PARAM_SERIAL_DEF *pPortParam);
int Gm_OpenCameraSerial(SIO_PARAM_SERIAL_DEF *pPortParam, const char *serfile, unsigned int baud);
int Gm_SetCameraSerialPortParam(int fd, unsigned int baud);
int GM_CameraComSend(unsigned char * cSendBuf, size_t nSendLen, int fd);
void SendCameraCmdFormPollCmdBuf(SIO_PARAM_SERIAL_DEF *pPortParam);
void Gm_FindCameraCommand(SIO_PARAM_SERIAL_DEF *pPortParam);
void GM_CameraSerialComRecv(SIO_PARAM_SERIAL_DEF *pPortParam);
int GM_IsCloseCamera(SIO_PARAM_SERIAL_DEF *pPortParam);
int GM_CameraSerialTimer(SIO_PARAM_SERIAL_DEF *pPortParam);
int QueryPtzState(PTZ_STATE *ptz_state, int cmdidx, const char *serfile, unsigned int baud, int addr);
void MakePtzStateQueryCommand(SIO_PARAM_SERIAL_DEF *pPortParam, uint8_t cmdidx);
int Query_BDGNSS_Data(BD_GNSS_DATA *BD_data, int samptime, const char *serfile, unsigned int baud);
int GM_BdSerialTimer(SIO_PARAM_SERIAL_DEF *pPortParam);
void GM_BdSerialComRecv(SIO_PARAM_SERIAL_DEF *pPortParam);
void BdRecvData(SIO_PARAM_SERIAL_DEF *pPortParam, u_char *buf, int len);
unsigned char BDXorCheck(unsigned char *msg, int len);
void BD_NMEA0183_PortDataProcess(SIO_PARAM_SERIAL_DEF *curserial);
char** BD_NMEA0183_SplitString(char *str, int *total_fields);
int BD_get_BDRMC_data(SIO_PARAM_SERIAL_DEF *curserial);
#endif // __SENSOR_PROTOCOL_H__

@ -51,16 +51,16 @@ static void set_parity (struct termios *opt, char parity)
{
switch (parity)
{
case'N':/* 无校验 */
case 'N':/* 无校验 */
case 'n':
opt->c_cflag &= ~PARENB;
break;
case'E':/*偶校验*/
case 'E':/*偶校验*/
case 'e':
opt->c_cflag |= PARENB;
opt->c_cflag &= ~PARODD;
break;
case'O':/* 奇校验 */
case 'O':/* 奇校验 */
case 'o':
opt->c_cflag |= PARENB;
opt->c_cflag |= ~PARODD;

@ -1,79 +0,0 @@
#include "TerminalDevice.h"
#include <dlfcn.h>
#include "Camera.h"
#include <AndroidHelper.h>
typedef jbyteArray (*TakePhotoFunc)(int, int, int, int);
extern bool GetJniEnv(JavaVM *vm, JNIEnv **env, bool& didAttachThread);
// Pins a global JNI reference to the Java service object so it can be used
// from any thread for the lifetime of this wrapper. Attaches the current
// thread to the VM if needed and detaches it again afterwards.
CTerminalDevice::CTerminalDevice(JavaVM* vm, jobject service)
{
    m_vm = vm;
    m_javaService = NULL; // ensure a defined value even on the failure path
    JNIEnv* env = NULL;
    bool attached = false;
    bool res = GetJniEnv(m_vm, &env, attached);
    if (!res)
    {
        // Previously execution fell through and dereferenced the NULL env in
        // NewGlobalRef; bail out instead and leave m_javaService NULL.
        ALOGE("Failed to get JNI Env");
        return;
    }
    m_javaService = env->NewGlobalRef(service);
    if (attached)
    {
        vm->DetachCurrentThread();
    }
}
// Releases the global reference taken in the constructor, attaching the
// current thread to the VM if needed.
CTerminalDevice::~CTerminalDevice()
{
    JNIEnv* env = NULL;
    bool attached = false;
    bool res = GetJniEnv(m_vm, &env, attached);
    if (!res)
    {
        // Previously execution fell through and called DeleteGlobalRef on a
        // NULL env; bail out instead (the global ref leaks, but we cannot
        // release it without a valid JNIEnv).
        ALOGE("Failed to get JNI Env");
        m_javaService = NULL;
        return;
    }
    if (m_javaService != NULL)
    {
        env->DeleteGlobalRef(m_javaService);
    }
    if (attached)
    {
        m_vm->DetachCurrentThread();
    }
    m_javaService = NULL;
}
// Takes a picture through the native CCamera wrapper.
// NOTE(review): `res` is only assigned inside the disabled #if 0 branch, so
// this function always returns false, even when the native capture ran --
// confirm whether callers depend on the return value before relying on it.
bool CTerminalDevice::TakePhoto(unsigned char channel, unsigned char preset, const string& path, bool photo)
{
    jboolean res = JNI_FALSE;
    // Synchronous native capture: init -> shoot (only if ready) -> close.
    CCamera camera;
    camera.initCamera(NULL);
    if (camera.isCameraReady())
    {
        camera.takePicture();
    }
    camera.closeCamera();
#if 0
    // Legacy path: forward the request to the Java service over JNI.
    JNIEnv* env = NULL;
    bool attached = GetJniEnv(m_vm, &env);
    jclass serviceClass = env->GetObjectClass(m_javaService);
    jmethodID mid = env->GetMethodID(serviceClass, "takePhoto", "(SSLjava/lang/String;)Z");
    jstring str = env->NewStringUTF(path.c_str());
    res = env->CallBooleanMethod (m_javaService, mid, (jint)channel, (jint)preset, str);
    // NOTE(review): ReleaseStringUTFChars expects the pointer previously
    // returned by GetStringUTFChars, not path.c_str(); if this branch is ever
    // re-enabled, the correct cleanup for NewStringUTF is DeleteLocalRef(str).
    env->ReleaseStringUTFChars(str, path.c_str());
    env->DeleteLocalRef(serviceClass);
    if (!res)
    {
        int aa = 1;
    }
    if (attached)
    {
        m_vm->DetachCurrentThread();
    }
#endif
    return res == JNI_TRUE;
}

@ -1,21 +0,0 @@
#ifndef __TERMINAL_DEVICE_H__
#define __TERMINAL_DEVICE_H__
#include <Client/Device.h>
#include <jni.h>
// IDevice implementation that performs photo capture through the Android
// JNI layer, holding a global reference to the Java-side service object.
// NOTE(review): ~CTerminalDevice() is not declared virtual here; if IDevice's
// destructor is not virtual either, deleting through an IDevice* is undefined
// behavior -- confirm against Client/Device.h.
class CTerminalDevice : public IDevice
{
public:
    CTerminalDevice(JavaVM* vm, jobject service);
    ~CTerminalDevice();
    // Captures a photo for the given channel/preset and stores it at `path`.
    virtual bool TakePhoto(unsigned char channel, unsigned char preset, const string& path, bool photo);
private:
    JavaVM* m_vm;          // Process JavaVM (not owned)
    jobject m_javaService; // Global ref to the Java service; released in the destructor
};
#endif // __TERMINAL_DEVICE_H__

@ -17,6 +17,11 @@
#ifndef __CAMERA2_HELPER_H__
#define __CAMERA2_HELPER_H__
#include <opencv2/opencv.hpp>
#include <opencv2/core/core.hpp>
#include <opencv2/highgui.hpp>
#include "mat.h"
template <typename T>
class RangeValue {
@ -103,4 +108,107 @@ private:
};
// Converts a 4:2:0 semi-planar camera frame into an RGB cv::Mat of size
// (orgWidth x orgHeight): optionally rotates by a multiple of 90 degrees
// derived from the sensor orientation and the display rotation (mirrored for
// the front camera), then either resizes (same aspect ratio) or center-crops
// to the requested size.
// NOTE(review): despite the NV21 naming, the color conversion calls
// ncnn::yuv420sp2rgb_nv12 (NV12 chroma order, U before V) -- confirm the
// camera pipeline really delivers NV12, otherwise red/blue channels swap.
inline void ConvertYUV21ToMat(const uint8_t* nv21, int nv21_width, int nv21_height, int orgWidth, int orgHeight,
    int sensorOrientation, bool front, int rotation, cv::Mat& rgb)
{
    int w = 0;
    int h = 0;
    int rotate_type = 0; // ncnn kanna_rotate type code (1..8: rotations/flips)
    cv::Mat nv21_rotated;
    const unsigned char* yuv420data = nv21;
    if (rotation != 0)
    {
        // Combined orientation in [0, 360): sensor mounting angle adjusted by
        // the display rotation; the front camera is additionally mirrored.
        int co = 0;
        if (front)
        {
            co = (sensorOrientation + (rotation - 1) * 90) % 360;
            co = (360 - co) % 360;
        }
        else
        {
            co = (sensorOrientation - (rotation - 1) * 90 + 360) % 360;
        }
        // XYLOG(XYLOG_SEVERITY_DEBUG, "Orientation=%d Facing=%d", co, camera_facing);
        // int co = 0;
        if (co == 0)
        {
            w = nv21_width;
            h = nv21_height;
            rotate_type = front ? 2 : 1;
        }
        else if (co == 90)
        {
            // 90-degree rotation: swap width/height of both the frame and the target.
            w = nv21_height;
            h = nv21_width;
            int tmp = orgWidth;
            orgWidth = orgHeight;
            orgHeight = tmp;
            rotate_type = front ? 5 : 6;
        }
        else if (co == 180)
        {
            w = nv21_width;
            h = nv21_height;
            rotate_type = front ? 4 : 3;
        }
        else if (co == 270)
        {
            // 270-degree rotation: swap width/height of both the frame and the target.
            w = nv21_height;
            h = nv21_width;
            int tmp = orgWidth;
            orgWidth = orgHeight;
            orgHeight = tmp;
            rotate_type = front ? 7 : 8;
        }
        // Rotated YUV420sp frame occupies h * 1.5 rows of width w.
        nv21_rotated.create(h + h / 2, w, CV_8UC1);
        ncnn::kanna_rotate_yuv420sp(nv21, nv21_width, nv21_height, nv21_rotated.data, w, h, rotate_type);
        yuv420data = nv21_rotated.data;
    }
    else
    {
        w = nv21_width;
        h = nv21_height;
    }
    // nv21_rotated to rgb
    if (w == orgWidth && h == orgHeight)
    {
        // Exact size: convert directly into the output.
        rgb.create(h, w, CV_8UC3);
        // ncnn::yuv420sp2rgb(nv21_rotated.data, w, h, rgb.data);
        ncnn::yuv420sp2rgb_nv12(yuv420data, w, h, rgb.data);
    }
    else
    {
        cv::Mat org(h, w, CV_8UC3);
        ncnn::yuv420sp2rgb_nv12(yuv420data, w, h, org.data);
        if (w * orgHeight == h * orgWidth) // Same Ratio
        {
            cv::resize(org, rgb, cv::Size(orgWidth, orgHeight));
        }
        else
        {
            // Crop image
            if (w > orgWidth && h >= orgHeight)
            {
                // Center-crop to the target size.
                int left = (w - orgWidth) / 2;
                int top = (h - orgHeight) / 2;
                rgb = org(cv::Range(top, top + orgHeight), cv::Range(left, left + orgWidth));
            }
            else
            {
                // Frame smaller than the target in some dimension: return as-is.
                // NOTE(review): the caller receives a Mat that is NOT
                // orgWidth x orgHeight in this case -- confirm downstream code copes.
                rgb = org;
            }
        }
    }
}
#endif /* __CAMERA2_HELPER_H__ */

@ -9,6 +9,7 @@
using namespace std;
using namespace cv;
// https://zhuanlan.zhihu.com/p/38176640
void Debevec(vector<Mat>exposureImages, vector<float>exposureTimes, Mat& output);
void Robertson(vector<Mat>exposureImages, vector<float>exposureTimes, Mat& output);

@ -1,3 +1,4 @@
/*
* Copyright (C) 2017 The Android Open Source Project
*

File diff suppressed because it is too large Load Diff

@ -0,0 +1,724 @@
/* Copyright Statement:
*
* This software/firmware and related documentation ("MediaTek Software") are
* protected under relevant copyright laws. The information contained herein is
* confidential and proprietary to MediaTek Inc. and/or its licensors. Without
* the prior written permission of MediaTek inc. and/or its licensors, any
* reproduction, modification, use or disclosure of MediaTek Software, and
* information contained herein, in whole or in part, shall be strictly
* prohibited.
*
* MediaTek Inc. (C) 2010. All rights reserved.
*
* BY OPENING THIS FILE, RECEIVER HEREBY UNEQUIVOCALLY ACKNOWLEDGES AND AGREES
* THAT THE SOFTWARE/FIRMWARE AND ITS DOCUMENTATIONS ("MEDIATEK SOFTWARE")
* RECEIVED FROM MEDIATEK AND/OR ITS REPRESENTATIVES ARE PROVIDED TO RECEIVER
* ON AN "AS-IS" BASIS ONLY. MEDIATEK EXPRESSLY DISCLAIMS ANY AND ALL
* WARRANTIES, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED
* WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE OR
* NONINFRINGEMENT. NEITHER DOES MEDIATEK PROVIDE ANY WARRANTY WHATSOEVER WITH
* RESPECT TO THE SOFTWARE OF ANY THIRD PARTY WHICH MAY BE USED BY,
* INCORPORATED IN, OR SUPPLIED WITH THE MEDIATEK SOFTWARE, AND RECEIVER AGREES
* TO LOOK ONLY TO SUCH THIRD PARTY FOR ANY WARRANTY CLAIM RELATING THERETO.
* RECEIVER EXPRESSLY ACKNOWLEDGES THAT IT IS RECEIVER'S SOLE RESPONSIBILITY TO
* OBTAIN FROM ANY THIRD PARTY ALL PROPER LICENSES CONTAINED IN MEDIATEK
* SOFTWARE. MEDIATEK SHALL ALSO NOT BE RESPONSIBLE FOR ANY MEDIATEK SOFTWARE
* RELEASES MADE TO RECEIVER'S SPECIFICATION OR TO CONFORM TO A PARTICULAR
* STANDARD OR OPEN FORUM. RECEIVER'S SOLE AND EXCLUSIVE REMEDY AND MEDIATEK'S
* ENTIRE AND CUMULATIVE LIABILITY WITH RESPECT TO THE MEDIATEK SOFTWARE
* RELEASED HEREUNDER WILL BE, AT MEDIATEK'S OPTION, TO REVISE OR REPLACE THE
* MEDIATEK SOFTWARE AT ISSUE, OR REFUND ANY SOFTWARE LICENSE FEES OR SERVICE
* CHARGE PAID BY RECEIVER TO MEDIATEK FOR SUCH MEDIATEK SOFTWARE AT ISSUE.
*
* The following software/firmware and/or related documentation ("MediaTek
* Software") have been modified by MediaTek Inc. All revisions are subject to
* any receiver's applicable license agreements with MediaTek Inc.
*/
#ifndef _MTK_HARDWARE_MTKCAM_INCLUDE_MTKCAM_UTILS_METADATA_HAL_MTKPLATFORMMETADATATAG_H_
#define _MTK_HARDWARE_MTKCAM_INCLUDE_MTKCAM_UTILS_METADATA_HAL_MTKPLATFORMMETADATATAG_H_
/******************************************************************************
*
******************************************************************************/
/**
 * Section identifiers for MTK platform-private camera metadata.
 * Each section's tags occupy (section_id << 16) and up, so sections start
 * at 0xC000 to sit above the standard Android metadata tag space.
 * NOTE: enumerators after MTK_HAL_REQUEST take sequential implicit values
 * (0xC001, 0xC002, ...); do NOT reorder or insert in the middle — the
 * *_START values in mtk_platform_metadata_section_start depend on them.
 */
typedef enum mtk_platform_metadata_section {
    MTK_HAL_REQUEST = 0xC000, // MTK HAL internal metadata become from 0xC000 0000
    MTK_P1NODE,
    MTK_P2NODE,
    MTK_3A_TUNINING,  // NOTE(review): "TUNINING" is a long-standing typo for "TUNING"; kept for ABI compatibility
    MTK_3A_EXIF,
    MTK_MF_EXIF,
    MTK_EIS,
    MTK_STEREO,
    MTK_FRAMESYNC,
    MTK_VHDR,
    MTK_PIPELINE,
    MTK_NR,
    MTK_PLUGIN,
    MTK_DUALZOOM,
    MTK_FEATUREPIPE,
    MTK_POSTPROC,
    MTK_FEATURE,
    MTK_FSC,
} mtk_platform_metadata_section_t;
/******************************************************************************
*
******************************************************************************/
/**
 * First tag value of each MTK platform-metadata section.
 *
 * Every section's tags begin at (section_id << 16); section ids start at
 * 0xC000, so MTK_HAL_REQUEST_START is 0xC0000000.
 *
 * Fix: the shift is now performed on an unsigned operand.  All section ids
 * are >= 0xC000, so `id << 16` does not fit in a signed 32-bit int, and
 * left-shifting into/past the sign bit of a signed int is undefined
 * behaviour before C++20.  Casting to unsigned keeps every resulting value
 * bit-identical while making the computation well defined.
 *
 * NOTE(review): there is no MTK_MF_EXIF_START even though MTK_MF_EXIF
 * exists in mtk_platform_metadata_section — confirm this is intentional.
 */
typedef enum mtk_platform_metadata_section_start {
    MTK_HAL_REQUEST_START = (unsigned int)MTK_HAL_REQUEST << 16,
    MTK_P1NODE_START      = (unsigned int)MTK_P1NODE      << 16,
    MTK_P2NODE_START      = (unsigned int)MTK_P2NODE      << 16,
    MTK_3A_TUNINING_START = (unsigned int)MTK_3A_TUNINING << 16,
    MTK_3A_EXIF_START     = (unsigned int)MTK_3A_EXIF     << 16,
    MTK_EIS_START         = (unsigned int)MTK_EIS         << 16,
    MTK_STEREO_START      = (unsigned int)MTK_STEREO      << 16,
    MTK_FRAMESYNC_START   = (unsigned int)MTK_FRAMESYNC   << 16,
    MTK_VHDR_START        = (unsigned int)MTK_VHDR        << 16,
    MTK_PIPELINE_START    = (unsigned int)MTK_PIPELINE    << 16,
    MTK_NR_START          = (unsigned int)MTK_NR          << 16,
    MTK_PLUGIN_START      = (unsigned int)MTK_PLUGIN      << 16,
    MTK_DUALZOOM_START    = (unsigned int)MTK_DUALZOOM    << 16,
    MTK_FEATUREPIPE_START = (unsigned int)MTK_FEATUREPIPE << 16,
    MTK_POSTPROC_START    = (unsigned int)MTK_POSTPROC    << 16,
    MTK_FEATURE_START     = (unsigned int)MTK_FEATURE     << 16,
    MTK_FSC_START         = (unsigned int)MTK_FSC         << 16,
} mtk_platform_metadata_section_start_t;
/******************************************************************************
*
******************************************************************************/
/**
 * MTK platform-private metadata tags.
 *
 * Each group of tags anchors at its section's *_START value; the tags that
 * follow take sequential implicit values within that section.  The trailing
 * comment on each line documents the value type stored under that tag.
 * Do NOT reorder or insert enumerators mid-section — tag values are part of
 * the HAL <-> vendor metadata ABI.
 */
typedef enum mtk_platform_metadata_tag {
    // ---- MTK_HAL_REQUEST section ----
    MTK_HAL_REQUEST_REQUIRE_EXIF = MTK_HAL_REQUEST_START, //MUINT8
    MTK_HAL_REQUEST_DUMP_EXIF, //MUINT8
    MTK_HAL_REQUEST_REPEAT, //MUINT8
    MTK_HAL_REQUEST_DUMMY, //MUINT8
    MTK_HAL_REQUEST_SENSOR_SIZE, //MSize
    MTK_HAL_REQUEST_SENSOR_ID, //MINT32
    MTK_HAL_REQUEST_DEVICE_ID, //MINT32
    MTK_HAL_REQUEST_HIGH_QUALITY_CAP, //MUINT8
    MTK_HAL_REQUEST_ISO_SPEED, //MINT32
    MTK_HAL_REQUEST_BRIGHTNESS_MODE, //MINT32
    MTK_HAL_REQUEST_CONTRAST_MODE, //MINT32
    MTK_HAL_REQUEST_HUE_MODE, //MINT32
    MTK_HAL_REQUEST_SATURATION_MODE, //MINT32
    MTK_HAL_REQUEST_EDGE_MODE, //MINT32
    MTK_HAL_REQUEST_PASS1_DISABLE, //MINT32
    MTK_HAL_REQUEST_ERROR_FRAME, // used for error handling //MUINT8
    MTK_HAL_REQUEST_PRECAPTURE_START, // 4cell //MUINT8
    MTK_HAL_REQUEST_AF_TRIGGER_START, // 4cell //MUINT8
    MTK_HAL_REQUEST_IMG_IMGO_FORMAT, //MINT32
    MTK_HAL_REQUEST_IMG_RRZO_FORMAT, //MINT32
    MTK_HAL_REQUEST_INDEX, //MINT32
    MTK_HAL_REQUEST_COUNT, //MINT32
    MTK_HAL_REQUEST_SMVR_FPS, //MUINT8 // 0: NOT batch request
    MTK_HAL_REQUEST_REMOSAIC_ENABLE, //MUINT8 // 0: preview mode 1: capture mode
    MTK_HAL_REQUEST_INDEX_BSS, //MINT32
    MTK_HAL_REQUEST_ZSD_CAPTURE_INTENT, //MUINT8
    MTK_HAL_REQUEST_REAL_CAPTURE_SIZE, //MSize
    MTK_HAL_REQUEST_VIDEO_SIZE, //MSize
    MTK_HAL_REQUEST_RAW_IMAGE_INFO, //MINT32 // index[0]: raw fmt, index[1]: raw stride, index[2]: raw size(width), index[3]: raw size(height)
    MTK_HAL_REQUEST_ISP_PIPELINE_MODE, //MINT32
    // ---- MTK_P1NODE section ----
    MTK_P1NODE_SCALAR_CROP_REGION = MTK_P1NODE_START, //MRect
    MTK_P1NODE_BIN_CROP_REGION, //MRect
    MTK_P1NODE_DMA_CROP_REGION, //MRect
    MTK_P1NODE_BIN_SIZE, //MSize
    MTK_P1NODE_RESIZER_SIZE, //MSize
    MTK_P1NODE_RESIZER_SET_SIZE, //MSize
    MTK_P1NODE_CTRL_RESIZE_FLUSH, //MBOOL
    MTK_P1NODE_CTRL_READOUT_FLUSH, //MBOOL
    MTK_P1NODE_CTRL_RECONFIG_SENSOR_SETTING, //MBOOL
    MTK_P1NODE_PROCESSOR_MAGICNUM, //MINT32
    MTK_P1NODE_MIN_FRM_DURATION, //MINT64
    MTK_P1NODE_RAW_TYPE, //MINT32
    MTK_P1NODE_SENSOR_CROP_REGION, //MRect
    MTK_P1NODE_YUV_RESIZER1_CROP_REGION, //MRect
    MTK_P1NODE_YUV_RESIZER2_CROP_REGION, //MRect
    MTK_P1NODE_YUV_RESIZER1_SIZE, //MSize
    MTK_P1NODE_SENSOR_MODE, //MINT32
    MTK_P1NODE_SENSOR_VHDR_MODE, //MINT32
    MTK_P1NODE_METADATA_TAG_INDEX, //MINT32
    MTK_P1NODE_RSS_SIZE, //MSize
    MTK_P1NODE_SENSOR_STATUS, //MINT32
    MTK_P1NODE_SENSOR_RAW_ORDER, //MINT32
    MTK_P1NODE_TWIN_SWITCH, //MINT32
    MTK_P1NODE_TWIN_STATUS, //MINT32
    MTK_P1NODE_RESIZE_QUALITY_SWITCH, //MINT32
    MTK_P1NODE_RESIZE_QUALITY_STATUS, //MINT32
    MTK_P1NODE_RESIZE_QUALITY_LEVEL, //MINT32
    MTK_P1NODE_RESIZE_QUALITY_SWITCHING, //MBOOL
    MTK_P1NODE_RESUME_SHUTTER_TIME_US, //MINT32
    MTK_P1NODE_FRAME_START_TIMESTAMP, //MINT64
    MTK_P1NODE_FRAME_START_TIMESTAMP_BOOT, //MINT64
    MTK_P1NODE_REQUEST_PROCESSED_WITHOUT_WB, //MBOOL
    MTK_P1NODE_ISNEED_GMV, //MBOOL
    // ---- MTK_P2NODE section ----
    MTK_P2NODE_HIGH_SPEED_VDO_FPS = MTK_P2NODE_START, //MINT32
    MTK_P2NODE_HIGH_SPEED_VDO_SIZE, //MSize
    MTK_P2NODE_CTRL_CALTM_ENABLE, //MBOOL
    MTK_P2NODE_FD_CROP_REGION, //MRect
    MTK_P2NODE_CROP_REGION, //MRect // for removing black edge
    MTK_P2NODE_DSDN_ENABLE, //MBOOL // for DSDN on/off controlled by Policy
    MTK_P2NODE_SENSOR_CROP_REGION, //MRect
    // NOTE(review): the next two tags live in the P2NODE value range even
    // though their names suggest other sections — kept as-is for ABI.
    MTK_3A_AE_HIGH_ISO_BINNING, //MBOOL // for 3HDR high iso binning mode
    MTK_SENSOR_SCALER_CROP_REGION, //MRect
    // ---- MTK_3A_TUNINING section ----
    MTK_PROCESSOR_CAMINFO = MTK_3A_TUNINING_START, //IMemory
    MTK_ISP_ATMS_MAPPING_INFO, //IMemory
    MTK_3A_ISP_PROFILE, //MUINT8
    MTK_3A_ISP_P1_PROFILE, //MUINT8
    MTK_CAMINFO_LCSOUT_INFO, //IMemory
    MTK_3A_ISP_BYPASS_LCE, //MBOOL
    MTK_3A_ISP_DISABLE_NR, //MBOOL
    MTK_3A_ISP_NR3D_SW_PARAMS, //MINT32[14] //GMVX, GMVY, confX, confY, MAX_GMV, frameReset, GMV_Status,ISO_cutoff
    MTK_3A_ISP_NR3D_HW_PARAMS, //IMemory
    MTK_3A_ISP_LCE_GAIN, //MINT32, bits[0:15]: LCE gain, bits[16:31]: LCE gain confidence ratio (0-100)
    MTK_3A_ISP_FUS_NUM, //MINT32
    MTK_3A_AE_CAP_PARAM, //IMemory
    MTK_3A_AE_CAP_SINGLE_FRAME_HDR, //MUINT8
    MTK_3A_AE_BV_TRIGGER, //MBOOL
    MTK_3A_AF_LENS_POSITION, //MINT32
    MTK_3A_FLICKER_RESULT, //MINT32
    MTK_3A_DUMMY_BEFORE_REQUEST_FRAME, //MBOOL // Dummy frame before capture, only for capture intent, preview don't use
    MTK_3A_DUMMY_AFTER_REQUEST_FRAME, //MBOOL // Dummy frame after capture, only for capture intent, preview don't use
    MTK_3A_MANUAL_AWB_COLORTEMPERATURE_MAX, //MINT32
    MTK_3A_MANUAL_AWB_COLORTEMPERATURE_MIN, //MINT32
    MTK_3A_MANUAL_AWB_COLORTEMPERATURE, //MINT32
    MTK_3A_HDR_MODE, //MUINT8
    MTK_3A_AE_HDR_MIXED_ISO, //MUINT32
    MTK_3A_AE_ZSL_STABLE, //MINT32 ( MBOOL )
    MTK_3A_PGN_ENABLE, //MUINT8
    MTK_3A_SKIP_HIGH_QUALITY_CAPTURE, //MUINT8
    MTK_3A_AI_SHUTTER, //MBOOL
    MTK_3A_FEATURE_AE_EXPOSURE_LEVEL, //MINT32
    MTK_3A_FEATURE_AE_TARGET_MODE, //MINT32
    MTK_3A_OPEN_ID, //MINT32
    MTK_LSC_TBL_DATA, //IMemory
    MTK_LSC_TSF_DATA, //IMemory
    MTK_LSC_TSF_DUMP_NO, //IMemory
    MTK_ISP_P2_ORIGINAL_SIZE, //MSize
    MTK_ISP_P2_CROP_REGION, //MRect
    MTK_ISP_P2_RESIZER_SIZE, //MSize
    MTK_ISP_P2_IN_IMG_FMT, //MINT32, 0 or not exist: RAW->YUV, 1: YUV->YUV
    MTK_ISP_P2_TUNING_UPDATE_MODE, //MUINT8, [0 or not exist]: as default; [1]: keep existed parameters but some parts will be updated; [2]: keep all existed parameters (force mode) [3] LPCNR Pass1 [4] LPCNR Pass2
    MTK_ISP_P2_IN_IMG_RES_REVISED, //MINT32, describes P2 input image revised resolution. bit[0:15] width in pixel, bit[16:31] height in pixel. May be not exist.
    MTK_ISP_APP_TARGET_SIZE, //MINT32, describes APP Target resolution. bit[0:15] width in pixel, bit[16:31] height in pixel. May be not exist.
    MTK_MSF_SCALE_INDEX, //MINT32, which scale stage index, would only exist with scaling flow
    MTK_MSF_FRAME_NUM, //MINT32, After BSS which frame number is this stage using
    MTK_TOTAL_MULTI_FRAME_NUM, //MINT32, MSYUV function used this input to know frame number
    MTK_TOTAL_MULTI_FRAME_NUM_CAPTURED, //MINT32, MSF function used
    MTK_SW_DSDN_VERSION, //MINT32, distinguish different dsdn version
    MTK_ISP_COLOR_SPACE, //MINT32
    MTK_ISP_DRC_CURVE, //IMemory
    MTK_ISP_DRC_CURVE_SIZE, //MINT32
    MTK_ISP_FEO_DATA, //IMemory
    MTK_ISP_FEO_ENABLE, //MINT32
    MTK_ISP_FEO_INFO, //IMemory
    MTK_ISP_HLR_RATIO, //MINT32, which is a HDR ratio applied in HLR
    MTK_ISP_STAGE, //MINT32
    MTK_FOCUS_AREA_POSITION, //MINT32
    MTK_FOCUS_AREA_SIZE, //MSize
    MTK_FOCUS_AREA_RESULT, //MUINT8
    MTK_FOCUS_PAUSE, //MUINT8
    MTK_FOCUS_MZ_ON, //MUINT8
    MTK_3A_AF_FOCUS_VALUE, //MINT64
    MTK_3A_PRV_CROP_REGION, //MRect
    MTK_3A_ISP_MDP_TARGET_SIZE, //MSize
    MTK_3A_REPEAT_RESULT, //MUINT8
    MTK_3A_SKIP_PRECAPTURE, //MBOOL //if CUST_ENABLE_FLASH_DURING_TOUCH is true, MW can skip precapture
    MTK_3A_SKIP_BAD_FRAME, //MBOOL
    MTK_3A_FLARE_IN_MANUAL_CTRL_ENABLE, //MBOOL
    MTK_3A_DYNAMIC_SUBSAMPLE_COUNT, //MINT32 30fps = 1, 60fps = 2, ... , 120fps = 4
    MTK_3A_AE_LV_VALUE, //MINT32
    MTK_APP_CONTROL, //MINT32
    MTK_3A_CUST_PARAMS, //IMemory
    MTK_3A_SETTING_CUST_PARAMS, //IMemory
    MTK_3A_PERFRAME_INFO, //IMemory
    MTK_SENSOR_MODE_INFO_ACTIVE_ARRAY_CROP_REGION, //MRect
    MTK_3A_AE_BV, //MINT32
    MTK_3A_AE_CWV, //MINT32
    MTK_ISP_P2_PROCESSED_RAW, //MINT32
    // ---- MTK_3A_EXIF section ----
    MTK_3A_EXIF_METADATA = MTK_3A_EXIF_START, //IMetadata
    // ---- MTK_EIS section ----
    MTK_EIS_REGION = MTK_EIS_START, //MINT32
    MTK_EIS_INFO, //MINT64
    MTK_EIS_VIDEO_SIZE, //MRect
    MTK_EIS_NEED_OVERRIDE_TIMESTAMP, //MBOOL
    MTK_EIS_LMV_DATA, //IMemory
    // ---- MTK_STEREO section ----
    MTK_STEREO_JPS_MAIN1_CROP = MTK_STEREO_START, //MRect
    MTK_STEREO_JPS_MAIN2_CROP, //MRect
    MTK_STEREO_SYNC2A_MODE, //MINT32
    MTK_STEREO_SYNCAF_MODE, //MINT32
    MTK_STEREO_HW_FRM_SYNC_MODE, //MINT32
    MTK_STEREO_NOTIFY, //MINT32
    MTK_STEREO_SYNC2A_MASTER_SLAVE, //MINT32[2]
    MTK_STEREO_SYNC2A_STATUS, //IMemory
    MTK_JPG_ENCODE_TYPE, //MINT8
    MTK_CONVERGENCE_DEPTH_OFFSET, //MFLOAT
    MTK_N3D_WARPING_MATRIX_SIZE, //MUINT32
    MTK_P1NODE_MAIN2_HAL_META, //IMetadata
    MTK_P2NODE_BOKEH_ISP_PROFILE, //MUINT8
    MTK_STEREO_FEATURE_DENOISE_MODE, //MINT32
    MTK_STEREO_FEATURE_SENSOR_PROFILE, //MINT32
    MTK_P1NODE_MAIN2_APP_META, //IMetadata
    MTK_STEREO_FEATURE_OPEN_ID, //MINT32
    MTK_STEREO_FRAME_PER_CAPTURE, //MINT32
    MTK_STEREO_ENABLE_MFB, //MINT32
    MTK_STEREO_BSS_RESULT, //MINT32
    MTK_STEREO_FEATURE_FOV_CROP_REGION, //MINT32[6] // p.x, p.y, p.w, p.h, srcW, srcH
    MTK_STEREO_DCMF_FEATURE_MODE, //MINT32 // mtk_platform_metadata_enum_dcmf_feature_mode
    MTK_STEREO_HDR_EV, //MINT32
    MTK_STEREO_DELAY_FRAME_COUNT, //MINT32
    MTK_STEREO_DCMF_DEPTHMAP_SIZE, //MSize
    MTK_STEREO_WITH_CAMSV, //MBOOL
    // ---- MTK_FRAMESYNC section ----
    MTK_FRAMESYNC_ID = MTK_FRAMESYNC_START, //MINT32
    MTK_FRAMESYNC_TOLERANCE, //MINT64
    MTK_FRAMESYNC_FAILHANDLE, //MINT32
    MTK_FRAMESYNC_RESULT, //MINT64
    MTK_FRAMESYNC_TYPE, //MINT32
    MTK_FRAMESYNC_MODE, //MUINT8
    // ---- MTK_VHDR section ----
    MTK_VHDR_LCEI_DATA = MTK_VHDR_START, //Memory
    MTK_VHDR_IMGO_3A_ISP_PROFILE, //MUINT8
    MTK_HDR_FEATURE_HDR_HAL_MODE, // value type not annotated in original
    MTK_3A_FEATURE_AE_VALID_EXPOSURE_NUM, // value type not annotated in original
    MTK_VHDR_MULTIFRAME_TIMESTAMP, //MINT64
    MTK_VHDR_MULTIFRAME_EXPOSURE_TIME, //MINT64
    // ---- MTK_PIPELINE section ----
    MTK_PIPELINE_UNIQUE_KEY = MTK_PIPELINE_START, //MINT32
    MTK_PIPELINE_FRAME_NUMBER, //MINT32
    MTK_PIPELINE_REQUEST_NUMBER, //MINT32
    MTK_PIPELINE_EV_VALUE, //MINT32
    MTK_PIPELINE_DUMP_UNIQUE_KEY, //MINT32
    MTK_PIPELINE_DUMP_FRAME_NUMBER, //MINT32
    MTK_PIPELINE_DUMP_REQUEST_NUMBER, //MINT32
    MTK_PIPELINE_VIDEO_RECORD, //MINT32
    // ---- MTK_NR section ----
    MTK_NR_MODE = MTK_NR_START, //MINT32
    MTK_NR_MNR_THRESHOLD_ISO, //MINT32
    MTK_NR_SWNR_THRESHOLD_ISO, //MINT32
    MTK_REAL_LV, //MINT32
    MTK_ANALOG_GAIN, //MUINT32
    MTK_AWB_RGAIN, //MINT32
    MTK_AWB_GGAIN, //MINT32
    MTK_AWB_BGAIN, //MINT32
    // ---- MTK_PLUGIN section ----
    MTK_PLUGIN_MODE = MTK_PLUGIN_START, //MINT64
    MTK_PLUGIN_COMBINATION_KEY, //MINT64
    MTK_PLUGIN_P2_COMBINATION, //MINT64
    MTK_PLUGIN_PROCESSED_FRAME_COUNT, //MINT32
    MTK_PLUGIN_CUSTOM_HINT, //MINT32
    MTK_PLUGIN_DETACT_JOB_SYNC_TOKEN, //MINT64, may be not exists.
    MTK_PLUGIN_UNIQUEKEY, // value type not annotated in original
    // ---- MTK_DUALZOOM section ----
    MTK_DUALZOOM_DROP_REQ = MTK_DUALZOOM_START, //MINT32
    MTK_DUALZOOM_FORCE_ENABLE_P2, //MINT32
    MTK_DUALZOOM_DO_FRAME_SYNC, //MINT32
    MTK_DUALZOOM_ZOOM_FACTOR, //MINT32
    MTK_DUALZOOM_DO_FOV, //MINT32
    MTK_DUALZOOM_FOV_RECT_INFO, //MINT32
    MTK_DUALZOOM_FOV_CALB_INFO, //MINT32
    MTK_DUALZOOM_FOV_MARGIN_PIXEL, //MSize
    MTK_DUALCAM_AF_STATE, //MUINT8
    MTK_DUALCAM_LENS_STATE, //MUINT8
    MTK_DUALCAM_TIMESTAMP, //MINT64
    MTK_DUALZOOM_3DNR_MODE, //MINT32
    MTK_DUALZOOM_ZOOMRATIO, //MINT32
    MTK_DUALZOOM_CENTER_SHIFT, //MINT32
    MTK_DUALZOOM_FOV_RATIO, //MFLOAT
    MTK_DUALZOOM_REAL_MASTER, //MINT32
    MTK_DUALZOOM_FD_TARGET_MASTER, //MINT32
    MTK_DUALZOOM_FD_REAL_MASTER, //MINT32 // maybe not set
    MTK_LMV_SEND_SWITCH_OUT, //MINT32
    MTK_LMV_SWITCH_OUT_RESULT, //MINT32
    MTK_LMV_VALIDITY, //MINT32
    MTK_VSDOF_P1_MAIN1_ISO, //MINT32
    MTK_DUALZOOM_IS_STANDBY, //MBOOL
    MTK_DUALZOOM_CAP_CROP, //MRect
    MTK_DUALZOOM_MASTER_UPDATE_MODE, //MBOOL
    MTK_DUALZOOM_STREAMING_NR, //MINT32
    // ---- MTK_FEATUREPIPE section ----
    MTK_FEATUREPIPE_APP_MODE = MTK_FEATUREPIPE_START, //MINT32
    // ---- MTK_POSTPROC section ----
    MTK_POSTPROC_TYPE = MTK_POSTPROC_START, //MINT32
    // ---- MTK_FEATURE section ----
    MTK_FEATURE_STREAMING = MTK_FEATURE_START, //MINT64
    MTK_FEATURE_CAPTURE, //MINT64
    MTK_FEATURE_CAPTURE_PHYSICAL, //MINT64
    MTK_FEATURE_FREE_MEMORY_MBYTE, //MINT32
    MTK_FEATURE_MFNR_NVRAM_QUERY_INDEX, //MINT32
    MTK_FEATURE_MFNR_NVRAM_DECISION_ISO, //MINT32
    MTK_FEATURE_MFNR_TUNING_INDEX_HINT, //MINT64
    MTK_FEATURE_MFNR_FINAL_EXP, //MINT32
    MTK_FEATURE_MFNR_OPEN_ID, //MINT32
    MTK_FEATURE_AINR_MDLA_MODE, //MINT32
    MTK_ISP_AINR_MDLA_MODE, //MINT32
    MTK_ISP_LTM_BIT_MODE, //MINT32
    MTK_FEATURE_BSS_SELECTED_FRAME_COUNT, //MINT32
    MTK_FEATURE_BSS_FORCE_DROP_NUM, //MINT32
    MTK_FEATURE_BSS_FIXED_LSC_TBL_DATA, //MUINT8
    MTK_FEATURE_BSS_PROCESS, //MINT32
    MTK_FEATURE_BSS_ISGOLDEN, //MBOOL
    MTK_FEATURE_BSS_REORDER, //MBOOL
    MTK_FEATURE_BSS_MANUAL_ORDER, //MUINT8
    MTK_FEATURE_BSS_RRZO_DATA, //MUINT8
    MTK_FEATURE_BSS_DOWNSAMPLE, //MBOOL
    MTK_FEATURE_PACK_RRZO, //MUINT8
    MTK_FEATURE_FACE_RECTANGLES, //MRect array
    MTK_FEATURE_FACE_POSE_ORIENTATIONS, //MINT32[n*3] array, each struct include: xAxis, yAxis, zAxis
    MTK_FEATURE_CAP_YUV_PROCESSING, //MUINT8
    MTK_FEATURE_CAP_PIPE_DCE_CONTROL, //MUINT8
    MTK_FEATURE_MULTIFRAMENODE_BYPASSED, //MUINT8
    MTK_FEATURE_FACE_APPLIED_GAMMA, //MINT32
    MTK_FEATURE_CAP_PQ_USERID, //MINT64
    MTK_FEATURE_FLIP_IN_P2A, //MINT32
    // ---- MTK_FSC section ----
    MTK_FSC_CROP_DATA = MTK_FSC_START, //IMemory
    MTK_FSC_WARP_DATA, //IMemory
    MTK_STAGGER_ME_META, //IMetadata
    MTK_STAGGER_SE_META, //IMetadata
    MTK_STAGGER_BLOB_IMGO_ORDER //MUINT8
} mtk_platform_metadata_tag_t;
/******************************************************************************
*
******************************************************************************/
/**
 * Entry tags of the IMetadata blob stored under MTK_3A_EXIF_METADATA.
 * Values are sequential starting from 0 — these index entries inside the
 * nested EXIF metadata, not the global platform tag space.  Do not reorder.
 * The DEBUGINFO_BEGIN..DEBUGINFO_END range brackets key/data pairs used by
 * the EXIF debug-info dumper (keys are MINT32, data entries are Memory).
 */
typedef enum mtk_platform_3a_exif_metadata_tag {
    MTK_3A_EXIF_FNUMBER, //MINT32
    MTK_3A_EXIF_FOCAL_LENGTH, //MINT32
    MTK_3A_EXIF_FOCAL_LENGTH_35MM, //MINT32
    MTK_3A_EXIF_SCENE_MODE, //MINT32
    MTK_3A_EXIF_AWB_MODE, //MINT32
    MTK_3A_EXIF_LIGHT_SOURCE, //MINT32
    MTK_3A_EXIF_EXP_PROGRAM, //MINT32
    MTK_3A_EXIF_SCENE_CAP_TYPE, //MINT32
    MTK_3A_EXIF_FLASH_LIGHT_TIME_US, //MINT32
    MTK_3A_EXIF_AE_METER_MODE, //MINT32
    MTK_3A_EXIF_AE_EXP_BIAS, //MINT32
    MTK_3A_EXIF_CAP_EXPOSURE_TIME, //MINT32
    MTK_3A_EXIF_AE_ISO_SPEED, //MINT32
    MTK_3A_EXIF_REAL_ISO_VALUE, //MINT32
    MTK_3A_EXIF_AE_BRIGHTNESS_VALUE, //MINT32
    MTK_3A_EXIF_FLASH_FIRING_STATUS, //MINT32
    MTK_3A_EXIF_FLASH_RETURN_DETECTION, //MINT32
    MTK_3A_EXIF_FLASH_MODE, //MINT32
    MTK_3A_EXIF_FLASH_FUNCTION, //MINT32
    MTK_3A_EXIF_FLASH_REDEYE, //MINT32
    MTK_3A_EXIF_DEBUGINFO_BEGIN, // debug info begin
    // key: MINT32
    MTK_3A_EXIF_DBGINFO_AAA_KEY = MTK_3A_EXIF_DEBUGINFO_BEGIN, //MINT32
    MTK_3A_EXIF_DBGINFO_AAA_DATA,
    MTK_3A_EXIF_DBGINFO_SDINFO_KEY,
    MTK_3A_EXIF_DBGINFO_SDINFO_DATA,
    MTK_3A_EXIF_DBGINFO_ISP_KEY,
    MTK_3A_EXIF_DBGINFO_ISP_DATA,
    //
    MTK_CMN_EXIF_DBGINFO_KEY,
    MTK_CMN_EXIF_DBGINFO_DATA,
    //
    MTK_MF_EXIF_DBGINFO_MF_KEY,
    MTK_MF_EXIF_DBGINFO_MF_DATA,
    //
    MTK_N3D_EXIF_DBGINFO_KEY,
    MTK_N3D_EXIF_DBGINFO_DATA,
    //
    MTK_POSTNR_EXIF_DBGINFO_NR_KEY,
    MTK_POSTNR_EXIF_DBGINFO_NR_DATA,
    //
    MTK_RESVB_EXIF_DBGINFO_KEY,
    MTK_RESVB_EXIF_DBGINFO_DATA,
    //
    MTK_RESVC_EXIF_DBGINFO_KEY,
    MTK_RESVC_EXIF_DBGINFO_DATA,
    // data: Memory
    MTK_3A_EXIF_DEBUGINFO_END, // debug info end
} mtk_platform_3a_exif_metadata_tag_t;
// Values for MTK_3A_FEATURE_AE_EXPOSURE_LEVEL: selects which exposure of a
// multi-exposure set a request targets.  Values made explicit so that an
// accidental reorder cannot silently change the ABI.
typedef enum mtk_camera_metadata_enum_ae_exposure_level {
    MTK_3A_FEATURE_AE_EXPOSURE_LEVEL_NONE   = 0,
    MTK_3A_FEATURE_AE_EXPOSURE_LEVEL_SHORT  = 1,
    MTK_3A_FEATURE_AE_EXPOSURE_LEVEL_NORMAL = 2,
    MTK_3A_FEATURE_AE_EXPOSURE_LEVEL_LONG   = 3,
} mtk_camera_metadata_enum_ae_exposure_level_t;
// Values for MTK_3A_FEATURE_AE_TARGET_MODE: the AE/VHDR exposure strategy.
// Values made explicit so an accidental reorder cannot change the ABI.
typedef enum mtk_camera_metadata_enum_ae_target_mode {
    MTK_3A_FEATURE_AE_TARGET_MODE_NORMAL             = 0,
    MTK_3A_FEATURE_AE_TARGET_MODE_IVHDR              = 1,
    MTK_3A_FEATURE_AE_TARGET_MODE_MVHDR              = 2,
    MTK_3A_FEATURE_AE_TARGET_MODE_ZVHDR              = 3,
    MTK_3A_FEATURE_AE_TARGET_MODE_LE_FIX             = 4,
    MTK_3A_FEATURE_AE_TARGET_MODE_SE_FIX             = 5,
    MTK_3A_FEATURE_AE_TARGET_MODE_4CELL_MVHDR        = 6,
    MTK_3A_FEATURE_AE_TARGET_MODE_MSTREAM_VHDR       = 7,
    MTK_3A_FEATURE_AE_TARGET_MODE_MSTREAM_VHDR_RTO1X = 8,
    MTK_3A_FEATURE_AE_TARGET_MODE_STAGGER_2EXP       = 9,
    MTK_3A_FEATURE_AE_TARGET_MODE_STAGGER_3EXP       = 10,
} mtk_camera_metadata_enum_ae_target_mode_t;
//MTK_3A_FEATURE_AE_VALID_EXPOSURE_NUM
// Number of valid exposures produced in stagger (multi-exposure) sensor mode.
typedef enum mtk_camera_metadata_enum_stagger_valid_exposure_num {
    MTK_STAGGER_VALID_EXPOSURE_NON = 0,
    MTK_STAGGER_VALID_EXPOSURE_1 = 1,
    MTK_STAGGER_VALID_EXPOSURE_2 = 2,
    MTK_STAGGER_VALID_EXPOSURE_3 = 3
} mtk_camera_metadata_enum_stagger_valid_exposure_num_t;
//MTK_3A_ISP_FUS_NUM
// Number of frames the ISP fuses for the current request.
typedef enum mtk_camera_metadata_enum_3a_isp_fus_num {
    MTK_3A_ISP_FUS_NUM_NON = 0,
    MTK_3A_ISP_FUS_NUM_1 = 1,
    MTK_3A_ISP_FUS_NUM_2 = 2,
    MTK_3A_ISP_FUS_NUM_3 = 3,
} mtk_camera_metadata_enum_3a_isp_fus_num_t;
/******************************************************************************
 *
 ******************************************************************************/
// Values for MTK_NR_MODE: selects the noise-reduction engine.
// Values made explicit so an accidental reorder cannot change the ABI.
typedef enum mtk_platform_metadata_enum_nr_mode {
    MTK_NR_MODE_OFF  = 0, // noise reduction disabled
    MTK_NR_MODE_MNR  = 1, // hardware (multi-pass) NR
    MTK_NR_MODE_SWNR = 2, // software NR
    MTK_NR_MODE_AUTO = 3  // HAL picks MNR/SWNR by ISO threshold
} mtk_platform_metadata_enum_nr_mode_t;
// Multi-frame blending mode.  Values made explicit; MTK_MFB_MODE_NUM is the
// count sentinel and must stay last.
typedef enum mtk_platform_metadata_enum_mfb_mode {
    MTK_MFB_MODE_OFF  = 0,
    MTK_MFB_MODE_MFLL = 1, // multi-frame low-light
    MTK_MFB_MODE_AIS  = 2, // advanced image stabilization
    MTK_MFB_MODE_NUM  = 3, // number of modes (sentinel)
} mtk_platform_metadata_enum_mfb_mode_t;
// Opaque customer hint slots carried via MTK_PLUGIN_CUSTOM_HINT.  Values
// made explicit; MTK_CUSTOM_HINT_NUM is the count sentinel and must stay last.
typedef enum mtk_platform_metadata_enum_custom_hint {
    MTK_CUSTOM_HINT_0   = 0,
    MTK_CUSTOM_HINT_1   = 1,
    MTK_CUSTOM_HINT_2   = 2,
    MTK_CUSTOM_HINT_3   = 3,
    MTK_CUSTOM_HINT_4   = 4,
    MTK_CUSTOM_HINT_NUM = 5, // number of hint slots (sentinel)
} mtk_platform_metadata_enum_custom_hint_t;
// Bit flags stored under MTK_PLUGIN_MODE (MINT64); modes can be OR-ed.
typedef enum mtk_platform_metadata_enum_plugin_mode {
    MTK_PLUGIN_MODE_COMBINATION = 1 << 0,
    MTK_PLUGIN_MODE_NR = 1 << 1,
    MTK_PLUGIN_MODE_HDR = 1 << 2,
    MTK_PLUGIN_MODE_MFNR = 1 << 3,
    MTK_PLUGIN_MODE_COPY = 1 << 4,
    MTK_PLUGIN_MODE_TEST_PRV = 1 << 5,
    MTK_PLUGIN_MODE_BMDN = 1 << 6,
    MTK_PLUGIN_MODE_MFHR = 1 << 7,
    MTK_PLUGIN_MODE_BMDN_3rdParty = 1 << 8,
    MTK_PLUGIN_MODE_MFHR_3rdParty = 1 << 9,
    MTK_PLUGIN_MODE_FUSION_3rdParty = 1 << 10,
    MTK_PLUGIN_MODE_VSDOF_3rdParty = 1 << 11,
    MTK_PLUGIN_MODE_COLLECT = 1 << 12,
    MTK_PLUGIN_MODE_HDR_3RD_PARTY = 1 << 13,
    MTK_PLUGIN_MODE_MFNR_3RD_PARTY = 1 << 14,
    MTK_PLUGIN_MODE_BOKEH_3RD_PARTY = 1 << 15,
    MTK_PLUGIN_MODE_DCMF_3RD_PARTY = 1 << 16,
} mtk_platform_metadata_enum_plugin_mode_t;
typedef enum mtk_platform_metadata_enum_p2_plugin_combination {
MTK_P2_RAW_PROCESSOR = 1 << 0,
MTK_P2_ISP_PROCESSOR = 1 << 1,
MTK_P2_YUV_PROCESSOR = 1 << 2,
MTK_P2_MDP_PROCESSOR = 1 << 3,
MTK_P2_CAPTURE_REQUEST = 1 << 4,
MTK_P2_PREVIEW_REQUEST = 1 << 5
} mtk_platform_metadata_enum_p2_plugin_combination;
typedef enum mtk_platform_metadata_enum_isp_color_space {
MTK_ISP_COLOR_SPACE_SRGB = 0 ,
MTK_ISP_COLOR_SPACE_DISPLAY_P3 = 1 ,
MTK_ISP_COLOR_SPACE_CUSTOM_1 = 2
} mtk_platform_metadata_enum_isp_color_space;
// Frame-drop policy stored under MTK_DUALZOOM_DROP_REQ.  All values made
// explicit (the last three were previously implicit 3, 4, 5).
typedef enum mtk_platform_metadata_enum_dualzoom_drop_req {
    MTK_DUALZOOM_DROP_NEVER_DROP                      = 0,
    MTK_DUALZOOM_DROP_NONE                            = 1,
    MTK_DUALZOOM_DROP_DIRECTLY                        = 2,
    MTK_DUALZOOM_DROP_NEED_P1                         = 3,
    MTK_DUALZOOM_DROP_NEED_SYNCMGR                    = 4,
    MTK_DUALZOOM_DROP_NEED_SYNCMGR_NEED_STREAM_F_PIPE = 5,
} mtk_platform_metadata_enum_dualzoom_drop_req_t;
// Sensor power/streaming state reported under MTK_P1NODE_SENSOR_STATUS.
typedef enum mtk_platform_metadata_enum_p1_sensor_status {
    MTK_P1_SENSOR_STATUS_NONE = 0,
    MTK_P1_SENSOR_STATUS_STREAMING = 1,
    MTK_P1_SENSOR_STATUS_SW_STANDBY = 2,
    MTK_P1_SENSOR_STATUS_HW_STANDBY = 3,
} mtk_platform_metadata_enum_p1_sensor_status_t;
// Requested twin-TG (timing generator) switch, MTK_P1NODE_TWIN_SWITCH.
typedef enum mtk_platform_metadata_enum_p1_twin_switch {
    MTK_P1_TWIN_SWITCH_NONE = 0,
    MTK_P1_TWIN_SWITCH_ONE_TG = 1,
    MTK_P1_TWIN_SWITCH_TWO_TG = 2
} mtk_platform_metadata_enum_p1_twin_switch_t;
// Current twin-TG state, MTK_P1NODE_TWIN_STATUS.
typedef enum mtk_platform_metadata_enum_p1_twin_status {
    MTK_P1_TWIN_STATUS_NONE = 0,
    MTK_P1_TWIN_STATUS_TG_MODE_1 = 1,
    MTK_P1_TWIN_STATUS_TG_MODE_2 = 2,
    MTK_P1_TWIN_STATUS_TG_MODE_3 = 3,
} mtk_platform_metadata_enum_p1_twin_status_t;
// Requested resize-quality transition (L=low, H=high; from_to),
// MTK_P1NODE_RESIZE_QUALITY_SWITCH.
typedef enum mtk_platform_metadata_enum_p1_resize_quality_switch {
    MTK_P1_RESIZE_QUALITY_SWITCH_NONE = 0,
    MTK_P1_RESIZE_QUALITY_SWITCH_L_L = 1,
    MTK_P1_RESIZE_QUALITY_SWITCH_L_H = 2,
    MTK_P1_RESIZE_QUALITY_SWITCH_H_L = 3,
    MTK_P1_RESIZE_QUALITY_SWITCH_H_H = 4,
} mtk_platform_metadata_enum_p1_resize_quality_switch_t;
// Outcome of a resize-quality switch request, MTK_P1NODE_RESIZE_QUALITY_STATUS.
typedef enum mtk_platform_metadata_enum_p1_resize_quality_status {
    MTK_P1_RESIZE_QUALITY_STATUS_NONE = 0,
    MTK_P1_RESIZE_QUALITY_STATUS_ACCEPT = 1,
    MTK_P1_RESIZE_QUALITY_STATUS_IGNORE = 2,
    MTK_P1_RESIZE_QUALITY_STATUS_REJECT = 3,
    MTK_P1_RESIZE_QUALITY_STATUS_ILLEGAL = 4,
} mtk_platform_metadata_enum_p1_resize_quality_status_t;
// Current resize-quality level, MTK_P1NODE_RESIZE_QUALITY_LEVEL.
typedef enum mtk_platform_metadata_enum_p1_resize_quality_level {
    MTK_P1_RESIZE_QUALITY_LEVEL_UNKNOWN = 0,
    MTK_P1_RESIZE_QUALITY_LEVEL_L = 1,
    MTK_P1_RESIZE_QUALITY_LEVEL_H = 2,
} mtk_platform_metadata_enum_p1_resize_quality_level_t;
// Result of an LMV (local motion vector) switch-out request,
// MTK_LMV_SWITCH_OUT_RESULT.  Values made explicit.
typedef enum mtk_platform_metadata_enum_lmv_result {
    MTK_LMV_RESULT_OK        = 0,
    MTK_LMV_RESULT_FAILED    = 1,
    MTK_LMV_RESULT_SWITCHING = 2
} mtk_platform_metadata_enum_lmv_result_t;
// Streaming feature-pipe scenario, stored under MTK_FEATUREPIPE_APP_MODE.
typedef enum mtk_platform_metadata_enum_featurepipe_app_mode {
    MTK_FEATUREPIPE_PHOTO_PREVIEW = 0,
    MTK_FEATUREPIPE_VIDEO_PREVIEW = 1,
    MTK_FEATUREPIPE_VIDEO_RECORD = 2,
    MTK_FEATUREPIPE_VIDEO_STOP = 3,
} mtk_platform_metadata_enum_featurepipe_app_mode_t;
// DCMF capture flavor, stored under MTK_STEREO_DCMF_FEATURE_MODE.
typedef enum mtk_platform_metadata_enum_dcmf_feature_mode {
    MTK_DCMF_FEATURE_BOKEH = 0,
    MTK_DCMF_FEATURE_MFNR_BOKEH = 1,
    MTK_DCMF_FEATURE_HDR_BOKEH = 2,
} mtk_platform_metadata_enum_dcmf_feature_mode_t;
// Slow-motion (SMVR) frame-rate selector, stored under MTK_HAL_REQUEST_SMVR_FPS.
typedef enum mtk_platform_metadata_enum_smvr_fps {
    MTK_SMVR_FPS_30 = 0,
    MTK_SMVR_FPS_120 = 1,
    MTK_SMVR_FPS_240 = 2,
    MTK_SMVR_FPS_480 = 3,
    MTK_SMVR_FPS_960 = 4,
} mtk_platform_metadata_enum_smvr_fps_t;
//MTK_FRAMESYNC_FAILHANDLE
// What to do with a frame when multi-camera frame sync fails.
// Values made explicit (previously implicit 0, 1).
typedef enum mtk_platform_metadata_enum_fremesync_failhandle {
    MTK_FRAMESYNC_FAILHANDLE_CONTINUE = 0, // keep processing the frame
    MTK_FRAMESYNC_FAILHANDLE_DROP     = 1, // drop the unsynchronized frame
} mtk_platform_metadata_enum_fremesync_failhandle_t;
//MTK_FRAMESYNC_RESULT
// Frame-sync verdict for the current frame.  Values made explicit.
typedef enum mtk_platform_metadata_enum_fremesync_result {
    MTK_FRAMESYNC_RESULT_PASS          = 0,
    MTK_FRAMESYNC_RESULT_FAIL_CONTINUE = 1,
    MTK_FRAMESYNC_RESULT_FAIL_DROP     = 2,
} mtk_platform_metadata_enum_fremesync_result_t;
//MTK_FRAMESYNC_MODE
// Alignment strategy used by hardware frame sync.  Values made explicit.
typedef enum mtk_platform_metadata_enum_fremesync_mode {
    MTK_FRAMESYNC_MODE_VSYNC_ALIGNMENT          = 0,
    MTK_FRAMESYNC_MODE_READOUT_CENTER_ALIGNMENT = 1,
} mtk_platform_metadata_enum_fremesync_mode_t;
//MTK_FEATURE_MULTIFRAMENODE_BYPASSED
// Whether the multi-frame node should be bypassed for this request.
typedef enum mtk_platform_metadata_enum_multiframenode_bypassed {
    MTK_FEATURE_MULTIFRAMENODE_NOT_BYPASSED = 0,
    MTK_FEATURE_MULTIFRAMENODE_TO_BE_BYPASSED = 1
} mtk_platform_metadata_enum_mfllnode_bypassed_t;
//MTK_FEATURE_BSS_PROCESS
// Enable/disable best-shot selection (BSS) processing.
// Note the inverted numbering: ENABLE is 0, DISABLE is 1.
typedef enum mtk_platform_metadata_enum_bss_processing {
    MTK_FEATURE_BSS_PROCESS_ENABLE = 0,
    MTK_FEATURE_BSS_PROCESS_DISABLE = 1
} mtk_platform_metadata_enum_bss_processing_t;
//MTK_FEATURE_BSS_MANUAL_ORDER
// Manual BSS frame ordering: off, or force the golden frame first.
typedef enum mtk_platform_metadata_enum_bss_manual_order {
    MTK_FEATURE_BSS_MANUAL_ORDER_OFF = 0,
    MTK_FEATURE_BSS_MANUAL_ORDER_GOLDEN = 1
} mtk_platform_metadata_enum_bss_manual_order_t;
//MTK_FEATURE_CAP_YUV_PROCESSING
// Whether an extra YUV processing stage is needed for this capture.
typedef enum mtk_platform_metadata_enum_cap_yuv_processing {
    MTK_FEATURE_CAP_YUV_PROCESSING_NOT_NEEDED = 0,
    MTK_FEATURE_CAP_YUV_PROCESSING_NEEDED = 1
} mtk_platform_metadata_enum_cap_yuv_processing_t;
//MTK_FEATURE_CAP_PIPE_DCE_CONTROL
// DCE control for the capture pipe.  Declared in descending value order in
// the original header; values are explicit so order does not matter.
typedef enum mtk_platform_metadata_enum_cap_pipe_control {
    MTK_FEATURE_CAP_PIPE_DCE_ENABLE_BUT_NOT_APPLY = 2,
    MTK_FEATURE_CAP_PIPE_DCE_MANUAL_DISABLE = 1,
    MTK_FEATURE_CAP_PIPE_DCE_DEFAULT_APPLY = 0
} mtk_platform_metadata_enum_cap_pipe_dce_control_t;
// MTK_FEATURE_AINR_MDLA_MODE, MTK_ISP_AINR_MDLA_MODE
// AI-NR MDLA processing mode / bit depth.
typedef enum mtk_platform_metadata_enum_ainr_mdla_mode {
    MTK_FEATURE_AINR_MDLA_MODE_NONE = 0,
    MTK_FEATURE_AINR_MDLA_MODE_DRCOUT_16BIT = 1,
    MTK_FEATURE_AINR_MDLA_MODE_NNOUT_12BIT = 2,
    MTK_FEATURE_AINR_MDLA_MODE_NNOUT_16BIT = 3,
} mtk_platform_metadata_enum_ainr_mdla_mode_t;
//MTK_ISP_P2_PROCESSED_RAW
// Whether P2 must additionally output processed RAW.
typedef enum mtk_platform_metadata_enum_p2_processed_raw {
    MTK_ISP_P2_PROCESSED_RAW_NOT_NEEDED = 0,
    MTK_ISP_P2_PROCESSED_RAW_NEEDED = 1
} mtk_platform_metadata_enum_p2_processed_raw_t;
//MTK_DUALZOOM_STREAMING_NR
// Streaming NR policy in dual-zoom scenarios.
typedef enum mtk_platform_metadata_enum_dualzoom_streaming_nr {
    MTK_DUALZOOM_STREAMING_NR_AUTO = 0,
    MTK_DUALZOOM_STREAMING_NR_OFF = 1
} mtk_platform_metadata_enum_dualzoom_streaming_nr_t;
//MTK_STAGGER_BLOB_IMGO_ORDER
// Which exposure an IMGO blob carries in stagger mode
// (NE = normal, ME = middle, SE = short exposure).
typedef enum mtk_platform_metadata_enum_stagger_blob_imgo_order {
    MTK_STAGGER_IMGO_NONE = 0,
    MTK_STAGGER_IMGO_NE = 1,
    MTK_STAGGER_IMGO_ME = 2,
    MTK_STAGGER_IMGO_SE = 3
} mtk_platform_metadata_enum_stagger_blob_imgo_order_t;
//MTK_3A_EXIF_FLASH_FIRING_STATUS
// EXIF Flash tag bit 0: whether the flash fired.
typedef enum mtk_platform_metadata_enum_3a_exif_flash_firing_status_t {
    MTK_3A_EXIF_FLASH_FIRING_STATUS_NOT_FIRED = 0,
    MTK_3A_EXIF_FLASH_FIRING_STATUS_FIRED = 1,
} mtk_platform_metadata_enum_3a_exif_flash_firing_status_t;
//MTK_3A_EXIF_FLASH_RETURN_DETECTION
// EXIF Flash tag bits 1-2: strobe return light detection.
typedef enum mtk_platform_metadata_enum_3a_exif_flash_return_detection_t {
    MTK_3A_EXIF_FLASH_RETURN_DETECTION_NOT_SUPPORT = 0,
    MTK_3A_EXIF_FLASH_RETURN_DETECTION_RESERVED = 1,
    MTK_3A_EXIF_FLASH_RETURN_DETECTION_STROBE_NOT_DETECTED = 2,
    MTK_3A_EXIF_FLASH_RETURN_DETECTION_STROBE_DETECTED = 3,
} mtk_platform_metadata_enum_3a_exif_flash_return_detection_t;
//MTK_3A_EXIF_FLASH_MODE
// EXIF Flash tag bits 3-4: flash firing mode.
typedef enum mtk_platform_metadata_enum_3a_exif_flash_mode_t {
    MTK_3A_EXIF_FLASH_MODE_UNKNOWN = 0,
    MTK_3A_EXIF_FLASH_MODE_COMPULSORY_FIRING = 1,
    MTK_3A_EXIF_FLASH_MODE_COMPULSORY_SUPPRESSION = 2,
    MTK_3A_EXIF_FLASH_MODE_AUTO = 3,
} mtk_platform_metadata_enum_3a_exif_flash_mode_t;
//MTK_3A_EXIF_FLASH_FUNCTION
// EXIF Flash tag bit 5: flash function presence.
// Note the inverted numbering: SUPPORT is 0, NOT_SUPPORT is 1.
typedef enum mtk_platform_metadata_enum_3a_exif_flash_function_t {
    MTK_3A_EXIF_FLASH_FUNCTION_SUPPORT = 0,
    MTK_3A_EXIF_FLASH_FUNCTION_NOT_SUPPORT = 1,
} mtk_platform_metadata_enum_3a_exif_flash_function_t;
//MTK_3A_EXIF_FLASH_REDEYE
// EXIF Flash tag bit 6: red-eye reduction support.
typedef enum mtk_platform_metadata_enum_3a_exif_flash_redeye_t {
    MTK_3A_EXIF_FLASH_REDEYE_NOT_SUPPORT = 0,
    MTK_3A_EXIF_FLASH_REDEYE_SUPPORT = 1,
} mtk_platform_metadata_enum_3a_exif_flash_redeye_t;
//MTK_FEATURE_ABF
// Adaptive bilateral filter on/off.  Values made explicit.
typedef enum mtk_platform_metadata_enum_abf_mode {
    MTK_ABF_MODE_OFF = 0,
    MTK_ABF_MODE_ON  = 1,
} mtk_platform_metadata_enum_abf_mode_t;
#endif

File diff suppressed because it is too large Load Diff

@ -23,6 +23,7 @@
#include <opencv2/core/core.hpp>
#include "Camera2Helper.h"
#include <mutex>
#include <map>
#include <set>
/**
@ -38,8 +39,10 @@ static const uint64_t kMaxExposureTime = static_cast<uint64_t>(250000000);
#define WAIT_AWB_LOCKED 2
#define WAIT_AF_LOCKED 4
#define EXPECTED_CAPTURE_IDX 0
#define EXPECTED_CAPTURE_IDX 1
#define PREVIEW_REQUEST_IDX 0
#define CAPTURE_REQUEST_IDX 1
#define DEFAULT_WARMUP_TIME 250 // 250ms
class CameraManager
{
@ -82,13 +85,18 @@ public:
unsigned int orientation:3;
unsigned int zoom : 1;
unsigned int wait3ALocked : 3;
unsigned int reserved : 18;
unsigned int burstRawCapture : 3;
unsigned int customHdr : 1;
unsigned int hdrStep : 3;
unsigned int minFps : 4;
unsigned int reserved : 7;
int64_t exposureTime;
unsigned int sensitivity;
int compensation;
float zoomRatio;
uint8_t requestTemplate;
uint8_t awbMode;
uint8_t burstCaptures;
unsigned short focusTimeout; // milli-seconds 65535
};
@ -105,8 +113,8 @@ public:
int32_t compensation;
uint8_t sceneMode;
uint8_t awbMode;
uint16_t avgY;
float zoomRatio;
uint8_t avgY;
uint64_t duration;
int64_t frameDuration;
@ -115,6 +123,28 @@ public:
uint8_t afLockSetted : 1;
};
struct CaptureRequest
{
/* For image capture */
NdkCamera* pThis;
AImageReader* imageReader;
ANativeWindow* imageWindow;
ACameraOutputTarget* imageTarget;
ACaptureSessionOutput* sessionOutput;
ACaptureRequest* request;
ACameraDevice_request_template templateId;
int sessionSequenceId;
};
struct CaptureResult
{
ACameraMetadata* result;
AImage* image;
int sequenceId;
};
NdkCamera(int32_t width, int32_t height, const CAMERA_PARAMS& params);
virtual ~NdkCamera();
@ -123,37 +153,75 @@ public:
void close();
int selfTest(const std::string& cameraId, int32_t& maxResolutionX, int32_t& maxResolutionY);
static void writeJpegFile(AImage *image, const char* path);
static void writeRawFile(AImage *image, ACameraMetadata* characteristics, ACameraMetadata* result, const char* path);
void onAvailabilityCallback(const char* cameraId);
void onUnavailabilityCallback(const char* cameraId);
virtual void onImageAvailable(AImageReader* reader);
virtual int32_t getOutputFormat() const;
virtual bool on_image(cv::Mat& rgb);
virtual int32_t getBurstCaptures() const;
void CreateSession(ANativeWindow* previewWindow, ANativeWindow* jpgWindow, bool manaulPreview, int32_t imageRotation, int32_t width, int32_t height);
void CreateSession(ANativeWindow* previewWindow);
CaptureRequest* CreateRequest(bool isPreviewRequest, int32_t sensitivity = -1);
void DestroyRequest(CaptureRequest* request);
void DestroySession();
virtual bool on_image(cv::Mat rgb);
virtual void on_error(const std::string& msg);
virtual void on_image(const unsigned char* nv21, int nv21_width, int nv21_height);
virtual void onDisconnected(ACameraDevice* device);
virtual bool onOneCapture(std::shared_ptr<ACameraMetadata> characteristics, std::shared_ptr<ACameraMetadata> result, uint32_t ldr, uint32_t duration, cv::Mat rgb);
virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, uint32_t duration, std::vector<std::vector<uint8_t> >& frames);
virtual bool onBurstCapture(std::shared_ptr<ACameraMetadata> characteristics, std::vector<std::shared_ptr<ACameraMetadata> >& results, uint32_t ldr, uint32_t duration, std::vector<std::shared_ptr<AImage> >& frames);
void onCaptureProgressed(ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result);
void onCaptureCompleted(ACameraCaptureSession* session, ACaptureRequest* request, const ACameraMetadata* result);
void onCaptureFailed(ACameraCaptureSession* session, ACaptureRequest* request, ACameraCaptureFailure* failure);
void onSessionReady(ACameraCaptureSession *session);
void onError(ACameraDevice* device, int error);
void CopyPreviewRequest(ACaptureRequest* request, const ACameraMetadata* previewResult);
const CAPTURE_RESULT& getCaptureResult() const
void FireBurstCapture();
void FireOneCapture(uint64_t ts);
uint32_t GetLdr() const
{
return mFinalLdr;
}
bool HasFatalError() const
{
return mFinalResult;
return m_fatalError;
}
bool IsCameraAvailable(const std::string& cameraId);
int64_t GetTimestamp(const ACameraMetadata* result);
static bool convertAImageToNv21(AImage* image, uint8_t** nv21, int32_t& width, int32_t& height);
static void EnumCameraResult(ACameraMetadata* result, CAPTURE_RESULT& captureResult);
protected:
void SetupMFNR(ACameraMetadata* characteristics, ACaptureRequest* request, bool ais, int32_t sensitivity);
void Setup3DNR(ACameraMetadata* characteristics, ACaptureRequest* request, int32_t sensitivity);
void SetupHDR(ACameraMetadata* characteristics, ACaptureRequest* request, int32_t sensitivity);
bool SetupTonemapCurve(ACameraMetadata* characteristics, ACaptureRequest* request);
protected:
std::mutex m_locker;
std::set<std::string> m_availableCameras;
protected:
CAMERA_PARAMS m_params;
DisplayDimension foundRes;
int camera_facing;
int camera_orientation;
bool m_firstFrame;
bool m_photoTaken;
int32_t mWidth;
int32_t mHeight;
std::string mCameraId;
@ -164,6 +232,7 @@ protected:
uint8_t awbMode;
bool aeLockAvailable;
bool awbLockAvailable;
bool m_fatalError;
uint64_t numberOfPrecaptures;
unsigned long long m_precaptureStartTime;
@ -178,11 +247,13 @@ protected:
int32_t activeArraySize[2];
int32_t maxRegions[3];
unsigned int m_imagesCaptured;
bool mCaptureTriggered;
bool mFocusTriggered;
bool mCaptureDispatched;
uint32_t mStableFrameCount;
CAPTURE_RESULT mResult;
CAPTURE_RESULT mFinalResult;
unsigned long long m_startTime;
uint64_t m_startTime;
protected:
@ -190,15 +261,38 @@ protected:
CameraManager camera_manager;
ACameraDevice* camera_device;
AImageReader* image_reader;
ANativeWindow* image_reader_surface;
ACameraOutputTarget* image_reader_target;
ACaptureRequest* capture_request;
ACaptureSessionOutputContainer* capture_session_output_container;
ACaptureSessionOutput* capture_session_output;
AImageReader* mPreviewImageReader;
ANativeWindow* mPreviewImageWindow;
ACameraOutputTarget* mPreviewOutputTarget;
ACaptureSessionOutput* mPreviewSessionOutput;
AImageReader* mImageReader;
ANativeWindow* mImageWindow;
ACameraOutputTarget* mOutputTarget;
ACaptureSessionOutput* mSessionOutput;
std::shared_ptr<ACameraMetadata> mCharacteristics;
std::vector<CaptureRequest*> mCaptureRequests;
ACameraCaptureSession* capture_session;
int captureSequenceId;
std::shared_ptr<ACameraMetadata> mPreviewResults;
std::vector<std::shared_ptr<ACameraMetadata> > mCaptureResults;
std::map<int64_t, std::shared_ptr<ACameraMetadata> > mCaptureResultMap;
uint32_t mLdr;
uint32_t mFinalLdr;
uint32_t mFinalBurstCaptures;
int32_t mFinalOutputFormat;
std::vector<std::shared_ptr<AImage> > mCaptureFrames;
// cv::Mat mOneFrame;
std::vector<std::pair<int64_t, cv::Mat> > mOneFrame;
std::vector<std::vector<uint8_t> > mRawFrames;
int64_t m_minTimestamp;
};
#endif // NDKCAMERA_H

@ -0,0 +1,72 @@
#include "hdrplus/hdrplus_pipeline.h"
/**
 * Command-line driver for the HDR+ merge pipeline.
 *
 * Usage: <prog> <rotation 0-4> <frontCamera 0|1> <output file> <input raw>...
 *
 * Runs the pipeline over the input RAW files, applies the requested
 * rotation/flip, converts RGB -> BGR and writes the result to <output file>.
 * Returns 0 on success, 1 on bad arguments, an empty pipeline result,
 * or a failed write.
 */
int main( int argc, char** argv )
{
    // Guard: argv[1..3] are read unconditionally and at least one input
    // path (argv[4]) is required. Previously missing args were undefined behavior.
    if (argc < 5)
    {
        printf("Usage: %s <rotation 0-4> <frontCamera 0|1> <output> <input raw>...\n", argv[0]);
        return 1;
    }

    int rotation = atoi(argv[1]);
    bool frontCamera = atoi(argv[2]) != 0;

    std::vector<std::string> paths;
    for (int idx = 4; idx < argc; idx++)
    {
        paths.push_back(argv[idx]);
    }

    cv::Mat mat;
    hdrplus::hdrplus_pipeline pipeline;
    pipeline.run_pipeline( paths, 0, mat);
    if (mat.empty())
    {
        // BUGFIX: previously only a message was printed and execution
        // continued, crashing inside cv::cvtColor on the empty image.
        printf("run_pipeline return empty mat");
        return 1;
    }

    // .clone() removed: the result is reassigned to mat anyway, so mutating
    // the source buffer in place is harmless.
    mat = hdrplus::convert16bit2_8bit_(mat);

    if (rotation > 0)
    {
        if (rotation == 1) // 0
        {
            // NOTE(review): identical transform to the rotation == 4 (270)
            // branch — confirm this duplication is intended.
            cv::Mat tempPic;
            cv::transpose(mat, tempPic);
            cv::flip(tempPic, mat, 0);
        }
        else if (rotation == 2) // 90
        {
            cv::Mat tempPic;
            cv::transpose(mat, tempPic);
            cv::flip(tempPic, mat, 1);
        }
        else if (rotation == 3) // 180
        {
            // Front camera output is mirrored, so a vertical flip suffices;
            // otherwise flip both axes.
            cv::flip(mat, mat, frontCamera ? 0 : -1);
        }
        else if (rotation == 4) // 270
        {
            cv::Mat tempPic;
            cv::transpose(mat, tempPic);
            cv::flip(tempPic, mat, 0);
        }
    }

    // Pipeline output is RGB; OpenCV writers expect BGR.
    cv::cvtColor(mat, mat, cv::COLOR_RGB2BGR);

    bool res = cv::imwrite(argv[3], mat);
    if (!res)
    {
        printf("Failed to write file %s err=%d", argv[3], errno);
    }
    // BUGFIX: previously always returned 0, even when the write failed.
    return res ? 0 : 1;
}

@ -0,0 +1,38 @@
#pragma once
#include <vector>
#include <utility> // std::pair
#include <opencv2/opencv.hpp> // all opencv header
#include "hdrplus/burst.h"
namespace hdrplus
{
// Coarse-to-fine tile alignment of every alternative frame in a burst
// against the reference frame.
class align
{
public:
align() = default;
~align() = default;
/**
* @brief Run alignment on a burst of images.
*
* @param burst_images collection of burst images
* @param aligements alignment offsets as pixel-value pairs.
* Outermost vector is per alternative image.
* Inner two vectors are per horizontal & vertical tile.
*/
void process( const hdrplus::burst& burst_images, \
std::vector<std::vector<std::vector<std::pair<int, int>>>>& aligements );
private:
// From original image to coarse image: inverse scale factor applied at
// each pyramid level (1 = keep full resolution at the finest level).
const std::vector<int> inv_scale_factors = { 1, 2, 4, 4 };
// Distance metric selector per level: 1 = L1, 2 = L2
const std::vector<int> distances = { 1, 2, 2, 2 }; // L1 / L2 distance
// Tile search radius per pyramid level
const std::vector<int> grayimg_search_radious = { 1, 4, 4, 4 };
// Tile size per pyramid level
const std::vector<int> grayimg_tile_sizes = { 16, 16, 16, 8 };
// Number of pyramid levels (must match the vectors above)
const int num_levels = 4;
};
} // namespace hdrplus

@ -0,0 +1,54 @@
#pragma once
#include <string>
#include <vector>
#include <utility> // std::pair
#include <memory> // std::shared_ptr
#include <opencv2/opencv.hpp> // all opencv header
#include <libraw/libraw.h>
namespace hdrplus
{
// In-memory file: a named wrapper around the raw byte content of a RAW file
// that was loaded (or received) without touching the filesystem.
class MemFile
{
public:
    std::vector<uint8_t> content;

    // Read-only access to the file bytes.
    // BUGFIX: previously returned `const std::vector<uint8_t>` BY VALUE,
    // copying the entire file content on every call.
    const std::vector<uint8_t>& GetConstData() const
    {
        return content;
    }

    // Mutable access to the file bytes.
    // BUGFIX: previously returned a copy, so mutations through the returned
    // vector never reached `content`.
    std::vector<uint8_t>& GetData()
    {
        return content;
    }
};
// A single RAW (bayer) frame unpacked via LibRaw, plus the metadata the
// pipeline needs (levels, ISO, noise model).
class bayer_image
{
public:
// Load and unpack from a file path, an in-memory buffer, or a MemFile.
explicit bayer_image( const std::string& bayer_image_path );
explicit bayer_image( const std::vector<uint8_t>& bayer_image_content );
explicit bayer_image( std::shared_ptr<MemFile> bayer_image_file );
~bayer_image() = default;
// Noise model (lambda_shot, lambda_read) for this frame.
// NOTE(review): presumably the baseline lambdas scaled by ISO — confirm
// against the implementation in bayer_image.cpp.
std::pair<double, double> get_noise_params() const;
std::shared_ptr<LibRaw> libraw_processor;
cv::Mat raw_image; // unpacked bayer mosaic
cv::Mat grayscale_image; // grayscale version of the raw image
int width;
int height;
int white_level;
std::vector<int> black_level_per_channel;
float iso;
private:
// Baseline noise coefficients at reference gain.
// NOTE(review): duplicated in merge.h — keep in sync.
float baseline_lambda_shot = 3.24 * pow( 10, -4 );
float baseline_lambda_read = 4.3 * pow( 10, -6 );
};
} // namespace hdrplus

@ -0,0 +1,46 @@
#pragma once
#include <vector>
#include <string>
#include <opencv2/opencv.hpp> // all opencv header
#include "hdrplus/bayer_image.h"
namespace hdrplus
{
// A burst of bayer frames plus the padded/derived images shared by the
// align, merge and finish stages.
class burst
{
public:
// Load every RAW under burst_path; the reference frame is given by path.
explicit burst( const std::string& burst_path, const std::string& reference_image_path );
// Load from explicit file paths; the reference frame is given by index.
explicit burst(const std::vector<std::string>& burst_paths, int reference_image_index);
// Load from in-memory RAW buffers; the reference frame is given by index.
explicit burst( const std::vector<std::vector<uint8_t> >& bayer_image_contents, int reference_image_index );
explicit burst( const std::vector<std::shared_ptr<MemFile> >& bayer_image_files, int reference_image_index );
~burst() = default;
// Reference image index in the array
int reference_image_idx;
// Source bayer images & grayscale unpadded image
std::vector<hdrplus::bayer_image> bayer_images;
// Image padded to upper level tile size (16*2)
// Use for alignment, merging, and finishing
std::vector<cv::Mat> bayer_images_pad;
// Padding information
std::vector<int> padding_info_bayer;
// Image padded to upper level tile size (16)
// Use for alignment, merging, and finishing
std::vector<cv::Mat> grayscale_images_pad;
// number of image (including reference) in burst
int num_images;
// Bayer image after merging, stored as cv::Mat
cv::Mat merged_bayer_image;
};
} // namespace hdrplus

@ -0,0 +1,251 @@
#pragma once
#include <opencv2/opencv.hpp> // all opencv header
#include <string>
#include <fstream>
#include <sstream>
#include <iostream>
#include <unordered_map>
#include <hdrplus/bayer_image.h>
#include <dirent.h>
#include <hdrplus/params.h>
#include <hdrplus/burst.h>
namespace hdrplus
{
// Gamma compression / decompression helpers used by the finishing stage.
// Per-pixel variants take a float sample and return a uint16 result; the
// cv::Mat variants apply the same curve to an entire image.
// NOTE(review): exact curve (threshold / gainMin / gainMax / exponent
// semantics) is defined in finish.cpp — confirm there.
uint16_t uGammaCompress_1pix(float x, float threshold,float gainMin,float gainMax,float exponent);
uint16_t uGammaDecompress_1pix(float x, float threshold,float gainMin,float gainMax,float exponent);
cv::Mat uGammaCompress_(cv::Mat m,float threshold,float gainMin,float gainMax,float exponent);
cv::Mat uGammaDecompress_(cv::Mat m,float threshold,float gainMin,float gainMax,float exponent);
// sRGB gamma transform; `mode` selects compress vs decompress —
// NOTE(review): confirm direction in finish.cpp.
cv::Mat gammasRGB(cv::Mat img, bool mode);
class finish
{
public:
cv::Mat mergedBayer; // merged image from Merge Module
std::string burstPath; // path to burst images
std::vector<std::string> rawPathList; // a list or array of the path to all burst imgs under burst Path
int refIdx; // index of the reference img
Parameters params;
cv::Mat rawReference;
// LibRaw libraw_processor_finish;
bayer_image* refBayer;
std::string mergedImgPath;
finish()
{
refBayer = NULL;
}
// please use this initialization after merging part finish
finish(std::string burstPath, cv::Mat mergedBayer,int refIdx) {
refBayer = NULL;
this->refIdx = refIdx;
this->burstPath = burstPath;
this->mergedBayer = mergedBayer;
}
// for local testing only
finish(std::string burstPath, std::string mergedBayerPath,int refIdx){
this->refIdx = refIdx;
this->burstPath = burstPath;
this->mergedBayer = loadFromCSV(mergedBayerPath, CV_16UC1);//
load_rawPathList(burstPath);
refBayer= new bayer_image(this->rawPathList[refIdx]);
this->rawReference = refBayer->raw_image;//;grayscale_image
// initialize parameters in libraw_processor_finish
setLibRawParams();
showParams();
std::cout<<"Finish init() finished!"<<std::endl;
}
~finish()
{
if (refBayer != NULL)
{
delete refBayer;
refBayer = NULL;
}
}
// finish pipeline func
// void process(std::string burstPath, cv::Mat mergedBayer,int refIdx);
void process(const hdrplus::burst& burst_images, cv::Mat& finalOutputImage);
// replace Mat a with Mat b
void copy_mat_16U(cv::Mat& A, cv::Mat B);
void copy_rawImg2libraw(std::shared_ptr<LibRaw>& libraw_ptr, cv::Mat B);
// postprocess
// cv::Mat postprocess(std::shared_ptr<LibRaw>& libraw_ptr);
void showImg(cv::Mat img)
{
int ch = CV_MAT_CN(CV_8UC1);
// cv::Mat tmp(4208,3120,CV_16UC1);
cv::Mat tmp(img);
// u_int16_t* ptr_tmp = (u_int16_t*)tmp.data;
// u_int16_t* ptr_img = (u_int16_t*)img.data;
// // col major to row major
// for(int r = 0; r < tmp.rows; r++) {
// for(int c = 0; c < tmp.cols; c++) {
// *(ptr_tmp+r*tmp.cols+c) = *(ptr_img+c*tmp.rows+r)/2048.0*255.0;
// }
// }
// std::cout<<"height="<<tmp.rows<<std::endl;
// std::cout<<"width="<<tmp.cols<<std::endl;
// cv::transpose(tmp, tmp);
u_int16_t* ptr = (u_int16_t*)tmp.data;
for(int r = 0; r < tmp.rows; r++) {
for(int c = 0; c < tmp.cols; c++) {
*(ptr+r*tmp.cols+c) = *(ptr+r*tmp.cols+c)/2048.0*255.0;
}
}
tmp = tmp.reshape(ch);
tmp.convertTo(tmp, CV_8UC1);
cv::imshow("test",tmp);
cv::imwrite("test2.jpg", tmp);
cv::waitKey(0);
std::cout<< this->mergedBayer.size()<<std::endl;
}
void showMat(cv::Mat img){
std::cout<<"size="<<img.size()<<std::endl;
std::cout<<"type="<<img.type()<<std::endl;
}
void showParams()
{
std::cout<<"Parameters:"<<std::endl;
std::cout<<"tuning_ltmGain = "<<this->params.tuning.ltmGain<<std::endl;
std::cout<<"tuning_gtmContrast = "<<this->params.tuning.gtmContrast<<std::endl;
for(auto key_val:this->params.flags){
std::cout<<key_val.first<<","<<key_val.second<<std::endl;
}
std::cout<<"demosaic_algorithm = "<<refBayer->libraw_processor->imgdata.params.user_qual<<std::endl;
std::cout<<"half_size = "<<refBayer->libraw_processor->imgdata.params.half_size<<std::endl;
std::cout<<"use_camera_wb = "<<refBayer->libraw_processor->imgdata.params.use_camera_wb<<std::endl;
std::cout<<"use_auto_wb = "<<refBayer->libraw_processor->imgdata.params.use_auto_wb<<std::endl;
std::cout<<"no_auto_bright = "<<refBayer->libraw_processor->imgdata.params.no_auto_bright<<std::endl;
std::cout<<"output_color = "<<refBayer->libraw_processor->imgdata.params.output_color <<std::endl;
std::cout<<"gamma[0] = "<<refBayer->libraw_processor->imgdata.params.gamm[0]<<std::endl;
std::cout<<"gamma[1] = "<<refBayer->libraw_processor->imgdata.params.gamm[1]<<std::endl;
std::cout<<"output_bps = "<<refBayer->libraw_processor->imgdata.params.output_bps<<std::endl;
// std::cout<<"demosaic_algorithm = "<<libraw_processor_finish.imgdata.params.user_qual<<std::endl;
// std::cout<<"half_size = "<<libraw_processor_finish.imgdata.params.half_size<<std::endl;
// std::cout<<"use_camera_wb = "<<libraw_processor_finish.imgdata.params.use_camera_wb<<std::endl;
// std::cout<<"use_auto_wb = "<<libraw_processor_finish.imgdata.params.use_auto_wb<<std::endl;
// std::cout<<"no_auto_bright = "<<libraw_processor_finish.imgdata.params.no_auto_bright<<std::endl;
// std::cout<<"output_color = "<<libraw_processor_finish.imgdata.params.output_color <<std::endl;
// std::cout<<"gamma[0] = "<<libraw_processor_finish.imgdata.params.gamm[0]<<std::endl;
// std::cout<<"gamma[1] = "<<libraw_processor_finish.imgdata.params.gamm[1]<<std::endl;
// std::cout<<"output_bps = "<<libraw_processor_finish.imgdata.params.output_bps<<std::endl;
std::cout<<"===================="<<std::endl;
}
void showRawPathList(){
std::cout<<"RawPathList:"<<std::endl;
for(auto pth:this->rawPathList){
std::cout<<pth<<std::endl;
}
std::cout<<"===================="<<std::endl;
}
private:
cv::Mat loadFromCSV(const std::string& path, int opencv_type)
{
cv::Mat m;
std::ifstream csvFile (path);
std::string line;
while (getline(csvFile, line))
{
std::vector<int> dvals;
std::stringstream ss(line);
std::string val;
// int count=0;
while (getline(ss, val, ','))
{
dvals.push_back(stod(val));//*255.0/2048.0
// count++;
}
// std::cout<<count<<std::endl;
cv::Mat mline(dvals, true);
cv::transpose(mline, mline);
m.push_back(mline);
}
int ch = CV_MAT_CN(opencv_type);
m = m.reshape(ch);
m.convertTo(m, opencv_type);
return m;
}
void load_rawPathList(std::string burstPath){
DIR *pDir; // pointer to root
struct dirent *ptr;
if (!(pDir = opendir(burstPath.c_str()))) {
std::cout<<"root dir not found!"<<std::endl;
return;
}
while ((ptr = readdir(pDir)) != nullptr) {
// ptr will move to the next file automatically
std::string sub_file = burstPath + "/" + ptr->d_name; // current filepath that ptr points to
if (ptr->d_type != 8 && ptr->d_type != 4) { // not normal file or dir
return;
}
// only need normal files
if (ptr->d_type == 8) {
if (strcmp(ptr->d_name, ".") != 0 && strcmp(ptr->d_name, "..") != 0) {
if (strstr(ptr->d_name, ".dng")) {
rawPathList.emplace_back(sub_file);
}
}
}
}
// close root dir
closedir(pDir);
}
void setLibRawParams(){
refBayer->libraw_processor->imgdata.params.user_qual = params.rawpyArgs.demosaic_algorithm;
refBayer->libraw_processor->imgdata.params.half_size = params.rawpyArgs.half_size;
refBayer->libraw_processor->imgdata.params.use_camera_wb = params.rawpyArgs.use_camera_wb;
refBayer->libraw_processor->imgdata.params.use_auto_wb = params.rawpyArgs.use_auto_wb;
refBayer->libraw_processor->imgdata.params.no_auto_bright = params.rawpyArgs.no_auto_bright;
refBayer->libraw_processor->imgdata.params.output_color = params.rawpyArgs.output_color;
refBayer->libraw_processor->imgdata.params.gamm[0] = params.rawpyArgs.gamma[0];
refBayer->libraw_processor->imgdata.params.gamm[1] = params.rawpyArgs.gamma[1];
refBayer->libraw_processor->imgdata.params.output_bps = params.rawpyArgs.output_bps;
// libraw_processor_finish.imgdata.params.user_qual = params.rawpyArgs.demosaic_algorithm;
// libraw_processor_finish.imgdata.params.half_size = params.rawpyArgs.half_size;
// libraw_processor_finish.imgdata.params.use_camera_wb = params.rawpyArgs.use_camera_wb;
// libraw_processor_finish.imgdata.params.use_auto_wb = params.rawpyArgs.use_auto_wb;
// libraw_processor_finish.imgdata.params.no_auto_bright = params.rawpyArgs.no_auto_bright;
// libraw_processor_finish.imgdata.params.output_color = params.rawpyArgs.output_color;
// libraw_processor_finish.imgdata.params.gamm[0] = params.rawpyArgs.gamma[0];
// libraw_processor_finish.imgdata.params.gamm[1] = params.rawpyArgs.gamma[1];
// libraw_processor_finish.imgdata.params.output_bps = params.rawpyArgs.output_bps;
}
};
} // namespace hdrplus

@ -0,0 +1,54 @@
#pragma once
#include <string>
#include <opencv2/opencv.hpp> // all opencv header
#include "hdrplus/burst.h"
#include "hdrplus/align.h"
#include "hdrplus/merge.h"
#include "hdrplus/finish.h"
namespace hdrplus
{
// Scale a 16-bit image (CV_16UC1 / CV_16UC3) down to 8 bits.
// Any other type is returned unchanged.
//
// BUGFIX/perf: the previous implementation multiplied every uint16 pixel by
// 255/USHRT_MAX in place (truncating to an integer) and then called
// convertTo, quantizing twice. A single convertTo with a scale factor does
// the multiply in floating point with saturate_cast in one pass — faster
// and without the extra truncation step.
inline cv::Mat convert16bit2_8bit_(cv::Mat ans) {
    if (ans.type() == CV_16UC3) {
        ans.convertTo(ans, CV_8UC3, 255.0 / USHRT_MAX);
    } else if (ans.type() == CV_16UC1) {
        ans.convertTo(ans, CV_8UC1, 255.0 / USHRT_MAX);
    } else {
        // Unsupported data type: pass through untouched.
    }
    return ans;
}
// End-to-end HDR+ pipeline: align -> merge -> finish.
class hdrplus_pipeline
{
private:
hdrplus::align align_module;
hdrplus::merge merge_module;
hdrplus::finish finish_module;
public:
// Run on a burst directory with an explicit reference image path.
void run_pipeline( const std::string& burst_path, const std::string& reference_image_path );
// Overloads taking explicit file paths / in-memory RAW buffers; the final
// image is written into finalImg. NOTE(review): the bool return presumably
// signals success — confirm in hdrplus_pipeline.cpp.
bool run_pipeline( const std::vector<std::string>& burst_paths, int reference_image_index, cv::Mat& finalImg );
bool run_pipeline( const std::vector<std::vector<uint8_t> >& burst_contents, int reference_image_index, cv::Mat& finalImg );
bool run_pipeline( const std::vector<std::shared_ptr<MemFile> >& burst_contents, int reference_image_index, cv::Mat& finalImg );
hdrplus_pipeline() = default;
~hdrplus_pipeline() = default;
};
} // namespace hdrplus

@ -0,0 +1,184 @@
#pragma once
#include <vector>
#include <opencv2/opencv.hpp> // all opencv header
#include <cmath>
#include "hdrplus/burst.h"
#define TILE_SIZE 16
#define TEMPORAL_FACTOR 75
#define SPATIAL_FACTOR 0.1
namespace hdrplus
{
// Merge module: combines the aligned burst frames into one low-noise bayer
// image via temporal and spatial frequency-domain denoising.
class merge
{
public:
// Tiles overlap by half a tile.
int offset = TILE_SIZE / 2;
// Baseline noise model coefficients.
// NOTE(review): duplicated in bayer_image.h — keep in sync.
float baseline_lambda_shot = 3.24 * pow( 10, -4 );
float baseline_lambda_read = 4.3 * pow( 10, -6 );
merge() = default;
~merge() = default;
/**
* @brief Run alignment on burst of images
*
* @param burst_images collection of burst images
* @param alignments alignment in pixel value pair.
* Outer most vector is per alternative image.
* Inner most two vector is for horizontal & vertical tiles
*/
void process( hdrplus::burst& burst_images, \
std::vector<std::vector<std::vector<std::pair<int, int>>>>& alignments);
/*
std::vector<cv::Mat> get_other_tiles(); //return the other tile list T_1 to T_n
std::vector<cv::Mat> vector_math(string operation, reference_tile, other_tile_list); //for loop allowing operations across single element and list
std::vector<cv::Mat> scalar_vector_math(string operation, scalar num, std::vector<cv::Mat> tile_list); //for loop allowing operations across single element and list
std::vector<cv::Mat> average_vector(std::vector<cv::Mat> tile_list); //take average of vector elements
*/
private:
// Root-mean-square of all pixels in a tile.
float tileRMS(cv::Mat tile) {
cv::Mat squared;
cv::multiply(tile, tile, squared);
return sqrt(cv::mean(squared)[0]);
}
// Per-tile noise variance estimate: shot noise scales with the tile RMS,
// read noise is a constant floor.
std::vector<float> getNoiseVariance(std::vector<cv::Mat> tiles, float lambda_shot, float lambda_read) {
std::vector<float> noise_variance;
for (auto tile : tiles) {
noise_variance.push_back(lambda_shot * tileRMS(tile) + lambda_read);
}
return noise_variance;
}
// Raised-cosine window over a 1 x N row of sample positions.
cv::Mat cosineWindow1D(cv::Mat input, int window_size = TILE_SIZE) {
cv::Mat output = input.clone();
for (int i = 0; i < input.cols; ++i) {
output.at<float>(0, i) = 1. / 2. - 1. / 2. * cos(2 * M_PI * (input.at<float>(0, i) + 1 / 2.) / window_size);
}
return output;
}
// Apply a separable 2D raised-cosine window to a square tile
// (outer product of the 1D window with itself).
cv::Mat cosineWindow2D(cv::Mat tile) {
int window_size = tile.rows; // Assuming square tile
cv::Mat output_tile = tile.clone();
cv::Mat window = cv::Mat::zeros(1, window_size, CV_32F);
for(int i = 0; i < window_size; ++i) {
window.at<float>(i) = i;
}
cv::Mat window_x = cosineWindow1D(window, window_size);
window_x = cv::repeat(window_x, window_size, 1);
cv::Mat window_2d = window_x.mul(window_x.t());
cv::Mat window_applied;
cv::multiply(tile, window_2d, window_applied, 1, CV_32F);
return window_applied;
}
// Concatenate a 2D grid of tiles back into a single image
// (hconcat each row, then vconcat the rows).
cv::Mat cat2Dtiles(std::vector<std::vector<cv::Mat>> tiles) {
std::vector<cv::Mat> rows;
for (auto row_tiles : tiles) {
cv::Mat row;
cv::hconcat(row_tiles, row);
rows.push_back(row);
}
cv::Mat img;
cv::vconcat(rows, img);
return img;
}
// Circularly shift the matrix contents by delta (x = columns, y = rows),
// per channel, in place. Used to implement fftshift/ifftshift below.
void circshift(cv::Mat &out, const cv::Point &delta)
{
cv::Size sz = out.size();
// error checking
assert(sz.height > 0 && sz.width > 0);
// no need to shift
if ((sz.height == 1 && sz.width == 1) || (delta.x == 0 && delta.y == 0))
return;
// delta transform: normalize shifts into [0, size)
int x = delta.x;
int y = delta.y;
if (x > 0) x = x % sz.width;
if (y > 0) y = y % sz.height;
if (x < 0) x = x % sz.width + sz.width;
if (y < 0) y = y % sz.height + sz.height;
// in case of multiple dimensions
std::vector<cv::Mat> planes;
split(out, planes);
for (size_t i = 0; i < planes.size(); i++)
{
// vertical: swap the top (height - y) block with the bottom y block
cv::Mat tmp0, tmp1, tmp2, tmp3;
cv::Mat q0(planes[i], cv::Rect(0, 0, sz.width, sz.height - y));
cv::Mat q1(planes[i], cv::Rect(0, sz.height - y, sz.width, y));
q0.copyTo(tmp0);
q1.copyTo(tmp1);
tmp0.copyTo(planes[i](cv::Rect(0, y, sz.width, sz.height - y)));
tmp1.copyTo(planes[i](cv::Rect(0, 0, sz.width, y)));
// horizontal: swap the left (width - x) block with the right x block
cv::Mat q2(planes[i], cv::Rect(0, 0, sz.width - x, sz.height));
cv::Mat q3(planes[i], cv::Rect(sz.width - x, 0, x, sz.height));
q2.copyTo(tmp2);
q3.copyTo(tmp3);
tmp2.copyTo(planes[i](cv::Rect(x, 0, sz.width - x, sz.height)));
tmp3.copyTo(planes[i](cv::Rect(0, 0, x, sz.height)));
}
cv::merge(planes, out);
}
// Move the zero-frequency component to the center (floor of half-size).
void fftshift(cv::Mat &out)
{
cv::Size sz = out.size();
cv::Point pt(0, 0);
pt.x = (int) floor(sz.width / 2.0);
pt.y = (int) floor(sz.height / 2.0);
circshift(out, pt);
}
// Inverse of fftshift (ceiling of half-size).
void ifftshift(cv::Mat &out)
{
cv::Size sz = out.size();
cv::Point pt(0, 0);
pt.x = (int) ceil(sz.width / 2.0);
pt.y = (int) ceil(sz.height / 2.0);
circshift(out, pt);
}
// Split the reference image into overlapping tiles.
std::vector<cv::Mat> getReferenceTiles(cv::Mat reference_image);
// Reassemble overlapping tiles into an image of the given size.
cv::Mat mergeTiles(std::vector<cv::Mat> tiles, int rows, int cols);
// Merge one bayer channel: denoise channel_image against the aligned
// alternate-frame channels using the given noise model.
cv::Mat processChannel( hdrplus::burst& burst_images, \
std::vector<std::vector<std::vector<std::pair<int, int>>>>& alignments, \
cv::Mat channel_image, \
std::vector<cv::Mat> alternate_channel_i_list,\
float lambda_shot, \
float lambda_read);
//temporal denoise
std::vector<cv::Mat> temporal_denoise(std::vector<cv::Mat> tiles, std::vector<std::vector<cv::Mat>> alt_tiles, std::vector<float> noise_variance, float temporal_factor);
std::vector<cv::Mat> spatial_denoise(std::vector<cv::Mat> tiles, int num_alts, std::vector<float> noise_variance, float spatial_factor);
};
} // namespace hdrplus

@ -0,0 +1,69 @@
#pragma once
#include <string>
#include <unordered_map>
#include <memory> // std::shared_ptr
#include <opencv2/opencv.hpp> // all opencv header
#include <libraw/libraw.h>
namespace hdrplus
{
// Mirror of the rawpy/LibRaw postprocessing arguments; copied into
// LibRaw's imgdata.params by setParams().
class RawpyArgs{
public:
int demosaic_algorithm = 3;// 3 - AHD interpolation <->int user_qual
bool half_size = false;
bool use_camera_wb = true;
bool use_auto_wb = false;
bool no_auto_bright = true;
int output_color = LIBRAW_COLORSPACE_sRGB;
int gamma[2] = {1,1}; //# gamma correction not applied by rawpy (not quite understand)
int output_bps = 16; // 16-bit output
};
// Pipeline invocation options (inputs, outputs, stage selection, strengths).
class Options{
public:
std::string input = "";
std::string output = "";
std::string mode = "full"; //'full' 'align' 'merge' 'finish'
int reference = 0; // reference image index
float temporalfactor=75.0; // temporal denoise strength
float spatialfactor = 0.1; // spatial denoise strength
int ltmGain=-1;
double gtmContrast=0.075;
int verbose=2; // (0, 1, 2, 3, 4, 5)
};
// Finishing-stage tuning parameters.
class Tuning{
public:
std::string ltmGain = "auto"; // ltm gain: "auto" or a numeric value
double gtmContrast = 0.075; // gtm contrast
// Multi-scale sharpening settings, one entry per scale.
std::vector<float> sharpenAmount{1,0.5,0.5};
std::vector<float> sharpenSigma{1,2,4};
std::vector<float> sharpenThreshold{0.02,0.04,0.06};
};
// Aggregate of all tunables plus per-stage debug-output flags.
class Parameters{
public:
    std::unordered_map<std::string,bool> flags; // intermediate-output switches
    RawpyArgs rawpyArgs;
    Options options;
    Tuning tuning;

    // Enable every intermediate-output flag by default.
    // (Idiom: range-for over a brace list replaces the former C array +
    // sizeof loop; same keys, same values.)
    Parameters()
    {
        for (const char* key : {"writeReferenceImage", "writeGammaReference", "writeMergedImage", "writeGammaMerged",
                                "writeShortExposure", "writeLongExposure", "writeFusedExposure", "writeLTMImage",
                                "writeLTMGamma", "writeGTMImage", "writeReferenceFinal", "writeFinalImage"}) {
            flags[key] = true;
        }
    }
};
// Demosaic and post-process the RAW held by libraw_ptr using rawpyArgs.
cv::Mat postprocess(std::shared_ptr<LibRaw>& libraw_ptr, RawpyArgs rawpyArgs);
// Copy rawpyArgs into libraw_ptr->imgdata.params.
void setParams(std::shared_ptr<LibRaw>& libraw_ptr, RawpyArgs rawpyArgs);
} // namespace hdrplus

@ -0,0 +1,326 @@
#pragma once
#include <string>
#include <stdexcept> // std::runtime_error
#include <opencv2/opencv.hpp> // all opencv header
#include <omp.h>
// https://stackoverflow.com/questions/63404539/portable-loop-unrolling-with-template-parameter-in-c-with-gcc-icc
/// Helper macros for stringification
#define TO_STRING_HELPER(X) #X
#define TO_STRING(X) TO_STRING_HELPER(X)
// Define loop unrolling depending on the compiler
#if defined(__ICC) || defined(__ICL)
#define UNROLL_LOOP(n) _Pragma(TO_STRING(unroll (n)))
#elif defined(__clang__)
#define UNROLL_LOOP(n) _Pragma(TO_STRING(unroll (n)))
#elif defined(__GNUC__) && !defined(__clang__)
// BUGFIX: this branch was hard-coded to `GCC unroll (16)`, silently ignoring
// the unroll factor `n` requested at every call site.
#define UNROLL_LOOP(n) _Pragma(TO_STRING(GCC unroll (n)))
#elif defined(_MSC_BUILD)
#pragma message ("Microsoft Visual C++ (MSVC) detected: Loop unrolling not supported!")
#define UNROLL_LOOP(n)
#else
#warning "Unknown compiler: Loop unrolling not supported!"
#define UNROLL_LOOP(n)
#endif
namespace hdrplus
{
/**
* @brief Downsample an image by averaging every kernel x kernel block.
* Output size is floor(src / kernel) in each dimension.
*
* @tparam T element type of the image data (e.g. uint16_t)
* @tparam kernel box size; must be >= 1
* NOTE(review): box_sum accumulates kernel*kernel samples in T itself —
* may overflow for narrow T / large kernels; confirm callers' value ranges.
*/
template <typename T, int kernel>
cv::Mat box_filter_kxk( const cv::Mat& src_image )
{
const T* src_image_ptr = (T*)src_image.data;
int src_height = src_image.size().height;
int src_width = src_image.size().width;
int src_step = src_image.step1();
// Invalid kernel: empty Mat on Android, exception elsewhere.
if ( kernel <= 0 )
{
#ifdef __ANDROID__
return cv::Mat();
#else
throw std::runtime_error(std::string( __FILE__ ) + "::" + __func__ + " box filter only support kernel size >= 1");
#endif
}
// int(src_height / kernel) = floor(src_height / kernel)
// When input size is not multiplier of kernel, take floor
cv::Mat dst_image( src_height / kernel, src_width / kernel, src_image.type() );
T* dst_image_ptr = (T*)dst_image.data;
int dst_height = dst_image.size().height;
int dst_width = dst_image.size().width;
int dst_step = dst_image.step1();
for ( int row_i = 0; row_i < dst_height; ++row_i )
{
for ( int col_i = 0; col_i < dst_width; col_i++ )
{
// Sum the kernel x kernel source block mapping to (row_i, col_i).
T box_sum = T( 0 );
UNROLL_LOOP( kernel )
for ( int kernel_row_i = 0; kernel_row_i < kernel; ++kernel_row_i )
{
UNROLL_LOOP( kernel )
for ( int kernel_col_i = 0; kernel_col_i < kernel; ++kernel_col_i )
{
box_sum += src_image_ptr[ ( row_i * kernel + kernel_row_i ) * src_step + ( col_i * kernel + kernel_col_i ) ];
}
}
// Average via integer division (truncates for integral T).
T box_avg = box_sum / T( kernel * kernel );
dst_image_ptr[ row_i * dst_step + col_i ] = box_avg;
}
}
return dst_image;
}
/**
* @brief Downsample by nearest neighbour: keep the top-left sample of every
* kernel x kernel block. Output size is floor(src / kernel).
*
* @tparam T element type of the image data
* @tparam kernel downsample factor
*/
template <typename T, int kernel>
cv::Mat downsample_nearest_neighbour( const cv::Mat& src_image )
{
const T* src_image_ptr = (T*)src_image.data;
int src_height = src_image.size().height;
int src_width = src_image.size().width;
int src_step = src_image.step1();
// int(src_height / kernel) = floor(src_height / kernel)
// When input size is not multiplier of kernel, take floor
cv::Mat dst_image = cv::Mat( src_height / kernel, src_width / kernel, src_image.type() );
T* dst_image_ptr = (T*)dst_image.data;
int dst_height = dst_image.size().height;
int dst_width = dst_image.size().width;
int dst_step = dst_image.step1();
// -03 should be enough to optimize below code
for ( int row_i = 0; row_i < dst_height; row_i++ )
{
UNROLL_LOOP( 32 )
for ( int col_i = 0; col_i < dst_width; col_i++ )
{
dst_image_ptr[ row_i * dst_step + col_i ] = \
src_image_ptr[ (row_i * kernel) * src_step + (col_i * kernel) ];
}
}
return dst_image;
}
/**
* @brief Debug print of a 1- or 3-channel cv::Mat to stdout.
*
* @tparam T element type; must promote to int for the %d format
* (integral types only — a float T would be undefined behavior here).
* Throws (non-Android) on other channel counts.
*/
template< typename T >
void print_cvmat( cv::Mat image )
{
const T* img_ptr = (const T*)image.data;
int height = image.size().height;
int width = image.size().width;
int step = image.step1();
int chns = image.channels();
printf("print_cvmat()::Image of size height = %d, width = %d, step = %d\n", \
height, width, step );
if ( chns == 1 )
{
for ( int row_i = 0; row_i < height; ++row_i )
{
int row_i_offset = row_i * step;
for ( int col_i = 0; col_i < width; ++col_i )
{
printf("%3.d ", img_ptr[ row_i_offset + col_i ]);
//printf("%3.d ", int( image.at<T>( row_i, col_i ) ) );
}
printf("\n");
}
}
else if ( chns == 3 )
{
for ( int row_i = 0; row_i < height; ++row_i )
{
int row_i_offset = row_i * step;
for ( int col_i = 0; col_i < width; ++col_i )
{
printf("[%3.d, %3.d, %3.d] ", img_ptr[ row_i_offset + col_i * 3 + 0 ], \
img_ptr[ row_i_offset + col_i * 3 + 1 ], \
img_ptr[ row_i_offset + col_i * 3 + 2 ] );
}
printf("\n");
}
}
else
{
// On Android the unsupported-channel case silently does nothing.
#ifdef __ANDROID__
#else
throw std::runtime_error("cv::Mat number of channel currently not support to print\n");
#endif
}
}
/**
* @brief Extract the four bayer sub-channels into separate half-size images.
*
* @tparam T data type of bayer image.
* @return vector of RGB image. OpenCV internally maintain reference count.
* Thus this step won't create deep copy overhead.
*
* Channel layout per 2x2 bayer cell: ch1 = (0,0), ch3 = (0,1),
* ch2 = (1,0), ch4 = (1,1).
*
* @example extract_rgb_from_bayer<uint16_t>( bayer_img, rgb_vector_container );
*/
template <typename T>
void extract_rgb_from_bayer( const cv::Mat& bayer_img, \
cv::Mat& img_ch1, cv::Mat& img_ch2, cv::Mat& img_ch3, cv::Mat& img_ch4 )
{
const T* bayer_img_ptr = (const T*)bayer_img.data;
int bayer_width = bayer_img.size().width;
int bayer_height = bayer_img.size().height;
int bayer_step = bayer_img.step1();
if ( bayer_width % 2 != 0 || bayer_height % 2 != 0 )
{
// NOTE(review): on Android this check falls through and processing
// continues with floor-halved sizes (last row/col dropped) — confirm
// that is the intended behavior.
#ifdef __ANDROID__
#else
throw std::runtime_error("Bayer image data size incorrect, must be multiplier of 2\n");
#endif
}
// RGB image is half the size of bayer image
int rgb_width = bayer_width / 2;
int rgb_height = bayer_height / 2;
img_ch1.create( rgb_height, rgb_width, bayer_img.type() );
img_ch2.create( rgb_height, rgb_width, bayer_img.type() );
img_ch3.create( rgb_height, rgb_width, bayer_img.type() );
img_ch4.create( rgb_height, rgb_width, bayer_img.type() );
int rgb_step = img_ch1.step1();
T* img_ch1_ptr = (T*)img_ch1.data;
T* img_ch2_ptr = (T*)img_ch2.data;
T* img_ch3_ptr = (T*)img_ch3.data;
T* img_ch4_ptr = (T*)img_ch4.data;
#pragma omp parallel for
for ( int rgb_row_i = 0; rgb_row_i < rgb_height; rgb_row_i++ )
{
int rgb_row_i_offset = rgb_row_i * rgb_step;
// Every RGB row corresbonding to two Bayer image row
int bayer_row_i_offset0 = ( rgb_row_i * 2 + 0 ) * bayer_step; // For RG
int bayer_row_i_offset1 = ( rgb_row_i * 2 + 1 ) * bayer_step; // For GB
for ( int rgb_col_j = 0; rgb_col_j < rgb_width; rgb_col_j++ )
{
// img_ch1/2/3/4 : (0,0), (1,0), (0,1), (1,1)
int bayer_col_i_offset0 = rgb_col_j * 2 + 0;
int bayer_col_i_offset1 = rgb_col_j * 2 + 1;
img_ch1_ptr[ rgb_row_i_offset + rgb_col_j ] = bayer_img_ptr[ bayer_row_i_offset0 + bayer_col_i_offset0 ];
img_ch3_ptr[ rgb_row_i_offset + rgb_col_j ] = bayer_img_ptr[ bayer_row_i_offset0 + bayer_col_i_offset1 ];
img_ch2_ptr[ rgb_row_i_offset + rgb_col_j ] = bayer_img_ptr[ bayer_row_i_offset1 + bayer_col_i_offset0 ];
img_ch4_ptr[ rgb_row_i_offset + rgb_col_j ] = bayer_img_ptr[ bayer_row_i_offset1 + bayer_col_i_offset1 ];
}
}
}
/**
* @brief Convert RGB image to gray image through same weight linear combination.
* Also support implicit data type conversion.
*
* @tparam RGB_DTYPE rgb image type (e.g. uint16_t)
* @tparam GRAY_DTYPE gray image type (e.g. uint16_t)
* @tparam GRAY_CVTYPE opencv gray image type
*
* NOTE(review): the three-channel sum is accumulated in GRAY_DTYPE before
* dividing by 3 — can overflow when GRAY_DTYPE is as narrow as RGB_DTYPE
* and pixel values are near the type maximum; confirm callers' ranges.
*/
template< typename RGB_DTYPE, typename GRAY_DTYPE, int GRAY_CVTYPE >
cv::Mat rgb_2_gray( const cv::Mat& rgb_img )
{
const RGB_DTYPE* rgb_img_ptr = (const RGB_DTYPE*)rgb_img.data;
int img_width = rgb_img.size().width;
int img_height = rgb_img.size().height;
int rgb_img_step = rgb_img.step1();
// Create output gray cv::Mat
cv::Mat gray_img( img_height, img_width, GRAY_CVTYPE );
GRAY_DTYPE* gray_img_ptr = (GRAY_DTYPE*)gray_img.data;
int gray_img_step = gray_img.step1();
#pragma omp parallel for
for ( int row_i = 0; row_i < img_height; row_i++ )
{
int rgb_row_i_offset = row_i * rgb_img_step;
int gray_row_i_offset = row_i * gray_img_step;
UNROLL_LOOP( 32 ) // multiplier of cache line size
for ( int col_j = 0; col_j < img_width; col_j++ )
{
// Equal-weight average of the three interleaved channels.
GRAY_DTYPE avg_ij(0);
avg_ij += rgb_img_ptr[ rgb_row_i_offset + (col_j * 3 + 0) ];
avg_ij += rgb_img_ptr[ rgb_row_i_offset + (col_j * 3 + 1) ];
avg_ij += rgb_img_ptr[ rgb_row_i_offset + (col_j * 3 + 2) ];
avg_ij /= 3;
gray_img_ptr[ gray_row_i_offset + col_j ] = avg_ij;
}
}
// OpenCV use reference count. Thus return won't create deep copy
return gray_img;
}
/**
* @brief Debug print of a tile_size x tile_size window of a single-channel
* image, starting at (start_idx_row, start_idx_col). No bounds checking;
* T must promote to unsigned int for the %u format.
*/
template <typename T>
void print_tile( const cv::Mat& img, int tile_size, int start_idx_row, int start_idx_col )
{
const T* img_ptr = (T*)img.data;
int src_step = img.step1();
for ( int row = start_idx_row; row < tile_size + start_idx_row; ++row )
{
const T* img_ptr_row = img_ptr + row * src_step;
for ( int col = start_idx_col; col < tile_size + start_idx_col; ++col )
{
printf("%u ", img_ptr_row[ col ] );
}
printf("\n");
}
printf("\n");
}
/**
* @brief Debug print of (a top-left sub-rectangle of) a single-channel image.
* Pass -1/-1 (the defaults) to print the whole image; otherwise the
* requested size is clamped to the image size.
* T must promote to unsigned int for the %u format.
*/
template< typename T>
void print_img( const cv::Mat& img, int img_height = -1, int img_width = -1 )
{
const T* img_ptr = (T*)img.data;
if ( img_height == -1 && img_width == -1 )
{
img_height = img.size().height;
img_width = img.size().width;
}
else
{
img_height = std::min( img.size().height, img_height );
img_width = std::min( img.size().width, img_width );
}
printf("Image size (h=%d, w=%d), Print range (h=0-%d, w=0-%d)]\n", \
img.size().height, img.size().width, img_height, img_width );
int img_step = img.step1();
for ( int row = 0; row < img_height; ++row )
{
const T* img_ptr_row = img_ptr + row * img_step;
for ( int col = 0; col < img_width; ++col )
{
printf("%u ", img_ptr_row[ col ]);
}
printf("\n");
}
printf("\n");
}
} // namespace hdrplus

@ -0,0 +1,994 @@
#include <vector>
#include <string>
#include <limits>
#include <cstdio>
#include <utility> // std::make_pair
#include <stdexcept> // std::runtime_error
#include <opencv2/opencv.hpp> // all opencv header
#include <omp.h>
#include "hdrplus/align.h"
#include "hdrplus/burst.h"
#include "hdrplus/utility.h"
namespace hdrplus
{
// Function declarations (definitions below; static = file-local)
// Build the grayscale pyramid for one image; inv_scale_factors gives the
// downsample factor from the previous level (1 = copy the source,
// 2/4 = gaussian blur then nearest-neighbour downsample).
static void build_per_grayimg_pyramid( \
std::vector<cv::Mat>& images_pyramid, \
const cv::Mat& src_image, \
const std::vector<int>& inv_scale_factors );
// Upsample the previous (coarser) level's alignment to the current level's
// tile grid; optionally consider neighbouring candidates.
template< int pyramid_scale_factor_prev_curr, int tilesize_scale_factor_prev_curr, int tile_size >
static void build_upsampled_prev_aligement( \
const std::vector<std::vector<std::pair<int, int>>>& src_alignment, \
std::vector<std::vector<std::pair<int, int>>>& dst_alignment, \
int num_tiles_h, int num_tiles_w, \
const cv::Mat& ref_img, const cv::Mat& alt_img, \
bool consider_nbr = false );
// Sum of absolute differences between one tile of img1 and one tile of img2.
template< typename data_type, typename return_type, int tile_size >
static unsigned long long l1_distance( const cv::Mat& img1, const cv::Mat& img2, \
int img1_tile_row_start_idx, int img1_tile_col_start_idx, \
int img2_tile_row_start_idx, int img2_tile_col_start_idx );
// Sum of squared differences between one tile of img1 and one tile of img2.
template< typename data_type, typename return_type, int tile_size >
static return_type l2_distance( const cv::Mat& img1, const cv::Mat& img2, \
int img1_tile_row_start_idx, int img1_tile_col_start_idx, \
int img2_tile_row_start_idx, int img2_tile_col_start_idx );
// Align one pyramid level of alt_img against ref_img, refining the upsampled
// previous-level alignment within the given search radius.
static void align_image_level( \
const cv::Mat& ref_img, \
const cv::Mat& alt_img, \
std::vector<std::vector<std::pair<int, int>>>& prev_aligement, \
std::vector<std::vector<std::pair<int, int>>>& curr_alignment, \
int scale_factor_prev_curr, \
int curr_tile_size, \
int prev_tile_size, \
int search_radiou, \
int distance_type );
// Function Implementations
// static function only visible within file
// Build a grayscale gaussian pyramid for one image.
//
// images_pyramid    : output; one level per entry in inv_scale_factors.
//                     Level 0 is the original image, later levels are
//                     progressively blurred + downsampled.
// src_image         : full-resolution grayscale image (uint16 at call sites)
// inv_scale_factors : per-level inverse scale factor relative to the
//                     PREVIOUS level. Supported values: 1 (plain copy),
//                     2 and 4 (gaussian blur then nearest-neighbour
//                     downsample of images_pyramid[i-1]).
//                     NOTE: factors 2/4 read level i-1, so by convention the
//                     first entry must be 1 -- a leading 2/4 would index
//                     level -1 and .at() would throw.
static void build_per_grayimg_pyramid( \
    std::vector<cv::Mat>& images_pyramid, \
    const cv::Mat& src_image, \
    const std::vector<int>& inv_scale_factors )
{
    images_pyramid.resize( inv_scale_factors.size() );
    for ( size_t i = 0; i < inv_scale_factors.size(); ++i )
    {
        cv::Mat blur_image;
        cv::Mat downsample_image;
        switch ( inv_scale_factors[ i ] )
        {
        case 1:
            images_pyramid[ i ] = src_image.clone();
            // cv::Mat uses reference counting, this does not deep copy
            downsample_image = src_image;
            break;
        case 2:
            // Gaussian blur with sigma = factor * 0.5, then 2x
            // nearest-neighbour downsample of the previous pyramid level
            cv::GaussianBlur( images_pyramid.at( i-1 ), blur_image, cv::Size(0, 0), inv_scale_factors[ i ] * 0.5 );
            downsample_image = downsample_nearest_neighbour<uint16_t, 2>( blur_image );
            images_pyramid.at( i ) = downsample_image.clone();
            break;
        case 4:
            cv::GaussianBlur( images_pyramid.at( i-1 ), blur_image, cv::Size(0, 0), inv_scale_factors[ i ] * 0.5 );
            downsample_image = downsample_nearest_neighbour<uint16_t, 4>( blur_image );
            images_pyramid.at( i ) = downsample_image.clone();
            break;
        default:
            // Unsupported factor: Android builds skip the level silently
            // (leaving an empty cv::Mat at this index), desktop builds throw.
            #ifdef __ANDROID__
            break;
            #else
            // Fixed: added the missing space before "invalid" in the message
            throw std::runtime_error("inv scale factor " + std::to_string( inv_scale_factors[ i ]) + " invalid" );
            #endif
        }
    }
}
// Inequality for tile-alignment pairs: two alignments differ when either
// coordinate differs. (std::pair provides a generic operator!=, but this
// non-template overload is preferred by overload resolution and keeps the
// intent explicit for alignment comparisons in this file.)
static bool operator!=( const std::pair<int, int>& lhs, const std::pair<int, int>& rhs )
{
    const bool same = ( lhs.first == rhs.first ) && ( lhs.second == rhs.second );
    return !same;
}
// Upsample the tile alignment computed at the previous (coarser) pyramid
// level to the current (finer) level.
//
// Template parameters:
//   pyramid_scale_factor_prev_curr  : resolution ratio between previous and
//                                     current pyramid level (2 or 4)
//   tilesize_scale_factor_prev_curr : ratio between previous and current
//                                     tile size (1 or 2)
//   tile_size                       : tile size at the current level (only
//                                     used by the neighbour re-scoring)
//
// src_alignment : per-tile (row, col) pixel displacement at previous level
// dst_alignment : output displacement at current level, resized to
//                 num_tiles_h x num_tiles_w; tiles beyond the upsampled
//                 region keep the (0, 0) default
// ref_img, alt_img : current-level images, read only when consider_nbr is
//                 true to re-score candidate alignments with an L1 distance
// consider_nbr  : when true, each tile also tries its 4 neighbours'
//                 alignments and keeps whichever scores lowest
template< int pyramid_scale_factor_prev_curr, int tilesize_scale_factor_prev_curr, int tile_size >
static void build_upsampled_prev_aligement( \
    const std::vector<std::vector<std::pair<int, int>>>& src_alignment, \
    std::vector<std::vector<std::pair<int, int>>>& dst_alignment, \
    int num_tiles_h, int num_tiles_w, \
    const cv::Mat& ref_img, const cv::Mat& alt_img, \
    bool consider_nbr )
{
    int src_num_tiles_h = src_alignment.size();
    int src_num_tiles_w = src_alignment[ 0 ].size();
    // Each source tile expands into repeat_factor x repeat_factor tiles at
    // the current level
    constexpr int repeat_factor = pyramid_scale_factor_prev_curr / tilesize_scale_factor_prev_curr;
    // printf("build_upsampled_prev_aligement with scale factor %d, repeat factor %d, tile size factor %d\n", \
    //     pyramid_scale_factor_prev_curr, repeat_factor, tilesize_scale_factor_prev_curr );
    int dst_num_tiles_main_h = src_num_tiles_h * repeat_factor;
    int dst_num_tiles_main_w = src_num_tiles_w * repeat_factor;
    if ( dst_num_tiles_main_h > num_tiles_h || dst_num_tiles_main_w > num_tiles_w )
    {
        // Android builds return silently (dst_alignment left untouched);
        // desktop builds fail loudly
        #ifdef __ANDROID__
        return;
        #else
        throw std::runtime_error("current level number of tiles smaller than upsampled tiles\n");
        #endif
    }
    // Allocate data for dst_alignment
    // NOTE: number of tiles h, number of tiles w might be different from dst_num_tiles_main_h, dst_num_tiles_main_w
    // For tiles between num_tile_h and dst_num_tiles_main_h, use (0,0)
    dst_alignment.resize( num_tiles_h, std::vector<std::pair<int, int>>( num_tiles_w, std::pair<int, int>(0, 0) ) );
    // Upsample alignment
    #pragma omp parallel for collapse(2)
    for ( int row_i = 0; row_i < src_num_tiles_h; row_i++ )
    {
        for ( int col_i = 0; col_i < src_num_tiles_w; col_i++ )
        {
            // Scale alignment: displacements are in pixels, so they grow by
            // the resolution ratio between the two levels
            std::pair<int, int> align_i = src_alignment[ row_i ][ col_i ];
            align_i.first *= pyramid_scale_factor_prev_curr;
            align_i.second *= pyramid_scale_factor_prev_curr;
            // repeat the scaled alignment over all child tiles
            UNROLL_LOOP( repeat_factor )
            for ( int repeat_row_i = 0; repeat_row_i < repeat_factor; ++repeat_row_i )
            {
                int repeat_row_i_offset = row_i * repeat_factor + repeat_row_i;
                UNROLL_LOOP( repeat_factor )
                for ( int repeat_col_i = 0; repeat_col_i < repeat_factor; ++repeat_col_i )
                {
                    int repeat_col_i_offset = col_i * repeat_factor + repeat_col_i;
                    dst_alignment[ repeat_row_i_offset ][ repeat_col_i_offset ] = align_i;
                }
            }
        }
    }
    if ( consider_nbr )
    {
        // Copy constructor: snapshot of the plain upsampled alignment so the
        // neighbour comparison below reads consistent values while
        // dst_alignment is being updated in parallel
        std::vector<std::vector<std::pair<int, int>>> upsampled_alignment{ dst_alignment };
        // Distance function
        unsigned long long (*distance_func_ptr)(const cv::Mat&, const cv::Mat&, int, int, int, int) = \
            &l1_distance<uint16_t, unsigned long long, tile_size>;
        #pragma omp parallel for collapse(2)
        for ( int tile_row_i = 0; tile_row_i < num_tiles_h; tile_row_i++ )
        {
            for ( int tile_col_i = 0; tile_col_i < num_tiles_w; tile_col_i++ )
            {
                const auto& curr_align_i = upsampled_alignment[ tile_row_i ][ tile_col_i ];
                // Container for nbr alignment pair
                std::vector<std::pair<int, int>> nbrs_align_i;
                // Consider 4 neighbour's alignment
                // Only compute distance if alignment is different
                if ( tile_col_i > 0 )
                {
                    const auto& nbr1_align_i = upsampled_alignment[ tile_row_i + 0 ][ tile_col_i - 1 ];
                    if ( curr_align_i != nbr1_align_i ) nbrs_align_i.emplace_back( nbr1_align_i );
                }
                if ( tile_col_i < num_tiles_w - 1 )
                {
                    const auto& nbr2_align_i = upsampled_alignment[ tile_row_i + 0 ][ tile_col_i + 1 ];
                    if ( curr_align_i != nbr2_align_i ) nbrs_align_i.emplace_back( nbr2_align_i );
                }
                if ( tile_row_i > 0 )
                {
                    const auto& nbr3_align_i = upsampled_alignment[ tile_row_i - 1 ][ tile_col_i + 0 ];
                    if ( curr_align_i != nbr3_align_i ) nbrs_align_i.emplace_back( nbr3_align_i );
                }
                if ( tile_row_i < num_tiles_h - 1 )
                {
                    const auto& nbr4_align_i = upsampled_alignment[ tile_row_i + 1 ][ tile_col_i + 0 ];
                    if ( curr_align_i != nbr4_align_i ) nbrs_align_i.emplace_back( nbr4_align_i );
                }
                // If there is a nbr alignment that need to be considered. Compute distance
                if ( ! nbrs_align_i.empty() )
                {
                    // Tiles overlap by half a tile, hence the tile_size / 2 stride
                    int ref_tile_row_start_idx_i = tile_row_i * tile_size / 2;
                    int ref_tile_col_start_idx_i = tile_col_i * tile_size / 2;
                    // curr_align_i's distance
                    auto curr_align_i_distance = distance_func_ptr(
                        ref_img, alt_img, \
                        ref_tile_row_start_idx_i, \
                        ref_tile_col_start_idx_i, \
                        ref_tile_row_start_idx_i + curr_align_i.first, \
                        ref_tile_col_start_idx_i + curr_align_i.second );
                    for ( const auto& nbr_align_i : nbrs_align_i )
                    {
                        auto nbr_align_i_distance = distance_func_ptr(
                            ref_img, alt_img, \
                            ref_tile_row_start_idx_i, \
                            ref_tile_col_start_idx_i, \
                            ref_tile_row_start_idx_i + nbr_align_i.first, \
                            ref_tile_col_start_idx_i + nbr_align_i.second );
                        if ( nbr_align_i_distance < curr_align_i_distance )
                        {
                            // NOTE(review): this block is compiled in RELEASE
                            // builds only (#ifdef NDEBUG); if it is debug
                            // output, #ifndef NDEBUG was probably intended --
                            // confirm
                            #ifdef NDEBUG
                            printf("tile [%d, %d] update align, prev align (%d, %d) curr align (%d, %d), prev distance %d curr distance %d\n", \
                                tile_row_i, tile_col_i, \
                                curr_align_i.first, curr_align_i.second, \
                                nbr_align_i.first, nbr_align_i.second, \
                                int(curr_align_i_distance), int(nbr_align_i_distance) );
                            #endif
                            dst_alignment[ tile_row_i ][ tile_col_i ] = nbr_align_i;
                            curr_align_i_distance = nbr_align_i_distance;
                        }
                    }
                }
            }
        }
    }
}
// Set tilesize as template argument for better compiler optimization result.
//
// Sum of absolute differences (L1) between a tile_size x tile_size tile of
// img1 starting at (img1_tile_row_start_idx, img1_tile_col_start_idx) and
// the same-sized tile of img2 starting at
// (img2_tile_row_start_idx, img2_tile_col_start_idx).
//
// data_type must match the element type of both mats (uint16_t at every
// call site in this file). Out-of-range start indices throw on desktop
// builds; Android builds return 0 instead, so a 0 result is ambiguous there.
// NOTE(review): the declared return type is unsigned long long while the
// accumulator uses return_type; callers always instantiate
// return_type = unsigned long long, so the two agree.
template< typename data_type, typename return_type, int tile_size >
static unsigned long long l1_distance( const cv::Mat& img1, const cv::Mat& img2, \
    int img1_tile_row_start_idx, int img1_tile_col_start_idx, \
    int img2_tile_row_start_idx, int img2_tile_col_start_idx )
{
    // Branchy absolute value. Evaluates x twice -- safe below because the
    // argument (a pixel difference) has no side effects.
    #define CUSTOME_ABS( x ) ( x ) > 0 ? ( x ) : - ( x )
    const data_type* img1_ptr = (const data_type*)img1.data;
    const data_type* img2_ptr = (const data_type*)img2.data;
    // step1() is the row stride in elements, may exceed width for padded mats
    int img1_step = img1.step1();
    int img2_step = img2.step1();
    int img1_width = img1.size().width;
    int img1_height = img1.size().height;
    int img2_width = img2.size().width;
    int img2_height = img2.size().height;
    // Range check for safety
    if ( img1_tile_row_start_idx < 0 || img1_tile_row_start_idx > img1_height - tile_size )
    {
        #ifdef __ANDROID__
        return 0;
        #else
        throw std::runtime_error("l1 distance img1_tile_row_start_idx" + std::to_string( img1_tile_row_start_idx ) + \
            " out of valid range (0, " + std::to_string( img1_height - tile_size ) + ")\n" );
        #endif
    }
    if ( img1_tile_col_start_idx < 0 || img1_tile_col_start_idx > img1_width - tile_size )
    {
        #ifdef __ANDROID__
        return 0;
        #else
        throw std::runtime_error("l1 distance img1_tile_col_start_idx" + std::to_string( img1_tile_col_start_idx ) + \
            " out of valid range (0, " + std::to_string( img1_width - tile_size ) + ")\n" );
        #endif
    }
    if ( img2_tile_row_start_idx < 0 || img2_tile_row_start_idx > img2_height - tile_size )
    {
        #ifdef __ANDROID__
        return 0;
        #else
        throw std::runtime_error("l1 distance img2_tile_row_start_idx out of valid range\n");
        #endif
    }
    if ( img2_tile_col_start_idx < 0 || img2_tile_col_start_idx > img2_width - tile_size )
    {
        #ifdef __ANDROID__
        return 0;
        #else
        throw std::runtime_error("l1 distance img2_tile_col_start_idx out of valid range\n");
        #endif
    }
    return_type sum(0);
    // Hot loop: both dimensions unrolled via the project UNROLL_LOOP macro
    UNROLL_LOOP( tile_size )
    for ( int row_i = 0; row_i < tile_size; ++row_i )
    {
        const data_type* img1_ptr_row_i = img1_ptr + (img1_tile_row_start_idx + row_i) * img1_step + img1_tile_col_start_idx;
        const data_type* img2_ptr_row_i = img2_ptr + (img2_tile_row_start_idx + row_i) * img2_step + img2_tile_col_start_idx;
        UNROLL_LOOP( tile_size )
        for ( int col_i = 0; col_i < tile_size; ++col_i )
        {
            // uint16_t operands promote to int, so the subtraction cannot wrap
            data_type l1 = CUSTOME_ABS( img1_ptr_row_i[ col_i ] - img2_ptr_row_i[ col_i ] );
            sum += l1;
        }
    }
    #undef CUSTOME_ABS
    return sum;
}
// Sum of squared differences (L2) between a tile_size x tile_size tile of
// img1 starting at (img1_tile_row_start_idx, img1_tile_col_start_idx) and
// the same-sized tile of img2 starting at
// (img2_tile_row_start_idx, img2_tile_col_start_idx).
//
// Mirrors l1_distance above: data_type must match the mat element type
// (uint16_t at call sites); out-of-range indices throw on desktop builds
// and return 0 on Android, making a 0 result ambiguous there.
template< typename data_type, typename return_type, int tile_size >
static return_type l2_distance( const cv::Mat& img1, const cv::Mat& img2, \
    int img1_tile_row_start_idx, int img1_tile_col_start_idx, \
    int img2_tile_row_start_idx, int img2_tile_col_start_idx )
{
    // Branchy absolute value; double-evaluates x (no side effects below)
    #define CUSTOME_ABS( x ) ( x ) > 0 ? ( x ) : - ( x )
    const data_type* img1_ptr = (const data_type*)img1.data;
    const data_type* img2_ptr = (const data_type*)img2.data;
    // step1() is the row stride in elements, may exceed width for padded mats
    int img1_step = img1.step1();
    int img2_step = img2.step1();
    int img1_width = img1.size().width;
    int img1_height = img1.size().height;
    int img2_width = img2.size().width;
    int img2_height = img2.size().height;
    // Range check for safety
    if ( img1_tile_row_start_idx < 0 || img1_tile_row_start_idx > img1_height - tile_size )
    {
        #ifdef __ANDROID__
        return 0;
        #else
        throw std::runtime_error("l2 distance img1_tile_row_start_idx" + std::to_string( img1_tile_row_start_idx ) + \
            " out of valid range (0, " + std::to_string( img1_height - tile_size ) + ")\n" );
        #endif
    }
    if ( img1_tile_col_start_idx < 0 || img1_tile_col_start_idx > img1_width - tile_size )
    {
        #ifdef __ANDROID__
        return 0;
        #else
        throw std::runtime_error("l2 distance img1_tile_col_start_idx" + std::to_string( img1_tile_col_start_idx ) + \
            " out of valid range (0, " + std::to_string( img1_width - tile_size ) + ")\n" );
        #endif
    }
    if ( img2_tile_row_start_idx < 0 || img2_tile_row_start_idx > img2_height - tile_size )
    {
        #ifdef __ANDROID__
        return 0;
        #else
        throw std::runtime_error("l2 distance img2_tile_row_start_idx out of valid range\n");
        #endif
    }
    if ( img2_tile_col_start_idx < 0 || img2_tile_col_start_idx > img2_width - tile_size )
    {
        #ifdef __ANDROID__
        return 0;
        #else
        throw std::runtime_error("l2 distance img2_tile_col_start_idx out of valid range\n");
        #endif
    }
    // printf("Search two tile with ref : \n");
    // print_tile<data_type>( img1, tile_size, img1_tile_row_start_idx, img1_tile_col_start_idx );
    // printf("Search two tile with alt :\n");
    // print_tile<data_type>( img2, tile_size, img2_tile_row_start_idx, img2_tile_col_start_idx );
    return_type sum(0);
    // Hot loop: both dimensions unrolled via the project UNROLL_LOOP macro
    UNROLL_LOOP( tile_size )
    for ( int row_i = 0; row_i < tile_size; ++row_i )
    {
        const data_type* img1_ptr_row_i = img1_ptr + (img1_tile_row_start_idx + row_i) * img1_step + img1_tile_col_start_idx;
        const data_type* img2_ptr_row_i = img2_ptr + (img2_tile_row_start_idx + row_i) * img2_step + img2_tile_col_start_idx;
        UNROLL_LOOP( tile_size )
        for ( int col_i = 0; col_i < tile_size; ++col_i )
        {
            // l1 holds |a - b|; squaring promotes to int before accumulating
            data_type l1 = CUSTOME_ABS( img1_ptr_row_i[ col_i ] - img2_ptr_row_i[ col_i ] );
            sum += ( l1 * l1 );
        }
    }
    #undef CUSTOME_ABS
    return sum;
}
// Copy a tile_size x tile_size tile of img, starting at
// (img_tile_row_start_idx, img_tile_col_start_idx), into a freshly
// allocated contiguous cv::Mat.  Used to cache a tile so the distance
// search reads a small contiguous buffer instead of striding through the
// full image.  T must match the element type of img (uint16_t at call
// sites here).  Out-of-range start indices throw on desktop builds;
// Android builds return an empty cv::Mat, which callers must check.
template<typename T, int tile_size>
static cv::Mat extract_img_tile( const cv::Mat& img, int img_tile_row_start_idx, int img_tile_col_start_idx )
{
    const T* img_ptr = (const T*)img.data;
    int img_width = img.size().width;
    int img_height = img.size().height;
    // step1() is the row stride in elements, may exceed width for padded mats
    int img_step = img.step1();
    if ( img_tile_row_start_idx < 0 || img_tile_row_start_idx > img_height - tile_size )
    {
        #ifdef __ANDROID__
        return cv::Mat();
        #else
        throw std::runtime_error("extract_img_tile img_tile_row_start_idx " + std::to_string( img_tile_row_start_idx ) + \
            " out of valid range (0, " + std::to_string( img_height - tile_size ) + ")\n" );
        #endif
    }
    if ( img_tile_col_start_idx < 0 || img_tile_col_start_idx > img_width - tile_size )
    {
        #ifdef __ANDROID__
        return cv::Mat();
        #else
        throw std::runtime_error("extract_img_tile img_tile_col_start_idx " + std::to_string( img_tile_col_start_idx ) + \
            " out of valid range (0, " + std::to_string( img_width - tile_size ) + ")\n" );
        #endif
    }
    cv::Mat img_tile( tile_size, tile_size, img.type() );
    T* img_tile_ptr = (T*)img_tile.data;
    int img_tile_step = img_tile.step1();
    // Row-by-row element copy into the new tile
    UNROLL_LOOP( tile_size )
    for ( int row_i = 0; row_i < tile_size; ++row_i )
    {
        const T* img_ptr_row_i = img_ptr + img_step * ( img_tile_row_start_idx + row_i );
        T* img_tile_ptr_row_i = img_tile_ptr + img_tile_step * row_i;
        UNROLL_LOOP( tile_size )
        for ( int col_i = 0; col_i < tile_size; ++col_i )
        {
            img_tile_ptr_row_i[ col_i ] = img_ptr_row_i[ img_tile_col_start_idx + col_i ];
        }
    }
    return img_tile;
}
// Align one pyramid level of the alternative image against the reference
// image using tile-based translational search (HDR+ style).
//
// ref_img / alt_img       : same-sized grayscale images at this level
// prev_aligement          : alignment from the previous (coarser) level;
//                           ignored when prev_tile_size == -1 (coarsest level)
// curr_alignment          : output per-tile (row, col) pixel displacement
// scale_factor_prev_curr  : resolution ratio between previous and current
//                           level (2 or 4); -1 at the coarsest level
// curr_tile_size          : tile size at this level -- only 8 and 16 are
//                           supported by the dispatch tables below
// prev_tile_size          : tile size of the previous level, -1 at coarsest
// search_radiou           : search radius in pixels -- only 1 and 4 are
//                           supported by the dispatch tables below
// distance_type           : 1 = L1, 2 = L2 tile distance
//
// NOTE(review): unsupported combinations of curr_tile_size / search_radiou /
// distance_type leave the function pointers below nullptr and would crash
// when invoked later in this function -- confirm callers only pass the
// supported values.
void align_image_level( \
    const cv::Mat& ref_img, \
    const cv::Mat& alt_img, \
    std::vector<std::vector<std::pair<int, int>>>& prev_aligement, \
    std::vector<std::vector<std::pair<int, int>>>& curr_alignment, \
    int scale_factor_prev_curr, \
    int curr_tile_size, \
    int prev_tile_size, \
    int search_radiou, \
    int distance_type )
{
    // Every align image level share the same distance function.
    // Use function ptr to reduce if else overhead inside for loop
    unsigned long long (*distance_func_ptr)(const cv::Mat&, const cv::Mat&, int, int, int, int) = nullptr;
    if ( distance_type == 1 ) // l1 distance
    {
        if ( curr_tile_size == 8 )
        {
            distance_func_ptr = &l1_distance<uint16_t, unsigned long long, 8>;
        }
        else if ( curr_tile_size == 16 )
        {
            distance_func_ptr = &l1_distance<uint16_t, unsigned long long, 16>;
        }
    }
    else if ( distance_type == 2 ) // l2 distance
    {
        if ( curr_tile_size == 8 )
        {
            distance_func_ptr = &l2_distance<uint16_t, unsigned long long, 8>;
        }
        else if ( curr_tile_size == 16 )
        {
            distance_func_ptr = &l2_distance<uint16_t, unsigned long long, 16>;
        }
    }
    // Every level share the same upsample function
    // Template arguments are <pyramid scale factor, tile size ratio, tile size>,
    // selected from the runtime values below
    void (*upsample_alignment_func_ptr)(const std::vector<std::vector<std::pair<int, int>>>&, \
        std::vector<std::vector<std::pair<int, int>>>&, \
        int, int, const cv::Mat&, const cv::Mat&, bool) = nullptr;
    if ( scale_factor_prev_curr == 2 )
    {
        if ( curr_tile_size / prev_tile_size == 2 )
        {
            if ( curr_tile_size == 8 )
            {
                upsample_alignment_func_ptr = &build_upsampled_prev_aligement<2, 2, 8>;
            }
            else if ( curr_tile_size == 16 )
            {
                upsample_alignment_func_ptr = &build_upsampled_prev_aligement<2, 2, 16>;
            }
            else
            {
                #ifdef __ANDROID__
                return;
                #else
                throw std::runtime_error("Something wrong with upsampling function setting\n");
                #endif
            }
        }
        else if ( curr_tile_size / prev_tile_size == 1 )
        {
            if ( curr_tile_size == 8 )
            {
                upsample_alignment_func_ptr = &build_upsampled_prev_aligement<2, 1, 8>;
            }
            else if ( curr_tile_size == 16 )
            {
                upsample_alignment_func_ptr = &build_upsampled_prev_aligement<2, 1, 16>;
            }
            else
            {
                #ifdef __ANDROID__
                return;
                #else
                throw std::runtime_error("Something wrong with upsampling function setting\n");
                #endif
            }
        }
        else
        {
            #ifdef __ANDROID__
            return;
            #else
            throw std::runtime_error("Something wrong with upsampling function setting\n");
            #endif
        }
    }
    else if ( scale_factor_prev_curr == 4 )
    {
        if ( curr_tile_size / prev_tile_size == 2 )
        {
            if ( curr_tile_size == 8 )
            {
                upsample_alignment_func_ptr = &build_upsampled_prev_aligement<4, 2, 8>;
            }
            else if ( curr_tile_size == 16 )
            {
                upsample_alignment_func_ptr = &build_upsampled_prev_aligement<4, 2, 16>;
            }
            else
            {
                #ifdef __ANDROID__
                return;
                #else
                throw std::runtime_error("Something wrong with upsampling function setting\n");
                #endif
            }
        }
        else if ( curr_tile_size / prev_tile_size == 1 )
        {
            if ( curr_tile_size == 8 )
            {
                upsample_alignment_func_ptr = &build_upsampled_prev_aligement<4, 1, 8>;
            }
            else if ( curr_tile_size == 16 )
            {
                upsample_alignment_func_ptr = &build_upsampled_prev_aligement<4, 1, 16>;
            }
            else
            {
                #ifdef __ANDROID__
                return;
                #else
                throw std::runtime_error("Something wrong with upsampling function setting\n");
                #endif
            }
        }
        else
        {
            #ifdef __ANDROID__
            return;
            #else
            throw std::runtime_error("Something wrong with upsampling function setting\n");
            #endif
        }
    }
    // Function to extract reference image tile for memory cache
    cv::Mat (*extract_ref_img_tile)(const cv::Mat&, int, int) = nullptr;
    if ( curr_tile_size == 8 )
    {
        extract_ref_img_tile = &extract_img_tile<uint16_t, 8>;
    }
    else if ( curr_tile_size == 16 )
    {
        extract_ref_img_tile = &extract_img_tile<uint16_t, 16>;
    }
    // Function to extract search image tile for memory cache
    // Search window side = tile size + 2 * search radius
    cv::Mat (*extract_alt_img_search)(const cv::Mat&, int, int) = nullptr;
    if ( curr_tile_size == 8 )
    {
        if ( search_radiou == 1 )
        {
            extract_alt_img_search = &extract_img_tile<uint16_t, 8+1*2>;
        }
        else if ( search_radiou == 4 )
        {
            extract_alt_img_search = &extract_img_tile<uint16_t, 8+4*2>;
        }
    }
    else if ( curr_tile_size == 16 )
    {
        if ( search_radiou == 1 )
        {
            extract_alt_img_search = &extract_img_tile<uint16_t, 16+1*2>;
        }
        else if ( search_radiou == 4 )
        {
            extract_alt_img_search = &extract_img_tile<uint16_t, 16+4*2>;
        }
    }
    // Tiles overlap by half a tile in each dimension
    int num_tiles_h = ref_img.size().height / (curr_tile_size / 2) - 1;
    int num_tiles_w = ref_img.size().width / (curr_tile_size / 2 ) - 1;
    /* Upsample pervious layer alignment */
    std::vector<std::vector<std::pair<int, int>>> upsampled_prev_aligement;
    // Coarsest level
    // prev_alignment is invalid / empty, construct alignment as (0,0)
    if ( prev_tile_size == -1 )
    {
        upsampled_prev_aligement.resize( num_tiles_h, \
            std::vector<std::pair<int, int>>( num_tiles_w, std::pair<int, int>(0, 0) ) );
    }
    // Upsample previous level alignment
    else
    {
        upsample_alignment_func_ptr( prev_aligement, upsampled_prev_aligement, \
            num_tiles_h, num_tiles_w, ref_img, alt_img, false );
        // printf("\n!!!!!Upsampled previous alignment\n");
        // for ( int tile_row = 0; tile_row < int(upsampled_prev_aligement.size()); tile_row++ )
        // {
        //     for ( int tile_col = 0; tile_col < int(upsampled_prev_aligement.at(0).size()); tile_col++ )
        //     {
        //         const auto tile_start = upsampled_prev_aligement.at( tile_row ).at( tile_col );
        //         printf("up tile (%d, %d) -> start idx (%d, %d)\n", \
        //             tile_row, tile_col, tile_start.first, tile_start.second);
        //     }
        // }
    }
    #ifndef NDEBUG
    printf("%s::%s start: \n", __FILE__, __func__ );
    printf("    scale_factor_prev_curr %d, tile_size %d, prev_tile_size %d, search_radiou %d, distance L%d, \n", \
        scale_factor_prev_curr, curr_tile_size, prev_tile_size, search_radiou, distance_type );
    printf("    ref img size h=%d w=%d, alt img size h=%d w=%d, \n", \
        ref_img.size().height, ref_img.size().width, alt_img.size().height, alt_img.size().width );
    printf("    num tile h (upsampled) %d, num tile w (upsampled) %d\n", num_tiles_h, num_tiles_w);
    #endif
    // allocate memory for current alignment
    curr_alignment.resize( num_tiles_h, std::vector<std::pair<int, int>>( num_tiles_w, std::pair<int, int>(0, 0) ) );
    /* Pad alternative image */
    // Border filled with UINT16 max so padded pixels score poorly in the search
    cv::Mat alt_img_pad;
    cv::copyMakeBorder( alt_img, \
        alt_img_pad, \
        search_radiou, search_radiou, search_radiou, search_radiou, \
        cv::BORDER_CONSTANT, cv::Scalar( UINT_LEAST16_MAX ) );
    // printf("Reference image h=%d, w=%d: \n", ref_img.size().height, ref_img.size().width );
    // print_img<uint16_t>( ref_img );
    // printf("Alter image pad h=%d, w=%d: \n", alt_img_pad.size().height, alt_img_pad.size().width );
    // print_img<uint16_t>( alt_img_pad );
    // printf("!! enlarged tile size %d\n", curr_tile_size + 2 * search_radiou );
    int alt_tile_row_idx_max = alt_img_pad.size().height - ( curr_tile_size + 2 * search_radiou );
    int alt_tile_col_idx_max = alt_img_pad.size().width - ( curr_tile_size + 2 * search_radiou );
    // Delete below distance vector, this is for debug only
    // NOTE(review): stores an unsigned long long min distance in uint16_t --
    // values are truncated; debug-only per the comment above
    std::vector<std::vector<uint16_t>> distances( num_tiles_h, std::vector<uint16_t>( num_tiles_w, 0 ));
    /* Iterate through all reference tile & compute distance */
    #pragma omp parallel for collapse(2)
    for ( int ref_tile_row_i = 0; ref_tile_row_i < num_tiles_h; ref_tile_row_i++ )
    {
        for ( int ref_tile_col_i = 0; ref_tile_col_i < num_tiles_w; ref_tile_col_i++ )
        {
            // Upper left index of reference tile
            int ref_tile_row_start_idx_i = ref_tile_row_i * curr_tile_size / 2;
            int ref_tile_col_start_idx_i = ref_tile_col_i * curr_tile_size / 2;
            // printf("\nRef img tile [%d, %d] -> start idx [%d, %d] (row, col)\n", \
            //     ref_tile_row_i, ref_tile_col_i, ref_tile_row_start_idx_i, ref_tile_col_start_idx_i );
            // printf("\nRef img tile [%d, %d]\n", ref_tile_row_i, ref_tile_col_i );
            // print_tile<uint16_t>( ref_img, curr_tile_size, ref_tile_row_start_idx_i, ref_tile_col_start_idx_i );
            // Upsampled alignment at this tile
            // Alignment are relative displacement in pixel value
            int prev_alignment_row_i = upsampled_prev_aligement.at( ref_tile_row_i ).at( ref_tile_col_i ).first;
            int prev_alignment_col_i = upsampled_prev_aligement.at( ref_tile_row_i ).at( ref_tile_col_i ).second;
            // Alternative image tile start idx
            int alt_tile_row_start_idx_i = ref_tile_row_start_idx_i + prev_alignment_row_i;
            int alt_tile_col_start_idx_i = ref_tile_col_start_idx_i + prev_alignment_col_i;
            // Ensure alternative image tile within range (clamp to the padded
            // image bounds so extract_alt_img_search never reads out of range)
            if ( alt_tile_row_start_idx_i < 0 )
                alt_tile_row_start_idx_i = 0;
            if ( alt_tile_col_start_idx_i < 0 )
                alt_tile_col_start_idx_i = 0;
            if ( alt_tile_row_start_idx_i > alt_tile_row_idx_max )
            {
                // int before = alt_tile_row_start_idx_i;
                alt_tile_row_start_idx_i = alt_tile_row_idx_max;
                // printf("@@ change start x from %d to %d\n", before, alt_tile_row_idx_max);
            }
            if ( alt_tile_col_start_idx_i > alt_tile_col_idx_max )
            {
                // int before = alt_tile_col_start_idx_i;
                alt_tile_col_start_idx_i = alt_tile_col_idx_max;
                // printf("@@ change start y from %d to %d\n", before, alt_tile_col_idx_max );
            }
            // Explicitly caching reference image tile
            cv::Mat ref_img_tile_i = extract_ref_img_tile( ref_img, ref_tile_row_start_idx_i, ref_tile_col_start_idx_i );
            cv::Mat alt_img_search_i = extract_alt_img_search( alt_img_pad, alt_tile_row_start_idx_i, alt_tile_col_start_idx_i );
            // Because alternative image is padded with search radious.
            // Using same coordinate with reference image will automatically considered search radious * 2
            // printf("Alt image tile [%d, %d]-> start idx [%d, %d]\n", \
            //     ref_tile_row_i, ref_tile_col_i, alt_tile_row_start_idx_i, alt_tile_col_start_idx_i );
            // printf("\nAlt image tile [%d, %d]\n", ref_tile_row_i, ref_tile_col_i );
            // print_tile<uint16_t>( alt_img_pad, curr_tile_size + 2 * search_radiou, alt_tile_row_start_idx_i, alt_tile_col_start_idx_i );
            // Search based on L1/L2 distance
            // NOTE(review): ULONG_LONG_MAX is a non-standard (glibc) macro;
            // the standard spelling is ULLONG_MAX from <climits> -- consider
            // switching for portability
            unsigned long long min_distance_i = ULONG_LONG_MAX;
            int min_distance_row_i = -1;
            int min_distance_col_i = -1;
            for ( int search_row_j = 0; search_row_j < ( search_radiou * 2 + 1 ); search_row_j++ )
            {
                for ( int search_col_j = 0; search_col_j < ( search_radiou * 2 + 1 ); search_col_j++ )
                {
                    // printf("\n--->tile at [%d, %d] search (%d, %d)\n", \
                    //     ref_tile_row_i, ref_tile_col_i, search_row_j - search_radiou, search_col_j - search_radiou );
                    // unsigned long long distance_j = distance_func_ptr( ref_img, alt_img_pad, \
                    //     ref_tile_row_start_idx_i, ref_tile_col_start_idx_i, \
                    //     alt_tile_row_start_idx_i + search_row_j, alt_tile_col_start_idx_i + search_col_j );
                    // unsigned long long distance_j = distance_func_ptr( ref_img_tile_i, alt_img_pad, \
                    //     0, 0, \
                    //     alt_tile_row_start_idx_i + search_row_j, alt_tile_col_start_idx_i + search_col_j );
                    unsigned long long distance_j = distance_func_ptr( ref_img_tile_i, alt_img_search_i, \
                        0, 0, \
                        search_row_j, search_col_j );
                    // printf("<---tile at [%d, %d] search (%d, %d), new dis %llu, old dis %llu\n", \
                    //     ref_tile_row_i, ref_tile_col_i, search_row_j - search_radiou, search_col_j - search_radiou, distance_j, min_distance_i );
                    // If this is smaller distance
                    if ( distance_j < min_distance_i )
                    {
                        min_distance_i = distance_j;
                        min_distance_col_i = search_col_j;
                        min_distance_row_i = search_row_j;
                    }
                    // If same value, choose the one closer to the original tile location
                    // (after an update above this compares the candidate with
                    // itself, which is a harmless no-op)
                    if ( distance_j == min_distance_i && min_distance_row_i != -1 && min_distance_col_i != -1 )
                    {
                        int prev_distance_row_2_ref = min_distance_row_i - search_radiou;
                        int prev_distance_col_2_ref = min_distance_col_i - search_radiou;
                        int curr_distance_row_2_ref = search_row_j - search_radiou;
                        int curr_distance_col_2_ref = search_col_j - search_radiou;
                        int prev_distance_2_ref_sqr = prev_distance_row_2_ref * prev_distance_row_2_ref + prev_distance_col_2_ref * prev_distance_col_2_ref;
                        int curr_distance_2_ref_sqr = curr_distance_row_2_ref * curr_distance_row_2_ref + curr_distance_col_2_ref * curr_distance_col_2_ref;
                        // previous min distance idx is farther away from ref tile start location
                        if ( prev_distance_2_ref_sqr > curr_distance_2_ref_sqr )
                        {
                            // printf("@@@ Same distance %d, choose closer one (%d, %d) instead of (%d, %d)\n", \
                            //     distance_j, search_row_j, search_col_j, min_distance_row_i, min_distance_col_i);
                            min_distance_col_i = search_col_j;
                            min_distance_row_i = search_row_j;
                        }
                    }
                }
            }
            // printf("tile at (%d, %d) alignment (%d, %d)\n", \
            //     ref_tile_row_i, ref_tile_col_i, min_distance_row_i, min_distance_col_i );
            // Convert the winning search offset back to an absolute
            // displacement relative to the reference tile
            int alignment_row_i = prev_alignment_row_i + min_distance_row_i - search_radiou;
            int alignment_col_i = prev_alignment_col_i + min_distance_col_i - search_radiou;
            std::pair<int, int> alignment_i( alignment_row_i, alignment_col_i );
            // Add min_distance_i's corresbonding idx as min
            curr_alignment.at( ref_tile_row_i ).at( ref_tile_col_i ) = alignment_i;
            distances.at( ref_tile_row_i ).at( ref_tile_col_i ) = min_distance_i;
        }
    }
    // printf("\n!!!!!Min distance for each tile \n");
    // for ( int tile_row = 0; tile_row < num_tiles_h; tile_row++ )
    // {
    //     for ( int tile_col = 0; tile_col < num_tiles_w; ++tile_col )
    //     {
    //         printf("tile (%d, %d) distance %u\n", \
    //             tile_row, tile_col, distances.at( tile_row).at(tile_col ) );
    //     }
    // }
    // printf("\n!!!!!Alignment at current level\n");
    // for ( int tile_row = 0; tile_row < num_tiles_h; tile_row++ )
    // {
    //     for ( int tile_col = 0; tile_col < num_tiles_w; tile_col++ )
    //     {
    //         const auto tile_start = curr_alignment.at( tile_row ).at( tile_col );
    //         printf("tile (%d, %d) -> start idx (%d, %d)\n", \
    //             tile_row, tile_col, tile_start.first, tile_start.second);
    //     }
    // }
}
// Compute per-image, per-tile alignment of every image in the burst against
// the reference image.
//
// burst_images     : the loaded burst; grayscale_images_pad and
//                    reference_image_idx are read here
// images_alignment : output, indexed [image][tile_row][tile_col] with the
//                    (row, col) pixel displacement of each tile at the
//                    finest pyramid level. The entry for the reference
//                    image itself is left empty.
//
// Uses the class members num_levels, inv_scale_factors, grayimg_tile_sizes,
// grayimg_search_radious and distances (declared in align.h -- their exact
// per-level values are configured there).
void align::process( const hdrplus::burst& burst_images, \
    std::vector<std::vector<std::vector<std::pair<int, int>>>>& images_alignment )
{
    #ifndef NDEBUG
    printf("%s::%s align::process start\n", __FILE__, __func__ ); fflush(stdout);
    #endif
    images_alignment.clear();
    images_alignment.resize( burst_images.num_images );
    // image pyramid per image, per pyramid level
    std::vector<std::vector<cv::Mat>> per_grayimg_pyramid;
    // printf("!!!!! ref bayer padded\n");
    // print_img<uint16_t>( burst_images.bayer_images_pad.at( burst_images.reference_image_idx) );
    // exit(1);
    // printf("!!!!! ref gray padded\n");
    // print_img<uint16_t>( burst_images.grayscale_images_pad.at( burst_images.reference_image_idx) );
    // exit(1);
    per_grayimg_pyramid.resize( burst_images.num_images );
    // Build every image's pyramid independently in parallel
    #pragma omp parallel for
    for ( int img_idx = 0; img_idx < burst_images.num_images; ++img_idx )
    {
        // per_grayimg_pyramid[ img_idx ][ 0 ] is the original image
        // per_grayimg_pyramid[ img_idx ][ 3 ] is the coarsest image
        build_per_grayimg_pyramid( per_grayimg_pyramid.at( img_idx ), \
            burst_images.grayscale_images_pad.at( img_idx ), \
            this->inv_scale_factors );
    }
    // #ifndef NDEBUG
    // printf("%s::%s build image pyramid of size : ", __FILE__, __func__ );
    // for ( int level_i = 0; level_i < num_levels; ++level_i )
    // {
    //     printf("(%d, %d) ", per_grayimg_pyramid[ 0 ][ level_i ].size().height,
    //         per_grayimg_pyramid[ 0 ][ level_i ].size().width );
    // }
    // printf("\n"); fflush(stdout);
    // #endif
    // print image pyramid
    // for ( int level_i; level_i < num_levels; ++level_i )
    // {
    //     printf("\n\n!!!!! ref gray pyramid level %d img : \n" , level_i );
    //     print_img<uint16_t>( per_grayimg_pyramid[ burst_images.reference_image_idx ][ level_i ] );
    // }
    // exit(-1);
    // Align every image
    const std::vector<cv::Mat>& ref_grayimg_pyramid = per_grayimg_pyramid[ burst_images.reference_image_idx ];
    std::vector<std::vector<std::pair<int, int>>> curr_alignment;
    std::vector<std::vector<std::pair<int, int>>> prev_alignment;
    for ( int img_idx = 0; img_idx < burst_images.num_images; ++img_idx )
    {
        // Do not align with reference image
        if ( img_idx == burst_images.reference_image_idx )
            continue;
        const std::vector<cv::Mat>& alt_grayimg_pyramid = per_grayimg_pyramid[ img_idx ];
        // Align every level from coarse to grain
        // level 0 : finest level, the original image
        // level 3 : coarsest level
        curr_alignment.clear();
        prev_alignment.clear();
        for ( int level_i = num_levels - 1; level_i >= 0; level_i-- ) // 3,2,1,0
        {
            // make curr alignment as previous alignment (swap avoids copying)
            prev_alignment.swap( curr_alignment );
            curr_alignment.clear();
            // printf("\n\n########################align level %d\n", level_i );
            align_image_level(
                ref_grayimg_pyramid[ level_i ],    // reference image at current level
                alt_grayimg_pyramid[ level_i ],    // alternative image at current level
                prev_alignment,                    // previous layer alignment
                curr_alignment,                    // current layer alignment
                ( level_i == ( num_levels - 1 ) ? -1 : inv_scale_factors[ level_i + 1 ] ), // scale factor between previous layer and current layer. -1 if current layer is the coarsest layer, [-1, 4, 4, 2]
                grayimg_tile_sizes[ level_i ],     // current level tile size
                ( level_i == ( num_levels - 1 ) ? -1 : grayimg_tile_sizes[ level_i + 1 ] ), // previous level tile size
                grayimg_search_radious[ level_i ], // search radious
                distances[ level_i ] );            // L1/L2 distance
            // printf("@@@Alignment at level %d is h=%d, w=%d", level_i, curr_alignment.size(), curr_alignment.at(0).size() );
        } // for pyramid level
        // Alignment at grayscale image
        images_alignment.at( img_idx ).swap( curr_alignment );
        // printf("\n!!!!!Alternative Image Alignment\n");
        // for ( int tile_row = 0; tile_row < images_alignment.at( img_idx ).size(); tile_row++ )
        // {
        //     for ( int tile_col = 0; tile_col < images_alignment.at( img_idx ).at(0).size(); tile_col++ )
        //     {
        //         const auto tile_start = images_alignment.at( img_idx ).at( tile_row ).at( tile_col );
        //         printf("tile (%d, %d) -> start idx (%d, %d)\n", \
        //             tile_row, tile_col, tile_start.first, tile_start.second);
        //     }
        // }
    } // for alternative image
    // Release the pyramids explicitly; they are no longer needed
    per_grayimg_pyramid.clear();
}
} // namespace hdrplus

@ -0,0 +1,234 @@
#include <string>
#include <cstdio>
#include <iostream>
#include <utility> // std::pair, std::makr_pair
#include <memory> // std::shared_ptr
#include <stdexcept> // std::runtime_error
#include <opencv2/opencv.hpp> // all opencv header
#include <libraw/libraw.h>
#include <exiv2/exiv2.hpp> // exiv2
#include "hdrplus/bayer_image.h"
#include "hdrplus/utility.h" // box_filter_kxk
namespace hdrplus
{
// Load a RAW file from disk: decode the bayer data with LibRaw, read
// white level / black levels / ISO from its EXIF tags with Exiv2, and
// build the raw + 2x2-box-filtered grayscale cv::Mat representations.
//
// bayer_image_path : path to the RAW file (e.g. DNG)
//
// Desktop builds throw std::runtime_error when LibRaw fails to open or
// unpack the file; Android builds return early, leaving the object only
// partially initialized -- callers must account for that.
bayer_image::bayer_image( const std::string& bayer_image_path )
{
    libraw_processor = std::make_shared<LibRaw>();
    // Open RAW image file
    int return_code;
    if ( ( return_code = libraw_processor->open_file( bayer_image_path.c_str() ) ) != LIBRAW_SUCCESS )
    {
        libraw_processor->recycle();
        #ifdef __ANDROID__
        return;
        #else
        throw std::runtime_error("Error opening file " + bayer_image_path + " " + libraw_strerror( return_code ));
        #endif
    }
    // Unpack the raw image
    if ( ( return_code = libraw_processor->unpack() ) != LIBRAW_SUCCESS )
    {
        #ifdef __ANDROID__
        return;
        #else
        throw std::runtime_error("Error unpack file " + bayer_image_path + " " + libraw_strerror( return_code ));
        #endif
    }
    // Get image basic info
    width = int( libraw_processor->imgdata.rawdata.sizes.raw_width );
    height = int( libraw_processor->imgdata.rawdata.sizes.raw_height );
    // Read exif tags
    // NOTE(review): Exiv2::Image::AutoPtr is deprecated in newer Exiv2
    // releases (replaced by UniquePtr) -- confirm the pinned Exiv2 version
    Exiv2::Image::AutoPtr image = Exiv2::ImageFactory::open(bayer_image_path);
    assert(image.get() != 0);
    image->readMetadata();
    Exiv2::ExifData &exifData = image->exifData();
    if (exifData.empty()) {
        // Missing EXIF is only warned about; the lookups below would then
        // insert empty entries rather than fail
        std::string error(bayer_image_path);
        error += ": No Exif data found in the file";
        std::cout << error << std::endl;
    }
    white_level = exifData["Exif.Image.WhiteLevel"].toLong();
    // BlackLevel carries one value per bayer quadrant (index 0..3)
    black_level_per_channel.resize( 4 );
    black_level_per_channel.at(0) = exifData["Exif.Image.BlackLevel"].toLong(0);
    black_level_per_channel.at(1) = exifData["Exif.Image.BlackLevel"].toLong(1);
    black_level_per_channel.at(2) = exifData["Exif.Image.BlackLevel"].toLong(2);
    black_level_per_channel.at(3) = exifData["Exif.Image.BlackLevel"].toLong(3);
    iso = exifData["Exif.Image.ISOSpeedRatings"].toLong();
    // Create CV mat
    // https://answers.opencv.org/question/105972/de-bayering-a-cr2-image/
    // https://www.libraw.org/node/2141
    raw_image = cv::Mat( height, width, CV_16U, libraw_processor->imgdata.rawdata.raw_image ).clone(); // changed the order of width and height
    // 2x2 box filter
    grayscale_image = box_filter_kxk<uint16_t, 2>( raw_image );
    #ifndef NDEBUG
    // NOTE(review): %zu expects size_t and %.3f expects double -- verify
    // these specifiers match the member types declared in bayer_image.h
    printf("%s::%s read bayer image %s with\n    width %zu\n    height %zu\n    iso %.3f\n    white level %d\n    black level %d %d %d %d\n", \
        __FILE__, __func__, bayer_image_path.c_str(), width, height, iso, white_level, \
        black_level_per_channel[0], black_level_per_channel[1], black_level_per_channel[2], black_level_per_channel[3] );
    fflush( stdout );
    #endif
}
// Construct a bayer_image from an in-memory RAW (DNG) file.
// Same pipeline as the path-based constructor: LibRaw decode, Exiv2
// metadata, then a 2x2 box-filtered grayscale copy for alignment.
bayer_image::bayer_image( const std::vector<uint8_t>& bayer_image_content )
{
    libraw_processor = std::make_shared<LibRaw>();
    // Open RAW image from the in-memory buffer.
    // .data() is well-defined even for an empty vector, unlike &v[0].
    int return_code;
    if ( ( return_code = libraw_processor->open_buffer( (void *)( bayer_image_content.data() ), bayer_image_content.size() ) ) != LIBRAW_SUCCESS )
    {
        libraw_processor->recycle();
#ifdef __ANDROID__
        // On Android, fail quietly and leave the object empty.
        return;
#else
        // Bug fix: this constructor has no `bayer_image_path` in scope, so the
        // previous message could not compile on non-Android builds.
        throw std::runtime_error( std::string( "Error opening buffer " ) + libraw_strerror( return_code ) );
#endif
    }
    // Unpack the raw image
    if ( ( return_code = libraw_processor->unpack() ) != LIBRAW_SUCCESS )
    {
#ifdef __ANDROID__
        return;
#else
        throw std::runtime_error( std::string( "Error unpack buffer " ) + libraw_strerror( return_code ) );
#endif
    }
    // Get image basic info (raw sensor dimensions)
    width = int( libraw_processor->imgdata.rawdata.sizes.raw_width );
    height = int( libraw_processor->imgdata.rawdata.sizes.raw_height );
    // Read exif tags from the same in-memory buffer
    Exiv2::Image::AutoPtr image = Exiv2::ImageFactory::open(bayer_image_content.data(), bayer_image_content.size());
    assert(image.get() != 0);
    image->readMetadata();
    Exiv2::ExifData &exifData = image->exifData();
    if (exifData.empty()) {
        // Missing EXIF is reported but not fatal.
        std::string error = "No Exif data found in the file";
        std::cout << error << std::endl;
    }
    // Sensor calibration values used by the normalization / noise model.
    white_level = exifData["Exif.Image.WhiteLevel"].toLong();
    black_level_per_channel.resize( 4 );
    black_level_per_channel.at(0) = exifData["Exif.Image.BlackLevel"].toLong(0);
    black_level_per_channel.at(1) = exifData["Exif.Image.BlackLevel"].toLong(1);
    black_level_per_channel.at(2) = exifData["Exif.Image.BlackLevel"].toLong(2);
    black_level_per_channel.at(3) = exifData["Exif.Image.BlackLevel"].toLong(3);
    iso = exifData["Exif.Image.ISOSpeedRatings"].toLong();
    // Wrap LibRaw's buffer in a cv::Mat and deep-copy so the Mat owns its data.
    // https://answers.opencv.org/question/105972/de-bayering-a-cr2-image/
    // https://www.libraw.org/node/2141
    raw_image = cv::Mat( height, width, CV_16U, libraw_processor->imgdata.rawdata.raw_image ).clone(); // changed the order of width and height
    // 2x2 box filter: half-resolution grayscale used for tile alignment
    grayscale_image = box_filter_kxk<uint16_t, 2>( raw_image );
#ifndef NDEBUG
    printf("%s::%s read bayer image with\n width %zu\n height %zu\n iso %.3f\n white level %d\n black level %d %d %d %d\n", \
        __FILE__, __func__, width, height, iso, white_level, \
        black_level_per_channel[0], black_level_per_channel[1], black_level_per_channel[2], black_level_per_channel[3] );
    fflush( stdout );
#endif
}
// Construct a bayer_image from a MemFile holding a RAW (DNG) file.
// Same pipeline as the other constructors, reading from file->content.
bayer_image::bayer_image( std::shared_ptr<MemFile> bayer_image_file )
{
    libraw_processor = std::make_shared<LibRaw>();
    // Open RAW image from the MemFile's buffer.
    int return_code;
    {
        std::vector<uint8_t>& fileData = bayer_image_file->content;
        // .data() is well-defined even for an empty vector, unlike &v[0].
        if ( ( return_code = libraw_processor->open_buffer( (void *)( fileData.data() ), fileData.size() ) ) != LIBRAW_SUCCESS )
        {
            libraw_processor->recycle();
#ifdef __ANDROID__
            // On Android, fail quietly and leave the object empty.
            return;
#else
            // Bug fix: this constructor has no `bayer_image_path` in scope, so
            // the previous message could not compile on non-Android builds.
            throw std::runtime_error( std::string( "Error opening buffer " ) + libraw_strerror( return_code ) );
#endif
        }
    }
    // Unpack the raw image
    if ( ( return_code = libraw_processor->unpack() ) != LIBRAW_SUCCESS )
    {
#ifdef __ANDROID__
        return;
#else
        throw std::runtime_error( std::string( "Error unpack buffer " ) + libraw_strerror( return_code ) );
#endif
    }
    // Get image basic info (raw sensor dimensions)
    width = int( libraw_processor->imgdata.rawdata.sizes.raw_width );
    height = int( libraw_processor->imgdata.rawdata.sizes.raw_height );
    // Read exif tags from the same buffer
    Exiv2::Image::AutoPtr image = Exiv2::ImageFactory::open(bayer_image_file->content.data(), bayer_image_file->content.size());
    assert(image.get() != 0);
    image->readMetadata();
    Exiv2::ExifData &exifData = image->exifData();
    if (exifData.empty()) {
        // Missing EXIF is reported but not fatal.
        std::string error = "No Exif data found in the file";
        std::cout << error << std::endl;
    }
    // Sensor calibration values used by the normalization / noise model.
    white_level = exifData["Exif.Image.WhiteLevel"].toLong();
    black_level_per_channel.resize( 4 );
    black_level_per_channel.at(0) = exifData["Exif.Image.BlackLevel"].toLong(0);
    black_level_per_channel.at(1) = exifData["Exif.Image.BlackLevel"].toLong(1);
    black_level_per_channel.at(2) = exifData["Exif.Image.BlackLevel"].toLong(2);
    black_level_per_channel.at(3) = exifData["Exif.Image.BlackLevel"].toLong(3);
    iso = exifData["Exif.Image.ISOSpeedRatings"].toLong();
    // Wrap LibRaw's buffer in a cv::Mat and deep-copy so the Mat owns its data.
    // https://answers.opencv.org/question/105972/de-bayering-a-cr2-image/
    // https://www.libraw.org/node/2141
    raw_image = cv::Mat( height, width, CV_16U, libraw_processor->imgdata.rawdata.raw_image ).clone(); // changed the order of width and height
    // 2x2 box filter: half-resolution grayscale used for tile alignment
    grayscale_image = box_filter_kxk<uint16_t, 2>( raw_image );
#ifndef NDEBUG
    printf("%s::%s read bayer image with\n width %zu\n height %zu\n iso %.3f\n white level %d\n black level %d %d %d %d\n", \
        __FILE__, __func__, width, height, iso, white_level, \
        black_level_per_channel[0], black_level_per_channel[1], black_level_per_channel[2], black_level_per_channel[3] );
    fflush( stdout );
#endif
}
std::pair<double, double> bayer_image::get_noise_params() const
{
// Set ISO to 100 if not positive
double iso_ = iso <= 0 ? 100 : iso;
// Calculate shot noise and read noise parameters w.r.t ISO 100
double lambda_shot_p = iso_ / 100.0f * baseline_lambda_shot;
double lambda_read_p = (iso_ / 100.0f) * (iso_ / 100.0f) * baseline_lambda_read;
double black_level = (black_level_per_channel[0] + \
black_level_per_channel[1] + \
black_level_per_channel[2] + \
black_level_per_channel[3]) / 4.0;
// Rescale shot and read noise to normal range
double lambda_shot = lambda_shot_p * (white_level - black_level);
double lambda_read = lambda_read_p * (white_level - black_level) * (white_level - black_level);
// return pair
return std::make_pair(lambda_shot, lambda_read);
}
}

@ -0,0 +1,321 @@
#include <cstdio>
#include <string>
#include <omp.h>
#include <opencv2/opencv.hpp> // all opencv header
#include "hdrplus/burst.h"
#include "hdrplus/utility.h"
namespace hdrplus
{
// Build a burst from every *.dng file in `burst_path`, using the image whose
// path equals `reference_image_path` (absolute, matching a globbed path) as
// the alignment reference. Each frame is padded with a reflected border so
// the padded size is a whole multiple of the bayer tile size.
burst::burst( const std::string& burst_path, const std::string& reference_image_path )
{
    std::vector<cv::String> bayer_image_paths;
    // Search the input directory for all input image paths.
    // Bug fix: guard against an empty path before reading its last char
    // (std::string::at would throw std::out_of_range).
    if ( !burst_path.empty() && burst_path.at( burst_path.size() - 1 ) == '/')
        cv::glob( burst_path + "*.dng", bayer_image_paths, false );
    else
        cv::glob( burst_path + "/*.dng", bayer_image_paths, false );
#ifndef NDEBUG
    for ( const auto& bayer_img_path_i : bayer_image_paths )
    {
        printf("img i path %s\n", bayer_img_path_i.c_str()); fflush(stdout);
    }
    printf("ref img path %s\n", reference_image_path.c_str()); fflush(stdout);
#endif
    // Number of images
    num_images = bayer_image_paths.size();
    // Locate the reference image among the glob results.
    reference_image_idx = -1;
    for ( size_t i = 0; i < bayer_image_paths.size(); ++i )
    {
        if ( bayer_image_paths[ i ] == reference_image_path )
        {
            // Explicit cast avoids the implicit size_t -> int narrowing.
            reference_image_idx = static_cast<int>( i );
        }
    }
    if ( reference_image_idx == -1 )
    {
        // NOTE(review): returns silently with an empty burst; callers are
        // presumably expected to check reference_image_idx — verify.
        return;
        // throw std::runtime_error("Error unable to locate reference image " + reference_image_path );
    }
#ifndef NDEBUG
    for ( const auto& bayer_image_path_i : bayer_image_paths )
    {
        printf("%s::%s Find image %s\n", \
            __FILE__, __func__, bayer_image_path_i.c_str());
    }
    printf("%s::%s reference image idx %d\n", \
        __FILE__, __func__, reference_image_idx );
#endif
    // Decode every source bayer image (each also builds its 2x2-downsampled
    // grayscale internally).
    for ( const auto& bayer_image_path_i : bayer_image_paths )
    {
        bayer_images.emplace_back( bayer_image_path_i );
    }
    // Pad information: a half-tile border on every side, plus extra on the
    // bottom/right so the padded size is a whole multiple of the tile size.
    int tile_size_bayer = 32;
    int padding_top = tile_size_bayer / 2;
    int padding_bottom = tile_size_bayer / 2 + \
        ( (bayer_images[ 0 ].height % tile_size_bayer) == 0 ? \
        0 : tile_size_bayer - bayer_images[ 0 ].height % tile_size_bayer );
    int padding_left = tile_size_bayer / 2;
    int padding_right = tile_size_bayer / 2 + \
        ( (bayer_images[ 0 ].width % tile_size_bayer) == 0 ? \
        0 : tile_size_bayer - bayer_images[ 0 ].width % tile_size_bayer );
    padding_info_bayer = std::vector<int>{ padding_top, padding_bottom, padding_left, padding_right };
    // Pad each bayer image with a reflected border; also keep the padded
    // 2x2-downsampled grayscale used by alignment.
    for ( const auto& bayer_image_i : bayer_images )
    {
        cv::Mat bayer_image_pad_i;
        cv::copyMakeBorder( bayer_image_i.raw_image, \
            bayer_image_pad_i, \
            padding_top, padding_bottom, padding_left, padding_right, \
            cv::BORDER_REFLECT );
        // cv::Mat use internal reference count
        bayer_images_pad.emplace_back( bayer_image_pad_i );
        grayscale_images_pad.emplace_back( box_filter_kxk<uint16_t, 2>( bayer_image_pad_i ) );
    }
#ifndef NDEBUG
    printf("%s::%s Pad bayer image from (%d, %d) -> (%d, %d)\n", \
        __FILE__, __func__, \
        bayer_images[ 0 ].height, \
        bayer_images[ 0 ].width, \
        bayer_images_pad[ 0 ].size().height, \
        bayer_images_pad[ 0 ].size().width );
    printf("%s::%s pad top %d, bottom %d, left %d, right %d\n", \
        __FILE__, __func__, \
        padding_top, padding_bottom, padding_left, padding_right );
#endif
}
// Build a burst from an explicit list of image paths; `reference_image_index`
// selects the alignment reference within `bayer_image_paths`.
burst::burst( const std::vector<std::string>& bayer_image_paths, int reference_image_index )
{
    // Number of images
    num_images = bayer_image_paths.size();
    // Validate the requested reference index.
    reference_image_idx = -1;
    // Bug fix: cast size() to int so the comparison is signed/signed
    // (int < size_t promotes the int to unsigned).
    if ( reference_image_index >= 0 && reference_image_index < static_cast<int>( bayer_image_paths.size() ) )
    {
        reference_image_idx = reference_image_index;
    }
    if ( reference_image_idx == -1 )
    {
        // NOTE(review): returns silently with an empty burst; callers are
        // presumably expected to check reference_image_idx — verify.
        return;
        // throw std::runtime_error("Error reference image index is out of range " );
    }
#ifndef NDEBUG
    for ( const auto& bayer_image_path_i : bayer_image_paths )
    {
        printf("%s::%s Find image %s\n", \
            __FILE__, __func__, bayer_image_path_i.c_str());
    }
    printf("%s::%s reference image idx %d\n", \
        __FILE__, __func__, reference_image_idx );
#endif
    // Decode every source bayer image (each also builds its 2x2-downsampled
    // grayscale internally).
    for ( const auto& bayer_image_path_i : bayer_image_paths )
    {
        bayer_images.emplace_back( bayer_image_path_i );
    }
    // Pad information: a half-tile border on every side, plus extra on the
    // bottom/right so the padded size is a whole multiple of the tile size.
    int tile_size_bayer = 32;
    int padding_top = tile_size_bayer / 2;
    int padding_bottom = tile_size_bayer / 2 + \
        ( (bayer_images[ 0 ].height % tile_size_bayer) == 0 ? \
        0 : tile_size_bayer - bayer_images[ 0 ].height % tile_size_bayer );
    int padding_left = tile_size_bayer / 2;
    int padding_right = tile_size_bayer / 2 + \
        ( (bayer_images[ 0 ].width % tile_size_bayer) == 0 ? \
        0 : tile_size_bayer - bayer_images[ 0 ].width % tile_size_bayer );
    padding_info_bayer = std::vector<int>{ padding_top, padding_bottom, padding_left, padding_right };
    // Pad each bayer image with a reflected border; also keep the padded
    // 2x2-downsampled grayscale used by alignment.
    for ( const auto& bayer_image_i : bayer_images )
    {
        cv::Mat bayer_image_pad_i;
        cv::copyMakeBorder( bayer_image_i.raw_image, \
            bayer_image_pad_i, \
            padding_top, padding_bottom, padding_left, padding_right, \
            cv::BORDER_REFLECT );
        // cv::Mat use internal reference count
        bayer_images_pad.emplace_back( bayer_image_pad_i );
        grayscale_images_pad.emplace_back( box_filter_kxk<uint16_t, 2>( bayer_image_pad_i ) );
    }
#ifndef NDEBUG
    printf("%s::%s Pad bayer image from (%d, %d) -> (%d, %d)\n", \
        __FILE__, __func__, \
        bayer_images[ 0 ].height, \
        bayer_images[ 0 ].width, \
        bayer_images_pad[ 0 ].size().height, \
        bayer_images_pad[ 0 ].size().width );
    printf("%s::%s pad top %d, bottom %d, left %d, right %d\n", \
        __FILE__, __func__, \
        padding_top, padding_bottom, padding_left, padding_right );
#endif
}
// Build a burst from in-memory RAW file contents; `reference_image_index`
// selects the alignment reference within `bayer_image_contents`.
burst::burst( const std::vector<std::vector<uint8_t> >& bayer_image_contents, int reference_image_index )
{
    // Number of images
    num_images = bayer_image_contents.size();
    // Validate the requested reference index.
    reference_image_idx = -1;
    // Bug fix: cast size() to int so the comparison is signed/signed.
    if ( reference_image_index >= 0 && reference_image_index < static_cast<int>( bayer_image_contents.size() ) )
    {
        reference_image_idx = reference_image_index;
    }
    if ( reference_image_idx == -1 )
    {
        // NOTE(review): returns silently with an empty burst; callers are
        // presumably expected to check reference_image_idx — verify.
        return;
        // throw std::runtime_error("Error reference image index is out of range " );
    }
#ifndef NDEBUG
    printf("%s::%s reference image idx %d\n", \
        __FILE__, __func__, reference_image_idx );
#endif
    // Decode every source bayer image (each also builds its 2x2-downsampled
    // grayscale internally).
    for ( const auto& bayer_image_content : bayer_image_contents )
    {
        bayer_images.emplace_back( bayer_image_content );
    }
    // Pad information: a half-tile border on every side, plus extra on the
    // bottom/right so the padded size is a whole multiple of the tile size.
    int tile_size_bayer = 32;
    int padding_top = tile_size_bayer / 2;
    int padding_bottom = tile_size_bayer / 2 + \
        ( (bayer_images[ 0 ].height % tile_size_bayer) == 0 ? \
        0 : tile_size_bayer - bayer_images[ 0 ].height % tile_size_bayer );
    int padding_left = tile_size_bayer / 2;
    int padding_right = tile_size_bayer / 2 + \
        ( (bayer_images[ 0 ].width % tile_size_bayer) == 0 ? \
        0 : tile_size_bayer - bayer_images[ 0 ].width % tile_size_bayer );
    padding_info_bayer = std::vector<int>{ padding_top, padding_bottom, padding_left, padding_right };
    // Pad each bayer image with a reflected border; also keep the padded
    // 2x2-downsampled grayscale used by alignment.
    for ( const auto& bayer_image_i : bayer_images )
    {
        cv::Mat bayer_image_pad_i;
        cv::copyMakeBorder( bayer_image_i.raw_image, \
            bayer_image_pad_i, \
            padding_top, padding_bottom, padding_left, padding_right, \
            cv::BORDER_REFLECT );
        // cv::Mat use internal reference count
        bayer_images_pad.emplace_back( bayer_image_pad_i );
        grayscale_images_pad.emplace_back( box_filter_kxk<uint16_t, 2>( bayer_image_pad_i ) );
    }
#ifndef NDEBUG
    printf("%s::%s Pad bayer image from (%d, %d) -> (%d, %d)\n", \
        __FILE__, __func__, \
        bayer_images[ 0 ].height, \
        bayer_images[ 0 ].width, \
        bayer_images_pad[ 0 ].size().height, \
        bayer_images_pad[ 0 ].size().width );
    printf("%s::%s pad top %d, bottom %d, left %d, right %d\n", \
        __FILE__, __func__, \
        padding_top, padding_bottom, padding_left, padding_right );
#endif
}
// Build a burst from MemFile handles; `reference_image_index` selects the
// alignment reference within `bayer_image_files`.
burst::burst( const std::vector<std::shared_ptr<MemFile> >& bayer_image_files, int reference_image_index )
{
    // Number of images
    num_images = bayer_image_files.size();
    // Validate the requested reference index.
    reference_image_idx = -1;
    // Bug fix: cast size() to int so the comparison is signed/signed.
    if ( reference_image_index >= 0 && reference_image_index < static_cast<int>( bayer_image_files.size() ) )
    {
        reference_image_idx = reference_image_index;
    }
    if ( reference_image_idx == -1 )
    {
        // NOTE(review): returns silently with an empty burst; callers are
        // presumably expected to check reference_image_idx — verify.
        return;
        // throw std::runtime_error("Error reference image index is out of range " );
    }
#ifndef NDEBUG
    printf("%s::%s reference image idx %d\n", \
        __FILE__, __func__, reference_image_idx );
#endif
    // Decode every source bayer image (each also builds its 2x2-downsampled
    // grayscale internally).
    for ( const auto& bayer_image_file : bayer_image_files )
    {
        bayer_images.emplace_back( bayer_image_file );
    }
    // Pad information: a half-tile border on every side, plus extra on the
    // bottom/right so the padded size is a whole multiple of the tile size.
    int tile_size_bayer = 32;
    int padding_top = tile_size_bayer / 2;
    int padding_bottom = tile_size_bayer / 2 + \
        ( (bayer_images[ 0 ].height % tile_size_bayer) == 0 ? \
        0 : tile_size_bayer - bayer_images[ 0 ].height % tile_size_bayer );
    int padding_left = tile_size_bayer / 2;
    int padding_right = tile_size_bayer / 2 + \
        ( (bayer_images[ 0 ].width % tile_size_bayer) == 0 ? \
        0 : tile_size_bayer - bayer_images[ 0 ].width % tile_size_bayer );
    padding_info_bayer = std::vector<int>{ padding_top, padding_bottom, padding_left, padding_right };
    // Pad each bayer image with a reflected border; also keep the padded
    // 2x2-downsampled grayscale used by alignment.
    for ( const auto& bayer_image_i : bayer_images )
    {
        cv::Mat bayer_image_pad_i;
        cv::copyMakeBorder( bayer_image_i.raw_image, \
            bayer_image_pad_i, \
            padding_top, padding_bottom, padding_left, padding_right, \
            cv::BORDER_REFLECT );
        // cv::Mat use internal reference count
        bayer_images_pad.emplace_back( bayer_image_pad_i );
        grayscale_images_pad.emplace_back( box_filter_kxk<uint16_t, 2>( bayer_image_pad_i ) );
    }
#ifndef NDEBUG
    printf("%s::%s Pad bayer image from (%d, %d) -> (%d, %d)\n", \
        __FILE__, __func__, \
        bayer_images[ 0 ].height, \
        bayer_images[ 0 ].width, \
        bayer_images_pad[ 0 ].size().height, \
        bayer_images_pad[ 0 ].size().width );
    printf("%s::%s pad top %d, bottom %d, left %d, right %d\n", \
        __FILE__, __func__, \
        padding_top, padding_bottom, padding_left, padding_right );
#endif
}
} // namespace hdrplus

@ -0,0 +1,786 @@
#include <algorithm> // std::min
#include <cmath>
#include <iostream>
#include <opencv2/opencv.hpp> // all opencv header
#include "hdrplus/finish.h"
#include "hdrplus/utility.h"
#ifdef __ANDROID__
#define DBG_OUTPUT_ROOT "/sdcard/com.xypower.mpapp/tmp/"
#else
#define DBG_OUTPUT_ROOT ""
#endif
// #include <type_traits>
namespace hdrplus
{
// Rescale a 16-bit image (1 or 3 channels) into 8-bit range in place,
// then convert the Mat element type to 8-bit. Returns the converted Mat.
cv::Mat convert16bit2_8bit_(cv::Mat ans){
    const int mat_type = ans.type();
    if(mat_type == CV_16UC3){
        // Scale each channel of every pixel before the type conversion.
        for(auto it = ans.begin<cv::Vec3w>(); it != ans.end<cv::Vec3w>(); ++it){
            for(int c = 0; c < 3; c++){
                (*it)[c] *= (255.0/USHRT_MAX);
            }
        }
        ans.convertTo(ans, CV_8UC3);
    }else if(mat_type == CV_16UC1){
        u_int16_t* px = (u_int16_t*)ans.data;
        const int total = ans.rows * ans.cols;
        for(int i = 0; i < total; i++){
            px[i] *= (255.0/USHRT_MAX);
        }
        ans.convertTo(ans, CV_8UC1);
    }else{
        std::cout<<"Unsupported Data Type"<<std::endl;
    }
    return ans;
}
// Expand an 8-bit image (1 or 3 channels) into full 16-bit range:
// convert the element type first, then scale every value by 65535/255.
cv::Mat convert8bit2_16bit_(cv::Mat ans){
    const int mat_type = ans.type();
    if(mat_type == CV_8UC3){
        ans.convertTo(ans, CV_16UC3);
        for(auto it = ans.begin<cv::Vec3w>(); it != ans.end<cv::Vec3w>(); ++it){
            for(int c = 0; c < 3; c++){
                (*it)[c] *= (65535.0/255.0);
            }
        }
    }else if(mat_type == CV_8UC1){
        ans.convertTo(ans, CV_16UC1);
        u_int16_t* px = (u_int16_t*)ans.data;
        const int total = ans.rows * ans.cols;
        for(int i = 0; i < total; i++){
            px[i] *= (65535.0/255.0);
        }
    }else{
        std::cout<<"Unsupported Data Type"<<std::endl;
    }
    return ans;
}
// Expand an 8-bit 3-channel image into a 12-bit range stored as CV_16UC3.
// Bug fix: the Mat must be converted to 16-bit BEFORE iterating with
// cv::MatIterator_<cv::Vec3w>; iterating 8UC3 data as Vec3w (16-bit)
// misreads the buffer.
cv::Mat convert8bit2_12bit_(cv::Mat ans){
    ans.convertTo(ans, CV_16UC3);
    cv::MatIterator_<cv::Vec3w> it, end;
    for( it = ans.begin<cv::Vec3w>(), end = ans.end<cv::Vec3w>(); it != end; ++it)
    {
        // NOTE(review): 2048 = 2^11; a full 12-bit range would scale by
        // 4095/255 — confirm the intended target range.
        (*it)[0] *=(2048.0/255.0);
        (*it)[1] *=(2048.0/255.0);
        (*it)[2] *=(2048.0/255.0);
    }
    return ans;
}
// sRGB-style gamma compression for one pixel value in [0, USHRT_MAX]:
// linear segment below `threshold`, power curve above, clamped to range.
uint16_t uGammaCompress_1pix(float x, float threshold,float gainMin,float gainMax,float exponent){
    // Normalize to [0, 1].
    float v = x / USHRT_MAX;
    // Piecewise transfer: linear toe vs. gamma shoulder.
    v = (v <= threshold) ? gainMin * v
                         : gainMax * pow(v, exponent) - gainMax + 1;
    // Clamp to [0, 1] before scaling back to 16-bit.
    if(v < 0){
        v = 0;
    }else if(v > 1){
        v = 1;
    }
    return (uint16_t)(v * USHRT_MAX);
}
// Inverse of uGammaCompress_1pix: maps a gamma-compressed 16-bit value
// back to linear, clamped to [0, USHRT_MAX].
uint16_t uGammaDecompress_1pix(float x, float threshold,float gainMin,float gainMax,float exponent){
    // Normalize to [0, 1].
    float v = x / 65535.0;
    // Piecewise inverse transfer: linear toe vs. gamma shoulder.
    v = (v <= threshold) ? v / gainMin
                         : pow((v + gainMax - 1) / gainMax, exponent);
    // Clamp to [0, 1] before scaling back to 16-bit.
    if(v < 0){
        v = 0;
    }else if(v > 1){
        v = 1;
    }
    return (uint16_t)(v * 65535);
}
// Apply uGammaCompress_1pix to every value of a 16-bit Mat (1 or 3 channels),
// in place. Returns the same Mat.
cv::Mat uGammaCompress_(cv::Mat m,float threshold,float gainMin,float gainMax,float exponent){
    const int mat_type = m.type();
    if(mat_type == CV_16UC3){
        for(auto it = m.begin<cv::Vec3w>(); it != m.end<cv::Vec3w>(); ++it){
            for(int c = 0; c < 3; c++){
                (*it)[c] = uGammaCompress_1pix((*it)[c],threshold,gainMin,gainMax,exponent);
            }
        }
    }else if(mat_type == CV_16UC1){
        u_int16_t* px = (u_int16_t*)m.data;
        const int total = m.rows * m.cols;
        for(int i = 0; i < total; i++){
            px[i] = uGammaCompress_1pix(px[i],threshold,gainMin,gainMax,exponent);
        }
    }else{
        std::cout<<"Unsupported Data Type"<<std::endl;
    }
    return m;
}
// Apply uGammaDecompress_1pix to every value of a 16-bit Mat (1 or 3
// channels), in place. Returns the same Mat.
cv::Mat uGammaDecompress_(cv::Mat m,float threshold,float gainMin,float gainMax,float exponent){
    const int mat_type = m.type();
    if(mat_type == CV_16UC3){
        for(auto it = m.begin<cv::Vec3w>(); it != m.end<cv::Vec3w>(); ++it){
            for(int c = 0; c < 3; c++){
                (*it)[c] = uGammaDecompress_1pix((*it)[c],threshold,gainMin,gainMax,exponent);
            }
        }
    }else if(mat_type == CV_16UC1){
        u_int16_t* px = (u_int16_t*)m.data;
        const int total = m.rows * m.cols;
        for(int i = 0; i < total; i++){
            px[i] = uGammaDecompress_1pix(px[i],threshold,gainMin,gainMax,exponent);
        }
    }else{
        std::cout<<"Unsupported Data Type"<<std::endl;
    }
    return m;
}
// sRGB gamma helper: mode==true compresses, mode==false decompresses,
// using the standard sRGB transfer-curve constants.
cv::Mat gammasRGB(cv::Mat img, bool mode){
    return mode ? uGammaCompress_(img, 0.0031308, 12.92, 1.055, 1. / 2.4)
                : uGammaDecompress_(img, 0.04045, 12.92, 1.055, 2.4);
}
// Copy the 16-bit pixels of B into the raw destination buffer ptr_A
// (row-major; the caller must provide at least B.rows*B.cols elements).
void copy_mat_16U_2(u_int16_t* ptr_A, cv::Mat B){
    const u_int16_t* src = (u_int16_t*)B.data;
    const int total = B.rows * B.cols;
    for(int i = 0; i < total; i++){
        ptr_A[i] = src[i];
    }
}
// Per-pixel channel mean of a 16UC3 image, returned as a CV_16UC1 Mat.
cv::Mat mean_(cv::Mat img){
    cv::Mat processedImg = cv::Mat(img.rows, img.cols, CV_16UC1);
    u_int16_t* out = (u_int16_t*)processedImg.data;
    int idx = 0;
    for(auto it = img.begin<cv::Vec3w>(); it != img.end<cv::Vec3w>(); ++it, ++idx){
        // Sum in 32 bits to avoid overflowing three 16-bit channels.
        uint32_t channel_sum = (uint32_t)(*it)[0] + (*it)[1] + (*it)[2];
        out[idx] = (uint16_t)(channel_sum / 3);
    }
    return processedImg;
}
// Mean of all 16-bit values in img, normalized by USHRT_MAX to [0, 1].
double getMean(cv::Mat img){
    const u_int16_t* px = (u_int16_t*)img.data;
    const int total = img.rows * img.cols * img.channels();
    double acc = 0;
    for(int i = 0; i < total; i++){
        acc += px[i];
    }
    return acc / total / USHRT_MAX;
}
// Multiply every 16-bit value of img by `gain` in place, saturating the
// result to [0, USHRT_MAX]. Returns the same Mat.
cv::Mat matMultiply_scalar(cv::Mat img,float gain){
    u_int16_t* px = (u_int16_t*)img.data;
    const int total = img.rows * img.cols * img.channels();
    for(int i = 0; i < total; i++){
        const double scaled = px[i] * gain;
        if(scaled < 0){
            px[i] = 0;
        }else if(scaled > USHRT_MAX){
            px[i] = USHRT_MAX;
        }else{
            px[i] = (u_int16_t)scaled;
        }
    }
    return img;
}
// Fraction of 16-bit values strictly above `threshold` (given in [0, 1],
// scaled internally to the 16-bit range).
double getSaturated(cv::Mat img, double threshold){
    const double cutoff = threshold * USHRT_MAX;
    const u_int16_t* px = (u_int16_t*)img.data;
    const int total = img.rows * img.cols * img.channels();
    double above = 0;
    for(int i = 0; i < total; i++){
        if(px[i] > cutoff){
            above++;
        }
    }
    return above / (double)total;
}
// Apply `gain` to each RGB channel (saturating to 16-bit range) and write
// the per-pixel channel mean into a new CV_16UC1 Mat. Returns an empty Mat
// for non-3-channel input.
cv::Mat meanGain_(cv::Mat img,int gain){
    if(img.channels()!=3){
        std::cout<<"unsupport img type in meanGain_()"<<std::endl;
        return cv::Mat();
    }
    cv::Mat processedImg = cv::Mat(img.rows, img.cols, CV_16UC1);
    u_int16_t* out = (u_int16_t*)processedImg.data;
    int idx = 0;
    for(auto it = img.begin<cv::Vec3w>(); it != img.end<cv::Vec3w>(); ++it, ++idx){
        double sum = 0;
        // Amplify and clamp each channel independently before averaging.
        for(int c = 0; c < 3; c++){
            double amplified = (*it)[c]*gain;
            if(amplified < 0) amplified = 0;
            if(amplified > USHRT_MAX) amplified = USHRT_MAX;
            sum += amplified;
        }
        out[idx] = (uint16_t)(sum/3);
    }
    return processedImg;
}
// Scale each RGB pixel of mergedImage by the per-pixel tone-mapping gain
// fusedGray/shortGray, saturating each channel to the 16-bit range.
// shortGray and fusedGray must have one gray value per mergedImage pixel.
cv::Mat applyScaling_(cv::Mat mergedImage, cv::Mat shortGray, cv::Mat fusedGray){
    cv::Mat result = mergedImage.clone();
    u_int16_t* ptr_shortg = (u_int16_t*)shortGray.data;
    u_int16_t* ptr_fusedg = (u_int16_t*)fusedGray.data;
    int count = 0;
    cv::MatIterator_<cv::Vec3w> it, end;
    for( it = result.begin<cv::Vec3w>(), end = result.end<cv::Vec3w>(); it != end; ++it)
    {
        // Gain = fused / short; leave the pixel unchanged where short is 0.
        double s = 1;
        if(*(ptr_shortg+count)!=0){
            s = *(ptr_fusedg+count);
            s/=*(ptr_shortg+count);
        }
        for(int c=0;c<mergedImage.channels();c++){
            double tmp = (*it)[c]*s;
            if(tmp<0){
                (*it)[c] = 0;
            }else if(tmp>USHRT_MAX){
                (*it)[c] = USHRT_MAX;
            }else{
                (*it)[c] = tmp;
            }
        }
        // Bug fix: advance the grayscale index together with the pixel
        // iterator — previously `count` was never incremented, so every
        // pixel was scaled by the gain of pixel (0, 0).
        count++;
    }
    return result;
}
// Local tone mapping via synthetic exposure fusion (Mertens): build a
// grayscale "short" exposure from mergedImage, synthesize a gain-amplified
// "long" exposure, fuse them in grayscale, then rescale mergedImage's RGB
// channels by the fused/short ratio.
// Outputs: shortg/longg (gamma-compressed gray exposures), fusedg (fused
// gray, 16-bit), gain (the synthetic-exposure gain that was used).
void localToneMap(cv::Mat& mergedImage, Options options, cv::Mat& shortg,
cv::Mat& longg, cv::Mat& fusedg, int& gain){
    std::cout<<"HDR Tone Mapping..."<<std::endl;
    // # Work with grayscale images
    cv::Mat shortGray = rgb_2_gray<uint16_t, uint16_t, CV_16U>(mergedImage); //mean_(mergedImage);
    std::cout<<"--- Compute grayscale image"<<std::endl;
    // Compute gain: automatic search when ltmGain == -1, else from options.
    gain = 0;
    if(options.ltmGain==-1){
        // Search on a ~25x-downsampled copy to keep the loop cheap.
        double dsFactor = 25;
        int down_height = round(shortGray.rows/dsFactor);
        int down_width = round(shortGray.cols/dsFactor);
        cv::Mat shortS;
        // Bug fix: cv::Size takes (width, height) — the arguments were
        // swapped — and cv::resize expects the interpolation flag as its
        // 6th argument (the 4th/5th are the fx/fy scale factors).
        cv::resize(shortGray,shortS,cv::Size(down_width,down_height),0,0,cv::INTER_LINEAR);
        shortS = shortS.reshape(1,1);
        // Increase the gain in steps of 2 until the synthetic long exposure
        // is bright enough or too saturated (bounds: compression < 6,
        // gain < 30).
        bool bestGain = false;
        double compression = 1.0;
        double saturated = 0.0;
        cv::Mat shortSg = gammasRGB(shortS.clone(), true);
        double sSMean = getMean(shortSg);
        while((compression < 1.9 && saturated < .95)||((!bestGain) && (compression < 6) && (gain < 30) && (saturated < 0.33))){
            gain += 2;
            cv::Mat longSg = gammasRGB(shortS.clone()*gain, true);
            double lSMean = getMean(longSg);
            compression = lSMean / sSMean;
            bestGain = lSMean > (1 - sSMean) / 2; // only works if burst underexposed
            saturated = getSaturated(longSg,0.95);
        }
    }else{
        if(options.ltmGain>0){
            gain = options.ltmGain;
        }
    }
    std::cout<<"--- Compute gain"<<std::endl;
    // Create a synthetic long exposure at the chosen gain.
    cv::Mat longGray = meanGain_(mergedImage.clone(),gain);
    std::cout<<"--- Synthetic long expo"<<std::endl;
    // Apply gamma correction to both exposures.
    longg = gammasRGB(longGray.clone(), true);
    shortg = gammasRGB(shortGray.clone(),true);
    std::cout<<"--- Apply Gamma correction"<<std::endl;
    // Perform tone mapping by exposure fusion in grayscale.
    cv::Ptr<cv::MergeMertens> mergeMertens = cv::createMergeMertens();
    std::cout<<"--- Create Mertens"<<std::endl;
    // hack: cv2 mergeMertens expects inputs between 0 and 255
    // but the result is scaled between 0 and 1 (some values can actually be greater than 1!)
    std::vector<cv::Mat> src_expos;
    src_expos.push_back(convert16bit2_8bit_(shortg.clone()));
    src_expos.push_back(convert16bit2_8bit_(longg.clone()));
    mergeMertens->process(src_expos, fusedg);
    fusedg = fusedg*USHRT_MAX;
    fusedg.convertTo(fusedg, CV_16UC1);
    std::cout<<"--- Apply Mertens"<<std::endl;
    // Undo gamma correction on the fused gray.
    cv::Mat fusedGray = gammasRGB(fusedg.clone(), false);
    // cv::imwrite("fusedg_degamma.png", fusedGray);
    std::cout<<"--- Un-apply Gamma correction"<<std::endl;
    // Scale each RGB channel of the short exposure accordingly.
    mergedImage = applyScaling_(mergedImage, shortGray, fusedGray);
    std::cout<<"--- Scale channels"<<std::endl;
}
// Sinusoidal contrast boost for one 16-bit pixel: subtracting
// gain*sin(2*pi*x) darkens values below mid-gray and brightens those above.
u_int16_t enhanceContrast_1pix(u_int16_t pix_val,double gain){
    // Normalize to [0, 1].
    double v = (double)pix_val / USHRT_MAX;
    v -= gain * sin(2 * M_PI * v);
    // Clamp and rescale back to the 16-bit range.
    if(v < 0){
        v = 0;
    }else if(v > 1){
        v = 1;
    }
    return (u_int16_t)(v * USHRT_MAX);
}
// Apply the sinusoidal contrast boost to every 16-bit value of `image`,
// in place. Ratios outside [0, 1] are ignored with a warning.
cv::Mat enhanceContrast(cv::Mat image, Options options){
    if(options.gtmContrast < 0 || options.gtmContrast > 1){
        std::cout<<"GTM ignored, expected a contrast enhancement ratio between 0 and 1"<<std::endl;
        return image;
    }
    u_int16_t* px = (u_int16_t*)image.data;
    const int total = image.rows * image.cols * image.channels();
    for(int i = 0; i < total; i++){
        px[i] = enhanceContrast_1pix(px[i], options.gtmContrast);
    }
    return image;
}
// Element-wise absolute difference |X - Y| of two 16-bit Mats with the same
// total element count; on mismatch, warns and returns an uninitialized Mat
// shaped like X.
cv::Mat distL1_(cv::Mat X, cv::Mat Y){
    const int total_x = X.rows * X.cols * X.channels();
    const int total_y = Y.rows * Y.cols * Y.channels();
    cv::Mat result = cv::Mat(X.rows,X.cols,X.type());
    if(total_x != total_y){
        std::cout<<"Mat size not match. distL1_ failed!"<<std::endl;
        return result;
    }
    const u_int16_t* px = (u_int16_t*)X.data;
    const u_int16_t* py = (u_int16_t*)Y.data;
    u_int16_t* pr = (u_int16_t*)result.data;
    for(int i = 0; i < total_x; i++){
        // Subtract the smaller from the larger to stay in unsigned range.
        pr[i] = (px[i] < py[i]) ? (py[i] - px[i]) : (px[i] - py[i]);
    }
    return result;
}
// Three-scale unsharp masking: for each scale i, sharpen the pixel as
// x + k_i*(x - blur_i) but only where the local contrast low_i (normalized)
// reaches the threshold th_i; the three per-scale results are averaged and
// clamped to the 16-bit range.
// All eight Mats must share the same size/channels as `image`.
cv::Mat sharpenTriple_(cv::Mat image,
cv::Mat blur0, cv::Mat low0, float th0, float k0,
cv::Mat blur1, cv::Mat low1, float th1, float k1,
cv::Mat blur2, cv::Mat low2, float th2, float k2){
// create result mat
cv::Mat result = cv::Mat(image.rows,image.cols,image.type());
// initialize iteraters
u_int16_t* ptr_r = (u_int16_t*)result.data;
u_int16_t* ptr_img = (u_int16_t*)image.data;
u_int16_t* ptr_blur0 = (u_int16_t*)blur0.data;
u_int16_t* ptr_low0 = (u_int16_t*)low0.data;
u_int16_t* ptr_blur1 = (u_int16_t*)blur1.data;
u_int16_t* ptr_low1 = (u_int16_t*)low1.data;
u_int16_t* ptr_blur2 = (u_int16_t*)blur2.data;
u_int16_t* ptr_low2 = (u_int16_t*)low2.data;
int n_channels = image.channels();
int end = image.rows*image.cols*n_channels;
// traverse Image, one flat 16-bit value at a time (channels interleaved)
for(int idx = 0;idx<end;idx++){
double r, r0, r1, r2;
double x = *(ptr_img+idx);
// low-contrast maps are normalized to [0, 1] before the threshold test
double l0 = *(ptr_low0+idx)/(double)USHRT_MAX;
double l1 = *(ptr_low1+idx)/(double)USHRT_MAX;
double l2 = *(ptr_low2+idx)/(double)USHRT_MAX;
double b0 = *(ptr_blur0+idx);
double b1 = *(ptr_blur1+idx);
double b2 = *(ptr_blur2+idx);
// below threshold: keep x; otherwise add the amplified detail (x - blur)
r0 = l0<th0? x:x+k0*(x-b0);
r1 = l1<th1? x:x+k1*(x-b1);
r2 = l2<th2? x:x+k2*(x-b2);
// average the three scales, then clamp to the 16-bit range
r = (r0+r1+r2)/3.0;
if(r<0) r=0;
if(r>USHRT_MAX) r = USHRT_MAX;
*(ptr_r+idx) = (u_int16_t)r;
}
return result;
}
// Unsharp-mask sharpening at three Gaussian scales taken from `tuning`;
// the per-scale results are combined in sharpenTriple_.
cv::Mat sharpenTriple(cv::Mat image, Tuning tuning, Options options){
    const std::vector<float>& amounts = tuning.sharpenAmount;
    const std::vector<float>& sigmas = tuning.sharpenSigma;
    const std::vector<float>& thresholds = tuning.sharpenThreshold;
    // One Gaussian blur per scale (kernel size derived from sigma).
    cv::Mat blur0, blur1, blur2;
    cv::GaussianBlur(image, blur0, cv::Size(0,0), sigmas[0]);
    cv::GaussianBlur(image, blur1, cv::Size(0,0), sigmas[1]);
    cv::GaussianBlur(image, blur2, cv::Size(0,0), sigmas[2]);
    std::cout<<" --- gaussian blur"<<std::endl;
    // cv::imwrite("blur2.png", blur2);
    // Low-contrast maps: |blur - image| at each scale.
    cv::Mat low0 = distL1_(blur0, image);
    cv::Mat low1 = distL1_(blur1, image);
    cv::Mat low2 = distL1_(blur2, image);
    std::cout<<" --- low contrast"<<std::endl;
    // cv::imwrite("low2.png", low2);
    // Combine the three scales into the sharpened output.
    cv::Mat sharpImage = sharpenTriple_(image,
        blur0, low0, thresholds[0], amounts[0],
        blur1, low1, thresholds[1], amounts[1],
        blur2, low2, thresholds[2], amounts[2]);
    std::cout<<" --- sharpen"<<std::endl;
    return sharpImage;
}
// Copy the 16-bit pixels of B into the raw destination buffer ptr_A
// (row-major; the caller must provide at least B.rows*B.cols elements).
void copy_mat_16U_3(u_int16_t* ptr_A, cv::Mat B){
    const u_int16_t* src = (u_int16_t*)B.data;
    for(int r = 0; r < B.rows; r++){
        for(int c = 0; c < B.cols; c++){
            const int offset = r * B.cols + c;
            ptr_A[offset] = src[offset];
        }
    }
}
// void copy_mat_16U_3(u_int16_t* ptr_A, cv::Mat B){
// // u_int16_t* ptr_A = (u_int16_t*)A.data;
// u_int16_t* ptr_B = (u_int16_t*)B.data;
// for(int r = 0; r < B.rows; r++) {
// for(int c = 0; c < B.cols; c++) {
// *(ptr_A+r*B.cols+c) = *(ptr_B+r*B.cols+c);
// }
// }
// }
// Reinterpret a merged Mat as `opencv_type`: clone, reshape to the target
// channel count, then convert the element type. (The old row-by-row CSV
// rebuild was dead code under #if 0 and has been removed.)
cv::Mat processMergedMat(cv::Mat mergedImg, int opencv_type){
    const int channels = CV_MAT_CN(opencv_type);
    cv::Mat m = mergedImg.clone();
    m = m.reshape(channels);
    m.convertTo(m, opencv_type);
    return m;
}
// Debug helper: print up to the top-left 20x20 16-bit values of m.
void show20_20(cv::Mat m){
    u_int16_t* ptr = (u_int16_t*)m.data;
    // Bug fix: clamp to the Mat's size so mats smaller than 20x20 do not
    // trigger an out-of-bounds read.
    const int rows = std::min(20, m.rows);
    const int cols = std::min(20, m.cols);
    for(int i = 0; i < rows; i++){
        for(int j = 0; j < cols; j++){
            std::cout<<*(ptr+i*m.cols+j)<<", ";
        }
        std::cout<<std::endl;
    }
}
// Dump a Mat to `filename` using OpenCV's CSV formatter.
void writeCSV(std::string filename, cv::Mat m)
{
    std::ofstream out(filename.c_str());
    out << cv::format(m, cv::Formatter::FMT_CSV) << std::endl;
    out.close();
}
// Finishing stage of the HDR+ pipeline: turns the merged bayer mosaic into the
// final RGB image. Steps visible below: demosaic/WB via LibRaw (postprocess),
// optional local tone mapping (LTM), optional global tone mapping (GTM),
// sRGB gamma, and a triple-scale sharpen. When HDRPLUS_NO_DETAILED_OUTPUT is
// NOT defined, intermediates are written under DBG_OUTPUT_ROOT.
//
// NOTE(review): `ref` and `mergedImg` are allocated with `new` and never
// deleted -- they leak on every call. Deleting them here may be unsafe because
// mergedImg's LibRaw raw_image is pointed at this->mergedBayer's data (see
// below); confirm LibRaw teardown semantics before fixing.
void finish::process(const hdrplus::burst& burst_images, cv::Mat& finalOutputImage){
// copy mergedBayer to rawReference
std::cout<<"finish pipeline start ..."<<std::endl;
// save merged Image value
#ifndef HDRPLUS_NO_DETAILED_OUTPUT
writeCSV(DBG_OUTPUT_ROOT "merged.csv",burst_images.merged_bayer_image);
#endif
this->refIdx = burst_images.reference_image_idx;
// this->burstPath = burstPath;
// std::cout<<"processMerged:"<<std::endl;
// show20_20(mergedB);
// Debug builds round-trip the merged bayer through CSV; release builds
// convert the in-memory Mat directly.
#ifndef HDRPLUS_NO_DETAILED_OUTPUT
this->mergedBayer = loadFromCSV(DBG_OUTPUT_ROOT "merged.csv", CV_16UC1);
// this->mergedBayer = processMergedMat(mergedB,CV_16UC1);//loadFromCSV("merged.csv", CV_16UC1);
// std::cout<<"processMerged:"<<std::endl;
// show20_20(this->mergedBayer);
// this->mergedBayer = loadFromCSV(DBG_OUTPUT_ROOT "merged.csv", CV_16UC1);
// this->mergedBayer = processMergedMat(burst_images.merged_bayer_image, CV_16UC1);
#else
// this->mergedBayer = loadFromCSV(DBG_OUTPUT_ROOT "merged.csv", CV_16UC1);
this->mergedBayer = processMergedMat(burst_images.merged_bayer_image, CV_16UC1);
// std::cout<<"processMerged:"<<std::endl;
#endif
// std::cout<<"csv:"<<std::endl;
// show20_20(this->mergedBayer);
// load_rawPathList(burstPath);
// read in ref img
// bayer_image* ref = new bayer_image(rawPathList[refIdx]);
// Demosaic the untouched reference frame (for debug comparison outputs).
bayer_image* ref = new bayer_image(burst_images.bayer_images[burst_images.reference_image_idx]);
cv::Mat processedRefImage = postprocess(ref->libraw_processor,params.rawpyArgs);
std::cout<<"size ref: "<<processedRefImage.rows<<"*"<<processedRefImage.cols<<std::endl;
// write reference image
#ifndef HDRPLUS_NO_DETAILED_OUTPUT
if(params.flags["writeReferenceImage"]){
std::cout<<"writing reference img ..."<<std::endl;
cv::Mat outputImg = convert16bit2_8bit_(processedRefImage.clone());
cv::cvtColor(outputImg, outputImg, cv::COLOR_RGB2BGR);
// cv::imshow("test",processedImage);
cv::imwrite(DBG_OUTPUT_ROOT "processedRef.jpg", outputImg);
// cv::waitKey(0);
}
#endif
// write gamma reference
#ifndef HDRPLUS_NO_DETAILED_OUTPUT
if(params.flags["writeGammaReference"]){
std::cout<<"writing Gamma reference img ..."<<std::endl;
cv::Mat outputImg = gammasRGB(processedRefImage.clone(),true);
outputImg = convert16bit2_8bit_(outputImg);
cv::cvtColor(outputImg, outputImg, cv::COLOR_RGB2BGR);
cv::imwrite(DBG_OUTPUT_ROOT "processedRefGamma.jpg", outputImg);
}
#endif
// get the bayer_image of the merged image
// bayer_image* mergedImg = new bayer_image(rawPathList[refIdx]);
// Reuse the reference frame's LibRaw metadata but point its raw buffer at our
// merged bayer data (no copy) so LibRaw demosaics the merged mosaic.
bayer_image* mergedImg = new bayer_image(burst_images.bayer_images[this->refIdx]);
mergedImg->libraw_processor->imgdata.rawdata.raw_image = (uint16_t*)this->mergedBayer.data;
// copy_mat_16U_3(mergedImg->libraw_processor->imgdata.rawdata.raw_image,this->mergedBayer);
cv::Mat processedMerge = postprocess(mergedImg->libraw_processor,params.rawpyArgs);
// write merged image
#ifndef HDRPLUS_NO_DETAILED_OUTPUT
if(params.flags["writeMergedImage"]){
std::cout<<"writing Merged img ..."<<std::endl;
cv::Mat outputImg = convert16bit2_8bit_(processedMerge.clone());
cv::cvtColor(outputImg, outputImg, cv::COLOR_RGB2BGR);
cv::imwrite(DBG_OUTPUT_ROOT "mergedImg.jpg", outputImg);
}
#endif
// write gamma merged image
// NOTE(review): this block is gated on "writeMergedImage" -- probably should
// be "writeGammaMergedImage"; confirm the intended flag name.
#ifndef HDRPLUS_NO_DETAILED_OUTPUT
if(params.flags["writeMergedImage"]){
std::cout<<"writing Gamma Merged img ..."<<std::endl;
cv::Mat outputImg = gammasRGB(processedMerge.clone(),true);
outputImg = convert16bit2_8bit_(outputImg);
cv::cvtColor(outputImg, outputImg, cv::COLOR_RGB2BGR);
cv::imwrite(DBG_OUTPUT_ROOT "mergedImgGamma.jpg", outputImg);
}
#endif
// step 5. HDR tone mapping
// processedImage, gain, shortExposure, longExposure, fusedExposure = localToneMap(burstPath, processedImage, options)
// NOTE(review): `gain` is only assigned inside localToneMap when ltmGain is
// enabled; the writeReferenceFinal block below passes it as an output
// parameter, so it is re-assigned there before any read -- verify.
int gain;
if(params.options.ltmGain){
cv::Mat shortExposure, longExposure, fusedExposure;
localToneMap(processedMerge, params.options,shortExposure,longExposure,fusedExposure,gain);
std::cout<<"gain="<< gain<<std::endl;
#ifndef HDRPLUS_NO_DETAILED_OUTPUT
if(params.flags["writeShortExposure"]){
std::cout<<"writing ShortExposure img ..."<<std::endl;
cv::Mat outputImg = convert16bit2_8bit_(shortExposure);
cv::imwrite(DBG_OUTPUT_ROOT "shortg.jpg", outputImg);
}
#endif
#ifndef HDRPLUS_NO_DETAILED_OUTPUT
if(params.flags["writeLongExposure"]){
std::cout<<"writing LongExposure img ..."<<std::endl;
cv::Mat outputImg = convert16bit2_8bit_(longExposure);
cv::imwrite(DBG_OUTPUT_ROOT "longg.jpg", outputImg);
}
#endif
#ifndef HDRPLUS_NO_DETAILED_OUTPUT
if(params.flags["writeFusedExposure"]){
std::cout<<"writing FusedExposure img ..."<<std::endl;
cv::Mat outputImg = convert16bit2_8bit_(fusedExposure);
cv::imwrite(DBG_OUTPUT_ROOT "fusedg.jpg", outputImg);
}
#endif
#ifndef HDRPLUS_NO_DETAILED_OUTPUT
if(params.flags["writeLTMImage"]){
std::cout<<"writing LTMImage ..."<<std::endl;
cv::Mat outputImg = convert16bit2_8bit_(processedMerge.clone());
cv::cvtColor(outputImg, outputImg, cv::COLOR_RGB2BGR);
cv::imwrite(DBG_OUTPUT_ROOT "ltmGain.jpg", outputImg);
}
#endif
#ifndef HDRPLUS_NO_DETAILED_OUTPUT
if(params.flags["writeLTMGamma"]){
std::cout<<"writing LTMImage Gamma ..."<<std::endl;
cv::Mat outputImg = gammasRGB(processedMerge.clone(),true);
outputImg = convert16bit2_8bit_(outputImg);
cv::cvtColor(outputImg, outputImg, cv::COLOR_RGB2BGR);
cv::imwrite(DBG_OUTPUT_ROOT "ltmGain_gamma.jpg", outputImg);
}
#endif
}
// step 6 GTM: contrast enhancement / global tone mapping
if(params.options.gtmContrast){
processedMerge = enhanceContrast(processedMerge, params.options);
std::cout<<"STEP 6 -- Apply GTM"<<std::endl;
}
// apply the final sRGB gamma curve
processedMerge = gammasRGB(processedMerge.clone(),true);
std::cout<<"-- Apply Gamma"<<std::endl;
#ifndef HDRPLUS_NO_DETAILED_OUTPUT
if(params.flags["writeGTMImage"]) {
std::cout<<"writing GTMImage ..."<<std::endl;
cv::Mat outputImg = convert16bit2_8bit_(processedMerge.clone());
cv::cvtColor(outputImg, outputImg, cv::COLOR_RGB2BGR);
cv::imwrite(DBG_OUTPUT_ROOT "GTM_gamma.jpg", outputImg);
}
#endif
// Step 7: sharpen
// The sharpened result is written directly into the caller's output Mat.
finalOutputImage = sharpenTriple(processedMerge.clone(), params.tuning, params.options);
cv::Mat& processedImage = finalOutputImage;
#ifndef HDRPLUS_NO_DETAILED_OUTPUT
if(params.flags["writeFinalImage"]){
std::cout<<"writing FinalImage ..."<<std::endl;
cv::Mat outputImg = convert16bit2_8bit_(processedImage.clone());
cv::cvtColor(outputImg, outputImg, cv::COLOR_RGB2BGR);
cv::imwrite(DBG_OUTPUT_ROOT "FinalImage.jpg", outputImg);
}
#endif
// write final ref
// Debug-only: run the same LTM/GTM/gamma/sharpen chain on the single
// reference frame for side-by-side comparison with the merged result.
#ifndef HDRPLUS_NO_DETAILED_OUTPUT
if(params.flags["writeReferenceFinal"]){
std::cout<<"writing Final Ref Image ..."<<std::endl;
if(params.options.ltmGain){
params.options.ltmGain = gain;
}
cv::Mat shortExposureRef, longExposureRef, fusedExposureRef;
localToneMap(processedRefImage, params.options,shortExposureRef,longExposureRef,fusedExposureRef,gain);
if(params.options.gtmContrast){ // contrast enhancement / global tone mapping
processedRefImage = enhanceContrast(processedRefImage, params.options);
}
processedRefImage = gammasRGB(processedRefImage.clone(),true);
// sharpen
processedRefImage = sharpenTriple(processedRefImage.clone(), params.tuning, params.options);
cv::Mat outputImg = convert16bit2_8bit_(processedRefImage.clone());
cv::cvtColor(outputImg, outputImg, cv::COLOR_RGB2BGR);
cv::imwrite(DBG_OUTPUT_ROOT "FinalReference.jpg", outputImg);
}
#endif
// End of finishing
}
// Element-wise copy of B into A (both treated as continuous 16-bit, single
// channel). Iterates over A's geometry; each buffer is indexed with its own
// column count, so differing widths keep their own row strides.
void finish::copy_mat_16U(cv::Mat& A, cv::Mat B){
    u_int16_t* dst = (u_int16_t*)A.data;
    u_int16_t* src = (u_int16_t*)B.data;
    for (int row = 0; row < A.rows; ++row) {
        for (int col = 0; col < A.cols; ++col) {
            dst[row * A.cols + col] = src[row * B.cols + col];
        }
    }
}
void finish::copy_rawImg2libraw(std::shared_ptr<LibRaw>& libraw_ptr, cv::Mat B){
u_int16_t* ptr_A = (u_int16_t*)libraw_ptr->imgdata.rawdata.raw_image;
u_int16_t* ptr_B = (u_int16_t*)B.data;
for(int r = 0; r < B.rows; r++) {
for(int c = 0; c < B.cols; c++) {
*(ptr_A+r*B.cols+c) = *(ptr_B+r*B.cols+c);
}
}
}
} // namespace hdrplus

@ -0,0 +1,138 @@
#include <cstdio>
#include <string>
#include <vector>
#include <utility> // std::pair
#include <opencv2/opencv.hpp> // all opencv header
#include "hdrplus/hdrplus_pipeline.h"
#include "hdrplus/burst.h"
#include "hdrplus/align.h"
#include "hdrplus/merge.h"
#include "hdrplus/finish.h"
#include <fstream>
#ifdef __ANDROID__
// #include <AndroidHelper.h>
#endif
namespace hdrplus
{
void hdrplus_pipeline::run_pipeline( \
const std::string& burst_path, \
const std::string& reference_image_path )
{
// Create burst of images
burst burst_images( burst_path, reference_image_path );
std::vector<std::vector<std::vector<std::pair<int, int>>>> alignments;
// Run align
align_module.process( burst_images, alignments );
// Run merging
merge_module.process( burst_images, alignments );
// Run finishing
cv::Mat finalImg;
finish_module.process( burst_images, finalImg);
}
// Run the full HDR+ stack on a burst given as explicit file paths; the
// finished image is written into finalImg. Always returns true.
bool hdrplus_pipeline::run_pipeline( \
    const std::vector<std::string>& burst_paths, \
    int reference_image_index, cv::Mat& finalImg )
{
    // Load the burst, then run the three stages in order.
    burst burst_images( burst_paths, reference_image_index );
    std::vector<std::vector<std::vector<std::pair<int, int>>>> tile_shifts;
    align_module.process( burst_images, tile_shifts );
    merge_module.process( burst_images, tile_shifts );
    finish_module.process( burst_images, finalImg );
    return true;
}
// Run the full HDR+ stack on a burst supplied as in-memory raw file contents;
// the finished image is written into finalImg. Always returns true.
bool hdrplus_pipeline::run_pipeline( \
    const std::vector<std::vector<uint8_t> >& burst_contents, \
    int reference_image_index, cv::Mat& finalImg )
{
    // Decode the burst from memory, then run align / merge / finish.
    burst burst_images( burst_contents, reference_image_index );
    std::vector<std::vector<std::vector<std::pair<int, int>>>> tile_shifts;
    align_module.process( burst_images, tile_shifts );
    merge_module.process( burst_images, tile_shifts );
    finish_module.process( burst_images, finalImg );
    return true;
}
// Run the full HDR+ stack on a burst supplied as MemFile handles; the
// finished image is written into finalImg. Always returns true.
bool hdrplus_pipeline::run_pipeline( \
    const std::vector<std::shared_ptr<MemFile> >& burst_files, \
    int reference_image_index, cv::Mat& finalImg )
{
    // Decode the burst from the shared file buffers, then run the stages.
    burst burst_images( burst_files, reference_image_index );
    std::vector<std::vector<std::vector<std::pair<int, int>>>> tile_shifts;
    align_module.process( burst_images, tile_shifts );
    merge_module.process( burst_images, tile_shifts );
    finish_module.process( burst_images, finalImg );
    return true;
}
} // namespace hdrplus

@ -0,0 +1,340 @@
#include <opencv2/opencv.hpp> // all opencv header
#include <vector>
#include <utility>
#include "hdrplus/merge.h"
#include "hdrplus/burst.h"
#include "hdrplus/utility.h"
namespace hdrplus
{
// Merge stage: denoise each of the four bayer channels across the burst and
// reassemble them into burst_images.merged_bayer_image (padding removed).
//
// Fix: the original re-extracted all four channels of every alternate frame
// inside the per-channel loop (4x redundant extract_rgb_from_bayer work).
// Channels of the alternates are now extracted once, up front; the per-channel
// results are identical.
void merge::process(hdrplus::burst& burst_images, \
    std::vector<std::vector<std::vector<std::pair<int, int>>>>& alignments)
{
    // 4.1 Noise parameters scaled from the baseline ISO noise model
    double lambda_shot, lambda_read;
    std::tie(lambda_shot, lambda_read) = burst_images.bayer_images[burst_images.reference_image_idx].get_noise_params();
    // 4.2-4.4 Denoising and merging on the padded bayer image
    cv::Mat reference_image = burst_images.bayer_images_pad[burst_images.reference_image_idx];
    // Split the reference mosaic into its four raw channels (R, G1, G2, B)
    std::vector<cv::Mat> channels(4);
    hdrplus::extract_rgb_from_bayer<uint16_t>(reference_image, channels[0], channels[1], channels[2], channels[3]);
    // Extract the channels of every alternate frame ONCE (hoisted out of the
    // per-channel loop below).
    std::vector<std::vector<cv::Mat>> alt_channels_list;
    for (int j = 0; j < burst_images.num_images; j++) {
        if (j != burst_images.reference_image_idx) {
            cv::Mat alt_image = burst_images.bayer_images_pad[j];
            std::vector<cv::Mat> alt_channels(4);
            hdrplus::extract_rgb_from_bayer<uint16_t>(alt_image, alt_channels[0], alt_channels[1], alt_channels[2], alt_channels[3]);
            alt_channels_list.push_back(alt_channels);
        }
    }
    std::vector<cv::Mat> processed_channels(4);
    // For each channel, perform temporal+spatial denoising and merging
    for (int i = 0; i < 4; ++i) {
        cv::Mat channel_i = channels[i];
        // Gather channel i of every alternate frame (reference excluded above)
        std::vector<cv::Mat> alternate_channel_i_list;
        for (size_t k = 0; k < alt_channels_list.size(); ++k) {
            alternate_channel_i_list.push_back(alt_channels_list[k][i]);
        }
        cv::Mat merged_channel = processChannel(burst_images, alignments, channel_i, alternate_channel_i_list, lambda_shot, lambda_read);
        merged_channel.convertTo(processed_channels[i], CV_16U);
    }
    // Interleave the four channels back into a bayer mosaic:
    // even rows hold R/G1 pairs, odd rows hold G2/B pairs.
    cv::Mat merged(reference_image.rows, reference_image.cols, CV_16U);
    for (int y = 0; y < reference_image.rows; ++y) {
        uint16_t* row = merged.ptr<uint16_t>(y);
        if (y % 2 == 0) {
            uint16_t* i0 = processed_channels[0].ptr<uint16_t>(y / 2);
            uint16_t* i1 = processed_channels[1].ptr<uint16_t>(y / 2);
            for (int x = 0; x < reference_image.cols;) {
                row[x] = i0[x / 2]; // R
                x++;
                row[x] = i1[x / 2]; // G1
                x++;
            }
        } else {
            uint16_t* i2 = processed_channels[2].ptr<uint16_t>(y / 2);
            uint16_t* i3 = processed_channels[3].ptr<uint16_t>(y / 2);
            for (int x = 0; x < reference_image.cols;) {
                row[x] = i2[x / 2]; // G2
                x++;
                row[x] = i3[x / 2]; // B
                x++;
            }
        }
    }
    // Strip the alignment padding: {top, bottom, left, right}
    std::vector<int> padding = burst_images.padding_info_bayer;
    cv::Range horizontal = cv::Range(padding[2], reference_image.cols - padding[3]);
    cv::Range vertical = cv::Range(padding[0], reference_image.rows - padding[1]);
    burst_images.merged_bayer_image = merged(vertical, horizontal);
}
// Slice the reference channel into half-overlapping TILE_SIZE x TILE_SIZE
// tiles (stride = offset = TILE_SIZE/2), row-major order.
std::vector<cv::Mat> merge::getReferenceTiles(cv::Mat reference_image) {
    std::vector<cv::Mat> tiles;
    const int max_y = reference_image.rows - offset;
    const int max_x = reference_image.cols - offset;
    for (int top = 0; top < max_y; top += offset) {
        for (int left = 0; left < max_x; left += offset) {
            tiles.push_back(reference_image(cv::Rect(left, top, TILE_SIZE, TILE_SIZE)));
        }
    }
    return tiles;
}
// Recombine overlapping tiles into one image. The tiles form four interleaved
// sub-grids (even/even, even/odd, odd/even, odd/odd); each sub-grid is
// concatenated into an image and the four images are summed with the proper
// half-tile offsets so overlapping regions accumulate.
cv::Mat merge::mergeTiles(std::vector<cv::Mat> tiles, int num_rows, int num_cols) {
    const int tiles_per_row = num_cols / offset - 1; // tiles per grid row
    const int tiles_per_col = num_rows / offset - 1; // tiles per grid column
    // Gather every second tile starting at grid position (y0, x0).
    auto gather = [&](int y0, int x0) {
        std::vector<std::vector<cv::Mat>> grid;
        for (int y = y0; y < tiles_per_col; y += 2) {
            std::vector<cv::Mat> grid_row;
            for (int x = x0; x < tiles_per_row; x += 2) {
                grid_row.push_back(tiles[y * tiles_per_row + x]);
            }
            grid.push_back(grid_row);
        }
        return grid;
    };
    // Concatenate each of the four interleaved subsets
    cv::Mat img_original   = cat2Dtiles(gather(0, 0));
    cv::Mat img_horizontal = cat2Dtiles(gather(0, 1));
    cv::Mat img_vertical   = cat2Dtiles(gather(1, 0));
    cv::Mat img_2d         = cat2Dtiles(gather(1, 1));
    // Sum the subsets, shifted by half a tile horizontally and/or vertically
    img_original(cv::Rect(offset, 0, num_cols - TILE_SIZE, num_rows)) += img_horizontal;
    img_original(cv::Rect(0, offset, num_cols, num_rows - TILE_SIZE)) += img_vertical;
    img_original(cv::Rect(offset, offset, num_cols - TILE_SIZE, num_rows - TILE_SIZE)) += img_2d;
    return img_original;
}
// Denoise and merge one bayer channel across the burst.
// Pipeline: tile the reference channel -> per-tile noise variance -> DFT ->
// temporal Wiener denoise against the aligned alternate tiles -> spatial
// Wiener denoise -> inverse DFT -> cosine-window blend back into one image.
//
// NOTE(review): alternate_channel_i_list[0] is dereferenced below without an
// empty check -- a single-image burst would crash here; confirm callers always
// supply at least one alternate frame.
// NOTE(review): alt_tiles_list is sized from reference_tiles but indexed with
// tile counts derived from the alternates' dimensions -- this assumes the
// alternates have exactly the reference's padded size; verify.
cv::Mat merge::processChannel(hdrplus::burst& burst_images, \
std::vector<std::vector<std::vector<std::pair<int, int>>>>& alignments, \
cv::Mat channel_image, \
std::vector<cv::Mat> alternate_channel_i_list,\
float lambda_shot, \
float lambda_read) {
// Get tiles of the reference image
std::vector<cv::Mat> reference_tiles = getReferenceTiles(channel_image);
// Get noise variance (sigma**2 = lambda_shot * tileRMS + lambda_read)
std::vector<float> noise_variance = getNoiseVariance(reference_tiles, lambda_shot, lambda_read);
// Apply FFT on reference tiles (spatial to frequency)
std::vector<cv::Mat> reference_tiles_DFT;
for (auto ref_tile : reference_tiles) {
cv::Mat ref_tile_DFT;
ref_tile.convertTo(ref_tile_DFT, CV_32F);
cv::dft(ref_tile_DFT, ref_tile_DFT, cv::DFT_COMPLEX_OUTPUT);
reference_tiles_DFT.push_back(ref_tile_DFT);
}
// Acquire alternate tiles and apply FFT on them as well
std::vector<std::vector<cv::Mat>> alt_tiles_list(reference_tiles.size());
int num_tiles_row = alternate_channel_i_list[0].rows / offset - 1;
int num_tiles_col = alternate_channel_i_list[0].cols / offset - 1;
std::vector<cv::Mat> alt_tiles;
for (int y = 0; y < num_tiles_row; ++y) {
for (int x = 0; x < num_tiles_col; ++x) {
alt_tiles.clear();
// Get reference tile location
int top_left_y = y * offset;
int top_left_x = x * offset;
for (int i = 0; i < alternate_channel_i_list.size(); ++i) {
// Get alignment displacement
int displacement_y, displacement_x;
// alignments[0] is the reference itself, hence the i + 1 offset
std::tie(displacement_y, displacement_x) = alignments[i + 1][y][x];
// Get tile
cv::Mat alt_tile = alternate_channel_i_list[i](cv::Rect(top_left_x + displacement_x, top_left_y + displacement_y, TILE_SIZE, TILE_SIZE));
// Apply FFT
cv::Mat alt_tile_DFT;
alt_tile.convertTo(alt_tile_DFT, CV_32F);
cv::dft(alt_tile_DFT, alt_tile_DFT, cv::DFT_COMPLEX_OUTPUT);
alt_tiles.push_back(alt_tile_DFT);
}
alt_tiles_list[y * num_tiles_col + x] = alt_tiles;
}
}
// 4.2 Temporal Denoising
reference_tiles_DFT = temporal_denoise(reference_tiles_DFT, alt_tiles_list, noise_variance, TEMPORAL_FACTOR);
// 4.3 Spatial Denoising
reference_tiles_DFT = spatial_denoise(reference_tiles_DFT, alternate_channel_i_list.size(), noise_variance, SPATIAL_FACTOR);
//now reference tiles are temporally and spatially denoised
// Apply IFFT on reference tiles (frequency to spatial)
std::vector<cv::Mat> denoised_tiles;
for (auto dft_tile : reference_tiles_DFT) {
cv::Mat denoised_tile;
// Normalize by tile area before the inverse transform (unscaled DFT)
cv::divide(dft_tile, TILE_SIZE * TILE_SIZE, dft_tile);
cv::dft(dft_tile, denoised_tile, cv::DFT_INVERSE | cv::DFT_REAL_OUTPUT);
denoised_tiles.push_back(denoised_tile);
}
reference_tiles = denoised_tiles;
// 4.4 Cosine Window Merging
// Process tiles through 2D cosine window
std::vector<cv::Mat> windowed_tiles;
for (auto tile : reference_tiles) {
windowed_tiles.push_back(cosineWindow2D(tile));
}
// Merge tiles
return mergeTiles(windowed_tiles, channel_image.rows, channel_image.cols);
}
// Temporal Wiener denoising of reference tiles against their aligned
// alternate tiles, in the frequency domain.
//
// @param tiles          2D DFT tiles of the reference image
// @param alt_tiles      per-reference-tile DFT tiles of each aligned alternate
// @param noise_variance per-tile noise variance estimates
// @param temporal_factor strength of the temporal shrinkage
// @return merged (denoised) DFT tiles
//
// Fix: the noise scaling previously used the macro TEMPORAL_FACTOR, silently
// ignoring the temporal_factor parameter; it now honors the parameter.
// (Existing callers pass TEMPORAL_FACTOR, so their behavior is unchanged.)
std::vector<cv::Mat> merge::temporal_denoise(std::vector<cv::Mat> tiles, std::vector<std::vector<cv::Mat>> alt_tiles, std::vector<float> noise_variance, float temporal_factor) {
    // Noise scaling shared by every tile
    double temporal_noise_scaling = (TILE_SIZE * TILE_SIZE * (2.0 / 16)) * temporal_factor;
    std::vector<cv::Mat> denoised;
    for (int i = 0; i < tiles.size(); ++i) {
        // Running sum of the pairwise-merged tiles, seeded with the reference
        cv::Mat tile_sum = tiles[i].clone();
        double coeff = temporal_noise_scaling * noise_variance[i];
        cv::Mat tile = tiles[i];
        std::vector<cv::Mat> alt_tiles_i = alt_tiles[i];
        for (int j = 0; j < alt_tiles_i.size(); ++j) {
            cv::Mat alt_tile = alt_tiles_i[j];
            // Complex difference between reference and alternate tile
            cv::Mat diff = tile - alt_tile;
            // |diff|^2 per frequency bin
            cv::Mat complexMats[2];
            cv::split(diff, complexMats); // planes[0] = Re, planes[1] = Im
            cv::magnitude(complexMats[0], complexMats[1], complexMats[0]);
            cv::Mat absolute_diff = complexMats[0].mul(complexMats[0]);
            // Shrinkage operator A = |diff|^2 / (|diff|^2 + coeff)
            cv::Mat shrinkage;
            cv::divide(absolute_diff, absolute_diff + coeff, shrinkage);
            cv::merge(std::vector<cv::Mat>{shrinkage, shrinkage}, shrinkage);
            // Wiener interpolation between alternate and reference
            tile_sum += alt_tile + diff.mul(shrinkage);
        }
        // Average over reference + alternates
        cv::divide(tile_sum, alt_tiles_i.size() + 1, tile_sum);
        denoised.push_back(tile_sum);
    }
    return denoised;
}
// Spatial Wiener denoising of DFT tiles: shrink each frequency bin by
// |DFT|^2 / (|DFT|^2 + |w| * coeff), where |w| is the bin's distance from the
// DC component (after ifftshift) and coeff scales with the noise estimate.
std::vector<cv::Mat> merge::spatial_denoise(std::vector<cv::Mat> tiles, int num_alts, std::vector<float> noise_variance, float spatial_factor) {
    double noise_scale = (TILE_SIZE * TILE_SIZE * (1.0 / 16)) * spatial_factor;
    // Build the |w| map: Euclidean distance of each bin from the center,
    // then ifftshift so DC lands at the origin.
    cv::Mat freq_rows = cv::Mat::zeros(1, TILE_SIZE, CV_32F);
    for (int k = 0; k < TILE_SIZE; ++k) {
        freq_rows.at<float>(k) = k - offset;
    }
    freq_rows = cv::repeat(freq_rows.t(), 1, TILE_SIZE);
    cv::Mat freq_cols = freq_rows.t();
    cv::Mat freq_dist;
    cv::sqrt(freq_rows.mul(freq_rows) + freq_cols.mul(freq_cols), freq_dist);
    ifftshift(freq_dist);
    std::vector<cv::Mat> result;
    for (size_t t = 0; t < tiles.size(); ++t) {
        cv::Mat dft_tile = tiles[t];
        // Noise coefficient drops with the number of frames already merged
        float coeff = noise_variance[t] / (num_alts + 1) * noise_scale;
        // Power spectrum |DFT|^2 per bin
        cv::Mat planes[2];
        cv::split(dft_tile, planes); // planes[0] = Re, planes[1] = Im
        cv::magnitude(planes[0], planes[1], planes[0]);
        cv::Mat power = planes[0].mul(planes[0]);
        // Per-bin shrinkage, duplicated onto both complex planes
        cv::Mat shrink;
        cv::divide(power, power + freq_dist * coeff, shrink);
        cv::merge(std::vector<cv::Mat>{shrink, shrink}, shrink);
        result.push_back(dft_tile.mul(shrink));
    }
    return result;
}
} // namespace hdrplus

@ -0,0 +1,53 @@
#include <iostream>
#include <opencv2/opencv.hpp> // all opencv header
#include <hdrplus/params.h>
namespace hdrplus
{
// Mirror the rawpy-style arguments onto LibRaw's dcraw processing parameters.
void setParams(std::shared_ptr<LibRaw>& libraw_ptr, RawpyArgs rawpyArgs){
    auto& p = libraw_ptr->imgdata.params;
    p.user_qual      = rawpyArgs.demosaic_algorithm;
    p.half_size      = rawpyArgs.half_size;
    p.use_camera_wb  = rawpyArgs.use_camera_wb;
    p.use_auto_wb    = rawpyArgs.use_auto_wb;
    p.no_auto_bright = rawpyArgs.no_auto_bright;
    p.output_color   = rawpyArgs.output_color;
    p.gamm[0]        = rawpyArgs.gamma[0];
    p.gamm[1]        = rawpyArgs.gamma[1];
    p.output_bps     = rawpyArgs.output_bps;
}
// Run LibRaw's dcraw processing (demosaic, WB, color, levels) and return the
// result as a cv::Mat.
//
// Fixes vs. the original:
//  - dcraw_make_mem_image's result is null-checked; on failure an empty Mat
//    is returned instead of dereferencing a null pointer.
//  - The returned Mat previously aliased LibRaw's image buffer (which was
//    also never freed -- a leak). The data is now cloned into a Mat that owns
//    its pixels, and the LibRaw buffer is released with dcraw_clear_mem.
cv::Mat postprocess(std::shared_ptr<LibRaw>& libraw_ptr, RawpyArgs rawpyArgs){
    std::cout<<"postprocessing..."<<std::endl;
    // set parameters
    setParams(libraw_ptr,rawpyArgs);
    std::cout<<"conversion to 16 bit using black and white levels, demosaicking, white balance, color correction..."<<std::endl;
    libraw_ptr->dcraw_process();
    int errorcode;
    libraw_processed_image_t *ret_img = libraw_ptr->dcraw_make_mem_image(&errorcode);
    if (ret_img == nullptr) {
        std::cout<<"dcraw_make_mem_image failed, error "<<errorcode<<std::endl;
        return cv::Mat();
    }
    // Pick the OpenCV type matching LibRaw's channel count and bit depth
    int opencv_type = CV_16UC3; // 16bit RGB
    if(ret_img->colors==1){ // grayscale
        opencv_type = (ret_img->bits == 8) ? CV_8UC1 : CV_16UC1;
    }else{ // RGB
        opencv_type = (ret_img->bits == 8) ? CV_8UC3 : CV_16UC3;
    }
    // Wrap LibRaw's buffer, then clone so the Mat owns its pixels and the
    // LibRaw allocation can be released immediately.
    cv::Mat processedImg = cv::Mat(ret_img->height, ret_img->width, opencv_type, ret_img->data).clone();
    LibRaw::dcraw_clear_mem(ret_img);
    std::cout<<"postprocess finished!"<<std::endl;
    return processedImg;
}
}

File diff suppressed because it is too large Load Diff

@ -0,0 +1,36 @@
// MACHINE GENERATED -- DO NOT EDIT
// Halide AOT registration shim: a file-scope static whose constructor
// registers the generated hdrplus_pipeline filter (argv wrapper + metadata)
// with the Halide runtime at program load time.
extern "C" {
struct halide_filter_metadata_t;
void halide_register_argv_and_metadata(
int (*filter_argv_call)(void **),
const struct halide_filter_metadata_t *filter_metadata,
const char * const *extra_key_value_pairs
);
}
extern "C" {
extern int hdrplus_pipeline_argv(void **args);
extern const struct halide_filter_metadata_t *hdrplus_pipeline_metadata();
}
#ifdef HALIDE_REGISTER_EXTRA_KEY_VALUE_PAIRS_FUNC
extern "C" const char * const *HALIDE_REGISTER_EXTRA_KEY_VALUE_PAIRS_FUNC();
#endif // HALIDE_REGISTER_EXTRA_KEY_VALUE_PAIRS_FUNC
namespace halide_nsreg_hdrplus_pipeline {
namespace {
struct Registerer {
Registerer() {
#ifdef HALIDE_REGISTER_EXTRA_KEY_VALUE_PAIRS_FUNC
halide_register_argv_and_metadata(::hdrplus_pipeline_argv, ::hdrplus_pipeline_metadata(), HALIDE_REGISTER_EXTRA_KEY_VALUE_PAIRS_FUNC());
#else
halide_register_argv_and_metadata(::hdrplus_pipeline_argv, ::hdrplus_pipeline_metadata(), nullptr);
#endif // HALIDE_REGISTER_EXTRA_KEY_VALUE_PAIRS_FUNC
}
};
// Static instance: its constructor runs before main()
static Registerer registerer;
} // namespace
} // halide_nsreg_hdrplus_pipeline

File diff suppressed because it is too large Load Diff

@ -0,0 +1,36 @@
// MACHINE GENERATED -- DO NOT EDIT
extern "C" {
struct halide_filter_metadata_t;
void halide_register_argv_and_metadata(
int (*filter_argv_call)(void **),
const struct halide_filter_metadata_t *filter_metadata,
const char * const *extra_key_value_pairs
);
}
extern "C" {
extern int hdrplus_pipeline_argv(void **args);
extern const struct halide_filter_metadata_t *hdrplus_pipeline_metadata();
}
#ifdef HALIDE_REGISTER_EXTRA_KEY_VALUE_PAIRS_FUNC
extern "C" const char * const *HALIDE_REGISTER_EXTRA_KEY_VALUE_PAIRS_FUNC();
#endif // HALIDE_REGISTER_EXTRA_KEY_VALUE_PAIRS_FUNC
namespace halide_nsreg_hdrplus_pipeline {
namespace {
struct Registerer {
Registerer() {
#ifdef HALIDE_REGISTER_EXTRA_KEY_VALUE_PAIRS_FUNC
halide_register_argv_and_metadata(::hdrplus_pipeline_argv, ::hdrplus_pipeline_metadata(), HALIDE_REGISTER_EXTRA_KEY_VALUE_PAIRS_FUNC());
#else
halide_register_argv_and_metadata(::hdrplus_pipeline_argv, ::hdrplus_pipeline_metadata(), nullptr);
#endif // HALIDE_REGISTER_EXTRA_KEY_VALUE_PAIRS_FUNC
}
};
static Registerer registerer;
} // namespace
} // halide_nsreg_hdrplus_pipeline

@ -0,0 +1,13 @@
// Public entry points for the Halide-based HDR+ pipeline.
// Fix: the include guard used `__HDRPLUS__`, a reserved identifier (leading
// double underscore); renamed to a conforming guard.
#ifndef HDRPLUS_H_
#define HDRPLUS_H_
#include <string>
#include <vector>
#include <opencv2/opencv.hpp> // all opencv header
// Run the pipeline on raw files named in_names under dir_path, writing the
// result image as out_name.
int doHdrPlus(const std::string& dir_path, const std::string& out_name, const std::vector<std::string>& in_names);
// Run the pipeline on in-memory raw file contents; the result is written
// into mat. Returns true on success.
bool doHdrPlus(const std::vector< std::vector<uint8_t> >& images, cv::Mat& mat);
#endif // HDRPLUS_H_

File diff suppressed because it is too large Load Diff

@ -0,0 +1,39 @@
#include "Burst.h"
// Stack all raw frames of the burst into one (width, height, frame) Halide
// buffer. Returns an empty buffer when the burst holds no frames.
// Fix: the frame loop used a signed int against Raws.size() (signed/unsigned
// comparison); it now uses size_t with an explicit cast at the API boundary.
Halide::Runtime::Buffer<uint16_t> Burst::ToBuffer() const {
    if (Raws.empty()) {
        return Halide::Runtime::Buffer<uint16_t>();
    }
    Halide::Runtime::Buffer<uint16_t> result(GetWidth(), GetHeight(),
                                             Raws.size());
    for (size_t i = 0; i < Raws.size(); ++i) {
        // Each frame is copied into its slice along dimension 2
        auto resultSlice = result.sliced(2, static_cast<int>(i));
        Raws[i].CopyToBuffer(resultSlice);
    }
    return result;
}
// Copy the whole burst into a caller-supplied buffer (materializes the
// stacked frames via ToBuffer first).
void Burst::CopyToBuffer(Halide::Runtime::Buffer<uint16_t> &buffer) const {
buffer.copy_from(ToBuffer());
}
std::vector<RawImage> Burst::LoadRaws(const std::vector< std::vector<uint8_t> >& images) {
std::vector<RawImage> result;
for (const auto &img : images) {
result.emplace_back(&img[0], img.size());
}
return result;
}
std::vector<RawImage> Burst::LoadRaws(const std::string &dirPath,
std::vector<std::string> &inputs) {
std::vector<RawImage> result;
for (const auto &input : inputs) {
const std::string img_path = dirPath + "/" + input;
result.emplace_back(img_path);
}
return result;
}
// Borrow the i-th raw frame. No bounds checking: the caller must keep i < Raws.size().
const RawImage &Burst::GetRaw(const size_t i) const { return this->Raws[i]; }

@ -0,0 +1,76 @@
#pragma once
#include "InputSource.h"
#include <hdrplus_pipeline.h>
#include <string>
#include <vector>
// A burst of raw frames, loaded either from files in a directory or from
// in-memory raw file contents. Burst-level metadata (levels, white balance,
// CFA pattern, CCM) is taken from the first frame; accessors return sentinel
// values when the burst is empty.
// Fix: removed the dead debug local `int aa = 0;` from the copy constructor.
class Burst {
public:
    Burst(std::string dir_path, std::vector<std::string> inputs)
        : Dir(std::move(dir_path)), Inputs(std::move(inputs)),
          Raws(LoadRaws(Dir, Inputs))
    {
    }
    Burst(const std::vector< std::vector<uint8_t> >& images)
        : Raws(LoadRaws(images))
    {
    }
    ~Burst() = default;
    // Member-wise copy (kept explicit for parity with the original source).
    Burst(const Burst& src)
    {
        this->Dir = src.Dir;
        this->Inputs = src.Inputs;
        this->Raws = src.Raws;
    }
    // Frame geometry; -1 when the burst is empty.
    int GetWidth() const { return Raws.empty() ? -1 : Raws[0].GetWidth(); }
    int GetHeight() const { return Raws.empty() ? -1 : Raws[0].GetHeight(); }
    int GetBlackLevel() const
    {
        return Raws.empty() ? -1 : Raws[0].GetScalarBlackLevel();
    }
    int GetWhiteLevel() const {
        return Raws.empty() ? -1 : Raws[0].GetWhiteLevel();
    }
    // White balance of the first frame; all -1 when empty.
    WhiteBalance GetWhiteBalance() const {
        return Raws.empty() ? WhiteBalance{-1, -1, -1, -1}
                            : Raws[0].GetWhiteBalance();
    }
    CfaPattern GetCfaPattern() const {
        return Raws.empty() ? CfaPattern::CFA_UNKNOWN : Raws[0].GetCfaPattern();
    }
    Halide::Runtime::Buffer<float> GetColorCorrectionMatrix() const {
        return Raws.empty() ? Halide::Runtime::Buffer<float>()
                            : Raws[0].GetColorCorrectionMatrix();
    }
    // Stack all frames into a (width, height, frame) buffer.
    Halide::Runtime::Buffer<uint16_t> ToBuffer() const;
    void CopyToBuffer(Halide::Runtime::Buffer<uint16_t> &buffer) const;
    const RawImage &GetRaw(const size_t i) const;
private:
    std::string Dir;                  // source directory (file-based bursts only)
    std::vector<std::string> Inputs;  // file names within Dir
    std::vector<RawImage> Raws;       // decoded frames
private:
    static std::vector<RawImage> LoadRaws(const std::string &dirPath,
                                          std::vector<std::string> &inputs);
    static std::vector<RawImage> LoadRaws(const std::vector< std::vector<uint8_t> >& images);
};

@ -0,0 +1,147 @@
#include <fstream>
#include <iostream>
#include <stdio.h>
#ifdef _DEBUG
#define STB_IMAGE_WRITE_IMPLEMENTATION
#include <include/stb_image_write.h>
#endif
#include <hdrplus_pipeline.h>
#include "Burst.h"
#include <include/HDRPlus.h>
// No-op override of Halide's AOT registration hook. The generated pipeline's
// Registerer calls this at load time; this app invokes hdrplus_pipeline()
// directly, so registration is intentionally empty.
// NOTE(review): this relies on the linker preferring this definition over any
// runtime-provided one -- confirm link order if registration is ever needed.
extern "C" void halide_register_argv_and_metadata(
int (*filter_argv_call)(void **),
const struct halide_filter_metadata_t *filter_metadata,
const char *const *extra_key_value_pairs) {
}
/*
* HDRPlus Class -- Houses file I/O, defines pipeline attributes and calls
* processes main stages of the pipeline.
*/
// Drives the Halide-generated hdrplus_pipeline over a Burst: gathers burst
// metadata (levels, white balance, CFA, CCM), runs the AOT pipeline, and
// returns an interleaved 8-bit RGB buffer.
class HDRPlus {
const Burst &burst;
public:
// Tone-mapping knobs forwarded to the Halide pipeline
const Compression c;
const Gain g;
HDRPlus(Burst& burst, const Compression c, const Gain g)
: burst(burst), c(c), g(g)
{
}
// Run the pipeline. On invalid input (fewer than two frames) the
// zero-initialized output buffer is returned instead of throwing
// (the throw is compiled out below).
Halide::Runtime::Buffer<uint8_t> process() {
const int width = burst.GetWidth();
const int height = burst.GetHeight();
// Planar (channel, x, y) at first; transposed to interleaved at the end
Halide::Runtime::Buffer<uint8_t> output_img(3, width, height);
#ifdef _DEBUG
std::cerr << "Black point: " << burst.GetBlackLevel() << std::endl;
std::cerr << "White point: " << burst.GetWhiteLevel() << std::endl;
#endif
const WhiteBalance wb = burst.GetWhiteBalance();
std::cerr << "RGGB: " << wb.r << " " << wb.g0 << " " << wb.g1 << " " << wb.b
<< std::endl;
Halide::Runtime::Buffer<uint16_t> imgs = burst.ToBuffer();
// Need a (width, height, frame) stack with at least two frames
if (imgs.dimensions() != 3 || imgs.extent(2) < 2) {
return output_img;
#if 0
throw std::invalid_argument(
"The input of HDRPlus must be a 3-dimensional buffer with at least "
"two channels.");
#endif
}
const int cfa_pattern = static_cast<int>(burst.GetCfaPattern());
auto ccm = burst.GetColorCorrectionMatrix();
hdrplus_pipeline(imgs, burst.GetBlackLevel(), burst.GetWhiteLevel(), wb.r,
wb.g0, wb.g1, wb.b, cfa_pattern, ccm, c, g, output_img);
// transpose to account for interleaved layout
output_img.transpose(0, 1);
output_img.transpose(1, 2);
return output_img;
}
#ifdef _DEBUG
// Debug-only: write the interleaved RGB buffer to a PNG via stb.
static bool save_png(const std::string &dir_path, const std::string &img_name,
const Halide::Runtime::Buffer<uint8_t> &img) {
const std::string img_path = dir_path + "/" + img_name;
const int stride_in_bytes = img.width() * img.channels();
if (!stbi_write_png(img_path.c_str(), img.width(), img.height(),
img.channels(), img.data(), stride_in_bytes)) {
std::cerr << "Unable to write output image '" << img_name << "'"
<< std::endl;
return false;
}
return true;
}
#endif
};
bool doHdrPlus(const std::vector< std::vector<uint8_t> >& images, cv::Mat& mat)
{
Compression c = 3.8f;
Gain g = 1.1f;
Burst burst(images);
HDRPlus hdr_plus(burst, c, g);
Halide::Runtime::Buffer<uint8_t> outputHdr = hdr_plus.process();
#ifdef _DEBUG
HDRPlus::save_png("/sdcard/com.xypower.mpapp/tmp", "2.png", outputHdr);
#endif
int width = outputHdr.width();
int height = outputHdr.height();
int channels = outputHdr.channels();
int jch = 0;
mat = cv::Mat::zeros(height, width, CV_8UC3);
for (int i = 0; i < height; ++i)
{
jch = 0;
for (int j = 0; j < width; ++j)
{
for (int n = 0; n < channels; ++n)
{
mat.at<uchar>(i, jch + n) = (uchar)outputHdr(j, i, n);
}
jch += channels;
}
}
// if (!HDRPlus::save_png(dir_path, out_name, output)) {
return true;
}
#if 0
// Disabled: original file-based entry point that loaded raw files from
// dir_path, ran the same pipeline, and saved the result as a PNG.
// Kept for reference only; superseded by the in-memory doHdrPlus above.
int doHdrPlus(const std::string& dir_path, const std::string& out_name, const std::vector<std::string>& in_names) {
Compression c = 3.8f;
Gain g = 1.1f;
Burst burst(dir_path, in_names);
HDRPlus hdr_plus(burst, c, g);
Halide::Runtime::Buffer<uint8_t> output = hdr_plus.process();
if (!HDRPlus::save_png(dir_path, out_name, output)) {
return EXIT_FAILURE;
}
return 0;
}
#endif

@ -0,0 +1,154 @@
#include "InputSource.h"
#include <algorithm>
#include <unordered_map>
#include "LibRaw2DngConverter.h"
// Builds a RawImage backed by a raw file on disk.
//
// LibRaw failures (open/unpack/raw2image) are logged to stderr but do not
// abort construction — the throwing paths are compiled out, so the caller
// receives an object wrapping whatever state LibRaw reached.
RawImage::RawImage(const std::string &path)
    : Path(path), RawProcessor(std::make_shared<LibRaw>()) {
    // TODO: Check LibRaw parameters.
    // RawProcessor->imgdata.params.X = Y;
    std::cerr << "Opening " << path << std::endl;
    const int open_err = RawProcessor->open_file(path.c_str());
    if (open_err != 0) {
        std::cerr << "Cannot open file " << path
                  << " error: " << libraw_strerror(open_err) << std::endl;
#if 0
        throw std::runtime_error("Error opening " + path);
#endif
    }
    const int unpack_err = RawProcessor->unpack();
    if (unpack_err != 0) {
        std::cerr << "Cannot unpack file " << path
                  << " error: " << libraw_strerror(unpack_err) << std::endl;
#if 0
        throw std::runtime_error("Error opening " + path);
#endif
    }
    const int raw2image_err = RawProcessor->raw2image();
    if (raw2image_err != 0) {
        std::cerr << "Cannot do raw2image on " << path
                  << " error: " << libraw_strerror(raw2image_err) << std::endl;
#if 0
        throw std::runtime_error("Error opening " + path);
#endif
    }
}
// Builds a RawImage from an in-memory raw buffer.
// As with the file-based constructor, LibRaw failures are logged to stderr
// but not fatal: the throwing paths are compiled out.
RawImage::RawImage(const uint8_t* data, size_t length)
    : RawProcessor(std::make_shared<LibRaw>())
{
    std::cerr << "Opening raw from memory" << std::endl;
    if (int err = RawProcessor->open_buffer((void *)data, length)) {
        std::cerr << "Cannot open raw from memory" << " error: " << libraw_strerror(err) << std::endl;
#if 0
        throw std::runtime_error("Error opening raw");
#endif
    }
    if (int err = RawProcessor->unpack()) {
        std::cerr << "Cannot unpack raw from memory " << " error: " << libraw_strerror(err) << std::endl;
#if 0
        // Fixed: this disabled path previously referenced `path`, which does
        // not exist in this constructor and would not compile if re-enabled.
        throw std::runtime_error("Error unpacking raw");
#endif
    }
    if (int ret = RawProcessor->raw2image()) {
        std::cerr << "Cannot do raw2image" << " error: " << libraw_strerror(ret) << std::endl;
#if 0
        // Fixed for the same reason as above.
        throw std::runtime_error("Error in raw2image");
#endif
    }
}
// Returns the camera white-balance multipliers normalised so that both
// green channels are exactly 1.0 (order: r, g0, g1, b).
WhiteBalance RawImage::GetWhiteBalance() const {
    const auto &mul = RawProcessor->imgdata.color.cam_mul;
    const float green = mul[1];
    return WhiteBalance{mul[0] / green, 1.f, 1.f, mul[2] / green};
}
// Copies the sensor's active area into `buffer`.
// The full sensor frame (raw_width x raw_height) is wrapped in a non-owning
// Halide buffer, then shifted by the top/left margins so copy_from() starts
// reading at the active-area origin; `buffer` keeps its own extents.
void RawImage::CopyToBuffer(Halide::Runtime::Buffer<uint16_t> &buffer) const {
const auto image_data = (uint16_t *)RawProcessor->imgdata.rawdata.raw_image;
const auto raw_width = RawProcessor->imgdata.rawdata.sizes.raw_width;
const auto raw_height = RawProcessor->imgdata.rawdata.sizes.raw_height;
const auto top = RawProcessor->imgdata.rawdata.sizes.top_margin;
const auto left = RawProcessor->imgdata.rawdata.sizes.left_margin;
// Non-owning view over LibRaw's frame; valid only while RawProcessor lives.
Halide::Runtime::Buffer<uint16_t> raw_buffer(image_data, raw_width,
raw_height);
buffer.copy_from(raw_buffer.translated({-left, -top}));
}
// Serialises this raw image to a DNG file at `output_path`, using the pixel
// data in `buffer` rather than LibRaw's internal frame.
void RawImage::WriteDng(const std::string &output_path,
                        const Halide::Runtime::Buffer<uint16_t> &buffer) const {
    LibRaw2DngConverter dng(*this);
    dng.SetBuffer(buffer);
    dng.Write(output_path);
}
// Returns the per-channel black levels: base black plus the per-channel
// cblack offsets, optionally overridden by a 2x2 per-pattern block.
// See https://www.libraw.org/node/2471
std::array<float, 4> RawImage::GetBlackLevel() const {
    // Take colordata by reference: libraw_colordata_t is a large struct and
    // the original `const auto` copied it on every call.
    const auto &raw_color = RawProcessor->imgdata.color;
    const auto base_black_level = static_cast<float>(raw_color.black);
    std::array<float, 4> black_level = {
        base_black_level + static_cast<float>(raw_color.cblack[0]),
        base_black_level + static_cast<float>(raw_color.cblack[1]),
        base_black_level + static_cast<float>(raw_color.cblack[2]),
        base_black_level + static_cast<float>(raw_color.cblack[3])};
    // cblack[4] x cblack[5] describe an optional black-level pattern block;
    // only the 2x2 case is handled, replacing the values computed above.
    if (raw_color.cblack[4] == 2 && raw_color.cblack[5] == 2) {
        for (int x = 0; x < raw_color.cblack[4]; ++x) {
            for (int y = 0; y < raw_color.cblack[5]; ++y) {
                const auto index = y * 2 + x;
                black_level[index] = raw_color.cblack[6 + index];
            }
        }
    }
    return black_level;
}
int RawImage::GetScalarBlackLevel() const {
const auto black_level = GetBlackLevel();
return static_cast<int>(
*std::min_element(black_level.begin(), black_level.end()));
}
// Returns the 2x2 CFA layout as a 4-byte string of colour indices
// (0 = red, 1 = green, 2 = blue) in row-major order: (0,0), (0,1), (1,0),
// (1,1). LibRaw's COLOR(row, col) gives an index into cdesc;
// unordered_map::at throws for colour letters other than R/G/B.
std::string RawImage::GetCfaPatternString() const {
static const std::unordered_map<char, char> CDESC_TO_CFA = {
{'R', 0}, {'G', 1}, {'B', 2}, {'r', 0}, {'g', 1}, {'b', 2}};
const auto &cdesc = RawProcessor->imgdata.idata.cdesc;
return {CDESC_TO_CFA.at(cdesc[RawProcessor->COLOR(0, 0)]),
CDESC_TO_CFA.at(cdesc[RawProcessor->COLOR(0, 1)]),
CDESC_TO_CFA.at(cdesc[RawProcessor->COLOR(1, 0)]),
CDESC_TO_CFA.at(cdesc[RawProcessor->COLOR(1, 1)])};
}
// Maps the 2x2 colour-index string from GetCfaPatternString() onto the
// known Bayer layouts; unknown layouts yield CFA_UNKNOWN (the throwing
// path is compiled out).
CfaPattern RawImage::GetCfaPattern() const {
    const std::string pattern = GetCfaPatternString();
    const std::string rggb{0, 1, 1, 2};
    const std::string grbg{1, 0, 2, 1};
    const std::string bggr{2, 1, 1, 0};
    const std::string gbrg{1, 2, 0, 1};
    if (pattern == rggb) {
        return CfaPattern::CFA_RGGB;
    }
    if (pattern == grbg) {
        return CfaPattern::CFA_GRBG;
    }
    if (pattern == bggr) {
        return CfaPattern::CFA_BGGR;
    }
    if (pattern == gbrg) {
        return CfaPattern::CFA_GBRG;
    }
#if 0
    throw std::invalid_argument("Unsupported CFA pattern: " + pattern);
#endif
    return CfaPattern::CFA_UNKNOWN;
}
// Returns LibRaw's camera-to-sRGB matrix as a 3x3 Halide buffer.
// Note the index swap: ccm(i, j) = rgb_cam[j][i], i.e. the matrix is
// transposed into the layout the pipeline expects.
Halide::Runtime::Buffer<float> RawImage::GetColorCorrectionMatrix() const {
    // Reference, not copy — libraw_colordata_t is a large struct.
    const auto &raw_color = RawProcessor->imgdata.color;
    Halide::Runtime::Buffer<float> ccm(3, 3);
    for (int i = 0; i < 3; ++i) {
        for (int j = 0; j < 3; ++j) {
            ccm(i, j) = raw_color.rgb_cam[j][i];
        }
    }
    return ccm;
}

@ -0,0 +1,47 @@
#pragma once
#include <array>
#include <string>
#include <libraw/libraw.h>
#include "finish.h"
#include <HalideBuffer.h>
// Thin wrapper around a LibRaw instance holding one decoded raw frame.
// All getters read directly from LibRaw's imgdata, so they are only
// meaningful after the constructor's open/unpack/raw2image succeeded.
class RawImage {
public:
explicit RawImage(const std::string &path);
explicit RawImage(const uint8_t* data, size_t length);
~RawImage() = default;
// Active-area dimensions (sensor margins excluded).
int GetWidth() const { return RawProcessor->imgdata.rawdata.sizes.width; }
int GetHeight() const { return RawProcessor->imgdata.rawdata.sizes.height; }
// Smallest of the four per-channel black levels, truncated to int.
int GetScalarBlackLevel() const;
// Per-channel black levels in CFA order.
std::array<float, 4> GetBlackLevel() const;
// Sensor saturation (white) level.
int GetWhiteLevel() const { return RawProcessor->imgdata.color.maximum; }
// White-balance multipliers normalised to the green channel.
WhiteBalance GetWhiteBalance() const;
// 4-byte colour-index string for the 2x2 CFA quad (0=R, 1=G, 2=B).
std::string GetCfaPatternString() const;
CfaPattern GetCfaPattern() const;
Halide::Runtime::Buffer<float> GetColorCorrectionMatrix() const;
// Copies the active area of the sensor data into `buffer`.
void CopyToBuffer(Halide::Runtime::Buffer<uint16_t> &buffer) const;
// Writes current RawImage as DNG. If buffer was provided, then use it instead
// of internal buffer.
void WriteDng(const std::string &path,
const Halide::Runtime::Buffer<uint16_t> &buffer = {}) const;
std::shared_ptr<LibRaw> GetRawProcessor() const { return RawProcessor; }
private:
std::string Path; // source file path; empty for the in-memory constructor
std::shared_ptr<LibRaw> RawProcessor;
};

@ -0,0 +1,95 @@
#include "LibRaw2DngConverter.h"
#include <unordered_map>
#include <libraw/libraw.h>
#include "InputSource.h"
// Builds a DNG writer for `raw`.
// Member-initialisation order matters here: OutputStream and Raw must be
// initialised before Tiff, because TIFFStreamOpen captures a pointer to
// OutputStream and SetTiffFields reads Raw. The declaration order in the
// header guarantees this.
LibRaw2DngConverter::LibRaw2DngConverter(const RawImage &raw)
: OutputStream(), Raw(raw),
Tiff(SetTiffFields(
TiffPtr(TIFFStreamOpen("", &OutputStream), TIFFClose))) {}
// Populates the fixed DNG/TIFF metadata tags (versions, CFA layout, colour
// matrix, black/white levels, orientation) on the wrapped handle and returns
// the same handle so it can be used inside the constructor initialiser list.
LibRaw2DngConverter::TiffPtr
LibRaw2DngConverter::SetTiffFields(LibRaw2DngConverter::TiffPtr tiff_ptr) {
    const auto RawProcessor = Raw.GetRawProcessor();
    // Reference, not copy: libraw_colordata_t is a large struct.
    const auto &raw_color = RawProcessor->imgdata.color;
    const uint16_t bayer_pattern_dimensions[] = {2, 2};
    const auto tiff = tiff_ptr.get();
    TIFFSetField(tiff, TIFFTAG_DNGVERSION, "\01\04\00\00");
    TIFFSetField(tiff, TIFFTAG_DNGBACKWARDVERSION, "\01\04\00\00");
    TIFFSetField(tiff, TIFFTAG_SUBFILETYPE, 0);
    TIFFSetField(tiff, TIFFTAG_COMPRESSION, COMPRESSION_NONE);
    TIFFSetField(tiff, TIFFTAG_BITSPERSAMPLE, 16);
    TIFFSetField(tiff, TIFFTAG_ROWSPERSTRIP, 1);
    TIFFSetField(tiff, TIFFTAG_ORIENTATION, ORIENTATION_TOPLEFT);
    TIFFSetField(tiff, TIFFTAG_PHOTOMETRIC, PHOTOMETRIC_CFA);
    TIFFSetField(tiff, TIFFTAG_SAMPLESPERPIXEL, 1);
    TIFFSetField(tiff, TIFFTAG_PLANARCONFIG, PLANARCONFIG_CONTIG);
    TIFFSetField(tiff, TIFFTAG_SAMPLEFORMAT, SAMPLEFORMAT_UINT);
    TIFFSetField(tiff, TIFFTAG_CFAREPEATPATTERNDIM, &bayer_pattern_dimensions);
    const std::string cfa = Raw.GetCfaPatternString();
    TIFFSetField(tiff, TIFFTAG_CFAPATTERN, cfa.c_str());
    TIFFSetField(tiff, TIFFTAG_MAKE, "hdr-plus");
    TIFFSetField(tiff, TIFFTAG_UNIQUECAMERAMODEL, "hdr-plus");
    const std::array<float, 9> color_matrix = {
        raw_color.cam_xyz[0][0], raw_color.cam_xyz[0][1], raw_color.cam_xyz[0][2],
        raw_color.cam_xyz[1][0], raw_color.cam_xyz[1][1], raw_color.cam_xyz[1][2],
        raw_color.cam_xyz[2][0], raw_color.cam_xyz[2][1], raw_color.cam_xyz[2][2],
    };
    TIFFSetField(tiff, TIFFTAG_COLORMATRIX1, 9, &color_matrix);
    TIFFSetField(tiff, TIFFTAG_CALIBRATIONILLUMINANT1, 21); // D65
    const std::array<float, 3> as_shot_neutral = {
        1.f / (raw_color.cam_mul[0] / raw_color.cam_mul[1]), 1.f,
        1.f / (raw_color.cam_mul[2] / raw_color.cam_mul[1])};
    TIFFSetField(tiff, TIFFTAG_ASSHOTNEUTRAL, 3, &as_shot_neutral);
    TIFFSetField(tiff, TIFFTAG_CFALAYOUT, 1); // Rectangular (or square) layout
    TIFFSetField(
        tiff, TIFFTAG_CFAPLANECOLOR, 3,
        "\00\01\02"); // RGB
    // https://www.awaresystems.be/imaging/tiff/tifftags/cfaplanecolor.html
    const std::array<float, 4> black_level = Raw.GetBlackLevel();
    TIFFSetField(tiff, TIFFTAG_BLACKLEVEL, 4, &black_level);
    // Fixed: white_level used to be declared `static`, so it kept the value
    // from the first converter instance for the whole process lifetime —
    // later images with a different saturation level got a stale white point.
    // TIFFSetField copies the value, so a plain local is correct.
    const uint32_t white_level = raw_color.maximum;
    TIFFSetField(tiff, TIFFTAG_WHITELEVEL, 1, &white_level);
    if (RawProcessor->imgdata.sizes.flip > 0) {
        // Seems that LibRaw uses LibTIFF notation.
        TIFFSetField(tiff, TIFFTAG_ORIENTATION, RawProcessor->imgdata.sizes.flip);
    } else {
        TIFFSetField(tiff, TIFFTAG_ORIENTATION, ORIENTATION_TOPLEFT);
    }
    return tiff_ptr;
}
// Records the image dimensions and writes every scanline of `buffer` into
// the TIFF directory.
// NOTE(review): assumes rows of `buffer` are contiguous with stride equal
// to width() (row_pointer is advanced by `width` elements per row) —
// confirm for buffers that are cropped/translated views.
void LibRaw2DngConverter::SetBuffer(
const Halide::Runtime::Buffer<uint16_t> &buffer) const {
const auto width = buffer.width();
const auto height = buffer.height();
const auto tiff = Tiff.get();
TIFFSetField(tiff, TIFFTAG_IMAGEWIDTH, width);
TIFFSetField(tiff, TIFFTAG_IMAGELENGTH, height);
uint16_t *row_pointer = buffer.data();
for (int row = 0; row < height; row++) {
TIFFWriteScanline(tiff, row_pointer, row, 0);
row_pointer += width;
}
}
// Finalises the current TIFF directory (checkpoint + flush, without closing
// the handle) and dumps the accumulated in-memory stream to `path`.
void LibRaw2DngConverter::Write(const std::string &path) const {
TIFFCheckpointDirectory(Tiff.get());
TIFFFlush(Tiff.get());
std::ofstream output(path, std::ofstream::binary);
output << OutputStream.str();
}

@ -0,0 +1,26 @@
#pragma once
#include <sstream>
#include <tiffio.h>
#include <tiffio.hxx>
#include <HalideBuffer.h>
class RawImage;
/**
 * Converts a RawImage into a DNG (TIFF with DNG tags) built in memory;
 * Write() dumps the accumulated bytes to a file.
 */
class LibRaw2DngConverter {
using TiffPtr = std::shared_ptr<TIFF>;
// Fills the static DNG metadata tags and returns the handle passed in.
TiffPtr SetTiffFields(TiffPtr tiff_ptr);
public:
explicit LibRaw2DngConverter(const RawImage &raw);
// Sets image dimensions and writes the scanlines from `buffer`.
void SetBuffer(const Halide::Runtime::Buffer<uint16_t> &buffer) const;
// Flushes the TIFF stream and saves it to `path`.
void Write(const std::string &path) const;
private:
std::ostringstream OutputStream; // destination for all TIFF output
const RawImage &Raw;             // must outlive this converter
std::shared_ptr<TIFF> Tiff;      // closed via TIFFClose deleter
};

@ -0,0 +1,36 @@
#ifndef HDRPLUS_FINISH_H_
#define HDRPLUS_FINISH_H_
#include <hdrplus_pipeline.h>
// Per-channel white-balance multipliers for a Bayer quad (r, g0, g1, b).
template <class T = float> struct TypedWhiteBalance {
// Converting constructor between element types (e.g. double -> float).
template <class TT>
explicit TypedWhiteBalance(const TypedWhiteBalance<TT> &other)
: r(other.r), g0(other.g0), g1(other.g1), b(other.b) {}
TypedWhiteBalance(T r, T g0, T g1, T b) : r(r), g0(g0), g1(g1), b(b) {}
T r;
T g0; // first green channel
T g1; // second green channel
T b;
};
using WhiteBalance = TypedWhiteBalance<float>;
// Scalar sensor levels and tone-mapping parameters shared by the pipeline.
typedef uint16_t BlackPoint;
typedef uint16_t WhitePoint;
typedef float Compression;
typedef float Gain;
// 2x2 Bayer colour-filter-array layouts; the numeric values are passed as
// plain ints into the generated pipeline, so they must stay stable.
enum class CfaPattern : int {
CFA_UNKNOWN = 0,
CFA_RGGB = 1,
CFA_GRBG = 2,
CFA_BGGR = 3,
CFA_GBRG = 4
};
#endif

@ -0,0 +1,60 @@
// Copyright 2014 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// AOSP-derived img_utils (TIFF/DNG writing helpers), built as a shared
// library. Headers are exported from include/.
cc_library_shared {
    name: "libimg_utils",
    srcs: [
        "src/EndianUtils.cpp",
        "src/FileInput.cpp",
        "src/FileOutput.cpp",
        "src/SortedEntryVector.cpp",
        "src/Input.cpp",
        "src/Output.cpp",
        "src/Orderable.cpp",
        "src/TiffIfd.cpp",
        "src/TiffWritable.cpp",
        "src/TiffWriter.cpp",
        "src/TiffEntry.cpp",
        "src/TiffEntryImpl.cpp",
        "src/ByteArrayOutput.cpp",
        "src/DngUtils.cpp",
        "src/StripSource.cpp",
    ],
    shared_libs: [
        "liblog",
        "libutils",
        "libcutils",
    ],
    cflags: [
        "-Wall",
        "-Wextra",
        "-Werror",
        "-fvisibility=hidden",
    ],
    product_variables: {
        debuggable: {
            // Enable assert() in eng builds
            cflags: [
                "-UNDEBUG",
                "-DLOG_NDEBUG=1",
            ],
        },
    },
    export_include_dirs: ["include"],
}

@ -0,0 +1,83 @@
/*
* Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef IMG_UTILS_BYTE_ARRAY_OUTPUT_H
#define IMG_UTILS_BYTE_ARRAY_OUTPUT_H
#include <img_utils/Output.h>
#include <utils/Errors.h>
// #include <utils/Vector.h>
#include <cutils/compiler.h>
#include <stdint.h>
#include <vector>
namespace android {
namespace img_utils {
/**
* Utility class that accumulates written bytes into a buffer.
*/
class ANDROID_API ByteArrayOutput : public Output {
public:
ByteArrayOutput();
virtual ~ByteArrayOutput();
/**
 * Open this ByteArrayOutput.
 *
 * Returns OK on success, or a negative error code.
 */
virtual status_t open();
/**
 * Write bytes from the given buffer. The number of bytes given in the count
 * argument will be written. Bytes will be written from the given buffer starting
 * at the index given in the offset argument.
 *
 * Returns OK on success, or a negative error code.
 */
virtual status_t write(const uint8_t* buf, size_t offset, size_t count);
/**
 * Close this ByteArrayOutput.
 *
 * Returns OK on success, or a negative error code.
 */
virtual status_t close();
/**
 * Get current size of the array of bytes written.
 */
virtual size_t getSize() const;
/**
 * Get pointer to array of bytes written. It is not valid to use this pointer if
 * open, write, or close is called after this method.
 */
virtual const uint8_t* getArray() const;
protected:
// Backing store for the accumulated bytes; exposed via getArray()/getSize().
std::vector<uint8_t> mByteArray;
};
} /*namespace img_utils*/
} /*namespace android*/
#endif /*IMG_UTILS_BYTE_ARRAY_OUTPUT_H*/

@ -0,0 +1,232 @@
/*
* Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef IMG_UTILS_DNG_UTILS_H
#define IMG_UTILS_DNG_UTILS_H
#include <img_utils/ByteArrayOutput.h>
#include <img_utils/EndianUtils.h>
#include <utils/Errors.h>
#include <utils/Log.h>
#include <utils/RefBase.h>
#include <cutils/compiler.h>
#include <stdint.h>
namespace android {
namespace img_utils {
// Number of elements in a fixed-size array.
#define NELEMS(x) ((int) (sizeof(x) / sizeof((x)[0])))
// Clamp x into [low, high]. NOTE: macro arguments are evaluated more than
// once — do not pass expressions with side effects.
#define CLAMP(x, low, high) (((x) > (high)) ? (high) : (((x) < (low)) ? (low) : (x)))
/**
* Utility class for building values for the OpcodeList tags specified
* in the Adobe DNG 1.4 spec.
*/
class ANDROID_API OpcodeListBuilder : public LightRefBase<OpcodeListBuilder> {
public:
// Note that the Adobe DNG 1.4 spec for Bayer phase (defined for the
// FixBadPixelsConstant and FixBadPixelsList opcodes) is incorrect. It's
// inconsistent with the DNG SDK (cf. dng_negative::SetBayerMosaic and
// dng_opcode_FixBadPixelsList::IsGreen), and Adobe confirms that the
// spec should be updated to match the SDK.
enum CfaLayout {
CFA_GRBG = 0,
CFA_RGGB,
CFA_BGGR,
CFA_GBRG,
CFA_NONE,
};
OpcodeListBuilder();
virtual ~OpcodeListBuilder();
/**
 * Get the total size of this opcode list in bytes.
 */
virtual size_t getSize() const;
/**
 * Get the number of opcodes defined in this list.
 */
virtual uint32_t getCount() const;
/**
 * Write the opcode list into the given buffer. This buffer
 * must be able to hold at least as many elements as returned
 * by calling the getSize() method.
 *
 * Returns OK on success, or a negative error code.
 */
virtual status_t buildOpList(/*out*/ uint8_t* buf) const;
/**
 * Add GainMap opcode(s) for the given metadata parameters. The given
 * CFA layout must match the layout of the shading map passed into the
 * lensShadingMap parameter.
 *
 * Returns OK on success, or a negative error code.
 */
virtual status_t addGainMapsForMetadata(uint32_t lsmWidth,
uint32_t lsmHeight,
uint32_t activeAreaTop,
uint32_t activeAreaLeft,
uint32_t activeAreaBottom,
uint32_t activeAreaRight,
CfaLayout cfa,
const float* lensShadingMap);
/**
 * Add a GainMap opcode with the given fields. The mapGains array
 * must have mapPointsV * mapPointsH * mapPlanes elements.
 *
 * Returns OK on success, or a negative error code.
 */
virtual status_t addGainMap(uint32_t top,
uint32_t left,
uint32_t bottom,
uint32_t right,
uint32_t plane,
uint32_t planes,
uint32_t rowPitch,
uint32_t colPitch,
uint32_t mapPointsV,
uint32_t mapPointsH,
double mapSpacingV,
double mapSpacingH,
double mapOriginV,
double mapOriginH,
uint32_t mapPlanes,
const float* mapGains);
/**
 * Add WarpRectilinear opcode for the given metadata parameters.
 *
 * Returns OK on success, or a negative error code.
 */
virtual status_t addWarpRectilinearForMetadata(const float* kCoeffs,
uint32_t activeArrayWidth,
uint32_t activeArrayHeight,
float opticalCenterX,
float opticalCenterY);
/**
 * Add a WarpRectilinear opcode.
 *
 * numPlanes - Number of planes included in this opcode.
 * opticalCenterX, opticalCenterY - Normalized x,y coordinates of the sensor optical
 *          center relative to the top,left pixel of the produced images (e.g. [0.5, 0.5]
 *          gives a sensor optical center in the image center.
 * kCoeffs - A list of coefficients for the polynomial equation representing the distortion
 *          correction.  For each plane, 6 coefficients must be included:
 *          {k_r0, k_r1, k_r2, k_r3, k_t0, k_t1}.  See the DNG 1.4 specification for an
 *          outline of the polynomial used here.
 *
 * Returns OK on success, or a negative error code.
 */
virtual status_t addWarpRectilinear(uint32_t numPlanes,
double opticalCenterX,
double opticalCenterY,
const double* kCoeffs);
/**
 * Add FixBadPixelsList opcode for the given metadata parameters.
 *
 * Returns OK on success, or a negative error code.
 */
virtual status_t addBadPixelListForMetadata(const uint32_t* hotPixels,
uint32_t xyPairCount,
uint32_t colorFilterArrangement);
/**
 * Add FixBadPixelsList opcode.
 *
 * bayerPhase - 0=top-left of image is red, 1=top-left of image is green pixel in red row,
 *          2=top-left of image is green pixel in blue row, 3=top-left of image is
 *          blue.
 * badPointCount - number of (x,y) pairs of bad pixels are given in badPointRowColPairs.
 * badRectCount - number of (top, left, bottom, right) tuples are given in
 *          badRectTopLeftBottomRightTuples
 *
 * Returns OK on success, or a negative error code.
 */
virtual status_t addBadPixelList(uint32_t bayerPhase,
uint32_t badPointCount,
uint32_t badRectCount,
const uint32_t* badPointRowColPairs,
const uint32_t* badRectTopLeftBottomRightTuples);
// TODO: Add other Opcode methods
protected:
static const uint32_t FLAG_OPTIONAL = 0x1u;
static const uint32_t FLAG_OPTIONAL_FOR_PREVIEW = 0x2u;
// Opcode IDs
enum {
WARP_RECTILINEAR_ID = 1,
FIX_BAD_PIXELS_LIST = 5,
GAIN_MAP_ID = 9,
};
// LSM mosaic indices
enum {
LSM_R_IND = 0,
LSM_GE_IND = 1,
LSM_GO_IND = 2,
LSM_B_IND = 3,
};
uint32_t mCount;         // number of opcodes added so far (see getCount)
ByteArrayOutput mOpList; // serialised opcode bytes
EndianOutput mEndianOut; // endian-aware writer wrapping mOpList
status_t addOpcodePreamble(uint32_t opcodeId);
private:
/**
 * Add Bayer GainMap opcode(s) for the given metadata parameters.
 * CFA layout must match the layout of the shading map passed into the
 * lensShadingMap parameter.
 *
 * Returns OK on success, or a negative error code.
 */
status_t addBayerGainMapsForMetadata(uint32_t lsmWidth,
uint32_t lsmHeight,
uint32_t activeAreaWidth,
uint32_t activeAreaHeight,
CfaLayout cfa,
const float* lensShadingMap);
/**
 * Add Bayer GainMap opcode(s) for the given metadata parameters.
 * CFA layout must match the layout of the shading map passed into the
 * lensShadingMap parameter.
 *
 * Returns OK on success, or a negative error code.
 */
status_t addMonochromeGainMapsForMetadata(uint32_t lsmWidth,
uint32_t lsmHeight,
uint32_t activeAreaWidth,
uint32_t activeAreaHeight,
const float* lensShadingMap);
};
} /*namespace img_utils*/
} /*namespace android*/
#endif /*IMG_UTILS_DNG_UTILS_H*/

@ -0,0 +1,250 @@
/*
* Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef IMG_UTILS_ENDIAN_UTILS
#define IMG_UTILS_ENDIAN_UTILS
#include <img_utils/Output.h>
#include <cutils/compiler.h>
#include <utils/Errors.h>
#include <stdint.h>
#include <endian.h>
#include <assert.h>
namespace android {
namespace img_utils {
/**
* Endianness types supported.
*/
/**
 * Endianness types supported.
 */
enum ANDROID_API Endianness {
UNDEFINED_ENDIAN, // Default endianness will be used.
BIG,
LITTLE
};
/**
 * Convert from the native device endianness to big endian.
 * Only the integer specialisations defined below are provided.
 */
template<typename T>
T convertToBigEndian(T in);
/**
 * Convert from the native device endianness to little endian.
 * Only the integer specialisations defined below are provided.
 */
template<typename T>
T convertToLittleEndian(T in);
/**
* A utility class for writing to an Output with the given endianness.
*/
class ANDROID_API EndianOutput : public Output {
public:
/**
 * Wrap the given Output. Calling write methods will result in
 * writes to this output.
 */
explicit EndianOutput(Output* out, Endianness end=LITTLE);
virtual ~EndianOutput();
/**
 * Call open on the wrapped output.
 */
virtual status_t open();
/**
 * Call close on the wrapped output.
 */
virtual status_t close();
/**
 * Set the endianness to use when writing.
 */
virtual void setEndianness(Endianness end);
/**
 * Get the currently configured endianness.
 */
virtual Endianness getEndianness() const;
/**
 * Get the current number of bytes written by this EndianOutput.
 */
virtual uint32_t getCurrentOffset() const;
// TODO: switch write methods to uint32_t instead of size_t,
// the max size of a TIFF files is bounded
/**
 * The following methods will write elements from given input buffer to the output.
 * Count elements in the buffer will be written with the endianness set for this
 * EndianOutput. If the given offset is greater than zero, that many elements will
 * be skipped in the buffer before writing.
 *
 * Returns OK on success, or a negative error code.
 */
virtual status_t write(const uint8_t* buf, size_t offset, size_t count);
virtual status_t write(const int8_t* buf, size_t offset, size_t count);
virtual status_t write(const uint16_t* buf, size_t offset, size_t count);
virtual status_t write(const int16_t* buf, size_t offset, size_t count);
virtual status_t write(const uint32_t* buf, size_t offset, size_t count);
virtual status_t write(const int32_t* buf, size_t offset, size_t count);
virtual status_t write(const uint64_t* buf, size_t offset, size_t count);
virtual status_t write(const int64_t* buf, size_t offset, size_t count);
virtual status_t write(const float* buf, size_t offset, size_t count);
virtual status_t write(const double* buf, size_t offset, size_t count);
protected:
// Shared implementation behind all write() overloads.
template<typename T>
inline status_t writeHelper(const T* buf, size_t offset, size_t count);
uint32_t mOffset;   // bytes written so far (see getCurrentOffset)
Output* mOutput;    // wrapped sink; lifetime managed by the caller (assumed)
Endianness mEndian; // byte order applied by writeHelper
};
// Writes elements [offset, count) of `buf` to the wrapped output in the
// configured byte order, one element at a time, accumulating the number of
// bytes emitted in mOffset. Returns the first error from the underlying
// Output, BAD_VALUE for UNDEFINED_ENDIAN, or OK.
//
// Bug fix: the loop previously indexed buf[offset + i] while `i` already
// started at `offset`, which skipped the first elements of the requested
// range and read past its end. The loop index itself is the buffer index.
template<typename T>
inline status_t EndianOutput::writeHelper(const T* buf, size_t offset, size_t count) {
    assert(offset <= count);
    status_t res = OK;
    size_t size = sizeof(T);
    switch(mEndian) {
        case BIG: {
            for (size_t i = offset; i < count; ++i) {
                T tmp = convertToBigEndian<T>(buf[i]);
                if ((res = mOutput->write(reinterpret_cast<uint8_t*>(&tmp), 0, size))
                        != OK) {
                    return res;
                }
                mOffset += size;
            }
            break;
        }
        case LITTLE: {
            for (size_t i = offset; i < count; ++i) {
                T tmp = convertToLittleEndian<T>(buf[i]);
                if ((res = mOutput->write(reinterpret_cast<uint8_t*>(&tmp), 0, size))
                        != OK) {
                    return res;
                }
                mOffset += size;
            }
            break;
        }
        default: {
            return BAD_VALUE;
        }
    }
    return res;
}
// Specialisations for every supported integer width. The 8-bit forms are
// identity functions; wider types map directly onto the <endian.h>
// htobe*/htole* conversions.
template<>
inline uint8_t convertToBigEndian(uint8_t in) {
return in;
}
template<>
inline int8_t convertToBigEndian(int8_t in) {
return in;
}
template<>
inline uint16_t convertToBigEndian(uint16_t in) {
return htobe16(in);
}
template<>
inline int16_t convertToBigEndian(int16_t in) {
return htobe16(in);
}
template<>
inline uint32_t convertToBigEndian(uint32_t in) {
return htobe32(in);
}
template<>
inline int32_t convertToBigEndian(int32_t in) {
return htobe32(in);
}
template<>
inline uint64_t convertToBigEndian(uint64_t in) {
return htobe64(in);
}
template<>
inline int64_t convertToBigEndian(int64_t in) {
return htobe64(in);
}
template<>
inline uint8_t convertToLittleEndian(uint8_t in) {
return in;
}
template<>
inline int8_t convertToLittleEndian(int8_t in) {
return in;
}
template<>
inline uint16_t convertToLittleEndian(uint16_t in) {
return htole16(in);
}
template<>
inline int16_t convertToLittleEndian(int16_t in) {
return htole16(in);
}
template<>
inline uint32_t convertToLittleEndian(uint32_t in) {
return htole32(in);
}
template<>
inline int32_t convertToLittleEndian(int32_t in) {
return htole32(in);
}
template<>
inline uint64_t convertToLittleEndian(uint64_t in) {
return htole64(in);
}
template<>
inline int64_t convertToLittleEndian(int64_t in) {
return htole64(in);
}
} /*namespace img_utils*/
} /*namespace android*/
#endif /*IMG_UTILS_ENDIAN_UTILS*/

@ -0,0 +1,76 @@
/*
* Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef IMG_UTILS_FILE_INPUT_H
#define IMG_UTILS_FILE_INPUT_H
#include <img_utils/Input.h>
#include <cutils/compiler.h>
#include <utils/Errors.h>
#include <utils/String8.h>
#include <stdio.h>
#include <stdint.h>
namespace android {
namespace img_utils {
/**
* Utility class for reading from a file.
*/
class ANDROID_API FileInput : public Input {
public:
/**
 * Create a file input for the given path.
 */
explicit FileInput(String8 path);
virtual ~FileInput();
/**
 * Open a file descriptor to the path given in the constructor.
 *
 * Returns OK on success, or a negative error code.
 */
virtual status_t open();
/**
 * Read bytes from the file into the given buffer.  At most, the number
 * of bytes given in the count argument will be read.  Bytes will be written
 * into the given buffer starting at the index given in the offset argument.
 *
 * Returns the number of bytes read, or NOT_ENOUGH_DATA if at the end of the file.  If an
 * error has occurred, this will return a negative error code other than NOT_ENOUGH_DATA.
 */
virtual ssize_t read(uint8_t* buf, size_t offset, size_t count);
/**
 * Close the file descriptor to the path given in the constructor.
 *
 * Returns OK on success, or a negative error code.
 */
virtual status_t close();
private:
FILE *mFp;     // valid only while the file is open
String8 mPath; // path captured at construction
bool mOpen;    // tracks open state
};
} /*namespace img_utils*/
} /*namespace android*/
#endif /*IMG_UTILS_FILE_INPUT_H*/

@ -0,0 +1,46 @@
/*
* Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef IMG_UTILS_FILE_OUTPUT_H
#define IMG_UTILS_FILE_OUTPUT_H
#include <img_utils/Output.h>
#include <cutils/compiler.h>
#include <utils/Errors.h>
#include <utils/String8.h>
#include <stdio.h>
#include <stdint.h>
namespace android {
namespace img_utils {
/**
 * Utility class for writing bytes to a file. Counterpart of FileInput.
 */
class ANDROID_API FileOutput : public Output {
public:
// Create a file output for the given path.
explicit FileOutput(String8 path);
virtual ~FileOutput();
// Open the path given in the constructor. Returns OK or a negative error.
virtual status_t open();
// Write `count` bytes from `buf`, starting at index `offset` in `buf`.
// Returns OK on success, or a negative error code.
virtual status_t write(const uint8_t* buf, size_t offset, size_t count);
// Close the file. Returns OK on success, or a negative error code.
virtual status_t close();
private:
FILE *mFp;     // valid only while the file is open
String8 mPath; // path captured at construction
bool mOpen;    // tracks open state
};
} /*namespace img_utils*/
} /*namespace android*/
#endif /*IMG_UTILS_FILE_OUTPUT_H*/

@ -0,0 +1,71 @@
/*
* Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef IMG_UTILS_INPUT_H
#define IMG_UTILS_INPUT_H
#include <cutils/compiler.h>
#include <utils/Errors.h>
#include <stdint.h>
namespace android {
namespace img_utils {
/**
* Utility class used as a source of bytes.
*/
// Abstract byte source. Subclasses must implement read(); open/close/skip
// are declared non-pure, so default implementations exist elsewhere.
class ANDROID_API Input {
public:
virtual ~Input();
/**
 * Open this Input.
 *
 * Returns OK on success, or a negative error code.
 */
virtual status_t open();
/**
 * Read bytes into the given buffer.  At most, the number of bytes given in the
 * count argument will be read.  Bytes will be written into the given buffer starting
 * at the index given in the offset argument.
 *
 * Returns the number of bytes read, or NOT_ENOUGH_DATA if at the end of the file.  If an
 * error has occurred, this will return a negative error code other than NOT_ENOUGH_DATA.
 */
virtual ssize_t read(uint8_t* buf, size_t offset, size_t count) = 0;
/**
 * Skips bytes in the input.
 *
 * Returns the number of bytes skipped, or NOT_ENOUGH_DATA if at the end of the file.  If an
 * error has occurred, this will return a negative error code other than NOT_ENOUGH_DATA.
 */
virtual ssize_t skip(size_t count);
/**
 * Close the Input.  It is not valid to call open on a previously closed Input.
 *
 * Returns OK on success, or a negative error code.
 */
virtual status_t close();
};
} /*namespace img_utils*/
} /*namespace android*/
#endif /*IMG_UTILS_INPUT_H*/

@ -0,0 +1,57 @@
/*
* Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef IMG_UTILS_ORDERABLE
#define IMG_UTILS_ORDERABLE
#include <cutils/compiler.h>
#include <stdint.h>
namespace android {
namespace img_utils {
// Declares an inline comparison operator, implemented elsewhere in terms of
// getComparableValue(). Expanded once per relational operator below.
#define COMPARE_DEF(op) \
inline bool operator op (const Orderable& orderable) const;
/**
 * Subclasses of Orderable can be compared and sorted. This is
 * intended to be used to create sorted arrays of TIFF entries
 * and IFDs.
 */
class ANDROID_API Orderable {
public:
virtual ~Orderable();
/**
* Comparison operators are based on the value returned
* from this method.
*/
virtual uint32_t getComparableValue() const = 0;
// NOTE(review): these operators are declared inline but not defined in this
// header -- presumably defined in Orderable.cpp; confirm every translation
// unit that uses them sees a definition.
COMPARE_DEF(>)
COMPARE_DEF(<)
COMPARE_DEF(>=)
COMPARE_DEF(<=)
COMPARE_DEF(==)
COMPARE_DEF(!=)
};
#undef COMPARE_DEF
} /*namespace img_utils*/
} /*namespace android*/
#endif /*IMG_UTILS_ORDERABLE*/

@ -0,0 +1,61 @@
/*
* Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef IMG_UTILS_OUTPUT_H
#define IMG_UTILS_OUTPUT_H
#include <cutils/compiler.h>
#include <utils/Errors.h>
#include <stdint.h>
namespace android {
namespace img_utils {
/**
 * Utility class used to output bytes.
 *
 * Concrete sinks must implement write(); open() and close() are non-pure
 * virtuals, so a base implementation exists elsewhere (presumably default
 * behavior -- confirm in Output.cpp).
 */
class ANDROID_API Output {
public:
virtual ~Output();
/**
* Open this Output.
*
* Returns OK on success, or a negative error code.
*/
virtual status_t open();
/**
* Write bytes from the given buffer. The number of bytes given in the count
* argument will be written. Bytes will be written from the given buffer starting
* at the index given in the offset argument.
*
* Returns OK on success, or a negative error code.
*/
virtual status_t write(const uint8_t* buf, size_t offset, size_t count) = 0;
/**
* Close this Output. It is not valid to call open on a previously closed Output.
*
* Returns OK on success, or a negative error code.
*/
virtual status_t close();
};
} /*namespace img_utils*/
} /*namespace android*/
#endif /*IMG_UTILS_OUTPUT_H*/

@ -0,0 +1,44 @@
/*
* Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef IMG_UTILS_PAIR_H
#define IMG_UTILS_PAIR_H
#include <cutils/compiler.h>
namespace android {
namespace img_utils {
/**
* Generic pair utility class. Nothing special here.
*/
template<typename F, typename S>
class ANDROID_API Pair {
public:
F first;
S second;
Pair() {}
Pair(const Pair& o) : first(o.first), second(o.second) {}
Pair(const F& f, const S& s) : first(f), second(s) {}
};
} /*namespace img_utils*/
} /*namespace android*/
#endif /*IMG_UTILS_PAIR_H*/

@ -0,0 +1,53 @@
/*
* Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef IMG_UTILS_SORTED_ENTRY_VECTOR_H
#define IMG_UTILS_SORTED_ENTRY_VECTOR_H
#include <img_utils/TiffEntry.h>
#include <utils/StrongPointer.h>
#include <utils/SortedVector.h>
namespace android {
namespace img_utils {
/**
 * Subclass of SortedVector that has been extended to
 * do comparisons/lookups based on the tag ID of the entries.
 */
class SortedEntryVector : public SortedVector<sp<TiffEntry> > {
public:
virtual ~SortedEntryVector();
/**
* Returns the index of the entry with the given tag ID, or
* -1 if none exists.
*/
ssize_t indexOfTag(uint16_t tag) const;
protected:
/**
* Compare tag ID.
* Overrides SortedVector's ordering hook so entries sort by their tag.
*/
virtual int do_compare(const void* lhs, const void* rhs) const;
};
} /*namespace img_utils*/
} /*namespace android*/
#endif /*IMG_UTILS_SORTED_ENTRY_VECTOR_H*/

@ -0,0 +1,53 @@
/*
* Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef IMG_UTILS_STRIP_SOURCE_H
#define IMG_UTILS_STRIP_SOURCE_H
#include <img_utils/Output.h>
#include <cutils/compiler.h>
#include <utils/Errors.h>
#include <stdint.h>
namespace android {
namespace img_utils {
/**
 * This class acts as a data source for strips set in a TiffIfd.
 *
 * A TIFF writer pulls image strip bytes from implementations of this
 * interface when serializing a file.
 */
class ANDROID_API StripSource {
public:
virtual ~StripSource();
/**
* Write count bytes to the stream.
*
* Returns OK on success, or a negative error code.
*/
virtual status_t writeToStream(Output& stream, uint32_t count) = 0;
/**
* Return the source IFD.
*/
virtual uint32_t getIfd() const = 0;
};
} /*namespace img_utils*/
} /*namespace android*/
#endif /*IMG_UTILS_STRIP_SOURCE_H*/

@ -0,0 +1,130 @@
/*
* Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef IMG_UTILS_TIFF_ENTRY
#define IMG_UTILS_TIFF_ENTRY
#include <img_utils/TiffWritable.h>
#include <img_utils/TiffHelpers.h>
#include <img_utils/EndianUtils.h>
#include <cutils/compiler.h>
// #include <utils/String8.h>
#include <utils/Errors.h>
#include <stdint.h>
namespace android {
namespace img_utils {
// Declares an inline comparison operator; the matching definitions are
// generated by the COMPARE macro below the class.
#define COMPARE_DEF(op) \
inline bool operator op (const TiffEntry& entry) const;
/**
* This class holds a single TIFF IFD entry.
*
* Subclasses are expected to support assignment and copying operations.
*/
class ANDROID_API TiffEntry : public TiffWritable {
public:
virtual ~TiffEntry();
/**
* Write the 12-byte IFD entry to the output. The given offset will be
* set as the tag value if the size of the tag value exceeds the max
* size for the TIFF Value field (4 bytes), and should be word aligned.
*
* Returns OK on success, or a negative error code on failure.
*/
virtual status_t writeTagInfo(uint32_t offset, /*out*/EndianOutput* out) const = 0;
/**
* Get the count set for this entry. This corresponds to the TIFF Count
* field.
*/
virtual uint32_t getCount() const = 0;
/**
* Get the tag id set for this entry. This corresponds to the TIFF Tag
* field.
*/
virtual uint16_t getTag() const = 0;
/**
* Get the type set for this entry. This corresponds to the TIFF Type
* field.
*/
virtual TagType getType() const = 0;
/**
* Get the defined endianness for this entry. If this is defined,
* the tag value will be written with the given byte order.
*/
virtual Endianness getEndianness() const = 0;
/**
* Get the value for this entry. This corresponds to the TIFF Value
* field.
*
* Returns NULL if the value is NULL, or if the type used does not
* match the type of this tag.
*/
template<typename T>
const T* getData() const;
virtual std::string toString() const;
/**
* Force the type used here to be a valid TIFF type.
*
* Returns NULL if the given value is NULL, or if the type given does
* not match the type of the value given.
*/
template<typename T>
static const T* forceValidType(TagType type, const T* value);
// Raw pointer to the underlying value storage; typed, checked access
// goes through getData() above.
virtual const void* getDataHelper() const = 0;
COMPARE_DEF(>)
COMPARE_DEF(<)
protected:
enum {
// NOTE(review): presumably an upper bound for buffers used when
// formatting toString() output -- confirm in TiffEntry.cpp.
MAX_PRINT_STRING_LENGTH = 256
};
};
// Defines the inline operators declared via COMPARE_DEF above; entries
// compare by getComparableValue().
#define COMPARE(op) \
bool TiffEntry::operator op (const TiffEntry& entry) const { \
return getComparableValue() op entry.getComparableValue(); \
}
COMPARE(>)
COMPARE(<)
template<typename T>
const T* TiffEntry::getData() const {
// Reinterpret the untyped storage, then validate the requested type
// against this entry's TIFF type (NULL on mismatch).
const T* value = reinterpret_cast<const T*>(getDataHelper());
return forceValidType<T>(getType(), value);
}
#undef COMPARE
#undef COMPARE_DEF
} /*namespace img_utils*/
} /*namespace android*/
#endif /*IMG_UTILS_TIFF_ENTRY*/

@ -0,0 +1,219 @@
/*
* Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef IMG_UTILS_TIFF_ENTRY_IMPL
#define IMG_UTILS_TIFF_ENTRY_IMPL
#include <img_utils/TiffIfd.h>
#include <img_utils/TiffEntry.h>
#include <img_utils/TiffHelpers.h>
#include <img_utils/Output.h>
#include <img_utils/EndianUtils.h>
#include <utils/Log.h>
#include <utils/Errors.h>
// #include <utils/Vector.h>
#include <utils/StrongPointer.h>
#include <stdint.h>
#include <vector>
namespace android {
namespace img_utils {
/**
 * Concrete TiffEntry backed by a std::vector<T> holding the entry's value(s).
 */
template<typename T>
class TiffEntryImpl : public TiffEntry {
public:
TiffEntryImpl(uint16_t tag, TagType type, uint32_t count, Endianness end, const T* data);
virtual ~TiffEntryImpl();
status_t writeData(uint32_t offset, /*out*/EndianOutput* out) const;
status_t writeTagInfo(uint32_t offset, /*out*/EndianOutput* out) const;
uint32_t getCount() const;
uint16_t getTag() const;
TagType getType() const;
Endianness getEndianness() const;
size_t getSize() const;
uint32_t getComparableValue() const;
protected:
const void* getDataHelper() const;
// Size in bytes of the raw value data, before any word alignment.
uint32_t getActualSize() const;
uint16_t mTag;
// Stored as uint16_t (cast from TagType) since that is the width of the
// on-disk TIFF Type field written by writeTagInfo.
uint16_t mType;
// Logical element count (TIFF Count field); for RATIONAL/SRATIONAL,
// mData holds twice this many T's (two ints per rational).
uint32_t mCount;
Endianness mEnd;
std::vector<T> mData;
};
/**
 * Construct an entry, copying `count` elements (doubled for rational types)
 * from `data` into internal storage.
 */
template<typename T>
TiffEntryImpl<T>::TiffEntryImpl(uint16_t tag, TagType type, uint32_t count, Endianness end,
const T* data)
: mTag(tag), mType(static_cast<uint16_t>(type)), mCount(count), mEnd(end) {
// RATIONAL/SRATIONAL values are stored as two integers each, so the
// backing vector holds twice the logical element count.
count = (type == RATIONAL || type == SRATIONAL) ? count * 2 : count;
// Discard the returned iterator: it was previously bound to an unused
// local (`auto it = ...`), a leftover from the old android::Vector API
// whose insert returned an error index. std::vector reports allocation
// failure via exceptions, so no manual check is needed.
mData.insert(mData.end(), data, data + count);
}
template<typename T>
TiffEntryImpl<T>::~TiffEntryImpl() {}
// Logical element count (TIFF Count field).
template<typename T>
uint32_t TiffEntryImpl<T>::getCount() const {
return mCount;
}
// TIFF Tag field.
template<typename T>
uint16_t TiffEntryImpl<T>::getTag() const {
return mTag;
}
// TIFF Type field, stored internally as uint16_t.
template<typename T>
TagType TiffEntryImpl<T>::getType() const {
return static_cast<TagType>(mType);
}
/**
 * Untyped pointer to the entry's value storage (used by TiffEntry::getData).
 */
template<typename T>
const void* TiffEntryImpl<T>::getDataHelper() const {
// Use data() rather than &mData[0]: data() is well-defined (possibly
// returning a null/unusable pointer) even when the vector is empty,
// whereas operator[](0) on an empty vector is undefined behavior.
return reinterpret_cast<const void*>(mData.data());
}
// Size this entry's value contributes outside the 12-byte IFD entry itself.
// Values small enough to fit inline in the 4-byte Value/Offset field
// contribute 0; otherwise the word-aligned value size is returned.
template<typename T>
size_t TiffEntryImpl<T>::getSize() const {
uint32_t total = getActualSize();
// WORD_ALIGN mutates `total`, rounding it up to the next TIFF word.
WORD_ALIGN(total)
return (total <= OFFSET_SIZE) ? 0 : total;
}
/**
 * Exact byte count of this entry's value data, before word alignment.
 */
template<typename T>
uint32_t TiffEntryImpl<T>::getActualSize() const {
// One T per logical element...
uint32_t byteCount = sizeof(T) * mCount;
const TagType tagType = getType();
if (tagType == RATIONAL || tagType == SRATIONAL) {
// ...except rationals, which occupy two integers apiece.
byteCount *= 2;
}
return byteCount;
}
// Byte order the value should be written with, or UNDEFINED_ENDIAN to use
// the output's current order (see writeData).
template<typename T>
Endianness TiffEntryImpl<T>::getEndianness() const {
return mEnd;
}
// Entries sort by tag ID (see Orderable / SortedEntryVector).
template<typename T>
uint32_t TiffEntryImpl<T>::getComparableValue() const {
return mTag;
}
// Write the 12-byte IFD entry: Tag (2 bytes), Type (2), Count (4), then
// either the value inline (if it fits in 4 bytes, zero-padded to the word)
// or the given offset to where the value will be written.
template<typename T>
status_t TiffEntryImpl<T>::writeTagInfo(uint32_t offset, /*out*/EndianOutput* out) const {
assert((offset % TIFF_WORD_SIZE) == 0);
status_t ret = OK;
BAIL_ON_FAIL(out->write(&mTag, 0, 1), ret);
BAIL_ON_FAIL(out->write(&mType, 0, 1), ret);
BAIL_ON_FAIL(out->write(&mCount, 0, 1), ret);
uint32_t dataSize = getActualSize();
if (dataSize > OFFSET_SIZE) {
// Value too large for the 4-byte field: store the offset instead.
BAIL_ON_FAIL(out->write(&offset, 0, 1), ret);
} else {
uint32_t count = mCount;
if (getType() == RATIONAL || getType() == SRATIONAL) {
/**
* Rationals are stored as an array of ints. Each
* rational is represented by 2 ints. To recover the
* size of the array here, multiply the count by 2.
*/
count <<= 1;
}
BAIL_ON_FAIL(out->write(&mData[0], 0, count), ret);
// Pad the inline value with zeros up to the 4-byte field width.
ZERO_TILL_WORD(out, dataSize, ret);
}
return ret;
}
// Write this entry's value data (used when the value does not fit inline in
// the IFD entry). The offset parameter is unused for plain value entries.
template<typename T>
status_t TiffEntryImpl<T>::writeData(uint32_t /*offset*/, EndianOutput* out) const {
status_t ret = OK;
// Some tags have fixed-endian value output: temporarily switch the
// output's byte order and restore it afterwards.
Endianness tmp = UNDEFINED_ENDIAN;
if (mEnd != UNDEFINED_ENDIAN) {
tmp = out->getEndianness();
out->setEndianness(mEnd);
}
uint32_t count = mCount;
if (getType() == RATIONAL || getType() == SRATIONAL) {
/**
* Rationals are stored as an array of ints. Each
* rational is represented by 2 ints. To recover the
* size of the array here, multiply the count by 2.
*/
count <<= 1;
}
BAIL_ON_FAIL(out->write(&mData[0], 0, count), ret);
if (mEnd != UNDEFINED_ENDIAN) {
out->setEndianness(tmp);
}
// Write to next word alignment
ZERO_TILL_WORD(out, sizeof(T) * count, ret);
return ret;
}
// Specialization for sub-IFD entries: the value is always written at the
// given offset (never inline), since sub-IFD data cannot fit in 4 bytes.
template<>
inline status_t TiffEntryImpl<sp<TiffIfd> >::writeTagInfo(uint32_t offset,
/*out*/EndianOutput* out) const {
assert((offset % TIFF_WORD_SIZE) == 0);
status_t ret = OK;
BAIL_ON_FAIL(out->write(&mTag, 0, 1), ret);
BAIL_ON_FAIL(out->write(&mType, 0, 1), ret);
BAIL_ON_FAIL(out->write(&mCount, 0, 1), ret);
BAIL_ON_FAIL(out->write(&offset, 0, 1), ret);
return ret;
}
// Specialization for sub-IFD entries: the value size is the sum of the
// serialized sizes of all held sub-IFDs.
template<>
inline uint32_t TiffEntryImpl<sp<TiffIfd> >::getActualSize() const {
uint32_t byteCount = 0;
for (const sp<TiffIfd>& ifd : mData) {
byteCount += ifd->getSize();
}
return byteCount;
}
// Specialization for sub-IFD entries: serialize each sub-IFD in turn,
// advancing the offset past each one's serialized size.
template<>
inline status_t TiffEntryImpl<sp<TiffIfd> >::writeData(uint32_t offset, EndianOutput* out) const {
status_t ret = OK;
for (uint32_t i = 0; i < mCount; ++i) {
BAIL_ON_FAIL(mData[i]->writeData(offset, out), ret);
offset += mData[i]->getSize();
}
return ret;
}
} /*namespace img_utils*/
} /*namespace android*/
#endif /*IMG_UTILS_TIFF_ENTRY_IMPL*/

@ -0,0 +1,132 @@
/*
* Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef IMG_UTILS_TIFF_HELPERS_H
#define IMG_UTILS_TIFF_HELPERS_H
#include <stdint.h>
namespace android {
namespace img_utils {
// Four zero bytes used to pad output up to the next TIFF word boundary.
const uint8_t ZERO_WORD[] = {0, 0, 0, 0};
// Evaluate x; if it returns non-OK, store the code in `flag` and return it
// from the enclosing function.
#define BAIL_ON_FAIL(x, flag) \
if (((flag) = (x)) != OK) return flag;
// Number of padding bytes needed to advance `index` to the next word
// boundary (0 if already aligned).
#define BYTES_TILL_WORD(index) \
((TIFF_WORD_SIZE - ((index) % TIFF_WORD_SIZE)) % TIFF_WORD_SIZE)
// Round `count` up to the next word boundary (mutates `count` in place).
#define WORD_ALIGN(count) \
count += BYTES_TILL_WORD(count);
// Write zero bytes to `output` until `index` is word aligned; on write
// failure, set `ret` and return from the enclosing function.
#define ZERO_TILL_WORD(output, index, ret) \
{ \
size_t remaining = BYTES_TILL_WORD(index); \
if (remaining > 0) { \
BAIL_ON_FAIL((output)->write(ZERO_WORD, 0, remaining), ret); \
} \
}
/**
 * Basic TIFF header constants.
 *
 * Sizes are in bytes; markers follow the TIFF 6.0 specification.
 */
enum {
BAD_OFFSET = 0,
TIFF_WORD_SIZE = 4, // Size in bytes
IFD_HEADER_SIZE = 2, // Size in bytes
IFD_FOOTER_SIZE = 4, // Size in bytes
TIFF_ENTRY_SIZE = 12, // Size in bytes
MAX_IFD_ENTRIES = UINT16_MAX,
FILE_HEADER_SIZE = 8, // Size in bytes
ENDIAN_MARKER_SIZE = 2, // Size in bytes
TIFF_MARKER_SIZE = 2, // Size in bytes
OFFSET_MARKER_SIZE = 4, // Size in bytes
TIFF_FILE_MARKER = 42,
BIG_ENDIAN_MARKER = 0x4D4Du,
LITTLE_ENDIAN_MARKER = 0x4949u
};
/**
 * Constants for the TIFF tag types.
 */
enum TagType {
UNKNOWN_TAGTYPE = 0,
BYTE=1,
ASCII,
SHORT,
LONG,
RATIONAL,
SBYTE,
UNDEFINED,
SSHORT,
SLONG,
SRATIONAL,
FLOAT,
DOUBLE
};
/**
 * Sizes of the TIFF entry fields (in bytes).
 */
enum {
TAG_SIZE = 2,
TYPE_SIZE = 2,
COUNT_SIZE = 4,
OFFSET_SIZE = 4
};
/**
 * Convenience IFD id constants.
 */
enum {
IFD_0 = 0,
RAW_IFD,
PROFILE_IFD,
PREVIEW_IFD
};
/**
 * Size in bytes of a single element of the given TIFF tag type,
 * or 0 for an unrecognized type.
 */
inline size_t getTypeSize(TagType type) {
// Grouped largest-to-smallest; rationals are two 4-byte ints.
switch (type) {
case RATIONAL:
case SRATIONAL:
case DOUBLE:
return 8;
case LONG:
case SLONG:
case FLOAT:
return 4;
case SHORT:
case SSHORT:
return 2;
case UNDEFINED:
case ASCII:
case BYTE:
case SBYTE:
return 1;
default:
return 0;
}
}
/**
 * Total serialized size of an IFD structure with the given number of
 * entries: 2-byte entry count + 12 bytes per entry + 4-byte next-IFD offset.
 */
inline uint32_t calculateIfdSize(size_t numberOfEntries) {
const uint32_t entryBytes = TIFF_ENTRY_SIZE * static_cast<uint32_t>(numberOfEntries);
return IFD_HEADER_SIZE + IFD_FOOTER_SIZE + entryBytes;
}
} /*namespace img_utils*/
} /*namespace android*/
#endif /*IMG_UTILS_TIFF_HELPERS_H*/

@ -0,0 +1,164 @@
/*
* Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef IMG_UTILS_TIFF_IFD_H
#define IMG_UTILS_TIFF_IFD_H
#include <img_utils/TiffWritable.h>
#include <img_utils/TiffEntry.h>
#include <img_utils/Output.h>
#include <cutils/compiler.h>
#include <utils/Errors.h>
#include <utils/StrongPointer.h>
#include <stdint.h>
#include <map>
namespace android {
namespace img_utils {
/**
 * This class holds a single TIFF Image File Directory (IFD) structure.
 *
 * This maps to the TIFF IFD structure that is logically composed of:
 * - A 2-byte field listing the number of entries.
 * - A list of 12-byte TIFF entries.
 * - A 4-byte offset to the next IFD.
 */
class ANDROID_API TiffIfd : public TiffWritable {
public:
explicit TiffIfd(uint32_t ifdId);
virtual ~TiffIfd();
/**
* Add a TiffEntry to this IFD or replace an existing entry with the
* same tag ID. No validation is done.
*
* Returns OK on success, or a negative error code on failure.
*/
virtual status_t addEntry(const sp<TiffEntry>& entry);
/**
* Set the pointer to the next IFD. This is used to create a linked
* list of IFDs as defined by the TIFF 6.0 spec., and is not included
* when calculating the size of IFD and entries for the getSize()
* method (unlike SubIFDs).
*/
virtual void setNextIfd(const sp<TiffIfd>& ifd);
/**
* Get the pointer to the next IFD, or NULL if none exists.
*/
virtual sp<TiffIfd> getNextIfd() const;
/**
* Write the IFD data. This includes the IFD header, entries, footer,
* and the corresponding values for each entry (recursively including
* sub-IFDs). The written amount should end on a word boundary, and
* the given offset should be word aligned.
*
* Returns OK on success, or a negative error code on failure.
*/
virtual status_t writeData(uint32_t offset, /*out*/EndianOutput* out) const;
/**
* Get the size of the IFD. This includes the IFD header, entries, footer,
* and the corresponding values for each entry (recursively including
* any sub-IFDs).
*/
virtual size_t getSize() const;
/**
* Get the id of this IFD.
*/
virtual uint32_t getId() const;
/**
* Get an entry with the given tag ID.
*
* Returns a strong pointer to the entry if it exists, or an empty strong
* pointer.
*/
virtual sp<TiffEntry> getEntry(uint16_t tag) const;
/**
* Remove the entry with the given tag ID if it exists.
*/
virtual void removeEntry(uint16_t tag);
/**
* Convenience method to validate and set strip-related image tags.
*
* This sets all strip related tags, but leaves offset values uninitialized.
* setStripOffsets must be called with the desired offset before writing.
* The strip tag values are calculated from the existing tags for image
* dimensions and pixel type set in the IFD.
*
* Does not handle planar image configurations (PlanarConfiguration != 1).
*
* Returns OK on success, or a negative error code.
*/
virtual status_t validateAndSetStripTags();
/**
* Returns true if validateAndSetStripTags has been called, but not setStripOffsets.
*/
virtual bool uninitializedOffsets() const;
/**
* Convenience method to set beginning offset for strips.
*
* Call this to update the strip offsets before calling writeData.
*
* Returns OK on success, or a negative error code.
*/
virtual status_t setStripOffset(uint32_t offset);
/**
* Get the total size of the strips in bytes.
*
* This sums the byte count at each strip offset, and returns
* the total count of bytes stored in strips for this IFD.
*/
virtual uint32_t getStripSize() const;
/**
* Get a formatted string representing this IFD.
*/
virtual std::string toString() const;
/**
* Print a formatted string representing this IFD to logcat.
*/
void log() const;
/**
* Get value used to determine sort order.
*/
virtual uint32_t getComparableValue() const;
protected:
// NOTE(review): presumably validates/normalizes the offset before
// writing -- confirm the contract in TiffIfd.cpp.
virtual uint32_t checkAndGetOffset(uint32_t offset) const;
// Entries keyed (and therefore kept ordered) by tag ID.
std::map<uint16_t, sp<TiffEntry> > mEntries;
// Next IFD in the linked list, or NULL (see setNextIfd).
sp<TiffIfd> mNextIfd;
uint32_t mIfdId;
// Tracks whether strip tags have been set without offsets; see
// uninitializedOffsets().
bool mStripOffsetsInitialized;
};
} /*namespace img_utils*/
} /*namespace android*/
#endif /*IMG_UTILS_TIFF_IFD_H*/

@ -0,0 +1,60 @@
/*
* Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef IMG_UTILS_TIFF_WRITABLE
#define IMG_UTILS_TIFF_WRITABLE
#include <img_utils/Orderable.h>
#include <img_utils/EndianUtils.h>
#include <img_utils/Output.h>
#include <cutils/compiler.h>
#include <utils/Errors.h>
#include <utils/RefBase.h>
#include <stdint.h>
namespace android {
namespace img_utils {
/**
 * TiffWritable subclasses represent TIFF metadata objects that can be written
 * to an EndianOutput object. This is used for TIFF entries and IFDs.
 *
 * Inherits Orderable (sortable by getComparableValue) and LightRefBase
 * (usable with sp<> strong pointers).
 */
class ANDROID_API TiffWritable : public Orderable, public LightRefBase<TiffWritable> {
public:
TiffWritable();
virtual ~TiffWritable();
/**
* Write the data to the output. The given offset is used to calculate
* the header offset for values written. The offset is defined
* relative to the beginning of the TIFF header, and is word aligned.
*
* Returns OK on success, or a negative error code on failure.
*/
virtual status_t writeData(uint32_t offset, /*out*/EndianOutput* out) const = 0;
/**
* Get the size of the data to write.
*/
virtual size_t getSize() const = 0;
};
} /*namespace img_utils*/
} /*namespace android*/
#endif /*IMG_UTILS_TIFF_WRITABLE*/

@ -0,0 +1,328 @@
/*
* Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef IMG_UTILS_TIFF_WRITER_H
#define IMG_UTILS_TIFF_WRITER_H
#include <img_utils/EndianUtils.h>
#include <img_utils/StripSource.h>
#include <img_utils/TiffEntryImpl.h>
#include <img_utils/TagDefinitions.h>
#include <img_utils/TiffIfd.h>
#include <utils/Log.h>
#include <utils/Errors.h>
#include <utils/StrongPointer.h>
#include <cutils/compiler.h>
#include <stdint.h>
#include <vector>
#include <map>
namespace android {
namespace img_utils {
// Forward declarations.
class TiffEntry;
class TiffIfd;
class Output;
/**
 * This class holds a collection of TIFF IFDs that can be written as a
 * complete DNG file header.
 *
 * This maps to the TIFF header structure that is logically composed of:
 * - An 8-byte file header containing an endianness indicator, the TIFF
 *   file marker, and the offset to the first IFD.
 * - A list of TIFF IFD structures.
 */
class ANDROID_API TiffWriter : public LightRefBase<TiffWriter> {
public:
enum SubIfdType {
SUBIFD = 0,
GPSINFO
};
/**
* Constructs a TiffWriter with the default tag mappings. This enables
* all of the tags defined in TagDefinitions.h, and uses the following
* mapping precedence to resolve collisions:
* (highest precedence) TIFF/EP > DNG > EXIF 2.3 > TIFF 6.0
*/
TiffWriter();
/**
* Constructs a TiffWriter with the given tag mappings. The mapping
* precedence will be in the order that the definition maps are given,
* where the lower index map gets precedence.
*
* This can be used with user-defined definitions, or definitions from
* TagDefinitions.h
*
* The enabledDefinitions mapping object is owned by the caller, and must
* stay alive for the lifespan of the constructed TiffWriter object.
*/
TiffWriter(std::map<uint16_t, const TagDefinition_t*>* enabledDefinitions,
size_t length);
virtual ~TiffWriter();
/**
* Write a TIFF header containing each IFD set. This will recursively
* write all SubIFDs and tags.
*
* Any StripSources passed in will be written to the output as image strips
* at the appropriate offsets. The StripByteCounts, RowsPerStrip, and
* StripOffsets tags must be set to use this. To set these tags in a
* given IFD, use the addStrip method.
*
* Returns OK on success, or a negative error code on failure.
*/
virtual status_t write(Output* out, StripSource** sources, size_t sourcesCount,
Endianness end = LITTLE);
/**
* Write a TIFF header containing each IFD set. This will recursively
* write all SubIFDs and tags.
*
* Image data for strips or tiles must be written separately at the
* appropriate offsets. These offsets must not fall within the file
* header written this way. The size of the header written is given
* by the getTotalSize() method.
*
* Returns OK on success, or a negative error code on failure.
*/
virtual status_t write(Output* out, Endianness end = LITTLE);
/**
* Get the total size in bytes of the TIFF header. This includes all
* IFDs, tags, and values set for this TiffWriter.
*/
virtual uint32_t getTotalSize() const;
/**
* Add an entry to the IFD with the given ID.
*
* Returns OK on success, or a negative error code on failure. Valid
* error codes for this method are:
* - BAD_INDEX - The given tag doesn't exist.
* - BAD_VALUE - The given count doesn't match the required count for
* this tag.
* - BAD_TYPE - The type of the given data isn't compatible with the
* type required for this tag.
* - NAME_NOT_FOUND - No ifd exists with the given ID.
*/
virtual status_t addEntry(const sp<TiffEntry>& entry, uint32_t ifd);
/**
* Build an entry for a known tag and add it to the IFD with the given ID.
* This tag must be defined in one of the definition vectors this TIFF writer
* was constructed with. The count and type are validated.
*
* Returns OK on success, or a negative error code on failure. Valid
* error codes for this method are:
* - BAD_INDEX - The given tag doesn't exist.
* - BAD_VALUE - The given count doesn't match the required count for
* this tag.
* - BAD_TYPE - The type of the given data isn't compatible with the
* type required for this tag.
* - NAME_NOT_FOUND - No ifd exists with the given ID.
*/
template<typename T>
status_t addEntry(uint16_t tag, uint32_t count, const T* data, uint32_t ifd);
/**
* Build an entry for a known tag. This tag must be one of the tags
* defined in one of the definition vectors this TIFF writer was constructed
* with. The count and type are validated. If this succeeds, the resulting
* entry will be placed in the outEntry pointer.
*
* Returns OK on success, or a negative error code on failure. Valid
* error codes for this method are:
* - BAD_INDEX - The given tag doesn't exist.
* - BAD_VALUE - The given count doesn't match the required count for
* this tag.
* - BAD_TYPE - The type of the given data isn't compatible with the
* type required for this tag.
*/
template<typename T>
status_t buildEntry(uint16_t tag, uint32_t count, const T* data,
/*out*/sp<TiffEntry>* outEntry) const;
/**
* Convenience function to set the strip related tags for a given IFD.
*
* Call this before using a StripSource as an input to write.
* The following tags must be set before calling this method:
* - ImageWidth
* - ImageLength
* - SamplesPerPixel
* - BitsPerSample
*
* Returns OK on success, or a negative error code.
*/
virtual status_t addStrip(uint32_t ifd);
/**
* Return the TIFF entry with the given tag ID in the IFD with the given ID,
* or an empty pointer if none exists.
*/
virtual sp<TiffEntry> getEntry(uint16_t tag, uint32_t ifd) const;
/**
* Remove the TIFF entry with the given tag ID in the given IFD if it exists.
*/
virtual void removeEntry(uint16_t tag, uint32_t ifd);
/**
* Create an empty IFD with the given ID and add it to the end of the
* list of IFDs.
*/
virtual status_t addIfd(uint32_t ifd);
/**
* Create an empty IFD with the given ID and add it as a SubIfd of the
* parent IFD.
*/
virtual status_t addSubIfd(uint32_t parentIfd, uint32_t ifd, SubIfdType type = SUBIFD);
/**
* Returns the default type for the given tag ID.
*/
virtual TagType getDefaultType(uint16_t tag) const;
/**
* Returns the default count for a given tag ID, or 0 if this
* tag normally has a variable count.
*/
virtual uint32_t getDefaultCount(uint16_t tag) const;
/**
* Returns true if an IFD with the given ID exists.
*/
virtual bool hasIfd(uint32_t ifd) const;
/**
* Returns true if a definition exists for the given tag ID.
*/
virtual bool checkIfDefined(uint16_t tag) const;
/**
* Returns the name of the tag if a definition exists for the given tag
* ID, or null if no definition exists.
*/
virtual const char* getTagName(uint16_t tag) const;
/**
* Print the currently configured IFDs and entries to logcat.
*/
virtual void log() const;
/**
* Build an entry. No validation is done.
*
* WARNING: Using this method can result in creating poorly formatted
* TIFF files.
*
* Returns a TiffEntry with the given tag, type, count, endianness,
* and data.
*/
template<typename T>
static sp<TiffEntry> uncheckedBuildEntry(uint16_t tag, TagType type,
uint32_t count, Endianness end, const T* data);
/**
* Utility function to build a tag-to-definition mapping from a given
* array of tag definitions.
*/
#if 0
static KeyedVector<uint16_t, const TagDefinition_t*> buildTagMap(
const TagDefinition_t* definitions, size_t length);
#endif
protected:
enum {
// Number of built-in tag definition maps used by the default ctor
// (TIFF/EP, DNG, EXIF 2.3, TIFF 6.0 -- see class comment).
DEFAULT_NUM_TAG_MAPS = 4,
};
// Walks the IFD linked list starting at mIfd and returns the tail.
sp<TiffIfd> findLastIfd();
// Writes the 8-byte TIFF file header to the output.
status_t writeFileHeader(EndianOutput& out);
// Looks up the tag definition across mTagMaps in precedence order.
const TagDefinition_t* lookupDefinition(uint16_t tag) const;
status_t calculateOffsets();
// Head of the IFD linked list.
sp<TiffIfd> mIfd;
// IFDs addressable by ID (NOTE(review): presumably includes sub-IFDs
// added via addSubIfd -- confirm in TiffWriter.cpp).
std::map<uint32_t, sp<TiffIfd> > mNamedIfds;
// Enabled tag definition maps, lower index = higher precedence.
std::vector<std::map<uint16_t, const TagDefinition_t*> > mTagMaps;
size_t mNumTagMaps;
#if 0
static KeyedVector<uint16_t, const TagDefinition_t*> sTagMaps[];
#endif
};
/**
 * Build a TIFF entry for the given tag holding the given value array,
 * validating the count and value type against this writer's tag definitions.
 *
 * On success returns OK and stores the new entry in outEntry. Returns
 * BAD_INDEX if no definition exists for the tag, BAD_VALUE if the count
 * does not match a fixed-count tag, or BAD_TYPE if the value type is not
 * valid for the tag.
 */
template<typename T>
status_t TiffWriter::buildEntry(uint16_t tag, uint32_t count, const T* data,
        /*out*/sp<TiffEntry>* outEntry) const {
    const TagDefinition_t* definition = lookupDefinition(tag);
    if (definition == NULL) {
        ALOGE("%s: No such tag exists for id %x.", __FUNCTION__, tag);
        return BAD_INDEX;
    }
    // A fixedCount of 0 means the tag takes a variable number of values.
    uint32_t fixedCount = definition->fixedCount;
    if (fixedCount > 0 && fixedCount != count) {
        // %u matches the unsigned count values (the old %d was a format
        // specifier mismatch for uint32_t).
        ALOGE("%s: Invalid count %u for tag %x (expects %u).", __FUNCTION__, count, tag,
                fixedCount);
        return BAD_VALUE;
    }
    TagType fixedType = definition->defaultType;
    if (TiffEntry::forceValidType(fixedType, data) == NULL) {
        ALOGE("%s: Invalid type used for tag value for tag %x.", __FUNCTION__, tag);
        return BAD_TYPE;
    }
    *outEntry = new TiffEntryImpl<T>(tag, fixedType, count,
            definition->fixedEndian, data);
    return OK;
}
/**
 * Convenience overload: build a validated entry for the given tag and
 * value array, then add it to the IFD with the given ID.
 *
 * Returns OK on success, or the error from entry construction/insertion.
 */
template<typename T>
status_t TiffWriter::addEntry(uint16_t tag, uint32_t count, const T* data, uint32_t ifd) {
    sp<TiffEntry> entry;
    const status_t res = buildEntry<T>(tag, count, data, &entry);
    if (res != OK) {
        ALOGE("%s: Could not build entry for tag %x.", __FUNCTION__, tag);
        return res;
    }
    return addEntry(entry, ifd);
}
/**
 * Construct a TiffEntry directly from the given tag, type, count,
 * endianness, and data. No validation against tag definitions is done;
 * see the declaration's warning about malformed output.
 */
template<typename T>
sp<TiffEntry> TiffWriter::uncheckedBuildEntry(uint16_t tag, TagType type, uint32_t count,
        Endianness end, const T* data) {
    return sp<TiffEntry>(new TiffEntryImpl<T>(tag, type, count, end, data));
}
} /*namespace img_utils*/
} /*namespace android*/
#endif /*IMG_UTILS_TIFF_WRITER_H*/

@ -0,0 +1,54 @@
/*
* Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <img_utils/ByteArrayOutput.h>
#include <utils/Log.h>
namespace android {
namespace img_utils {
// Trivial lifecycle: the backing std::vector manages its own storage.
ByteArrayOutput::ByteArrayOutput() {}
ByteArrayOutput::~ByteArrayOutput() {}
// Opening is a no-op for an in-memory buffer; always succeeds.
status_t ByteArrayOutput::open() {
    return OK;
}
/**
 * Append count bytes from buf (starting at offset) to the in-memory buffer.
 *
 * Always returns OK on completion. The previous implementation compared the
 * iterator returned by vector::insert against end(), which is only equal
 * when nothing was inserted — so a legitimate zero-length write was
 * misreported as BAD_VALUE (allocation failure throws; it never signals
 * through the return value).
 */
status_t ByteArrayOutput::write(const uint8_t* buf, size_t offset, size_t count) {
    if (count == 0) {
        // Zero-length writes are a successful no-op.
        return OK;
    }
    mByteArray.insert(mByteArray.end(), buf + offset, buf + offset + count);
    return OK;
}
// Closing discards all accumulated bytes; the object can be reused.
status_t ByteArrayOutput::close() {
    mByteArray.clear();
    return OK;
}
// Number of bytes written so far.
size_t ByteArrayOutput::getSize() const {
    return mByteArray.size();
}
/**
 * Return a pointer to the accumulated bytes (valid until the next write,
 * close, or destruction). Uses data() instead of &mByteArray[0]: indexing
 * element 0 of an empty vector is undefined behavior, while data() is
 * well-defined (may return null) when the buffer is empty.
 */
const uint8_t* ByteArrayOutput::getArray() const {
    return mByteArray.data();
}
} /*namespace img_utils*/
} /*namespace android*/

@ -0,0 +1,496 @@
/*
* Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <img_utils/DngUtils.h>
#include <inttypes.h>
#include <algorithm>
#include <vector>
#include <math.h>
namespace android {
namespace img_utils {
// Starts with an empty opcode stream; the stream is serialized big-endian
// (mEndianOut wraps mOpList with BIG endianness).
OpcodeListBuilder::OpcodeListBuilder() : mCount(0), mOpList(), mEndianOut(&mOpList, BIG) {
    if(mEndianOut.open() != OK) {
        // open() cannot be retried here; log and continue with a dead stream.
        ALOGE("%s: Open failed.", __FUNCTION__);
    }
}
OpcodeListBuilder::~OpcodeListBuilder() {
    if(mEndianOut.close() != OK) {
        ALOGE("%s: Close failed.", __FUNCTION__);
    }
}
// Total serialized size: the 4-byte opcode-count header plus the stream.
size_t OpcodeListBuilder::getSize() const {
    return mOpList.getSize() + sizeof(mCount);
}
// Number of opcodes added so far.
uint32_t OpcodeListBuilder::getCount() const {
    return mCount;
}
/**
 * Serialize the opcode list into buf, which must hold at least getSize()
 * bytes: a big-endian 32-bit opcode count followed by the raw opcode stream.
 */
status_t OpcodeListBuilder::buildOpList(uint8_t* buf) const {
    const uint32_t countBE = convertToBigEndian(mCount);
    memcpy(buf, &countBE, sizeof(countBE));
    memcpy(buf + sizeof(countBE), mOpList.getArray(), mOpList.getSize());
    return OK;
}
/**
 * Add lens-shading gain map opcodes for the given active area and CFA
 * layout: Bayer layouts produce one gain map per color channel, CFA_NONE
 * produces a single monochrome gain map.
 *
 * Returns OK on success, or BAD_VALUE for an unknown CFA layout.
 */
status_t OpcodeListBuilder::addGainMapsForMetadata(uint32_t lsmWidth,
                                                   uint32_t lsmHeight,
                                                   uint32_t activeAreaTop,
                                                   uint32_t activeAreaLeft,
                                                   uint32_t activeAreaBottom,
                                                   uint32_t activeAreaRight,
                                                   CfaLayout cfa,
                                                   const float* lensShadingMap) {
    const uint32_t width = activeAreaRight - activeAreaLeft;
    const uint32_t height = activeAreaBottom - activeAreaTop;
    const bool isBayer = (cfa == CFA_RGGB) || (cfa == CFA_GRBG) ||
            (cfa == CFA_GBRG) || (cfa == CFA_BGGR);
    if (isBayer) {
        return addBayerGainMapsForMetadata(lsmWidth, lsmHeight, width, height,
                cfa, lensShadingMap);
    }
    if (cfa == CFA_NONE) {
        return addMonochromeGainMapsForMetadata(lsmWidth, lsmHeight, width, height,
                lensShadingMap);
    }
    ALOGE("%s: Unknown CFA layout %d", __FUNCTION__, cfa);
    return BAD_VALUE;
}
/**
 * Add four GainMap opcodes (red, green-even, green-odd, blue) covering the
 * active area, de-interleaving the 4-channel lens shading map into one
 * per-channel gain map each.
 *
 * The four previous near-identical 16-argument addGainMap calls are
 * collapsed into one data-driven loop; serialization order (R, GE, GO, B)
 * and all argument values are unchanged.
 *
 * Returns OK on success, or BAD_VALUE for an unknown CFA layout.
 */
status_t OpcodeListBuilder::addBayerGainMapsForMetadata(uint32_t lsmWidth,
                                                        uint32_t lsmHeight,
                                                        uint32_t activeAreaWidth,
                                                        uint32_t activeAreaHeight,
                                                        CfaLayout cfa,
                                                        const float* lensShadingMap) {
    // Row/column phase of each color channel within the 2x2 Bayer tile.
    // Defaults describe CFA_RGGB; other layouts override below.
    uint32_t redTop = 0;
    uint32_t redLeft = 0;
    uint32_t greenEvenTop = 0;
    uint32_t greenEvenLeft = 1;
    uint32_t greenOddTop = 1;
    uint32_t greenOddLeft = 0;
    uint32_t blueTop = 1;
    uint32_t blueLeft = 1;
    switch(cfa) {
        case CFA_RGGB:
            // Defaults already match.
            break;
        case CFA_GRBG:
            redTop = 0;
            redLeft = 1;
            greenEvenTop = 0;
            greenEvenLeft = 0;
            greenOddTop = 1;
            greenOddLeft = 1;
            blueTop = 1;
            blueLeft = 0;
            break;
        case CFA_GBRG:
            redTop = 1;
            redLeft = 0;
            greenEvenTop = 0;
            greenEvenLeft = 0;
            greenOddTop = 1;
            greenOddLeft = 1;
            blueTop = 0;
            blueLeft = 1;
            break;
        case CFA_BGGR:
            redTop = 1;
            redLeft = 1;
            greenEvenTop = 0;
            greenEvenLeft = 1;
            greenOddTop = 1;
            greenOddLeft = 0;
            blueTop = 0;
            blueLeft = 0;
            break;
        default:
            ALOGE("%s: Unknown CFA layout %d", __FUNCTION__, cfa);
            return BAD_VALUE;
    }
    // De-interleave the 4-channel lens shading map into per-channel planes.
    // size_t arithmetic avoids 32-bit overflow for large map dimensions.
    const size_t channelSamples = static_cast<size_t>(lsmWidth) * lsmHeight;
    std::vector<float> redMap(channelSamples);
    std::vector<float> greenEvenMap(channelSamples);
    std::vector<float> greenOddMap(channelSamples);
    std::vector<float> blueMap(channelSamples);
    for (size_t j = 0; j < channelSamples; ++j) {
        const size_t i = j * 4;
        redMap[j] = lensShadingMap[i + LSM_R_IND];
        greenEvenMap[j] = lensShadingMap[i + LSM_GE_IND];
        greenOddMap[j] = lensShadingMap[i + LSM_GO_IND];
        blueMap[j] = lensShadingMap[i + LSM_B_IND];
    }
    // Map sample spacing normalized to the active area.
    // NOTE(review): assumes lsmWidth/lsmHeight >= 1 (a value of 0 would
    // wrap in the subtraction, as in the original) — confirm with callers.
    double spacingV = 1.0 / std::max(1u, lsmHeight - 1);
    double spacingH = 1.0 / std::max(1u, lsmWidth - 1);
    // One GainMap opcode per channel; each samples every other row/column
    // (row/col pitch 2) starting at the channel's phase within the tile.
    const struct {
        uint32_t top;
        uint32_t left;
        const float* gains;
    } channels[] = {
        { redTop, redLeft, redMap.data() },
        { greenEvenTop, greenEvenLeft, greenEvenMap.data() },
        { greenOddTop, greenOddLeft, greenOddMap.data() },
        { blueTop, blueLeft, blueMap.data() },
    };
    for (const auto& ch : channels) {
        status_t err = addGainMap(/*top*/ch.top,
                                  /*left*/ch.left,
                                  /*bottom*/activeAreaHeight,
                                  /*right*/activeAreaWidth,
                                  /*plane*/0,
                                  /*planes*/1,
                                  /*rowPitch*/2,
                                  /*colPitch*/2,
                                  /*mapPointsV*/lsmHeight,
                                  /*mapPointsH*/lsmWidth,
                                  /*mapSpacingV*/spacingV,
                                  /*mapSpacingH*/spacingH,
                                  /*mapOriginV*/0,
                                  /*mapOriginH*/0,
                                  /*mapPlanes*/1,
                                  /*mapGains*/ch.gains);
        if (err != OK) return err;
    }
    return OK;
}
/**
 * Add a single GainMap opcode covering the whole active area for a
 * monochrome (CFA_NONE) sensor, taking the first of the four interleaved
 * lens shading map channels.
 */
status_t OpcodeListBuilder::addMonochromeGainMapsForMetadata(uint32_t lsmWidth,
                                                             uint32_t lsmHeight,
                                                             uint32_t activeAreaWidth,
                                                             uint32_t activeAreaHeight,
                                                             const float* lensShadingMap) {
    const size_t numSamples = lsmWidth * lsmHeight;
    std::vector<float> gains(numSamples);
    // The lens shading map interleaves 4 channels; only the first is used.
    for (size_t dst = 0; dst < numSamples; ++dst) {
        gains[dst] = lensShadingMap[dst * 4];
    }
    // Sample spacing normalized to the active area.
    double spacingV = 1.0 / std::max(1u, lsmHeight - 1);
    double spacingH = 1.0 / std::max(1u, lsmWidth - 1);
    return addGainMap(/*top*/0,
                      /*left*/0,
                      /*bottom*/activeAreaHeight,
                      /*right*/activeAreaWidth,
                      /*plane*/0,
                      /*planes*/1,
                      /*rowPitch*/1,
                      /*colPitch*/1,
                      /*mapPointsV*/lsmHeight,
                      /*mapPointsH*/lsmWidth,
                      /*mapSpacingV*/spacingV,
                      /*mapSpacingH*/spacingH,
                      /*mapOriginV*/0,
                      /*mapOriginH*/0,
                      /*mapPlanes*/1,
                      /*mapGains*/gains.data());
}
/**
 * Serialize one GainMap opcode into the opcode stream: preamble, flags,
 * payload size, the 10 leading uint32 arguments, 4 double spacing/origin
 * arguments, the map plane count, and finally the gain samples.
 *
 * The write order here defines the wire format — do not reorder.
 * Returns OK on success, or the first write error encountered.
 */
status_t OpcodeListBuilder::addGainMap(uint32_t top,
                                       uint32_t left,
                                       uint32_t bottom,
                                       uint32_t right,
                                       uint32_t plane,
                                       uint32_t planes,
                                       uint32_t rowPitch,
                                       uint32_t colPitch,
                                       uint32_t mapPointsV,
                                       uint32_t mapPointsH,
                                       double mapSpacingV,
                                       double mapSpacingH,
                                       double mapOriginV,
                                       double mapOriginH,
                                       uint32_t mapPlanes,
                                       const float* mapGains) {
    status_t err = addOpcodePreamble(GAIN_MAP_ID);
    if (err != OK) return err;
    // Allow this opcode to be skipped if not supported
    uint32_t flags = FLAG_OPTIONAL;
    err = mEndianOut.write(&flags, 0, 1);
    if (err != OK) return err;
    // Payload size: 11 uint32 fields (incl. mapPlanes), 4 doubles, and
    // one float per gain sample.
    const uint32_t NUMBER_INT_ARGS = 11;
    const uint32_t NUMBER_DOUBLE_ARGS = 4;
    uint32_t totalSize = NUMBER_INT_ARGS * sizeof(uint32_t) + NUMBER_DOUBLE_ARGS * sizeof(double) +
            mapPointsV * mapPointsH * mapPlanes * sizeof(float);
    err = mEndianOut.write(&totalSize, 0, 1);
    if (err != OK) return err;
    // Batch writes as much as possible
    uint32_t settings1[] = { top,
                             left,
                             bottom,
                             right,
                             plane,
                             planes,
                             rowPitch,
                             colPitch,
                             mapPointsV,
                             mapPointsH };
    err = mEndianOut.write(settings1, 0, NELEMS(settings1));
    if (err != OK) return err;
    double settings2[] = { mapSpacingV,
                           mapSpacingH,
                           mapOriginV,
                           mapOriginH };
    err = mEndianOut.write(settings2, 0, NELEMS(settings2));
    if (err != OK) return err;
    err = mEndianOut.write(&mapPlanes, 0, 1);
    if (err != OK) return err;
    // mapPointsV * mapPointsH samples per map plane.
    err = mEndianOut.write(mapGains, 0, mapPointsV * mapPointsH * mapPlanes);
    if (err != OK) return err;
    // Only count the opcode once it has been fully written.
    mCount++;
    return OK;
}
/**
 * Add a single-plane WarpRectilinear opcode from camera metadata: the
 * optical center is normalized by the active array dimensions and clamped
 * to [0, 1], and the six float distortion coefficients are widened to
 * doubles.
 *
 * Returns BAD_VALUE if either active array dimension is <= 1.
 */
status_t OpcodeListBuilder::addWarpRectilinearForMetadata(const float* kCoeffs,
                                                          uint32_t activeArrayWidth,
                                                          uint32_t activeArrayHeight,
                                                          float opticalCenterX,
                                                          float opticalCenterY) {
    if (activeArrayWidth <= 1 || activeArrayHeight <= 1) {
        ALOGE("%s: Cannot add opcode for active array with dimensions w=%" PRIu32 ", h=%" PRIu32,
                __FUNCTION__, activeArrayWidth, activeArrayHeight);
        return BAD_VALUE;
    }
    const double normalizedOCX =
            CLAMP(opticalCenterX / static_cast<double>(activeArrayWidth), 0, 1);
    const double normalizedOCY =
            CLAMP(opticalCenterY / static_cast<double>(activeArrayHeight), 0, 1);
    // Widen the six float coefficients to the doubles the opcode expects.
    double coeffs[6];
    for (size_t i = 0; i < 6; ++i) {
        coeffs[i] = kCoeffs[i];
    }
    return addWarpRectilinear(/*numPlanes*/1,
                              /*opticalCenterX*/normalizedOCX,
                              /*opticalCenterY*/normalizedOCY,
                              coeffs);
}
/**
 * Serialize one WarpRectilinear opcode: preamble, flags, payload size,
 * plane count, 6 coefficients per plane, then the normalized optical
 * center (two doubles).
 *
 * The write order here defines the wire format — do not reorder.
 * Returns OK on success, or the first write error encountered.
 */
status_t OpcodeListBuilder::addWarpRectilinear(uint32_t numPlanes,
                                               double opticalCenterX,
                                               double opticalCenterY,
                                               const double* kCoeffs) {
    status_t err = addOpcodePreamble(WARP_RECTILINEAR_ID);
    if (err != OK) return err;
    // Allow this opcode to be skipped if not supported
    uint32_t flags = FLAG_OPTIONAL;
    err = mEndianOut.write(&flags, 0, 1);
    if (err != OK) return err;
    // Payload: one uint32 plane count, 6 doubles per plane, plus the
    // two optical-center doubles.
    const uint32_t NUMBER_CENTER_ARGS = 2;
    const uint32_t NUMBER_COEFFS = numPlanes * 6;
    uint32_t totalSize = (NUMBER_CENTER_ARGS + NUMBER_COEFFS) * sizeof(double) + sizeof(uint32_t);
    err = mEndianOut.write(&totalSize, 0, 1);
    if (err != OK) return err;
    err = mEndianOut.write(&numPlanes, 0, 1);
    if (err != OK) return err;
    err = mEndianOut.write(kCoeffs, 0, NUMBER_COEFFS);
    if (err != OK) return err;
    err = mEndianOut.write(&opticalCenterX, 0, 1);
    if (err != OK) return err;
    err = mEndianOut.write(&opticalCenterY, 0, 1);
    if (err != OK) return err;
    // Only count the opcode once it has been fully written.
    mCount++;
    return OK;
}
/**
 * Add a FixBadPixelsList opcode for the given hot pixel coordinates, using
 * the color filter arrangement as the Bayer phase. No bad rectangles are
 * emitted. Returns BAD_VALUE for an arrangement outside [0, 3].
 */
status_t OpcodeListBuilder::addBadPixelListForMetadata(const uint32_t* hotPixels,
                                                       uint32_t xyPairCount,
                                                       uint32_t colorFilterArrangement) {
    // Only arrangements 0-3 map to a valid Bayer phase.
    if (colorFilterArrangement > 3) {
        ALOGE("%s: Unknown color filter arrangement %" PRIu32, __FUNCTION__,
                colorFilterArrangement);
        return BAD_VALUE;
    }
    return addBadPixelList(/*bayerPhase*/colorFilterArrangement,
                           /*badPointCount*/xyPairCount,
                           /*badRectCount*/0,
                           /*badPointRowColPairs*/hotPixels,
                           /*badRectTopLeftBottomRightTuples*/nullptr);
}
/**
 * Serialize one FixBadPixelsList opcode: preamble, flags, payload size,
 * Bayer phase, point count, rect count, then the variable-length point
 * (row, col) pairs and rect (top, left, bottom, right) tuples.
 *
 * The write order here defines the wire format — do not reorder.
 * Returns OK on success, or the first write error encountered.
 */
status_t OpcodeListBuilder::addBadPixelList(uint32_t bayerPhase,
                                            uint32_t badPointCount,
                                            uint32_t badRectCount,
                                            const uint32_t* badPointRowColPairs,
                                            const uint32_t* badRectTopLeftBottomRightTuples) {
    status_t err = addOpcodePreamble(FIX_BAD_PIXELS_LIST);
    if (err != OK) return err;
    // Allow this opcode to be skipped if not supported
    uint32_t flags = FLAG_OPTIONAL;
    err = mEndianOut.write(&flags, 0, 1);
    if (err != OK) return err;
    // Payload: 3 fixed uint32 fields, 2 uint32s per point, 4 per rect.
    const uint32_t NUM_NON_VARLEN_FIELDS = 3;
    const uint32_t SIZE_OF_POINT = 2;
    const uint32_t SIZE_OF_RECT = 4;
    uint32_t totalSize = (NUM_NON_VARLEN_FIELDS + badPointCount * SIZE_OF_POINT +
            badRectCount * SIZE_OF_RECT) * sizeof(uint32_t);
    err = mEndianOut.write(&totalSize, 0, 1);
    if (err != OK) return err;
    err = mEndianOut.write(&bayerPhase, 0, 1);
    if (err != OK) return err;
    err = mEndianOut.write(&badPointCount, 0, 1);
    if (err != OK) return err;
    err = mEndianOut.write(&badRectCount, 0, 1);
    if (err != OK) return err;
    // Variable-length sections are only written when non-empty, so the
    // corresponding pointer arguments may be null in that case.
    if (badPointCount > 0) {
        err = mEndianOut.write(badPointRowColPairs, 0, SIZE_OF_POINT * badPointCount);
        if (err != OK) return err;
    }
    if (badRectCount > 0) {
        err = mEndianOut.write(badRectTopLeftBottomRightTuples, 0, SIZE_OF_RECT * badRectCount);
        if (err != OK) return err;
    }
    // Only count the opcode once it has been fully written.
    mCount++;
    return OK;
}
/**
 * Write the common opcode header: the 32-bit opcode ID followed by the
 * 4-byte version field {1, 3, 0, 0}.
 */
status_t OpcodeListBuilder::addOpcodePreamble(uint32_t opcodeId) {
    const status_t idErr = mEndianOut.write(&opcodeId, 0, 1);
    if (idErr != OK) return idErr;
    uint8_t version[] = {1, 3, 0, 0};
    const status_t verErr = mEndianOut.write(version, 0, NELEMS(version));
    if (verErr != OK) return verErr;
    return OK;
}
} /*namespace img_utils*/
} /*namespace android*/

@ -0,0 +1,83 @@
/*
* Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <img_utils/EndianUtils.h>
namespace android {
namespace img_utils {
// Wraps the given Output (not owned) with endian-aware writes; the running
// byte offset starts at 0.
EndianOutput::EndianOutput(Output* out, Endianness end)
        : mOffset(0), mOutput(out), mEndian(end) {}
EndianOutput::~EndianOutput() {}
// Resets the offset and opens the wrapped output.
status_t EndianOutput::open() {
    mOffset = 0;
    return mOutput->open();
}
// Closes the wrapped output; the offset is left unchanged.
status_t EndianOutput::close() {
    return mOutput->close();
}
// Change the endianness applied to subsequent writes.
void EndianOutput::setEndianness(Endianness end) {
    mEndian = end;
}
// Number of bytes written since open().
uint32_t EndianOutput::getCurrentOffset() const {
    return mOffset;
}
// Endianness currently applied to writes.
Endianness EndianOutput::getEndianness() const {
    return mEndian;
}
/**
 * Write raw bytes; single bytes need no endian swap, so this forwards
 * directly to the wrapped output and advances the offset on success.
 */
status_t EndianOutput::write(const uint8_t* buf, size_t offset, size_t count) {
    const status_t res = mOutput->write(buf, offset, count);
    if (res == OK) {
        mOffset += count;
    }
    return res;
}
/**
 * Write signed bytes via the unsigned byte path; the bit patterns are
 * identical, so no conversion beyond the pointer cast is needed.
 */
status_t EndianOutput::write(const int8_t* buf, size_t offset, size_t count) {
    const void* raw = buf;
    return write(static_cast<const uint8_t*>(raw), offset, count);
}
// Generates an endian-aware write overload for the given integral type,
// delegating to writeHelper<T> for any needed byte swapping.
#define DEFINE_WRITE(_type_) \
status_t EndianOutput::write(const _type_* buf, size_t offset, size_t count) { \
    return writeHelper<_type_>(buf, offset, count); \
}
// Overloads for 16/32/64-bit signed and unsigned integers.
DEFINE_WRITE(uint16_t)
DEFINE_WRITE(int16_t)
DEFINE_WRITE(uint32_t)
DEFINE_WRITE(int32_t)
DEFINE_WRITE(uint64_t)
DEFINE_WRITE(int64_t)
/**
 * Write floats as endian-swapped 32-bit words. The size check is a
 * compile-time fact, so static_assert replaces the former runtime assert
 * (which compiled away entirely in NDEBUG builds).
 * NOTE(review): the reinterpret_cast type-puns float as uint32_t, matching
 * the original — confirm acceptable under the project's aliasing settings.
 */
status_t EndianOutput::write(const float* buf, size_t offset, size_t count) {
    static_assert(sizeof(float) == sizeof(uint32_t), "float must be 32 bits");
    return writeHelper<uint32_t>(reinterpret_cast<const uint32_t*>(buf), offset, count);
}
/**
 * Write doubles as endian-swapped 64-bit words. The size check is a
 * compile-time fact, so static_assert replaces the former runtime assert
 * (which compiled away entirely in NDEBUG builds).
 * NOTE(review): the reinterpret_cast type-puns double as uint64_t, matching
 * the original — confirm acceptable under the project's aliasing settings.
 */
status_t EndianOutput::write(const double* buf, size_t offset, size_t count) {
    static_assert(sizeof(double) == sizeof(uint64_t), "double must be 64 bits");
    return writeHelper<uint64_t>(reinterpret_cast<const uint64_t*>(buf), offset, count);
}
} /*namespace img_utils*/
} /*namespace android*/

Some files were not shown because too many files have changed in this diff Show More

Loading…
Cancel
Save