实现独立的AI识别

TempBranch
Matthew 9 months ago
parent 562d49382c
commit 3fee94e03e

@@ -10,6 +10,7 @@
#include <sys/system_properties.h>
#include <AndroidHelper.h>
#include <linux/spi/spidev.h>
#include "ncnn/yolov5ncnn.h"
#include <android/multinetwork.h>
@@ -642,6 +643,90 @@ Java_com_xypower_mpapp_MicroPhotoService_getPhotoTimeData(
return data;
}
extern "C" JNIEXPORT jintArray JNICALL
Java_com_xypower_mpapp_MicroPhotoService_recoganizePicture(
        JNIEnv* env,
        jclass cls, jstring paramPath, jstring binPath, jstring blobName8, jstring blobName16, jstring blobName32, jstring picPath) {

    // Runs a standalone YoloV5/ncnn recognition pass over the image at picPath.
    //
    // Returns a jintArray holding 6 ints per detected object:
    //   [x, y, w, h, label, prob*100] (box coords truncated to int, probability
    //   scaled to an integer percentage), or NULL when the image cannot be
    //   read, the model fails to load, or nothing is detected.

    // Copy a jstring's UTF-8 chars into a std::string and release the JNI
    // buffer immediately (replaces six identical copy-pasted triples).
    auto jstringToString = [env](jstring js) -> std::string {
        const char* p = env->GetStringUTFChars(js, nullptr);
        std::string s = MakeString(p);
        env->ReleaseStringUTFChars(js, p);
        return s;
    };

    std::string paramPathStr = jstringToString(paramPath);
    std::string binPathStr = jstringToString(binPath);
    std::string blobName8Str = jstringToString(blobName8);
    std::string blobName16Str = jstringToString(blobName16);
    std::string blobName32Str = jstringToString(blobName32);
    std::string picPathStr = jstringToString(picPath);

    cv::Mat mat = cv::imread(picPathStr);
    if (mat.empty())
    {
        // Unreadable or missing picture: nothing to recognize.
        return NULL;
    }

    std::vector<int> dataArray;
    ncnn_init();
    // net's destructor releases the model when this function returns.
    ncnn::Net net;
    bool res = YoloV5Ncnn_Init(net, paramPathStr, binPathStr);
    if (res)
    {
        std::vector<IDevice::RECOG_OBJECT> objs;
        res = YoloV5NcnnDetect(net, mat, true, blobName8Str, blobName16Str, blobName32Str, objs);
        if (res && !objs.empty())
        {
            // Flatten every detection into 6 consecutive ints.
            dataArray.reserve(objs.size() * 6);
            for (std::vector<IDevice::RECOG_OBJECT>::const_iterator it = objs.cbegin(); it != objs.cend(); ++it)
            {
                dataArray.push_back((int)it->x);
                dataArray.push_back((int)it->y);
                dataArray.push_back((int)it->w);
                dataArray.push_back((int)it->h);
                dataArray.push_back(it->label);
                // Probability is a float in [0,1]; report it as a percentage.
                dataArray.push_back((int)(it->prob * 100.0f));
            }
        }
    }
    // ncnn_uninit();  // intentionally kept alive across calls — TODO confirm

    if (dataArray.empty())
    {
        return NULL;
    }
    jintArray data = env->NewIntArray(dataArray.size());
    if (data == NULL) {
        // JNI allocation failure (OutOfMemoryError already pending).
        return NULL;
    }
    env->SetIntArrayRegion(data, 0, dataArray.size(), &dataArray[0]);
    return data;
}
/*
extern "C" JNIEXPORT jlongArray JNICALL
Java_com_xypower_mpapp_MicroPhotoService_getNextScheduleItem(

@@ -221,6 +221,48 @@ bool YoloV5Ncnn_Init(const std::string& paramFile, const std::string& binFile)
return true;
}
// Configures `net` with the shared pool allocators, registers the custom
// YoloV5Focus layer, and loads the model from the given .param/.bin files.
// Vulkan compute is enabled only when at least one GPU is present.
// Returns false if either the param or the bin file fails to load.
bool YoloV5Ncnn_Init(ncnn::Net& net, const std::string& paramFile, const std::string& binFile)
{
    ncnn::Option options;
    options.lightmode = true;
    options.num_threads = 4;
    options.blob_allocator = &g_blob_pool_allocator;
    options.workspace_allocator = &g_workspace_pool_allocator;
    options.use_packing_layout = true;
    // use vulkan compute when a capable device exists
    options.use_vulkan_compute = (ncnn::get_gpu_count() != 0);

    net.opt = options;
    net.register_custom_layer("YoloV5Focus", YoloV5Focus_layer_creator);

    // load network structure, then weights
    if (net.load_param(paramFile.c_str()) != 0)
    {
        return false;
    }
    if (net.load_model(binFile.c_str()) != 0)
    {
        return false;
    }
    return true;
}
// public native Obj[] Detect(Bitmap bitmap, boolean use_gpu);
bool YoloV5NcnnDetect( ncnn::Mat& mat, bool use_gpu, std::vector<IDevice::RECOG_OBJECT>& objects)
{
@@ -601,3 +643,159 @@ bool YoloV5NcnnDetect( cv::Mat& mat, bool use_gpu, const std::string& blobName8,
return true;
}
// Detects objects in a BGR cv::Mat with an already-initialized YoloV5 net.
// Pipeline: letterbox-resize so the long side is 640, pad to a multiple of
// 32, run the net, read the three stride heads (8/16/32) by blob name,
// NMS-filter the proposals, then map boxes back to original image pixels.
// Results are written to `objects`; returns false only when use_gpu is
// requested without a Vulkan-capable GPU.
bool YoloV5NcnnDetect( ncnn::Net& net, cv::Mat& mat, bool use_gpu, const std::string& blobName8, const std::string& blobName16, const std::string& blobName32, std::vector<IDevice::RECOG_OBJECT>& objects)
{
if (use_gpu && ncnn::get_gpu_count() == 0)
{
return false;
//return env->NewStringUTF("no vulkan capable gpu");
}
// AndroidBitmapInfo info;
// AndroidBitmap_getInfo(env, bitmap, &info);
const int width = mat.cols;
const int height = mat.rows;
// if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888)
// return NULL;
// ncnn from bitmap
const int target_size = 640;
// letterbox pad to multiple of 32: scale so the longer side becomes
// target_size, keeping aspect ratio.
int w = width;
int h = height;
float scale = 1.f;
if (w > h)
{
scale = (float)target_size / w;
w = target_size;
h = h * scale;
}
else
{
scale = (float)target_size / h;
h = target_size;
w = w * scale;
}
// NOTE(review): assumes mat's rows are tightly packed (no row padding) —
// from_pixels_resize reads width*3 bytes per row; confirm for ROI mats.
ncnn::Mat in = ncnn::Mat::from_pixels_resize(mat.data, ncnn::Mat::PIXEL_BGR2RGB, mat.cols, mat.rows, w, h);
// pad to target_size rectangle
// yolov5/utils/datasets.py letterbox
int wpad = (w + 31) / 32 * 32 - w;
int hpad = (h + 31) / 32 * 32 - h;
ncnn::Mat in_pad;
// 114 is yolov5's grey letterbox fill value.
ncnn::copy_make_border(in, in_pad, hpad / 2, hpad - hpad / 2, wpad / 2, wpad - wpad / 2, ncnn::BORDER_CONSTANT, 114.f);
// yolov5
{
const float prob_threshold = 0.25f;
const float nms_threshold = 0.45f;
// scale pixels from [0,255] to [0,1]; no mean subtraction.
const float norm_vals[3] = {1 / 255.f, 1 / 255.f, 1 / 255.f};
in_pad.substract_mean_normalize(0, norm_vals);
ncnn::Extractor ex = net.create_extractor();
ex.set_vulkan_compute(use_gpu);
ex.input("images", in_pad);
std::vector<IDevice::RECOG_OBJECT> proposals;
// anchor setting from yolov5/models/yolov5s.yaml
// stride 8
{
ncnn::Mat out;
ex.extract(blobName8.c_str(), out);
ncnn::Mat anchors(6);
anchors[0] = 10.f;
anchors[1] = 13.f;
anchors[2] = 16.f;
anchors[3] = 30.f;
anchors[4] = 33.f;
anchors[5] = 23.f;
std::vector<IDevice::RECOG_OBJECT> objects8;
generate_proposals(anchors, 8, in_pad, out, prob_threshold, objects8);
proposals.insert(proposals.end(), objects8.begin(), objects8.end());
}
// stride 16
{
ncnn::Mat out;
ex.extract(blobName16.c_str(), out);
ncnn::Mat anchors(6);
anchors[0] = 30.f;
anchors[1] = 61.f;
anchors[2] = 62.f;
anchors[3] = 45.f;
anchors[4] = 59.f;
anchors[5] = 119.f;
std::vector<IDevice::RECOG_OBJECT> objects16;
generate_proposals(anchors, 16, in_pad, out, prob_threshold, objects16);
proposals.insert(proposals.end(), objects16.begin(), objects16.end());
}
// stride 32
{
ncnn::Mat out;
ex.extract(blobName32.c_str(), out);
ncnn::Mat anchors(6);
anchors[0] = 116.f;
anchors[1] = 90.f;
anchors[2] = 156.f;
anchors[3] = 198.f;
anchors[4] = 373.f;
anchors[5] = 326.f;
std::vector<IDevice::RECOG_OBJECT> objects32;
generate_proposals(anchors, 32, in_pad, out, prob_threshold, objects32);
proposals.insert(proposals.end(), objects32.begin(), objects32.end());
}
// sort all proposals by score from highest to lowest
qsort_descent_inplace(proposals);
// apply nms with nms_threshold
std::vector<int> picked;
nms_sorted_bboxes(proposals, picked, nms_threshold);
int count = picked.size();
objects.resize(count);
for (int i = 0; i < count; i++)
{
objects[i] = proposals[picked[i]];
// adjust offset to original unpadded image coordinates (undo the
// letterbox pad, then the resize scale); wpad/2 matches the integer
// split used in copy_make_border above.
float x0 = (objects[i].x - (wpad / 2)) / scale;
float y0 = (objects[i].y - (hpad / 2)) / scale;
float x1 = (objects[i].x + objects[i].w - (wpad / 2)) / scale;
float y1 = (objects[i].y + objects[i].h - (hpad / 2)) / scale;
// clip to image bounds
x0 = std::max(std::min(x0, (float)(width - 1)), 0.f);
y0 = std::max(std::min(y0, (float)(height - 1)), 0.f);
x1 = std::max(std::min(x1, (float)(width - 1)), 0.f);
y1 = std::max(std::min(y1, (float)(height - 1)), 0.f);
// store back as top-left corner plus size
objects[i].x = x0;
objects[i].y = y0;
objects[i].w = x1 - x0;
objects[i].h = y1 - y0;
}
}
return true;
}

@@ -107,6 +107,9 @@ inline void ncnn_uninit()
// public native boolean Init(AssetManager mgr);
bool YoloV5Ncnn_Init(const std::string& paramFile, const std::string& binFile);
// Overload that loads the model into a caller-owned net (no globals).
bool YoloV5Ncnn_Init(ncnn::Net& net, const std::string& paramFile, const std::string& binFile);
// public native Obj[] Detect(Bitmap bitmap, boolean use_gpu);
bool YoloV5NcnnDetect( ncnn::Mat& mat, bool use_gpu, std::vector<IDevice::RECOG_OBJECT>& objects);
// NOTE: the cv::Mat overload was declared twice; duplicate removed.
bool YoloV5NcnnDetect( cv::Mat& mat, bool use_gpu, const std::string& blobName8, const std::string& blobName16, const std::string& blobName32, std::vector<IDevice::RECOG_OBJECT>& objects);
// Overload that runs detection on a caller-owned net.
bool YoloV5NcnnDetect( ncnn::Net& net, cv::Mat& mat, bool use_gpu, const std::string& blobName8, const std::string& blobName16, const std::string& blobName32, std::vector<IDevice::RECOG_OBJECT>& objects);

@@ -34,6 +34,8 @@ public class BridgeProvider extends ContentProvider {
// ContentProvider sub-paths served by this bridge (matched in query()).
private final static String PATH_TAKE_PHOTO = "/takePhoto";
private final static String PATH_TAKE_VIDEO = "/takeVideo";
// Picture-recognition endpoint backed by the native recoganizePicture().
private final static String PATH_RECOG_PIC = "/recogPic";
public BridgeProvider() {
// Constructor only logs; real setup happens in onCreate()/query().
Log.i(TAG, "BridgeProvider");
}
@ -72,6 +74,7 @@ public class BridgeProvider extends ContentProvider {
Log.i(TAG, uri.toString());
matcher.addURI(AUTHORITY, PATH_QUERY_SEC_VERSION, 1);
matcher.addURI(AUTHORITY, PATH_QUERY_BATTERY_VOLTAGE, 2);
matcher.addURI(AUTHORITY, PATH_RECOG_PIC, 3);
Cursor cursor = null;
int matched = matcher.match(uri);
@ -82,6 +85,9 @@ public class BridgeProvider extends ContentProvider {
case 2:
cursor = queryBattaryVoltage();
break;
case 3:
cursor = recoganizePicture(uri, selection, selectionArgs);
break;
default:
break;
}
@@ -149,6 +155,55 @@ public class BridgeProvider extends ContentProvider {
return matrixCursor;
}
// Decodes the Base64 selection into a query string carrying the model and
// picture paths, runs native recognition, and exposes each detection as a
// cursor row (x, y, w, h, label, prob). Returns null on bad input or when
// nothing was detected.
private Cursor recoganizePicture(Uri uri, String selection, String[] selectionArgs) {
    String decoded = stringFromBase64(selection);

    String paramPath = null;
    String binPath = null;
    String blobName8 = null;
    String blobName16 = null;
    String blobName32 = null;
    String path = null;
    if (!TextUtils.isEmpty(decoded)) {
        // Reuse Uri's query parser by wrapping the payload in a dummy URL.
        Uri parsed = Uri.parse("http://a.com/?" + decoded);
        paramPath = parsed.getQueryParameter("param");
        binPath = parsed.getQueryParameter("bin");
        blobName8 = parsed.getQueryParameter("b8");
        blobName16 = parsed.getQueryParameter("b16");
        blobName32 = parsed.getQueryParameter("b32");
        path = parsed.getQueryParameter("path");
    }

    // All six parameters are mandatory.
    if (TextUtils.isEmpty(paramPath) || TextUtils.isEmpty(binPath) || TextUtils.isEmpty(blobName8) ||
            TextUtils.isEmpty(blobName16) || TextUtils.isEmpty(blobName32) || TextUtils.isEmpty(path)) {
        return null;
    }

    // Native side returns 6 ints per detection: x, y, w, h, label, prob.
    int[] results = MicroPhotoService.recoganizePicture(paramPath, binPath, blobName8, blobName16, blobName32, path);
    if (results == null || results.length == 0) {
        return null;
    }
    int rowCount = results.length / 6;
    if (rowCount == 0) {
        return null;
    }

    String[] columns = { "x", "y", "w", "h", "label", "prob" };
    MatrixCursor matrixCursor = new MatrixCursor(columns, rowCount);
    for (int offset = 0; offset + 6 <= results.length; offset += 6) {
        matrixCursor.addRow(new Object[] {
                results[offset], results[offset + 1], results[offset + 2],
                results[offset + 3], results[offset + 4], results[offset + 5] });
    }
    return matrixCursor;
}
private int importPrivateKey(Uri uri, ContentValues values) {
String cert = values.containsKey("cert") ? values.getAsString("cert") : null;
String path = values.containsKey("path") ? values.getAsString("path") : null;

@@ -1340,6 +1340,7 @@ cellSignalStrengthGsm.getDbm();
public native static int getGpioInt(int cmd);
// Runs YoloV5/ncnn recognition on picPath; returns 6 ints per detection
// (x, y, w, h, label, prob*100), or null when nothing is recognized.
public static native int[] recoganizePicture(String paramPath, String binPath, String blobName8, String blobName16, String blobName32, String picPath);
public static native String querySecVersion();
public static native boolean genCertRequest(int index, int type, String subject, String outputPath);
public static native boolean importPrivateKeyFile(int index, String outputPath, String md5);

Loading…
Cancel
Save