Merge branch 'wang_dev' into test_pre

Resolve the conflicts
# Conflicts:
#	APP_Framework/Framework/knowing/Kconfig
chunyexixiaoyu 2021-12-24 13:51:48 +08:00
commit 17be211288
139 changed files with 763 additions and 1550 deletions

6
.gitmodules vendored
View File

@ -1,6 +1,6 @@
[submodule "Ubiquitous/RT_Thread/rt-thread"]
path = Ubiquitous/RT_Thread/rt-thread
url = https://git.trustie.net/chunyexixiaoyu/rt-thread.git
url = https://code.gitlink.org.cn/chunyexixiaoyu/rt-thread.git
[submodule "Ubiquitous/RT_Thread/bsp/k210/kendryte-sdk/kendryte-sdk-source"]
path = Ubiquitous/RT_Thread/bsp/k210/kendryte-sdk/kendryte-sdk-source
url = https://git.trustie.net/chunyexixiaoyu/kendryte-sdk-source.git
path = Ubiquitous/RT_Thread/aiit_board/k210/kendryte-sdk/kendryte-sdk-source
url = https://code.gitlink.org.cn/chunyexixiaoyu/kendryte-sdk-source.git

View File

@ -1,17 +1,15 @@
menu "knowing app"
menuconfig APPLICATION_KNOWING
bool "Using knowing apps"
default n
if APPLICATION_KNOWING
source "$APP_DIR/Applications/knowing_app/mnist/Kconfig"
source "$APP_DIR/Applications/knowing_app/face_detect/Kconfig"
source "$APP_DIR/Applications/knowing_app/instrusion_detect/Kconfig"
source "$APP_DIR/Applications/knowing_app/helmet_detect/Kconfig"
source "$APP_DIR/Applications/knowing_app/iris_ml_demo/Kconfig"
source "$APP_DIR/Applications/knowing_app/k210_fft_test/Kconfig"
source "$APP_DIR/Applications/knowing_app/image_processing/Kconfig"
source "$APP_DIR/Applications/knowing_app/cmsis_5_demo/Kconfig"
endif
endmenu
menu "knowing app"
menuconfig APPLICATION_KNOWING
bool "Using knowing apps"
default n
if APPLICATION_KNOWING
source "$APP_DIR/Applications/knowing_app/mnist/Kconfig"
source "$APP_DIR/Applications/knowing_app/k210_detect_entry/Kconfig"
source "$APP_DIR/Applications/knowing_app/iris_ml_demo/Kconfig"
source "$APP_DIR/Applications/knowing_app/k210_fft_test/Kconfig"
source "$APP_DIR/Applications/knowing_app/image_processing/Kconfig"
source "$APP_DIR/Applications/knowing_app/cmsis_5_demo/Kconfig"
endif
endmenu

View File

@ -1,8 +0,0 @@
config FACE_DETECT
bool "enable apps/face detect"
depends on BOARD_K210_EVB
depends on DRV_USING_OV2640
depends on USING_KPU_POSTPROCESSING
depends on USING_YOLOV2
select LIB_USING_CJSON
default n

View File

@ -1,9 +0,0 @@
from building import *
cwd = GetCurrentDir()
src = Glob('*.c') + Glob('*.cpp')
CPPPATH = [cwd]
group = DefineGroup('Applications', src, depend = ['FACE_DETECT'], LOCAL_CPPPATH = CPPPATH)
Return('group')

View File

@ -1,379 +0,0 @@
#include <transform.h>
#ifdef LIB_USING_CJSON
#include <cJSON.h>
#endif
#include "region_layer.h"
#define ANCHOR_NUM 5
#define STACK_SIZE (128 * 1024)
#define JSON_FILE_PATH "/kmodel/detect.json"
#define JSON_BUFFER_SIZE (4 * 1024)
static dmac_channel_number_t dma_ch = DMAC_CHANNEL_MAX;
// params from json
static float anchor[ANCHOR_NUM * 2] = {};
static int net_output_shape[3] = {};
static int net_input_size[2] = {};
static int sensor_output_size[2] = {};
static char kmodel_path[127] = "";
static int kmodel_size = 0;
static float obj_thresh[20] = {};
static float nms_thresh = 0.0;
static char labels[20][32] = {};
static int class_num = 0;
#define THREAD_PRIORITY_FACE_D (11)
static pthread_t facetid = 0;
static void *thread_face_detcet_entry(void *parameter);
static int g_fd = 0;
static int kmodel_fd = 0;
static int if_exit = 0;
static unsigned char *showbuffer = NULL;
static unsigned char *kpurgbbuffer = NULL;
static _ioctl_shoot_para shoot_para_t = {0};
unsigned char *model_data = NULL; // kpu data load memory
unsigned char *model_data_align = NULL;
kpu_model_context_t face_detect_task;
static region_layer_t face_detect_rl;
static obj_info_t face_detect_info;
volatile uint32_t g_ai_done_flag;
static void ai_done(void *ctx) { g_ai_done_flag = 1; }
static void param_parse()
{
int fin;
char buffer[JSON_BUFFER_SIZE] = "";
// char *buffer;
// if (NULL != (buffer = (char*)malloc(JSON_BUFFER_SIZE * sizeof(char)))) {
// memset(buffer, 0, JSON_BUFFER_SIZE * sizeof(char));
// } else {
// printf("Json buffer malloc failed!");
// exit(-1);
// }
int array_size;
cJSON *json_obj;
cJSON *json_item;
cJSON *json_array_item;
fin = open(JSON_FILE_PATH, O_RDONLY);
if (!fin) {
printf("Error open file %s", JSON_FILE_PATH);
exit(-1);
}
read(fin, buffer, sizeof(buffer));
close(fin);
// read json string
json_obj = cJSON_Parse(buffer);
// free(buffer);
char *json_print_str = cJSON_Print(json_obj);
printf("Json file content: \n%s\n", json_print_str);
cJSON_free(json_print_str);
// get anchors
json_item = cJSON_GetObjectItem(json_obj, "anchors");
array_size = cJSON_GetArraySize(json_item);
if (ANCHOR_NUM * 2 != array_size) {
printf("Expect anchor size: %d, got %d in json file", ANCHOR_NUM * 2, array_size);
exit(-1);
} else {
printf("Got %d anchors from json file\n", ANCHOR_NUM);
}
for (int i = 0; i < ANCHOR_NUM * 2; i++) {
json_array_item = cJSON_GetArrayItem(json_item, i);
anchor[i] = json_array_item->valuedouble;
printf("%d: %f\n", i, anchor[i]);
}
// net_input_size
json_item = cJSON_GetObjectItem(json_obj, "net_input_size");
array_size = cJSON_GetArraySize(json_item);
if (2 != array_size) {
printf("Expect net_input_size: %d, got %d in json file", 2, array_size);
exit(-1);
} else {
printf("Got %d net_input_size from json file\n", 2);
}
for (int i = 0; i < 2; i++) {
json_array_item = cJSON_GetArrayItem(json_item, i);
net_input_size[i] = json_array_item->valueint;
printf("%d: %d\n", i, net_input_size[i]);
}
// net_output_shape
json_item = cJSON_GetObjectItem(json_obj, "net_output_shape");
array_size = cJSON_GetArraySize(json_item);
if (3 != array_size) {
printf("Expect net_output_shape: %d, got %d in json file", 3, array_size);
exit(-1);
} else {
printf("Got %d net_output_shape from json file\n", 3);
}
for (int i = 0; i < 3; i++) {
json_array_item = cJSON_GetArrayItem(json_item, i);
net_output_shape[i] = json_array_item->valueint;
printf("%d: %d\n", i, net_output_shape[i]);
}
// sensor_output_size
json_item = cJSON_GetObjectItem(json_obj, "sensor_output_size");
array_size = cJSON_GetArraySize(json_item);
if (2 != array_size) {
printf("Expect sensor_output_size: %d, got %d in json file", 2, array_size);
exit(-1);
} else {
printf("Got %d sensor_output_size from json file\n", 2);
}
for (int i = 0; i < 2; i++) {
json_array_item = cJSON_GetArrayItem(json_item, i);
sensor_output_size[i] = json_array_item->valueint;
printf("%d: %d\n", i, sensor_output_size[i]);
}
// kmodel_path
json_item = cJSON_GetObjectItem(json_obj, "kmodel_path");
memcpy(kmodel_path, json_item->valuestring, strlen(json_item->valuestring));
printf("Got kmodel_path: %s\n", kmodel_path);
// kmodel_size
json_item = cJSON_GetObjectItem(json_obj, "kmodel_size");
kmodel_size = json_item->valueint;
printf("Got kmodel_size: %d\n", kmodel_size);
// labels
json_item = cJSON_GetObjectItem(json_obj, "labels");
class_num = cJSON_GetArraySize(json_item);
if (0 >= class_num) {
printf("No labels!");
exit(-1);
} else {
printf("Got %d labels\n", class_num);
}
for (int i = 0; i < class_num; i++) {
json_array_item = cJSON_GetArrayItem(json_item, i);
memcpy(labels[i], json_array_item->valuestring, strlen(json_array_item->valuestring));
printf("%d: %s\n", i, labels[i]);
}
// obj_thresh
json_item = cJSON_GetObjectItem(json_obj, "obj_thresh");
array_size = cJSON_GetArraySize(json_item);
if (class_num != array_size) {
printf("label number and thresh number mismatch! label number : %d, obj thresh number %d", class_num, array_size);
exit(-1);
} else {
printf("Got %d obj_thresh\n", array_size);
}
for (int i = 0; i < array_size; i++) {
json_array_item = cJSON_GetArrayItem(json_item, i);
obj_thresh[i] = json_array_item->valuedouble;
printf("%d: %f\n", i, obj_thresh[i]);
}
// nms_thresh
json_item = cJSON_GetObjectItem(json_obj, "nms_thresh");
nms_thresh = json_item->valuedouble;
printf("Got nms_thresh: %f\n", nms_thresh);
cJSON_Delete(json_obj);
return;
}
void face_detect()
{
int ret = 0;
int result = 0;
int size = 0;
param_parse();
g_fd = open("/dev/ov2640", O_RDONLY);
if (g_fd < 0) {
printf("open ov2640 fail !!");
return;
}
_ioctl_set_dvp_reso set_dvp_reso = {sensor_output_size[1], sensor_output_size[0]};
ioctl(g_fd, IOCTRL_CAMERA_SET_DVP_RESO, &set_dvp_reso);
showbuffer = (unsigned char *)rt_malloc_align(sensor_output_size[0] * sensor_output_size[1] * 2,64);
if (NULL == showbuffer) {
close(g_fd);
printf("showbuffer apply memory fail !!");
return;
}
kpurgbbuffer = (unsigned char *)rt_malloc_align(net_input_size[0] * net_input_size[1] * 3,64);
if (NULL == kpurgbbuffer) {
close(g_fd);
rt_free_align(showbuffer);
printf("kpurgbbuffer apply memory fail !!");
return;
}
model_data = (unsigned char *)malloc(kmodel_size + 255);
if (NULL == model_data) {
rt_free_align(showbuffer);
rt_free_align(kpurgbbuffer);
close(g_fd);
printf("model_data apply memory fail !!");
return;
}
memset(model_data, 0, kmodel_size + 255);
memset(showbuffer, 0, sensor_output_size[0] * sensor_output_size[1] * 2);
memset(kpurgbbuffer, 0, net_input_size[0] * net_input_size[1] * 3);
shoot_para_t.pdata = (unsigned int *)(showbuffer);
shoot_para_t.length = (size_t)(sensor_output_size[0] * sensor_output_size[1] * 2);
/*
load memory
*/
kmodel_fd = open(kmodel_path, O_RDONLY);
if (kmodel_fd < 0) {
printf("open kmodel fail");
close(g_fd);
free(showbuffer);
free(kpurgbbuffer);
free(model_data);
return;
} else {
size = read(kmodel_fd, model_data, kmodel_size);
if (size != kmodel_size) {
printf("read kmodel error size %d\n", size);
close(g_fd);
close(kmodel_fd);
free(showbuffer);
free(kpurgbbuffer);
free(model_data);
return;
} else {
printf("read kmodel success \n");
}
}
unsigned char *model_data_align = (unsigned char *)(((unsigned int)model_data + 255) & (~255));
dvp_set_ai_addr((uint32_t)kpurgbbuffer, (uint32_t)(kpurgbbuffer + net_input_size[0] * net_input_size[1]),
(uint32_t)(kpurgbbuffer + net_input_size[0] * net_input_size[1] * 2));
if (kpu_load_kmodel(&face_detect_task, model_data_align) != 0) {
printf("\nmodel init error\n");
close(g_fd);
close(kmodel_fd);
free(showbuffer);
free(kpurgbbuffer);
free(model_data);
return;
}
face_detect_rl.anchor_number = ANCHOR_NUM;
face_detect_rl.anchor = anchor;
face_detect_rl.threshold = malloc(class_num * sizeof(float));
for (int idx = 0; idx < class_num; idx++) {
face_detect_rl.threshold[idx] = obj_thresh[idx];
}
face_detect_rl.nms_value = nms_thresh;
result = region_layer_init(&face_detect_rl, net_output_shape[0], net_output_shape[1], net_output_shape[2],
net_input_size[1], net_input_size[0]);
printf("region_layer_init result %d \n\r", result);
size_t stack_size = STACK_SIZE;
pthread_attr_t attr; /* thread attributes */
struct sched_param prio; /* thread priority */
prio.sched_priority = 8; /* set the priority to 8 */
pthread_attr_init(&attr); /* initialize the attributes with defaults first */
pthread_attr_setschedparam(&attr, &prio); /* apply the priority to the attributes */
pthread_attr_setstacksize(&attr, stack_size);
/* create the detection thread with attributes attr; entry function is thread_face_detcet_entry, argument is NULL */
result = pthread_create(&facetid, &attr, thread_face_detcet_entry, NULL);
if (0 == result) {
printf("thread_face_detcet_entry successfully!\n");
} else {
printf("thread_face_detcet_entry failed! error code is %d\n", result);
close(g_fd);
}
}
#ifdef __RT_THREAD_H__
MSH_CMD_EXPORT(face_detect, face detect task);
#endif
static void *thread_face_detcet_entry(void *parameter)
{
extern void lcd_draw_picture(uint16_t x1, uint16_t y1, uint16_t width, uint16_t height, uint32_t * ptr);
printf("thread_face_detcet_entry start!\n");
int ret = 0;
// sysctl_enable_irq();
while (1) {
// memset(showbuffer,0,320*240*2);
g_ai_done_flag = 0;
ret = ioctl(g_fd, IOCTRL_CAMERA_START_SHOT, &shoot_para_t);
if (RT_ERROR == ret) {
printf("ov2640 can't wait event flag");
rt_free(showbuffer);
close(g_fd);
pthread_exit(NULL);
return NULL;
}
if (dmalock_sync_take(&dma_ch, 2000))
{
printf("Fail to take DMA channel");
}
kpu_run_kmodel(&face_detect_task, kpurgbbuffer, DMAC_CHANNEL5, ai_done, NULL);
while (!g_ai_done_flag)
;
dmalock_release(dma_ch);
float *output;
size_t output_size;
kpu_get_output(&face_detect_task, 0, (uint8_t **)&output, &output_size);
face_detect_rl.input = output;
region_layer_run(&face_detect_rl, &face_detect_info);
/* display result */
for (int face_cnt = 0; face_cnt < face_detect_info.obj_number; face_cnt++) {
draw_edge((uint32_t *)showbuffer, &face_detect_info, face_cnt, 0xF800, (uint16_t)sensor_output_size[1],
(uint16_t)sensor_output_size[0]);
// printf("%d: (%d, %d, %d, %d) cls: %s conf: %f\t", face_cnt, face_detect_info.obj[face_cnt].x1,
// face_detect_info.obj[face_cnt].y1, face_detect_info.obj[face_cnt].x2, face_detect_info.obj[face_cnt].y2,
// labels[face_detect_info.obj[face_cnt].class_id], face_detect_info.obj[face_cnt].prob);
}
#ifdef BSP_USING_LCD
lcd_draw_picture(0, 0, (uint16_t)sensor_output_size[1], (uint16_t)sensor_output_size[0], (uint32_t *)showbuffer);
//lcd_show_image(0, 0, (uint16_t)sensor_output_size[1], (uint16_t)sensor_output_size[0], (unsigned int *)showbuffer);
#endif
usleep(500);
if (1 == if_exit) {
if_exit = 0;
printf("thread_face_detcet_entry exit");
pthread_exit(NULL);
}
}
}
void face_detect_delete()
{
if (showbuffer != NULL) {
int ret = 0;
close(g_fd);
close(kmodel_fd);
free(showbuffer);
free(kpurgbbuffer);
free(model_data);
printf("face detect task cancel!!! ret %d ", ret);
if_exit = 1;
}
}
#ifdef __RT_THREAD_H__
MSH_CMD_EXPORT(face_detect_delete, face detect task delete);
#endif
void kmodel_load(unsigned char *model_data)
{
int kmodel_fd = 0;
int size = 0;
kmodel_fd = open(kmodel_path, O_RDONLY);
model_data = (unsigned char *)malloc(kmodel_size + 255);
if (NULL == model_data) {
printf("model_data apply memory fail !!");
return;
}
memset(model_data, 0, kmodel_size + 255);
if (kmodel_fd >= 0) {
size = read(kmodel_fd, model_data, kmodel_size);
if (size != kmodel_size) {
printf("read kmodel error size %d\n", size);
} else {
printf("read kmodel success");
}
} else {
free(model_data);
printf("open kmodel fail");
}
}
#ifdef __RT_THREAD_H__
MSH_CMD_EXPORT(kmodel_load, kmodel load memory);
#endif

View File

@ -1,8 +0,0 @@
config HELMET_DETECT
bool "enable apps/helmet detect"
depends on BOARD_K210_EVB
depends on DRV_USING_OV2640
depends on USING_KPU_POSTPROCESSING
depends on USING_YOLOV2
select LIB_USING_CJSON
default n

View File

@ -1,167 +0,0 @@
# Helmet detection demo
### An object detection demo for helmets and heads without helmets, running MobileNet-yolo on K210-based edge devices.
---
## Training
### Environment preparation
Model generated by [aXeleRate](https://forgeplus.trustie.net/projects/yangtuo250/aXeleRate) and converted to kmodel by [nncase](https://github.com/kendryte/nncase/tree/v0.1.0-rc5).
```shell
# master branch for MobileNetv1-yolov2 and unstable branch to test MobileNetv1(v2)-yolov2(v3)
git clone https://git.trustie.net/yangtuo250/aXeleRate.git (-b unstable)
cd aXeleRate
pip install -r requirments.txt && pip install -e .
```
### training config setting
Example [config](https://forgeplus.trustie.net/projects/yangtuo250/aXeleRate/tree/master/configs/detector.json); the main hyper-parameters:
- architecture: backbone, MobileNet7_5 by default. MobileNet1_0 (α = 1.0) and above cannot run on K210 in the master branch because the feature map runs out of memory; on the unstable branch MobileNetV2_1_0 is fine.
- input_size: fixed model input size; a single integer when height equals width, otherwise a list ([height, width]).
- anchors: yolov2 anchors (for master) or anchors scaled to 1.0 (for unstable); they can be generated by [darknet](https://github.com/AlexeyAB/darknet).
- labels: labels of all classes.
- train(valid)_image(annot)_folder: paths of images and annotations for training and validation.
- saved_folder: path for training result storage (models, checkpoints, logs, ...).
My config for the unstable branch:
```json
{
"model": {
"type": "Detector",
"architecture": "MobileNetV2_1_0",
"input_size": [
224,
320
],
"anchors": [
[
[
0.1043,
0.1560
],
[
0.0839,
0.3036
],
[
0.1109,
0.3923
],
[
0.1378,
0.5244
],
[
0.2049,
0.6673
]
]
],
"labels": [
"human"
],
"obj_thresh": 0.5,
"iou_thresh": 0.45,
"coord_scale": 1.0,
"class_scale": 0.0,
"object_scale": 5.0,
"no_object_scale": 3.0
},
"weights": {
"full": "",
"backend": ""
},
"train": {
"actual_epoch": 2000,
"train_image_folder": "mydata/human/Images/train",
"train_annot_folder": "mydata/human/Annotations/train",
"train_times": 2,
"valid_image_folder": "mydata/human/Images/val",
"valid_annot_folder": "mydata/human/Annotations/val",
"valid_times": 1,
"valid_metric": "precision",
"batch_size": 32,
"learning_rate": 2e-5,
"saved_folder": "mydata/human/results",
"first_trainable_layer": "",
"augmentation": true,
"is_only_detect": false,
"validation_freq": 5,
"quantize": false,
"class_weights": [1.0]
},
"converter": {
"type": [
"k210"
]
}
}
```
*(For more detailed config usage, please refer to the original aXeleRate repo.)*
### data preparation
Please prepare the data in [VOC format](https://towardsdatascience.com/coco-data-format-for-object-detection-a4c5eaf518c5), with paths set as in the config above.
### train it!
```shell
python -m aXeleRate.train -c PATH_TO_YOUR_CONFIG
```
### model convert
Please refer to [nncase repo](https://github.com/kendryte/nncase/tree/v0.1.0-rc5).
---
## Deployment
### compile and burn
Run `(scons --)menuconfig` in the bsp folder *(Ubiquitous/RT_Thread/bsp/k210)* and enable:
- More Drivers --> ov2640 driver
- Board Drivers Config --> Enable LCD on SPI0
- Board Drivers Config --> Enable SDCARD (spi1(ss0))
- Board Drivers Config --> Enable DVP(camera)
- RT-Thread Components --> POSIX layer and C standard library --> Enable pthreads APIs
- APP_Framework --> Framework --> support knowing framework --> kpu model postprocessing --> yolov2 region layer
- APP_Framework --> Applications --> knowing app --> enable apps/helmet detect
Run `scons -j(n)` to compile, then burn the image with *kflash*.
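A typical sequence, assuming the default bsp path and an 8-core build host (the exact *kflash* invocation depends on your board and serial port, so it is not shown), might look like:
```shell
cd Ubiquitous/RT_Thread/bsp/k210
scons --menuconfig    # enable the driver, framework and app options listed above
scons -j8             # compile; burn the resulting image with kflash afterwards
```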
### json config and kmodel
Copy the json config used for deployment to */kmodel* on the SD card. An example config file, *helmet.json*, is in this directory. Fields to modify (see the sketch below):
- net_input_size: same as *input_size* in the training config file, but always as an array.
- net_output_shape: final feature map size; can be found in the **nncase** output.
- sensor_output_size: image height and width from the camera.
- kmodel_size: kmodel file size as shown in the file system.
- anchors: same as *anchors* in the training config file (multi-dimensional anchors flattened to 1 dimension).
- labels: same as *labels* in the training config file.
- obj_thresh: array; object threshold of each label.
- nms_thresh: NMS threshold for boxes.
Copy the final kmodel to */kmodel* on the SD card as well.
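A minimal sketch of how such a file can be laid out is shown below. The field names are the ones `param_parse()` reads; every value here is an illustrative placeholder and must be replaced with the numbers from your own training run, **nncase** output and file system:
```shell
$ cat helmet.json     # copy this file to /kmodel on the SD card; values are placeholders
{
    "net_input_size": [256, 320],
    "net_output_shape": [10, 8, 35],
    "sensor_output_size": [240, 320],
    "kmodel_path": "/kmodel/helmet.kmodel",
    "kmodel_size": 2714044,
    "anchors": [0.8, 1.1, 1.2, 1.7, 1.7, 2.4, 2.1, 3.2, 3.4, 5.2],
    "labels": ["head", "helmet"],
    "obj_thresh": [0.7, 0.9],
    "nms_thresh": 0.3
}
```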
---
## Run
In the serial terminal, run `helmet_detect` to start a detection thread and `helmet_detect_delete` to stop it. Detection results appear in the output.
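For example, in the RT-Thread msh shell (prompt shown for illustration):
```shell
msh /> helmet_detect          # spawn the detection thread; results are printed per frame
msh /> helmet_detect_delete   # stop the thread and release the buffers
```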
---
## TODO
- [ ] Fix LCD real-time result display.
- [ ] Test more object detection backbones and algorithms (like YOLOX).

View File

@ -1,380 +0,0 @@
#include <transform.h>
#ifdef LIB_USING_CJSON
#include <cJSON.h>
#endif
#include "region_layer.h"
#define ANCHOR_NUM 5
#define STACK_SIZE (128 * 1024)
#define JSON_FILE_PATH "/kmodel/helmet.json"
#define JSON_BUFFER_SIZE (4 * 1024)
// params from json
static float anchor[ANCHOR_NUM * 2] = {};
static int net_output_shape[3] = {};
static int net_input_size[2] = {};
static int sensor_output_size[2] = {};
static char kmodel_path[127] = "";
static int kmodel_size = 0;
static float obj_thresh[20] = {};
static float nms_thresh = 0.0;
static char labels[20][32] = {};
static int class_num = 0;
#define THREAD_PRIORITY_HELMET_D (11)
static pthread_t helmettid = 0;
static void *thread_helmet_detect_entry(void *parameter);
static int g_fd = 0;
static int kmodel_fd = 0;
static int if_exit = 0;
static unsigned char *showbuffer = NULL;
static unsigned char *kpurgbbuffer = NULL;
static _ioctl_shoot_para shoot_para_t = {0};
unsigned char *model_data = NULL; // kpu data load memory
unsigned char *model_data_align = NULL;
kpu_model_context_t helmet_detect_task;
static region_layer_t helmet_detect_rl;
static obj_info_t helmet_detect_info;
volatile uint32_t g_ai_done_flag;
static void ai_done(void *ctx) { g_ai_done_flag = 1; }
static void param_parse()
{
int fin;
char buffer[JSON_BUFFER_SIZE] = "";
// char *buffer;
// if (NULL != (buffer = (char*)malloc(JSON_BUFFER_SIZE * sizeof(char)))) {
// memset(buffer, 0, JSON_BUFFER_SIZE * sizeof(char));
// } else {
// printf("Json buffer malloc failed!");
// exit(-1);
// }
int array_size;
cJSON *json_obj;
cJSON *json_item;
cJSON *json_array_item;
fin = open(JSON_FILE_PATH, O_RDONLY);
if (!fin) {
printf("Error open file %s", JSON_FILE_PATH);
exit(-1);
}
read(fin, buffer, sizeof(buffer));
close(fin);
// read json string
json_obj = cJSON_Parse(buffer);
// free(buffer);
char *json_print_str = cJSON_Print(json_obj);
printf("Json file content: \n%s\n", json_print_str);
cJSON_free(json_print_str);
// get anchors
json_item = cJSON_GetObjectItem(json_obj, "anchors");
array_size = cJSON_GetArraySize(json_item);
if (ANCHOR_NUM * 2 != array_size) {
printf("Expect anchor size: %d, got %d in json file", ANCHOR_NUM * 2, array_size);
exit(-1);
} else {
printf("Got %d anchors from json file\n", ANCHOR_NUM);
}
for (int i = 0; i < ANCHOR_NUM * 2; i++) {
json_array_item = cJSON_GetArrayItem(json_item, i);
anchor[i] = json_array_item->valuedouble;
printf("%d: %f\n", i, anchor[i]);
}
// net_input_size
json_item = cJSON_GetObjectItem(json_obj, "net_input_size");
array_size = cJSON_GetArraySize(json_item);
if (2 != array_size) {
printf("Expect net_input_size: %d, got %d in json file", 2, array_size);
exit(-1);
} else {
printf("Got %d net_input_size from json file\n", 2);
}
for (int i = 0; i < 2; i++) {
json_array_item = cJSON_GetArrayItem(json_item, i);
net_input_size[i] = json_array_item->valueint;
printf("%d: %d\n", i, net_input_size[i]);
}
// net_output_shape
json_item = cJSON_GetObjectItem(json_obj, "net_output_shape");
array_size = cJSON_GetArraySize(json_item);
if (3 != array_size) {
printf("Expect net_output_shape: %d, got %d in json file", 3, array_size);
exit(-1);
} else {
printf("Got %d net_output_shape from json file\n", 3);
}
for (int i = 0; i < 3; i++) {
json_array_item = cJSON_GetArrayItem(json_item, i);
net_output_shape[i] = json_array_item->valueint;
printf("%d: %d\n", i, net_output_shape[i]);
}
// sensor_output_size
json_item = cJSON_GetObjectItem(json_obj, "sensor_output_size");
array_size = cJSON_GetArraySize(json_item);
if (2 != array_size) {
printf("Expect sensor_output_size: %d, got %d in json file", 2, array_size);
exit(-1);
} else {
printf("Got %d sensor_output_size from json file\n", 2);
}
for (int i = 0; i < 2; i++) {
json_array_item = cJSON_GetArrayItem(json_item, i);
sensor_output_size[i] = json_array_item->valueint;
printf("%d: %d\n", i, sensor_output_size[i]);
}
// kmodel_path
json_item = cJSON_GetObjectItem(json_obj, "kmodel_path");
memcpy(kmodel_path, json_item->valuestring, strlen(json_item->valuestring));
printf("Got kmodel_path: %s\n", kmodel_path);
// kmodel_size
json_item = cJSON_GetObjectItem(json_obj, "kmodel_size");
kmodel_size = json_item->valueint;
printf("Got kmodel_size: %d\n", kmodel_size);
// labels
json_item = cJSON_GetObjectItem(json_obj, "labels");
class_num = cJSON_GetArraySize(json_item);
if (0 >= class_num) {
printf("No labels!");
exit(-1);
} else {
printf("Got %d labels\n", class_num);
}
for (int i = 0; i < class_num; i++) {
json_array_item = cJSON_GetArrayItem(json_item, i);
memcpy(labels[i], json_array_item->valuestring, strlen(json_array_item->valuestring));
printf("%d: %s\n", i, labels[i]);
}
// obj_thresh
json_item = cJSON_GetObjectItem(json_obj, "obj_thresh");
array_size = cJSON_GetArraySize(json_item);
if (class_num != array_size) {
printf("label number and thresh number mismatch! label number : %d, obj thresh number %d", class_num, array_size);
exit(-1);
} else {
printf("Got %d obj_thresh\n", array_size);
}
for (int i = 0; i < array_size; i++) {
json_array_item = cJSON_GetArrayItem(json_item, i);
obj_thresh[i] = json_array_item->valuedouble;
printf("%d: %f\n", i, obj_thresh[i]);
}
// nms_thresh
json_item = cJSON_GetObjectItem(json_obj, "nms_thresh");
nms_thresh = json_item->valuedouble;
printf("Got nms_thresh: %f\n", nms_thresh);
cJSON_Delete(json_obj);
return;
}
void helmet_detect()
{
int ret = 0;
int result = 0;
int size = 0;
param_parse();
g_fd = open("/dev/ov2640", O_RDONLY);
if (g_fd < 0) {
printf("open ov2640 fail !!");
return;
}
_ioctl_set_dvp_reso set_dvp_reso = {sensor_output_size[1], sensor_output_size[0]};
ioctl(g_fd, IOCTRL_CAMERA_SET_DVP_RESO, &set_dvp_reso);
showbuffer = (unsigned char *)malloc(sensor_output_size[0] * sensor_output_size[1] * 2);
if (NULL == showbuffer) {
close(g_fd);
printf("showbuffer apply memory fail !!");
return;
}
kpurgbbuffer = (unsigned char *)malloc(net_input_size[0] * net_input_size[1] * 3);
if (NULL == kpurgbbuffer) {
close(g_fd);
free(showbuffer);
printf("kpurgbbuffer apply memory fail !!");
return;
}
model_data = (unsigned char *)malloc(kmodel_size + 255);
if (NULL == model_data) {
free(showbuffer);
free(kpurgbbuffer);
close(g_fd);
printf("model_data apply memory fail !!");
return;
}
memset(model_data, 0, kmodel_size + 255);
memset(showbuffer, 0, sensor_output_size[0] * sensor_output_size[1] * 2);
memset(kpurgbbuffer, 127, net_input_size[0] * net_input_size[1] * 3);
shoot_para_t.pdata = (unsigned int *)(showbuffer);
shoot_para_t.length = (size_t)(sensor_output_size[0] * sensor_output_size[1] * 2);
/*
load memory
*/
kmodel_fd = open(kmodel_path, O_RDONLY);
if (kmodel_fd < 0) {
printf("open kmodel fail");
close(g_fd);
free(showbuffer);
free(kpurgbbuffer);
free(model_data);
return;
} else {
size = read(kmodel_fd, model_data, kmodel_size);
if (size != kmodel_size) {
printf("read kmodel error size %d\n", size);
close(g_fd);
close(kmodel_fd);
free(showbuffer);
free(kpurgbbuffer);
free(model_data);
return;
} else {
printf("read kmodel success \n");
}
}
unsigned char *model_data_align = (unsigned char *)(((unsigned int)model_data + 255) & (~255));
dvp_set_ai_addr((uint32_t)(kpurgbbuffer + net_input_size[1] * (net_input_size[0] - sensor_output_size[0])),
(uint32_t)(kpurgbbuffer + net_input_size[1] * (net_input_size[0] - sensor_output_size[0]) +
net_input_size[0] * net_input_size[1]),
(uint32_t)(kpurgbbuffer + net_input_size[0] * net_input_size[1] * 2 +
net_input_size[1] * (net_input_size[0] - sensor_output_size[0])));
if (kpu_load_kmodel(&helmet_detect_task, model_data_align) != 0) {
printf("\nmodel init error\n");
close(g_fd);
close(kmodel_fd);
free(showbuffer);
free(kpurgbbuffer);
free(model_data);
return;
}
helmet_detect_rl.anchor_number = ANCHOR_NUM;
helmet_detect_rl.anchor = anchor;
helmet_detect_rl.threshold = malloc(class_num * sizeof(float));
for (int idx = 0; idx < class_num; idx++) {
helmet_detect_rl.threshold[idx] = obj_thresh[idx];
}
helmet_detect_rl.nms_value = nms_thresh;
result = region_layer_init(&helmet_detect_rl, net_output_shape[0], net_output_shape[1], net_output_shape[2],
net_input_size[1], net_input_size[0]);
printf("region_layer_init result %d \n\r", result);
size_t stack_size = STACK_SIZE;
pthread_attr_t attr; /* thread attributes */
struct sched_param prio; /* thread priority */
prio.sched_priority = 8; /* set the priority to 8 */
pthread_attr_init(&attr); /* initialize the attributes with defaults first */
pthread_attr_setschedparam(&attr, &prio); /* apply the priority to the attributes */
pthread_attr_setstacksize(&attr, stack_size);
/* create the detection thread with attributes attr; entry function is thread_helmet_detect_entry, argument is NULL */
result = pthread_create(&helmettid, &attr, thread_helmet_detect_entry, NULL);
if (0 == result) {
printf("thread_helmet_detect_entry successfully!\n");
} else {
printf("thread_helmet_detect_entry failed! error code is %d\n", result);
close(g_fd);
}
}
#ifdef __RT_THREAD_H__
MSH_CMD_EXPORT(helmet_detect, helmet detect task);
#endif
static void *thread_helmet_detect_entry(void *parameter)
{
extern void lcd_draw_picture(uint16_t x1, uint16_t y1, uint16_t width, uint16_t height, uint32_t * ptr);
printf("thread_helmet_detect_entry start!\n");
int ret = 0;
// sysctl_enable_irq();
while (1) {
// memset(showbuffer,0,320*240*2);
g_ai_done_flag = 0;
ret = ioctl(g_fd, IOCTRL_CAMERA_START_SHOT, &shoot_para_t);
if (RT_ERROR == ret) {
printf("ov2640 can't wait event flag");
rt_free(showbuffer);
close(g_fd);
pthread_exit(NULL);
return NULL;
}
kpu_run_kmodel(&helmet_detect_task, kpurgbbuffer, DMAC_CHANNEL5, ai_done, NULL);
while (!g_ai_done_flag)
;
float *output;
size_t output_size;
kpu_get_output(&helmet_detect_task, 0, (uint8_t **)&output, &output_size);
helmet_detect_rl.input = output;
region_layer_run(&helmet_detect_rl, &helmet_detect_info);
/* display result */
#ifdef BSP_USING_LCD
for (int helmet_cnt = 0; helmet_cnt < helmet_detect_info.obj_number; helmet_cnt++) {
// draw_edge((uint32_t *)showbuffer, &helmet_detect_info, helmet_cnt, 0xF800,
// (uint16_t)sensor_output_size[1],
// (uint16_t)sensor_output_size[0]);
printf("%d: (%d, %d, %d, %d) cls: %s conf: %f\t", helmet_cnt, helmet_detect_info.obj[helmet_cnt].x1,
helmet_detect_info.obj[helmet_cnt].y1, helmet_detect_info.obj[helmet_cnt].x2,
helmet_detect_info.obj[helmet_cnt].y2, labels[helmet_detect_info.obj[helmet_cnt].class_id],
helmet_detect_info.obj[helmet_cnt].prob);
}
if (0 != helmet_detect_info.obj_number) {
printf("\n");
}
lcd_draw_picture(0, 0, (uint16_t)sensor_output_size[1], (uint16_t)sensor_output_size[0], (unsigned int *)showbuffer);
#endif
usleep(1);
if (1 == if_exit) {
if_exit = 0;
printf("thread_helmet_detect_entry exit");
pthread_exit(NULL);
}
}
}
void helmet_detect_delete()
{
if (showbuffer != NULL) {
int ret = 0;
close(g_fd);
close(kmodel_fd);
free(showbuffer);
free(kpurgbbuffer);
free(model_data);
printf("helmet detect task cancel!!! ret %d ", ret);
if_exit = 1;
}
}
#ifdef __RT_THREAD_H__
MSH_CMD_EXPORT(helmet_detect_delete, helmet detect task delete);
#endif
void kmodel_load(unsigned char *model_data)
{
int kmodel_fd = 0;
int size = 0;
kmodel_fd = open(kmodel_path, O_RDONLY);
model_data = (unsigned char *)malloc(kmodel_size + 255);
if (NULL == model_data) {
printf("model_data apply memory fail !!");
return;
}
memset(model_data, 0, kmodel_size + 255);
if (kmodel_fd >= 0) {
size = read(kmodel_fd, model_data, kmodel_size);
if (size != kmodel_size) {
printf("read kmodel error size %d\n", size);
} else {
printf("read kmodel success");
}
} else {
free(model_data);
printf("open kmodel fail");
}
}
#ifdef __RT_THREAD_H__
MSH_CMD_EXPORT(kmodel_load, kmodel load memory);
#endif

View File

@ -1,6 +1,6 @@
menuconfig USING_IMAGE_PROCESSING_APP
bool "image processing app "
default n
if USING_IMAGE_PROCESSING_APP
source "$APP_DIR/Applications/knowing_app/image_processing/TJpgDec_APP/Kconfig"
endif
menuconfig USING_IMAGE_PROCESSING_APP
bool "image processing app "
default n
if USING_IMAGE_PROCESSING_APP
source "$APP_DIR/Applications/knowing_app/image_processing/TJpgDec_APP/Kconfig"
endif

View File

@ -1,4 +1,4 @@
config IMAGE_PROCESSING_TJPGDEC_APP
bool "image processing apps/TJpgDec(example)"
select IMAGE_PROCESSING_USING_TJPGD
default n
config IMAGE_PROCESSING_TJPGDEC_APP
bool "image processing apps/TJpgDec(example)"
select IMAGE_PROCESSING_USING_TJPGD
default n

View File

@ -1,8 +0,0 @@
config INSTRUSION_DETECT
bool "enable apps/instrusion detect"
depends on BOARD_K210_EVB
depends on DRV_USING_OV2640
depends on USING_KPU_POSTPROCESSING
depends on USING_YOLOV2
select LIB_USING_CJSON
default n

View File

@ -1,5 +0,0 @@
# Intrusion detect demo
### A human (intrusion) object detection demo, running MobileNet-yolo on K210-based edge devices.
***For training, deployment and running, please see helmet_detect.***

View File

@ -1,9 +0,0 @@
from building import *
cwd = GetCurrentDir()
src = Glob('*.c') + Glob('*.cpp')
CPPPATH = [cwd]
group = DefineGroup('Applications', src, depend = ['INSTRUSION_DETECT'], LOCAL_CPPPATH = CPPPATH)
Return('group')

View File

@ -1,390 +0,0 @@
#include <transform.h>
#include <unistd.h>
#ifdef LIB_USING_CJSON
#include <cJSON.h>
#endif
#include "region_layer.h"
#define ANCHOR_NUM 5
#define STACK_SIZE (128 * 1024)
#define JSON_FILE_PATH "/kmodel/human.json"
#define JSON_BUFFER_SIZE (4 * 1024)
static dmac_channel_number_t dma_ch = DMAC_CHANNEL_MAX;
extern void lcd_show_image(int x, int y, int wide, int height,const rt_uint8_t *buf);
// params from json
static float anchor[ANCHOR_NUM * 2] = {};
static int net_output_shape[3] = {};
static int net_input_size[2] = {};
static int sensor_output_size[2] = {};
static char kmodel_path[127] = "";
static int kmodel_size = 0;
static float obj_thresh[20] = {};
static float nms_thresh = 0.0;
static char labels[20][32] = {};
static int class_num = 0;
#define THREAD_PRIORITY_HUMAN_D (11)
static pthread_t instrusiontid = 0;
static void *thread_instrusion_detect_entry(void *parameter);
static int g_fd = 0;
static int kmodel_fd = 0;
static int if_exit = 0;
static unsigned char *showbuffer = NULL;
static unsigned char *kpurgbbuffer = NULL;
static _ioctl_shoot_para shoot_para_t = {0};
unsigned char *model_data = NULL; // kpu data load memory
unsigned char *model_data_align = NULL;
kpu_model_context_t instrusion_detect_task;
static region_layer_t instrusion_detect_rl;
static obj_info_t instrusion_detect_info;
volatile uint32_t g_ai_done_flag;
static void ai_done(void *ctx) { g_ai_done_flag = 1; }
static void param_parse()
{
int fin;
char buffer[JSON_BUFFER_SIZE] = "";
// char *buffer;
// if (NULL != (buffer = (char*)malloc(JSON_BUFFER_SIZE * sizeof(char)))) {
// memset(buffer, 0, JSON_BUFFER_SIZE * sizeof(char));
// } else {
// printf("Json buffer malloc failed!");
// exit(-1);
// }
int array_size;
cJSON *json_obj;
cJSON *json_item;
cJSON *json_array_item;
fin = open(JSON_FILE_PATH, O_RDONLY);
if (!fin) {
printf("Error open file %s", JSON_FILE_PATH);
exit(-1);
}
read(fin, buffer, sizeof(buffer));
close(fin);
// read json string
json_obj = cJSON_Parse(buffer);
// free(buffer);
char *json_print_str = cJSON_Print(json_obj);
printf("Json file content: \n%s\n", json_print_str);
cJSON_free(json_print_str);
// get anchors
json_item = cJSON_GetObjectItem(json_obj, "anchors");
array_size = cJSON_GetArraySize(json_item);
if (ANCHOR_NUM * 2 != array_size) {
printf("Expect anchor size: %d, got %d in json file", ANCHOR_NUM * 2, array_size);
exit(-1);
} else {
printf("Got %d anchors from json file\n", ANCHOR_NUM);
}
for (int i = 0; i < ANCHOR_NUM * 2; i++) {
json_array_item = cJSON_GetArrayItem(json_item, i);
anchor[i] = json_array_item->valuedouble;
printf("%d: %f\n", i, anchor[i]);
}
// net_input_size
json_item = cJSON_GetObjectItem(json_obj, "net_input_size");
array_size = cJSON_GetArraySize(json_item);
if (2 != array_size) {
printf("Expect net_input_size: %d, got %d in json file", 2, array_size);
exit(-1);
} else {
printf("Got %d net_input_size from json file\n", 2);
}
for (int i = 0; i < 2; i++) {
json_array_item = cJSON_GetArrayItem(json_item, i);
net_input_size[i] = json_array_item->valueint;
printf("%d: %d\n", i, net_input_size[i]);
}
// net_output_shape
json_item = cJSON_GetObjectItem(json_obj, "net_output_shape");
array_size = cJSON_GetArraySize(json_item);
if (3 != array_size) {
printf("Expect net_output_shape: %d, got %d in json file", 3, array_size);
exit(-1);
} else {
printf("Got %d net_output_shape from json file\n", 3);
}
for (int i = 0; i < 3; i++) {
json_array_item = cJSON_GetArrayItem(json_item, i);
net_output_shape[i] = json_array_item->valueint;
printf("%d: %d\n", i, net_output_shape[i]);
}
// sensor_output_size
json_item = cJSON_GetObjectItem(json_obj, "sensor_output_size");
array_size = cJSON_GetArraySize(json_item);
if (2 != array_size) {
printf("Expect sensor_output_size: %d, got %d in json file", 2, array_size);
exit(-1);
} else {
printf("Got %d sensor_output_size from json file\n", 2);
}
for (int i = 0; i < 2; i++) {
json_array_item = cJSON_GetArrayItem(json_item, i);
sensor_output_size[i] = json_array_item->valueint;
printf("%d: %d\n", i, sensor_output_size[i]);
}
// kmodel_path
json_item = cJSON_GetObjectItem(json_obj, "kmodel_path");
memcpy(kmodel_path, json_item->valuestring, strlen(json_item->valuestring));
printf("Got kmodel_path: %s\n", kmodel_path);
// kmodel_size
json_item = cJSON_GetObjectItem(json_obj, "kmodel_size");
kmodel_size = json_item->valueint;
printf("Got kmodel_size: %d\n", kmodel_size);
// labels
json_item = cJSON_GetObjectItem(json_obj, "labels");
class_num = cJSON_GetArraySize(json_item);
if (0 >= class_num) {
printf("No labels!");
exit(-1);
} else {
printf("Got %d labels\n", class_num);
}
for (int i = 0; i < class_num; i++) {
json_array_item = cJSON_GetArrayItem(json_item, i);
memcpy(labels[i], json_array_item->valuestring, strlen(json_array_item->valuestring));
printf("%d: %s\n", i, labels[i]);
}
// obj_thresh
json_item = cJSON_GetObjectItem(json_obj, "obj_thresh");
array_size = cJSON_GetArraySize(json_item);
if (class_num != array_size) {
printf("label number and thresh number mismatch! label number : %d, obj thresh number %d", class_num, array_size);
exit(-1);
} else {
printf("Got %d obj_thresh\n", array_size);
}
for (int i = 0; i < array_size; i++) {
json_array_item = cJSON_GetArrayItem(json_item, i);
obj_thresh[i] = json_array_item->valuedouble;
printf("%d: %f\n", i, obj_thresh[i]);
}
// nms_thresh
json_item = cJSON_GetObjectItem(json_obj, "nms_thresh");
nms_thresh = json_item->valuedouble;
printf("Got nms_thresh: %f\n", nms_thresh);
cJSON_Delete(json_obj);
return;
}
void instrusion_detect()
{
int ret = 0;
int result = 0;
int size = 0;
param_parse();
g_fd = open("/dev/ov2640", O_RDONLY);
if (g_fd < 0) {
printf("open ov2640 fail !!");
return;
}
_ioctl_set_dvp_reso set_dvp_reso = {sensor_output_size[1], sensor_output_size[0]};
ioctl(g_fd, IOCTRL_CAMERA_SET_DVP_RESO, &set_dvp_reso);
showbuffer = (unsigned char *)rt_malloc_align(sensor_output_size[0] * sensor_output_size[1] * 2,64);
if (NULL == showbuffer) {
close(g_fd);
printf("showbuffer apply memory fail !!");
return;
}
kpurgbbuffer = (unsigned char *)rt_malloc_align(net_input_size[0] * net_input_size[1] * 3,64);
if (NULL == kpurgbbuffer) {
close(g_fd);
rt_free_align(showbuffer);
printf("kpurgbbuffer apply memory fail !!");
return;
}
model_data = (unsigned char *)malloc(kmodel_size + 255);
if (NULL == model_data) {
rt_free_align(showbuffer);
rt_free_align(kpurgbbuffer);
close(g_fd);
printf("model_data apply memory fail !!");
return;
}
memset(model_data, 0, kmodel_size + 255);
memset(showbuffer, 0, sensor_output_size[0] * sensor_output_size[1] * 2);
memset(kpurgbbuffer, 127, net_input_size[0] * net_input_size[1] * 3);
shoot_para_t.pdata = (unsigned int *)(showbuffer);
shoot_para_t.length = (size_t)(sensor_output_size[0] * sensor_output_size[1] * 2);
/*
load memory
*/
kmodel_fd = open(kmodel_path, O_RDONLY);
if (kmodel_fd < 0) {
printf("open kmodel fail");
close(g_fd);
free(showbuffer);
free(kpurgbbuffer);
free(model_data);
return;
} else {
size = read(kmodel_fd, model_data, kmodel_size);
if (size != kmodel_size) {
printf("read kmodel error size %d\n", size);
close(g_fd);
close(kmodel_fd);
free(showbuffer);
free(kpurgbbuffer);
free(model_data);
return;
} else {
printf("read kmodel success \n");
}
}
unsigned char *model_data_align = (unsigned char *)(((unsigned int)model_data + 255) & (~255));
dvp_set_ai_addr((uint32_t)(kpurgbbuffer + net_input_size[1] * (net_input_size[0] - sensor_output_size[0])),
(uint32_t)(kpurgbbuffer + net_input_size[1] * (net_input_size[0] - sensor_output_size[0]) +
net_input_size[0] * net_input_size[1]),
(uint32_t)(kpurgbbuffer + net_input_size[0] * net_input_size[1] * 2 +
net_input_size[1] * (net_input_size[0] - sensor_output_size[0])));
if (kpu_load_kmodel(&instrusion_detect_task, model_data_align) != 0) {
printf("\nmodel init error\n");
close(g_fd);
close(kmodel_fd);
free(showbuffer);
free(kpurgbbuffer);
free(model_data);
return;
}
instrusion_detect_rl.anchor_number = ANCHOR_NUM;
instrusion_detect_rl.anchor = anchor;
instrusion_detect_rl.threshold = malloc(class_num * sizeof(float));
for (int idx = 0; idx < class_num; idx++) {
instrusion_detect_rl.threshold[idx] = obj_thresh[idx];
}
instrusion_detect_rl.nms_value = nms_thresh;
result = region_layer_init(&instrusion_detect_rl, net_output_shape[0], net_output_shape[1], net_output_shape[2],
net_input_size[1], net_input_size[0]);
printf("region_layer_init result %d \n\r", result);
size_t stack_size = STACK_SIZE;
pthread_attr_t attr; /* thread attributes */
struct sched_param prio; /* thread priority */
prio.sched_priority = 8; /* set the priority to 8 */
pthread_attr_init(&attr); /* initialize the attributes with defaults first */
pthread_attr_setschedparam(&attr, &prio); /* apply the priority to the attributes */
pthread_attr_setstacksize(&attr, stack_size);
/* create the detection thread with attributes attr; entry function is thread_instrusion_detect_entry, argument is NULL */
result = pthread_create(&instrusiontid, &attr, thread_instrusion_detect_entry, NULL);
if (0 == result) {
printf("thread_instrusion_detect_entry successfully!\n");
} else {
printf("thread_instrusion_detect_entry failed! error code is %d\n", result);
close(g_fd);
}
}
#ifdef __RT_THREAD_H__
MSH_CMD_EXPORT(instrusion_detect, instrusion detect task);
#endif
extern void lcd_draw_picture(uint16_t x1, uint16_t y1, uint16_t width, uint16_t height, uint32_t * ptr);
extern void lcd_show_image(int x, int y, int wide, int height,const rt_uint8_t *buf);
extern void lcd_draw_16_picture(uint16_t x1, uint16_t y1, uint16_t width, uint16_t height, uint32_t * ptr);
static void *thread_instrusion_detect_entry(void *parameter)
{
printf("thread_instrusion_detect_entry start!\n");
int ret = 0;
// sysctl_enable_irq();
while (1) {
// memset(showbuffer,0,320*240*2);
g_ai_done_flag = 0;
ret = ioctl(g_fd, IOCTRL_CAMERA_START_SHOT, &shoot_para_t);
if (RT_ERROR == ret) {
printf("ov2640 can't wait event flag");
rt_free(showbuffer);
close(g_fd);
pthread_exit(NULL);
return NULL;
}
if (dmalock_sync_take(&dma_ch, 2000))
{
printf("Fail to take DMA channel");
}
kpu_run_kmodel(&instrusion_detect_task, kpurgbbuffer, DMAC_CHANNEL5, ai_done, NULL);
while (!g_ai_done_flag)
;
dmalock_release(dma_ch);
float *output;
size_t output_size;
kpu_get_output(&instrusion_detect_task, 0, (uint8_t **)&output, &output_size);
instrusion_detect_rl.input = output;
region_layer_run(&instrusion_detect_rl, &instrusion_detect_info);
/* display result */
for (int instrusion_cnt = 0; instrusion_cnt < instrusion_detect_info.obj_number; instrusion_cnt++)
{
draw_edge((uint32_t *)showbuffer, &instrusion_detect_info, instrusion_cnt, 0xF800,(uint16_t)sensor_output_size[1],(uint16_t)sensor_output_size[0]);
printf("%d: (%d, %d, %d, %d) cls: %s conf: %f\t", instrusion_cnt, instrusion_detect_info.obj[instrusion_cnt].x1,
instrusion_detect_info.obj[instrusion_cnt].y1, instrusion_detect_info.obj[instrusion_cnt].x2,
instrusion_detect_info.obj[instrusion_cnt].y2, labels[instrusion_detect_info.obj[instrusion_cnt].class_id],
instrusion_detect_info.obj[instrusion_cnt].prob);
}
#ifdef BSP_USING_LCD
//lcd_show_image(0, 0,(uint16_t)sensor_output_size[1], (uint16_t)sensor_output_size[0],(unsigned int *)showbuffer);
lcd_draw_picture(0, 0, (uint16_t)sensor_output_size[1], (uint16_t)sensor_output_size[0], (uint32_t *)showbuffer);
#endif
if (0 != instrusion_detect_info.obj_number) {
printf("\n");
}
usleep(1);
if (1 == if_exit) {
if_exit = 0;
printf("thread_instrusion_detect_entry exit");
pthread_exit(NULL);
}
}
}
void instrusion_detect_delete()
{
if (showbuffer != NULL) {
int ret = 0;
close(g_fd);
close(kmodel_fd);
free(showbuffer);
free(kpurgbbuffer);
free(model_data);
printf("instrusion detect task cancel!!! ret %d ", ret);
if_exit = 1;
}
}
#ifdef __RT_THREAD_H__
MSH_CMD_EXPORT(instrusion_detect_delete, instrusion detect task delete);
#endif
void kmodel_load(unsigned char *model_data)
{
int kmodel_fd = 0;
int size = 0;
kmodel_fd = open(kmodel_path, O_RDONLY);
model_data = (unsigned char *)malloc(kmodel_size + 255);
if (NULL == model_data) {
printf("model_data apply memory fail !!");
return;
}
memset(model_data, 0, kmodel_size + 255);
if (kmodel_fd >= 0) {
size = read(kmodel_fd, model_data, kmodel_size);
if (size != kmodel_size) {
printf("read kmodel error size %d\n", size);
} else {
printf("read kmodel success");
}
} else {
free(model_data);
printf("open kmodel fail");
}
}
#ifdef __RT_THREAD_H__
MSH_CMD_EXPORT(kmodel_load, kmodel load memory);
#endif

View File

@ -19,7 +19,8 @@ void simple_CSV_read()
fin = open(CSV_PATH, O_RDONLY);
if (!fin) {
printf("Error open file %s", CSV_PATH);
exit(-1);
// exit(-1);
return;
}
read(fin, buffer, sizeof(buffer));
close(fin);

View File

@ -0,0 +1,10 @@
config K210_DETECT_ENTRY
bool "enable apps/k210 yolov2 detect entry"
depends on BOARD_K210_EVB
depends on DRV_USING_OV2640
depends on USING_KPU_PROCESSING
depends on USING_YOLOV2
depends on USING_YOLOV2_JSONPARSER
depends on USING_K210_YOLOV2_DETECT
select LIB_USING_CJSON
default n

View File

@ -4,6 +4,6 @@ cwd = GetCurrentDir()
src = Glob('*.c') + Glob('*.cpp')
CPPPATH = [cwd]
group = DefineGroup('Applications', src, depend = ['HELMET_DETECT'], LOCAL_CPPPATH = CPPPATH)
group = DefineGroup('Applications', src, depend = ['USING_K210_YOLOV2_DETECT'], LOCAL_CPPPATH = CPPPATH)
Return('group')

View File

@ -24,7 +24,6 @@
6.718375,
9.01025
],
"kmodel_path": "/kmodel/detect.kmodel",
"kmodel_size": 388776,
"obj_thresh": [
0.7

View File

@ -24,11 +24,10 @@
2.1128,
3.184
],
"kmodel_path": "/kmodel/helmet.kmodel",
"kmodel_size": 2714044,
"obj_thresh": [
0.7,
0.9
0.99
],
"labels": [
"head",

View File

@ -24,7 +24,6 @@
2.049,
4.6711
],
"kmodel_path": "/kmodel/human.kmodel",
"kmodel_size": 2713236,
"obj_thresh": [
0.7

View File

@ -0,0 +1,16 @@
#include "k210_yolov2_detect.h"
static void detect_app(int argc, char *argv[])
{
if (2 != argc) {
printf("Usage: detect_app <ABSOLUTE_CONFIG_JSON_PATH>");
} else {
k210_detect(argv[1]);
}
return;
}
// clang-format off
#ifdef __RT_THREAD_H__
MSH_CMD_EXPORT(detect_app, k210 detect app usage: detect_app <ABSOLUTE_CONFIG_JSON_PATH>);
#endif
// clang-format on
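This entry point replaces the per-app shell commands removed above with a single `detect_app` command; a hypothetical msh invocation (the path is only an example) would be:
```shell
msh /> detect_app /kmodel/helmet.json   # the kmodel path is derived by swapping the .json suffix for .kmodel
```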

View File

@ -5,9 +5,9 @@ menuconfig SUPPORT_KNOWING_FRAMEWORK
if SUPPORT_KNOWING_FRAMEWORK
source "$APP_DIR/Framework/knowing/tensorflow-lite/Kconfig"
source "$APP_DIR/Framework/knowing/kpu-postprocessing/Kconfig"
source "$APP_DIR/Framework/knowing/filter/Kconfig"
source "$APP_DIR/Framework/knowing/ota/Kconfig"
source "$APP_DIR/Framework/knowing/image_processing/Kconfig"
source "$APP_DIR/Framework/knowing/cmsis_5/Kconfig"
source "$APP_DIR/Framework/knowing/kpu/Kconfig"
endif

View File

@ -1,6 +1,6 @@
menuconfig USING_IMAGE_PROCESSING
bool "image processing "
default n
if USING_IMAGE_PROCESSING
source "$APP_DIR/Framework/knowing/image_processing/TJpgDec/Kconfig"
endif
menuconfig USING_IMAGE_PROCESSING
bool "image processing "
default n
if USING_IMAGE_PROCESSING
source "$APP_DIR/Framework/knowing/image_processing/TJpgDec/Kconfig"
endif

View File

@ -1,27 +1,27 @@
menuconfig IMAGE_PROCESSING_USING_TJPGD
bool "TJpgDec: Tiny JPEG Decompressor."
default n
if IMAGE_PROCESSING_USING_TJPGD
config TJPGD_INPUT_BUFFER_SIZE
int "Size of stream input buffer"
default 512
choice
prompt "Output format"
default TJPGD_USING_FORMAT_RGB888
help
Select the RGB output format
config TJPGD_USING_FORMAT_RGB888
bool "RGB888"
config TJPGD_USING_FORMAT_RGB565
bool "RGB565"
endchoice
config TJPGD_USING_SCALE
bool "Use output descaling"
default y
config TJPGD_USING_TBLCLIP
bool "Use table for saturation"
default y
endif
menuconfig IMAGE_PROCESSING_USING_TJPGD
bool "TJpgDec: Tiny JPEG Decompressor."
default n
if IMAGE_PROCESSING_USING_TJPGD
config TJPGD_INPUT_BUFFER_SIZE
int "Size of stream input buffer"
default 512
choice
prompt "Output format"
default TJPGD_USING_FORMAT_RGB888
help
Select the RGB output format
config TJPGD_USING_FORMAT_RGB888
bool "RGB888"
config TJPGD_USING_FORMAT_RGB565
bool "RGB565"
endchoice
config TJPGD_USING_SCALE
bool "Use output descaling"
default y
config TJPGD_USING_TBLCLIP
bool "Use table for saturation"
default y
endif

View File

@ -1,6 +0,0 @@
menuconfig USING_KPU_POSTPROCESSING
bool "kpu model postprocessing"
default y
if USING_KPU_POSTPROCESSING
source "$APP_DIR/Framework/knowing/kpu-postprocessing/yolov2/Kconfig"
endif

View File

@ -0,0 +1,8 @@
menuconfig USING_KPU_PROCESSING
bool "kpu model processing"
default y
if USING_KPU_PROCESSING
source "$APP_DIR/Framework/knowing/kpu/yolov2/Kconfig"
source "$APP_DIR/Framework/knowing/kpu/yolov2_json/Kconfig"
source "$APP_DIR/Framework/knowing/kpu/k210_yolov2_detect_procedure/Kconfig"
endif

View File

@ -0,0 +1,7 @@
menuconfig USING_K210_YOLOV2_DETECT
bool "k210 yolov2 detect procedure"
depends on USING_KPU_PROCESSING
default n

View File

@ -0,0 +1,10 @@
from building import *
import os
cwd = GetCurrentDir()
src = Glob('*.c')
group = DefineGroup('k210_yolov2_detect_procedure', src, depend = ['USING_YOLOV2', 'USING_YOLOV2_JSONPARSER', 'USING_KPU_PROCESSING'], CPPPATH = [cwd])
Return('group')

View File

@ -0,0 +1,278 @@
#include "k210_yolov2_detect.h"
#include "cJSON.h"
#ifdef USING_YOLOV2_JSONPARSER
#include <json_parser.h>
#endif
#include "region_layer.h"
#define STACK_SIZE (128 * 1024)
static dmac_channel_number_t dma_ch = DMAC_CHANNEL_MAX;
#define THREAD_PRIORITY_D (11)
static pthread_t tid = 0;
static void *thread_detect_entry(void *parameter);
static int g_fd = 0;
static int kmodel_fd = 0;
static int if_exit = 0;
static unsigned char *showbuffer = NULL;
static unsigned char *kpurgbbuffer = NULL;
static _ioctl_shoot_para shoot_para_t = {0};
unsigned char *model_data = NULL; // kpu data load memory
unsigned char *model_data_align = NULL;
kpu_model_context_t detect_task;
static region_layer_t detect_rl;
static obj_info_t detect_info;
volatile uint32_t g_ai_done_flag;
static void ai_done(void *ctx) { g_ai_done_flag = 1; }
void k210_detect(char *json_file_path)
{
int ret = 0;
int result = 0;
int size = 0;
char kmodel_path[127] = {};
yolov2_params_t detect_params = param_parse(json_file_path);
if (!detect_params.is_valid) {
return;
}
g_fd = open("/dev/ov2640", O_RDONLY);
if (g_fd < 0) {
printf("open ov2640 fail !!");
return;
}
_ioctl_set_dvp_reso set_dvp_reso = {detect_params.sensor_output_size[1], detect_params.sensor_output_size[0]};
ioctl(g_fd, IOCTRL_CAMERA_SET_DVP_RESO, &set_dvp_reso);
showbuffer =
(unsigned char *)rt_malloc_align(detect_params.sensor_output_size[0] * detect_params.sensor_output_size[1] * 2, 64);
if (NULL == showbuffer) {
close(g_fd);
printf("showbuffer apply memory fail !!");
return;
}
kpurgbbuffer = (unsigned char *)rt_malloc_align(detect_params.net_input_size[0] * detect_params.net_input_size[1] * 3, 64);
if (NULL == kpurgbbuffer) {
close(g_fd);
rt_free_align(showbuffer);
printf("kpurgbbuffer apply memory fail !!");
return;
}
model_data = (unsigned char *)malloc(detect_params.kmodel_size + 255);
if (NULL == model_data) {
rt_free_align(showbuffer);
rt_free_align(kpurgbbuffer);
close(g_fd);
printf("model_data apply memory fail !!");
return;
}
memset(model_data, 0, detect_params.kmodel_size + 255);
memset(showbuffer, 0, detect_params.sensor_output_size[0] * detect_params.sensor_output_size[1] * 2);
memset(kpurgbbuffer, 0, detect_params.net_input_size[0] * detect_params.net_input_size[1] * 3);
shoot_para_t.pdata = (unsigned int *)(showbuffer);
shoot_para_t.length = (size_t)(detect_params.sensor_output_size[0] * detect_params.sensor_output_size[1] * 2);
/*
load memory
*/
// kmodel path generate from json file path, *.json -> *.kmodel
memcpy(kmodel_path, json_file_path, strlen(json_file_path));
int idx_suffix_start = strlen(json_file_path) - 4;
const char kmodel_suffix[7] = "kmodel";
int kmodel_suffix_len = 6;
while (kmodel_suffix_len--) {
kmodel_path[idx_suffix_start + 5 - kmodel_suffix_len] = kmodel_suffix[5 - kmodel_suffix_len];
}
printf("kmodel path: %s\n", kmodel_path);
kmodel_fd = open(kmodel_path, O_RDONLY);
if (kmodel_fd < 0) {
printf("open kmodel fail");
close(g_fd);
free(showbuffer);
free(kpurgbbuffer);
free(model_data);
return;
} else {
size = read(kmodel_fd, model_data, detect_params.kmodel_size);
if (size != detect_params.kmodel_size) {
printf("read kmodel error size %d\n", size);
close(g_fd);
close(kmodel_fd);
free(showbuffer);
free(kpurgbbuffer);
free(model_data);
return;
} else {
printf("read kmodel success \n");
}
}
unsigned char *model_data_align = (unsigned char *)(((unsigned int)model_data + 255) & (~255));
// dvp_set_ai_addr((uint32_t)kpurgbbuffer,
// (uint32_t)(kpurgbbuffer + detect_params.net_input_size[0] * detect_params.net_input_size[1]),
// (uint32_t)(kpurgbbuffer + detect_params.net_input_size[0] * detect_params.net_input_size[1] * 2));
dvp_set_ai_addr(
(uint32_t)(kpurgbbuffer +
detect_params.net_input_size[1] * (detect_params.net_input_size[0] - detect_params.sensor_output_size[0])),
(uint32_t)(kpurgbbuffer +
detect_params.net_input_size[1] * (detect_params.net_input_size[0] - detect_params.sensor_output_size[0]) +
detect_params.net_input_size[0] * detect_params.net_input_size[1]),
(uint32_t)(kpurgbbuffer +
detect_params.net_input_size[1] * (detect_params.net_input_size[0] - detect_params.sensor_output_size[0]) +
detect_params.net_input_size[0] * detect_params.net_input_size[1] * 2));
if (kpu_load_kmodel(&detect_task, model_data_align) != 0) {
printf("\nmodel init error\n");
close(g_fd);
close(kmodel_fd);
free(showbuffer);
free(kpurgbbuffer);
free(model_data);
return;
}
detect_rl.anchor_number = ANCHOR_NUM;
detect_rl.anchor = detect_params.anchor;
detect_rl.nms_value = detect_params.nms_thresh;
detect_rl.classes = detect_params.class_num;
result =
region_layer_init(&detect_rl, detect_params.net_output_shape[0], detect_params.net_output_shape[1],
detect_params.net_output_shape[2], detect_params.net_input_size[1], detect_params.net_input_size[0]);
printf("region_layer_init result %d \n\r", result);
for (int idx = 0; idx < detect_params.class_num; idx++) {
detect_rl.threshold[idx] = detect_params.obj_thresh[idx];
}
size_t stack_size = STACK_SIZE;
pthread_attr_t attr; /* thread attributes */
struct sched_param prio; /* thread priority */
prio.sched_priority = 8; /* set the priority to 8 */
pthread_attr_init(&attr); /* initialize the attributes with defaults first */
pthread_attr_setschedparam(&attr, &prio); /* apply the priority to the attributes */
pthread_attr_setstacksize(&attr, stack_size);
/* create the detection thread with attributes attr; entry function is thread_detect_entry, argument is the parsed detect_params */
result = pthread_create(&tid, &attr, thread_detect_entry, &detect_params);
if (0 == result) {
printf("thread_detect_entry successfully!\n");
} else {
printf("thread_detect_entry failed! error code is %d\n", result);
close(g_fd);
}
}
// #ifdef __RT_THREAD_H__
// MSH_CMD_EXPORT(detect, detect task);
// #endif
static void *thread_detect_entry(void *parameter)
{
yolov2_params_t detect_params = *(yolov2_params_t *)parameter;
extern void lcd_draw_picture(uint16_t x1, uint16_t y1, uint16_t width, uint16_t height, uint32_t * ptr);
printf("thread_detect_entry start!\n");
int ret = 0;
// sysctl_enable_irq();
while (1) {
// memset(showbuffer,0,320*240*2);
g_ai_done_flag = 0;
ret = ioctl(g_fd, IOCTRL_CAMERA_START_SHOT, &shoot_para_t);
if (RT_ERROR == ret) {
printf("ov2640 can't wait event flag");
rt_free(showbuffer);
close(g_fd);
pthread_exit(NULL);
return NULL;
}
if (dmalock_sync_take(&dma_ch, 2000)) {
printf("Fail to take DMA channel");
}
kpu_run_kmodel(&detect_task, kpurgbbuffer, DMAC_CHANNEL5, ai_done, NULL);
while (!g_ai_done_flag)
;
dmalock_release(dma_ch);
float *output;
size_t output_size;
kpu_get_output(&detect_task, 0, (uint8_t **)&output, &output_size);
detect_rl.input = output;
region_layer_run(&detect_rl, &detect_info);
/* display result */
for (int cnt = 0; cnt < detect_info.obj_number; cnt++) {
draw_edge((uint32_t *)showbuffer, &detect_info, cnt, 0xF800, (uint16_t)detect_params.sensor_output_size[1],
(uint16_t)detect_params.sensor_output_size[0]);
printf("%d: (%d, %d, %d, %d) cls: %s conf: %f\t", cnt, detect_info.obj[cnt].x1, detect_info.obj[cnt].y1,
detect_info.obj[cnt].x2, detect_info.obj[cnt].y2, detect_params.labels[detect_info.obj[cnt].class_id],
detect_info.obj[cnt].prob);
}
#ifdef BSP_USING_LCD
lcd_draw_picture(0, 0, (uint16_t)detect_params.sensor_output_size[1] - 1,
(uint16_t)detect_params.sensor_output_size[0] - 1, (uint32_t *)showbuffer);
// lcd_show_image(0, 0, (uint16_t)detect_params.sensor_output_size[1], (uint16_t)detect_params.sensor_output_size[0],
// (unsigned int *)showbuffer);
#endif
usleep(500);
if (1 == if_exit) {
if_exit = 0;
printf("thread_detect_entry exit");
pthread_exit(NULL);
}
}
}
void detect_delete()
{
if (showbuffer != NULL) {
int ret = 0;
close(g_fd);
close(kmodel_fd);
free(showbuffer);
free(kpurgbbuffer);
free(model_data);
printf("detect task cancel!!! ret %d ", ret);
if_exit = 1;
}
}
// #ifdef __RT_THREAD_H__
// MSH_CMD_EXPORT(detect_delete, detect task delete);
// #endif
// void kmodel_load(unsigned char *model_data)
// {
// int kmodel_fd = 0;
// int size = 0;
// char kmodel_path[127] = {};
// // kmodel path generate from json file path, *.json -> *.kmodel
// memcpy(kmodel_path, json_file_path, strlen(json_file_path));
// int idx_suffix_start = strlen(json_file_path) - 4;
// const char kmodel_suffix[5] = "kmodel";
// int kmodel_suffix_len = 5;
// while (kmodel_suffix_len--) {
// kmodel_path[idx_suffix_start + 4 - kmodel_suffix_len] = kmodel_suffix[4 - kmodel_suffix_len];
// }
// printf("Kmodel path: %s\n", kmodel_path);
// kmodel_fd = open(kmodel_path, O_RDONLY);
// model_data = (unsigned char *)malloc(detect_params.kmodel_size + 255);
// if (NULL == model_data) {
// printf("model_data apply memory fail !!");
// return;
// }
// memset(model_data, 0, detect_params.kmodel_size + 255);
// if (kmodel_fd >= 0) {
// size = read(kmodel_fd, model_data, detect_params.kmodel_size);
// if (size != detect_params.kmodel_size) {
// printf("read kmodel error size %d\n", size);
// } else {
// printf("read kmodel success");
// }
// } else {
// free(model_data);
// printf("open kmodel fail");
// }
// }
// #ifdef __RT_THREAD_H__
// MSH_CMD_EXPORT(kmodel_load, kmodel load memory);
// #endif
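The commented-out kmodel_load above keeps the original suffix-patching logic, which sizes the "kmodel" suffix at five bytes even though it is six characters long. A minimal corrected sketch of the same idea, assuming json_file_path ends in ".json", kmodel_size comes from the parsed config, and the headers already pulled in by this file (transform.h and friends) provide open/read/close:
/* hedged sketch: derive "<name>.kmodel" from "<name>.json" and read it into memory */
static unsigned char *load_kmodel_from_json_path(const char *json_file_path, int kmodel_size)
{
char kmodel_path[128] = {0}; /* assumes the path fits, as elsewhere in this file */
size_t base_len = strlen(json_file_path) - strlen("json"); /* keep the trailing dot */
memcpy(kmodel_path, json_file_path, base_len);
strcat(kmodel_path, "kmodel");
printf("Kmodel path: %s\n", kmodel_path);
int fd = open(kmodel_path, O_RDONLY);
if (fd < 0) {
printf("open kmodel fail\n");
return NULL;
}
unsigned char *data = (unsigned char *)malloc(kmodel_size + 255);
if (data == NULL) {
printf("model_data apply memory fail\n");
close(fd);
return NULL;
}
memset(data, 0, kmodel_size + 255);
int size = read(fd, data, kmodel_size);
close(fd);
if (size != kmodel_size) {
printf("read kmodel error size %d\n", size);
free(data);
return NULL;
}
printf("read kmodel success\n");
return data;
}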

View File

@ -0,0 +1,8 @@
#ifndef _K210_DETECT_H_
#define _K210_DETECT_H_
#include <transform.h>
void k210_detect(char *json_file_path);
#endif
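k210_detect is the single entry point and takes the path of a JSON model description; a minimal usage sketch, assuming a model pair has been copied to /kmodel on the SD card (the command name and file name below are hypothetical):
#include "k210_detect.h"
/* hedged sketch: start detection from a model description stored on the SD card */
void helmet_detect(void)
{
k210_detect("/kmodel/helmet.json"); /* hypothetical json file describing the model */
}
#ifdef __RT_THREAD_H__
MSH_CMD_EXPORT(helmet_detect, helmet detect task);
#endif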

View File

@ -1,6 +1,6 @@
menuconfig USING_YOLOV2
bool "yolov2 region layer"
depends on USING_KPU_POSTPROCESSING
depends on USING_KPU_PROCESSING
default n

View File

@ -58,12 +58,14 @@ int region_layer_init(region_layer_t *rl, int width, int height, int channels, i
goto malloc_error;
}
for (uint32_t i = 0; i < rl->boxes_number; i++) rl->probs[i] = &(rl->probs_buf[i * (rl->classes + 1)]);
rl->threshold = malloc(rl->classes * sizeof(float));
return 0;
malloc_error:
free(rl->output);
free(rl->boxes);
free(rl->probs_buf);
free(rl->probs);
free(rl->threshold);
return flag;
}
@ -73,6 +75,7 @@ void region_layer_deinit(region_layer_t *rl)
free(rl->boxes);
free(rl->probs_buf);
free(rl->probs);
free(rl->threshold);
}
static inline float sigmoid(float x) { return 1.f / (1.f + expf(-x)); }
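The newly allocated threshold array gives the region layer one confidence threshold per class (note the malloc result is not checked before the function returns). Presumably the caller fills it from the obj_thresh values parsed out of the JSON file once region_layer_init succeeds; a minimal sketch, assuming detect_rl and detect_params are the objects used in the detect entry above:
/* hedged sketch: copy per-class confidence thresholds from the parsed JSON config */
for (int i = 0; i < detect_rl.classes; i++) {
detect_rl.threshold[i] = detect_params.obj_thresh[i];
}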

View File

@ -0,0 +1,7 @@
menuconfig USING_YOLOV2_JSONPARSER
bool "yolov2 model json parser"
depends on USING_KPU_PROCESSING
default n

View File

@ -0,0 +1,10 @@
from building import *
import os
cwd = GetCurrentDir()
src = Glob('*.c')
group = DefineGroup('yolov2_json', src, depend = ['LIB_USING_CJSON'], CPPPATH = [cwd])
Return('group')

View File

@ -0,0 +1,157 @@
#include "json_parser.h"
#include <fcntl.h>
#include "cJSON.h"
yolov2_params_t param_parse(char *json_file_path)
{
yolov2_params_t params_return;
params_return.is_valid = 1;
int fin;
char buffer[JSON_BUFFER_SIZE] = "";
// char *buffer;
// if ((yolov2_params_t *)NULL != (buffer = (char*)malloc(JSON_BUFFER_SIZE * sizeof(char)))) {
// memset(buffer, 0, JSON_BUFFER_SIZE * sizeof(char));
// } else {
// printf("Json buffer malloc failed!");
// params_return.is_valid = 0;
// return params_return;
// }
int array_size;
cJSON *json_obj;
cJSON *json_item;
cJSON *json_array_item;
fin = open(json_file_path, O_RDONLY);
if (fin < 0) { /* open() returns -1 on failure; 0 is a valid descriptor */
printf("Error open file %s\n", json_file_path);
params_return.is_valid = 0;
return params_return;
} else {
printf("Reading config from: %s\n", json_file_path);
}
read(fin, buffer, sizeof(buffer) - 1); /* keep the buffer NUL-terminated for cJSON_Parse */
close(fin);
// read json string
json_obj = cJSON_Parse(buffer);
// free(buffer);
if (NULL == json_obj) {
printf("Failed to parse json file %s\n", json_file_path);
params_return.is_valid = 0;
return params_return;
}
char *json_print_str = cJSON_Print(json_obj);
printf("Json file content: \n%s\n", json_print_str);
cJSON_free(json_print_str);
// get anchors
json_item = cJSON_GetObjectItem(json_obj, "anchors");
array_size = cJSON_GetArraySize(json_item);
if (ANCHOR_NUM * 2 != array_size) {
printf("Expect anchor size: %d, got %d in json file", ANCHOR_NUM * 2, array_size);
params_return.is_valid = 0;
return params_return;
} else {
printf("Got %d anchors from json file\n", ANCHOR_NUM);
}
for (int i = 0; i < ANCHOR_NUM * 2; i++) {
json_array_item = cJSON_GetArrayItem(json_item, i);
params_return.anchor[i] = json_array_item->valuedouble;
printf("%d: %f\n", i, params_return.anchor[i]);
}
// net_input_size
json_item = cJSON_GetObjectItem(json_obj, "net_input_size");
array_size = cJSON_GetArraySize(json_item);
if (2 != array_size) {
printf("Expect net_input_size: %d, got %d in json file", 2, array_size);
params_return.is_valid = 0;
return params_return;
} else {
printf("Got %d net_input_size from json file\n", 2);
}
for (int i = 0; i < 2; i++) {
json_array_item = cJSON_GetArrayItem(json_item, i);
params_return.net_input_size[i] = json_array_item->valueint;
printf("%d: %d\n", i, params_return.net_input_size[i]);
}
// net_output_shape
json_item = cJSON_GetObjectItem(json_obj, "net_output_shape");
array_size = cJSON_GetArraySize(json_item);
if (3 != array_size) {
printf("Expect net_output_shape: %d, got %d in json file", 3, array_size);
params_return.is_valid = 0;
return params_return;
} else {
printf("Got %d net_output_shape from json file\n", 3);
}
for (int i = 0; i < 3; i++) {
json_array_item = cJSON_GetArrayItem(json_item, i);
params_return.net_output_shape[i] = json_array_item->valueint;
printf("%d: %d\n", i, params_return.net_output_shape[i]);
}
// sensor_output_size
json_item = cJSON_GetObjectItem(json_obj, "sensor_output_size");
array_size = cJSON_GetArraySize(json_item);
if (2 != array_size) {
printf("Expect sensor_output_size: %d, got %d in json file", 2, array_size);
params_return.is_valid = 0;
return params_return;
} else {
printf("Got %d sensor_output_size from json file\n", 2);
}
for (int i = 0; i < 2; i++) {
json_array_item = cJSON_GetArrayItem(json_item, i);
params_return.sensor_output_size[i] = json_array_item->valueint;
printf("%d: %d\n", i, params_return.sensor_output_size[i]);
}
// check sensor output width and net input width
if (params_return.sensor_output_size[1] != params_return.net_input_size[1]) {
printf("Net input width must match sensor output width!\n");
params_return.is_valid = 0;
return params_return;
}
// // kmodel_path
// json_item = cJSON_GetObjectItem(json_obj, "kmodel_path");
// memset(params_return.kmodel_path, 0, 127);
// memcpy(params_return.kmodel_path, json_item->valuestring, strlen(json_item->valuestring));
// printf("Got kmodel_path: %s\n", params_return.kmodel_path);
// kmodel_size
json_item = cJSON_GetObjectItem(json_obj, "kmodel_size");
params_return.kmodel_size = json_item->valueint;
printf("Got kmodel_size: %d\n", params_return.kmodel_size);
// labels
json_item = cJSON_GetObjectItem(json_obj, "labels");
params_return.class_num = cJSON_GetArraySize(json_item);
if (0 >= params_return.class_num) {
printf("No labels!\n");
params_return.is_valid = 0;
return params_return;
} else if (params_return.class_num > 20) { /* labels[] and obj_thresh[] hold at most 20 classes */
printf("Too many labels: %d, max is 20\n", params_return.class_num);
params_return.is_valid = 0;
return params_return;
} else {
printf("Got %d labels\n", params_return.class_num);
}
for (int i = 0; i < params_return.class_num; i++) {
json_array_item = cJSON_GetArrayItem(json_item, i);
memset(params_return.labels[i], 0, sizeof(params_return.labels[i])); /* labels[i] is 32 bytes, not 127 */
strncpy(params_return.labels[i], json_array_item->valuestring, sizeof(params_return.labels[i]) - 1);
printf("%d: %s\n", i, params_return.labels[i]);
}
// obj_thresh
json_item = cJSON_GetObjectItem(json_obj, "obj_thresh");
array_size = cJSON_GetArraySize(json_item);
if (params_return.class_num != array_size) {
printf("label number and thresh number mismatch! label number : %d, obj thresh number %d", params_return.class_num,
array_size);
params_return.is_valid = 0;
return params_return;
} else {
printf("Got %d obj_thresh\n", array_size);
}
for (int i = 0; i < array_size; i++) {
json_array_item = cJSON_GetArrayItem(json_item, i);
params_return.obj_thresh[i] = json_array_item->valuedouble;
printf("%d: %f\n", i, params_return.obj_thresh[i]);
}
// nms_thresh
json_item = cJSON_GetObjectItem(json_obj, "nms_thresh");
params_return.nms_thresh = json_item->valuedouble;
printf("Got nms_thresh: %f\n", params_return.nms_thresh);
cJSON_Delete(json_obj);
return params_return;
}
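param_parse returns the whole parameter block by value and reports failures through is_valid, so a caller can refuse to start before touching the camera or the KPU. A minimal usage sketch (the json path is only an example):
/* hedged sketch: parse the model description and bail out on a bad config */
yolov2_params_t detect_params = param_parse("/kmodel/detect.json");
if (!detect_params.is_valid) {
printf("Invalid json config, detection not started\n");
return;
}
printf("kmodel size %d bytes, %d classes, nms_thresh %f\n",
detect_params.kmodel_size, detect_params.class_num, detect_params.nms_thresh);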

View File

@ -0,0 +1,23 @@
#ifndef _JSON_PARSER_H_
#define _JSON_PARSER_H_
#define ANCHOR_NUM 5
#define JSON_BUFFER_SIZE (4 * 1024)
// params from json
typedef struct {
float anchor[ANCHOR_NUM * 2];
int net_output_shape[3];
int net_input_size[2];
int sensor_output_size[2];
int kmodel_size;
float obj_thresh[20];
float nms_thresh;
char labels[20][32];
int class_num;
int is_valid;
} yolov2_params_t;
yolov2_params_t param_parse(char *json_file_path);
#endif
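The struct mirrors the fields param_parse expects in the JSON file; a hedged example of what such a file could look like (all values are illustrative, not taken from any shipped model, and the parser requires sensor_output_size and net_input_size to share the same width):
{
"anchors": [0.57, 0.67, 1.87, 2.06, 3.34, 5.47, 7.88, 3.53, 9.77, 9.17],
"net_input_size": [240, 320],
"net_output_shape": [10, 7, 30],
"sensor_output_size": [240, 320],
"kmodel_size": 388776,
"labels": ["face"],
"obj_thresh": [0.5],
"nms_thresh": 0.3
}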

View File

@ -426,9 +426,6 @@ CONFIG_BSP_DVP_CMOS_PWDN_PIN=44
CONFIG_BSP_DVP_CMOS_XCLK_PIN=46
CONFIG_BSP_DVP_CMOS_PCLK_PIN=47
CONFIG_BSP_DVP_CMOS_HREF_PIN=45
CONFIG_RW007_SPIDEV_NAME="spi11"
CONFIG_RW007_INT_BUSY_PIN=7
CONFIG_RW007_RST_PIN=6
#
# Kendryte SDK Config
@ -438,10 +435,7 @@ CONFIG_PKG_KENDRYTE_SDK_VERNUM=0x0055
#
# More Drivers
#
CONFIG_PKG_USING_RW007=y
CONFIG_RW007_NOT_USE_EXAMPLE_DRIVERS=y
# CONFIG_RW007_USING_STM32_DRIVERS is not set
CONFIG_RW007_SPI_MAX_HZ=20000000
# CONFIG_PKG_USING_RW007 is not set
CONFIG_DRV_USING_OV2640=y
#
@ -457,6 +451,11 @@ CONFIG_DRV_USING_OV2640=y
#
CONFIG_MAIN_KTASK_STACK_SIZE=1024
#
# ota app
#
# CONFIG_APPLICATION_OTA is not set
#
# test app
#
@ -475,11 +474,10 @@ CONFIG_MAIN_KTASK_STACK_SIZE=1024
# knowing app
#
CONFIG_APPLICATION_KNOWING=y
CONFIG_APP_MNIST=y
CONFIG_FACE_DETECT=y
# CONFIG_INSTRUSION_DETECT is not set
# CONFIG_HELMET_DETECT is not set
CONFIG_K210_DETECT_ENTRY=y
# CONFIG_IRIS_ML_DEMO is not set
# CONFIG_K210_FFT_TEST is not set
# CONFIG_USING_IMAGE_PROCESSING_APP is not set
#
# sensor app
@ -487,8 +485,7 @@ CONFIG_FACE_DETECT=y
CONFIG_APPLICATION_SENSOR=y
# CONFIG_APPLICATION_SENSOR_CO2 is not set
# CONFIG_APPLICATION_SENSOR_PM1_0 is not set
CONFIG_APPLICATION_SENSOR_VOICE=y
CONFIG_APPLICATION_SENSOR_VOICE_D124=y
# CONFIG_APPLICATION_SENSOR_VOICE is not set
# CONFIG_APPLICATION_SENSOR_HUMIDITY is not set
# CONFIG_APPLICATION_SENSOR_TEMPERATURE is not set
@ -499,27 +496,18 @@ CONFIG_TRANSFORM_LAYER_ATTRIUBUTE=y
CONFIG_ADD_XIUOS_FETURES=y
# CONFIG_ADD_NUTTX_FETURES is not set
# CONFIG_ADD_RTTHREAD_FETURES is not set
CONFIG_SUPPORT_SENSOR_FRAMEWORK=y
# CONFIG_SENSOR_CO2 is not set
# CONFIG_SENSOR_PM is not set
CONFIG_SENSOR_VOICE=y
CONFIG_SENSOR_D124=y
CONFIG_SENSOR_DEVICE_D124="d124_1"
CONFIG_SENSOR_QUANTITY_D124_VOICE="voice_1"
# CONFIG_SENSOR_D124_DRIVER_EXTUART is not set
CONFIG_SENSOR_DEVICE_D124_DEV="/dev/uar2"
# CONFIG_SENSOR_TEMPERATURE is not set
# CONFIG_SENSOR_HUMIDITY is not set
# CONFIG_SUPPORT_SENSOR_FRAMEWORK is not set
# CONFIG_SUPPORT_CONNECTION_FRAMEWORK is not set
CONFIG_SUPPORT_KNOWING_FRAMEWORK=y
CONFIG_USING_TENSORFLOWLITEMICRO=y
CONFIG_USING_TENSORFLOWLITEMICRO_NORMAL=y
# CONFIG_USING_TENSORFLOWLITEMICRO_CMSISNN is not set
# CONFIG_USING_TENSORFLOWLITEMICRO_DEMOAPP is not set
CONFIG_USING_KPU_POSTPROCESSING=y
CONFIG_USING_YOLOV2=y
# CONFIG_USING_TENSORFLOWLITEMICRO is not set
# CONFIG_USING_KNOWING_FILTER is not set
# CONFIG_USING_OTA_MODEL is not set
# CONFIG_USING_IMAGE_PROCESSING is not set
# CONFIG_USING_CMSIS_5 is not set
CONFIG_USING_KPU_PROCESSING=y
CONFIG_USING_YOLOV2=y
CONFIG_USING_YOLOV2_JSONPARSER=y
CONFIG_USING_K210_YOLOV2_DETECT=y
# CONFIG_SUPPORT_CONTROL_FRAMEWORK is not set
#

View File

@ -31,7 +31,7 @@ config APP_DIR
source "$RTT_DIR/Kconfig"
source "base-drivers/Kconfig"
source "kendryte-sdk/Kconfig"
source "$RT_Thread_DIR/drivers/Kconfig"
source "$RT_Thread_DIR/app_match_rt-thread/Kconfig"
source "$ROOT_DIR/APP_Framework/Kconfig"
config __STACKSIZE__

View File

@ -51,7 +51,7 @@ stack_lds.write('__STACKSIZE__ = %d;' % stack_size)
stack_lds.close()
# include more drivers
objs.extend(SConscript(os.getcwd() + '/../../drivers/SConscript'))
objs.extend(SConscript(os.getcwd() + '/../../app_match_rt-thread/SConscript'))
# include APP_Framework/Framework
objs.extend(SConscript(os.getcwd() + '/../../../../APP_Framework/Framework/SConscript'))

View File

@ -291,9 +291,6 @@
#define BSP_DVP_CMOS_XCLK_PIN 46
#define BSP_DVP_CMOS_PCLK_PIN 47
#define BSP_DVP_CMOS_HREF_PIN 45
#define RW007_SPIDEV_NAME "spi11"
#define RW007_INT_BUSY_PIN 7
#define RW007_RST_PIN 6
/* Kendryte SDK Config */
@ -301,9 +298,6 @@
/* More Drivers */
#define PKG_USING_RW007
#define RW007_NOT_USE_EXAMPLE_DRIVERS
#define RW007_SPI_MAX_HZ 20000000
#define DRV_USING_OV2640
/* APP_Framework */
@ -314,6 +308,9 @@
#define MAIN_KTASK_STACK_SIZE 1024
/* ota app */
/* test app */
@ -325,30 +322,21 @@
/* knowing app */
#define APPLICATION_KNOWING
#define APP_MNIST
#define FACE_DETECT
#define K210_DETECT_ENTRY
/* sensor app */
#define APPLICATION_SENSOR
#define APPLICATION_SENSOR_VOICE
#define APPLICATION_SENSOR_VOICE_D124
/* Framework */
#define TRANSFORM_LAYER_ATTRIUBUTE
#define ADD_XIUOS_FETURES
#define SUPPORT_SENSOR_FRAMEWORK
#define SENSOR_VOICE
#define SENSOR_D124
#define SENSOR_DEVICE_D124 "d124_1"
#define SENSOR_QUANTITY_D124_VOICE "voice_1"
#define SENSOR_DEVICE_D124_DEV "/dev/uar2"
#define SUPPORT_KNOWING_FRAMEWORK
#define USING_TENSORFLOWLITEMICRO
#define USING_TENSORFLOWLITEMICRO_NORMAL
#define USING_KPU_POSTPROCESSING
#define USING_KPU_PROCESSING
#define USING_YOLOV2
#define USING_YOLOV2_JSONPARSER
#define USING_K210_YOLOV2_DETECT
/* Security */

View File

@ -19,7 +19,7 @@ CONFIG_RT_ALIGN_SIZE=4
CONFIG_RT_THREAD_PRIORITY_32=y
# CONFIG_RT_THREAD_PRIORITY_256 is not set
CONFIG_RT_THREAD_PRIORITY_MAX=32
CONFIG_RT_TICK_PER_SECOND=100
CONFIG_RT_TICK_PER_SECOND=1000
CONFIG_RT_USING_OVERFLOW_CHECK=y
CONFIG_RT_USING_HOOK=y
CONFIG_RT_USING_IDLE_HOOK=y
@ -149,12 +149,12 @@ CONFIG_RT_DFS_ELM_LFN_UNICODE_0=y
CONFIG_RT_DFS_ELM_LFN_UNICODE=0
CONFIG_RT_DFS_ELM_MAX_LFN=255
CONFIG_RT_DFS_ELM_DRIVES=2
CONFIG_RT_DFS_ELM_MAX_SECTOR_SIZE=512
CONFIG_RT_DFS_ELM_MAX_SECTOR_SIZE=4096
# CONFIG_RT_DFS_ELM_USE_ERASE is not set
CONFIG_RT_DFS_ELM_REENTRANT=y
CONFIG_RT_DFS_ELM_MUTEX_TIMEOUT=3000
CONFIG_RT_USING_DFS_DEVFS=y
# CONFIG_RT_USING_DFS_ROMFS is not set
CONFIG_RT_USING_DFS_ROMFS=y
# CONFIG_RT_USING_DFS_RAMFS is not set
# CONFIG_RT_USING_DFS_NFS is not set
@ -191,7 +191,12 @@ CONFIG_RT_USING_PIN=y
CONFIG_RT_USING_SPI=y
# CONFIG_RT_USING_QSPI is not set
CONFIG_RT_USING_SPI_MSD=y
# CONFIG_RT_USING_SFUD is not set
CONFIG_RT_USING_SFUD=y
CONFIG_RT_SFUD_USING_SFDP=y
CONFIG_RT_SFUD_USING_FLASH_INFO_TABLE=y
# CONFIG_RT_SFUD_USING_QSPI is not set
CONFIG_RT_SFUD_SPI_MAX_HZ=50000000
# CONFIG_RT_DEBUG_SFUD is not set
# CONFIG_RT_USING_ENC28J60 is not set
# CONFIG_RT_USING_SPI_WIFI is not set
# CONFIG_RT_USING_WDT is not set
@ -379,7 +384,7 @@ CONFIG_BSP_USING_USB_TO_USART=y
# CONFIG_BSP_USING_COM2 is not set
# CONFIG_BSP_USING_COM3 is not set
CONFIG_BSP_USING_SRAM=y
# CONFIG_BSP_USING_SPI_FLASH is not set
CONFIG_BSP_USING_SPI_FLASH=y
# CONFIG_BSP_USING_EEPROM is not set
CONFIG_BSP_USING_OV2640=y
# CONFIG_BSP_USING_ETH is not set
@ -415,8 +420,8 @@ CONFIG_BSP_USING_SPI2=y
# CONFIG_BSP_SPI2_RX_USING_DMA is not set
# CONFIG_BSP_USING_ADC is not set
CONFIG_BSP_USING_I2C1=y
CONFIG_BSP_I2C1_SCL_PIN=54
CONFIG_BSP_I2C1_SDA_PIN=55
CONFIG_BSP_I2C1_SCL_PIN=55
CONFIG_BSP_I2C1_SDA_PIN=54
# CONFIG_BSP_USING_ONCHIP_RTC is not set
# CONFIG_BSP_USING_WDT is not set
# CONFIG_BSP_USING_SDIO is not set
@ -461,6 +466,11 @@ CONFIG_DRV_USING_OV2640=y
#
CONFIG_MAIN_KTASK_STACK_SIZE=1024
#
# ota app
#
# CONFIG_APPLICATION_OTA is not set
#
# test app
#
@ -512,11 +522,14 @@ CONFIG_SENSOR_DEVICE_D124_DEV="/dev/uart2"
# CONFIG_SUPPORT_CONNECTION_FRAMEWORK is not set
CONFIG_SUPPORT_KNOWING_FRAMEWORK=y
# CONFIG_USING_TENSORFLOWLITEMICRO is not set
CONFIG_USING_KPU_POSTPROCESSING=y
# CONFIG_USING_YOLOV2 is not set
# CONFIG_USING_KNOWING_FILTER is not set
# CONFIG_USING_OTA_MODEL is not set
# CONFIG_USING_IMAGE_PROCESSING is not set
# CONFIG_USING_CMSIS_5 is not set
CONFIG_USING_KPU_PROCESSING=y
# CONFIG_USING_YOLOV2 is not set
# CONFIG_USING_YOLOV2_JSONPARSER is not set
# CONFIG_USING_K210_YOLOV2_DETECT is not set
# CONFIG_SUPPORT_CONTROL_FRAMEWORK is not set
#

View File

@ -24,6 +24,6 @@ config APP_DIR
source "$RTT_DIR/Kconfig"
source "$RTT_DIR/bsp/stm32/libraries/Kconfig"
source "board/Kconfig"
source "$RT_Thread_DIR/drivers/Kconfig"
source "$RT_Thread_DIR/app_match_rt-thread/Kconfig"
source "$ROOT_DIR/APP_Framework/Kconfig"

View File

@ -74,7 +74,7 @@ objs.extend(SConscript(os.path.join(libraries_path_prefix, stm32_library, 'SCons
objs.extend(SConscript(os.path.join(libraries_path_prefix, 'HAL_Drivers', 'SConscript')))
# include more drivers
objs.extend(SConscript(os.getcwd() + '/../../drivers/SConscript'))
objs.extend(SConscript(os.getcwd() + '/../../app_match_rt-thread/SConscript'))
# include APP_Framework/Framework
objs.extend(SConscript(os.getcwd() + '/../../../../APP_Framework/Framework/SConscript'))

Some files were not shown because too many files have changed in this diff.