temporarily adapt kpu for XiZi

wuzheng 2022-12-12 16:24:54 +08:00
parent 04dc796f95
commit 6a89a12c49
12 changed files with 268 additions and 134 deletions

View File

@@ -2,15 +2,6 @@
#include <string.h>
#include <transform.h>
#define NULL_PARAMETER 0
#define DVP_INIT 0x00U
#define REG_SCCB_READ 0x12U
#define REG_SCCB_WRITE 0x13U
#define OUTPUT_CONFIG 0x20U
#define LCD_STRING_TYPE 0
#define LCD_DOT_TYPE 1
#define LCD_SIZE 320
static uint16_t image_buff[384000];

View File

@@ -1,4 +1,4 @@
SRC_DIR := tensorflow-lite
SRC_DIR := kpu tensorflow-lite
include $(KERNEL_ROOT)/compiler.mk

View File

@@ -0,0 +1,3 @@
SRC_DIR := k210_yolov2_detect_procedure yolov2 yolov2_json
include $(KERNEL_ROOT)/compiler.mk

View File

@@ -4,8 +4,11 @@ menuconfig USING_K210_YOLOV2_DETECT
default n
config CAMERA_DEV_DRIVER
string "Set camera dev path"
string "Set camera dev path for kpu"
default "/dev/ov2640"
config KPU_LCD_DEV_DRIVER
string "Set lcd dev path for kpu"
default "/dev/lcd_dev"

View File

@@ -1,24 +1,26 @@
#include "k210_yolov2_detect.h"
#include "cJSON.h"
#include "dvp.h"
#ifdef USING_YOLOV2_JSONPARSER
#include <json_parser.h>
#endif
#include "region_layer.h"
#define STACK_SIZE (128 * 1024)
static dmac_channel_number_t dma_ch = DMAC_CHANNEL_MAX;
static dmac_channel_number_t dma_ch = DMAC_CHANNEL_MAX-1;
static _ioctl_shoot_para shoot_para_t = {0};
#define THREAD_PRIORITY_D (11)
static pthread_t tid = 0;
static void *thread_detect_entry(void *parameter);
static int g_fd = 0;
static int camera_fd = 0;
static int kmodel_fd = 0;
static int if_exit = 0;
static unsigned char *showbuffer = NULL;
static unsigned char *kpurgbbuffer = NULL;
static _ioctl_shoot_para shoot_para_t = {0};
unsigned char *model_data = NULL; // kpu data load memory
unsigned char *model_data_align = NULL;
@@ -29,6 +31,8 @@ volatile uint32_t g_ai_done_flag;
static void ai_done(void *ctx) { g_ai_done_flag = 1; }
#define ERROR_FLAG (1)
void k210_detect(char *json_file_path)
{
int ret = 0;
@@ -36,50 +40,78 @@ void k210_detect(char *json_file_path)
int size = 0;
char kmodel_path[127] = {};
// open and parse from json file
yolov2_params_t detect_params = param_parse(json_file_path);
if (!detect_params.is_valid) {
return;
}
g_fd = open("/dev/ov2640", O_RDONLY);
if (g_fd < 0) {
printf("open ov2640 fail !!");
printf("select camera device name:%s\n",CAMERA_DEV_DRIVER);
camera_fd = PrivOpen(CAMERA_DEV_DRIVER, O_RDONLY);
if (camera_fd < 0) {
printf("open %s fail !!",CAMERA_DEV_DRIVER);
return;
}
struct PrivIoctlCfg ioctl_cfg;
ioctl_cfg.ioctl_driver_type = CAMERA_TYPE;
struct CameraCfg camera_init_cfg ={
.gain_manu_enable = 0,
.gain = 0xFF,
.window_w = 800,
.window_h = 600,
.output_w = IMAGE_WIDTH,
.output_h = IMAGE_HEIGHT,
.window_xoffset = 0,
.window_yoffset = 0
};
ioctl_cfg.args = &camera_init_cfg;
if (0 != PrivIoctl(camera_fd, OPE_CFG, &ioctl_cfg))
{
printf("camera pin fd error %d\n", camera_fd);
PrivClose(camera_fd);
}
// configure the resolution of camera
_ioctl_set_reso set_dvp_reso = {detect_params.sensor_output_size[1], detect_params.sensor_output_size[0]};
ioctl(g_fd, IOCTRL_CAMERA_OUT_SIZE_RESO, &set_dvp_reso);
showbuffer =
(unsigned char *)rt_malloc_align(detect_params.sensor_output_size[0] * detect_params.sensor_output_size[1] * 2, 64);
struct PrivIoctlCfg camera_cfg;
camera_cfg.args = &set_dvp_reso;
camera_cfg.ioctl_driver_type = CAMERA_TYPE;
PrivIoctl(camera_fd, IOCTRL_CAMERA_OUT_SIZE_RESO, &camera_cfg);
// alloc the memory for camera and kpu running
showbuffer = (unsigned char *)malloc(detect_params.sensor_output_size[0] * detect_params.sensor_output_size[1] * 2);
if (NULL == showbuffer) {
close(g_fd);
close(camera_fd);
printf("showbuffer apply memory fail !!");
return;
}
kpurgbbuffer = (unsigned char *)rt_malloc_align(detect_params.net_input_size[0] * detect_params.net_input_size[1] * 3, 64);
kpurgbbuffer = (unsigned char *)malloc(detect_params.net_input_size[0] * detect_params.net_input_size[1] * 3);
if (NULL == kpurgbbuffer) {
close(g_fd);
rt_free_align(showbuffer);
close(camera_fd);
free(showbuffer);
printf("kpurgbbuffer apply memory fail !!");
return;
}
model_data = (unsigned char *)malloc(detect_params.kmodel_size + 255);
printf("model address:%x->%x\n",model_data_align,model_data_align + detect_params.kmodel_size);
if (NULL == model_data) {
rt_free_align(showbuffer);
rt_free_align(kpurgbbuffer);
close(g_fd);
free(showbuffer);
free(kpurgbbuffer);
close(camera_fd);
printf("model_data apply memory fail !!");
return;
}
memset(model_data, 0, detect_params.kmodel_size + 255);
memset(showbuffer, 0, detect_params.sensor_output_size[0] * detect_params.sensor_output_size[1] * 2);
memset(kpurgbbuffer, 0, detect_params.net_input_size[0] * detect_params.net_input_size[1] * 3);
shoot_para_t.pdata = (unsigned int *)(showbuffer);
shoot_para_t.pdata = (uintptr_t)(showbuffer);
shoot_para_t.length = (size_t)(detect_params.sensor_output_size[0] * detect_params.sensor_output_size[1] * 2);
/*
load memory
*/
// kmodel path generate from json file path, *.json -> *.kmodel
memcpy(kmodel_path, json_file_path, strlen(json_file_path));
int idx_suffix_start = strlen(json_file_path) - 4;
const char kmodel_suffix[7] = "kmodel";
int kmodel_suffix_len = 6;
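The block above derives the .kmodel path by overwriting the last characters of the .json path in place. An equivalent standalone sketch of the same *.json -> *.kmodel rename, assuming the input always ends in ".json":

#include <stdio.h>
#include <string.h>

/* Illustrative sketch only, not part of the commit:
   build "<stem>.kmodel" from "<stem>.json". */
static void json_to_kmodel_path(const char *json_path, char *out, size_t out_len)
{
    size_t keep = strlen(json_path) - strlen("json");   /* keep "<stem>." */
    snprintf(out, out_len, "%.*skmodel", (int)keep, json_path);
}
/* e.g. json_to_kmodel_path("/userdata/detect.json", buf, sizeof(buf))
   yields "/userdata/detect.kmodel" */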
@@ -87,45 +119,56 @@ void k210_detect(char *json_file_path)
kmodel_path[idx_suffix_start + 5 - kmodel_suffix_len] = kmodel_suffix[5 - kmodel_suffix_len];
}
printf("kmodel path: %s\n", kmodel_path);
kmodel_fd = open(kmodel_path, O_RDONLY);
unsigned char *model_data_align = (unsigned char *)(((uintptr_t)model_data + 255) & (~255));
kmodel_fd = PrivOpen(kmodel_path, O_RDONLY);
memset(model_data,0,sizeof(model_data));
if (kmodel_fd < 0) {
printf("open kmodel fail");
close(g_fd);
close(camera_fd);
free(showbuffer);
free(kpurgbbuffer);
free(model_data);
return;
} else {
size = read(kmodel_fd, model_data, detect_params.kmodel_size);
size = read(kmodel_fd, model_data_align, detect_params.kmodel_size);
if (size != detect_params.kmodel_size) {
printf("read kmodel error size %d\n", size);
close(g_fd);
close(camera_fd);
close(kmodel_fd);
free(showbuffer);
free(kpurgbbuffer);
free(model_data);
return;
} else {
close(kmodel_fd);
printf("read kmodel success \n");
}
}
unsigned char *model_data_align = (unsigned char *)(((unsigned int)model_data + 255) & (~255));
// dvp_set_ai_addr((uint32_t)kpurgbbuffer,
// (uint32_t)(kpurgbbuffer + detect_params.net_input_size[0] * detect_params.net_input_size[1]),
// (uint32_t)(kpurgbbuffer + detect_params.net_input_size[0] * detect_params.net_input_size[1] * 2));
#ifdef ADD_RTTHREAD_FETURES
dvp_set_ai_addr(
(uint32_t)(kpurgbbuffer +
(uintptr_t)(kpurgbbuffer +
detect_params.net_input_size[1] * (detect_params.net_input_size[0] - detect_params.sensor_output_size[0])),
(uint32_t)(kpurgbbuffer +
(uintptr_t)(kpurgbbuffer +
detect_params.net_input_size[1] * (detect_params.net_input_size[0] - detect_params.sensor_output_size[0]) +
detect_params.net_input_size[0] * detect_params.net_input_size[1]),
(uint32_t)(kpurgbbuffer +
(uintptr_t)(kpurgbbuffer +
detect_params.net_input_size[1] * (detect_params.net_input_size[0] - detect_params.sensor_output_size[0]) +
detect_params.net_input_size[0] * detect_params.net_input_size[1] * 2));
#endif
// Set AI buff address of Camera
RgbAddress ai_address_preset;
ai_address_preset.r_addr = (uintptr_t)kpurgbbuffer + detect_params.net_input_size[1] * (detect_params.net_input_size[0] - detect_params.sensor_output_size[0]);
ai_address_preset.g_addr = ai_address_preset.r_addr + detect_params.net_input_size[0] * detect_params.net_input_size[1];
ai_address_preset.b_addr = ai_address_preset.g_addr + detect_params.net_input_size[0] * detect_params.net_input_size[1];
camera_cfg.args = &ai_address_preset;
PrivIoctl(camera_fd,SET_AI_ADDR,&camera_cfg);
// Load kmodel into kpu task
if (kpu_load_kmodel(&detect_task, model_data_align) != 0) {
printf("\nmodel init error\n");
close(g_fd);
close(camera_fd);
close(kmodel_fd);
free(showbuffer);
free(kpurgbbuffer);
@@ -137,8 +180,7 @@ void k210_detect(char *json_file_path)
detect_rl.anchor = detect_params.anchor;
detect_rl.nms_value = detect_params.nms_thresh;
detect_rl.classes = detect_params.class_num;
result =
region_layer_init(&detect_rl, detect_params.net_output_shape[0], detect_params.net_output_shape[1],
result =region_layer_init(&detect_rl, detect_params.net_output_shape[0], detect_params.net_output_shape[1],
detect_params.net_output_shape[2], detect_params.net_input_size[1], detect_params.net_input_size[0]);
printf("region_layer_init result %d \n\r", result);
for (int idx = 0; idx < detect_params.class_num; idx++) {
@@ -159,7 +201,7 @@ void k210_detect(char *json_file_path)
printf("thread_detect_entry successfully!\n");
} else {
printf("thread_detect_entry failed! error code is %d\n", result);
close(g_fd);
close(camera_fd);
}
}
// #ifdef __RT_THREAD_H__
@@ -168,22 +210,38 @@ void k210_detect(char *json_file_path)
static void *thread_detect_entry(void *parameter)
{
#ifdef BSP_USING_LCD
int lcd_fd = PrivOpen(KPU_LCD_DEV_DRIVER, O_RDWR);
if (lcd_fd < 0)
{
printf("open lcd fd error:%d\n", lcd_fd);
}
LcdWriteParam graph_param;
graph_param.type = LCD_DOT_TYPE;
#endif
yolov2_params_t detect_params = *(yolov2_params_t *)parameter;
extern void lcd_draw_picture(uint16_t x1, uint16_t y1, uint16_t width, uint16_t height, uint32_t * ptr);
struct PrivIoctlCfg camera_cfg;
camera_cfg.ioctl_driver_type = CAMERA_TYPE;
printf("thread_detect_entry start!\n");
int ret = 0;
// sysctl_enable_irq();
while (1) {
// memset(showbuffer,0,320*240*2);
g_ai_done_flag = 0;
ret = ioctl(g_fd, IOCTRL_CAMERA_START_SHOT, &shoot_para_t);
if (RT_ERROR == ret) {
//get a graph map from camera
camera_cfg.args = &shoot_para_t;
ret = PrivIoctl(camera_fd, IOCTRL_CAMERA_START_SHOT, &camera_cfg);
if (ERROR_FLAG == ret) {
printf("ov2640 can't wait event flag");
rt_free(showbuffer);
close(g_fd);
free(showbuffer);
close(camera_fd);
pthread_exit(NULL);
return NULL;
}
#ifdef ADD_RTTHREAD_FETURES
if (dmalock_sync_take(&dma_ch, 2000)) {
printf("Fail to take DMA channel");
}
@@ -191,13 +249,19 @@ static void *thread_detect_entry(void *parameter)
while (!g_ai_done_flag)
;
dmalock_release(dma_ch);
#elif defined ADD_XIZI_FETURES
kpu_run_kmodel(&detect_task, kpurgbbuffer, dma_ch, ai_done, NULL);
while (!g_ai_done_flag)
;
#endif
float *output;
size_t output_size;
kpu_get_output(&detect_task, 0, (uint8_t **)&output, &output_size);
detect_rl.input = output;
region_layer_run(&detect_rl, &detect_info);
printf("detect_info.obj_number:%d\n",detect_info.obj_number);
/* display result */
for (int cnt = 0; cnt < detect_info.obj_number; cnt++) {
detect_info.obj[cnt].y1 += (detect_params.sensor_output_size[0] - detect_params.net_input_size[0])/2;
detect_info.obj[cnt].y2 += (detect_params.sensor_output_size[0] - detect_params.net_input_size[0])/2;
@@ -208,12 +272,27 @@ static void *thread_detect_entry(void *parameter)
detect_info.obj[cnt].prob);
}
#ifdef BSP_USING_LCD
lcd_draw_picture(0, 0, (uint16_t)detect_params.sensor_output_size[1] - 1,
(uint16_t)detect_params.sensor_output_size[0] - 1, (uint32_t *)showbuffer);
// lcd_show_image(0, 0, (uint16_t)detect_params.sensor_output_size[1], (uint16_t)detect_params.sensor_output_size[0],
// (unsigned int *)showbuffer);
#ifdef ADD_RTTHREAD_FETURES
extern void lcd_draw_picture(uint16_t x1, uint16_t y1, uint16_t width, uint16_t height, uint32_t * ptr);
lcd_draw_picture(0, 0, (uint16_t)detect_params.sensor_output_size[1] - 1,
(uint16_t)detect_params.sensor_output_size[0] - 1, (uint32_t *)showbuffer);
// lcd_show_image(0, 0, (uint16_t)detect_params.sensor_output_size[1], (uint16_t)detect_params.sensor_output_size[0],
// (unsigned int *)showbuffer);
#else
//refresh the LCD using photo of camera
for (int i = 0; i < IMAGE_HEIGHT; i++)
{
graph_param.pixel_info.pixel_color = (uint16_t*)showbuffer + i * IMAGE_WIDTH;
graph_param.pixel_info.x_startpos = 0;
graph_param.pixel_info.y_startpos = i + (LCD_SIZE - IMAGE_HEIGHT) / 2;
graph_param.pixel_info.x_endpos = IMAGE_WIDTH - 1;
graph_param.pixel_info.y_endpos = i + (LCD_SIZE - IMAGE_HEIGHT) / 2;
PrivWrite(lcd_fd, &graph_param, NULL_PARAMETER);
}
#endif
#endif
usleep(500);
if (1 == if_exit) {
if_exit = 0;
printf("thread_detect_entry exit");
@@ -226,7 +305,7 @@ void detect_delete()
{
if (showbuffer != NULL) {
int ret = 0;
close(g_fd);
close(camera_fd);
close(kmodel_fd);
free(showbuffer);
free(kpurgbbuffer);
@@ -235,46 +314,3 @@ void detect_delete()
if_exit = 1;
}
}
// #ifdef __RT_THREAD_H__
// MSH_CMD_EXPORT(detect_delete, detect task delete);
// #endif
// void kmodel_load(unsigned char *model_data)
// {
// int kmodel_fd = 0;
// int size = 0;
// char kmodel_path[127] = {};
// // kmodel path generate from json file path, *.json -> *.kmodel
// memcpy(kmodel_path, json_file_path, strlen(json_file_path));
// int idx_suffix_start = strlen(json_file_path) - 4;
// const char kmodel_suffix[5] = "kmodel";
// int kmodel_suffix_len = 5;
// while (kmodel_suffix_len--) {
// kmodel_path[idx_suffix_start + 4 - kmodel_suffix_len] = kmodel_suffix[4 - kmodel_suffix_len];
// }
// printf("Kmodel path: %s\n", kmodel_path);
// kmodel_fd = open(kmodel_path, O_RDONLY);
// model_data = (unsigned char *)malloc(detect_params.kmodel_size + 255);
// if (NULL == model_data) {
// printf("model_data apply memory fail !!");
// return;
// }
// memset(model_data, 0, detect_params.kmodel_size + 255);
// if (kmodel_fd >= 0) {
// size = read(kmodel_fd, model_data, detect_params.kmodel_size);
// if (size != detect_params.kmodel_size) {
// printf("read kmodel error size %d\n", size);
// } else {
// printf("read kmodel success");
// }
// } else {
// free(model_data);
// printf("open kmodel fail");
// }
// }
// #ifdef __RT_THREAD_H__
// MSH_CMD_EXPORT(kmodel_load, kmodel load memory);
// #endif
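Since the kmodel buffer in this file is now taken from plain malloc rather than rt_malloc_align, the code over-allocates by 255 bytes and rounds the pointer up to the next 256-byte boundary (model_data_align) before passing it to kpu_load_kmodel, while free() is still called on the raw model_data pointer. A standalone sketch of that alignment idiom:

#include <stdint.h>
#include <stdlib.h>

/* Illustrative sketch, not part of the commit: return a 256-byte-aligned
   pointer inside a (size + 255)-byte allocation; the caller frees *raw_out,
   not the aligned pointer. */
static unsigned char *alloc_align_256(size_t size, unsigned char **raw_out)
{
    unsigned char *raw = (unsigned char *)malloc(size + 255);
    if (raw == NULL) {
        return NULL;
    }
    *raw_out = raw;
    return (unsigned char *)(((uintptr_t)raw + 255) & ~(uintptr_t)255);
}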

View File

@@ -154,7 +154,6 @@ int PrivIoctl(int fd, int cmd, void *args)
{
int ret;
struct PrivIoctlCfg *ioctl_cfg = (struct PrivIoctlCfg *)args;
switch (ioctl_cfg->ioctl_driver_type)
{
case SERIAL_TYPE:
@@ -163,12 +162,10 @@ int PrivIoctl(int fd, int cmd, void *args)
case PIN_TYPE:
ret = PrivPinIoctl(fd, cmd, ioctl_cfg->args);
break;
case I2C_TYPE:
ret = ioctl(fd, cmd, ioctl_cfg->args);
break;
case LCD_TYPE:
ret = PrivLcdIoctl(fd, cmd, ioctl_cfg->args);
break;
case I2C_TYPE:
case RTC_TYPE:
case ADC_TYPE:
case DAC_TYPE:

View File

@@ -227,6 +227,25 @@ struct RtcDrvConfigureParam
time_t *time;
};
typedef struct
{
uintptr_t pdata;
uint32_t length;
}_ioctl_shoot_para;
typedef struct
{
uint32_t width; // width The width of image
uint32_t height; // height The height of image
}_ioctl_set_reso;
typedef struct
{
uintptr_t r_addr;
uintptr_t g_addr;
uintptr_t b_addr;
}RgbAddress;
enum TCP_OPTION {
SEND_DATA = 0,
RECV_DATA,
@@ -241,6 +260,25 @@ enum TCP_OPTION {
#define MY_INDEV_X BSP_LCD_Y_MAX
#define MY_INDEV_Y BSP_LCD_X_MAX
#define DVP_INIT 0x00U
#define REG_SCCB_READ 0x12U
#define REG_SCCB_WRITE 0x13U
#define OUTPUT_CONFIG 0x20U
#define LCD_STRING_TYPE 0
#define LCD_DOT_TYPE 1
#define LCD_SIZE 320
#define IMAGE_HEIGHT 240
#define IMAGE_WIDTH 320
#define NULL_PARAMETER 0
#define REG_SCCB_READ 0x12U
#define REG_SCCB_WRITE 0x13U
#define SCCB_REG_LENGTH 0x08U
#define IOCTRL_CAMERA_START_SHOT (20)
#define SET_DISPLAY_ADDR (21)
#define SET_AI_ADDR (22)
#define IOCTRL_CAMERA_OUT_SIZE_RESO (23)
/*********************shell***********************/
//for int func(int argc, char *agrv[])
#define PRIV_SHELL_CMD_MAIN_ATTR (SHELL_CMD_PERMISSION(0) | SHELL_CMD_TYPE(SHELL_TYPE_CMD_MAIN))
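The camera command macros and the shoot/resolution/RGB-address structures added in this header are driven from user code through PrivIoctl, which routes on the driver type packed into struct PrivIoctlCfg. A minimal single-shot capture sketch under those assumptions (device path and image size macros as defined above; the function is a hypothetical helper, not part of the commit):

#include <fcntl.h>
#include <stdint.h>

/* Illustrative sketch, not part of the commit: returns the open camera fd
   (the DVP interrupt still needs the device while the frame converts),
   or -1 on failure; the caller closes it when detection ends. */
static int camera_capture_once(unsigned char *frame_buf, uint32_t frame_len,
                               uintptr_t r_addr, uintptr_t g_addr, uintptr_t b_addr)
{
    int fd = PrivOpen(CAMERA_DEV_DRIVER, O_RDONLY);
    if (fd < 0) {
        return -1;
    }
    struct PrivIoctlCfg cfg;
    cfg.ioctl_driver_type = CAMERA_TYPE;

    _ioctl_set_reso reso = {IMAGE_WIDTH, IMAGE_HEIGHT};          /* output resolution */
    cfg.args = &reso;
    PrivIoctl(fd, IOCTRL_CAMERA_OUT_SIZE_RESO, &cfg);

    RgbAddress ai = {r_addr, g_addr, b_addr};                    /* KPU R/G/B planes */
    cfg.args = &ai;
    PrivIoctl(fd, SET_AI_ADDR, &cfg);

    _ioctl_shoot_para shoot = {(uintptr_t)frame_buf, frame_len}; /* display buffer */
    cfg.args = &shoot;
    PrivIoctl(fd, IOCTRL_CAMERA_START_SHOT, &cfg);               /* DVP IRQ converts one frame */

    return fd;
}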

View File

@@ -77,6 +77,7 @@ int pthread_attr_setschedparam(pthread_attr_t *attr,
int pthread_attr_setstacksize(pthread_attr_t *attr, size_t stack_size)
{
attr->stacksize = stack_size;
return 0;
}

View File

@@ -31,6 +31,7 @@
#include "utils.h"
#include "plic.h"
#include "stdlib.h"
#include <device.h>
volatile dmac_t *const dmac = (dmac_t *)DMAC_BASE_ADDR;
@@ -172,6 +173,7 @@ void dmac_channel_disable(dmac_channel_number_t channel_num)
}
writeq(chen.data, &dmac->chen);
}
int32_t dmac_check_channel_busy(dmac_channel_number_t channel_num)
@@ -766,26 +768,30 @@ void dmac_set_src_dest_length(dmac_channel_number_t channel_num, const void *src
dmac_channel_enable(channel_num);
}
static int dmac_irq_callback(void *ctx)
static int dmac_irq_callback(int vector,void *ctx)
{
dmac_context_t *v_dmac_context = (dmac_context_t *)(ctx);
dmac_channel_number_t v_dmac_channel = v_dmac_context->dmac_channel;
dmac_chanel_interrupt_clear(v_dmac_channel);
if(v_dmac_context->callback != NULL)
if(v_dmac_context->callback != NULL){
v_dmac_context->callback(v_dmac_context->ctx);
}
return 0;
}
void dmac_irq_register(dmac_channel_number_t channel_num , plic_irq_callback_t dmac_callback, void *ctx, uint32_t priority)
{
dmac_context[channel_num].dmac_channel = channel_num;
dmac_context[channel_num].callback = dmac_callback;
dmac_context[channel_num].ctx = ctx;
dmac_enable_channel_interrupt(channel_num);
plic_set_priority(IRQN_DMA0_INTERRUPT + channel_num, priority);
plic_irq_enable(IRQN_DMA0_INTERRUPT + channel_num);
plic_irq_register(IRQN_DMA0_INTERRUPT + channel_num, dmac_irq_callback, &dmac_context[channel_num]);
// plic_set_priority(IRQN_DMA0_INTERRUPT + channel_num, priority);
// plic_irq_enable(IRQN_DMA0_INTERRUPT + channel_num);
// plic_irq_register(IRQN_DMA0_INTERRUPT + channel_num, dmac_irq_callback, &dmac_context[channel_num]);
isrManager.done->enableIrq(IRQN_DMA0_INTERRUPT + channel_num);
isrManager.done->registerIrq(IRQN_DMA0_INTERRUPT + channel_num, (IsrHandlerType)dmac_irq_callback, &dmac_context[channel_num]);
}
void __attribute__((weak, alias("dmac_irq_register"))) dmac_set_irq(dmac_channel_number_t channel_num , plic_irq_callback_t dmac_callback, void *ctx, uint32_t priority);

View File

@@ -6,9 +6,11 @@
#include "plic.h"
#include <ov2640.h>
#define REG_SCCB_READ 0x12U
#define REG_SCCB_WRITE 0x13U
#define SCCB_REG_LENGTH 0x08U
#define CONTINOUS_SHOOTS 1
#define ONLY_ONE_SHOOT 2
#define STOP_SHOOT 0
static int shoot_flag = 0; // shoot will close when shoot_flag == 0
// irq interrupt function
static int on_irq_dvp(int irq, void *arg)
@@ -17,11 +19,16 @@ static int on_irq_dvp(int irq, void *arg)
{
dvp_clear_interrupt(DVP_STS_FRAME_FINISH);
}
else
{
dvp_start_convert();
else{
if(shoot_flag>0){
dvp_start_convert();
if(ONLY_ONE_SHOOT==shoot_flag){
shoot_flag=STOP_SHOOT;
}
}
dvp_clear_interrupt(DVP_STS_FRAME_START);
}
return 0;
}
@@ -64,14 +71,14 @@ static uint32 DvpDrvInit(void)
#endif
#ifdef DVP_AI_OUTPUT
dvp_set_output_enable(DVP_OUTPUT_AI, 1);
dvp_set_ai_addr((uint32_t)DVP_AI_RED_ADRESS, (uint32_t)DVP_AI_GREEN_ADRESS, (uint32_t)DVP_AI_BLUE_ADRESS);
// dvp_set_ai_addr((uint32_t)DVP_AI_RED_ADRESS, (uint32_t)DVP_AI_GREEN_ADRESS, (uint32_t)DVP_AI_BLUE_ADRESS);
#endif
#ifdef DVP_INTERRUPT_ENABLE
dvp_config_interrupt(DVP_CFG_START_INT_ENABLE | DVP_CFG_FINISH_INT_ENABLE, 0);
isrManager.done->registerIrq(IRQN_DVP_INTERRUPT, (IsrHandlerType)on_irq_dvp, NULL);
isrManager.done->enableIrq(IRQN_DVP_INTERRUPT);
dvp_clear_interrupt(DVP_STS_FRAME_START | DVP_STS_FRAME_FINISH);
dvp_config_interrupt(DVP_CFG_START_INT_ENABLE | DVP_CFG_FINISH_INT_ENABLE, 1);
dvp_config_interrupt(DVP_CFG_START_INT_ENABLE | DVP_CFG_FINISH_INT_ENABLE, 0);
KPrintf("camera interrupt has open!\n");
#endif
return ret;
@@ -125,15 +132,33 @@ static uint32 DvpDrvConfigure(void *drv, struct BusConfigureInfo *args)
int cmd_type = args->configure_cmd;
struct CameraCfg* tmp_cfg;
RgbAddress* kpu_rgb_address;
_ioctl_shoot_para* pixel_cfg;
switch (cmd_type)
{
case OPE_INT:
break;
case OPE_CFG:
tmp_cfg = (struct CameraCfg *)args->private_data;
SensorConfigure(tmp_cfg);
memcpy(&sensor_config,tmp_cfg,sizeof(struct CameraCfg));
SensorConfigure(&sensor_config);
dvp_set_image_size(tmp_cfg->output_w, tmp_cfg->output_h);
break;
case IOCTRL_CAMERA_START_SHOT:
pixel_cfg = (_ioctl_shoot_para*)args->private_data;
dvp_set_display_addr(pixel_cfg->pdata);
dvp_set_output_enable(DVP_OUTPUT_DISPLAY, 1);
dvp_config_interrupt(DVP_CFG_START_INT_ENABLE | DVP_CFG_FINISH_INT_ENABLE, 1);
shoot_flag=ONLY_ONE_SHOOT;
break;
case SET_AI_ADDR:
kpu_rgb_address = (RgbAddress*)args->private_data;
dvp_set_output_enable(DVP_OUTPUT_AI, 1);
dvp_set_ai_addr(kpu_rgb_address->r_addr,kpu_rgb_address->g_addr,kpu_rgb_address->b_addr);
break;
case IOCTRL_CAMERA_OUT_SIZE_RESO:
dvp_set_image_size(((uint32_t*)args->private_data)[0], ((uint32_t*)args->private_data)[1]);
break;
case REG_SCCB_READ:
ReadDvpReg(drv, (struct DvpRegConfigureInfo *)args->private_data);
break;
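Taken together, the two hunks above gate DVP frame conversion on shoot_flag: IOCTRL_CAMERA_START_SHOT arms a single conversion, and the frame-start interrupt disarms it again after one frame. A condensed sketch of that life cycle (dvp_* calls and the flag macros as in this driver; volatile is added here only because the flag is shared with the ISR, the driver declares it plain int):

#include <stdint.h>

/* Illustrative sketch, not part of the commit. */
static volatile int shoot_flag = STOP_SHOOT;

static void camera_arm_one_shot(uintptr_t display_addr)   /* IOCTRL_CAMERA_START_SHOT path */
{
    dvp_set_display_addr(display_addr);
    dvp_set_output_enable(DVP_OUTPUT_DISPLAY, 1);
    dvp_config_interrupt(DVP_CFG_START_INT_ENABLE | DVP_CFG_FINISH_INT_ENABLE, 1);
    shoot_flag = ONLY_ONE_SHOOT;                           /* arm exactly one conversion */
}

static void on_frame_start(void)                           /* frame-start branch of on_irq_dvp() */
{
    if (shoot_flag > 0) {
        dvp_start_convert();
        if (ONLY_ONE_SHOOT == shoot_flag) {
            shoot_flag = STOP_SHOOT;                       /* disarm after the first frame */
        }
    }
    dvp_clear_interrupt(DVP_STS_FRAME_START);
}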

View File

@@ -25,6 +25,34 @@
extern "C" {
#endif
#define REG_SCCB_READ 0x12U
#define REG_SCCB_WRITE 0x13U
#define SCCB_REG_LENGTH 0x08U
#define IOCTRL_CAMERA_START_SHOT (20)
#define SET_DISPLAY_ADDR (21)
#define SET_AI_ADDR (22)
#define IOCTRL_CAMERA_OUT_SIZE_RESO (23)
typedef struct
{
uintptr_t r_addr;
uintptr_t g_addr;
uintptr_t b_addr;
}RgbAddress;
typedef struct
{
uintptr_t pdata;
uint32_t length;
}_ioctl_shoot_para;
typedef struct
{
uint32_t width; // width The width of image
uint32_t height; // height The height of image
}_ioctl_set_reso;
int HwDvpInit(void);
#ifdef __cplusplus

View File

@@ -10,6 +10,8 @@
#include "bsp.h"
#include <assert.h>
#include <float.h>
#include "plic.h"
#include <device.h>
#define LAYER_BURST_SIZE 12
@@ -765,7 +767,6 @@ static void kpu_kmodel_input_with_padding(const kpu_layer_argument_t *layer, con
size_t width = layer->image_size.data.i_row_wid + 1;
size_t height = layer->image_size.data.i_col_high + 1;
size_t channels = layer->image_channel_num.data.i_ch_num + 1;
kpu_upload_core(width, height, channels, src, layer->image_addr.data.image_src_addr);
}
@@ -1349,7 +1350,7 @@ int kpu_load_kmodel(kpu_model_context_t *ctx, const uint8_t *buffer)
{
uintptr_t base_addr = (uintptr_t)buffer;
const kpu_kmodel_header_t *header = (const kpu_kmodel_header_t *)buffer;
printf("\nheader->version:%d,header->arch:%d\n",header->version,header->arch);
if (header->version == 3 && header->arch == 0)
{
ctx->model_buffer = buffer;
@@ -1500,7 +1501,6 @@ static int ai_step(void *userdata)
last_layer_type = cnt_layer_header->type;
last_time = sysctl_get_time_us();
#endif
switch (cnt_layer_header->type)
{
case KL_ADD:
@@ -1564,9 +1564,10 @@ static int ai_step(void *userdata)
default:
assert(!"Layer is not supported.");
}
if (cnt_layer_id != (ctx->layers_length - 1))
ai_step(userdata);
if (cnt_layer_id != (ctx->layers_length - 1)){
ai_step(userdata);
}
else
kpu_kmodel_done(ctx);
return 0;
@@ -1579,6 +1580,11 @@ static void ai_step_not_isr(void *userdata)
sysctl_enable_irq();
}
static void ai_my_step(int vector,void *userdata)
{
ai_step(userdata);
}
int kpu_run_kmodel(kpu_model_context_t *ctx, const uint8_t *src, dmac_channel_number_t dma_ch, kpu_done_callback_t done_callback, void *userdata)
{
ctx->dma_ch = dma_ch;
@@ -1608,10 +1614,12 @@ int kpu_run_kmodel(kpu_model_context_t *ctx, const uint8_t *src, dmac_channel_nu
.layer_cfg_almost_empty_int = 0,
.layer_cfg_almost_full_int = 1
};
plic_irq_enable(IRQN_AI_INTERRUPT);
plic_set_priority(IRQN_AI_INTERRUPT, 1);
plic_irq_register(IRQN_AI_INTERRUPT, ai_step, ctx);
isrManager.done->enableIrq(IRQN_AI_INTERRUPT);
isrManager.done->registerIrq(IRQN_AI_INTERRUPT, (IsrHandlerType)ai_my_step, ctx);
// plic_irq_enable(IRQN_AI_INTERRUPT);
// plic_set_priority(IRQN_AI_INTERRUPT, 1);
// plic_irq_register(IRQN_AI_INTERRUPT, ai_step, ctx);
const kpu_model_layer_header_t *first_layer_header = ctx->layer_headers;
if (first_layer_header->type != KL_K210_CONV)
@@ -1624,11 +1632,9 @@ int kpu_run_kmodel(kpu_model_context_t *ctx, const uint8_t *src, dmac_channel_nu
kpu_kmodel_input_with_padding(&layer_arg, src);
ai_step_not_isr(ctx);
}
else
{
else{
kpu_input_dma(&layer_arg, src, ctx->dma_ch, ai_step, ctx);
}
return 0;
}
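Both the DMA and AI interrupts are now registered through isrManager instead of the bare plic_* API, and XiZi-style handlers receive an extra vector argument, which is why dmac_irq_callback gained an int parameter and ai_step is wrapped by ai_my_step above. A generic sketch of that adapter pattern (IsrHandlerType and isrManager are assumed to have the shapes used in this commit):

#include <stddef.h>

/* Illustrative sketch, not part of the commit: wrap a legacy
   void (*)(void *ctx) callback so it can be registered as an
   int (*)(int vector, void *ctx) XiZi interrupt handler. */
typedef void (*legacy_cb_t)(void *ctx);

struct legacy_wrap {
    legacy_cb_t cb;
    void *ctx;
};

static int legacy_isr_adapter(int vector, void *arg)
{
    struct legacy_wrap *w = (struct legacy_wrap *)arg;
    (void)vector;                      /* the legacy callback ignores which vector fired */
    if (w != NULL && w->cb != NULL) {
        w->cb(w->ctx);
    }
    return 0;
}

/* Registration then mirrors dmac_irq_register():
 *   isrManager.done->registerIrq(irq_num, (IsrHandlerType)legacy_isr_adapter, &wrap);
 *   isrManager.done->enableIrq(irq_num);
 */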