add gimbal demo

AiYangSky 2023-06-19 19:01:46 +08:00
parent 6218ad39fc
commit e21327dd71
5 changed files with 705 additions and 1 deletion

View File

@ -0,0 +1,251 @@
#include <iostream>
#include <string>
// Include the SpireCV SDK header
#include <sv_world.h>
using namespace std;
// Window name
static const std::string RGB_WINDOW = "Image window";
// Rectangle selected with the mouse
cv::Rect rect_sel;
// Starting point of the selection
cv::Point pt_origin;
// Whether a new ROI has been selected
bool b_renew_ROI = false;
// Whether tracking has started
bool b_begin_TRACK = false;
// Mouse callback implementing the ROI-selection logic
void onMouse(int event, int x, int y, int, void *);
struct node
{
double x, y;
};
node p1, p2, p3, p4;
node p;
double getCross(node p1, node p2, node p)
{
return (p2.x - p1.x) * (p.y - p1.y) - (p.x - p1.x) * (p2.y - p1.y);
}
bool b_clicked = false;
bool detect_tracking = true;
// Gimbal handle
sv::Gimbal *gimbal;
// Callback for receiving gimbal state
void GimableCallback(double &frame_ang_r, double &frame_ang_p, double &frame_ang_y,
double &imu_ang_r, double &imu_ang_p, double &imu_ang_y,
double &fov_x, double &fov_y);
// Gimbal tracking with a proportional (KP) controller
void gimbalTrack(double xOffset, double yOffset)
{
gimbal->setAngleRateEuler(0, yOffset * 150.0f, xOffset * 150.0f);
}
// Stop gimbal tracking
void gimbalNoTrack(void)
{
gimbal->setAngleRateEuler(0, 0, 0);
}
int main(int argc, char *argv[])
{
// Instantiate the gimbal
gimbal = new sv::Gimbal(sv::GimbalType::G1, sv::GimbalLink::SERIAL);
// Use /dev/ttyUSB0 as the control serial port
gimbal->setSerialPort("/dev/ttyUSB0");
// Enable gimbal control, with GimableCallback as the state callback
gimbal->open(GimableCallback);
// Camera
sv::Camera cap;
// Set the camera stream address to 192.168.2.64
cap.setIp("192.168.2.64");
// Set the capture resolution to 720P
cap.setWH(1280, 720);
// Set the video frame rate to 30 FPS
cap.setFps(30);
// Open the camera
cap.open(sv::CameraType::G1);
// Create a new window on which the selection box can be drawn
cv::namedWindow(RGB_WINDOW);
// Register the mouse callback that implements the whole ROI-selection logic
cv::setMouseCallback(RGB_WINDOW, onMouse, 0);
// Instantiate the single-object tracker
sv::SingleObjectTracker sot;
// Load the camera parameters manually; with an Amov gimbal/camera such as the G1 this step can be skipped and the parameter file is downloaded automatically
sot.loadCameraParams(sv::get_home() + "/SpireCV/calib_webcam_640x480.yaml");
sv::CommonObjectDetector cod;
cod.loadCameraParams(sv::get_home() + "/SpireCV/calib_webcam_640x480.yaml");
// OpenCV Mat that holds a single frame in memory
cv::Mat img;
int frame_id = 0;
while (1)
{
if (detect_tracking == true)
{
// Instantiate SpireCV's per-frame detection result class TargetsInFrame
sv::TargetsInFrame tgts(frame_id++);
// Read one frame into img
cap.read(img);
cv::resize(img, img, cv::Size(cod.image_width, cod.image_height));
// Run common object detection
cod.detect(img, tgts);
gimbalNoTrack();
// Overlay the detection results on img
sv::drawTargetsInFrame(img, tgts);
// Print the common-object-detection results to the console
printf("Frame-[%d]\n", frame_id);
// Print the current detection FPS
printf(" FPS = %.2f\n", tgts.fps);
// Print the current camera field of view (degrees)
printf(" FOV (fx, fy) = (%.2f, %.2f)\n", tgts.fov_x, tgts.fov_y);
for (int i = 0; i < tgts.targets.size(); i++)
{
printf("Frame-[%d], Object-[%d]\n", frame_id, i);
// Print each target's center position; cx and cy are in [0, 1]
printf(" Object Center (cx, cy) = (%.3f, %.3f)\n", tgts.targets[i].cx, tgts.targets[i].cy);
// Print each target's bounding-box width and height; w and h are in (0, 1]
printf(" Object Size (w, h) = (%.3f, %.3f)\n", tgts.targets[i].w, tgts.targets[i].h);
// Print each target's confidence score
printf(" Object Score = %.3f\n", tgts.targets[i].score);
// Print each target's category (string)
printf(" Object Category = %s, Category ID = [%d]\n", tgts.targets[i].category.c_str(), tgts.targets[i].category_id);
// Print each target's line-of-sight angles, which depend on the camera field of view
printf(" Object Line-of-sight (ax, ay) = (%.3f, %.3f)\n", tgts.targets[i].los_ax, tgts.targets[i].los_ay);
// Print each target's 3D position in the camera frame; it depends on the target's real size and the camera parameters
printf(" Object Position = (x, y, z) = (%.3f, %.3f, %.3f)\n", tgts.targets[i].px, tgts.targets[i].py, tgts.targets[i].pz);
p1.x = tgts.targets[i].cx * tgts.width - tgts.targets[i].w * tgts.width / 2;
p1.y = tgts.targets[i].cy * tgts.height - tgts.targets[i].h * tgts.height / 2;
p2.x = tgts.targets[i].cx * tgts.width + tgts.targets[i].w * tgts.width / 2;
p2.y = tgts.targets[i].cy * tgts.height - tgts.targets[i].h * tgts.height / 2;
p4.x = tgts.targets[i].cx * tgts.width - tgts.targets[i].w * tgts.width / 2;
p4.y = tgts.targets[i].cy * tgts.height + tgts.targets[i].h * tgts.height / 2;
p3.x = tgts.targets[i].cx * tgts.width + tgts.targets[i].w * tgts.width / 2;
p3.y = tgts.targets[i].cy * tgts.height + tgts.targets[i].h * tgts.height / 2;
p.x = pt_origin.x;
p.y = pt_origin.y;
std::cout << "p.x " << p.x << "\t"
<< "p.y " << p.y << std::endl;
if (getCross(p1, p2, p) * getCross(p3, p4, p) >= 0 && getCross(p2, p3, p) * getCross(p4, p1, p) >= 0)
{
b_begin_TRACK = false;
detect_tracking = false;
// pt_origin = cv::Point(nor_x, nor_p_y);
// std::cout << "pt_origin " <<nor_x<<"/t"<<nor_p_y<< std::endl;
rect_sel = cv::Rect(p1.x, p1.y, tgts.targets[i].w * tgts.width, tgts.targets[i].h * tgts.height);
// std::cout << rect_sel << std::endl;
b_renew_ROI = true;
frame_id = 0;
printf("rect_sel Yes\n");
}
else
{
printf("rect_sel No\n");
}
}
}
else
{
// Instantiate SpireCV's per-frame detection result class TargetsInFrame
sv::TargetsInFrame tgts(frame_id++);
// Read one frame into img
cap.read(img);
cv::resize(img, img, cv::Size(sot.image_width, sot.image_height));
// Single-object tracking logic
// Has a new target been selected by hand?
if (b_renew_ROI)
{
// Initialize the tracker with the newly selected ROI
sot.init(img, rect_sel);
// std::cout << rect_sel << std::endl;
// Reset the selection flag
b_renew_ROI = false;
// Start tracking
b_begin_TRACK = true;
}
else if (b_begin_TRACK)
{
// Continue tracking, based on the previous frame's result
sot.track(img, tgts);
gimbalTrack(tgts.targets[0].cx - 0.5f, tgts.targets[0].cy - 0.5f);
// Overlay the detection results on img
sv::drawTargetsInFrame(img, tgts);
// Print the single-object-tracking result to the console
printf("Frame-[%d]\n", frame_id);
// Print the current detection FPS
printf(" FPS = %.2f\n", tgts.fps);
// Print the current camera field of view (degrees)
printf(" FOV (fx, fy) = (%.2f, %.2f)\n", tgts.fov_x, tgts.fov_y);
if (tgts.targets.size() > 0)
{
printf("Frame-[%d]\n", frame_id);
// Print the tracked target's center position; cx and cy are in [0, 1]
printf(" Tracking Center (cx, cy) = (%.3f, %.3f)\n", tgts.targets[0].cx, tgts.targets[0].cy);
// Print the tracked target's bounding-box width and height; w and h are in (0, 1]
printf(" Tracking Size (w, h) = (%.3f, %.3f)\n", tgts.targets[0].w, tgts.targets[0].h);
// Print the tracked target's line-of-sight angles, which depend on the camera field of view
printf(" Tracking Line-of-sight (ax, ay) = (%.3f, %.3f)\n", tgts.targets[0].los_ax, tgts.targets[0].los_ay);
}
}
} // end of tracking
// Display the result image
cv::imshow(RGB_WINDOW, img);
cv::waitKey(10);
}
return 0;
}
void onMouse(int event, int x, int y, int, void *)
{
if (b_clicked)
{
// Reset the selection-point coordinates
pt_origin.x = 0;
pt_origin.y = 0;
}
// Left mouse button pressed
if (event == cv::EVENT_LBUTTONDOWN)
{
detect_tracking = true;
pt_origin = cv::Point(x, y);
}
else if (event == cv::EVENT_RBUTTONDOWN)
{
detect_tracking = true;
b_renew_ROI = false;
b_begin_TRACK = false;
b_clicked = true;
}
}
void GimableCallback(double &frame_ang_r, double &frame_ang_p, double &frame_ang_y,
double &imu_ang_r, double &imu_ang_p, double &imu_ang_y,
double &fov_x, double &fov_y)
{
static int count = 0;
if (count == 25)
{
std::cout << "GIMBAL_CMD_RCV_POS" << std::endl;
std::cout << "=============================================" << std::endl;
std::cout << "HALL_yaw:" << frame_ang_y << " " << std::endl;
std::cout << "HALL_roll:" << frame_ang_r << " " << std::endl;
std::cout << "HALL_pitch:" << frame_ang_p << " " << std::endl;
std::cout << "GYRO_yaw:" << imu_ang_y << " " << std::endl;
std::cout << "GYRO_roll:" << imu_ang_r << " " << std::endl;
std::cout << "GYRO_pitch:" << imu_ang_p << " " << std::endl;
count = 0;
}
count++;
}
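
The click-to-select branch above decides whether the clicked point pt_origin lies inside a detected bounding box by checking the signs of four cross products over the box corners p1 -> p2 -> p3 -> p4. A minimal standalone sketch of that test; the corner values and the insideRect helper name are only for illustration:

#include <cstdio>

struct Node { double x, y; };

// Z component of the cross product (p2 - p1) x (p - p1); its sign tells
// which side of the directed edge p1->p2 the point p falls on.
static double getCross(const Node &p1, const Node &p2, const Node &p)
{
    return (p2.x - p1.x) * (p.y - p1.y) - (p.x - p1.x) * (p2.y - p1.y);
}

// p is inside the rectangle p1->p2->p3->p4 (corners visited in order) when
// the cross products of opposite edges have matching signs.
static bool insideRect(const Node &p1, const Node &p2, const Node &p3,
                       const Node &p4, const Node &p)
{
    return getCross(p1, p2, p) * getCross(p3, p4, p) >= 0 &&
           getCross(p2, p3, p) * getCross(p4, p1, p) >= 0;
}

int main()
{
    // Hypothetical 100x80 box with its top-left corner at (200, 150).
    Node p1{200, 150}, p2{300, 150}, p3{300, 230}, p4{200, 230};
    printf("click (250, 190) inside: %d\n", insideRect(p1, p2, p3, p4, {250, 190})); // prints 1
    printf("click ( 50,  40) inside: %d\n", insideRect(p1, p2, p3, p4, { 50,  40})); // prints 0
    return 0;
}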

View File

@ -0,0 +1,147 @@
#include <iostream>
#include <string>
// Include the SpireCV SDK header
#include <sv_world.h>
// #include "gimbal_tools.hpp"
using namespace std;
// Gimbal (via helper)
// sampleGimbal gimbal;
// Window name
static const std::string RGB_WINDOW = "Image window";
// Mouse-click event callback
void onMouse(int event, int x, int y, int, void *);
// Gimbal handle
sv::Gimbal *gimbal;
// Callback for receiving gimbal state
void GimableCallback(double &frame_ang_r, double &frame_ang_p, double &frame_ang_y,
double &imu_ang_r, double &imu_ang_p, double &imu_ang_y,
double &fov_x, double &fov_y);
// Gimbal tracking with a proportional (KP) controller
void gimbalTrack(double xOffset, double yOffset)
{
gimbal->setAngleRateEuler(0, yOffset * 150.0f, xOffset * 150.0f);
}
// Stop gimbal tracking
void gimbalNoTrack(void)
{
gimbal->setAngleRateEuler(0, 0, 0);
}
int main(int argc, char *argv[])
{
// Instantiate the gimbal
gimbal = new sv::Gimbal(sv::GimbalType::G1, sv::GimbalLink::SERIAL);
// Use /dev/ttyUSB0 as the control serial port
gimbal->setSerialPort("/dev/ttyUSB0");
// Enable gimbal control, with GimableCallback as the state callback
gimbal->open(GimableCallback);
// Camera
sv::Camera cap;
// Set the camera stream address to 192.168.2.64
cap.setIp("192.168.2.64");
// Set the capture resolution to 720P
cap.setWH(1280, 720);
// Set the video frame rate to 30 FPS
cap.setFps(30);
// Open the camera
cap.open(sv::CameraType::G1);
// Create a window so it can receive mouse input
cv::namedWindow(RGB_WINDOW);
// Register the mouse callback that implements gimbal control
cv::setMouseCallback(RGB_WINDOW, onMouse, 0);
// Instantiate the circular landing-marker detector
sv::LandingMarkerDetector lmd;
// Load the camera parameters manually; with an Amov gimbal/camera such as the G1 this step can be skipped and the parameter file is downloaded automatically
lmd.loadCameraParams(sv::get_home() + "/SpireCV/calib_webcam_1280x720.yaml");
// OpenCV Mat that holds a single frame in memory
cv::Mat img;
int frame_id = 0;
while (1)
{
// Instantiate SpireCV's per-frame detection result class TargetsInFrame
sv::TargetsInFrame tgts(frame_id++);
// Read one frame into img
cap.read(img);
cv::resize(img, img, cv::Size(lmd.image_width, lmd.image_height));
// Run landing-marker detection
lmd.detect(img, tgts);
// Overlay the detection results on img
sv::drawTargetsInFrame(img, tgts);
// Print the landing-marker detection results to the console
printf("Frame-[%d]\n", frame_id);
// Print the current detection FPS
printf(" FPS = %.2f\n", tgts.fps);
// Print the current camera field of view (degrees)
printf(" FOV (fx, fy) = (%.2f, %.2f)\n", tgts.fov_x, tgts.fov_y);
for (int i = 0; i < tgts.targets.size(); i++)
{
// Track only the "x"-type marker
if (tgts.targets[i].category_id == 2)
{
gimbalTrack(tgts.targets[i].cx - 0.5f, tgts.targets[i].cy - 0.5f);
}
printf("Frame-[%d], Marker-[%d]\n", frame_id, i);
// Print each landing marker's center position; cx and cy are in [0, 1]
printf(" Marker Center (cx, cy) = (%.3f, %.3f)\n", tgts.targets[i].cx, tgts.targets[i].cy);
// Print each landing marker's bounding-box width and height; w and h are in (0, 1]
printf(" Marker Size (w, h) = (%.3f, %.3f)\n", tgts.targets[i].w, tgts.targets[i].h);
// Print each landing marker's confidence score
printf(" Marker Score = %.3f\n", tgts.targets[i].score);
// Print each landing marker's category (string): "h", "x", "1", "2", "3", ...
printf(" Marker Category = %s, Category ID = %d\n", tgts.targets[i].category.c_str(), tgts.targets[i].category_id);
// Print each landing marker's line-of-sight angles, which depend on the camera field of view
printf(" Marker Line-of-sight (ax, ay) = (%.3f, %.3f)\n", tgts.targets[i].los_ax, tgts.targets[i].los_ay);
// Print each landing marker's 3D position in the camera frame; it depends on the marker's real radius and the camera parameters
printf(" Marker Position = (x, y, z) = (%.3f, %.3f, %.3f)\n", tgts.targets[i].px, tgts.targets[i].py, tgts.targets[i].pz);
}
// Display the result image
cv::imshow(RGB_WINDOW, img);
cv::waitKey(10);
}
return 0;
}
void onMouse(int event, int x, int y, int, void *)
{
if (event == cv::EVENT_LBUTTONDOWN)
{
gimbal->setAngleRateEuler(0, 0.02 * (y - 720 / 2), 0.02 * (x - 1280 / 2));
}
if (event == cv::EVENT_RBUTTONDOWN)
{
gimbalNoTrack();
gimbal->setHome();
}
}
void GimableCallback(double &frame_ang_r, double &frame_ang_p, double &frame_ang_y,
double &imu_ang_r, double &imu_ang_p, double &imu_ang_y,
double &fov_x, double &fov_y)
{
static int count = 0;
if (count == 25)
{
std::cout << "GIMBAL_CMD_RCV_POS" << std::endl;
std::cout << "=============================================" << std::endl;
std::cout << "HALL_yaw:" << frame_ang_y << " " << std::endl;
std::cout << "HALL_roll:" << frame_ang_r << " " << std::endl;
std::cout << "HALL_pitch:" << frame_ang_p << " " << std::endl;
std::cout << "GYRO_yaw:" << imu_ang_y << " " << std::endl;
std::cout << "GYRO_roll:" << imu_ang_r << " " << std::endl;
std::cout << "GYRO_pitch:" << imu_ang_p << " " << std::endl;
count = 0;
}
count++;
}
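
Both demos above steer the gimbal with the same proportional (KP) rule: the target's normalized offset from the image center, (cx - 0.5, cy - 0.5), is multiplied by a fixed gain of 150 and sent to setAngleRateEuler as pitch and yaw rates. A self-contained sketch of that mapping; the RateCmd type, the kpTrack name, and the 60 deg/s clamp are illustrative additions, not part of the original demo:

#include <algorithm>
#include <cstdio>

// Angular-rate command for the gimbal: roll, pitch, yaw.
struct RateCmd { double roll, pitch, yaw; };

// Proportional (KP) mapping from normalized image offsets to angular rates,
// mirroring gimbalTrack() above. The +/-maxRate clamp is an assumed safety
// limit added for illustration only.
static RateCmd kpTrack(double xOffset, double yOffset, double kp = 150.0, double maxRate = 60.0)
{
    RateCmd cmd;
    cmd.roll = 0.0;
    cmd.pitch = std::max(-maxRate, std::min(maxRate, yOffset * kp));
    cmd.yaw = std::max(-maxRate, std::min(maxRate, xOffset * kp));
    return cmd;
}

int main()
{
    // Target detected slightly right of and below the image center.
    RateCmd cmd = kpTrack(0.10, 0.05);
    printf("roll = %.1f, pitch = %.1f, yaw = %.1f\n", cmd.roll, cmd.pitch, cmd.yaw);
    // In the demos this would be forwarded as:
    //   gimbal->setAngleRateEuler(cmd.roll, cmd.pitch, cmd.yaw);
    return 0;
}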

View File

@ -0,0 +1,175 @@
#ifndef GIMBAL_TOOLS_H
#define GIMBAL_TOOLS_H
#include <map>
#include <iostream>
#include <iterator>
#include <stdlib.h>
#include <sv_world.h>
#include <sv_gimbal.h>
std::map<std::string, sv::GimbalType> amovGimbalTypeList =
{
{"G1", sv::GimbalType::G1},
{"Q10", sv::GimbalType::Q10f}};
std::map<std::string, sv::GimbalLink> amovGimbalLinkTypeList =
{
{"serial", sv::GimbalLink::SERIAL},
{"TCP", sv::GimbalLink::ETHERNET_TCP},
{"UDP", sv::GimbalLink::ETHERNET_UDP}};
std::map<std::string, sv::CameraType> amovCameraTypeList =
{
{"NONE", sv::CameraType::NONE},
{"WEBCAM", sv::CameraType::WEBCAM},
{"G1", sv::CameraType::G1},
{"Q10", sv::CameraType::Q10}};
typedef struct
{
int width;
int height;
} AmovCameraSize_T;
std::map<std::string, AmovCameraSize_T> amovCameraSizeType = {
{"480P", {640, 480}},
{"720P", {1280, 720}},
{"1080P", {1920, 1080}},
{"2K", {2560, 1440}},
{"4K", {4096, 2160}}};
static inline AmovCameraSize_T str2CameraSizeWidht(const std::string &strType)
{
std::map<std::string, AmovCameraSize_T>::iterator temp = amovCameraSizeType.find(strType);
if (temp != amovCameraSizeType.end())
{
return temp->second;
}
else
{
std::cout << "error Camera Size type!!!!" << strType << std::endl;
exit(1);
}
}
static inline sv::GimbalType str2GimbalType(const std::string &strType)
{
std::map<std::string, sv::GimbalType>::iterator temp = amovGimbalTypeList.find(strType);
if (temp != amovGimbalTypeList.end())
{
return temp->second;
}
else
{
std::cout << "error giimbal type!!!!" << strType << std::endl;
exit(1);
}
}
static inline sv::GimbalLink str2GimbalLinkType(const std::string &strLinkType)
{
std::map<std::string, sv::GimbalLink>::iterator temp = amovGimbalLinkTypeList.find(strLinkType);
if (temp != amovGimbalLinkTypeList.end())
{
return temp->second;
}
else
{
std::cout << "error giimbal link type!!!!" << strLinkType << std::endl;
exit(1);
}
}
static inline sv::CameraType str2CameraType(const std::string &strCameraType)
{
std::map<std::string, sv::CameraType>::iterator temp = amovCameraTypeList.find(strCameraType);
if (temp != amovCameraTypeList.end())
{
return temp->second;
}
else
{
std::cout << "error Camera Type link type!!!!" << strCameraType << std::endl;
exit(1);
}
}
static inline void defGimableCallback(double &frame_ang_r, double &frame_ang_p, double &frame_ang_y,
double &imu_ang_r, double &imu_ang_p, double &imu_ang_y,
double &fov_x, double &fov_y)
{
static int count = 0;
if (count == 25)
{
std::cout << "GIMBAL_CMD_RCV_POS" << std::endl;
std::cout << "=============================================" << std::endl;
std::cout << "HALL_yaw:" << frame_ang_y << " " << std::endl;
std::cout << "HALL_roll:" << frame_ang_r << " " << std::endl;
std::cout << "HALL_pitch:" << frame_ang_p << " " << std::endl;
std::cout << "GYRO_yaw:" << imu_ang_y << " " << std::endl;
std::cout << "GYRO_roll:" << imu_ang_r << " " << std::endl;
std::cout << "GYRO_pitch:" << imu_ang_p << " " << std::endl;
count = 0;
}
count++;
}
class sampleGimbal
{
public:
sv::Gimbal *gimbal;
sv::Camera cap;
sv::PStateInvoke callback;
AmovCameraSize_T imageSize;
void creat(char *type, char *link, char *ip, char *port, char *size, char *FPS)
{
imageSize = str2CameraSizeWidht(size);
gimbal = new sv::Gimbal(str2GimbalType(type), str2GimbalLinkType(link));
if (str2GimbalLinkType(link) == sv::GimbalLink::SERIAL)
{
gimbal->setSerialPort(port);
}
else
{
gimbal->setNetIp(ip);
gimbal->setNetPort(atoi(port));
}
if (!gimbal->open(callback))
{
std::cout << "gimabl open failed";
exit(1);
}
cap.setIp(ip);
cap.setWH(imageSize.width, imageSize.height);
cap.setFps(atoi(FPS));
cap.open(str2CameraType(type));
}
void gimbalTrack(double xOffset, double yOffset)
{
gimbal->setAngleRateEuler(0, yOffset * 150.0f, xOffset * 150.0f);
}
void gimbalNoTrack(void)
{
gimbal->setAngleRateEuler(0, 0, 0);
}
sampleGimbal(sv::PStateInvoke _callback = defGimableCallback)
{
callback = _callback;
}
~sampleGimbal() {}
};
#endif
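
A possible way to drive the sampleGimbal helper above from the command line. This is only a sketch under the assumption that SpireCV and its headers are installed; the program name, argument order, and main() body are illustrative and not part of the commit:

#include <iostream>
#include <sv_world.h>
#include "gimbal_tools.hpp"

// Hypothetical usage:
//   ./gimbal_demo G1 serial 192.168.2.64 /dev/ttyUSB0 720P 30
int main(int argc, char *argv[])
{
    if (argc < 7)
    {
        std::cout << "usage: " << argv[0] << " <type> <link> <ip> <port> <size> <fps>" << std::endl;
        return 1;
    }
    // Uses the default defGimableCallback from gimbal_tools.hpp for state printing.
    sampleGimbal gimbal;
    // argv entries are mutable char*, matching creat()'s signature.
    gimbal.creat(argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
    cv::Mat img;
    while (1)
    {
        // Read frames from the camera opened by creat() and display them.
        gimbal.cap.read(img);
        if (!img.empty())
            cv::imshow("Image window", img);
        cv::waitKey(10);
    }
    return 0;
}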

View File

@ -0,0 +1,127 @@
#include <iostream>
#include <string>
// Include the SpireCV SDK header
#include <sv_world.h>
using namespace std;
// Gimbal Euler angles: yaw, roll, pitch
double gimbalEulerAngle[3];
void gimableCallback(double &imu_ang_r, double &imu_ang_p, double &imu_ang_y,
double &frame_ang_r, double &frame_ang_p, double &frame_ang_y,
double &fov_x, double &fov_y)
{
static int count = 0;
if (count == 25)
{
std::cout << "GIMBAL_CMD_RCV_POS" << std::endl;
std::cout << "=============================================" << std::endl;
std::cout << "HALL_yaw:" << frame_ang_y << " " << std::endl;
std::cout << "HALL_roll:" << frame_ang_r << " " << std::endl;
std::cout << "HALL_pitch:" << frame_ang_p << " " << std::endl;
std::cout << "GYRO_yaw:" << imu_ang_y << " " << std::endl;
std::cout << "GYRO_roll:" << imu_ang_r << " " << std::endl;
std::cout << "GYRO_pitch:" << imu_ang_p << " " << std::endl;
count = 0;
}
gimbalEulerAngle[0] = frame_ang_y;
gimbalEulerAngle[1] = imu_ang_r;
gimbalEulerAngle[2] = imu_ang_p;
count++;
}
// Gimbal handle
sv::Gimbal *gimbal;
// Window name
static const std::string RGB_WINDOW = "Image window";
// Mouse-click event callback
void onMouse(int event, int x, int y, int, void *);
int main(int argc, char *argv[])
{
// Instantiate the gimbal
gimbal = new sv::Gimbal(sv::GimbalType::G1, sv::GimbalLink::SERIAL);
// Use /dev/ttyUSB0 as the control serial port
gimbal->setSerialPort("/dev/ttyUSB0");
// Enable gimbal control, with gimableCallback as the state callback
gimbal->open(gimableCallback);
// Camera
sv::Camera cap;
// Set the camera stream address to 192.168.2.64
cap.setIp("192.168.2.64");
// Set the capture resolution to 720P
cap.setWH(1280, 720);
// Set the video frame rate to 30 FPS
cap.setFps(30);
// Open the camera
cap.open(sv::CameraType::G1);
// Create a window so it can receive mouse input
cv::namedWindow(RGB_WINDOW);
// Register the mouse callback that implements gimbal control
cv::setMouseCallback(RGB_WINDOW, onMouse, 0);
// Instantiate the ArUco detector
sv::ArucoDetector ad;
// Load the camera parameters manually; with an Amov gimbal/camera such as the G1 this step can be skipped and the parameter file is downloaded automatically
ad.loadCameraParams("/home/amov/SpireCV/calib_webcam_640x480.yaml");
sv::UDPServer udp;
// OpenCV Mat that holds a single frame in memory
cv::Mat img;
int frame_id = 0;
while (1)
{
// Instantiate SpireCV's per-frame detection result class TargetsInFrame
sv::TargetsInFrame tgts(frame_id++);
// Read one frame into img
cap.read(img);
// Run ArUco marker detection
ad.detect(img, tgts);
tgts.has_pod_info = true;
tgts.pod_patch = gimbalEulerAngle[2];
tgts.pod_roll = gimbalEulerAngle[1];
tgts.pod_yaw = gimbalEulerAngle[0];
tgts.has_uav_pos = true;
tgts.longitude = 1.1234567;
tgts.latitude = 2.2345678;
tgts.altitude = 3.3456789;
tgts.has_uav_vel = true;
tgts.uav_vx = 4;
tgts.uav_vy = 5;
tgts.uav_vz = 6;
tgts.has_ill = true;
tgts.illumination = 7;
// www.write(img, tgts);
udp.send(tgts);
// Overlay the detection results on img
sv::drawTargetsInFrame(img, tgts);
// Display the result image
cv::imshow(RGB_WINDOW, img);
cv::waitKey(10);
}
return 0;
}
void onMouse(int event, int x, int y, int, void *)
{
if (event == cv::EVENT_LBUTTONDOWN)
{
gimbal->setAngleRateEuler(0, 0.02 * (y - 720 / 2), 0.02 * (x - 1280 / 2));
}
if (event == cv::EVENT_RBUTTONDOWN)
{
gimbal->setHome();
}
}
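
The onMouse handler here (and the one in the landing-marker demo) turns a pixel click into pitch/yaw rate commands by scaling the click's offset from the image center with a fixed gain of 0.02, with the center hardcoded for a 1280x720 frame. A standalone sketch of that conversion with the image size made an explicit parameter; the function and variable names are illustrative:

#include <cstdio>

// Convert a pixel click into gimbal pitch/yaw rate commands, mirroring the
// onMouse handlers above: rate = gain * (pixel offset from image center).
static void clickToRates(int x, int y, int imgW, int imgH,
                         double gain, double &pitchRate, double &yawRate)
{
    pitchRate = gain * (y - imgH / 2); // vertical offset drives pitch
    yawRate = gain * (x - imgW / 2);   // horizontal offset drives yaw
}

int main()
{
    double pitchRate = 0.0, yawRate = 0.0;
    // Click in the upper-left quadrant of a 1280x720 frame.
    clickToRates(400, 200, 1280, 720, 0.02, pitchRate, yawRate);
    printf("pitch rate = %.2f, yaw rate = %.2f\n", pitchRate, yawRate);
    // The demo would forward these as:
    //   gimbal->setAngleRateEuler(0, pitchRate, yawRate);
    return 0;
}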

View File

@ -47,7 +47,11 @@ cp -r ~/opencv_build/opencv_cache_x86-4.7.0/* ~/opencv_build/opencv-4.7.0/.cache
mkdir build
cd build
cmake -D CMAKE_BUILD_TYPE=Release \
-D WITH_CUDA=OFF \
-D OPENCV_ENABLE_NONFREE=ON \
-D CMAKE_INSTALL_PREFIX=/usr/local \
-D OPENCV_EXTRA_MODULES_PATH=../../opencv_contrib-4.7.0/modules ..
make -j2
sudo make install
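
After sudo make install, one quick way to confirm which OpenCV version a program compiles against is to print CV_VERSION. This check is a sketch; the pkg-config-based compile line in the comment assumes the opencv4 package config is installed:

#include <iostream>
#include <opencv2/core.hpp>

// Build (assumed): g++ cv_version_check.cpp -o cv_version_check $(pkg-config --cflags --libs opencv4)
// Expected to report 4.7.0 after the build steps above.
int main()
{
    std::cout << "OpenCV version: " << CV_VERSION << std::endl;
    return 0;
}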