diff --git a/gimbal_ctrl/driver/src/GX40/GX40_gimbal_driver.cpp b/gimbal_ctrl/driver/src/GX40/GX40_gimbal_driver.cpp
index e03664d..7c09a02 100644
--- a/gimbal_ctrl/driver/src/GX40/GX40_gimbal_driver.cpp
+++ b/gimbal_ctrl/driver/src/GX40/GX40_gimbal_driver.cpp
@@ -163,12 +163,13 @@ void GX40GimbalDriver::convert(void *buf)
     this->state.workMode = (amovGimbal::AMOV_GIMBAL_SERVO_MODE_T)primary->workMode;
     this->state.cameraFlag = (amovGimbal::AMOV_GIMBAL_CAMERA_FLAG_T)primary->state;
     // This probably needs to be resolved further to get the actual frame angles
-    this->state.rel.yaw = 360.0f - (primary->motorYaw * XF_ANGLE_DPI);
+    this->state.rel.yaw = -(primary->motorYaw * XF_ANGLE_DPI);
+    this->state.rel.yaw = this->state.rel.yaw < -180.0f ? this->state.rel.yaw + 360.0f : this->state.rel.yaw;
     this->state.rel.pitch = -(primary->motorPitch * XF_ANGLE_DPI);
     this->state.rel.roll = -(primary->motorRoll * XF_ANGLE_DPI);
-    this->state.abs.yaw = (primary->yaw * XF_ANGLE_DPI);
-    this->state.abs.yaw = this->state.abs.yaw > 180.0f ? this->state.abs.yaw - 360.0f : this->state.abs.yaw;
+    this->state.abs.yaw = -(primary->yaw * XF_ANGLE_DPI);
+    this->state.abs.yaw = this->state.abs.yaw < -180.0f ? this->state.abs.yaw + 360.0f : this->state.abs.yaw;
     this->state.abs.pitch = -(primary->pitch * XF_ANGLE_DPI);
     this->state.abs.roll = -(primary->roll * XF_ANGLE_DPI);
diff --git a/samples/demo/gimbal_detection_with_clicked_tracking.cpp b/samples/demo/gimbal_detection_with_clicked_tracking.cpp
index 441fd26..d6b924b 100644
--- a/samples/demo/gimbal_detection_with_clicked_tracking.cpp
+++ b/samples/demo/gimbal_detection_with_clicked_tracking.cpp
@@ -94,7 +94,6 @@ int main(int argc, char *argv[])
         // Read a frame into img
         cap.read(img);
         cv::resize(img, img, cv::Size(cod.image_width, cod.image_height));
-        cv::imshow(RGB_WINDOW, img);
 
         // Run common object detection
         cod.detect(img, tgts);
@@ -201,7 +200,7 @@ int main(int argc, char *argv[])
             }
         } // end of tracking
         // Display the detection result img
-        // cv::imshow(RGB_WINDOW, img);
+        cv::imshow(RGB_WINDOW, img);
         cv::waitKey(10);
     }
     return 0;
diff --git a/samples/demo/gimbal_udp_detection_info_sender.cpp b/samples/demo/gimbal_udp_detection_info_sender.cpp
index 27a3cbd..0d826df 100644
--- a/samples/demo/gimbal_udp_detection_info_sender.cpp
+++ b/samples/demo/gimbal_udp_detection_info_sender.cpp
@@ -7,21 +7,21 @@ using namespace std;
 
 // yaw roll pitch
 double gimbalEulerAngle[3];
-void gimableCallback(double &imu_ang_r, double &imu_ang_p, double &imu_ang_y,
-                     double &frame_ang_r, double &frame_ang_p, double &frame_ang_y,
+void gimableCallback(double &frame_ang_r, double &frame_ang_p, double &frame_ang_y,
+                     double &imu_ang_r, double &imu_ang_p, double &imu_ang_y,
                      double &fov_x, double &fov_y)
 {
     static int count = 0;
     if (count == 25)
     {
-        std::cout << "GIMBAL_CMD_RCV_POS" << std::endl;
-        std::cout << "=============================================" << std::endl;
-        std::cout << "HALL_yaw:" << frame_ang_y << " " << std::endl;
-        std::cout << "HALL_roll:" << frame_ang_r << " " << std::endl;
-        std::cout << "HALL_pitch:" << frame_ang_p << " " << std::endl;
-        std::cout << "GYRO_yaw:" << imu_ang_y << " " << std::endl;
-        std::cout << "GYRO_roll:" << imu_ang_r << " " << std::endl;
-        std::cout << "GYRO_pitch:" << imu_ang_p << " " << std::endl;
+        // std::cout << "GIMBAL_CMD_RCV_POS" << std::endl;
+        // std::cout << "=============================================" << std::endl;
+        // std::cout << "HALL_yaw:" << frame_ang_y << " " << std::endl;
+        // std::cout << "HALL_roll:" << frame_ang_r << " " << std::endl;
+        // std::cout << "HALL_pitch:" << frame_ang_p << " " << std::endl;
+        // std::cout << "GYRO_yaw:" << imu_ang_y << " " << std::endl;
+        // std::cout << "GYRO_roll:" << imu_ang_r << " " << std::endl;
+        // std::cout << "GYRO_pitch:" << imu_ang_p << " " << std::endl;
         count = 0;
     }
diff --git a/video_io/sv_video_input.cpp b/video_io/sv_video_input.cpp
index 5862280..1a3c6ae 100644
--- a/video_io/sv_video_input.cpp
+++ b/video_io/sv_video_input.cpp
@@ -86,21 +86,22 @@ namespace sv
             this->_port = 554;
         }
 #ifdef PLATFORM_X86_CUDA
-        camera_url << "rtspsrc location = rtsp://user:0000@" << this->_ip << ":" << this->_port
+        camera_url << "rtspsrc location = rtsp://user:0000@" << this->_ip << ":" << this->_port
                    << "/cam/realmonitor?channel=1&subtype=0 latency=100 ! application/x-rtp,media=video ! \
-                   rtph265depay ! parsebin ! avdec_h265 ! \
+                   rtph265depay ! parsebin ! avdec_h265 ! videoscale ! video/x-raw,width=(int)" << this->_width
+                   << ",height=(int)" << this->_height << " ! videoflip video-direction=4 ! videoconvert ! video/x-raw,format=(string)BGR ! \
                    appsink sync=false";
-        std::cout << camera_url.str() << std::endl;
-        this->_cap.open(camera_url.str(),cv::CAP_GSTREAMER);
+        this->_cap.open(camera_url.str(), cv::CAP_GSTREAMER);
 #endif
 #ifdef PLATFORM_JETSON
-        camera_url << "rtspsrc location = rtsp://user:0000@" << this->_ip << ":" << this->_port
+        camera_url << "rtspsrc location = rtsp://user:0000@" << this->_ip << ":" << this->_port
                    << "/cam/realmonitor?channel=1&subtype=0 latency=100 ! \
                    application/x-rtp,media=video ! rtph265depay ! parsebin ! \
                    nvv4l2decoder ! nvvidconv flip-method=4 ! \
-                   video/x-raw,format=(string)BGRx,width=" << this->_width << ",height=" << this->_height << " ! videoconvert ! \
+                   video/x-raw,format=(string)BGRx,width=(int)"
+                   << this->_width << ",height=(int)" << this->_height << " ! videoconvert ! video/x-raw,format=(string)BGR ! \
                    appsink sync=false";
-        this->_cap.open(camera_url.str(),cv::CAP_GSTREAMER);
+        this->_cap.open(camera_url.str(), cv::CAP_GSTREAMER);
 #endif
     }
     else if (this->_type == CameraType::MIPI)