temporary storage

AiYangSky 2023-11-21 15:27:21 +08:00
parent 6cf8422835
commit 169f4ba55b
24 changed files with 1130 additions and 144 deletions

View File

@ -69,6 +69,7 @@ include_directories(
${CMAKE_CURRENT_SOURCE_DIR}/gimbal_ctrl/driver/src/G1
${CMAKE_CURRENT_SOURCE_DIR}/gimbal_ctrl/driver/src/AT10
${CMAKE_CURRENT_SOURCE_DIR}/gimbal_ctrl/driver/src/Q10f
${CMAKE_CURRENT_SOURCE_DIR}/gimbal_ctrl/driver/src/GX40
${CMAKE_CURRENT_SOURCE_DIR}/gimbal_ctrl/driver/src
${CMAKE_CURRENT_SOURCE_DIR}/gimbal_ctrl
${CMAKE_CURRENT_SOURCE_DIR}/algorithm/common_det/cuda
@ -76,7 +77,7 @@ include_directories(
${CMAKE_CURRENT_SOURCE_DIR}/algorithm/sot/ocv470
${CMAKE_CURRENT_SOURCE_DIR}/algorithm/color_line
${CMAKE_CURRENT_SOURCE_DIR}/video_io
${CMAKE_CURRENT_SOURCE_DIR}/algorithm/veri/cuda
${CMAKE_CURRENT_SOURCE_DIR}/algorithm/veri/cuda
${CMAKE_CURRENT_SOURCE_DIR}/algorithm/ellipse_det
${CMAKE_CURRENT_SOURCE_DIR}/utils
)
@ -139,6 +140,8 @@ file(GLOB DRV_LIB_FILES ${CMAKE_CURRENT_SOURCE_DIR}/gimbal_ctrl/driver/src/AT10/
list(APPEND driver_SRCS ${DRV_LIB_FILES})
file(GLOB DRV_LIB_FILES ${CMAKE_CURRENT_SOURCE_DIR}/gimbal_ctrl/driver/src/Q10f/*.cpp)
list(APPEND driver_SRCS ${DRV_LIB_FILES})
file(GLOB DRV_LIB_FILES ${CMAKE_CURRENT_SOURCE_DIR}/gimbal_ctrl/driver/src/GX40/*.cpp)
list(APPEND driver_SRCS ${DRV_LIB_FILES})
file(GLOB DRV_LIB_FILES ${CMAKE_CURRENT_SOURCE_DIR}/gimbal_ctrl/driver/src/*.cpp)
list(APPEND driver_SRCS ${DRV_LIB_FILES})

View File

@ -4,6 +4,6 @@ rm -rf build
mkdir build
cd build
cmake .. -DPLATFORM=X86_CUDA
make -j4
make -j
sudo make install

View File

@ -10,9 +10,9 @@ add_definitions(
add_subdirectory(FIFO)
########## add types of gimbal ##############
add_subdirectory(G1)
add_subdirectory(G2)
add_subdirectory(Q10f)
add_subdirectory(AT10)
add_subdirectory(GX40)
file(GLOB LIB_FILES ${CMAKE_CURRENT_SOURCE_DIR}/*.cpp)
@ -27,6 +27,7 @@ target_link_libraries(AMOV_Gimbal
Gimabl_G2
Gimabl_Q10f
Gimabl_AT10
Gimabl_GX40
)
target_include_directories(AMOV_Gimbal

View File

@ -0,0 +1,18 @@
add_library(Gimabl_GX40)
file(GLOB LIB_FILES ${CMAKE_CURRENT_SOURCE_DIR}/*.cpp)
target_sources(Gimabl_GX40
PUBLIC
${LIB_FILES}
)
target_link_libraries(Gimabl_GX40
PUBLIC
FIFO
)
target_include_directories(Gimabl_GX40
PUBLIC
${CMAKE_CURRENT_SOURCE_DIR}
)

View File

@ -0,0 +1,41 @@
/*
* @Description:
* @Author: L LC @amov
* @Date: 2023-10-20 16:33:07
* @LastEditors: L LC @amov
* @LastEditTime: 2023-11-01 17:03:16
* @FilePath: /gimbal-sdk-multi-platform/src/GX40/GX40_gimbal_crc16.h
*/
#ifndef GX40_GIMBAL_CRC16_H
#define GX40_GIMBAL_CRC16_H
#include <stdint.h>
namespace GX40
{
const static uint16_t crc16Tab[16] = {
0x0000, 0x1021, 0x2042, 0x3063,
0x4084, 0x50a5, 0x60c6, 0x70e7,
0x8108, 0x9129, 0xa14a, 0xb16b,
0xc18c, 0xd1ad, 0xe1ce, 0xf1ef};
static inline uint16_t CalculateCrc16(const uint8_t *ptr, uint8_t len)
{
uint16_t crc = 0;
uint8_t temp;
while (len-- != 0)
{
temp = crc >> 12;
crc <<= 4;
crc ^= crc16Tab[temp ^ (*ptr >> 4)];
temp = crc >> 12;
crc <<= 4;
crc ^= crc16Tab[temp ^ (*ptr & 0x0F)];
ptr++;
}
crc = (crc >> 8) | (crc << 8);
return (crc);
}
}
#endif
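The table above drives a nibble-wise CRC-16 (CCITT polynomial, 4-bit lookup). The driver later checksums the 69 fixed bytes of a frame plus the command byte and payload. A minimal usage sketch of my own, not part of the commit; the 69-byte offset follows the GIMBAL_FRAME_T layout introduced further down:
#include <stdint.h>
#include <string.h>
#include "GX40_gimbal_crc16.h"
// Hypothetical helper: checksum head/length/version/primary/secondary (69 bytes)
// plus the command byte and payload, and store the CRC right after the payload.
static void appendFrameCrc(uint8_t *frame, uint8_t payloadSize)
{
    uint16_t crc = GX40::CalculateCrc16(frame, 69 + 1 + payloadSize);
    memcpy(frame + 69 + 1 + payloadSize, &crc, sizeof(crc));
}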

View File

@ -0,0 +1,301 @@
/*
* @Description:
* @Author: L LC @amov
* @Date: 2023-10-20 16:08:17
* @LastEditors: L LC @amov
* @LastEditTime: 2023-11-16 17:23:38
* @FilePath: /gimbal-sdk-multi-platform/src/GX40/GX40_gimbal_driver.cpp
*/
#include <string.h>
#include "GX40_gimbal_driver.h"
#include "GX40_gimbal_crc16.h"
#include <math.h>
/**
* This function is the constructor of the GX40GimbalDriver class in C++; it initializes member
* variables and sets the parser state to idle.
*
* @param _IO _IO is a pointer to an object of type amovGimbal::IOStreamBase. It is used to communicate
* with the gimbal device.
*/
GX40GimbalDriver::GX40GimbalDriver(amovGimbal::IOStreamBase *_IO) : amovGimbal::amovGimbalBase(_IO)
{
rxQueue = new fifoRing(sizeof(GX40::GIMBAL_FRAME_T), MAX_QUEUE_SIZE);
txQueue = new fifoRing(sizeof(GX40::GIMBAL_FRAME_T), MAX_QUEUE_SIZE);
targetPos[0] = 0;
targetPos[1] = 0;
targetPos[2] = 0;
parserState = GX40::GIMBAL_FRAME_PARSER_STATE_IDLE;
}
/**
* The function `nopSend` continuously sends a "no operation" (keep-alive) command to the GX40 gimbal at 50 Hz.
*/
void GX40GimbalDriver::nopSend(void)
{
while (1)
{
// 50Hz
std::this_thread::sleep_for(std::chrono::milliseconds(20));
pack(GX40::GIMBAL_CMD_NOP, nullptr, 0);
}
}
/**
* The function `parserStart` initializes the gimbal driver by setting the callback function, creating
* two threads for the main loop and sending NOP commands, and detaching the threads.
*
* @param callback The parameter "callback" is of type "amovGimbal::pStateInvoke", which is a function
* pointer type. It is used to specify a callback function that will be invoked when the gimbal state
* is updated.
*/
void GX40GimbalDriver::parserStart(amovGimbal::pStateInvoke callback)
{
this->updateGimbalStateCallback = callback;
std::thread mainLoop(&GX40GimbalDriver::mainLoop, this);
std::thread sendNop(&GX40GimbalDriver::nopSend, this);
this->stackThreadHanle = mainLoop.native_handle();
this->nopSendThreadHandle = sendNop.native_handle();
mainLoop.detach();
sendNop.detach();
}
/**
* The function `pack` in the `GX40GimbalDriver` class is responsible for packing data into a frame for
* transmission.
*
* @param cmd The parameter `cmd` is an unsigned 32-bit integer representing the command.
* @param pPayload The `pPayload` parameter is a pointer to the payload data that needs to be packed.
* It is of type `uint8_t*`, which means it is a pointer to an array of unsigned 8-bit integers. The
* payload data is stored in this array.
* @param payloadSize The parameter `payloadSize` represents the size of the payload data in bytes. It
* is used to determine the size of the payload data that needs to be packed into the `temp` structure.
*
* @return a uint32_t value, which is stored in the variable "ret".
*/
uint32_t GX40GimbalDriver::pack(IN uint32_t cmd, uint8_t *pPayload, uint8_t payloadSize)
{
uint32_t ret = 0;
GX40::GIMBAL_FRAME_T temp;
memset(&temp, 0, sizeof(GX40::GIMBAL_FRAME_T));
GX40::GIMBAL_PRIMARY_MASTER_FRAME_T *primary = (GX40::GIMBAL_PRIMARY_MASTER_FRAME_T *)temp.primaryData;
carrierStateMutex.lock();
primary->state = 0X00;
// Fill attitude data & command data
primary->roll = targetPos[0];
primary->pitch = targetPos[1];
primary->yaw = targetPos[2];
primary->state |= (0X01 << 2);
temp.otherData[0] = cmd;
memcpy(temp.otherData + 1, pPayload, payloadSize);
// Fill the fixed header bytes
temp.head.u16 = XF_SEND_HEAD;
temp.version = 0X01;
primary->secondaryFlag = 0X01;
temp.lenght.u16 = 69 + payloadSize + 1 + 2;
// Fill INS (inertial navigation) data
std::chrono::milliseconds nowTs = std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::system_clock::now().time_since_epoch());
// Attitude/GNSS data older than 1.5 s is treated as lost
if ((nowTs.count() - upDataTs.count()) < std::chrono::milliseconds(1500).count())
{
primary->selfRoll = (int16_t)(-(carrierPos.roll / 0.01f));
primary->selfPitch = (int16_t)(-(carrierPos.pitch / 0.01f));
primary->selfYaw = (int16_t)(carrierPos.yaw / 0.01f);
primary->accE = (int16_t)(carrierAcc.y / 0.01f);
primary->accN = (int16_t)(carrierAcc.x / 0.01f);
primary->accUp = (int16_t)(carrierAcc.z / 0.01f);
primary->speedE = (int16_t)(carrierSpeed.y / 0.01f);
primary->speedN = (int16_t)(carrierSpeed.x / 0.01f);
primary->speedUp = (int16_t)(carrierSpeed.z / 0.01f);
carrierGNSS.GPSweeks = ((nowTs.count() / 1000) - 315964800) / 604800;
carrierGNSS.GPSms = nowTs.count() - (carrierGNSS.GPSweeks * 604800000);
memcpy(temp.secondaryData, &carrierGNSS, sizeof(GX40::GIMBAL_SECONDARY_MASTER_FRAME_T));
primary->state |= (0X01 << 0);
}
else
{
primary->state &= (~(0X01 << 0));
}
carrierStateMutex.unlock();
// Checksum (CRC16)
*(uint16_t *)(&temp.otherData[payloadSize + 1]) = GX40::CalculateCrc16((uint8_t *)&temp, 69 + 1 + payloadSize);
// Push the frame onto the send queue
if (txQueue->inCell(&temp))
{
ret = temp.lenght.u16;
}
return ret;
}
/**
* The function `convert` takes a buffer and extracts data from it to update the state of a gimbal
* driver.
*
* @param buf The `buf` parameter is a void pointer that points to a buffer containing data that needs
* to be converted.
*/
void GX40GimbalDriver::convert(void *buf)
{
GX40::GIMBAL_FRAME_T *temp;
GX40::GIMBAL_PRIMARY_SLAVE_FRAME_T *primary;
GX40::GIMBAL_SECONDARY_SLAVE_FRAME_T *secondary;
temp = reinterpret_cast<GX40::GIMBAL_FRAME_T *>(buf);
primary = (GX40::GIMBAL_PRIMARY_SLAVE_FRAME_T *)temp->primaryData;
secondary = (GX40::GIMBAL_SECONDARY_SLAVE_FRAME_T *)temp->secondaryData;
mState.lock();
this->state.workMode = (amovGimbal::AMOV_GIMBAL_SERVO_MODE_T)primary->workMode;
this->state.cameraFlag = (amovGimbal::AMOV_GIMBAL_CAMERA_FLAG_T)primary->state;
// These raw values probably need further resolving to obtain the actual frame angles
this->state.rel.yaw = 360.0f - (primary->motorYaw * XF_ANGLE_DPI);
this->state.rel.pitch = -(primary->motorPitch * XF_ANGLE_DPI);
this->state.rel.roll = -(primary->motorRoll * XF_ANGLE_DPI);
this->state.abs.yaw = (primary->yaw * XF_ANGLE_DPI);
this->state.abs.yaw = this->state.abs.yaw > 180.0f ? this->state.abs.yaw - 360.0f : this->state.abs.yaw;
this->state.abs.pitch = -(primary->pitch * XF_ANGLE_DPI);
this->state.abs.roll = -(primary->roll * XF_ANGLE_DPI);
this->state.relSpeed.yaw = -(primary->speedYaw * XF_ANGLE_DPI);
this->state.relSpeed.pitch = -(primary->speedPitch * XF_ANGLE_DPI);
this->state.relSpeed.roll = -(primary->speedRoll * XF_ANGLE_DPI);
// Approximate values, not accurate
this->state.fov.x = secondary->camera1Zoom * 0.1f;
this->state.fov.x = 60.2f / this->state.fov.x;
this->state.fov.y = secondary->camera1Zoom * 0.1f;
this->state.fov.y = 36.1f / this->state.fov.y;
updateGimbalStateCallback(state.rel.roll, state.rel.pitch, state.rel.yaw,
state.abs.roll, state.abs.pitch, state.abs.yaw,
state.fov.x, state.fov.y);
mState.unlock();
}
/**
* The function returns the total length of a data packet as recorded in the frame's length field.
*
* @param pack The parameter "pack" is a void pointer, which means it can point to any type of data. In
* this case, it is expected to point to a structure of type "GX40::GIMBAL_FRAME_T".
*
* @return the total frame length in bytes, taken from the frame's `lenght` field.
*/
uint32_t GX40GimbalDriver::calPackLen(void *pack)
{
return ((GX40::GIMBAL_FRAME_T *)pack)->lenght.u16;
}
/**
* The function `parser` is used to parse incoming data frames in a specific format and returns a
* boolean value indicating whether the parsing was successful or not.
*
* @param byte The parameter `byte` is of type `uint8_t`, which is an unsigned 8-bit integer. It is
* used to store a single byte of data that is being parsed by the `GX40GimbalDriver::parser` function.
*
* @return a boolean value, either true or false.
*/
#include <stdio.h>
bool GX40GimbalDriver::parser(IN uint8_t byte)
{
bool state = false;
static uint8_t payloadLenght = 0;
static uint8_t *pRx = nullptr;
switch (parserState)
{
case GX40::GIMBAL_FRAME_PARSER_STATE_IDLE:
if (byte == ((XF_RCV_HEAD >> 8) & 0XFF))
{
rx.head.u8[0] = byte;
parserState = GX40::GIMBAL_FRAME_PARSER_STATE_HEAD;
}
break;
case GX40::GIMBAL_FRAME_PARSER_STATE_HEAD:
if (byte == ((XF_RCV_HEAD >> 0) & 0XFF))
{
rx.head.u8[1] = byte;
parserState = GX40::GIMBAL_FRAME_PARSER_STATE_LEN1;
}
else
{
parserState = GX40::GIMBAL_FRAME_PARSER_STATE_IDLE;
rx.head.u16 = 0;
}
break;
case GX40::GIMBAL_FRAME_PARSER_STATE_LEN1:
rx.lenght.u8[0] = byte;
parserState = GX40::GIMBAL_FRAME_PARSER_STATE_LEN2;
break;
case GX40::GIMBAL_FRAME_PARSER_STATE_LEN2:
rx.lenght.u8[1] = byte;
parserState = GX40::GIMBAL_FRAME_PARSER_STATE_VERSION;
break;
case GX40::GIMBAL_FRAME_PARSER_STATE_VERSION:
if (byte == XF_VERSION)
{
rx.version = byte;
parserState = GX40::GIMBAL_FRAME_PARSER_STATE_PAYLOAD;
pRx = rx.primaryData;
payloadLenght = rx.lenght.u16 - 5;
}
else
{
parserState = GX40::GIMBAL_FRAME_PARSER_STATE_IDLE;
rx.head.u16 = 0;
rx.lenght.u16 = 0;
}
break;
case GX40::GIMBAL_FRAME_PARSER_STATE_PAYLOAD:
*pRx = byte;
payloadLenght--;
pRx++;
if (payloadLenght <= 0)
{
if (*(uint16_t *)(pRx - sizeof(uint16_t)) == GX40::CalculateCrc16((uint8_t *)&rx, rx.lenght.u16 - 2))
{
state = true;
rxQueue->inCell(&rx);
}
else
{
memset(&rx, 0, sizeof(GX40::GIMBAL_FRAME_T));
}
parserState = GX40::GIMBAL_FRAME_PARSER_STATE_IDLE;
pRx = nullptr;
payloadLenght = 0;
}
break;
default:
parserState = GX40::GIMBAL_FRAME_PARSER_STATE_IDLE;
pRx = nullptr;
payloadLenght = 0;
break;
}
return state;
}
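A short sketch of how the state machine above is meant to be driven (my addition, not part of the commit): bytes are pushed in one at a time, and `parser()` returns true exactly when a CRC-valid frame has been copied into `rxQueue`.
// Hypothetical harness: replay a captured byte buffer through the parser.
void feedCapturedBytes(GX40GimbalDriver &drv, const uint8_t *buf, uint32_t len)
{
    for (uint32_t i = 0; i < len; i++)
    {
        if (drv.parser(buf[i]))
        {
            // a complete GX40 frame is now queued, ready for convert()
        }
    }
}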

View File

@ -0,0 +1,88 @@
/*
* @Description:
* @Author: L LC @amov
* @Date: 2023-10-20 16:08:13
* @LastEditors: L LC @amov
* @LastEditTime: 2023-11-16 17:23:27
* @FilePath: /gimbal-sdk-multi-platform/src/GX40/GX40_gimbal_driver.h
*/
#include "../amov_gimbal.h"
#include "../amov_gimbal_private.h"
#include "GX40_gimbal_struct.h"
#include <mutex>
#include <malloc.h>
#include <iostream>
#include <chrono>
#include <time.h>
#ifndef __GX40_DRIVER_H
#define __GX40_DRIVER_H
class GX40GimbalDriver : public amovGimbal::amovGimbalBase
{
GX40::GIMBAL_FRAME_PARSER_STATE_T parserState;
GX40::GIMBAL_FRAME_T rx;
std::chrono::milliseconds upDataTs;
std::mutex carrierStateMutex;
int16_t targetPos[3];
amovGimbal::AMOV_GIMBAL_POS_T carrierPos;
amovGimbal::AMOV_GIMBAL_VELOCITY_T carrierSpeed;
amovGimbal::AMOV_GIMBAL_VELOCITY_T carrierAcc;
GX40::GIMBAL_SECONDARY_MASTER_FRAME_T carrierGNSS;
std::thread::native_handle_type nopSendThreadHandle;
void nopSend(void);
void parserStart(amovGimbal::pStateInvoke callback);
public:
uint32_t pack(IN uint32_t cmd, uint8_t *pPayload, uint8_t payloadSize);
bool parser(IN uint8_t byte);
void convert(void *buf);
uint32_t calPackLen(void *pack);
// functions
uint32_t setGimabalPos(const amovGimbal::AMOV_GIMBAL_POS_T &pos);
uint32_t setGimabalSpeed(const amovGimbal::AMOV_GIMBAL_POS_T &speed);
uint32_t setGimabalHome(void);
uint32_t setGimbalZoom(amovGimbal::AMOV_GIMBAL_ZOOM_T zoom, float targetRate = 0);
uint32_t setGimbalFocus(amovGimbal::AMOV_GIMBAL_ZOOM_T zoom, float targetRate = 0);
uint32_t takePic(void);
uint32_t setVideo(const amovGimbal::AMOV_GIMBAL_VIDEO_T newState);
uint32_t attitudeCorrection(const amovGimbal::AMOV_GIMBAL_POS_T &pos,
const amovGimbal::AMOV_GIMBAL_VELOCITY_T &seppd,
const amovGimbal::AMOV_GIMBAL_VELOCITY_T &acc,
void *extenData);
uint32_t setGNSSInfo(float lng, float lat, float alt, uint32_t nState, float relAlt);
uint32_t extensionFuntions(void *cmd);
static amovGimbal::amovGimbalBase *creat(amovGimbal::IOStreamBase *_IO)
{
return new GX40GimbalDriver(_IO);
}
GX40GimbalDriver(amovGimbal::IOStreamBase *_IO);
~GX40GimbalDriver()
{
if (txQueue != nullptr)
{
delete txQueue;
}
if (rxQueue != nullptr)
{
delete rxQueue;
}
pthread_setcanceltype(PTHREAD_CANCEL_ASYNCHRONOUS, NULL);
parserThreadHanle = parserThreadHanle == 0 ? 0 : pthread_cancel(parserThreadHanle);
sendThreadHanle = sendThreadHanle == 0 ? 0 : pthread_cancel(sendThreadHanle);
stackThreadHanle = stackThreadHanle == 0 ? 0 : pthread_cancel(stackThreadHanle);
nopSendThreadHandle = nopSendThreadHandle == 0 ? 0 : pthread_cancel(nopSendThreadHandle);
}
};
#endif

View File

@ -0,0 +1,251 @@
/*
* @Description:
* @Author: L LC @amov
* @Date: 2023-11-02 17:50:26
* @LastEditors: L LC @amov
* @LastEditTime: 2023-11-16 17:23:07
* @FilePath: /gimbal-sdk-multi-platform/src/GX40/GX40_gimbal_funtion.cpp
*/
#include <string.h>
#include "GX40_gimbal_driver.h"
/**
* The function sets the target position of a gimbal based on the input roll, pitch, and yaw values.
*
* @param pos The parameter "pos" is of type "amovGimbal::AMOV_GIMBAL_POS_T". It is a structure that
* contains the roll, pitch, and yaw values of the gimbal position.
*
* @return a packed value of type uint32_t.
*/
uint32_t GX40GimbalDriver::setGimabalPos(const amovGimbal::AMOV_GIMBAL_POS_T &pos)
{
carrierStateMutex.lock();
targetPos[0] = (int16_t)(-pos.roll / 0.01f);
targetPos[1] = (int16_t)(-pos.pitch / 0.01f);
targetPos[2] = (int16_t)(-pos.yaw / 0.01f);
carrierStateMutex.unlock();
return pack(GX40::GIMBAL_CMD_MODE_EULER, nullptr, 0);
}
/**
* The function sets the gimbal speed based on the provided roll, pitch, and yaw values.
*
* @param speed The parameter "speed" is of type "amovGimbal::AMOV_GIMBAL_POS_T". It is a structure
* that contains the roll, pitch, and yaw values of the gimbal speed.
*
* @return the result of the pack() function, which is of type uint32_t.
*/
uint32_t GX40GimbalDriver::setGimabalSpeed(const amovGimbal::AMOV_GIMBAL_POS_T &speed)
{
carrierStateMutex.lock();
targetPos[0] = (int16_t)(-speed.roll / 0.1f);
targetPos[1] = (int16_t)(-speed.pitch / 0.1f);
targetPos[2] = (int16_t)(-speed.yaw / 0.1f);
carrierStateMutex.unlock();
return pack(GX40::GIMBAL_CMD_MODE_FOLLOW, nullptr, 0);
}
/**
* The function sets the gimbal's home position to (0, 0, 0) and sends a command to the gimbal to go to
* the home position.
*
* @return the result of the pack() function call with the arguments GX40::GIMBAL_CMD_HOME, nullptr, and
* 0.
*/
uint32_t GX40GimbalDriver::setGimabalHome(void)
{
carrierStateMutex.lock();
targetPos[0] = 0;
targetPos[1] = 0;
targetPos[2] = 0;
carrierStateMutex.unlock();
pack(GX40::GIMBAL_CMD_MODE_FOLLOW, nullptr, 0);
return pack(GX40::GIMBAL_CMD_HOME, nullptr, 0);
}
/**
* The function `takePic` in the `GX40GimbalDriver` class takes a picture using the GX40 gimbal and
* returns the packed command.
*
* @return a uint32_t value.
*/
uint32_t GX40GimbalDriver::takePic(void)
{
uint8_t temp = 0X01;
return pack(GX40::GIMBAL_CMD_TAKEPIC, &temp, 1);
}
/**
* The function `setVideo` toggles the video state of a gimbal driver and returns a packed command.
*
* @param newState The parameter `newState` is of type `amovGimbal::AMOV_GIMBAL_VIDEO_T`, which is an
* enumeration representing the state of the video in the gimbal. It can have two possible values:
*
* @return the result of the `pack` function, which is a `uint32_t` value.
*/
uint32_t GX40GimbalDriver::setVideo(const amovGimbal::AMOV_GIMBAL_VIDEO_T newState)
{
uint8_t temp = 0X01;
mState.lock();
if (state.video == amovGimbal::AMOV_GIMBAL_VIDEO_TAKE)
{
state.video = amovGimbal::AMOV_GIMBAL_VIDEO_OFF;
}
else
{
state.video = amovGimbal::AMOV_GIMBAL_VIDEO_TAKE;
}
mState.unlock();
return pack(GX40::GIMBAL_CMD_TAKEPIC, &temp, 1);
}
/**
* The function `attitudeCorrection` updates the state of a gimbal driver with position, velocity, and
* acceleration data.
*
* @param pos The "pos" parameter is of type "amovGimbal::AMOV_GIMBAL_POS_T" and represents the current
* position of the gimbal. It likely contains information such as the pitch, yaw, and roll angles of
* the gimbal.
* @param seppd The parameter `seppd` represents the velocity of the carrier (N/E/Up components). It is of
* type `amovGimbal::AMOV_GIMBAL_VELOCITY_T`.
* @param acc The "acc" parameter is of type "amovGimbal::AMOV_GIMBAL_VELOCITY_T" and represents the
* acceleration of the gimbal.
* @param extenData The extenData parameter is a pointer to additional data that can be passed to the
* attitudeCorrection function. It can be used to provide any extra information or context that may be
* needed for the attitude correction calculation. The specific type and structure of the extenData is
* not provided in the code snippet,
*
* @return the size of the data being passed as arguments. The size is calculated by adding the sizes
* of the three types: sizeof(amovGimbal::AMOV_GIMBAL_POS_T),
* sizeof(amovGimbal::AMOV_GIMBAL_VELOCITY_T), and sizeof(amovGimbal::AMOV_GIMBAL_VELOCITY_T).
*/
uint32_t GX40GimbalDriver::attitudeCorrection(const amovGimbal::AMOV_GIMBAL_POS_T &pos,
const amovGimbal::AMOV_GIMBAL_VELOCITY_T &seppd,
const amovGimbal::AMOV_GIMBAL_VELOCITY_T &acc,
void *extenData)
{
carrierStateMutex.lock();
carrierPos = pos;
carrierSpeed = seppd;
carrierAcc = acc;
upDataTs = std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::system_clock::now().time_since_epoch());
carrierStateMutex.unlock();
return sizeof(amovGimbal::AMOV_GIMBAL_POS_T) + sizeof(amovGimbal::AMOV_GIMBAL_VELOCITY_T) + sizeof(amovGimbal::AMOV_GIMBAL_VELOCITY_T);
}
/**
* The function `extensionFuntions` takes a command as input, converts it to a specific format, and
* returns a 32-bit unsigned integer.
*
* @param cmd The parameter "cmd" is a void pointer, which means it can point to any type of data. In
* this case, it is being cast to a uint8_t pointer, which means it is expected to point to an array of
* uint8_t (8-bit unsigned integers).
*
* @return a value of type uint32_t.
*/
uint32_t GX40GimbalDriver::extensionFuntions(void *cmd)
{
uint8_t *temp = (uint8_t *)cmd;
return pack(temp[0], &temp[2], temp[1]);
}
/**
* The function `setGimbalZoom` in the `GX40GimbalDriver` class sets the zoom level of a gimbal based on
* the specified zoom type and target rate.
*
* @param zoom The "zoom" parameter is of type amovGimbal::AMOV_GIMBAL_ZOOM_T, which is an enumeration
* type. It represents the zoom action to be performed on the gimbal. The possible values for this
* parameter are:
* @param targetRate The targetRate parameter is a float value representing the desired zoom rate for
* the gimbal. It is used to control the zoom functionality of the gimbal.
*
* @return a value of type uint32_t.
*/
uint32_t GX40GimbalDriver::setGimbalZoom(amovGimbal::AMOV_GIMBAL_ZOOM_T zoom, float targetRate)
{
uint8_t temp[4];
uint8_t len = 0;
temp[1] = 0X01;
if (targetRate == 0.0f)
{
len = 1;
switch (zoom)
{
case amovGimbal::AMOV_GIMBAL_ZOOM_IN:
temp[0] = GX40::GIMBAL_CMD_ZOMM_IN;
break;
case amovGimbal::AMOV_GIMBAL_ZOOM_OUT:
temp[0] = GX40::GIMBAL_CMD_ZOOM_OUT;
break;
case amovGimbal::AMOV_GIMBAL_ZOOM_STOP:
temp[0] = GX40::GIMBAL_CMD_ZOOM_STOP;
break;
}
}
else
{
len = 3;
temp[0] = GX40::GIMBAL_CMD_ZOOM;
int16_t targetTemp = (int16_t)(-targetRate / 0.1f);
temp[2] = (targetTemp >> 0) & 0XFF;
temp[3] = (targetTemp >> 8) & 0XFF;
}
return pack(temp[0], &temp[1], len);
}
/**
* The function "setGimbalFocus" sets the focus of a gimbal by specifying the zoom level and target
* rate.
*
* @param zoom The zoom parameter is of type amovGimbal::AMOV_GIMBAL_ZOOM_T, which is an enumeration
* type representing different zoom levels for the gimbal. It is used to specify the desired zoom level
* for the gimbal focus.
* @param targetRate The targetRate parameter is a float value representing the desired zoom rate for
* the gimbal.
*
* @return the result of the pack() function, which is of type uint32_t.
*/
uint32_t GX40GimbalDriver::setGimbalFocus(amovGimbal::AMOV_GIMBAL_ZOOM_T zoom, float targetRate)
{
uint8_t temp = 0X01;
return pack(GX40::GIMBAL_CMD_FOCUE, &temp, 1);
}
/**
* The function sets the GNSS information in the carrierGNSS struct and returns the size of the struct.
*
* @param lng The "lng" parameter represents the longitude value of the GNSS (Global Navigation
* Satellite System) information.
* @param lat The "lat" parameter represents the latitude value of the GNSS (Global Navigation
* Satellite System) information.
* @param alt The "alt" parameter represents the altitude value in meters.
* @param nState The parameter "nState" represents the state of the GNSS (Global Navigation Satellite
* System) information. It is of type uint32_t, which means it is an unsigned 32-bit integer. The
* specific values and their meanings for the "nState" parameter are not provided in the code snippet
* @param relAlt Relative altitude of the carrier (in meters)
*
* @return the size of the structure GX40::GIMBAL_SECONDARY_MASTER_FRAME_T.
*/
uint32_t GX40GimbalDriver::setGNSSInfo(float lng, float lat, float alt, uint32_t nState, float relAlt)
{
carrierStateMutex.lock();
carrierGNSS.head = 0X01;
carrierGNSS.lng = lng;
carrierGNSS.lat = lat;
carrierGNSS.alt = alt;
carrierGNSS.relAlt = relAlt;
carrierGNSS.nState = nState;
carrierStateMutex.unlock();
return sizeof(GX40::GIMBAL_SECONDARY_MASTER_FRAME_T);
}
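From `pack(temp[0], &temp[2], temp[1])` above, the raw buffer handed to `extensionFuntions()` is laid out as: byte [0] command code, byte [1] payload length, bytes [2..] payload. A hedged usage sketch of my own (not part of the commit); the 0x01 payload value is an assumption:
#include "GX40_gimbal_driver.h"
// Hypothetical example: request a laser ranging measurement via the extension interface.
static uint32_t requestLaserDistance(GX40GimbalDriver *driver)
{
    // [0] = command, [1] = payload length, [2..] = payload (value assumed)
    uint8_t raw[3] = {(uint8_t)GX40::GIMBAL_CMD_TAKE_DISTANCE, 0x01, 0x01};
    return driver->extensionFuntions(raw);
}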

View File

@ -0,0 +1,154 @@
/*
* @Description:
* @Author: L LC @amov
* @Date: 2023-10-20 16:08:13
* @LastEditors: L LC @amov
* @LastEditTime: 2023-11-16 17:27:05
* @FilePath: /gimbal-sdk-multi-platform/src/GX40/GX40_gimbal_struct.h
*/
#ifndef GX40_GIMBAL_STRUCT_H
#define GX40_GIMBAL_STRUCT_H
#include <stdint.h>
namespace GX40
{
#define XF_SEND_HEAD 0XE5A8
#define XF_RCV_HEAD 0X8A5E
#define XF_VERSION 0X00
#define XF_ANGLE_DPI 0.01f
typedef enum
{
GIMBAL_FRAME_PARSER_STATE_IDLE,
GIMBAL_FRAME_PARSER_STATE_HEAD,
GIMBAL_FRAME_PARSER_STATE_LEN1,
GIMBAL_FRAME_PARSER_STATE_LEN2,
GIMBAL_FRAME_PARSER_STATE_VERSION,
GIMBAL_FRAME_PARSER_STATE_PAYLOAD,
} GIMBAL_FRAME_PARSER_STATE_T;
typedef enum
{
GIMBAL_CMD_NOP = 0X00,
GIMBAL_CMD_CAL = 0X01,
GIMBAL_CMD_HOME = 0X03,
GIMBAL_CMD_MODE_FPV = 0X10,
GIMBAL_CMD_MODE_LOCK = 0X11,
GIMBAL_CMD_MODE_FOLLOW = 0X12,
GIMBAL_CMD_MODE_OVERLOCK = 0X13,
GIMBAL_CMD_MODE_EULER = 0X14,
GIMBAL_CMD_MODE_WATCH_POS = 0X15,
GIMBAL_CMD_MODE_WATCH = 0X16,
GIMBAL_CMD_MODE_TRACK = 0X17,
GIMBAL_CMD_MODE_MOVE = 0X1A,
GIMBAL_CMD_MODE_MOVE_TRACK = 0X1B,
GIMBAL_CMD_TAKEPIC = 0X20,
GIMBAL_CMD_TAKEVIDEO = 0X21,
GIMBAL_CMD_ZOOM_OUT = 0X22,
GIMBAL_CMD_ZOMM_IN = 0X23,
GIMBAL_CMD_ZOOM_STOP = 0X24,
GIMBAL_CMD_ZOOM = 0X25,
GIMBAL_CMD_FOCUE = 0X26,
GIMBAL_CMD_VIDEO_MODE = 0X2A,
GIMBAL_CMD_NIGHT = 0X2B,
GIMBAL_CMD_OSD = 0X73,
GIMBAL_CMD_FIX_MODE = 0X74,
GIMBAL_CMD_LIGHT = 0X80,
GIMBAL_CMD_TAKE_DISTANCE = 0X81,
} GIMBAL_CMD_T;
#pragma pack(1)
typedef struct
{
union
{
uint8_t u8[2];
uint16_t u16;
} head;
union
{
uint8_t u8[2];
uint16_t u16;
} lenght;
uint8_t version;
uint8_t primaryData[32];
uint8_t secondaryData[32];
uint8_t otherData[32];
union
{
uint8_t u8[2];
uint16_t u16;
} crc16;
} GIMBAL_FRAME_T;
typedef struct
{
int16_t roll;
int16_t pitch;
int16_t yaw;
uint8_t state;
int16_t selfRoll;
int16_t selfPitch;
uint16_t selfYaw;
int16_t accN;
int16_t accE;
int16_t accUp;
int16_t speedN;
int16_t speedE;
int16_t speedUp;
uint8_t secondaryFlag;
uint8_t reserve[6];
} GIMBAL_PRIMARY_MASTER_FRAME_T;
typedef struct
{
uint8_t workMode;
uint16_t state;
int16_t offsetX;
int16_t offsetY;
uint16_t motorRoll;
uint16_t motorPitch;
uint16_t motorYaw;
int16_t roll;
int16_t pitch;
uint16_t yaw;
int16_t speedRoll;
int16_t speedPitch;
int16_t speedYaw;
uint8_t reserve[7];
} GIMBAL_PRIMARY_SLAVE_FRAME_T;
typedef struct
{
uint8_t head;
int32_t lng;
int32_t lat;
int32_t alt;
uint8_t nState;
uint32_t GPSms;
int32_t GPSweeks;
int32_t relAlt;
uint8_t reserve[8];
} GIMBAL_SECONDARY_MASTER_FRAME_T;
typedef struct
{
uint8_t head;
uint8_t versionHW;
uint8_t versionSoft;
uint8_t type;
uint16_t error;
int32_t targetDistance;
int32_t targetLng;
int32_t targetLat;
int32_t targetAlt;
uint16_t camera1Zoom;
uint16_t camera2Zoom;
uint8_t reserve[6];
} GIMBAL_SECONDARY_SLAVE_FRAME_T;
#pragma pack()
}
#endif
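Because of `#pragma pack(1)`, the wire layout can be verified at compile time: `otherData` starts at byte 69 (2+2+1+32+32) and the whole frame is 103 bytes, which is what the driver's `pack()` relies on. A small compile-time check of my own, not part of the commit:
#include <cstddef>
#include "GX40_gimbal_struct.h"
static_assert(offsetof(GX40::GIMBAL_FRAME_T, otherData) == 69,
              "GX40 frame: otherData must start at byte 69 (2+2+1+32+32)");
static_assert(sizeof(GX40::GIMBAL_FRAME_T) == 103,
              "GX40 frame: packed size must be 103 bytes");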

View File

@ -3,15 +3,15 @@
* @Author: L LC @amov
* @Date: 2022-10-28 11:54:11
* @LastEditors: L LC @amov
* @LastEditTime: 2023-08-17 11:57:11
* @FilePath: /gimbal-sdk-multi-platform/src/amov_gimbal.cpp
* @LastEditTime: 2023-11-16 17:27:28
* @FilePath: gimbal_ctrl/driver/src/amov_gimbal.cpp
*/
// #include "amov_gimbal.h"
#include "amov_gimbal_private.h"
#include "g1_gimbal_driver.h"
#include "Q10f_gimbal_driver.h"
#include "AT10_gimbal_driver.h"
#include "GX40_gimbal_driver.h"
#include <iostream>
#include <thread>
@ -19,30 +19,18 @@
#include <iterator>
#define MAX_PACK_SIZE 280
typedef enum
{
AMOV_GIMBAL_TYPE_NULL,
AMOV_GIMBAL_TYPE_G1 = 1,
AMOV_GIMBAL_TYPE_G2,
AMOV_GIMBAL_TYPE_Q10,
AMOV_GIMBAL_TYPE_AT10,
} AMOV_GIMBAL_TYPE_T;
namespace amovGimbal
{
typedef amovGimbal::amovGimbalBase *(*createCallback)(amovGimbal::IOStreamBase *_IO);
typedef std::map<std::string, createCallback> callbackMap;
std::map<std::string, AMOV_GIMBAL_TYPE_T> amovGimbalTypeList =
{
{"G1", AMOV_GIMBAL_TYPE_G1},
{"Q10f", AMOV_GIMBAL_TYPE_Q10},
{"AT10", AMOV_GIMBAL_TYPE_AT10}};
callbackMap amovGimbals =
{
{"G1", g1GimbalDriver::creat},
{"Q10f", Q10fGimbalDriver::creat},
{"AT10", AT10GimbalDriver::creat}};
{"AT10", AT10GimbalDriver::creat},
{"GX40", GX40GimbalDriver::creat}};
}
/* The amovGimbalCreator class is a factory class that creates an instance of the amovGimbal class */
@ -133,6 +121,11 @@ void amovGimbal::amovGimbalBase::send(void)
if (state)
{
IO->outPutBytes((uint8_t *)&tempBuffer, calPackLen(tempBuffer));
// printf("\r\n");
// for(uint16_t i =0 ; i< calPackLen(tempBuffer);i++)
// {
// printf("%02X ",tempBuffer[i]);
// }
}
}
}
@ -186,6 +179,10 @@ void amovGimbal::amovGimbalBase::stackStart(void)
}
std::thread parserLoop(&amovGimbalBase::parserLoop, this);
std::thread sendLoop(&amovGimbalBase::sendLoop, this);
this->sendThreadHanle = sendLoop.native_handle();
this->parserThreadHanle = parserLoop.native_handle();
parserLoop.detach();
sendLoop.detach();
}
@ -203,12 +200,19 @@ void amovGimbal::gimbal::setParserCallback(amovGimbal::pStateInvoke callback)
((amovGimbalBase *)(this->dev))->updateGimbalStateCallback = callback;
}
void amovGimbal::amovGimbalBase::nodeSet(SET uint32_t _self, SET uint32_t _remote)
{
return;
}
void amovGimbal::amovGimbalBase::parserStart(pStateInvoke callback)
{
this->updateGimbalStateCallback = callback;
std::thread mainLoop(&amovGimbalBase::mainLoop, this);
this->stackThreadHanle = mainLoop.native_handle();
mainLoop.detach();
}
/**
@ -235,11 +239,6 @@ amovGimbal::AMOV_GIMBAL_STATE_T amovGimbal::gimbal::getGimabalState(void)
/**
* Default implementation of interface functions, not pure virtual functions for ease of extension.
*/
void amovGimbal::IamovGimbalBase::nodeSet(SET uint32_t _self, SET uint32_t _remote)
{
return;
}
uint32_t amovGimbal::IamovGimbalBase::setGimabalPos(const amovGimbal::AMOV_GIMBAL_POS_T &pos)
{
return 0;
@ -300,6 +299,36 @@ uint32_t amovGimbal::IamovGimbalBase::extensionFuntions(void *cmd)
return 0;
}
uint32_t amovGimbal::IamovGimbalBase::setGNSSInfo(float lng, float lat, float alt, uint32_t nState, float relAlt)
{
return 0;
}
bool amovGimbal::IamovGimbalBase::setGimbalPosBlock(const amovGimbal::AMOV_GIMBAL_POS_T &pos)
{
return false;
}
bool amovGimbal::IamovGimbalBase::setGimabalHomeBlock(void)
{
return false;
}
bool amovGimbal::IamovGimbalBase::setGimbalZoomBlock(float targetRate)
{
return false;
}
bool amovGimbal::IamovGimbalBase::takePicBlock(void)
{
return false;
}
bool amovGimbal::IamovGimbalBase::calibrationBlock(void)
{
return false;
}
/**
* The function creates a new gimbal object, which is a pointer to a new amovGimbal object, which is a
* pointer to a new Gimbal object, which is a pointer to a new IOStreamBase object
@ -317,7 +346,7 @@ amovGimbal::gimbal::gimbal(const std::string &type, IOStreamBase *_IO,
dev = amovGimbalCreator::createAmovGimbal(typeName, IO);
dev->nodeSet(_self, _remote);
((amovGimbalBase *)(dev))->nodeSet(_self, _remote);
}
amovGimbal::gimbal::~gimbal()
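With the "GX40" entry added to the factory map, a GX40 device is created exactly like the existing gimbal types. A hedged usage sketch of my own (not from the commit): `userIO` stands for a user-implemented `IOStreamBase`, the callback is assumed to match `amovGimbal::pStateInvoke`, and the node-id arguments are placeholder values.
#include "amov_gimbal.h"
extern amovGimbal::IOStreamBase *userIO;        // user serial/UDP stream (assumed to exist)
extern amovGimbal::pStateInvoke onGimbalState;  // user state callback (assumed to exist)
void createGx40(void)
{
    amovGimbal::gimbal dev("GX40", userIO, 0x02, 0x80); // node ids: placeholder values
    dev.setParserCallback(onGimbalState);
}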

View File

@ -3,8 +3,8 @@
* @Author: L LC @amov
* @Date: 2022-10-27 18:34:26
* @LastEditors: L LC @amov
* @LastEditTime: 2023-08-16 22:21:28
* @FilePath: /gimbal-sdk-multi-platform/src/amov_gimbal.h
* @LastEditTime: 2023-11-14 13:57:33
* @FilePath: gimbal_ctrl/driver/src/amov_gimbal.h
*/
#ifndef AMOV_GIMBAL_H
@ -32,7 +32,6 @@ namespace amovGimbal
{
}
// Control data input and output
class IOStreamBase
{
@ -55,11 +54,12 @@ namespace amovGimbal
public:
IamovGimbalBase() {}
virtual ~IamovGimbalBase() {}
// functions
virtual void nodeSet(SET uint32_t _self, SET uint32_t _remote);
// non-block functions
virtual uint32_t setGimabalPos(const AMOV_GIMBAL_POS_T &pos);
virtual uint32_t setGimabalSpeed(const AMOV_GIMBAL_POS_T &speed);
virtual uint32_t setGimabalFollowSpeed(const AMOV_GIMBAL_POS_T &followSpeed);
virtual uint32_t setGimabalHome(void);
virtual uint32_t setGimbalZoom(AMOV_GIMBAL_ZOOM_T zoom, float targetRate = 0);
virtual uint32_t setGimbalFocus(AMOV_GIMBAL_ZOOM_T zoom, float targetRate = 0);
@ -68,7 +68,15 @@ namespace amovGimbal
virtual uint32_t setVideo(const AMOV_GIMBAL_VIDEO_T newState);
virtual uint32_t attitudeCorrection(const AMOV_GIMBAL_QUATERNION_T &quaterion, const AMOV_GIMBAL_VELOCITY_T &speed, const AMOV_GIMBAL_VELOCITY_T &acc, void *extenData);
virtual uint32_t attitudeCorrection(const AMOV_GIMBAL_POS_T &pos, const AMOV_GIMBAL_VELOCITY_T &seppd, const AMOV_GIMBAL_VELOCITY_T &acc, void *extenData);
virtual uint32_t setGNSSInfo(float lng, float lat, float alt, uint32_t nState, float relAlt);
virtual uint32_t extensionFuntions(void *cmd);
// block functions
virtual bool setGimbalPosBlock(const AMOV_GIMBAL_POS_T &pos);
virtual bool setGimabalHomeBlock(void);
virtual bool setGimbalZoomBlock(float targetRate);
virtual bool takePicBlock(void);
virtual bool calibrationBlock(void);
};
class gimbal

View File

@ -3,8 +3,8 @@
* @Author : Aiyangsky
* @Date : 2023-05-13 10:39:20
* @LastEditors: L LC @amov
* @LastEditTime: 2023-08-16 22:30:53
* @FilePath: /gimbal-sdk-multi-platform/src/amov_gimbal_private.h
* @LastEditTime: 2023-11-14 10:35:56
* @FilePath: gimbal_ctrl/driver/src/amov_gimbal_private.h
*/
#ifndef __AMOV_GIMABL_PRIVATE_H
#define __AMOV_GIMABL_PRIVATE_H
@ -35,6 +35,10 @@ namespace amovGimbal
fifoRing *rxQueue;
fifoRing *txQueue;
std::thread::native_handle_type parserThreadHanle = 0;
std::thread::native_handle_type sendThreadHanle = 0;
std::thread::native_handle_type stackThreadHanle = 0;
PamovGimbalBase(SET IOStreamBase *_IO)
{
IO = _IO;
@ -49,6 +53,11 @@ namespace amovGimbal
{
delete rxQueue;
}
// allow the worker threads to be cancelled at any point
pthread_setcanceltype(PTHREAD_CANCEL_ASYNCHRONOUS, NULL);
parserThreadHanle = parserThreadHanle == 0 ? 0 : pthread_cancel(parserThreadHanle);
sendThreadHanle = sendThreadHanle == 0 ? 0 : pthread_cancel(sendThreadHanle);
stackThreadHanle = stackThreadHanle == 0 ? 0 : pthread_cancel(stackThreadHanle);
}
};
@ -70,6 +79,8 @@ namespace amovGimbal
virtual void stackStart(void);
virtual void parserStart(amovGimbal::pStateInvoke callback);
virtual void nodeSet(SET uint32_t _self, SET uint32_t _remote);
public:
amovGimbalBase(IOStreamBase *_IO);
virtual ~amovGimbalBase();

View File

@ -3,8 +3,8 @@
* @Author: L LC @amov
* @Date: 2022-10-31 11:56:43
* @LastEditors: L LC @amov
* @LastEditTime: 2023-08-16 22:21:46
* @FilePath: /gimbal-sdk-multi-platform/src/amov_gimbal_struct.h
* @LastEditTime: 2023-10-23 18:05:59
* @FilePath: gimbal_ctrl/driver/src/amov_gimbal_struct.h
*/
#include <stdint.h>
@ -21,9 +21,22 @@ namespace amovGimbal
typedef enum
{
AMOV_GIMBAL_MODE_LOCK,
AMOV_GIMBAL_MODE_NULOCK,
} AMOV_GIMBAL_MODE_T;
AMOV_GIMBAL_SERVO_MODE_FPV = 0X10,
AMOV_GIMBAL_SERVO_MODE_LOCK = 0X11,
AMOV_GIMBAL_SERVO_MODE_FOLLOW = 0X12,
AMOV_GIMBAL_SERVO_MODE_OVERLOOK = 0X13,
AMOV_GIMBAL_SERVO_MODE_EULER = 0X14,
AMOV_GIMBAL_SERVO_MODE_WATCH = 0X16,
AMOV_GIMBAL_SERVO_MODE_TRACK = 0X17,
} AMOV_GIMBAL_SERVO_MODE_T;
typedef enum
{
AMOV_GIMBAL_CAMERA_FLAG_INVERSION = 0X1000,
AMOV_GIMBAL_CAMERA_FLAG_IR = 0X0200,
AMOV_GIMBAL_CAMERA_FLAG_RF = 0X0100,
AMOV_GIMBAL_CAMERA_FLAG_LOCK = 0X0001,
} AMOV_GIMBAL_CAMERA_FLAG_T;
typedef enum
{
@ -53,10 +66,12 @@ namespace amovGimbal
typedef struct
{
AMOV_GIMBAL_MODE_T workMode;
AMOV_GIMBAL_SERVO_MODE_T workMode;
AMOV_GIMBAL_CAMERA_FLAG_T cameraFlag;
AMOV_GIMBAL_VIDEO_T video;
AMOV_GIMBAL_POS_T abs;
AMOV_GIMBAL_POS_T rel;
AMOV_GIMBAL_POS_T relSpeed;
AMOV_GIMBAL_POS_T maxFollow;
AMOV_GIMBAL_FOV_T fov;
} AMOV_GIMBAL_STATE_T;
@ -79,9 +94,9 @@ namespace amovGimbal
typedef struct
{
double x;
double y;
double z;
double x; // or N
double y; // or E
double z; // or UP
} AMOV_GIMBAL_VELOCITY_T;
} // namespace amovGimbal
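The new `AMOV_GIMBAL_CAMERA_FLAG_T` values are bit masks (0x1000, 0x0200, 0x0100, 0x0001), so individual flags can be tested on the `cameraFlag` field of `AMOV_GIMBAL_STATE_T`. A brief sketch of my own, not part of the commit; `dev` stands for an existing `amovGimbal::gimbal`:
#include "amov_gimbal.h"
void checkCameraFlags(amovGimbal::gimbal &dev)
{
    amovGimbal::AMOV_GIMBAL_STATE_T st = dev.getGimabalState();
    // bitwise tests against the camera flag mask
    bool irActive   = (st.cameraFlag & amovGimbal::AMOV_GIMBAL_CAMERA_FLAG_IR) != 0;
    bool lockActive = (st.cameraFlag & amovGimbal::AMOV_GIMBAL_CAMERA_FLAG_LOCK) != 0;
    (void)irActive;
    (void)lockActive;
}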

View File

@ -4,7 +4,7 @@
* @Date: 2023-07-31 18:30:33
* @LastEditors: L LC @amov
* @LastEditTime: 2023-07-31 18:55:18
* @FilePath: /gimbal-sdk-multi-platform/src/amov_tool.h
* @FilePath: gimbal_ctrl/driver/src/amov_tool.h
*/
namespace amovGimbalTools

View File

@ -24,7 +24,8 @@ namespace sv
{
{GimbalType::G1, "G1"},
{GimbalType::Q10f, "Q10f"},
{GimbalType::AT10, "AT10"}};
{GimbalType::AT10, "AT10"},
{GimbalType::GX40, "GX40"}};
std::string &cvGimbalType2Str(const GimbalType &type)
{
@ -191,11 +192,13 @@ void sv::Gimbal::removeIO(sv::Gimbal *dev)
*
* @return A boolean value is being returned.
*/
#include "stdlib.h"
bool sv::Gimbal::open(PStateInvoke callback)
{
bool ret = false;
this->IO = creatIO(this);
if (this->IO != nullptr)
{
std::string driverName;

View File

@ -27,6 +27,7 @@ namespace sv
G1,
Q10f,
AT10,
GX40,
};
enum class GimbalLink
{

View File

@ -326,7 +326,7 @@ protected:
};
enum class CameraType {NONE, WEBCAM, G1, Q10, MIPI};
enum class CameraType {NONE, WEBCAM, G1, Q10, MIPI, GX40};
class CameraBase {
public:

View File

@ -2,6 +2,8 @@
#include <string>
// Include the SpireCV SDK header
#include <sv_world.h>
#include <opencv2/opencv.hpp>
#include <opencv2/imgproc.hpp>
using namespace std;
@ -48,13 +50,17 @@ int main(int argc, char *argv[]) {
// Open the camera
sv::Camera cap;
// sv::Camera cap;
// cap.setWH(640, 480);
// cap.setFps(30);
cap.open(sv::CameraType::WEBCAM, 0); // CameraID 0
// cap.open(sv::CameraType::WEBCAM, 0); // CameraID 0
// cv::VideoCapture cap("/home/amov/SpireCV/test/tracking_1280x720.mp4");
// Instantiate an OpenCV Mat to hold a single frame
cv::VideoCapture cap;
cap.open("/dev/video0");
cv::Mat img;
int frame_id = 0;
while (1)
{

View File

@ -54,7 +54,7 @@ void gimbalNoTrack(void)
int main(int argc, char *argv[])
{
// Instantiate the gimbal
gimbal = new sv::Gimbal(sv::GimbalType::G1, sv::GimbalLink::SERIAL);
gimbal = new sv::Gimbal(sv::GimbalType::GX40, sv::GimbalLink::SERIAL);
// Use /dev/ttyUSB0 as the control serial port
gimbal->setSerialPort("/dev/ttyUSB0");
// Enable gimbal control with GimableCallback as the state callback
@ -62,13 +62,13 @@ int main(int argc, char *argv[])
// Define the camera
sv::Camera cap;
// Set the camera streaming (RTSP) address
cap.setIp("192.168.2.64");
cap.setIp("192.168.144.108");
// Set the capture resolution to 720P
cap.setWH(1280, 720);
// Set the video frame rate to 30 FPS
cap.setFps(30);
// Open the camera
cap.open(sv::CameraType::G1);
cap.open(sv::CameraType::GX40);
// Create a new window for drawing the selection box
cv::namedWindow(RGB_WINDOW);
@ -77,10 +77,10 @@ int main(int argc, char *argv[])
// Instantiate the single-object (box-selection) tracker
sv::SingleObjectTracker sot;
// Manually load the camera parameters; with Amov gimbals/cameras such as the G1 this step can be skipped, since the parameter file is downloaded automatically
sot.loadCameraParams(sv::get_home() + "/SpireCV/calib_webcam_640x480.yaml");
sot.loadCameraParams(sv::get_home() + "/SpireCV/calib_webcam_1280x720.yaml");
sv::CommonObjectDetector cod;
cod.loadCameraParams(sv::get_home() + "/SpireCV/calib_webcam_640x480.yaml");
cod.loadCameraParams(sv::get_home() + "/SpireCV/calib_webcam_1280x720.yaml");
// Instantiate an OpenCV Mat to hold a single frame
cv::Mat img;
@ -94,6 +94,7 @@ int main(int argc, char *argv[])
// Read one frame into img
cap.read(img);
cv::resize(img, img, cv::Size(cod.image_width, cod.image_height));
cv::imshow(RGB_WINDOW, img);
// Run common object detection
cod.detect(img, tgts);
@ -200,7 +201,7 @@ int main(int argc, char *argv[])
}
} // end of tracking
// Display the detection result image img
cv::imshow(RGB_WINDOW, img);
// cv::imshow(RGB_WINDOW, img);
cv::waitKey(10);
}
return 0;

View File

@ -44,7 +44,7 @@ void onMouse(int event, int x, int y, int, void *);
int main(int argc, char *argv[])
{
// Instantiate the gimbal
gimbal = new sv::Gimbal(sv::GimbalType::G1, sv::GimbalLink::SERIAL);
gimbal = new sv::Gimbal(sv::GimbalType::GX40, sv::GimbalLink::SERIAL);
// Use /dev/ttyUSB0 as the control serial port
gimbal->setSerialPort("/dev/ttyUSB0");
// Enable gimbal control with gimableCallback as the state callback
@ -52,13 +52,13 @@ int main(int argc, char *argv[])
// Define the camera
sv::Camera cap;
// Set the camera streaming (RTSP) address
cap.setIp("192.168.2.64");
cap.setIp("192.168.144.108");
// Set the capture resolution to 720P
cap.setWH(1280, 720);
// Set the video frame rate to 30 FPS
cap.setFps(30);
// cap.setWH(1280, 720);
// // Set the video frame rate to 30 FPS
// cap.setFps(30);
// Open the camera
cap.open(sv::CameraType::G1);
cap.open(sv::CameraType::GX40);
// Create a window so that mouse interaction is possible
cv::namedWindow(RGB_WINDOW);
@ -80,6 +80,8 @@ int main(int argc, char *argv[])
// Read one frame into img
cap.read(img);
std::cout << img.type() << std::endl;
// Run ArUco marker detection
ad.detect(img, tgts);

View File

@ -60,9 +60,20 @@ cmake -D CMAKE_BUILD_TYPE=Release \
-D CUDA_TOOLKIT_ROOT_DIR=/usr/local/cuda \
-D OPENCV_ENABLE_NONFREE=ON \
-D CMAKE_INSTALL_PREFIX=/usr/local \
-D OPENCV_EXTRA_MODULES_PATH=../../opencv_contrib-4.7.0/modules ..
-D OPENCV_EXTRA_MODULES_PATH=../../opencv_contrib-4.7.0/modules .. \
-D CMAKE_VERBOSE_MAKEFILE=ON \
-D ENABLE_CXX11=ON \
-D BUILD_TESTS=OFF \
-D WITH_GSTREAMER=ON \
-D OPENCV_GENERATE_PKGCONFIG=ON \
-D ENABLE_FAST_MATH=ON \
-D CUDA_FAST_MATH=ON \
-D BUILD_opencv_python3=ON \
-D PYTHON3_LIBRARY=/usr/lib/x86_64-linux-gnu/libpython3.8.so \
-D PYTHON3_NUMPY_INCLUDE_DIRS=/home/aiyangsky/.local/lib/python3.8/site-packages/numpy/core/include \
-D PYTHON3_PACKAGES_PATH=/usr/lib/python3/dist-packages
make -j2
make -j
sudo make install
cd
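The new GX40 RTSP pipeline depends on GStreamer support in this OpenCV build; a quick way to confirm the flags took effect is to print the build information (a small check of my own, not from the commit):
#include <opencv2/core.hpp>
#include <iostream>
int main()
{
    // Look for "GStreamer: YES" and the CUDA section in the output.
    std::cout << cv::getBuildInformation() << std::endl;
    return 0;
}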

View File

@ -52,13 +52,30 @@ cp -r ~/opencv_build/opencv_cache_x86-4.7.0/* ~/opencv_build/opencv-4.7.0/.cache
mkdir build
cd build
sudo apt-get install -y libcurl4 build-essential pkg-config cmake libopenblas-dev libeigen3-dev \
libtbb-dev libavcodec-dev libavformat-dev libgstreamer-plugins-base1.0-dev \
libgstreamer1.0-dev libswscale-dev libgtk-3-dev libpng-dev libjpeg-dev \
libcanberra-gtk-module libcanberra-gtk3-module
cmake -D CMAKE_BUILD_TYPE=Release \
-D WITH_CUDA=OFF \
-D CMAKE_VERBOSE_MAKEFILE=ON \
-D ENABLE_CXX11=ON \
-D BUILD_TESTS=OFF \
-D WITH_GSTREAMER=ON \
-D OPENCV_GENERATE_PKGCONFIG=ON \
-D ENABLE_FAST_MATH=ON \
-D CUDA_FAST_MATH=ON \
-D WITH_CUBLAS=ON \
-D WITH_CUDA=ON \
-D OPENCV_ENABLE_NONFREE=ON \
-D CMAKE_INSTALL_PREFIX=/usr/local \
-D OPENCV_EXTRA_MODULES_PATH=../../opencv_contrib-4.7.0/modules ..
-D OPENCV_EXTRA_MODULES_PATH=../../opencv_contrib-4.7.0/modules .. \
-D BUILD_opencv_python3=ON \
-D PYTHON3_LIBRARY=/usr/lib/x86_64-linux-gnu/libpython3.8.so \
-D PYTHON3_NUMPY_INCLUDE_DIRS=/home/aiyangsky/.local/lib/python3.8/site-packages/numpy/core/include \
-D PYTHON3_PACKAGES_PATH=/usr/lib/python3/dist-packages
make -j2
make -j
sudo make install
cd

View File

@ -1110,7 +1110,7 @@ void CameraBase::_run()
}
bool CameraBase::read(cv::Mat& image)
{
if (this->_type == CameraType::WEBCAM || this->_type == CameraType::G1 || this->_type == CameraType::MIPI)
if (this->_type == CameraType::WEBCAM || this->_type == CameraType::G1 || this->_type == CameraType::MIPI || this->_type == CameraType::GX40)
{
int n_try = 0;
while (n_try < 5000)

View File

@ -2,98 +2,123 @@
#include <cmath>
#include <fstream>
namespace sv {
Camera::Camera()
namespace sv
{
}
Camera::~Camera()
{
}
void Camera::openImpl()
{
if (this->_type == CameraType::WEBCAM)
Camera::Camera()
{
this->_cap.open(this->_camera_id);
if (this->_width > 0 && this->_height > 0)
{
this->_cap.set(cv::CAP_PROP_FRAME_WIDTH, this->_width);
this->_cap.set(cv::CAP_PROP_FRAME_HEIGHT, this->_height);
}
if (this->_fps > 0)
{
this->_cap.set(cv::CAP_PROP_FPS, this->_fps);
}
if (this->_brightness > 0)
{
this->_cap.set(cv::CAP_PROP_BRIGHTNESS, this->_brightness);
}
if (this->_contrast > 0)
{
this->_cap.set(cv::CAP_PROP_CONTRAST, this->_contrast);
}
if (this->_saturation > 0)
{
this->_cap.set(cv::CAP_PROP_SATURATION, this->_saturation);
}
if (this->_hue > 0)
{
this->_cap.set(cv::CAP_PROP_HUE, this->_hue);
}
if (this->_exposure > 0)
{
this->_cap.set(cv::CAP_PROP_EXPOSURE, this->_exposure);
}
}
else if (this->_type == CameraType::G1)
Camera::~Camera()
{
char pipe[512];
if (this->_width <= 0 || this->_height <= 0)
}
void Camera::openImpl()
{
if (this->_type == CameraType::WEBCAM)
{
this->_width = 1280;
this->_height = 720;
this->_cap.open(this->_camera_id);
if (this->_width > 0 && this->_height > 0)
{
this->_cap.set(cv::CAP_PROP_FRAME_WIDTH, this->_width);
this->_cap.set(cv::CAP_PROP_FRAME_HEIGHT, this->_height);
}
if (this->_fps > 0)
{
this->_cap.set(cv::CAP_PROP_FPS, this->_fps);
}
if (this->_brightness > 0)
{
this->_cap.set(cv::CAP_PROP_BRIGHTNESS, this->_brightness);
}
if (this->_contrast > 0)
{
this->_cap.set(cv::CAP_PROP_CONTRAST, this->_contrast);
}
if (this->_saturation > 0)
{
this->_cap.set(cv::CAP_PROP_SATURATION, this->_saturation);
}
if (this->_hue > 0)
{
this->_cap.set(cv::CAP_PROP_HUE, this->_hue);
}
if (this->_exposure > 0)
{
this->_cap.set(cv::CAP_PROP_EXPOSURE, this->_exposure);
}
}
if (this->_port <= 0)
else if (this->_type == CameraType::G1)
{
this->_port = 554;
}
if (this->_fps <= 0)
{
this->_fps = 30;
}
char pipe[512];
if (this->_width <= 0 || this->_height <= 0)
{
this->_width = 1280;
this->_height = 720;
}
if (this->_port <= 0)
{
this->_port = 554;
}
if (this->_fps <= 0)
{
this->_fps = 30;
}
#ifdef PLATFORM_X86_CUDA
sprintf(pipe, "rtsp://%s:%d/H264?W=%d&H=%d&FPS=%d&BR=4000000", this->_ip.c_str(), this->_port, this->_width, this->_height, this->_fps);
this->_cap.open(pipe);
sprintf(pipe, "rtsp://%s:%d/H264?W=%d&H=%d&FPS=%d&BR=4000000", this->_ip.c_str(), this->_port, this->_width, this->_height, this->_fps);
this->_cap.open(pipe);
#endif
#ifdef PLATFORM_JETSON
sprintf(pipe, "rtspsrc location=rtsp://%s:%d/H264?W=%d&H=%d&FPS=%d&BR=4000000 latency=100 ! application/x-rtp,media=video ! rtph264depay ! parsebin ! nvv4l2decoder enable-max-performancegst=1 ! nvvidconv ! video/x-raw,format=(string)BGRx ! videoconvert ! appsink sync=false", this->_ip.c_str(), this->_port, this->_width, this->_height, this->_fps);
this->_cap.open(pipe, cv::CAP_GSTREAMER);
sprintf(pipe, "rtspsrc location=rtsp://%s:%d/H264?W=%d&H=%d&FPS=%d&BR=4000000 latency=100 ! application/x-rtp,media=video ! rtph264depay ! parsebin ! nvv4l2decoder enable-max-performancegst=1 ! nvvidconv ! video/x-raw,format=(string)BGRx ! videoconvert ! appsink sync=false", this->_ip.c_str(), this->_port, this->_width, this->_height, this->_fps);
this->_cap.open(pipe, cv::CAP_GSTREAMER);
#endif
}
else if (this->_type == CameraType::GX40)
{
std::ostringstream camera_url;
if (this->_width <= 0 || this->_height <= 0)
{
this->_width = 1280;
this->_height = 720;
}
if (this->_port <= 0)
{
this->_port = 554;
}
#ifdef PLATFORM_X86_CUDA
camera_url << "rtspsrc location = rtsp://user:0000@" << this->_ip << ":" << this->_port
<< "/cam/realmonitor?channel=1&subtype=0 latency=100 ! application/x-rtp,media=video ! \
rtph265depay ! parsebin ! avdec_h265 ! \
appsink sync=false";
std::cout << camera_url.str()<<std::endl;
this->_cap.open(camera_url.str(),cv::CAP_GSTREAMER);
#endif
#ifdef PLATFORM_JETSON
camera_url << "rtspsrc location = rtsp://user:0000@" << this->_ip << ":" << this->_port
<< "/cam/realmonitor?channel=1&subtype=0 latency=100 ! \
application/x-rtp,media=video ! rtph265depay ! parsebin ! \
nvv4l2decoder ! nvvidconv flip-method=4 ! \
video/x-raw,format=(string)BGRx,width=" << this->_width << ",height=" << this->_height << " ! videoconvert ! \
appsink sync=false";
this->_cap.open(camera_url.str(),cv::CAP_GSTREAMER);
#endif
}
else if (this->_type == CameraType::MIPI)
{
char pipe[512];
if (this->_width <= 0 || this->_height <= 0)
{
this->_width = 1280;
this->_height = 720;
}
if (this->_fps <= 0)
{
this->_fps = 30;
}
sprintf(pipe, "nvarguscamerasrc sensor-id=%d ! video/x-raw(memory:NVMM), width=(int)%d, height=(int)%d, format=(string)NV12, framerate=(fraction)%d/1 ! nvvidconv flip-method=0 ! video/x-raw, width=(int)%d, height=(int)%d, format=(string)BGRx ! videoconvert ! video/x-raw, format=(string)BGR ! appsink", this->_camera_id, this->_width, this->_height, this->_fps, this->_width, this->_height);
this->_cap.open(pipe, cv::CAP_GSTREAMER);
}
}
else if (this->_type == CameraType::MIPI)
{
char pipe[512];
if (this->_width <= 0 || this->_height <= 0)
{
this->_width = 1280;
this->_height = 720;
}
if (this->_fps <= 0)
{
this->_fps = 30;
}
sprintf(pipe, "nvarguscamerasrc sensor-id=%d ! video/x-raw(memory:NVMM), width=(int)%d, height=(int)%d, format=(string)NV12, framerate=(fraction)%d/1 ! nvvidconv flip-method=0 ! video/x-raw, width=(int)%d, height=(int)%d, format=(string)BGRx ! videoconvert ! video/x-raw, format=(string)BGR ! appsink", this->_camera_id, this->_width, this->_height, this->_fps, this->_width, this->_height);
this->_cap.open(pipe, cv::CAP_GSTREAMER);
}
}
}
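Putting the new CameraType::GX40 branch to use from application code looks the same as in the demos above; a consolidated sketch of my own (not part of the commit), reusing the IP and resolution from the demos:
#include <sv_world.h>
#include <opencv2/opencv.hpp>
int main()
{
    sv::Camera cap;
    cap.setIp("192.168.144.108");    // GX40 RTSP address used in the demos
    cap.setWH(1280, 720);
    cap.open(sv::CameraType::GX40);  // builds the rtspsrc/H.265 GStreamer pipeline above
    cv::Mat img;
    while (cap.read(img))
    {
        cv::imshow("GX40", img);
        if (cv::waitKey(10) == 27)   // ESC to quit
            break;
    }
    return 0;
}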