Add message to rotate gimbal to specific coordinates

opencv
PgSocks 1 year ago
parent c4e3a9ceea
commit f1f08a9a21

@ -18,3 +18,14 @@ gimbal_ctrl_speed (
int16_t p,
int16_t y,
int16_t r );
#define GIMBAL_ROTATE_CMD 0xb03f
void
gimbal_rotate (
union Request* req,
uint16_t seq,
bool ack,
int16_t p,
int16_t y );
#define GIMBAL_ACTION_PUSH_CMD 0xb13f

@ -316,6 +316,73 @@ struct PACKED VisionDetectInfo {
struct VisionRect rects[];
};
/* Push message reporting the progress of an in-flight gimbal action
 * (e.g. one started by a GIMBAL_ROTATE_CMD request).
 * Angles are int16 -- presumably 0.1-degree units like the rotate
 * request; TODO confirm against the protocol. */
struct PACKED GimbalActionPush {
struct Header header;
/* Action id echoed back from the originating request
 * (gimbal_rotate() sends id 33). */
uint8_t id;
/* Completion progress -- presumably 0..100 percent; verify. */
uint8_t progress;
/* Action state; main() only reports pushes with state == 1. */
uint8_t state;
int16_t yaw;
int16_t roll;
int16_t pitch;
};
/* Request payload for GIMBAL_ROTATE_CMD: rotate the gimbal to an
 * absolute pitch/yaw coordinate. Built by gimbal_rotate().
 * The commented-out unions below document the intended bit layout of
 * the two bitfield bytes; the flat uint8_t fields are what is actually
 * serialized. */
struct PACKED GimbalRotateReq {
struct Header header;
/* Caller-chosen action id, echoed in GimbalActionPush. */
uint8_t id;
//union {
uint8_t control_bitfield;
// struct {
// // always 0 for start
// uint8_t control : 1;
// uint8_t frequency : 2;
// };
//};
//union {
uint8_t coord_bitfield;
// struct {
// uint8_t coordinate : 3;
// // always true
// uint8_t pitch_valid : 1;
// // always false
// uint8_t roll_valid : 1;
// // always true
// uint8_t yaw_valid : 1;
// };
//};
/* Target yaw, also addressable as low/high bytes. */
union {
int16_t yaw;
struct {
int8_t l_yaw;
int8_t h_yaw;
};
};
int16_t roll;
/* Target pitch, also addressable as low/high bytes. */
union {
int16_t pitch;
struct {
int8_t l_pitch;
int8_t h_pitch;
};
};
uint16_t error;
/* Axis speeds -- gimbal_rotate() uses 30 for yaw/pitch, 0 for roll. */
uint16_t yaw_speed;
uint16_t roll_speed;
uint16_t pitch_speed;
struct Footer footer;
};
/* Response to GIMBAL_ROTATE_CMD. retcode != 0 indicates failure
 * (see the GIMBAL_ROTATE_CMD handler in main()); `accept` presumably
 * reports whether the action was queued -- confirm against protocol. */
struct PACKED GimbalRotateResp {
struct Header header;
uint8_t retcode;
uint8_t accept;
struct Footer footer;
};
struct PACKED VisionDetectEnableReq {
struct Header header;
uint16_t type;
@ -345,6 +412,7 @@ union Request {
struct BlasterFireReq blaster;
struct StreamCtrlReq stream;
struct VisionDetectEnableReq enablevision;
struct GimbalRotateReq gimbalrot;
};
union Response {
struct Header header;
@ -362,10 +430,14 @@ union Response {
struct BlasterFireResp blaster;
struct StreamCtrlResp stream;
struct VisionDetectEnableResp enablevision;
struct GimbalRotateResp gimbalrot;
};
/* Unsolicited push messages from the robot (not replies to a request). */
union Push {
struct GimbalActionPush gimbalaction;
struct VisionDetectInfo vision;
};
union Message {
struct Header header;
union Request req;
@ -373,6 +445,13 @@ union Message {
union Push push;
};
/* Lifecycle states for long-running robot actions (gimbal rotate etc.).
 * NOTE(review): STARTED is declared after the terminal states; the wire
 * protocol presumably fixes these ordinals -- confirm before reordering. */
enum ACTIONSTATE {
ACTION_RUNNING,
ACTION_SUCCEEDED,
ACTION_FAILED,
ACTION_STARTED
};
enum MESSAGEERR {
MESSAGEERR_NONE,
MESSAGEERR_HEADERCRC,

@ -59,6 +59,8 @@ int robot_stop(Robot robot);
*/
int robot_aim(Robot, float p, float y);
int robot_target(Robot robot, float pitch, float yaw);
/*
* Set the velocity of the robot chassis. A packet will be sent to the robot on
* the next ready tick of the work function.

@ -43,6 +43,8 @@ message_length(int cmd) {
return sizeof(struct StreamCtrlReq);
case VISION_DETECT_ENABLE_CMD:
return sizeof(struct VisionDetectEnableReq);
case GIMBAL_ROTATE_CMD:
return sizeof(struct GimbalRotateReq);
default:
return 0;
}
@ -65,6 +67,7 @@ message_module(int cmd) {
case CHASSIS_SPEED_MODE_CMD:
return host2byte(CHASSIS_HOST, CHASSIS_INDEX);
case GIMBAL_CTRL_SPEED_CMD:
case GIMBAL_ROTATE_CMD:
return host2byte(GIMBAL_HOST, GIMBAL_INDEX);
case BLASTER_FIRE_CMD:
return host2byte(BLASTER_HOST, BLASTER_INDEX);

@ -16,3 +16,35 @@ gimbal_ctrl_speed (
req->gimbspeed.ctrl = 0xDC;
req_finalize(seq, GIMBAL_CTRL_SPEED_CMD, ack, req);
}
/*
 * Build a GIMBAL_ROTATE_CMD request that rotates the gimbal to an
 * absolute pitch/yaw coordinate.
 *
 * req - request buffer to fill in
 * seq - sequence number for the message header
 * ack - whether the robot should acknowledge the request
 * p   - target pitch (0.1-degree units -- TODO confirm against protocol)
 * y   - target yaw   (0.1-degree units -- TODO confirm against protocol)
 *
 * The bitfield constants below were determined empirically; see the
 * commented-out bitfield layouts on struct GimbalRotateReq.
 */
void
gimbal_rotate (
union Request* req,
uint16_t seq,
bool ack,
int16_t p,
int16_t y ) {
	/* coordinate = 1, pitch valid, roll not valid, yaw valid (0x0D). */
	req->gimbalrot.coord_bitfield = 1 | 0 << 1 | 1 << 2 | 1 << 3;
	/* control = 0 (start), frequency = 2 (0x04). */
	req->gimbalrot.control_bitfield = 0 | 1 << 2;
	req->gimbalrot.id = 33;
	req->gimbalrot.yaw = y;
	/* Roll is never commanded (roll_valid is 0 above). */
	req->gimbalrot.roll = 0;
	req->gimbalrot.pitch = p;
	req->gimbalrot.error = 0;
	req->gimbalrot.yaw_speed = 30;
	req->gimbalrot.roll_speed = 0;
	req->gimbalrot.pitch_speed = 30;
	req_finalize(seq, GIMBAL_ROTATE_CMD, ack, req);
}

@ -4,6 +4,7 @@
#include <string.h>
#include <stdlib.h>
#include <math.h>
#include <stdio.h>
@ -31,6 +32,10 @@ struct RobotImp {
int16_t gimbal[2];
bool dirty_gimbal;
int16_t pitch;
int16_t yaw;
bool dirty_target;
bool dirty_blaster;
bool stream_state;
@ -87,6 +92,25 @@ robot_aim(Robot robot, float p, float y) {
return 1;
}
/*
 * Aim the gimbal at a normalized screen coordinate.
 *
 * pitch/yaw are in [-1, 1] with 0 at the image center; the camera is
 * assumed to have a 120-degree field of view, so +/-1 maps to +/-60
 * degrees off center. The rotate request itself is sent on the next
 * ready tick of the work function (dirty_target flag).
 * Always returns 1.
 */
int
robot_target(Robot robot, float pitch, float yaw) {
	/* Half of the 120-degree FOV, in degrees. The original code went
	 * degrees -> radians -> degrees; the round-trip cancels out. */
	const float half_fov = 120.0f / 2;
	/* Robomaster wants 0.1-degree units; the pitch axis is inverted
	 * (screen-down is positive, gimbal-up is positive). */
	robot->yaw = yaw * half_fov * 10;
	robot->pitch = -pitch * half_fov * 10;
	robot->dirty_target = true;
	return 1;
}
int
robot_led(Robot robot, unsigned char r, unsigned char g, unsigned char b) {
robot->colors[0] = r;
@ -221,7 +245,7 @@ robot_work(Robot robot) {
case SETTING_MOVEMENT_MODE:
{
set_robot_mode(&req, robot->seq++, true, MOVEMENTMODE_FREE);
set_robot_mode(&req, robot->seq++, true, MOVEMENTMODE_GIMBAL_LEAD);
req_send(robot->client->dev_conn, &req);
robot->state = WAITING;
break;
@ -256,6 +280,14 @@ robot_work(Robot robot) {
req_send(robot->client->dev_conn, &req);
robot->dirty_gimbal = false;
}
if(robot->dirty_target) {
gimbal_rotate (
&req, robot->seq++, true,
robot->pitch,
robot->yaw );
req_send(robot->client->dev_conn, &req);
robot->dirty_target = false;
}
if(robot->dirty_colors) {
set_system_led (
&req, robot->seq++, false,

@ -13,6 +13,7 @@ extern "C" {
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <thread>
#include <mutex>
@ -27,11 +28,9 @@ struct {
};
int color = 0;
float pitch = 0, yaw = 0;
float x = 0, y = 0, z = 0;
static Uint32 drive_timer_handler(Uint32 interval, void* param) {
robot_drive((Robot)param, x, y, z);
robot_aim((Robot)param, pitch, yaw);
return 75;
}
static Uint32 heartbeat_timer_handler(Uint32 interval, void* param) {
@ -42,9 +41,13 @@ static Uint32 heartbeat_timer_handler(Uint32 interval, void* param) {
std::mutex mtx;
cv::Mat img;
std::vector<cv::Rect> found;
std::vector<double> weights;
unsigned target;
bool stop = false;
bool track = false;
static void processFrameThread() {
static void processFrameThread(Robot robot) {
cv::HOGDescriptor hog;
hog.setSVMDetector(cv::HOGDescriptor::getDefaultPeopleDetector());
@ -52,7 +55,26 @@ static void processFrameThread() {
std::unique_lock<std::mutex> lock(mtx);
if(!img.empty()) {
hog.detectMultiScale(img, found);
cv::Mat gray;
cv::cvtColor(img, gray, cv::COLOR_BGR2GRAY);
hog.detectMultiScale(gray, found, weights, 0, cv::Size(), cv::Size(), 1.1);
target = 0;
for(unsigned i = 0; i < weights.size(); i++) {
if(weights[i] > weights[target])
target = i;
}
if(track) {
float yaw = found[target].x;
float pitch = found[target].y;
// Normalize the coordinates
yaw = 2 * (yaw - img.cols / 2) / img.cols;
pitch = 2 * (pitch - img.rows / 2) / img.rows;
robot_target(robot, pitch, yaw);
}
}
lock.unlock();
@ -102,6 +124,7 @@ static void captureFrameThread(SDL_Window* window, const char* fname) {
}
AVCodecContext* pCodecCtx = avcodec_alloc_context3(pCodec);
pCodecCtx->pix_fmt = AV_PIX_FMT_GRAY8;
avcodec_parameters_to_context(pCodecCtx, pCodecParameters);
if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
@ -114,7 +137,7 @@ static void captureFrameThread(SDL_Window* window, const char* fname) {
int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height, 1);
uint8_t* buffer = (uint8_t*)av_malloc(numBytes * sizeof(uint8_t));
av_image_fill_arrays(pFrameRGB->data, pFrameRGB->linesize, buffer, AV_PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height, 1);
av_image_fill_arrays(pFrameRGB->data, pFrameRGB->linesize, buffer, AV_PIX_FMT_BGR24, pCodecCtx->width, pCodecCtx->height, 1);
SwsContext* sws_ctx = sws_getContext(
pCodecCtx->width,
@ -122,19 +145,19 @@ static void captureFrameThread(SDL_Window* window, const char* fname) {
pCodecCtx->pix_fmt,
pCodecCtx->width,
pCodecCtx->height,
AV_PIX_FMT_RGB24,
AV_PIX_FMT_BGR24,
SWS_BILINEAR,
NULL,
NULL,
NULL);
SDL_SetWindowSize(window, pCodecCtx->width, pCodecCtx->height);
SDL_Renderer* renderer = SDL_CreateRenderer(window, -1, 0);
SDL_Texture* texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_BGR24, SDL_TEXTUREACCESS_STATIC, pCodecCtx->width, pCodecCtx->height);
while (!stop) {
AVPacket packet;
if(av_read_frame(pFormatCtx, &packet) < 0) {
std::cerr << "Error while reading a frame\n";
stop = true;
break;
}
@ -143,7 +166,8 @@ static void captureFrameThread(SDL_Window* window, const char* fname) {
int response = avcodec_send_packet(pCodecCtx, &packet);
if (response < 0) {
std::cerr << "Error while sending a packet to the decoder: " << response << '\n';
return;
stop = true;
break;
}
while (response >= 0) {
@ -152,7 +176,8 @@ static void captureFrameThread(SDL_Window* window, const char* fname) {
break;
} else if (response < 0) {
std::cerr << "Error while receiving a frame from the decoder: " << response << '\n';
return;
stop = true;
break;
}
if (response >= 0) {
@ -161,26 +186,22 @@ static void captureFrameThread(SDL_Window* window, const char* fname) {
std::unique_lock<std::mutex> lock(mtx);
img = cv::Mat(pCodecCtx->height, pCodecCtx->width, CV_8UC3, pFrameRGB->data[0], pFrameRGB->linesize[0]);
for (const auto& rect : found) {
rectangle(img, rect, cv::Scalar(0, 255, 0), 3);
for (unsigned i = 0; i < found.size(); i++) {
rectangle(img, found[i], cv::Scalar(255 - weights[i] * 255, 0, weights[i] * 255), 3);
}
lock.unlock();
{
SDL_UpdateTexture(texture, NULL, img.data, img.cols * 3);
SDL_RenderClear(renderer);
SDL_RenderCopy(renderer, texture, NULL, NULL);
SDL_RenderPresent(renderer);
}
SDL_UpdateTexture(texture, NULL, img.data, img.cols * 3);
SDL_RenderClear(renderer);
SDL_RenderCopy(renderer, texture, NULL, NULL);
SDL_RenderPresent(renderer);
lock.unlock();
}
}
}
av_packet_unref(&packet);
SDL_Delay(33);
}
av_free(buffer);
@ -207,7 +228,7 @@ int main(int argc, char* argv[]) {
"Robomaster",
SDL_WINDOWPOS_UNDEFINED,
SDL_WINDOWPOS_UNDEFINED,
800, 300,
1280, 720,
SDL_WINDOW_RESIZABLE );
if(!win) {
fprintf(stderr, "%s", SDL_GetError());
@ -227,8 +248,7 @@ int main(int argc, char* argv[]) {
while(robot_work(robot)) {
int h, w;
SDL_GetWindowSize(win, &w, &h);
int h = 720, w = 1280;
struct Fragment fragment;
while(robot_poll(robot, &fragment)) {
@ -244,6 +264,19 @@ int main(int argc, char* argv[]) {
break;
case FRAGMENT_MESSAGE:
switch(fragment.message.header.cmd) {
case GIMBAL_ACTION_PUSH_CMD:
if(fragment.message.push.gimbalaction.state != 1)
break;
printf("Gimbal action %d\n", fragment.message.push.gimbalaction.id);
printf("\tProgress %d\n", fragment.message.push.gimbalaction.progress);
printf("\tYaw %d, Pitch %d\n", fragment.message.push.gimbalaction.yaw, fragment.message.push.gimbalaction.pitch);
break;
case GIMBAL_ROTATE_CMD:
if(fragment.message.resp.gimbalrot.retcode) {
printf("ERROR: Gimbal rotate message failure\n");
break;
}
break;
case STREAM_CTRL_CMD:
if(fragment.message.resp.stream.retcode) {
printf("ERROR: Stream not enabled\n");
@ -252,7 +285,7 @@ int main(int argc, char* argv[]) {
printf("Stream enabled\n");
if(++streamcount >= 2) {
captureThread = new std::thread(captureFrameThread, win, "tcp://192.168.2.1:40921");
processThread = new std::thread(processFrameThread);
processThread = new std::thread(processFrameThread, robot);
}
break;
case VISION_DETECT_ENABLE_CMD:
@ -275,6 +308,7 @@ int main(int argc, char* argv[]) {
}
}
std::unique_lock<std::mutex> lock(mtx);
SDL_Event event;
while(SDL_PollEvent(&event)) {
switch(event.type) {
@ -310,35 +344,29 @@ int main(int argc, char* argv[]) {
case SDL_SCANCODE_RETURN:
robot_blast(robot);
break;
case SDL_SCANCODE_T:
track = true;
break;
default: break;
}
break;
case SDL_MOUSEMOTION:
yaw = (float)event.motion.xrel;
pitch = (float)event.motion.yrel;
break;
case SDL_JOYAXISMOTION:
switch(event.jaxis.axis) {
case 0:
x = (float)event.jaxis.value / 32767;
break;
case 1:
y = (float)event.jaxis.value / 32767;
break;
case 4:
z = (float)event.jaxis.value / 32767 / 2;
break;
case 2:
yaw = (float)event.jaxis.value / 32767;
break;
case 3:
pitch = (float)event.jaxis.value / 32767;
break;
default:
printf("axis: %d\n", event.jaxis.axis);
break;
}
case SDL_MOUSEBUTTONDOWN:
{
// Get window coordinates
int target_x = 0, target_y = 0;
SDL_GetMouseState(&target_x, &target_y);
float yaw = target_x;
float pitch = target_y;
// Normalize the coordinates
yaw = 2 * (yaw - w / 2) / w;
pitch = 2 * (pitch - h / 2) / h;
robot_target(robot, pitch, yaw);
break;
}
case SDL_WINDOWEVENT:
if(event.window.event != SDL_WINDOWEVENT_CLOSE) break;
case SDL_QUIT:
@ -346,9 +374,8 @@ int main(int argc, char* argv[]) {
stop = true;
default: break;
}
yaw = (int)(yaw * 0.89);
pitch = (int)(pitch * 0.89);
}
lock.unlock();
}
if(captureThread != nullptr) {

@ -0,0 +1,240 @@
#include <iostream>
#include <thread>
#include <mutex>
#include <cmath>
#include <opencv2/opencv.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <SDL2/SDL.h>
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <libavutil/imgutils.h>
}
std::mutex mtx;
cv::Mat img;
std::vector<cv::Rect> found;
std::vector<double> weights;
unsigned target;
bool stop = false;
/*
 * Detection thread: runs a HOG person detector over the latest captured
 * frame (shared `img`, guarded by `mtx`), picks the highest-weighted
 * detection, and prints its angular offset from the image center
 * assuming a 120-degree FOV. Loops until `stop` is set.
 */
static void processFrameThread() {
	cv::HOGDescriptor hog;
	hog.setSVMDetector(cv::HOGDescriptor::getDefaultPeopleDetector());
	while(!stop) {
		std::unique_lock<std::mutex> lock(mtx);
		if(!img.empty()) {
			// HOG works on grayscale; convert from the BGR capture.
			cv::Mat gray;
			cv::cvtColor(img, gray, cv::COLOR_BGR2GRAY);
			hog.detectMultiScale(gray, found, weights, 0, cv::Size(), cv::Size(), 1.1);
			// BUGFIX: only index found[target] when something was
			// detected -- indexing an empty vector is UB.
			if(!found.empty()) {
				target = 0;
				for(unsigned i = 0; i < weights.size(); i++) {
					if(weights[i] > weights[target])
						target = i;
				}
				// Get the center of the highest weighted rectangle
				float x = found[target].x + found[target].width / 2;
				float y = found[target].y + found[target].height / 2;
				// Normalize the coordinates to [-1, 1]
				x = 2 * (x - img.cols / 2) / img.cols;
				y = 2 * (y - img.rows / 2) / img.rows;
				// Get the FOV angle of the point in radians
				float FOV = 120 * (M_PI / 180);
				x = x * (FOV / 2);
				y = y * (FOV / 2);
				// Convert to degrees
				x = x * (180 / M_PI);
				y = y * (180 / M_PI);
				printf("y: %f, p: %f\n", x, y);
			}
		}
		lock.unlock();
		// Throttle detection to roughly 13 Hz.
		SDL_Delay(75);
	}
}
/*
 * Capture thread: decodes the video stream at `fname` with ffmpeg,
 * converts each frame to BGR24, publishes it as the shared `img`
 * (guarded by `mtx`) for the detection thread, and renders it -- with
 * detection rectangles overlaid -- into `window` via SDL.
 * Runs until `stop` is set; fatal decode errors set `stop` themselves
 * so the detection thread also winds down.
 */
static void captureFrameThread(SDL_Window* window, const char* fname) {
	av_register_all();
	avcodec_register_all();
	AVFormatContext* pFormatCtx = avformat_alloc_context();
	if (avformat_open_input(&pFormatCtx, fname, NULL, NULL) != 0) {
		std::cerr << "Couldn't open stream\n";
		return;
	}
	if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
		std::cerr << "Couldn't find stream information\n";
		avformat_close_input(&pFormatCtx); // BUGFIX: was leaked
		return;
	}
	// Locate the first video stream in the container.
	int videoStream = -1;
	for (unsigned int i = 0; i < pFormatCtx->nb_streams; i++) {
		if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
			videoStream = i;
			break;
		}
	}
	if (videoStream == -1) {
		std::cerr << "Didn't find a video stream\n";
		avformat_close_input(&pFormatCtx); // BUGFIX: was leaked
		return;
	}
	AVCodecParameters* pCodecParameters = pFormatCtx->streams[videoStream]->codecpar;
	AVCodec* pCodec = avcodec_find_decoder(pCodecParameters->codec_id);
	if (pCodec == NULL) {
		std::cerr << "Unsupported codec\n";
		avformat_close_input(&pFormatCtx); // BUGFIX: was leaked
		return;
	}
	AVCodecContext* pCodecCtx = avcodec_alloc_context3(pCodec);
	// avcodec_parameters_to_context() copies pix_fmt (and the rest of the
	// stream parameters) from the container, so the old manual
	// `pix_fmt = AV_PIX_FMT_GRAY8` was a dead store and is removed.
	avcodec_parameters_to_context(pCodecCtx, pCodecParameters);
	if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
		std::cerr << "Could not open codec\n";
		avformat_close_input(&pFormatCtx); // BUGFIX: was leaked
		return;
	}
	AVFrame* pFrame = av_frame_alloc();
	AVFrame* pFrameRGB = av_frame_alloc();
	// RGB24 and BGR24 are both 3 bytes/pixel, so sizing with RGB24 and
	// filling as BGR24 yields the same buffer size.
	int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height, 1);
	uint8_t* buffer = (uint8_t*)av_malloc(numBytes * sizeof(uint8_t));
	// BGR24 matches OpenCV's default channel order and the SDL texture.
	av_image_fill_arrays(pFrameRGB->data, pFrameRGB->linesize, buffer, AV_PIX_FMT_BGR24, pCodecCtx->width, pCodecCtx->height, 1);
	SwsContext* sws_ctx = sws_getContext(
		pCodecCtx->width,
		pCodecCtx->height,
		pCodecCtx->pix_fmt,
		pCodecCtx->width,
		pCodecCtx->height,
		AV_PIX_FMT_BGR24,
		SWS_BILINEAR,
		NULL,
		NULL,
		NULL);
	SDL_SetWindowSize(window, pCodecCtx->width, pCodecCtx->height);
	SDL_Renderer* renderer = SDL_CreateRenderer(window, -1, 0);
	SDL_Texture* texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_BGR24, SDL_TEXTUREACCESS_STATIC, pCodecCtx->width, pCodecCtx->height);
	while (!stop) {
		AVPacket packet;
		if(av_read_frame(pFormatCtx, &packet) < 0) {
			stop = true;
			break;
		}
		if (packet.stream_index == videoStream) {
			int response = avcodec_send_packet(pCodecCtx, &packet);
			if (response < 0) {
				std::cerr << "Error while sending a packet to the decoder: " << response << '\n';
				// BUGFIX: was `return`, which leaked the packet, buffer,
				// frames, sws context, renderer and texture. Set `stop`
				// and fall through so cleanup below still runs.
				stop = true;
			}
			while (response >= 0) {
				response = avcodec_receive_frame(pCodecCtx, pFrame);
				if (response == AVERROR(EAGAIN) || response == AVERROR_EOF) {
					break;
				} else if (response < 0) {
					std::cerr << "Error while receiving a frame from the decoder: " << response << '\n';
					// BUGFIX: was `return` -- same leak as above.
					stop = true;
					break;
				}
				if (response >= 0) {
					sws_scale(sws_ctx, (uint8_t const* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameRGB->data, pFrameRGB->linesize);
					std::unique_lock<std::mutex> lock(mtx);
					img = cv::Mat(pCodecCtx->height, pCodecCtx->width, CV_8UC3, pFrameRGB->data[0], pFrameRGB->linesize[0]);
					for (unsigned i = 0; i < found.size(); i++) {
						rectangle(img, found[i], cv::Scalar(255 - weights[i] * 255, 0, weights[i] * 255), 3);
					}
					lock.unlock();
					// NOTE(review): img.data is read here after the lock is
					// dropped; the detection thread could be mutating it --
					// consider holding the lock through the render.
					{
						SDL_UpdateTexture(texture, NULL, img.data, img.cols * 3);
						SDL_RenderClear(renderer);
						SDL_RenderCopy(renderer, texture, NULL, NULL);
						SDL_RenderPresent(renderer);
					}
				}
			}
		}
		av_packet_unref(&packet);
		// Drain window events so the OS doesn't mark us unresponsive.
		SDL_Event event;
		while(SDL_PollEvent(&event)) {
			switch(event.type) {
				case SDL_QUIT:
					stop = true;
				default: break;
			}
		}
		SDL_Delay(33);
	}
	av_free(buffer);
	av_frame_free(&pFrameRGB);
	av_frame_free(&pFrame);
	avcodec_close(pCodecCtx);
	avformat_close_input(&pFormatCtx);
	SDL_DestroyTexture(texture);
	SDL_DestroyRenderer(renderer);
}
int main(int argc, char* argv[]) {
if(SDL_Init(SDL_INIT_VIDEO) < 0) {
fprintf(stderr, "%s", SDL_GetError());
return 1;
}
SDL_Window* win = SDL_CreateWindow(
"Robomaster",
SDL_WINDOWPOS_UNDEFINED,
SDL_WINDOWPOS_UNDEFINED,
800, 300,
SDL_WINDOW_RESIZABLE );
if(!win) {
fprintf(stderr, "%s", SDL_GetError());
return 1;
}
std::thread captureThread(captureFrameThread, win, argv[1]);
std::thread processThread(processFrameThread);
captureThread.join();
processThread.join();
SDL_Quit();
return 0;
}
Loading…
Cancel
Save