// robomaster-sdk/src/sdl.cpp
//
// SDL2 front end for the RoboMaster SDK: keyboard/joystick teleoperation,
// FFmpeg video decoding of the robot's TCP stream, and OpenCV HOG-based
// person detection/tracking.
extern "C" {
#include "robomaster.h"
#include "roboeasy.h"
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <libavutil/imgutils.h>
}
#include <SDL2/SDL.h>
#include <opencv2/opencv.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <atomic>
#include <iostream>
#include <mutex>
#include <thread>
#include <vector>
// LED color cycle (red, green, blue); SPACE in the event loop applies the
// current entry via robot_led() and advances the index.
struct {
unsigned char r;
unsigned char g;
unsigned char b;
} const colors[] = {
{0xFF, 0x00, 0x00},
{0x00, 0xFF, 0x00},
{0x00, 0x00, 0xFF}
};
// Index of the next entry in `colors` to apply (wraps mod 3).
int color = 0;
float x = 0, y = 0, z = 0;
/// SDL timer callback: sends the current (x, y, z) drive command to the
/// robot.  Returning 75 reschedules the timer for another 75 ms.
static Uint32 drive_timer_handler(Uint32 interval, void* param) {
    Robot bot = (Robot)param;
    robot_drive(bot, x, y, z);
    return 75; // next tick in 75 ms
}
/// SDL timer callback: keeps the robot connection alive with a heartbeat
/// once per second.  Returning 1000 reschedules the timer for 1000 ms.
static Uint32 heartbeat_timer_handler(Uint32 interval, void* param) {
    Robot bot = (Robot)param;
    robot_heartbeat(bot);
    return 1000; // next tick in 1 s
}
// State shared between the capture thread, the detection thread, and main.
// `mtx` guards img/found/weights; `target` is only touched by the detection
// thread.  FIX: `stop` and `track` are read and written across threads
// outside the lock, so they are now std::atomic instead of plain bools
// (the previous code was a data race).
std::mutex mtx;                  // guards img, found, weights
cv::Mat img;                     // latest decoded BGR frame
std::vector<cv::Rect> found;     // person detections from the HOG pass
std::vector<double> weights;     // detection confidence, parallel to `found`
unsigned target;                 // index of the highest-weight detection
std::atomic<bool> stop{false};   // tells worker threads to exit
std::atomic<bool> track{false};  // when set, aim the gimbal at `target`
/**
 * Worker thread: runs HOG pedestrian detection on the latest frame.
 *
 * Roughly every 250 ms this takes `mtx`, runs the detector on a grayscale
 * copy of `img`, records the highest-confidence hit in `target`, and, when
 * tracking is enabled, steers the gimbal toward that detection.  Exits when
 * `stop` becomes true.
 *
 * @param robot handle used for robot_target() gimbal commands
 */
static void processFrameThread(Robot robot) {
    cv::HOGDescriptor hog;
    hog.setSVMDetector(cv::HOGDescriptor::getDefaultPeopleDetector());
    while (!stop) {
        {
            std::unique_lock<std::mutex> lock(mtx);
            if (!img.empty()) {
                cv::Mat gray;
                cv::cvtColor(img, gray, cv::COLOR_BGR2GRAY);
                hog.detectMultiScale(gray, found, weights, 0, cv::Size(), cv::Size(), 1.1);
                // Pick the most confident detection.
                target = 0;
                for (unsigned i = 0; i < weights.size(); i++) {
                    if (weights[i] > weights[target])
                        target = i;
                }
                // FIX: only aim when something was actually detected —
                // indexing found[target] on an empty vector was UB.
                if (track && !found.empty()) {
                    float yaw = found[target].x;
                    float pitch = found[target].y;
                    // Normalize to [-1, 1] around the frame center.
                    yaw = 2 * (yaw - img.cols / 2) / img.cols;
                    pitch = 2 * (pitch - img.rows / 2) / img.rows;
                    robot_target(robot, pitch, yaw);
                }
            }
        } // release the lock before sleeping
        SDL_Delay(250);
    }
}
/**
 * Worker thread: decodes the robot's video stream with FFmpeg, converts
 * each frame to BGR24, draws the current detection boxes, and renders into
 * the SDL window.  Exits when `stop` is set; on stream/decoder errors it
 * sets `stop` itself so the other threads wind down too.
 *
 * @param window target SDL window (renderer and texture are created here)
 * @param fname  FFmpeg URL of the stream, e.g. "tcp://192.168.2.1:40921"
 */
static void captureFrameThread(SDL_Window* window, const char* fname) {
    SDL_Delay(750); // give the robot a moment to start serving the stream
    av_register_all();
    avcodec_register_all();
    AVFormatContext* pFormatCtx = avformat_alloc_context();
    if (avformat_open_input(&pFormatCtx, fname, NULL, NULL) != 0) {
        std::cerr << "Couldn't open stream\n";
        return; // avformat_open_input frees pFormatCtx on failure
    }
    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        std::cerr << "Couldn't find stream information\n";
        avformat_close_input(&pFormatCtx); // FIX: was leaked on this path
        return;
    }
    // Locate the first video stream.
    int videoStream = -1;
    for (unsigned int i = 0; i < pFormatCtx->nb_streams; i++) {
        if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStream = i;
            break;
        }
    }
    if (videoStream == -1) {
        std::cerr << "Didn't find a video stream\n";
        avformat_close_input(&pFormatCtx); // FIX: was leaked on this path
        return;
    }
    AVCodecParameters* pCodecParameters = pFormatCtx->streams[videoStream]->codecpar;
    AVCodec* pCodec = avcodec_find_decoder(pCodecParameters->codec_id);
    if (pCodec == NULL) {
        std::cerr << "Unsupported codec\n";
        avformat_close_input(&pFormatCtx); // FIX: was leaked on this path
        return;
    }
    AVCodecContext* pCodecCtx = avcodec_alloc_context3(pCodec);
    // NOTE: a stray `pCodecCtx->pix_fmt = AV_PIX_FMT_GRAY8` was removed here;
    // it was dead code, immediately overwritten by the call below.
    avcodec_parameters_to_context(pCodecCtx, pCodecParameters);
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        std::cerr << "Could not open codec\n";
        avcodec_free_context(&pCodecCtx);  // FIX: was leaked on this path
        avformat_close_input(&pFormatCtx); // FIX: was leaked on this path
        return;
    }
    AVFrame* pFrame = av_frame_alloc();
    AVFrame* pFrameRGB = av_frame_alloc();
    // FIX: size and fill now agree on BGR24 (previously sized as RGB24 and
    // filled as BGR24 — same byte count, but an accident waiting to happen).
    int numBytes = av_image_get_buffer_size(AV_PIX_FMT_BGR24, pCodecCtx->width, pCodecCtx->height, 1);
    uint8_t* buffer = (uint8_t*)av_malloc(numBytes * sizeof(uint8_t));
    av_image_fill_arrays(pFrameRGB->data, pFrameRGB->linesize, buffer, AV_PIX_FMT_BGR24, pCodecCtx->width, pCodecCtx->height, 1);
    // Converter from the decoder's native pixel format to packed BGR24,
    // which matches both the SDL texture and OpenCV's CV_8UC3 layout.
    SwsContext* sws_ctx = sws_getContext(
        pCodecCtx->width,
        pCodecCtx->height,
        pCodecCtx->pix_fmt,
        pCodecCtx->width,
        pCodecCtx->height,
        AV_PIX_FMT_BGR24,
        SWS_BILINEAR,
        NULL,
        NULL,
        NULL);
    SDL_Renderer* renderer = SDL_CreateRenderer(window, -1, 0);
    SDL_Texture* texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_BGR24, SDL_TEXTUREACCESS_STATIC, pCodecCtx->width, pCodecCtx->height);
    while (!stop) {
        AVPacket packet;
        if (av_read_frame(pFormatCtx, &packet) < 0) {
            std::cerr << "Error while reading a frame\n";
            stop = true;
            break;
        }
        if (packet.stream_index == videoStream) {
            int response = avcodec_send_packet(pCodecCtx, &packet);
            if (response < 0) {
                std::cerr << "Error while sending a packet to the decoder: " << response << '\n';
                stop = true;
                av_packet_unref(&packet); // FIX: packet leaked on this path
                break;
            }
            // Drain every frame the decoder produced for this packet.
            while (response >= 0) {
                response = avcodec_receive_frame(pCodecCtx, pFrame);
                if (response == AVERROR(EAGAIN) || response == AVERROR_EOF) {
                    break;
                } else if (response < 0) {
                    std::cerr << "Error while receiving a frame from the decoder: " << response << '\n';
                    stop = true;
                    break;
                }
                sws_scale(sws_ctx, (uint8_t const* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameRGB->data, pFrameRGB->linesize);
                std::unique_lock<std::mutex> lock(mtx);
                // Wrap (not copy) the BGR buffer so the detector thread sees
                // this frame through the shared `img`.
                img = cv::Mat(pCodecCtx->height, pCodecCtx->width, CV_8UC3, pFrameRGB->data[0], pFrameRGB->linesize[0]);
                // Overlay detection boxes, colored by confidence.
                for (unsigned i = 0; i < found.size(); i++) {
                    rectangle(img, found[i], cv::Scalar(255 - weights[i] * 255, 0, weights[i] * 255), 3);
                }
                SDL_UpdateTexture(texture, NULL, img.data, img.cols * 3);
                SDL_RenderClear(renderer);
                SDL_RenderCopy(renderer, texture, NULL, NULL);
                SDL_RenderPresent(renderer);
            }
        }
        av_packet_unref(&packet);
    }
    sws_freeContext(sws_ctx);          // FIX: was never freed
    av_free(buffer);
    av_frame_free(&pFrameRGB);
    av_frame_free(&pFrame);
    avcodec_free_context(&pCodecCtx);  // FIX: avcodec_close() left the context allocated
    avformat_close_input(&pFormatCtx);
    SDL_DestroyTexture(texture);
    SDL_DestroyRenderer(renderer);
}
int main(int argc, char* argv[]) {
if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_JOYSTICK) < 0) {
fprintf(stderr, "%s", SDL_GetError());
return 1;
}
printf("Detected %d joysticks\n", SDL_NumJoysticks());
SDL_Joystick* joystick = NULL;
if(SDL_NumJoysticks() > 0) {
joystick = SDL_JoystickOpen(0);
}
SDL_Window* win = SDL_CreateWindow(
"Robomaster",
SDL_WINDOWPOS_UNDEFINED,
SDL_WINDOWPOS_UNDEFINED,
1280, 720,
SDL_WINDOW_RESIZABLE );
if(!win) {
fprintf(stderr, "%s", SDL_GetError());
return 1;
}
Robot robot = robot_new();
robot_init(robot);
robot_stream(robot, true);
SDL_AddTimer(75, drive_timer_handler, robot);
SDL_AddTimer(1000, heartbeat_timer_handler, robot);
int streamcount = 0;
std::thread *captureThread = nullptr;
std::thread *processThread = nullptr;
while(robot_work(robot)) {
int h = 720, w = 1280;
struct Fragment fragment;
while(robot_poll(robot, &fragment)) {
switch(fragment.type) {
case FRAGMENT_RECTANGLE:
case FRAGMENT_LINE:
case FRAGMENT_MARKER:
printf("Rect(%f,%f,%f,%f)\n",
fragment.rect.rect.x,
fragment.rect.rect.y,
fragment.rect.rect.w,
fragment.rect.rect.h );
break;
case FRAGMENT_MESSAGE:
switch(fragment.message.header.cmd) {
case GIMBAL_ACTION_PUSH_CMD:
if(fragment.message.push.gimbalaction.state != 1)
break;
printf("Gimbal action %d\n", fragment.message.push.gimbalaction.id);
printf("\tProgress %d\n", fragment.message.push.gimbalaction.progress);
printf("\tYaw %d, Pitch %d\n", fragment.message.push.gimbalaction.yaw, fragment.message.push.gimbalaction.pitch);
break;
case GIMBAL_ROTATE_CMD:
if(fragment.message.resp.gimbalrot.retcode) {
printf("ERROR: Gimbal rotate message failure\n");
break;
}
break;
case STREAM_CTRL_CMD:
if(fragment.message.resp.stream.retcode) {
printf("ERROR: Stream not enabled\n");
break;
}
printf("Stream enabled\n");
if(++streamcount >= 2) {
captureThread = new std::thread(captureFrameThread, win, "tcp://192.168.2.1:40921");
processThread = new std::thread(processFrameThread, robot);
}
break;
case VISION_DETECT_ENABLE_CMD:
if(fragment.message.resp.enablevision.retcode) {
printf("ERROR: Vision not enabled\n");
break;
}
if(fragment.message.resp.enablevision.error) {
printf("ERROR: Vision not enabled %d\n",
fragment.message.resp.enablevision.error);
break;
}
printf("Vision enabled\n");
break;
}
break;
default:
printf("Unhandled fragment type\n");
break;
}
}
std::unique_lock<std::mutex> lock(mtx);
SDL_Event event;
while(SDL_PollEvent(&event)) {
switch(event.type) {
case SDL_KEYUP:
case SDL_KEYDOWN:
switch(event.key.keysym.scancode) {
case SDL_SCANCODE_Q:
z = event.type == SDL_KEYUP ? 0 : 0.25;
break;
case SDL_SCANCODE_E:
z = event.type == SDL_KEYUP ? 0 : -0.25;
break;
case SDL_SCANCODE_LEFT:
case SDL_SCANCODE_A:
x = event.type == SDL_KEYUP ? 0 : 0.25;
break;
case SDL_SCANCODE_RIGHT:
case SDL_SCANCODE_D:
x = event.type == SDL_KEYUP ? 0 : -0.25;
break;
case SDL_SCANCODE_UP:
case SDL_SCANCODE_W:
y = event.type == SDL_KEYUP ? 0 : 0.25;
break;
case SDL_SCANCODE_DOWN:
case SDL_SCANCODE_S:
y = event.type == SDL_KEYUP ? 0 : -0.25;
break;
case SDL_SCANCODE_SPACE:
robot_led(robot, colors[color].r, colors[color].g, colors[color].b);
color = (color + 1) % 3;
break;
case SDL_SCANCODE_RETURN:
robot_blast(robot);
break;
case SDL_SCANCODE_T:
track = true;
break;
default: break;
}
break;
case SDL_MOUSEBUTTONDOWN:
{
// Get window coordinates
int target_x = 0, target_y = 0;
SDL_GetMouseState(&target_x, &target_y);
float yaw = target_x;
float pitch = target_y;
// Normalize the coordinates
yaw = 2 * (yaw - w / 2) / w;
pitch = 2 * (pitch - h / 2) / h;
robot_target(robot, pitch, yaw);
break;
}
case SDL_WINDOWEVENT:
if(event.window.event != SDL_WINDOWEVENT_CLOSE) break;
case SDL_QUIT:
robot_stop(robot);
stop = true;
default: break;
}
}
lock.unlock();
}
if(captureThread != nullptr) {
captureThread->join();
delete captureThread;
}
if(processThread != nullptr) {
processThread->join();
delete processThread;
}
SDL_JoystickClose(joystick);
SDL_Quit();
return 0;
}