Compare commits
13 Commits
Author | SHA1 | Date |
---|---|---|
PgSocks | bd6ecd9df0 | 1 year ago |
PgSocks | 7b867480d1 | 1 year ago |
PgSocks | db9cb09cc1 | 1 year ago |
PgSocks | f1f08a9a21 | 1 year ago |
PgSocks | c4e3a9ceea | 1 year ago |
PgSocks | f7bda10f7d | 1 year ago |
PgSocks | a709f5d64b | 1 year ago |
PgSocks | ed352fd71e | 1 year ago |
PgSocks | 09257471cd | 1 year ago |
PgSocks | 118209125f | 1 year ago |
PgSocks | 4d39c3811c | 1 year ago |
PgSocks | 94363b56f5 | 1 year ago |
PgSocks | 103086b1a4 | 1 year ago |
@ -1,140 +0,0 @@
|
||||
#include "roboeasy.h"
|
||||
#include "SDL2/SDL.h"
|
||||
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <stdbool.h>
|
||||
|
||||
// LED color palette cycled by the SPACE key: red, green, blue.
struct {
    unsigned char r;
    unsigned char g;
    unsigned char b;
} const colors[] = {
    {0xFF, 0x00, 0x00},
    {0x00, 0xFF, 0x00},
    {0x00, 0x00, 0xFF}
};
// Index of the next palette entry to send; advanced modulo 3 on each press.
int color = 0;
|
||||
|
||||
// Gimbal rates handed to robot_aim() — written from relative mouse motion
// and joystick axes 2/3.
float pitch = 0, yaw = 0;
// Chassis drive inputs handed to robot_drive(), roughly in [-1, 1]:
// x from A/D/arrows, y from W/S/arrows, z from Q/E (exact axis semantics
// depend on robot_drive — confirm in roboeasy).
float x = 0, y = 0, z = 0;
|
||||
/// SDL timer callback: pushes the current drive and aim state to the robot.
/// @param interval current timer period in ms (75, set at SDL_AddTimer time)
/// @param param    the Robot handle registered with SDL_AddTimer
/// @return next period in ms; returning `interval` keeps the 75 ms cadence
///         configured at SDL_AddTimer(75, ...) without duplicating the
///         constant here (the original hard-coded 75 in both places).
static Uint32 drive_timer_handler(Uint32 interval, void* param) {
    Robot robot = (Robot)param;
    robot_drive(robot, x, y, z);
    robot_aim(robot, pitch, yaw);
    return interval;
}
|
||||
/// SDL timer callback: keeps the robot link alive once per second.
/// @param interval current timer period in ms (unused)
/// @param param    the Robot handle registered with SDL_AddTimer
/// @return 1000 to re-arm the timer at a one-second cadence
static Uint32 heartbeat_timer_handler(Uint32 interval, void* param) {
    Robot robot = (Robot)param;
    robot_heartbeat(robot);
    return 1000;
}
|
||||
|
||||
/// Entry point: SDL-based teleop for the robot.
/// Keyboard (WASD/arrows, Q/E), relative mouse motion, and joystick axes are
/// mapped onto the global drive/aim state that the timer callbacks push out.
/// Returns 1 on SDL initialization/window failure, 0 on normal shutdown.
int main(int argc, char* argv[]) {
    if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_JOYSTICK) < 0) {
        fprintf(stderr, "%s", SDL_GetError());
        return 1;
    }
    printf("Detected %d joysticks\n", SDL_NumJoysticks());
    SDL_Joystick* joystick = NULL;
    if(SDL_NumJoysticks() > 0) {
        // Only the first joystick is used.
        joystick = SDL_JoystickOpen(0);
    }
    SDL_Window* win = SDL_CreateWindow(
        "Robomaster",
        SDL_WINDOWPOS_UNDEFINED,
        SDL_WINDOWPOS_UNDEFINED,
        800, 300,
        SDL_WINDOW_RESIZABLE );
    if(!win) {
        fprintf(stderr, "%s", SDL_GetError());
        return 1;
    }

    Robot robot = robot_new();
    robot_init(robot);
    robot_stream(robot, true);

    // Drive/aim state is pushed every 75 ms; heartbeat every second.
    SDL_AddTimer(75, drive_timer_handler, robot);
    SDL_AddTimer(1000, heartbeat_timer_handler, robot);

    // NOTE(review): w and h are never used after this call.
    int h, w;
    SDL_GetWindowSize(win, &w, &h);

    // Pump SDL events until the robot loop reports shutdown.
    while(robot_work(robot)) {
        SDL_Event event;
        while(SDL_PollEvent(&event)) {
            switch(event.type) {
            case SDL_KEYUP:
            case SDL_KEYDOWN:
                // A held key drives its axis at +/-0.25; release zeroes it.
                switch(event.key.keysym.scancode) {
                case SDL_SCANCODE_Q:
                    z = event.type == SDL_KEYUP ? 0 : 0.25;
                    break;
                case SDL_SCANCODE_E:
                    z = event.type == SDL_KEYUP ? 0 : -0.25;
                    break;
                case SDL_SCANCODE_LEFT:
                case SDL_SCANCODE_A:
                    x = event.type == SDL_KEYUP ? 0 : 0.25;
                    break;
                case SDL_SCANCODE_RIGHT:
                case SDL_SCANCODE_D:
                    x = event.type == SDL_KEYUP ? 0 : -0.25;
                    break;
                case SDL_SCANCODE_UP:
                case SDL_SCANCODE_W:
                    y = event.type == SDL_KEYUP ? 0 : 0.25;
                    break;
                case SDL_SCANCODE_DOWN:
                case SDL_SCANCODE_S:
                    y = event.type == SDL_KEYUP ? 0 : -0.25;
                    break;
                case SDL_SCANCODE_SPACE:
                    // Cycle the LED through the red/green/blue palette.
                    robot_led(robot, colors[color].r, colors[color].g, colors[color].b);
                    color = (color + 1) % 3;
                    break;
                case SDL_SCANCODE_RETURN:
                    robot_blast(robot);
                    break;
                default: break;
                }
                break;
            case SDL_MOUSEMOTION:
                // Relative mouse motion steers the gimbal.
                yaw = (float)event.motion.xrel;
                pitch = (float)event.motion.yrel;
                break;
            case SDL_JOYAXISMOTION:
                // Joystick axes normalized to roughly [-1, 1] (z halved).
                switch(event.jaxis.axis) {
                case 0:
                    x = (float)event.jaxis.value / 32767;
                    break;
                case 1:
                    y = (float)event.jaxis.value / 32767;
                    break;
                case 4:
                    z = (float)event.jaxis.value / 32767 / 2;
                    break;
                case 2:
                    yaw = (float)event.jaxis.value / 32767;
                    break;
                case 3:
                    pitch = (float)event.jaxis.value / 32767;
                    break;
                default:
                    // Unmapped axis: log its number for debugging.
                    printf("axis: %d\n", event.jaxis.axis);
                    break;
                }
                break;
            case SDL_WINDOWEVENT:
                // Only window-close falls through to the quit handling below.
                if(event.window.event != SDL_WINDOWEVENT_CLOSE) break;
            case SDL_QUIT:
                // robot_stop() makes robot_work() return false and end the loop.
                robot_stop(robot);
            default: break;
            }
        }
    }

    SDL_JoystickClose(joystick);
    SDL_Quit();
    return 0;
}
|
||||
|
@ -0,0 +1,417 @@
|
||||
extern "C" {
|
||||
#include "robomaster.h"
|
||||
#include "roboeasy.h"
|
||||
#include <libavcodec/avcodec.h>
|
||||
#include <libavformat/avformat.h>
|
||||
#include <libswscale/swscale.h>
|
||||
#include <libavutil/imgutils.h>
|
||||
}
|
||||
|
||||
#include <SDL2/SDL.h>
|
||||
#include <opencv2/opencv.hpp>
|
||||
#include <opencv2/dnn/dnn.hpp>
|
||||
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <math.h>
|
||||
#include <thread>
|
||||
#include <mutex>
|
||||
|
||||
// LED color palette cycled by the SPACE key: red, green, blue.
struct {
    unsigned char r;
    unsigned char g;
    unsigned char b;
} const colors[] = {
    {0xFF, 0x00, 0x00},
    {0x00, 0xFF, 0x00},
    {0x00, 0x00, 0xFF}
};
// Index of the next palette entry to send; advanced modulo 3 on each press.
int color = 0;

// Chassis drive inputs handed to robot_drive(), roughly in [-1, 1]:
// x from A/D/arrows, y from W/S/arrows, z from Q/E.
float x = 0, y = 0, z = 0;
// Target point in window pixel coordinates — written by mouse clicks and the
// tracker, read when drawing the aiming line.
int target_x = 0, target_y = 0;
// Most recent gimbal pose reported via GIMBAL_ACTION_PUSH_CMD messages.
int yaw_cur = 0, pitch_cur = 0;
|
||||
/// SDL timer callback: pushes the current chassis drive state to the robot.
/// (Gimbal aiming is handled separately by robot_target in this build.)
/// @return 75 to re-arm at the original 75 ms cadence
static Uint32 drive_timer_handler(Uint32 interval, void* param) {
    Robot robot = (Robot)param;
    robot_drive(robot, x, y, z);
    return 75;
}
|
||||
/// SDL timer callback: keeps the robot link alive once per second.
/// @return 1000 to re-arm the timer at a one-second cadence
static Uint32 heartbeat_timer_handler(Uint32 interval, void* param) {
    Robot robot = (Robot)param;
    robot_heartbeat(robot);
    return 1000;
}
|
||||
|
||||
// State shared between the capture thread (producer of img, consumer of
// found/weights) and the inference thread (the reverse), guarded by mtx.
std::mutex mtx;
cv::Mat img;                    // latest decoded BGR frame
std::vector<cv::Rect> found;    // detection boxes from the last inference pass
std::vector<double> weights;    // confidence per box (parallel to found)
unsigned target;                // index into found/weights of the best box

// NOTE(review): stop and track are read/written from several threads without
// any synchronization — consider std::atomic<bool> to avoid a data race.
bool stop = false;              // global shutdown flag
bool track = false;             // enabled by the T key; gates auto-aiming
|
||||
|
||||
// Inference thread: runs YOLOv3-tiny over the latest frame roughly every
// 250 ms, publishes detection boxes into found/weights, and (when `track`
// is on) steers the gimbal toward the highest-scoring detection.
// Model files are loaded from the working directory.
static void processFrameThread(Robot robot) {
    cv::dnn::Net net = cv::dnn::readNet("yolov3-tiny.weights", "yolov3-tiny.cfg");

    while(!stop) {
        // NOTE(review): the lock is held for the entire inference pass, which
        // stalls the capture thread while the network runs.
        std::unique_lock<std::mutex> lock(mtx);

        if(!img.empty()) {
            found.clear();
            weights.clear();
            // 416x416 letterboxed blob, scaled to [0,1], BGR->RGB swap on.
            cv::Mat blob = cv::dnn::blobFromImage(img, 1/255.0, cv::Size(416, 416), cv::Scalar(0, 0, 0), true, false);
            net.setInput(blob);
            std::vector<cv::Mat> outs;
            net.forward(outs, net.getUnconnectedOutLayersNames());
            // Each row of outs[0] is one candidate: cx, cy, w, h, objectness,
            // then class scores. Only the first output layer is scanned.
            float *data = (float*)outs[0].data;
            for(int i = 0; i < outs[0].rows; ++i) {
                // objectness * first class score; other classes are ignored.
                float final_score = data[4] * data[5];
                if(final_score >= 0.0075)
                {
                    weights.push_back(final_score);
                    // Coordinates are normalized [0,1]; convert to a pixel rect.
                    int cx = data[0] * img.cols;
                    int cy = data[1] * img.rows;
                    int width = data[2] * img.cols;
                    int height = data[3] * img.rows;
                    int left = cx - width / 2;
                    int top = cy - height / 2;
                    found.push_back(cv::Rect(left, top, width, height));
                }
                data += 85; // row stride: 4 box coords + objectness + 80 classes
            }

            // Pick the highest-scoring detection.
            target = 0;
            for(unsigned i = 0; i < weights.size(); i++) {
                if(weights[i] > weights[target])
                    target = i;
            }

            if(track && !weights.empty()) {
                // NOTE(review): this aims at the rect's top-left corner, not
                // its center — confirm whether that is intended.
                float yaw = target_x = found[target].x;
                float pitch = target_y = found[target].y;

                // Normalize the coordinates to [-1, 1] about the image center
                yaw = 2 * (yaw - img.cols / 2) / img.cols;
                pitch = 2 * (pitch - img.rows / 2) / img.rows;

                robot_target(robot, pitch, yaw);
            }
        }

        lock.unlock();

        SDL_Delay(250);
    }

}
|
||||
|
||||
// Capture thread: opens the robot's H.264 stream at `fname` with FFmpeg,
// decodes frames, converts them to BGR24, publishes each frame into the
// shared `img`, overlays the current detections, and renders to `window`.
// Runs until read/decode failure or `stop` is set.
static void captureFrameThread(SDL_Window* window, const char* fname) {

    // Initial delay before opening the stream — presumably gives the robot
    // time to start streaming; confirm.
    SDL_Delay(750);

    // NOTE(review): av_register_all/avcodec_register_all are deprecated
    // no-ops on FFmpeg >= 4.0; needed only for older versions.
    av_register_all();
    avcodec_register_all();
    AVFormatContext* pFormatCtx = avformat_alloc_context();

    if (avformat_open_input(&pFormatCtx, fname, NULL, NULL) != 0) {
        std::cerr << "Couldn't open stream\n";
        return;
    }

    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        std::cerr << "Couldn't find stream information\n";
        return;
    }

    // Locate the first video stream in the container.
    int videoStream = -1;
    for (unsigned int i = 0; i < pFormatCtx->nb_streams; i++) {
        if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStream = i;
            break;
        }
    }

    if (videoStream == -1) {
        std::cerr << "Didn't find a video stream\n";
        return;
    }

    AVCodecParameters* pCodecParameters = pFormatCtx->streams[videoStream]->codecpar;
    AVCodec* pCodec = avcodec_find_decoder(pCodecParameters->codec_id);

    if (pCodec == NULL) {
        std::cerr << "Unsupported codec\n";
        return;
    }

    AVCodecContext* pCodecCtx = avcodec_alloc_context3(pCodec);
    // NOTE(review): dead store — avcodec_parameters_to_context() on the next
    // line overwrites pix_fmt with the stream's real pixel format.
    pCodecCtx->pix_fmt = AV_PIX_FMT_GRAY8;
    avcodec_parameters_to_context(pCodecCtx, pCodecParameters);

    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        std::cerr << "Could not open codec\n";
        return;
    }

    AVFrame* pFrame = av_frame_alloc();
    AVFrame* pFrameRGB = av_frame_alloc();

    // NOTE(review): size computed for RGB24 but arrays filled as BGR24 below;
    // harmless only because both are 3 bytes/pixel.
    int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height, 1);
    uint8_t* buffer = (uint8_t*)av_malloc(numBytes * sizeof(uint8_t));
    av_image_fill_arrays(pFrameRGB->data, pFrameRGB->linesize, buffer, AV_PIX_FMT_BGR24, pCodecCtx->width, pCodecCtx->height, 1);

    // Converter from the decoder's native pixel format to packed BGR24
    // (what both cv::Mat CV_8UC3 and SDL_PIXELFORMAT_BGR24 expect).
    SwsContext* sws_ctx = sws_getContext(
        pCodecCtx->width,
        pCodecCtx->height,
        pCodecCtx->pix_fmt,
        pCodecCtx->width,
        pCodecCtx->height,
        AV_PIX_FMT_BGR24,
        SWS_BILINEAR,
        NULL,
        NULL,
        NULL);

    SDL_Renderer* renderer = SDL_CreateRenderer(window, -1, 0);
    SDL_Texture* texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_BGR24, SDL_TEXTUREACCESS_STATIC, pCodecCtx->width, pCodecCtx->height);

    while (!stop) {
        AVPacket packet;
        if(av_read_frame(pFormatCtx, &packet) < 0) {
            std::cerr << "Error while reading a frame\n";
            stop = true;
            break;
        }

        if (packet.stream_index == videoStream) {
            int response = avcodec_send_packet(pCodecCtx, &packet);
            if (response < 0) {
                std::cerr << "Error while sending a packet to the decoder: " << response << '\n';
                // NOTE(review): this break exits the outer loop and skips the
                // av_packet_unref below, leaking this one packet.
                stop = true;
                break;
            }

            // Drain every frame produced by this packet.
            while (response >= 0) {
                response = avcodec_receive_frame(pCodecCtx, pFrame);
                if (response == AVERROR(EAGAIN) || response == AVERROR_EOF) {
                    break;
                } else if (response < 0) {
                    std::cerr << "Error while receiving a frame from the decoder: " << response << '\n';
                    stop = true;
                    break;
                }

                if (response >= 0) {
                    sws_scale(sws_ctx, (uint8_t const* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameRGB->data, pFrameRGB->linesize);

                    std::unique_lock<std::mutex> lock(mtx);

                    // Wrap the BGR buffer as a cv::Mat (no copy) and overlay
                    // the current detections; color encodes confidence.
                    img = cv::Mat(pCodecCtx->height, pCodecCtx->width, CV_8UC3, pFrameRGB->data[0], pFrameRGB->linesize[0]);
                    for (unsigned i = 0; i < found.size(); i++) {
                        rectangle(img, found[i], cv::Scalar(255 - weights[i] * 255, 0, weights[i] * 255), 3);
                    }


                    SDL_UpdateTexture(texture, NULL, img.data, img.cols * 3);
                    // NOTE(review): this line is drawn before SDL_RenderClear,
                    // so it is wiped and never visible on screen.
                    SDL_RenderDrawLine(renderer, 1280 / 2, 720 / 2, target_x, target_y);

                    SDL_RenderClear(renderer);
                    SDL_RenderCopy(renderer, texture, NULL, NULL);
                    SDL_RenderPresent(renderer);

                    lock.unlock();
                }
            }
        }
        av_packet_unref(&packet);

    }

    av_free(buffer);
    av_frame_free(&pFrameRGB);
    av_frame_free(&pFrame);
    avcodec_close(pCodecCtx);
    avformat_close_input(&pFormatCtx);

    SDL_DestroyTexture(texture);
    SDL_DestroyRenderer(renderer);
}
|
||||
|
||||
/// Entry point: teleop plus vision. Drives the robot from keyboard, mouse and
/// joystick input, consumes protocol fragments from the robot, and once the
/// stream is acknowledged, spawns the FFmpeg capture thread and the YOLO
/// inference thread. Returns 1 on SDL failure, 0 on normal shutdown.
int main(int argc, char* argv[]) {
    if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_JOYSTICK) < 0) {
        fprintf(stderr, "%s", SDL_GetError());
        return 1;
    }
    printf("Detected %d joysticks\n", SDL_NumJoysticks());
    SDL_Joystick* joystick = NULL;
    if(SDL_NumJoysticks() > 0) {
        // Only the first joystick is used.
        joystick = SDL_JoystickOpen(0);
    }
    SDL_Window* win = SDL_CreateWindow(
        "Robomaster",
        SDL_WINDOWPOS_UNDEFINED,
        SDL_WINDOWPOS_UNDEFINED,
        1280, 720,
        SDL_WINDOW_RESIZABLE );
    if(!win) {
        fprintf(stderr, "%s", SDL_GetError());
        return 1;
    }

    Robot robot = robot_new();
    robot_init(robot);
    robot_stream(robot, true);

    // Drive state pushed every 75 ms; heartbeat every second.
    SDL_AddTimer(75, drive_timer_handler, robot);
    SDL_AddTimer(1000, heartbeat_timer_handler, robot);

    // Video threads start only after the second STREAM_CTRL_CMD ack.
    int streamcount = 0;
    std::thread *captureThread = nullptr;
    std::thread *processThread = nullptr;

    // NOTE(review): these look swapped — target_x gets height/2 and target_y
    // width/2 for a 1280x720 window; confirm intended.
    target_x = 720 / 2;
    target_y = 1280 / 2;

    while(robot_work(robot)) {

        // NOTE(review): hard-coded despite the resizable window; a resize
        // skews the click-to-gimbal normalization below.
        int h = 720, w = 1280;

        // Drain protocol fragments from the robot.
        struct Fragment fragment;
        while(robot_poll(robot, &fragment)) {
            switch(fragment.type) {
            case FRAGMENT_RECTANGLE:
            case FRAGMENT_LINE:
            case FRAGMENT_MARKER:
                printf("Rect(%f,%f,%f,%f)\n",
                    fragment.rect.rect.x,
                    fragment.rect.rect.y,
                    fragment.rect.rect.w,
                    fragment.rect.rect.h );
                break;
            case FRAGMENT_MESSAGE:
                switch(fragment.message.header.cmd) {
                case GIMBAL_ACTION_PUSH_CMD:
                    // Only state == 1 pushes update the cached gimbal pose —
                    // presumably "action in progress"; confirm protocol docs.
                    if(fragment.message.push.gimbalaction.state != 1)
                        break;
                    yaw_cur = fragment.message.push.gimbalaction.yaw;
                    pitch_cur = fragment.message.push.gimbalaction.pitch;
                    printf("Gimbal action %d\n", fragment.message.push.gimbalaction.id);
                    printf("\tProgress %d\n", fragment.message.push.gimbalaction.progress);
                    printf("\tYaw %d, Pitch %d\n", yaw_cur, pitch_cur);
                    break;
                case GIMBAL_ROTATE_CMD:
                    if(fragment.message.resp.gimbalrot.retcode) {
                        printf("ERROR: Gimbal rotate message failure\n");
                        break;
                    }
                    break;
                case STREAM_CTRL_CMD:
                    if(fragment.message.resp.stream.retcode) {
                        printf("ERROR: Stream not enabled\n");
                        break;
                    }
                    printf("Stream enabled\n");
                    // Second ack: start the capture and inference threads.
                    // Stream URL is the robot's fixed AP-mode address.
                    if(++streamcount >= 2) {
                        captureThread = new std::thread(captureFrameThread, win, "tcp://192.168.2.1:40921");
                        processThread = new std::thread(processFrameThread, robot);
                    }
                    break;
                case VISION_DETECT_ENABLE_CMD:
                    if(fragment.message.resp.enablevision.retcode) {
                        printf("ERROR: Vision not enabled\n");
                        break;
                    }
                    if(fragment.message.resp.enablevision.error) {
                        printf("ERROR: Vision not enabled %d\n",
                            fragment.message.resp.enablevision.error);
                        break;
                    }
                    printf("Vision enabled\n");
                    break;
                }
                break;
            default:
                printf("Unhandled fragment type\n");
                break;
            }
        }

        SDL_Event event;
        while(SDL_PollEvent(&event)) {
            switch(event.type) {
            case SDL_KEYUP:
            case SDL_KEYDOWN:
                // A held key drives its axis at +/-0.25; release zeroes it.
                switch(event.key.keysym.scancode) {
                case SDL_SCANCODE_Q:
                    z = event.type == SDL_KEYUP ? 0 : 0.25;
                    break;
                case SDL_SCANCODE_E:
                    z = event.type == SDL_KEYUP ? 0 : -0.25;
                    break;
                case SDL_SCANCODE_LEFT:
                case SDL_SCANCODE_A:
                    x = event.type == SDL_KEYUP ? 0 : 0.25;
                    break;
                case SDL_SCANCODE_RIGHT:
                case SDL_SCANCODE_D:
                    x = event.type == SDL_KEYUP ? 0 : -0.25;
                    break;
                case SDL_SCANCODE_UP:
                case SDL_SCANCODE_W:
                    y = event.type == SDL_KEYUP ? 0 : 0.25;
                    break;
                case SDL_SCANCODE_DOWN:
                case SDL_SCANCODE_S:
                    y = event.type == SDL_KEYUP ? 0 : -0.25;
                    break;
                case SDL_SCANCODE_SPACE:
                    // Cycle the LED through the red/green/blue palette.
                    robot_led(robot, colors[color].r, colors[color].g, colors[color].b);
                    color = (color + 1) % 3;
                    break;
                case SDL_SCANCODE_RETURN:
                    robot_blast(robot);
                    break;
                case SDL_SCANCODE_T:
                    // Enable auto-tracking in processFrameThread.
                    track = true;
                    break;
                default: break;
                }
                break;
            case SDL_MOUSEBUTTONDOWN:
            {
                // Get window coordinates
                SDL_GetMouseState(&target_x, &target_y);

                float yaw = target_x;
                float pitch = target_y;

                // Normalize the coordinates to [-1, 1] about window center
                yaw = 2 * (yaw - w / 2) / w;
                pitch = 2 * (pitch - h / 2) / h;

                robot_target(robot, pitch, yaw);

                break;
            }
            case SDL_WINDOWEVENT:
                // Only window-close falls through to the quit handling below.
                if(event.window.event != SDL_WINDOWEVENT_CLOSE) break;
            case SDL_QUIT:
                robot_stop(robot);
                stop = true;
            default: break;
            }
        }
    }

    // Join and free the video threads if they were started.
    if(captureThread != nullptr) {
        captureThread->join();
        delete captureThread;
    }
    if(processThread != nullptr) {
        processThread->join();
        delete processThread;
    }
    SDL_JoystickClose(joystick);
    SDL_Quit();
    return 0;
}
|
||||
|
@ -0,0 +1,233 @@
|
||||
#include <iostream>
|
||||
#include <thread>
|
||||
#include <mutex>
|
||||
#include <cmath>
|
||||
#include <opencv2/opencv.hpp>
|
||||
#include <opencv2/dnn/dnn.hpp>
|
||||
#include <SDL2/SDL.h>
|
||||
|
||||
extern "C" {
|
||||
#include <libavcodec/avcodec.h>
|
||||
#include <libavformat/avformat.h>
|
||||
#include <libswscale/swscale.h>
|
||||
#include <libavutil/imgutils.h>
|
||||
}
|
||||
|
||||
// State shared between the capture thread (producer of img, consumer of
// found/weights) and the inference thread (the reverse), guarded by mtx.
std::mutex mtx;
cv::Mat img;                    // latest decoded BGR frame
std::vector<cv::Rect> found;    // detection boxes from the last inference pass
std::vector<double> weights;    // confidence per box (parallel to found)
unsigned target;                // NOTE(review): never read or written in this file

// NOTE(review): read/written from both threads without synchronization —
// consider std::atomic<bool> to avoid a data race.
bool stop = false;              // global shutdown flag
|
||||
|
||||
// Inference thread: runs YOLOv3-tiny over the latest frame roughly every
// 250 ms and publishes detection boxes/scores into found/weights for the
// capture thread to overlay. Model files load from the working directory.
static void processFrameThread() {
    cv::dnn::Net net = cv::dnn::readNet("yolov3-tiny.weights", "yolov3-tiny.cfg");

    while(!stop) {
        // NOTE(review): the lock is held for the entire inference pass, which
        // stalls the capture thread while the network runs.
        std::unique_lock<std::mutex> lock(mtx);

        if(!img.empty()) {
            found.clear();
            weights.clear();
            // 416x416 blob, scaled to [0,1], BGR->RGB swap on.
            cv::Mat blob = cv::dnn::blobFromImage(img, 1/255.0, cv::Size(416, 416), cv::Scalar(0, 0, 0), true, false);
            net.setInput(blob);
            std::vector<cv::Mat> outs;
            net.forward(outs, net.getUnconnectedOutLayersNames());
            // Each row of outs[0] is one candidate: cx, cy, w, h, objectness,
            // then class scores. Only the first output layer is scanned.
            float *data = (float*)outs[0].data;
            for(int i = 0; i < outs[0].rows; ++i) {
                // objectness * first class score; other classes are ignored.
                float final_score = data[4] * data[5];
                if(final_score >= 0.0075)
                {
                    weights.push_back(final_score);
                    // Coordinates are normalized [0,1]; convert to a pixel rect.
                    int cx = data[0] * img.cols;
                    int cy = data[1] * img.rows;
                    int width = data[2] * img.cols;
                    int height = data[3] * img.rows;
                    int left = cx - width / 2;
                    int top = cy - height / 2;
                    found.push_back(cv::Rect(left, top, width, height));
                }
                data += 85; // row stride: 4 box coords + objectness + 80 classes
            }
        }

        lock.unlock();

        SDL_Delay(250);
    }

}
|
||||
|
||||
// Capture/display thread: opens the video at `fname` with FFmpeg, decodes
// frames, converts them to BGR24, publishes each frame into the shared
// `img`, overlays the current detections, and renders to `window` at ~30 fps.
// Runs until EOF, a decode error, or `stop` is set (e.g. SDL_QUIT).
//
// Fix vs. the previous version: decoder errors used to `return` directly,
// which (a) leaked the codec context, frames, buffer, sws context, renderer
// and texture, and (b) never set `stop`, so processFrameThread spun forever
// and main() hung on join. Errors now set `stop` and break to the shared
// cleanup at the bottom.
static void captureFrameThread(SDL_Window* window, const char* fname) {

    // NOTE(review): needed only on FFmpeg < 4.0; deprecated no-ops since.
    av_register_all();
    avcodec_register_all();
    AVFormatContext* pFormatCtx = avformat_alloc_context();

    if (avformat_open_input(&pFormatCtx, fname, NULL, NULL) != 0) {
        std::cerr << "Couldn't open stream\n";
        return;
    }

    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        std::cerr << "Couldn't find stream information\n";
        return;
    }

    // Locate the first video stream in the container.
    int videoStream = -1;
    for (unsigned int i = 0; i < pFormatCtx->nb_streams; i++) {
        if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStream = i;
            break;
        }
    }

    if (videoStream == -1) {
        std::cerr << "Didn't find a video stream\n";
        return;
    }

    AVCodecParameters* pCodecParameters = pFormatCtx->streams[videoStream]->codecpar;
    AVCodec* pCodec = avcodec_find_decoder(pCodecParameters->codec_id);

    if (pCodec == NULL) {
        std::cerr << "Unsupported codec\n";
        return;
    }

    AVCodecContext* pCodecCtx = avcodec_alloc_context3(pCodec);
    // (A dead store of pix_fmt was removed here: parameters_to_context
    // overwrites the field with the stream's real pixel format.)
    avcodec_parameters_to_context(pCodecCtx, pCodecParameters);

    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        std::cerr << "Could not open codec\n";
        return;
    }

    AVFrame* pFrame = av_frame_alloc();
    AVFrame* pFrameRGB = av_frame_alloc();

    // Buffer sized for BGR24 to match the fill below (same 3 bytes/pixel as
    // the RGB24 the old code asked for, but now consistent).
    int numBytes = av_image_get_buffer_size(AV_PIX_FMT_BGR24, pCodecCtx->width, pCodecCtx->height, 1);
    uint8_t* buffer = (uint8_t*)av_malloc(numBytes * sizeof(uint8_t));
    av_image_fill_arrays(pFrameRGB->data, pFrameRGB->linesize, buffer, AV_PIX_FMT_BGR24, pCodecCtx->width, pCodecCtx->height, 1);

    // Converter from the decoder's native pixel format to packed BGR24
    // (what both cv::Mat CV_8UC3 and SDL_PIXELFORMAT_BGR24 expect).
    SwsContext* sws_ctx = sws_getContext(
        pCodecCtx->width,
        pCodecCtx->height,
        pCodecCtx->pix_fmt,
        pCodecCtx->width,
        pCodecCtx->height,
        AV_PIX_FMT_BGR24,
        SWS_BILINEAR,
        NULL,
        NULL,
        NULL);

    //AVRational time_base = pFormatCtx->streams[videoStream]->time_base;
    //AVRational frame_rate = av_guess_frame_rate(pFormatCtx, pFormatCtx->streams[videoStream], NULL);
    //uint32_t delay = (av_rescale_q(1, av_inv_q(frame_rate), time_base) / AV_TIME_BASE) * 1000;
    //printf("delay: %u\n", delay);

    // Match the window to the stream's native resolution.
    SDL_SetWindowSize(window, pCodecCtx->width, pCodecCtx->height);
    SDL_Renderer* renderer = SDL_CreateRenderer(window, -1, 0);
    SDL_Texture* texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_BGR24, SDL_TEXTUREACCESS_STATIC, pCodecCtx->width, pCodecCtx->height);

    while (!stop) {
        AVPacket packet;
        if(av_read_frame(pFormatCtx, &packet) < 0) {
            // EOF or read error: shut everything down.
            stop = true;
            break;
        }

        if (packet.stream_index == videoStream) {
            int response = avcodec_send_packet(pCodecCtx, &packet);
            if (response < 0) {
                std::cerr << "Error while sending a packet to the decoder: " << response << '\n';
                // Release this packet, then fall through to cleanup.
                av_packet_unref(&packet);
                stop = true;
                break;
            }

            // Drain every frame produced by this packet.
            while (response >= 0) {
                response = avcodec_receive_frame(pCodecCtx, pFrame);
                if (response == AVERROR(EAGAIN) || response == AVERROR_EOF) {
                    break;
                } else if (response < 0) {
                    std::cerr << "Error while receiving a frame from the decoder: " << response << '\n';
                    // Leave the inner loop; the outer loop sees stop and
                    // exits through the shared cleanup below.
                    stop = true;
                    break;
                }

                if (response >= 0) {
                    sws_scale(sws_ctx, (uint8_t const* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameRGB->data, pFrameRGB->linesize);

                    std::unique_lock<std::mutex> lock(mtx);

                    // Wrap the BGR buffer as a cv::Mat (no copy) and overlay
                    // the current detections; color encodes confidence.
                    img = cv::Mat(pCodecCtx->height, pCodecCtx->width, CV_8UC3, pFrameRGB->data[0], pFrameRGB->linesize[0]);

                    for (unsigned i = 0; i < found.size(); i++) {
                        rectangle(img, found[i], cv::Scalar(255 - weights[i] * 255, 0, weights[i] * 255), 3);
                    }

                    lock.unlock();

                    {
                        SDL_UpdateTexture(texture, NULL, img.data, img.cols * 3);
                        SDL_RenderClear(renderer);
                        SDL_RenderCopy(renderer, texture, NULL, NULL);
                        SDL_RenderPresent(renderer);
                    }
                }
            }
        }
        av_packet_unref(&packet);

        // Let the window be closed while we decode.
        SDL_Event event;
        while(SDL_PollEvent(&event)) {
            switch(event.type) {
            case SDL_QUIT:
                stop = true;
            default: break;
            }
        }

        SDL_Delay(33);

    }

    av_free(buffer);
    av_frame_free(&pFrameRGB);
    av_frame_free(&pFrame);
    avcodec_close(pCodecCtx);
    avformat_close_input(&pFormatCtx);

    SDL_DestroyTexture(texture);
    SDL_DestroyRenderer(renderer);
}
|
||||
|
||||
int main(int argc, char* argv[]) {
|
||||
if(SDL_Init(SDL_INIT_VIDEO) < 0) {
|
||||
fprintf(stderr, "%s", SDL_GetError());
|
||||
return 1;
|
||||
}
|
||||
|
||||
SDL_Window* win = SDL_CreateWindow(
|
||||
"Robomaster",
|
||||
SDL_WINDOWPOS_UNDEFINED,
|
||||
SDL_WINDOWPOS_UNDEFINED,
|
||||
800, 300,
|
||||
SDL_WINDOW_RESIZABLE );
|
||||
if(!win) {
|
||||
fprintf(stderr, "%s", SDL_GetError());
|
||||
return 1;
|
||||
}
|
||||
|
||||
std::thread captureThread(captureFrameThread, win, argv[1]);
|
||||
std::thread processThread(processFrameThread);
|
||||
|
||||
captureThread.join();
|
||||
processThread.join();
|
||||
|
||||
SDL_Quit();
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
Loading…
Reference in New Issue