diff --git a/src/sdl.c b/src/sdl.cpp
similarity index 53%
rename from src/sdl.c
rename to src/sdl.cpp
index cedb339..d75e223 100644
--- a/src/sdl.c
+++ b/src/sdl.cpp
@@ -1,10 +1,20 @@
+extern "C" {
 #include "robomaster.h"
 #include "roboeasy.h"
 
+#include <libavcodec/avcodec.h>
+#include <libavformat/avformat.h>
+#include <libswscale/swscale.h>
+#include <libavutil/imgutils.h>
+}
+
 #include <SDL2/SDL.h>
+#include <opencv2/opencv.hpp>
+#include <iostream>
 #include <stdio.h>
 #include <stdlib.h>
-#include <stdbool.h>
+#include <mutex>
+#include <thread>
 
 struct {
     unsigned char r;
@@ -29,6 +39,160 @@ static Uint32 heartbeat_timer_handler(Uint32 interval, void* param) {
     return 1000;
 }
 
+std::mutex mtx;
+cv::Mat img;
+std::vector<cv::Rect> found;
+bool stop = false;
+
+static void processFrameThread() {
+    cv::HOGDescriptor hog;
+    hog.setSVMDetector(cv::HOGDescriptor::getDefaultPeopleDetector());
+
+    while(!stop) {
+        std::unique_lock<std::mutex> lock(mtx);
+
+        if(!img.empty()) {
+            hog.detectMultiScale(img, found);
+        }
+
+        lock.unlock();
+
+        SDL_Delay(250);
+    }
+
+}
+
+static void captureFrameThread(SDL_Window* window, const char* fname) {
+
+    SDL_Delay(750);
+
+    av_register_all();
+    avcodec_register_all();
+    AVFormatContext* pFormatCtx = avformat_alloc_context();
+
+    if (avformat_open_input(&pFormatCtx, fname, NULL, NULL) != 0) {
+        std::cerr << "Couldn't open stream\n";
+        return;
+    }
+
+    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
+        std::cerr << "Couldn't find stream information\n";
+        return;
+    }
+
+    int videoStream = -1;
+    for (unsigned int i = 0; i < pFormatCtx->nb_streams; i++) {
+        if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
+            videoStream = i;
+            break;
+        }
+    }
+
+    if (videoStream == -1) {
+        std::cerr << "Didn't find a video stream\n";
+        return;
+    }
+
+    AVCodecParameters* pCodecParameters = pFormatCtx->streams[videoStream]->codecpar;
+    AVCodec* pCodec = avcodec_find_decoder(pCodecParameters->codec_id);
+
+    if (pCodec == NULL) {
+        std::cerr << "Unsupported codec\n";
+        return;
+    }
+
+    AVCodecContext* pCodecCtx = avcodec_alloc_context3(pCodec);
+    avcodec_parameters_to_context(pCodecCtx, pCodecParameters);
+
+    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
+        std::cerr << "Could not open codec\n";
+        return;
+    }
+
+    AVFrame* pFrame = av_frame_alloc();
+    AVFrame* pFrameRGB = av_frame_alloc();
+
+    int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height, 1);
+    uint8_t* buffer = (uint8_t*)av_malloc(numBytes * sizeof(uint8_t));
+    av_image_fill_arrays(pFrameRGB->data, pFrameRGB->linesize, buffer, AV_PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height, 1);
+
+    SwsContext* sws_ctx = sws_getContext(
+        pCodecCtx->width,
+        pCodecCtx->height,
+        pCodecCtx->pix_fmt,
+        pCodecCtx->width,
+        pCodecCtx->height,
+        AV_PIX_FMT_RGB24,
+        SWS_BILINEAR,
+        NULL,
+        NULL,
+        NULL);
+
+    SDL_SetWindowSize(window, pCodecCtx->width, pCodecCtx->height);
+    SDL_Renderer* renderer = SDL_CreateRenderer(window, -1, 0);
+    SDL_Texture* texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_BGR24, SDL_TEXTUREACCESS_STATIC, pCodecCtx->width, pCodecCtx->height);
+
+    while (!stop) {
+        AVPacket packet;
+        if(av_read_frame(pFormatCtx, &packet) < 0) {
+            stop = true;
+            break;
+        }
+
+        if (packet.stream_index == videoStream) {
+            int response = avcodec_send_packet(pCodecCtx, &packet);
+            if (response < 0) {
+                std::cerr << "Error while sending a packet to the decoder: " << response << '\n';
+                return;
+            }
+
+            while (response >= 0) {
+                response = avcodec_receive_frame(pCodecCtx, pFrame);
+                if (response == AVERROR(EAGAIN) || response == AVERROR_EOF) {
+                    break;
+                } else if (response < 0) {
+                    std::cerr << "Error while receiving a frame from the decoder: " << response << '\n';
+                    return;
+                }
+
+                if (response >= 0) {
+                    sws_scale(sws_ctx, (uint8_t const* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameRGB->data, pFrameRGB->linesize);
+
+                    std::unique_lock<std::mutex> lock(mtx);
+
+                    img = cv::Mat(pCodecCtx->height, pCodecCtx->width, CV_8UC3, pFrameRGB->data[0], pFrameRGB->linesize[0]);
+
+                    for (const auto& rect : found) {
+                        rectangle(img, rect, cv::Scalar(0, 255, 0), 3);
+                    }
+
+                    lock.unlock();
+
+                    {
+                        SDL_UpdateTexture(texture, NULL, img.data, img.cols * 3);
+                        SDL_RenderClear(renderer);
+                        SDL_RenderCopy(renderer, texture, NULL, NULL);
+                        SDL_RenderPresent(renderer);
+                    }
+                }
+            }
+        }
+        av_packet_unref(&packet);
+
+        SDL_Delay(33);
+
+    }
+
+    av_free(buffer);
+    av_frame_free(&pFrameRGB);
+    av_frame_free(&pFrame);
+    avcodec_close(pCodecCtx);
+    avformat_close_input(&pFormatCtx);
+
+    SDL_DestroyTexture(texture);
+    SDL_DestroyRenderer(renderer);
+}
+
 int main(int argc, char* argv[]) {
     if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_JOYSTICK) < 0) {
         fprintf(stderr, "%s", SDL_GetError());
@@ -57,11 +221,15 @@ int main(int argc, char* argv[]) {
 
     SDL_AddTimer(75, drive_timer_handler, robot);
     SDL_AddTimer(1000, heartbeat_timer_handler, robot);
 
-    int h, w;
-    SDL_GetWindowSize(win, &w, &h);
+    int streamcount = 0;
+    std::thread *captureThread = nullptr;
+    std::thread *processThread = nullptr;
 
     while(robot_work(robot)) {
+        int h, w;
+        SDL_GetWindowSize(win, &w, &h);
+
         struct Fragment fragment;
         while(robot_poll(robot, &fragment)) {
             switch(fragment.type) {
@@ -82,6 +250,10 @@ int main(int argc, char* argv[]) {
                    break;
                }
                printf("Stream enabled\n");
+               if(++streamcount >= 2) {
+                   captureThread = new std::thread(captureFrameThread, win, "tcp://192.168.2.1:40921");
+                   processThread = new std::thread(processFrameThread);
+               }
                break;
            case VISION_DETECT_ENABLE_CMD:
                if(fragment.message.resp.enablevision.retcode) {
@@ -171,6 +343,7 @@ int main(int argc, char* argv[]) {
                    if(event.window.event != SDL_WINDOWEVENT_CLOSE) break;
            case SDL_QUIT:
                robot_stop(robot);
+               stop = true;
            default: break;
            }
            yaw = (int)(yaw * 0.89);
@@ -178,6 +351,14 @@ int main(int argc, char* argv[]) {
        }
    }
 
+    if(captureThread != nullptr) {
+        captureThread->join();
+        delete captureThread;
+    }
+    if(processThread != nullptr) {
+        processThread->join();
+        delete processThread;
+    }
    SDL_JoystickClose(joystick);
    SDL_Quit();
    return 0;
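
Note (reviewer sketch, not part of the patch): the threading model introduced here is a mutex-guarded handoff between two loops. The decode loop overwrites the shared cv::Mat roughly every 33 ms and draws the last known detections into it, while processFrameThread samples the same Mat every 250 ms and publishes fresh cv::Rect hits under the same lock. A minimal standalone illustration of that pattern, using a synthetic frame in place of the decoded tcp:// stream and std::atomic<bool> in place of the patch's plain bool stop, might look like this:

// Standalone sketch (assumptions: OpenCV available; synthetic frame source
// stands in for the decoded robot video stream used by the patch).
#include <atomic>
#include <chrono>
#include <mutex>
#include <thread>
#include <vector>
#include <opencv2/opencv.hpp>

std::mutex frame_mtx;
cv::Mat shared_frame;               // latest frame, 8-bit 3-channel
std::vector<cv::Rect> latest_hits;  // latest HOG detections
std::atomic<bool> running{true};

void producer() {                   // stands in for the decode loop
    cv::Mat synthetic(360, 640, CV_8UC3, cv::Scalar(40, 40, 40));
    while (running) {
        {
            std::lock_guard<std::mutex> lock(frame_mtx);
            shared_frame = synthetic.clone();      // overwrite shared frame
            for (const auto& r : latest_hits)      // draw last known boxes
                cv::rectangle(shared_frame, r, cv::Scalar(0, 255, 0), 3);
        }
        std::this_thread::sleep_for(std::chrono::milliseconds(33));
    }
}

void detector() {                   // stands in for processFrameThread
    cv::HOGDescriptor hog;
    hog.setSVMDetector(cv::HOGDescriptor::getDefaultPeopleDetector());
    while (running) {
        {
            std::lock_guard<std::mutex> lock(frame_mtx);
            if (!shared_frame.empty())
                hog.detectMultiScale(shared_frame, latest_hits);
        }
        std::this_thread::sleep_for(std::chrono::milliseconds(250));
    }
}

int main() {
    std::thread p(producer), d(detector);
    std::this_thread::sleep_for(std::chrono::seconds(2));
    running = false;                // signal both loops to exit
    p.join();
    d.join();
    return 0;
}

Because the detection pass runs only four times a second while the frame is replaced about thirty times a second, the boxes drawn by the capture loop always lag the video slightly; that is inherent to the pattern, not specific to this sketch.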