// GetVideoThread.cpp — FFmpeg-based screen/camera capture thread.
  1. /**
  2. * 叶海辉
  3. * QQ群121376426
  4. * http://blog.yundiantech.com/
  5. */
  6. #include "AppConfig.h"
  7. #include "GetVideoThread.h"
  8. //'1' Use Dshow
  9. //'0' Use VFW
  10. #define USE_DSHOW 0
  11. //Show Dshow Device
  12. void show_dshow_device()
  13. {
  14. AVFormatContext *pFormatCtx = avformat_alloc_context();
  15. AVDictionary* options = nullptr;
  16. av_dict_set(&options,"list_devices","true",0);
  17. AVInputFormat *iformat = av_find_input_format("dshow");
  18. printf("========Device Info=============\n");
  19. avformat_open_input(&pFormatCtx,"video=dummy",iformat,&options);
  20. printf("================================\n");
  21. }
  22. //Show Dshow Device Option
  23. void show_dshow_device_option()
  24. {
  25. AVFormatContext *pFormatCtx = avformat_alloc_context();
  26. AVDictionary* options = nullptr;
  27. av_dict_set(&options,"list_options","true",0);
  28. AVInputFormat *iformat = av_find_input_format("dshow");
  29. printf("========Device Option Info======\n");
  30. avformat_open_input(&pFormatCtx,"video=Integrated Camera",iformat,&options);
  31. printf("================================\n");
  32. }
  33. //Show VFW Device
  34. void show_vfw_device()
  35. {
  36. AVFormatContext *pFormatCtx = avformat_alloc_context();
  37. AVInputFormat *iformat = av_find_input_format("vfwcap");
  38. printf("========VFW Device Info======\n");
  39. avformat_open_input(&pFormatCtx,"list",iformat,nullptr);
  40. printf("=============================\n");
  41. }
  42. //Show AVFoundation Device
  43. void show_avfoundation_device()
  44. {
  45. AVFormatContext *pFormatCtx = avformat_alloc_context();
  46. AVDictionary* options = nullptr;
  47. av_dict_set(&options,"list_devices","true",0);
  48. AVInputFormat *iformat = av_find_input_format("avfoundation");
  49. printf("==AVFoundation Device Info===\n");
  50. avformat_open_input(&pFormatCtx, "",iformat, &options);
  51. printf("=============================\n");
  52. }
  53. GetVideoThread::GetVideoThread()
  54. {
  55. m_isRun = false;
  56. pFormatCtx = NULL;
  57. out_buffer = NULL;
  58. pFrame = NULL;
  59. pFrameYUV = NULL;
  60. pCodecCtx = NULL;
  61. m_pause = false;
  62. mVideoEncoder = new VideoEncoder();
  63. }
  64. GetVideoThread::~GetVideoThread()
  65. {
  66. }
// Forward the requested quality level to the H.264 encoder.
// NOTE(review): mVideoEncoder is created in the constructor, so it is
// assumed to be non-null whenever this is called.
void GetVideoThread::setQuantity(int value)
{
    mVideoEncoder->setQuantity(value);
}
/// Open the platform capture device (dshow screen capture on Windows,
/// x11grab on Linux, avfoundation on macOS), locate its video stream,
/// open a decoder for it, and allocate the YUV420P destination frame and
/// pixel buffer the capture loop converts into.
/// @return SUCCEED on success, VideoOpenFailed if the device or stream
///         cannot be opened, VideoDecoderOpenFailed if the decoder fails.
ErroCode GetVideoThread::init()
{
    AVCodec *pCodec = nullptr;
    pFormatCtx = avformat_alloc_context();
#if defined(WIN32)
    //Show Dshow Device
    show_dshow_device();
    //Show Device Options
    show_dshow_device_option();
    //Show VFW Options
    show_vfw_device();
    AVInputFormat *ifmt = av_find_input_format("dshow"); // use dshow
    // "screen-capture-recorder" is a third-party DirectShow screen-capture
    // filter that must be installed on the machine.
    if(avformat_open_input(&pFormatCtx, "video=screen-capture-recorder", ifmt, nullptr)!=0)
    {
        fprintf(stderr, "Couldn't open input stream video.(无法打开输入流)\n");
        return VideoOpenFailed;
    }
#elif defined __linux
    //Linux
    // AVInputFormat *ifmt=av_find_input_format("video4linux2");
    // if(avformat_open_input(&pFormatCtx, "/dev/video0", ifmt, NULL)!=0)
    // {
    // fprintf(stderr, "Couldn't open input stream.\n");
    // return -1;
    // }
    AVDictionary* options = NULL;
    // av_dict_set(&options,"list_devices","true", 0);
    /* set frame per second */
    // av_dict_set( &options,"framerate","30", 0);
    av_dict_set( &options,"show_region","1", 0); // draw a border around the grabbed region
    // av_dict_set( &options,"video_size","1240x480", 0);
    // av_dict_set( &options, "preset", "medium", 0 );
    /*
    X11 video input device.
    To enable this input device during configuration you need libxcb installed on your system. It will be automatically detected during configuration.
    This device allows one to capture a region of an X11 display.
    refer : https://www.ffmpeg.org/ffmpeg-devices.html#x11grab
    */
    AVInputFormat *ifmt = av_find_input_format("x11grab");
    // Grab display :0.0 starting at pixel offset +10,+250.
    if(avformat_open_input(&pFormatCtx, ":0.0+10,250", ifmt, &options) != 0)
    // if(avformat_open_input(&pFormatCtx, ":0.0", ifmt, &options) != 0)
    {
        fprintf(stderr, "\nerror in opening input device\n");
        return VideoOpenFailed;
    }
#else
    show_avfoundation_device();
    //Mac
    AVInputFormat *ifmt=av_find_input_format("avfoundation");
    //Avfoundation
    //[video]:[audio] — "0" selects the first video device, no audio.
    if(avformat_open_input(&pFormatCtx,"0",ifmt,NULL)!=0)
    {
        fprintf(stderr, "Couldn't open input stream.\n");
        return VideoOpenFailed;
    }
#endif
    // Find the first video stream in the opened input.
    videoindex=-1;
    pCodecCtx = NULL;
    for(i=0; i<pFormatCtx->nb_streams; i++)
        if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO)
        {
            videoindex=i;
            break;
        }
    if(videoindex==-1)
    {
        printf("Didn't find a video stream.(没有找到视频流)\n");
        return VideoOpenFailed;
    }
    // NOTE(review): streams[]->codec is the deprecated pre-FFmpeg-3.x
    // decoder context; kept as-is for consistency with the rest of the file.
    pCodecCtx = pFormatCtx->streams[videoindex]->codec;
    pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
    if(pCodec == NULL)
    {
        printf("video Codec not found.\n");
        return VideoDecoderOpenFailed;
    }
    if(avcodec_open2(pCodecCtx, pCodec,NULL)<0)
    {
        printf("Could not open video codec.\n");
        return VideoDecoderOpenFailed;
    }
    // Allocate the raw decoded frame and the YUV420P destination frame,
    // and bind out_buffer as the destination frame's pixel storage.
    pFrame = av_frame_alloc();
    pFrameYUV = av_frame_alloc();
    out_buffer=(uint8_t *)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
    avpicture_fill((AVPicture *)pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
    //***************
    // int Screen_W = GetSystemMetrics(SM_CXSCREEN); // get screen width/height
    // int Screen_H = GetSystemMetrics(SM_CYSCREEN);
    mVideoEncoder->setWidth(pCodecCtx->width, pCodecCtx->height); // pass the capture size to the encoder
    return SUCCEED;
}
  163. void GetVideoThread::deInit()
  164. {
  165. if (out_buffer)
  166. {
  167. av_free(out_buffer);
  168. out_buffer = NULL;
  169. }
  170. if (pFrame)
  171. {
  172. av_free(pFrame);
  173. pFrame = NULL;
  174. }
  175. if (pFrameYUV)
  176. {
  177. av_free(pFrameYUV);
  178. pFrameYUV = NULL;
  179. }
  180. if (pCodecCtx)
  181. avcodec_close(pCodecCtx);
  182. avformat_close_input(&pFormatCtx);
  183. avformat_free_context(pFormatCtx);
  184. }
  185. void GetVideoThread::startRecord()
  186. {
  187. m_isRun = true;
  188. //启动新的线程
  189. std::thread([&](GetVideoThread *pointer)
  190. {
  191. pointer->run();
  192. }, this).detach();
  193. mVideoEncoder->startEncode();
  194. }
// Pause recording: while m_pause is set, run() keeps reading packets from
// the device but discards them, so the capture queue does not back up.
void GetVideoThread::pauseRecord()
{
    m_pause = true;
}
// Resume recording after pauseRecord(). Clearing m_getFirst makes run()
// re-anchor its timestamp base on the next frame, so the time spent paused
// does not appear as a gap in the output timeline.
void GetVideoThread::restoreRecord()
{
    m_getFirst = false;
    m_pause = false;
}
// Request the capture loop to stop; run() performs the actual teardown
// (deInit() and stopping the encoder) when it observes the cleared flag.
// NOTE(review): m_isRun is written here and read by the capture thread
// without synchronization — consider std::atomic<bool>.
void GetVideoThread::stopRecord()
{
    m_isRun = false;
}
  208. //FILE *fp = fopen("out.yuv","wb");
  209. void GetVideoThread::run()
  210. {
  211. struct SwsContext *img_convert_ctx = NULL;
  212. int y_size = 0;
  213. int yuvSize = 0;
  214. if (pCodecCtx)
  215. {
  216. y_size = pCodecCtx->width * pCodecCtx->height;
  217. yuvSize = avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
  218. ///理论上 这里的 size = y_size * 3 / 2
  219. int numBytes = yuvSize;
  220. out_buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t));
  221. avpicture_fill((AVPicture *) pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P,pCodecCtx->width, pCodecCtx->height);
  222. img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
  223. pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P,
  224. SWS_BICUBIC, NULL, NULL, NULL);
  225. }
  226. AVPacket *packet=(AVPacket *)av_malloc(sizeof(AVPacket));
  227. int64_t firstTime = AppConfig::getTimeStamp_MilliSecond();
  228. m_getFirst = false;
  229. int64_t timeIndex = 0;
  230. bool m_saveVideoFileThread = true;
  231. while(m_isRun )
  232. {
  233. if (av_read_frame(pFormatCtx, packet)<0)
  234. {
  235. fprintf(stderr, "read failed! \n");
  236. AppConfig::mSleep(10);
  237. continue;
  238. }
  239. if (m_pause)
  240. {
  241. av_packet_unref(packet);
  242. AppConfig::mSleep(10);
  243. continue;
  244. }
  245. if(packet->stream_index==videoindex)
  246. {
  247. int64_t time = 0;
  248. if (m_saveVideoFileThread)
  249. {
  250. if (m_getFirst)
  251. {
  252. int64_t secondTime = AppConfig::getTimeStamp_MilliSecond();
  253. time = secondTime - firstTime + timeIndex;
  254. }
  255. else
  256. {
  257. firstTime = AppConfig::getTimeStamp_MilliSecond();
  258. timeIndex = 0;
  259. m_getFirst = true;
  260. }
  261. }
  262. if (avcodec_send_packet(pCodecCtx, packet) != 0)
  263. {
  264. fprintf(stderr, "input AVPacket to decoder failed!\n");
  265. av_packet_unref(packet);
  266. continue;
  267. }
  268. while (0 == avcodec_receive_frame(pCodecCtx, pFrame))
  269. {
  270. /// 转换成YUV420
  271. /// 由于解码后的数据不一定是yuv420p,比如硬件解码后会是yuv420sp,因此这里统一转成yuv420p
  272. sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
  273. if (m_saveVideoFileThread)
  274. {
  275. uint8_t * picture_buf = (uint8_t *)av_malloc(yuvSize);
  276. memcpy(picture_buf, out_buffer, yuvSize);
  277. // fwrite(picture_buf,1,y_size*3/2,fp);
  278. // av_free(picture_buf);
  279. mVideoEncoder->inputYuvBuffer(picture_buf, yuvSize, time); //将yuv数据添加到h.264编码的线程
  280. }
  281. }
  282. }
  283. else
  284. {
  285. fprintf(stderr, "other %d \n", packet->stream_index);
  286. }
  287. av_packet_unref(packet);
  288. }
  289. sws_freeContext(img_convert_ctx);
  290. fprintf(stderr, "record stopping... \n");
  291. m_pause = false;
  292. deInit();
  293. mVideoEncoder->stopEncode();
  294. fprintf(stderr, "record finished! \n");
  295. }