| 123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223 | /** * 叶海辉 * QQ群121376426 * http://blog.yundiantech.com/ */#include <stdio.h>extern "C"{#include "libavcodec/avcodec.h"#include "libavformat/avformat.h"#include "libswscale/swscale.h"#include "libavdevice/avdevice.h"}//'1' Use Dshow//'0' Use VFW#define USE_DSHOW 0//Show Dshow Devicevoid show_dshow_device(){    AVFormatContext *pFormatCtx = avformat_alloc_context();    AVDictionary* options = NULL;    av_dict_set(&options,"list_devices","true",0);    AVInputFormat *iformat = av_find_input_format("dshow");    printf("========Device Info=============\n");    avformat_open_input(&pFormatCtx,"video=dummy",iformat,&options);    printf("================================\n");}//Show Dshow Device Optionvoid show_dshow_device_option(){    AVFormatContext *pFormatCtx = avformat_alloc_context();    AVDictionary* options = NULL;    av_dict_set(&options,"list_options","true",0);    AVInputFormat *iformat = av_find_input_format("dshow");    printf("========Device Option Info======\n");    avformat_open_input(&pFormatCtx,"video=Integrated Camera",iformat,&options);    printf("================================\n");}//Show VFW Devicevoid show_vfw_device(){    AVFormatContext *pFormatCtx = avformat_alloc_context();    AVInputFormat *iformat = av_find_input_format("vfwcap");    printf("========VFW Device Info======\n");    avformat_open_input(&pFormatCtx,"list",iformat,NULL);    printf("=============================\n");}//Show 
AVFoundation Devicevoid show_avfoundation_device(){    AVFormatContext *pFormatCtx = avformat_alloc_context();    AVDictionary* options = NULL;    av_dict_set(&options,"list_devices","true",0);    AVInputFormat *iformat = av_find_input_format("avfoundation");    printf("==AVFoundation Device Info===\n");    avformat_open_input(&pFormatCtx,"",iformat,&options);    printf("=============================\n");}#define USE_DSHOW 1int main(int argc, char* argv[]){    AVFormatContext	*pFormatCtx;    int				i, videoindex;    AVCodecContext	*pCodecCtx;    AVCodec			*pCodec;    av_register_all();    avformat_network_init();    avdevice_register_all();//Register Device    //Show Dshow Device    show_dshow_device();    //Show Device Options//    show_dshow_device_option();    //Show VFW Options//    show_vfw_device();    pFormatCtx = avformat_alloc_context();#if USE_DSHOW    //Use dshow    //    //这里需要先安装 screen-capture-recorder 才能使用dshow采集屏幕    //screen-capture-recorder    //Website: http://sourceforge.net/projects/screencapturer/    //    AVInputFormat *ifmt=av_find_input_format("dshow");    if(avformat_open_input(&pFormatCtx,"video=screen-capture-recorder",ifmt,NULL)!=0){        printf("Couldn't open input stream.\n");        return -1;    }#else    //Use gdigrab    AVDictionary* options = NULL;    //Set some options    //grabbing frame rate    //av_dict_set(&options,"framerate","5",0);    //The distance from the left edge of the screen or desktop    //av_dict_set(&options,"offset_x","20",0);    //The distance from the top edge of the screen or desktop    //av_dict_set(&options,"offset_y","40",0);    //Video frame size. 
The default is to capture the full screen    //av_dict_set(&options,"video_size","640x480",0);    AVInputFormat *ifmt=av_find_input_format("gdigrab");    if(avformat_open_input(&pFormatCtx,"desktop",ifmt,&options)!=0){        printf("Couldn't open input stream.\n");        return -1;    }#endif    if(avformat_find_stream_info(pFormatCtx,NULL)<0)    {        printf("Couldn't find stream information.\n");        return -1;    }    videoindex=-1;    for(i=0; i<pFormatCtx->nb_streams; i++)    {        if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO)        {            videoindex=i;        }    }    if(videoindex==-1)    {        printf("Couldn't find a video stream.\n");        return -1;    }    pCodecCtx=pFormatCtx->streams[videoindex]->codec;    pCodec=avcodec_find_decoder(pCodecCtx->codec_id);        if(pCodec==NULL)    {        printf("Codec not found.\n");        return -1;    }    if(avcodec_open2(pCodecCtx, pCodec,NULL)<0)    {        printf("Could not open codec.\n");        return -1;    }    AVFrame	*pFrame,*pFrameYUV;    pFrame=av_frame_alloc();    pFrameYUV=av_frame_alloc();    uint8_t *out_buffer=(uint8_t *)av_malloc(avpicture_get_size(PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));    avpicture_fill((AVPicture *)pFrameYUV, out_buffer, PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);    int ret, got_picture;    AVPacket *packet=(AVPacket *)av_malloc(sizeof(AVPacket));    FILE *fp_yuv=fopen("output.yuv","wb");    struct SwsContext *img_convert_ctx;    img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);    ///这里打印出视频的宽高    fprintf(stderr,"w= %d h= %d\n",pCodecCtx->width, pCodecCtx->height);    ///我们就读取100张图像    for(int i=0;i<100;i++)    {        if(av_read_frame(pFormatCtx, packet) < 0)        {            break;        }        if(packet->stream_index==videoindex)        {            ret = 
avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);            if(ret < 0){                printf("Decode Error.\n");                return -1;            }            if(got_picture)            {                sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);                int y_size=pCodecCtx->width*pCodecCtx->height;                fwrite(pFrameYUV->data[0],1,y_size,fp_yuv);    //Y                fwrite(pFrameYUV->data[1],1,y_size/4,fp_yuv);  //U                fwrite(pFrameYUV->data[2],1,y_size/4,fp_yuv);  //V            }        }        av_free_packet(packet);    }    sws_freeContext(img_convert_ctx);    fclose(fp_yuv);    av_free(out_buffer);    av_free(pFrameYUV);    avcodec_close(pCodecCtx);    avformat_close_input(&pFormatCtx);    return 0;}