• FFmpeg camera capture

    extern "C"
    {
    #include "libavcodec/avcodec.h"
    #include "libavformat/avformat.h"
    #include "libswscale/swscale.h"
    #include "libavdevice/avdevice.h"
    #include "SDL/SDL.h"
    };
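
    /* The main() excerpt below waits on a custom SDL 1.2 refresh event
       (SFM_REFRESH_EVENT) and a thread_exit flag, but the listing omits that
       scaffolding. A minimal reconstruction is sketched here; the macro value,
       thread name and 40 ms interval are assumptions, not from the original. */
    #define SFM_REFRESH_EVENT  (SDL_USEREVENT + 1)

    int thread_exit = 0;

    //Push a refresh event roughly every 40 ms (~25 fps) until main() sets thread_exit
    int sfp_refresh_thread(void *opaque)
    {
        while (!thread_exit) {
            SDL_Event event;
            event.type = SFM_REFRESH_EVENT;
            SDL_PushEvent(&event);
            SDL_Delay(40);
        }
        return 0;
    }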
    
    
    int main(int argc, char *argv[])
    {
        AVFormatContext    *pFormatCtx;
        int                i, videoindex;
        AVCodecContext    *pCodecCtx;
        AVCodec            *pCodec;
        
        
        av_register_all();
        avformat_network_init();
        pFormatCtx = avformat_alloc_context();
        
        
        //Register Device
        avdevice_register_all();
        
        // ---- Optional: list the capture devices available on this platform ----
    #if defined(_WIN32)
        //Show DirectShow devices
        {
            AVFormatContext *pFormatCtx = avformat_alloc_context();
            AVDictionary* options = NULL;
            av_dict_set(&options,"list_devices","true",0);
            AVInputFormat *iformat = av_find_input_format("dshow");
            printf("========Device Info=============\n");
            avformat_open_input(&pFormatCtx,"video=dummy",iformat,&options);
            printf("================================\n");
        }
        //Show the options of one DirectShow device
        {
            AVFormatContext *pFormatCtx = avformat_alloc_context();
            AVDictionary* options = NULL;
            av_dict_set(&options,"list_options","true",0);
            AVInputFormat *iformat = av_find_input_format("dshow");
            printf("========Device Option Info======\n");
            avformat_open_input(&pFormatCtx,"video=Integrated Camera",iformat,&options);
            printf("================================\n");
        }
        //Show VFW devices
        {
            AVFormatContext *pFormatCtx = avformat_alloc_context();
            AVInputFormat *iformat = av_find_input_format("vfwcap");
            printf("========VFW Device Info======\n");
            avformat_open_input(&pFormatCtx,"list",iformat,NULL);
            printf("=============================\n");
        }
    #elif defined(__APPLE__)
        //Show AVFoundation devices
        {
            AVFormatContext *pFormatCtx = avformat_alloc_context();
            AVDictionary* options = NULL;
            av_dict_set(&options,"list_devices","true",0);
            AVInputFormat *iformat = av_find_input_format("avfoundation");
            printf("==AVFoundation Device Info===\n");
            avformat_open_input(&pFormatCtx,"",iformat,&options);
            printf("=============================\n");
        }
    #endif
        
        
        // ---- Open the capture device with the platform's input format ----
    #if defined(_WIN32)
        // DSHOW: set your own video device's name here
        AVInputFormat *ifmt=av_find_input_format("dshow");
        if(avformat_open_input(&pFormatCtx,"video=Integrated Camera",ifmt,NULL)!=0){
            printf("Couldn't open input stream.\n");
            return -1;
        }
        // VFW alternative:
        //AVInputFormat *ifmt=av_find_input_format("vfwcap");
        //if(avformat_open_input(&pFormatCtx,"0",ifmt,NULL)!=0){
        //    printf("Couldn't open input stream.\n");
        //    return -1;
        //}
    #elif defined(__linux__)
        // Linux: Video4Linux2
        AVInputFormat *ifmt=av_find_input_format("video4linux2");
        if(avformat_open_input(&pFormatCtx,"/dev/video0",ifmt,NULL)!=0){
            printf("Couldn't open input stream.\n");
            return -1;
        }
    #else
        // Mac: AVFoundation, device string is "[video]:[audio]" (e.g. "0")
        AVInputFormat *ifmt=av_find_input_format("avfoundation");
        if(avformat_open_input(&pFormatCtx,"0",ifmt,NULL)!=0){
            printf("Couldn't open input stream.\n");
            return -1;
        }
    #endif
        
        
        if(avformat_find_stream_info(pFormatCtx,NULL)<0)
        {
            printf("Couldn't find stream information.\n");
            return -1;
        }
        
        
        videoindex=-1;
        for(i=0; i<pFormatCtx->nb_streams; i++) 
            if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO)
            {
                videoindex=i;
                break;
            }
        if(videoindex==-1)
        {
            printf("Couldn't find a video stream.\n");
            return -1;
        }
    
        pCodecCtx=pFormatCtx->streams[videoindex]->codec;
        pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
        if(pCodec==NULL)
        {
            printf("Codec not found.\n");
            return -1;
        }
        if(avcodec_open2(pCodecCtx, pCodec,NULL)<0)
        {
            printf("Could not open codec.\n");
            return -1;
        }
        
        
        AVFrame    *pFrame,*pFrameYUV;
        pFrame=av_frame_alloc();
        pFrameYUV=av_frame_alloc();
        
        
        struct SwsContext *img_convert_ctx;
        img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL); 
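
        /* SDL 1.2 window, YUV overlay and the remaining locals (packet, event,
           ret, got_picture) used by the loop below are not shown in the original
           listing; this is a minimal sketch of that setup. */
        if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_TIMER)) {
            printf("Could not initialize SDL - %s\n", SDL_GetError());
            return -1;
        }
        SDL_Surface *screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 0, 0);
        if(screen == NULL){
            printf("SDL: could not set video mode - exiting\n");
            return -1;
        }
        //YV12 overlay: the loop maps Y/U/V planes onto bmp->pixels[0]/[2]/[1]
        SDL_Overlay *bmp = SDL_CreateYUVOverlay(pCodecCtx->width, pCodecCtx->height, SDL_YV12_OVERLAY, screen);
        SDL_Rect rect;
        rect.x = 0; rect.y = 0;
        rect.w = pCodecCtx->width; rect.h = pCodecCtx->height;

        AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
        SDL_Event event;
        int ret, got_picture;

        //Start the refresh thread defined above
        SDL_CreateThread(sfp_refresh_thread, NULL);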
    
        
        for (;;) {
            //Wait
            SDL_WaitEvent(&event);
            if(event.type==SFM_REFRESH_EVENT){
                //------------------------------
                if(av_read_frame(pFormatCtx, packet)>=0){
                    if(packet->stream_index==videoindex){
                        ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
                        if(ret < 0){
                            printf("Decode Error.\n");
                            return -1;
                        }
                        if(got_picture){
                            SDL_LockYUVOverlay(bmp);
                            pFrameYUV->data[0]=bmp->pixels[0];
                            pFrameYUV->data[1]=bmp->pixels[2];
                            pFrameYUV->data[2]=bmp->pixels[1];     
                            pFrameYUV->linesize[0]=bmp->pitches[0];
                            pFrameYUV->linesize[1]=bmp->pitches[2];   
                            pFrameYUV->linesize[2]=bmp->pitches[1];
                            sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
    
                            
                            SDL_UnlockYUVOverlay(bmp); 
                            
                            SDL_DisplayYUVOverlay(bmp, &rect); 
    
                        }
                    }
                    av_free_packet(packet);
                }else{
                    //Exit Thread
                    thread_exit=1;
                    break;
                }
            }else if(event.type==SDL_QUIT){
                thread_exit=1;
                break;
            }
    
        }
        
        
        sws_freeContext(img_convert_ctx);
        
        
        //av_free(out_buffer);
        av_free(pFrameYUV);
        avcodec_close(pCodecCtx);
        avformat_close_input(&pFormatCtx);
    
        
        return 0;
    }
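
    A possible build command for this example (the file name read_camera.cpp and
    the availability of pkg-config entries for FFmpeg and SDL 1.2 are assumptions
    about the local setup):

        g++ read_camera.cpp -o read_camera \
            $(pkg-config --cflags --libs libavdevice libavformat libavcodec libswscale libavutil sdl)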
  • Original article: https://www.cnblogs.com/diaoss/p/11582947.html