• Writing a player with ffmpeg and SDL


    Creating the playback window

    SDL_Surface    *screen = NULL;
    screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 0, 0);
    if (!screen)
    {
        fprintf(stderr,"SDL: could not set video mode - exiting\n");
        exit(1);
    }

    Converting to YUV

    SDL_SetVideoMode sets up a video mode with the specified width, height and bits-per-pixel. Since SDL 1.2.10, if width and height are both 0, it will use the width and height of the current video mode (or of the desktop mode, if no video mode has been set).
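
    As a hypothetical aside (this call is not part of the original program), letting SDL pick the current desktop resolution simply means passing 0 for both dimensions:

    /* width/height of 0: use the current video (or desktop) mode, as described above */
    SDL_Surface *screen = SDL_SetVideoMode(0, 0, 0, SDL_SWSURFACE);
    if (!screen)
        fprintf(stderr, "SDL_SetVideoMode failed: %s\n", SDL_GetError());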

    Now we create a YUV overlay on that screen so that we can feed video into it, and set up our SwsContext to convert the decoded image data to YUV420. YUV here is raw picture data with no compression or lossy encoding applied (YUV420 does subsample the chroma planes, but the samples themselves are stored as-is).

    bmp = SDL_CreateYUVOverlay(pCodecCtx->width,
                        pCodecCtx->height,
                        SDL_YV12_OVERLAY,
                        screen);
    
    //Initialize SWS context for software scaling
    sws_ctx = sws_getContext
        (
         pCodecCtx->width,
         pCodecCtx->height,
         pCodecCtx->pix_fmt,
         pCodecCtx->width,
         pCodecCtx->height,
         AV_PIX_FMT_YUV420P,
         SWS_BILINEAR,
         NULL, NULL, NULL
        );

    Displaying the picture

    In the previous section we used a function to save every frame to disk; in this section we replace that function and instead convert each decoded frame and hand it to SDL for display.

    if (frameFinished)
    {
        SDL_LockYUVOverlay(bmp);
        
        AVPicture pict;
        pict.data[0] = bmp->pixels[0];
        pict.data[1] = bmp->pixels[2];
        pict.data[2] = bmp->pixels[1];
        
        pict.linesize[0] = bmp->pitches[0];
        pict.linesize[1] = bmp->pitches[2];
        pict.linesize[2] = bmp->pitches[1];
        
        //Convert the image from its native format to YUV420P for the overlay
        sws_scale(sws_ctx,
                (uint8_t const * const *)pFrame->data,
                pFrame->linesize,
                0,
                pCodecCtx->height,
                //pFrameRGB->data,
                //pFrameRGB->linesize
                pict.data,
                pict.linesize
                );
        SDL_UnlockYUVOverlay(bmp);
        
        rect.x = 0;
        rect.y = 0;
        rect.w = pCodecCtx->width;
        rect.h = pCodecCtx->height;
        SDL_DisplayYUVOverlay(bmp,&rect);
        SDL_Delay(40);
    }

    This code calls SDL_LockYUVOverlay at the start to lock bmp before writing into it: if one thread were displaying the overlay while another was still modifying it, the output would be corrupted. Once the conversion is finished the lock is released with SDL_UnlockYUVOverlay. Note also that the U and V plane pointers are swapped when pict is filled in, because SDL_YV12_OVERLAY stores its planes in Y, V, U order while the YUV420P data written by sws_scale is laid out Y, U, V.

    The video here is treated as 25 frames per second, so we pause for 40 ms after each frame; if the delay is made much shorter the video races ahead and the viewing experience suffers.
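
    If you would rather not hardcode 40 ms, the per-frame delay can be derived from the stream itself. A minimal sketch (not part of the original code; it assumes the demuxer has filled in the stream's avg_frame_rate):

    AVRational fr = pFormatCtx->streams[videoStream]->avg_frame_rate;
    int delay_ms = 40;                          /* fall back to 25 fps */
    if (fr.num > 0 && fr.den > 0)
        delay_ms = (int)(1000 / av_q2d(fr));    /* 1000 ms divided by frames per second */
    SDL_Delay(delay_ms);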

    Cleaning up

    After each frame we free the packet allocated by av_read_frame and poll SDL for events, so the window stays responsive and a quit event can end the program.

    SDL_Event    event;
    av_free_packet(&packet);
    SDL_PollEvent(&event);
    switch(event.type)
    {
        case SDL_QUIT:
        {
            SDL_Quit();
            exit(0);
            break;
        }
        
        default:
        {
            break;
        }                
    }

    Full source:

    #include<stdio.h>
    #include<libavcodec/avcodec.h>
    #include<libavformat/avformat.h>
    #include<libswscale/swscale.h>
    
    #include<SDL.h>
    #include<SDL_thread.h>
    
    #ifdef __MINGW32__
    #undef main  /*Prevents SDL from overriding main()*/
    #endif
    
    int main(int argc, char *argv[])
    {
        AVFormatContext *pFormatCtx = NULL;
        AVCodecContext     *pCodecCtx = NULL;
        AVCodec            *pCodec = NULL;
        AVFrame            *pFrame = NULL;
        AVPacket         packet;
        int                i,videoStream;
        int             frameFinished;
        
        AVDictionary    *optionsDict = NULL;
        struct SwsContext    *sws_ctx = NULL;
        
        SDL_Overlay    *bmp = NULL;
        SDL_Surface    *screen = NULL;
        SDL_Rect    rect;
        SDL_Event    event;
        
        if (argc < 2)
        {
            printf("Please provide a movie file\n");
            return -1;
        }
        
        //register all formats and codecs
        av_register_all();
        
        if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER))
        {
            fprintf(stderr,"Could not initialize SDL - %s\n", SDL_GetError());
            exit(1);
        }
        
        //open video file
        if (avformat_open_input(&pFormatCtx,argv[1], NULL, NULL) != 0)
        {
            return -1;        //couldn't open file
        }
        
        //retrieve stream information
        if (avformat_find_stream_info(pFormatCtx, NULL) < 0)
        {
            return -1;        //couldn't find stream information
        }
        
        //dump information about file onto standard error
        av_dump_format(pFormatCtx, 0, argv[1], 0);
        
        //find the first video stream
        videoStream = -1;
        for (i = 0; i < pFormatCtx->nb_streams; i++)
        {
            if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
            {
                videoStream = i;
                break;
            }
        }
    
        if (videoStream == -1)
        {
            return -1;        //Didn't find a video stream
        }
        
        //Get a pointer to the codec context for the video stream
        pCodecCtx = pFormatCtx->streams[videoStream]->codec;
        
        //Find the decoder for the video stream
        pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
        if (pCodec == NULL)
        {
            fprintf(stderr,"Unsupported codec!\n");
            return -1;    //Codec not found
        }
    
        //open codec
        if (avcodec_open2(pCodecCtx, pCodec, &optionsDict) < 0)
        {
            return -1;    //Could not open codec
        }
        
        //Allocate an AVFrame structure
        pFrame = av_frame_alloc();
        //pFrameRGB = av_frame_alloc();
        
    #ifdef __DARWIN__
        screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 0, 0);
    #else
        screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 24, 0); 
    #endif
        if (!screen)
        {
            fprintf(stderr,"SDL: could not set video mode - exiting\n");
            exit(1);
        }
        
        //Allocate a place to put our YUV image on that screen
        bmp = SDL_CreateYUVOverlay(pCodecCtx->width,
                            pCodecCtx->height,
                            SDL_YV12_OVERLAY,
                            screen);
        
        //Initialize SWS context for software scaling
        sws_ctx = sws_getContext
            (
             pCodecCtx->width,
             pCodecCtx->height,
             pCodecCtx->pix_fmt,
             pCodecCtx->width,
             pCodecCtx->height,
             AV_PIX_FMT_YUV420P,
             SWS_BILINEAR,
             NULL, NULL, NULL
            );
        
        //Read frames from the file, convert each decoded frame to YUV420P
        //and display it through the SDL overlay
        i = 0;
        while (av_read_frame(pFormatCtx, &packet) >= 0)
        {
            //Is this a packet from video stream
            if (packet.stream_index == videoStream)
            {
                //decode video frame
                avcodec_decode_video2(pCodecCtx, pFrame,
                        &frameFinished, &packet);
                
                //Did we get a video frame?
                if (frameFinished)
                {
                    SDL_LockYUVOverlay(bmp);
                    
                    AVPicture pict;
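                    /* The SDL_YV12_OVERLAY keeps its planes in Y, V, U order, while
                     * sws_scale writes YUV420P (Y, U, V), so the U and V plane
                     * pointers below are deliberately swapped */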
                    pict.data[0] = bmp->pixels[0];
                    pict.data[1] = bmp->pixels[2];
                    pict.data[2] = bmp->pixels[1];
                    
                    pict.linesize[0] = bmp->pitches[0];
                    pict.linesize[1] = bmp->pitches[2];
                    pict.linesize[2] = bmp->pitches[1];
                    
                    //Convert the image from its native format to YUV420P for the overlay
                    sws_scale(sws_ctx,
                            (uint8_t const * const *)pFrame->data,
                            pFrame->linesize,
                            0,
                            pCodecCtx->height,
                            //pFrameRGB->data,
                            //pFrameRGB->linesize
                            pict.data,
                            pict.linesize
                            );
                    SDL_UnlockYUVOverlay(bmp);
                    
                    rect.x = 0;
                    rect.y = 0;
                    rect.w = pCodecCtx->width;
                    rect.h = pCodecCtx->height;
                    SDL_DisplayYUVOverlay(bmp,&rect);
                    SDL_Delay(40);
                }
            }
            
            //Free the packet that was allocated by av_read_frame
            av_free_packet(&packet);
            SDL_PollEvent(&event);
            switch(event.type)
            {
                case SDL_QUIT:
                {
                    SDL_Quit();
                    exit(0);
                    break;
                }
                
                default:
                {
                    break;
                }                
            }
        }
        
        //Free the YUV frame
        av_free(pFrame);
        
        //Close the codec
        avcodec_close(pCodecCtx);
        
        //Close the video file
        avformat_close_input(&pFormatCtx);
        return 0;
    }
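
    A side note, separate from the listing above: on newer FFmpeg releases (3.x and later) avcodec_decode_video2 and av_free_packet are deprecated. If you build against such a version, the decode step would look roughly like this sketch (everything else stays the same):

    if (avcodec_send_packet(pCodecCtx, &packet) == 0)
    {
        //drain every frame the decoder has ready for this packet
        while (avcodec_receive_frame(pCodecCtx, pFrame) == 0)
        {
            //convert with sws_scale and display via the overlay, exactly as above
        }
    }
    av_packet_unref(&packet);        //replaces av_free_packet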

    Makefile

    DIR_INC = -I/usr/local/include
    DIR_LIB = -L/usr/local/lib
    
    LIBS = -lavformat \
            -lavcodec \
            -lva-x11 \
            -lva \
            -lxcb-shm \
            -lxcb-xfixes \
            -lxcb-render \
            -lxcb-shape \
            -lxcb -lX11 \
            -lasound \
            -lz \
            -lswresample \
            -lswscale \
            -lavutil \
            -lm \
            -pthread \
            `sdl-config --cflags --libs`
    
    FLAGS = -Wall -ggdb
    
    project : project.c
        gcc project.c ${FLAGS} ${DIR_INC} ${DIR_LIB} ${LIBS} -o project
        
    .PHONY:clean
    clean:
        rm project
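
    To build, run make in the directory containing project.c (note that the two recipe lines above must be indented with a real tab, as make requires), then start the player with ./project followed by the path to a video file, e.g. ./project sample.mp4 (sample.mp4 here is just a placeholder name).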