• Read a video file and save its frames as PPM images


    Notes from learning FFmpeg and SDL:
    I. Versions:
    ffmpeg-3.0.2.tar.bz2
    SDL2-2.0.4.tar.gz
    II. Building:
    1. Building FFmpeg:
    ./configure --enable-shared --disable-yasm --prefix=/usr/local/ffmpeg
    make
    make install
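
    If the build succeeds, the headers land in /usr/local/ffmpeg/include and the shared libraries in /usr/local/ffmpeg/lib (a quick sanity check; the exact .so version numbers depend on the release). Note that the ffmpeg command-line tool installed to /usr/local/ffmpeg/bin is linked against these shared libraries, so it only runs after the library path is configured in step 3 below:

    $ ls /usr/local/ffmpeg/lib                # expect libavcodec.so, libavformat.so, libswscale.so, libavutil.so, ...
    $ /usr/local/ffmpeg/bin/ffmpeg -version   # works once step 3 below is done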

    2. Building SDL:
    ./configure --prefix=/usr/local/sdl
    make
    make install
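
    Likewise for SDL: headers go to /usr/local/sdl/include/SDL2 and libraries to /usr/local/sdl/lib. The sdl2-config helper installed with them prints the compiler and linker flags, which is an alternative way to get the -I/-L flags used in the gcc commands at the end (a sketch; paths assume the prefix above):

    $ /usr/local/sdl/bin/sdl2-config --version --cflags --libs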

    3. System environment setup:
    Check /etc/ld.so.conf to see which directories make up the system shared-library search path.


    Option 1: copy the built SDL and FFmpeg shared libraries into one of the system shared-library directories and recreate the soname symlinks there.
    (After copying, run ldconfig so the loader's cache picks up the new libraries.)
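
    A minimal sketch of Option 1, assuming /usr/lib is one of the directories listed in /etc/ld.so.conf (on some 64-bit distributions it is /usr/lib64 instead):

    $ cp -a /usr/local/ffmpeg/lib/lib*.so* /usr/lib/    # -a keeps the libxxx.so -> libxxx.so.N symlinks
    $ cp -a /usr/local/sdl/lib/libSDL2*.so* /usr/lib/
    $ ldconfig                                          # rebuild /etc/ld.so.cache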


    Option 2: edit /etc/ld.so.conf.
    Append the following two lines to ld.so.conf:
    /usr/local/ffmpeg/lib
    /usr/local/sdl/lib
    Then run ldconfig.
    Everything ldconfig does only matters when the program is run; it has nothing to do with compiling and linking against the SDL and FFmpeg libraries. At compile time you still need the -L flags, so don't mix the two up.
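
    To check the result (a sketch; ./test is the binary built in section III), ldconfig -p dumps the loader's cache and ldd shows how a binary's dependencies resolve at run time:

    $ ldconfig -p | grep -E 'avcodec|SDL2'   # the libraries under the new paths should now be listed
    $ ldd ./test                             # every libav*/libSDL2 line should resolve, not read "not found"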

    Option 3: a temporary environment variable:
    export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/ffmpeg/lib:/usr/local/sdl/lib


    Option 4: edit the shell profile:
    $ vi ~/.bash_profile
    If LD_LIBRARY_PATH is not defined there yet, add:
    LD_LIBRARY_PATH=/usr/local/ffmpeg/lib:/usr/local/sdl/lib
    export LD_LIBRARY_PATH

    If LD_LIBRARY_PATH is already defined,
    just append the new paths to the existing value:
    LD_LIBRARY_PATH=.......:/usr/local/ffmpeg/lib:/usr/local/sdl/lib

    After editing the profile, run
    $ source ~/.bash_profile to make the change take effect.
    ------------------------------------------------------------------
    Trying to set LD_LIBRARY_PATH in the shell with the forms below kept failing with "bash: LD_LIBRARY_PATH: command not found":
    LD_LIBRARY_PATH=/usr/local/lib
    LD_LIBRARY_PATH = $ LD_LIBRARY_PATH:/usr/local/lib
    The "command not found" error comes from the spaces around '=' in the second form: a bash assignment must not contain spaces, otherwise bash treats the variable name as a command. (The first form is valid syntax, but without export the variable is not passed on to child processes.) Writing it as
    export LD_LIBRARY_PATH=/usr/local/lib
    works; then run echo $LD_LIBRARY_PATH to confirm it really is set.
    ------------------------------------------------------------
    III. Write code that calls the FFmpeg shared libraries.
    The code below reads a video file and saves its frames as PPM images.

    #include <libavcodec/avcodec.h>
    #include <libavformat/avformat.h>
    #include <libswscale/swscale.h>
    #include <stdio.h>

    void SaveFrame(AVFrame *pFrame, int width, int height, int iFrame)
    {
      FILE *pFile;
      char szFilename[32];
      int y;

      // Open file
      sprintf(szFilename, "frame%d.ppm", iFrame);
      pFile=fopen(szFilename, "wb");
      if(pFile==NULL)
        return;

      // Write header
      fprintf(pFile, "P6\n%d %d\n255\n", width, height);

      // Write pixel data
      for(y=0; y<height; y++)
        fwrite(pFrame->data[0]+y*pFrame->linesize[0], 1, width*3, pFile);

      // Close file
      fclose(pFile);
    }


    int main(int argc, char* argv[])
    {
      AVFormatContext *pFormatCtx;
      int i;
      AVCodecContext *pCodecCtx;
      AVCodec *pCodec;

      if(argc < 2)
      {
        printf("usage: %s <video file>\n", argv[0]);
        return -1;
      }

      av_register_all();
      avformat_network_init();
      pFormatCtx = avformat_alloc_context();
      if(avformat_open_input(&pFormatCtx,argv[1],NULL,NULL)!=0)
      {
        printf("open file error ");
        return -1;
      }

      if ( avformat_find_stream_info(pFormatCtx,NULL) < 0 )
      {
        return -1;
      }

      i = 0;
      int videostream = -1;
      printf("pFormatCtx->nb_streams=%d ", pFormatCtx->nb_streams);
      for(i=0;i<pFormatCtx->nb_streams;i++)
      {
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
        {
          videostream = i;
          break;
        }
      }
      printf("videostream=%d ", videostream);

      if (-1 == videostream)
      {
        printf("error no video stream ");
        return;
      }

      pCodecCtx = pFormatCtx->streams[videostream]->codec;


      pCodec = avcodec_find_decoder( pCodecCtx->codec_id );

      if(NULL == pCodec)
      {
        printf("couldn't find the decode ");
        return -1;
      }

      if( avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
      {
        printf("open decode error ");
        return -1;
      }

      AVFrame *pFrame,*pFrameRGB;
      pFrame = av_frame_alloc();
      pFrameRGB = av_frame_alloc();
      uint8_t *out_buffer;

      int num = avpicture_get_size(AV_PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height);
      printf("num=%d ", num);


      out_buffer = (uint8_t *)av_malloc(num*sizeof(uint8_t));
      avpicture_fill((AVPicture *)pFrameRGB, out_buffer, AV_PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height);


      AVPacket packet;
      int ret = -1;
      i = 0;
      struct SwsContext *img_convert_ctx = NULL;
      img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_RGB24, SWS_BICUBIC, NULL, NULL, NULL);
      while(av_read_frame(pFormatCtx, &packet)>=0)
      {
        //printf("i=%d, videoindex=%d, packet.stream_index=%d ", i++, videoindex, packet.stream_index);

        if(packet.stream_index == videostream)
        {
          //printf("111111 ");
          int got_picture = -1;
          ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, &packet);

          if(ret < 0)
          {
            printf("decode error ");
            return -1;
          }

          //printf("got_picture:%d ",got_picture);
          if(got_picture)
          {
            sws_scale(img_convert_ctx, pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameRGB->data, pFrameRGB->linesize);
            if(++i<=20)
            {
              SaveFrame(pFrameRGB, pCodecCtx->width, pCodecCtx->height, i);
            }

          }
        }

      av_free_packet(&packet);
      }

      // Free the RGB buffer and the frames (the buffer came from av_malloc, so release it with av_free)
      av_free(out_buffer);
      av_frame_free(&pFrameRGB);
      av_frame_free(&pFrame);

      // Free the scaling context
      sws_freeContext(img_convert_ctx);

      // Close the codec
      avcodec_close(pCodecCtx);

      // Close the video file
      avformat_close_input(&pFormatCtx);


      return 0;
    }

    Compile command:
    gcc -o test main.c -I/usr/local/ffmpeg/include -L/usr/local/ffmpeg/lib -lavutil -lavformat -lavcodec -lswscale -lz

    Compile command when both SDL and FFmpeg are linked:
    gcc -o test main.c -I/usr/local/ffmpeg/include -I/usr/local/sdl/include -L/usr/local/ffmpeg/lib -L/usr/local/sdl/lib -lavutil -lavformat -lavcodec -lswscale -lz -lSDL2
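
    To run it, the loader must find the FFmpeg (and SDL) libraries at run time, using any of the options from step 3 above, and the input file is passed as the first argument; the first 20 decoded frames are written to frame1.ppm ... frame20.ppm in the current directory. A sketch, with input.mp4 standing in for whatever test file you use:

    $ export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/ffmpeg/lib:/usr/local/sdl/lib
    $ ./test input.mp4                                        # input.mp4 is a placeholder name
    $ /usr/local/ffmpeg/bin/ffmpeg -i frame1.ppm frame1.png   # optional: convert a frame to PNG for viewing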

  • Original article: https://www.cnblogs.com/zhangxuan/p/5564017.html