#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <stdio.h>
#include <SDL2/SDL.h>
#include <time.h>

#define SFM_REFRESH_EVENT (SDL_USEREVENT + 1)

int thread_exit = 0;

//Refresh thread: pushes a refresh event roughly every 15 ms until thread_exit is set
int sfp_refresh_thread(void *opaque)
{
    SDL_Event event;
    while (thread_exit == 0)
    {
        event.type = SFM_REFRESH_EVENT;
        SDL_PushEvent(&event);
        SDL_Delay(15);
    }
    return 0;
}

int main(int argc, char* argv[])
{
    AVFormatContext *pFormatCtx;   //format context
    int i, videoindex;
    AVCodecContext *pCodecCtx;     //codec context
    AVCodec *pCodec;               //codec
    int screen_w = 0, screen_h = 0;
    SDL_Window *screen;
    SDL_Renderer *sdlRenderer;
    SDL_Texture *sdlTexture;
    SDL_Rect sdlRect;
    SDL_Thread *video_tid;
    SDL_Event event;

    av_register_all();             //step 0: register all codecs and formats
    avformat_network_init();       //required if a network stream is to be opened
    pFormatCtx = avformat_alloc_context(); //allocate the format context

    if (avformat_open_input(&pFormatCtx, argv[1], NULL, NULL) != 0) //open the media file
    {
        printf("open file error\n");
        return -1;
    }

    AVDictionary *pOptions = NULL;
    if (avformat_find_stream_info(pFormatCtx, &pOptions) < 0) //read stream info; arg 0: format context, arg 1: options
    {
        return -1;
    }
    av_dump_format(pFormatCtx, 0, argv[1], 0); //debug helper: print basic info about the file's audio/video streams

    //print the total duration as hh:mm:ss.xx
    if (pFormatCtx->duration != AV_NOPTS_VALUE)
    {
        int hours, mins, secs, us;
        int64_t duration = pFormatCtx->duration + 5000;
        secs = duration / AV_TIME_BASE;
        us   = duration % AV_TIME_BASE;
        mins = secs / 60;
        secs %= 60;
        hours = mins / 60;
        mins %= 60;
        printf("%02d:%02d:%02d.%02d\n", hours, mins, secs, (100 * us) / AV_TIME_BASE);
    }

    int videostream = -1;
    printf("pFormatCtx->nb_streams=%u\n", pFormatCtx->nb_streams);
    for (i = 0; i < pFormatCtx->nb_streams; i++) //walk every stream in the file and pick the first video stream
    {
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            videostream = i;
            break;
        }
    }
    printf("videostream=%d\n", videostream);
    if (-1 == videostream)
    {
        printf("error: no video stream\n");
        return -1;
    }

    pCodecCtx = pFormatCtx->streams[videostream]->codec;  //codec context of the selected video stream
    pCodec = avcodec_find_decoder(pCodecCtx->codec_id);   //find a decoder; av_register_all() must have been called first
    if (NULL == pCodec)
    {
        printf("couldn't find the decoder\n");
        return -1;
    }
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) //initialize the AVCodecContext to use the given decoder
    {
        printf("open decoder error\n");
        return -1;
    }

    AVFrame *pFrame, *pFrameYUV;
    pFrame    = av_frame_alloc(); //decoded (raw) frame
    pFrameYUV = av_frame_alloc(); //converted YUV420P frame
    uint8_t *out_buffer;
    int num = avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
    printf("num=%d\n", num);
    out_buffer = (uint8_t *)av_malloc(num * sizeof(uint8_t));
    avpicture_fill((AVPicture *)pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P,
                   pCodecCtx->width, pCodecCtx->height); //bind pFrameYUV's data pointers to out_buffer

    AVPacket packet;
    int ret = -1;

    struct SwsContext *img_convert_ctx = NULL; //pixel-format conversion context
    img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
                                     pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P,
                                     SWS_BICUBIC, NULL, NULL, NULL); //initialize swscale

    if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER))
    {
        printf("Could not initialize SDL - %s\n", SDL_GetError());
        return -1;
    }
    screen_w = pCodecCtx->width;
    screen_h = pCodecCtx->height;
    //SDL 2.0 supports multiple windows
    screen = SDL_CreateWindow("Simplest ffmpeg player's Window",
                              SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
                              screen_w, screen_h, SDL_WINDOW_OPENGL);
    if (!screen)
    {
        printf("SDL: could not create window - exiting: %s\n", SDL_GetError());
        return -1;
    }
    sdlRenderer = SDL_CreateRenderer(screen, -1, 0);
    sdlTexture  = SDL_CreateTexture(sdlRenderer, SDL_PIXELFORMAT_IYUV, SDL_TEXTUREACCESS_STREAMING,
                                    pCodecCtx->width, pCodecCtx->height);
    sdlRect.x = 0;
    sdlRect.y = 0;
    sdlRect.w = screen_w;
    sdlRect.h = screen_h;

    int f1 = 0;           //packets read
    int f2 = 0;           //frames rendered
    int got_picture = -1;
    video_tid = SDL_CreateThread(sfp_refresh_thread, NULL, NULL);

    time_t t;
    time(&t);
    printf("begin: %s", ctime(&t));

    while (1)
    {
        SDL_WaitEvent(&event);
        if (event.type == SFM_REFRESH_EVENT)
        {
            if (av_read_frame(pFormatCtx, &packet) >= 0) //read one packet (one video frame or several audio frames)
            {
                f1++;
                if (packet.stream_index == videostream) //video packet
                {
                    //decode one video frame: compressed AVPacket in, decoded AVFrame out
                    ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, &packet);
                    if (ret < 0)
                    {
                        printf("decode error\n");
                        return -1;
                    }
                    if (got_picture)
                    {
                        //convert the decoded frame to YUV420P
                        sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize,
                                  0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
                        SDL_UpdateYUVTexture(sdlTexture, &sdlRect,
                                             pFrameYUV->data[0], pFrameYUV->linesize[0],
                                             pFrameYUV->data[1], pFrameYUV->linesize[1],
                                             pFrameYUV->data[2], pFrameYUV->linesize[2]);
                        SDL_RenderClear(sdlRenderer);
                        SDL_RenderCopy(sdlRenderer, sdlTexture, NULL, &sdlRect);
                        SDL_RenderPresent(sdlRenderer);
                        //SDL_Delay(40);
                        f2++;
                    }
                }
                av_free_packet(&packet);
            }
            else
            {
                thread_exit = 1;
                break;
            }
        }
    }

    time(&t);
    printf("end:   %s", ctime(&t));

    SDL_Quit();
    sws_freeContext(img_convert_ctx);
    av_free(out_buffer);   //out_buffer came from av_malloc(), so release it with av_free()
    av_free(pFrameYUV);    //free the YUV frame
    av_free(pFrame);       //free the decoded frame
    avcodec_close(pCodecCtx);          //close the codec
    avformat_close_input(&pFormatCtx); //close the input file

    printf("f1=%d\n", f1);
    printf("f2=%d\n", f2);
    return 0;
}
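
For reference, a minimal build sketch, assuming an FFmpeg 3.x-era installation (the listing relies on avcodec_decode_video2, avpicture_fill, av_free_packet and AVStream->codec, all of which were removed in later FFmpeg releases) and SDL2 visible to pkg-config; the file name player.c is only a placeholder:

    gcc player.c -o player $(pkg-config --cflags --libs libavformat libavcodec libswscale libavutil sdl2)

Run it as ./player input.mp4. The f1/f2 values printed at exit are the packet-read and frame-rendered counters, and the begin/end timestamps bracket the playback loop.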