FFmpeg Learning: Building a Video Player with SDL


    #include <stdio.h>
    #include <string.h>
    
    extern "C"
    {
    #include "libavcodec/avcodec.h"
    #include "libavformat/avformat.h"
    #include "libswscale/swscale.h"
    #include "libavutil/imgutils.h"
    #include "SDL.h"
    };
    //Dependency libraries
    #pragma comment(lib, "avcodec.lib")
    #pragma comment(lib, "avformat.lib")
    #pragma comment(lib, "swscale.lib")
    #pragma comment(lib, "avutil.lib")
    #pragma comment(lib, "SDL2.lib")
    #pragma comment(lib, "SDL2main.lib")
    
    //Refresh Event
    #define REFRESH_EVENT	(SDL_USEREVENT + 1)
    //Break Event
    #define BREAK_EVENT		(SDL_USEREVENT + 2)
    
    bool thread_exit = false;
    bool thread_pause = false;
    
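    //Refresh thread: while playback is not paused, push a REFRESH_EVENT roughly every
    //40 ms (about 25 fps); push a BREAK_EVENT just before exiting so main() leaves its loop.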
    int RefreshVideo(void *opaque)
    {
    	thread_exit = false;
    	thread_pause = false;
    	while (!thread_exit)
    	{
    		//Not paused (the space key toggles pause)
    		if (!thread_pause)
    		{
    			SDL_Event event;
    			event.type = REFRESH_EVENT;
    			SDL_PushEvent(&event);
    		}
    		SDL_Delay(40);
    	}
    	thread_exit = false;
    	thread_pause = false;
    	//Push BREAK_EVENT so the event loop in main() exits
    	SDL_Event event;
    	event.type = BREAK_EVENT;
    	SDL_PushEvent(&event);
    	return 0;
    }
    
    int main(int argc, char* argv[])
    {
    	//------------FFmpeg---------------- 
    	AVFormatContext		*pFormatCtx = NULL;
    	AVCodecContext		*pCodecCtx = NULL;
    	AVCodec				*pCodec = NULL;
    	AVFrame				*pFrame = NULL, *pFrameYUV = NULL;
    	unsigned char		*out_buffer = NULL;
    	AVPacket			packet;
    	struct SwsContext	*img_convert_ctx = NULL;
    	int					y_size;
    	int					got_picture;
    	int					i, videoIndex;
    	int					frame_cnt = 1;
    
    	//------------SDL---------------- 
    	SDL_Window			*sdlScreen = NULL;
    	SDL_Renderer		*sdlRenderer = NULL;
    	SDL_Texture			*sdlTexture = NULL;
    	SDL_Rect			sdlRect;
    	SDL_Thread			*sdlThread = NULL;
    	SDL_Event			event;
    
    
    	char filepath[1024] = "";
    	printf("Usage: program.exe Titanic.ts
    ");
    	if (argc == 2)
    	{
    		strcpy(filepath, argv[1]);
    	}
    	else
    	{
    		printf("Could not find a file
    ");
    		return -1;
    	}
    
    
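    	//Register all formats and codecs (required by this older FFmpeg API; deprecated since FFmpeg 4.0)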
    	av_register_all();
    
    	if (avformat_open_input(&pFormatCtx, filepath, NULL, NULL) != 0)
    	{
    		printf("Couldn't open an input stream.
    ");
    		return -1;
    	}
    	if (avformat_find_stream_info(pFormatCtx, NULL) < 0)
    	{
    		printf("Couldn't find stream information.
    ");
    		return -1;
    	}
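    	//Find the first video stream in the file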
    	videoIndex = -1;
    	for (i = 0; i < pFormatCtx->nb_streams; i++)
    		if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
    		{
    			videoIndex = i;
    			break;
    		}
    
    	if (videoIndex == -1)
    	{
    		printf("Couldn't find a video stream.
    ");
    		return -1;
    	}
    
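    	//Get the video stream's codec context and open a matching decoder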
    	pCodecCtx = pFormatCtx->streams[videoIndex]->codec;
    	pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
    	if (pCodec == NULL)
    	{
    		printf("Codec not found.
    ");
    		return -1;
    	}
    	if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
    	{
    		printf("Could not open codec.
    ");
    		return -1;
    	}
    
    	//Output Info-----------------------------
    	printf("--------------- File Information ----------------
    ");
    	av_dump_format(pFormatCtx, 0, filepath, 0);
    	printf("-------------------------------------------------
    ");
    
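    	//Allocate one frame for decoded pictures and one for the converted YUV420P image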
    	pFrame = av_frame_alloc();
    	pFrameYUV = av_frame_alloc();
    	if (pFrame == NULL || pFrameYUV == NULL)
    	{
    		printf("memory allocation error
    ");
    		return -1;
    	}
    	out_buffer = (unsigned char *)av_malloc(av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1));
    	av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, out_buffer,
    		AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
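    	//Conversion context: the decoder's native pixel format -> YUV420P at the same resolution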
    	img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
    		pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
    
    
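    	//Initialize the SDL video subsystem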
    	if (SDL_Init(SDL_INIT_VIDEO))
    	{
    		printf("Could not initialize SDL - %s
    ", SDL_GetError());
    		return -1;
    	}
    
    
    	sdlScreen = SDL_CreateWindow("FFmpeg Player", 
    		SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
    		pCodecCtx->width, pCodecCtx->height, SDL_WINDOW_OPENGL);
    	if (sdlScreen == 0)
    	{
    		printf("SDL: could not create SDL_Window - exiting:%s
    ", SDL_GetError());
    		return -1;
    	}
    
    	sdlRenderer = SDL_CreateRenderer(sdlScreen, -1, SDL_RENDERER_ACCELERATED);
    	if (sdlRenderer == NULL)
    	{
    		printf("SDL: could not create SDL_Renderer - exiting:%s
    ", SDL_GetError());
    		return -1;
    	}
    
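    	//SDL_PIXELFORMAT_IYUV is planar YUV 4:2:0, matching AV_PIX_FMT_YUV420P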
    	sdlTexture = SDL_CreateTexture(sdlRenderer, SDL_PIXELFORMAT_IYUV, SDL_TEXTUREACCESS_STREAMING, pCodecCtx->width, pCodecCtx->height);
    	if (sdlTexture == NULL)
    	{
    		printf("SDL: could not create SDL_Texture - exiting:%s
    ", SDL_GetError());
    		return -1;
    	}
    
    	//Set where the image is displayed inside the window
    	sdlRect.x = 10;
    	sdlRect.y = 10;
    	sdlRect.w = pCodecCtx->width - 20;
    	sdlRect.h = pCodecCtx->height - 20;
    
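    	//Start the thread that paces playback by pushing REFRESH_EVENTs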
    	sdlThread = SDL_CreateThread(RefreshVideo, "RefreshVideo", NULL);
    
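    	//Event loop: on every REFRESH_EVENT, read, decode and render one video frame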
    	while (true)
    	{
    		SDL_WaitEvent(&event);
    		if (event.type == REFRESH_EVENT)
    		{
    			bool read_failed = false;
    			while (true)
    			{
    				if (av_read_frame(pFormatCtx, &packet) < 0)
    				{
    					//End of stream or read error: ask the refresh thread to stop
    					thread_exit = true;
    					read_failed = true;
    					break;
    				}
    				if (packet.stream_index == videoIndex)
    				{
    					break;
    				}
    				av_free_packet(&packet);	//discard non-video packets to avoid leaking them
    			}
    			if (read_failed)
    				continue;	//nothing left to decode; wait for BREAK_EVENT
    			if (avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, &packet) < 0)
    			{
    				printf("Decode Error.
    ");
    				return -1;
    			}
    			if (got_picture)
    			{
    				sws_scale(img_convert_ctx, (const unsigned char* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height,
    					pFrameYUV->data, pFrameYUV->linesize);
    
    				SDL_UpdateYUVTexture(sdlTexture, NULL,
    					pFrameYUV->data[0], pFrameYUV->linesize[0],
    					pFrameYUV->data[1], pFrameYUV->linesize[1],
    					pFrameYUV->data[2], pFrameYUV->linesize[2]
    					);
    				//SDL_UpdateTexture(sdlTexture, NULL, pFrameYUV->data[0], pFrameYUV->linesize[0]);
    				SDL_RenderClear(sdlRenderer); 
    				SDL_RenderCopy(sdlRenderer, sdlTexture, NULL, &sdlRect);
    				SDL_RenderPresent(sdlRenderer);
    
    				printf("Succeed to decode %d frame!
    ", frame_cnt);
    				frame_cnt++;
    			}
    			av_free_packet(&packet);
    		}
    		else if (event.type == SDL_KEYDOWN)
    		{
    			//pause
    			if (event.key.keysym.sym == SDLK_SPACE)
    			{
    				thread_pause = !thread_pause;
    			}
    		}
    		else if (event.type == SDL_QUIT)
    		{
    			thread_exit = true;
    		}
    		else if (event.type == BREAK_EVENT)
    		{
    			break;
    		}
    	}//while
    
    	//Wait for the refresh thread, then release SDL and FFmpeg resources
    	SDL_WaitThread(sdlThread, NULL);
    	SDL_DestroyTexture(sdlTexture);
    	SDL_DestroyRenderer(sdlRenderer);
    	SDL_DestroyWindow(sdlScreen);
    	SDL_Quit();
    
    	sws_freeContext(img_convert_ctx);
    	av_free(out_buffer);
    	av_frame_free(&pFrameYUV);
    	av_frame_free(&pFrame);
    	avcodec_close(pCodecCtx);
    	avformat_close_input(&pFormatCtx);
    
    	return 0;
    }
    
    


    Some readers may wonder: why is the decoded pFrame not displayed directly, instead of being converted with sws_scale() first?

    If you skip the conversion and hand the decoded frame straight to SDL, the displayed image comes out scrambled. The key point is that the decoded pFrame's linesize does not hold the image width but a value somewhat larger than the width. I have not investigated the exact reason in detail (it is presumably done for performance, i.e. row alignment). For example, for a 480x272 video the decoded frame's linesize[0] is 512 rather than 480. Taking the first row of luma pixels (pFrame->data[0]) as an example, bytes 0 to 480 hold luma data, while bytes 480 to 512 hold invalid padding. That is why sws_scale() is used for the conversion: afterwards the invalid bytes are gone, linesize[0] becomes 480, and the frame displays correctly.
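
    To make the padding concrete, here is a minimal sketch (not from the original post; the function name and parameters are invented for illustration) of copying the luma plane of a decoded frame into a tightly packed buffer. Rows must be addressed with linesize, while only the first width bytes of each row carry valid pixels:

    #include <stdint.h>
    #include <string.h>
    
    //Copy the Y (luma) plane of a decoded frame into a tightly packed buffer.
    //src_linesize may be larger than width because the decoder pads each row for
    //alignment; only the first `width` bytes of every source row are valid pixels.
    static void copy_luma_plane(uint8_t *dst, int dst_linesize,
    	const uint8_t *src, int src_linesize,
    	int width, int height)
    {
    	for (int y = 0; y < height; y++)
    	{
    		memcpy(dst + y * dst_linesize,	//packed destination row
    			src + y * src_linesize,	//padded source row
    			width);			//copy only the valid picture bytes
    	}
    }

    For the 480x272 example above, with linesize[0] equal to 512, each memcpy skips the 32 padding bytes at the end of every source row; this stride handling is essentially what sws_scale() does for you here, on top of any pixel-format conversion.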


    Keep it simple!
    Author: N3verL4nd
    Knowledge is meant to be shared; feel free to repost.
    Original article: https://www.cnblogs.com/lgh1992314/p/5834644.html