#include <stdio.h>
#include <stdlib.h>

extern "C" {
#include <SDL.h>
#include "libavutil/opt.h"
#include "libavutil/channel_layout.h"
#include "libavutil/common.h"
#include "libavutil/imgutils.h"
#include "libavutil/mathematics.h"
#include "libavutil/samplefmt.h"
#include "libavutil/time.h"
#include "libavutil/fifo.h"
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libavformat/avio.h"
#include "libavfilter/avfiltergraph.h"
#include "libavfilter/avfilter.h"
#include "libavfilter/buffersink.h"
#include "libavfilter/buffersrc.h"
#include "libswscale/swscale.h"
#include "libswresample/swresample.h"
}

#include <memory>
#include <windows.h>
#include "sdlplayer.h"

int mymain();

int _tmain(int argc, _TCHAR* argv[])
{
    mymain();
    return 0;
}

AVInputFormat mFormat;
AVDictionary* iformat_opts;

using namespace std;

#define INBUF_SIZE 4096

void Init()
{
    av_register_all();
    avfilter_register_all();
    avformat_network_init();
    av_log_set_level(AV_LOG_ERROR);
}

AVFormatContext *ic = NULL;
int64_t lastReadPacktTime;

// Read one packet from the input; returns nullptr on EOF or error.
std::shared_ptr<AVPacket> readPacketFromSource()
{
    std::shared_ptr<AVPacket> packet(
        static_cast<AVPacket*>(av_malloc(sizeof(AVPacket))),
        [](AVPacket *p) { av_packet_unref(p); av_freep(&p); });
    av_init_packet(packet.get());
    lastReadPacktTime = av_gettime();
    int ret = av_read_frame(ic, packet.get());
    if (ret >= 0)
        return packet;
    else
        return nullptr;
}

// Decode one video packet into frame; stream 0 is assumed to be the video stream.
bool videoDecode(AVPacket* packet, AVFrame *frame)
{
    int gotFrame = 0;
    auto hr = avcodec_decode_video2(ic->streams[0]->codec, frame, &gotFrame, packet);
    return hr >= 0 && gotFrame != 0;
}

// Find and open a decoder for stream 0.
int initVideoDecodeContext()
{
    auto codecId = ic->streams[0]->codec->codec_id;
    auto codec = avcodec_find_decoder(codecId);
    if (!codec) {
        return -1;
    }
    return avcodec_open2(ic->streams[0]->codec, codec, NULL);
}

// Dump one luma plane as a PGM file (handy for debugging the decoder).
static void pgm_save(unsigned char *buf, int wrap, int xsize, int ysize, char *filename)
{
    FILE *f = fopen(filename, "wb");
    fprintf(f, "P5\n%d %d\n%d\n", xsize, ysize, 255);
    for (int i = 0; i < ysize; i++)
        fwrite(buf + i * wrap, 1, xsize, f);
    fclose(f);
}

int mymain()
{
    int scan_all_pmts_set = 0;

    /* register all codecs, demuxers and protocols */
    Init();

    ic = avformat_alloc_context();
    int ret;
    if (!ic) {
        av_log(NULL, AV_LOG_FATAL, "Could not allocate context.\n");
        ret = AVERROR(ENOMEM);
        printf("alloc err %d\n", ret);
    }

    /*
    if (!av_dict_get(iformat_opts, "scan_all_pmts", NULL, AV_DICT_MATCH_CASE)) {
        av_dict_set(&iformat_opts, "scan_all_pmts", "1", AV_DICT_DONT_OVERWRITE);
        scan_all_pmts_set = 1;
    }
    */

    int err = avformat_open_input(&ic, "F://3s.mp4", nullptr, nullptr);
    if (err < 0) {
        printf("open err err=%d\n", err);
    }

    /*
    err = avformat_find_stream_info(ic, nullptr);
    printf("ic->nb_streams %d\n", ic->nb_streams);
    if (err < 0) {
    } else {
        for (int i = 0; i < ic->nb_streams; i++) {
            int type = ic->streams[i]->codec->codec_type;
            printf("type = %d\n", type);
        }
    }
    */

    std::shared_ptr<CGSDLRender> sdlRender = std::make_shared<CGSDLRender>();
    ret = initVideoDecodeContext();
    if (ret < 0)
        return ret;

    sdlRender->InitVideo(0);
    sdlRender->CreateVideoSurface(ic->streams[0]->codec->width,
                                  ic->streams[0]->codec->height);

    AVFrame *videoFrame = av_frame_alloc();
    for (int i = 0; i < 10000; i++) {
        auto packet = readPacketFromSource();
        if (packet) {
            if (packet->stream_index == 0 && videoDecode(packet.get(), videoFrame)) {
                printf("%d---\n", i);
                char buf[1024];
                sprintf(buf, "%s-%d", "3smp4", i);
                AVFrame *frame = videoFrame;
                sdlRender->Display((char**)frame->data, frame->linesize);
                Sleep(30);
                // pgm_save(frame->data[0], frame->linesize[0],
                //          frame->width, frame->height, buf);
            }
        } else {
            break;
        }
    }
    av_frame_free(&videoFrame);
    return 0;
}
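A note on the hard-coded stream index: the code above simply treats stream 0 as the video stream. As a minimal sketch (not part of the original source, and assuming the same pre-4.x FFmpeg API used throughout this post), the index could instead be discovered with avformat_find_stream_info plus av_find_best_stream and used in place of the literal 0:

// Sketch only: locate the video stream instead of assuming it is stream 0.
int findVideoStreamIndex(AVFormatContext *fmt)
{
    // Probe the container so per-stream codec parameters are filled in.
    int err = avformat_find_stream_info(fmt, nullptr);
    if (err < 0)
        return err;
    // Returns the index of the best video stream, or a negative AVERROR code.
    return av_find_best_stream(fmt, AVMEDIA_TYPE_VIDEO, -1, -1, nullptr, 0);
}

The returned index would then replace the literal 0 in the packet->stream_index check and in ic->streams[0] above.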
SDL part: header file (sdlplayer.h)
class CGSDLRender
{
public:
    CGSDLRender();
    virtual ~CGSDLRender(void);

    int InitVideo(int adapterId = 0);
    int CreateVideoSurface(int width, int height);
    int Display(char** data, int* linesize);

private:
    SDL_Window   *sdlWindow;
    SDL_Renderer *sdlRender;
    SDL_Texture  *sdlTexture;
    int width;
    int height;
};
Implementation
#include <SDL.h>
#include <windows.h>
#include "sdlplayer.h"

CGSDLRender::~CGSDLRender()
{
}

CGSDLRender::CGSDLRender()
{
}

int CGSDLRender::InitVideo(int adapterId)
{
    SDL_Init(SDL_INIT_VIDEO);
    sdlWindow = SDL_CreateWindow("window title", 300, 300, 300, 300, SDL_WINDOW_SHOWN);
    sdlRender = SDL_CreateRenderer(sdlWindow, -1, SDL_RENDERER_ACCELERATED);
    return S_OK;
}

int CGSDLRender::CreateVideoSurface(int width, int height)
{
    this->width = width;
    this->height = height;
    sdlTexture = SDL_CreateTexture(sdlRender, SDL_PIXELFORMAT_YV12,
                                   SDL_TEXTUREACCESS_STREAMING, width, height);
    return S_OK;
}

int CGSDLRender::Display(char** data, int* linesize)
{
    void* pixels = nullptr;
    int pitch;
    int ret = SDL_LockTexture(sdlTexture, NULL, &pixels, &pitch);
    if (ret < 0)
        return ret;

    // YV12 layout inside the texture: Y plane, then V plane, then U plane.
    uint8_t* yuv[3] = {
        (uint8_t*)pixels,
        (uint8_t*)pixels + pitch * height,
        (uint8_t*)pixels + pitch * height + (pitch >> 1) * (height >> 1)
    };

    for (int i = 0; i < height; i++) {
        memcpy(yuv[0] + i * pitch, data[0] + i * linesize[0], linesize[0]);
        if (i % 2 == 0) {
            // The decoder delivers YUV420P (U = data[1], V = data[2]);
            // swap the chroma planes to match the YV12 texture layout.
            memcpy(yuv[1] + (i >> 1) * (pitch >> 1), data[2] + (i >> 1) * linesize[2], linesize[2]);
            memcpy(yuv[2] + (i >> 1) * (pitch >> 1), data[1] + (i >> 1) * linesize[1], linesize[1]);
        }
    }

    SDL_UnlockTexture(sdlTexture);
    SDL_RenderClear(sdlRender);
    SDL_RenderCopy(sdlRender, sdlTexture, NULL, NULL);
    SDL_RenderPresent(sdlRender);
    return S_OK;
}
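For comparison, SDL2 also provides SDL_UpdateYUVTexture, which takes the three planes and their pitches directly, so the manual lock/row-copy/chroma-swap above is not needed. Below is a minimal sketch of a replacement body for Display (not the original author's code; it assumes data/linesize come from a YUV420P AVFrame, as in the decode loop above, and it would replace rather than accompany the version shown):

// Sketch only: alternative Display() using SDL_UpdateYUVTexture.
int CGSDLRender::Display(char** data, int* linesize)
{
    int ret = SDL_UpdateYUVTexture(sdlTexture, NULL,
                                   (const Uint8*)data[0], linesize[0],   // Y plane
                                   (const Uint8*)data[1], linesize[1],   // U plane
                                   (const Uint8*)data[2], linesize[2]);  // V plane
    if (ret < 0)
        return ret;

    SDL_RenderClear(sdlRender);
    SDL_RenderCopy(sdlRender, sdlTexture, NULL, NULL);
    SDL_RenderPresent(sdlRender);
    return S_OK;
}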
Additional dependencies (linker input): avcodec.lib
avformat.lib
avfilter.lib
avutil.lib
swresample.lib
swscale.lib
winmm.lib
Gdi32.lib
dsound.lib
SDL2.lib
SDL2main.lib
dxguid.lib
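If you prefer not to edit the project's Additional Dependencies setting, the same libraries can be requested from source with MSVC's #pragma comment directive (MSVC-specific; this is simply the list above expressed in code):

// Equivalent to the Additional Dependencies list above (MSVC only).
#pragma comment(lib, "avcodec.lib")
#pragma comment(lib, "avformat.lib")
#pragma comment(lib, "avfilter.lib")
#pragma comment(lib, "avutil.lib")
#pragma comment(lib, "swresample.lib")
#pragma comment(lib, "swscale.lib")
#pragma comment(lib, "winmm.lib")
#pragma comment(lib, "Gdi32.lib")
#pragma comment(lib, "dsound.lib")
#pragma comment(lib, "SDL2.lib")
#pragma comment(lib, "SDL2main.lib")
#pragma comment(lib, "dxguid.lib")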