• Android Local Video Player Development -- Decoding Video Frames with FFmpeg and Displaying Them with SDL


    In the previous chapter, Android Local Video Player Development -- Compiling SDL, we built the SDL support library. We used SDL 2.0 at the time, but some of its APIs have since changed, so from this chapter on we use the SDL 1.3 library instead; I will upload both its source code and the compiled library. In this chapter we use FFmpeg to decode the video frames of a video file and SDL to display them.

    1. Decodec_Video.c is the video decoding source file; its contents are as follows:


    [cpp]
    #include <stdio.h>  
    #include <android/log.h>  
     
    #ifdef __MINGW32__  
    #undef main /* Prevents SDL from overriding main() */  
    #endif  
     
    #include "../SDL/include/SDL.h"  
    #include "../SDL/include/SDL_thread.h"  
     
    #include "VideoPlayerDecode.h"  
    #include "../ffmpeg/libavutil/avutil.h"  
    #include "../ffmpeg/libavcodec/avcodec.h"  
    #include "../ffmpeg/libavformat/avformat.h"  
    #include "../ffmpeg/libswscale/swscale.h"  
     
    AVFormatContext *pFormatCtx; 
    int             i, videoStream; 
    AVCodecContext  *pCodecCtx; 
    AVCodec         *pCodec; 
    AVFrame         *pFrame; 
    AVPacket        packet; 
    int             frameFinished; 
    float           aspect_ratio; 
     
    static struct SwsContext *img_convert_ctx; 
    SDL_Surface     *screen; 
    SDL_Overlay *bmp; 
    SDL_Rect        rect; 
    SDL_Event       event; 
     
     
    JNIEXPORT jint JNICALL Java_com_zhangjie_graduation_videopalyer_jni_VideoPlayerDecode_VideoPlayer 
    (JNIEnv *env, jclass clz, jstring fileName)
    {
        const char* local_title = (*env)->GetStringUTFChars(env, fileName, NULL);
        av_register_all(); // register all supported container formats and codecs
        if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) { 
            fprintf(stderr, "Could not initialize SDL - %s ", SDL_GetError()); 
            exit(1); 
        } 
        if(avformat_open_input(&pFormatCtx, local_title, NULL, NULL) != 0) 
                    return -1; 
        if(avformat_find_stream_info(pFormatCtx, NULL) < 0) 
                    return -1; 
        av_dump_format(pFormatCtx, -1, local_title, 0); 
        videoStream=-1; 
        for(i=0; i<pFormatCtx->nb_streams; i++) 
            if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO) { 
                videoStream=i; 
                break; 
            } 
        if(videoStream==-1) 
            return -1; // Didn't find a video stream  
        // Get a pointer to the codec context for the video stream  
        pCodecCtx=pFormatCtx->streams[videoStream]->codec; 
     
        // Find the decoder for the video stream  
        pCodec=avcodec_find_decoder(pCodecCtx->codec_id); 
        if(pCodec==NULL) { 
            fprintf(stderr, "Unsupported codec! "); 
            return -1; // Codec not found  
        } 
        if(avcodec_open2(pCodecCtx, pCodec, NULL) < 0)return -1; 
        pFrame = avcodec_alloc_frame(); 
        if(pFrame == NULL)return -1; 
        // Make a screen to put our video  
    #ifndef __DARWIN__  
        screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 0, 0); 
    #else  
        screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 24, 0); 
    #endif  
        if(!screen) { 
            fprintf(stderr, "SDL: could not set video mode - exiting "); 
            exit(1); 
        } 
        // Allocate a place to put our YUV image on that screen  
        bmp = SDL_CreateYUVOverlay(pCodecCtx->width, 
                pCodecCtx->height, 
                SDL_YV12_OVERLAY, 
                screen); 
        // The overlay created above is YV12 (planar YUV 4:2:0), so scale to
        // PIX_FMT_YUV420P rather than RGB24.
        img_convert_ctx = sws_getContext(pCodecCtx->width,
                              pCodecCtx->height, pCodecCtx->pix_fmt,
                              pCodecCtx->width, pCodecCtx->height,
                              PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
     
        // Read packets from the file and display every decoded video frame
        i=0; 
        while(av_read_frame(pFormatCtx, &packet)>=0) { 
            // Is this a packet from the video stream?  
            if(packet.stream_index==videoStream) { 
                avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet); 
                // Did we get a video frame?  
                if(frameFinished) { 
                    SDL_LockYUVOverlay(bmp); 
                     
                    // Point a stack AVPicture at the overlay planes. YV12 stores the
                    // V plane before the U plane, so planes 1 and 2 are swapped to
                    // match the YUV420P layout that sws_scale writes.
                    AVPicture pict;
                    pict.data[0] = bmp->pixels[0];
                    pict.data[1] = bmp->pixels[2];
                    pict.data[2] = bmp->pixels[1];

                    pict.linesize[0] = bmp->pitches[0];
                    pict.linesize[1] = bmp->pitches[2];
                    pict.linesize[2] = bmp->pitches[1];

                    sws_scale(img_convert_ctx, pFrame->data, pFrame->linesize,
                              0, pCodecCtx->height, pict.data, pict.linesize);
                    SDL_UnlockYUVOverlay(bmp); 
     
                    rect.x = 0; 
                    rect.y = 0; 
                    rect.w = pCodecCtx->width; 
                    rect.h = pCodecCtx->height; 
                    SDL_DisplayYUVOverlay(bmp, &rect); 
     
                } 
            } 
            // Free the packet that was allocated by av_read_frame  
            av_free_packet(&packet); 
            SDL_PollEvent(&event); 
            switch(event.type) { 
                case SDL_QUIT: 
                    SDL_Quit(); 
                    exit(0); 
                    break; 
                default: 
                    break; 
            } 
     
        } 
        // Free the YUV frame  
        av_free(pFrame); 
     
        // Close the codec  
        avcodec_close(pCodecCtx); 
     
        // Close the video file  
        av_close_input_file(pFormatCtx);

        (*env)->ReleaseStringUTFChars(env, fileName, local_title);
        return 0;
    }

    2. The build output is as follows:


    [plain]
    root@zhangjie:/Graduation/jni# ndk-build 
    Install        : libSDL.so => libs/armeabi/libSDL.so 
    Install        : libffmpeg-neon.so => libs/armeabi/libffmpeg-neon.so 
    Compile arm    : ffmpeg-test-neon <= Decodec_Video.c 
    /Graduation/jni/jniffmpeg/Decodec_Video.c: In function 'Java_com_zhangjie_graduation_videopalyer_jni_VideoPlayerDecode_VideoPlayer': 
    /Graduation/jni/jniffmpeg/Decodec_Video.c:106:1: warning: passing argument 2 of 'sws_scale' from incompatible pointer type [enabled by default] 
    /Graduation/jni/jniffmpeg/../ffmpeg/libswscale/swscale.h:237:5: note: expected 'uint8_t const * const*' but argument is of type 'uint8_t **' 
    /Graduation/jni/jniffmpeg/Decodec_Video.c:137:2: warning: 'av_close_input_file' is deprecated (declared at /Graduation/jni/jniffmpeg/../ffmpeg/libavformat/avformat.h:1533) [-Wdeprecated-declarations] 
    SharedLibrary  : libffmpeg-test-neon.so 
    Install        : libffmpeg-test-neon.so => libs/armeabi/libffmpeg-test-neon.so 
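
    Both warnings are harmless for now, but they flag code that newer FFmpeg releases reject outright. The sketch below shows how each one could be silenced, assuming the FFmpeg tree built in this series (its avformat.h already declares avformat_close_input()); the helper names are illustrative and not part of Decodec_Video.c:

    [cpp]
    /* Sketch only: addressing the two build warnings above.
     * The helper names are illustrative, not part of the original project. */
    #include <stdint.h>
    #include "../ffmpeg/libavformat/avformat.h"
    #include "../ffmpeg/libswscale/swscale.h"

    /* Warning 1: sws_scale() expects "const uint8_t * const *" source planes,
     * so cast the AVFrame plane pointers explicitly. */
    static void scale_frame(struct SwsContext *sws, AVFrame *frame,
                            AVPicture *pict, int height)
    {
        sws_scale(sws, (const uint8_t * const *)frame->data, frame->linesize,
                  0, height, pict->data, pict->linesize);
    }

    /* Warning 2: av_close_input_file() is deprecated; avformat_close_input()
     * is its replacement and also resets the context pointer to NULL. */
    static void close_input(AVFormatContext **fmt_ctx)
    {
        avformat_close_input(fmt_ctx);
    }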

    3. SDL 1.3 source code

     


    4. The earlier chapter Android Local Video Player Development -- Building FFmpeg with the NDK did not enable swscale, so FFmpeg has to be rebuilt; the build script is as follows:


    [plain]
    NDK=/opt/android-ndk-r8d 
    PLATFORM=$NDK/platforms/android-8/arch-arm/ 
    PREBUILT=$NDK/toolchains/arm-linux-androideabi-4.4.3/prebuilt/linux-x86 
    LOCAL_ARM_NEON=true 
    CPU=armv7-a 
    OPTIMIZE_CFLAGS="-mfloat-abi=softfp -mfpu=neon -marm -mcpu=cortex-a8" 
    PREFIX=./android/$CPU 
    ./configure --target-os=linux \
        --prefix=$PREFIX \
        --enable-cross-compile \
        --arch=arm \
        --enable-nonfree \
        --enable-asm \
        --cpu=cortex-a8 \
        --enable-neon \
        --cc=$PREBUILT/bin/arm-linux-androideabi-gcc \
        --cross-prefix=$PREBUILT/bin/arm-linux-androideabi- \
        --nm=$PREBUILT/bin/arm-linux-androideabi-nm \
        --sysroot=$PLATFORM \
        --extra-cflags=" -O3 -fpic -DANDROID -DHAVE_SYS_UIO_H=1 $OPTIMIZE_CFLAGS " \
        --disable-shared \
        --enable-static \
        --extra-ldflags="-Wl,-rpath-link=$PLATFORM/usr/lib -L$PLATFORM/usr/lib  -nostdlib -lc -lm -ldl -llog" \
        --disable-ffmpeg \
        --disable-ffplay \
        --disable-ffprobe \
        --disable-ffserver \
        --disable-encoders \
        --enable-avformat \
        --disable-optimizations \
        --disable-doc \
        --enable-pthreads \
        --disable-yasm \
        --enable-zlib \
        --enable-pic \
        --enable-small
     
    #make clean 
    make  -j4 install 
     
    $PREBUILT/bin/arm-linux-androideabi-ar d libavcodec/libavcodec.a inverse.o 
     
    $PREBUILT/bin/arm-linux-androideabi-ld -rpath-link=$PLATFORM/usr/lib -L$PLATFORM/usr/lib  -soname libffmpeg-neon.so -shared -nostdlib  -z noexecstack -Bsymbolic --whole-archive --no-undefined -o $PREFIX/libffmpeg-neon.so libavcodec/libavcodec.a libavformat/libavformat.a libavutil/libavutil.a  libavfilter/libavfilter.a libswresample/libswresample.a libswscale/libswscale.a libavdevice/libavdevice.a -lc -lm -lz -ldl -llog  --warn-once  --dynamic-linker=/system/bin/linker $PREBUILT/lib/gcc/arm-linux-androideabi/4.4.3/libgcc.a 
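
    Since the point of this rebuild is to pull in swscale, it is worth checking at runtime that the new libffmpeg-neon.so really exports it. A minimal sketch, assuming the same include layout as Decodec_Video.c (the function name below is illustrative and not part of the project):

    [cpp]
    /* Sketch: log the linked libswscale version so logcat confirms that the
     * rebuilt libffmpeg-neon.so actually contains swscale. */
    #include <android/log.h>
    #include "../ffmpeg/libswscale/swscale.h"

    static void log_swscale_version(void)
    {
        unsigned v = swscale_version();  /* packed as major<<16 | minor<<8 | micro */
        __android_log_print(ANDROID_LOG_INFO, "ffmpeg-neon",
                            "libswscale version: %u.%u.%u",
                            v >> 16, (v >> 8) & 0xff, v & 0xff);
    }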


  • Original article: https://www.cnblogs.com/snake-hand/p/3144876.html