• Implementing simple Android live streaming


    Android live streaming essentially pushes the data captured by the camera to a server and plays that stream back on the client. A SurfaceView + SurfaceTexture combination displays the camera data: after SurfaceTexture receives the image stream, it can apply secondary processing (for example, adding a text watermark) and then hand the result to another SurfaceView for display.
    First, capturing camera data on Android is well documented online, so I won't repeat it here. The frame data can be obtained as the data argument of the callback onPreviewFrame(byte[] data, Camera camera).
    The key problem is how to transfer data to the server. To solve it I use FFmpeg: create a jni folder in the Android project and call into native code via JNI. The native functions are as follows:

    // Called from onPreviewFrame() for every camera frame.
    public native void push(byte[] data);
    // Can be called from surfaceChanged(); initializes the output path on the JNI side.
    public native int initPush(String outputurl, int width, int height);
    The JNI-layer code is as follows:

    #include <jni.h>
    #include <string.h>
    #include <android/log.h>
    #include "libavcodec/avcodec.h"
    #include "libavdevice/avdevice.h"
    #include "libavfilter/avfilter.h"
    #include "libavformat/avformat.h"
    #include "libavutil/opt.h"

    // Log macros (tag name assumed) and the globals implied by the original code.
    #define LOGI(...) __android_log_print(ANDROID_LOG_INFO, "ffmpeg", __VA_ARGS__)
    #define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, "ffmpeg", __VA_ARGS__)

    static AVFormatContext *ofmt_ctx = NULL;
    static AVOutputFormat *ofmt = NULL;
    static AVCodec *codec = NULL;
    static AVCodecContext *c = NULL;
    static AVStream *video_st = NULL;
    static int yuv_width, yuv_height, y_length, uv_length;
    static int inited = 0;

    static void ffmpeg_init() {
        avcodec_register_all();
    #if CONFIG_AVDEVICE
        avdevice_register_all();
    #endif
        avfilter_register_all();
        av_register_all();
        avformat_network_init();
    }

    static int avinit(int width, int height) {
        /* Encoder setup */
        if (codec == NULL) {
            yuv_width = width;
            yuv_height = height;
            y_length = width * height;
            uv_length = width * height / 4;
            LOGI("init start\n");

            codec = avcodec_find_encoder(AV_CODEC_ID_H264);
            if (!codec) {
                LOGI("Codec not found\n");
                return -1;
            }

            c = avcodec_alloc_context3(codec);
            if (!c) {
                LOGI("Could not allocate video codec context\n");
                return -1;
            }

            /* put sample parameters */
            c->bit_rate = 400000;
            /* resolution must be a multiple of two */
            c->width = width;
            c->height = height;
            /* frames per second */
            c->time_base.num = 1;
            c->time_base.den = 25;
            /* Emit one intra frame every gop_size frames. Check frame->pict_type
             * before passing the frame to the encoder: if it is AV_PICTURE_TYPE_I,
             * gop_size is ignored and the encoder always outputs an I-frame. */
            if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
                c->flags |= CODEC_FLAG_GLOBAL_HEADER;
            c->gop_size = 10;
            c->max_b_frames = 10;
            c->pix_fmt = AV_PIX_FMT_YUV420P;
            av_opt_set(c->priv_data, "preset", "superfast", 0);
            av_opt_set(c->priv_data, "tune", "zerolatency", 0);

            /* open it */
            if (avcodec_open2(c, codec, NULL) < 0) {
                LOGI("Could not open codec\n");
                return -1;
            }
            LOGI("init end 1\n");

            video_st = avformat_new_stream(ofmt_ctx, codec);
            if (video_st == NULL) {
                LOGI("video_st == NULL\n");
                return -1;
            }
            video_st->time_base.num = 1;
            video_st->time_base.den = 25;
            video_st->codec = c;
        }
        /* Encoder setup ends */
        return 0;
    }

    JNIEXPORT jint JNICALL Java_com_example_directvideo_MainActivity_initPush
      (JNIEnv *env, jobject obj, jstring out, jint width, jint height) {
        int ret;
        ffmpeg_init();
        // e.g. "rtmp://192.168.1.102:1935/myapp/test1" or "/storage/emulated/0/zsy.mp4"
        const char *str = (*env)->GetStringUTFChars(env, out, 0);
        LOGI("%s %d %d ", str, width, height);

        // Output context: flv over RTMP (use "mpegts" instead for UDP)
        avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", str);
        if (!ofmt_ctx) {
            LOGI("Could not create output context\n");
            ret = AVERROR_UNKNOWN;
            goto end;
        }

        ret = avinit(width, height);
        if (ret < 0)
            goto end;

        ofmt = ofmt_ctx->oformat;
        // Open output URL
        if (!(ofmt->flags & AVFMT_NOFILE)) {
            ret = avio_open(&ofmt_ctx->pb, str, AVIO_FLAG_WRITE);
            if (ret < 0) {
                LOGI("Could not open output URL '%s'", str);
                goto end;
            }
        }

        ret = avformat_write_header(ofmt_ctx, NULL);
        if (ret < 0) {
            LOGE("Error occurred when opening output URL\n");
            goto end;
        }

    end:
        (*env)->ReleaseStringUTFChars(env, out, str);
        if (ret < 0 && ret != AVERROR_EOF) {
            LOGI("Error occurred.\n");
            return ret;
        }
        inited = 1;
        return 0;
    }
    Note that the video stream must be encoded as H.264 before transmission. Once initialization succeeds, push() can be called to send each camera frame to the server; a sketch of its JNI body follows.
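    The original post does not show the JNI body of push(). Below is a minimal sketch of what it could look like, assuming NV21 preview frames (the Android camera default), the globals shown earlier, and the same old-style FFmpeg encode API; the static pts counter and the timestamp rescaling are assumptions of mine, not taken from the original.

    JNIEXPORT void JNICALL Java_com_example_directvideo_MainActivity_push
      (JNIEnv *env, jobject obj, jbyteArray data) {
        static int64_t pts_counter = 0;  // assumed frame counter used as pts
        if (!inited)
            return;

        jbyte *in = (*env)->GetByteArrayElements(env, data, NULL);
        const uint8_t *nv21 = (const uint8_t *) in;

        AVFrame *frame = av_frame_alloc();
        frame->format = AV_PIX_FMT_YUV420P;
        frame->width = yuv_width;
        frame->height = yuv_height;
        av_frame_get_buffer(frame, 32);

        // NV21 (Y plane + interleaved VU) -> YUV420P (planar Y, U, V),
        // respecting each plane's linesize padding.
        int i, j;
        for (i = 0; i < yuv_height; i++)
            memcpy(frame->data[0] + i * frame->linesize[0],
                   nv21 + i * yuv_width, yuv_width);
        for (i = 0; i < yuv_height / 2; i++) {
            for (j = 0; j < yuv_width / 2; j++) {
                const uint8_t *vu = nv21 + y_length + i * yuv_width + j * 2;
                frame->data[2][i * frame->linesize[2] + j] = vu[0]; // V
                frame->data[1][i * frame->linesize[1] + j] = vu[1]; // U
            }
        }
        frame->pts = pts_counter++;

        AVPacket pkt;
        av_init_packet(&pkt);
        pkt.data = NULL;  // buffer is allocated by the encoder
        pkt.size = 0;

        int got_packet = 0;
        if (avcodec_encode_video2(c, &pkt, frame, &got_packet) == 0 && got_packet) {
            pkt.stream_index = video_st->index;
            // Rescale timestamps from the encoder time base (1/25)
            // to the muxer's stream time base before writing.
            av_packet_rescale_ts(&pkt, c->time_base, video_st->time_base);
            av_interleaved_write_frame(ofmt_ctx, &pkt);
            av_free_packet(&pkt);
        }

        av_frame_free(&frame);
        (*env)->ReleaseByteArrayElements(env, data, in, JNI_ABORT);
    }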
    On Ubuntu, I built a simple streaming-media server with nginx plus the RTMP module; a minimal configuration is sketched below.
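    For reference, here is a minimal rtmp block for nginx.conf, assuming nginx was built with the nginx-rtmp-module; the application name myapp matches the publish/play URL used in this post:

    rtmp {
        server {
            listen 1935;
            application myapp {
                live on;     # accept live streams published to this application
                record off;  # do not record incoming streams to disk
            }
        }
    }

    With this in place, the app publishes to rtmp://<server-ip>:1935/myapp/<stream-name>, and any RTMP player can pull the same URL.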
    On a PC, the stream can then be played with the command: ffplay rtmp://192.168.1.102:1935/myapp/test1
• Original article: https://www.cnblogs.com/zhujiabin/p/6179129.html