[Android] Capturing real-time Camera data and hardware-encoding it with MediaCodec


    A quick rant:

      The usual flow is always roughly the same, but every now and then I still forget the details of a particular step. Also, please note that the Camera class is no longer recommended (the current API is android.hardware.camera2); unfortunately my company's project was built on the old Camera class, and Camera2 appears to require API 21+. Word of Android 7 is already out, yet Android 6.0 has been on the market for more than three months with what looks like under 3% market share. When will there ever be a standardized, unified platform? Being an Android developer really isn't easy. Sigh.
    The overall flow for capturing real-time camera data on Android and encoding it with a hardware encoder
     
    /* 
     * The rough flow: get a raw frame, encode it, then handle the encoded data.
     */
    /* 1. Get the raw frame */ 
    @Override
    public void onPreviewFrame(byte[] previewData, Camera camera) { 
        /* Here you can rotate or scale previewData, or convert between YUV layouts,
         * e.g. YUV420P (YV12) and YUV420SP (NV21/NV12).
         * Open-source libraries such as libyuv or ffmpeg can help with this.
         */
        getRawFrame(previewData);
        /* Then hand the buffer back to the Camera callback queue */
        camera.addCallbackBuffer(previewData);
    }
    private void getRawFrame(byte[] rawFrame) { encodeFrame(rawFrame); }
    /* 2. Encode the frame */
    private byte[] encodeFrame(byte[] inputData) { return encodedData; } 
    /* 3. With the encoded data you can save it to a local file or push it to a stream */ 
    Operation ? Send(byte[] sendData) : Save(byte[] saveData)  // pseudocode
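    /* A side note on the YUV conversion mentioned above: when the camera delivers NV21
     * but the encoder expects NV12, the two layouts differ only in the order of the
     * interleaved chroma bytes, so a plain-Java swap is enough. This helper is a sketch
     * of my own (not part of the original flow); width/height are the preview
     * dimensions and src holds a full width*height*3/2 NV21 frame.
     */
    private static byte[] nv21ToNv12(byte[] src, int width, int height) {
        final int ySize = width * height;
        byte[] dst = new byte[src.length];
        // The Y plane is identical in both layouts.
        System.arraycopy(src, 0, dst, 0, ySize);
        // NV21 stores chroma as V,U,V,U,...; NV12 stores U,V,U,V,...; swap each pair.
        for (int i = ySize; i + 1 < src.length; i += 2) {
            dst[i] = src[i + 1];
            dst[i + 1] = src[i];
        }
        return dst;
    }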
     

    The onPreviewFrame method above is the Camera preview callback (Camera.PreviewCallback). Before using Camera, you need to initialize a SurfaceView and its SurfaceHolder and implement the corresponding interfaces:

     
    // init the preview surface
    private void initview() {
        SurfaceView surfaceView = (SurfaceView) findViewById(R.id.record_surface);
        SurfaceHolder surfaceHolder = surfaceView.getHolder();
        surfaceHolder.addCallback(this);
        surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);    
    }
    
    
    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        openCamera(holder); // open the camera
    }
    
    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
    }
    
    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        releaseCamera(); // remember to release the camera here in surfaceDestroyed
    }
    
    private void openCamera(SurfaceHolder holder) {
        releaseCamera();
        try {
            // choose the front or back camera as needed
            camera = getCamera(Camera.CameraInfo.CAMERA_FACING_BACK);
        } catch (Exception e) {
            camera = null;
            if (AppContext.isDebugMode) {
                e.printStackTrace();
            }
        }
        if (camera != null) {
            try {
                camera.setPreviewCallback(this);
                // Official way to rotate only the on-screen preview; it does not
                // affect the raw data delivered to onPreviewFrame.
                camera.setDisplayOrientation(90);
                if (parameters == null) { // 'parameters', 'width' and 'height' are member fields
                    parameters = camera.getParameters();
                }
                parameters.setPreviewFormat(ImageFormat.NV21); // common formats: NV21 / YV12
                // Many other camera parameters can be set here, but check that the camera
                // actually supports a value before applying it, or setParameters() may throw.
                parameters.setPreviewSize(width, height);
                camera.setParameters(parameters);
                camera.setPreviewDisplay(holder);
                camera.startPreview();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
    
    @TargetApi(9)
    private Camera getCamera(int cameraType) {
        Camera camera = null;
        try {
            camera = Camera.open(cameraType); 
        } catch (Exception e) {
            e.printStackTrace();
        }
        return camera; // returns null if camera is unavailable
    }
    
    private synchronized void releaseCamera() {
        if (camera != null) {
            try {
                camera.setPreviewCallback(null);
            } catch (Exception e) {
                e.printStackTrace();
            }
            try {
                camera.stopPreview();
            } catch (Exception e) {
                e.printStackTrace();
            }
            try {
                camera.release();
            } catch (Exception e) {
                e.printStackTrace();
            }
            camera = null;
        }
    }
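    
    // The comment in openCamera() about checking whether the camera supports a setting
    // deserves a concrete example. This helper is a sketch of my own (not from the
    // original post): it walks getSupportedPreviewSizes() and returns the supported
    // size closest to the requested one, which can then be passed to setPreviewSize().
    private Camera.Size chooseSupportedPreviewSize(Camera.Parameters params, int reqWidth, int reqHeight) {
        Camera.Size best = null;
        int bestDiff = Integer.MAX_VALUE;
        for (Camera.Size s : params.getSupportedPreviewSizes()) {
            int diff = Math.abs(s.width - reqWidth) + Math.abs(s.height - reqHeight);
            if (diff < bestDiff) {
                bestDiff = diff;
                best = s;
            }
        }
        return best; // the list is documented to contain at least one element
    }
    // Usage inside openCamera(), instead of calling setPreviewSize(width, height) directly:
    //   Camera.Size s = chooseSupportedPreviewSize(parameters, width, height);
    //   parameters.setPreviewSize(s.width, s.height);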
     

    The MediaCodec hardware-encoding part:

     For this part I recommend the implementation in the open-source SRS project as a reference: https://github.com/ossrs/srs-sea.git
     
    // video device.
    private Camera camera;
    private MediaCodec vencoder;
    private MediaCodecInfo vmci;
    private MediaCodec.BufferInfo vebi;
    private byte[] vbuffer;
    // video camera settings.
    private Camera.Size vsize;
    private int vcolor;
    private int vbitrate_kbps = 300;
    private final static int VFPS = 20;
    private final static int VGOP = 5;
    private final static int VWIDTH = 640;
    private final static int VHEIGHT = 480;
    // MIME type queried below; defined as VCODEC ("video/avc") in the SRS source.
    private final static String VCODEC = "video/avc";
    
    /* First, initialize and configure the MediaCodec encoder */
    private void initMediaCodec() {
         // choose the right vencoder, prefer qcom then google.
        vcolor = chooseVideoEncoder();
        // vencoder yuv to 264 es stream.
        // requires sdk level 16+, Android 4.1, 4.1.1, the JELLY_BEAN
        try {
            vencoder = MediaCodec.createByCodecName(vmci.getName());
        } catch (IOException e) {
            Log.e(TAG, "create vencoder failed.");
            e.printStackTrace();
            return;
        }
        vebi = new MediaCodec.BufferInfo();
        // setup the vencoder.
        // @see https://developer.android.com/reference/android/media/MediaCodec.html
        MediaFormat vformat = MediaFormat.createVideoFormat(VCODEC, vsize.width, vsize.height);
        vformat.setInteger(MediaFormat.KEY_COLOR_FORMAT, vcolor);
        vformat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 0);
        vformat.setInteger(MediaFormat.KEY_BIT_RATE, 1000 * vbitrate_kbps);
        vformat.setInteger(MediaFormat.KEY_FRAME_RATE, VFPS);
        vformat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, VGOP);
        Log.i(TAG, String.format("vencoder %s, color=%d, bitrate=%d, fps=%d, gop=%d, size=%dx%d",
                vmci.getName(), vcolor, vbitrate_kbps, VFPS, VGOP, vsize.width, vsize.height));
        // the following error can be ignored:
        // 1. the storeMetaDataInBuffers error:
        //      [OMX.qcom.video.encoder.avc] storeMetaDataInBuffers (output) failed w/ err -2147483648
        //      @see http://bigflake.com/mediacodec/#q12
        vencoder.configure(vformat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        vencoder.start();
    }
    
    // for the vbuffer for YV12(android YUV), @see below:
    // https://developer.android.com/reference/android/hardware/Camera.Parameters.html#setPreviewFormat(int)
    // https://developer.android.com/reference/android/graphics/ImageFormat.html#YV12
    private int getYuvBuffer(int width, int height) {
        // stride = ALIGN(width, 16)
        int stride = (int) Math.ceil(width / 16.0) * 16;
        // y_size = stride * height
        int y_size = stride * height;
        // c_stride = ALIGN(stride/2, 16)
        int c_stride = (int) Math.ceil(width / 32.0) * 16;
        // c_size = c_stride * height/2
        int c_size = c_stride * height / 2;
        // size = y_size + c_size * 2
        return y_size + c_size * 2;
    }
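    
    // For completeness, this is roughly how a buffer sized by getYuvBuffer() is handed
    // to the camera when using the "with buffer" callback variant. This is a sketch of
    // my own rather than SRS code; it assumes the preview format has been set to
    // ImageFormat.YV12 (which is what getYuvBuffer() sizes for) and that the enclosing
    // class implements Camera.PreviewCallback.
    private void setupPreviewBuffer() {
        vbuffer = new byte[getYuvBuffer(VWIDTH, VHEIGHT)];
        camera.addCallbackBuffer(vbuffer);          // queue the reusable buffer
        camera.setPreviewCallbackWithBuffer(this);  // onPreviewFrame() will receive vbuffer
        // inside onPreviewFrame(), hand the buffer back once the frame has been consumed:
        //   camera.addCallbackBuffer(data);
    }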
    
    // choose the video encoder by name.
    private MediaCodecInfo chooseVideoEncoder(String name, MediaCodecInfo def) {
        int nbCodecs = MediaCodecList.getCodecCount();
        for (int i = 0; i < nbCodecs; i++) {
            MediaCodecInfo mci = MediaCodecList.getCodecInfoAt(i);
            if (!mci.isEncoder()) {
                continue;
            }
            String[] types = mci.getSupportedTypes();
            for (int j = 0; j < types.length; j++) {
                if (types[j].equalsIgnoreCase(VCODEC)) {
                    //Log.i(TAG, String.format("vencoder %s types: %s", mci.getName(), types[j]));
                    if (name == null) {
                        return mci;
                    }
    
                    if (mci.getName().contains(name)) {
                        return mci;
                    }
                }
            }
        }
        return def;
    }
    
    // choose the right supported color format. @see below:
    // https://developer.android.com/reference/android/media/MediaCodecInfo.html
    // https://developer.android.com/reference/android/media/MediaCodecInfo.CodecCapabilities.html
    private int chooseVideoEncoder() {
        // choose the encoder "video/avc":
        //      1. select one when type matched.
        //      2. prefer google avc.
        //      3. prefer qcom avc.
        vmci = chooseVideoEncoder(null, null);
        //vmci = chooseVideoEncoder("google", vmci);
        //vmci = chooseVideoEncoder("qcom", vmci);
    
        int matchedColorFormat = 0;
        MediaCodecInfo.CodecCapabilities cc = vmci.getCapabilitiesForType(VCODEC);
        for (int i = 0; i < cc.colorFormats.length; i++) {
            int cf = cc.colorFormats[i];
            Log.i(TAG, String.format("vencoder %s supports color format 0x%x(%d)", vmci.getName(), cf, cf));
    
            // choose YUV for h.264, prefer the bigger one.
            // corresponding to the color space transform in onPreviewFrame
            if (cf >= MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar
                    && cf <= MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar) {
                if (cf > matchedColorFormat) {
                    matchedColorFormat = cf;
                }
            }
        }
        for (int i = 0; i < cc.profileLevels.length; i++) {
            MediaCodecInfo.CodecProfileLevel pl = cc.profileLevels[i];
            Log.i(TAG, String.format("vencoder %s support profile %d, level %d", vmci.getName(), pl.profile, pl.level));
        }
        Log.i(TAG, String.format("vencoder %s choose color format 0x%x(%d)", vmci.getName(), matchedColorFormat, matchedColorFormat));
        return matchedColorFormat;
    }
     

      The code above is a partial excerpt from SRS and is provided for reference only.

      I also recommend another project that saves the encoded data as a local .h264 file, which is convenient for analysis. My fork: https://github.com/eterrao/MediaCodecEncodeH264.git 

      Original author's repository: https://github.com/sszhangpengfei/MediaCodecEncodeH264.git 

      (Many thanks to everyone who shares their work in the open-source spirit; because of you I have hit far fewer pitfalls while learning and growing!)
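
      The SRS excerpt stops after the encoder is configured. The missing piece, feeding each preview frame in and draining the H.264 output, looks roughly like the sketch below. This is my own sketch written against the pre-API-21 buffer-array calls (the same deprecated synchronous style quoted from the official docs further down); onEncodedFrame is a hypothetical handler for the compressed output, and yuvFrame is assumed to already be in the color format chosen by chooseVideoEncoder().

    // Sketch: push one YUV frame into the encoder and drain whatever output is ready.
    private void onGetYuvFrame(byte[] yuvFrame, long ptsUs) {
        ByteBuffer[] inBuffers = vencoder.getInputBuffers();
        ByteBuffer[] outBuffers = vencoder.getOutputBuffers();

        int inIndex = vencoder.dequeueInputBuffer(-1); // block until an input buffer is free
        if (inIndex >= 0) {
            ByteBuffer bb = inBuffers[inIndex];
            bb.clear();
            bb.put(yuvFrame, 0, yuvFrame.length);
            vencoder.queueInputBuffer(inIndex, 0, yuvFrame.length, ptsUs, 0);
        }

        for (;;) {
            int outIndex = vencoder.dequeueOutputBuffer(vebi, 0);
            if (outIndex < 0) {
                // INFO_TRY_AGAIN_LATER / INFO_OUTPUT_FORMAT_CHANGED / INFO_OUTPUT_BUFFERS_CHANGED;
                // a full implementation reacts to the last two, this sketch just waits for the next call.
                break;
            }
            ByteBuffer bb = outBuffers[outIndex];
            onEncodedFrame(bb, vebi); // hypothetical handler: save locally or push to a stream
            vencoder.releaseOutputBuffer(outIndex, false);
        }
    }

      If all you want is a local .h264 dump like the project above produces, that hypothetical onEncodedFrame handler can simply append each encoded buffer to a file; h264Out is a placeholder FileOutputStream opened elsewhere, not an API of that project.

    // Sketch: append each encoded access unit to a raw .h264 (Annex-B) file for offline analysis.
    private FileOutputStream h264Out; // e.g. opened on a file under getExternalFilesDir()

    private void onEncodedFrame(ByteBuffer bb, MediaCodec.BufferInfo bi) {
        byte[] chunk = new byte[bi.size];
        bb.position(bi.offset);
        bb.limit(bi.offset + bi.size);
        bb.get(chunk);
        try {
            h264Out.write(chunk); // SPS/PPS and NAL units exactly as the encoder emits them
        } catch (IOException e) {
            e.printStackTrace();
        }
    }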

      MediaCodec implementations are, in practice, all much the same, but note that from API 21 (Lollipop) onward the mechanism for handing data to and from the codec changed. The official guidance is as follows:

    Link: https://developer.android.com/reference/android/media/MediaCodec.html

    The following is excerpted from the official API documentation:

    Depending on the API version, you can process data in three ways:

    Processing Mode                        API version <= 20            API version >= 21
                                           (Jelly Bean/KitKat)          (Lollipop and later)
    Synchronous API using buffer arrays    Supported                    Deprecated
    Synchronous API using buffers          Not Available                Supported
    Asynchronous API using buffers         Not Available                Supported

    Asynchronous Processing using Buffers

    Since LOLLIPOP, the preferred method is to process data asynchronously by setting a callback before calling configure. Asynchronous mode changes the state transitions slightly, because you must call start() after flush() to transition the codec to the Running sub-state and start receiving input buffers. Similarly, upon an initial call to start the codec will move directly to the Running sub-state and start passing available input buffers via the callback.
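
    In this article's setup, the push-based onPreviewFrame callback still has to be bridged to this callback-driven API, typically by parking frames in a small queue. The following is a minimal sketch of my own, not part of the official example below; the queue size and timestamping are arbitrary, and blocking the codec's callback thread the way take() does here is acceptable only in a sketch.

    // Sketch: hand camera frames to the asynchronous encoder through a bounded queue
    // (java.util.concurrent.ArrayBlockingQueue).
    private final BlockingQueue<byte[]> frameQueue = new ArrayBlockingQueue<>(4);

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        // Copy the frame: the buffer is returned to the camera right away and may be
        // overwritten before the encoder reads it.
        frameQueue.offer(data.clone()); // if the queue is full the frame is simply dropped
        camera.addCallbackBuffer(data);
    }

    // Inside MediaCodec.Callback:
    @Override
    public void onInputBufferAvailable(MediaCodec mc, int inputBufferId) {
        try {
            byte[] frame = frameQueue.take(); // wait for the next camera frame
            ByteBuffer inputBuffer = mc.getInputBuffer(inputBufferId);
            inputBuffer.clear();
            inputBuffer.put(frame);
            mc.queueInputBuffer(inputBufferId, 0, frame.length, System.nanoTime() / 1000, 0);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }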

    MediaCodec is typically used like this in asynchronous mode:

     
    MediaCodec codec = MediaCodec.createByCodecName(name);
     MediaFormat mOutputFormat; // member variable
     codec.setCallback(new MediaCodec.Callback() {
       @Override
       void onInputBufferAvailable(MediaCodec mc, int inputBufferId) {
         ByteBuffer inputBuffer = codec.getInputBuffer(inputBufferId);
         // fill inputBuffer with valid data
         …
         codec.queueInputBuffer(inputBufferId, …);
       }
    
       @Override
       void onOutputBufferAvailable(MediaCodec mc, int outputBufferId, …) {
         ByteBuffer outputBuffer = codec.getOutputBuffer(outputBufferId);
         MediaFormat bufferFormat = codec.getOutputFormat(outputBufferId); // option A
         // bufferFormat is equivalent to mOutputFormat
         // outputBuffer is ready to be processed or rendered.
         …
         codec.releaseOutputBuffer(outputBufferId, …);
       }
    
       @Override
       void onOutputFormatChanged(MediaCodec mc, MediaFormat format) {
         // Subsequent data will conform to new format.
         // Can ignore if using getOutputFormat(outputBufferId)
         mOutputFormat = format; // option B
       }
    
       @Override
       void onError(…) {
         …
       }
     });
     codec.configure(format, …);
     mOutputFormat = codec.getOutputFormat(); // option B
     codec.start();
     // wait for processing to complete
     codec.stop();
     codec.release();
     

    Synchronous Processing using Buffers

    Since LOLLIPOP, you should retrieve input and output buffers using getInput/OutputBuffer(int) and/or getInput/OutputImage(int) even when using the codec in synchronous mode. This allows certain optimizations by the framework, e.g. when processing dynamic content. This optimization is disabled if you call getInput/OutputBuffers().

    Note: do not mix the methods of using buffers and buffer arrays at the same time. Specifically, only call getInput/OutputBuffers directly after start() or after having dequeued an output buffer ID with the value of INFO_OUTPUT_FORMAT_CHANGED.

    MediaCodec is typically used like this in synchronous mode:

     
     MediaCodec codec = MediaCodec.createByCodecName(name);
     codec.configure(format, …);
     MediaFormat outputFormat = codec.getOutputFormat(); // option B
     codec.start();
     for (;;) {
       int inputBufferId = codec.dequeueInputBuffer(timeoutUs);
       if (inputBufferId >= 0) {
         ByteBuffer inputBuffer = codec.getInputBuffer(…);
         // fill inputBuffer with valid data
         …
         codec.queueInputBuffer(inputBufferId, …);
       }
       int outputBufferId = codec.dequeueOutputBuffer(…);
       if (outputBufferId >= 0) {
         ByteBuffer outputBuffer = codec.getOutputBuffer(outputBufferId);
         MediaFormat bufferFormat = codec.getOutputFormat(outputBufferId); // option A
         // bufferFormat is identical to outputFormat
         // outputBuffer is ready to be processed or rendered.
         …
         codec.releaseOutputBuffer(outputBufferId, …);
       } else if (outputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
         // Subsequent data will conform to new format.
         // Can ignore if using getOutputFormat(outputBufferId)
         outputFormat = codec.getOutputFormat(); // option B
       }
     }
     codec.stop();
     codec.release();
     

    Synchronous Processing using Buffer Arrays (deprecated)

    In versions KITKAT_WATCH and before, the set of input and output buffers are represented by the ByteBuffer[] arrays. After a successful call to start(), retrieve the buffer arrays using getInput/OutputBuffers(). Use the buffer ID-s as indices into these arrays (when non-negative), as demonstrated in the sample below. Note that there is no inherent correlation between the size of the arrays and the number of input and output buffers used by the system, although the array size provides an upper bound.

     
     MediaCodec codec = MediaCodec.createByCodecName(name);
     codec.configure(format, …);
     codec.start();
     ByteBuffer[] inputBuffers = codec.getInputBuffers();
     ByteBuffer[] outputBuffers = codec.getOutputBuffers();
     for (;;) {
       int inputBufferId = codec.dequeueInputBuffer(…);
       if (inputBufferId >= 0) {
         // fill inputBuffers[inputBufferId] with valid data
         …
         codec.queueInputBuffer(inputBufferId, …);
       }
       int outputBufferId = codec.dequeueOutputBuffer(…);
       if (outputBufferId >= 0) {
         // outputBuffers[outputBufferId] is ready to be processed or rendered.
         …
         codec.releaseOutputBuffer(outputBufferId, …);
       } else if (outputBufferId == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
         outputBuffers = codec.getOutputBuffers();
       } else if (outputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
         // Subsequent data will conform to new format.
         MediaFormat format = codec.getOutputFormat();
       }
     }
     codec.stop();
     codec.release();
     
     
    Original post: https://www.cnblogs.com/android-blogs/p/5643124.html