• Learning licode: the erizo series -- MediaStream


    MediaStream is erizo's core module for media stream processing. After incoming network data is SRTP-decrypted by DtlsTransport, the resulting raw RTP and RTCP data is handed to MediaStream for processing; likewise, outgoing RTP and RTCP data must pass through MediaStream before DtlsTransport encrypts and sends it.

    Personally, I also consider MediaStream the most complex part of erizo. First, take a look at its inheritance hierarchy:

    class MediaStream: public MediaSink, public MediaSource, public FeedbackSink,
                            public FeedbackSource, public LogContext, public HandlerManagerListener,
                            public std::enable_shared_from_this<MediaStream>, public Service 

    The inheritance hierarchy involves these processing base classes: MediaSink, MediaSource, FeedbackSink, FeedbackSource, HandlerManagerListener, and Service.

    MediaStream carries both the receive path and the send path. Combined with packet-loss retransmission, this becomes: when receiving RTP data, it sends RTCP retransmission feedback; when sending RTP data, it receives RTCP retransmission feedback.

    At the interface level, it must expose callbacks for the raw data it sends and receives.

    In erizo's implementation, MediaSink and MediaSource are entangled with each other, but at a high level my understanding is:

    MediaSink: sends data (write to client)

    FeedbackSink: sends feedback data (write to client)

    MediaSource: reads out RTP data (read from client)

    FeedbackSource: reads out feedback data (read from client)

    MediaStream inherits MediaSink and FeedbackSink, so calling deliverVideoData, deliverAudioData, or deliverFeedback on a MediaStream object sends data directly to the peer.
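    For reference, here is an abridged sketch of the MediaSink interface from erizo's MediaDefinitions.h (non-essential members omitted). The key pattern: the public deliver* methods forward to private deliver*_ virtuals, which subclasses override:

    class MediaSink: public virtual Monitor {
     protected:
        // SSRCs the sink expects to receive
        uint32_t audio_sink_ssrc_;
        std::vector<uint32_t> video_sink_ssrc_list_;
        // set when the sink can emit feedback
        FeedbackSource* sink_fb_source_;

     public:
        // public entry points forward to the private virtuals below
        int deliverAudioData(std::shared_ptr<DataPacket> data_packet) {
            return this->deliverAudioData_(data_packet);
        }
        int deliverVideoData(std::shared_ptr<DataPacket> data_packet) {
            return this->deliverVideoData_(data_packet);
        }
        int deliverEvent(MediaEventPtr event) {
            return this->deliverEvent_(event);
        }
        virtual void close() = 0;

     private:
        // subclasses implement these to consume the data
        virtual int deliverAudioData_(std::shared_ptr<DataPacket> data_packet) = 0;
        virtual int deliverVideoData_(std::shared_ptr<DataPacket> data_packet) = 0;
        virtual int deliverEvent_(MediaEventPtr event) = 0;
    };

    This is why the Receiver example later in this post only overrides the trailing-underscore virtuals.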

    To receive the peer's data, MediaSource and FeedbackSource must perform the callbacks. To see how, look at the definition of MediaSource:

    /**
     * A MediaSource is any class that produces audio or video data.
     */
    class MediaSource: public virtual Monitor {
     protected:
        // SSRCs coming from the source
        uint32_t audio_source_ssrc_;
        std::vector<uint32_t> video_source_ssrc_list_;
        MediaSink* video_sink_;
        MediaSink* audio_sink_;
        MediaSink* event_sink_;
        // can it accept feedback
        FeedbackSink* source_fb_sink_;
    
     public:
        void setAudioSink(MediaSink* audio_sink) {
            boost::mutex::scoped_lock lock(monitor_mutex_);
            this->audio_sink_ = audio_sink;
        }
        void setVideoSink(MediaSink* video_sink) {
            boost::mutex::scoped_lock lock(monitor_mutex_);
            this->video_sink_ = video_sink;
        }
        void setEventSink(MediaSink* event_sink) {
          boost::mutex::scoped_lock lock(monitor_mutex_);
          this->event_sink_ = event_sink;
        }
    
        FeedbackSink* getFeedbackSink() {
            boost::mutex::scoped_lock lock(monitor_mutex_);
            return source_fb_sink_;
        }
        virtual int sendPLI() = 0;
        uint32_t getVideoSourceSSRC() {
            boost::mutex::scoped_lock lock(monitor_mutex_);
            if (video_source_ssrc_list_.empty()) {
              return 0;
            }
            return video_source_ssrc_list_[0];
        }
        void setVideoSourceSSRC(uint32_t ssrc) {
            boost::mutex::scoped_lock lock(monitor_mutex_);
            if (video_source_ssrc_list_.empty()) {
              video_source_ssrc_list_.push_back(ssrc);
              return;
            }
            video_source_ssrc_list_[0] = ssrc;
        }
        std::vector<uint32_t> getVideoSourceSSRCList() {
            boost::mutex::scoped_lock lock(monitor_mutex_);
            return video_source_ssrc_list_;  //  return by copy to avoid concurrent access
        }
        void setVideoSourceSSRCList(const std::vector<uint32_t>& new_ssrc_list) {
            boost::mutex::scoped_lock lock(monitor_mutex_);
            video_source_ssrc_list_ = new_ssrc_list;
        }
        uint32_t getAudioSourceSSRC() {
            boost::mutex::scoped_lock lock(monitor_mutex_);
            return audio_source_ssrc_;
        }
        void setAudioSourceSSRC(uint32_t ssrc) {
            boost::mutex::scoped_lock lock(monitor_mutex_);
            audio_source_ssrc_ = ssrc;
        }
    
        bool isVideoSourceSSRC(uint32_t ssrc) {
          auto found_ssrc = std::find_if(video_source_ssrc_list_.begin(), video_source_ssrc_list_.end(),
              [ssrc](uint32_t known_ssrc) {
              return known_ssrc == ssrc;
              });
          return (found_ssrc != video_source_ssrc_list_.end());
        }
    
        bool isAudioSourceSSRC(uint32_t ssrc) {
          return audio_source_ssrc_ == ssrc;
        }
    
        MediaSource() : audio_source_ssrc_{0}, video_source_ssrc_list_{std::vector<uint32_t>(1, 0)},
          video_sink_{nullptr}, audio_sink_{nullptr}, event_sink_{nullptr}, source_fb_sink_{nullptr} {}
        virtual ~MediaSource() {}
    
        virtual void close() = 0;
    };

    MediaSource holds four sink pointers: three MediaSink objects for video, audio, and event, plus a FeedbackSink for feedback.

    MediaStream inherits the corresponding set interfaces from MediaSource, letting the caller install these sink objects. When a read event occurs, MediaStream calls the matching method on each installed sink to pass the data out.

    FeedbackSource works the same way.
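    For completeness, an abridged sketch of the two feedback interfaces from MediaDefinitions.h (simplified; only the members discussed here are shown):

    class FeedbackSink {
     public:
        virtual ~FeedbackSink() {}
        int deliverFeedback(std::shared_ptr<DataPacket> data_packet) {
            return this->deliverFeedback_(data_packet);
        }
     private:
        virtual int deliverFeedback_(std::shared_ptr<DataPacket> data_packet) = 0;
    };

    class FeedbackSource {
     protected:
        FeedbackSink* fb_sink_;
     public:
        FeedbackSource(): fb_sink_{nullptr} {}
        virtual ~FeedbackSource() {}
        // install the sink that will receive RTCP feedback read from the client
        void setFeedbackSink(FeedbackSink* sink) {
            fb_sink_ = sink;
        }
    };

    The fb_sink_ consulted in MediaStream::read below is exactly this installed sink.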

    void MediaStream::read(std::shared_ptr<DataPacket> packet) {
      char* buf = packet->data;
      int len = packet->length;
      // PROCESS RTCP
      RtpHeader *head = reinterpret_cast<RtpHeader*> (buf);
      RtcpHeader *chead = reinterpret_cast<RtcpHeader*> (buf);
      uint32_t recvSSRC = 0;
      if (!chead->isRtcp()) {
        recvSSRC = head->getSSRC();
      } else if (chead->packettype == RTCP_Sender_PT || chead->packettype == RTCP_SDES_PT) {  // Sender Report
        recvSSRC = chead->getSSRC();
      }
      // DELIVER FEEDBACK (RR, FEEDBACK PACKETS)
      if (chead->isFeedback()) {
        if (fb_sink_ != nullptr && should_send_feedback_) {
          fb_sink_->deliverFeedback(std::move(packet));
        }
      } else {
        // RTP or RTCP Sender Report
        if (bundle_) {
          // Check incoming SSRC
          // Deliver data
          if (isVideoSourceSSRC(recvSSRC) && video_sink_) {
            parseIncomingPayloadType(buf, len, VIDEO_PACKET);
            video_sink_->deliverVideoData(std::move(packet));
          } else if (isAudioSourceSSRC(recvSSRC) && audio_sink_) {
            parseIncomingPayloadType(buf, len, AUDIO_PACKET);
            audio_sink_->deliverAudioData(std::move(packet));
          } else {
            ELOG_DEBUG("%s read video unknownSSRC: %u, localVideoSSRC: %u, localAudioSSRC: %u",
                        toLog(), recvSSRC, this->getVideoSourceSSRC(), this->getAudioSourceSSRC());
          }
        } else {
          if (packet->type == AUDIO_PACKET && audio_sink_) {
            parseIncomingPayloadType(buf, len, AUDIO_PACKET);
            // Firefox does not send SSRC in SDP
            if (getAudioSourceSSRC() == 0) {
              ELOG_DEBUG("%s discoveredAudioSourceSSRC:%u", toLog(), recvSSRC);
              this->setAudioSourceSSRC(recvSSRC);
            }
            audio_sink_->deliverAudioData(std::move(packet));
          } else if (packet->type == VIDEO_PACKET && video_sink_) {
            parseIncomingPayloadType(buf, len, VIDEO_PACKET);
            // Firefox does not send SSRC in SDP
            if (getVideoSourceSSRC() == 0) {
              ELOG_DEBUG("%s discoveredVideoSourceSSRC:%u", toLog(), recvSSRC);
              this->setVideoSourceSSRC(recvSSRC);
            }
            // change ssrc for RTP packets, don't touch here if RTCP
            video_sink_->deliverVideoData(std::move(packet));
          }
        }  // if not bundle
      }  // if not Feedback
    }

    With that, MediaStream's read and write paths are clear. To use MediaStream, we need to:

    1. Define a MediaSink subclass and set it on the MediaStream, to receive the MediaStream's data.

    2. Call the MediaStream's deliver methods directly to send data out.

    class Receiver : public MediaSink
    {
    public:
        virtual void close() {}

    private:
        virtual int deliverAudioData_(std::shared_ptr<DataPacket> data_packet) {
            printf("now receive audio packet\n");
            return 0;
        }
        virtual int deliverVideoData_(std::shared_ptr<DataPacket> data_packet) {
            printf("now receive video packet\n");
            return 0;
        }
        virtual int deliverEvent_(MediaEventPtr event) {
            printf("now receive event packet\n");
            return 0;
        }
    };
    class StreamControl
    {
    public:
        void init() {
            m_workerPool = std::make_shared<ThreadPool>(2);
            m_ioWorkerPool = std::make_shared<IOThreadPool>(2);

            std::string connid = "1";
            IceConfig cfg;  // you may need to init the cfg value
            std::vector<RtpMap> rtp_mappings;  // you may need to init the mappings
            std::vector<erizo::ExtMap> ext_mappings;  // you may need to init the ext mappings
            WebRtcConnectionEventListener* listener = new SendOfferEvtListener;  // your own listener subclass
            m_conn = std::make_shared<WebRtcConnection>(m_workerPool->getLessUsedWorker(),
                m_ioWorkerPool->getLessUsedIOWorker(),
                connid,
                cfg,
                rtp_mappings,
                ext_mappings,
                listener);
            std::string stream_id = "1";
            std::string stream_label = "1";
            // MediaStream uses enable_shared_from_this, so it must be owned by a shared_ptr
            m_pStream = std::make_shared<MediaStream>(m_workerPool->getLessUsedWorker(),
                m_conn,
                stream_id,
                stream_label,
                true);
            m_conn->addMediaStream(m_pStream);
            m_pStream->setVideoSink(&m_receiver);
            m_pStream->setAudioSink(&m_receiver);
            m_pStream->setEventSink(&m_receiver);
        }

        void sendRtpData(const char* buf, int len, bool isVideo) {
            std::shared_ptr<DataPacket> dp = std::make_shared<DataPacket>();
            dp->length = len;
            memcpy(dp->data, buf, len);  // DataPacket::data is a fixed-size buffer
            dp->comp = 1;
            dp->type = isVideo ? VIDEO_PACKET : AUDIO_PACKET;
            if (isVideo)
            {
                m_pStream->deliverVideoData(dp);
            }
            else
            {
                m_pStream->deliverAudioData(dp);
            }
        }
    private:
        Receiver m_receiver;
        std::shared_ptr<MediaStream> m_pStream;
        std::shared_ptr<ThreadPool> m_workerPool;
        std::shared_ptr<IOThreadPool> m_ioWorkerPool;
        std::shared_ptr<WebRtcConnection> m_conn;
    };
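    A minimal usage sketch follows. Here rtp_buf and rtp_len are placeholders for raw RTP you already have from somewhere (a file, another transport), and it assumes ICE/DTLS negotiation has been driven elsewhere through the WebRtcConnection:

    StreamControl control;
    control.init();
    // feed one raw video RTP packet into erizo; once the connection is up,
    // it is SRTP-encrypted and sent to the peer
    control.sendRtpData(rtp_buf, rtp_len, true);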

    MediaStream also inherits HandlerManagerListener and Service. These two are the core of media processing, used by the pipeline; I'll write about them in a later post. The pipeline is even more complex, and its code is hard to read.

  • Original post: https://www.cnblogs.com/limedia/p/licode_erizo_mediastream.html