• Code walkthrough: audio engine creation in WebRTC's built-in peerconnection client


    src/webrtc/examples/peerconnection/client/conductor.cc
    1. bool Conductor::InitializePeerConnection()
    1.1 webrtc::CreatePeerConnectionFactory();
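
    A minimal sketch of what this step looks like in Conductor (the no-argument CreatePeerConnectionFactory() overload matches this era of WebRTC; the header path is an assumption, and newer revisions take threads, an ADM and codec factories explicitly):

    #include "talk/app/webrtc/peerconnectioninterface.h"  // header path at this revision (assumption)

    bool Conductor::InitializePeerConnection() {
      peer_connection_factory_ = webrtc::CreatePeerConnectionFactory();
      if (!peer_connection_factory_.get()) {
        // report the error and bail out, roughly as the example does
        return false;
      }
      // ... continue with CreatePeerConnection() etc.
      return true;
    }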

    src/talk/app/webrtc/peerconnectionfactory.cc
    2. bool PeerConnectionFactory::Initialize()
    2.1.1
    cricket::MediaEngineInterface* PeerConnectionFactory::CreateMediaEngine_w() {
    return cricket::WebRtcMediaEngineFactory::Create(
    default_adm_.get(), video_encoder_factory_.get(), video_decoder_factory_.get());
    }

    src/talk/media/webrtc/webrtcmediaengine.cc
    2.1.2
    MediaEngineInterface* WebRtcMediaEngineFactory::Create(
    webrtc::AudioDeviceModule* adm,
    WebRtcVideoEncoderFactory* encoder_factory,
    WebRtcVideoDecoderFactory* decoder_factory)
    {
    return CreateWebRtcMediaEngine(adm, encoder_factory, decoder_factory);
    }
    2.1.3
    cricket::MediaEngineInterface* CreateWebRtcMediaEngine(
    webrtc::AudioDeviceModule* adm,
    WebRtcVideoEncoderFactory* encoder_factory,
    WebRtcVideoDecoderFactory* decoder_factory)
    {
    return new cricket::WebRtcMediaEngine2(adm, encoder_factory, decoder_factory);
    }

    2.1.4
    class WebRtcMediaEngine2
    : public CompositeMediaEngine<WebRtcVoiceEngine, WebRtcVideoEngine2>
    {
    public:
    WebRtcMediaEngine2(webrtc::AudioDeviceModule* adm,
    WebRtcVideoEncoderFactory* encoder_factory,
    WebRtcVideoDecoderFactory* decoder_factory);
    };

    2.1.5
    src/talk/media/webrtc/webrtcvoiceengine.cc
    WebRtcVoiceEngine::WebRtcVoiceEngine()
    : voe_wrapper_(new VoEWrapper())
    {
    Construct();
    }

    2.1.6
    src/talk/media/webrtc/webrtcvoe.h
    class VoEWrapper {
    public:
    VoEWrapper()
    : engine_(webrtc::VoiceEngine::Create()),
    processing_(engine_),
    base_(engine_),
    codec_(engine_),
    dtmf_(engine_),
    hw_(engine_),
    network_(engine_),
    rtp_(engine_),
    volume_(engine_) {}
    };

    2.1.7
    src/webrtc/voice_engine/voice_engine_impl.cc
    VoiceEngine* VoiceEngine::Create()
    {
    Config* config = new Config(); // default config, owned by the engine
    return GetVoiceEngine(config, true);
    }

    VoiceEngine* GetVoiceEngine(const Config* config, bool owns_config)
    {
    VoiceEngineImpl* self = new VoiceEngineImpl(config, owns_config);
    // (reference counting elided in this excerpt)
    return self;
    }

    2.1.9
    src/webrtc/voice_engine/voice_engine_impl.h
    class VoiceEngineImpl : public voe::SharedData,
    public VoiceEngine,
    public VoEBaseImpl,
    public VoEHardwareImpl // (other VoE*Impl bases omitted)
    {
    public:
    VoiceEngineImpl(const Config* config, bool owns_config)
    : SharedData(*config),
    VoEBaseImpl(this),
    VoEHardwareImpl(this)
    {}
    };

    2.1.10
    void WebRtcVoiceEngine::Construct()
    {
    // Load our audio codec list.
    // Internally iterates voe_wrapper_->codec()->NumOfCodecs().
    ConstructCodecs();

    // Get the default engine options: whether echo cancellation, noise
    // suppression, automatic gain control, etc. are wanted.
    options_ = GetDefaultEngineOptions();
    }
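
    To make the ConstructCodecs() step concrete, here is a small sketch of enumerating the codecs VoE exposes through the same interface (the header paths and the CodecInst field types follow this WebRTC revision and are an assumption; error handling is elided):

    #include <cstdio>
    #include "webrtc/common_types.h"                    // webrtc::CodecInst (assumed path)
    #include "webrtc/voice_engine/include/voe_codec.h"  // webrtc::VoECodec (assumed path)

    // Print every codec VoE reports; WebRtcVoiceEngine::ConstructCodecs()
    // walks the same list to build its cricket::AudioCodec table.
    void DumpVoeCodecs(webrtc::VoECodec* codec) {
      for (int i = 0; i < codec->NumOfCodecs(); ++i) {
        webrtc::CodecInst inst;
        if (codec->GetCodec(i, inst) == 0) {
          std::printf("payload %d: %s/%d, %d channel(s)\n",
                      inst.pltype, inst.plname, inst.plfreq, inst.channels);
        }
      }
    }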

    src/talk/session/media/channelmanager.cc
    2.2
    bool ChannelManager::Init()
    {
    initialized_ = worker_thread_->Invoke<bool>(
    Bind(&ChannelManager::InitMediaEngine_w, this));
    }
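
    The Invoke<bool>() call above blocks the calling thread until InitMediaEngine_w() has run on the worker thread. A standalone illustration of that pattern (plain C++, not the rtc::Thread API):

    #include <future>
    #include <iostream>
    #include <thread>

    bool InitMediaEngineOnWorker() {
      // imagine media_engine_->Init(worker_thread) happening here
      return true;
    }

    int main() {
      std::packaged_task<bool()> task(InitMediaEngineOnWorker);
      std::future<bool> result = task.get_future();
      std::thread worker(std::move(task));  // the "worker thread"
      bool initialized = result.get();      // caller blocks, like Invoke<bool>()
      worker.join();
      std::cout << "initialized = " << initialized << "\n";
      return initialized ? 0 : 1;
    }
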
    2.2.1
    bool ChannelManager::InitMediaEngine_w()
    {
    return (media_engine_->Init(worker_thread_));
    }

    2.2.2
    template<class VOICE, class VIDEO>
    class CompositeMediaEngine : public MediaEngineInterface
    {
    public:
    virtual bool Init(rtc::Thread* worker_thread)
    {
    if (!voice_.Init(worker_thread))
    return false;
    video_.Init();
    return true;
    }
    protected:
    VOICE voice_; // the default WebRtcVoiceEngine, or a custom audio engine
    VIDEO video_;
    };
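
    Since the VOICE slot can be swapped for a custom audio engine, a simplified standalone illustration of the composite pattern may help (toy classes, not the actual cricket types):

    #include <iostream>

    // Toy stand-ins for WebRtcVoiceEngine / WebRtcVideoEngine2.
    struct FakeVoiceEngine {
      bool Init() { std::cout << "voice init\n"; return true; }
    };
    struct FakeVideoEngine {
      void Init() { std::cout << "video init\n"; }
    };

    // Same shape as cricket::CompositeMediaEngine: Init() fails if voice fails.
    template <class VOICE, class VIDEO>
    class CompositeEngine {
     public:
      bool Init() {
        if (!voice_.Init()) return false;  // a custom VOICE type plugs in here
        video_.Init();
        return true;
      }
     protected:
      VOICE voice_;
      VIDEO video_;
    };

    int main() {
      CompositeEngine<FakeVoiceEngine, FakeVideoEngine> engine;
      return engine.Init() ? 0 : 1;
    }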

    2.2.3
    bool WebRtcVoiceEngine::Init(rtc::Thread* worker_thread)
    {
    bool res = InitInternal();
    // (failure cleanup elided in this excerpt)
    return res;
    }

    2.2.4
    bool WebRtcVoiceEngine::InitInternal()
    {
    if (voe_wrapper_->base()->Init(adm_) == -1) // Init() is implemented by VoiceEngineImpl (VoEBaseImpl)
    }

    2.2.5
    VoiceEngineImpl(const Config* config, bool owns_config)
    : SharedData(*config),
    VoEHardwareImpl(this),
    VoEBaseImpl(this)

    src/webrtc/voice_engine/voe_base_impl.cc
    VoEBaseImpl::VoEBaseImpl(voe::SharedData* shared)
    : shared_(shared)


    int VoEBaseImpl::Init(AudioDeviceModule* external_adm, AudioProcessing* audioproc)
    {
    if (external_adm == nullptr)
    {
    // Create the internal ADM implementation. (shared_ points to the VoiceEngineImpl.)
    // AudioDeviceModuleImpl::Create() calls CreatePlatformSpecificObjects().
    shared_->set_audio_device(AudioDeviceModuleImpl::Create(
    VoEId(shared_->instance_id(), -1), shared_->audio_device_layer()));
    }
    else
    {
    // Use the already existing external ADM implementation.
    shared_->set_audio_device(external_adm);
    }

    SharedData::SharedData(const Config& config)
    : _channelManager(_gInstanceCounter, config),
    _audioDevicePtr(NULL)
    {
    _audioDeviceLayer = AudioDeviceModule::kPlatformDefaultAudio;
    }

    void SharedData::set_audio_device(AudioDeviceModule* audio_device) { _audioDevicePtr = audio_device; }

    AudioDeviceModule* SharedData::audio_device() { return _audioDevicePtr; }

    // Register the AudioObserver implementation
    if (shared_->audio_device()->RegisterEventObserver(this) != 0)

    // Register the AudioTransport implementation
    if (shared_->audio_device()->RegisterAudioCallback(this) != 0)

    // ADM initialization
    // AudioDeviceModuleImpl::Init() calls _ptrAudioDevice->Init(), which starts the
    // audio threads that do the actual capture. For example, AudioDeviceWindowsWave::Init()
    // calls _ptrThread = ThreadWrapper::CreateThread(ThreadFunc, this, threadName);
    // the capture path is then:
    //   AudioDeviceWindowsWave::ThreadFunc
    //   -> AudioDeviceWindowsWave::ThreadProcess()
    //   -> AudioDeviceWindowsWave::RecProc(LONGLONG& consumedTime)
    //   -> _ptrAudioBuffer->DeliverRecordedData();
    if (shared_->audio_device()->Init() != 0)

    int32_t AudioDeviceModuleImpl::CreatePlatformSpecificObjects()
    {
    AudioDeviceGeneric* ptrAudioDevice(NULL);
    AudioLayer audioLayer(PlatformAudioLayer()); // AudioDeviceModule::kPlatformDefaultAudio

    // Depending on audioLayer, one of the Windows backends is created:
    ptrAudioDevice = new AudioDeviceWindowsWave(Id()); // Wave audio layer
    // or
    ptrAudioDevice = new AudioDeviceWindowsCore(Id()); // Core Audio layer

    _ptrAudioDevice = ptrAudioDevice;
    }

    // Initialize the default speaker
    if (shared_->audio_device()->SetPlayoutDevice(WEBRTC_VOICE_ENGINE_DEFAULT_DEVICE) != 0)
    if (shared_->audio_device()->InitSpeaker() != 0)

    // Initialize the default microphone
    if (shared_->audio_device()->SetRecordingDevice(WEBRTC_VOICE_ENGINE_DEFAULT_DEVICE) != 0) 

    int32_t AudioDeviceModuleImpl::SetRecordingDevice(WindowsDeviceType device)

    {

    return (_ptrAudioDevice->SetRecordingDevice(device));

    }

    if (shared_->audio_device()->InitMicrophone() != 0)

    // Set number of channels
    if (shared_->audio_device()->StereoPlayoutIsAvailable(&available) != 0)
    if (shared_->audio_device()->SetStereoPlayout(available) != 0)

    if (!audioproc) {
    audioproc = AudioProcessing::Create();
    }
    shared_->set_audio_processing(audioproc);
    }
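
    The else branch in VoEBaseImpl::Init() ("use the already existing external ADM") is reached when the application hands its own AudioDeviceModule to the factory. A sketch of that wiring, assuming the five-argument CreatePeerConnectionFactory() overload of this era; MyAudioDeviceModule is a hypothetical class implementing webrtc::AudioDeviceModule:

    // Sketch only: MyAudioDeviceModule is hypothetical, and the exact
    // CreatePeerConnectionFactory() overload varies between WebRTC revisions.
    rtc::scoped_refptr<webrtc::AudioDeviceModule> my_adm(
        new rtc::RefCountedObject<MyAudioDeviceModule>());

    rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> factory =
        webrtc::CreatePeerConnectionFactory(
            worker_thread, signaling_thread,
            my_adm.get(),  // stored as default_adm_, later passed to Init() as external_adm
            nullptr /* video encoder factory */,
            nullptr /* video decoder factory */);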

    VoEBaseImpl::StartSend()
    {
    if (shared_->audio_device()->StartRecording() != 0)
    }

    int32_t AudioDeviceModuleImpl::StartRecording()
    {
    return (_ptrAudioDevice->StartRecording()); // AudioDeviceWindowsWave or AudioDeviceWindowsCore starts recording
    }
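
    Once StartRecording() succeeds, the platform capture thread produces 10 ms frames and pushes them to the callback registered via RegisterAudioCallback(), which is how DeliverRecordedData() ultimately reaches VoEBaseImpl. A standalone illustration of that producer/callback shape (plain C++, not the real AudioTransport interface):

    #include <atomic>
    #include <chrono>
    #include <cstdint>
    #include <iostream>
    #include <thread>
    #include <vector>

    struct RecordedDataSink {  // stand-in for the registered AudioTransport
      virtual void OnRecordedFrame(const std::vector<int16_t>& samples) = 0;
      virtual ~RecordedDataSink() = default;
    };

    struct PrintingSink : RecordedDataSink {
      void OnRecordedFrame(const std::vector<int16_t>& samples) override {
        std::cout << "got " << samples.size() << " samples\n";
      }
    };

    int main() {
      PrintingSink sink;                  // RegisterAudioCallback(this)
      std::atomic<bool> recording{true};  // StartRecording()
      std::thread capture([&] {           // the platform capture thread
        std::vector<int16_t> frame(480);  // 10 ms at 48 kHz, mono
        for (int i = 0; i < 5 && recording; ++i) {
          std::this_thread::sleep_for(std::chrono::milliseconds(10));
          sink.OnRecordedFrame(frame);    // DeliverRecordedData()
        }
      });
      capture.join();
      recording = false;                  // StopRecording()
    }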

  • Original post: https://www.cnblogs.com/zxpo/p/7667270.html