I have recently been studying the libjingle_peerconnection code.
Not being sure where to start writing, I'll begin by listing the classes and the various data structures.
PeerConnectionFactory:
When a PeerConnectionFactory instance is created, two threads are created along with it (signaling_thread_ and worker_thread_, both of type rtc::Thread). I don't yet know what they are for or how the two relate to each other. The full class declaration is listed further below.
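For context, here is a rough caller-side sketch of the two ways such a factory (and with it those two threads) is normally obtained through the libjingle-era public API. The header path, the behaviour of owns_ptrs_ noted in the comments, and the use of NULL for the optional device/codec factories are my assumptions, not things I have verified in the source yet:

#include "talk/app/webrtc/peerconnectioninterface.h"  // assumed libjingle-era path

// Case 1: the no-argument factory function. PeerConnectionFactory presumably
// creates and starts signaling_thread_ and worker_thread_ itself
// (owns_ptrs_ == true).
rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> MakeDefaultFactory() {
  return webrtc::CreatePeerConnectionFactory();
}

// Case 2: the five-argument overload. The caller supplies already-started
// threads, so the factory only borrows them (presumably owns_ptrs_ == false).
// Passing NULL for the ADM and codec factories means the built-in
// implementations are used.
rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> MakeFactoryWithThreads(
    rtc::Thread* worker_thread,
    rtc::Thread* signaling_thread) {
  return webrtc::CreatePeerConnectionFactory(
      worker_thread, signaling_thread,
      NULL /* default_adm */,
      NULL /* video_encoder_factory */,
      NULL /* video_decoder_factory */);
}

The declaration of the class itself: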
class PeerConnectionFactory : public PeerConnectionFactoryInterface,
                              public rtc::MessageHandler {
 public:
  virtual void SetOptions(const Options& options) {
    options_ = options;
  }

  virtual rtc::scoped_refptr<PeerConnectionInterface> CreatePeerConnection(
      const PeerConnectionInterface::RTCConfiguration& configuration,
      const MediaConstraintsInterface* constraints,
      PortAllocatorFactoryInterface* allocator_factory,
      DTLSIdentityServiceInterface* dtls_identity_service,
      PeerConnectionObserver* observer);

  bool Initialize();

  virtual rtc::scoped_refptr<MediaStreamInterface>
      CreateLocalMediaStream(const std::string& label);

  virtual rtc::scoped_refptr<AudioSourceInterface> CreateAudioSource(
      const MediaConstraintsInterface* constraints);

  virtual rtc::scoped_refptr<VideoSourceInterface> CreateVideoSource(
      cricket::VideoCapturer* capturer,
      const MediaConstraintsInterface* constraints);

  virtual rtc::scoped_refptr<VideoTrackInterface>
      CreateVideoTrack(const std::string& id,
                       VideoSourceInterface* video_source);

  virtual rtc::scoped_refptr<AudioTrackInterface>
      CreateAudioTrack(const std::string& id,
                       AudioSourceInterface* audio_source);

  virtual bool StartAecDump(rtc::PlatformFile file);

  virtual cricket::ChannelManager* channel_manager();
  virtual rtc::Thread* signaling_thread();
  virtual rtc::Thread* worker_thread();
  const Options& options() const { return options_; }

 protected:
  PeerConnectionFactory();
  PeerConnectionFactory(
      rtc::Thread* worker_thread,
      rtc::Thread* signaling_thread,
      AudioDeviceModule* default_adm,
      cricket::WebRtcVideoEncoderFactory* video_encoder_factory,
      cricket::WebRtcVideoDecoderFactory* video_decoder_factory);
  virtual ~PeerConnectionFactory();

 private:
  bool Initialize_s();
  void Terminate_s();
  rtc::scoped_refptr<AudioSourceInterface> CreateAudioSource_s(
      const MediaConstraintsInterface* constraints);
  rtc::scoped_refptr<VideoSourceInterface> CreateVideoSource_s(
      cricket::VideoCapturer* capturer,
      const MediaConstraintsInterface* constraints);
  rtc::scoped_refptr<PeerConnectionInterface> CreatePeerConnection_s(
      const PeerConnectionInterface::RTCConfiguration& configuration,
      const MediaConstraintsInterface* constraints,
      PortAllocatorFactoryInterface* allocator_factory,
      DTLSIdentityServiceInterface* dtls_identity_service,
      PeerConnectionObserver* observer);
  bool StartAecDump_s(rtc::PlatformFile file);

  // Implements rtc::MessageHandler.
  void OnMessage(rtc::Message* msg);

  bool owns_ptrs_;
  rtc::Thread* signaling_thread_;
  rtc::Thread* worker_thread_;
  Options options_;
  rtc::scoped_refptr<PortAllocatorFactoryInterface> allocator_factory_;
  // External Audio device used for audio playback.
  rtc::scoped_refptr<AudioDeviceModule> default_adm_;
  rtc::scoped_ptr<cricket::ChannelManager> channel_manager_;
  // External Video encoder factory. This can be NULL if the client has not
  // injected any. In that case, video engine will use the internal SW encoder.
  rtc::scoped_ptr<cricket::WebRtcVideoEncoderFactory> video_encoder_factory_;
  // External Video decoder factory. This can be NULL if the client has not
  // injected any. In that case, video engine will use the internal SW decoder.
  rtc::scoped_ptr<cricket::WebRtcVideoDecoderFactory> video_decoder_factory_;
};
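To make the public methods above concrete, here is a small hypothetical usage sketch pieced together only from this declaration. factory, observer and capturer are placeholders supplied by the caller, passing NULL for the optional constraints/allocator/DTLS parameters is an assumption about reasonable defaults, and pc->AddStream() / stream->AddTrack() come from PeerConnectionInterface and MediaStreamInterface rather than from this class:

// Hypothetical sketch: create a PeerConnection plus a local stream with one
// audio and one video track, all produced by the same factory.
rtc::scoped_refptr<webrtc::PeerConnectionInterface> CreateConnectionWithLocalMedia(
    webrtc::PeerConnectionFactoryInterface* factory,
    webrtc::PeerConnectionObserver* observer,
    cricket::VideoCapturer* capturer) {
  webrtc::PeerConnectionInterface::RTCConfiguration config;

  rtc::scoped_refptr<webrtc::PeerConnectionInterface> pc =
      factory->CreatePeerConnection(config,
                                    NULL /* constraints */,
                                    NULL /* allocator_factory */,
                                    NULL /* dtls_identity_service */,
                                    observer);

  // Sources and tracks also come from the factory and are grouped into a
  // local MediaStream.
  rtc::scoped_refptr<webrtc::MediaStreamInterface> stream =
      factory->CreateLocalMediaStream("local_stream");
  stream->AddTrack(
      factory->CreateAudioTrack("audio", factory->CreateAudioSource(NULL)));
  stream->AddTrack(
      factory->CreateVideoTrack("video",
                                factory->CreateVideoSource(capturer, NULL)));
  pc->AddStream(stream);
  return pc;
}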