最近在學習libjingle_peerconnection的代碼數據結構
不知道應該如何寫起,就先從類和各類數據結構列起吧
PeerConnectionFactory:
在建立PeerConnectionFactory的實例的時候會建立兩個thread ( signaling_thread_ 和 worker_thread_ 類型爲rtc::Thread),目前還不知道有何作用,二者的關係
class PeerConnectionFactory : public PeerConnectionFactoryInterface,
public rtc::MessageHandler {
public:
virtual void SetOptions(const Options& options) {
options_ = options;
}orm
virtual rtc::scoped_refptr<PeerConnectionInterface>
CreatePeerConnection(
const PeerConnectionInterface::RTCConfiguration& configuration,
const MediaConstraintsInterface* constraints,
PortAllocatorFactoryInterface* allocator_factory,
DTLSIdentityServiceInterface* dtls_identity_service,
PeerConnectionObserver* observer);server
bool Initialize();string
virtual rtc::scoped_refptr<MediaStreamInterface>
CreateLocalMediaStream(const std::string& label);it
virtual rtc::scoped_refptr<AudioSourceInterface> CreateAudioSource(
const MediaConstraintsInterface* constraints);io
virtual rtc::scoped_refptr<VideoSourceInterface> CreateVideoSource(
cricket::VideoCapturer* capturer,
const MediaConstraintsInterface* constraints);form
virtual rtc::scoped_refptr<VideoTrackInterface>
CreateVideoTrack(const std::string& id,
VideoSourceInterface* video_source);
virtual rtc::scoped_refptr<AudioTrackInterface>
CreateAudioTrack(const std::string& id,
AudioSourceInterface* audio_source);
virtual bool StartAecDump(rtc::PlatformFile file);
virtual cricket::ChannelManager* channel_manager();
virtual rtc::Thread* signaling_thread();
virtual rtc::Thread* worker_thread();
const Options& options() const { return options_; }
protected:
PeerConnectionFactory();
PeerConnectionFactory(
rtc::Thread* worker_thread,
rtc::Thread* signaling_thread,
AudioDeviceModule* default_adm,
cricket::WebRtcVideoEncoderFactory* video_encoder_factory,
cricket::WebRtcVideoDecoderFactory* video_decoder_factory);
virtual ~PeerConnectionFactory();
private:
bool Initialize_s();
void Terminate_s();
rtc::scoped_refptr<AudioSourceInterface> CreateAudioSource_s(
const MediaConstraintsInterface* constraints);
rtc::scoped_refptr<VideoSourceInterface> CreateVideoSource_s(
cricket::VideoCapturer* capturer,
const MediaConstraintsInterface* constraints);
rtc::scoped_refptr<PeerConnectionInterface> CreatePeerConnection_s(
const PeerConnectionInterface::RTCConfiguration& configuration,
const MediaConstraintsInterface* constraints,
PortAllocatorFactoryInterface* allocator_factory,
DTLSIdentityServiceInterface* dtls_identity_service,
PeerConnectionObserver* observer);
bool StartAecDump_s(rtc::PlatformFile file);
// Implements rtc::MessageHandler.
void OnMessage(rtc::Message* msg);
bool owns_ptrs_; rtc::Thread* signaling_thread_; rtc::Thread* worker_thread_; Options options_; rtc::scoped_refptr<PortAllocatorFactoryInterface> allocator_factory_; // External Audio device used for audio playback. rtc::scoped_refptr<AudioDeviceModule> default_adm_; rtc::scoped_ptr<cricket::ChannelManager> channel_manager_; // External Video encoder factory. This can be NULL if the client has not // injected any. In that case, video engine will use the internal SW encoder. rtc::scoped_ptr<cricket::WebRtcVideoEncoderFactory> video_encoder_factory_; // External Video decoder factory. This can be NULL if the client has not // injected any. In that case, video engine will use the internal SW decoder. rtc::scoped_ptr<cricket::WebRtcVideoDecoderFactory> video_decoder_factory_;};