Reland of Adding the ability to create an RtpSender without a track. (patchset #1 id:1 of https://codereview.webrtc.org/1426443007/ )

Reason for revert:
Relanding, after changing the expectations of WebRtcBrowserTest.CallAndModifyStream.

Original issue's description:
> Revert of Adding the ability to create an RtpSender without a track. (patchset #8 id:140001 of https://codereview.webrtc.org/1413713003/ )
>
> Reason for revert:
> Causing a compiler warning, and causing WebRtcBrowserTest.CallAndModifyStream to fail.
>
> Original issue's description:
> > Adding the ability to create an RtpSender without a track.
> >
> > This CL also changes AddStream to immediately create a sender, rather
> > than waiting until the track is seen in SDP. And the PeerConnection now
> > builds the list of "send streams" from the list of senders, rather than
> > the collection of local media streams.
> >
> > Committed: https://crrev.com/ac9d92ccbe2b29590c53f702e11dc625820480d5
> > Cr-Commit-Position: refs/heads/master@{#10414}
>
> TBR=pthatcher@webrtc.org,pthatcher@chromium.org
> NOPRESUBMIT=true
> NOTREECHECKS=true
> NOTRY=true
>
> Committed: https://crrev.com/8f46c63f6f764254892f4111b54aa1cc8f32eeeb
> Cr-Commit-Position: refs/heads/master@{#10417}

TBR=pthatcher@webrtc.org,pthatcher@chromium.org
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true

Review URL: https://codereview.webrtc.org/1413983004

Cr-Commit-Position: refs/heads/master@{#10730}
deadbeef, 2015-11-20 09:49:59 -08:00 (committed by Commit bot)
Parent: 0a43fef6dc
Commit: 6834fa10f1
18 changed files with 703 additions and 249 deletions
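For orientation, here is a minimal usage sketch of the workflow this reland enables, condensed from the EarlyWarmupTest added below. Only CreateSender and SetTrack come from this change; the peer connection, the track, and the negotiation calls are assumed to exist elsewhere and appear only as comments.

#include "talk/app/webrtc/peerconnectioninterface.h"

// Sketch only: |pc| and |audio_track| are assumed to have been created
// elsewhere (e.g. via a PeerConnectionFactoryInterface).
void WarmUpSenderThenAttachTrack(
    rtc::scoped_refptr<webrtc::PeerConnectionInterface> pc,
    rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track) {
  // 1. Create a sender with no track. |kind| must be "audio" or "video";
  //    any other value yields a null sender.
  rtc::scoped_refptr<webrtc::RtpSenderInterface> sender =
      pc->CreateSender("audio");
  if (!sender) {
    return;
  }
  // 2. Negotiate now (CreateOffer / SetLocalDescription / ...) so ICE and
  //    DTLS can complete before any media exists; applying the local
  //    description gives the sender its SSRC.
  // 3. Attach the real track later; frames start flowing immediately.
  sender->SetTrack(audio_track.get());
}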


@ -31,7 +31,7 @@
namespace webrtc {
static const char kAudioTrackKind[] = "audio";
const char MediaStreamTrackInterface::kAudioKind[] = "audio";
AudioTrack::AudioTrack(const std::string& label,
AudioSourceInterface* audio_source)
@ -40,7 +40,7 @@ AudioTrack::AudioTrack(const std::string& label,
}
std::string AudioTrack::kind() const {
return kAudioTrackKind;
return kAudioKind;
}
rtc::scoped_refptr<AudioTrack> AudioTrack::Create(


@ -100,6 +100,9 @@ class MediaStreamTrackInterface : public rtc::RefCountInterface,
kFailed = 3, // Track negotiation failed.
};
static const char kAudioKind[];
static const char kVideoKind[];
virtual std::string kind() const = 0;
virtual std::string id() const = 0;
virtual bool enabled() const = 0;
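The two constants above replace scattered "audio"/"video" string literals. A tiny illustration of the intended use (hypothetical helper; the real checks in this CL are inline in PeerConnection::CreateSender and in the SetTrack overrides further down):

#include "talk/app/webrtc/mediastreaminterface.h"

// Hypothetical helper, for illustration only.
bool IsAudioTrack(const webrtc::MediaStreamTrackInterface& track) {
  return track.kind() == webrtc::MediaStreamTrackInterface::kAudioKind;
}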


@ -50,8 +50,8 @@ namespace webrtc {
// RtpSenders/Receivers to get to the BaseChannels. These interfaces should be
// refactored away eventually, as the classes converge.
// This interface is called by AudioTrackHandler classes in mediastreamhandler.h
// to change the settings of an audio track connected to certain PeerConnection.
// This interface is called by AudioRtpSender/Receivers to change the settings
// of an audio track connected to certain PeerConnection.
class AudioProviderInterface {
public:
// Enable/disable the audio playout of a remote audio track with |ssrc|.
@ -71,9 +71,8 @@ class AudioProviderInterface {
virtual ~AudioProviderInterface() {}
};
// This interface is called by VideoTrackHandler classes in mediastreamhandler.h
// to change the settings of a video track connected to a certain
// PeerConnection.
// This interface is called by VideoRtpSender/Receivers to change the settings
// of a video track connected to a certain PeerConnection.
class VideoProviderInterface {
public:
virtual bool SetCaptureDevice(uint32_t ssrc,


@ -60,6 +60,7 @@ using webrtc::DataChannel;
using webrtc::MediaConstraintsInterface;
using webrtc::MediaStreamInterface;
using webrtc::PeerConnectionInterface;
using webrtc::RtpSenderInterface;
using webrtc::StreamCollection;
using webrtc::StunConfigurations;
using webrtc::TurnConfigurations;
@ -400,25 +401,15 @@ bool IsValidOfferToReceiveMedia(int value) {
}
// Add the stream and RTP data channel info to |session_options|.
void SetStreams(cricket::MediaSessionOptions* session_options,
rtc::scoped_refptr<StreamCollection> streams,
const std::map<std::string, rtc::scoped_refptr<DataChannel>>&
rtp_data_channels) {
void AddSendStreams(
cricket::MediaSessionOptions* session_options,
const std::vector<rtc::scoped_refptr<RtpSenderInterface>>& senders,
const std::map<std::string, rtc::scoped_refptr<DataChannel>>&
rtp_data_channels) {
session_options->streams.clear();
if (streams != nullptr) {
for (size_t i = 0; i < streams->count(); ++i) {
MediaStreamInterface* stream = streams->at(i);
// For each audio track in the stream, add it to the MediaSessionOptions.
for (const auto& track : stream->GetAudioTracks()) {
session_options->AddSendStream(cricket::MEDIA_TYPE_AUDIO, track->id(),
stream->label());
}
// For each video track in the stream, add it to the MediaSessionOptions.
for (const auto& track : stream->GetVideoTracks()) {
session_options->AddSendStream(cricket::MEDIA_TYPE_VIDEO, track->id(),
stream->label());
}
}
for (const auto& sender : senders) {
session_options->AddSendStream(sender->media_type(), sender->id(),
sender->stream_id());
}
// Check for data channels.
@ -733,8 +724,6 @@ PeerConnection::remote_streams() {
return remote_streams_;
}
// TODO(deadbeef): Create RtpSenders immediately here, even if local
// description hasn't yet been set.
bool PeerConnection::AddStream(MediaStreamInterface* local_stream) {
if (IsClosed()) {
return false;
@ -745,23 +734,46 @@ bool PeerConnection::AddStream(MediaStreamInterface* local_stream) {
local_streams_->AddStream(local_stream);
// Find tracks that have already been configured in SDP. This can occur if a
// local session description that contains the MSID of these tracks is set
// before AddLocalStream is called. It can also occur if the local session
// description is not changed and RemoveLocalStream is called and later
// AddLocalStream is called again with the same stream.
for (const auto& track : local_stream->GetAudioTracks()) {
const TrackInfo* track_info =
FindTrackInfo(local_audio_tracks_, local_stream->label(), track->id());
if (track_info) {
CreateAudioSender(local_stream, track.get(), track_info->ssrc);
auto sender = FindSenderForTrack(track.get());
if (sender == senders_.end()) {
// Normal case; we've never seen this track before.
AudioRtpSender* new_sender = new AudioRtpSender(
track.get(), local_stream->label(), session_.get(), stats_.get());
senders_.push_back(new_sender);
// If the sender has already been configured in SDP, we call SetSsrc,
// which will connect the sender to the underlying transport. This can
// occur if a local session description that contains the ID of the sender
// is set before AddStream is called. It can also occur if the local
// session description is not changed and RemoveStream is called, and
// later AddStream is called again with the same stream.
const TrackInfo* track_info = FindTrackInfo(
local_audio_tracks_, local_stream->label(), track->id());
if (track_info) {
new_sender->SetSsrc(track_info->ssrc);
}
} else {
// We already have a sender for this track, so just change the stream_id
// so that it's correct in the next call to CreateOffer.
(*sender)->set_stream_id(local_stream->label());
}
}
for (const auto& track : local_stream->GetVideoTracks()) {
const TrackInfo* track_info =
FindTrackInfo(local_video_tracks_, local_stream->label(), track->id());
if (track_info) {
CreateVideoSender(local_stream, track.get(), track_info->ssrc);
auto sender = FindSenderForTrack(track.get());
if (sender == senders_.end()) {
// Normal case; we've never seen this track before.
VideoRtpSender* new_sender = new VideoRtpSender(
track.get(), local_stream->label(), session_.get());
senders_.push_back(new_sender);
const TrackInfo* track_info = FindTrackInfo(
local_video_tracks_, local_stream->label(), track->id());
if (track_info) {
new_sender->SetSsrc(track_info->ssrc);
}
} else {
// We already have a sender for this track, so just change the stream_id
// so that it's correct in the next call to CreateOffer.
(*sender)->set_stream_id(local_stream->label());
}
}
@ -771,21 +783,27 @@ bool PeerConnection::AddStream(MediaStreamInterface* local_stream) {
}
// TODO(deadbeef): Don't destroy RtpSenders here; they should be kept around
// indefinitely.
// indefinitely, when we have unified plan SDP.
void PeerConnection::RemoveStream(MediaStreamInterface* local_stream) {
for (const auto& track : local_stream->GetAudioTracks()) {
const TrackInfo* track_info =
FindTrackInfo(local_audio_tracks_, local_stream->label(), track->id());
if (track_info) {
DestroyAudioSender(local_stream, track.get(), track_info->ssrc);
auto sender = FindSenderForTrack(track.get());
if (sender == senders_.end()) {
LOG(LS_WARNING) << "RtpSender for track with id " << track->id()
<< " doesn't exist.";
continue;
}
(*sender)->Stop();
senders_.erase(sender);
}
for (const auto& track : local_stream->GetVideoTracks()) {
const TrackInfo* track_info =
FindTrackInfo(local_video_tracks_, local_stream->label(), track->id());
if (track_info) {
DestroyVideoSender(local_stream, track.get());
auto sender = FindSenderForTrack(track.get());
if (sender == senders_.end()) {
LOG(LS_WARNING) << "RtpSender for track with id " << track->id()
<< " doesn't exist.";
continue;
}
(*sender)->Stop();
senders_.erase(sender);
}
local_streams_->RemoveStream(local_stream);
@ -816,6 +834,21 @@ rtc::scoped_refptr<DtmfSenderInterface> PeerConnection::CreateDtmfSender(
return DtmfSenderProxy::Create(signaling_thread(), sender.get());
}
rtc::scoped_refptr<RtpSenderInterface> PeerConnection::CreateSender(
const std::string& kind) {
RtpSenderInterface* new_sender;
if (kind == MediaStreamTrackInterface::kAudioKind) {
new_sender = new AudioRtpSender(session_.get(), stats_.get());
} else if (kind == MediaStreamTrackInterface::kVideoKind) {
new_sender = new VideoRtpSender(session_.get());
} else {
LOG(LS_ERROR) << "CreateSender called with invalid kind: " << kind;
return rtc::scoped_refptr<RtpSenderInterface>();
}
senders_.push_back(new_sender);
return RtpSenderProxy::Create(signaling_thread(), new_sender);
}
std::vector<rtc::scoped_refptr<RtpSenderInterface>> PeerConnection::GetSenders()
const {
std::vector<rtc::scoped_refptr<RtpSenderInterface>> senders;
@ -1324,49 +1357,6 @@ void PeerConnection::DestroyVideoReceiver(MediaStreamInterface* stream,
}
}
void PeerConnection::CreateAudioSender(MediaStreamInterface* stream,
AudioTrackInterface* audio_track,
uint32_t ssrc) {
senders_.push_back(new AudioRtpSender(audio_track, ssrc, session_.get()));
stats_->AddLocalAudioTrack(audio_track, ssrc);
}
void PeerConnection::CreateVideoSender(MediaStreamInterface* stream,
VideoTrackInterface* video_track,
uint32_t ssrc) {
senders_.push_back(new VideoRtpSender(video_track, ssrc, session_.get()));
}
// TODO(deadbeef): Keep RtpSenders around even if track goes away in local
// description.
void PeerConnection::DestroyAudioSender(MediaStreamInterface* stream,
AudioTrackInterface* audio_track,
uint32_t ssrc) {
auto it = FindSenderForTrack(audio_track);
if (it == senders_.end()) {
LOG(LS_WARNING) << "RtpSender for track with id " << audio_track->id()
<< " doesn't exist.";
return;
} else {
(*it)->Stop();
senders_.erase(it);
}
stats_->RemoveLocalAudioTrack(audio_track, ssrc);
}
void PeerConnection::DestroyVideoSender(MediaStreamInterface* stream,
VideoTrackInterface* video_track) {
auto it = FindSenderForTrack(video_track);
if (it == senders_.end()) {
LOG(LS_WARNING) << "RtpSender for track with id " << video_track->id()
<< " doesn't exist.";
return;
} else {
(*it)->Stop();
senders_.erase(it);
}
}
void PeerConnection::OnIceConnectionChange(
PeerConnectionInterface::IceConnectionState new_state) {
RTC_DCHECK(signaling_thread()->IsCurrent());
@ -1442,7 +1432,7 @@ bool PeerConnection::GetOptionsForOffer(
return false;
}
SetStreams(session_options, local_streams_, rtp_data_channels_);
AddSendStreams(session_options, senders_, rtp_data_channels_);
// Offer to receive audio/video if the constraint is not set and there are
// send streams, or we're currently receiving.
if (rtc_options.offer_to_receive_audio == RTCOfferAnswerOptions::kUndefined) {
@ -1475,7 +1465,7 @@ bool PeerConnection::GetOptionsForAnswer(
return false;
}
SetStreams(session_options, local_streams_, rtp_data_channels_);
AddSendStreams(session_options, senders_, rtp_data_channels_);
session_options->bundle_enabled =
session_options->bundle_enabled &&
(session_options->has_audio() || session_options->has_video() ||
@ -1503,8 +1493,7 @@ void PeerConnection::UpdateRemoteStreamsList(
TrackInfos* current_tracks = GetRemoteTracks(media_type);
// Find removed tracks. I.e., tracks where the track id or ssrc don't match
// the
// new StreamParam.
// the new StreamParam.
auto track_it = current_tracks->begin();
while (track_it != current_tracks->end()) {
const TrackInfo& info = *track_it;
@ -1706,62 +1695,44 @@ void PeerConnection::OnLocalTrackSeen(const std::string& stream_label,
const std::string& track_id,
uint32_t ssrc,
cricket::MediaType media_type) {
MediaStreamInterface* stream = local_streams_->find(stream_label);
if (!stream) {
LOG(LS_WARNING) << "An unknown local MediaStream with label "
<< stream_label << " has been configured.";
RtpSenderInterface* sender = FindSenderById(track_id);
if (!sender) {
LOG(LS_WARNING) << "An unknown RtpSender with id " << track_id
<< " has been configured in the local description.";
return;
}
if (media_type == cricket::MEDIA_TYPE_AUDIO) {
AudioTrackInterface* audio_track = stream->FindAudioTrack(track_id);
if (!audio_track) {
LOG(LS_WARNING) << "An unknown local AudioTrack with id , " << track_id
<< " has been configured.";
return;
}
CreateAudioSender(stream, audio_track, ssrc);
} else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
VideoTrackInterface* video_track = stream->FindVideoTrack(track_id);
if (!video_track) {
LOG(LS_WARNING) << "An unknown local VideoTrack with id , " << track_id
<< " has been configured.";
return;
}
CreateVideoSender(stream, video_track, ssrc);
} else {
RTC_DCHECK(false && "Invalid media type");
if (sender->media_type() != media_type) {
LOG(LS_WARNING) << "An RtpSender has been configured in the local"
<< " description with an unexpected media type.";
return;
}
sender->set_stream_id(stream_label);
sender->SetSsrc(ssrc);
}
void PeerConnection::OnLocalTrackRemoved(const std::string& stream_label,
const std::string& track_id,
uint32_t ssrc,
cricket::MediaType media_type) {
MediaStreamInterface* stream = local_streams_->find(stream_label);
if (!stream) {
// This is the normal case. I.e., RemoveLocalStream has been called and the
RtpSenderInterface* sender = FindSenderById(track_id);
if (!sender) {
// This is the normal case. I.e., RemoveStream has been called and the
// SessionDescriptions has been renegotiated.
return;
}
// A track has been removed from the SessionDescription but the MediaStream
// is still associated with PeerConnection. This only occurs if the SDP
// doesn't match with the calls to AddLocalStream and RemoveLocalStream.
if (media_type == cricket::MEDIA_TYPE_AUDIO) {
AudioTrackInterface* audio_track = stream->FindAudioTrack(track_id);
if (!audio_track) {
return;
}
DestroyAudioSender(stream, audio_track, ssrc);
} else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
VideoTrackInterface* video_track = stream->FindVideoTrack(track_id);
if (!video_track) {
return;
}
DestroyVideoSender(stream, video_track);
} else {
RTC_DCHECK(false && "Invalid media type.");
// A sender has been removed from the SessionDescription but it's still
// associated with the PeerConnection. This only occurs if the SDP doesn't
// match with the calls to CreateSender, AddStream and RemoveStream.
if (sender->media_type() != media_type) {
LOG(LS_WARNING) << "An RtpSender has been configured in the local"
<< " description with an unexpected media type.";
return;
}
sender->SetSsrc(0);
}
void PeerConnection::UpdateLocalRtpDataChannels(
@ -1979,6 +1950,15 @@ void PeerConnection::OnDataChannelOpenMessage(
DataChannelProxy::Create(signaling_thread(), channel));
}
RtpSenderInterface* PeerConnection::FindSenderById(const std::string& id) {
auto it =
std::find_if(senders_.begin(), senders_.end(),
[id](const rtc::scoped_refptr<RtpSenderInterface>& sender) {
return sender->id() == id;
});
return it != senders_.end() ? it->get() : nullptr;
}
std::vector<rtc::scoped_refptr<RtpSenderInterface>>::iterator
PeerConnection::FindSenderForTrack(MediaStreamTrackInterface* track) {
return std::find_if(


@ -101,6 +101,9 @@ class PeerConnection : public PeerConnectionInterface,
rtc::scoped_refptr<DtmfSenderInterface> CreateDtmfSender(
AudioTrackInterface* track) override;
rtc::scoped_refptr<RtpSenderInterface> CreateSender(
const std::string& kind) override;
std::vector<rtc::scoped_refptr<RtpSenderInterface>> GetSenders()
const override;
std::vector<rtc::scoped_refptr<RtpReceiverInterface>> GetReceivers()
@ -197,12 +200,6 @@ class PeerConnection : public PeerConnectionInterface,
AudioTrackInterface* audio_track);
void DestroyVideoReceiver(MediaStreamInterface* stream,
VideoTrackInterface* video_track);
void CreateAudioSender(MediaStreamInterface* stream,
AudioTrackInterface* audio_track,
uint32_t ssrc);
void CreateVideoSender(MediaStreamInterface* stream,
VideoTrackInterface* video_track,
uint32_t ssrc);
void DestroyAudioSender(MediaStreamInterface* stream,
AudioTrackInterface* audio_track,
uint32_t ssrc);
@ -342,6 +339,8 @@ class PeerConnection : public PeerConnectionInterface,
void OnDataChannelOpenMessage(const std::string& label,
const InternalDataChannelInit& config);
RtpSenderInterface* FindSenderById(const std::string& id);
std::vector<rtc::scoped_refptr<RtpSenderInterface>>::iterator
FindSenderForTrack(MediaStreamTrackInterface* track);
std::vector<rtc::scoped_refptr<RtpReceiverInterface>>::iterator


@ -98,6 +98,7 @@ static const int kMaxWaitMs = 10000;
#if !defined(THREAD_SANITIZER)
static const int kMaxWaitForStatsMs = 3000;
#endif
static const int kMaxWaitForActivationMs = 5000;
static const int kMaxWaitForFramesMs = 10000;
static const int kEndAudioFrameCount = 3;
static const int kEndVideoFrameCount = 3;
@ -288,18 +289,7 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
peer_connection_factory_->CreateLocalMediaStream(stream_label);
if (audio && can_receive_audio()) {
FakeConstraints constraints;
// Disable highpass filter so that we can get all the test audio frames.
constraints.AddMandatory(
MediaConstraintsInterface::kHighpassFilter, false);
rtc::scoped_refptr<webrtc::AudioSourceInterface> source =
peer_connection_factory_->CreateAudioSource(&constraints);
// TODO(perkj): Test audio source when it is implemented. Currently audio
// always use the default input.
std::string label = stream_label + kAudioTrackLabelBase;
rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
peer_connection_factory_->CreateAudioTrack(label, source));
stream->AddTrack(audio_track);
stream->AddTrack(CreateLocalAudioTrack(stream_label));
}
if (video && can_receive_video()) {
stream->AddTrack(CreateLocalVideoTrack(stream_label));
@ -401,6 +391,35 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
data_observer_.reset(new MockDataChannelObserver(data_channel_));
}
rtc::scoped_refptr<webrtc::AudioTrackInterface> CreateLocalAudioTrack(
const std::string& stream_label) {
FakeConstraints constraints;
// Disable highpass filter so that we can get all the test audio frames.
constraints.AddMandatory(MediaConstraintsInterface::kHighpassFilter, false);
rtc::scoped_refptr<webrtc::AudioSourceInterface> source =
peer_connection_factory_->CreateAudioSource(&constraints);
// TODO(perkj): Test audio source when it is implemented. Currently audio
// always use the default input.
std::string label = stream_label + kAudioTrackLabelBase;
return peer_connection_factory_->CreateAudioTrack(label, source);
}
rtc::scoped_refptr<webrtc::VideoTrackInterface> CreateLocalVideoTrack(
const std::string& stream_label) {
// Set max frame rate to 10fps to reduce the risk of the tests to be flaky.
FakeConstraints source_constraints = video_constraints_;
source_constraints.SetMandatoryMaxFrameRate(10);
cricket::FakeVideoCapturer* fake_capturer =
new webrtc::FakePeriodicVideoCapturer();
video_capturers_.push_back(fake_capturer);
rtc::scoped_refptr<webrtc::VideoSourceInterface> source =
peer_connection_factory_->CreateVideoSource(fake_capturer,
&source_constraints);
std::string label = stream_label + kVideoTrackLabelBase;
return peer_connection_factory_->CreateVideoTrack(label, source);
}
DataChannelInterface* data_channel() { return data_channel_; }
const MockDataChannelObserver* data_observer() const {
return data_observer_.get();
@ -715,22 +734,6 @@ class PeerConnectionTestClient : public webrtc::PeerConnectionObserver,
return peer_connection_.get() != nullptr;
}
rtc::scoped_refptr<webrtc::VideoTrackInterface>
CreateLocalVideoTrack(const std::string stream_label) {
// Set max frame rate to 10fps to reduce the risk of the tests to be flaky.
FakeConstraints source_constraints = video_constraints_;
source_constraints.SetMandatoryMaxFrameRate(10);
cricket::FakeVideoCapturer* fake_capturer =
new webrtc::FakePeriodicVideoCapturer();
video_capturers_.push_back(fake_capturer);
rtc::scoped_refptr<webrtc::VideoSourceInterface> source =
peer_connection_factory_->CreateVideoSource(
fake_capturer, &source_constraints);
std::string label = stream_label + kVideoTrackLabelBase;
return peer_connection_factory_->CreateVideoTrack(label, source);
}
rtc::scoped_refptr<webrtc::PeerConnectionInterface> CreatePeerConnection(
webrtc::PortAllocatorFactoryInterface* factory,
const MediaConstraintsInterface* constraints) {
@ -1011,7 +1014,6 @@ class MAYBE_JsepPeerConnectionP2PTestClient : public testing::Test {
initiating_client_->AddMediaStream(true, true);
}
initiating_client_->Negotiate();
const int kMaxWaitForActivationMs = 5000;
// Assert true is used here since next tests are guaranteed to fail and
// would eat up 5 seconds.
ASSERT_TRUE_WAIT(SessionActive(), kMaxWaitForActivationMs);
@ -1653,6 +1655,34 @@ TEST_F(MAYBE_JsepPeerConnectionP2PTestClient,
LocalP2PTest();
}
// This tests that if we negotiate after calling CreateSender but before we
// have a track, then set a track later, frames from the newly-set track are
// received end-to-end.
TEST_F(MAYBE_JsepPeerConnectionP2PTestClient, EarlyWarmupTest) {
ASSERT_TRUE(CreateTestClients());
auto audio_sender = initializing_client()->pc()->CreateSender("audio");
auto video_sender = initializing_client()->pc()->CreateSender("video");
initializing_client()->Negotiate();
// Wait for ICE connection to complete, without any tracks.
// Note that the receiving client WILL (in HandleIncomingOffer) create
// tracks, so it's only the initiator here that's doing early warmup.
ASSERT_TRUE_WAIT(SessionActive(), kMaxWaitForActivationMs);
VerifySessionDescriptions();
EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionCompleted,
initializing_client()->ice_connection_state(),
kMaxWaitForFramesMs);
EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected,
receiving_client()->ice_connection_state(),
kMaxWaitForFramesMs);
// Now set the tracks, and expect frames to immediately start flowing.
EXPECT_TRUE(
audio_sender->SetTrack(initializing_client()->CreateLocalAudioTrack("")));
EXPECT_TRUE(
video_sender->SetTrack(initializing_client()->CreateLocalVideoTrack("")));
EXPECT_TRUE_WAIT(FramesNotPending(kEndAudioFrameCount, kEndVideoFrameCount),
kMaxWaitForFramesMs);
}
class IceServerParsingTest : public testing::Test {
public:
// Convenience for parsing a single URL.


@ -333,6 +333,12 @@ class PeerConnectionInterface : public rtc::RefCountInterface {
AudioTrackInterface* track) = 0;
// TODO(deadbeef): Make these pure virtual once all subclasses implement them.
// |kind| must be "audio" or "video".
virtual rtc::scoped_refptr<RtpSenderInterface> CreateSender(
const std::string& kind) {
return rtc::scoped_refptr<RtpSenderInterface>();
}
virtual std::vector<rtc::scoped_refptr<RtpSenderInterface>> GetSenders()
const {
return std::vector<rtc::scoped_refptr<RtpSenderInterface>>();


@ -2037,8 +2037,10 @@ TEST_F(PeerConnectionInterfaceTest, LocalDescriptionChanged) {
EXPECT_TRUE(ContainsSender(senders, kVideoTracks[1]));
// Remove an audio and video track.
pc_->RemoveStream(reference_collection_->at(0));
rtc::scoped_ptr<SessionDescriptionInterface> desc_2;
CreateSessionDescriptionAndReference(1, 1, desc_2.accept());
pc_->AddStream(reference_collection_->at(0));
EXPECT_TRUE(DoSetLocalDescription(desc_2.release()));
senders = pc_->GetSenders();
EXPECT_EQ(2u, senders.size());


@ -43,6 +43,9 @@ BEGIN_PROXY_MAP(PeerConnection)
PROXY_METHOD1(void, RemoveStream, MediaStreamInterface*)
PROXY_METHOD1(rtc::scoped_refptr<DtmfSenderInterface>,
CreateDtmfSender, AudioTrackInterface*)
PROXY_METHOD1(rtc::scoped_refptr<RtpSenderInterface>,
CreateSender,
const std::string&)
PROXY_CONSTMETHOD0(std::vector<rtc::scoped_refptr<RtpSenderInterface>>,
GetSenders)
PROXY_CONSTMETHOD0(std::vector<rtc::scoped_refptr<RtpReceiverInterface>>,


@ -29,6 +29,7 @@
#include "talk/app/webrtc/localaudiosource.h"
#include "talk/app/webrtc/videosourceinterface.h"
#include "webrtc/base/helpers.h"
namespace webrtc {
@ -59,34 +60,49 @@ void LocalAudioSinkAdapter::SetSink(cricket::AudioRenderer::Sink* sink) {
}
AudioRtpSender::AudioRtpSender(AudioTrackInterface* track,
uint32_t ssrc,
AudioProviderInterface* provider)
const std::string& stream_id,
AudioProviderInterface* provider,
StatsCollector* stats)
: id_(track->id()),
track_(track),
ssrc_(ssrc),
stream_id_(stream_id),
provider_(provider),
stats_(stats),
track_(track),
cached_track_enabled_(track->enabled()),
sink_adapter_(new LocalAudioSinkAdapter()) {
RTC_DCHECK(provider != nullptr);
track_->RegisterObserver(this);
track_->AddSink(sink_adapter_.get());
Reconfigure();
}
AudioRtpSender::AudioRtpSender(AudioProviderInterface* provider,
StatsCollector* stats)
: id_(rtc::CreateRandomUuid()),
stream_id_(rtc::CreateRandomUuid()),
provider_(provider),
stats_(stats),
sink_adapter_(new LocalAudioSinkAdapter()) {}
AudioRtpSender::~AudioRtpSender() {
track_->RemoveSink(sink_adapter_.get());
track_->UnregisterObserver(this);
Stop();
}
void AudioRtpSender::OnChanged() {
RTC_DCHECK(!stopped_);
if (cached_track_enabled_ != track_->enabled()) {
cached_track_enabled_ = track_->enabled();
Reconfigure();
if (can_send_track()) {
SetAudioSend();
}
}
}
bool AudioRtpSender::SetTrack(MediaStreamTrackInterface* track) {
if (track->kind() != "audio") {
if (stopped_) {
LOG(LS_ERROR) << "SetTrack can't be called on a stopped RtpSender.";
return false;
}
if (track && track->kind() != MediaStreamTrackInterface::kAudioKind) {
LOG(LS_ERROR) << "SetTrack called on audio RtpSender with " << track->kind()
<< " track.";
return false;
@ -94,36 +110,83 @@ bool AudioRtpSender::SetTrack(MediaStreamTrackInterface* track) {
AudioTrackInterface* audio_track = static_cast<AudioTrackInterface*>(track);
// Detach from old track.
track_->RemoveSink(sink_adapter_.get());
track_->UnregisterObserver(this);
if (track_) {
track_->RemoveSink(sink_adapter_.get());
track_->UnregisterObserver(this);
}
if (can_send_track() && stats_) {
stats_->RemoveLocalAudioTrack(track_.get(), ssrc_);
}
// Attach to new track.
bool prev_can_send_track = can_send_track();
track_ = audio_track;
cached_track_enabled_ = track_->enabled();
track_->RegisterObserver(this);
track_->AddSink(sink_adapter_.get());
Reconfigure();
if (track_) {
cached_track_enabled_ = track_->enabled();
track_->RegisterObserver(this);
track_->AddSink(sink_adapter_.get());
}
// Update audio provider.
if (can_send_track()) {
SetAudioSend();
if (stats_) {
stats_->AddLocalAudioTrack(track_.get(), ssrc_);
}
} else if (prev_can_send_track) {
cricket::AudioOptions options;
provider_->SetAudioSend(ssrc_, false, options, nullptr);
}
return true;
}
void AudioRtpSender::SetSsrc(uint32_t ssrc) {
if (stopped_ || ssrc == ssrc_) {
return;
}
// If we are already sending with a particular SSRC, stop sending.
if (can_send_track()) {
cricket::AudioOptions options;
provider_->SetAudioSend(ssrc_, false, options, nullptr);
if (stats_) {
stats_->RemoveLocalAudioTrack(track_.get(), ssrc_);
}
}
ssrc_ = ssrc;
if (can_send_track()) {
SetAudioSend();
if (stats_) {
stats_->AddLocalAudioTrack(track_.get(), ssrc_);
}
}
}
void AudioRtpSender::Stop() {
// TODO(deadbeef): Need to do more here to fully stop sending packets.
if (!provider_) {
if (stopped_) {
return;
}
cricket::AudioOptions options;
provider_->SetAudioSend(ssrc_, false, options, nullptr);
provider_ = nullptr;
if (track_) {
track_->RemoveSink(sink_adapter_.get());
track_->UnregisterObserver(this);
}
if (can_send_track()) {
cricket::AudioOptions options;
provider_->SetAudioSend(ssrc_, false, options, nullptr);
if (stats_) {
stats_->RemoveLocalAudioTrack(track_.get(), ssrc_);
}
}
stopped_ = true;
}
void AudioRtpSender::Reconfigure() {
if (!provider_) {
return;
}
void AudioRtpSender::SetAudioSend() {
RTC_DCHECK(!stopped_ && can_send_track());
cricket::AudioOptions options;
if (track_->enabled() && track_->GetSource()) {
// TODO(xians): Remove this static_cast since we should be able to connect
// a remote audio track to peer connection.
// a remote audio track to a peer connection.
options = static_cast<LocalAudioSource*>(track_->GetSource())->options();
}
@ -136,35 +199,42 @@ void AudioRtpSender::Reconfigure() {
}
VideoRtpSender::VideoRtpSender(VideoTrackInterface* track,
uint32_t ssrc,
const std::string& stream_id,
VideoProviderInterface* provider)
: id_(track->id()),
track_(track),
ssrc_(ssrc),
stream_id_(stream_id),
provider_(provider),
track_(track),
cached_track_enabled_(track->enabled()) {
RTC_DCHECK(provider != nullptr);
track_->RegisterObserver(this);
VideoSourceInterface* source = track_->GetSource();
if (source) {
provider_->SetCaptureDevice(ssrc_, source->GetVideoCapturer());
}
Reconfigure();
}
VideoRtpSender::VideoRtpSender(VideoProviderInterface* provider)
: id_(rtc::CreateRandomUuid()),
stream_id_(rtc::CreateRandomUuid()),
provider_(provider) {}
VideoRtpSender::~VideoRtpSender() {
track_->UnregisterObserver(this);
Stop();
}
void VideoRtpSender::OnChanged() {
RTC_DCHECK(!stopped_);
if (cached_track_enabled_ != track_->enabled()) {
cached_track_enabled_ = track_->enabled();
Reconfigure();
if (can_send_track()) {
SetVideoSend();
}
}
}
bool VideoRtpSender::SetTrack(MediaStreamTrackInterface* track) {
if (track->kind() != "video") {
if (stopped_) {
LOG(LS_ERROR) << "SetTrack can't be called on a stopped RtpSender.";
return false;
}
if (track && track->kind() != MediaStreamTrackInterface::kVideoKind) {
LOG(LS_ERROR) << "SetTrack called on video RtpSender with " << track->kind()
<< " track.";
return false;
@ -172,30 +242,72 @@ bool VideoRtpSender::SetTrack(MediaStreamTrackInterface* track) {
VideoTrackInterface* video_track = static_cast<VideoTrackInterface*>(track);
// Detach from old track.
track_->UnregisterObserver(this);
if (track_) {
track_->UnregisterObserver(this);
}
// Attach to new track.
bool prev_can_send_track = can_send_track();
track_ = video_track;
cached_track_enabled_ = track_->enabled();
track_->RegisterObserver(this);
Reconfigure();
if (track_) {
cached_track_enabled_ = track_->enabled();
track_->RegisterObserver(this);
}
// Update video provider.
if (can_send_track()) {
VideoSourceInterface* source = track_->GetSource();
// TODO(deadbeef): If SetTrack is called with a disabled track, and the
// previous track was enabled, this could cause a frame from the new track
// to slip out. Really, what we need is for SetCaptureDevice and
// SetVideoSend
// to be combined into one atomic operation, all the way down to
// WebRtcVideoSendStream.
provider_->SetCaptureDevice(ssrc_,
source ? source->GetVideoCapturer() : nullptr);
SetVideoSend();
} else if (prev_can_send_track) {
provider_->SetCaptureDevice(ssrc_, nullptr);
provider_->SetVideoSend(ssrc_, false, nullptr);
}
return true;
}
void VideoRtpSender::SetSsrc(uint32_t ssrc) {
if (stopped_ || ssrc == ssrc_) {
return;
}
// If we are already sending with a particular SSRC, stop sending.
if (can_send_track()) {
provider_->SetCaptureDevice(ssrc_, nullptr);
provider_->SetVideoSend(ssrc_, false, nullptr);
}
ssrc_ = ssrc;
if (can_send_track()) {
VideoSourceInterface* source = track_->GetSource();
provider_->SetCaptureDevice(ssrc_,
source ? source->GetVideoCapturer() : nullptr);
SetVideoSend();
}
}
void VideoRtpSender::Stop() {
// TODO(deadbeef): Need to do more here to fully stop sending packets.
if (!provider_) {
if (stopped_) {
return;
}
provider_->SetCaptureDevice(ssrc_, nullptr);
provider_->SetVideoSend(ssrc_, false, nullptr);
provider_ = nullptr;
if (track_) {
track_->UnregisterObserver(this);
}
if (can_send_track()) {
provider_->SetCaptureDevice(ssrc_, nullptr);
provider_->SetVideoSend(ssrc_, false, nullptr);
}
stopped_ = true;
}
void VideoRtpSender::Reconfigure() {
if (!provider_) {
return;
}
void VideoRtpSender::SetVideoSend() {
RTC_DCHECK(!stopped_ && can_send_track());
const cricket::VideoOptions* options = nullptr;
VideoSourceInterface* source = track_->GetSource();
if (track_->enabled() && source) {

View File

@ -36,6 +36,7 @@
#include "talk/app/webrtc/mediastreamprovider.h"
#include "talk/app/webrtc/rtpsenderinterface.h"
#include "talk/app/webrtc/statscollector.h"
#include "talk/media/base/audiorenderer.h"
#include "webrtc/base/basictypes.h"
#include "webrtc/base/criticalsection.h"
@ -70,9 +71,15 @@ class LocalAudioSinkAdapter : public AudioTrackSinkInterface,
class AudioRtpSender : public ObserverInterface,
public rtc::RefCountedObject<RtpSenderInterface> {
public:
// StatsCollector provided so that Add/RemoveLocalAudioTrack can be called
// at the appropriate times.
AudioRtpSender(AudioTrackInterface* track,
uint32_t ssrc,
AudioProviderInterface* provider);
const std::string& stream_id,
AudioProviderInterface* provider,
StatsCollector* stats);
// Randomly generates id and stream_id.
AudioRtpSender(AudioProviderInterface* provider, StatsCollector* stats);
virtual ~AudioRtpSender();
@ -85,18 +92,37 @@ class AudioRtpSender : public ObserverInterface,
return track_.get();
}
void SetSsrc(uint32_t ssrc) override;
uint32_t ssrc() const override { return ssrc_; }
cricket::MediaType media_type() const override {
return cricket::MEDIA_TYPE_AUDIO;
}
std::string id() const override { return id_; }
void set_stream_id(const std::string& stream_id) override {
stream_id_ = stream_id;
}
std::string stream_id() const override { return stream_id_; }
void Stop() override;
private:
void Reconfigure();
bool can_send_track() const { return track_ && ssrc_; }
// Helper function to construct options for
// AudioProviderInterface::SetAudioSend.
void SetAudioSend();
std::string id_;
rtc::scoped_refptr<AudioTrackInterface> track_;
uint32_t ssrc_;
std::string stream_id_;
AudioProviderInterface* provider_;
bool cached_track_enabled_;
StatsCollector* stats_;
rtc::scoped_refptr<AudioTrackInterface> track_;
uint32_t ssrc_ = 0;
bool cached_track_enabled_ = false;
bool stopped_ = false;
// Used to pass the data callback from the |track_| to the other end of
// cricket::AudioRenderer.
@ -107,9 +133,12 @@ class VideoRtpSender : public ObserverInterface,
public rtc::RefCountedObject<RtpSenderInterface> {
public:
VideoRtpSender(VideoTrackInterface* track,
uint32_t ssrc,
const std::string& stream_id,
VideoProviderInterface* provider);
// Randomly generates id and stream_id.
explicit VideoRtpSender(VideoProviderInterface* provider);
virtual ~VideoRtpSender();
// ObserverInterface implementation
@ -121,18 +150,36 @@ class VideoRtpSender : public ObserverInterface,
return track_.get();
}
void SetSsrc(uint32_t ssrc) override;
uint32_t ssrc() const override { return ssrc_; }
cricket::MediaType media_type() const override {
return cricket::MEDIA_TYPE_VIDEO;
}
std::string id() const override { return id_; }
void set_stream_id(const std::string& stream_id) override {
stream_id_ = stream_id;
}
std::string stream_id() const override { return stream_id_; }
void Stop() override;
private:
void Reconfigure();
bool can_send_track() const { return track_ && ssrc_; }
// Helper function to construct options for
// VideoProviderInterface::SetVideoSend.
void SetVideoSend();
std::string id_;
rtc::scoped_refptr<VideoTrackInterface> track_;
uint32_t ssrc_;
std::string stream_id_;
VideoProviderInterface* provider_;
bool cached_track_enabled_;
rtc::scoped_refptr<VideoTrackInterface> track_;
uint32_t ssrc_ = 0;
bool cached_track_enabled_ = false;
bool stopped_ = false;
};
} // namespace webrtc


@ -35,6 +35,7 @@
#include "talk/app/webrtc/proxy.h"
#include "talk/app/webrtc/mediastreaminterface.h"
#include "talk/session/media/mediasession.h"
#include "webrtc/base/refcount.h"
#include "webrtc/base/scoped_ref_ptr.h"
@ -47,10 +48,24 @@ class RtpSenderInterface : public rtc::RefCountInterface {
virtual bool SetTrack(MediaStreamTrackInterface* track) = 0;
virtual rtc::scoped_refptr<MediaStreamTrackInterface> track() const = 0;
// Used to set the SSRC of the sender, once a local description has been set.
// If |ssrc| is 0, this indiates that the sender should disconnect from the
// underlying transport (this occurs if the sender isn't seen in a local
// description).
virtual void SetSsrc(uint32_t ssrc) = 0;
virtual uint32_t ssrc() const = 0;
// Audio or video sender?
virtual cricket::MediaType media_type() const = 0;
// Not to be confused with "mid", this is a field we can temporarily use
// to uniquely identify a receiver until we implement Unified Plan SDP.
virtual std::string id() const = 0;
// TODO(deadbeef): Support one sender having multiple stream ids.
virtual void set_stream_id(const std::string& stream_id) = 0;
virtual std::string stream_id() const = 0;
virtual void Stop() = 0;
protected:
@ -61,7 +76,12 @@ class RtpSenderInterface : public rtc::RefCountInterface {
BEGIN_PROXY_MAP(RtpSender)
PROXY_METHOD1(bool, SetTrack, MediaStreamTrackInterface*)
PROXY_CONSTMETHOD0(rtc::scoped_refptr<MediaStreamTrackInterface>, track)
PROXY_METHOD1(void, SetSsrc, uint32_t)
PROXY_CONSTMETHOD0(uint32_t, ssrc)
PROXY_CONSTMETHOD0(cricket::MediaType, media_type)
PROXY_CONSTMETHOD0(std::string, id)
PROXY_METHOD1(void, set_stream_id, const std::string&)
PROXY_CONSTMETHOD0(std::string, stream_id)
PROXY_METHOD0(void, Stop)
END_PROXY()
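In this CL it is PeerConnection that drives the SetSsrc/Stop lifecycle described above. A condensed sketch of those calls, assuming a |sender| owned by the connection (the real code is in OnLocalTrackSeen, OnLocalTrackRemoved and RemoveStream earlier in this diff; the function names here are illustrative):

#include <stdint.h>
#include <string>

#include "talk/app/webrtc/rtpsenderinterface.h"

// The sender's track shows up in the applied local description.
void OnSenderSeenInLocalDescription(webrtc::RtpSenderInterface* sender,
                                    const std::string& stream_label,
                                    uint32_t ssrc) {
  sender->set_stream_id(stream_label);
  sender->SetSsrc(ssrc);  // Non-zero: connect to the underlying transport.
}

// The sender's track disappears from the local description.
void OnSenderRemovedFromLocalDescription(webrtc::RtpSenderInterface* sender) {
  sender->SetSsrc(0);  // Zero: disconnect, but keep the sender reusable.
}

// The application removes the stream entirely.
void OnStreamRemoved(webrtc::RtpSenderInterface* sender) {
  sender->Stop();  // A stopped sender rejects further SetTrack calls.
}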


@ -48,7 +48,9 @@ static const char kStreamLabel1[] = "local_stream_1";
static const char kVideoTrackId[] = "video_1";
static const char kAudioTrackId[] = "audio_1";
static const uint32_t kVideoSsrc = 98;
static const uint32_t kVideoSsrc2 = 100;
static const uint32_t kAudioSsrc = 99;
static const uint32_t kAudioSsrc2 = 101;
namespace webrtc {
@ -120,8 +122,10 @@ class RtpSenderReceiverTest : public testing::Test {
audio_track_ = AudioTrack::Create(kAudioTrackId, NULL);
EXPECT_TRUE(stream_->AddTrack(audio_track_));
EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, true, _, _));
audio_rtp_sender_ = new AudioRtpSender(stream_->GetAudioTracks()[0],
kAudioSsrc, &audio_provider_);
audio_rtp_sender_ =
new AudioRtpSender(stream_->GetAudioTracks()[0], stream_->label(),
&audio_provider_, nullptr);
audio_rtp_sender_->SetSsrc(kAudioSsrc);
}
void CreateVideoRtpSender() {
@ -130,7 +134,8 @@ class RtpSenderReceiverTest : public testing::Test {
kVideoSsrc, video_track_->GetSource()->GetVideoCapturer()));
EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, true, _));
video_rtp_sender_ = new VideoRtpSender(stream_->GetVideoTracks()[0],
kVideoSsrc, &video_provider_);
stream_->label(), &video_provider_);
video_rtp_sender_->SetSsrc(kVideoSsrc);
}
void DestroyAudioRtpSender() {
@ -280,4 +285,206 @@ TEST_F(RtpSenderReceiverTest, RemoteAudioTrackSetVolume) {
DestroyAudioRtpReceiver();
}
// Test that provider methods aren't called without both a track and an SSRC.
TEST_F(RtpSenderReceiverTest, AudioSenderWithoutTrackAndSsrc) {
rtc::scoped_refptr<AudioRtpSender> sender =
new AudioRtpSender(&audio_provider_, nullptr);
rtc::scoped_refptr<AudioTrackInterface> track =
AudioTrack::Create(kAudioTrackId, nullptr);
EXPECT_TRUE(sender->SetTrack(track));
EXPECT_TRUE(sender->SetTrack(nullptr));
sender->SetSsrc(kAudioSsrc);
sender->SetSsrc(0);
// Just let it get destroyed and make sure it doesn't call any methods on the
// provider interface.
}
// Test that provider methods aren't called without both a track and an SSRC.
TEST_F(RtpSenderReceiverTest, VideoSenderWithoutTrackAndSsrc) {
rtc::scoped_refptr<VideoRtpSender> sender =
new VideoRtpSender(&video_provider_);
EXPECT_TRUE(sender->SetTrack(video_track_));
EXPECT_TRUE(sender->SetTrack(nullptr));
sender->SetSsrc(kVideoSsrc);
sender->SetSsrc(0);
// Just let it get destroyed and make sure it doesn't call any methods on the
// provider interface.
}
// Test that an audio sender calls the expected methods on the provider once
// it has a track and SSRC, when the SSRC is set first.
TEST_F(RtpSenderReceiverTest, AudioSenderEarlyWarmupSsrcThenTrack) {
rtc::scoped_refptr<AudioRtpSender> sender =
new AudioRtpSender(&audio_provider_, nullptr);
rtc::scoped_refptr<AudioTrackInterface> track =
AudioTrack::Create(kAudioTrackId, nullptr);
sender->SetSsrc(kAudioSsrc);
EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, true, _, _));
sender->SetTrack(track);
// Calls expected from destructor.
EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, false, _, _)).Times(1);
}
// Test that an audio sender calls the expected methods on the provider once
// it has a track and SSRC, when the SSRC is set last.
TEST_F(RtpSenderReceiverTest, AudioSenderEarlyWarmupTrackThenSsrc) {
rtc::scoped_refptr<AudioRtpSender> sender =
new AudioRtpSender(&audio_provider_, nullptr);
rtc::scoped_refptr<AudioTrackInterface> track =
AudioTrack::Create(kAudioTrackId, nullptr);
sender->SetTrack(track);
EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, true, _, _));
sender->SetSsrc(kAudioSsrc);
// Calls expected from destructor.
EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, false, _, _)).Times(1);
}
// Test that a video sender calls the expected methods on the provider once
// it has a track and SSRC, when the SSRC is set first.
TEST_F(RtpSenderReceiverTest, VideoSenderEarlyWarmupSsrcThenTrack) {
rtc::scoped_refptr<VideoRtpSender> sender =
new VideoRtpSender(&video_provider_);
sender->SetSsrc(kVideoSsrc);
EXPECT_CALL(video_provider_,
SetCaptureDevice(kVideoSsrc,
video_track_->GetSource()->GetVideoCapturer()));
EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, true, _));
sender->SetTrack(video_track_);
// Calls expected from destructor.
EXPECT_CALL(video_provider_, SetCaptureDevice(kVideoSsrc, nullptr)).Times(1);
EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _)).Times(1);
}
// Test that a video sender calls the expected methods on the provider once
// it has a track and SSRC, when the SSRC is set last.
TEST_F(RtpSenderReceiverTest, VideoSenderEarlyWarmupTrackThenSsrc) {
rtc::scoped_refptr<VideoRtpSender> sender =
new VideoRtpSender(&video_provider_);
sender->SetTrack(video_track_);
EXPECT_CALL(video_provider_,
SetCaptureDevice(kVideoSsrc,
video_track_->GetSource()->GetVideoCapturer()));
EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, true, _));
sender->SetSsrc(kVideoSsrc);
// Calls expected from destructor.
EXPECT_CALL(video_provider_, SetCaptureDevice(kVideoSsrc, nullptr)).Times(1);
EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _)).Times(1);
}
// Test that the sender is disconnected from the provider when its SSRC is
// set to 0.
TEST_F(RtpSenderReceiverTest, AudioSenderSsrcSetToZero) {
rtc::scoped_refptr<AudioTrackInterface> track =
AudioTrack::Create(kAudioTrackId, nullptr);
EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, true, _, _));
rtc::scoped_refptr<AudioRtpSender> sender =
new AudioRtpSender(track, kStreamLabel1, &audio_provider_, nullptr);
sender->SetSsrc(kAudioSsrc);
EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, false, _, _)).Times(1);
sender->SetSsrc(0);
// Make sure it's SetSsrc that called methods on the provider, and not the
// destructor.
EXPECT_CALL(audio_provider_, SetAudioSend(_, _, _, _)).Times(0);
}
// Test that the sender is disconnected from the provider when its SSRC is
// set to 0.
TEST_F(RtpSenderReceiverTest, VideoSenderSsrcSetToZero) {
EXPECT_CALL(video_provider_,
SetCaptureDevice(kVideoSsrc,
video_track_->GetSource()->GetVideoCapturer()));
EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, true, _));
rtc::scoped_refptr<VideoRtpSender> sender =
new VideoRtpSender(video_track_, kStreamLabel1, &video_provider_);
sender->SetSsrc(kVideoSsrc);
EXPECT_CALL(video_provider_, SetCaptureDevice(kVideoSsrc, nullptr)).Times(1);
EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _)).Times(1);
sender->SetSsrc(0);
// Make sure it's SetSsrc that called methods on the provider, and not the
// destructor.
EXPECT_CALL(video_provider_, SetCaptureDevice(_, _)).Times(0);
EXPECT_CALL(video_provider_, SetVideoSend(_, _, _)).Times(0);
}
TEST_F(RtpSenderReceiverTest, AudioSenderTrackSetToNull) {
rtc::scoped_refptr<AudioTrackInterface> track =
AudioTrack::Create(kAudioTrackId, nullptr);
EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, true, _, _));
rtc::scoped_refptr<AudioRtpSender> sender =
new AudioRtpSender(track, kStreamLabel1, &audio_provider_, nullptr);
sender->SetSsrc(kAudioSsrc);
EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, false, _, _)).Times(1);
EXPECT_TRUE(sender->SetTrack(nullptr));
// Make sure it's SetTrack that called methods on the provider, and not the
// destructor.
EXPECT_CALL(audio_provider_, SetAudioSend(_, _, _, _)).Times(0);
}
TEST_F(RtpSenderReceiverTest, VideoSenderTrackSetToNull) {
EXPECT_CALL(video_provider_,
SetCaptureDevice(kVideoSsrc,
video_track_->GetSource()->GetVideoCapturer()));
EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, true, _));
rtc::scoped_refptr<VideoRtpSender> sender =
new VideoRtpSender(video_track_, kStreamLabel1, &video_provider_);
sender->SetSsrc(kVideoSsrc);
EXPECT_CALL(video_provider_, SetCaptureDevice(kVideoSsrc, nullptr)).Times(1);
EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _)).Times(1);
EXPECT_TRUE(sender->SetTrack(nullptr));
// Make sure it's SetTrack that called methods on the provider, and not the
// destructor.
EXPECT_CALL(video_provider_, SetCaptureDevice(_, _)).Times(0);
EXPECT_CALL(video_provider_, SetVideoSend(_, _, _)).Times(0);
}
TEST_F(RtpSenderReceiverTest, AudioSenderSsrcChanged) {
rtc::scoped_refptr<AudioTrackInterface> track =
AudioTrack::Create(kAudioTrackId, nullptr);
EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, true, _, _));
rtc::scoped_refptr<AudioRtpSender> sender =
new AudioRtpSender(track, kStreamLabel1, &audio_provider_, nullptr);
sender->SetSsrc(kAudioSsrc);
EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc, false, _, _)).Times(1);
EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc2, true, _, _)).Times(1);
sender->SetSsrc(kAudioSsrc2);
// Calls expected from destructor.
EXPECT_CALL(audio_provider_, SetAudioSend(kAudioSsrc2, false, _, _)).Times(1);
}
TEST_F(RtpSenderReceiverTest, VideoSenderSsrcChanged) {
EXPECT_CALL(video_provider_,
SetCaptureDevice(kVideoSsrc,
video_track_->GetSource()->GetVideoCapturer()));
EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, true, _));
rtc::scoped_refptr<VideoRtpSender> sender =
new VideoRtpSender(video_track_, kStreamLabel1, &video_provider_);
sender->SetSsrc(kVideoSsrc);
EXPECT_CALL(video_provider_, SetCaptureDevice(kVideoSsrc, nullptr)).Times(1);
EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc, false, _)).Times(1);
EXPECT_CALL(video_provider_,
SetCaptureDevice(kVideoSsrc2,
video_track_->GetSource()->GetVideoCapturer()));
EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc2, true, _));
sender->SetSsrc(kVideoSsrc2);
// Calls expected from destructor.
EXPECT_CALL(video_provider_, SetCaptureDevice(kVideoSsrc2, nullptr)).Times(1);
EXPECT_CALL(video_provider_, SetVideoSend(kVideoSsrc2, false, _)).Times(1);
}
} // namespace webrtc


@ -31,7 +31,7 @@
namespace webrtc {
static const char kVideoTrackKind[] = "video";
const char MediaStreamTrackInterface::kVideoKind[] = "video";
VideoTrack::VideoTrack(const std::string& label,
VideoSourceInterface* video_source)
@ -47,7 +47,7 @@ VideoTrack::~VideoTrack() {
}
std::string VideoTrack::kind() const {
return kVideoTrackKind;
return kVideoKind;
}
void VideoTrack::AddRenderer(VideoRendererInterface* renderer) {


@ -1739,7 +1739,6 @@ void WebRtcSession::RemoveUnusedChannels(const SessionDescription* desc) {
cricket::GetFirstVideoContent(desc);
if ((!video_info || video_info->rejected) && video_channel_) {
SignalVideoChannelDestroyed();
const std::string content_name = video_channel_->content_name();
channel_manager_->DestroyVideoChannel(video_channel_.release());
}
@ -1747,7 +1746,6 @@ void WebRtcSession::RemoveUnusedChannels(const SessionDescription* desc) {
cricket::GetFirstAudioContent(desc);
if ((!voice_info || voice_info->rejected) && voice_channel_) {
SignalVoiceChannelDestroyed();
const std::string content_name = voice_channel_->content_name();
channel_manager_->DestroyVoiceChannel(voice_channel_.release());
}
@ -1755,7 +1753,6 @@ void WebRtcSession::RemoveUnusedChannels(const SessionDescription* desc) {
cricket::GetFirstDataContent(desc);
if ((!data_info || data_info->rejected) && data_channel_) {
SignalDataChannelDestroyed();
const std::string content_name = data_channel_->content_name();
channel_manager_->DestroyDataChannel(data_channel_.release());
}
}


@ -164,17 +164,21 @@ class TestRandomGenerator : public RandomGenerator {
int seed_;
};
// TODO: Use Base64::Base64Table instead.
static const char BASE64[64] = {
'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M',
'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z',
'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm',
'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '+', '/'
};
namespace {
// TODO: Use Base64::Base64Table instead.
static const char kBase64[64] = {
'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M',
'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z',
'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm',
'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '+', '/'};
static const char kHex[16] = {'0', '1', '2', '3', '4', '5', '6', '7',
'8', '9', 'a', 'b', 'c', 'd', 'e', 'f'};
static const char kUuidDigit17[4] = {'8', '9', 'a', 'b'};
// This round about way of creating a global RNG is to safe-guard against
// indeterminant static initialization order.
scoped_ptr<RandomGenerator>& GetGlobalRng() {
@ -232,7 +236,7 @@ bool CreateRandomString(size_t len,
}
bool CreateRandomString(size_t len, std::string* str) {
return CreateRandomString(len, BASE64, 64, str);
return CreateRandomString(len, kBase64, 64, str);
}
bool CreateRandomString(size_t len, const std::string& table,
@ -241,6 +245,41 @@ bool CreateRandomString(size_t len, const std::string& table,
static_cast<int>(table.size()), str);
}
// Version 4 UUID is of the form:
// xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx
// Where 'x' is a hex digit, and 'y' is 8, 9, a or b.
std::string CreateRandomUuid() {
std::string str;
scoped_ptr<uint8_t[]> bytes(new uint8_t[31]);
if (!Rng().Generate(bytes.get(), 31)) {
LOG(LS_ERROR) << "Failed to generate random string!";
return str;
}
str.reserve(36);
for (size_t i = 0; i < 8; ++i) {
str.push_back(kHex[bytes[i] % 16]);
}
str.push_back('-');
for (size_t i = 8; i < 12; ++i) {
str.push_back(kHex[bytes[i] % 16]);
}
str.push_back('-');
str.push_back('4');
for (size_t i = 12; i < 15; ++i) {
str.push_back(kHex[bytes[i] % 16]);
}
str.push_back('-');
str.push_back(kUuidDigit17[bytes[15] % 4]);
for (size_t i = 16; i < 19; ++i) {
str.push_back(kHex[bytes[i] % 16]);
}
str.push_back('-');
for (size_t i = 19; i < 31; ++i) {
str.push_back(kHex[bytes[i] % 16]);
}
return str;
}
uint32_t CreateRandomId() {
uint32_t id;
if (!Rng().Generate(&id, sizeof(id))) {


@ -39,6 +39,9 @@ bool CreateRandomString(size_t length, std::string* str);
bool CreateRandomString(size_t length, const std::string& table,
std::string* str);
// Generates a (cryptographically) random UUID version 4 string.
std::string CreateRandomUuid();
// Generates a random id.
uint32_t CreateRandomId();
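A short usage sketch of the new helper, which is what the track-less AudioRtpSender/VideoRtpSender constructors above use to pick a random id and stream_id (the standalone main() is illustrative only):

#include <iostream>
#include <string>

#include "webrtc/base/helpers.h"

int main() {
  // Produces a version 4 UUID such as "41706e92-cdd3-46d9-a22d-8ff1737ffb11":
  // 36 characters, hex digits, with '4' at index 14 and one of 8/9/a/b at
  // index 19.
  std::string id = rtc::CreateRandomUuid();
  std::cout << id << " (" << id.size() << " chars)" << std::endl;
  return 0;
}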


@ -43,16 +43,23 @@ TEST_F(RandomTest, TestCreateRandomString) {
EXPECT_EQ(256U, random2.size());
}
TEST_F(RandomTest, TestCreateRandomUuid) {
std::string random = CreateRandomUuid();
EXPECT_EQ(36U, random.size());
}
TEST_F(RandomTest, TestCreateRandomForTest) {
// Make sure we get the output we expect.
SetRandomTestMode(true);
EXPECT_EQ(2154761789U, CreateRandomId());
EXPECT_EQ("h0ISP4S5SJKH/9EY", CreateRandomString(16));
EXPECT_EQ("41706e92-cdd3-46d9-a22d-8ff1737ffb11", CreateRandomUuid());
// Reset and make sure we get the same output.
SetRandomTestMode(true);
EXPECT_EQ(2154761789U, CreateRandomId());
EXPECT_EQ("h0ISP4S5SJKH/9EY", CreateRandomString(16));
EXPECT_EQ("41706e92-cdd3-46d9-a22d-8ff1737ffb11", CreateRandomUuid());
// Test different character sets.
SetRandomTestMode(true);