Swap use of CriticalSectionWrapper with rtc::CriticalSection in voice_engine/

Also remove mischievous tab character!
This is part of getting rid of CriticalSectionWrapper and makes the code slightly simpler.

BUG=

Review URL: https://codereview.webrtc.org/1607353002

Cr-Commit-Position: refs/heads/master@{#11346}
Author: tommi (2016-01-21 10:37:37 -08:00), committed by Commit bot
Parent: 8947a01e05
Commit: 31fc21f454
39 changed files with 232 additions and 293 deletions
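
For orientation, this is the pattern the CL applies throughout voice_engine/: a heap-allocated CriticalSectionWrapper locked via CriticalSectionScoped becomes a plain rtc::CriticalSection member locked with rtc::CritScope, so constructors no longer call CreateCriticalSection() and destructors no longer delete the lock. A minimal sketch of the before/after shape, assuming a WebRTC checkout of this era; the Counter class and its members are illustrative, not code from the CL:

#include "webrtc/base/criticalsection.h"

// Before (old style, shown for contrast):
//   class Counter {
//    public:
//     Counter() : crit_(*CriticalSectionWrapper::CreateCriticalSection()) {}
//     ~Counter() { delete &crit_; }  // manual cleanup was required
//     void Add(int n) { CriticalSectionScoped cs(&crit_); total_ += n; }
//    private:
//     CriticalSectionWrapper& crit_;
//     int total_;
//   };

// After: the lock is an ordinary member with automatic storage.
class Counter {
 public:
  void Add(int n) {
    rtc::CritScope cs(&crit_);
    total_ += n;
  }
  int Total() const {
    rtc::CritScope cs(&crit_);  // |crit_| is mutable so const methods can lock
    return total_;
  }

 private:
  mutable rtc::CriticalSection crit_;
  int total_ = 0;
};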


@ -14,6 +14,7 @@
#include <utility> #include <utility>
#include "webrtc/base/checks.h" #include "webrtc/base/checks.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/format_macros.h" #include "webrtc/base/format_macros.h"
#include "webrtc/base/logging.h" #include "webrtc/base/logging.h"
#include "webrtc/base/thread_checker.h" #include "webrtc/base/thread_checker.h"
@ -30,7 +31,6 @@
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h" #include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
#include "webrtc/modules/utility/include/audio_frame_operations.h" #include "webrtc/modules/utility/include/audio_frame_operations.h"
#include "webrtc/modules/utility/include/process_thread.h" #include "webrtc/modules/utility/include/process_thread.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h" #include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/voice_engine/include/voe_base.h" #include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_external_media.h" #include "webrtc/voice_engine/include/voe_external_media.h"
@ -157,9 +157,7 @@ struct ChannelStatistics : public RtcpStatistics {
// Statistics callback, called at each generation of a new RTCP report block. // Statistics callback, called at each generation of a new RTCP report block.
class StatisticsProxy : public RtcpStatisticsCallback { class StatisticsProxy : public RtcpStatisticsCallback {
public: public:
StatisticsProxy(uint32_t ssrc) StatisticsProxy(uint32_t ssrc) : ssrc_(ssrc) {}
: stats_lock_(CriticalSectionWrapper::CreateCriticalSection()),
ssrc_(ssrc) {}
virtual ~StatisticsProxy() {} virtual ~StatisticsProxy() {}
void StatisticsUpdated(const RtcpStatistics& statistics, void StatisticsUpdated(const RtcpStatistics& statistics,
@ -167,7 +165,7 @@ class StatisticsProxy : public RtcpStatisticsCallback {
if (ssrc != ssrc_) if (ssrc != ssrc_)
return; return;
CriticalSectionScoped cs(stats_lock_.get()); rtc::CritScope cs(&stats_lock_);
stats_.rtcp = statistics; stats_.rtcp = statistics;
if (statistics.jitter > stats_.max_jitter) { if (statistics.jitter > stats_.max_jitter) {
stats_.max_jitter = statistics.jitter; stats_.max_jitter = statistics.jitter;
@ -177,7 +175,7 @@ class StatisticsProxy : public RtcpStatisticsCallback {
void CNameChanged(const char* cname, uint32_t ssrc) override {} void CNameChanged(const char* cname, uint32_t ssrc) override {}
ChannelStatistics GetStats() { ChannelStatistics GetStats() {
CriticalSectionScoped cs(stats_lock_.get()); rtc::CritScope cs(&stats_lock_);
return stats_; return stats_;
} }
@ -185,7 +183,7 @@ class StatisticsProxy : public RtcpStatisticsCallback {
// StatisticsUpdated calls are triggered from threads in the RTP module, // StatisticsUpdated calls are triggered from threads in the RTP module,
// while GetStats calls can be triggered from the public voice engine API, // while GetStats calls can be triggered from the public voice engine API,
// hence synchronization is needed. // hence synchronization is needed.
rtc::scoped_ptr<CriticalSectionWrapper> stats_lock_; rtc::CriticalSection stats_lock_;
const uint32_t ssrc_; const uint32_t ssrc_;
ChannelStatistics stats_; ChannelStatistics stats_;
}; };
@ -298,7 +296,7 @@ Channel::InFrameType(FrameType frame_type)
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::InFrameType(frame_type=%d)", frame_type); "Channel::InFrameType(frame_type=%d)", frame_type);
CriticalSectionScoped cs(&_callbackCritSect); rtc::CritScope cs(&_callbackCritSect);
_sendFrameType = (frame_type == kAudioFrameSpeech); _sendFrameType = (frame_type == kAudioFrameSpeech);
return 0; return 0;
} }
@ -306,7 +304,7 @@ Channel::InFrameType(FrameType frame_type)
int32_t int32_t
Channel::OnRxVadDetected(int vadDecision) Channel::OnRxVadDetected(int vadDecision)
{ {
CriticalSectionScoped cs(&_callbackCritSect); rtc::CritScope cs(&_callbackCritSect);
if (_rxVadObserverPtr) if (_rxVadObserverPtr)
{ {
_rxVadObserverPtr->OnRxVad(_channelId, vadDecision); _rxVadObserverPtr->OnRxVad(_channelId, vadDecision);
@ -321,7 +319,7 @@ bool Channel::SendRtp(const uint8_t* data,
WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId), WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::SendPacket(channel=%d, len=%" PRIuS ")", len); "Channel::SendPacket(channel=%d, len=%" PRIuS ")", len);
CriticalSectionScoped cs(&_callbackCritSect); rtc::CritScope cs(&_callbackCritSect);
if (_transportPtr == NULL) if (_transportPtr == NULL)
{ {
@ -352,7 +350,7 @@ Channel::SendRtcp(const uint8_t *data, size_t len)
WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId), WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::SendRtcp(len=%" PRIuS ")", len); "Channel::SendRtcp(len=%" PRIuS ")", len);
CriticalSectionScoped cs(&_callbackCritSect); rtc::CritScope cs(&_callbackCritSect);
if (_transportPtr == NULL) if (_transportPtr == NULL)
{ {
WEBRTC_TRACE(kTraceError, kTraceVoice, WEBRTC_TRACE(kTraceError, kTraceVoice,
@ -566,7 +564,7 @@ int32_t Channel::GetAudioFrame(int32_t id, AudioFrame* audioFrame)
// scaling/panning, as that applies to the mix operation. // scaling/panning, as that applies to the mix operation.
// External recipients of the audio (e.g. via AudioTrack), will do their // External recipients of the audio (e.g. via AudioTrack), will do their
// own mixing/dynamic processing. // own mixing/dynamic processing.
CriticalSectionScoped cs(&_callbackCritSect); rtc::CritScope cs(&_callbackCritSect);
if (audio_sink_) { if (audio_sink_) {
AudioSinkInterface::Data data( AudioSinkInterface::Data data(
&audioFrame->data_[0], &audioFrame->data_[0],
@ -580,7 +578,7 @@ int32_t Channel::GetAudioFrame(int32_t id, AudioFrame* audioFrame)
float left_pan = 1.0f; float left_pan = 1.0f;
float right_pan = 1.0f; float right_pan = 1.0f;
{ {
CriticalSectionScoped cs(&volume_settings_critsect_); rtc::CritScope cs(&volume_settings_critsect_);
output_gain = _outputGain; output_gain = _outputGain;
left_pan = _panLeft; left_pan = _panLeft;
right_pan= _panRight; right_pan= _panRight;
@ -620,7 +618,7 @@ int32_t Channel::GetAudioFrame(int32_t id, AudioFrame* audioFrame)
// External media // External media
if (_outputExternalMedia) if (_outputExternalMedia)
{ {
CriticalSectionScoped cs(&_callbackCritSect); rtc::CritScope cs(&_callbackCritSect);
const bool isStereo = (audioFrame->num_channels_ == 2); const bool isStereo = (audioFrame->num_channels_ == 2);
if (_outputExternalMediaCallbackPtr) if (_outputExternalMediaCallbackPtr)
{ {
@ -633,7 +631,7 @@ int32_t Channel::GetAudioFrame(int32_t id, AudioFrame* audioFrame)
// Record playout if enabled // Record playout if enabled
{ {
CriticalSectionScoped cs(&_fileCritSect); rtc::CritScope cs(&_fileCritSect);
if (_outputFileRecording && _outputFileRecorderPtr) if (_outputFileRecording && _outputFileRecorderPtr)
{ {
@ -660,7 +658,7 @@ int32_t Channel::GetAudioFrame(int32_t id, AudioFrame* audioFrame)
(GetPlayoutFrequency() / 1000); (GetPlayoutFrequency() / 1000);
{ {
CriticalSectionScoped lock(ts_stats_lock_.get()); rtc::CritScope lock(&ts_stats_lock_);
// Compute ntp time. // Compute ntp time.
audioFrame->ntp_time_ms_ = ntp_estimator_.Estimate( audioFrame->ntp_time_ms_ = ntp_estimator_.Estimate(
audioFrame->timestamp_); audioFrame->timestamp_);
@ -704,7 +702,7 @@ Channel::NeededFrequency(int32_t id) const
// limit the spectrum anyway. // limit the spectrum anyway.
if (channel_state_.Get().output_file_playing) if (channel_state_.Get().output_file_playing)
{ {
CriticalSectionScoped cs(&_fileCritSect); rtc::CritScope cs(&_fileCritSect);
if (_outputFilePlayerPtr) if (_outputFilePlayerPtr)
{ {
if(_outputFilePlayerPtr->Frequency()>highestNeeded) if(_outputFilePlayerPtr->Frequency()>highestNeeded)
@ -790,7 +788,7 @@ Channel::RecordFileEnded(int32_t id)
assert(id == _outputFileRecorderId); assert(id == _outputFileRecorderId);
CriticalSectionScoped cs(&_fileCritSect); rtc::CritScope cs(&_fileCritSect);
_outputFileRecording = false; _outputFileRecording = false;
WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
@ -803,11 +801,7 @@ Channel::Channel(int32_t channelId,
uint32_t instanceId, uint32_t instanceId,
RtcEventLog* const event_log, RtcEventLog* const event_log,
const Config& config) const Config& config)
: _fileCritSect(*CriticalSectionWrapper::CreateCriticalSection()), : _instanceId(instanceId),
_callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
volume_settings_critsect_(
*CriticalSectionWrapper::CreateCriticalSection()),
_instanceId(instanceId),
_channelId(channelId), _channelId(channelId),
event_log_(event_log), event_log_(event_log),
rtp_header_parser_(RtpHeaderParser::Create()), rtp_header_parser_(RtpHeaderParser::Create()),
@ -848,7 +842,6 @@ Channel::Channel(int32_t channelId,
playout_delay_ms_(0), playout_delay_ms_(0),
_numberOfDiscardedPackets(0), _numberOfDiscardedPackets(0),
send_sequence_number_(0), send_sequence_number_(0),
ts_stats_lock_(CriticalSectionWrapper::CreateCriticalSection()),
rtp_ts_wraparound_handler_(new rtc::TimestampWrapAroundHandler()), rtp_ts_wraparound_handler_(new rtc::TimestampWrapAroundHandler()),
capture_start_rtp_time_stamp_(-1), capture_start_rtp_time_stamp_(-1),
capture_start_ntp_time_ms_(-1), capture_start_ntp_time_ms_(-1),
@ -875,7 +868,6 @@ Channel::Channel(int32_t channelId,
_lastPayloadType(0), _lastPayloadType(0),
_includeAudioLevelIndication(false), _includeAudioLevelIndication(false),
_outputSpeechType(AudioFrame::kNormalSpeech), _outputSpeechType(AudioFrame::kNormalSpeech),
video_sync_lock_(CriticalSectionWrapper::CreateCriticalSection()),
_average_jitter_buffer_delay_us(0), _average_jitter_buffer_delay_us(0),
_previousTimestamp(0), _previousTimestamp(0),
_recPacketDelayMs(20), _recPacketDelayMs(20),
@ -885,7 +877,6 @@ Channel::Channel(int32_t channelId,
restored_packet_in_use_(false), restored_packet_in_use_(false),
rtcp_observer_(new VoERtcpObserver(this)), rtcp_observer_(new VoERtcpObserver(this)),
network_predictor_(new NetworkPredictor(Clock::GetRealTimeClock())), network_predictor_(new NetworkPredictor(Clock::GetRealTimeClock())),
assoc_send_channel_lock_(CriticalSectionWrapper::CreateCriticalSection()),
associate_send_channel_(ChannelOwner(nullptr)), associate_send_channel_(ChannelOwner(nullptr)),
pacing_enabled_(config.Get<VoicePacing>().enabled), pacing_enabled_(config.Get<VoicePacing>().enabled),
feedback_observer_proxy_(pacing_enabled_ ? new TransportFeedbackProxy() feedback_observer_proxy_(pacing_enabled_ ? new TransportFeedbackProxy()
@ -953,7 +944,7 @@ Channel::~Channel()
StopPlayout(); StopPlayout();
{ {
CriticalSectionScoped cs(&_fileCritSect); rtc::CritScope cs(&_fileCritSect);
if (_inputFilePlayerPtr) if (_inputFilePlayerPtr)
{ {
_inputFilePlayerPtr->RegisterModuleFileCallback(NULL); _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
@ -999,11 +990,6 @@ Channel::~Channel()
_moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get()); _moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get());
// End of modules shutdown // End of modules shutdown
// Delete other objects
delete &_callbackCritSect;
delete &_fileCritSect;
delete &volume_settings_critsect_;
} }
int32_t int32_t
@ -1164,7 +1150,7 @@ Channel::SetEngineInformation(Statistics& engineStatistics,
ProcessThread& moduleProcessThread, ProcessThread& moduleProcessThread,
AudioDeviceModule& audioDeviceModule, AudioDeviceModule& audioDeviceModule,
VoiceEngineObserver* voiceEngineObserver, VoiceEngineObserver* voiceEngineObserver,
CriticalSectionWrapper* callbackCritSect) rtc::CriticalSection* callbackCritSect)
{ {
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::SetEngineInformation()"); "Channel::SetEngineInformation()");
@ -1187,7 +1173,7 @@ Channel::UpdateLocalTimeStamp()
} }
void Channel::SetSink(rtc::scoped_ptr<AudioSinkInterface> sink) { void Channel::SetSink(rtc::scoped_ptr<AudioSinkInterface> sink) {
CriticalSectionScoped cs(&_callbackCritSect); rtc::CritScope cs(&_callbackCritSect);
audio_sink_ = std::move(sink); audio_sink_ = std::move(sink);
} }
@ -1267,7 +1253,7 @@ Channel::StartSend()
_engineStatisticsPtr->SetLastError( _engineStatisticsPtr->SetLastError(
VE_RTP_RTCP_MODULE_ERROR, kTraceError, VE_RTP_RTCP_MODULE_ERROR, kTraceError,
"StartSend() RTP/RTCP failed to start sending"); "StartSend() RTP/RTCP failed to start sending");
CriticalSectionScoped cs(&_callbackCritSect); rtc::CritScope cs(&_callbackCritSect);
channel_state_.SetSending(false); channel_state_.SetSending(false);
return -1; return -1;
} }
@ -1339,7 +1325,7 @@ Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer)
{ {
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::RegisterVoiceEngineObserver()"); "Channel::RegisterVoiceEngineObserver()");
CriticalSectionScoped cs(&_callbackCritSect); rtc::CritScope cs(&_callbackCritSect);
if (_voiceEngineObserverPtr) if (_voiceEngineObserverPtr)
{ {
@ -1357,7 +1343,7 @@ Channel::DeRegisterVoiceEngineObserver()
{ {
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::DeRegisterVoiceEngineObserver()"); "Channel::DeRegisterVoiceEngineObserver()");
CriticalSectionScoped cs(&_callbackCritSect); rtc::CritScope cs(&_callbackCritSect);
if (!_voiceEngineObserverPtr) if (!_voiceEngineObserverPtr)
{ {
@ -1664,7 +1650,7 @@ int32_t Channel::RegisterExternalTransport(Transport& transport)
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId), WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
"Channel::RegisterExternalTransport()"); "Channel::RegisterExternalTransport()");
CriticalSectionScoped cs(&_callbackCritSect); rtc::CritScope cs(&_callbackCritSect);
if (_externalTransport) if (_externalTransport)
{ {
@ -1684,7 +1670,7 @@ Channel::DeRegisterExternalTransport()
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::DeRegisterExternalTransport()"); "Channel::DeRegisterExternalTransport()");
CriticalSectionScoped cs(&_callbackCritSect); rtc::CritScope cs(&_callbackCritSect);
if (!_transportPtr) if (!_transportPtr)
{ {
@ -1828,7 +1814,7 @@ int32_t Channel::ReceivedRTCPPacket(const int8_t* data, size_t length) {
} }
{ {
CriticalSectionScoped lock(ts_stats_lock_.get()); rtc::CritScope lock(&ts_stats_lock_);
ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp); ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp);
} }
return 0; return 0;
@ -1857,7 +1843,7 @@ int Channel::StartPlayingFileLocally(const char* fileName,
} }
{ {
CriticalSectionScoped cs(&_fileCritSect); rtc::CritScope cs(&_fileCritSect);
if (_outputFilePlayerPtr) if (_outputFilePlayerPtr)
{ {
@ -1936,7 +1922,7 @@ int Channel::StartPlayingFileLocally(InStream* stream,
} }
{ {
CriticalSectionScoped cs(&_fileCritSect); rtc::CritScope cs(&_fileCritSect);
// Destroy the old instance // Destroy the old instance
if (_outputFilePlayerPtr) if (_outputFilePlayerPtr)
@ -1995,7 +1981,7 @@ int Channel::StopPlayingFileLocally()
} }
{ {
CriticalSectionScoped cs(&_fileCritSect); rtc::CritScope cs(&_fileCritSect);
if (_outputFilePlayerPtr->StopPlayingFile() != 0) if (_outputFilePlayerPtr->StopPlayingFile() != 0)
{ {
@ -2047,7 +2033,7 @@ int Channel::RegisterFilePlayingToMixer()
if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0) if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0)
{ {
channel_state_.SetOutputFilePlaying(false); channel_state_.SetOutputFilePlaying(false);
CriticalSectionScoped cs(&_fileCritSect); rtc::CritScope cs(&_fileCritSect);
_engineStatisticsPtr->SetLastError( _engineStatisticsPtr->SetLastError(
VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError, VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
"StartPlayingFile() failed to add participant as file to mixer"); "StartPlayingFile() failed to add participant as file to mixer");
@ -2074,7 +2060,7 @@ int Channel::StartPlayingFileAsMicrophone(const char* fileName,
"stopPosition=%d)", fileName, loop, format, volumeScaling, "stopPosition=%d)", fileName, loop, format, volumeScaling,
startPosition, stopPosition); startPosition, stopPosition);
CriticalSectionScoped cs(&_fileCritSect); rtc::CritScope cs(&_fileCritSect);
if (channel_state_.Get().input_file_playing) if (channel_state_.Get().input_file_playing)
{ {
@ -2149,7 +2135,7 @@ int Channel::StartPlayingFileAsMicrophone(InStream* stream,
return -1; return -1;
} }
CriticalSectionScoped cs(&_fileCritSect); rtc::CritScope cs(&_fileCritSect);
if (channel_state_.Get().input_file_playing) if (channel_state_.Get().input_file_playing)
{ {
@ -2205,7 +2191,7 @@ int Channel::StopPlayingFileAsMicrophone()
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::StopPlayingFileAsMicrophone()"); "Channel::StopPlayingFileAsMicrophone()");
CriticalSectionScoped cs(&_fileCritSect); rtc::CritScope cs(&_fileCritSect);
if (!channel_state_.Get().input_file_playing) if (!channel_state_.Get().input_file_playing)
{ {
@ -2273,7 +2259,7 @@ int Channel::StartRecordingPlayout(const char* fileName,
format = kFileFormatCompressedFile; format = kFileFormatCompressedFile;
} }
CriticalSectionScoped cs(&_fileCritSect); rtc::CritScope cs(&_fileCritSect);
// Destroy the old instance // Destroy the old instance
if (_outputFileRecorderPtr) if (_outputFileRecorderPtr)
@ -2350,7 +2336,7 @@ int Channel::StartRecordingPlayout(OutStream* stream,
format = kFileFormatCompressedFile; format = kFileFormatCompressedFile;
} }
CriticalSectionScoped cs(&_fileCritSect); rtc::CritScope cs(&_fileCritSect);
// Destroy the old instance // Destroy the old instance
if (_outputFileRecorderPtr) if (_outputFileRecorderPtr)
@ -2401,7 +2387,7 @@ int Channel::StopRecordingPlayout()
} }
CriticalSectionScoped cs(&_fileCritSect); rtc::CritScope cs(&_fileCritSect);
if (_outputFileRecorderPtr->StopRecording() != 0) if (_outputFileRecorderPtr->StopRecording() != 0)
{ {
@ -2421,7 +2407,7 @@ int Channel::StopRecordingPlayout()
void void
Channel::SetMixWithMicStatus(bool mix) Channel::SetMixWithMicStatus(bool mix)
{ {
CriticalSectionScoped cs(&_fileCritSect); rtc::CritScope cs(&_fileCritSect);
_mixFileWithMicrophone=mix; _mixFileWithMicrophone=mix;
} }
@ -2444,7 +2430,7 @@ Channel::GetSpeechOutputLevelFullRange(uint32_t& level) const
int int
Channel::SetMute(bool enable) Channel::SetMute(bool enable)
{ {
CriticalSectionScoped cs(&volume_settings_critsect_); rtc::CritScope cs(&volume_settings_critsect_);
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::SetMute(enable=%d)", enable); "Channel::SetMute(enable=%d)", enable);
_mute = enable; _mute = enable;
@ -2454,14 +2440,14 @@ Channel::SetMute(bool enable)
bool bool
Channel::Mute() const Channel::Mute() const
{ {
CriticalSectionScoped cs(&volume_settings_critsect_); rtc::CritScope cs(&volume_settings_critsect_);
return _mute; return _mute;
} }
int int
Channel::SetOutputVolumePan(float left, float right) Channel::SetOutputVolumePan(float left, float right)
{ {
CriticalSectionScoped cs(&volume_settings_critsect_); rtc::CritScope cs(&volume_settings_critsect_);
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::SetOutputVolumePan()"); "Channel::SetOutputVolumePan()");
_panLeft = left; _panLeft = left;
@ -2472,7 +2458,7 @@ Channel::SetOutputVolumePan(float left, float right)
int int
Channel::GetOutputVolumePan(float& left, float& right) const Channel::GetOutputVolumePan(float& left, float& right) const
{ {
CriticalSectionScoped cs(&volume_settings_critsect_); rtc::CritScope cs(&volume_settings_critsect_);
left = _panLeft; left = _panLeft;
right = _panRight; right = _panRight;
return 0; return 0;
@ -2481,7 +2467,7 @@ Channel::GetOutputVolumePan(float& left, float& right) const
int int
Channel::SetChannelOutputVolumeScaling(float scaling) Channel::SetChannelOutputVolumeScaling(float scaling)
{ {
CriticalSectionScoped cs(&volume_settings_critsect_); rtc::CritScope cs(&volume_settings_critsect_);
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::SetChannelOutputVolumeScaling()"); "Channel::SetChannelOutputVolumeScaling()");
_outputGain = scaling; _outputGain = scaling;
@ -2491,7 +2477,7 @@ Channel::SetChannelOutputVolumeScaling(float scaling)
int int
Channel::GetChannelOutputVolumeScaling(float& scaling) const Channel::GetChannelOutputVolumeScaling(float& scaling) const
{ {
CriticalSectionScoped cs(&volume_settings_critsect_); rtc::CritScope cs(&volume_settings_critsect_);
scaling = _outputGain; scaling = _outputGain;
return 0; return 0;
} }
@ -2601,7 +2587,7 @@ Channel::RegisterRxVadObserver(VoERxVadCallback &observer)
{ {
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::RegisterRxVadObserver()"); "Channel::RegisterRxVadObserver()");
CriticalSectionScoped cs(&_callbackCritSect); rtc::CritScope cs(&_callbackCritSect);
if (_rxVadObserverPtr) if (_rxVadObserverPtr)
{ {
@ -2620,7 +2606,7 @@ Channel::DeRegisterRxVadObserver()
{ {
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::DeRegisterRxVadObserver()"); "Channel::DeRegisterRxVadObserver()");
CriticalSectionScoped cs(&_callbackCritSect); rtc::CritScope cs(&_callbackCritSect);
if (!_rxVadObserverPtr) if (!_rxVadObserverPtr)
{ {
@ -3260,7 +3246,7 @@ Channel::GetRTPStatistics(CallStatistics& stats)
// --- Timestamps // --- Timestamps
{ {
CriticalSectionScoped lock(ts_stats_lock_.get()); rtc::CritScope lock(&ts_stats_lock_);
stats.capture_start_ntp_time_ms_ = capture_start_ntp_time_ms_; stats.capture_start_ntp_time_ms_ = capture_start_ntp_time_ms_;
} }
return 0; return 0;
@ -3401,7 +3387,7 @@ Channel::PrepareEncodeAndSend(int mixingFrequency)
if (channel_state_.Get().input_external_media) if (channel_state_.Get().input_external_media)
{ {
CriticalSectionScoped cs(&_callbackCritSect); rtc::CritScope cs(&_callbackCritSect);
const bool isStereo = (_audioFrame.num_channels_ == 2); const bool isStereo = (_audioFrame.num_channels_ == 2);
if (_inputExternalMediaCallbackPtr) if (_inputExternalMediaCallbackPtr)
{ {
@ -3465,7 +3451,7 @@ Channel::EncodeAndSend()
} }
void Channel::DisassociateSendChannel(int channel_id) { void Channel::DisassociateSendChannel(int channel_id) {
CriticalSectionScoped lock(assoc_send_channel_lock_.get()); rtc::CritScope lock(&assoc_send_channel_lock_);
Channel* channel = associate_send_channel_.channel(); Channel* channel = associate_send_channel_.channel();
if (channel && channel->ChannelId() == channel_id) { if (channel && channel->ChannelId() == channel_id) {
// If this channel is associated with a send channel of the specified // If this channel is associated with a send channel of the specified
@ -3482,7 +3468,7 @@ int Channel::RegisterExternalMediaProcessing(
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::RegisterExternalMediaProcessing()"); "Channel::RegisterExternalMediaProcessing()");
CriticalSectionScoped cs(&_callbackCritSect); rtc::CritScope cs(&_callbackCritSect);
if (kPlaybackPerChannel == type) if (kPlaybackPerChannel == type)
{ {
@ -3518,7 +3504,7 @@ int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type)
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::DeRegisterExternalMediaProcessing()"); "Channel::DeRegisterExternalMediaProcessing()");
CriticalSectionScoped cs(&_callbackCritSect); rtc::CritScope cs(&_callbackCritSect);
if (kPlaybackPerChannel == type) if (kPlaybackPerChannel == type)
{ {
@ -3580,7 +3566,7 @@ void Channel::GetDecodingCallStatistics(AudioDecodingCallStats* stats) const {
bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms, bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms,
int* playout_buffer_delay_ms) const { int* playout_buffer_delay_ms) const {
CriticalSectionScoped cs(video_sync_lock_.get()); rtc::CritScope lock(&video_sync_lock_);
if (_average_jitter_buffer_delay_us == 0) { if (_average_jitter_buffer_delay_us == 0) {
return false; return false;
} }
@ -3627,7 +3613,7 @@ Channel::SetMinimumPlayoutDelay(int delayMs)
int Channel::GetPlayoutTimestamp(unsigned int& timestamp) { int Channel::GetPlayoutTimestamp(unsigned int& timestamp) {
uint32_t playout_timestamp_rtp = 0; uint32_t playout_timestamp_rtp = 0;
{ {
CriticalSectionScoped cs(video_sync_lock_.get()); rtc::CritScope lock(&video_sync_lock_);
playout_timestamp_rtp = playout_timestamp_rtp_; playout_timestamp_rtp = playout_timestamp_rtp_;
} }
if (playout_timestamp_rtp == 0) { if (playout_timestamp_rtp == 0) {
@ -3681,7 +3667,7 @@ Channel::MixOrReplaceAudioWithFile(int mixingFrequency)
size_t fileSamples(0); size_t fileSamples(0);
{ {
CriticalSectionScoped cs(&_fileCritSect); rtc::CritScope cs(&_fileCritSect);
if (_inputFilePlayerPtr == NULL) if (_inputFilePlayerPtr == NULL)
{ {
@ -3751,7 +3737,7 @@ Channel::MixAudioWithFile(AudioFrame& audioFrame,
size_t fileSamples(0); size_t fileSamples(0);
{ {
CriticalSectionScoped cs(&_fileCritSect); rtc::CritScope cs(&_fileCritSect);
if (_outputFilePlayerPtr == NULL) if (_outputFilePlayerPtr == NULL)
{ {
@ -3900,7 +3886,7 @@ void Channel::UpdatePlayoutTimestamp(bool rtcp) {
playout_timestamp); playout_timestamp);
{ {
CriticalSectionScoped cs(video_sync_lock_.get()); rtc::CritScope lock(&video_sync_lock_);
if (rtcp) { if (rtcp) {
playout_timestamp_rtcp_ = playout_timestamp; playout_timestamp_rtcp_ = playout_timestamp;
} else { } else {
@ -3941,7 +3927,7 @@ void Channel::UpdatePacketDelay(uint32_t rtp_timestamp,
if (timestamp_diff_ms == 0) return; if (timestamp_diff_ms == 0) return;
{ {
CriticalSectionScoped cs(video_sync_lock_.get()); rtc::CritScope lock(&video_sync_lock_);
if (packet_delay_ms >= 10 && packet_delay_ms <= 60) { if (packet_delay_ms >= 10 && packet_delay_ms <= 60) {
_recPacketDelayMs = packet_delay_ms; _recPacketDelayMs = packet_delay_ms;
@ -4085,7 +4071,7 @@ int64_t Channel::GetRTT(bool allow_associate_channel) const {
int64_t rtt = 0; int64_t rtt = 0;
if (report_blocks.empty()) { if (report_blocks.empty()) {
if (allow_associate_channel) { if (allow_associate_channel) {
CriticalSectionScoped lock(assoc_send_channel_lock_.get()); rtc::CritScope lock(&assoc_send_channel_lock_);
Channel* channel = associate_send_channel_.channel(); Channel* channel = associate_send_channel_.channel();
// Tries to get RTT from an associated channel. This is important for // Tries to get RTT from an associated channel. This is important for
// receive-only channels. // receive-only channels.


@ -47,7 +47,6 @@ namespace webrtc {
class AudioDeviceModule; class AudioDeviceModule;
class Config; class Config;
class CriticalSectionWrapper;
class FileWrapper; class FileWrapper;
class PacketRouter; class PacketRouter;
class ProcessThread; class ProcessThread;
@ -103,57 +102,56 @@ class ChannelState {
bool receiving; bool receiving;
}; };
ChannelState() : lock_(CriticalSectionWrapper::CreateCriticalSection()) { ChannelState() {}
}
virtual ~ChannelState() {} virtual ~ChannelState() {}
void Reset() { void Reset() {
CriticalSectionScoped lock(lock_.get()); rtc::CritScope lock(&lock_);
state_ = State(); state_ = State();
} }
State Get() const { State Get() const {
CriticalSectionScoped lock(lock_.get()); rtc::CritScope lock(&lock_);
return state_; return state_;
} }
void SetRxApmIsEnabled(bool enable) { void SetRxApmIsEnabled(bool enable) {
CriticalSectionScoped lock(lock_.get()); rtc::CritScope lock(&lock_);
state_.rx_apm_is_enabled = enable; state_.rx_apm_is_enabled = enable;
} }
void SetInputExternalMedia(bool enable) { void SetInputExternalMedia(bool enable) {
CriticalSectionScoped lock(lock_.get()); rtc::CritScope lock(&lock_);
state_.input_external_media = enable; state_.input_external_media = enable;
} }
void SetOutputFilePlaying(bool enable) { void SetOutputFilePlaying(bool enable) {
CriticalSectionScoped lock(lock_.get()); rtc::CritScope lock(&lock_);
state_.output_file_playing = enable; state_.output_file_playing = enable;
} }
void SetInputFilePlaying(bool enable) { void SetInputFilePlaying(bool enable) {
CriticalSectionScoped lock(lock_.get()); rtc::CritScope lock(&lock_);
state_.input_file_playing = enable; state_.input_file_playing = enable;
} }
void SetPlaying(bool enable) { void SetPlaying(bool enable) {
CriticalSectionScoped lock(lock_.get()); rtc::CritScope lock(&lock_);
state_.playing = enable; state_.playing = enable;
} }
void SetSending(bool enable) { void SetSending(bool enable) {
CriticalSectionScoped lock(lock_.get()); rtc::CritScope lock(&lock_);
state_.sending = enable; state_.sending = enable;
} }
void SetReceiving(bool enable) { void SetReceiving(bool enable) {
CriticalSectionScoped lock(lock_.get()); rtc::CritScope lock(&lock_);
state_.receiving = enable; state_.receiving = enable;
} }
private: private:
rtc::scoped_ptr<CriticalSectionWrapper> lock_; mutable rtc::CriticalSection lock_;
State state_; State state_;
}; };
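
ChannelState above keeps its shape after the swap: every accessor takes the (now mutable) lock, and Get() hands back a copy of the POD State, so callers always see a consistent snapshot without holding the lock themselves. A reduced sketch of that pattern; apart from the rtc lock types, the names are illustrative:

#include "webrtc/base/criticalsection.h"

class StateHolder {
 public:
  struct State {
    bool playing = false;
    bool sending = false;
  };

  // Return by value under the lock: callers never observe a half-updated State.
  State Get() const {
    rtc::CritScope lock(&lock_);
    return state_;
  }

  void SetPlaying(bool enable) {
    rtc::CritScope lock(&lock_);
    state_.playing = enable;
  }

 private:
  mutable rtc::CriticalSection lock_;  // mutable because Get() is const
  State state_;
};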
@ -190,7 +188,7 @@ public:
ProcessThread& moduleProcessThread, ProcessThread& moduleProcessThread,
AudioDeviceModule& audioDeviceModule, AudioDeviceModule& audioDeviceModule,
VoiceEngineObserver* voiceEngineObserver, VoiceEngineObserver* voiceEngineObserver,
CriticalSectionWrapper* callbackCritSect); rtc::CriticalSection* callbackCritSect);
int32_t UpdateLocalTimeStamp(); int32_t UpdateLocalTimeStamp();
void SetSink(rtc::scoped_ptr<AudioSinkInterface> sink); void SetSink(rtc::scoped_ptr<AudioSinkInterface> sink);
@ -430,7 +428,7 @@ public:
} }
bool ExternalTransport() const bool ExternalTransport() const
{ {
CriticalSectionScoped cs(&_callbackCritSect); rtc::CritScope cs(&_callbackCritSect);
return _externalTransport; return _externalTransport;
} }
bool ExternalMixing() const bool ExternalMixing() const
@ -460,7 +458,7 @@ public:
// Used for obtaining RTT for a receive-only channel. // Used for obtaining RTT for a receive-only channel.
void set_associate_send_channel(const ChannelOwner& channel) { void set_associate_send_channel(const ChannelOwner& channel) {
assert(_channelId != channel.channel()->ChannelId()); assert(_channelId != channel.channel()->ChannelId());
CriticalSectionScoped lock(assoc_send_channel_lock_.get()); rtc::CritScope lock(&assoc_send_channel_lock_);
associate_send_channel_ = channel; associate_send_channel_ = channel;
} }
@ -494,9 +492,9 @@ private:
int32_t GetPlayoutFrequency(); int32_t GetPlayoutFrequency();
int64_t GetRTT(bool allow_associate_channel) const; int64_t GetRTT(bool allow_associate_channel) const;
CriticalSectionWrapper& _fileCritSect; mutable rtc::CriticalSection _fileCritSect;
CriticalSectionWrapper& _callbackCritSect; mutable rtc::CriticalSection _callbackCritSect;
CriticalSectionWrapper& volume_settings_critsect_; mutable rtc::CriticalSection volume_settings_critsect_;
uint32_t _instanceId; uint32_t _instanceId;
int32_t _channelId; int32_t _channelId;
@ -544,7 +542,7 @@ private:
uint16_t send_sequence_number_; uint16_t send_sequence_number_;
uint8_t restored_packet_[kVoiceEngineMaxIpPacketSizeBytes]; uint8_t restored_packet_[kVoiceEngineMaxIpPacketSizeBytes];
rtc::scoped_ptr<CriticalSectionWrapper> ts_stats_lock_; mutable rtc::CriticalSection ts_stats_lock_;
rtc::scoped_ptr<rtc::TimestampWrapAroundHandler> rtp_ts_wraparound_handler_; rtc::scoped_ptr<rtc::TimestampWrapAroundHandler> rtp_ts_wraparound_handler_;
// The rtp timestamp of the first played out audio frame. // The rtp timestamp of the first played out audio frame.
@ -560,7 +558,7 @@ private:
ProcessThread* _moduleProcessThreadPtr; ProcessThread* _moduleProcessThreadPtr;
AudioDeviceModule* _audioDeviceModulePtr; AudioDeviceModule* _audioDeviceModulePtr;
VoiceEngineObserver* _voiceEngineObserverPtr; // owned by base VoiceEngineObserver* _voiceEngineObserverPtr; // owned by base
CriticalSectionWrapper* _callbackCritSectPtr; // owned by base rtc::CriticalSection* _callbackCritSectPtr; // owned by base
Transport* _transportPtr; // WebRtc socket or external transport Transport* _transportPtr; // WebRtc socket or external transport
RMSLevel rms_level_; RMSLevel rms_level_;
rtc::scoped_ptr<AudioProcessing> rx_audioproc_; // far end AudioProcessing rtc::scoped_ptr<AudioProcessing> rx_audioproc_; // far end AudioProcessing
@ -585,7 +583,7 @@ private:
// VoENetwork // VoENetwork
AudioFrame::SpeechType _outputSpeechType; AudioFrame::SpeechType _outputSpeechType;
// VoEVideoSync // VoEVideoSync
rtc::scoped_ptr<CriticalSectionWrapper> video_sync_lock_; mutable rtc::CriticalSection video_sync_lock_;
uint32_t _average_jitter_buffer_delay_us GUARDED_BY(video_sync_lock_); uint32_t _average_jitter_buffer_delay_us GUARDED_BY(video_sync_lock_);
uint32_t _previousTimestamp; uint32_t _previousTimestamp;
uint16_t _recPacketDelayMs GUARDED_BY(video_sync_lock_); uint16_t _recPacketDelayMs GUARDED_BY(video_sync_lock_);
@ -598,7 +596,7 @@ private:
rtc::scoped_ptr<VoERtcpObserver> rtcp_observer_; rtc::scoped_ptr<VoERtcpObserver> rtcp_observer_;
rtc::scoped_ptr<NetworkPredictor> network_predictor_; rtc::scoped_ptr<NetworkPredictor> network_predictor_;
// An associated send channel. // An associated send channel.
rtc::scoped_ptr<CriticalSectionWrapper> assoc_send_channel_lock_; mutable rtc::CriticalSection assoc_send_channel_lock_;
ChannelOwner associate_send_channel_ GUARDED_BY(assoc_send_channel_lock_); ChannelOwner associate_send_channel_ GUARDED_BY(assoc_send_channel_lock_);
bool pacing_enabled_; bool pacing_enabled_;


@ -48,7 +48,6 @@ ChannelOwner::ChannelRef::ChannelRef(class Channel* channel)
ChannelManager::ChannelManager(uint32_t instance_id, const Config& config) ChannelManager::ChannelManager(uint32_t instance_id, const Config& config)
: instance_id_(instance_id), : instance_id_(instance_id),
last_channel_id_(-1), last_channel_id_(-1),
lock_(CriticalSectionWrapper::CreateCriticalSection()),
config_(config), config_(config),
event_log_(RtcEventLog::Create()) {} event_log_(RtcEventLog::Create()) {}
@ -66,7 +65,7 @@ ChannelOwner ChannelManager::CreateChannelInternal(const Config& config) {
event_log_.get(), config); event_log_.get(), config);
ChannelOwner channel_owner(channel); ChannelOwner channel_owner(channel);
CriticalSectionScoped crit(lock_.get()); rtc::CritScope crit(&lock_);
channels_.push_back(channel_owner); channels_.push_back(channel_owner);
@ -74,7 +73,7 @@ ChannelOwner ChannelManager::CreateChannelInternal(const Config& config) {
} }
ChannelOwner ChannelManager::GetChannel(int32_t channel_id) { ChannelOwner ChannelManager::GetChannel(int32_t channel_id) {
CriticalSectionScoped crit(lock_.get()); rtc::CritScope crit(&lock_);
for (size_t i = 0; i < channels_.size(); ++i) { for (size_t i = 0; i < channels_.size(); ++i) {
if (channels_[i].channel()->ChannelId() == channel_id) if (channels_[i].channel()->ChannelId() == channel_id)
@ -84,7 +83,7 @@ ChannelOwner ChannelManager::GetChannel(int32_t channel_id) {
} }
void ChannelManager::GetAllChannels(std::vector<ChannelOwner>* channels) { void ChannelManager::GetAllChannels(std::vector<ChannelOwner>* channels) {
CriticalSectionScoped crit(lock_.get()); rtc::CritScope crit(&lock_);
*channels = channels_; *channels = channels_;
} }
@ -95,7 +94,7 @@ void ChannelManager::DestroyChannel(int32_t channel_id) {
// Channels while holding a lock, but rather when the method returns. // Channels while holding a lock, but rather when the method returns.
ChannelOwner reference(NULL); ChannelOwner reference(NULL);
{ {
CriticalSectionScoped crit(lock_.get()); rtc::CritScope crit(&lock_);
std::vector<ChannelOwner>::iterator to_delete = channels_.end(); std::vector<ChannelOwner>::iterator to_delete = channels_.end();
for (auto it = channels_.begin(); it != channels_.end(); ++it) { for (auto it = channels_.begin(); it != channels_.end(); ++it) {
Channel* channel = it->channel(); Channel* channel = it->channel();
@ -119,14 +118,14 @@ void ChannelManager::DestroyAllChannels() {
// lock, but rather when the method returns. // lock, but rather when the method returns.
std::vector<ChannelOwner> references; std::vector<ChannelOwner> references;
{ {
CriticalSectionScoped crit(lock_.get()); rtc::CritScope crit(&lock_);
references = channels_; references = channels_;
channels_.clear(); channels_.clear();
} }
} }
size_t ChannelManager::NumOfChannels() const { size_t ChannelManager::NumOfChannels() const {
CriticalSectionScoped crit(lock_.get()); rtc::CritScope crit(&lock_);
return channels_.size(); return channels_.size();
} }


@ -14,10 +14,10 @@
#include <vector> #include <vector>
#include "webrtc/base/constructormagic.h" #include "webrtc/base/constructormagic.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/scoped_ptr.h" #include "webrtc/base/scoped_ptr.h"
#include "webrtc/call/rtc_event_log.h" #include "webrtc/call/rtc_event_log.h"
#include "webrtc/system_wrappers/include/atomic32.h" #include "webrtc/system_wrappers/include/atomic32.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/typedefs.h" #include "webrtc/typedefs.h"
namespace webrtc { namespace webrtc {
@ -123,7 +123,7 @@ class ChannelManager {
Atomic32 last_channel_id_; Atomic32 last_channel_id_;
rtc::scoped_ptr<CriticalSectionWrapper> lock_; mutable rtc::CriticalSection lock_;
std::vector<ChannelOwner> channels_; std::vector<ChannelOwner> channels_;
const Config& config_; const Config& config_;


@ -12,7 +12,6 @@
#include <assert.h> #include <assert.h>
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h" #include "webrtc/system_wrappers/include/trace.h"
namespace webrtc { namespace webrtc {
@ -66,7 +65,6 @@ const int16_t Dtmf_dBm0kHz[37]=
DtmfInband::DtmfInband(int32_t id) : DtmfInband::DtmfInband(int32_t id) :
_critSect(*CriticalSectionWrapper::CreateCriticalSection()),
_id(id), _id(id),
_outputFrequencyHz(8000), _outputFrequencyHz(8000),
_frameLengthSamples(0), _frameLengthSamples(0),
@ -84,7 +82,6 @@ DtmfInband::DtmfInband(int32_t id) :
DtmfInband::~DtmfInband() DtmfInband::~DtmfInband()
{ {
delete &_critSect;
} }
int int
@ -130,7 +127,7 @@ DtmfInband::AddTone(uint8_t eventCode,
int32_t lengthMs, int32_t lengthMs,
int32_t attenuationDb) int32_t attenuationDb)
{ {
CriticalSectionScoped lock(&_critSect); rtc::CritScope lock(&_critSect);
if (attenuationDb > 36 || eventCode > 15) if (attenuationDb > 36 || eventCode > 15)
{ {
@ -159,7 +156,7 @@ DtmfInband::AddTone(uint8_t eventCode,
int int
DtmfInband::ResetTone() DtmfInband::ResetTone()
{ {
CriticalSectionScoped lock(&_critSect); rtc::CritScope lock(&_critSect);
ReInit(); ReInit();
@ -174,7 +171,7 @@ int
DtmfInband::StartTone(uint8_t eventCode, DtmfInband::StartTone(uint8_t eventCode,
int32_t attenuationDb) int32_t attenuationDb)
{ {
CriticalSectionScoped lock(&_critSect); rtc::CritScope lock(&_critSect);
if (attenuationDb > 36 || eventCode > 15) if (attenuationDb > 36 || eventCode > 15)
{ {
@ -200,7 +197,7 @@ DtmfInband::StartTone(uint8_t eventCode,
int int
DtmfInband::StopTone() DtmfInband::StopTone()
{ {
CriticalSectionScoped lock(&_critSect); rtc::CritScope lock(&_critSect);
if (!_playing) if (!_playing)
{ {
@ -222,7 +219,7 @@ DtmfInband::ReInit()
bool bool
DtmfInband::IsAddingTone() DtmfInband::IsAddingTone()
{ {
CriticalSectionScoped lock(&_critSect); rtc::CritScope lock(&_critSect);
return (_remainingSamples > 0 || _playing); return (_remainingSamples > 0 || _playing);
} }
@ -230,7 +227,7 @@ int
DtmfInband::Get10msTone(int16_t output[320], DtmfInband::Get10msTone(int16_t output[320],
uint16_t& outputSizeInSamples) uint16_t& outputSizeInSamples)
{ {
CriticalSectionScoped lock(&_critSect); rtc::CritScope lock(&_critSect);
if (DtmfFix_generate(output, if (DtmfFix_generate(output,
_eventCode, _eventCode,
_attenuationDb, _attenuationDb,
@ -248,6 +245,7 @@ DtmfInband::Get10msTone(int16_t output[320],
void void
DtmfInband::UpdateDelaySinceLastTone() DtmfInband::UpdateDelaySinceLastTone()
{ {
rtc::CritScope lock(&_critSect);
_delaySinceLastToneMS += kDtmfFrameSizeMs; _delaySinceLastToneMS += kDtmfFrameSizeMs;
// avoid wraparound // avoid wraparound
if (_delaySinceLastToneMS > (1<<30)) if (_delaySinceLastToneMS > (1<<30))
@ -259,6 +257,7 @@ DtmfInband::UpdateDelaySinceLastTone()
uint32_t uint32_t
DtmfInband::DelaySinceLastTone() const DtmfInband::DelaySinceLastTone() const
{ {
rtc::CritScope lock(&_critSect);
return _delaySinceLastToneMS; return _delaySinceLastToneMS;
} }
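
Worth noting: the last two hunks do more than swap the lock type. UpdateDelaySinceLastTone() and DelaySinceLastTone() previously touched _delaySinceLastToneMS without taking _critSect; they now lock it, which is also why the header below makes the section mutable (DelaySinceLastTone() is const). A minimal sketch of the resulting guarded-counter shape; the class name, the 10 ms frame size, and the reset value are assumptions, while the locking mirrors the diff:

#include <stdint.h>

#include "webrtc/base/criticalsection.h"

class ToneDelayTracker {
 public:
  // Called once per processed frame.
  void UpdateDelaySinceLastTone() {
    rtc::CritScope lock(&crit_);
    delay_since_last_tone_ms_ += 10;            // kDtmfFrameSizeMs, assumed 10 ms
    if (delay_since_last_tone_ms_ > (1 << 30))  // avoid wraparound
      delay_since_last_tone_ms_ = 0;            // reset value illustrative
  }

  uint32_t DelaySinceLastTone() const {
    rtc::CritScope lock(&crit_);  // const accessor, hence the mutable member
    return delay_since_last_tone_ms_;
  }

 private:
  mutable rtc::CriticalSection crit_;
  uint32_t delay_since_last_tone_ms_ = 0;
};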


@ -13,9 +13,9 @@
#include "webrtc/typedefs.h" #include "webrtc/typedefs.h"
#include "webrtc/voice_engine/voice_engine_defines.h" #include "webrtc/voice_engine/voice_engine_defines.h"
#include "webrtc/base/criticalsection.h"
namespace webrtc { namespace webrtc {
class CriticalSectionWrapper;
class DtmfInband class DtmfInband
{ {
@ -67,7 +67,7 @@ private:
int16_t length); int16_t length);
private: private:
CriticalSectionWrapper& _critSect; mutable rtc::CriticalSection _critSect;
int32_t _id; int32_t _id;
uint16_t _outputFrequencyHz; // {8000, 16000, 32000} uint16_t _outputFrequencyHz; // {8000, 16000, 32000}
int16_t _oldOutputLow[2]; // Data needed for oscillator model int16_t _oldOutputLow[2]; // Data needed for oscillator model


@ -15,7 +15,6 @@ namespace webrtc {
DtmfInbandQueue::DtmfInbandQueue(int32_t id): DtmfInbandQueue::DtmfInbandQueue(int32_t id):
_id(id), _id(id),
_DtmfCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
_nextEmptyIndex(0) _nextEmptyIndex(0)
{ {
memset(_DtmfKey,0, sizeof(_DtmfKey)); memset(_DtmfKey,0, sizeof(_DtmfKey));
@ -25,13 +24,12 @@ DtmfInbandQueue::DtmfInbandQueue(int32_t id):
DtmfInbandQueue::~DtmfInbandQueue() DtmfInbandQueue::~DtmfInbandQueue()
{ {
delete &_DtmfCritsect;
} }
int int
DtmfInbandQueue::AddDtmf(uint8_t key, uint16_t len, uint8_t level) DtmfInbandQueue::AddDtmf(uint8_t key, uint16_t len, uint8_t level)
{ {
CriticalSectionScoped lock(&_DtmfCritsect); rtc::CritScope lock(&_DtmfCritsect);
if (_nextEmptyIndex >= kDtmfInbandMax) if (_nextEmptyIndex >= kDtmfInbandMax)
{ {
@ -50,7 +48,7 @@ DtmfInbandQueue::AddDtmf(uint8_t key, uint16_t len, uint8_t level)
int8_t int8_t
DtmfInbandQueue::NextDtmf(uint16_t* len, uint8_t* level) DtmfInbandQueue::NextDtmf(uint16_t* len, uint8_t* level)
{ {
CriticalSectionScoped lock(&_DtmfCritsect); rtc::CritScope lock(&_DtmfCritsect);
if(!PendingDtmf()) if(!PendingDtmf())
{ {
@ -74,14 +72,14 @@ DtmfInbandQueue::NextDtmf(uint16_t* len, uint8_t* level)
bool bool
DtmfInbandQueue::PendingDtmf() DtmfInbandQueue::PendingDtmf()
{ {
CriticalSectionScoped lock(&_DtmfCritsect); rtc::CritScope lock(&_DtmfCritsect);
return _nextEmptyIndex > 0; return _nextEmptyIndex > 0;
} }
void void
DtmfInbandQueue::ResetDtmf() DtmfInbandQueue::ResetDtmf()
{ {
CriticalSectionScoped lock(&_DtmfCritsect); rtc::CritScope lock(&_DtmfCritsect);
_nextEmptyIndex = 0; _nextEmptyIndex = 0;
} }


@ -11,7 +11,7 @@
#ifndef WEBRTC_VOICE_ENGINE_DTMF_INBAND_QUEUE_H #ifndef WEBRTC_VOICE_ENGINE_DTMF_INBAND_QUEUE_H
#define WEBRTC_VOICE_ENGINE_DTMF_INBAND_QUEUE_H #define WEBRTC_VOICE_ENGINE_DTMF_INBAND_QUEUE_H
#include "webrtc/system_wrappers/include/critical_section_wrapper.h" #include "webrtc/base/criticalsection.h"
#include "webrtc/typedefs.h" #include "webrtc/typedefs.h"
#include "webrtc/voice_engine/voice_engine_defines.h" #include "webrtc/voice_engine/voice_engine_defines.h"
@ -38,7 +38,7 @@ private:
enum {kDtmfInbandMax = 20}; enum {kDtmfInbandMax = 20};
int32_t _id; int32_t _id;
CriticalSectionWrapper& _DtmfCritsect; rtc::CriticalSection _DtmfCritsect;
uint8_t _nextEmptyIndex; uint8_t _nextEmptyIndex;
uint8_t _DtmfKey[kDtmfInbandMax]; uint8_t _DtmfKey[kDtmfInbandMax];
uint16_t _DtmfLen[kDtmfInbandMax]; uint16_t _DtmfLen[kDtmfInbandMax];


@ -10,7 +10,6 @@
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h" #include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
#include "webrtc/modules/include/module_common_types.h" #include "webrtc/modules/include/module_common_types.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/voice_engine/level_indicator.h" #include "webrtc/voice_engine/level_indicator.h"
namespace webrtc { namespace webrtc {
@ -25,7 +24,6 @@ const int8_t permutation[33] =
AudioLevel::AudioLevel() : AudioLevel::AudioLevel() :
_critSect(*CriticalSectionWrapper::CreateCriticalSection()),
_absMax(0), _absMax(0),
_count(0), _count(0),
_currentLevel(0), _currentLevel(0),
@ -33,12 +31,11 @@ AudioLevel::AudioLevel() :
} }
AudioLevel::~AudioLevel() { AudioLevel::~AudioLevel() {
delete &_critSect;
} }
void AudioLevel::Clear() void AudioLevel::Clear()
{ {
CriticalSectionScoped cs(&_critSect); rtc::CritScope cs(&_critSect);
_absMax = 0; _absMax = 0;
_count = 0; _count = 0;
_currentLevel = 0; _currentLevel = 0;
@ -56,7 +53,7 @@ void AudioLevel::ComputeLevel(const AudioFrame& audioFrame)
// Protect member access using a lock since this method is called on a // Protect member access using a lock since this method is called on a
// dedicated audio thread in the RecordedDataIsAvailable() callback. // dedicated audio thread in the RecordedDataIsAvailable() callback.
CriticalSectionScoped cs(&_critSect); rtc::CritScope cs(&_critSect);
if (absValue > _absMax) if (absValue > _absMax)
_absMax = absValue; _absMax = absValue;
@ -88,13 +85,13 @@ void AudioLevel::ComputeLevel(const AudioFrame& audioFrame)
int8_t AudioLevel::Level() const int8_t AudioLevel::Level() const
{ {
CriticalSectionScoped cs(&_critSect); rtc::CritScope cs(&_critSect);
return _currentLevel; return _currentLevel;
} }
int16_t AudioLevel::LevelFullRange() const int16_t AudioLevel::LevelFullRange() const
{ {
CriticalSectionScoped cs(&_critSect); rtc::CritScope cs(&_critSect);
return _currentLevelFullRange; return _currentLevelFullRange;
} }


@ -11,13 +11,13 @@
#ifndef WEBRTC_VOICE_ENGINE_LEVEL_INDICATOR_H #ifndef WEBRTC_VOICE_ENGINE_LEVEL_INDICATOR_H
#define WEBRTC_VOICE_ENGINE_LEVEL_INDICATOR_H #define WEBRTC_VOICE_ENGINE_LEVEL_INDICATOR_H
#include "webrtc/base/criticalsection.h"
#include "webrtc/typedefs.h" #include "webrtc/typedefs.h"
#include "webrtc/voice_engine/voice_engine_defines.h" #include "webrtc/voice_engine/voice_engine_defines.h"
namespace webrtc { namespace webrtc {
class AudioFrame; class AudioFrame;
class CriticalSectionWrapper;
namespace voe { namespace voe {
class AudioLevel class AudioLevel
@ -40,7 +40,7 @@ public:
private: private:
enum { kUpdateFrequency = 10}; enum { kUpdateFrequency = 10};
CriticalSectionWrapper& _critSect; mutable rtc::CriticalSection _critSect;
int16_t _absMax; int16_t _absMax;
int16_t _count; int16_t _count;


@ -8,7 +8,6 @@
* be found in the AUTHORS file in the root of the source tree. * be found in the AUTHORS file in the root of the source tree.
*/ */
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/tick_util.h" #include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/voice_engine/monitor_module.h" #include "webrtc/voice_engine/monitor_module.h"
@ -18,20 +17,18 @@ namespace voe {
MonitorModule::MonitorModule() : MonitorModule::MonitorModule() :
_observerPtr(NULL), _observerPtr(NULL),
_callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
_lastProcessTime(TickTime::MillisecondTimestamp()) _lastProcessTime(TickTime::MillisecondTimestamp())
{ {
} }
MonitorModule::~MonitorModule() MonitorModule::~MonitorModule()
{ {
delete &_callbackCritSect;
} }
int32_t int32_t
MonitorModule::RegisterObserver(MonitorObserver& observer) MonitorModule::RegisterObserver(MonitorObserver& observer)
{ {
CriticalSectionScoped lock(&_callbackCritSect); rtc::CritScope lock(&_callbackCritSect);
if (_observerPtr) if (_observerPtr)
{ {
return -1; return -1;
@ -43,7 +40,7 @@ MonitorModule::RegisterObserver(MonitorObserver& observer)
int32_t int32_t
MonitorModule::DeRegisterObserver() MonitorModule::DeRegisterObserver()
{ {
CriticalSectionScoped lock(&_callbackCritSect); rtc::CritScope lock(&_callbackCritSect);
if (!_observerPtr) if (!_observerPtr)
{ {
return 0; return 0;
@ -64,9 +61,9 @@ int32_t
MonitorModule::Process() MonitorModule::Process()
{ {
_lastProcessTime = TickTime::MillisecondTimestamp(); _lastProcessTime = TickTime::MillisecondTimestamp();
rtc::CritScope lock(&_callbackCritSect);
if (_observerPtr) if (_observerPtr)
{ {
CriticalSectionScoped lock(&_callbackCritSect);
_observerPtr->OnPeriodicProcess(); _observerPtr->OnPeriodicProcess();
} }
return 0; return 0;


@ -11,6 +11,8 @@
#ifndef WEBRTC_VOICE_ENGINE_MONITOR_MODULE_H #ifndef WEBRTC_VOICE_ENGINE_MONITOR_MODULE_H
#define WEBRTC_VOICE_ENGINE_MONITOR_MODULE_H #define WEBRTC_VOICE_ENGINE_MONITOR_MODULE_H
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/modules/include/module.h" #include "webrtc/modules/include/module.h"
#include "webrtc/typedefs.h" #include "webrtc/typedefs.h"
#include "webrtc/voice_engine/voice_engine_defines.h" #include "webrtc/voice_engine/voice_engine_defines.h"
@ -25,8 +27,6 @@ protected:
namespace webrtc { namespace webrtc {
class CriticalSectionWrapper;
namespace voe { namespace voe {
class MonitorModule : public Module class MonitorModule : public Module
@ -45,8 +45,8 @@ public: // module
int32_t Process() override; int32_t Process() override;
private: private:
MonitorObserver* _observerPtr; rtc::CriticalSection _callbackCritSect;
CriticalSectionWrapper& _callbackCritSect; MonitorObserver* _observerPtr GUARDED_BY(_callbackCritSect);
int64_t _lastProcessTime; int64_t _lastProcessTime;
}; };
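
The MonitorModule change is also slightly more than a type swap: Process() now takes _callbackCritSect before the _observerPtr null-check rather than inside the branch, and the header adds a GUARDED_BY annotation so clang's thread-safety analysis can verify that the observer pointer is only touched under the lock. A small self-contained sketch of what that annotation enforces; names are illustrative, and it assumes a clang build with -Wthread-safety enabled:

#include "webrtc/base/criticalsection.h"
#include "webrtc/base/thread_annotations.h"

class Observer {
 public:
  virtual ~Observer() {}
  virtual void OnPeriodicProcess() = 0;
};

class Monitor {
 public:
  void RegisterObserver(Observer* observer) {
    rtc::CritScope lock(&crit_);
    observer_ = observer;  // OK: crit_ is held
  }

  void Process() {
    rtc::CritScope lock(&crit_);  // taken before the null-check, as in the CL
    if (observer_)
      observer_->OnPeriodicProcess();
  }

  // void Clear() { observer_ = nullptr; }  // unguarded write: would trigger a
  //                                        // -Wthread-safety warning

 private:
  rtc::CriticalSection crit_;
  Observer* observer_ GUARDED_BY(crit_) = nullptr;
};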


@ -13,7 +13,6 @@
#include "webrtc/base/format_macros.h" #include "webrtc/base/format_macros.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h" #include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/utility/include/audio_frame_operations.h" #include "webrtc/modules/utility/include/audio_frame_operations.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/file_wrapper.h" #include "webrtc/system_wrappers/include/file_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h" #include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/voice_engine/include/voe_external_media.h" #include "webrtc/voice_engine/include/voe_external_media.h"
@ -68,7 +67,7 @@ void OutputMixer::RecordFileEnded(int32_t id)
"OutputMixer::RecordFileEnded(id=%d)", id); "OutputMixer::RecordFileEnded(id=%d)", id);
assert(id == _instanceId); assert(id == _instanceId);
CriticalSectionScoped cs(&_fileCritSect); rtc::CritScope cs(&_fileCritSect);
_outputFileRecording = false; _outputFileRecording = false;
WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1), WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
"OutputMixer::RecordFileEnded() =>" "OutputMixer::RecordFileEnded() =>"
@ -92,8 +91,6 @@ OutputMixer::Create(OutputMixer*& mixer, uint32_t instanceId)
} }
OutputMixer::OutputMixer(uint32_t instanceId) : OutputMixer::OutputMixer(uint32_t instanceId) :
_callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
_fileCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
_mixerModule(*AudioConferenceMixer::Create(instanceId)), _mixerModule(*AudioConferenceMixer::Create(instanceId)),
_audioLevel(), _audioLevel(),
_dtmfGenerator(instanceId), _dtmfGenerator(instanceId),
@ -138,7 +135,7 @@ OutputMixer::~OutputMixer()
DeRegisterExternalMediaProcessing(); DeRegisterExternalMediaProcessing();
} }
{ {
CriticalSectionScoped cs(&_fileCritSect); rtc::CritScope cs(&_fileCritSect);
if (_outputFileRecorderPtr) if (_outputFileRecorderPtr)
{ {
_outputFileRecorderPtr->RegisterModuleFileCallback(NULL); _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
@ -149,8 +146,6 @@ OutputMixer::~OutputMixer()
} }
_mixerModule.UnRegisterMixedStreamCallback(); _mixerModule.UnRegisterMixedStreamCallback();
delete &_mixerModule; delete &_mixerModule;
delete &_callbackCritSect;
delete &_fileCritSect;
} }
int32_t int32_t
@ -178,7 +173,7 @@ int OutputMixer::RegisterExternalMediaProcessing(
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1), WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
"OutputMixer::RegisterExternalMediaProcessing()"); "OutputMixer::RegisterExternalMediaProcessing()");
CriticalSectionScoped cs(&_callbackCritSect); rtc::CritScope cs(&_callbackCritSect);
_externalMediaCallbackPtr = &proccess_object; _externalMediaCallbackPtr = &proccess_object;
_externalMedia = true; _externalMedia = true;
@ -190,7 +185,7 @@ int OutputMixer::DeRegisterExternalMediaProcessing()
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1), WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
"OutputMixer::DeRegisterExternalMediaProcessing()"); "OutputMixer::DeRegisterExternalMediaProcessing()");
CriticalSectionScoped cs(&_callbackCritSect); rtc::CritScope cs(&_callbackCritSect);
_externalMedia = false; _externalMedia = false;
_externalMediaCallbackPtr = NULL; _externalMediaCallbackPtr = NULL;
@ -314,7 +309,7 @@ int OutputMixer::StartRecordingPlayout(const char* fileName,
format = kFileFormatCompressedFile; format = kFileFormatCompressedFile;
} }
CriticalSectionScoped cs(&_fileCritSect); rtc::CritScope cs(&_fileCritSect);
// Destroy the old instance // Destroy the old instance
if (_outputFileRecorderPtr) if (_outputFileRecorderPtr)
@ -394,7 +389,7 @@ int OutputMixer::StartRecordingPlayout(OutStream* stream,
format = kFileFormatCompressedFile; format = kFileFormatCompressedFile;
} }
CriticalSectionScoped cs(&_fileCritSect); rtc::CritScope cs(&_fileCritSect);
// Destroy the old instance // Destroy the old instance
if (_outputFileRecorderPtr) if (_outputFileRecorderPtr)
@ -445,7 +440,7 @@ int OutputMixer::StopRecordingPlayout()
return -1; return -1;
} }
CriticalSectionScoped cs(&_fileCritSect); rtc::CritScope cs(&_fileCritSect);
if (_outputFileRecorderPtr->StopRecording() != 0) if (_outputFileRecorderPtr->StopRecording() != 0)
{ {
@ -472,7 +467,7 @@ int OutputMixer::GetMixedAudio(int sample_rate_hz,
// --- Record playout if enabled // --- Record playout if enabled
{ {
CriticalSectionScoped cs(&_fileCritSect); rtc::CritScope cs(&_fileCritSect);
if (_outputFileRecording && _outputFileRecorderPtr) if (_outputFileRecording && _outputFileRecorderPtr)
_outputFileRecorderPtr->RecordAudioToFile(_audioFrame); _outputFileRecorderPtr->RecordAudioToFile(_audioFrame);
} }
@ -536,7 +531,7 @@ OutputMixer::DoOperationsOnCombinedSignal(bool feed_data_to_apm)
// --- External media processing // --- External media processing
{ {
CriticalSectionScoped cs(&_callbackCritSect); rtc::CritScope cs(&_callbackCritSect);
if (_externalMedia) if (_externalMedia)
{ {
const bool is_stereo = (_audioFrame.num_channels_ == 2); const bool is_stereo = (_audioFrame.num_channels_ == 2);


@ -11,6 +11,7 @@
#ifndef WEBRTC_VOICE_ENGINE_OUTPUT_MIXER_H_ #ifndef WEBRTC_VOICE_ENGINE_OUTPUT_MIXER_H_
#define WEBRTC_VOICE_ENGINE_OUTPUT_MIXER_H_ #define WEBRTC_VOICE_ENGINE_OUTPUT_MIXER_H_
#include "webrtc/base/criticalsection.h"
#include "webrtc/common_audio/resampler/include/push_resampler.h" #include "webrtc/common_audio/resampler/include/push_resampler.h"
#include "webrtc/common_types.h" #include "webrtc/common_types.h"
#include "webrtc/modules/audio_conference_mixer/include/audio_conference_mixer.h" #include "webrtc/modules/audio_conference_mixer/include/audio_conference_mixer.h"
@ -23,7 +24,6 @@
namespace webrtc { namespace webrtc {
class AudioProcessing; class AudioProcessing;
class CriticalSectionWrapper;
class FileWrapper; class FileWrapper;
class VoEMediaProcess; class VoEMediaProcess;
@ -108,10 +108,9 @@ private:
Statistics* _engineStatisticsPtr; Statistics* _engineStatisticsPtr;
AudioProcessing* _audioProcessingModulePtr; AudioProcessing* _audioProcessingModulePtr;
// owns rtc::CriticalSection _callbackCritSect;
CriticalSectionWrapper& _callbackCritSect;
// protect the _outputFileRecorderPtr and _outputFileRecording // protect the _outputFileRecorderPtr and _outputFileRecording
CriticalSectionWrapper& _fileCritSect; rtc::CriticalSection _fileCritSect;
AudioConferenceMixer& _mixerModule; AudioConferenceMixer& _mixerModule;
AudioFrame _audioFrame; AudioFrame _audioFrame;
// Converts mixed audio to the audio device output rate. // Converts mixed audio to the audio device output rate.
View File
@ -11,7 +11,6 @@
#include "webrtc/voice_engine/shared_data.h" #include "webrtc/voice_engine/shared_data.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h" #include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h" #include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/voice_engine/channel.h" #include "webrtc/voice_engine/channel.h"
#include "webrtc/voice_engine/output_mixer.h" #include "webrtc/voice_engine/output_mixer.h"
@ -25,7 +24,6 @@ static int32_t _gInstanceCounter = 0;
SharedData::SharedData(const Config& config) SharedData::SharedData(const Config& config)
: _instanceId(++_gInstanceCounter), : _instanceId(++_gInstanceCounter),
_apiCritPtr(CriticalSectionWrapper::CreateCriticalSection()),
_channelManager(_gInstanceCounter, config), _channelManager(_gInstanceCounter, config),
_engineStatistics(_gInstanceCounter), _engineStatistics(_gInstanceCounter),
_audioDevicePtr(NULL), _audioDevicePtr(NULL),
@ -51,7 +49,6 @@ SharedData::~SharedData()
if (_audioDevicePtr) { if (_audioDevicePtr) {
_audioDevicePtr->Release(); _audioDevicePtr->Release();
} }
delete _apiCritPtr;
_moduleProcessThreadPtr->Stop(); _moduleProcessThreadPtr->Stop();
Trace::ReturnTrace(); Trace::ReturnTrace();
} }
View File
@ -11,6 +11,7 @@
#ifndef WEBRTC_VOICE_ENGINE_SHARED_DATA_H #ifndef WEBRTC_VOICE_ENGINE_SHARED_DATA_H
#define WEBRTC_VOICE_ENGINE_SHARED_DATA_H #define WEBRTC_VOICE_ENGINE_SHARED_DATA_H
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/scoped_ptr.h" #include "webrtc/base/scoped_ptr.h"
#include "webrtc/modules/audio_device/include/audio_device.h" #include "webrtc/modules/audio_device/include/audio_device.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h" #include "webrtc/modules/audio_processing/include/audio_processing.h"
@ -23,7 +24,6 @@ class ProcessThread;
namespace webrtc { namespace webrtc {
class Config; class Config;
class CriticalSectionWrapper;
namespace voe { namespace voe {
@ -43,7 +43,7 @@ public:
void set_audio_processing(AudioProcessing* audio_processing); void set_audio_processing(AudioProcessing* audio_processing);
TransmitMixer* transmit_mixer() { return _transmitMixerPtr; } TransmitMixer* transmit_mixer() { return _transmitMixerPtr; }
OutputMixer* output_mixer() { return _outputMixerPtr; } OutputMixer* output_mixer() { return _outputMixerPtr; }
CriticalSectionWrapper* crit_sec() { return _apiCritPtr; } rtc::CriticalSection* crit_sec() { return &_apiCritPtr; }
ProcessThread* process_thread() { return _moduleProcessThreadPtr.get(); } ProcessThread* process_thread() { return _moduleProcessThreadPtr.get(); }
AudioDeviceModule::AudioLayer audio_device_layer() const { AudioDeviceModule::AudioLayer audio_device_layer() const {
return _audioDeviceLayer; return _audioDeviceLayer;
@ -63,7 +63,7 @@ public:
protected: protected:
const uint32_t _instanceId; const uint32_t _instanceId;
CriticalSectionWrapper* _apiCritPtr; mutable rtc::CriticalSection _apiCritPtr;
ChannelManager _channelManager; ChannelManager _channelManager;
Statistics _engineStatistics; Statistics _engineStatistics;
AudioDeviceModule* _audioDevicePtr; AudioDeviceModule* _audioDevicePtr;
View File
@ -13,7 +13,6 @@
#include "webrtc/voice_engine/statistics.h" #include "webrtc/voice_engine/statistics.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h" #include "webrtc/system_wrappers/include/trace.h"
namespace webrtc { namespace webrtc {
@ -21,7 +20,6 @@ namespace webrtc {
namespace voe { namespace voe {
Statistics::Statistics(uint32_t instanceId) : Statistics::Statistics(uint32_t instanceId) :
_critPtr(CriticalSectionWrapper::CreateCriticalSection()),
_instanceId(instanceId), _instanceId(instanceId),
_lastError(0), _lastError(0),
_isInitialized(false) _isInitialized(false)
@ -30,11 +28,6 @@ Statistics::Statistics(uint32_t instanceId) :
Statistics::~Statistics() Statistics::~Statistics()
{ {
if (_critPtr)
{
delete _critPtr;
_critPtr = NULL;
}
} }
int32_t Statistics::SetInitialized() int32_t Statistics::SetInitialized()
@ -56,7 +49,7 @@ bool Statistics::Initialized() const
int32_t Statistics::SetLastError(int32_t error) const int32_t Statistics::SetLastError(int32_t error) const
{ {
CriticalSectionScoped cs(_critPtr); rtc::CritScope cs(&lock_);
_lastError = error; _lastError = error;
return 0; return 0;
} }
@ -64,11 +57,11 @@ int32_t Statistics::SetLastError(int32_t error) const
int32_t Statistics::SetLastError(int32_t error, int32_t Statistics::SetLastError(int32_t error,
TraceLevel level) const TraceLevel level) const
{ {
CriticalSectionScoped cs(_critPtr);
_lastError = error;
WEBRTC_TRACE(level, kTraceVoice, VoEId(_instanceId,-1), WEBRTC_TRACE(level, kTraceVoice, VoEId(_instanceId,-1),
"error code is set to %d", "error code is set to %d",
_lastError); error);
rtc::CritScope cs(&lock_);
_lastError = error;
return 0; return 0;
} }
@ -76,22 +69,28 @@ int32_t Statistics::SetLastError(
int32_t error, int32_t error,
TraceLevel level, const char* msg) const TraceLevel level, const char* msg) const
{ {
CriticalSectionScoped cs(_critPtr);
char traceMessage[KTraceMaxMessageSize]; char traceMessage[KTraceMaxMessageSize];
assert(strlen(msg) < KTraceMaxMessageSize); assert(strlen(msg) < KTraceMaxMessageSize);
_lastError = error;
sprintf(traceMessage, "%s (error=%d)", msg, error); sprintf(traceMessage, "%s (error=%d)", msg, error);
WEBRTC_TRACE(level, kTraceVoice, VoEId(_instanceId,-1), "%s", WEBRTC_TRACE(level, kTraceVoice, VoEId(_instanceId,-1), "%s",
traceMessage); traceMessage);
rtc::CritScope cs(&lock_);
_lastError = error;
return 0; return 0;
} }
int32_t Statistics::LastError() const int32_t Statistics::LastError() const
{ {
CriticalSectionScoped cs(_critPtr); int32_t ret;
{
rtc::CritScope cs(&lock_);
ret = _lastError;
}
WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1), WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
"LastError() => %d", _lastError); "LastError() => %d", ret);
return _lastError; return ret;
} }
} // namespace voe } // namespace voe
View File
@ -11,14 +11,13 @@
#ifndef WEBRTC_VOICE_ENGINE_STATISTICS_H #ifndef WEBRTC_VOICE_ENGINE_STATISTICS_H
#define WEBRTC_VOICE_ENGINE_STATISTICS_H #define WEBRTC_VOICE_ENGINE_STATISTICS_H
#include "webrtc/base/criticalsection.h"
#include "webrtc/common_types.h" #include "webrtc/common_types.h"
#include "webrtc/typedefs.h" #include "webrtc/typedefs.h"
#include "webrtc/voice_engine/include/voe_errors.h" #include "webrtc/voice_engine/include/voe_errors.h"
#include "webrtc/voice_engine/voice_engine_defines.h" #include "webrtc/voice_engine/voice_engine_defines.h"
namespace webrtc { namespace webrtc {
class CriticalSectionWrapper;
namespace voe { namespace voe {
class Statistics class Statistics
@ -40,7 +39,7 @@ class Statistics
int32_t LastError() const; int32_t LastError() const;
private: private:
CriticalSectionWrapper* _critPtr; mutable rtc::CriticalSection lock_;
const uint32_t _instanceId; const uint32_t _instanceId;
mutable int32_t _lastError; mutable int32_t _lastError;
bool _isInitialized; bool _isInitialized;
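For readers unfamiliar with the replacement API, here is a minimal, illustrative sketch (not part of the patch) of the locking pattern voe::Statistics now follows; the class and member names below are invented for the example:

    #include <stdint.h>

    #include "webrtc/base/criticalsection.h"

    // Hypothetical example class: a const setter/getter pair that serializes
    // access to a mutable member with a scoped lock, as Statistics does above.
    class LastErrorHolder {
     public:
      void SetLastError(int32_t error) const {
        rtc::CritScope cs(&lock_);  // locks on construction, unlocks on scope exit
        last_error_ = error;
      }
      int32_t LastError() const {
        rtc::CritScope cs(&lock_);
        return last_error_;
      }

     private:
      mutable rtc::CriticalSection lock_;  // plain member: no CreateCriticalSection()/delete
      mutable int32_t last_error_ = 0;
    };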
View File
@ -37,9 +37,7 @@ namespace {
namespace voetest { namespace voetest {
ConferenceTransport::ConferenceTransport() ConferenceTransport::ConferenceTransport()
: pq_crit_(webrtc::CriticalSectionWrapper::CreateCriticalSection()), : packet_event_(webrtc::EventWrapper::Create()),
stream_crit_(webrtc::CriticalSectionWrapper::CreateCriticalSection()),
packet_event_(webrtc::EventWrapper::Create()),
thread_(Run, this, "ConferenceTransport"), thread_(Run, this, "ConferenceTransport"),
rtt_ms_(0), rtt_ms_(0),
stream_count_(0), stream_count_(0),
@ -120,7 +118,7 @@ bool ConferenceTransport::SendRtcp(const uint8_t* data, size_t len) {
int ConferenceTransport::GetReceiverChannelForSsrc(unsigned int sender_ssrc) int ConferenceTransport::GetReceiverChannelForSsrc(unsigned int sender_ssrc)
const { const {
webrtc::CriticalSectionScoped lock(stream_crit_.get()); rtc::CritScope lock(&stream_crit_);
auto it = streams_.find(sender_ssrc); auto it = streams_.find(sender_ssrc);
if (it != streams_.end()) { if (it != streams_.end()) {
return it->second.second; return it->second.second;
@ -132,7 +130,7 @@ void ConferenceTransport::StorePacket(Packet::Type type,
const void* data, const void* data,
size_t len) { size_t len) {
{ {
webrtc::CriticalSectionScoped lock(pq_crit_.get()); rtc::CritScope lock(&pq_crit_);
packet_queue_.push_back(Packet(type, data, len, rtc::Time())); packet_queue_.push_back(Packet(type, data, len, rtc::Time()));
} }
packet_event_->Set(); packet_event_->Set();
@ -198,7 +196,7 @@ bool ConferenceTransport::DispatchPackets() {
while (true) { while (true) {
Packet packet; Packet packet;
{ {
webrtc::CriticalSectionScoped lock(pq_crit_.get()); rtc::CritScope lock(&pq_crit_);
if (packet_queue_.empty()) if (packet_queue_.empty())
break; break;
packet = packet_queue_.front(); packet = packet_queue_.front();
@ -245,14 +243,14 @@ unsigned int ConferenceTransport::AddStream(std::string file_name,
EXPECT_EQ(0, local_rtp_rtcp_->SetLocalSSRC(new_receiver, kLocalSsrc)); EXPECT_EQ(0, local_rtp_rtcp_->SetLocalSSRC(new_receiver, kLocalSsrc));
{ {
webrtc::CriticalSectionScoped lock(stream_crit_.get()); rtc::CritScope lock(&stream_crit_);
streams_[remote_ssrc] = std::make_pair(new_sender, new_receiver); streams_[remote_ssrc] = std::make_pair(new_sender, new_receiver);
} }
return remote_ssrc; // remote ssrc used as stream id. return remote_ssrc; // remote ssrc used as stream id.
} }
bool ConferenceTransport::RemoveStream(unsigned int id) { bool ConferenceTransport::RemoveStream(unsigned int id) {
webrtc::CriticalSectionScoped lock(stream_crit_.get()); rtc::CritScope lock(&stream_crit_);
auto it = streams_.find(id); auto it = streams_.find(id);
if (it == streams_.end()) { if (it == streams_.end()) {
return false; return false;
View File
@ -17,11 +17,11 @@
#include "testing/gtest/include/gtest/gtest.h" #include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/basictypes.h" #include "webrtc/base/basictypes.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/platform_thread.h" #include "webrtc/base/platform_thread.h"
#include "webrtc/base/scoped_ptr.h" #include "webrtc/base/scoped_ptr.h"
#include "webrtc/common_types.h" #include "webrtc/common_types.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h" #include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/event_wrapper.h" #include "webrtc/system_wrappers/include/event_wrapper.h"
#include "webrtc/voice_engine/include/voe_base.h" #include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_codec.h" #include "webrtc/voice_engine/include/voe_codec.h"
@ -128,17 +128,16 @@ class ConferenceTransport: public webrtc::Transport {
void SendPacket(const Packet& packet); void SendPacket(const Packet& packet);
bool DispatchPackets(); bool DispatchPackets();
const rtc::scoped_ptr<webrtc::CriticalSectionWrapper> pq_crit_; mutable rtc::CriticalSection pq_crit_;
const rtc::scoped_ptr<webrtc::CriticalSectionWrapper> stream_crit_; mutable rtc::CriticalSection stream_crit_;
const rtc::scoped_ptr<webrtc::EventWrapper> packet_event_; const rtc::scoped_ptr<webrtc::EventWrapper> packet_event_;
rtc::PlatformThread thread_; rtc::PlatformThread thread_;
unsigned int rtt_ms_; unsigned int rtt_ms_;
unsigned int stream_count_; unsigned int stream_count_;
std::map<unsigned int, std::pair<int, int>> streams_ std::map<unsigned int, std::pair<int, int>> streams_ GUARDED_BY(stream_crit_);
GUARDED_BY(stream_crit_.get()); std::deque<Packet> packet_queue_ GUARDED_BY(pq_crit_);
std::deque<Packet> packet_queue_ GUARDED_BY(pq_crit_.get());
int local_sender_; // Channel Id of local sender int local_sender_; // Channel Id of local sender
int reflector_; int reflector_;
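As a side note, the GUARDED_BY annotations above can now name the lock object directly rather than a pointer obtained via .get(). Below is a minimal sketch of that usage, assuming the thread-safety macros come from webrtc/base/thread_annotations.h; the names are illustrative, not from the patch:

    #include <stdint.h>
    #include <map>

    #include "webrtc/base/criticalsection.h"
    #include "webrtc/base/thread_annotations.h"  // assumed location of GUARDED_BY

    // Hypothetical example class: the annotation references the lock member
    // itself, so the analyzer checks that streams_ is only touched under it.
    class StreamTable {
     public:
      void Add(uint32_t ssrc, int channel) {
        rtc::CritScope lock(&stream_crit_);
        streams_[ssrc] = channel;
      }
      bool Remove(uint32_t ssrc) {
        rtc::CritScope lock(&stream_crit_);
        return streams_.erase(ssrc) > 0;
      }

     private:
      rtc::CriticalSection stream_crit_;
      std::map<uint32_t, int> streams_ GUARDED_BY(stream_crit_);
    };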
View File
@ -13,12 +13,12 @@
#include <deque> #include <deque>
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/platform_thread.h" #include "webrtc/base/platform_thread.h"
#include "webrtc/base/scoped_ptr.h" #include "webrtc/base/scoped_ptr.h"
#include "webrtc/common_types.h" #include "webrtc/common_types.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h" #include "webrtc/modules/rtp_rtcp/source/byte_io.h"
#include "webrtc/system_wrappers/include/atomic32.h" #include "webrtc/system_wrappers/include/atomic32.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/event_wrapper.h" #include "webrtc/system_wrappers/include/event_wrapper.h"
#include "webrtc/system_wrappers/include/sleep.h" #include "webrtc/system_wrappers/include/sleep.h"
#include "webrtc/voice_engine/test/auto_test/fixtures/before_initialization_fixture.h" #include "webrtc/voice_engine/test/auto_test/fixtures/before_initialization_fixture.h"
@ -28,8 +28,7 @@ class TestErrorObserver;
class LoopBackTransport : public webrtc::Transport { class LoopBackTransport : public webrtc::Transport {
public: public:
LoopBackTransport(webrtc::VoENetwork* voe_network, int channel) LoopBackTransport(webrtc::VoENetwork* voe_network, int channel)
: crit_(webrtc::CriticalSectionWrapper::CreateCriticalSection()), : packet_event_(webrtc::EventWrapper::Create()),
packet_event_(webrtc::EventWrapper::Create()),
thread_(NetworkProcess, this, "LoopBackTransport"), thread_(NetworkProcess, this, "LoopBackTransport"),
channel_(channel), channel_(channel),
voe_network_(voe_network), voe_network_(voe_network),
@ -62,7 +61,7 @@ class LoopBackTransport : public webrtc::Transport {
} }
void AddChannel(uint32_t ssrc, int channel) { void AddChannel(uint32_t ssrc, int channel) {
webrtc::CriticalSectionScoped lock(crit_.get()); rtc::CritScope lock(&crit_);
channels_[ssrc] = channel; channels_[ssrc] = channel;
} }
@ -85,7 +84,7 @@ class LoopBackTransport : public webrtc::Transport {
const void* data, const void* data,
size_t len) { size_t len) {
{ {
webrtc::CriticalSectionScoped lock(crit_.get()); rtc::CritScope lock(&crit_);
packet_queue_.push_back(Packet(type, data, len)); packet_queue_.push_back(Packet(type, data, len));
} }
packet_event_->Set(); packet_event_->Set();
@ -110,7 +109,7 @@ class LoopBackTransport : public webrtc::Transport {
Packet p; Packet p;
int channel = channel_; int channel = channel_;
{ {
webrtc::CriticalSectionScoped lock(crit_.get()); rtc::CritScope lock(&crit_);
if (packet_queue_.empty()) if (packet_queue_.empty())
break; break;
p = packet_queue_.front(); p = packet_queue_.front();
@ -143,12 +142,12 @@ class LoopBackTransport : public webrtc::Transport {
return true; return true;
} }
const rtc::scoped_ptr<webrtc::CriticalSectionWrapper> crit_; mutable rtc::CriticalSection crit_;
const rtc::scoped_ptr<webrtc::EventWrapper> packet_event_; const rtc::scoped_ptr<webrtc::EventWrapper> packet_event_;
rtc::PlatformThread thread_; rtc::PlatformThread thread_;
std::deque<Packet> packet_queue_ GUARDED_BY(crit_.get()); std::deque<Packet> packet_queue_ GUARDED_BY(crit_);
const int channel_; const int channel_;
std::map<uint32_t, int> channels_ GUARDED_BY(crit_.get()); std::map<uint32_t, int> channels_ GUARDED_BY(crit_);
webrtc::VoENetwork* const voe_network_; webrtc::VoENetwork* const voe_network_;
webrtc::Atomic32 transmitted_packets_; webrtc::Atomic32 transmitted_packets_;
}; };
View File
@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree. * be found in the AUTHORS file in the root of the source tree.
*/ */
#include "webrtc/base/criticalsection.h"
#include "webrtc/system_wrappers/include/atomic32.h" #include "webrtc/system_wrappers/include/atomic32.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/event_wrapper.h" #include "webrtc/system_wrappers/include/event_wrapper.h"
#include "webrtc/test/testsupport/fileutils.h" #include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/voice_engine/test/auto_test/fixtures/after_streaming_fixture.h" #include "webrtc/voice_engine/test/auto_test/fixtures/after_streaming_fixture.h"
@ -17,9 +17,7 @@
class TestRtpObserver : public webrtc::VoERTPObserver { class TestRtpObserver : public webrtc::VoERTPObserver {
public: public:
TestRtpObserver() TestRtpObserver() : changed_ssrc_event_(voetest::EventWrapper::Create()) {}
: crit_(voetest::CriticalSectionWrapper::CreateCriticalSection()),
changed_ssrc_event_(voetest::EventWrapper::Create()) {}
virtual ~TestRtpObserver() {} virtual ~TestRtpObserver() {}
virtual void OnIncomingCSRCChanged(int channel, virtual void OnIncomingCSRCChanged(int channel,
unsigned int CSRC, unsigned int CSRC,
@ -31,11 +29,11 @@ class TestRtpObserver : public webrtc::VoERTPObserver {
EXPECT_EQ(voetest::kEventSignaled, changed_ssrc_event_->Wait(10*1000)); EXPECT_EQ(voetest::kEventSignaled, changed_ssrc_event_->Wait(10*1000));
} }
void SetIncomingSsrc(unsigned int ssrc) { void SetIncomingSsrc(unsigned int ssrc) {
voetest::CriticalSectionScoped lock(crit_.get()); rtc::CritScope lock(&crit_);
incoming_ssrc_ = ssrc; incoming_ssrc_ = ssrc;
} }
public: public:
rtc::scoped_ptr<voetest::CriticalSectionWrapper> crit_; rtc::CriticalSection crit_;
unsigned int incoming_ssrc_; unsigned int incoming_ssrc_;
rtc::scoped_ptr<voetest::EventWrapper> changed_ssrc_event_; rtc::scoped_ptr<voetest::EventWrapper> changed_ssrc_event_;
}; };
@ -48,7 +46,7 @@ void TestRtpObserver::OnIncomingSSRCChanged(int channel,
TEST_LOG("%s", msg); TEST_LOG("%s", msg);
{ {
voetest::CriticalSectionScoped lock(crit_.get()); rtc::CritScope lock(&crit_);
if (incoming_ssrc_ == SSRC) if (incoming_ssrc_ == SSRC)
changed_ssrc_event_->Set(); changed_ssrc_event_->Set();
} }
View File
@ -43,7 +43,6 @@
#ifdef WEBRTC_VOICE_ENGINE_NETEQ_STATS_API #ifdef WEBRTC_VOICE_ENGINE_NETEQ_STATS_API
namespace webrtc { namespace webrtc {
class CriticalSectionWrapper;
class VoENetEqStats; class VoENetEqStats;
} }
#endif #endif
View File
@ -13,7 +13,6 @@
#include "webrtc/base/format_macros.h" #include "webrtc/base/format_macros.h"
#include "webrtc/base/logging.h" #include "webrtc/base/logging.h"
#include "webrtc/modules/utility/include/audio_frame_operations.h" #include "webrtc/modules/utility/include/audio_frame_operations.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/event_wrapper.h" #include "webrtc/system_wrappers/include/event_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h" #include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/voice_engine/channel.h" #include "webrtc/voice_engine/channel.h"
@ -37,7 +36,7 @@ TransmitMixer::OnPeriodicProcess()
bool send_typing_noise_warning = false; bool send_typing_noise_warning = false;
bool typing_noise_detected = false; bool typing_noise_detected = false;
{ {
CriticalSectionScoped cs(&_critSect); rtc::CritScope cs(&_critSect);
if (_typingNoiseWarningPending) { if (_typingNoiseWarningPending) {
send_typing_noise_warning = true; send_typing_noise_warning = true;
typing_noise_detected = _typingNoiseDetected; typing_noise_detected = _typingNoiseDetected;
@ -45,7 +44,7 @@ TransmitMixer::OnPeriodicProcess()
} }
} }
if (send_typing_noise_warning) { if (send_typing_noise_warning) {
CriticalSectionScoped cs(&_callbackCritSect); rtc::CritScope cs(&_callbackCritSect);
if (_voiceEngineObserverPtr) { if (_voiceEngineObserverPtr) {
if (typing_noise_detected) { if (typing_noise_detected) {
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1), WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
@ -71,7 +70,7 @@ TransmitMixer::OnPeriodicProcess()
// Modify |_saturationWarning| under lock to avoid conflict with write op // Modify |_saturationWarning| under lock to avoid conflict with write op
// in ProcessAudio and also ensure that we don't hold the lock during the // in ProcessAudio and also ensure that we don't hold the lock during the
// callback. // callback.
CriticalSectionScoped cs(&_critSect); rtc::CritScope cs(&_critSect);
saturationWarning = _saturationWarning; saturationWarning = _saturationWarning;
if (_saturationWarning) if (_saturationWarning)
_saturationWarning = false; _saturationWarning = false;
@ -79,7 +78,7 @@ TransmitMixer::OnPeriodicProcess()
if (saturationWarning) if (saturationWarning)
{ {
CriticalSectionScoped cs(&_callbackCritSect); rtc::CritScope cs(&_callbackCritSect);
if (_voiceEngineObserverPtr) if (_voiceEngineObserverPtr)
{ {
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1), WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
@ -118,7 +117,7 @@ void TransmitMixer::PlayFileEnded(int32_t id)
assert(id == _filePlayerId); assert(id == _filePlayerId);
CriticalSectionScoped cs(&_critSect); rtc::CritScope cs(&_critSect);
_filePlaying = false; _filePlaying = false;
WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1), WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
@ -134,14 +133,14 @@ TransmitMixer::RecordFileEnded(int32_t id)
if (id == _fileRecorderId) if (id == _fileRecorderId)
{ {
CriticalSectionScoped cs(&_critSect); rtc::CritScope cs(&_critSect);
_fileRecording = false; _fileRecording = false;
WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1), WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
"TransmitMixer::RecordFileEnded() => fileRecorder module" "TransmitMixer::RecordFileEnded() => fileRecorder module"
"is shutdown"); "is shutdown");
} else if (id == _fileCallRecorderId) } else if (id == _fileCallRecorderId)
{ {
CriticalSectionScoped cs(&_critSect); rtc::CritScope cs(&_critSect);
_fileCallRecording = false; _fileCallRecording = false;
WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1), WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
"TransmitMixer::RecordFileEnded() => fileCallRecorder" "TransmitMixer::RecordFileEnded() => fileCallRecorder"
@ -193,8 +192,6 @@ TransmitMixer::TransmitMixer(uint32_t instanceId) :
_fileRecording(false), _fileRecording(false),
_fileCallRecording(false), _fileCallRecording(false),
_audioLevel(), _audioLevel(),
_critSect(*CriticalSectionWrapper::CreateCriticalSection()),
_callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
#ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION #ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION
_typingNoiseWarningPending(false), _typingNoiseWarningPending(false),
_typingNoiseDetected(false), _typingNoiseDetected(false),
@ -226,7 +223,7 @@ TransmitMixer::~TransmitMixer()
DeRegisterExternalMediaProcessing(kRecordingAllChannelsMixed); DeRegisterExternalMediaProcessing(kRecordingAllChannelsMixed);
DeRegisterExternalMediaProcessing(kRecordingPreprocessing); DeRegisterExternalMediaProcessing(kRecordingPreprocessing);
{ {
CriticalSectionScoped cs(&_critSect); rtc::CritScope cs(&_critSect);
if (_fileRecorderPtr) if (_fileRecorderPtr)
{ {
_fileRecorderPtr->RegisterModuleFileCallback(NULL); _fileRecorderPtr->RegisterModuleFileCallback(NULL);
@ -249,8 +246,6 @@ TransmitMixer::~TransmitMixer()
_filePlayerPtr = NULL; _filePlayerPtr = NULL;
} }
} }
delete &_critSect;
delete &_callbackCritSect;
} }
int32_t int32_t
@ -276,7 +271,7 @@ TransmitMixer::RegisterVoiceEngineObserver(VoiceEngineObserver& observer)
{ {
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1), WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
"TransmitMixer::RegisterVoiceEngineObserver()"); "TransmitMixer::RegisterVoiceEngineObserver()");
CriticalSectionScoped cs(&_callbackCritSect); rtc::CritScope cs(&_callbackCritSect);
if (_voiceEngineObserverPtr) if (_voiceEngineObserverPtr)
{ {
@ -340,7 +335,7 @@ TransmitMixer::PrepareDemux(const void* audioSamples,
samplesPerSec); samplesPerSec);
{ {
CriticalSectionScoped cs(&_callbackCritSect); rtc::CritScope cs(&_callbackCritSect);
if (external_preproc_ptr_) { if (external_preproc_ptr_) {
external_preproc_ptr_->Process(-1, kRecordingPreprocessing, external_preproc_ptr_->Process(-1, kRecordingPreprocessing,
_audioFrame.data_, _audioFrame.data_,
@ -388,7 +383,7 @@ TransmitMixer::PrepareDemux(const void* audioSamples,
// --- Record to file // --- Record to file
bool file_recording = false; bool file_recording = false;
{ {
CriticalSectionScoped cs(&_critSect); rtc::CritScope cs(&_critSect);
file_recording = _fileRecording; file_recording = _fileRecording;
} }
if (file_recording) if (file_recording)
@ -397,7 +392,7 @@ TransmitMixer::PrepareDemux(const void* audioSamples,
} }
{ {
CriticalSectionScoped cs(&_callbackCritSect); rtc::CritScope cs(&_callbackCritSect);
if (external_postproc_ptr_) { if (external_postproc_ptr_) {
external_postproc_ptr_->Process(-1, kRecordingAllChannelsMixed, external_postproc_ptr_->Process(-1, kRecordingAllChannelsMixed,
_audioFrame.data_, _audioFrame.data_,
@ -520,7 +515,7 @@ int TransmitMixer::StartPlayingFileAsMicrophone(const char* fileName,
return 0; return 0;
} }
CriticalSectionScoped cs(&_critSect); rtc::CritScope cs(&_critSect);
// Destroy the old instance // Destroy the old instance
if (_filePlayerPtr) if (_filePlayerPtr)
@ -597,7 +592,7 @@ int TransmitMixer::StartPlayingFileAsMicrophone(InStream* stream,
return 0; return 0;
} }
CriticalSectionScoped cs(&_critSect); rtc::CritScope cs(&_critSect);
// Destroy the old instance // Destroy the old instance
if (_filePlayerPtr) if (_filePlayerPtr)
@ -654,7 +649,7 @@ int TransmitMixer::StopPlayingFileAsMicrophone()
return 0; return 0;
} }
CriticalSectionScoped cs(&_critSect); rtc::CritScope cs(&_critSect);
if (_filePlayerPtr->StopPlayingFile() != 0) if (_filePlayerPtr->StopPlayingFile() != 0)
{ {
@ -686,7 +681,7 @@ int TransmitMixer::StartRecordingMicrophone(const char* fileName,
"TransmitMixer::StartRecordingMicrophone(fileName=%s)", "TransmitMixer::StartRecordingMicrophone(fileName=%s)",
fileName); fileName);
CriticalSectionScoped cs(&_critSect); rtc::CritScope cs(&_critSect);
if (_fileRecording) if (_fileRecording)
{ {
@ -764,7 +759,7 @@ int TransmitMixer::StartRecordingMicrophone(OutStream* stream,
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1), WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
"TransmitMixer::StartRecordingMicrophone()"); "TransmitMixer::StartRecordingMicrophone()");
CriticalSectionScoped cs(&_critSect); rtc::CritScope cs(&_critSect);
if (_fileRecording) if (_fileRecording)
{ {
@ -841,7 +836,7 @@ int TransmitMixer::StopRecordingMicrophone()
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1), WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
"TransmitMixer::StopRecordingMicrophone()"); "TransmitMixer::StopRecordingMicrophone()");
CriticalSectionScoped cs(&_critSect); rtc::CritScope cs(&_critSect);
if (!_fileRecording) if (!_fileRecording)
{ {
@ -903,7 +898,7 @@ int TransmitMixer::StartRecordingCall(const char* fileName,
format = kFileFormatCompressedFile; format = kFileFormatCompressedFile;
} }
CriticalSectionScoped cs(&_critSect); rtc::CritScope cs(&_critSect);
// Destroy the old instance // Destroy the old instance
if (_fileCallRecorderPtr) if (_fileCallRecorderPtr)
@ -981,7 +976,7 @@ int TransmitMixer::StartRecordingCall(OutStream* stream,
format = kFileFormatCompressedFile; format = kFileFormatCompressedFile;
} }
CriticalSectionScoped cs(&_critSect); rtc::CritScope cs(&_critSect);
// Destroy the old instance // Destroy the old instance
if (_fileCallRecorderPtr) if (_fileCallRecorderPtr)
@ -1032,7 +1027,7 @@ int TransmitMixer::StopRecordingCall()
return -1; return -1;
} }
CriticalSectionScoped cs(&_critSect); rtc::CritScope cs(&_critSect);
if (_fileCallRecorderPtr->StopRecording() != 0) if (_fileCallRecorderPtr->StopRecording() != 0)
{ {
@ -1062,7 +1057,7 @@ int TransmitMixer::RegisterExternalMediaProcessing(
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1), WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
"TransmitMixer::RegisterExternalMediaProcessing()"); "TransmitMixer::RegisterExternalMediaProcessing()");
CriticalSectionScoped cs(&_callbackCritSect); rtc::CritScope cs(&_callbackCritSect);
if (!object) { if (!object) {
return -1; return -1;
} }
@ -1082,7 +1077,7 @@ int TransmitMixer::DeRegisterExternalMediaProcessing(ProcessingTypes type) {
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1), WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
"TransmitMixer::DeRegisterExternalMediaProcessing()"); "TransmitMixer::DeRegisterExternalMediaProcessing()");
CriticalSectionScoped cs(&_callbackCritSect); rtc::CritScope cs(&_callbackCritSect);
if (type == kRecordingAllChannelsMixed) { if (type == kRecordingAllChannelsMixed) {
external_postproc_ptr_ = NULL; external_postproc_ptr_ = NULL;
} else if (type == kRecordingPreprocessing) { } else if (type == kRecordingPreprocessing) {
@ -1127,7 +1122,7 @@ bool TransmitMixer::IsRecordingCall()
bool TransmitMixer::IsRecordingMic() bool TransmitMixer::IsRecordingMic()
{ {
CriticalSectionScoped cs(&_critSect); rtc::CritScope cs(&_critSect);
return _fileRecording; return _fileRecording;
} }
@ -1162,7 +1157,7 @@ void TransmitMixer::GenerateAudioFrame(const int16_t* audio,
int32_t TransmitMixer::RecordAudioToFile( int32_t TransmitMixer::RecordAudioToFile(
uint32_t mixingFrequency) uint32_t mixingFrequency)
{ {
CriticalSectionScoped cs(&_critSect); rtc::CritScope cs(&_critSect);
if (_fileRecorderPtr == NULL) if (_fileRecorderPtr == NULL)
{ {
WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1), WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
@ -1189,7 +1184,7 @@ int32_t TransmitMixer::MixOrReplaceAudioWithFile(
size_t fileSamples(0); size_t fileSamples(0);
{ {
CriticalSectionScoped cs(&_critSect); rtc::CritScope cs(&_critSect);
if (_filePlayerPtr == NULL) if (_filePlayerPtr == NULL)
{ {
WEBRTC_TRACE(kTraceWarning, kTraceVoice, WEBRTC_TRACE(kTraceWarning, kTraceVoice,
@ -1267,7 +1262,7 @@ void TransmitMixer::ProcessAudio(int delay_ms, int clock_drift,
// Store new capture level. Only updated when analog AGC is enabled. // Store new capture level. Only updated when analog AGC is enabled.
_captureLevel = agc->stream_analog_level(); _captureLevel = agc->stream_analog_level();
CriticalSectionScoped cs(&_critSect); rtc::CritScope cs(&_critSect);
// Triggers a callback in OnPeriodicProcess(). // Triggers a callback in OnPeriodicProcess().
_saturationWarning |= agc->stream_is_saturated(); _saturationWarning |= agc->stream_is_saturated();
} }
@ -1282,11 +1277,11 @@ void TransmitMixer::TypingDetection(bool keyPressed)
bool vadActive = _audioFrame.vad_activity_ == AudioFrame::kVadActive; bool vadActive = _audioFrame.vad_activity_ == AudioFrame::kVadActive;
if (_typingDetection.Process(keyPressed, vadActive)) { if (_typingDetection.Process(keyPressed, vadActive)) {
CriticalSectionScoped cs(&_critSect); rtc::CritScope cs(&_critSect);
_typingNoiseWarningPending = true; _typingNoiseWarningPending = true;
_typingNoiseDetected = true; _typingNoiseDetected = true;
} else { } else {
CriticalSectionScoped cs(&_critSect); rtc::CritScope cs(&_critSect);
// If there is already a warning pending, do not change the state. // If there is already a warning pending, do not change the state.
// Otherwise set a warning pending if last callback was for noise detected. // Otherwise set a warning pending if last callback was for noise detected.
if (!_typingNoiseWarningPending && _typingNoiseDetected) { if (!_typingNoiseWarningPending && _typingNoiseDetected) {
View File
@ -11,6 +11,7 @@
#ifndef WEBRTC_VOICE_ENGINE_TRANSMIT_MIXER_H #ifndef WEBRTC_VOICE_ENGINE_TRANSMIT_MIXER_H
#define WEBRTC_VOICE_ENGINE_TRANSMIT_MIXER_H #define WEBRTC_VOICE_ENGINE_TRANSMIT_MIXER_H
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/scoped_ptr.h" #include "webrtc/base/scoped_ptr.h"
#include "webrtc/common_audio/resampler/include/push_resampler.h" #include "webrtc/common_audio/resampler/include/push_resampler.h"
#include "webrtc/common_types.h" #include "webrtc/common_types.h"
@ -210,8 +211,8 @@ private:
bool _fileCallRecording; bool _fileCallRecording;
voe::AudioLevel _audioLevel; voe::AudioLevel _audioLevel;
// protect file instances and their variables in MixedParticipants() // protect file instances and their variables in MixedParticipants()
CriticalSectionWrapper& _critSect; rtc::CriticalSection _critSect;
CriticalSectionWrapper& _callbackCritSect; rtc::CriticalSection _callbackCritSect;
#ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION #ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION
webrtc::TypingDetection _typingDetection; webrtc::TypingDetection _typingDetection;
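The constructor and destructor edits above follow from the ownership change: CriticalSectionWrapper had to be heap-allocated via CreateCriticalSection() and freed with delete, whereas rtc::CriticalSection lives and dies with its owner. A rough sketch using an invented class, not the actual TransmitMixer code:

    #include "webrtc/base/criticalsection.h"

    // Hypothetical owner class illustrating the lifetime difference.
    class Owner {
     public:
      // Before: _critSect(*CriticalSectionWrapper::CreateCriticalSection()) in
      // the initializer list and "delete &_critSect;" in the destructor.
      // After: nothing extra to do; crit_ is constructed and destroyed with Owner.
      void Touch() {
        rtc::CritScope cs(&crit_);
        ++counter_;  // state protected by crit_
      }

     private:
      rtc::CriticalSection crit_;
      int counter_ = 0;
    };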
View File
@ -12,7 +12,6 @@
#include "webrtc/base/logging.h" #include "webrtc/base/logging.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h" #include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h" #include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/voice_engine/channel.h" #include "webrtc/voice_engine/channel.h"
#include "webrtc/voice_engine/include/voe_errors.h" #include "webrtc/voice_engine/include/voe_errors.h"
View File
@ -17,7 +17,6 @@
#include "webrtc/modules/audio_coding/include/audio_coding_module.h" #include "webrtc/modules/audio_coding/include/audio_coding_module.h"
#include "webrtc/modules/audio_device/audio_device_impl.h" #include "webrtc/modules/audio_device/audio_device_impl.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h" #include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/file_wrapper.h" #include "webrtc/system_wrappers/include/file_wrapper.h"
#include "webrtc/voice_engine/channel.h" #include "webrtc/voice_engine/channel.h"
#include "webrtc/voice_engine/include/voe_errors.h" #include "webrtc/voice_engine/include/voe_errors.h"
@ -39,16 +38,14 @@ VoEBase* VoEBase::GetInterface(VoiceEngine* voiceEngine) {
VoEBaseImpl::VoEBaseImpl(voe::SharedData* shared) VoEBaseImpl::VoEBaseImpl(voe::SharedData* shared)
: voiceEngineObserverPtr_(nullptr), : voiceEngineObserverPtr_(nullptr),
callbackCritSect_(*CriticalSectionWrapper::CreateCriticalSection()),
shared_(shared) {} shared_(shared) {}
VoEBaseImpl::~VoEBaseImpl() { VoEBaseImpl::~VoEBaseImpl() {
TerminateInternal(); TerminateInternal();
delete &callbackCritSect_;
} }
void VoEBaseImpl::OnErrorIsReported(const ErrorCode error) { void VoEBaseImpl::OnErrorIsReported(const ErrorCode error) {
CriticalSectionScoped cs(&callbackCritSect_); rtc::CritScope cs(&callbackCritSect_);
int errCode = 0; int errCode = 0;
if (error == AudioDeviceObserver::kRecordingError) { if (error == AudioDeviceObserver::kRecordingError) {
errCode = VE_RUNTIME_REC_ERROR; errCode = VE_RUNTIME_REC_ERROR;
@ -64,7 +61,7 @@ void VoEBaseImpl::OnErrorIsReported(const ErrorCode error) {
} }
void VoEBaseImpl::OnWarningIsReported(const WarningCode warning) { void VoEBaseImpl::OnWarningIsReported(const WarningCode warning) {
CriticalSectionScoped cs(&callbackCritSect_); rtc::CritScope cs(&callbackCritSect_);
int warningCode = 0; int warningCode = 0;
if (warning == AudioDeviceObserver::kRecordingWarning) { if (warning == AudioDeviceObserver::kRecordingWarning) {
warningCode = VE_RUNTIME_REC_WARNING; warningCode = VE_RUNTIME_REC_WARNING;
@ -176,7 +173,7 @@ void VoEBaseImpl::PullRenderData(int bits_per_sample,
} }
int VoEBaseImpl::RegisterVoiceEngineObserver(VoiceEngineObserver& observer) { int VoEBaseImpl::RegisterVoiceEngineObserver(VoiceEngineObserver& observer) {
CriticalSectionScoped cs(&callbackCritSect_); rtc::CritScope cs(&callbackCritSect_);
if (voiceEngineObserverPtr_) { if (voiceEngineObserverPtr_) {
shared_->SetLastError( shared_->SetLastError(
VE_INVALID_OPERATION, kTraceError, VE_INVALID_OPERATION, kTraceError,
@ -196,7 +193,7 @@ int VoEBaseImpl::RegisterVoiceEngineObserver(VoiceEngineObserver& observer) {
} }
int VoEBaseImpl::DeRegisterVoiceEngineObserver() { int VoEBaseImpl::DeRegisterVoiceEngineObserver() {
CriticalSectionScoped cs(&callbackCritSect_); rtc::CritScope cs(&callbackCritSect_);
if (!voiceEngineObserverPtr_) { if (!voiceEngineObserverPtr_) {
shared_->SetLastError( shared_->SetLastError(
VE_INVALID_OPERATION, kTraceError, VE_INVALID_OPERATION, kTraceError,
@ -216,7 +213,7 @@ int VoEBaseImpl::DeRegisterVoiceEngineObserver() {
int VoEBaseImpl::Init(AudioDeviceModule* external_adm, int VoEBaseImpl::Init(AudioDeviceModule* external_adm,
AudioProcessing* audioproc) { AudioProcessing* audioproc) {
CriticalSectionScoped cs(shared_->crit_sec()); rtc::CritScope cs(shared_->crit_sec());
WebRtcSpl_Init(); WebRtcSpl_Init();
if (shared_->statistics().Initialized()) { if (shared_->statistics().Initialized()) {
return 0; return 0;
@ -382,12 +379,12 @@ int VoEBaseImpl::Init(AudioDeviceModule* external_adm,
} }
int VoEBaseImpl::Terminate() { int VoEBaseImpl::Terminate() {
CriticalSectionScoped cs(shared_->crit_sec()); rtc::CritScope cs(shared_->crit_sec());
return TerminateInternal(); return TerminateInternal();
} }
int VoEBaseImpl::CreateChannel() { int VoEBaseImpl::CreateChannel() {
CriticalSectionScoped cs(shared_->crit_sec()); rtc::CritScope cs(shared_->crit_sec());
if (!shared_->statistics().Initialized()) { if (!shared_->statistics().Initialized()) {
shared_->SetLastError(VE_NOT_INITED, kTraceError); shared_->SetLastError(VE_NOT_INITED, kTraceError);
return -1; return -1;
@ -398,7 +395,7 @@ int VoEBaseImpl::CreateChannel() {
} }
int VoEBaseImpl::CreateChannel(const Config& config) { int VoEBaseImpl::CreateChannel(const Config& config) {
CriticalSectionScoped cs(shared_->crit_sec()); rtc::CritScope cs(shared_->crit_sec());
if (!shared_->statistics().Initialized()) { if (!shared_->statistics().Initialized()) {
shared_->SetLastError(VE_NOT_INITED, kTraceError); shared_->SetLastError(VE_NOT_INITED, kTraceError);
return -1; return -1;
@ -434,7 +431,7 @@ int VoEBaseImpl::InitializeChannel(voe::ChannelOwner* channel_owner) {
} }
int VoEBaseImpl::DeleteChannel(int channel) { int VoEBaseImpl::DeleteChannel(int channel) {
CriticalSectionScoped cs(shared_->crit_sec()); rtc::CritScope cs(shared_->crit_sec());
if (!shared_->statistics().Initialized()) { if (!shared_->statistics().Initialized()) {
shared_->SetLastError(VE_NOT_INITED, kTraceError); shared_->SetLastError(VE_NOT_INITED, kTraceError);
return -1; return -1;
@ -461,7 +458,7 @@ int VoEBaseImpl::DeleteChannel(int channel) {
} }
int VoEBaseImpl::StartReceive(int channel) { int VoEBaseImpl::StartReceive(int channel) {
CriticalSectionScoped cs(shared_->crit_sec()); rtc::CritScope cs(shared_->crit_sec());
if (!shared_->statistics().Initialized()) { if (!shared_->statistics().Initialized()) {
shared_->SetLastError(VE_NOT_INITED, kTraceError); shared_->SetLastError(VE_NOT_INITED, kTraceError);
return -1; return -1;
@ -477,7 +474,7 @@ int VoEBaseImpl::StartReceive(int channel) {
} }
int VoEBaseImpl::StopReceive(int channel) { int VoEBaseImpl::StopReceive(int channel) {
CriticalSectionScoped cs(shared_->crit_sec()); rtc::CritScope cs(shared_->crit_sec());
if (!shared_->statistics().Initialized()) { if (!shared_->statistics().Initialized()) {
shared_->SetLastError(VE_NOT_INITED, kTraceError); shared_->SetLastError(VE_NOT_INITED, kTraceError);
return -1; return -1;
@ -493,7 +490,7 @@ int VoEBaseImpl::StopReceive(int channel) {
} }
int VoEBaseImpl::StartPlayout(int channel) { int VoEBaseImpl::StartPlayout(int channel) {
CriticalSectionScoped cs(shared_->crit_sec()); rtc::CritScope cs(shared_->crit_sec());
if (!shared_->statistics().Initialized()) { if (!shared_->statistics().Initialized()) {
shared_->SetLastError(VE_NOT_INITED, kTraceError); shared_->SetLastError(VE_NOT_INITED, kTraceError);
return -1; return -1;
@ -517,7 +514,7 @@ int VoEBaseImpl::StartPlayout(int channel) {
} }
int VoEBaseImpl::StopPlayout(int channel) { int VoEBaseImpl::StopPlayout(int channel) {
CriticalSectionScoped cs(shared_->crit_sec()); rtc::CritScope cs(shared_->crit_sec());
if (!shared_->statistics().Initialized()) { if (!shared_->statistics().Initialized()) {
shared_->SetLastError(VE_NOT_INITED, kTraceError); shared_->SetLastError(VE_NOT_INITED, kTraceError);
return -1; return -1;
@ -537,7 +534,7 @@ int VoEBaseImpl::StopPlayout(int channel) {
} }
int VoEBaseImpl::StartSend(int channel) { int VoEBaseImpl::StartSend(int channel) {
CriticalSectionScoped cs(shared_->crit_sec()); rtc::CritScope cs(shared_->crit_sec());
if (!shared_->statistics().Initialized()) { if (!shared_->statistics().Initialized()) {
shared_->SetLastError(VE_NOT_INITED, kTraceError); shared_->SetLastError(VE_NOT_INITED, kTraceError);
return -1; return -1;
@ -561,7 +558,7 @@ int VoEBaseImpl::StartSend(int channel) {
} }
int VoEBaseImpl::StopSend(int channel) { int VoEBaseImpl::StopSend(int channel) {
CriticalSectionScoped cs(shared_->crit_sec()); rtc::CritScope cs(shared_->crit_sec());
if (!shared_->statistics().Initialized()) { if (!shared_->statistics().Initialized()) {
shared_->SetLastError(VE_NOT_INITED, kTraceError); shared_->SetLastError(VE_NOT_INITED, kTraceError);
return -1; return -1;
@ -795,7 +792,7 @@ void VoEBaseImpl::GetPlayoutData(int sample_rate, size_t number_of_channels,
int VoEBaseImpl::AssociateSendChannel(int channel, int VoEBaseImpl::AssociateSendChannel(int channel,
int accociate_send_channel) { int accociate_send_channel) {
CriticalSectionScoped cs(shared_->crit_sec()); rtc::CritScope cs(shared_->crit_sec());
if (!shared_->statistics().Initialized()) { if (!shared_->statistics().Initialized()) {
shared_->SetLastError(VE_NOT_INITED, kTraceError); shared_->SetLastError(VE_NOT_INITED, kTraceError);
View File
@ -13,6 +13,7 @@
#include "webrtc/voice_engine/include/voe_base.h" #include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/modules/include/module_common_types.h" #include "webrtc/modules/include/module_common_types.h"
#include "webrtc/voice_engine/shared_data.h" #include "webrtc/voice_engine/shared_data.h"
@ -138,7 +139,7 @@ class VoEBaseImpl : public VoEBase,
// channel. // channel.
int InitializeChannel(voe::ChannelOwner* channel_owner); int InitializeChannel(voe::ChannelOwner* channel_owner);
VoiceEngineObserver* voiceEngineObserverPtr_; VoiceEngineObserver* voiceEngineObserverPtr_;
CriticalSectionWrapper& callbackCritSect_; rtc::CriticalSection callbackCritSect_;
AudioFrame audioFrame_; AudioFrame audioFrame_;
voe::SharedData* shared_; voe::SharedData* shared_;
View File
@ -12,7 +12,6 @@
#include "webrtc/base/format_macros.h" #include "webrtc/base/format_macros.h"
#include "webrtc/modules/audio_coding/include/audio_coding_module.h" #include "webrtc/modules/audio_coding/include/audio_coding_module.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h" #include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/voice_engine/channel.h" #include "webrtc/voice_engine/channel.h"
#include "webrtc/voice_engine/include/voe_errors.h" #include "webrtc/voice_engine/include/voe_errors.h"
View File
@ -10,7 +10,7 @@
#include "webrtc/voice_engine/voe_dtmf_impl.h" #include "webrtc/voice_engine/voe_dtmf_impl.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h" #include "webrtc/base/criticalsection.h"
#include "webrtc/system_wrappers/include/trace.h" #include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/voice_engine/channel.h" #include "webrtc/voice_engine/channel.h"
#include "webrtc/voice_engine/include/voe_errors.h" #include "webrtc/voice_engine/include/voe_errors.h"
@ -197,7 +197,7 @@ int VoEDtmfImpl::SetDtmfFeedbackStatus(bool enable, bool directFeedback) {
"SetDtmfFeedbackStatus(enable=%d, directFeeback=%d)", "SetDtmfFeedbackStatus(enable=%d, directFeeback=%d)",
(int)enable, (int)directFeedback); (int)enable, (int)directFeedback);
CriticalSectionScoped sc(_shared->crit_sec()); rtc::CritScope cs(_shared->crit_sec());
_dtmfFeedback = enable; _dtmfFeedback = enable;
_dtmfDirectFeedback = directFeedback; _dtmfDirectFeedback = directFeedback;
@ -206,7 +206,7 @@ int VoEDtmfImpl::SetDtmfFeedbackStatus(bool enable, bool directFeedback) {
} }
int VoEDtmfImpl::GetDtmfFeedbackStatus(bool& enabled, bool& directFeedback) { int VoEDtmfImpl::GetDtmfFeedbackStatus(bool& enabled, bool& directFeedback) {
CriticalSectionScoped sc(_shared->crit_sec()); rtc::CritScope cs(_shared->crit_sec());
enabled = _dtmfFeedback; enabled = _dtmfFeedback;
directFeedback = _dtmfDirectFeedback; directFeedback = _dtmfDirectFeedback;
View File
@ -10,7 +10,6 @@
#include "webrtc/voice_engine/voe_external_media_impl.h" #include "webrtc/voice_engine/voe_external_media_impl.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h" #include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/voice_engine/channel.h" #include "webrtc/voice_engine/channel.h"
#include "webrtc/voice_engine/include/voe_errors.h" #include "webrtc/voice_engine/include/voe_errors.h"
View File
@ -11,7 +11,6 @@
#include "webrtc/voice_engine/voe_file_impl.h" #include "webrtc/voice_engine/voe_file_impl.h"
#include "webrtc/modules/media_file/media_file.h" #include "webrtc/modules/media_file/media_file.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/file_wrapper.h" #include "webrtc/system_wrappers/include/file_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h" #include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/voice_engine/channel.h" #include "webrtc/voice_engine/channel.h"
View File
@ -12,7 +12,6 @@
#include <assert.h> #include <assert.h>
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h" #include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/voice_engine/include/voe_errors.h" #include "webrtc/voice_engine/include/voe_errors.h"
#include "webrtc/voice_engine/voice_engine_impl.h" #include "webrtc/voice_engine/voice_engine_impl.h"
@ -234,7 +233,7 @@ int VoEHardwareImpl::SetRecordingDevice(int index,
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"SetRecordingDevice(index=%d, recordingChannel=%d)", index, "SetRecordingDevice(index=%d, recordingChannel=%d)", index,
(int)recordingChannel); (int)recordingChannel);
CriticalSectionScoped cs(_shared->crit_sec()); rtc::CritScope cs(_shared->crit_sec());
if (!_shared->statistics().Initialized()) { if (!_shared->statistics().Initialized()) {
_shared->SetLastError(VE_NOT_INITED, kTraceError); _shared->SetLastError(VE_NOT_INITED, kTraceError);
@ -345,7 +344,7 @@ int VoEHardwareImpl::SetRecordingDevice(int index,
int VoEHardwareImpl::SetPlayoutDevice(int index) { int VoEHardwareImpl::SetPlayoutDevice(int index) {
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"SetPlayoutDevice(index=%d)", index); "SetPlayoutDevice(index=%d)", index);
CriticalSectionScoped cs(_shared->crit_sec()); rtc::CritScope cs(_shared->crit_sec());
if (!_shared->statistics().Initialized()) { if (!_shared->statistics().Initialized()) {
_shared->SetLastError(VE_NOT_INITED, kTraceError); _shared->SetLastError(VE_NOT_INITED, kTraceError);
View File
@ -11,7 +11,6 @@
#include "webrtc/voice_engine/voe_neteq_stats_impl.h" #include "webrtc/voice_engine/voe_neteq_stats_impl.h"
#include "webrtc/modules/audio_coding/include/audio_coding_module.h" #include "webrtc/modules/audio_coding/include/audio_coding_module.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h" #include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/voice_engine/channel.h" #include "webrtc/voice_engine/channel.h"
#include "webrtc/voice_engine/include/voe_errors.h" #include "webrtc/voice_engine/include/voe_errors.h"
View File
@ -13,7 +13,6 @@
#include "webrtc/base/checks.h" #include "webrtc/base/checks.h"
#include "webrtc/base/format_macros.h" #include "webrtc/base/format_macros.h"
#include "webrtc/base/logging.h" #include "webrtc/base/logging.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h" #include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/voice_engine/channel.h" #include "webrtc/voice_engine/channel.h"
#include "webrtc/voice_engine/include/voe_errors.h" #include "webrtc/voice_engine/include/voe_errors.h"
View File
@ -8,7 +8,6 @@
* be found in the AUTHORS file in the root of the source tree. * be found in the AUTHORS file in the root of the source tree.
*/ */
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/file_wrapper.h" #include "webrtc/system_wrappers/include/file_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h" #include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/voice_engine/include/voe_errors.h" #include "webrtc/voice_engine/include/voe_errors.h"
View File
@ -10,7 +10,6 @@
#include "webrtc/voice_engine/voe_video_sync_impl.h" #include "webrtc/voice_engine/voe_video_sync_impl.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h" #include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/voice_engine/channel.h" #include "webrtc/voice_engine/channel.h"
#include "webrtc/voice_engine/include/voe_errors.h" #include "webrtc/voice_engine/include/voe_errors.h"
View File
@ -10,7 +10,6 @@
#include "webrtc/voice_engine/voe_volume_control_impl.h" #include "webrtc/voice_engine/voe_volume_control_impl.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h" #include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/voice_engine/channel.h" #include "webrtc/voice_engine/channel.h"
#include "webrtc/voice_engine/include/voe_errors.h" #include "webrtc/voice_engine/include/voe_errors.h"
View File
@ -17,7 +17,6 @@
#include "webrtc/base/checks.h" #include "webrtc/base/checks.h"
#include "webrtc/modules/audio_coding/include/audio_coding_module.h" #include "webrtc/modules/audio_coding/include/audio_coding_module.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h" #include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/voice_engine/channel_proxy.h" #include "webrtc/voice_engine/channel_proxy.h"
#include "webrtc/voice_engine/voice_engine_impl.h" #include "webrtc/voice_engine/voice_engine_impl.h"
@ -66,7 +65,7 @@ int VoiceEngineImpl::Release() {
rtc::scoped_ptr<voe::ChannelProxy> VoiceEngineImpl::GetChannelProxy( rtc::scoped_ptr<voe::ChannelProxy> VoiceEngineImpl::GetChannelProxy(
int channel_id) { int channel_id) {
RTC_DCHECK(channel_id >= 0); RTC_DCHECK(channel_id >= 0);
CriticalSectionScoped cs(crit_sec()); rtc::CritScope cs(crit_sec());
RTC_DCHECK(statistics().Initialized()); RTC_DCHECK(statistics().Initialized());
return rtc::scoped_ptr<voe::ChannelProxy>( return rtc::scoped_ptr<voe::ChannelProxy>(
new voe::ChannelProxy(channel_manager().GetChannel(channel_id))); new voe::ChannelProxy(channel_manager().GetChannel(channel_id)));