Remove calls to ScopedToUnique and UniqueToScoped

They're just no-ops now, and will soon go away.

BUG=webrtc:5520

Review URL: https://codereview.webrtc.org/1914153002

Cr-Commit-Position: refs/heads/master@{#12510}
kwiberg 2016-04-26 08:18:04 -07:00 committed by Commit bot
parent 4485ffb58d
commit 1c7fdd86eb
17 changed files with 45 additions and 55 deletions
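
Roughly why every call can simply be dropped: both helpers convert between rtc::scoped_ptr and std::unique_ptr, and once scoped_ptr is nothing more than an alias of std::unique_ptr the conversion does nothing. A minimal sketch, assuming scoped_ptr is already such an alias; the real definitions in webrtc/base/scoped_ptr.h may differ in detail:

#include <memory>

namespace rtc {

// Assumed for illustration: rtc::scoped_ptr has already been reduced to an
// alias of std::unique_ptr (webrtc:5520), which is what makes the helpers
// below pure pass-throughs.
template <typename T>
using scoped_ptr = std::unique_ptr<T>;

// With the two types identical, converting in either direction is a no-op,
// so call sites can drop the wrapper entirely.
template <typename T>
std::unique_ptr<T> ScopedToUnique(scoped_ptr<T> ptr) {
  return ptr;
}

template <typename T>
scoped_ptr<T> UniqueToScoped(std::unique_ptr<T> ptr) {
  return ptr;
}

}  // namespace rtc

That is the pattern applied in each hunk below, e.g. thread.PostTask(std::move(task)) instead of thread.PostTask(rtc::UniqueToScoped(std::move(task))).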

View File

@@ -1241,7 +1241,7 @@ void WebRtcSession::SetRawAudioSink(uint32_t ssrc,
   if (!voice_channel_)
     return;
 
-  voice_channel_->SetRawAudioSink(ssrc, rtc::ScopedToUnique(std::move(sink)));
+  voice_channel_->SetRawAudioSink(ssrc, std::move(sink));
 }
 
 RtpParameters WebRtcSession::GetAudioRtpParameters(uint32_t ssrc) const {

View File

@@ -191,9 +191,8 @@ namespace internal {
 Call::Call(const Call::Config& config)
     : clock_(Clock::GetRealTimeClock()),
       num_cpu_cores_(CpuInfo::DetectNumberOfCores()),
-      module_process_thread_(
-          rtc::ScopedToUnique(ProcessThread::Create("ModuleProcessThread"))),
-      pacer_thread_(rtc::ScopedToUnique(ProcessThread::Create("PacerThread"))),
+      module_process_thread_(ProcessThread::Create("ModuleProcessThread")),
+      pacer_thread_(ProcessThread::Create("PacerThread")),
       call_stats_(new CallStats(clock_)),
       bitrate_allocator_(new BitrateAllocator()),
       config_(config),

View File

@@ -72,8 +72,7 @@ LappedTransform::LappedTransform(size_t num_in_channels,
                window,
                shift_amount,
                &blocker_callback_),
-      fft_(rtc::ScopedToUnique(
-          RealFourier::Create(RealFourier::FftOrder(block_length_)))),
+      fft_(RealFourier::Create(RealFourier::FftOrder(block_length_))),
       cplx_length_(RealFourier::ComplexLength(fft_->order())),
       real_buf_(num_in_channels,
                 block_length_,

View File

@@ -66,7 +66,7 @@ bool AudioManager::JavaAudioManager::IsDeviceBlacklistedForOpenSLESUsage() {
 
 // AudioManager implementation
 AudioManager::AudioManager()
-    : j_environment_(rtc::ScopedToUnique(JVM::GetInstance()->environment())),
+    : j_environment_(JVM::GetInstance()->environment()),
       audio_layer_(AudioDeviceModule::kPlatformDefaultAudio),
       initialized_(false),
       hardware_aec_(false),
@@ -80,14 +80,14 @@ AudioManager::AudioManager()
       {"nativeCacheAudioParameters",
        "(IIZZZZIIJ)V",
        reinterpret_cast<void*>(&webrtc::AudioManager::CacheAudioParameters)}};
-  j_native_registration_ = rtc::ScopedToUnique(j_environment_->RegisterNatives(
-      "org/webrtc/voiceengine/WebRtcAudioManager",
-      native_methods, arraysize(native_methods)));
+  j_native_registration_ = j_environment_->RegisterNatives(
+      "org/webrtc/voiceengine/WebRtcAudioManager", native_methods,
+      arraysize(native_methods));
   j_audio_manager_.reset(new JavaAudioManager(
       j_native_registration_.get(),
-      rtc::ScopedToUnique(j_native_registration_->NewObject(
+      j_native_registration_->NewObject(
           "<init>", "(Landroid/content/Context;J)V",
-          JVM::GetInstance()->context(), PointerTojlong(this)))));
+          JVM::GetInstance()->context(), PointerTojlong(this))));
 }
 
 AudioManager::~AudioManager() {

View File

@@ -74,7 +74,7 @@ bool AudioRecordJni::JavaAudioRecord::EnableBuiltInNS(bool enable) {
 
 // AudioRecordJni implementation.
 AudioRecordJni::AudioRecordJni(AudioManager* audio_manager)
-    : j_environment_(rtc::ScopedToUnique(JVM::GetInstance()->environment())),
+    : j_environment_(JVM::GetInstance()->environment()),
       audio_manager_(audio_manager),
       audio_parameters_(audio_manager->GetRecordAudioParameters()),
       total_delay_in_milliseconds_(0),
@@ -93,14 +93,14 @@ AudioRecordJni::AudioRecordJni(AudioManager* audio_manager)
           &webrtc::AudioRecordJni::CacheDirectBufferAddress)},
       {"nativeDataIsRecorded", "(IJ)V",
        reinterpret_cast<void*>(&webrtc::AudioRecordJni::DataIsRecorded)}};
-  j_native_registration_ = rtc::ScopedToUnique(j_environment_->RegisterNatives(
-      "org/webrtc/voiceengine/WebRtcAudioRecord",
-      native_methods, arraysize(native_methods)));
+  j_native_registration_ = j_environment_->RegisterNatives(
+      "org/webrtc/voiceengine/WebRtcAudioRecord", native_methods,
+      arraysize(native_methods));
   j_audio_record_.reset(new JavaAudioRecord(
       j_native_registration_.get(),
-      rtc::ScopedToUnique(j_native_registration_->NewObject(
+      j_native_registration_->NewObject(
           "<init>", "(Landroid/content/Context;J)V",
-          JVM::GetInstance()->context(), PointerTojlong(this)))));
+          JVM::GetInstance()->context(), PointerTojlong(this))));
   // Detach from this thread since we want to use the checker to verify calls
   // from the Java based audio thread.
   thread_checker_java_.DetachFromThread();

View File

@@ -69,7 +69,7 @@ int AudioTrackJni::JavaAudioTrack::GetStreamVolume() {
 
 // TODO(henrika): possible extend usage of AudioManager and add it as member.
 AudioTrackJni::AudioTrackJni(AudioManager* audio_manager)
-    : j_environment_(rtc::ScopedToUnique(JVM::GetInstance()->environment())),
+    : j_environment_(JVM::GetInstance()->environment()),
       audio_parameters_(audio_manager->GetPlayoutAudioParameters()),
       direct_buffer_address_(nullptr),
       direct_buffer_capacity_in_bytes_(0),
@@ -86,14 +86,14 @@ AudioTrackJni::AudioTrackJni(AudioManager* audio_manager)
           &webrtc::AudioTrackJni::CacheDirectBufferAddress)},
       {"nativeGetPlayoutData", "(IJ)V",
        reinterpret_cast<void*>(&webrtc::AudioTrackJni::GetPlayoutData)}};
-  j_native_registration_ = rtc::ScopedToUnique(j_environment_->RegisterNatives(
-      "org/webrtc/voiceengine/WebRtcAudioTrack",
-      native_methods, arraysize(native_methods)));
+  j_native_registration_ = j_environment_->RegisterNatives(
+      "org/webrtc/voiceengine/WebRtcAudioTrack", native_methods,
+      arraysize(native_methods));
   j_audio_track_.reset(new JavaAudioTrack(
       j_native_registration_.get(),
-      rtc::ScopedToUnique(j_native_registration_->NewObject(
+      j_native_registration_->NewObject(
           "<init>", "(Landroid/content/Context;J)V",
-          JVM::GetInstance()->context(), PointerTojlong(this)))));
+          JVM::GetInstance()->context(), PointerTojlong(this))));
   // Detach from this thread since we want to use the checker to verify calls
   // from the Java based audio thread.
   thread_checker_java_.DetachFromThread();

View File

@@ -15,10 +15,9 @@
 
 namespace webrtc {
 
 BuildInfo::BuildInfo()
-    : j_environment_(rtc::ScopedToUnique(JVM::GetInstance()->environment())),
-      j_build_info_(JVM::GetInstance()->GetClass(
-          "org/webrtc/voiceengine/BuildInfo")) {
-}
+    : j_environment_(JVM::GetInstance()->environment()),
+      j_build_info_(
+          JVM::GetInstance()->GetClass("org/webrtc/voiceengine/BuildInfo")) {}
 
 std::string BuildInfo::GetStringFromJava(const char* name) {
   jmethodID id = j_build_info_.GetStaticMethodId(name, "()Ljava/lang/String;");

View File

@ -142,8 +142,7 @@ class AudioDeviceAPITest: public testing::Test {
virtual ~AudioDeviceAPITest() {}
static void SetUpTestCase() {
process_thread_ =
rtc::ScopedToUnique(ProcessThread::Create("ProcessThread"));
process_thread_ = ProcessThread::Create("ProcessThread");
process_thread_->Start();
// Windows:

View File

@@ -594,11 +594,10 @@ FuncTestManager::~FuncTestManager()
 
 int32_t FuncTestManager::Init()
 {
-    EXPECT_TRUE((_processThread = rtc::ScopedToUnique(
-        ProcessThread::Create("ProcessThread"))) != NULL);
-    if (_processThread == NULL)
-    {
-        return -1;
+    EXPECT_TRUE((_processThread = ProcessThread::Create("ProcessThread")) !=
+                NULL);
+    if (_processThread == NULL) {
+        return -1;
     }
 
     _processThread->Start();
@@ -832,8 +831,8 @@ int32_t FuncTestManager::TestAudioLayerSelection()
 
     // ==================================================
     // Next, try to make fresh start with new audio layer
 
-    EXPECT_TRUE((_processThread = rtc::ScopedToUnique(
-        ProcessThread::Create("ProcessThread"))) != NULL);
+    EXPECT_TRUE((_processThread = ProcessThread::Create("ProcessThread")) !=
+                NULL);
     if (_processThread == NULL)
     {
         return -1;

View File

@@ -84,8 +84,7 @@ std::unique_ptr<DesktopFrame> SharedMemoryDesktopFrame::Create(
   size_t buffer_size =
       size.width() * size.height() * DesktopFrame::kBytesPerPixel;
   std::unique_ptr<SharedMemory> shared_memory;
-  shared_memory = rtc::ScopedToUnique(
-      shared_memory_factory->CreateSharedMemory(buffer_size));
+  shared_memory = shared_memory_factory->CreateSharedMemory(buffer_size);
   if (!shared_memory)
     return nullptr;

View File

@@ -49,8 +49,7 @@ DesktopFrameWin* DesktopFrameWin::Create(
   std::unique_ptr<SharedMemory> shared_memory;
   HANDLE section_handle = nullptr;
   if (shared_memory_factory) {
-    shared_memory = rtc::ScopedToUnique(
-        shared_memory_factory->CreateSharedMemory(buffer_size));
+    shared_memory = shared_memory_factory->CreateSharedMemory(buffer_size);
     section_handle = shared_memory->handle();
   }
   void* data = nullptr;

View File

@@ -75,8 +75,7 @@ ScreenCapturerWinGdi::~ScreenCapturerWinGdi() {
 
 void ScreenCapturerWinGdi::SetSharedMemoryFactory(
     rtc::scoped_ptr<SharedMemoryFactory> shared_memory_factory) {
-  shared_memory_factory_ =
-      rtc::ScopedToUnique(std::move(shared_memory_factory));
+  shared_memory_factory_ = std::move(shared_memory_factory);
 }
 
 void ScreenCapturerWinGdi::Capture(const DesktopRegion& region) {

View File

@@ -83,8 +83,7 @@ void ScreenCapturerWinMagnifier::Start(Callback* callback) {
 
 void ScreenCapturerWinMagnifier::SetSharedMemoryFactory(
     rtc::scoped_ptr<SharedMemoryFactory> shared_memory_factory) {
-  shared_memory_factory_ =
-      rtc::ScopedToUnique(std::move(shared_memory_factory));
+  shared_memory_factory_ = std::move(shared_memory_factory);
 }
 
 void ScreenCapturerWinMagnifier::Capture(const DesktopRegion& region) {

View File

@@ -207,8 +207,8 @@ TEST_F(TransportFeedbackAdapterTest, SendTimeWrapsBothWays) {
         packets[i].sequence_number, packets[i].arrival_time_ms * 1000));
 
     rtc::Buffer raw_packet = feedback->Build();
-    feedback = rtc::ScopedToUnique(rtcp::TransportFeedback::ParseFrom(
-        raw_packet.data(), raw_packet.size()));
+    feedback = rtcp::TransportFeedback::ParseFrom(raw_packet.data(),
+                                                  raw_packet.size());
 
     std::vector<PacketInfo> expected_packets;
     expected_packets.push_back(packets[i]);
@@ -276,8 +276,8 @@ TEST_F(TransportFeedbackAdapterTest, TimestampDeltas) {
                                            info.arrival_time_ms * 1000));
 
   rtc::Buffer raw_packet = feedback->Build();
-  feedback = rtc::ScopedToUnique(
-      rtcp::TransportFeedback::ParseFrom(raw_packet.data(), raw_packet.size()));
+  feedback =
+      rtcp::TransportFeedback::ParseFrom(raw_packet.data(), raw_packet.size());
 
   std::vector<PacketInfo> received_feedback;
@@ -297,8 +297,8 @@ TEST_F(TransportFeedbackAdapterTest, TimestampDeltas) {
   EXPECT_TRUE(feedback->WithReceivedPacket(info.sequence_number,
                                            info.arrival_time_ms * 1000));
 
   raw_packet = feedback->Build();
-  feedback = rtc::ScopedToUnique(
-      rtcp::TransportFeedback::ParseFrom(raw_packet.data(), raw_packet.size()));
+  feedback =
+      rtcp::TransportFeedback::ParseFrom(raw_packet.data(), raw_packet.size());
 
   EXPECT_TRUE(feedback.get() != nullptr);
   EXPECT_CALL(*bitrate_estimator_, IncomingPacketFeedbackVector(_))

View File

@@ -297,7 +297,7 @@ TEST(ProcessThreadImpl, PostTask) {
   std::unique_ptr<EventWrapper> task_ran(EventWrapper::Create());
   std::unique_ptr<RaiseEventTask> task(new RaiseEventTask(task_ran.get()));
   thread.Start();
-  thread.PostTask(rtc::UniqueToScoped(std::move(task)));
+  thread.PostTask(std::move(task));
   EXPECT_EQ(kEventSignaled, task_ran->Wait(100));
   thread.Stop();
 }

View File

@@ -434,8 +434,7 @@ class VideoCaptureExternalTest : public testing::Test {
  public:
   void SetUp() {
     capture_module_ = VideoCaptureFactory::Create(0, capture_input_interface_);
-    process_module_ =
-        rtc::ScopedToUnique(webrtc::ProcessThread::Create("ProcessThread"));
+    process_module_ = webrtc::ProcessThread::Create("ProcessThread");
     process_module_->Start();
     process_module_->RegisterModule(capture_module_);

View File

@@ -28,7 +28,7 @@ SharedData::SharedData(const Config& config)
       _engineStatistics(_gInstanceCounter),
       _audioDevicePtr(NULL),
       _moduleProcessThreadPtr(
-          rtc::ScopedToUnique(ProcessThread::Create("VoiceProcessThread"))) {
+          ProcessThread::Create("VoiceProcessThread")) {
   Trace::CreateTrace();
   if (OutputMixer::Create(_outputMixerPtr, _gInstanceCounter) == 0)
   {