/*
 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/voice_engine/channel.h"

#include <algorithm>
#include <utility>

#include "webrtc/audio/utility/audio_frame_operations.h"
#include "webrtc/base/array_view.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/format_macros.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/rate_limiter.h"
#include "webrtc/base/thread_checker.h"
#include "webrtc/base/timeutils.h"
#include "webrtc/config.h"
#include "webrtc/logging/rtc_event_log/rtc_event_log.h"
#include "webrtc/modules/audio_coding/codecs/audio_format_conversion.h"
#include "webrtc/modules/audio_device/include/audio_device.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/pacing/packet_router.h"
#include "webrtc/modules/rtp_rtcp/include/receive_statistics.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
#include "webrtc/modules/utility/include/process_thread.h"
#include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/voice_engine/include/voe_external_media.h"
#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
#include "webrtc/voice_engine/output_mixer.h"
#include "webrtc/voice_engine/statistics.h"
#include "webrtc/voice_engine/transmit_mixer.h"
#include "webrtc/voice_engine/utility.h"

namespace webrtc {
namespace voe {

namespace {

constexpr int64_t kMaxRetransmissionWindowMs = 1000;
constexpr int64_t kMinRetransmissionWindowMs = 30;

}  // namespace

const int kTelephoneEventAttenuationdB = 10;
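// Forwards RtcEventLog calls to an implementation that can be attached (or
// detached) at runtime via SetEventLog(). Calls made while no event log is
// attached are silently dropped; all access is serialized by |crit_|.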
class RtcEventLogProxy final : public webrtc::RtcEventLog {
 public:
  RtcEventLogProxy() : event_log_(nullptr) {}

  bool StartLogging(const std::string& file_name,
                    int64_t max_size_bytes) override {
    RTC_NOTREACHED();
    return false;
  }

  bool StartLogging(rtc::PlatformFile log_file,
                    int64_t max_size_bytes) override {
    RTC_NOTREACHED();
    return false;
  }

  void StopLogging() override { RTC_NOTREACHED(); }

  void LogVideoReceiveStreamConfig(
      const webrtc::VideoReceiveStream::Config& config) override {
    rtc::CritScope lock(&crit_);
    if (event_log_) {
      event_log_->LogVideoReceiveStreamConfig(config);
    }
  }

  void LogVideoSendStreamConfig(
      const webrtc::VideoSendStream::Config& config) override {
    rtc::CritScope lock(&crit_);
    if (event_log_) {
      event_log_->LogVideoSendStreamConfig(config);
    }
  }

  void LogAudioReceiveStreamConfig(
      const webrtc::AudioReceiveStream::Config& config) override {
    rtc::CritScope lock(&crit_);
    if (event_log_) {
      event_log_->LogAudioReceiveStreamConfig(config);
    }
  }

  void LogAudioSendStreamConfig(
      const webrtc::AudioSendStream::Config& config) override {
    rtc::CritScope lock(&crit_);
    if (event_log_) {
      event_log_->LogAudioSendStreamConfig(config);
    }
  }

  void LogRtpHeader(webrtc::PacketDirection direction,
                    webrtc::MediaType media_type,
                    const uint8_t* header,
                    size_t packet_length) override {
    rtc::CritScope lock(&crit_);
    if (event_log_) {
      event_log_->LogRtpHeader(direction, media_type, header, packet_length);
    }
  }

  void LogRtcpPacket(webrtc::PacketDirection direction,
                     webrtc::MediaType media_type,
                     const uint8_t* packet,
                     size_t length) override {
    rtc::CritScope lock(&crit_);
    if (event_log_) {
      event_log_->LogRtcpPacket(direction, media_type, packet, length);
    }
  }

  void LogAudioPlayout(uint32_t ssrc) override {
    rtc::CritScope lock(&crit_);
    if (event_log_) {
      event_log_->LogAudioPlayout(ssrc);
    }
  }

  void LogBwePacketLossEvent(int32_t bitrate,
                             uint8_t fraction_loss,
                             int32_t total_packets) override {
    rtc::CritScope lock(&crit_);
    if (event_log_) {
      event_log_->LogBwePacketLossEvent(bitrate, fraction_loss, total_packets);
    }
  }

  void SetEventLog(RtcEventLog* event_log) {
    rtc::CritScope lock(&crit_);
    event_log_ = event_log;
  }

 private:
  rtc::CriticalSection crit_;
  RtcEventLog* event_log_ GUARDED_BY(crit_);
  RTC_DISALLOW_COPY_AND_ASSIGN(RtcEventLogProxy);
};
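// Forwards RTT updates to an RtcpRttStats implementation that can be attached
// at runtime via SetRtcpRttStats(). LastProcessedRtt() returns 0 while no
// implementation is attached.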
class RtcpRttStatsProxy final : public RtcpRttStats {
 public:
  RtcpRttStatsProxy() : rtcp_rtt_stats_(nullptr) {}

  void OnRttUpdate(int64_t rtt) override {
    rtc::CritScope lock(&crit_);
    if (rtcp_rtt_stats_)
      rtcp_rtt_stats_->OnRttUpdate(rtt);
  }

  int64_t LastProcessedRtt() const override {
    rtc::CritScope lock(&crit_);
    if (!rtcp_rtt_stats_)
      return 0;
    return rtcp_rtt_stats_->LastProcessedRtt();
  }

  void SetRtcpRttStats(RtcpRttStats* rtcp_rtt_stats) {
    rtc::CritScope lock(&crit_);
    rtcp_rtt_stats_ = rtcp_rtt_stats;
  }

 private:
  rtc::CriticalSection crit_;
  RtcpRttStats* rtcp_rtt_stats_ GUARDED_BY(crit_);
  RTC_DISALLOW_COPY_AND_ASSIGN(RtcpRttStatsProxy);
};
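// Forwards transport feedback calls to an observer attached later via
// SetTransportFeedbackObserver(). The thread checkers document the expected
// calling threads: AddPacket() from the pacer thread, OnTransportFeedback()
// from the network thread, and the setter from the construction thread.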
class TransportFeedbackProxy : public TransportFeedbackObserver {
 public:
  TransportFeedbackProxy() : feedback_observer_(nullptr) {
    pacer_thread_.DetachFromThread();
    network_thread_.DetachFromThread();
  }

  void SetTransportFeedbackObserver(
      TransportFeedbackObserver* feedback_observer) {
    RTC_DCHECK(thread_checker_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    feedback_observer_ = feedback_observer;
  }

  // Implements TransportFeedbackObserver.
  void AddPacket(uint16_t sequence_number,
                 size_t length,
                 int probe_cluster_id) override {
    RTC_DCHECK(pacer_thread_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    if (feedback_observer_)
      feedback_observer_->AddPacket(sequence_number, length, probe_cluster_id);
  }
  void OnTransportFeedback(const rtcp::TransportFeedback& feedback) override {
    RTC_DCHECK(network_thread_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    if (feedback_observer_)
      feedback_observer_->OnTransportFeedback(feedback);
  }
  std::vector<PacketInfo> GetTransportFeedbackVector() const override {
    RTC_NOTREACHED();
    return std::vector<PacketInfo>();
  }

 private:
  rtc::CriticalSection crit_;
  rtc::ThreadChecker thread_checker_;
  rtc::ThreadChecker pacer_thread_;
  rtc::ThreadChecker network_thread_;
  TransportFeedbackObserver* feedback_observer_ GUARDED_BY(&crit_);
};
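// Hands out transport-wide sequence numbers from an allocator attached via
// SetSequenceNumberAllocator(); returns 0 while no allocator is attached.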
class TransportSequenceNumberProxy : public TransportSequenceNumberAllocator {
 public:
  TransportSequenceNumberProxy() : seq_num_allocator_(nullptr) {
    pacer_thread_.DetachFromThread();
  }

  void SetSequenceNumberAllocator(
      TransportSequenceNumberAllocator* seq_num_allocator) {
    RTC_DCHECK(thread_checker_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    seq_num_allocator_ = seq_num_allocator;
  }

  // Implements TransportSequenceNumberAllocator.
  uint16_t AllocateSequenceNumber() override {
    RTC_DCHECK(pacer_thread_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    if (!seq_num_allocator_)
      return 0;
    return seq_num_allocator_->AllocateSequenceNumber();
  }

 private:
  rtc::CriticalSection crit_;
  rtc::ThreadChecker thread_checker_;
  rtc::ThreadChecker pacer_thread_;
  TransportSequenceNumberAllocator* seq_num_allocator_ GUARDED_BY(&crit_);
};
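// Forwards packets to the paced sender once one has been attached via
// SetPacketSender(); packets inserted before that are dropped.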
class RtpPacketSenderProxy : public RtpPacketSender {
 public:
  RtpPacketSenderProxy() : rtp_packet_sender_(nullptr) {}

  void SetPacketSender(RtpPacketSender* rtp_packet_sender) {
    RTC_DCHECK(thread_checker_.CalledOnValidThread());
    rtc::CritScope lock(&crit_);
    rtp_packet_sender_ = rtp_packet_sender;
  }

  // Implements RtpPacketSender.
  void InsertPacket(Priority priority,
                    uint32_t ssrc,
                    uint16_t sequence_number,
                    int64_t capture_time_ms,
                    size_t bytes,
                    bool retransmission) override {
    rtc::CritScope lock(&crit_);
    if (rtp_packet_sender_) {
      rtp_packet_sender_->InsertPacket(priority, ssrc, sequence_number,
                                       capture_time_ms, bytes, retransmission);
    }
  }

 private:
  rtc::ThreadChecker thread_checker_;
  rtc::CriticalSection crit_;
  RtpPacketSender* rtp_packet_sender_ GUARDED_BY(&crit_);
};
// Extend the default RTCP statistics struct with max_jitter, defined as the
// maximum jitter value seen in an RTCP report block.
struct ChannelStatistics : public RtcpStatistics {
  ChannelStatistics() : rtcp(), max_jitter(0) {}

  RtcpStatistics rtcp;
  uint32_t max_jitter;
};

// Statistics callback, called at each generation of a new RTCP report block.
class StatisticsProxy : public RtcpStatisticsCallback {
 public:
  StatisticsProxy(uint32_t ssrc) : ssrc_(ssrc) {}
  virtual ~StatisticsProxy() {}

  void StatisticsUpdated(const RtcpStatistics& statistics,
                         uint32_t ssrc) override {
    if (ssrc != ssrc_)
      return;

    rtc::CritScope cs(&stats_lock_);
    stats_.rtcp = statistics;
    if (statistics.jitter > stats_.max_jitter) {
      stats_.max_jitter = statistics.jitter;
    }
  }

  void CNameChanged(const char* cname, uint32_t ssrc) override {}

  ChannelStatistics GetStats() {
    rtc::CritScope cs(&stats_lock_);
    return stats_;
  }

 private:
  // StatisticsUpdated calls are triggered from threads in the RTP module,
  // while GetStats calls can be triggered from the public voice engine API,
  // hence synchronization is needed.
  rtc::CriticalSection stats_lock_;
  const uint32_t ssrc_;
  ChannelStatistics stats_;
};
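// Bandwidth observer that condenses the report blocks of an incoming RTCP
// receiver report into a single packet-count-weighted fraction-lost value and
// hands it to the owning Channel. Example: blocks covering 200 packets with
// fraction-lost 25 and 800 packets with fraction-lost 5 give
// (200 * 25 + 800 * 5 + 500) / 1000 = 9 (rounded integer division).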
class VoERtcpObserver : public RtcpBandwidthObserver {
 public:
  explicit VoERtcpObserver(Channel* owner) : owner_(owner) {}
  virtual ~VoERtcpObserver() {}

  void OnReceivedEstimatedBitrate(uint32_t bitrate) override {
    // Not used for Voice Engine.
  }

  void OnReceivedRtcpReceiverReport(const ReportBlockList& report_blocks,
                                    int64_t rtt,
                                    int64_t now_ms) override {
    // TODO(mflodman): Do we need to aggregate reports here or can we just send
    // what we get? I.e. do we ever get multiple reports bundled into one RTCP
    // report for VoiceEngine?
    if (report_blocks.empty())
      return;

    int fraction_lost_aggregate = 0;
    int total_number_of_packets = 0;

    // If receiving multiple report blocks, calculate the weighted average
    // based on the number of packets a report refers to.
    for (ReportBlockList::const_iterator block_it = report_blocks.begin();
         block_it != report_blocks.end(); ++block_it) {
      // Find the previous extended high sequence number for this remote SSRC,
      // to calculate the number of RTP packets this report refers to. Ignore
      // if we haven't seen this SSRC before.
      std::map<uint32_t, uint32_t>::iterator seq_num_it =
          extended_max_sequence_number_.find(block_it->sourceSSRC);
      int number_of_packets = 0;
      if (seq_num_it != extended_max_sequence_number_.end()) {
        number_of_packets = block_it->extendedHighSeqNum - seq_num_it->second;
      }
      fraction_lost_aggregate += number_of_packets * block_it->fractionLost;
      total_number_of_packets += number_of_packets;

      extended_max_sequence_number_[block_it->sourceSSRC] =
          block_it->extendedHighSeqNum;
    }
    int weighted_fraction_lost = 0;
    if (total_number_of_packets > 0) {
      weighted_fraction_lost =
          (fraction_lost_aggregate + total_number_of_packets / 2) /
          total_number_of_packets;
    }
    owner_->OnIncomingFractionLoss(weighted_fraction_lost);
  }

 private:
  Channel* owner_;
  // Maps remote side ssrc to extended highest sequence number received.
  std::map<uint32_t, uint32_t> extended_max_sequence_number_;
};
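// Called by the ACM through the transport callback registered in Init()
// whenever an encoded audio frame is ready. The payload is handed to the
// RTP/RTCP module, which packetizes it and eventually calls SendRtp() on the
// registered transport.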
int32_t Channel::SendData(FrameType frameType,
                          uint8_t payloadType,
                          uint32_t timeStamp,
                          const uint8_t* payloadData,
                          size_t payloadSize,
                          const RTPFragmentationHeader* fragmentation) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SendData(frameType=%u, payloadType=%u, timeStamp=%u,"
               " payloadSize=%" PRIuS ", fragmentation=0x%x)",
               frameType, payloadType, timeStamp, payloadSize, fragmentation);

  if (_includeAudioLevelIndication) {
    // Store current audio level in the RTP/RTCP module.
    // The level will be used in combination with voice-activity state
    // (frameType) to add an RTP header extension
    _rtpRtcpModule->SetAudioLevel(rms_level_.Average());
  }

  // Push data from ACM to RTP/RTCP-module to deliver audio frame for
  // packetization.
  // This call will trigger Transport::SendPacket() from the RTP/RTCP module.
  if (!_rtpRtcpModule->SendOutgoingData(
          (FrameType&)frameType, payloadType, timeStamp,
          // Leaving the time when this frame was
          // received from the capture device as
          // undefined for voice for now.
          -1, payloadData, payloadSize, fragmentation, nullptr, nullptr)) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
        "Channel::SendData() failed to send data to RTP/RTCP module");
    return -1;
  }

  _lastLocalTimeStamp = timeStamp;
  _lastPayloadType = payloadType;

  return 0;
}

int32_t Channel::InFrameType(FrameType frame_type) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::InFrameType(frame_type=%d)", frame_type);

  rtc::CritScope cs(&_callbackCritSect);
  _sendFrameType = (frame_type == kAudioFrameSpeech);
  return 0;
}
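// webrtc::Transport implementation: the RTP/RTCP module (which has this
// channel registered as its outgoing transport) delivers fully formed packets
// here, and they are forwarded to |_transportPtr| when one is set.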
bool Channel::SendRtp(const uint8_t* data,
                      size_t len,
                      const PacketOptions& options) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SendPacket(channel=%d, len=%" PRIuS ")", len);

  rtc::CritScope cs(&_callbackCritSect);

  if (_transportPtr == NULL) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::SendPacket() failed to send RTP packet due to"
                 " invalid transport object");
    return false;
  }

  uint8_t* bufferToSendPtr = (uint8_t*)data;
  size_t bufferLength = len;

  if (!_transportPtr->SendRtp(bufferToSendPtr, bufferLength, options)) {
    std::string transport_name =
        _externalTransport ? "external transport" : "WebRtc sockets";
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::SendPacket() RTP transmission using %s failed",
                 transport_name.c_str());
    return false;
  }
  return true;
}

bool Channel::SendRtcp(const uint8_t* data, size_t len) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SendRtcp(len=%" PRIuS ")", len);

  rtc::CritScope cs(&_callbackCritSect);
  if (_transportPtr == NULL) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::SendRtcp() failed to send RTCP packet"
                 " due to invalid transport object");
    return false;
  }

  uint8_t* bufferToSendPtr = (uint8_t*)data;
  size_t bufferLength = len;

  int n = _transportPtr->SendRtcp(bufferToSendPtr, bufferLength);
  if (n < 0) {
    std::string transport_name =
        _externalTransport ? "external transport" : "WebRtc sockets";
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::SendRtcp() transmission using %s failed",
                 transport_name.c_str());
    return false;
  }
  return true;
}

void Channel::OnIncomingSSRCChanged(uint32_t ssrc) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnIncomingSSRCChanged(SSRC=%d)", ssrc);

  // Update ssrc so that NTP for AV sync can be updated.
  _rtpRtcpModule->SetRemoteSSRC(ssrc);
}

void Channel::OnIncomingCSRCChanged(uint32_t CSRC, bool added) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnIncomingCSRCChanged(CSRC=%d, added=%d)", CSRC,
               added);
}

int32_t Channel::OnInitializeDecoder(
    int8_t payloadType,
    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
    int frequency,
    size_t channels,
    uint32_t rate) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnInitializeDecoder(payloadType=%d, "
               "payloadName=%s, frequency=%u, channels=%" PRIuS ", rate=%u)",
               payloadType, payloadName, frequency, channels, rate);

  CodecInst receiveCodec = {0};
  CodecInst dummyCodec = {0};

  receiveCodec.pltype = payloadType;
  receiveCodec.plfreq = frequency;
  receiveCodec.channels = channels;
  receiveCodec.rate = rate;
  strncpy(receiveCodec.plname, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);

  audio_coding_->Codec(payloadName, &dummyCodec, frequency, channels);
  receiveCodec.pacsize = dummyCodec.pacsize;

  // Register the new codec to the ACM
  if (!audio_coding_->RegisterReceiveCodec(receiveCodec.pltype,
                                           CodecInstToSdp(receiveCodec))) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::OnInitializeDecoder() invalid codec ("
                 "pt=%d, name=%s) received - 1",
                 payloadType, payloadName);
    _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR);
    return -1;
  }

  return 0;
}
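// Called with a depacketized audio payload. Unless playout is stopped, the
// payload is inserted into the ACM for decoding, after which retransmission is
// requested for any packets in the ACM's NACK list.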
int32_t Channel::OnReceivedPayloadData(const uint8_t* payloadData,
                                       size_t payloadSize,
                                       const WebRtcRTPHeader* rtpHeader) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::OnReceivedPayloadData(payloadSize=%" PRIuS
               ","
               " payloadType=%u, audioChannel=%" PRIuS ")",
               payloadSize, rtpHeader->header.payloadType,
               rtpHeader->type.Audio.channel);

  if (!channel_state_.Get().playing) {
    // Avoid inserting into NetEQ when we are not playing. Count the
    // packet as discarded.
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
                 "received packet is discarded since playing is not"
                 " activated");
    _numberOfDiscardedPackets++;
    return 0;
  }

  // Push the incoming payload (parsed and ready for decoding) into the ACM
  if (audio_coding_->IncomingPacket(payloadData, payloadSize, *rtpHeader) !=
      0) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
        "Channel::OnReceivedPayloadData() unable to push data to the ACM");
    return -1;
  }

  int64_t round_trip_time = 0;
  _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), &round_trip_time, NULL, NULL,
                      NULL);

  std::vector<uint16_t> nack_list = audio_coding_->GetNackList(round_trip_time);
  if (!nack_list.empty()) {
    // Can't use nack_list.data() since it's not supported by all
    // compilers.
    ResendPackets(&(nack_list[0]), static_cast<int>(nack_list.size()));
  }
  return 0;
}

bool Channel::OnRecoveredPacket(const uint8_t* rtp_packet,
                                size_t rtp_packet_length) {
  RTPHeader header;
  if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length, &header)) {
    WEBRTC_TRACE(kTraceDebug, webrtc::kTraceVoice, _channelId,
                 "IncomingPacket invalid RTP header");
    return false;
  }
  header.payload_type_frequency =
      rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
  if (header.payload_type_frequency < 0)
    return false;
  return ReceivePacket(rtp_packet, rtp_packet_length, header, false);
}
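// Playout path: pulls 10 ms of decoded audio from the ACM, then applies the
// per-channel output processing in order: optional sink callback, gain
// scaling, panning, file mixing, external media processing, playout
// recording, level measurement, and timestamp bookkeeping for A/V sync.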
MixerParticipant::AudioFrameInfo Channel::GetAudioFrameWithMuted(
    int32_t id,
    AudioFrame* audioFrame) {
  unsigned int ssrc;
  RTC_CHECK_EQ(GetLocalSSRC(ssrc), 0);
  event_log_proxy_->LogAudioPlayout(ssrc);
  // Get 10ms raw PCM data from the ACM (mixer limits output frequency)
  bool muted;
  if (audio_coding_->PlayoutData10Ms(audioFrame->sample_rate_hz_, audioFrame,
                                     &muted) == -1) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::GetAudioFrame() PlayoutData10Ms() failed!");
    // In all likelihood, the audio in this frame is garbage. We return an
    // error so that the audio mixer module doesn't add it to the mix. As
    // a result, it won't be played out and the actions skipped here are
    // irrelevant.
    return MixerParticipant::AudioFrameInfo::kError;
  }

  if (muted) {
    // TODO(henrik.lundin): We should be able to do better than this. But we
    // will have to go through all the cases below where the audio samples may
    // be used, and handle the muted case in some way.
    AudioFrameOperations::Mute(audioFrame);
  }

  // Convert module ID to internal VoE channel ID
  audioFrame->id_ = VoEChannelId(audioFrame->id_);
  // Store speech type for dead-or-alive detection
  _outputSpeechType = audioFrame->speech_type_;

  ChannelState::State state = channel_state_.Get();

  {
    // Pass the audio buffers to an optional sink callback, before applying
    // scaling/panning, as that applies to the mix operation.
    // External recipients of the audio (e.g. via AudioTrack), will do their
    // own mixing/dynamic processing.
    rtc::CritScope cs(&_callbackCritSect);
    if (audio_sink_) {
      AudioSinkInterface::Data data(
          &audioFrame->data_[0], audioFrame->samples_per_channel_,
          audioFrame->sample_rate_hz_, audioFrame->num_channels_,
          audioFrame->timestamp_);
      audio_sink_->OnData(data);
    }
  }

  float output_gain = 1.0f;
  float left_pan = 1.0f;
  float right_pan = 1.0f;
  {
    rtc::CritScope cs(&volume_settings_critsect_);
    output_gain = _outputGain;
    left_pan = _panLeft;
    right_pan = _panRight;
  }

  // Output volume scaling
  if (output_gain < 0.99f || output_gain > 1.01f) {
    AudioFrameOperations::ScaleWithSat(output_gain, *audioFrame);
  }

  // Scale left and/or right channel(s) if stereo and master balance is
  // active

  if (left_pan != 1.0f || right_pan != 1.0f) {
    if (audioFrame->num_channels_ == 1) {
      // Emulate stereo mode since panning is active.
      // The mono signal is copied to both left and right channels here.
      AudioFrameOperations::MonoToStereo(audioFrame);
    }
    // For true stereo mode (when we are receiving a stereo signal), no
    // action is needed.

    // Do the panning operation (the audio frame contains stereo at this
    // stage)
    AudioFrameOperations::Scale(left_pan, right_pan, *audioFrame);
  }

  // Mix decoded PCM output with file if file mixing is enabled
  if (state.output_file_playing) {
    MixAudioWithFile(*audioFrame, audioFrame->sample_rate_hz_);
    muted = false;  // We may have added non-zero samples.
  }

  // External media
  if (_outputExternalMedia) {
    rtc::CritScope cs(&_callbackCritSect);
    const bool isStereo = (audioFrame->num_channels_ == 2);
    if (_outputExternalMediaCallbackPtr) {
      _outputExternalMediaCallbackPtr->Process(
          _channelId, kPlaybackPerChannel, (int16_t*)audioFrame->data_,
          audioFrame->samples_per_channel_, audioFrame->sample_rate_hz_,
          isStereo);
    }
  }

  // Record playout if enabled
  {
    rtc::CritScope cs(&_fileCritSect);

    if (_outputFileRecording && output_file_recorder_) {
      output_file_recorder_->RecordAudioToFile(*audioFrame);
    }
  }

  // Measure audio level (0-9)
  // TODO(henrik.lundin) Use the |muted| information here too.
  _outputAudioLevel.ComputeLevel(*audioFrame);

  if (capture_start_rtp_time_stamp_ < 0 && audioFrame->timestamp_ != 0) {
    // The first frame with a valid rtp timestamp.
    capture_start_rtp_time_stamp_ = audioFrame->timestamp_;
  }

  if (capture_start_rtp_time_stamp_ >= 0) {
    // audioFrame.timestamp_ should be valid from now on.

    // Compute elapsed time.
    int64_t unwrap_timestamp =
        rtp_ts_wraparound_handler_->Unwrap(audioFrame->timestamp_);
    audioFrame->elapsed_time_ms_ =
        (unwrap_timestamp - capture_start_rtp_time_stamp_) /
        (GetRtpTimestampRateHz() / 1000);

    {
      rtc::CritScope lock(&ts_stats_lock_);
      // Compute ntp time.
      audioFrame->ntp_time_ms_ =
          ntp_estimator_.Estimate(audioFrame->timestamp_);
      // |ntp_time_ms_| won't be valid until at least 2 RTCP SRs are received.
      if (audioFrame->ntp_time_ms_ > 0) {
        // Compute |capture_start_ntp_time_ms_| so that
        // |capture_start_ntp_time_ms_| + |elapsed_time_ms_| == |ntp_time_ms_|
        capture_start_ntp_time_ms_ =
            audioFrame->ntp_time_ms_ - audioFrame->elapsed_time_ms_;
      }
    }
  }

  return muted ? MixerParticipant::AudioFrameInfo::kMuted
               : MixerParticipant::AudioFrameInfo::kNormal;
}
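// Translates the MixerParticipant result of GetAudioFrameWithMuted() into the
// equivalent AudioMixer::Source::AudioFrameInfo value.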
AudioMixer::Source::AudioFrameInfo Channel::GetAudioFrameWithInfo(
    int sample_rate_hz,
    AudioFrame* audio_frame) {
  audio_frame->sample_rate_hz_ = sample_rate_hz;

  const auto frame_info = GetAudioFrameWithMuted(-1, audio_frame);

  using FrameInfo = AudioMixer::Source::AudioFrameInfo;
  FrameInfo new_audio_frame_info = FrameInfo::kError;
  switch (frame_info) {
    case MixerParticipant::AudioFrameInfo::kNormal:
      new_audio_frame_info = FrameInfo::kNormal;
      break;
    case MixerParticipant::AudioFrameInfo::kMuted:
      new_audio_frame_info = FrameInfo::kMuted;
      break;
    case MixerParticipant::AudioFrameInfo::kError:
      new_audio_frame_info = FrameInfo::kError;
      break;
  }
  return new_audio_frame_info;
}

int32_t Channel::NeededFrequency(int32_t id) const {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::NeededFrequency(id=%d)", id);

  int highestNeeded = 0;

  // Determine highest needed receive frequency
  int32_t receiveFrequency = audio_coding_->ReceiveFrequency();

  // Return the bigger of playout and receive frequency in the ACM.
  if (audio_coding_->PlayoutFrequency() > receiveFrequency) {
    highestNeeded = audio_coding_->PlayoutFrequency();
  } else {
    highestNeeded = receiveFrequency;
  }

  // Special case, if we're playing a file on the playout side
  // we take that frequency into consideration as well
  // This is not needed on sending side, since the codec will
  // limit the spectrum anyway.
  if (channel_state_.Get().output_file_playing) {
    rtc::CritScope cs(&_fileCritSect);
    if (output_file_player_) {
      if (output_file_player_->Frequency() > highestNeeded) {
        highestNeeded = output_file_player_->Frequency();
      }
    }
  }

  return (highestNeeded);
}

int32_t Channel::CreateChannel(
    Channel*& channel,
    int32_t channelId,
    uint32_t instanceId,
    const VoEBase::ChannelConfig& config) {
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId, channelId),
               "Channel::CreateChannel(channelId=%d, instanceId=%d)", channelId,
               instanceId);

  channel = new Channel(channelId, instanceId, config);
  if (channel == NULL) {
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId, channelId),
                 "Channel::CreateChannel() unable to allocate memory for"
                 " channel");
    return -1;
  }
  return 0;
}

void Channel::PlayNotification(int32_t id, uint32_t durationMs) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::PlayNotification(id=%d, durationMs=%d)", id,
               durationMs);

  // Not implemented yet.
}

void Channel::RecordNotification(int32_t id, uint32_t durationMs) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RecordNotification(id=%d, durationMs=%d)", id,
               durationMs);

  // Not implemented yet.
}

void Channel::PlayFileEnded(int32_t id) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::PlayFileEnded(id=%d)", id);

  if (id == _inputFilePlayerId) {
    channel_state_.SetInputFilePlaying(false);
    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::PlayFileEnded() => input file player module is"
                 " shutdown");
  } else if (id == _outputFilePlayerId) {
    channel_state_.SetOutputFilePlaying(false);
    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::PlayFileEnded() => output file player module is"
                 " shutdown");
  }
}

void Channel::RecordFileEnded(int32_t id) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RecordFileEnded(id=%d)", id);

  assert(id == _outputFileRecorderId);

  rtc::CritScope cs(&_fileCritSect);

  _outputFileRecording = false;
  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RecordFileEnded() => output file recorder module is"
               " shutdown");
}
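// Note: the proxy members are created here and handed to the RTP/RTCP module
// configuration below; the objects they forward to are attached later through
// their respective setters.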
Channel::Channel(int32_t channelId,
                 uint32_t instanceId,
                 const VoEBase::ChannelConfig& config)
    : _instanceId(instanceId),
      _channelId(channelId),
      event_log_proxy_(new RtcEventLogProxy()),
      rtcp_rtt_stats_proxy_(new RtcpRttStatsProxy()),
      rtp_header_parser_(RtpHeaderParser::Create()),
      rtp_payload_registry_(new RTPPayloadRegistry()),
      rtp_receive_statistics_(
          ReceiveStatistics::Create(Clock::GetRealTimeClock())),
      rtp_receiver_(
          RtpReceiver::CreateAudioReceiver(Clock::GetRealTimeClock(),
                                           this,
                                           this,
                                           rtp_payload_registry_.get())),
      telephone_event_handler_(rtp_receiver_->GetTelephoneEventHandler()),
      _outputAudioLevel(),
      _externalTransport(false),
      // Avoid conflict with other channels by adding 1024 - 1026,
      // won't use as much as 1024 channels.
      _inputFilePlayerId(VoEModuleId(instanceId, channelId) + 1024),
      _outputFilePlayerId(VoEModuleId(instanceId, channelId) + 1025),
      _outputFileRecorderId(VoEModuleId(instanceId, channelId) + 1026),
      _outputFileRecording(false),
      _outputExternalMedia(false),
      _inputExternalMediaCallbackPtr(NULL),
      _outputExternalMediaCallbackPtr(NULL),
      _timeStamp(0),  // This is just an offset, RTP module will add its own
                      // random offset
      ntp_estimator_(Clock::GetRealTimeClock()),
      playout_timestamp_rtp_(0),
      playout_timestamp_rtcp_(0),
      playout_delay_ms_(0),
      _numberOfDiscardedPackets(0),
      send_sequence_number_(0),
      rtp_ts_wraparound_handler_(new rtc::TimestampWrapAroundHandler()),
      capture_start_rtp_time_stamp_(-1),
      capture_start_ntp_time_ms_(-1),
      _engineStatisticsPtr(NULL),
      _outputMixerPtr(NULL),
      _transmitMixerPtr(NULL),
      _moduleProcessThreadPtr(NULL),
      _audioDeviceModulePtr(NULL),
      _voiceEngineObserverPtr(NULL),
      _callbackCritSectPtr(NULL),
      _transportPtr(NULL),
      _sendFrameType(0),
      _externalMixing(false),
      _mixFileWithMicrophone(false),
      input_mute_(false),
      previous_frame_muted_(false),
      _panLeft(1.0f),
      _panRight(1.0f),
      _outputGain(1.0f),
      _lastLocalTimeStamp(0),
      _lastPayloadType(0),
      _includeAudioLevelIndication(false),
      _outputSpeechType(AudioFrame::kNormalSpeech),
      restored_packet_in_use_(false),
      rtcp_observer_(new VoERtcpObserver(this)),
      associate_send_channel_(ChannelOwner(nullptr)),
      pacing_enabled_(config.enable_voice_pacing),
      feedback_observer_proxy_(new TransportFeedbackProxy()),
      seq_num_allocator_proxy_(new TransportSequenceNumberProxy()),
      rtp_packet_sender_proxy_(new RtpPacketSenderProxy()),
      retransmission_rate_limiter_(new RateLimiter(Clock::GetRealTimeClock(),
                                                   kMaxRetransmissionWindowMs)),
      decoder_factory_(config.acm_config.decoder_factory),
      // Bitrate smoother can be initialized with arbitrary time constant
      // (0 used here). The actual time constant will be set in SetBitRate.
      bitrate_smoother_(0, Clock::GetRealTimeClock()) {
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::Channel() - ctor");
  AudioCodingModule::Config acm_config(config.acm_config);
  acm_config.id = VoEModuleId(instanceId, channelId);
  acm_config.neteq_config.enable_muted_state = true;
  audio_coding_.reset(AudioCodingModule::Create(acm_config));

  _outputAudioLevel.Clear();

  RtpRtcp::Configuration configuration;
  configuration.audio = true;
  configuration.outgoing_transport = this;
  configuration.overhead_observer = this;
  configuration.receive_statistics = rtp_receive_statistics_.get();
  configuration.bandwidth_callback = rtcp_observer_.get();
  if (pacing_enabled_) {
    configuration.paced_sender = rtp_packet_sender_proxy_.get();
    configuration.transport_sequence_number_allocator =
        seq_num_allocator_proxy_.get();
    configuration.transport_feedback_callback = feedback_observer_proxy_.get();
  }
  configuration.event_log = &(*event_log_proxy_);
  configuration.rtt_stats = &(*rtcp_rtt_stats_proxy_);
  configuration.retransmission_rate_limiter =
      retransmission_rate_limiter_.get();

  _rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));
  _rtpRtcpModule->SetSendingMediaStatus(false);

  statistics_proxy_.reset(new StatisticsProxy(_rtpRtcpModule->SSRC()));
  rtp_receive_statistics_->RegisterRtcpStatisticsCallback(
      statistics_proxy_.get());
}
Channel::~Channel() {
  rtp_receive_statistics_->RegisterRtcpStatisticsCallback(NULL);
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::~Channel() - dtor");

  if (_outputExternalMedia) {
    DeRegisterExternalMediaProcessing(kPlaybackPerChannel);
  }
  if (channel_state_.Get().input_external_media) {
    DeRegisterExternalMediaProcessing(kRecordingPerChannel);
  }
  StopSend();
  StopPlayout();

  {
    rtc::CritScope cs(&_fileCritSect);
    if (input_file_player_) {
      input_file_player_->RegisterModuleFileCallback(NULL);
      input_file_player_->StopPlayingFile();
    }
    if (output_file_player_) {
      output_file_player_->RegisterModuleFileCallback(NULL);
      output_file_player_->StopPlayingFile();
    }
    if (output_file_recorder_) {
      output_file_recorder_->RegisterModuleFileCallback(NULL);
      output_file_recorder_->StopRecording();
    }
  }

  // The order to safely shutdown modules in a channel is:
  // 1. De-register callbacks in modules
  // 2. De-register modules in process thread
  // 3. Destroy modules
  if (audio_coding_->RegisterTransportCallback(NULL) == -1) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "~Channel() failed to de-register transport callback"
                 " (Audio coding module)");
  }
  if (audio_coding_->RegisterVADCallback(NULL) == -1) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                 "~Channel() failed to de-register VAD callback"
                 " (Audio coding module)");
  }
  // De-register modules in process thread
  _moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get());

  // End of modules shutdown
}
int32_t Channel::Init() {
|
|
|
|
|
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
|
|
|
|
|
"Channel::Init()");
|
2014-03-18 10:32:33 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
  channel_state_.Reset();

  // --- Initial sanity

  if ((_engineStatisticsPtr == NULL) || (_moduleProcessThreadPtr == NULL)) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "Channel::Init() must call SetEngineInformation() first");
    return -1;
  }

  // --- Add modules to process thread (for periodic scheduling)
  _moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get());

  // --- ACM initialization
  if (audio_coding_->InitializeReceiver() == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "Channel::Init() unable to initialize the ACM - 1");
    return -1;
  }

  // --- RTP/RTCP module initialization

  // Ensure that RTCP is enabled by default for the created channel.
  // Note that the module will keep generating RTCP until it is explicitly
  // disabled by the user.
  // After StopListen (when no sockets exist), RTCP packets will no longer
  // be transmitted since the Transport object will then be invalid.
  telephone_event_handler_->SetTelephoneEventForwardToDecoder(true);
  // RTCP is enabled by default.
  _rtpRtcpModule->SetRTCPStatus(RtcpMode::kCompound);
  // --- Register all permanent callbacks
  const bool fail = (audio_coding_->RegisterTransportCallback(this) == -1) ||
                    (audio_coding_->RegisterVADCallback(this) == -1);

  if (fail) {
    _engineStatisticsPtr->SetLastError(
        VE_CANNOT_INIT_CHANNEL, kTraceError,
        "Channel::Init() callbacks not registered");
    return -1;
  }

  // --- Register all supported codecs to the receiving side of the
  // RTP/RTCP module
  CodecInst codec;
  const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();

  for (int idx = 0; idx < nSupportedCodecs; idx++) {
    // Open up the RTP/RTCP receiver for all supported codecs
    if ((audio_coding_->Codec(idx, &codec) == -1) ||
        (rtp_receiver_->RegisterReceivePayload(codec) == -1)) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::Init() unable to register %s "
                   "(%d/%d/%" PRIuS "/%d) to RTP/RTCP receiver",
                   codec.plname, codec.pltype, codec.plfreq, codec.channels,
                   codec.rate);
    } else {
      WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::Init() %s (%d/%d/%" PRIuS
                   "/%d) has been "
                   "added to the RTP/RTCP receiver",
                   codec.plname, codec.pltype, codec.plfreq, codec.channels,
                   codec.rate);
    }

    // Ensure that PCMU is used as default codec on the sending side
    if (!STR_CASE_CMP(codec.plname, "PCMU") && (codec.channels == 1)) {
      SetSendCodec(codec);
    }

    // Register default PT for outband 'telephone-event'
    if (!STR_CASE_CMP(codec.plname, "telephone-event")) {
      if (_rtpRtcpModule->RegisterSendPayload(codec) == -1 ||
          !audio_coding_->RegisterReceiveCodec(codec.pltype,
                                               CodecInstToSdp(codec))) {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                     "Channel::Init() failed to register outband "
                     "'telephone-event' (%d/%d) correctly",
                     codec.pltype, codec.plfreq);
      }
    }

    if (!STR_CASE_CMP(codec.plname, "CN")) {
      if (!codec_manager_.RegisterEncoder(codec) ||
          !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get()) ||
          !audio_coding_->RegisterReceiveCodec(codec.pltype,
                                               CodecInstToSdp(codec)) ||
          _rtpRtcpModule->RegisterSendPayload(codec) == -1) {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                     "Channel::Init() failed to register CN (%d/%d) "
                     "correctly - 1",
                     codec.pltype, codec.plfreq);
      }
    }
  }

  return 0;
}
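// Caches pointers to the engine-wide helpers (statistics, mixers, process
// thread, ADM, observer and callback lock). Init() checks that this has been
// called first, so it must run before Init().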
int32_t Channel::SetEngineInformation(Statistics& engineStatistics,
                                      OutputMixer& outputMixer,
                                      voe::TransmitMixer& transmitMixer,
                                      ProcessThread& moduleProcessThread,
                                      AudioDeviceModule& audioDeviceModule,
                                      VoiceEngineObserver* voiceEngineObserver,
                                      rtc::CriticalSection* callbackCritSect) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetEngineInformation()");
  _engineStatisticsPtr = &engineStatistics;
  _outputMixerPtr = &outputMixer;
  _transmitMixerPtr = &transmitMixer;
  _moduleProcessThreadPtr = &moduleProcessThread;
  _audioDeviceModulePtr = &audioDeviceModule;
  _voiceEngineObserverPtr = voiceEngineObserver;
  _callbackCritSectPtr = callbackCritSect;
  return 0;
}
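// Advances the local RTP timestamp by the number of samples in the most
// recently processed audio frame.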
int32_t Channel::UpdateLocalTimeStamp() {
  _timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_);
  return 0;
}
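// Installs (or clears, when passed nullptr) the sink that is handed the
// channel's decoded playout audio; guarded by the callback lock.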
void Channel::SetSink(std::unique_ptr<AudioSinkInterface> sink) {
  rtc::CritScope cs(&_callbackCritSect);
  audio_sink_ = std::move(sink);
}

const rtc::scoped_refptr<AudioDecoderFactory>&
Channel::GetAudioDecoderFactory() const {
  return decoder_factory_;
}
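// Starts playout on this channel: unless external mixing is used, the channel
// is added to the output mixer, and any active file playout is registered
// with the mixer as well.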
int32_t Channel::StartPlayout() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartPlayout()");
  if (channel_state_.Get().playing) {
    return 0;
  }

  if (!_externalMixing) {
    // Add participant as candidate for mixing.
    if (_outputMixerPtr->SetMixabilityStatus(*this, true) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
          "StartPlayout() failed to add participant to mixer");
      return -1;
    }
  }

  channel_state_.SetPlaying(true);
  if (RegisterFilePlayingToMixer() != 0)
    return -1;

  return 0;
}

int32_t Channel::StopPlayout() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StopPlayout()");
  if (!channel_state_.Get().playing) {
    return 0;
  }

  if (!_externalMixing) {
    // Remove participant as candidate for mixing.
    if (_outputMixerPtr->SetMixabilityStatus(*this, false) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
          "StopPlayout() failed to remove participant from mixer");
      return -1;
    }
  }

  channel_state_.SetPlaying(false);
  _outputAudioLevel.Clear();

  return 0;
}

int32_t Channel::StartSend() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartSend()");
  // Resume the previous sequence number which was reset by StopSend().
  // This needs to be done before |sending| is set to true.
  if (send_sequence_number_)
    SetInitSequenceNumber(send_sequence_number_);

  if (channel_state_.Get().sending) {
    return 0;
  }
  channel_state_.SetSending(true);

  _rtpRtcpModule->SetSendingMediaStatus(true);
  if (_rtpRtcpModule->SetSendingStatus(true) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceError,
        "StartSend() RTP/RTCP failed to start sending");
    _rtpRtcpModule->SetSendingMediaStatus(false);
    rtc::CritScope cs(&_callbackCritSect);
    channel_state_.SetSending(false);
    return -1;
  }

  return 0;
}
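// Stops sending on this channel. The outgoing sequence number is saved so a
// later StartSend() can resume from it, and clearing the sending status
// triggers an RTCP BYE.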
int32_t Channel::StopSend() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StopSend()");
  if (!channel_state_.Get().sending) {
    return 0;
  }
  channel_state_.SetSending(false);

  // Store the sequence number to be able to pick up the same sequence for
  // the next StartSend(). This is needed when restarting the device, otherwise
  // it might cause libSRTP to complain about packets being replayed.
  // TODO(xians): Remove this workaround after RtpRtcpModule's refactoring
  // CL is landed. See issue
  // https://code.google.com/p/webrtc/issues/detail?id=2111 .
  send_sequence_number_ = _rtpRtcpModule->SequenceNumber();

  // Reset sending SSRC and sequence number, and trigger direct transmission
  // of RTCP BYE.
  if (_rtpRtcpModule->SetSendingStatus(false) == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
        "StopSend() RTP/RTCP failed to stop sending");
  }
  _rtpRtcpModule->SetSendingMediaStatus(false);

  return 0;
}

void Channel::ResetDiscardedPacketCount() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::ResetDiscardedPacketCount()");
  _numberOfDiscardedPackets = 0;
}

int32_t Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RegisterVoiceEngineObserver()");
  rtc::CritScope cs(&_callbackCritSect);

  if (_voiceEngineObserverPtr) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_OPERATION, kTraceError,
        "RegisterVoiceEngineObserver() observer already enabled");
    return -1;
  }
  _voiceEngineObserverPtr = &observer;
  return 0;
}

int32_t Channel::DeRegisterVoiceEngineObserver() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::DeRegisterVoiceEngineObserver()");
  rtc::CritScope cs(&_callbackCritSect);

  if (!_voiceEngineObserverPtr) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_OPERATION, kTraceWarning,
        "DeRegisterVoiceEngineObserver() observer already disabled");
    return 0;
  }
  _voiceEngineObserverPtr = NULL;
  return 0;
}
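// Returns the currently configured send codec via the codec manager, or -1 if
// no send codec has been set.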
int32_t Channel::GetSendCodec(CodecInst& codec) {
  auto send_codec = codec_manager_.GetCodecInst();
  if (send_codec) {
    codec = *send_codec;
    return 0;
  }
  return -1;
}

int32_t Channel::GetRecCodec(CodecInst& codec) {
  return (audio_coding_->ReceiveCodec(&codec));
}

int32_t Channel::SetSendCodec(const CodecInst& codec) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetSendCodec()");

  if (!codec_manager_.RegisterEncoder(codec) ||
      !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                 "SetSendCodec() failed to register codec to ACM");
    return -1;
  }

  if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
    _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
    if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
      WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
                   "SetSendCodec() failed to register codec to"
                   " RTP/RTCP module");
      return -1;
    }
  }

  return 0;
}
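// Applies a new target send bitrate from bandwidth estimation: it is forwarded
// to the encoder, used to cap the retransmission rate limiter, and fed through
// the bitrate smoother to the encoder as the uplink bandwidth estimate.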
void Channel::SetBitRate(int bitrate_bps, int64_t probing_interval_ms) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetBitRate(bitrate_bps=%d)", bitrate_bps);
  audio_coding_->ModifyEncoder([&](std::unique_ptr<AudioEncoder>* encoder) {
    if (*encoder)
      (*encoder)->OnReceivedTargetAudioBitrate(bitrate_bps);
  });
  retransmission_rate_limiter_->SetMaxRate(bitrate_bps);

  // We give a smoothed bitrate allocation to the audio network adaptor as
  // the uplink bandwidth.
  // The probing spikes should not affect the bitrate smoother more than 25%.
  // To simplify the calculations we use a step response as input signal.
  // The step response of an exponential filter is
  //   u(t) = 1 - e^(-t / time_constant).
  // In order to limit the effect of a BWE spike within 25% of its value before
  // the next probing, we would choose a time constant that fulfills
  //   1 - e^(-probing_interval_ms / time_constant) < 0.25
  // Solving that inequality gives time_constant > ~3.5 * probing_interval_ms,
  // so 4 * probing_interval_ms is a good choice.
  bitrate_smoother_.SetTimeConstantMs(probing_interval_ms * 4);
  bitrate_smoother_.AddSample(bitrate_bps);
  audio_coding_->ModifyEncoder([&](std::unique_ptr<AudioEncoder>* encoder) {
    if (*encoder) {
      (*encoder)->OnReceivedUplinkBandwidth(
          static_cast<int>(*bitrate_smoother_.GetAverage()));
    }
  });
}
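// |fraction_lost| is the 8-bit fraction-lost value reported over RTCP, so
// dividing by 255 maps it to the [0, 1] packet-loss fraction expected by
// OnReceivedUplinkPacketLossFraction().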
void Channel::OnIncomingFractionLoss(int fraction_lost) {
  audio_coding_->ModifyEncoder([&](std::unique_ptr<AudioEncoder>* encoder) {
    if (*encoder)
      (*encoder)->OnReceivedUplinkPacketLossFraction(fraction_lost / 255.0f);
  });
}

int32_t Channel::SetVADStatus(bool enableVAD,
                              ACMVADMode mode,
                              bool disableDTX) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetVADStatus(mode=%d)", mode);
  RTC_DCHECK(!(disableDTX && enableVAD));  // disableDTX mode is deprecated.
  if (!codec_manager_.SetVAD(enableVAD, mode) ||
      !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
    _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR,
                                       kTraceError,
                                       "SetVADStatus() failed to set VAD");
    return -1;
  }
  return 0;
}

int32_t Channel::GetVADStatus(bool& enabledVAD,
                              ACMVADMode& mode,
                              bool& disabledDTX) {
  const auto* params = codec_manager_.GetStackParams();
  enabledVAD = params->use_cng;
  mode = params->vad_mode;
  disabledDTX = !params->use_cng;
  return 0;
}
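// Binds a receive payload type to a codec in both the RTP receiver and the
// ACM. A pltype of -1 instead de-registers the codec from both.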
int32_t Channel::SetRecPayloadType(const CodecInst& codec) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetRecPayloadType()");

  if (channel_state_.Get().playing) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_PLAYING, kTraceError,
        "SetRecPayloadType() unable to set PT while playing");
    return -1;
  }

  if (codec.pltype == -1) {
    // De-register the selected codec (RTP/RTCP module and ACM)

    int8_t pltype(-1);
    CodecInst rxCodec = codec;

    // Get payload type for the given codec
    rtp_payload_registry_->ReceivePayloadType(rxCodec, &pltype);
    rxCodec.pltype = pltype;

    if (rtp_receiver_->DeRegisterReceivePayload(pltype) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_RTP_RTCP_MODULE_ERROR, kTraceError,
          "SetRecPayloadType() RTP/RTCP-module deregistration "
          "failed");
      return -1;
    }
    if (audio_coding_->UnregisterReceiveCodec(rxCodec.pltype) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
          "SetRecPayloadType() ACM deregistration failed - 1");
      return -1;
    }
    return 0;
  }

  if (rtp_receiver_->RegisterReceivePayload(codec) != 0) {
    // First attempt to register failed => de-register and try again
    // TODO(kwiberg): Retrying is probably not necessary, since
    // AcmReceiver::AddCodec also retries.
    rtp_receiver_->DeRegisterReceivePayload(codec.pltype);
    if (rtp_receiver_->RegisterReceivePayload(codec) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_RTP_RTCP_MODULE_ERROR, kTraceError,
          "SetRecPayloadType() RTP/RTCP-module registration failed");
      return -1;
    }
  }
  if (!audio_coding_->RegisterReceiveCodec(codec.pltype,
                                           CodecInstToSdp(codec))) {
    audio_coding_->UnregisterReceiveCodec(codec.pltype);
    if (!audio_coding_->RegisterReceiveCodec(codec.pltype,
                                             CodecInstToSdp(codec))) {
      _engineStatisticsPtr->SetLastError(
          VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
          "SetRecPayloadType() ACM registration failed - 1");
      return -1;
    }
  }
  return 0;
}

int32_t Channel::GetRecPayloadType(CodecInst& codec) {
  int8_t payloadType(-1);
  if (rtp_payload_registry_->ReceivePayloadType(codec, &payloadType) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
        "GetRecPayloadType() failed to retrieve RX payload type");
    return -1;
  }
  codec.pltype = payloadType;
  return 0;
}
int32_t Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetSendCNPayloadType()");

  CodecInst codec;
  int32_t samplingFreqHz(-1);
  const size_t kMono = 1;
  if (frequency == kFreq32000Hz)
    samplingFreqHz = 32000;
  else if (frequency == kFreq16000Hz)
    samplingFreqHz = 16000;

  if (audio_coding_->Codec("CN", &codec, samplingFreqHz, kMono) == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "SetSendCNPayloadType() failed to retrieve default CN codec "
        "settings");
    return -1;
  }

  // Modify the payload type (must be set to dynamic range)
  codec.pltype = type;

  if (!codec_manager_.RegisterEncoder(codec) ||
      !codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "SetSendCNPayloadType() failed to register CN to ACM");
    return -1;
  }

  if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
    _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
    if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_RTP_RTCP_MODULE_ERROR, kTraceError,
          "SetSendCNPayloadType() failed to register CN to RTP/RTCP "
          "module");
      return -1;
    }
  }
  return 0;
}

int Channel::SetOpusMaxPlaybackRate(int frequency_hz) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetOpusMaxPlaybackRate()");

  if (audio_coding_->SetOpusMaxPlaybackRate(frequency_hz) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "SetOpusMaxPlaybackRate() failed to set maximum playback rate");
    return -1;
  }
  return 0;
}

int Channel::SetOpusDtx(bool enable_dtx) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::SetOpusDtx(%d)", enable_dtx);
  int ret = enable_dtx ? audio_coding_->EnableOpusDtx()
                       : audio_coding_->DisableOpusDtx();
  if (ret != 0) {
    _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR,
                                       kTraceError, "SetOpusDtx() failed");
    return -1;
  }
  return 0;
}

int Channel::GetOpusDtx(bool* enabled) {
  int success = -1;
  audio_coding_->QueryEncoder([&](AudioEncoder const* encoder) {
    if (encoder) {
      *enabled = encoder->GetDtx();
      success = 0;
    }
  });
  return success;
}
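// Forwards the audio network adaptor config to the current encoder, if any.
// Returns false when there is no encoder or the encoder rejects the config.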
bool Channel::EnableAudioNetworkAdaptor(const std::string& config_string) {
  bool success = false;
  audio_coding_->ModifyEncoder([&](std::unique_ptr<AudioEncoder>* encoder) {
    if (*encoder) {
      success = (*encoder)->EnableAudioNetworkAdaptor(
          config_string, Clock::GetRealTimeClock());
    }
  });
  return success;
}

void Channel::DisableAudioNetworkAdaptor() {
  audio_coding_->ModifyEncoder([&](std::unique_ptr<AudioEncoder>* encoder) {
    if (*encoder)
      (*encoder)->DisableAudioNetworkAdaptor();
  });
}

void Channel::SetReceiverFrameLengthRange(int min_frame_length_ms,
                                          int max_frame_length_ms) {
  audio_coding_->ModifyEncoder([&](std::unique_ptr<AudioEncoder>* encoder) {
    if (*encoder) {
      (*encoder)->SetReceiverFrameLengthRange(min_frame_length_ms,
                                              max_frame_length_ms);
    }
  });
}

int32_t Channel::RegisterExternalTransport(Transport* transport) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::RegisterExternalTransport()");

  rtc::CritScope cs(&_callbackCritSect);
  if (_externalTransport) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_OPERATION, kTraceError,
        "RegisterExternalTransport() external transport already enabled");
    return -1;
  }
  _externalTransport = true;
  _transportPtr = transport;
  return 0;
}

int32_t Channel::DeRegisterExternalTransport() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::DeRegisterExternalTransport()");

  rtc::CritScope cs(&_callbackCritSect);
  if (_transportPtr) {
    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                 "DeRegisterExternalTransport() external transport is now "
                 "disabled");
  } else {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_OPERATION, kTraceWarning,
        "DeRegisterExternalTransport() external transport already "
        "disabled");
  }
  _externalTransport = false;
  _transportPtr = NULL;
  return 0;
}
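// Entry point for incoming RTP packets: parses the header, updates the
// playout timestamp and receive statistics, then hands the packet on to
// ReceivePacket().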
int32_t Channel::ReceivedRTPPacket(const uint8_t* received_packet,
                                   size_t length,
                                   const PacketTime& packet_time) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::ReceivedRTPPacket()");

  // Store playout timestamp for the received RTP packet
  UpdatePlayoutTimestamp(false);

  RTPHeader header;
  if (!rtp_header_parser_->Parse(received_packet, length, &header)) {
    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
                 "Incoming packet: invalid RTP header");
    return -1;
  }
  header.payload_type_frequency =
      rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
  if (header.payload_type_frequency < 0)
    return -1;
  bool in_order = IsPacketInOrder(header);
  rtp_receive_statistics_->IncomingPacket(
      header, length, IsPacketRetransmitted(header, in_order));
  rtp_payload_registry_->SetIncomingPayloadType(header);

  return ReceivePacket(received_packet, length, header, in_order) ? 0 : -1;
}

bool Channel::ReceivePacket(const uint8_t* packet,
                            size_t packet_length,
                            const RTPHeader& header,
                            bool in_order) {
  if (rtp_payload_registry_->IsRtx(header)) {
    return HandleRtxPacket(packet, packet_length, header);
  }
  const uint8_t* payload = packet + header.headerLength;
  assert(packet_length >= header.headerLength);
  size_t payload_length = packet_length - header.headerLength;
  PayloadUnion payload_specific;
  if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
                                                  &payload_specific)) {
    return false;
  }
  return rtp_receiver_->IncomingRtpPacket(header, payload, payload_length,
                                          payload_specific, in_order);
}
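// Recovers the original packet from an RTX retransmission and feeds it back
// through OnRecoveredPacket(). |restored_packet_in_use_| guards against
// recursively nested RTX headers.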
bool Channel::HandleRtxPacket(const uint8_t* packet,
                              size_t packet_length,
                              const RTPHeader& header) {
  if (!rtp_payload_registry_->IsRtx(header))
    return false;

  // Remove the RTX header and parse the original RTP header.
  if (packet_length < header.headerLength)
    return false;
  if (packet_length > kVoiceEngineMaxIpPacketSizeBytes)
    return false;
  if (restored_packet_in_use_) {
    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
                 "Multiple RTX headers detected, dropping packet");
    return false;
  }
  if (!rtp_payload_registry_->RestoreOriginalPacket(
          restored_packet_, packet, &packet_length, rtp_receiver_->SSRC(),
          header)) {
    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
                 "Incoming RTX packet: invalid RTP header");
    return false;
  }
  restored_packet_in_use_ = true;
  bool ret = OnRecoveredPacket(restored_packet_, packet_length);
  restored_packet_in_use_ = false;
  return ret;
}

bool Channel::IsPacketInOrder(const RTPHeader& header) const {
  StreamStatistician* statistician =
      rtp_receive_statistics_->GetStatistician(header.ssrc);
  if (!statistician)
    return false;
  return statistician->IsPacketInOrder(header.sequenceNumber);
}

bool Channel::IsPacketRetransmitted(const RTPHeader& header,
                                    bool in_order) const {
  // Retransmissions are handled separately if RTX is enabled.
  if (rtp_payload_registry_->RtxEnabled())
    return false;
  StreamStatistician* statistician =
      rtp_receive_statistics_->GetStatistician(header.ssrc);
  if (!statistician)
    return false;
  // Check if this is a retransmission.
  int64_t min_rtt = 0;
  _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), NULL, NULL, &min_rtt, NULL);
  return !in_order && statistician->IsRetransmitOfOldPacket(header, min_rtt);
}
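// Entry point for incoming RTCP packets: after parsing, the measured RTT is
// used to size the retransmission (NACK) window, inform the encoder, and
// update the remote NTP estimator.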
int32_t Channel::ReceivedRTCPPacket(const uint8_t* data, size_t length) {
  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::ReceivedRTCPPacket()");
  // Store playout timestamp for the received RTCP packet
  UpdatePlayoutTimestamp(true);

  // Deliver RTCP packet to RTP/RTCP module for parsing
  if (_rtpRtcpModule->IncomingRtcpPacket(data, length) == -1) {
    _engineStatisticsPtr->SetLastError(
        VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
        "Channel::ReceivedRTCPPacket() RTCP packet is invalid");
  }

  int64_t rtt = GetRTT(true);
  if (rtt == 0) {
    // Waiting for valid RTT.
    return 0;
  }

  int64_t nack_window_ms = rtt;
  if (nack_window_ms < kMinRetransmissionWindowMs) {
    nack_window_ms = kMinRetransmissionWindowMs;
  } else if (nack_window_ms > kMaxRetransmissionWindowMs) {
    nack_window_ms = kMaxRetransmissionWindowMs;
  }
  retransmission_rate_limiter_->SetWindowSize(nack_window_ms);

  // Invoke the audio encoder's OnReceivedRtt().
  audio_coding_->ModifyEncoder([&](std::unique_ptr<AudioEncoder>* encoder) {
    if (*encoder)
      (*encoder)->OnReceivedRtt(rtt);
  });

  uint32_t ntp_secs = 0;
  uint32_t ntp_frac = 0;
  uint32_t rtp_timestamp = 0;
  if (0 !=
      _rtpRtcpModule->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL,
                                &rtp_timestamp)) {
    // Waiting for RTCP.
    return 0;
  }

  {
    rtc::CritScope lock(&ts_stats_lock_);
    ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp);
  }
  return 0;
}
int Channel::StartPlayingFileLocally(const char* fileName,
                                     bool loop,
                                     FileFormats format,
                                     int startPosition,
                                     float volumeScaling,
                                     int stopPosition,
                                     const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d,"
               " format=%d, volumeScaling=%5.3f, startPosition=%d, "
               "stopPosition=%d)",
               fileName, loop, format, volumeScaling, startPosition,
               stopPosition);

  if (channel_state_.Get().output_file_playing) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_PLAYING, kTraceError,
        "StartPlayingFileLocally() is already playing");
    return -1;
  }

  {
    rtc::CritScope cs(&_fileCritSect);

    if (output_file_player_) {
      output_file_player_->RegisterModuleFileCallback(NULL);
      output_file_player_.reset();
    }

    output_file_player_ = FilePlayer::CreateFilePlayer(
        _outputFilePlayerId, (const FileFormats)format);

    if (!output_file_player_) {
      _engineStatisticsPtr->SetLastError(
          VE_INVALID_ARGUMENT, kTraceError,
          "StartPlayingFileLocally() filePlayer format is not correct");
      return -1;
    }

    const uint32_t notificationTime(0);

    if (output_file_player_->StartPlayingFile(
            fileName, loop, startPosition, volumeScaling, notificationTime,
            stopPosition, (const CodecInst*)codecInst) != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_BAD_FILE, kTraceError,
          "StartPlayingFile() failed to start file playout");
      output_file_player_->StopPlayingFile();
      output_file_player_.reset();
      return -1;
    }
    output_file_player_->RegisterModuleFileCallback(this);
    channel_state_.SetOutputFilePlaying(true);
  }

  if (RegisterFilePlayingToMixer() != 0)
    return -1;

  return 0;
}
int Channel::StartPlayingFileLocally(InStream* stream,
                                     FileFormats format,
                                     int startPosition,
                                     float volumeScaling,
                                     int stopPosition,
                                     const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartPlayingFileLocally(format=%d,"
               " volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
               format, volumeScaling, startPosition, stopPosition);

  if (stream == NULL) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_FILE, kTraceError,
        "StartPlayingFileLocally() NULL as input stream");
    return -1;
  }

  if (channel_state_.Get().output_file_playing) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_PLAYING, kTraceError,
        "StartPlayingFileLocally() is already playing");
    return -1;
  }

  {
    rtc::CritScope cs(&_fileCritSect);

    // Destroy the old instance
    if (output_file_player_) {
      output_file_player_->RegisterModuleFileCallback(NULL);
      output_file_player_.reset();
    }

    // Create the instance
    output_file_player_ = FilePlayer::CreateFilePlayer(
        _outputFilePlayerId, (const FileFormats)format);

    if (!output_file_player_) {
      _engineStatisticsPtr->SetLastError(
          VE_INVALID_ARGUMENT, kTraceError,
          "StartPlayingFileLocally() filePlayer format is not correct");
      return -1;
    }

    const uint32_t notificationTime(0);

    if (output_file_player_->StartPlayingFile(stream, startPosition,
                                              volumeScaling, notificationTime,
                                              stopPosition, codecInst) != 0) {
      _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
                                         "StartPlayingFile() failed to "
                                         "start file playout");
      output_file_player_->StopPlayingFile();
      output_file_player_.reset();
      return -1;
    }
    output_file_player_->RegisterModuleFileCallback(this);
    channel_state_.SetOutputFilePlaying(true);
  }

  if (RegisterFilePlayingToMixer() != 0)
    return -1;

  return 0;
}
int Channel::StopPlayingFileLocally() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StopPlayingFileLocally()");

  if (!channel_state_.Get().output_file_playing) {
    return 0;
  }

  {
    rtc::CritScope cs(&_fileCritSect);

    if (output_file_player_->StopPlayingFile() != 0) {
      _engineStatisticsPtr->SetLastError(
          VE_STOP_RECORDING_FAILED, kTraceError,
          "StopPlayingFile() could not stop playing");
      return -1;
    }
    output_file_player_->RegisterModuleFileCallback(NULL);
    output_file_player_.reset();
    channel_state_.SetOutputFilePlaying(false);
  }
  // _fileCritSect cannot be taken while calling
  // SetAnonymousMixabilityStatus. Refer to comments in
  // StartPlayingFileLocally(const char* ...) for more details.
  if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, false) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
        "StopPlayingFile() failed to stop participant from playing as "
        "file in the mixer");
    return -1;
  }

  return 0;
}

int Channel::IsPlayingFileLocally() const {
  return channel_state_.Get().output_file_playing;
}

int Channel::RegisterFilePlayingToMixer() {
  // Return success for not registering for file playing to mixer if:
  // 1. playing file before playout is started on that channel.
  // 2. starting playout without file playing on that channel.
  if (!channel_state_.Get().playing ||
      !channel_state_.Get().output_file_playing) {
    return 0;
  }

  // |_fileCritSect| cannot be taken while calling
  // SetAnonymousMixabilityStatus() since as soon as the participant is added
  // frames can be pulled by the mixer. Since the frames are generated from
  // the file, _fileCritSect will be taken. This would result in a deadlock.
  if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0) {
    channel_state_.SetOutputFilePlaying(false);
    rtc::CritScope cs(&_fileCritSect);
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
        "StartPlayingFile() failed to add participant as file to mixer");
    output_file_player_->StopPlayingFile();
    output_file_player_.reset();
    return -1;
  }

  return 0;
}
int Channel::StartPlayingFileAsMicrophone(const char* fileName,
                                          bool loop,
                                          FileFormats format,
                                          int startPosition,
                                          float volumeScaling,
                                          int stopPosition,
                                          const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, "
               "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, "
               "stopPosition=%d)",
               fileName, loop, format, volumeScaling, startPosition,
               stopPosition);

  rtc::CritScope cs(&_fileCritSect);

  if (channel_state_.Get().input_file_playing) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_PLAYING, kTraceWarning,
        "StartPlayingFileAsMicrophone() filePlayer is playing");
    return 0;
  }

  // Destroy the old instance
  if (input_file_player_) {
    input_file_player_->RegisterModuleFileCallback(NULL);
    input_file_player_.reset();
  }

  // Create the instance
  input_file_player_ = FilePlayer::CreateFilePlayer(_inputFilePlayerId,
                                                    (const FileFormats)format);

  if (!input_file_player_) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "StartPlayingFileAsMicrophone() filePlayer format is not correct");
    return -1;
  }

  const uint32_t notificationTime(0);

  if (input_file_player_->StartPlayingFile(
          fileName, loop, startPosition, volumeScaling, notificationTime,
          stopPosition, (const CodecInst*)codecInst) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_FILE, kTraceError,
        "StartPlayingFile() failed to start file playout");
    input_file_player_->StopPlayingFile();
    input_file_player_.reset();
    return -1;
  }
  input_file_player_->RegisterModuleFileCallback(this);
  channel_state_.SetInputFilePlaying(true);

  return 0;
}
int Channel::StartPlayingFileAsMicrophone(InStream* stream,
                                          FileFormats format,
                                          int startPosition,
                                          float volumeScaling,
                                          int stopPosition,
                                          const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartPlayingFileAsMicrophone(format=%d, "
               "volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
               format, volumeScaling, startPosition, stopPosition);

  if (stream == NULL) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_FILE, kTraceError,
        "StartPlayingFileAsMicrophone() NULL as input stream");
    return -1;
  }

  rtc::CritScope cs(&_fileCritSect);

  if (channel_state_.Get().input_file_playing) {
    _engineStatisticsPtr->SetLastError(
        VE_ALREADY_PLAYING, kTraceWarning,
        "StartPlayingFileAsMicrophone() is playing");
    return 0;
  }

  // Destroy the old instance
  if (input_file_player_) {
    input_file_player_->RegisterModuleFileCallback(NULL);
    input_file_player_.reset();
  }

  // Create the instance
  input_file_player_ = FilePlayer::CreateFilePlayer(_inputFilePlayerId,
                                                    (const FileFormats)format);

  if (!input_file_player_) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "StartPlayingInputFile() filePlayer format is not correct");
    return -1;
  }

  const uint32_t notificationTime(0);

  if (input_file_player_->StartPlayingFile(stream, startPosition, volumeScaling,
                                           notificationTime, stopPosition,
                                           codecInst) != 0) {
    _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
                                       "StartPlayingFile() failed to start "
                                       "file playout");
    input_file_player_->StopPlayingFile();
    input_file_player_.reset();
    return -1;
  }

  input_file_player_->RegisterModuleFileCallback(this);
  channel_state_.SetInputFilePlaying(true);

  return 0;
}
int Channel::StopPlayingFileAsMicrophone() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StopPlayingFileAsMicrophone()");

  rtc::CritScope cs(&_fileCritSect);

  if (!channel_state_.Get().input_file_playing) {
    return 0;
  }

  if (input_file_player_->StopPlayingFile() != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_STOP_RECORDING_FAILED, kTraceError,
        "StopPlayingFile() could not stop playing");
    return -1;
  }
  input_file_player_->RegisterModuleFileCallback(NULL);
  input_file_player_.reset();
  channel_state_.SetInputFilePlaying(false);

  return 0;
}

int Channel::IsPlayingFileAsMicrophone() const {
  return channel_state_.Get().input_file_playing;
}
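// Records the channel's playout audio to a file. The container format follows
// the codec: L16/PCMU/PCMA are written as WAV, no codec means 16 kHz PCM, and
// anything else is written as a compressed file.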
int Channel::StartRecordingPlayout(const char* fileName,
                                   const CodecInst* codecInst) {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
               "Channel::StartRecordingPlayout(fileName=%s)", fileName);

  if (_outputFileRecording) {
    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
                 "StartRecordingPlayout() is already recording");
    return 0;
  }

  FileFormats format;
  const uint32_t notificationTime(0);  // Not supported in VoE
  CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};

  if ((codecInst != NULL) &&
      ((codecInst->channels < 1) || (codecInst->channels > 2))) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_ARGUMENT, kTraceError,
        "StartRecordingPlayout() invalid compression");
    return (-1);
  }
  if (codecInst == NULL) {
    format = kFileFormatPcm16kHzFile;
    codecInst = &dummyCodec;
  } else if ((STR_CASE_CMP(codecInst->plname, "L16") == 0) ||
             (STR_CASE_CMP(codecInst->plname, "PCMU") == 0) ||
             (STR_CASE_CMP(codecInst->plname, "PCMA") == 0)) {
    format = kFileFormatWavFile;
  } else {
    format = kFileFormatCompressedFile;
  }

  rtc::CritScope cs(&_fileCritSect);

  // Destroy the old instance
  if (output_file_recorder_) {
    output_file_recorder_->RegisterModuleFileCallback(NULL);
    output_file_recorder_.reset();
  }

  output_file_recorder_ = FileRecorder::CreateFileRecorder(
      _outputFileRecorderId, (const FileFormats)format);
  if (!output_file_recorder_) {
    _engineStatisticsPtr->SetLastError(
        VE_INVALID_ARGUMENT, kTraceError,
        "StartRecordingPlayout() fileRecorder format is not correct");
    return -1;
  }

  if (output_file_recorder_->StartRecordingAudioFile(
          fileName, (const CodecInst&)*codecInst, notificationTime) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_BAD_FILE, kTraceError,
        "StartRecordingAudioFile() failed to start file recording");
    output_file_recorder_->StopRecording();
    output_file_recorder_.reset();
    return -1;
  }
  output_file_recorder_->RegisterModuleFileCallback(this);
  _outputFileRecording = true;

  return 0;
}
int Channel::StartRecordingPlayout(OutStream* stream,
|
2016-01-28 05:22:45 -08:00
|
|
|
const CodecInst* codecInst) {
|
|
|
|
|
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
|
|
|
|
|
"Channel::StartRecordingPlayout()");
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
if (_outputFileRecording) {
|
|
|
|
|
WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
|
|
|
|
|
"StartRecordingPlayout() is already recording");
|
|
|
|
|
return 0;
|
|
|
|
|
}
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
FileFormats format;
|
|
|
|
|
const uint32_t notificationTime(0); // Not supported in VoE
|
|
|
|
|
CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
if (codecInst != NULL && codecInst->channels != 1) {
|
|
|
|
|
_engineStatisticsPtr->SetLastError(
|
|
|
|
|
VE_BAD_ARGUMENT, kTraceError,
|
|
|
|
|
"StartRecordingPlayout() invalid compression");
|
|
|
|
|
return (-1);
|
|
|
|
|
}
|
|
|
|
|
if (codecInst == NULL) {
|
|
|
|
|
format = kFileFormatPcm16kHzFile;
|
|
|
|
|
codecInst = &dummyCodec;
|
|
|
|
|
} else if ((STR_CASE_CMP(codecInst->plname, "L16") == 0) ||
|
|
|
|
|
(STR_CASE_CMP(codecInst->plname, "PCMU") == 0) ||
|
|
|
|
|
(STR_CASE_CMP(codecInst->plname, "PCMA") == 0)) {
|
|
|
|
|
format = kFileFormatWavFile;
|
|
|
|
|
} else {
|
|
|
|
|
format = kFileFormatCompressedFile;
|
|
|
|
|
}
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
rtc::CritScope cs(&_fileCritSect);
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
// Destroy the old instance
|
2016-08-17 07:31:12 -07:00
|
|
|
if (output_file_recorder_) {
|
|
|
|
|
output_file_recorder_->RegisterModuleFileCallback(NULL);
|
|
|
|
|
output_file_recorder_.reset();
|
2016-01-28 05:22:45 -08:00
|
|
|
}
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-08-17 07:31:12 -07:00
|
|
|
output_file_recorder_ = FileRecorder::CreateFileRecorder(
|
2016-01-28 05:22:45 -08:00
|
|
|
_outputFileRecorderId, (const FileFormats)format);
|
2016-08-17 07:31:12 -07:00
|
|
|
if (!output_file_recorder_) {
|
2016-01-28 05:22:45 -08:00
|
|
|
_engineStatisticsPtr->SetLastError(
|
|
|
|
|
VE_INVALID_ARGUMENT, kTraceError,
|
|
|
|
|
"StartRecordingPlayout() fileRecorder format isnot correct");
|
|
|
|
|
return -1;
|
|
|
|
|
}
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-08-22 08:43:54 -07:00
|
|
|
if (output_file_recorder_->StartRecordingAudioFile(stream, *codecInst,
|
2016-08-17 07:31:12 -07:00
|
|
|
notificationTime) != 0) {
|
2016-01-28 05:22:45 -08:00
|
|
|
_engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
|
|
|
|
|
"StartRecordingPlayout() failed to "
|
|
|
|
|
"start file recording");
|
2016-08-17 07:31:12 -07:00
|
|
|
output_file_recorder_->StopRecording();
|
|
|
|
|
output_file_recorder_.reset();
|
2016-01-28 05:22:45 -08:00
|
|
|
return -1;
|
|
|
|
|
}
|
2013-01-22 04:44:30 +00:00
|
|
|
|
2016-08-17 07:31:12 -07:00
|
|
|
output_file_recorder_->RegisterModuleFileCallback(this);
|
2016-01-28 05:22:45 -08:00
|
|
|
_outputFileRecording = true;
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
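// Note: both StartRecordingPlayout() overloads map the requested codec onto a
// file format the same way: L16, PCMU and PCMA are written as WAV
// (kFileFormatWavFile), any other codec falls back to
// kFileFormatCompressedFile, and a null codecInst records 16 kHz L16 PCM
// (kFileFormatPcm16kHzFile) using the dummy codec defined above.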
|
|
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
int Channel::StopRecordingPlayout() {
|
|
|
|
|
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
|
|
|
|
|
"Channel::StopRecordingPlayout()");
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
if (!_outputFileRecording) {
|
|
|
|
|
WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
|
|
|
|
|
"StopRecordingPlayout() isnot recording");
|
|
|
|
|
return -1;
|
|
|
|
|
}
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
rtc::CritScope cs(&_fileCritSect);
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-08-17 07:31:12 -07:00
|
|
|
if (output_file_recorder_->StopRecording() != 0) {
|
2016-01-28 05:22:45 -08:00
|
|
|
_engineStatisticsPtr->SetLastError(
|
|
|
|
|
VE_STOP_RECORDING_FAILED, kTraceError,
|
|
|
|
|
"StopRecording() could not stop recording");
|
|
|
|
|
return (-1);
|
|
|
|
|
}
|
2016-08-17 07:31:12 -07:00
|
|
|
output_file_recorder_->RegisterModuleFileCallback(NULL);
|
|
|
|
|
output_file_recorder_.reset();
|
2016-01-28 05:22:45 -08:00
|
|
|
_outputFileRecording = false;
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
void Channel::SetMixWithMicStatus(bool mix) {
|
|
|
|
|
rtc::CritScope cs(&_fileCritSect);
|
|
|
|
|
_mixFileWithMicrophone = mix;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
int Channel::GetSpeechOutputLevel(uint32_t& level) const {
|
|
|
|
|
int8_t currentLevel = _outputAudioLevel.Level();
|
|
|
|
|
level = static_cast<int32_t>(currentLevel);
|
|
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
int Channel::GetSpeechOutputLevelFullRange(uint32_t& level) const {
|
|
|
|
|
int16_t currentLevel = _outputAudioLevel.LevelFullRange();
|
|
|
|
|
level = static_cast<int32_t>(currentLevel);
|
|
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2016-03-24 10:36:00 -07:00
|
|
|
int Channel::SetInputMute(bool enable) {
|
2016-01-28 05:22:45 -08:00
|
|
|
rtc::CritScope cs(&volume_settings_critsect_);
|
|
|
|
|
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
|
2011-07-07 08:21:25 +00:00
|
|
|
"Channel::SetMute(enable=%d)", enable);
|
2016-03-24 10:36:00 -07:00
|
|
|
input_mute_ = enable;
|
2016-01-28 05:22:45 -08:00
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2016-03-24 10:36:00 -07:00
|
|
|
bool Channel::InputMute() const {
|
2016-01-28 05:22:45 -08:00
|
|
|
rtc::CritScope cs(&volume_settings_critsect_);
|
2016-03-24 10:36:00 -07:00
|
|
|
return input_mute_;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
int Channel::SetOutputVolumePan(float left, float right) {
|
|
|
|
|
rtc::CritScope cs(&volume_settings_critsect_);
|
|
|
|
|
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
|
2011-07-07 08:21:25 +00:00
|
|
|
"Channel::SetOutputVolumePan()");
|
2016-01-28 05:22:45 -08:00
|
|
|
_panLeft = left;
|
|
|
|
|
_panRight = right;
|
|
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
int Channel::GetOutputVolumePan(float& left, float& right) const {
|
|
|
|
|
rtc::CritScope cs(&volume_settings_critsect_);
|
|
|
|
|
left = _panLeft;
|
|
|
|
|
right = _panRight;
|
|
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
int Channel::SetChannelOutputVolumeScaling(float scaling) {
|
|
|
|
|
rtc::CritScope cs(&volume_settings_critsect_);
|
|
|
|
|
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
|
2011-07-07 08:21:25 +00:00
|
|
|
"Channel::SetChannelOutputVolumeScaling()");
|
2016-01-28 05:22:45 -08:00
|
|
|
_outputGain = scaling;
|
|
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
int Channel::GetChannelOutputVolumeScaling(float& scaling) const {
|
|
|
|
|
rtc::CritScope cs(&volume_settings_critsect_);
|
|
|
|
|
scaling = _outputGain;
|
|
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2016-03-11 03:06:41 -08:00
|
|
|
int Channel::SendTelephoneEventOutband(int event, int duration_ms) {
|
2016-01-28 05:22:45 -08:00
|
|
|
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
|
2016-03-11 03:06:41 -08:00
|
|
|
"Channel::SendTelephoneEventOutband(...)");
|
|
|
|
|
RTC_DCHECK_LE(0, event);
|
|
|
|
|
RTC_DCHECK_GE(255, event);
|
|
|
|
|
RTC_DCHECK_LE(0, duration_ms);
|
|
|
|
|
RTC_DCHECK_GE(65535, duration_ms);
|
2016-01-28 05:22:45 -08:00
|
|
|
if (!Sending()) {
|
|
|
|
|
return -1;
|
|
|
|
|
}
|
2016-03-11 03:06:41 -08:00
|
|
|
if (_rtpRtcpModule->SendTelephoneEventOutband(
|
|
|
|
|
event, duration_ms, kTelephoneEventAttenuationdB) != 0) {
|
2016-01-28 05:22:45 -08:00
|
|
|
_engineStatisticsPtr->SetLastError(
|
|
|
|
|
VE_SEND_DTMF_FAILED, kTraceWarning,
|
|
|
|
|
"SendTelephoneEventOutband() failed to send event");
|
|
|
|
|
return -1;
|
|
|
|
|
}
|
|
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
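// Illustrative sketch (hypothetical caller, not part of the original file):
// the DCHECKs above bound the arguments to event in [0, 255] and duration_ms
// in [0, 65535], so a caller sending the DTMF digit "5" for 160 ms might do:
//
//   if (channel->SendTelephoneEventOutband(5, 160) != 0) {
//     // Either the channel is not sending or the RTP module rejected it.
//   }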
|
|
|
|
|
|
2016-11-17 05:25:37 -08:00
|
|
|
int Channel::SetSendTelephoneEventPayloadType(int payload_type,
|
|
|
|
|
int payload_frequency) {
|
2016-01-28 05:22:45 -08:00
|
|
|
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
|
2011-07-07 08:21:25 +00:00
|
|
|
"Channel::SetSendTelephoneEventPayloadType()");
|
2016-03-14 08:00:37 -07:00
|
|
|
RTC_DCHECK_LE(0, payload_type);
|
|
|
|
|
RTC_DCHECK_GE(127, payload_type);
|
|
|
|
|
CodecInst codec = {0};
|
|
|
|
|
codec.pltype = payload_type;
|
2016-11-17 05:25:37 -08:00
|
|
|
codec.plfreq = payload_frequency;
|
2016-01-28 05:22:45 -08:00
|
|
|
memcpy(codec.plname, "telephone-event", 16);
|
|
|
|
|
if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
|
|
|
|
|
_rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
|
|
|
|
|
if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
|
|
|
|
|
_engineStatisticsPtr->SetLastError(
|
|
|
|
|
VE_RTP_RTCP_MODULE_ERROR, kTraceError,
|
|
|
|
|
"SetSendTelephoneEventPayloadType() failed to register send"
|
|
|
|
|
"payload type");
|
|
|
|
|
return -1;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
2016-01-28 05:22:45 -08:00
|
|
|
}
|
|
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
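// Illustrative sketch (hypothetical values): payload_type must be in [0, 127]
// per the DCHECKs above, and payload_frequency should match the negotiated
// clock rate of the telephone-event payload (commonly 8000 Hz), e.g.:
//
//   channel->SetSendTelephoneEventPayloadType(106, 8000);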
|
|
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
int Channel::VoiceActivityIndicator(int& activity) {
|
|
|
|
|
activity = _sendFrameType;
|
|
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
int Channel::SetLocalSSRC(unsigned int ssrc) {
|
|
|
|
|
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
|
|
|
|
|
"Channel::SetLocalSSRC()");
|
|
|
|
|
if (channel_state_.Get().sending) {
|
|
|
|
|
_engineStatisticsPtr->SetLastError(VE_ALREADY_SENDING, kTraceError,
|
|
|
|
|
"SetLocalSSRC() already sending");
|
|
|
|
|
return -1;
|
|
|
|
|
}
|
|
|
|
|
_rtpRtcpModule->SetSSRC(ssrc);
|
|
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
int Channel::GetLocalSSRC(unsigned int& ssrc) {
|
|
|
|
|
ssrc = _rtpRtcpModule->SSRC();
|
|
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
int Channel::GetRemoteSSRC(unsigned int& ssrc) {
|
|
|
|
|
ssrc = rtp_receiver_->SSRC();
|
|
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2014-03-06 23:49:08 +00:00
|
|
|
int Channel::SetSendAudioLevelIndicationStatus(bool enable, unsigned char id) {
|
2013-09-18 22:37:32 +00:00
|
|
|
_includeAudioLevelIndication = enable;
|
2014-03-06 23:49:08 +00:00
|
|
|
return SetSendRtpHeaderExtension(enable, kRtpExtensionAudioLevel, id);
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
2013-09-18 22:37:32 +00:00
|
|
|
|
2014-04-24 20:33:08 +00:00
|
|
|
int Channel::SetReceiveAudioLevelIndicationStatus(bool enable,
|
|
|
|
|
unsigned char id) {
|
2016-01-28 05:22:45 -08:00
|
|
|
rtp_header_parser_->DeregisterRtpHeaderExtension(kRtpExtensionAudioLevel);
|
|
|
|
|
if (enable &&
|
|
|
|
|
!rtp_header_parser_->RegisterRtpHeaderExtension(kRtpExtensionAudioLevel,
|
|
|
|
|
id)) {
|
2014-04-24 20:33:08 +00:00
|
|
|
return -1;
|
2014-03-06 23:49:08 +00:00
|
|
|
}
|
|
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2015-12-07 10:26:18 +01:00
|
|
|
void Channel::EnableSendTransportSequenceNumber(int id) {
|
|
|
|
|
int ret =
|
|
|
|
|
SetSendRtpHeaderExtension(true, kRtpExtensionTransportSequenceNumber, id);
|
|
|
|
|
RTC_DCHECK_EQ(0, ret);
|
|
|
|
|
}
|
|
|
|
|
|
2016-01-21 06:32:43 -08:00
|
|
|
void Channel::EnableReceiveTransportSequenceNumber(int id) {
|
|
|
|
|
rtp_header_parser_->DeregisterRtpHeaderExtension(
|
|
|
|
|
kRtpExtensionTransportSequenceNumber);
|
|
|
|
|
bool ret = rtp_header_parser_->RegisterRtpHeaderExtension(
|
|
|
|
|
kRtpExtensionTransportSequenceNumber, id);
|
|
|
|
|
RTC_DCHECK(ret);
|
|
|
|
|
}
|
|
|
|
|
|
2016-02-01 04:39:55 -08:00
|
|
|
void Channel::RegisterSenderCongestionControlObjects(
|
2015-12-07 10:26:18 +01:00
|
|
|
RtpPacketSender* rtp_packet_sender,
|
|
|
|
|
TransportFeedbackObserver* transport_feedback_observer,
|
|
|
|
|
PacketRouter* packet_router) {
|
2016-02-01 04:39:55 -08:00
|
|
|
RTC_DCHECK(rtp_packet_sender);
|
|
|
|
|
RTC_DCHECK(transport_feedback_observer);
|
|
|
|
|
RTC_DCHECK(packet_router && !packet_router_);
|
|
|
|
|
feedback_observer_proxy_->SetTransportFeedbackObserver(
|
|
|
|
|
transport_feedback_observer);
|
|
|
|
|
seq_num_allocator_proxy_->SetSequenceNumberAllocator(packet_router);
|
|
|
|
|
rtp_packet_sender_proxy_->SetPacketSender(rtp_packet_sender);
|
|
|
|
|
_rtpRtcpModule->SetStorePacketsStatus(true, 600);
|
2016-02-25 16:56:48 +01:00
|
|
|
packet_router->AddRtpModule(_rtpRtcpModule.get());
|
2015-12-07 10:26:18 +01:00
|
|
|
packet_router_ = packet_router;
|
|
|
|
|
}
|
|
|
|
|
|
2016-02-01 04:39:55 -08:00
|
|
|
void Channel::RegisterReceiverCongestionControlObjects(
|
|
|
|
|
PacketRouter* packet_router) {
|
|
|
|
|
RTC_DCHECK(packet_router && !packet_router_);
|
2016-02-25 16:56:48 +01:00
|
|
|
packet_router->AddRtpModule(_rtpRtcpModule.get());
|
2016-02-01 04:39:55 -08:00
|
|
|
packet_router_ = packet_router;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
void Channel::ResetCongestionControlObjects() {
|
|
|
|
|
RTC_DCHECK(packet_router_);
|
|
|
|
|
_rtpRtcpModule->SetStorePacketsStatus(false, 600);
|
|
|
|
|
feedback_observer_proxy_->SetTransportFeedbackObserver(nullptr);
|
|
|
|
|
seq_num_allocator_proxy_->SetSequenceNumberAllocator(nullptr);
|
2016-02-25 16:56:48 +01:00
|
|
|
packet_router_->RemoveRtpModule(_rtpRtcpModule.get());
|
2016-02-01 04:39:55 -08:00
|
|
|
packet_router_ = nullptr;
|
|
|
|
|
rtp_packet_sender_proxy_->SetPacketSender(nullptr);
|
|
|
|
|
}
|
|
|
|
|
|
2014-12-19 13:49:55 +00:00
|
|
|
void Channel::SetRTCPStatus(bool enable) {
|
|
|
|
|
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
|
|
|
|
|
"Channel::SetRTCPStatus()");
|
2015-10-02 02:36:56 -07:00
|
|
|
_rtpRtcpModule->SetRTCPStatus(enable ? RtcpMode::kCompound : RtcpMode::kOff);
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
int Channel::GetRTCPStatus(bool& enabled) {
|
2015-10-02 02:36:56 -07:00
|
|
|
RtcpMode method = _rtpRtcpModule->RTCP();
|
|
|
|
|
enabled = (method != RtcpMode::kOff);
|
2016-01-28 05:22:45 -08:00
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
int Channel::SetRTCP_CNAME(const char cName[256]) {
|
|
|
|
|
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
|
|
|
|
|
"Channel::SetRTCP_CNAME()");
|
|
|
|
|
if (_rtpRtcpModule->SetCNAME(cName) != 0) {
|
|
|
|
|
_engineStatisticsPtr->SetLastError(
|
|
|
|
|
VE_RTP_RTCP_MODULE_ERROR, kTraceError,
|
|
|
|
|
"SetRTCP_CNAME() failed to set RTCP CNAME");
|
|
|
|
|
return -1;
|
|
|
|
|
}
|
|
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
int Channel::GetRemoteRTCP_CNAME(char cName[256]) {
|
|
|
|
|
if (cName == NULL) {
|
|
|
|
|
_engineStatisticsPtr->SetLastError(
|
|
|
|
|
VE_INVALID_ARGUMENT, kTraceError,
|
|
|
|
|
"GetRemoteRTCP_CNAME() invalid CNAME input buffer");
|
|
|
|
|
return -1;
|
|
|
|
|
}
|
|
|
|
|
char cname[RTCP_CNAME_SIZE];
|
|
|
|
|
const uint32_t remoteSSRC = rtp_receiver_->SSRC();
|
|
|
|
|
if (_rtpRtcpModule->RemoteCNAME(remoteSSRC, cname) != 0) {
|
|
|
|
|
_engineStatisticsPtr->SetLastError(
|
|
|
|
|
VE_CANNOT_RETRIEVE_CNAME, kTraceError,
|
|
|
|
|
"GetRemoteRTCP_CNAME() failed to retrieve remote RTCP CNAME");
|
|
|
|
|
return -1;
|
|
|
|
|
}
|
|
|
|
|
strcpy(cName, cname);
|
|
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
int Channel::GetRemoteRTCPData(unsigned int& NTPHigh,
|
|
|
|
|
unsigned int& NTPLow,
|
|
|
|
|
unsigned int& timestamp,
|
|
|
|
|
unsigned int& playoutTimestamp,
|
|
|
|
|
unsigned int* jitter,
|
|
|
|
|
unsigned short* fractionLost) {
|
|
|
|
|
// --- Information from sender info in received Sender Reports
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
RTCPSenderInfo senderInfo;
|
|
|
|
|
if (_rtpRtcpModule->RemoteRTCPStat(&senderInfo) != 0) {
|
|
|
|
|
_engineStatisticsPtr->SetLastError(
|
|
|
|
|
VE_RTP_RTCP_MODULE_ERROR, kTraceError,
|
|
|
|
|
"GetRemoteRTCPData() failed to retrieve sender info for remote "
|
|
|
|
|
"side");
|
|
|
|
|
return -1;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// We only utilize 12 out of 20 bytes in the sender info (ignores packet
|
|
|
|
|
// and octet count)
|
|
|
|
|
NTPHigh = senderInfo.NTPseconds;
|
|
|
|
|
NTPLow = senderInfo.NTPfraction;
|
|
|
|
|
timestamp = senderInfo.RTPtimeStamp;
|
|
|
|
|
|
|
|
|
|
// --- Locally derived information
|
|
|
|
|
|
|
|
|
|
// This value is updated on each incoming RTCP packet (0 when no packet
|
|
|
|
|
// has been received)
|
|
|
|
|
playoutTimestamp = playout_timestamp_rtcp_;
|
|
|
|
|
|
|
|
|
|
if (NULL != jitter || NULL != fractionLost) {
|
|
|
|
|
// Get all RTCP receiver report blocks that have been received on this
|
|
|
|
|
// channel. If we receive RTP packets from a remote source we know the
|
|
|
|
|
// remote SSRC and use the report block from that source.
|
|
|
|
|
// Otherwise use the first report block.
|
|
|
|
|
std::vector<RTCPReportBlock> remote_stats;
|
|
|
|
|
if (_rtpRtcpModule->RemoteRTCPStat(&remote_stats) != 0 ||
|
|
|
|
|
remote_stats.empty()) {
|
|
|
|
|
WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
|
|
|
|
|
"GetRemoteRTCPData() failed to measure statistics due"
|
|
|
|
|
" to lack of received RTP and/or RTCP packets");
|
|
|
|
|
return -1;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
uint32_t remoteSSRC = rtp_receiver_->SSRC();
|
|
|
|
|
std::vector<RTCPReportBlock>::const_iterator it = remote_stats.begin();
|
|
|
|
|
for (; it != remote_stats.end(); ++it) {
|
|
|
|
|
if (it->remoteSSRC == remoteSSRC)
|
|
|
|
|
break;
|
|
|
|
|
}
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
if (it == remote_stats.end()) {
|
|
|
|
|
// If we have not received any RTCP packets from this SSRC it probably
|
|
|
|
|
// means that we have not received any RTP packets.
|
|
|
|
|
// Use the first received report block instead.
|
|
|
|
|
it = remote_stats.begin();
|
|
|
|
|
remoteSSRC = it->remoteSSRC;
|
|
|
|
|
}
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
if (jitter) {
|
|
|
|
|
*jitter = it->jitter;
|
|
|
|
|
}
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
if (fractionLost) {
|
|
|
|
|
*fractionLost = it->fractionLost;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
2016-01-28 05:22:45 -08:00
|
|
|
}
|
|
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
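// Note (illustrative arithmetic): NTPHigh/NTPLow together form the 64-bit NTP
// timestamp from the sender report (RFC 3550), i.e. whole seconds since 1900
// plus a 2^-32 fractional part. A caller could approximate milliseconds as:
//
//   uint64_t ntp_ms = static_cast<uint64_t>(NTPHigh) * 1000 +
//                     (static_cast<uint64_t>(NTPLow) * 1000) / (1ULL << 32);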
|
|
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
int Channel::SendApplicationDefinedRTCPPacket(
|
|
|
|
|
unsigned char subType,
|
|
|
|
|
unsigned int name,
|
|
|
|
|
const char* data,
|
|
|
|
|
unsigned short dataLengthInBytes) {
|
|
|
|
|
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
|
|
|
|
|
"Channel::SendApplicationDefinedRTCPPacket()");
|
|
|
|
|
if (!channel_state_.Get().sending) {
|
|
|
|
|
_engineStatisticsPtr->SetLastError(
|
|
|
|
|
VE_NOT_SENDING, kTraceError,
|
|
|
|
|
"SendApplicationDefinedRTCPPacket() not sending");
|
|
|
|
|
return -1;
|
|
|
|
|
}
|
|
|
|
|
if (NULL == data) {
|
|
|
|
|
_engineStatisticsPtr->SetLastError(
|
|
|
|
|
VE_INVALID_ARGUMENT, kTraceError,
|
|
|
|
|
"SendApplicationDefinedRTCPPacket() invalid data value");
|
|
|
|
|
return -1;
|
|
|
|
|
}
|
|
|
|
|
if (dataLengthInBytes % 4 != 0) {
|
|
|
|
|
_engineStatisticsPtr->SetLastError(
|
|
|
|
|
VE_INVALID_ARGUMENT, kTraceError,
|
|
|
|
|
"SendApplicationDefinedRTCPPacket() invalid length value");
|
|
|
|
|
return -1;
|
|
|
|
|
}
|
|
|
|
|
RtcpMode status = _rtpRtcpModule->RTCP();
|
|
|
|
|
if (status == RtcpMode::kOff) {
|
|
|
|
|
_engineStatisticsPtr->SetLastError(
|
|
|
|
|
VE_RTCP_ERROR, kTraceError,
|
|
|
|
|
"SendApplicationDefinedRTCPPacket() RTCP is disabled");
|
|
|
|
|
return -1;
|
|
|
|
|
}
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
// Create and schedule the RTCP APP packet for transmission
|
|
|
|
|
if (_rtpRtcpModule->SetRTCPApplicationSpecificData(
|
|
|
|
|
subType, name, (const unsigned char*)data, dataLengthInBytes) != 0) {
|
|
|
|
|
_engineStatisticsPtr->SetLastError(
|
|
|
|
|
VE_SEND_ERROR, kTraceError,
|
|
|
|
|
"SendApplicationDefinedRTCPPacket() failed to send RTCP packet");
|
|
|
|
|
return -1;
|
|
|
|
|
}
|
|
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
int Channel::GetRTPStatistics(unsigned int& averageJitterMs,
|
|
|
|
|
unsigned int& maxJitterMs,
|
|
|
|
|
unsigned int& discardedPackets) {
|
|
|
|
|
// The jitter statistics are updated for each received RTP packet and are
|
|
|
|
|
// based on received packets.
|
|
|
|
|
if (_rtpRtcpModule->RTCP() == RtcpMode::kOff) {
|
|
|
|
|
// If RTCP is off, there is no timed thread in the RTCP module regularly
|
|
|
|
|
// generating new stats, trigger the update manually here instead.
|
|
|
|
|
StreamStatistician* statistician =
|
|
|
|
|
rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
|
|
|
|
|
if (statistician) {
|
|
|
|
|
// Don't use returned statistics, use data from proxy instead so that
|
|
|
|
|
// max jitter can be fetched atomically.
|
|
|
|
|
RtcpStatistics s;
|
|
|
|
|
statistician->GetStatistics(&s, true);
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
2016-01-28 05:22:45 -08:00
|
|
|
}
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
ChannelStatistics stats = statistics_proxy_->GetStats();
|
|
|
|
|
const int32_t playoutFrequency = audio_coding_->PlayoutFrequency();
|
|
|
|
|
if (playoutFrequency > 0) {
|
|
|
|
|
// Scale RTP statistics given the current playout frequency
|
|
|
|
|
maxJitterMs = stats.max_jitter / (playoutFrequency / 1000);
|
|
|
|
|
averageJitterMs = stats.rtcp.jitter / (playoutFrequency / 1000);
|
|
|
|
|
}
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
discardedPackets = _numberOfDiscardedPackets;
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2012-08-22 08:53:55 +00:00
|
|
|
int Channel::GetRemoteRTCPReportBlocks(
|
|
|
|
|
std::vector<ReportBlock>* report_blocks) {
|
|
|
|
|
if (report_blocks == NULL) {
|
2016-01-28 05:22:45 -08:00
|
|
|
_engineStatisticsPtr->SetLastError(
|
|
|
|
|
VE_INVALID_ARGUMENT, kTraceError,
|
|
|
|
|
"GetRemoteRTCPReportBlock()s invalid report_blocks.");
|
2012-08-22 08:53:55 +00:00
|
|
|
return -1;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Get the report blocks from the latest received RTCP Sender or Receiver
|
|
|
|
|
// Report. Each element in the vector contains the sender's SSRC and a
|
|
|
|
|
// report block according to RFC 3550.
|
|
|
|
|
std::vector<RTCPReportBlock> rtcp_report_blocks;
|
|
|
|
|
if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_report_blocks) != 0) {
|
|
|
|
|
return -1;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (rtcp_report_blocks.empty())
|
|
|
|
|
return 0;
|
|
|
|
|
|
|
|
|
|
std::vector<RTCPReportBlock>::const_iterator it = rtcp_report_blocks.begin();
|
|
|
|
|
for (; it != rtcp_report_blocks.end(); ++it) {
|
|
|
|
|
ReportBlock report_block;
|
|
|
|
|
report_block.sender_SSRC = it->remoteSSRC;
|
|
|
|
|
report_block.source_SSRC = it->sourceSSRC;
|
|
|
|
|
report_block.fraction_lost = it->fractionLost;
|
|
|
|
|
report_block.cumulative_num_packets_lost = it->cumulativeLost;
|
|
|
|
|
report_block.extended_highest_sequence_number = it->extendedHighSeqNum;
|
|
|
|
|
report_block.interarrival_jitter = it->jitter;
|
|
|
|
|
report_block.last_SR_timestamp = it->lastSR;
|
|
|
|
|
report_block.delay_since_last_SR = it->delaySinceLastSR;
|
|
|
|
|
report_blocks->push_back(report_block);
|
|
|
|
|
}
|
|
|
|
|
return 0;
|
|
|
|
|
}
|
|
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
int Channel::GetRTPStatistics(CallStatistics& stats) {
|
|
|
|
|
// --- RtcpStatistics
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
// The jitter statistics are updated for each received RTP packet and are
|
|
|
|
|
// based on received packets.
|
|
|
|
|
RtcpStatistics statistics;
|
|
|
|
|
StreamStatistician* statistician =
|
|
|
|
|
rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
|
2016-02-12 11:35:08 +01:00
|
|
|
if (statistician) {
|
|
|
|
|
statistician->GetStatistics(&statistics,
|
|
|
|
|
_rtpRtcpModule->RTCP() == RtcpMode::kOff);
|
2016-01-28 05:22:45 -08:00
|
|
|
}
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
stats.fractionLost = statistics.fraction_lost;
|
|
|
|
|
stats.cumulativeLost = statistics.cumulative_lost;
|
|
|
|
|
stats.extendedMax = statistics.extended_max_sequence_number;
|
|
|
|
|
stats.jitterSamples = statistics.jitter;
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
// --- RTT
|
|
|
|
|
stats.rttMs = GetRTT(true);
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
// --- Data counters
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
size_t bytesSent(0);
|
|
|
|
|
uint32_t packetsSent(0);
|
|
|
|
|
size_t bytesReceived(0);
|
|
|
|
|
uint32_t packetsReceived(0);
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
if (statistician) {
|
|
|
|
|
statistician->GetDataCounters(&bytesReceived, &packetsReceived);
|
|
|
|
|
}
|
2013-08-15 23:38:54 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
if (_rtpRtcpModule->DataCountersRTP(&bytesSent, &packetsSent) != 0) {
|
|
|
|
|
WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
|
|
|
|
|
"GetRTPStatistics() failed to retrieve RTP datacounters =>"
|
|
|
|
|
" output will not be complete");
|
|
|
|
|
}
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
stats.bytesSent = bytesSent;
|
|
|
|
|
stats.packetsSent = packetsSent;
|
|
|
|
|
stats.bytesReceived = bytesReceived;
|
|
|
|
|
stats.packetsReceived = packetsReceived;
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
// --- Timestamps
|
|
|
|
|
{
|
|
|
|
|
rtc::CritScope lock(&ts_stats_lock_);
|
|
|
|
|
stats.capture_start_ntp_time_ms_ = capture_start_ntp_time_ms_;
|
|
|
|
|
}
|
|
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2014-05-28 09:52:06 +00:00
|
|
|
int Channel::SetCodecFECStatus(bool enable) {
|
|
|
|
|
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
|
|
|
|
|
"Channel::SetCodecFECStatus()");
|
|
|
|
|
|
2016-04-06 12:22:38 -07:00
|
|
|
if (!codec_manager_.SetCodecFEC(enable) ||
|
|
|
|
|
!codec_manager_.MakeEncoder(&rent_a_codec_, audio_coding_.get())) {
|
2014-05-28 09:52:06 +00:00
|
|
|
_engineStatisticsPtr->SetLastError(
|
|
|
|
|
VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
|
|
|
|
|
"SetCodecFECStatus() failed to set FEC state");
|
|
|
|
|
return -1;
|
|
|
|
|
}
|
|
|
|
|
return 0;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
bool Channel::GetCodecFECStatus() {
|
2016-04-06 12:22:38 -07:00
|
|
|
return codec_manager_.GetStackParams()->use_codec_fec;
|
2014-05-28 09:52:06 +00:00
|
|
|
}
|
|
|
|
|
|
2013-06-05 15:33:20 +00:00
|
|
|
void Channel::SetNACKStatus(bool enable, int maxNumberOfPackets) {
|
|
|
|
|
// None of these functions can fail.
|
2015-12-07 10:26:18 +01:00
|
|
|
// If pacing is enabled we always store packets.
|
|
|
|
|
if (!pacing_enabled_)
|
|
|
|
|
_rtpRtcpModule->SetStorePacketsStatus(enable, maxNumberOfPackets);
|
2013-09-06 13:40:11 +00:00
|
|
|
rtp_receive_statistics_->SetMaxReorderingThreshold(maxNumberOfPackets);
|
2013-06-06 21:09:01 +00:00
|
|
|
if (enable)
|
2013-09-23 23:02:24 +00:00
|
|
|
audio_coding_->EnableNack(maxNumberOfPackets);
|
2013-06-06 21:09:01 +00:00
|
|
|
else
|
2013-09-23 23:02:24 +00:00
|
|
|
audio_coding_->DisableNack();
|
2013-06-05 15:33:20 +00:00
|
|
|
}
|
|
|
|
|
|
2013-06-06 21:09:01 +00:00
|
|
|
// Called when we are missing one or more packets.
|
|
|
|
|
int Channel::ResendPackets(const uint16_t* sequence_numbers, int length) {
|
2013-06-05 15:33:20 +00:00
|
|
|
return _rtpRtcpModule->SendNACK(sequence_numbers, length);
|
|
|
|
|
}
|
|
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
uint32_t Channel::Demultiplex(const AudioFrame& audioFrame) {
|
|
|
|
|
WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
|
|
|
|
|
"Channel::Demultiplex()");
|
|
|
|
|
_audioFrame.CopyFrom(audioFrame);
|
|
|
|
|
_audioFrame.id_ = _channelId;
|
|
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2013-07-31 16:23:37 +00:00
|
|
|
void Channel::Demultiplex(const int16_t* audio_data,
|
2013-07-31 16:27:42 +00:00
|
|
|
int sample_rate,
|
2015-08-24 14:52:23 -07:00
|
|
|
size_t number_of_frames,
|
2016-01-12 16:26:35 -08:00
|
|
|
size_t number_of_channels) {
|
2013-07-31 16:23:37 +00:00
|
|
|
CodecInst codec;
|
|
|
|
|
GetSendCodec(codec);
|
|
|
|
|
|
2015-09-23 12:49:12 -07:00
|
|
|
// Never upsample or upmix the capture signal here. This should be done at the
|
|
|
|
|
// end of the send chain.
|
|
|
|
|
_audioFrame.sample_rate_hz_ = std::min(codec.plfreq, sample_rate);
|
|
|
|
|
_audioFrame.num_channels_ = std::min(number_of_channels, codec.channels);
|
|
|
|
|
RemixAndResample(audio_data, number_of_frames, number_of_channels,
|
|
|
|
|
sample_rate, &input_resampler_, &_audioFrame);
|
2013-07-31 16:23:37 +00:00
|
|
|
}
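// Illustrative example (hypothetical codec): with a 48000 Hz stereo capture
// and a send codec configured as mono at 48000 Hz, the std::min() calls above
// keep sample_rate_hz_ at 48000 but clamp num_channels_ to 1, so the capture
// is downmixed (never upmixed or upsampled) before RemixAndResample().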
|
|
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
uint32_t Channel::PrepareEncodeAndSend(int mixingFrequency) {
|
|
|
|
|
WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
|
|
|
|
|
"Channel::PrepareEncodeAndSend()");
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
if (_audioFrame.samples_per_channel_ == 0) {
|
|
|
|
|
WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
|
|
|
|
|
"Channel::PrepareEncodeAndSend() invalid audio frame");
|
|
|
|
|
return 0xFFFFFFFF;
|
|
|
|
|
}
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
if (channel_state_.Get().input_file_playing) {
|
|
|
|
|
MixOrReplaceAudioWithFile(mixingFrequency);
|
|
|
|
|
}
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-03-24 10:36:00 -07:00
|
|
|
bool is_muted = InputMute(); // Cache locally as InputMute() takes a lock.
|
|
|
|
|
AudioFrameOperations::Mute(&_audioFrame, previous_frame_muted_, is_muted);
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
if (channel_state_.Get().input_external_media) {
|
|
|
|
|
rtc::CritScope cs(&_callbackCritSect);
|
|
|
|
|
const bool isStereo = (_audioFrame.num_channels_ == 2);
|
|
|
|
|
if (_inputExternalMediaCallbackPtr) {
|
|
|
|
|
_inputExternalMediaCallbackPtr->Process(
|
|
|
|
|
_channelId, kRecordingPerChannel, (int16_t*)_audioFrame.data_,
|
|
|
|
|
_audioFrame.samples_per_channel_, _audioFrame.sample_rate_hz_,
|
|
|
|
|
isStereo);
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
2016-01-28 05:22:45 -08:00
|
|
|
}
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
if (_includeAudioLevelIndication) {
|
|
|
|
|
size_t length =
|
|
|
|
|
_audioFrame.samples_per_channel_ * _audioFrame.num_channels_;
|
2016-05-26 21:35:27 +02:00
|
|
|
RTC_CHECK_LE(length, sizeof(_audioFrame.data_));
|
2016-03-24 10:36:00 -07:00
|
|
|
if (is_muted && previous_frame_muted_) {
|
2016-11-29 04:26:24 -08:00
|
|
|
rms_level_.AnalyzeMuted(length);
|
2016-01-28 05:22:45 -08:00
|
|
|
} else {
|
2016-11-29 04:26:24 -08:00
|
|
|
rms_level_.Analyze(
|
|
|
|
|
rtc::ArrayView<const int16_t>(_audioFrame.data_, length));
|
2011-11-15 16:57:56 +00:00
|
|
|
}
|
2016-01-28 05:22:45 -08:00
|
|
|
}
|
2016-03-24 10:36:00 -07:00
|
|
|
previous_frame_muted_ = is_muted;
|
2011-11-15 16:57:56 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
uint32_t Channel::EncodeAndSend() {
|
|
|
|
|
WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
|
|
|
|
|
"Channel::EncodeAndSend()");
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
assert(_audioFrame.num_channels_ <= 2);
|
|
|
|
|
if (_audioFrame.samples_per_channel_ == 0) {
|
|
|
|
|
WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
|
|
|
|
|
"Channel::EncodeAndSend() invalid audio frame");
|
|
|
|
|
return 0xFFFFFFFF;
|
|
|
|
|
}
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
_audioFrame.id_ = _channelId;
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
// --- Add 10 ms of raw (PCM) audio data to the encoder.
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
// The ACM resamples internally.
|
|
|
|
|
_audioFrame.timestamp_ = _timeStamp;
|
|
|
|
|
// This call will trigger AudioPacketizationCallback::SendData if encoding
|
|
|
|
|
// is done and payload is ready for packetization and transmission.
|
|
|
|
|
// Otherwise, it will return without invoking the callback.
|
|
|
|
|
if (audio_coding_->Add10MsData((AudioFrame&)_audioFrame) < 0) {
|
|
|
|
|
WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, _channelId),
|
|
|
|
|
"Channel::EncodeAndSend() ACM encoding failed");
|
|
|
|
|
return 0xFFFFFFFF;
|
|
|
|
|
}
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
_timeStamp += static_cast<uint32_t>(_audioFrame.samples_per_channel_);
|
|
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
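// Note (illustrative arithmetic): _timeStamp advances by samples_per_channel_
// per 10 ms frame, so with a 48000 Hz input each call adds 480 RTP timestamp
// ticks, and with 16000 Hz it adds 160.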
|
|
|
|
|
|
2016-11-14 11:30:07 -08:00
|
|
|
void Channel::set_associate_send_channel(const ChannelOwner& channel) {
|
|
|
|
|
RTC_DCHECK(!channel.channel() ||
|
|
|
|
|
channel.channel()->ChannelId() != _channelId);
|
|
|
|
|
rtc::CritScope lock(&assoc_send_channel_lock_);
|
|
|
|
|
associate_send_channel_ = channel;
|
|
|
|
|
}
|
|
|
|
|
|
2015-05-13 14:14:42 +02:00
|
|
|
void Channel::DisassociateSendChannel(int channel_id) {
|
2016-01-21 10:37:37 -08:00
|
|
|
rtc::CritScope lock(&assoc_send_channel_lock_);
|
2015-05-13 14:14:42 +02:00
|
|
|
Channel* channel = associate_send_channel_.channel();
|
|
|
|
|
if (channel && channel->ChannelId() == channel_id) {
|
|
|
|
|
// If this channel is associated with a send channel of the specified
|
|
|
|
|
// Channel ID, disassociate with it.
|
|
|
|
|
ChannelOwner ref(NULL);
|
|
|
|
|
associate_send_channel_ = ref;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2016-07-04 07:06:55 -07:00
|
|
|
void Channel::SetRtcEventLog(RtcEventLog* event_log) {
|
|
|
|
|
event_log_proxy_->SetEventLog(event_log);
|
|
|
|
|
}
|
|
|
|
|
|
2016-11-30 07:51:13 -08:00
|
|
|
void Channel::SetRtcpRttStats(RtcpRttStats* rtcp_rtt_stats) {
|
|
|
|
|
rtcp_rtt_stats_proxy_->SetRtcpRttStats(rtcp_rtt_stats);
|
|
|
|
|
}
|
|
|
|
|
|
2016-11-08 02:50:09 -08:00
|
|
|
void Channel::SetTransportOverhead(int transport_overhead_per_packet) {
|
|
|
|
|
_rtpRtcpModule->SetTransportOverhead(transport_overhead_per_packet);
|
|
|
|
|
}
|
|
|
|
|
|
2016-12-15 06:24:49 -08:00
|
|
|
void Channel::OnOverheadChanged(size_t overhead_bytes_per_packet) {
|
|
|
|
|
audio_coding_->ModifyEncoder([&](std::unique_ptr<AudioEncoder>* encoder) {
|
|
|
|
|
if (*encoder) {
|
|
|
|
|
(*encoder)->OnReceivedOverhead(overhead_bytes_per_packet);
|
|
|
|
|
}
|
|
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
int Channel::RegisterExternalMediaProcessing(ProcessingTypes type,
|
|
|
|
|
VoEMediaProcess& processObject) {
|
|
|
|
|
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
|
|
|
|
|
"Channel::RegisterExternalMediaProcessing()");
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
rtc::CritScope cs(&_callbackCritSect);
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
if (kPlaybackPerChannel == type) {
|
|
|
|
|
if (_outputExternalMediaCallbackPtr) {
|
|
|
|
|
_engineStatisticsPtr->SetLastError(
|
|
|
|
|
VE_INVALID_OPERATION, kTraceError,
|
|
|
|
|
"Channel::RegisterExternalMediaProcessing() "
|
|
|
|
|
"output external media already enabled");
|
|
|
|
|
return -1;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
2016-01-28 05:22:45 -08:00
|
|
|
_outputExternalMediaCallbackPtr = &processObject;
|
|
|
|
|
_outputExternalMedia = true;
|
|
|
|
|
} else if (kRecordingPerChannel == type) {
|
|
|
|
|
if (_inputExternalMediaCallbackPtr) {
|
|
|
|
|
_engineStatisticsPtr->SetLastError(
|
|
|
|
|
VE_INVALID_OPERATION, kTraceError,
|
|
|
|
|
"Channel::RegisterExternalMediaProcessing() "
|
|
|
|
|
"output external media already enabled");
|
|
|
|
|
return -1;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
2016-01-28 05:22:45 -08:00
|
|
|
_inputExternalMediaCallbackPtr = &processObject;
|
|
|
|
|
channel_state_.SetInputExternalMedia(true);
|
|
|
|
|
}
|
|
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type) {
|
|
|
|
|
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
|
|
|
|
|
"Channel::DeRegisterExternalMediaProcessing()");
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
rtc::CritScope cs(&_callbackCritSect);
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
if (kPlaybackPerChannel == type) {
|
|
|
|
|
if (!_outputExternalMediaCallbackPtr) {
|
|
|
|
|
_engineStatisticsPtr->SetLastError(
|
|
|
|
|
VE_INVALID_OPERATION, kTraceWarning,
|
|
|
|
|
"Channel::DeRegisterExternalMediaProcessing() "
|
|
|
|
|
"output external media already disabled");
|
|
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
2016-01-28 05:22:45 -08:00
|
|
|
_outputExternalMedia = false;
|
|
|
|
|
_outputExternalMediaCallbackPtr = NULL;
|
|
|
|
|
} else if (kRecordingPerChannel == type) {
|
|
|
|
|
if (!_inputExternalMediaCallbackPtr) {
|
|
|
|
|
_engineStatisticsPtr->SetLastError(
|
|
|
|
|
VE_INVALID_OPERATION, kTraceWarning,
|
|
|
|
|
"Channel::DeRegisterExternalMediaProcessing() "
|
|
|
|
|
"input external media already disabled");
|
|
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
2016-01-28 05:22:45 -08:00
|
|
|
channel_state_.SetInputExternalMedia(false);
|
|
|
|
|
_inputExternalMediaCallbackPtr = NULL;
|
|
|
|
|
}
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2012-12-12 23:00:29 +00:00
|
|
|
int Channel::SetExternalMixing(bool enabled) {
|
2016-01-28 05:22:45 -08:00
|
|
|
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
|
|
|
|
|
"Channel::SetExternalMixing(enabled=%d)", enabled);
|
2012-12-12 23:00:29 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
if (channel_state_.Get().playing) {
|
|
|
|
|
_engineStatisticsPtr->SetLastError(
|
|
|
|
|
VE_INVALID_OPERATION, kTraceError,
|
|
|
|
|
"Channel::SetExternalMixing() "
|
|
|
|
|
"external mixing cannot be changed while playing.");
|
|
|
|
|
return -1;
|
|
|
|
|
}
|
2012-12-12 23:00:29 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
_externalMixing = enabled;
|
2012-12-12 23:00:29 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
return 0;
|
2012-12-12 23:00:29 +00:00
|
|
|
}
|
|
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
int Channel::GetNetworkStatistics(NetworkStatistics& stats) {
|
|
|
|
|
return audio_coding_->GetNetworkStatistics(&stats);
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2013-12-13 19:17:43 +00:00
|
|
|
void Channel::GetDecodingCallStatistics(AudioDecodingCallStats* stats) const {
|
|
|
|
|
audio_coding_->GetDecodingCallStatistics(stats);
|
|
|
|
|
}
|
|
|
|
|
|
2013-04-11 20:23:35 +00:00
|
|
|
bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms,
|
|
|
|
|
int* playout_buffer_delay_ms) const {
|
2016-01-21 10:37:37 -08:00
|
|
|
rtc::CritScope lock(&video_sync_lock_);
|
2016-08-22 15:39:53 -07:00
|
|
|
*jitter_buffer_delay_ms = audio_coding_->FilteredCurrentDelayMs();
|
2013-04-11 20:23:35 +00:00
|
|
|
*playout_buffer_delay_ms = playout_delay_ms_;
|
|
|
|
|
return true;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2015-11-27 10:46:42 -08:00
|
|
|
uint32_t Channel::GetDelayEstimate() const {
|
|
|
|
|
int jitter_buffer_delay_ms = 0;
|
|
|
|
|
int playout_buffer_delay_ms = 0;
|
|
|
|
|
GetDelayEstimate(&jitter_buffer_delay_ms, &playout_buffer_delay_ms);
|
|
|
|
|
return jitter_buffer_delay_ms + playout_buffer_delay_ms;
|
|
|
|
|
}
|
|
|
|
|
|
2015-08-13 12:09:10 -07:00
|
|
|
int Channel::LeastRequiredDelayMs() const {
|
|
|
|
|
return audio_coding_->LeastRequiredDelayMs();
|
|
|
|
|
}
|
|
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
int Channel::SetMinimumPlayoutDelay(int delayMs) {
|
|
|
|
|
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
|
|
|
|
|
"Channel::SetMinimumPlayoutDelay()");
|
|
|
|
|
if ((delayMs < kVoiceEngineMinMinPlayoutDelayMs) ||
|
|
|
|
|
(delayMs > kVoiceEngineMaxMinPlayoutDelayMs)) {
|
|
|
|
|
_engineStatisticsPtr->SetLastError(
|
|
|
|
|
VE_INVALID_ARGUMENT, kTraceError,
|
|
|
|
|
"SetMinimumPlayoutDelay() invalid min delay");
|
|
|
|
|
return -1;
|
|
|
|
|
}
|
|
|
|
|
if (audio_coding_->SetMinimumPlayoutDelay(delayMs) != 0) {
|
|
|
|
|
_engineStatisticsPtr->SetLastError(
|
|
|
|
|
VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
|
|
|
|
|
"SetMinimumPlayoutDelay() failed to set min playout delay");
|
|
|
|
|
return -1;
|
|
|
|
|
}
|
|
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2013-04-11 20:23:35 +00:00
|
|
|
int Channel::GetPlayoutTimestamp(unsigned int& timestamp) {
|
2015-08-13 12:09:10 -07:00
|
|
|
uint32_t playout_timestamp_rtp = 0;
|
|
|
|
|
{
|
2016-01-21 10:37:37 -08:00
|
|
|
rtc::CritScope lock(&video_sync_lock_);
|
2015-08-13 12:09:10 -07:00
|
|
|
playout_timestamp_rtp = playout_timestamp_rtp_;
|
|
|
|
|
}
|
2016-01-28 05:22:45 -08:00
|
|
|
if (playout_timestamp_rtp == 0) {
|
2013-04-11 20:23:35 +00:00
|
|
|
_engineStatisticsPtr->SetLastError(
|
2016-07-07 13:06:26 -07:00
|
|
|
VE_CANNOT_RETRIEVE_VALUE, kTraceStateInfo,
|
2013-04-11 20:23:35 +00:00
|
|
|
"GetPlayoutTimestamp() failed to retrieve timestamp");
|
|
|
|
|
return -1;
|
|
|
|
|
}
|
2015-08-13 12:09:10 -07:00
|
|
|
timestamp = playout_timestamp_rtp;
|
2013-04-11 20:23:35 +00:00
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2014-12-19 13:49:55 +00:00
|
|
|
int Channel::SetInitTimestamp(unsigned int timestamp) {
|
|
|
|
|
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
|
2011-07-07 08:21:25 +00:00
|
|
|
"Channel::SetInitTimestamp()");
|
2014-12-19 13:49:55 +00:00
|
|
|
if (channel_state_.Get().sending) {
|
|
|
|
|
_engineStatisticsPtr->SetLastError(VE_SENDING, kTraceError,
|
|
|
|
|
"SetInitTimestamp() already sending");
|
|
|
|
|
return -1;
|
|
|
|
|
}
|
|
|
|
|
_rtpRtcpModule->SetStartTimestamp(timestamp);
|
|
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2014-12-19 13:49:55 +00:00
|
|
|
int Channel::SetInitSequenceNumber(short sequenceNumber) {
|
|
|
|
|
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
|
|
|
|
|
"Channel::SetInitSequenceNumber()");
|
|
|
|
|
if (channel_state_.Get().sending) {
|
|
|
|
|
_engineStatisticsPtr->SetLastError(
|
|
|
|
|
VE_SENDING, kTraceError, "SetInitSequenceNumber() already sending");
|
|
|
|
|
return -1;
|
|
|
|
|
}
|
|
|
|
|
_rtpRtcpModule->SetSequenceNumber(sequenceNumber);
|
|
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
int Channel::GetRtpRtcp(RtpRtcp** rtpRtcpModule,
|
|
|
|
|
RtpReceiver** rtp_receiver) const {
|
|
|
|
|
*rtpRtcpModule = _rtpRtcpModule.get();
|
|
|
|
|
*rtp_receiver = rtp_receiver_.get();
|
|
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2012-05-08 17:12:40 +00:00
|
|
|
// TODO(andrew): refactor Mix functions here and in transmit_mixer.cc to use
|
|
|
|
|
// a shared helper.
|
2016-01-28 05:22:45 -08:00
|
|
|
int32_t Channel::MixOrReplaceAudioWithFile(int mixingFrequency) {
|
2016-02-17 10:04:18 -08:00
|
|
|
std::unique_ptr<int16_t[]> fileBuffer(new int16_t[640]);
|
2016-01-28 05:22:45 -08:00
|
|
|
size_t fileSamples(0);
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
{
|
|
|
|
|
rtc::CritScope cs(&_fileCritSect);
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-08-17 07:31:12 -07:00
|
|
|
if (!input_file_player_) {
|
2016-01-28 05:22:45 -08:00
|
|
|
WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
|
|
|
|
|
"Channel::MixOrReplaceAudioWithFile() fileplayer"
|
|
|
|
|
" doesnt exist");
|
|
|
|
|
return -1;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2016-08-22 08:43:54 -07:00
|
|
|
if (input_file_player_->Get10msAudioFromFile(fileBuffer.get(), &fileSamples,
|
2016-08-17 07:31:12 -07:00
|
|
|
mixingFrequency) == -1) {
|
2016-01-28 05:22:45 -08:00
|
|
|
WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
|
|
|
|
|
"Channel::MixOrReplaceAudioWithFile() file mixing "
|
|
|
|
|
"failed");
|
|
|
|
|
return -1;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
2016-01-28 05:22:45 -08:00
|
|
|
if (fileSamples == 0) {
|
|
|
|
|
WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
|
|
|
|
|
"Channel::MixOrReplaceAudioWithFile() file is ended");
|
|
|
|
|
return 0;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
assert(_audioFrame.samples_per_channel_ == fileSamples);
|
|
|
|
|
|
|
|
|
|
if (_mixFileWithMicrophone) {
|
|
|
|
|
// Currently file stream is always mono.
|
|
|
|
|
// TODO(xians): Change the code when FilePlayer supports real stereo.
|
|
|
|
|
MixWithSat(_audioFrame.data_, _audioFrame.num_channels_, fileBuffer.get(),
|
|
|
|
|
1, fileSamples);
|
|
|
|
|
} else {
|
|
|
|
|
// Replace ACM audio with file.
|
|
|
|
|
// Currently file stream is always mono.
|
|
|
|
|
// TODO(xians): Change the code when FilePlayer supports real stereo.
|
|
|
|
|
_audioFrame.UpdateFrame(
|
|
|
|
|
_channelId, 0xFFFFFFFF, fileBuffer.get(), fileSamples, mixingFrequency,
|
|
|
|
|
AudioFrame::kNormalSpeech, AudioFrame::kVadUnknown, 1);
|
|
|
|
|
}
|
|
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
int32_t Channel::MixAudioWithFile(AudioFrame& audioFrame, int mixingFrequency) {
|
|
|
|
|
assert(mixingFrequency <= 48000);
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-02-17 10:04:18 -08:00
|
|
|
std::unique_ptr<int16_t[]> fileBuffer(new int16_t[960]);
|
2016-01-28 05:22:45 -08:00
|
|
|
size_t fileSamples(0);
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
{
|
|
|
|
|
rtc::CritScope cs(&_fileCritSect);
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-08-17 07:31:12 -07:00
|
|
|
if (!output_file_player_) {
|
2016-01-28 05:22:45 -08:00
|
|
|
WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
|
|
|
|
|
"Channel::MixAudioWithFile() file mixing failed");
|
|
|
|
|
return -1;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
2016-01-28 05:22:45 -08:00
|
|
|
|
|
|
|
|
// We should get the frequency we ask for.
|
2016-08-22 08:43:54 -07:00
|
|
|
if (output_file_player_->Get10msAudioFromFile(
|
|
|
|
|
fileBuffer.get(), &fileSamples, mixingFrequency) == -1) {
|
2016-01-28 05:22:45 -08:00
|
|
|
WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
|
|
|
|
|
"Channel::MixAudioWithFile() file mixing failed");
|
|
|
|
|
return -1;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
2016-01-28 05:22:45 -08:00
|
|
|
}
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
if (audioFrame.samples_per_channel_ == fileSamples) {
|
|
|
|
|
// Currently file stream is always mono.
|
|
|
|
|
// TODO(xians): Change the code when FilePlayer supports real stereo.
|
|
|
|
|
MixWithSat(audioFrame.data_, audioFrame.num_channels_, fileBuffer.get(), 1,
|
|
|
|
|
fileSamples);
|
|
|
|
|
} else {
|
|
|
|
|
WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
|
|
|
|
|
"Channel::MixAudioWithFile() samples_per_channel_(%" PRIuS
|
|
|
|
|
") != "
|
|
|
|
|
"fileSamples(%" PRIuS ")",
|
|
|
|
|
audioFrame.samples_per_channel_, fileSamples);
|
|
|
|
|
return -1;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return 0;
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2015-08-13 12:09:10 -07:00
|
|
|
void Channel::UpdatePlayoutTimestamp(bool rtcp) {
|
2016-04-06 04:13:56 -07:00
|
|
|
jitter_buffer_playout_timestamp_ = audio_coding_->PlayoutTimestamp();
|
2015-08-13 12:09:10 -07:00
|
|
|
|
2016-04-06 04:13:56 -07:00
|
|
|
if (!jitter_buffer_playout_timestamp_) {
|
|
|
|
|
// This can happen if this channel has not received any RTP packets. In
|
|
|
|
|
// this case, NetEq is not capable of computing a playout timestamp.
|
2015-08-13 12:09:10 -07:00
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
uint16_t delay_ms = 0;
|
|
|
|
|
if (_audioDeviceModulePtr->PlayoutDelay(&delay_ms) == -1) {
|
2016-01-28 05:22:45 -08:00
|
|
|
WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
|
2015-08-13 12:09:10 -07:00
|
|
|
"Channel::UpdatePlayoutTimestamp() failed to read playout"
|
|
|
|
|
" delay from the ADM");
|
|
|
|
|
_engineStatisticsPtr->SetLastError(
|
|
|
|
|
VE_CANNOT_RETRIEVE_VALUE, kTraceError,
|
|
|
|
|
"UpdatePlayoutTimestamp() failed to retrieve playout delay");
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
2016-04-06 04:13:56 -07:00
|
|
|
RTC_DCHECK(jitter_buffer_playout_timestamp_);
|
|
|
|
|
uint32_t playout_timestamp = *jitter_buffer_playout_timestamp_;
|
2015-08-13 12:09:10 -07:00
|
|
|
|
|
|
|
|
// Remove the playout delay.
|
2016-10-12 11:04:10 -07:00
|
|
|
playout_timestamp -= (delay_ms * (GetRtpTimestampRateHz() / 1000));
|
2015-08-13 12:09:10 -07:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, _channelId),
|
2015-08-13 12:09:10 -07:00
|
|
|
"Channel::UpdatePlayoutTimestamp() => playoutTimestamp = %lu",
|
2016-04-06 04:13:56 -07:00
|
|
|
playout_timestamp);
|
2015-08-13 12:09:10 -07:00
|
|
|
|
|
|
|
|
{
|
2016-01-21 10:37:37 -08:00
|
|
|
rtc::CritScope lock(&video_sync_lock_);
|
2015-08-13 12:09:10 -07:00
|
|
|
if (rtcp) {
|
2016-04-06 04:13:56 -07:00
|
|
|
playout_timestamp_rtcp_ = playout_timestamp;
|
2015-08-13 12:09:10 -07:00
|
|
|
} else {
|
2016-04-06 04:13:56 -07:00
|
|
|
playout_timestamp_rtp_ = playout_timestamp;
|
2015-08-13 12:09:10 -07:00
|
|
|
}
|
|
|
|
|
playout_delay_ms_ = delay_ms;
|
|
|
|
|
}
|
|
|
|
|
}
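// Illustrative arithmetic (hypothetical values): the delay compensation above
// converts the ADM playout delay to RTP ticks, so with delay_ms = 60 and a
// 16000 Hz RTP timestamp rate the playout timestamp is moved back by
// 60 * (16000 / 1000) = 960 ticks.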
|
|
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
void Channel::RegisterReceiveCodecsToRTPModule() {
|
|
|
|
|
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
|
|
|
|
|
"Channel::RegisterReceiveCodecsToRTPModule()");
|
2011-07-07 08:21:25 +00:00
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
CodecInst codec;
|
|
|
|
|
const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
|
|
|
|
|
|
|
|
|
|
for (int idx = 0; idx < nSupportedCodecs; idx++) {
|
|
|
|
|
// Open up the RTP/RTCP receiver for all supported codecs
|
|
|
|
|
if ((audio_coding_->Codec(idx, &codec) == -1) ||
|
2016-11-24 09:34:46 -08:00
|
|
|
(rtp_receiver_->RegisterReceivePayload(codec) == -1)) {
|
2016-01-28 05:22:45 -08:00
|
|
|
WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
|
|
|
|
|
"Channel::RegisterReceiveCodecsToRTPModule() unable"
|
|
|
|
|
" to register %s (%d/%d/%" PRIuS
|
|
|
|
|
"/%d) to RTP/RTCP "
|
|
|
|
|
"receiver",
|
|
|
|
|
codec.plname, codec.pltype, codec.plfreq, codec.channels,
|
|
|
|
|
codec.rate);
|
|
|
|
|
} else {
|
|
|
|
|
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
|
|
|
|
|
"Channel::RegisterReceiveCodecsToRTPModule() %s "
|
|
|
|
|
"(%d/%d/%" PRIuS
|
|
|
|
|
"/%d) has been added to the RTP/RTCP "
|
|
|
|
|
"receiver",
|
|
|
|
|
codec.plname, codec.pltype, codec.plfreq, codec.channels,
|
|
|
|
|
codec.rate);
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
2016-01-28 05:22:45 -08:00
|
|
|
}
|
2011-07-07 08:21:25 +00:00
|
|
|
}
|
|
|
|
|
|
2016-01-28 05:22:45 -08:00
|
|
|
int Channel::SetSendRtpHeaderExtension(bool enable,
|
|
|
|
|
RTPExtensionType type,
|
2014-03-06 23:49:08 +00:00
|
|
|
unsigned char id) {
|
|
|
|
|
int error = 0;
|
|
|
|
|
_rtpRtcpModule->DeregisterSendRtpHeaderExtension(type);
|
|
|
|
|
if (enable) {
|
|
|
|
|
error = _rtpRtcpModule->RegisterSendRtpHeaderExtension(type, id);
|
|
|
|
|
}
|
|
|
|
|
return error;
|
|
|
|
|
}
|
2014-05-28 09:52:06 +00:00
|
|
|
|
2016-10-12 11:04:10 -07:00
|
|
|
int Channel::GetRtpTimestampRateHz() const {
|
|
|
|
|
const auto format = audio_coding_->ReceiveFormat();
|
|
|
|
|
// Default to the playout frequency if we've not gotten any packets yet.
|
|
|
|
|
// TODO(ossu): Zero clockrate can only happen if we've added an external
|
|
|
|
|
// decoder for a format we don't support internally. Remove once that way of
|
|
|
|
|
// adding decoders is gone!
|
|
|
|
|
return (format && format->clockrate_hz != 0)
|
|
|
|
|
? format->clockrate_hz
|
|
|
|
|
: audio_coding_->PlayoutFrequency();
|
2014-06-05 20:34:08 +00:00
|
|
|
}
|
|
|
|
|
|
2015-05-13 14:14:42 +02:00
|
|
|
int64_t Channel::GetRTT(bool allow_associate_channel) const {
|
2015-10-02 02:36:56 -07:00
|
|
|
RtcpMode method = _rtpRtcpModule->RTCP();
|
|
|
|
|
if (method == RtcpMode::kOff) {
|
2014-09-11 07:51:53 +00:00
|
|
|
return 0;
|
|
|
|
|
}
|
|
|
|
|
std::vector<RTCPReportBlock> report_blocks;
|
|
|
|
|
_rtpRtcpModule->RemoteRTCPStat(&report_blocks);
|
2015-05-13 14:14:42 +02:00
|
|
|
|
|
|
|
|
int64_t rtt = 0;
|
2014-09-11 07:51:53 +00:00
|
|
|
if (report_blocks.empty()) {
|
2015-05-13 14:14:42 +02:00
|
|
|
if (allow_associate_channel) {
|
2016-01-21 10:37:37 -08:00
|
|
|
rtc::CritScope lock(&assoc_send_channel_lock_);
|
2015-05-13 14:14:42 +02:00
|
|
|
Channel* channel = associate_send_channel_.channel();
|
|
|
|
|
// Tries to get RTT from an associated channel. This is important for
|
|
|
|
|
// receive-only channels.
|
|
|
|
|
if (channel) {
|
|
|
|
|
// To prevent infinite recursion and deadlock, calling GetRTT of
|
|
|
|
|
// the associated channel should always use "false" for the argument:
|
|
|
|
|
// |allow_associate_channel|.
|
|
|
|
|
rtt = channel->GetRTT(false);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
return rtt;
|
2014-09-11 07:51:53 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
uint32_t remoteSSRC = rtp_receiver_->SSRC();
|
|
|
|
|
std::vector<RTCPReportBlock>::const_iterator it = report_blocks.begin();
|
|
|
|
|
for (; it != report_blocks.end(); ++it) {
|
|
|
|
|
if (it->remoteSSRC == remoteSSRC)
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
if (it == report_blocks.end()) {
|
|
|
|
|
// We have not received packets with SSRC matching the report blocks.
|
|
|
|
|
// To calculate RTT we try with the SSRC of the first report block.
|
|
|
|
|
// This is very important for send-only channels where we don't know
|
|
|
|
|
// the SSRC of the other end.
|
|
|
|
|
remoteSSRC = report_blocks[0].remoteSSRC;
|
|
|
|
|
}
|
2015-05-13 14:14:42 +02:00
|
|
|
|
2015-01-12 21:51:21 +00:00
|
|
|
int64_t avg_rtt = 0;
|
2016-01-28 05:22:45 -08:00
|
|
|
int64_t max_rtt = 0;
|
2015-01-12 21:51:21 +00:00
|
|
|
int64_t min_rtt = 0;
|
2016-01-28 05:22:45 -08:00
|
|
|
if (_rtpRtcpModule->RTT(remoteSSRC, &rtt, &avg_rtt, &min_rtt, &max_rtt) !=
|
|
|
|
|
0) {
|
2014-09-11 07:51:53 +00:00
|
|
|
return 0;
|
|
|
|
|
}
|
2015-01-12 21:51:21 +00:00
|
|
|
return rtt;
|
2014-09-11 07:51:53 +00:00
|
|
|
}
|
|
|
|
|
|
2013-07-03 15:12:26 +00:00
|
|
|
} // namespace voe
|
|
|
|
|
} // namespace webrtc
|