/*
 *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */
|
2016-02-12 06:39:40 +01:00
|
|
|
#include "webrtc/media/engine/webrtcvideoengine2.h"
|
2014-05-13 11:07:01 +00:00
|
|
|
|
2016-03-21 04:15:50 -07:00
|
|
|
#include <stdio.h>
|
2015-01-05 18:51:13 +00:00
|
|
|
#include <algorithm>
|
2014-07-20 15:27:35 +00:00
|
|
|
#include <set>
|
2014-05-13 11:07:01 +00:00
|
|
|
#include <string>
|
|
|
|
|
|
2016-03-20 06:15:43 -07:00
|
|
|
#include "webrtc/base/copyonwritebuffer.h"
|
2014-08-13 17:26:08 +00:00
|
|
|
#include "webrtc/base/logging.h"
|
|
|
|
|
#include "webrtc/base/stringutils.h"
|
2015-07-16 10:27:16 -07:00
|
|
|
#include "webrtc/base/timeutils.h"
|
2015-10-20 23:00:48 -07:00
|
|
|
#include "webrtc/base/trace_event.h"
|
2014-05-13 11:07:01 +00:00
|
|
|
#include "webrtc/call.h"
|
2016-02-12 06:39:40 +01:00
|
|
|
#include "webrtc/media/engine/constants.h"
|
|
|
|
|
#include "webrtc/media/engine/simulcast.h"
|
|
|
|
|
#include "webrtc/media/engine/webrtcmediaengine.h"
|
|
|
|
|
#include "webrtc/media/engine/webrtcvideoencoderfactory.h"
|
|
|
|
|
#include "webrtc/media/engine/webrtcvideoframe.h"
|
|
|
|
|
#include "webrtc/media/engine/webrtcvoiceengine.h"
|
2015-06-29 14:34:58 -07:00
|
|
|
#include "webrtc/modules/video_coding/codecs/h264/include/h264.h"
|
2015-05-07 11:41:09 +02:00
|
|
|
#include "webrtc/modules/video_coding/codecs/vp8/simulcast_encoder_adapter.h"
|
2016-05-14 02:03:18 +02:00
|
|
|
#include "webrtc/modules/video_coding/codecs/vp9/include/vp9.h"
|
2015-10-28 18:17:40 +01:00
|
|
|
#include "webrtc/system_wrappers/include/field_trial.h"
|
2016-08-22 23:56:48 -07:00
|
|
|
#include "webrtc/system_wrappers/include/metrics.h"
|
2014-10-29 15:28:39 +00:00
|
|
|
#include "webrtc/video_decoder.h"
|
2014-09-17 09:02:25 +00:00
|
|
|
#include "webrtc/video_encoder.h"
|
2014-05-13 11:07:01 +00:00
|
|
|
|
|
|
|
|
namespace cricket {
|
2014-10-14 04:25:33 +00:00
|
|
|
namespace {
|
2015-05-07 11:41:09 +02:00
|
|
|
|
|
|
|
|
// Wrap cricket::WebRtcVideoEncoderFactory as a webrtc::VideoEncoderFactory.
|
|
|
|
|
class EncoderFactoryAdapter : public webrtc::VideoEncoderFactory {
|
|
|
|
|
public:
|
|
|
|
|
// EncoderFactoryAdapter doesn't take ownership of |factory|, which is owned
|
|
|
|
|
// by e.g. PeerConnectionFactory.
|
|
|
|
|
explicit EncoderFactoryAdapter(cricket::WebRtcVideoEncoderFactory* factory)
|
|
|
|
|
: factory_(factory) {}
|
|
|
|
|
virtual ~EncoderFactoryAdapter() {}
|
|
|
|
|
|
|
|
|
|
// Implement webrtc::VideoEncoderFactory.
|
|
|
|
|
webrtc::VideoEncoder* Create() override {
|
|
|
|
|
return factory_->CreateVideoEncoder(webrtc::kVideoCodecVP8);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
void Destroy(webrtc::VideoEncoder* encoder) override {
|
|
|
|
|
return factory_->DestroyVideoEncoder(encoder);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
private:
|
|
|
|
|
cricket::WebRtcVideoEncoderFactory* const factory_;
|
|
|
|
|
};
|
|
|
|
|
|
2016-01-27 16:45:21 +01:00
|
|
|
// Builds a bitrate configuration from the codec's
// x-google-{min,start,max}-bitrate parameters (kbps). A parameter that is
// absent or non-positive falls back to the given default (0 bps for min,
// -1 meaning "leave unchanged" for start and max).
webrtc::Call::Config::BitrateConfig GetBitrateConfigForCodec(
    const VideoCodec& codec) {
  webrtc::Call::Config::BitrateConfig config;
  // Reads |param| (kbps) from |codec|; returns it converted to bps, or
  // |default_bps| when the parameter is unset or non-positive.
  auto bitrate_bps_or = [&codec](const char* param, int default_bps) {
    int kbps;
    if (codec.GetParam(param, &kbps) && kbps > 0) {
      return kbps * 1000;
    }
    return default_bps;
  };
  config.min_bitrate_bps = bitrate_bps_or(kCodecParamMinBitrate, 0);
  // Do not reconfigure start bitrate unless it's specified and positive.
  config.start_bitrate_bps = bitrate_bps_or(kCodecParamStartBitrate, -1);
  config.max_bitrate_bps = bitrate_bps_or(kCodecParamMaxBitrate, -1);
  return config;
}
|
|
|
|
|
|
2015-05-07 11:41:09 +02:00
|
|
|
// An encoder factory that wraps Create requests for simulcastable codec types
|
|
|
|
|
// with a webrtc::SimulcastEncoderAdapter. Non simulcastable codec type
|
|
|
|
|
// requests are just passed through to the contained encoder factory.
|
|
|
|
|
class WebRtcSimulcastEncoderFactory
|
|
|
|
|
: public cricket::WebRtcVideoEncoderFactory {
|
|
|
|
|
public:
|
|
|
|
|
// WebRtcSimulcastEncoderFactory doesn't take ownership of |factory|, which is
|
|
|
|
|
// owned by e.g. PeerConnectionFactory.
|
|
|
|
|
explicit WebRtcSimulcastEncoderFactory(
|
|
|
|
|
cricket::WebRtcVideoEncoderFactory* factory)
|
|
|
|
|
: factory_(factory) {}
|
|
|
|
|
|
|
|
|
|
static bool UseSimulcastEncoderFactory(
|
|
|
|
|
const std::vector<VideoCodec>& codecs) {
|
|
|
|
|
// If any codec is VP8, use the simulcast factory. If asked to create a
|
|
|
|
|
// non-VP8 codec, we'll just return a contained factory encoder directly.
|
|
|
|
|
for (const auto& codec : codecs) {
|
|
|
|
|
if (codec.type == webrtc::kVideoCodecVP8) {
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
webrtc::VideoEncoder* CreateVideoEncoder(
|
|
|
|
|
webrtc::VideoCodecType type) override {
|
2015-09-17 00:24:34 -07:00
|
|
|
RTC_DCHECK(factory_ != NULL);
|
2015-05-07 11:41:09 +02:00
|
|
|
// If it's a codec type we can simulcast, create a wrapped encoder.
|
|
|
|
|
if (type == webrtc::kVideoCodecVP8) {
|
|
|
|
|
return new webrtc::SimulcastEncoderAdapter(
|
|
|
|
|
new EncoderFactoryAdapter(factory_));
|
|
|
|
|
}
|
|
|
|
|
webrtc::VideoEncoder* encoder = factory_->CreateVideoEncoder(type);
|
|
|
|
|
if (encoder) {
|
|
|
|
|
non_simulcast_encoders_.push_back(encoder);
|
|
|
|
|
}
|
|
|
|
|
return encoder;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
const std::vector<VideoCodec>& codecs() const override {
|
|
|
|
|
return factory_->codecs();
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
bool EncoderTypeHasInternalSource(
|
|
|
|
|
webrtc::VideoCodecType type) const override {
|
|
|
|
|
return factory_->EncoderTypeHasInternalSource(type);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
void DestroyVideoEncoder(webrtc::VideoEncoder* encoder) override {
|
|
|
|
|
// Check first to see if the encoder wasn't wrapped in a
|
|
|
|
|
// SimulcastEncoderAdapter. In that case, ask the factory to destroy it.
|
|
|
|
|
if (std::remove(non_simulcast_encoders_.begin(),
|
|
|
|
|
non_simulcast_encoders_.end(),
|
|
|
|
|
encoder) != non_simulcast_encoders_.end()) {
|
|
|
|
|
factory_->DestroyVideoEncoder(encoder);
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Otherwise, SimulcastEncoderAdapter can be deleted directly, and will call
|
|
|
|
|
// DestroyVideoEncoder on the factory for individual encoder instances.
|
|
|
|
|
delete encoder;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
private:
|
|
|
|
|
cricket::WebRtcVideoEncoderFactory* factory_;
|
|
|
|
|
// A list of encoders that were created without being wrapped in a
|
|
|
|
|
// SimulcastEncoderAdapter.
|
|
|
|
|
std::vector<webrtc::VideoEncoder*> non_simulcast_encoders_;
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
bool CodecIsInternallySupported(const std::string& codec_name) {
|
|
|
|
|
if (CodecNamesEq(codec_name, kVp8CodecName)) {
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
if (CodecNamesEq(codec_name, kVp9CodecName)) {
|
2016-05-14 02:03:18 +02:00
|
|
|
return webrtc::VP9Encoder::IsSupported() &&
|
|
|
|
|
webrtc::VP9Decoder::IsSupported();
|
2015-05-07 11:41:09 +02:00
|
|
|
}
|
2015-06-29 14:34:58 -07:00
|
|
|
if (CodecNamesEq(codec_name, kH264CodecName)) {
|
|
|
|
|
return webrtc::H264Encoder::IsSupported() &&
|
|
|
|
|
webrtc::H264Decoder::IsSupported();
|
|
|
|
|
}
|
2015-05-07 11:41:09 +02:00
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Attaches the RTCP feedback mechanisms every default video codec advertises:
// CCM/FIR, NACK, NACK/PLI, REMB and transport-cc (order preserved for SDP).
void AddDefaultFeedbackParams(VideoCodec* codec) {
  auto add_param = [codec](const char* id, const char* value) {
    codec->AddFeedbackParam(FeedbackParam(id, value));
  };
  add_param(kRtcpFbParamCcm, kRtcpFbCcmParamFir);
  add_param(kRtcpFbParamNack, kParamValueEmpty);
  add_param(kRtcpFbParamNack, kRtcpFbNackParamPli);
  add_param(kRtcpFbParamRemb, kParamValueEmpty);
  add_param(kRtcpFbParamTransportCc, kParamValueEmpty);
}
|
|
|
|
|
|
|
|
|
|
// Builds a VideoCodec with the engine's default max resolution and framerate
// and the standard RTCP feedback parameters attached.
static VideoCodec MakeVideoCodecWithDefaultFeedbackParams(int payload_type,
                                                          const char* name) {
  VideoCodec codec(payload_type, name, kDefaultVideoMaxWidth,
                   kDefaultVideoMaxHeight, kDefaultVideoMaxFramerate);
  AddDefaultFeedbackParams(&codec);
  return codec;
}
|
|
|
|
|
|
2014-10-14 04:25:33 +00:00
|
|
|
// Formats |codecs| as "{codec1, codec2, ...}" for log output.
static std::string CodecVectorToString(const std::vector<VideoCodec>& codecs) {
  std::stringstream out;
  out << '{';
  const char* separator = "";
  for (const VideoCodec& codec : codecs) {
    out << separator << codec.ToString();
    separator = ", ";
  }
  out << '}';
  return out.str();
}
|
|
|
|
|
|
|
|
|
|
static bool ValidateCodecFormats(const std::vector<VideoCodec>& codecs) {
|
|
|
|
|
bool has_video = false;
|
|
|
|
|
for (size_t i = 0; i < codecs.size(); ++i) {
|
|
|
|
|
if (!codecs[i].ValidateCodecFormat()) {
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
if (codecs[i].GetCodecType() == VideoCodec::CODEC_VIDEO) {
|
|
|
|
|
has_video = true;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
if (!has_video) {
|
|
|
|
|
LOG(LS_ERROR) << "Setting codecs without a video codec is invalid: "
|
|
|
|
|
<< CodecVectorToString(codecs);
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
|
2015-03-25 14:17:23 +01:00
|
|
|
static bool ValidateStreamParams(const StreamParams& sp) {
|
|
|
|
|
if (sp.ssrcs.empty()) {
|
|
|
|
|
LOG(LS_ERROR) << "No SSRCs in stream parameters: " << sp.ToString();
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
Use suffixed {uint,int}{8,16,32,64}_t types.
Removes the use of uint8, etc. in favor of uint8_t.
BUG=webrtc:5024
R=henrik.lundin@webrtc.org, henrikg@webrtc.org, perkj@webrtc.org, solenberg@webrtc.org, stefan@webrtc.org, tina.legrand@webrtc.org
Review URL: https://codereview.webrtc.org/1362503003 .
Cr-Commit-Position: refs/heads/master@{#10196}
2015-10-07 12:23:21 +02:00
|
|
|
std::vector<uint32_t> primary_ssrcs;
|
2015-03-25 14:17:23 +01:00
|
|
|
sp.GetPrimarySsrcs(&primary_ssrcs);
|
Use suffixed {uint,int}{8,16,32,64}_t types.
Removes the use of uint8, etc. in favor of uint8_t.
BUG=webrtc:5024
R=henrik.lundin@webrtc.org, henrikg@webrtc.org, perkj@webrtc.org, solenberg@webrtc.org, stefan@webrtc.org, tina.legrand@webrtc.org
Review URL: https://codereview.webrtc.org/1362503003 .
Cr-Commit-Position: refs/heads/master@{#10196}
2015-10-07 12:23:21 +02:00
|
|
|
std::vector<uint32_t> rtx_ssrcs;
|
2015-03-25 14:17:23 +01:00
|
|
|
sp.GetFidSsrcs(primary_ssrcs, &rtx_ssrcs);
|
|
|
|
|
for (uint32_t rtx_ssrc : rtx_ssrcs) {
|
|
|
|
|
bool rtx_ssrc_present = false;
|
|
|
|
|
for (uint32_t sp_ssrc : sp.ssrcs) {
|
|
|
|
|
if (sp_ssrc == rtx_ssrc) {
|
|
|
|
|
rtx_ssrc_present = true;
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
if (!rtx_ssrc_present) {
|
|
|
|
|
LOG(LS_ERROR) << "RTX SSRC '" << rtx_ssrc
|
|
|
|
|
<< "' missing from StreamParams ssrcs: " << sp.ToString();
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
if (!rtx_ssrcs.empty() && primary_ssrcs.size() != rtx_ssrcs.size()) {
|
|
|
|
|
LOG(LS_ERROR)
|
|
|
|
|
<< "RTX SSRCs exist, but don't cover all SSRCs (unsupported): "
|
|
|
|
|
<< sp.ToString();
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
|
2016-01-27 16:45:21 +01:00
|
|
|
inline bool ContainsHeaderExtension(
|
2015-04-01 15:33:06 -07:00
|
|
|
const std::vector<webrtc::RtpExtension>& extensions,
|
2016-05-26 11:24:55 -07:00
|
|
|
const std::string& uri) {
|
2015-04-01 15:33:06 -07:00
|
|
|
for (const auto& kv : extensions) {
|
2016-05-26 11:24:55 -07:00
|
|
|
if (kv.uri == uri) {
|
2016-01-27 16:45:21 +01:00
|
|
|
return true;
|
2015-04-01 15:33:06 -07:00
|
|
|
}
|
|
|
|
|
}
|
2016-01-27 16:45:21 +01:00
|
|
|
return false;
|
2015-04-01 15:33:06 -07:00
|
|
|
}
|
|
|
|
|
|
2015-08-27 01:59:29 -07:00
|
|
|
// Returns true if the given codec is disallowed from doing simulcast.
|
|
|
|
|
bool IsCodecBlacklistedForSimulcast(const std::string& codec_name) {
|
2015-11-10 05:16:26 -08:00
|
|
|
return CodecNamesEq(codec_name, kH264CodecName) ||
|
|
|
|
|
CodecNamesEq(codec_name, kVp9CodecName);
|
2015-08-27 01:59:29 -07:00
|
|
|
}
|
|
|
|
|
|
2015-09-08 09:21:43 +02:00
|
|
|
// The selected thresholds for QVGA and VGA corresponded to a QP around 10.
// The change in QP declined above the selected bitrates.
// Maps a frame area (width * height) onto a default max bitrate in kbps.
static int GetMaxDefaultVideoBitrateKbps(int width, int height) {
  const int pixels = width * height;
  if (pixels <= 320 * 240)
    return 600;   // Up to QVGA.
  if (pixels <= 640 * 480)
    return 1700;  // Up to VGA.
  if (pixels <= 960 * 540)
    return 2000;  // Up to qHD.
  return 2500;    // Anything larger.
}
|
2016-02-29 00:04:41 -08:00
|
|
|
|
2016-03-21 04:15:50 -07:00
|
|
|
// Parses the "WebRTC-SupportVP9SVC" field trial string, which is expected in
// the form "EnabledByFlag_<N>SL<M>TL". Returns true only when the trial is set
// and both layer counts are within their valid ranges (1-2 spatial, 1-3
// temporal). Note: on a failed parse the outputs may still have been
// partially written by sscanf.
bool GetVp9LayersFromFieldTrialGroup(int* num_spatial_layers,
                                     int* num_temporal_layers) {
  std::string group = webrtc::field_trial::FindFullName("WebRTC-SupportVP9SVC");
  if (group.empty())
    return false;

  // Both conversions must succeed for the trial string to count.
  if (sscanf(group.c_str(), "EnabledByFlag_%dSL%dTL", num_spatial_layers,
             num_temporal_layers) != 2) {
    return false;
  }
  const int kMaxSpatialLayers = 2;
  if (*num_spatial_layers > kMaxSpatialLayers || *num_spatial_layers < 1)
    return false;

  const int kMaxTemporalLayers = 3;
  if (*num_temporal_layers > kMaxTemporalLayers || *num_temporal_layers < 1)
    return false;

  return true;
}
|
|
|
|
|
|
|
|
|
|
int GetDefaultVp9SpatialLayers() {
|
|
|
|
|
int num_sl;
|
|
|
|
|
int num_tl;
|
|
|
|
|
if (GetVp9LayersFromFieldTrialGroup(&num_sl, &num_tl)) {
|
|
|
|
|
return num_sl;
|
|
|
|
|
}
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
int GetDefaultVp9TemporalLayers() {
|
|
|
|
|
int num_sl;
|
|
|
|
|
int num_tl;
|
|
|
|
|
if (GetVp9LayersFromFieldTrialGroup(&num_sl, &num_tl)) {
|
|
|
|
|
return num_tl;
|
|
|
|
|
}
|
|
|
|
|
return 1;
|
|
|
|
|
}
|
2014-10-14 04:25:33 +00:00
|
|
|
} // namespace
|
2014-05-13 11:07:01 +00:00
|
|
|
|
2016-02-12 06:39:40 +01:00
|
|
|
// Constants defined in webrtc/media/engine/constants.h
// TODO(pbos): Move these to a separate constants.cc file.
const int kMinVideoBitrate = 30;    // kbps (multiplied by 1000 where used).
const int kStartVideoBitrate = 300; // kbps.

const int kVideoMtu = 1200;
const int kVideoRtpBufferSize = 65536;

// This constant is really an on/off, lower-level configurable NACK history
// duration hasn't been implemented.
static const int kNackHistoryMs = 1000;

// Default max quantizer when the codec carries no max-quantization parameter.
static const int kDefaultQpMax = 56;

static const int kDefaultRtcpReceiverReportSsrc = 1;

// Down grade resolution at most 2 times for CPU reasons.
static const int kMaxCpuDowngrades = 2;

// Minimum time interval for logging stats.
static const int64_t kStatsLogIntervalMs = 10000;
|
|
|
|
|
|
2016-06-29 11:14:19 -07:00
|
|
|
// Adds |codec| to |list|, and also adds an RTX codec if |codec|'s name is
|
|
|
|
|
// recognized.
|
|
|
|
|
// TODO(deadbeef): Should we add RTX codecs for external codecs whose names we
|
|
|
|
|
// don't recognize?
|
|
|
|
|
void AddCodecAndMaybeRtxCodec(const VideoCodec& codec,
|
|
|
|
|
std::vector<VideoCodec>* codecs) {
|
|
|
|
|
codecs->push_back(codec);
|
|
|
|
|
int rtx_payload_type = 0;
|
|
|
|
|
if (CodecNamesEq(codec.name, kVp8CodecName)) {
|
|
|
|
|
rtx_payload_type = kDefaultRtxVp8PlType;
|
|
|
|
|
} else if (CodecNamesEq(codec.name, kVp9CodecName)) {
|
|
|
|
|
rtx_payload_type = kDefaultRtxVp9PlType;
|
|
|
|
|
} else if (CodecNamesEq(codec.name, kH264CodecName)) {
|
|
|
|
|
rtx_payload_type = kDefaultRtxH264PlType;
|
|
|
|
|
} else if (CodecNamesEq(codec.name, kRedCodecName)) {
|
|
|
|
|
rtx_payload_type = kDefaultRtxRedPlType;
|
|
|
|
|
} else {
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
codecs->push_back(VideoCodec::CreateRtxCodec(rtx_payload_type, codec.id));
|
|
|
|
|
}
|
|
|
|
|
|
2015-05-07 11:41:09 +02:00
|
|
|
// Builds the codec list advertised by default: VP8 (always), then VP9 and
// H264 when compiled in, then RED, each with its RTX pairing, and finally
// ULPFEC. Order is preserved in the generated SDP.
std::vector<VideoCodec> DefaultVideoCodecList() {
  std::vector<VideoCodec> codecs;
  AddCodecAndMaybeRtxCodec(
      MakeVideoCodecWithDefaultFeedbackParams(kDefaultVp8PlType, kVp8CodecName),
      &codecs);
  if (CodecIsInternallySupported(kVp9CodecName)) {
    AddCodecAndMaybeRtxCodec(MakeVideoCodecWithDefaultFeedbackParams(
                                 kDefaultVp9PlType, kVp9CodecName),
                             &codecs);
  }
  if (CodecIsInternallySupported(kH264CodecName)) {
    VideoCodec codec = MakeVideoCodecWithDefaultFeedbackParams(
        kDefaultH264PlType, kH264CodecName);
    // TODO(hta): Move all parameter generation for SDP into the codec
    // implementation, for all codecs and parameters.
    // TODO(hta): Move selection of profile-level-id to H.264 codec
    // implementation.
    // TODO(hta): Set FMTP parameters for all codecs of type H264.
    codec.SetParam(kH264FmtpProfileLevelId,
                   kH264ProfileLevelConstrainedBaseline);
    codec.SetParam(kH264FmtpLevelAsymmetryAllowed, "1");
    codec.SetParam(kH264FmtpPacketizationMode, "1");
    AddCodecAndMaybeRtxCodec(codec, &codecs);
  }
  AddCodecAndMaybeRtxCodec(VideoCodec(kDefaultRedPlType, kRedCodecName),
                           &codecs);
  // ULPFEC gets no RTX pairing.
  codecs.push_back(VideoCodec(kDefaultUlpfecType, kUlpfecCodecName));
  return codecs;
}
|
|
|
|
|
|
2014-12-10 09:01:18 +00:00
|
|
|
// Produces the simulcast stream configuration for |codec|: reads the max QP
// from the codec parameters (falling back to kDefaultQpMax) and delegates the
// per-layer sizing to GetSimulcastConfig().
std::vector<webrtc::VideoStream>
WebRtcVideoChannel2::WebRtcVideoSendStream::CreateSimulcastVideoStreams(
    const VideoCodec& codec,
    const VideoOptions& options,
    int max_bitrate_bps,
    size_t num_streams) {
  int max_qp = kDefaultQpMax;
  // GetParam leaves |max_qp| untouched when the parameter is absent.
  codec.GetParam(kCodecParamMaxQuantization, &max_qp);

  return GetSimulcastConfig(
      num_streams, codec.width, codec.height, max_bitrate_bps, max_qp,
      codec.framerate != 0 ? codec.framerate : kDefaultVideoMaxFramerate);
}
|
|
|
|
|
|
2015-01-14 17:29:27 +00:00
|
|
|
// Builds the webrtc::VideoStream list for this send stream. A max-bitrate
// parameter on the codec itself overrides |max_bitrate_bps|; requests for
// more than one stream are delegated to CreateSimulcastVideoStreams().
std::vector<webrtc::VideoStream>
WebRtcVideoChannel2::WebRtcVideoSendStream::CreateVideoStreams(
    const VideoCodec& codec,
    const VideoOptions& options,
    int max_bitrate_bps,
    size_t num_streams) {
  int codec_max_bitrate_kbps;
  if (codec.GetParam(kCodecParamMaxBitrate, &codec_max_bitrate_kbps)) {
    max_bitrate_bps = codec_max_bitrate_kbps * 1000;
  }
  if (num_streams != 1) {
    return CreateSimulcastVideoStreams(codec, options, max_bitrate_bps,
                                       num_streams);
  }

  // For unset max bitrates set default bitrate for non-simulcast.
  if (max_bitrate_bps <= 0) {
    max_bitrate_bps =
        GetMaxDefaultVideoBitrateKbps(codec.width, codec.height) * 1000;
  }

  webrtc::VideoStream stream;
  stream.width = codec.width;
  stream.height = codec.height;
  stream.max_framerate =
      codec.framerate != 0 ? codec.framerate : kDefaultVideoMaxFramerate;

  stream.min_bitrate_bps = kMinVideoBitrate * 1000;
  // Target and max are set to the same value for a single stream.
  stream.target_bitrate_bps = stream.max_bitrate_bps = max_bitrate_bps;

  int max_qp = kDefaultQpMax;
  codec.GetParam(kCodecParamMaxQuantization, &max_qp);
  stream.max_qp = max_qp;
  std::vector<webrtc::VideoStream> streams;
  streams.push_back(stream);
  return streams;
}
|
2014-05-13 11:07:01 +00:00
|
|
|
|
2015-01-14 17:29:27 +00:00
|
|
|
// Fills in the codec-specific encoder settings for |codec| and returns a
// pointer into the |encoder_settings_| member (so the pointer is only valid
// until the next call). Returns NULL for codec names without specific
// settings.
void* WebRtcVideoChannel2::WebRtcVideoSendStream::ConfigureVideoEncoderSettings(
    const VideoCodec& codec) {
  bool is_screencast = parameters_.options.is_screencast.value_or(false);
  // No automatic resizing when using simulcast or screencast.
  bool automatic_resize =
      !is_screencast && parameters_.config.rtp.ssrcs.size() == 1;
  // Screencast content must not drop frames.
  bool frame_dropping = !is_screencast;
  bool denoising;
  bool codec_default_denoising = false;
  if (is_screencast) {
    denoising = false;
  } else {
    // Use codec default if video_noise_reduction is unset.
    codec_default_denoising = !parameters_.options.video_noise_reduction;
    denoising = parameters_.options.video_noise_reduction.value_or(false);
  }

  if (CodecNamesEq(codec.name, kH264CodecName)) {
    encoder_settings_.h264 = webrtc::VideoEncoder::GetDefaultH264Settings();
    encoder_settings_.h264.frameDroppingOn = frame_dropping;
    return &encoder_settings_.h264;
  }
  if (CodecNamesEq(codec.name, kVp8CodecName)) {
    encoder_settings_.vp8 = webrtc::VideoEncoder::GetDefaultVp8Settings();
    encoder_settings_.vp8.automaticResizeOn = automatic_resize;
    // VP8 denoising is enabled by default.
    encoder_settings_.vp8.denoisingOn =
        codec_default_denoising ? true : denoising;
    encoder_settings_.vp8.frameDroppingOn = frame_dropping;
    return &encoder_settings_.vp8;
  }
  if (CodecNamesEq(codec.name, kVp9CodecName)) {
    encoder_settings_.vp9 = webrtc::VideoEncoder::GetDefaultVp9Settings();
    if (is_screencast) {
      // TODO(asapersson): Set to 2 for now since there is a DCHECK in
      // VideoSendStream::ReconfigureVideoEncoder.
      encoder_settings_.vp9.numberOfSpatialLayers = 2;
    } else {
      encoder_settings_.vp9.numberOfSpatialLayers =
          GetDefaultVp9SpatialLayers();
    }
    // VP9 denoising is disabled by default.
    encoder_settings_.vp9.denoisingOn =
        codec_default_denoising ? false : denoising;
    encoder_settings_.vp9.frameDroppingOn = frame_dropping;
    return &encoder_settings_.vp9;
  }
  return NULL;
}
|
|
|
|
|
|
2014-08-12 23:17:13 +00:00
|
|
|
// Starts with no default receive stream (SSRC 0 means "none yet") and no
// sink attached.
DefaultUnsignalledSsrcHandler::DefaultUnsignalledSsrcHandler()
    : default_recv_ssrc_(0), default_sink_(NULL) {}
|
2014-08-12 23:17:13 +00:00
|
|
|
|
|
|
|
|
// Called when a packet arrives on an SSRC that no receive stream was signaled
// for. Creates a single "default" receive stream for the first such SSRC and
// delivers the packet; packets for further unknown SSRCs are dropped.
UnsignalledSsrcHandler::Action DefaultUnsignalledSsrcHandler::OnUnsignalledSsrc(
    WebRtcVideoChannel2* channel,
    uint32_t ssrc) {
  if (default_recv_ssrc_ != 0) {  // Already one default stream.
    LOG(LS_WARNING) << "Unknown SSRC, but default receive stream already set.";
    return kDropPacket;
  }

  StreamParams sp;
  sp.ssrcs.push_back(ssrc);
  LOG(LS_INFO) << "Creating default receive stream for SSRC=" << ssrc << ".";
  if (!channel->AddRecvStream(sp, true)) {
    LOG(LS_WARNING) << "Could not create default receive stream.";
  }

  // Attach the currently configured default sink (may be null).
  channel->SetSink(ssrc, default_sink_);
  default_recv_ssrc_ = ssrc;
  return kDeliverPacket;
}
|
|
|
|
|
|
2016-02-04 01:24:52 -08:00
|
|
|
// Returns the sink that default (unsignalled) receive streams render to;
// may be null when none has been set.
rtc::VideoSinkInterface<VideoFrame>*
DefaultUnsignalledSsrcHandler::GetDefaultSink() const {
  return default_sink_;
}
|
|
|
|
|
|
2016-02-04 01:24:52 -08:00
|
|
|
void DefaultUnsignalledSsrcHandler::SetDefaultSink(
|
2014-08-12 23:17:13 +00:00
|
|
|
VideoMediaChannel* channel,
|
2016-02-04 01:24:52 -08:00
|
|
|
rtc::VideoSinkInterface<VideoFrame>* sink) {
|
|
|
|
|
default_sink_ = sink;
|
2014-08-12 23:17:13 +00:00
|
|
|
if (default_recv_ssrc_ != 0) {
|
2016-02-04 01:24:52 -08:00
|
|
|
channel->SetSink(default_recv_ssrc_, default_sink_);
|
2014-08-12 23:17:13 +00:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2015-09-15 12:26:33 +02:00
|
|
|
// Constructs the engine un-initialized and with no external codec factories;
// the advertised codec list starts as the internal defaults.
WebRtcVideoEngine2::WebRtcVideoEngine2()
    : initialized_(false),
      external_decoder_factory_(NULL),
      external_encoder_factory_(NULL) {
  LOG(LS_INFO) << "WebRtcVideoEngine2::WebRtcVideoEngine2()";
  video_codecs_ = GetSupportedCodecs();
}
|
|
|
|
|
|
|
|
|
|
// Only logs; the factory pointers are not owned and members clean up
// automatically.
WebRtcVideoEngine2::~WebRtcVideoEngine2() {
  LOG(LS_INFO) << "WebRtcVideoEngine2::~WebRtcVideoEngine2";
}
|
|
|
|
|
|
2015-05-22 09:04:09 +02:00
|
|
|
// Marks the engine initialized. The external codec factories must be
// installed before this point (the setters RTC_DCHECK on !initialized_).
void WebRtcVideoEngine2::Init() {
  LOG(LS_INFO) << "WebRtcVideoEngine2::Init";
  initialized_ = true;
}
|
|
|
|
|
|
|
|
|
|
// Creates a new heap-allocated media channel bound to |call|; the engine must
// have been Init()ed first. NOTE(review): presumably the caller takes
// ownership of the returned channel — confirm against call sites.
WebRtcVideoChannel2* WebRtcVideoEngine2::CreateChannel(
    webrtc::Call* call,
    const MediaConfig& config,
    const VideoOptions& options) {
  RTC_DCHECK(initialized_);
  LOG(LS_INFO) << "CreateChannel. Options: " << options.ToString();
  return new WebRtcVideoChannel2(call, config, options, video_codecs_,
                                 external_encoder_factory_,
                                 external_decoder_factory_);
}
|
|
|
|
|
|
|
|
|
|
// Returns the codec list the engine currently advertises (recomputed when an
// external encoder factory is installed).
const std::vector<VideoCodec>& WebRtcVideoEngine2::codecs() const {
  return video_codecs_;
}
|
|
|
|
|
|
2015-12-07 10:45:43 +01:00
|
|
|
// Lists the RTP header extensions the video engine supports: timestamp
// offset, absolute send time, video rotation, transport-wide sequence numbers
// (only behind the WebRTC-SendSideBwe field trial) and playout delay.
RtpCapabilities WebRtcVideoEngine2::GetCapabilities() const {
  RtpCapabilities capabilities;
  auto add_extension = [&capabilities](const char* uri, int id) {
    capabilities.header_extensions.push_back(webrtc::RtpExtension(uri, id));
  };
  add_extension(webrtc::RtpExtension::kTimestampOffsetUri,
                webrtc::RtpExtension::kTimestampOffsetDefaultId);
  add_extension(webrtc::RtpExtension::kAbsSendTimeUri,
                webrtc::RtpExtension::kAbsSendTimeDefaultId);
  add_extension(webrtc::RtpExtension::kVideoRotationUri,
                webrtc::RtpExtension::kVideoRotationDefaultId);
  if (webrtc::field_trial::FindFullName("WebRTC-SendSideBwe") == "Enabled") {
    add_extension(webrtc::RtpExtension::kTransportSequenceNumberUri,
                  webrtc::RtpExtension::kTransportSequenceNumberDefaultId);
  }
  add_extension(webrtc::RtpExtension::kPlayoutDelayUri,
                webrtc::RtpExtension::kPlayoutDelayDefaultId);
  return capabilities;
}
|
|
|
|
|
|
2014-09-23 09:40:22 +00:00
|
|
|
// Installs a factory for external (e.g. hardware) video decoders. Must not
// be called once the engine is initialized (enforced by the RTC_DCHECK).
// Only the raw pointer is stored — presumably the caller retains ownership
// of the factory; confirm against the header's contract.
void WebRtcVideoEngine2::SetExternalDecoderFactory(
    WebRtcVideoDecoderFactory* decoder_factory) {
  RTC_DCHECK(!initialized_);
  external_decoder_factory_ = decoder_factory;
}
|
|
|
|
|
|
|
|
|
|
// Installs a factory for external video encoders and rebuilds the supported
// codec list. Must not be called once the engine is initialized. If the
// factory's codec list qualifies for simulcast
// (WebRtcSimulcastEncoderFactory::UseSimulcastEncoderFactory), the factory
// is transparently wrapped in a WebRtcSimulcastEncoderFactory owned by this
// engine.
void WebRtcVideoEngine2::SetExternalEncoderFactory(
    WebRtcVideoEncoderFactory* encoder_factory) {
  RTC_DCHECK(!initialized_);
  // Re-installing the same factory is a no-op; note this also preserves any
  // existing simulcast wrapper around it.
  if (external_encoder_factory_ == encoder_factory)
    return;

  // No matter what happens we shouldn't hold on to a stale
  // WebRtcSimulcastEncoderFactory.
  simulcast_encoder_factory_.reset();

  if (encoder_factory &&
      WebRtcSimulcastEncoderFactory::UseSimulcastEncoderFactory(
          encoder_factory->codecs())) {
    simulcast_encoder_factory_.reset(
        new WebRtcSimulcastEncoderFactory(encoder_factory));
    // From here on, the wrapper is what the engine hands out as "the"
    // external factory.
    encoder_factory = simulcast_encoder_factory_.get();
  }
  external_encoder_factory_ = encoder_factory;

  // The external factory may contribute additional codecs, so the cached
  // codec list must be recomputed.
  video_codecs_ = GetSupportedCodecs();
}
|
|
|
|
|
|
2014-10-14 04:25:33 +00:00
|
|
|
std::vector<VideoCodec> WebRtcVideoEngine2::GetSupportedCodecs() const {
|
2014-11-07 09:37:54 +00:00
|
|
|
std::vector<VideoCodec> supported_codecs = DefaultVideoCodecList();
|
2014-10-14 04:25:33 +00:00
|
|
|
|
|
|
|
|
if (external_encoder_factory_ == NULL) {
|
2016-04-25 11:03:48 +02:00
|
|
|
LOG(LS_INFO) << "Supported codecs: "
|
|
|
|
|
<< CodecVectorToString(supported_codecs);
|
2014-10-14 04:25:33 +00:00
|
|
|
return supported_codecs;
|
|
|
|
|
}
|
|
|
|
|
|
2016-04-25 11:03:48 +02:00
|
|
|
std::stringstream out;
|
2014-10-14 04:25:33 +00:00
|
|
|
const std::vector<WebRtcVideoEncoderFactory::VideoCodec>& codecs =
|
|
|
|
|
external_encoder_factory_->codecs();
|
|
|
|
|
for (size_t i = 0; i < codecs.size(); ++i) {
|
2016-04-25 11:03:48 +02:00
|
|
|
out << codecs[i].name;
|
|
|
|
|
if (i != codecs.size() - 1) {
|
|
|
|
|
out << ", ";
|
|
|
|
|
}
|
2014-10-14 04:25:33 +00:00
|
|
|
// Don't add internally-supported codecs twice.
|
|
|
|
|
if (CodecIsInternallySupported(codecs[i].name)) {
|
|
|
|
|
continue;
|
|
|
|
|
}
|
|
|
|
|
|
2015-02-23 21:28:22 +00:00
|
|
|
// External video encoders are given payloads 120-127. This also means that
|
|
|
|
|
// we only support up to 8 external payload types.
|
2016-06-29 11:14:19 -07:00
|
|
|
// TODO(deadbeef): mediasession.cc already has code to dynamically
|
|
|
|
|
// determine a payload type. We should be able to just leave the payload
|
|
|
|
|
// type empty and let mediasession determine it. However, currently RTX
|
|
|
|
|
// codecs are associated to codecs by payload type, meaning we DO need
|
|
|
|
|
// to allocate unique payload types here. So to make this change we would
|
|
|
|
|
// need to make RTX codecs associated by name instead.
|
2015-02-23 21:28:22 +00:00
|
|
|
const int kExternalVideoPayloadTypeBase = 120;
|
|
|
|
|
size_t payload_type = kExternalVideoPayloadTypeBase + i;
|
2015-09-17 00:24:34 -07:00
|
|
|
RTC_DCHECK(payload_type < 128);
|
2016-04-13 10:07:16 -07:00
|
|
|
VideoCodec codec(static_cast<int>(payload_type), codecs[i].name,
|
|
|
|
|
codecs[i].max_width, codecs[i].max_height,
|
|
|
|
|
codecs[i].max_fps);
|
2014-10-14 04:25:33 +00:00
|
|
|
|
|
|
|
|
AddDefaultFeedbackParams(&codec);
|
2016-06-29 11:14:19 -07:00
|
|
|
AddCodecAndMaybeRtxCodec(codec, &supported_codecs);
|
2014-10-14 04:25:33 +00:00
|
|
|
}
|
2016-04-25 11:03:48 +02:00
|
|
|
LOG(LS_INFO) << "Supported codecs (incl. external codecs): "
|
|
|
|
|
<< CodecVectorToString(supported_codecs);
|
|
|
|
|
LOG(LS_INFO) << "Codecs supported by the external encoder factory: "
|
|
|
|
|
<< out.str();
|
2014-10-14 04:25:33 +00:00
|
|
|
return supported_codecs;
|
|
|
|
|
}
|
|
|
|
|
|
2014-05-13 11:07:01 +00:00
|
|
|
// Constructs a video channel bound to |call|. Encoder/decoder factories are
// borrowed raw pointers (owned by the engine). |options| become the default
// send options; |recv_codecs| seed the receive codec list after being
// validated and filtered to supported codecs. Must be constructed on the
// worker thread (checked via thread_checker_).
WebRtcVideoChannel2::WebRtcVideoChannel2(
    webrtc::Call* call,
    const MediaConfig& config,
    const VideoOptions& options,
    const std::vector<VideoCodec>& recv_codecs,
    WebRtcVideoEncoderFactory* external_encoder_factory,
    WebRtcVideoDecoderFactory* external_decoder_factory)
    : VideoMediaChannel(config),
      call_(call),
      unsignalled_ssrc_handler_(&default_unsignalled_ssrc_handler_),
      video_config_(config.video),
      external_encoder_factory_(external_encoder_factory),
      external_decoder_factory_(external_decoder_factory),
      default_send_options_(options),
      red_disabled_by_remote_side_(false),
      last_stats_log_ms_(-1) {
  RTC_DCHECK(thread_checker_.CalledOnValidThread());

  // Receiver reports use a fixed default SSRC until a send stream exists.
  rtcp_receiver_report_ssrc_ = kDefaultRtcpReceiverReportSsrc;
  sending_ = false;
  RTC_DCHECK(ValidateCodecFormats(recv_codecs));
  recv_codecs_ = FilterSupportedCodecs(MapCodecs(recv_codecs));
}
|
|
|
|
|
|
2014-05-13 11:07:01 +00:00
|
|
|
// Destroys the channel, releasing every owned send and receive stream.
WebRtcVideoChannel2::~WebRtcVideoChannel2() {
  for (auto it = send_streams_.begin(); it != send_streams_.end(); ++it) {
    delete it->second;
  }
  for (auto it = receive_streams_.begin(); it != receive_streams_.end(); ++it) {
    delete it->second;
  }
}
|
|
|
|
|
|
2014-11-03 14:46:44 +00:00
|
|
|
bool WebRtcVideoChannel2::CodecIsExternallySupported(
|
|
|
|
|
const std::string& name) const {
|
|
|
|
|
if (external_encoder_factory_ == NULL) {
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
const std::vector<WebRtcVideoEncoderFactory::VideoCodec> external_codecs =
|
|
|
|
|
external_encoder_factory_->codecs();
|
|
|
|
|
for (size_t c = 0; c < external_codecs.size(); ++c) {
|
2015-04-21 20:24:50 +08:00
|
|
|
if (CodecNamesEq(name, external_codecs[c].name)) {
|
2014-11-03 14:46:44 +00:00
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
std::vector<WebRtcVideoChannel2::VideoCodecSettings>
|
|
|
|
|
WebRtcVideoChannel2::FilterSupportedCodecs(
|
|
|
|
|
const std::vector<WebRtcVideoChannel2::VideoCodecSettings>& mapped_codecs)
|
|
|
|
|
const {
|
|
|
|
|
std::vector<VideoCodecSettings> supported_codecs;
|
|
|
|
|
for (size_t i = 0; i < mapped_codecs.size(); ++i) {
|
|
|
|
|
const VideoCodecSettings& codec = mapped_codecs[i];
|
|
|
|
|
if (CodecIsInternallySupported(codec.codec.name) ||
|
|
|
|
|
CodecIsExternallySupported(codec.codec.name)) {
|
|
|
|
|
supported_codecs.push_back(codec);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
return supported_codecs;
|
|
|
|
|
}
|
|
|
|
|
|
2015-08-20 17:19:20 -07:00
|
|
|
bool WebRtcVideoChannel2::ReceiveCodecsHaveChanged(
|
|
|
|
|
std::vector<VideoCodecSettings> before,
|
|
|
|
|
std::vector<VideoCodecSettings> after) {
|
|
|
|
|
if (before.size() != after.size()) {
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
// The receive codec order doesn't matter, so we sort the codecs before
|
|
|
|
|
// comparing. This is necessary because currently the
|
|
|
|
|
// only way to change the send codec is to munge SDP, which causes
|
|
|
|
|
// the receive codec list to change order, which causes the streams
|
|
|
|
|
// to be recreates which causes a "blink" of black video. In order
|
|
|
|
|
// to support munging the SDP in this way without recreating receive
|
|
|
|
|
// streams, we ignore the order of the received codecs so that
|
|
|
|
|
// changing the order doesn't cause this "blink".
|
|
|
|
|
auto comparison =
|
|
|
|
|
[](const VideoCodecSettings& codec1, const VideoCodecSettings& codec2) {
|
|
|
|
|
return codec1.codec.id > codec2.codec.id;
|
|
|
|
|
};
|
|
|
|
|
std::sort(before.begin(), before.end(), comparison);
|
|
|
|
|
std::sort(after.begin(), after.end(), comparison);
|
2016-04-13 10:07:16 -07:00
|
|
|
return before != after;
|
2015-08-20 17:19:20 -07:00
|
|
|
}
|
|
|
|
|
|
2016-01-27 16:45:21 +01:00
|
|
|
// Computes the delta between |params| and the currently applied send
// parameters. Only fields that actually differ from the current state are
// filled in on |changed_params|. Returns false if |params| is invalid
// (malformed codecs/extensions) or contains no supported codec; in that
// case |changed_params| must not be applied.
bool WebRtcVideoChannel2::GetChangedSendParameters(
    const VideoSendParameters& params,
    ChangedSendParameters* changed_params) const {
  if (!ValidateCodecFormats(params.codecs) ||
      !ValidateRtpExtensions(params.extensions)) {
    return false;
  }

  // Handle send codec. The first supported codec in the list becomes the
  // send codec.
  const std::vector<VideoCodecSettings> supported_codecs =
      FilterSupportedCodecs(MapCodecs(params.codecs));

  if (supported_codecs.empty()) {
    LOG(LS_ERROR) << "No video codecs supported.";
    return false;
  }

  if (!send_codec_ || supported_codecs.front() != *send_codec_) {
    changed_params->codec =
        rtc::Optional<VideoCodecSettings>(supported_codecs.front());
  }

  // Handle RTP header extensions (filtered to those supported for video;
  // the trailing 'true' selects the send-side filter variant).
  std::vector<webrtc::RtpExtension> filtered_extensions = FilterRtpExtensions(
      params.extensions, webrtc::RtpExtension::IsSupportedForVideo, true);
  if (!send_rtp_extensions_ || (*send_rtp_extensions_ != filtered_extensions)) {
    changed_params->rtp_header_extensions =
        rtc::Optional<std::vector<webrtc::RtpExtension>>(filtered_extensions);
  }

  // Handle max bitrate. Negative values mean "not set" and are ignored.
  if (params.max_bandwidth_bps != send_params_.max_bandwidth_bps &&
      params.max_bandwidth_bps >= 0) {
    // 0 uncaps max bitrate (-1).
    changed_params->max_bandwidth_bps = rtc::Optional<int>(
        params.max_bandwidth_bps == 0 ? -1 : params.max_bandwidth_bps);
  }

  // Handle conference mode.
  if (params.conference_mode != send_params_.conference_mode) {
    changed_params->conference_mode =
        rtc::Optional<bool>(params.conference_mode);
  }

  // Handle RTCP mode.
  if (params.rtcp.reduced_size != send_params_.rtcp.reduced_size) {
    changed_params->rtcp_mode = rtc::Optional<webrtc::RtcpMode>(
        params.rtcp.reduced_size ? webrtc::RtcpMode::kReducedSize
                                 : webrtc::RtcpMode::kCompound);
  }

  return true;
}
|
|
|
|
|
|
2016-02-12 02:27:06 -08:00
|
|
|
// Video packets from this channel are marked with DSCP AF41.
rtc::DiffServCodePoint WebRtcVideoChannel2::PreferredDscp() const {
  return rtc::DSCP_AF41;
}
|
|
|
|
|
|
2015-08-07 16:05:34 -07:00
|
|
|
// Applies new send parameters to the channel. The work is delta-driven:
// GetChangedSendParameters() computes only the fields that differ, then this
// method updates the send codec, RTP header extensions and bitrate config,
// pushes the delta to every send stream, and refreshes receive-side
// feedback/FEC settings that depend on the send codec. Returns false (with
// no state changed) if the parameters are invalid.
bool WebRtcVideoChannel2::SetSendParameters(const VideoSendParameters& params) {
  TRACE_EVENT0("webrtc", "WebRtcVideoChannel2::SetSendParameters");
  LOG(LS_INFO) << "SetSendParameters: " << params.ToString();
  ChangedSendParameters changed_params;
  if (!GetChangedSendParameters(params, &changed_params)) {
    return false;
  }

  if (changed_params.codec) {
    const VideoCodecSettings& codec_settings = *changed_params.codec;
    send_codec_ = rtc::Optional<VideoCodecSettings>(codec_settings);
    LOG(LS_INFO) << "Using codec: " << codec_settings.codec.ToString();
  }

  if (changed_params.rtp_header_extensions) {
    send_rtp_extensions_ = changed_params.rtp_header_extensions;
  }

  if (changed_params.codec || changed_params.max_bandwidth_bps) {
    if (send_codec_) {
      // TODO(holmer): Changing the codec parameters shouldn't necessarily mean
      // that we change the min/max of bandwidth estimation. Reevaluate this.
      bitrate_config_ = GetBitrateConfigForCodec(send_codec_->codec);
      if (!changed_params.codec) {
        // If the codec isn't changing, set the start bitrate to -1 which means
        // "unchanged" so that BWE isn't affected.
        bitrate_config_.start_bitrate_bps = -1;
      }
    }
    if (params.max_bandwidth_bps >= 0) {
      // Note that max_bandwidth_bps intentionally takes priority over the
      // bitrate config for the codec. This allows FEC to be applied above the
      // codec target bitrate.
      // TODO(pbos): Figure out whether b=AS means max bitrate for this
      // WebRtcVideoChannel2 (in which case we're good), or per sender (SSRC),
      // in which case this should not set a Call::BitrateConfig but rather
      // reconfigure all senders.
      bitrate_config_.max_bitrate_bps =
          params.max_bandwidth_bps == 0 ? -1 : params.max_bandwidth_bps;
    }
    call_->SetBitrateConfig(bitrate_config_);
  }

  {
    rtc::CritScope stream_lock(&stream_crit_);
    for (auto& kv : send_streams_) {
      kv.second->SetSendParameters(changed_params);
    }
    if (changed_params.codec || changed_params.rtcp_mode) {
      // Update receive feedback parameters from new codec or RTCP mode.
      LOG(LS_INFO)
          << "SetFeedbackOptions on all the receive streams because the send "
             "codec or RTCP mode has changed.";
      for (auto& kv : receive_streams_) {
        RTC_DCHECK(kv.second != nullptr);
        kv.second->SetFeedbackParameters(
            HasNack(send_codec_->codec), HasRemb(send_codec_->codec),
            HasTransportCc(send_codec_->codec),
            params.rtcp.reduced_size ? webrtc::RtcpMode::kReducedSize
                                     : webrtc::RtcpMode::kCompound);
      }
    }
    if (changed_params.codec) {
      // A red_payload_type of -1 in the negotiated codec means the remote
      // side disabled RED/FEC; propagate that to the receive streams.
      bool red_was_disabled = red_disabled_by_remote_side_;
      red_disabled_by_remote_side_ =
          changed_params.codec->fec.red_payload_type == -1;
      if (red_was_disabled != red_disabled_by_remote_side_) {
        for (auto& kv : receive_streams_) {
          // In practice VideoChannel::SetRemoteContent appears to most of the
          // time also call UpdateRemoteStreams, which recreates the receive
          // streams. If that's always true this call isn't needed.
          kv.second->SetFecDisabledRemotely(red_disabled_by_remote_side_);
        }
      }
    }
  }
  send_params_ = params;
  return true;
}
|
2016-04-20 16:23:10 -07:00
|
|
|
|
2016-05-16 11:40:30 -07:00
|
|
|
// Returns the RTP parameters of the send stream identified by |ssrc|,
// augmented with the channel-wide negotiated send codec list. A
// default-constructed webrtc::RtpParameters is returned (with a warning)
// when no such stream exists.
webrtc::RtpParameters WebRtcVideoChannel2::GetRtpSendParameters(
    uint32_t ssrc) const {
  rtc::CritScope stream_lock(&stream_crit_);
  const auto stream_it = send_streams_.find(ssrc);
  if (stream_it == send_streams_.end()) {
    LOG(LS_WARNING) << "Attempting to get RTP send parameters for stream "
                    << "with ssrc " << ssrc << " which doesn't exist.";
    return webrtc::RtpParameters();
  }

  webrtc::RtpParameters result = stream_it->second->GetRtpParameters();
  // Codecs are negotiated per channel, not per stream, so append the common
  // codec list to the stream-specific parameters.
  for (const VideoCodec& send_codec : send_params_.codecs) {
    result.codecs.push_back(send_codec.ToCodecParameters());
  }
  return result;
}
|
|
|
|
|
|
2016-05-16 11:40:30 -07:00
|
|
|
// Applies per-stream RTP parameters to the send stream with |ssrc|.
// Changing the codec set through this API is rejected; only the
// stream-level parameters are forwarded.
bool WebRtcVideoChannel2::SetRtpSendParameters(
    uint32_t ssrc,
    const webrtc::RtpParameters& parameters) {
  TRACE_EVENT0("webrtc", "WebRtcVideoChannel2::SetRtpSendParameters");
  rtc::CritScope stream_lock(&stream_crit_);
  auto it = send_streams_.find(ssrc);
  if (it == send_streams_.end()) {
    LOG(LS_ERROR) << "Attempting to set RTP send parameters for stream "
                  << "with ssrc " << ssrc << " which doesn't exist.";
    return false;
  }

  // TODO(deadbeef): Handle setting parameters with a list of codecs in a
  // different order (which should change the send codec).
  // NOTE(review): GetRtpSendParameters() acquires stream_crit_ again while
  // we already hold it here — this relies on the critical section being
  // re-entrant; confirm against rtc::CriticalSection's contract.
  webrtc::RtpParameters current_parameters = GetRtpSendParameters(ssrc);
  if (current_parameters.codecs != parameters.codecs) {
    LOG(LS_ERROR) << "Using SetParameters to change the set of codecs "
                  << "is not currently supported.";
    return false;
  }

  return it->second->SetRtpParameters(parameters);
}
|
2015-08-07 16:05:34 -07:00
|
|
|
|
2016-05-16 11:40:30 -07:00
|
|
|
// Returns RTP parameters for the receive stream identified by |ssrc|:
// currently the channel-wide receive codec list plus the stream's first
// primary SSRC in a single encoding. A default-constructed
// webrtc::RtpParameters is returned (with a warning) for an unknown ssrc.
webrtc::RtpParameters WebRtcVideoChannel2::GetRtpReceiveParameters(
    uint32_t ssrc) const {
  rtc::CritScope stream_lock(&stream_crit_);
  const auto stream_it = receive_streams_.find(ssrc);
  if (stream_it == receive_streams_.end()) {
    LOG(LS_WARNING) << "Attempting to get RTP receive parameters for stream "
                    << "with ssrc " << ssrc << " which doesn't exist.";
    return webrtc::RtpParameters();
  }

  // TODO(deadbeef): Return stream-specific parameters.
  webrtc::RtpParameters result = CreateRtpParametersWithOneEncoding();
  for (const VideoCodec& recv_codec : recv_params_.codecs) {
    result.codecs.push_back(recv_codec.ToCodecParameters());
  }
  result.encodings[0].ssrc = stream_it->second->GetFirstPrimarySsrc();
  return result;
}
|
|
|
|
|
|
|
|
|
|
// Validates per-stream RTP receive parameters. Actually changing them is
// not supported yet: the call only succeeds when |parameters| equals the
// current parameters exactly.
bool WebRtcVideoChannel2::SetRtpReceiveParameters(
    uint32_t ssrc,
    const webrtc::RtpParameters& parameters) {
  TRACE_EVENT0("webrtc", "WebRtcVideoChannel2::SetRtpReceiveParameters");
  rtc::CritScope stream_lock(&stream_crit_);
  auto it = receive_streams_.find(ssrc);
  if (it == receive_streams_.end()) {
    LOG(LS_ERROR) << "Attempting to set RTP receive parameters for stream "
                  << "with ssrc " << ssrc << " which doesn't exist.";
    return false;
  }

  // NOTE(review): GetRtpReceiveParameters() re-acquires stream_crit_ while
  // we already hold it — relies on the critical section being re-entrant;
  // confirm against rtc::CriticalSection's contract.
  webrtc::RtpParameters current_parameters = GetRtpReceiveParameters(ssrc);
  if (current_parameters != parameters) {
    LOG(LS_ERROR) << "Changing the RTP receive parameters is currently "
                  << "unsupported.";
    return false;
  }
  return true;
}
|
|
|
|
|
|
2016-01-28 15:58:41 -08:00
|
|
|
bool WebRtcVideoChannel2::GetChangedRecvParameters(
|
|
|
|
|
const VideoRecvParameters& params,
|
|
|
|
|
ChangedRecvParameters* changed_params) const {
|
|
|
|
|
if (!ValidateCodecFormats(params.codecs) ||
|
|
|
|
|
!ValidateRtpExtensions(params.extensions)) {
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Handle receive codecs.
|
|
|
|
|
const std::vector<VideoCodecSettings> mapped_codecs =
|
|
|
|
|
MapCodecs(params.codecs);
|
|
|
|
|
if (mapped_codecs.empty()) {
|
|
|
|
|
LOG(LS_ERROR) << "SetRecvParameters called without any video codecs.";
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
std::vector<VideoCodecSettings> supported_codecs =
|
|
|
|
|
FilterSupportedCodecs(mapped_codecs);
|
|
|
|
|
|
|
|
|
|
if (mapped_codecs.size() != supported_codecs.size()) {
|
|
|
|
|
LOG(LS_ERROR) << "SetRecvParameters called with unsupported video codecs.";
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (ReceiveCodecsHaveChanged(recv_codecs_, supported_codecs)) {
|
|
|
|
|
changed_params->codec_settings =
|
|
|
|
|
rtc::Optional<std::vector<VideoCodecSettings>>(supported_codecs);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Handle RTP header extensions.
|
|
|
|
|
std::vector<webrtc::RtpExtension> filtered_extensions = FilterRtpExtensions(
|
|
|
|
|
params.extensions, webrtc::RtpExtension::IsSupportedForVideo, false);
|
|
|
|
|
if (filtered_extensions != recv_rtp_extensions_) {
|
|
|
|
|
changed_params->rtp_header_extensions =
|
|
|
|
|
rtc::Optional<std::vector<webrtc::RtpExtension>>(filtered_extensions);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
|
2015-08-07 16:05:34 -07:00
|
|
|
bool WebRtcVideoChannel2::SetRecvParameters(const VideoRecvParameters& params) {
|
2015-12-08 13:25:57 +01:00
|
|
|
TRACE_EVENT0("webrtc", "WebRtcVideoChannel2::SetRecvParameters");
|
2015-12-02 08:05:01 -08:00
|
|
|
LOG(LS_INFO) << "SetRecvParameters: " << params.ToString();
|
2016-01-28 15:58:41 -08:00
|
|
|
ChangedRecvParameters changed_params;
|
|
|
|
|
if (!GetChangedRecvParameters(params, &changed_params)) {
|
2015-12-09 12:37:51 -08:00
|
|
|
return false;
|
|
|
|
|
}
|
2016-01-28 15:58:41 -08:00
|
|
|
if (changed_params.rtp_header_extensions) {
|
|
|
|
|
recv_rtp_extensions_ = *changed_params.rtp_header_extensions;
|
|
|
|
|
}
|
|
|
|
|
if (changed_params.codec_settings) {
|
|
|
|
|
LOG(LS_INFO) << "Changing recv codecs from "
|
|
|
|
|
<< CodecSettingsVectorToString(recv_codecs_) << " to "
|
|
|
|
|
<< CodecSettingsVectorToString(*changed_params.codec_settings);
|
|
|
|
|
recv_codecs_ = *changed_params.codec_settings;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
{
|
2015-12-09 12:37:51 -08:00
|
|
|
rtc::CritScope stream_lock(&stream_crit_);
|
|
|
|
|
for (auto& kv : receive_streams_) {
|
2016-01-28 15:58:41 -08:00
|
|
|
kv.second->SetRecvParameters(changed_params);
|
2015-12-09 12:37:51 -08:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
recv_params_ = params;
|
|
|
|
|
return true;
|
2015-08-07 16:05:34 -07:00
|
|
|
}
|
|
|
|
|
|
2015-08-20 17:19:20 -07:00
|
|
|
std::string WebRtcVideoChannel2::CodecSettingsVectorToString(
|
|
|
|
|
const std::vector<VideoCodecSettings>& codecs) {
|
|
|
|
|
std::stringstream out;
|
|
|
|
|
out << '{';
|
|
|
|
|
for (size_t i = 0; i < codecs.size(); ++i) {
|
|
|
|
|
out << codecs[i].codec.ToString();
|
|
|
|
|
if (i != codecs.size() - 1) {
|
|
|
|
|
out << ", ";
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
out << '}';
|
|
|
|
|
return out.str();
|
|
|
|
|
}
|
|
|
|
|
|
2014-05-13 11:07:01 +00:00
|
|
|
// Copies the current send codec into |codec|. Returns false (leaving
// |codec| untouched) when no send codec has been negotiated yet.
bool WebRtcVideoChannel2::GetSendCodec(VideoCodec* codec) {
  if (send_codec_) {
    *codec = send_codec_->codec;
    return true;
  }
  LOG(LS_VERBOSE) << "GetSendCodec: No send codec set.";
  return false;
}
|
|
|
|
|
|
|
|
|
|
bool WebRtcVideoChannel2::SetSend(bool send) {
|
2016-04-11 11:45:14 +02:00
|
|
|
TRACE_EVENT0("webrtc", "WebRtcVideoChannel2::SetSend");
|
2014-05-13 11:07:01 +00:00
|
|
|
LOG(LS_VERBOSE) << "SetSend: " << (send ? "true" : "false");
|
2015-10-30 02:47:38 -07:00
|
|
|
if (send && !send_codec_) {
|
2014-05-13 11:07:01 +00:00
|
|
|
LOG(LS_ERROR) << "SetSend(true) called before setting codec.";
|
|
|
|
|
return false;
|
|
|
|
|
}
|
2016-03-22 15:42:00 -07:00
|
|
|
{
|
|
|
|
|
rtc::CritScope stream_lock(&stream_crit_);
|
|
|
|
|
for (const auto& kv : send_streams_) {
|
|
|
|
|
kv.second->SetSend(send);
|
|
|
|
|
}
|
2014-05-13 11:07:01 +00:00
|
|
|
}
|
|
|
|
|
sending_ = send;
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
|
2016-04-08 02:23:55 -07:00
|
|
|
// TODO(nisse): The enable argument was used for mute logic which has
// been moved to VideoBroadcaster. So remove the argument from this
// method.
//
// Attaches |source| (and optional per-stream |options|) to the send stream
// with |ssrc| and forwards the enable flag. For an unknown ssrc the call
// fails — and note that a non-null |source| for an unknown ssrc is treated
// as a caller bug: RTC_CHECK aborts even in release builds.
bool WebRtcVideoChannel2::SetVideoSend(
    uint32_t ssrc,
    bool enable,
    const VideoOptions* options,
    rtc::VideoSourceInterface<cricket::VideoFrame>* source) {
  TRACE_EVENT0("webrtc", "SetVideoSend");
  RTC_DCHECK(ssrc != 0);
  LOG(LS_INFO) << "SetVideoSend (ssrc= " << ssrc << ", enable = " << enable
               << ", options: " << (options ? options->ToString() : "nullptr")
               << ", source = " << (source ? "(source)" : "nullptr") << ")";

  rtc::CritScope stream_lock(&stream_crit_);
  const auto& kv = send_streams_.find(ssrc);
  if (kv == send_streams_.end()) {
    // Allow unknown ssrc only if source is null.
    RTC_CHECK(source == nullptr);
    LOG(LS_ERROR) << "No sending stream on ssrc " << ssrc;
    return false;
  }

  return kv->second->SetVideoSend(enable, options, source);
}
|
|
|
|
|
|
2015-03-26 16:23:04 +01:00
|
|
|
bool WebRtcVideoChannel2::ValidateSendSsrcAvailability(
|
|
|
|
|
const StreamParams& sp) const {
|
2016-02-12 13:30:57 +01:00
|
|
|
for (uint32_t ssrc : sp.ssrcs) {
|
2015-03-26 16:23:04 +01:00
|
|
|
if (send_ssrcs_.find(ssrc) != send_ssrcs_.end()) {
|
|
|
|
|
LOG(LS_ERROR) << "Send stream with SSRC '" << ssrc << "' already exists.";
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
bool WebRtcVideoChannel2::ValidateReceiveSsrcAvailability(
|
|
|
|
|
const StreamParams& sp) const {
|
2016-02-12 13:30:57 +01:00
|
|
|
for (uint32_t ssrc : sp.ssrcs) {
|
2015-03-26 16:23:04 +01:00
|
|
|
if (receive_ssrcs_.find(ssrc) != receive_ssrcs_.end()) {
|
|
|
|
|
LOG(LS_ERROR) << "Receive stream with SSRC '" << ssrc
|
|
|
|
|
<< "' already exists.";
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
|
2014-05-13 11:07:01 +00:00
|
|
|
bool WebRtcVideoChannel2::AddSendStream(const StreamParams& sp) {
|
|
|
|
|
LOG(LS_INFO) << "AddSendStream: " << sp.ToString();
|
2015-03-25 14:17:23 +01:00
|
|
|
if (!ValidateStreamParams(sp))
|
2014-05-13 11:07:01 +00:00
|
|
|
return false;
|
|
|
|
|
|
2014-10-08 14:48:08 +00:00
|
|
|
rtc::CritScope stream_lock(&stream_crit_);
|
2015-03-26 16:23:04 +01:00
|
|
|
|
|
|
|
|
if (!ValidateSendSsrcAvailability(sp))
|
2014-05-13 11:07:01 +00:00
|
|
|
return false;
|
2015-03-26 16:23:04 +01:00
|
|
|
|
Use suffixed {uint,int}{8,16,32,64}_t types.
Removes the use of uint8, etc. in favor of uint8_t.
BUG=webrtc:5024
R=henrik.lundin@webrtc.org, henrikg@webrtc.org, perkj@webrtc.org, solenberg@webrtc.org, stefan@webrtc.org, tina.legrand@webrtc.org
Review URL: https://codereview.webrtc.org/1362503003 .
Cr-Commit-Position: refs/heads/master@{#10196}
2015-10-07 12:23:21 +02:00
|
|
|
for (uint32_t used_ssrc : sp.ssrcs)
|
2015-03-26 16:23:04 +01:00
|
|
|
send_ssrcs_.insert(used_ssrc);
|
2014-05-13 11:07:01 +00:00
|
|
|
|
2015-09-08 05:13:22 -07:00
|
|
|
webrtc::VideoSendStream::Config config(this);
|
2016-03-01 04:29:59 -08:00
|
|
|
config.suspend_below_min_bitrate = video_config_.suspend_below_min_bitrate;
|
2016-03-16 02:22:50 -07:00
|
|
|
WebRtcVideoSendStream* stream = new WebRtcVideoSendStream(
|
2016-08-16 02:40:55 -07:00
|
|
|
call_, sp, config, default_send_options_, external_encoder_factory_,
|
|
|
|
|
video_config_.enable_cpu_overuse_detection,
|
2016-03-16 02:22:50 -07:00
|
|
|
bitrate_config_.max_bitrate_bps, send_codec_, send_rtp_extensions_,
|
|
|
|
|
send_params_);
|
2014-07-17 08:51:46 +00:00
|
|
|
|
Use suffixed {uint,int}{8,16,32,64}_t types.
Removes the use of uint8, etc. in favor of uint8_t.
BUG=webrtc:5024
R=henrik.lundin@webrtc.org, henrikg@webrtc.org, perkj@webrtc.org, solenberg@webrtc.org, stefan@webrtc.org, tina.legrand@webrtc.org
Review URL: https://codereview.webrtc.org/1362503003 .
Cr-Commit-Position: refs/heads/master@{#10196}
2015-10-07 12:23:21 +02:00
|
|
|
uint32_t ssrc = sp.first_ssrc();
|
2015-09-17 00:24:34 -07:00
|
|
|
RTC_DCHECK(ssrc != 0);
|
2014-05-13 11:07:01 +00:00
|
|
|
send_streams_[ssrc] = stream;
|
|
|
|
|
|
|
|
|
|
if (rtcp_receiver_report_ssrc_ == kDefaultRtcpReceiverReportSsrc) {
|
|
|
|
|
rtcp_receiver_report_ssrc_ = ssrc;
|
2015-08-20 17:19:20 -07:00
|
|
|
LOG(LS_INFO) << "SetLocalSsrc on all the receive streams because we added "
|
|
|
|
|
"a send stream.";
|
2015-05-22 18:48:36 +02:00
|
|
|
for (auto& kv : receive_streams_)
|
|
|
|
|
kv.second->SetLocalSsrc(ssrc);
|
2014-05-13 11:07:01 +00:00
|
|
|
}
|
|
|
|
|
if (sending_) {
|
2016-03-22 15:42:00 -07:00
|
|
|
stream->SetSend(true);
|
2014-05-13 11:07:01 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
|
Use suffixed {uint,int}{8,16,32,64}_t types.
Removes the use of uint8, etc. in favor of uint8_t.
BUG=webrtc:5024
R=henrik.lundin@webrtc.org, henrikg@webrtc.org, perkj@webrtc.org, solenberg@webrtc.org, stefan@webrtc.org, tina.legrand@webrtc.org
Review URL: https://codereview.webrtc.org/1362503003 .
Cr-Commit-Position: refs/heads/master@{#10196}
2015-10-07 12:23:21 +02:00
|
|
|
bool WebRtcVideoChannel2::RemoveSendStream(uint32_t ssrc) {
|
2014-05-13 11:07:01 +00:00
|
|
|
LOG(LS_INFO) << "RemoveSendStream: " << ssrc;
|
|
|
|
|
|
2014-10-08 14:48:08 +00:00
|
|
|
WebRtcVideoSendStream* removed_stream;
|
|
|
|
|
{
|
|
|
|
|
rtc::CritScope stream_lock(&stream_crit_);
|
Use suffixed {uint,int}{8,16,32,64}_t types.
Removes the use of uint8, etc. in favor of uint8_t.
BUG=webrtc:5024
R=henrik.lundin@webrtc.org, henrikg@webrtc.org, perkj@webrtc.org, solenberg@webrtc.org, stefan@webrtc.org, tina.legrand@webrtc.org
Review URL: https://codereview.webrtc.org/1362503003 .
Cr-Commit-Position: refs/heads/master@{#10196}
2015-10-07 12:23:21 +02:00
|
|
|
std::map<uint32_t, WebRtcVideoSendStream*>::iterator it =
|
2014-10-08 14:48:08 +00:00
|
|
|
send_streams_.find(ssrc);
|
|
|
|
|
if (it == send_streams_.end()) {
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
Use suffixed {uint,int}{8,16,32,64}_t types.
Removes the use of uint8, etc. in favor of uint8_t.
BUG=webrtc:5024
R=henrik.lundin@webrtc.org, henrikg@webrtc.org, perkj@webrtc.org, solenberg@webrtc.org, stefan@webrtc.org, tina.legrand@webrtc.org
Review URL: https://codereview.webrtc.org/1362503003 .
Cr-Commit-Position: refs/heads/master@{#10196}
2015-10-07 12:23:21 +02:00
|
|
|
for (uint32_t old_ssrc : it->second->GetSsrcs())
|
2015-03-26 16:23:04 +01:00
|
|
|
send_ssrcs_.erase(old_ssrc);
|
|
|
|
|
|
2014-10-08 14:48:08 +00:00
|
|
|
removed_stream = it->second;
|
|
|
|
|
send_streams_.erase(it);
|
2015-10-21 17:21:10 +02:00
|
|
|
|
|
|
|
|
// Switch receiver report SSRCs, the one in use is no longer valid.
|
|
|
|
|
if (rtcp_receiver_report_ssrc_ == ssrc) {
|
|
|
|
|
rtcp_receiver_report_ssrc_ = send_streams_.empty()
|
|
|
|
|
? kDefaultRtcpReceiverReportSsrc
|
|
|
|
|
: send_streams_.begin()->first;
|
|
|
|
|
LOG(LS_INFO) << "SetLocalSsrc on all the receive streams because the "
|
|
|
|
|
"previous local SSRC was removed.";
|
|
|
|
|
|
|
|
|
|
for (auto& kv : receive_streams_) {
|
|
|
|
|
kv.second->SetLocalSsrc(rtcp_receiver_report_ssrc_);
|
|
|
|
|
}
|
|
|
|
|
}
|
2014-05-13 11:07:01 +00:00
|
|
|
}
|
|
|
|
|
|
2014-10-08 14:48:08 +00:00
|
|
|
delete removed_stream;
|
2014-05-13 11:07:01 +00:00
|
|
|
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
|
2015-03-26 16:23:04 +01:00
|
|
|
// Releases the SSRCs reserved by |stream| and destroys it. The caller is
// responsible for erasing the map entry.
void WebRtcVideoChannel2::DeleteReceiveStream(
    WebRtcVideoChannel2::WebRtcVideoReceiveStream* stream) {
  for (uint32_t old_ssrc : stream->GetSsrcs())
    receive_ssrcs_.erase(old_ssrc);
  delete stream;
}
|
|
|
|
|
|
2014-05-13 11:07:01 +00:00
|
|
|
bool WebRtcVideoChannel2::AddRecvStream(const StreamParams& sp) {
|
2015-03-06 15:35:19 +00:00
|
|
|
return AddRecvStream(sp, false);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
bool WebRtcVideoChannel2::AddRecvStream(const StreamParams& sp,
|
|
|
|
|
bool default_stream) {
|
2015-09-17 00:24:34 -07:00
|
|
|
RTC_DCHECK(thread_checker_.CalledOnValidThread());
|
2015-05-07 14:07:48 +02:00
|
|
|
|
2015-03-25 14:17:23 +01:00
|
|
|
LOG(LS_INFO) << "AddRecvStream" << (default_stream ? " (default stream)" : "")
|
|
|
|
|
<< ": " << sp.ToString();
|
|
|
|
|
if (!ValidateStreamParams(sp))
|
|
|
|
|
return false;
|
2014-05-13 11:07:01 +00:00
|
|
|
|
Use suffixed {uint,int}{8,16,32,64}_t types.
Removes the use of uint8, etc. in favor of uint8_t.
BUG=webrtc:5024
R=henrik.lundin@webrtc.org, henrikg@webrtc.org, perkj@webrtc.org, solenberg@webrtc.org, stefan@webrtc.org, tina.legrand@webrtc.org
Review URL: https://codereview.webrtc.org/1362503003 .
Cr-Commit-Position: refs/heads/master@{#10196}
2015-10-07 12:23:21 +02:00
|
|
|
uint32_t ssrc = sp.first_ssrc();
|
2015-09-17 00:24:34 -07:00
|
|
|
RTC_DCHECK(ssrc != 0); // TODO(pbos): Is this ever valid?
|
2014-05-13 11:07:01 +00:00
|
|
|
|
2014-10-08 14:48:08 +00:00
|
|
|
rtc::CritScope stream_lock(&stream_crit_);
|
2015-03-26 16:23:04 +01:00
|
|
|
// Remove running stream if this was a default stream.
|
2016-02-17 07:24:50 -08:00
|
|
|
const auto& prev_stream = receive_streams_.find(ssrc);
|
2015-03-26 16:23:04 +01:00
|
|
|
if (prev_stream != receive_streams_.end()) {
|
|
|
|
|
if (default_stream || !prev_stream->second->IsDefaultStream()) {
|
|
|
|
|
LOG(LS_ERROR) << "Receive stream for SSRC '" << ssrc
|
|
|
|
|
<< "' already exists.";
|
|
|
|
|
return false;
|
2015-03-06 15:35:19 +00:00
|
|
|
}
|
2015-03-26 16:23:04 +01:00
|
|
|
DeleteReceiveStream(prev_stream->second);
|
|
|
|
|
receive_streams_.erase(prev_stream);
|
2014-05-13 11:07:01 +00:00
|
|
|
}
|
|
|
|
|
|
2015-03-26 16:23:04 +01:00
|
|
|
if (!ValidateReceiveSsrcAvailability(sp))
|
|
|
|
|
return false;
|
|
|
|
|
|
Use suffixed {uint,int}{8,16,32,64}_t types.
Removes the use of uint8, etc. in favor of uint8_t.
BUG=webrtc:5024
R=henrik.lundin@webrtc.org, henrikg@webrtc.org, perkj@webrtc.org, solenberg@webrtc.org, stefan@webrtc.org, tina.legrand@webrtc.org
Review URL: https://codereview.webrtc.org/1362503003 .
Cr-Commit-Position: refs/heads/master@{#10196}
2015-10-07 12:23:21 +02:00
|
|
|
for (uint32_t used_ssrc : sp.ssrcs)
|
2015-03-26 16:23:04 +01:00
|
|
|
receive_ssrcs_.insert(used_ssrc);
|
|
|
|
|
|
2015-08-28 04:07:10 -07:00
|
|
|
webrtc::VideoReceiveStream::Config config(this);
|
2014-07-18 09:35:58 +00:00
|
|
|
ConfigureReceiverRtp(&config, sp);
|
2014-10-31 12:59:34 +00:00
|
|
|
|
2015-07-15 08:02:58 -07:00
|
|
|
// Set up A/V sync group based on sync label.
|
|
|
|
|
config.sync_group = sp.sync_label;
|
2014-10-31 12:59:34 +00:00
|
|
|
|
2015-10-30 02:47:38 -07:00
|
|
|
config.rtp.remb = send_codec_ ? HasRemb(send_codec_->codec) : false;
|
2015-11-20 18:05:48 -08:00
|
|
|
config.rtp.transport_cc =
|
|
|
|
|
send_codec_ ? HasTransportCc(send_codec_->codec) : false;
|
2016-03-23 04:48:10 -07:00
|
|
|
config.disable_prerenderer_smoothing =
|
|
|
|
|
video_config_.disable_prerenderer_smoothing;
|
2015-05-11 12:48:12 +02:00
|
|
|
|
2015-03-26 16:23:04 +01:00
|
|
|
receive_streams_[ssrc] = new WebRtcVideoReceiveStream(
|
2016-06-10 17:58:01 +02:00
|
|
|
call_, sp, std::move(config), external_decoder_factory_, default_stream,
|
2016-05-17 16:33:30 +02:00
|
|
|
recv_codecs_, red_disabled_by_remote_side_);
|
2014-07-18 09:35:58 +00:00
|
|
|
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Fills in the RTP portion of a receive-stream |config| from |sp|:
// remote/local SSRCs, header extensions, RTCP mode, and per-codec RTX
// mappings.
void WebRtcVideoChannel2::ConfigureReceiverRtp(
    webrtc::VideoReceiveStream::Config* config,
    const StreamParams& sp) const {
  uint32_t ssrc = sp.first_ssrc();

  config->rtp.remote_ssrc = ssrc;
  config->rtp.local_ssrc = rtcp_receiver_report_ssrc_;

  config->rtp.extensions = recv_rtp_extensions_;
  // Whether or not the receive stream sends reduced size RTCP is determined
  // by the send params.
  // TODO(deadbeef): Once we change "send_params" to "sender_params" and
  // "recv_params" to "receiver_params", we should get this out of
  // receiver_params_.
  config->rtp.rtcp_mode = send_params_.rtcp.reduced_size
                              ? webrtc::RtcpMode::kReducedSize
                              : webrtc::RtcpMode::kCompound;

  // TODO(pbos): This protection is against setting the same local ssrc as
  // remote which is not permitted by the lower-level API. RTCP requires a
  // corresponding sender SSRC. Figure out what to do when we don't have
  // (receive-only) or know a good local SSRC.
  if (config->rtp.remote_ssrc == config->rtp.local_ssrc) {
    if (config->rtp.local_ssrc != kDefaultRtcpReceiverReportSsrc) {
      config->rtp.local_ssrc = kDefaultRtcpReceiverReportSsrc;
    } else {
      config->rtp.local_ssrc = kDefaultRtcpReceiverReportSsrc + 1;
    }
  }

  // Map each codec's RTX payload type to its RTX SSRC, when both are known.
  for (const auto& recv_codec : recv_codecs_) {
    uint32_t rtx_ssrc;
    if (recv_codec.rtx_payload_type != -1 &&
        sp.GetFidSsrc(ssrc, &rtx_ssrc)) {
      webrtc::VideoReceiveStream::Config::Rtp::Rtx& rtx =
          config->rtp.rtx[recv_codec.codec.id];
      rtx.ssrc = rtx_ssrc;
      rtx.payload_type = recv_codec.rtx_payload_type;
    }
  }
}
|
|
|
|
|
|
Use suffixed {uint,int}{8,16,32,64}_t types.
Removes the use of uint8, etc. in favor of uint8_t.
BUG=webrtc:5024
R=henrik.lundin@webrtc.org, henrikg@webrtc.org, perkj@webrtc.org, solenberg@webrtc.org, stefan@webrtc.org, tina.legrand@webrtc.org
Review URL: https://codereview.webrtc.org/1362503003 .
Cr-Commit-Position: refs/heads/master@{#10196}
2015-10-07 12:23:21 +02:00
|
|
|
bool WebRtcVideoChannel2::RemoveRecvStream(uint32_t ssrc) {
|
2014-05-13 11:07:01 +00:00
|
|
|
LOG(LS_INFO) << "RemoveRecvStream: " << ssrc;
|
|
|
|
|
if (ssrc == 0) {
|
2014-08-12 23:17:13 +00:00
|
|
|
LOG(LS_ERROR) << "RemoveRecvStream with 0 ssrc is not supported.";
|
|
|
|
|
return false;
|
2014-05-13 11:07:01 +00:00
|
|
|
}
|
|
|
|
|
|
2014-10-08 14:48:08 +00:00
|
|
|
rtc::CritScope stream_lock(&stream_crit_);
|
Use suffixed {uint,int}{8,16,32,64}_t types.
Removes the use of uint8, etc. in favor of uint8_t.
BUG=webrtc:5024
R=henrik.lundin@webrtc.org, henrikg@webrtc.org, perkj@webrtc.org, solenberg@webrtc.org, stefan@webrtc.org, tina.legrand@webrtc.org
Review URL: https://codereview.webrtc.org/1362503003 .
Cr-Commit-Position: refs/heads/master@{#10196}
2015-10-07 12:23:21 +02:00
|
|
|
std::map<uint32_t, WebRtcVideoReceiveStream*>::iterator stream =
|
2014-05-13 11:07:01 +00:00
|
|
|
receive_streams_.find(ssrc);
|
|
|
|
|
if (stream == receive_streams_.end()) {
|
|
|
|
|
LOG(LS_ERROR) << "Stream not found for ssrc: " << ssrc;
|
|
|
|
|
return false;
|
|
|
|
|
}
|
2015-03-26 16:23:04 +01:00
|
|
|
DeleteReceiveStream(stream->second);
|
2014-05-13 11:07:01 +00:00
|
|
|
receive_streams_.erase(stream);
|
|
|
|
|
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
|
2016-02-04 01:24:52 -08:00
|
|
|
bool WebRtcVideoChannel2::SetSink(uint32_t ssrc,
|
|
|
|
|
rtc::VideoSinkInterface<VideoFrame>* sink) {
|
2016-06-02 16:23:38 -07:00
|
|
|
LOG(LS_INFO) << "SetSink: ssrc:" << ssrc << " "
|
|
|
|
|
<< (sink ? "(ptr)" : "nullptr");
|
2014-05-13 11:07:01 +00:00
|
|
|
if (ssrc == 0) {
|
2016-02-04 01:24:52 -08:00
|
|
|
default_unsignalled_ssrc_handler_.SetDefaultSink(this, sink);
|
2014-07-18 09:35:58 +00:00
|
|
|
return true;
|
2014-05-13 11:07:01 +00:00
|
|
|
}
|
|
|
|
|
|
2014-10-08 14:48:08 +00:00
|
|
|
rtc::CritScope stream_lock(&stream_crit_);
|
Use suffixed {uint,int}{8,16,32,64}_t types.
Removes the use of uint8, etc. in favor of uint8_t.
BUG=webrtc:5024
R=henrik.lundin@webrtc.org, henrikg@webrtc.org, perkj@webrtc.org, solenberg@webrtc.org, stefan@webrtc.org, tina.legrand@webrtc.org
Review URL: https://codereview.webrtc.org/1362503003 .
Cr-Commit-Position: refs/heads/master@{#10196}
2015-10-07 12:23:21 +02:00
|
|
|
std::map<uint32_t, WebRtcVideoReceiveStream*>::iterator it =
|
2014-07-18 09:35:58 +00:00
|
|
|
receive_streams_.find(ssrc);
|
|
|
|
|
if (it == receive_streams_.end()) {
|
|
|
|
|
return false;
|
2014-05-13 11:07:01 +00:00
|
|
|
}
|
|
|
|
|
|
2016-02-04 01:24:52 -08:00
|
|
|
it->second->SetSink(sink);
|
2014-05-13 11:07:01 +00:00
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
|
2015-03-04 08:54:32 +00:00
|
|
|
// Collects sender, receiver, and bandwidth-estimation stats into |info|.
// Also dumps the call stats to the log at most once per
// kStatsLogIntervalMs.
bool WebRtcVideoChannel2::GetStats(VideoMediaInfo* info) {
  TRACE_EVENT0("webrtc", "WebRtcVideoChannel2::GetStats");

  // Log stats periodically.
  bool log_stats = false;
  int64_t now_ms = rtc::TimeMillis();
  if (last_stats_log_ms_ == -1 ||
      now_ms - last_stats_log_ms_ > kStatsLogIntervalMs) {
    last_stats_log_ms_ = now_ms;
    log_stats = true;
  }

  info->Clear();
  FillSenderStats(info, log_stats);
  FillReceiverStats(info, log_stats);
  webrtc::Call::Stats stats = call_->GetStats();
  FillBandwidthEstimationStats(stats, info);
  // Propagate the call-level RTT to every sender entry when it is known.
  if (stats.rtt_ms != -1) {
    for (auto& sender : info->senders) {
      sender.rtt_ms = stats.rtt_ms;
    }
  }

  if (log_stats)
    LOG(LS_INFO) << stats.ToString(now_ms);

  return true;
}
|
|
|
|
|
|
2016-08-11 08:41:18 -07:00
|
|
|
// Appends one VideoSenderInfo per send stream to |video_media_info|.
void WebRtcVideoChannel2::FillSenderStats(VideoMediaInfo* video_media_info,
                                          bool log_stats) {
  rtc::CritScope stream_lock(&stream_crit_);
  for (const auto& kv : send_streams_) {
    video_media_info->senders.push_back(
        kv.second->GetVideoSenderInfo(log_stats));
  }
}
|
|
|
|
|
|
2016-08-11 08:41:18 -07:00
|
|
|
// Appends one VideoReceiverInfo per receive stream to |video_media_info|.
void WebRtcVideoChannel2::FillReceiverStats(VideoMediaInfo* video_media_info,
                                            bool log_stats) {
  rtc::CritScope stream_lock(&stream_crit_);
  for (const auto& kv : receive_streams_) {
    video_media_info->receivers.push_back(
        kv.second->GetVideoReceiverInfo(log_stats));
  }
}
|
|
|
|
|
|
|
|
|
|
void WebRtcVideoChannel2::FillBandwidthEstimationStats(
|
2014-12-11 13:26:09 +00:00
|
|
|
const webrtc::Call::Stats& stats,
|
2014-07-18 11:11:55 +00:00
|
|
|
VideoMediaInfo* video_media_info) {
|
2014-11-05 14:05:29 +00:00
|
|
|
BandwidthEstimationInfo bwe_info;
|
|
|
|
|
bwe_info.available_send_bandwidth = stats.send_bandwidth_bps;
|
|
|
|
|
bwe_info.available_recv_bandwidth = stats.recv_bandwidth_bps;
|
|
|
|
|
bwe_info.bucket_delay = stats.pacer_delay_ms;
|
|
|
|
|
|
|
|
|
|
// Get send stream bitrate stats.
|
|
|
|
|
rtc::CritScope stream_lock(&stream_crit_);
|
Use suffixed {uint,int}{8,16,32,64}_t types.
Removes the use of uint8, etc. in favor of uint8_t.
BUG=webrtc:5024
R=henrik.lundin@webrtc.org, henrikg@webrtc.org, perkj@webrtc.org, solenberg@webrtc.org, stefan@webrtc.org, tina.legrand@webrtc.org
Review URL: https://codereview.webrtc.org/1362503003 .
Cr-Commit-Position: refs/heads/master@{#10196}
2015-10-07 12:23:21 +02:00
|
|
|
for (std::map<uint32_t, WebRtcVideoSendStream*>::iterator stream =
|
2014-11-05 14:05:29 +00:00
|
|
|
send_streams_.begin();
|
Use suffixed {uint,int}{8,16,32,64}_t types.
Removes the use of uint8, etc. in favor of uint8_t.
BUG=webrtc:5024
R=henrik.lundin@webrtc.org, henrikg@webrtc.org, perkj@webrtc.org, solenberg@webrtc.org, stefan@webrtc.org, tina.legrand@webrtc.org
Review URL: https://codereview.webrtc.org/1362503003 .
Cr-Commit-Position: refs/heads/master@{#10196}
2015-10-07 12:23:21 +02:00
|
|
|
stream != send_streams_.end(); ++stream) {
|
2014-11-05 14:05:29 +00:00
|
|
|
stream->second->FillBandwidthEstimationInfo(&bwe_info);
|
|
|
|
|
}
|
|
|
|
|
video_media_info->bw_estimations.push_back(bwe_info);
|
2014-07-18 11:11:55 +00:00
|
|
|
}
|
|
|
|
|
|
2014-05-13 11:07:01 +00:00
|
|
|
void WebRtcVideoChannel2::OnPacketReceived(
|
2016-03-20 06:15:43 -07:00
|
|
|
rtc::CopyOnWriteBuffer* packet,
|
2014-07-29 17:36:52 +00:00
|
|
|
const rtc::PacketTime& packet_time) {
|
2015-09-08 05:36:15 -07:00
|
|
|
const webrtc::PacketTime webrtc_packet_time(packet_time.timestamp,
|
|
|
|
|
packet_time.not_before);
|
2014-05-14 13:58:13 +00:00
|
|
|
const webrtc::PacketReceiver::DeliveryStatus delivery_result =
|
2015-09-08 05:36:15 -07:00
|
|
|
call_->Receiver()->DeliverPacket(
|
|
|
|
|
webrtc::MediaType::VIDEO,
|
2016-03-20 06:15:43 -07:00
|
|
|
packet->cdata(), packet->size(),
|
2015-09-08 05:36:15 -07:00
|
|
|
webrtc_packet_time);
|
2014-05-14 13:58:13 +00:00
|
|
|
switch (delivery_result) {
|
|
|
|
|
case webrtc::PacketReceiver::DELIVERY_OK:
|
|
|
|
|
return;
|
|
|
|
|
case webrtc::PacketReceiver::DELIVERY_PACKET_ERROR:
|
|
|
|
|
return;
|
|
|
|
|
case webrtc::PacketReceiver::DELIVERY_UNKNOWN_SSRC:
|
|
|
|
|
break;
|
2014-05-13 11:07:01 +00:00
|
|
|
}
|
|
|
|
|
|
Use suffixed {uint,int}{8,16,32,64}_t types.
Removes the use of uint8, etc. in favor of uint8_t.
BUG=webrtc:5024
R=henrik.lundin@webrtc.org, henrikg@webrtc.org, perkj@webrtc.org, solenberg@webrtc.org, stefan@webrtc.org, tina.legrand@webrtc.org
Review URL: https://codereview.webrtc.org/1362503003 .
Cr-Commit-Position: refs/heads/master@{#10196}
2015-10-07 12:23:21 +02:00
|
|
|
uint32_t ssrc = 0;
|
2016-03-20 06:15:43 -07:00
|
|
|
if (!GetRtpSsrc(packet->cdata(), packet->size(), &ssrc)) {
|
2014-05-13 11:07:01 +00:00
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
2015-07-10 11:27:55 -07:00
|
|
|
int payload_type = 0;
|
2016-03-20 06:15:43 -07:00
|
|
|
if (!GetRtpPayloadType(packet->cdata(), packet->size(), &payload_type)) {
|
2015-07-10 11:27:55 -07:00
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// See if this payload_type is registered as one that usually gets its own
|
|
|
|
|
// SSRC (RTX) or at least is safe to drop either way (ULPFEC). If it is, and
|
|
|
|
|
// it wasn't handled above by DeliverPacket, that means we don't know what
|
|
|
|
|
// stream it associates with, and we shouldn't ever create an implicit channel
|
|
|
|
|
// for these.
|
|
|
|
|
for (auto& codec : recv_codecs_) {
|
|
|
|
|
if (payload_type == codec.rtx_payload_type ||
|
|
|
|
|
payload_type == codec.fec.red_rtx_payload_type ||
|
|
|
|
|
payload_type == codec.fec.ulpfec_payload_type) {
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2014-08-12 23:17:13 +00:00
|
|
|
switch (unsignalled_ssrc_handler_->OnUnsignalledSsrc(this, ssrc)) {
|
|
|
|
|
case UnsignalledSsrcHandler::kDropPacket:
|
|
|
|
|
return;
|
|
|
|
|
case UnsignalledSsrcHandler::kDeliverPacket:
|
|
|
|
|
break;
|
|
|
|
|
}
|
2014-05-13 11:07:01 +00:00
|
|
|
|
2015-09-08 05:36:15 -07:00
|
|
|
if (call_->Receiver()->DeliverPacket(
|
|
|
|
|
webrtc::MediaType::VIDEO,
|
2016-03-20 06:15:43 -07:00
|
|
|
packet->cdata(), packet->size(),
|
2015-09-08 05:36:15 -07:00
|
|
|
webrtc_packet_time) != webrtc::PacketReceiver::DELIVERY_OK) {
|
2014-08-12 23:17:13 +00:00
|
|
|
LOG(LS_WARNING) << "Failed to deliver RTP packet on re-delivery.";
|
2014-05-13 11:07:01 +00:00
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
void WebRtcVideoChannel2::OnRtcpReceived(
|
2016-03-20 06:15:43 -07:00
|
|
|
rtc::CopyOnWriteBuffer* packet,
|
2014-07-29 17:36:52 +00:00
|
|
|
const rtc::PacketTime& packet_time) {
|
2015-09-08 05:36:15 -07:00
|
|
|
const webrtc::PacketTime webrtc_packet_time(packet_time.timestamp,
|
|
|
|
|
packet_time.not_before);
|
2015-11-18 13:47:16 +01:00
|
|
|
// TODO(pbos): Check webrtc::PacketReceiver::DELIVERY_OK once we deliver
|
|
|
|
|
// for both audio and video on the same path. Since BundleFilter doesn't
|
|
|
|
|
// filter RTCP anymore incoming RTCP packets could've been going to audio (so
|
|
|
|
|
// logging failures spam the log).
|
|
|
|
|
call_->Receiver()->DeliverPacket(
|
|
|
|
|
webrtc::MediaType::VIDEO,
|
2016-03-20 06:15:43 -07:00
|
|
|
packet->cdata(), packet->size(),
|
2015-11-18 13:47:16 +01:00
|
|
|
webrtc_packet_time);
|
2014-05-13 11:07:01 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
void WebRtcVideoChannel2::OnReadyToSend(bool ready) {
|
2014-09-03 16:17:12 +00:00
|
|
|
LOG(LS_VERBOSE) << "OnReadyToSend: " << (ready ? "Ready." : "Not ready.");
|
2016-03-22 15:32:27 -07:00
|
|
|
call_->SignalChannelNetworkState(
|
|
|
|
|
webrtc::MediaType::VIDEO,
|
|
|
|
|
ready ? webrtc::kNetworkUp : webrtc::kNetworkDown);
|
2014-05-13 11:07:01 +00:00
|
|
|
}
|
|
|
|
|
|
2016-03-29 17:27:21 -07:00
|
|
|
void WebRtcVideoChannel2::OnNetworkRouteChanged(
|
|
|
|
|
const std::string& transport_name,
|
2016-04-19 15:41:36 -07:00
|
|
|
const rtc::NetworkRoute& network_route) {
|
|
|
|
|
call_->OnNetworkRouteChanged(transport_name, network_route);
|
2016-03-29 17:27:21 -07:00
|
|
|
}
|
|
|
|
|
|
2014-05-13 11:07:01 +00:00
|
|
|
// Installs the network interface and enlarges both RTP socket buffers.
void WebRtcVideoChannel2::SetInterface(NetworkInterface* iface) {
  MediaChannel::SetInterface(iface);
  // Set the RTP recv/send buffer to a bigger size
  MediaChannel::SetOption(NetworkInterface::ST_RTP, rtc::Socket::OPT_RCVBUF,
                          kVideoRtpBufferSize);

  // Speculative change to increase the outbound socket buffer size.
  // In b/15152257, we are seeing a significant number of packets discarded
  // due to lack of socket buffer space, although it's not yet clear what the
  // ideal value should be.
  MediaChannel::SetOption(NetworkInterface::ST_RTP, rtc::Socket::OPT_SNDBUF,
                          kVideoRtpBufferSize);
}
|
|
|
|
|
|
2015-10-02 03:39:33 -07:00
|
|
|
bool WebRtcVideoChannel2::SendRtp(const uint8_t* data,
|
|
|
|
|
size_t len,
|
|
|
|
|
const webrtc::PacketOptions& options) {
|
2016-03-20 06:15:43 -07:00
|
|
|
rtc::CopyOnWriteBuffer packet(data, len, kMaxRtpPacketLen);
|
2015-10-15 07:26:07 -07:00
|
|
|
rtc::PacketOptions rtc_options;
|
|
|
|
|
rtc_options.packet_id = options.packet_id;
|
|
|
|
|
return MediaChannel::SendPacket(&packet, rtc_options);
|
2014-05-13 11:07:01 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
bool WebRtcVideoChannel2::SendRtcp(const uint8_t* data, size_t len) {
|
2016-03-20 06:15:43 -07:00
|
|
|
rtc::CopyOnWriteBuffer packet(data, len, kMaxRtpPacketLen);
|
2015-10-15 07:26:07 -07:00
|
|
|
return MediaChannel::SendRtcp(&packet, rtc::PacketOptions());
|
2014-05-13 11:07:01 +00:00
|
|
|
}
|
|
|
|
|
|
2014-06-06 10:49:19 +00:00
|
|
|
// Bundles the parameters needed to (re)create a webrtc::VideoSendStream.
WebRtcVideoChannel2::WebRtcVideoSendStream::VideoSendStreamParameters::
    VideoSendStreamParameters(
        const webrtc::VideoSendStream::Config& config,
        const VideoOptions& options,
        int max_bitrate_bps,
        const rtc::Optional<VideoCodecSettings>& codec_settings)
    : config(config),
      options(options),
      max_bitrate_bps(max_bitrate_bps),
      codec_settings(codec_settings) {}
|
2014-06-06 10:49:19 +00:00
|
|
|
|
2015-05-19 23:09:35 +02:00
|
|
|
// Tracks an encoder instance. External (hardware) encoders are wrapped in
// a software-fallback adapter; the raw external pointer is kept so it can
// be released through its factory later.
WebRtcVideoChannel2::WebRtcVideoSendStream::AllocatedEncoder::AllocatedEncoder(
    webrtc::VideoEncoder* encoder,
    webrtc::VideoCodecType type,
    bool external)
    : encoder(encoder),
      external_encoder(nullptr),
      type(type),
      external(external) {
  if (external) {
    external_encoder = encoder;
    this->encoder =
        new webrtc::VideoEncoderSoftwareFallbackWrapper(type, encoder);
  }
}
|
|
|
|
|
|
2014-05-13 11:07:01 +00:00
|
|
|
// Constructs a send stream wrapper for the SSRCs in |sp|. The underlying
// webrtc::VideoSendStream is NOT created here; it is created later (e.g. when
// a codec is set via SetCodec below, or by other members of this class).
WebRtcVideoChannel2::WebRtcVideoSendStream::WebRtcVideoSendStream(
    webrtc::Call* call,
    const StreamParams& sp,
    const webrtc::VideoSendStream::Config& config,
    const VideoOptions& options,
    WebRtcVideoEncoderFactory* external_encoder_factory,
    bool enable_cpu_overuse_detection,
    int max_bitrate_bps,
    const rtc::Optional<VideoCodecSettings>& codec_settings,
    const rtc::Optional<std::vector<webrtc::RtpExtension>>& rtp_extensions,
    // TODO(deadbeef): Don't duplicate information between send_params,
    // rtp_extensions, options, etc.
    const VideoSendParameters& send_params)
    : worker_thread_(rtc::Thread::Current()),
      ssrcs_(sp.ssrcs),
      ssrc_groups_(sp.ssrc_groups),
      call_(call),
      cpu_restricted_counter_(0),
      number_of_cpu_adapt_changes_(0),
      frame_count_(0),
      cpu_restricted_frame_count_(0),
      source_(nullptr),
      external_encoder_factory_(external_encoder_factory),
      stream_(nullptr),
      parameters_(config, options, max_bitrate_bps, codec_settings),
      rtp_parameters_(CreateRtpParametersWithOneEncoding()),
      pending_encoder_reconfiguration_(false),
      allocated_encoder_(nullptr, webrtc::kVideoCodecUnknown, false),
      sending_(false),
      last_frame_timestamp_ms_(0) {
  parameters_.config.rtp.max_packet_size = kVideoMtu;
  parameters_.conference_mode = send_params.conference_mode;

  // Primary SSRCs go into the main RTP config; FID (RTX retransmission)
  // SSRCs are paired with them in the rtx config.
  sp.GetPrimarySsrcs(&parameters_.config.rtp.ssrcs);
  sp.GetFidSsrcs(parameters_.config.rtp.ssrcs,
                 &parameters_.config.rtp.rtx.ssrcs);
  parameters_.config.rtp.c_name = sp.cname;
  // |rtp_extensions| is optional: unset means the negotiated extension list
  // is not yet known (e.g. remote description not applied).
  if (rtp_extensions) {
    parameters_.config.rtp.extensions = *rtp_extensions;
  }
  parameters_.config.rtp.rtcp_mode = send_params.rtcp.reduced_size
                                         ? webrtc::RtcpMode::kReducedSize
                                         : webrtc::RtcpMode::kCompound;
  // CPU overuse detection is opt-in; when disabled no overuse callbacks fire.
  parameters_.config.overuse_callback =
      enable_cpu_overuse_detection ? this : nullptr;

  // Only request rotation at the source when we positively know that the remote
  // side doesn't support the rotation extension. This allows us to prepare the
  // encoder in the expectation that rotation is supported - which is the common
  // case.
  sink_wants_.rotation_applied =
      rtp_extensions &&
      !ContainsHeaderExtension(*rtp_extensions,
                               webrtc::RtpExtension::kVideoRotationUri);

  // If a codec is already negotiated, configure the encoder immediately.
  if (codec_settings) {
    SetCodec(*codec_settings);
  }
}
|
|
|
|
|
|
|
|
|
|
WebRtcVideoChannel2::WebRtcVideoSendStream::~WebRtcVideoSendStream() {
  // Detach from the frame source first so no further frames are delivered
  // while the stream is being torn down.
  DisconnectSource();
  if (stream_)
    call_->DestroyVideoSendStream(stream_);
  DestroyVideoEncoder(&allocated_encoder_);
  // Flush lifetime statistics (e.g. CPU-limited frame percentage) to UMA.
  UpdateHistograms();
}
|
|
|
|
|
|
|
|
|
|
void WebRtcVideoChannel2::WebRtcVideoSendStream::UpdateHistograms() const {
|
|
|
|
|
const int kMinRequiredFrames = 200;
|
|
|
|
|
if (frame_count_ > kMinRequiredFrames) {
|
|
|
|
|
RTC_LOGGED_HISTOGRAM_PERCENTAGE(
|
|
|
|
|
"WebRTC.Video.CpuLimitedResolutionInPercent",
|
|
|
|
|
cpu_restricted_frame_count_ * 100 / frame_count_);
|
|
|
|
|
}
|
2014-05-13 11:07:01 +00:00
|
|
|
}
|
|
|
|
|
|
2016-02-12 13:30:57 +01:00
|
|
|
// Receives a captured frame from the source, tracks frame-parameter changes
// (dimensions/rotation/texture) to trigger encoder reconfiguration, aligns
// the frame timestamp to webrtc time, and forwards the frame to the send
// stream when sending is active.
void WebRtcVideoChannel2::WebRtcVideoSendStream::OnFrame(
    const VideoFrame& frame) {
  TRACE_EVENT0("webrtc", "WebRtcVideoSendStream::OnFrame");
  // Re-wrap the buffer; timestamps are filled in below once aligned.
  webrtc::VideoFrame video_frame(frame.video_frame_buffer(), 0, 0,
                                 frame.rotation());
  rtc::CritScope cs(&lock_);

  // Cache the last seen frame parameters. Any change means the encoder must
  // be reconfigured before this frame can be encoded correctly.
  if (video_frame.width() != last_frame_info_.width ||
      video_frame.height() != last_frame_info_.height ||
      video_frame.rotation() != last_frame_info_.rotation ||
      video_frame.is_texture() != last_frame_info_.is_texture) {
    last_frame_info_.width = video_frame.width();
    last_frame_info_.height = video_frame.height();
    last_frame_info_.rotation = video_frame.rotation();
    last_frame_info_.is_texture = video_frame.is_texture();
    pending_encoder_reconfiguration_ = true;

    LOG(LS_INFO) << "Video frame parameters changed: dimensions="
                 << last_frame_info_.width << "x" << last_frame_info_.height
                 << ", rotation=" << last_frame_info_.rotation
                 << ", texture=" << last_frame_info_.is_texture;
  }

  if (stream_ == NULL) {
    // Frame input before send codecs are configured, dropping frame.
    return;
  }

  // frame.GetTimeStamp() is in nanoseconds; convert to milliseconds.
  int64_t frame_delta_ms = frame.GetTimeStamp() / rtc::kNumNanosecsPerMillisec;

  // frame->GetTimeStamp() is essentially a delta, align to webrtc time
  if (!first_frame_timestamp_ms_) {
    first_frame_timestamp_ms_ =
        rtc::Optional<int64_t>(rtc::TimeMillis() - frame_delta_ms);
  }

  last_frame_timestamp_ms_ = *first_frame_timestamp_ms_ + frame_delta_ms;

  video_frame.set_render_time_ms(last_frame_timestamp_ms_);

  // Apply any pending parameter/option change before handing off the frame.
  if (pending_encoder_reconfiguration_) {
    ReconfigureEncoder();
    pending_encoder_reconfiguration_ = false;
  }

  // Not sending, abort after reconfiguration. Reconfiguration should still
  // occur to permit sending this input as quickly as possible once we start
  // sending (without having to reconfigure then).
  if (!sending_) {
    return;
  }

  // Statistics for UpdateHistograms(): count frames sent while CPU
  // adaptation restricted the resolution.
  ++frame_count_;
  if (cpu_restricted_counter_ > 0)
    ++cpu_restricted_frame_count_;

  stream_->Input()->IncomingCapturedFrame(video_frame);
}
|
|
|
|
|
|
2016-06-02 16:23:38 -07:00
|
|
|
// Updates the video options and/or the frame source of this send stream.
// |options| is only honored when |enable| is true. Passing a null |source|
// while a stream exists pushes a final black frame so the remote side does
// not keep rendering the last camera frame. Always returns true.
// Must be called on the worker thread (checked by |thread_checker_|).
bool WebRtcVideoChannel2::WebRtcVideoSendStream::SetVideoSend(
    bool enable,
    const VideoOptions* options,
    rtc::VideoSourceInterface<cricket::VideoFrame>* source) {
  TRACE_EVENT0("webrtc", "WebRtcVideoSendStream::SetVideoSend");
  RTC_DCHECK(thread_checker_.CalledOnValidThread());

  // Ignore |options| pointer if |enable| is false.
  bool options_present = enable && options;
  bool source_changing = source_ != source;
  if (source_changing) {
    // Detach from the old source before taking |lock_| below; see the
    // lock-ordering comments in DisconnectSource().
    DisconnectSource();
  }

  if (options_present || source_changing) {
    rtc::CritScope cs(&lock_);

    if (options_present) {
      VideoOptions old_options = parameters_.options;
      parameters_.options.SetAll(*options);
      // Reconfigure encoder settings on the next frame or stream
      // recreation if the options changed.
      if (parameters_.options != old_options) {
        pending_encoder_reconfiguration_ = true;
      }
    }

    if (source_changing) {
      // Reset timestamps to realign new incoming frames to a webrtc timestamp.
      // A new source may have a different timestamp delta than the previous
      // one.
      first_frame_timestamp_ms_ = rtc::Optional<int64_t>();

      if (source == nullptr && stream_ != nullptr) {
        LOG(LS_VERBOSE) << "Disabling capturer, sending black frame.";
        // Force this black frame not to be dropped due to timestamp order
        // check. As IncomingCapturedFrame will drop the frame if this frame's
        // timestamp is less than or equal to last frame's timestamp, it is
        // necessary to give this black frame a larger timestamp than the
        // previous one.
        last_frame_timestamp_ms_ += 1;
        // Black frame reuses the last seen dimensions and rotation so no
        // encoder reconfiguration is triggered.
        rtc::scoped_refptr<webrtc::I420Buffer> black_buffer(
            webrtc::I420Buffer::Create(last_frame_info_.width,
                                       last_frame_info_.height));
        black_buffer->SetToBlack();

        stream_->Input()->IncomingCapturedFrame(webrtc::VideoFrame(
            black_buffer, 0 /* timestamp (90 kHz) */,
            last_frame_timestamp_ms_, last_frame_info_.rotation));
      }
      source_ = source;
    }
  }

  // |source_->AddOrUpdateSink| may not be called while holding |lock_| since
  // that might cause a lock order inversion.
  if (source_changing && source_) {
    source_->AddOrUpdateSink(this, sink_wants_);
  }
  return true;
}
|
|
|
|
|
|
2016-04-08 02:23:55 -07:00
|
|
|
void WebRtcVideoChannel2::WebRtcVideoSendStream::DisconnectSource() {
|
2016-02-29 00:04:41 -08:00
|
|
|
RTC_DCHECK(thread_checker_.CalledOnValidThread());
|
2016-04-08 02:23:55 -07:00
|
|
|
if (source_ == NULL) {
|
|
|
|
|
return;
|
2014-05-13 11:07:01 +00:00
|
|
|
}
|
2016-02-12 13:30:57 +01:00
|
|
|
|
2016-04-08 02:23:55 -07:00
|
|
|
// |source_->RemoveSink| may not be called while holding |lock_| since
|
2016-03-10 18:32:00 +01:00
|
|
|
// that might cause a lock order inversion.
|
2016-04-08 02:23:55 -07:00
|
|
|
source_->RemoveSink(this);
|
|
|
|
|
source_ = nullptr;
|
2016-06-02 16:23:38 -07:00
|
|
|
// Reset |cpu_restricted_counter_| if the source is changed. It is not
|
2016-02-29 00:04:41 -08:00
|
|
|
// possible to know if the video resolution is restricted by CPU usage after
|
2016-06-02 16:23:38 -07:00
|
|
|
// the source is changed since the next source might be screen capture
|
2016-02-29 00:04:41 -08:00
|
|
|
// with another resolution and frame rate.
|
|
|
|
|
cpu_restricted_counter_ = 0;
|
2014-05-13 11:07:01 +00:00
|
|
|
}
|
|
|
|
|
|
Use suffixed {uint,int}{8,16,32,64}_t types.
Removes the use of uint8, etc. in favor of uint8_t.
BUG=webrtc:5024
R=henrik.lundin@webrtc.org, henrikg@webrtc.org, perkj@webrtc.org, solenberg@webrtc.org, stefan@webrtc.org, tina.legrand@webrtc.org
Review URL: https://codereview.webrtc.org/1362503003 .
Cr-Commit-Position: refs/heads/master@{#10196}
2015-10-07 12:23:21 +02:00
|
|
|
// Returns the send SSRCs negotiated for this stream.
const std::vector<uint32_t>&
WebRtcVideoChannel2::WebRtcVideoSendStream::GetSsrcs() const {
  return ssrcs_;
}
|
|
|
|
|
|
2014-10-14 04:25:33 +00:00
|
|
|
// Maps a codec name (as negotiated in SDP) to the corresponding
// webrtc::VideoCodecType. Names that are not VP8, VP9 or H264 map to
// kVideoCodecUnknown.
webrtc::VideoCodecType CodecTypeFromName(const std::string& name) {
  if (CodecNamesEq(name, kVp8CodecName))
    return webrtc::kVideoCodecVP8;
  if (CodecNamesEq(name, kVp9CodecName))
    return webrtc::kVideoCodecVP9;
  if (CodecNamesEq(name, kH264CodecName))
    return webrtc::kVideoCodecH264;
  return webrtc::kVideoCodecUnknown;
}
|
|
|
|
|
|
|
|
|
|
// Returns an encoder for |codec|. Reuses the currently allocated encoder when
// the codec type is unchanged; otherwise prefers an encoder from the external
// factory (if one is set and can create this type), falling back to the
// built-in software VP8/VP9/H264 implementations.
WebRtcVideoChannel2::WebRtcVideoSendStream::AllocatedEncoder
WebRtcVideoChannel2::WebRtcVideoSendStream::CreateVideoEncoder(
    const VideoCodec& codec) {
  webrtc::VideoCodecType type = CodecTypeFromName(codec.name);

  // Do not re-create encoders of the same type.
  // Fixed: NULL -> nullptr throughout, consistent with the rest of the file.
  if (type == allocated_encoder_.type && allocated_encoder_.encoder != nullptr) {
    return allocated_encoder_;
  }

  if (external_encoder_factory_ != nullptr) {
    webrtc::VideoEncoder* encoder =
        external_encoder_factory_->CreateVideoEncoder(type);
    if (encoder != nullptr) {
      // |true| marks the encoder as externally owned so DestroyVideoEncoder
      // returns it to the factory.
      return AllocatedEncoder(encoder, type, true);
    }
  }

  if (type == webrtc::kVideoCodecVP8) {
    return AllocatedEncoder(
        webrtc::VideoEncoder::Create(webrtc::VideoEncoder::kVp8), type, false);
  } else if (type == webrtc::kVideoCodecVP9) {
    return AllocatedEncoder(
        webrtc::VideoEncoder::Create(webrtc::VideoEncoder::kVp9), type, false);
  } else if (type == webrtc::kVideoCodecH264) {
    return AllocatedEncoder(
        webrtc::VideoEncoder::Create(webrtc::VideoEncoder::kH264), type, false);
  }

  // This shouldn't happen, we should not be trying to create something we don't
  // support.
  RTC_DCHECK(false);
  return AllocatedEncoder(nullptr, webrtc::kVideoCodecUnknown, false);
}
|
|
|
|
|
|
|
|
|
|
// Releases an encoder previously obtained from CreateVideoEncoder. An
// externally created encoder is handed back to the factory; in either case
// the |encoder| member is deleted afterwards.
// NOTE(review): |external_encoder| appears to be tracked separately from
// |encoder| — presumably |encoder| may wrap it (e.g. simulcast adapter);
// confirm against AllocatedEncoder's definition.
void WebRtcVideoChannel2::WebRtcVideoSendStream::DestroyVideoEncoder(
    AllocatedEncoder* encoder) {
  if (encoder->external)
    external_encoder_factory_->DestroyVideoEncoder(encoder->external_encoder);
  delete encoder->encoder;
}
|
|
|
|
|
|
2016-03-01 04:29:59 -08:00
|
|
|
// Applies a new send codec: rebuilds the encoder configuration, (re)allocates
// the encoder, updates the RTP config (RTX, FEC, NACK) and recreates the
// underlying webrtc::VideoSendStream.
void WebRtcVideoChannel2::WebRtcVideoSendStream::SetCodec(
    const VideoCodecSettings& codec_settings) {
  parameters_.encoder_config = CreateVideoEncoderConfig(codec_settings.codec);
  RTC_DCHECK(!parameters_.encoder_config.streams.empty());

  // CreateVideoEncoder returns the already-allocated encoder when the codec
  // type is unchanged, so |new_encoder| may equal |allocated_encoder_|.
  AllocatedEncoder new_encoder = CreateVideoEncoder(codec_settings.codec);
  parameters_.config.encoder_settings.encoder = new_encoder.encoder;
  parameters_.config.encoder_settings.full_overuse_time = new_encoder.external;
  parameters_.config.encoder_settings.payload_name = codec_settings.codec.name;
  parameters_.config.encoder_settings.payload_type = codec_settings.codec.id;
  if (new_encoder.external) {
    // External (e.g. hardware) encoders may pull frames themselves rather
    // than being fed; ask the factory.
    webrtc::VideoCodecType type = CodecTypeFromName(codec_settings.codec.name);
    parameters_.config.encoder_settings.internal_source =
        external_encoder_factory_->EncoderTypeHasInternalSource(type);
  }
  parameters_.config.rtp.fec = codec_settings.fec;

  // Set RTX payload type if RTX is enabled.
  if (!parameters_.config.rtp.rtx.ssrcs.empty()) {
    if (codec_settings.rtx_payload_type == -1) {
      // RTX SSRCs without a payload type are unusable; drop them rather than
      // sending malformed retransmissions.
      LOG(LS_WARNING) << "RTX SSRCs configured but there's no configured RTX "
                         "payload type. Ignoring.";
      parameters_.config.rtp.rtx.ssrcs.clear();
    } else {
      parameters_.config.rtp.rtx.payload_type = codec_settings.rtx_payload_type;
    }
  }

  // Enable NACK history only when the negotiated codec supports NACK.
  parameters_.config.rtp.nack.rtp_history_ms =
      HasNack(codec_settings.codec) ? kNackHistoryMs : 0;

  parameters_.codec_settings =
      rtc::Optional<WebRtcVideoChannel2::VideoCodecSettings>(codec_settings);

  LOG(LS_INFO) << "RecreateWebRtcStream (send) because of SetCodec.";
  RecreateWebRtcStream();
  // Destroy the previous encoder only after the stream has been recreated
  // with the new one, then remember the new allocation.
  if (allocated_encoder_.encoder != new_encoder.encoder) {
    DestroyVideoEncoder(&allocated_encoder_);
    allocated_encoder_ = new_encoder;
  }
}
|
|
|
|
|
|
2015-12-09 12:37:51 -08:00
|
|
|
// Applies the subset of send parameters that changed. Stream-construction
// parameters (RTCP mode, header extensions) force a stream recreation;
// bitrate changes only mark the encoder for reconfiguration. The sink update
// at the end deliberately happens after |lock_| is released.
void WebRtcVideoChannel2::WebRtcVideoSendStream::SetSendParameters(
    const ChangedSendParameters& params) {
  {
    rtc::CritScope cs(&lock_);
    // |recreate_stream| means construction-time parameters have changed and the
    // sending stream needs to be reset with the new config.
    bool recreate_stream = false;
    if (params.rtcp_mode) {
      parameters_.config.rtp.rtcp_mode = *params.rtcp_mode;
      recreate_stream = true;
    }
    if (params.rtp_header_extensions) {
      parameters_.config.rtp.extensions = *params.rtp_header_extensions;
      recreate_stream = true;
    }
    if (params.max_bandwidth_bps) {
      // Bitrate changes don't need a new stream, only an encoder reconfig.
      parameters_.max_bitrate_bps = *params.max_bandwidth_bps;
      pending_encoder_reconfiguration_ = true;
    }
    if (params.conference_mode) {
      parameters_.conference_mode = *params.conference_mode;
    }

    // Set codecs and options.
    if (params.codec) {
      SetCodec(*params.codec);
      recreate_stream = false;  // SetCodec has already recreated the stream.
    } else if (params.conference_mode && parameters_.codec_settings) {
      // Conference-mode changes affect the encoder config derived from the
      // current codec, so re-apply it.
      SetCodec(*parameters_.codec_settings);
      recreate_stream = false;  // SetCodec has already recreated the stream.
    }
    if (recreate_stream) {
      LOG(LS_INFO)
          << "RecreateWebRtcStream (send) because of SetSendParameters";
      RecreateWebRtcStream();
    }
  }  // release |lock_|

  // |source_->AddOrUpdateSink| may not be called while holding |lock_| since
  // that might cause a lock order inversion.
  if (params.rtp_header_extensions) {
    // Ask the source for pre-rotated frames only when the receiver cannot
    // apply rotation itself (no video-rotation RTP extension negotiated).
    sink_wants_.rotation_applied = !ContainsHeaderExtension(
        *params.rtp_header_extensions, webrtc::RtpExtension::kVideoRotationUri);
    if (source_) {
      source_->AddOrUpdateSink(this, sink_wants_);
    }
  }
}
|
|
|
|
|
|
2016-03-16 19:07:43 -07:00
|
|
|
bool WebRtcVideoChannel2::WebRtcVideoSendStream::SetRtpParameters(
|
|
|
|
|
const webrtc::RtpParameters& new_parameters) {
|
|
|
|
|
if (!ValidateRtpParameters(new_parameters)) {
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
rtc::CritScope cs(&lock_);
|
|
|
|
|
if (new_parameters.encodings[0].max_bitrate_bps !=
|
|
|
|
|
rtp_parameters_.encodings[0].max_bitrate_bps) {
|
|
|
|
|
pending_encoder_reconfiguration_ = true;
|
|
|
|
|
}
|
|
|
|
|
rtp_parameters_ = new_parameters;
|
2016-04-20 16:23:10 -07:00
|
|
|
// Codecs are currently handled at the WebRtcVideoChannel2 level.
|
|
|
|
|
rtp_parameters_.codecs.clear();
|
2016-03-22 15:42:00 -07:00
|
|
|
// Encoding may have been activated/deactivated.
|
|
|
|
|
UpdateSendState();
|
2016-03-16 19:07:43 -07:00
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
|
2016-03-22 15:42:00 -07:00
|
|
|
// Returns a copy of the current RTP parameters, taken under |lock_| so the
// caller sees a consistent snapshot.
webrtc::RtpParameters
WebRtcVideoChannel2::WebRtcVideoSendStream::GetRtpParameters() const {
  rtc::CritScope scope(&lock_);
  return rtp_parameters_;
}
|
|
|
|
|
|
2016-03-16 19:07:43 -07:00
|
|
|
bool WebRtcVideoChannel2::WebRtcVideoSendStream::ValidateRtpParameters(
|
|
|
|
|
const webrtc::RtpParameters& rtp_parameters) {
|
|
|
|
|
if (rtp_parameters.encodings.size() != 1) {
|
|
|
|
|
LOG(LS_ERROR)
|
|
|
|
|
<< "Attempted to set RtpParameters without exactly one encoding";
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
|
2016-03-22 15:42:00 -07:00
|
|
|
void WebRtcVideoChannel2::WebRtcVideoSendStream::UpdateSendState() {
|
|
|
|
|
// TODO(deadbeef): Need to handle more than one encoding in the future.
|
|
|
|
|
RTC_DCHECK(rtp_parameters_.encodings.size() == 1u);
|
|
|
|
|
if (sending_ && rtp_parameters_.encodings[0].active) {
|
|
|
|
|
RTC_DCHECK(stream_ != nullptr);
|
|
|
|
|
stream_->Start();
|
|
|
|
|
} else {
|
|
|
|
|
if (stream_ != nullptr) {
|
|
|
|
|
stream_->Stop();
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2014-11-07 10:54:43 +00:00
|
|
|
// Builds a webrtc::VideoEncoderConfig for |codec| from the current stream
// state: screencast vs. realtime content type, dimensions clamped to the
// codec max (based on the last seen frame), simulcast stream count, bitrate
// caps, and temporal-layer setup for conference screencast and VP9.
webrtc::VideoEncoderConfig
WebRtcVideoChannel2::WebRtcVideoSendStream::CreateVideoEncoderConfig(
    const VideoCodec& codec) const {
  webrtc::VideoEncoderConfig encoder_config;
  bool is_screencast = parameters_.options.is_screencast.value_or(false);
  if (is_screencast) {
    encoder_config.min_transmit_bitrate_bps =
        1000 * parameters_.options.screencast_min_bitrate_kbps.value_or(0);
    encoder_config.content_type =
        webrtc::VideoEncoderConfig::ContentType::kScreen;
  } else {
    encoder_config.min_transmit_bitrate_bps = 0;
    encoder_config.content_type =
        webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo;
  }

  // Restrict dimensions according to codec max.
  // |last_frame_info_| caches the dimensions of the most recent frame so the
  // encoder can be configured for the real resolution up front.
  int width = last_frame_info_.width;
  int height = last_frame_info_.height;
  if (!is_screencast) {
    // Screencast keeps the full capture resolution; camera video is clamped.
    if (codec.width < width)
      width = codec.width;
    if (codec.height < height)
      height = codec.height;
  }

  VideoCodec clamped_codec = codec;
  clamped_codec.width = width;
  clamped_codec.height = height;

  // By default, the stream count for the codec configuration should match the
  // number of negotiated ssrcs. But if the codec is blacklisted for simulcast
  // or a screencast, only configure a single stream.
  size_t stream_count = parameters_.config.rtp.ssrcs.size();
  if (IsCodecBlacklistedForSimulcast(codec.name) || is_screencast) {
    stream_count = 1;
  }

  // The effective cap is the smaller positive of the per-encoding RtpParameters
  // bitrate and the channel-level max bandwidth.
  int stream_max_bitrate =
      MinPositive(rtp_parameters_.encodings[0].max_bitrate_bps,
                  parameters_.max_bitrate_bps);
  encoder_config.streams = CreateVideoStreams(
      clamped_codec, parameters_.options, stream_max_bitrate, stream_count);
  encoder_config.expect_encode_from_texture = last_frame_info_.is_texture;

  // Conference mode screencast uses 2 temporal layers split at 100kbit.
  if (parameters_.conference_mode && is_screencast &&
      encoder_config.streams.size() == 1) {
    ScreenshareLayerConfig config = ScreenshareLayerConfig::GetDefault();

    // For screenshare in conference mode, tl0 and tl1 bitrates are piggybacked
    // on the VideoCodec struct as target and max bitrates, respectively.
    // See eg. webrtc::VP8EncoderImpl::SetRates().
    encoder_config.streams[0].target_bitrate_bps =
        config.tl0_bitrate_kbps * 1000;
    encoder_config.streams[0].max_bitrate_bps = config.tl1_bitrate_kbps * 1000;
    encoder_config.streams[0].temporal_layer_thresholds_bps.clear();
    encoder_config.streams[0].temporal_layer_thresholds_bps.push_back(
        config.tl0_bitrate_kbps * 1000);
  }
  // Non-screencast single-stream VP9 gets the default temporal layer count
  // (thresholds vector size is layers - 1).
  if (CodecNamesEq(codec.name, kVp9CodecName) && !is_screencast &&
      encoder_config.streams.size() == 1) {
    encoder_config.streams[0].temporal_layer_thresholds_bps.resize(
        GetDefaultVp9TemporalLayers() - 1);
  }
  return encoder_config;
}
|
|
|
|
|
|
Avoid unnecessary HW video encoder reconfiguration
This change reduces the number of times the Android hardware video
encoder is reconfigured when making an outgoing call. With this change,
the encoder should only be initialized once as opposed to the ~3 times
it happens currently.
Before the fix, the following sequence of events caused the extra
reconfigurations:
1. After the SetLocalDescription call, the WebRtcVideoSendStream is created.
All frames from the camera are dropped until the corresponding
VideoSendStream is created.
2. SetRemoteDescription() triggers the VideoSendStream creation. At
this point, the encoder is configured for the first time, with the
frame dimensions set to a low resolution default (176x144).
3. When the first video frame is received from the camera after the
VideoSendStreamIsCreated, the encoder is reconfigured to the correct
dimensions. If we are using the Android hardware encoder, the default
configuration is set to encode from a memory buffer (use_surface=false).
4. When the frame is passed down to the encoder in
androidmediaencoder_jni.cc EncodeOnCodecThread(), it may be stored in
a texture instead of a memory buffer. In this case, yet another
reconfiguration takes place to enable encoding from a texture.
5. Even if the resolution and texture flag were known at the start of
the call, there would be a reconfiguration involved if the camera is
rotated (such as when making a call from a phone in portrait orientation).
The reason for that is that at construction time, WebRtcVideoEngine2
sets the VideoSinkWants structure parameter to request frames rotated
by the source; the early frames will then arrive in portrait resolution.
When the remote description is finally set, if the rotation RTP extension
is supported by the remote receiver, the source is asked to provide
non-rotated frames. The very next frame will then arrive in landscape
resolution with a non-zero rotation value to be applied by the receiver.
Since the encoder was configured with the last (portrait) frame size,
it's going to need to be reconfigured again.
The fix makes the following changes:
1. WebRtcVideoSendStream::OnFrame() now caches the last seen frame
dimensions, and whether the frame was stored in a texture.
2. When the encoder is configured the first time
(WebRtcVideoSendStream::SetCodec()) - the last seen frame dimensions
are used instead of the default dimensions.
3. A flag that indicates if encoding is to be done from a texture has
been added to the webrtc::VideoStream and webrtc::VideoCodec structs,
and it's been wired up to be passed down all the way to the JNI code in
androidmediaencoder_jni.cc.
4. MediaCodecVideoEncoder::InitEncode is now reading the is_surface
flag from the VideoCodec structure instead of guessing the default as
false. This way we end up with the correct encoder configuration the
first time around.
5. WebRtcVideoSendStream now takes an optimistic guess and requests non-
rotated frames when the supported RtpExtensions list is not available.
This makes the "early" frames arrive non-rotated, and the cached dimensions
will be correct for the common case when the rotation extension is supported.
If the other side is an older endpoint which does not support rotation,
the encoder will have to be reconfigured - but it's better to penalize the
uncommon case rather than the common one.
Review-Url: https://codereview.webrtc.org/2067103002
Cr-Commit-Position: refs/heads/master@{#13173}
2016-06-16 12:08:03 -07:00
|
|
|
void WebRtcVideoChannel2::WebRtcVideoSendStream::ReconfigureEncoder() {
|
2015-09-17 00:24:34 -07:00
|
|
|
RTC_DCHECK(!parameters_.encoder_config.streams.empty());
|
2014-09-03 15:25:49 +00:00
|
|
|
|
2015-10-30 02:47:38 -07:00
|
|
|
RTC_CHECK(parameters_.codec_settings);
|
|
|
|
|
VideoCodecSettings codec_settings = *parameters_.codec_settings;
|
2014-09-03 15:25:49 +00:00
|
|
|
|
2014-11-07 10:54:43 +00:00
|
|
|
webrtc::VideoEncoderConfig encoder_config =
|
Avoid unnecessary HW video encoder reconfiguration
This change reduces the number of times the Android hardware video
encoder is reconfigured when making an outgoing call. With this change,
the encoder should only be initialized once as opposed to the ~3 times
it happens currently.
Before the fix, the following sequence of events caused the extra
reconfigurations:
1. After the SetLocalDescription call, the WebRtcVideoSendStream is created.
All frames from the camera are dropped until the corresponding
VideoSendStream is created.
2. SetRemoteDescription() triggers the VideoSendStream creation. At
this point, the encoder is configured for the first time, with the
frame dimensions set to a low resolution default (176x144).
3. When the first video frame is received from the camera after the
VideoSendStreamIsCreated, the encoder is reconfigured to the correct
dimensions. If we are using the Android hardware encoder, the default
configuration is set to encode from a memory buffer (use_surface=false).
4. When the frame is passed down to the encoder in
androidmediaencoder_jni.cc EncodeOnCodecThread(), it may be stored in
a texture instead of a memory buffer. In this case, yet another
reconfiguration takes place to enable encoding from a texture.
5. Even if the resolution and texture flag were known at the start of
the call, there would be a reconfiguration involved if the camera is
rotated (such as when making a call from a phone in portrait orientation).
The reason for that is that at construction time, WebRtcVideoEngine2
sets the VideoSinkWants structure parameter to request frames rotated
by the source; the early frames will then arrive in portrait resolution.
When the remote description is finally set, if the rotation RTP extension
is supported by the remote receiver, the source is asked to provide
non-rotated frames. The very next frame will then arrive in landscape
resolution with a non-zero rotation value to be applied by the receiver.
Since the encoder was configured with the last (portrait) frame size,
it's going to need to be reconfigured again.
The fix makes the following changes:
1. WebRtcVideoSendStream::OnFrame() now caches the last seen frame
dimensions, and whether the frame was stored in a texture.
2. When the encoder is configured the first time
(WebRtcVideoSendStream::SetCodec()) - the last seen frame dimensions
are used instead of the default dimensions.
3. A flag that indicates if encoding is to be done from a texture has
been added to the webrtc::VideoStream and webrtc::VideoCodec structs,
and it's been wired up to be passed down all the way to the JNI code in
androidmediaencoder_jni.cc.
4. MediaCodecVideoEncoder::InitEncode is now reading the is_surface
flag from the VideoCodec structure instead of guessing the default as
false. This way we end up with the correct encoder configuration the
first time around.
5. WebRtcVideoSendStream now takes an optimistic guess and requests non-
rotated frames when the supported RtpExtensions list is not available.
This makes the "early" frames arrive non-rotated, and the cached dimensions
will be correct for the common case when the rotation extension is supported.
If the other side is an older endpoint which does not support rotation,
the encoder will have to be reconfigured - but it's better to penalize the
uncommon case rather than the common one.
Review-Url: https://codereview.webrtc.org/2067103002
Cr-Commit-Position: refs/heads/master@{#13173}
2016-06-16 12:08:03 -07:00
|
|
|
CreateVideoEncoderConfig(codec_settings.codec);
|
2014-11-07 10:54:43 +00:00
|
|
|
|
2015-04-28 10:01:41 +02:00
|
|
|
encoder_config.encoder_specific_settings = ConfigureVideoEncoderSettings(
|
New flag is_screencast in VideoOptions.
This cl copies the value of cricket::VideoCapturer::IsScreencast into
a flag in VideoOptions. It is passed on via the chain
VideortpSender::SetVideoSend
WebRtcVideoChannel2::SetVideoSend
WebRtcVideoChannel2::SetOptions
WebRtcVideoChannel2::WebRtcVideoSendStream::SetOptions
Where it's used, in
WebRtcVideoChannel2::WebRtcVideoSendStream::OnFrame, we can look it up
in parameters_, instead of calling capturer_->IsScreencast().
Doesn't touch screencast logic related to cpu adaptation, since that
code is in flux in a different cl.
Also drop the is_screencast flag from the Dimensions struct, and drop separate options argument from ConfigureVideoEncoderSettings and SetCodecAndOptions, instead always using the options recorded in VideoSendStreamParameters::options.
In the tests, changed FakeVideoCapturer::is_screencast to be a construction time flag. Generally, unittests of screencast have to both use a capturer configured for screencast, and set the screencast flag using SetSendParameters. Since the automatic connection via VideoSource and VideoRtpSender isn't involved in the unit tests.
Note that using SetSendParameters to set the screencast flag doesn't make sense, since it's not per-stream. SetVideoSend would be more appropriate. That should be fixed if/when we drop VideoOptions from SetSendParameters.
BUG=webrtc:5426
R=pbos@webrtc.org, perkj@webrtc.org, pthatcher@webrtc.org
Review URL: https://codereview.webrtc.org/1711763003 .
Cr-Commit-Position: refs/heads/master@{#11837}
2016-03-02 11:41:36 +01:00
|
|
|
codec_settings.codec);
|
2014-07-22 16:29:54 +00:00
|
|
|
|
2016-08-16 02:40:55 -07:00
|
|
|
stream_->ReconfigureVideoEncoder(encoder_config);
|
2014-09-19 12:30:25 +00:00
|
|
|
|
|
|
|
|
encoder_config.encoder_specific_settings = NULL;
|
2014-07-22 16:29:54 +00:00
|
|
|
|
2016-08-16 02:40:55 -07:00
|
|
|
parameters_.encoder_config = encoder_config;
|
2014-05-13 11:07:01 +00:00
|
|
|
}
|
|
|
|
|
|
2016-03-22 15:42:00 -07:00
|
|
|
void WebRtcVideoChannel2::WebRtcVideoSendStream::SetSend(bool send) {
|
2014-07-29 17:36:52 +00:00
|
|
|
rtc::CritScope cs(&lock_);
|
2016-03-22 15:42:00 -07:00
|
|
|
sending_ = send;
|
|
|
|
|
UpdateSendState();
|
2014-05-13 11:07:01 +00:00
|
|
|
}
|
|
|
|
|
|
2016-02-29 00:04:41 -08:00
|
|
|
void WebRtcVideoChannel2::WebRtcVideoSendStream::OnLoadUpdate(Load load) {
|
|
|
|
|
if (worker_thread_ != rtc::Thread::Current()) {
|
|
|
|
|
invoker_.AsyncInvoke<void>(
|
2016-06-10 14:17:27 -07:00
|
|
|
RTC_FROM_HERE, worker_thread_,
|
2016-02-29 00:04:41 -08:00
|
|
|
rtc::Bind(&WebRtcVideoChannel2::WebRtcVideoSendStream::OnLoadUpdate,
|
|
|
|
|
this, load));
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
RTC_DCHECK(thread_checker_.CalledOnValidThread());
|
2016-04-08 02:23:55 -07:00
|
|
|
if (!source_) {
|
2016-02-29 00:04:41 -08:00
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
{
|
|
|
|
|
rtc::CritScope cs(&lock_);
|
New flag is_screencast in VideoOptions.
This cl copies the value of cricket::VideoCapturer::IsScreencast into
a flag in VideoOptions. It is passed on via the chain
VideortpSender::SetVideoSend
WebRtcVideoChannel2::SetVideoSend
WebRtcVideoChannel2::SetOptions
WebRtcVideoChannel2::WebRtcVideoSendStream::SetOptions
Where it's used, in
WebRtcVideoChannel2::WebRtcVideoSendStream::OnFrame, we can look it up
in parameters_, instead of calling capturer_->IsScreencast().
Doesn't touch screencast logic related to cpu adaptation, since that
code is in flux in a different cl.
Also drop the is_screencast flag from the Dimensions struct, and drop separate options argument from ConfigureVideoEncoderSettings and SetCodecAndOptions, instead always using the options recorded in VideoSendStreamParameters::options.
In the tests, changed FakeVideoCapturer::is_screencast to be a construction time flag. Generally, unittests of screencast have to both use a capturer configured for screencast, and set the screencast flag using SetSendParameters. Since the automatic connection via VideoSource and VideoRtpSender isn't involved in the unit tests.
Note that using SetSendParameters to set the screencast flag doesn't make sense, since it's not per-stream. SetVideoSend would be more appropriate. That should be fixed if/when we drop VideoOptions from SetSendParameters.
BUG=webrtc:5426
R=pbos@webrtc.org, perkj@webrtc.org, pthatcher@webrtc.org
Review URL: https://codereview.webrtc.org/1711763003 .
Cr-Commit-Position: refs/heads/master@{#11837}
2016-03-02 11:41:36 +01:00
|
|
|
LOG(LS_INFO) << "OnLoadUpdate " << load << ", is_screencast: "
|
|
|
|
|
<< (parameters_.options.is_screencast
|
|
|
|
|
? (*parameters_.options.is_screencast ? "true"
|
|
|
|
|
: "false")
|
|
|
|
|
: "unset");
|
2016-02-29 00:04:41 -08:00
|
|
|
// Do not adapt resolution for screen content as this will likely result in
|
|
|
|
|
// blurry and unreadable text.
|
New flag is_screencast in VideoOptions.
This cl copies the value of cricket::VideoCapturer::IsScreencast into
a flag in VideoOptions. It is passed on via the chain
VideortpSender::SetVideoSend
WebRtcVideoChannel2::SetVideoSend
WebRtcVideoChannel2::SetOptions
WebRtcVideoChannel2::WebRtcVideoSendStream::SetOptions
Where it's used, in
WebRtcVideoChannel2::WebRtcVideoSendStream::OnFrame, we can look it up
in parameters_, instead of calling capturer_->IsScreencast().
Doesn't touch screencast logic related to cpu adaptation, since that
code is in flux in a different cl.
Also drop the is_screencast flag from the Dimensions struct, and drop separate options argument from ConfigureVideoEncoderSettings and SetCodecAndOptions, instead always using the options recorded in VideoSendStreamParameters::options.
In the tests, changed FakeVideoCapturer::is_screencast to be a construction time flag. Generally, unittests of screencast have to both use a capturer configured for screencast, and set the screencast flag using SetSendParameters. Since the automatic connection via VideoSource and VideoRtpSender isn't involved in the unit tests.
Note that using SetSendParameters to set the screencast flag doesn't make sense, since it's not per-stream. SetVideoSend would be more appropriate. That should be fixed if/when we drop VideoOptions from SetSendParameters.
BUG=webrtc:5426
R=pbos@webrtc.org, perkj@webrtc.org, pthatcher@webrtc.org
Review URL: https://codereview.webrtc.org/1711763003 .
Cr-Commit-Position: refs/heads/master@{#11837}
2016-03-02 11:41:36 +01:00
|
|
|
if (parameters_.options.is_screencast.value_or(false))
|
2016-02-29 00:04:41 -08:00
|
|
|
return;
|
|
|
|
|
|
|
|
|
|
rtc::Optional<int> max_pixel_count;
|
|
|
|
|
rtc::Optional<int> max_pixel_count_step_up;
|
|
|
|
|
if (load == kOveruse) {
|
2016-04-05 15:23:49 +02:00
|
|
|
if (cpu_restricted_counter_ >= kMaxCpuDowngrades) {
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
// The input video frame size will have a resolution with less than or
|
2016-06-02 16:23:38 -07:00
|
|
|
// equal to |max_pixel_count| depending on how the source can scale the
|
2016-04-05 15:23:49 +02:00
|
|
|
// input frame size.
|
|
|
|
|
max_pixel_count = rtc::Optional<int>(
|
Avoid unnecessary HW video encoder reconfiguration
This change reduces the number of times the Android hardware video
encoder is reconfigured when making an outgoing call. With this change,
the encoder should only be initialized once as opposed to the ~3 times
it happens currently.
Before the fix, the following sequence of events caused the extra
reconfigurations:
1. After the SetLocalDescription call, the WebRtcVideoSendStream is created.
All frames from the camera are dropped until the corresponding
VideoSendStream is created.
2. SetRemoteDescription() triggers the VideoSendStream creation. At
this point, the encoder is configured for the first time, with the
frame dimensions set to a low resolution default (176x144).
3. When the first video frame is received from the camera after the
VideoSendStreamIsCreated, the encoder is reconfigured to the correct
dimensions. If we are using the Android hardware encoder, the default
configuration is set to encode from a memory buffer (use_surface=false).
4. When the frame is passed down to the encoder in
androidmediaencoder_jni.cc EncodeOnCodecThread(), it may be stored in
a texture instead of a memory buffer. In this case, yet another
reconfiguration takes place to enable encoding from a texture.
5. Even if the resolution and texture flag were known at the start of
the call, there would be a reconfiguration involved if the camera is
rotated (such as when making a call from a phone in portrait orientation).
The reason for that is that at construction time, WebRtcVideoEngine2
sets the VideoSinkWants structure parameter to request frames rotated
by the source; the early frames will then arrive in portrait resolution.
When the remote description is finally set, if the rotation RTP extension
is supported by the remote receiver, the source is asked to provide
non-rotated frames. The very next frame will then arrive in landscape
resolution with a non-zero rotation value to be applied by the receiver.
Since the encoder was configured with the last (portrait) frame size,
it's going to need to be reconfigured again.
The fix makes the following changes:
1. WebRtcVideoSendStream::OnFrame() now caches the last seen frame
dimensions, and whether the frame was stored in a texture.
2. When the encoder is configured the first time
(WebRtcVideoSendStream::SetCodec()) - the last seen frame dimensions
are used instead of the default dimensions.
3. A flag that indicates if encoding is to be done from a texture has
been added to the webrtc::VideoStream and webrtc::VideoCodec structs,
and it's been wired up to be passed down all the way to the JNI code in
androidmediaencoder_jni.cc.
4. MediaCodecVideoEncoder::InitEncode is now reading the is_surface
flag from the VideoCodec structure instead of guessing the default as
false. This way we end up with the correct encoder configuration the
first time around.
5. WebRtcVideoSendStream now takes an optimistic guess and requests non-
rotated frames when the supported RtpExtensions list is not available.
This makes the "early" frames arrive non-rotated, and the cached dimensions
will be correct for the common case when the rotation extension is supported.
If the other side is an older endpoint which does not support rotation,
the encoder will have to be reconfigured - but it's better to penalize the
uncommon case rather than the common one.
Review-Url: https://codereview.webrtc.org/2067103002
Cr-Commit-Position: refs/heads/master@{#13173}
2016-06-16 12:08:03 -07:00
|
|
|
(last_frame_info_.height * last_frame_info_.width * 3) / 5);
|
2016-02-29 00:04:41 -08:00
|
|
|
// Increase |number_of_cpu_adapt_changes_| if
|
|
|
|
|
// sink_wants_.max_pixel_count will be changed since
|
2016-06-02 16:23:38 -07:00
|
|
|
// last time |source_->AddOrUpdateSink| was called. That is, this will
|
|
|
|
|
// result in a new request for the source to change resolution.
|
2016-02-29 00:04:41 -08:00
|
|
|
if (!sink_wants_.max_pixel_count ||
|
|
|
|
|
*sink_wants_.max_pixel_count > *max_pixel_count) {
|
|
|
|
|
++number_of_cpu_adapt_changes_;
|
|
|
|
|
++cpu_restricted_counter_;
|
|
|
|
|
}
|
|
|
|
|
} else {
|
|
|
|
|
RTC_DCHECK(load == kUnderuse);
|
2016-04-05 15:23:49 +02:00
|
|
|
// The input video frame size will have a resolution with "one step up"
|
|
|
|
|
// pixels than |max_pixel_count_step_up| where "one step up" depends on
|
2016-06-02 16:23:38 -07:00
|
|
|
// how the source can scale the input frame size.
|
Avoid unnecessary HW video encoder reconfiguration
This change reduces the number of times the Android hardware video
encoder is reconfigured when making an outgoing call. With this change,
the encoder should only be initialized once as opposed to the ~3 times
it happens currently.
Before the fix, the following sequence of events caused the extra
reconfigurations:
1. After the SetLocalDescription call, the WebRtcVideoSendStream is created.
All frames from the camera are dropped until the corresponding
VideoSendStream is created.
2. SetRemoteDescription() triggers the VideoSendStream creation. At
this point, the encoder is configured for the first time, with the
frame dimensions set to a low resolution default (176x144).
3. When the first video frame is received from the camera after the
VideoSendStreamIsCreated, the encoder is reconfigured to the correct
dimensions. If we are using the Android hardware encoder, the default
configuration is set to encode from a memory buffer (use_surface=false).
4. When the frame is passed down to the encoder in
androidmediaencoder_jni.cc EncodeOnCodecThread(), it may be stored in
a texture instead of a memory buffer. In this case, yet another
reconfiguration takes place to enable encoding from a texture.
5. Even if the resolution and texture flag were known at the start of
the call, there would be a reconfiguration involved if the camera is
rotated (such as when making a call from a phone in portrait orientation).
The reason for that is that at construction time, WebRtcVideoEngine2
sets the VideoSinkWants structure parameter to request frames rotated
by the source; the early frames will then arrive in portrait resolution.
When the remote description is finally set, if the rotation RTP extension
is supported by the remote receiver, the source is asked to provide
non-rotated frames. The very next frame will then arrive in landscape
resolution with a non-zero rotation value to be applied by the receiver.
Since the encoder was configured with the last (portrait) frame size,
it's going to need to be reconfigured again.
The fix makes the following changes:
1. WebRtcVideoSendStream::OnFrame() now caches the last seen frame
dimensions, and whether the frame was stored in a texture.
2. When the encoder is configured the first time
(WebRtcVideoSendStream::SetCodec()) - the last seen frame dimensions
are used instead of the default dimensions.
3. A flag that indicates if encoding is to be done from a texture has
been added to the webrtc::VideoStream and webrtc::VideoCodec structs,
and it's been wired up to be passed down all the way to the JNI code in
androidmediaencoder_jni.cc.
4. MediaCodecVideoEncoder::InitEncode is now reading the is_surface
flag from the VideoCodec structure instead of guessing the default as
false. This way we end up with the correct encoder configuration the
first time around.
5. WebRtcVideoSendStream now takes an optimistic guess and requests non-
rotated frames when the supported RtpExtensions list is not available.
This makes the "early" frames arrive non-rotated, and the cached dimensions
will be correct for the common case when the rotation extension is supported.
If the other side is an older endpoint which does not support rotation,
the encoder will have to be reconfigured - but it's better to penalize the
uncommon case rather than the common one.
Review-Url: https://codereview.webrtc.org/2067103002
Cr-Commit-Position: refs/heads/master@{#13173}
2016-06-16 12:08:03 -07:00
|
|
|
max_pixel_count_step_up =
|
|
|
|
|
rtc::Optional<int>(last_frame_info_.height * last_frame_info_.width);
|
2016-02-29 00:04:41 -08:00
|
|
|
// Increase |number_of_cpu_adapt_changes_| if
|
|
|
|
|
// sink_wants_.max_pixel_count_step_up will be changed since
|
2016-06-02 16:23:38 -07:00
|
|
|
// last time |source_->AddOrUpdateSink| was called. That is, this will
|
|
|
|
|
// result in a new request for the source to change resolution.
|
2016-02-29 00:04:41 -08:00
|
|
|
if (sink_wants_.max_pixel_count ||
|
|
|
|
|
(sink_wants_.max_pixel_count_step_up &&
|
|
|
|
|
*sink_wants_.max_pixel_count_step_up < *max_pixel_count_step_up)) {
|
|
|
|
|
++number_of_cpu_adapt_changes_;
|
|
|
|
|
--cpu_restricted_counter_;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
sink_wants_.max_pixel_count = max_pixel_count;
|
|
|
|
|
sink_wants_.max_pixel_count_step_up = max_pixel_count_step_up;
|
|
|
|
|
}
|
2016-04-08 02:23:55 -07:00
|
|
|
// |source_->AddOrUpdateSink| may not be called while holding |lock_| since
|
2016-03-10 18:32:00 +01:00
|
|
|
// that might cause a lock order inversion.
|
2016-04-08 02:23:55 -07:00
|
|
|
source_->AddOrUpdateSink(this, sink_wants_);
|
2016-02-29 00:04:41 -08:00
|
|
|
}
|
|
|
|
|
|
2016-08-11 08:41:18 -07:00
|
|
|
// Collects per-sender statistics (SSRCs, codec name, bitrates, frame sizes,
// RTP/RTCP counters and CPU-adaptation state) into a VideoSenderInfo.
// |log_stats| additionally logs the raw stream stats at INFO level.
// The configuration snapshot and stream_->GetStats() call happen under
// |lock_|; the remaining aggregation is done lock-free on local copies.
VideoSenderInfo WebRtcVideoChannel2::WebRtcVideoSendStream::GetVideoSenderInfo(
    bool log_stats) {
  VideoSenderInfo info;
  webrtc::VideoSendStream::Stats stats;
  RTC_DCHECK(thread_checker_.CalledOnValidThread());
  {
    rtc::CritScope cs(&lock_);
    for (uint32_t ssrc : parameters_.config.rtp.ssrcs)
      info.add_ssrc(ssrc);

    if (parameters_.codec_settings)
      info.codec_name = parameters_.codec_settings->codec.name;
    // Preferred bitrate: target bitrate of every stream except the last,
    // plus the max bitrate of the last (highest) stream.
    for (size_t i = 0; i < parameters_.encoder_config.streams.size(); ++i) {
      if (i == parameters_.encoder_config.streams.size() - 1) {
        info.preferred_bitrate +=
            parameters_.encoder_config.streams[i].max_bitrate_bps;
      } else {
        info.preferred_bitrate +=
            parameters_.encoder_config.streams[i].target_bitrate_bps;
      }
    }

    // Without an underlying send stream there are no runtime stats to add.
    if (stream_ == NULL)
      return info;

    stats = stream_->GetStats();
  }

  if (log_stats)
    LOG(LS_INFO) << stats.ToString(rtc::TimeMillis());

  info.adapt_changes = number_of_cpu_adapt_changes_;
  info.adapt_reason =
      cpu_restricted_counter_ <= 0 ? ADAPTREASON_NONE : ADAPTREASON_CPU;

  // Get bandwidth limitation info from stream_->GetStats().
  // Input resolution (output from video_adapter) can be further scaled down or
  // higher video layer(s) can be dropped due to bitrate constraints.
  // Note, adapt_changes only include changes from the video_adapter.
  if (stats.bw_limited_resolution)
    info.adapt_reason |= ADAPTREASON_BANDWIDTH;

  info.encoder_implementation_name = stats.encoder_implementation_name;
  info.ssrc_groups = ssrc_groups_;
  info.framerate_input = stats.input_frame_rate;
  info.framerate_sent = stats.encode_frame_rate;
  info.avg_encode_ms = stats.avg_encode_time_ms;
  info.encode_usage_percent = stats.encode_usage_percent;

  info.nominal_bitrate = stats.media_bitrate_bps;

  // Report the largest width/height across all substreams.
  info.send_frame_width = 0;
  info.send_frame_height = 0;
  for (std::map<uint32_t, webrtc::VideoSendStream::StreamStats>::iterator it =
           stats.substreams.begin();
       it != stats.substreams.end(); ++it) {
    // TODO(pbos): Wire up additional stats, such as padding bytes.
    webrtc::VideoSendStream::StreamStats stream_stats = it->second;
    info.bytes_sent += stream_stats.rtp_stats.transmitted.payload_bytes +
                       stream_stats.rtp_stats.transmitted.header_bytes +
                       stream_stats.rtp_stats.transmitted.padding_bytes;
    info.packets_sent += stream_stats.rtp_stats.transmitted.packets;
    info.packets_lost += stream_stats.rtcp_stats.cumulative_lost;
    if (stream_stats.width > info.send_frame_width)
      info.send_frame_width = stream_stats.width;
    if (stream_stats.height > info.send_frame_height)
      info.send_frame_height = stream_stats.height;
    info.firs_rcvd += stream_stats.rtcp_packet_type_counts.fir_packets;
    info.nacks_rcvd += stream_stats.rtcp_packet_type_counts.nack_packets;
    info.plis_rcvd += stream_stats.rtcp_packet_type_counts.pli_packets;
  }

  if (!stats.substreams.empty()) {
    // TODO(pbos): Report fraction lost per SSRC.
    webrtc::VideoSendStream::StreamStats first_stream_stats =
        stats.substreams.begin()->second;
    // fraction_lost is reported in Q8 fixed point; convert to [0, 1).
    info.fraction_lost =
        static_cast<float>(first_stream_stats.rtcp_stats.fraction_lost) /
        (1 << 8);
  }

  return info;
}
|
|
|
|
|
|
2014-11-05 14:05:29 +00:00
|
|
|
// Accumulates this stream's bitrate statistics into |bwe_info|. The fields
// are incremented (+=), so the caller can aggregate across multiple send
// streams. No-op when the underlying send stream does not exist yet.
void WebRtcVideoChannel2::WebRtcVideoSendStream::FillBandwidthEstimationInfo(
    BandwidthEstimationInfo* bwe_info) {
  rtc::CritScope cs(&lock_);
  if (stream_ == NULL) {
    return;
  }
  webrtc::VideoSendStream::Stats stats = stream_->GetStats();
  // Sum transmit and retransmit bitrates over all substreams (simulcast/RTX).
  for (std::map<uint32_t, webrtc::VideoSendStream::StreamStats>::iterator it =
           stats.substreams.begin();
       it != stats.substreams.end(); ++it) {
    bwe_info->transmit_bitrate += it->second.total_bitrate_bps;
    bwe_info->retransmit_bitrate += it->second.retransmit_bitrate_bps;
  }
  bwe_info->target_enc_bitrate += stats.target_media_bitrate_bps;
  bwe_info->actual_enc_bitrate += stats.media_bitrate_bps;
}
|
|
|
|
|
|
2014-05-13 11:07:01 +00:00
|
|
|
void WebRtcVideoChannel2::WebRtcVideoSendStream::RecreateWebRtcStream() {
|
|
|
|
|
if (stream_ != NULL) {
|
|
|
|
|
call_->DestroyVideoSendStream(stream_);
|
|
|
|
|
}
|
2014-06-06 10:49:19 +00:00
|
|
|
|
2015-10-30 02:47:38 -07:00
|
|
|
RTC_CHECK(parameters_.codec_settings);
|
New flag is_screencast in VideoOptions.
This cl copies the value of cricket::VideoCapturer::IsScreencast into
a flag in VideoOptions. It is passed on via the chain
VideortpSender::SetVideoSend
WebRtcVideoChannel2::SetVideoSend
WebRtcVideoChannel2::SetOptions
WebRtcVideoChannel2::WebRtcVideoSendStream::SetOptions
Where it's used, in
WebRtcVideoChannel2::WebRtcVideoSendStream::OnFrame, we can look it up
in parameters_, instead of calling capturer_->IsScreencast().
Doesn't touch screencast logic related to cpu adaptation, since that
code is in flux in a different cl.
Also drop the is_screencast flag from the Dimensions struct, and drop separate options argument from ConfigureVideoEncoderSettings and SetCodecAndOptions, instead always using the options recorded in VideoSendStreamParameters::options.
In the tests, changed FakeVideoCapturer::is_screencast to be a construction time flag. Generally, unittests of screencast have to both use a capturer configured for screencast, and set the screencast flag using SetSendParameters. Since the automatic connection via VideoSource and VideoRtpSender isn't involved in the unit tests.
Note that using SetSendParameters to set the screencast flag doesn't make sense, since it's not per-stream. SetVideoSend would be more appropriate. That should be fixed if/when we drop VideoOptions from SetSendParameters.
BUG=webrtc:5426
R=pbos@webrtc.org, perkj@webrtc.org, pthatcher@webrtc.org
Review URL: https://codereview.webrtc.org/1711763003 .
Cr-Commit-Position: refs/heads/master@{#11837}
2016-03-02 11:41:36 +01:00
|
|
|
RTC_DCHECK_EQ((parameters_.encoder_config.content_type ==
|
|
|
|
|
webrtc::VideoEncoderConfig::ContentType::kScreen),
|
|
|
|
|
parameters_.options.is_screencast.value_or(false))
|
|
|
|
|
<< "encoder content type inconsistent with screencast option";
|
2014-09-19 12:30:25 +00:00
|
|
|
parameters_.encoder_config.encoder_specific_settings =
|
New flag is_screencast in VideoOptions.
This cl copies the value of cricket::VideoCapturer::IsScreencast into
a flag in VideoOptions. It is passed on via the chain
VideortpSender::SetVideoSend
WebRtcVideoChannel2::SetVideoSend
WebRtcVideoChannel2::SetOptions
WebRtcVideoChannel2::WebRtcVideoSendStream::SetOptions
Where it's used, in
WebRtcVideoChannel2::WebRtcVideoSendStream::OnFrame, we can look it up
in parameters_, instead of calling capturer_->IsScreencast().
Doesn't touch screencast logic related to cpu adaptation, since that
code is in flux in a different cl.
Also drop the is_screencast flag from the Dimensions struct, and drop separate options argument from ConfigureVideoEncoderSettings and SetCodecAndOptions, instead always using the options recorded in VideoSendStreamParameters::options.
In the tests, changed FakeVideoCapturer::is_screencast to be a construction time flag. Generally, unittests of screencast have to both use a capturer configured for screencast, and set the screencast flag using SetSendParameters. Since the automatic connection via VideoSource and VideoRtpSender isn't involved in the unit tests.
Note that using SetSendParameters to set the screencast flag doesn't make sense, since it's not per-stream. SetVideoSend would be more appropriate. That should be fixed if/when we drop VideoOptions from SetSendParameters.
BUG=webrtc:5426
R=pbos@webrtc.org, perkj@webrtc.org, pthatcher@webrtc.org
Review URL: https://codereview.webrtc.org/1711763003 .
Cr-Commit-Position: refs/heads/master@{#11837}
2016-03-02 11:41:36 +01:00
|
|
|
ConfigureVideoEncoderSettings(parameters_.codec_settings->codec);
|
2014-09-19 12:30:25 +00:00
|
|
|
|
2016-08-16 02:40:55 -07:00
|
|
|
webrtc::VideoSendStream::Config config = parameters_.config;
|
2015-03-20 19:52:56 +00:00
|
|
|
if (!config.rtp.rtx.ssrcs.empty() && config.rtp.rtx.payload_type == -1) {
|
|
|
|
|
LOG(LS_WARNING) << "RTX SSRCs configured but there's no configured RTX "
|
|
|
|
|
"payload type the set codec. Ignoring RTX.";
|
|
|
|
|
config.rtp.rtx.ssrcs.clear();
|
|
|
|
|
}
|
2016-08-16 02:40:55 -07:00
|
|
|
stream_ = call_->CreateVideoSendStream(config, parameters_.encoder_config);
|
2014-07-22 16:29:54 +00:00
|
|
|
|
2014-09-19 12:30:25 +00:00
|
|
|
parameters_.encoder_config.encoder_specific_settings = NULL;
|
2016-01-27 16:45:21 +01:00
|
|
|
pending_encoder_reconfiguration_ = false;
|
2014-07-22 16:29:54 +00:00
|
|
|
|
2014-05-13 11:07:01 +00:00
|
|
|
if (sending_) {
|
|
|
|
|
stream_->Start();
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2014-07-18 09:35:58 +00:00
|
|
|
// Constructs a receive stream wrapper: takes ownership of |config| (moved),
// configures decoders from |recv_codecs|, and immediately creates and starts
// the underlying webrtc::VideoReceiveStream. |call| and
// |external_decoder_factory| are borrowed, not owned.
WebRtcVideoChannel2::WebRtcVideoReceiveStream::WebRtcVideoReceiveStream(
    webrtc::Call* call,
    const StreamParams& sp,
    webrtc::VideoReceiveStream::Config config,
    WebRtcVideoDecoderFactory* external_decoder_factory,
    bool default_stream,
    const std::vector<VideoCodecSettings>& recv_codecs,
    bool red_disabled_by_remote_side)
    : call_(call),
      stream_params_(sp),
      stream_(NULL),
      default_stream_(default_stream),
      config_(std::move(config)),
      red_disabled_by_remote_side_(red_disabled_by_remote_side),
      external_decoder_factory_(external_decoder_factory),
      sink_(NULL),
      first_frame_timestamp_(-1),  // -1 = no frame received yet; see OnFrame().
      estimated_remote_start_ntp_time_ms_(0) {
  // Decoded frames are delivered to this object (OnFrame) and forwarded to
  // |sink_|.
  config_.renderer = this;
  std::vector<AllocatedDecoder> old_decoders;
  ConfigureCodecs(recv_codecs, &old_decoders);
  RecreateWebRtcStream();
  // On first configuration there are no previous decoders to reuse/release.
  RTC_DCHECK(old_decoders.empty());
}
|
|
|
|
|
|
2015-05-18 19:42:03 +02:00
|
|
|
// Bookkeeping record for a decoder handed to a VideoReceiveStream. For
// external (factory-created) decoders, the raw decoder is remembered in
// |external_decoder| and |decoder| is replaced by a software-fallback
// wrapper (owned here via raw pointer; released in ClearDecoders()).
WebRtcVideoChannel2::WebRtcVideoReceiveStream::AllocatedDecoder::
    AllocatedDecoder(webrtc::VideoDecoder* decoder,
                     webrtc::VideoCodecType type,
                     bool external)
    : decoder(decoder),
      external_decoder(nullptr),
      type(type),
      external(external) {
  if (external) {
    external_decoder = decoder;
    // Wrap so decoding can fall back to software if the external decoder
    // fails to initialize or decode.
    this->decoder =
        new webrtc::VideoDecoderSoftwareFallbackWrapper(type, external_decoder);
  }
}
|
|
|
|
|
|
2014-07-18 09:35:58 +00:00
|
|
|
// Tears down the underlying webrtc::VideoReceiveStream and releases every
// decoder allocated for it (external decoders go back to the factory).
WebRtcVideoChannel2::WebRtcVideoReceiveStream::~WebRtcVideoReceiveStream() {
  call_->DestroyVideoReceiveStream(stream_);
  ClearDecoders(&allocated_decoders_);
}
|
|
|
|
|
|
Use suffixed {uint,int}{8,16,32,64}_t types.
Removes the use of uint8, etc. in favor of uint8_t.
BUG=webrtc:5024
R=henrik.lundin@webrtc.org, henrikg@webrtc.org, perkj@webrtc.org, solenberg@webrtc.org, stefan@webrtc.org, tina.legrand@webrtc.org
Review URL: https://codereview.webrtc.org/1362503003 .
Cr-Commit-Position: refs/heads/master@{#10196}
2015-10-07 12:23:21 +02:00
|
|
|
// Returns the SSRC list from this stream's signaled StreamParams.
const std::vector<uint32_t>&
WebRtcVideoChannel2::WebRtcVideoReceiveStream::GetSsrcs() const {
  return stream_params_.ssrcs;
}
|
|
|
|
|
|
|
|
|
|
// Returns the first primary (non-RTX) SSRC of this stream, or an empty
// Optional (with a warning) when StreamParams yields no primary SSRCs.
rtc::Optional<uint32_t>
WebRtcVideoChannel2::WebRtcVideoReceiveStream::GetFirstPrimarySsrc() const {
  std::vector<uint32_t> ssrcs;
  stream_params_.GetPrimarySsrcs(&ssrcs);

  if (ssrcs.empty()) {
    LOG(LS_WARNING) << "Empty primary ssrcs vector, returning empty optional";
    return rtc::Optional<uint32_t>();
  }
  return rtc::Optional<uint32_t>(ssrcs.front());
}
|
|
|
|
|
|
2014-11-03 14:46:44 +00:00
|
|
|
// Returns a decoder for |codec|, preferring (in order): a previously
// allocated decoder of the same type taken out of |old_decoders|, an external
// factory decoder, a built-in software decoder, and finally an
// "unsupported codec" placeholder decoder.
WebRtcVideoChannel2::WebRtcVideoReceiveStream::AllocatedDecoder
WebRtcVideoChannel2::WebRtcVideoReceiveStream::CreateOrReuseVideoDecoder(
    std::vector<AllocatedDecoder>* old_decoders,
    const VideoCodec& codec) {
  const webrtc::VideoCodecType type = CodecTypeFromName(codec.name);

  // Reuse an existing decoder of the same type if possible, removing it from
  // |old_decoders| by swapping with the last element.
  for (size_t index = 0; index < old_decoders->size(); ++index) {
    if ((*old_decoders)[index].type != type)
      continue;
    AllocatedDecoder reused = (*old_decoders)[index];
    (*old_decoders)[index] = old_decoders->back();
    old_decoders->pop_back();
    return reused;
  }

  if (external_decoder_factory_ != NULL) {
    webrtc::VideoDecoder* external =
        external_decoder_factory_->CreateVideoDecoderWithParams(
            type, {stream_params_.id});
    if (external != NULL)
      return AllocatedDecoder(external, type, true);
  }

  switch (type) {
    case webrtc::kVideoCodecVP8:
      return AllocatedDecoder(
          webrtc::VideoDecoder::Create(webrtc::VideoDecoder::kVp8), type,
          false);
    case webrtc::kVideoCodecVP9:
      return AllocatedDecoder(
          webrtc::VideoDecoder::Create(webrtc::VideoDecoder::kVp9), type,
          false);
    case webrtc::kVideoCodecH264:
      return AllocatedDecoder(
          webrtc::VideoDecoder::Create(webrtc::VideoDecoder::kH264), type,
          false);
    default:
      return AllocatedDecoder(
          webrtc::VideoDecoder::Create(webrtc::VideoDecoder::kUnsupportedCodec),
          webrtc::kVideoCodecUnknown, false);
  }
}
|
|
|
|
|
|
2016-08-05 09:19:25 -07:00
|
|
|
// Copies codec-specific receive settings from |recv_video_codec| into
// |decoder|. Currently only H264 is handled: the "sprop-parameter-sets"
// fmtp parameter, when present, is forwarded to the decoder settings.
void ConfigureDecoderSpecifics(webrtc::VideoReceiveStream::Decoder* decoder,
                               const cricket::VideoCodec& recv_video_codec) {
  if (recv_video_codec.name.compare("H264") != 0)
    return;
  auto param = recv_video_codec.params.find("sprop-parameter-sets");
  if (param == recv_video_codec.params.end())
    return;
  decoder->decoder_specific.h264_extra_settings =
      rtc::Optional<webrtc::VideoDecoderH264Settings>(
          webrtc::VideoDecoderH264Settings());
  decoder->decoder_specific.h264_extra_settings->sprop_parameter_sets =
      param->second;
}
|
|
|
|
|
|
2016-01-28 15:58:41 -08:00
|
|
|
void WebRtcVideoChannel2::WebRtcVideoReceiveStream::ConfigureCodecs(
|
|
|
|
|
const std::vector<VideoCodecSettings>& recv_codecs,
|
|
|
|
|
std::vector<AllocatedDecoder>* old_decoders) {
|
|
|
|
|
*old_decoders = allocated_decoders_;
|
2014-11-03 14:46:44 +00:00
|
|
|
allocated_decoders_.clear();
|
|
|
|
|
config_.decoders.clear();
|
|
|
|
|
for (size_t i = 0; i < recv_codecs.size(); ++i) {
|
|
|
|
|
AllocatedDecoder allocated_decoder =
|
2016-01-28 15:58:41 -08:00
|
|
|
CreateOrReuseVideoDecoder(old_decoders, recv_codecs[i].codec);
|
2014-11-03 14:46:44 +00:00
|
|
|
allocated_decoders_.push_back(allocated_decoder);
|
2014-07-18 09:35:58 +00:00
|
|
|
|
2014-11-03 14:46:44 +00:00
|
|
|
webrtc::VideoReceiveStream::Decoder decoder;
|
|
|
|
|
decoder.decoder = allocated_decoder.decoder;
|
|
|
|
|
decoder.payload_type = recv_codecs[i].codec.id;
|
|
|
|
|
decoder.payload_name = recv_codecs[i].codec.name;
|
2016-08-05 09:19:25 -07:00
|
|
|
ConfigureDecoderSpecifics(&decoder, recv_codecs[i].codec);
|
2014-11-03 14:46:44 +00:00
|
|
|
config_.decoders.push_back(decoder);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// TODO(pbos): Reconfigure RTX based on incoming recv_codecs.
|
|
|
|
|
config_.rtp.fec = recv_codecs.front().fec;
|
2014-07-25 19:01:32 +00:00
|
|
|
config_.rtp.nack.rtp_history_ms =
|
2015-04-21 20:24:50 +08:00
|
|
|
HasNack(recv_codecs.begin()->codec) ? kNackHistoryMs : 0;
|
2014-07-18 09:35:58 +00:00
|
|
|
}
|
|
|
|
|
|
2015-05-22 18:48:36 +02:00
|
|
|
// Sets the local SSRC used for RTCP on this receive stream and recreates the
// underlying stream so it takes effect. Skipped when |local_ssrc| equals the
// stream's remote SSRC (see the TODO below for why this case exists at all).
void WebRtcVideoChannel2::WebRtcVideoReceiveStream::SetLocalSsrc(
    uint32_t local_ssrc) {
  // TODO(pbos): Consider turning this sanity check into a RTC_DCHECK. You
  // should not be able to create a sender with the same SSRC as a receiver, but
  // right now this can't be done due to unittests depending on receiving what
  // they are sending from the same MediaChannel.
  if (local_ssrc == config_.rtp.remote_ssrc) {
    LOG(LS_INFO) << "Ignoring call to SetLocalSsrc because parameters are "
                    "unchanged; local_ssrc=" << local_ssrc;
    return;
  }

  config_.rtp.local_ssrc = local_ssrc;
  LOG(LS_INFO)
      << "RecreateWebRtcStream (recv) because of SetLocalSsrc; local_ssrc="
      << local_ssrc;
  // Config changes only apply on stream (re)creation.
  RecreateWebRtcStream();
}
|
|
|
|
|
|
2015-11-20 18:05:48 -08:00
|
|
|
// Applies RTCP feedback configuration (NACK history, REMB, transport-wide CC,
// RTCP mode) and recreates the underlying stream when any value changed.
// Recreation is skipped entirely when all four values are already current.
void WebRtcVideoChannel2::WebRtcVideoReceiveStream::SetFeedbackParameters(
    bool nack_enabled,
    bool remb_enabled,
    bool transport_cc_enabled,
    webrtc::RtcpMode rtcp_mode) {
  // NACK is expressed in the config as a history length; 0 disables it.
  int nack_history_ms = nack_enabled ? kNackHistoryMs : 0;
  if (config_.rtp.nack.rtp_history_ms == nack_history_ms &&
      config_.rtp.remb == remb_enabled &&
      config_.rtp.transport_cc == transport_cc_enabled &&
      config_.rtp.rtcp_mode == rtcp_mode) {
    LOG(LS_INFO)
        << "Ignoring call to SetFeedbackParameters because parameters are "
           "unchanged; nack="
        << nack_enabled << ", remb=" << remb_enabled
        << ", transport_cc=" << transport_cc_enabled;
    return;
  }
  config_.rtp.remb = remb_enabled;
  config_.rtp.nack.rtp_history_ms = nack_history_ms;
  config_.rtp.transport_cc = transport_cc_enabled;
  config_.rtp.rtcp_mode = rtcp_mode;
  LOG(LS_INFO)
      << "RecreateWebRtcStream (recv) because of SetFeedbackParameters; nack="
      << nack_enabled << ", remb=" << remb_enabled
      << ", transport_cc=" << transport_cc_enabled;
  // Config changes only apply on stream (re)creation.
  RecreateWebRtcStream();
}
|
|
|
|
|
|
2015-12-09 12:37:51 -08:00
|
|
|
void WebRtcVideoChannel2::WebRtcVideoReceiveStream::SetRecvParameters(
|
2016-01-28 15:58:41 -08:00
|
|
|
const ChangedRecvParameters& params) {
|
|
|
|
|
bool needs_recreation = false;
|
|
|
|
|
std::vector<AllocatedDecoder> old_decoders;
|
|
|
|
|
if (params.codec_settings) {
|
|
|
|
|
ConfigureCodecs(*params.codec_settings, &old_decoders);
|
|
|
|
|
needs_recreation = true;
|
|
|
|
|
}
|
|
|
|
|
if (params.rtp_header_extensions) {
|
|
|
|
|
config_.rtp.extensions = *params.rtp_header_extensions;
|
|
|
|
|
needs_recreation = true;
|
|
|
|
|
}
|
|
|
|
|
if (needs_recreation) {
|
|
|
|
|
LOG(LS_INFO) << "RecreateWebRtcStream (recv) because of SetRecvParameters";
|
|
|
|
|
RecreateWebRtcStream();
|
|
|
|
|
ClearDecoders(&old_decoders);
|
|
|
|
|
}
|
2015-12-09 12:37:51 -08:00
|
|
|
}
|
|
|
|
|
|
2014-07-18 09:35:58 +00:00
|
|
|
// Destroys the current webrtc::VideoReceiveStream (if any) and creates a new
// one from a copy of |config_|, then starts it. When the remote side has
// disabled RED, all FEC payload types are stripped from the copy so the new
// stream ignores FEC.
void WebRtcVideoChannel2::WebRtcVideoReceiveStream::RecreateWebRtcStream() {
  if (stream_ != NULL) {
    call_->DestroyVideoReceiveStream(stream_);
  }
  // Work on a copy so |config_| keeps the full FEC configuration for when
  // RED is re-enabled.
  webrtc::VideoReceiveStream::Config config = config_.Copy();
  if (red_disabled_by_remote_side_) {
    config.rtp.fec.red_payload_type = -1;
    config.rtp.fec.ulpfec_payload_type = -1;
    config.rtp.fec.red_rtx_payload_type = -1;
  }
  stream_ = call_->CreateVideoReceiveStream(std::move(config));
  stream_->Start();
}
|
|
|
|
|
|
2014-11-03 14:46:44 +00:00
|
|
|
void WebRtcVideoChannel2::WebRtcVideoReceiveStream::ClearDecoders(
|
|
|
|
|
std::vector<AllocatedDecoder>* allocated_decoders) {
|
|
|
|
|
for (size_t i = 0; i < allocated_decoders->size(); ++i) {
|
|
|
|
|
if ((*allocated_decoders)[i].external) {
|
2014-10-29 15:28:39 +00:00
|
|
|
external_decoder_factory_->DestroyVideoDecoder(
|
2015-05-18 19:42:03 +02:00
|
|
|
(*allocated_decoders)[i].external_decoder);
|
2014-10-29 15:28:39 +00:00
|
|
|
}
|
2015-05-18 19:42:03 +02:00
|
|
|
delete (*allocated_decoders)[i].decoder;
|
2014-10-29 15:28:39 +00:00
|
|
|
}
|
2014-11-03 14:46:44 +00:00
|
|
|
allocated_decoders->clear();
|
2014-10-29 15:28:39 +00:00
|
|
|
}
|
|
|
|
|
|
2016-03-21 01:27:56 -07:00
|
|
|
// Delivers a decoded frame to the registered sink. Also maintains an
// estimate of the remote capture start time in NTP: the RTP timestamp delta
// since the first frame (wraparound-unwrapped) is converted to milliseconds
// and subtracted from the frame's NTP time.
void WebRtcVideoChannel2::WebRtcVideoReceiveStream::OnFrame(
    const webrtc::VideoFrame& frame) {
  rtc::CritScope crit(&sink_lock_);

  // Remember the first RTP timestamp as the reference point.
  if (first_frame_timestamp_ < 0)
    first_frame_timestamp_ = frame.timestamp();
  int64_t rtp_time_elapsed_since_first_frame =
      (timestamp_wraparound_handler_.Unwrap(frame.timestamp()) -
       first_frame_timestamp_);
  // RTP ticks -> ms using the video clock rate (ticks per second / 1000).
  int64_t elapsed_time_ms = rtp_time_elapsed_since_first_frame /
                            (cricket::kVideoCodecClockrate / 1000);
  // ntp_time_ms() <= 0 means no NTP mapping is available for this frame yet.
  if (frame.ntp_time_ms() > 0)
    estimated_remote_start_ntp_time_ms_ = frame.ntp_time_ms() - elapsed_time_ms;

  if (sink_ == NULL) {
    LOG(LS_WARNING) << "VideoReceiveStream not connected to a VideoSink.";
    return;
  }

  // Re-wrap as a cricket frame; render time is converted from ms to ns.
  WebRtcVideoFrame render_frame(
      frame.video_frame_buffer(), frame.rotation(),
      frame.render_time_ms() * rtc::kNumNanosecsPerMicrosec, frame.timestamp());
  sink_->OnFrame(render_frame);
}
|
|
|
|
|
|
2015-03-06 15:35:19 +00:00
|
|
|
// True when this stream was created as the default (unsignaled) receive
// stream; see the |default_stream| constructor argument.
bool WebRtcVideoChannel2::WebRtcVideoReceiveStream::IsDefaultStream() const {
  return default_stream_;
}
|
|
|
|
|
|
2016-01-28 04:47:08 -08:00
|
|
|
// Installs (or clears, with null) the sink that receives decoded frames.
// Guarded by |sink_lock_| because OnFrame() reads |sink_| under the same
// lock.
void WebRtcVideoChannel2::WebRtcVideoReceiveStream::SetSink(
    rtc::VideoSinkInterface<cricket::VideoFrame>* sink) {
  rtc::CritScope crit(&sink_lock_);
  sink_ = sink;
}
|
|
|
|
|
|
2015-08-28 07:35:32 -07:00
|
|
|
std::string
|
|
|
|
|
WebRtcVideoChannel2::WebRtcVideoReceiveStream::GetCodecNameFromPayloadType(
|
|
|
|
|
int payload_type) {
|
|
|
|
|
for (const webrtc::VideoReceiveStream::Decoder& decoder : config_.decoders) {
|
|
|
|
|
if (decoder.payload_type == payload_type) {
|
|
|
|
|
return decoder.payload_name;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
return "";
|
|
|
|
|
}
|
|
|
|
|
|
2014-07-18 11:11:55 +00:00
|
|
|
// Builds a VideoReceiverInfo snapshot from the underlying stream's stats:
// RTP/RTCP counters, frame rates and dimensions, decode/delay timings, codec
// name and RTCP feedback counts. When |log_stats| is set, the raw stats are
// also written to the log.
VideoReceiverInfo
WebRtcVideoChannel2::WebRtcVideoReceiveStream::GetVideoReceiverInfo(
    bool log_stats) {
  VideoReceiverInfo info;
  info.ssrc_groups = stream_params_.ssrc_groups;
  info.add_ssrc(config_.rtp.remote_ssrc);
  webrtc::VideoReceiveStream::Stats stats = stream_->GetStats();
  info.decoder_implementation_name = stats.decoder_implementation_name;
  // bytes_rcvd covers the whole packet: payload + header + padding.
  info.bytes_rcvd = stats.rtp_stats.transmitted.payload_bytes +
                    stats.rtp_stats.transmitted.header_bytes +
                    stats.rtp_stats.transmitted.padding_bytes;
  info.packets_rcvd = stats.rtp_stats.transmitted.packets;
  info.packets_lost = stats.rtcp_stats.cumulative_lost;
  // fraction_lost is reported in Q8 (RFC 3550); convert to [0, 1).
  info.fraction_lost =
      static_cast<float>(stats.rtcp_stats.fraction_lost) / (1 << 8);

  info.framerate_rcvd = stats.network_frame_rate;
  info.framerate_decoded = stats.decode_frame_rate;
  info.framerate_output = stats.render_frame_rate;
  info.frame_width = stats.width;
  info.frame_height = stats.height;

  {
    // |estimated_remote_start_ntp_time_ms_| is written in OnFrame() under
    // the same lock.
    rtc::CritScope frame_cs(&sink_lock_);
    info.capture_start_ntp_time_ms = estimated_remote_start_ntp_time_ms_;
  }

  info.decode_ms = stats.decode_ms;
  info.max_decode_ms = stats.max_decode_ms;
  info.current_delay_ms = stats.current_delay_ms;
  info.target_delay_ms = stats.target_delay_ms;
  info.jitter_buffer_ms = stats.jitter_buffer_ms;
  info.min_playout_delay_ms = stats.min_playout_delay_ms;
  info.render_delay_ms = stats.render_delay_ms;

  info.codec_name = GetCodecNameFromPayloadType(stats.current_payload_type);

  // RTCP feedback this receiver has sent to the remote sender.
  info.firs_sent = stats.rtcp_packet_type_counts.fir_packets;
  info.plis_sent = stats.rtcp_packet_type_counts.pli_packets;
  info.nacks_sent = stats.rtcp_packet_type_counts.nack_packets;

  if (log_stats)
    LOG(LS_INFO) << stats.ToString(rtc::TimeMillis());

  return info;
}
|
|
|
|
|
|
2016-05-17 16:33:30 +02:00
|
|
|
// Records whether the remote side has disabled RED/FEC and recreates the
// underlying stream; RecreateWebRtcStream() strips the FEC payload types
// from the config copy when |disable| is true.
void WebRtcVideoChannel2::WebRtcVideoReceiveStream::SetFecDisabledRemotely(
    bool disable) {
  red_disabled_by_remote_side_ = disable;
  RecreateWebRtcStream();
}
|
|
|
|
|
|
2014-05-13 11:07:01 +00:00
|
|
|
// -1 means "no RTX payload type configured"; MapCodecs() fills it in when an
// RTX codec is associated with this video codec.
WebRtcVideoChannel2::VideoCodecSettings::VideoCodecSettings()
    : rtx_payload_type(-1) {}
|
|
|
|
|
|
2014-11-07 10:54:43 +00:00
|
|
|
bool WebRtcVideoChannel2::VideoCodecSettings::operator==(
|
|
|
|
|
const WebRtcVideoChannel2::VideoCodecSettings& other) const {
|
|
|
|
|
return codec == other.codec &&
|
|
|
|
|
fec.ulpfec_payload_type == other.fec.ulpfec_payload_type &&
|
|
|
|
|
fec.red_payload_type == other.fec.red_payload_type &&
|
2015-04-21 20:24:50 +08:00
|
|
|
fec.red_rtx_payload_type == other.fec.red_rtx_payload_type &&
|
2014-11-07 10:54:43 +00:00
|
|
|
rtx_payload_type == other.rtx_payload_type;
|
|
|
|
|
}
|
|
|
|
|
|
2015-04-22 18:41:14 +02:00
|
|
|
// Inequality, defined in terms of operator==.
bool WebRtcVideoChannel2::VideoCodecSettings::operator!=(
    const WebRtcVideoChannel2::VideoCodecSettings& other) const {
  return !(*this == other);
}
|
|
|
|
|
|
2014-05-13 11:07:01 +00:00
|
|
|
// Partitions a flat codec list into per-video-codec settings: RED/ULPFEC
// payload types are folded into a shared FecConfig, RTX codecs are resolved
// to their associated video (or RED) payload types, and each remaining video
// codec gets one VideoCodecSettings entry carrying the FEC config and its
// RTX payload type. Returns an empty vector on any validation failure
// (duplicate payload type, malformed or dangling RTX mapping).
std::vector<WebRtcVideoChannel2::VideoCodecSettings>
WebRtcVideoChannel2::MapCodecs(const std::vector<VideoCodec>& codecs) {
  RTC_DCHECK(!codecs.empty());

  std::vector<VideoCodecSettings> video_codecs;
  std::map<int, bool> payload_used;
  std::map<int, VideoCodec::CodecType> payload_codec_type;
  // |rtx_mapping| maps video payload type to rtx payload type.
  std::map<int, int> rtx_mapping;

  webrtc::FecConfig fec_settings;

  for (size_t i = 0; i < codecs.size(); ++i) {
    const VideoCodec& in_codec = codecs[i];
    int payload_type = in_codec.id;

    // Each payload type may appear at most once in the list.
    if (payload_used[payload_type]) {
      LOG(LS_ERROR) << "Payload type already registered: "
                    << in_codec.ToString();
      return std::vector<VideoCodecSettings>();
    }
    payload_used[payload_type] = true;
    payload_codec_type[payload_type] = in_codec.GetCodecType();

    switch (in_codec.GetCodecType()) {
      case VideoCodec::CODEC_RED: {
        // RED payload type, should not have duplicates.
        RTC_DCHECK(fec_settings.red_payload_type == -1);
        fec_settings.red_payload_type = in_codec.id;
        continue;
      }

      case VideoCodec::CODEC_ULPFEC: {
        // ULPFEC payload type, should not have duplicates.
        RTC_DCHECK(fec_settings.ulpfec_payload_type == -1);
        fec_settings.ulpfec_payload_type = in_codec.id;
        continue;
      }

      case VideoCodec::CODEC_RTX: {
        // RTX must name a valid associated payload type via fmtp ("apt").
        int associated_payload_type;
        if (!in_codec.GetParam(kCodecParamAssociatedPayloadType,
                               &associated_payload_type) ||
            !IsValidRtpPayloadType(associated_payload_type)) {
          LOG(LS_ERROR)
              << "RTX codec with invalid or no associated payload type: "
              << in_codec.ToString();
          return std::vector<VideoCodecSettings>();
        }
        rtx_mapping[associated_payload_type] = in_codec.id;
        continue;
      }

      case VideoCodec::CODEC_VIDEO:
        break;
    }

    // Plain video codec: gets its own settings entry.
    video_codecs.push_back(VideoCodecSettings());
    video_codecs.back().codec = in_codec;
  }

  // One of these codecs should have been a video codec. Only having FEC
  // parameters into this code is a logic error.
  RTC_DCHECK(!video_codecs.empty());

  // Validate every RTX mapping against the codecs we actually saw.
  for (std::map<int, int>::const_iterator it = rtx_mapping.begin();
       it != rtx_mapping.end();
       ++it) {
    if (!payload_used[it->first]) {
      LOG(LS_ERROR) << "RTX mapped to payload not in codec list.";
      return std::vector<VideoCodecSettings>();
    }
    if (payload_codec_type[it->first] != VideoCodec::CODEC_VIDEO &&
        payload_codec_type[it->first] != VideoCodec::CODEC_RED) {
      LOG(LS_ERROR) << "RTX not mapped to regular video codec or RED codec.";
      return std::vector<VideoCodecSettings>();
    }

    // RTX for the RED payload type is carried in the shared FEC settings.
    if (it->first == fec_settings.red_payload_type) {
      fec_settings.red_rtx_payload_type = it->second;
    }
  }

  // Attach the shared FEC config and per-codec RTX payload type (excluding
  // the RED RTX, which already lives in fec_settings).
  for (size_t i = 0; i < video_codecs.size(); ++i) {
    video_codecs[i].fec = fec_settings;
    if (rtx_mapping[video_codecs[i].codec.id] != 0 &&
        rtx_mapping[video_codecs[i].codec.id] !=
            fec_settings.red_payload_type) {
      video_codecs[i].rtx_payload_type = rtx_mapping[video_codecs[i].codec.id];
    }
  }

  return video_codecs;
}
|
|
|
|
|
|
|
|
|
|
} // namespace cricket
|