webrtc_m130/modules/rtp_rtcp/source/rtp_sender_video_unittest.cc

/*
* Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "modules/rtp_rtcp/source/rtp_sender_video.h"
#include <memory>
#include <string>
#include <utility>
#include <vector>
#include "absl/memory/memory.h"
#include "api/test/mock_frame_encryptor.h"
#include "api/transport/field_trial_based_config.h"
#include "api/transport/rtp/dependency_descriptor.h"
#include "api/video/video_codec_constants.h"
#include "api/video/video_timing.h"
#include "common_video/generic_frame_descriptor/generic_frame_info.h"
#include "modules/rtp_rtcp/include/rtp_cvo.h"
#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h"
#include "modules/rtp_rtcp/source/rtp_descriptor_authentication.h"
#include "modules/rtp_rtcp/source/rtp_format_video_generic.h"
#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h"
#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h"
#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "modules/rtp_rtcp/source/time_util.h"
#include "rtc_base/arraysize.h"
#include "rtc_base/rate_limiter.h"
#include "rtc_base/task_queue_for_test.h"
#include "test/gmock.h"
#include "test/gtest.h"
#include "test/mock_frame_transformer.h"
namespace webrtc {
namespace {
using ::testing::_;
using ::testing::ContainerEq;
using ::testing::ElementsAre;
using ::testing::ElementsAreArray;
using ::testing::IsEmpty;
using ::testing::NiceMock;
using ::testing::Return;
using ::testing::ReturnArg;
using ::testing::SaveArg;
using ::testing::SizeIs;
using ::testing::WithArgs;
Reland "Delete test/constants.h" This reverts commit 4f36b7a478c2763463c7a9ea970548ec68bc3ea6. Reason for revert: Failing tests fixed. Original change's description: > Revert "Delete test/constants.h" > > This reverts commit 389b1672a32f2dd49af6c6ed40e8ddf394b986de. > > Reason for revert: Causes failure (and empty result list) in CallPerfTest.PadsToMinTransmitBitrate > > Original change's description: > > Delete test/constants.h > > > > It's not possible to use constants.h for all RTP extensions > > after the number of extensions exceeds 14, which is the maximum > > number of one-byte RTP extensions. This is because some extensions > > would have to be assigned a number greater than 14, even if the > > test only involves 14 extensions or less. > > > > For uniformity's sake, this CL also edits some files to use an > > enum as the files involved in this CL, rather than free-floating > > const-ints. > > > > Bug: webrtc:10288 > > Change-Id: Ib5e58ad72c4d3756f4c4f6521f140ec59617f3f5 > > Reviewed-on: https://webrtc-review.googlesource.com/c/123048 > > Commit-Queue: Elad Alon <eladalon@webrtc.org> > > Reviewed-by: Danil Chapovalov <danilchap@webrtc.org> > > Reviewed-by: Karl Wiberg <kwiberg@webrtc.org> > > Reviewed-by: Erik Språng <sprang@webrtc.org> > > Cr-Commit-Position: refs/heads/master@{#26728} > > TBR=danilchap@webrtc.org,kwiberg@webrtc.org,eladalon@webrtc.org,sprang@webrtc.org > > Bug: webrtc:10288, chromium:933127 > Change-Id: If1de0bd8992137c52bf0b877b3cb0a2bafc809d4 > Reviewed-on: https://webrtc-review.googlesource.com/c/123381 > Commit-Queue: Oleh Prypin <oprypin@webrtc.org> > Reviewed-by: Oleh Prypin <oprypin@webrtc.org> > Cr-Commit-Position: refs/heads/master@{#26744} TBR=danilchap@webrtc.org,oprypin@webrtc.org,kwiberg@webrtc.org,eladalon@webrtc.org,sprang@webrtc.org Change-Id: I65e391325d3a6df6db3c0739185e2002e70fb954 Bug: webrtc:10288, chromium:933127 Reviewed-on: https://webrtc-review.googlesource.com/c/123384 Reviewed-by: Elad Alon <eladalon@webrtc.org> Commit-Queue: Elad Alon <eladalon@webrtc.org> Cr-Commit-Position: refs/heads/master@{#26750}
2019-02-18 23:45:57 +01:00
enum : int { // The first valid value is 1.
kAbsoluteSendTimeExtensionId = 1,
kGenericDescriptorId,
kDependencyDescriptorId,
Reland "Delete test/constants.h" This reverts commit 4f36b7a478c2763463c7a9ea970548ec68bc3ea6. Reason for revert: Failing tests fixed. Original change's description: > Revert "Delete test/constants.h" > > This reverts commit 389b1672a32f2dd49af6c6ed40e8ddf394b986de. > > Reason for revert: Causes failure (and empty result list) in CallPerfTest.PadsToMinTransmitBitrate > > Original change's description: > > Delete test/constants.h > > > > It's not possible to use constants.h for all RTP extensions > > after the number of extensions exceeds 14, which is the maximum > > number of one-byte RTP extensions. This is because some extensions > > would have to be assigned a number greater than 14, even if the > > test only involves 14 extensions or less. > > > > For uniformity's sake, this CL also edits some files to use an > > enum as the files involved in this CL, rather than free-floating > > const-ints. > > > > Bug: webrtc:10288 > > Change-Id: Ib5e58ad72c4d3756f4c4f6521f140ec59617f3f5 > > Reviewed-on: https://webrtc-review.googlesource.com/c/123048 > > Commit-Queue: Elad Alon <eladalon@webrtc.org> > > Reviewed-by: Danil Chapovalov <danilchap@webrtc.org> > > Reviewed-by: Karl Wiberg <kwiberg@webrtc.org> > > Reviewed-by: Erik Språng <sprang@webrtc.org> > > Cr-Commit-Position: refs/heads/master@{#26728} > > TBR=danilchap@webrtc.org,kwiberg@webrtc.org,eladalon@webrtc.org,sprang@webrtc.org > > Bug: webrtc:10288, chromium:933127 > Change-Id: If1de0bd8992137c52bf0b877b3cb0a2bafc809d4 > Reviewed-on: https://webrtc-review.googlesource.com/c/123381 > Commit-Queue: Oleh Prypin <oprypin@webrtc.org> > Reviewed-by: Oleh Prypin <oprypin@webrtc.org> > Cr-Commit-Position: refs/heads/master@{#26744} TBR=danilchap@webrtc.org,oprypin@webrtc.org,kwiberg@webrtc.org,eladalon@webrtc.org,sprang@webrtc.org Change-Id: I65e391325d3a6df6db3c0739185e2002e70fb954 Bug: webrtc:10288, chromium:933127 Reviewed-on: https://webrtc-review.googlesource.com/c/123384 Reviewed-by: Elad Alon <eladalon@webrtc.org> Commit-Queue: Elad Alon <eladalon@webrtc.org> Cr-Commit-Position: refs/heads/master@{#26750}
2019-02-18 23:45:57 +01:00
kTransmissionTimeOffsetExtensionId,
kTransportSequenceNumberExtensionId,
kVideoRotationExtensionId,
kVideoTimingExtensionId,
kAbsoluteCaptureTimeExtensionId,
Reland "Remove PlayoutDelayOracle and make RtpSenderVideo guarantee delivery" This is a reland of 4f68f5398d7fa3d47c449e99893c9bea07bf7ca2 Original change's description: > Remove PlayoutDelayOracle and make RtpSenderVideo guarantee delivery > > The PlayoutDelayOracle was responsible for making sure the PlayoutDelay > header extension was successfully propagated to the receiving side. Once > it was determined that the receiver had received a frame with the new > delay tag, it's no longer necessary to propagate. > > The issue with this implementation is that it is based on max > extended sequence number reported via RTCP, which makes it often slow > to react, could theoretically fail to produce desired outcome (max > received > X does not guarantee X was fully received and decoded), and > added a lot of code complexity. > > The guarantee of delivery can in fact be accomplished more reliably and > with less code by making sure to tag each frame until an undiscardable > frame is sent. > > This allows containing the logic fully within RTPSenderVideo. > > Bug: webrtc:11340 > Change-Id: I2d1d2b6b67f4f07b8b33336f8fcfcde724243eef > Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/168221 > Reviewed-by: Stefan Holmer <stefan@webrtc.org> > Reviewed-by: Sebastian Jansson <srte@webrtc.org> > Commit-Queue: Erik Språng <sprang@webrtc.org> > Cr-Commit-Position: refs/heads/master@{#30473} TBR=stefan@webrtc.org Bug: webrtc:11340 Change-Id: I2fdd0004121b13b96497b21e052359e31d0c477a Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/168305 Commit-Queue: Erik Språng <sprang@webrtc.org> Reviewed-by: Sebastian Jansson <srte@webrtc.org> Cr-Commit-Position: refs/heads/master@{#30479}
2020-02-06 17:10:08 +01:00
kPlayoutDelayExtensionId
Reland "Delete test/constants.h" This reverts commit 4f36b7a478c2763463c7a9ea970548ec68bc3ea6. Reason for revert: Failing tests fixed. Original change's description: > Revert "Delete test/constants.h" > > This reverts commit 389b1672a32f2dd49af6c6ed40e8ddf394b986de. > > Reason for revert: Causes failure (and empty result list) in CallPerfTest.PadsToMinTransmitBitrate > > Original change's description: > > Delete test/constants.h > > > > It's not possible to use constants.h for all RTP extensions > > after the number of extensions exceeds 14, which is the maximum > > number of one-byte RTP extensions. This is because some extensions > > would have to be assigned a number greater than 14, even if the > > test only involves 14 extensions or less. > > > > For uniformity's sake, this CL also edits some files to use an > > enum as the files involved in this CL, rather than free-floating > > const-ints. > > > > Bug: webrtc:10288 > > Change-Id: Ib5e58ad72c4d3756f4c4f6521f140ec59617f3f5 > > Reviewed-on: https://webrtc-review.googlesource.com/c/123048 > > Commit-Queue: Elad Alon <eladalon@webrtc.org> > > Reviewed-by: Danil Chapovalov <danilchap@webrtc.org> > > Reviewed-by: Karl Wiberg <kwiberg@webrtc.org> > > Reviewed-by: Erik Språng <sprang@webrtc.org> > > Cr-Commit-Position: refs/heads/master@{#26728} > > TBR=danilchap@webrtc.org,kwiberg@webrtc.org,eladalon@webrtc.org,sprang@webrtc.org > > Bug: webrtc:10288, chromium:933127 > Change-Id: If1de0bd8992137c52bf0b877b3cb0a2bafc809d4 > Reviewed-on: https://webrtc-review.googlesource.com/c/123381 > Commit-Queue: Oleh Prypin <oprypin@webrtc.org> > Reviewed-by: Oleh Prypin <oprypin@webrtc.org> > Cr-Commit-Position: refs/heads/master@{#26744} TBR=danilchap@webrtc.org,oprypin@webrtc.org,kwiberg@webrtc.org,eladalon@webrtc.org,sprang@webrtc.org Change-Id: I65e391325d3a6df6db3c0739185e2002e70fb954 Bug: webrtc:10288, chromium:933127 Reviewed-on: https://webrtc-review.googlesource.com/c/123384 Reviewed-by: Elad Alon <eladalon@webrtc.org> Commit-Queue: Elad Alon <eladalon@webrtc.org> Cr-Commit-Position: refs/heads/master@{#26750}
2019-02-18 23:45:57 +01:00
};
constexpr int kPayload = 100;
constexpr VideoCodecType kType = VideoCodecType::kVideoCodecGeneric;
constexpr uint32_t kTimestamp = 10;
constexpr uint16_t kSeqNum = 33;
constexpr uint32_t kSsrc = 725242;
constexpr int kMaxPacketLength = 1500;
constexpr uint64_t kStartTime = 123456789;
constexpr int64_t kDefaultExpectedRetransmissionTimeMs = 125;
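// Loopback transport that parses every outgoing RTP packet with the
// receiver-side extension map and stores it, so tests can inspect the header
// extensions of the packets that were "sent".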
class LoopbackTransportTest : public webrtc::Transport {
public:
LoopbackTransportTest() {
receivers_extensions_.Register<TransmissionOffset>(
kTransmissionTimeOffsetExtensionId);
receivers_extensions_.Register<AbsoluteSendTime>(
kAbsoluteSendTimeExtensionId);
receivers_extensions_.Register<TransportSequenceNumber>(
kTransportSequenceNumberExtensionId);
receivers_extensions_.Register<VideoOrientation>(kVideoRotationExtensionId);
receivers_extensions_.Register<VideoTimingExtension>(
kVideoTimingExtensionId);
receivers_extensions_.Register<RtpGenericFrameDescriptorExtension00>(
kGenericDescriptorId);
receivers_extensions_.Register<RtpDependencyDescriptorExtension>(
kDependencyDescriptorId);
receivers_extensions_.Register<AbsoluteCaptureTimeExtension>(
kAbsoluteCaptureTimeExtensionId);
Reland "Remove PlayoutDelayOracle and make RtpSenderVideo guarantee delivery" This is a reland of 4f68f5398d7fa3d47c449e99893c9bea07bf7ca2 Original change's description: > Remove PlayoutDelayOracle and make RtpSenderVideo guarantee delivery > > The PlayoutDelayOracle was responsible for making sure the PlayoutDelay > header extension was successfully propagated to the receiving side. Once > it was determined that the receiver had received a frame with the new > delay tag, it's no longer necessary to propagate. > > The issue with this implementation is that it is based on max > extended sequence number reported via RTCP, which makes it often slow > to react, could theoretically fail to produce desired outcome (max > received > X does not guarantee X was fully received and decoded), and > added a lot of code complexity. > > The guarantee of delivery can in fact be accomplished more reliably and > with less code by making sure to tag each frame until an undiscardable > frame is sent. > > This allows containing the logic fully within RTPSenderVideo. > > Bug: webrtc:11340 > Change-Id: I2d1d2b6b67f4f07b8b33336f8fcfcde724243eef > Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/168221 > Reviewed-by: Stefan Holmer <stefan@webrtc.org> > Reviewed-by: Sebastian Jansson <srte@webrtc.org> > Commit-Queue: Erik Språng <sprang@webrtc.org> > Cr-Commit-Position: refs/heads/master@{#30473} TBR=stefan@webrtc.org Bug: webrtc:11340 Change-Id: I2fdd0004121b13b96497b21e052359e31d0c477a Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/168305 Commit-Queue: Erik Språng <sprang@webrtc.org> Reviewed-by: Sebastian Jansson <srte@webrtc.org> Cr-Commit-Position: refs/heads/master@{#30479}
2020-02-06 17:10:08 +01:00
receivers_extensions_.Register<PlayoutDelayLimits>(
kPlayoutDelayExtensionId);
}
bool SendRtp(const uint8_t* data,
size_t len,
const PacketOptions& options) override {
sent_packets_.push_back(RtpPacketReceived(&receivers_extensions_));
EXPECT_TRUE(sent_packets_.back().Parse(data, len));
return true;
}
bool SendRtcp(const uint8_t* data, size_t len) override { return false; }
const RtpPacketReceived& last_sent_packet() { return sent_packets_.back(); }
int packets_sent() { return sent_packets_.size(); }
const std::vector<RtpPacketReceived>& sent_packets() const {
return sent_packets_;
}
private:
RtpHeaderExtensionMap receivers_extensions_;
std::vector<RtpPacketReceived> sent_packets_;
};
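// Thin wrapper that fills in the RTPSenderVideo::Config and re-exposes
// AllowRetransmission() keyed on an RTPVideoHeader instead of a temporal id.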
class TestRtpSenderVideo : public RTPSenderVideo {
public:
TestRtpSenderVideo(Clock* clock,
RTPSender* rtp_sender,
FlexfecSender* flexfec_sender,
const WebRtcKeyValueConfig& field_trials)
: RTPSenderVideo([&] {
Config config;
config.clock = clock;
config.rtp_sender = rtp_sender;
Reland "Reland "Refactors UlpFec and FlexFec to use a common interface."" This is a reland of 49734dc0faa69616a58a1a95c7fc61a4610793cf Patchset 2 contains a fix for the fuzzer set up. Since we now parse an RtpPacket out of the fuzzer data, the header needs to be correct, otherwise we fail before even reaching the FEC code that we actually want to test. Bug: webrtc:11340, chromium:1052323, chromium:1055974 TBR=stefan@webrtc.org Original change's description: > Reland "Refactors UlpFec and FlexFec to use a common interface." > > This is a reland of 11af1d7444fd7438766b7bc52cbd64752d72e32e > > Original change's description: > > Refactors UlpFec and FlexFec to use a common interface. > > > > The new VideoFecGenerator is now injected into RtpSenderVideo, > > and generalizes the usage. > > This also prepares for being able to genera FEC in the RTP egress > > module. > > > > Bug: webrtc:11340 > > Change-Id: I8aa873129b2fb4131eb3399ee88f6ea2747155a3 > > Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/168347 > > Reviewed-by: Stefan Holmer <stefan@webrtc.org> > > Reviewed-by: Sebastian Jansson <srte@webrtc.org> > > Reviewed-by: Rasmus Brandt <brandtr@webrtc.org> > > Commit-Queue: Erik Språng <sprang@webrtc.org> > > Cr-Commit-Position: refs/heads/master@{#30515} > > Bug: webrtc:11340, chromium:1052323 > Change-Id: Id646047365f1c46cca9e6f3e8eefa5151207b4a0 > Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/168608 > Commit-Queue: Erik Språng <sprang@webrtc.org> > Reviewed-by: Stefan Holmer <stefan@webrtc.org> > Cr-Commit-Position: refs/heads/master@{#30593} Bug: webrtc:11340, chromium:1052323 Change-Id: Ib8925f44e2edfcfeadc95c845c3bfc23822604ed Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/169222 Commit-Queue: Erik Språng <sprang@webrtc.org> Reviewed-by: Ilya Nikolaevskiy <ilnik@webrtc.org> Cr-Commit-Position: refs/heads/master@{#30724}
2020-03-05 10:14:04 +01:00
config.fec_generator = flexfec_sender;
config.field_trials = &field_trials;
return config;
}()) {}
~TestRtpSenderVideo() override {}
bool AllowRetransmission(const RTPVideoHeader& header,
int32_t retransmission_settings,
int64_t expected_retransmission_time_ms) {
return RTPSenderVideo::AllowRetransmission(GetTemporalId(header),
retransmission_settings,
expected_retransmission_time_ms);
}
};
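// Minimal field-trial config: only "WebRTC-SendSideBwe-WithOverhead" can be
// toggled; every other key reads as unset.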
class FieldTrials : public WebRtcKeyValueConfig {
public:
explicit FieldTrials(bool use_send_side_bwe_with_overhead)
: use_send_side_bwe_with_overhead_(use_send_side_bwe_with_overhead) {}
std::string Lookup(absl::string_view key) const override {
return key == "WebRTC-SendSideBwe-WithOverhead" &&
use_send_side_bwe_with_overhead_
? "Enabled"
: "";
}
private:
bool use_send_side_bwe_with_overhead_;
};
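// Test fixture, parameterized on the send-side-BWE-with-overhead field trial.
// It wires a ModuleRtpRtcpImpl2 to the loopback transport above and owns the
// RTPSenderVideo instance under test.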
class RtpSenderVideoTest : public ::testing::TestWithParam<bool> {
public:
RtpSenderVideoTest()
: field_trials_(GetParam()),
fake_clock_(kStartTime),
retransmission_rate_limiter_(&fake_clock_, 1000),
rtp_module_(ModuleRtpRtcpImpl2::Create([&] {
RtpRtcpInterface::Configuration config;
config.clock = &fake_clock_;
config.outgoing_transport = &transport_;
config.retransmission_rate_limiter = &retransmission_rate_limiter_;
config.field_trials = &field_trials_;
config.local_media_ssrc = kSsrc;
return config;
}())),
rtp_sender_video_(&fake_clock_,
rtp_module_->RtpSender(),
nullptr,
field_trials_) {
rtp_module_->SetSequenceNumber(kSeqNum);
rtp_module_->SetStartTimestamp(0);
}
void UsesMinimalVp8DescriptorWhenGenericFrameDescriptorExtensionIsUsed(
int version);
protected:
const RtpRtcpInterface::Configuration config_;
FieldTrials field_trials_;
SimulatedClock fake_clock_;
LoopbackTransportTest transport_;
RateLimiter retransmission_rate_limiter_;
std::unique_ptr<ModuleRtpRtcpImpl2> rtp_module_;
TestRtpSenderVideo rtp_sender_video_;
};
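// A key frame should carry the CVO (video rotation) extension even for the
// default 0-degree rotation.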
TEST_P(RtpSenderVideoTest, KeyFrameHasCVO) {
uint8_t kFrame[kMaxPacketLength];
rtp_module_->RegisterRtpHeaderExtension(VideoOrientation::kUri,
kVideoRotationExtensionId);
RTPVideoHeader hdr;
hdr.rotation = kVideoRotation_0;
hdr.frame_type = VideoFrameType::kVideoFrameKey;
rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, nullptr,
hdr, kDefaultExpectedRetransmissionTimeMs);
VideoRotation rotation;
EXPECT_TRUE(
transport_.last_sent_packet().GetExtension<VideoOrientation>(&rotation));
EXPECT_EQ(kVideoRotation_0, rotation);
}
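// A frame flagged as a timing frame should carry the video-timing extension
// with the encode deltas and the measured packetization delta filled in.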
TEST_P(RtpSenderVideoTest, TimingFrameHasPacketizationTimstampSet) {
uint8_t kFrame[kMaxPacketLength];
const int64_t kPacketizationTimeMs = 100;
const int64_t kEncodeStartDeltaMs = 10;
const int64_t kEncodeFinishDeltaMs = 50;
rtp_module_->RegisterRtpHeaderExtension(VideoTimingExtension::kUri,
kVideoTimingExtensionId);
const int64_t kCaptureTimestamp = fake_clock_.TimeInMilliseconds();
RTPVideoHeader hdr;
hdr.video_timing.flags = VideoSendTiming::kTriggeredByTimer;
hdr.video_timing.encode_start_delta_ms = kEncodeStartDeltaMs;
hdr.video_timing.encode_finish_delta_ms = kEncodeFinishDeltaMs;
fake_clock_.AdvanceTimeMilliseconds(kPacketizationTimeMs);
hdr.frame_type = VideoFrameType::kVideoFrameKey;
rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, kCaptureTimestamp,
kFrame, nullptr, hdr,
kDefaultExpectedRetransmissionTimeMs);
VideoSendTiming timing;
EXPECT_TRUE(transport_.last_sent_packet().GetExtension<VideoTimingExtension>(
&timing));
EXPECT_EQ(kPacketizationTimeMs, timing.packetization_finish_delta_ms);
EXPECT_EQ(kEncodeStartDeltaMs, timing.encode_start_delta_ms);
EXPECT_EQ(kEncodeFinishDeltaMs, timing.encode_finish_delta_ms);
}
TEST_P(RtpSenderVideoTest, DeltaFrameHasCVOWhenChanged) {
uint8_t kFrame[kMaxPacketLength];
rtp_module_->RegisterRtpHeaderExtension(VideoOrientation::kUri,
kVideoRotationExtensionId);
RTPVideoHeader hdr;
hdr.rotation = kVideoRotation_90;
hdr.frame_type = VideoFrameType::kVideoFrameKey;
EXPECT_TRUE(rtp_sender_video_.SendVideo(
kPayload, kType, kTimestamp, 0, kFrame, nullptr, hdr,
kDefaultExpectedRetransmissionTimeMs));
hdr.rotation = kVideoRotation_0;
hdr.frame_type = VideoFrameType::kVideoFrameDelta;
EXPECT_TRUE(rtp_sender_video_.SendVideo(
kPayload, kType, kTimestamp + 1, 0, kFrame, nullptr, hdr,
kDefaultExpectedRetransmissionTimeMs));
VideoRotation rotation;
EXPECT_TRUE(
transport_.last_sent_packet().GetExtension<VideoOrientation>(&rotation));
EXPECT_EQ(kVideoRotation_0, rotation);
}
TEST_P(RtpSenderVideoTest, DeltaFrameHasCVOWhenNonZero) {
uint8_t kFrame[kMaxPacketLength];
rtp_module_->RegisterRtpHeaderExtension(VideoOrientation::kUri,
kVideoRotationExtensionId);
RTPVideoHeader hdr;
hdr.rotation = kVideoRotation_90;
hdr.frame_type = VideoFrameType::kVideoFrameKey;
EXPECT_TRUE(rtp_sender_video_.SendVideo(
kPayload, kType, kTimestamp, 0, kFrame, nullptr, hdr,
kDefaultExpectedRetransmissionTimeMs));
hdr.frame_type = VideoFrameType::kVideoFrameDelta;
EXPECT_TRUE(rtp_sender_video_.SendVideo(
kPayload, kType, kTimestamp + 1, 0, kFrame, nullptr, hdr,
kDefaultExpectedRetransmissionTimeMs));
VideoRotation rotation;
EXPECT_TRUE(
transport_.last_sent_packet().GetExtension<VideoOrientation>(&rotation));
EXPECT_EQ(kVideoRotation_90, rotation);
}
// Make sure rotation is parsed correctly when the Camera (C) and Flip (F) bits
// are set in the CVO byte.
TEST_P(RtpSenderVideoTest, SendVideoWithCameraAndFlipCVO) {
// Test extracting rotation when Camera (C) and Flip (F) bits are zero.
EXPECT_EQ(kVideoRotation_0, ConvertCVOByteToVideoRotation(0));
EXPECT_EQ(kVideoRotation_90, ConvertCVOByteToVideoRotation(1));
EXPECT_EQ(kVideoRotation_180, ConvertCVOByteToVideoRotation(2));
EXPECT_EQ(kVideoRotation_270, ConvertCVOByteToVideoRotation(3));
// Test extracting rotation when Camera (C) and Flip (F) bits are set.
const int flip_bit = 1 << 2;
const int camera_bit = 1 << 3;
EXPECT_EQ(kVideoRotation_0,
ConvertCVOByteToVideoRotation(flip_bit | camera_bit | 0));
EXPECT_EQ(kVideoRotation_90,
ConvertCVOByteToVideoRotation(flip_bit | camera_bit | 1));
EXPECT_EQ(kVideoRotation_180,
ConvertCVOByteToVideoRotation(flip_bit | camera_bit | 2));
EXPECT_EQ(kVideoRotation_270,
ConvertCVOByteToVideoRotation(flip_bit | camera_bit | 3));
}
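// The RetransmissionTypes* tests below exercise AllowRetransmission() for the
// per-codec policies: generic and H264 frames are retransmittable whenever
// retransmission is enabled, while VP8/VP9 eligibility depends on the temporal
// layer of the frame.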
TEST_P(RtpSenderVideoTest, RetransmissionTypesGeneric) {
RTPVideoHeader header;
header.codec = kVideoCodecGeneric;
EXPECT_FALSE(rtp_sender_video_.AllowRetransmission(
header, kRetransmitOff, kDefaultExpectedRetransmissionTimeMs));
EXPECT_TRUE(rtp_sender_video_.AllowRetransmission(
header, kRetransmitBaseLayer, kDefaultExpectedRetransmissionTimeMs));
EXPECT_TRUE(rtp_sender_video_.AllowRetransmission(
header, kRetransmitHigherLayers, kDefaultExpectedRetransmissionTimeMs));
EXPECT_TRUE(rtp_sender_video_.AllowRetransmission(
header, kConditionallyRetransmitHigherLayers,
kDefaultExpectedRetransmissionTimeMs));
}
TEST_P(RtpSenderVideoTest, RetransmissionTypesH264) {
RTPVideoHeader header;
header.video_type_header.emplace<RTPVideoHeaderH264>().packetization_mode =
H264PacketizationMode::NonInterleaved;
header.codec = kVideoCodecH264;
EXPECT_FALSE(rtp_sender_video_.AllowRetransmission(
header, kRetransmitOff, kDefaultExpectedRetransmissionTimeMs));
EXPECT_TRUE(rtp_sender_video_.AllowRetransmission(
header, kRetransmitBaseLayer, kDefaultExpectedRetransmissionTimeMs));
EXPECT_TRUE(rtp_sender_video_.AllowRetransmission(
header, kRetransmitHigherLayers, kDefaultExpectedRetransmissionTimeMs));
EXPECT_TRUE(rtp_sender_video_.AllowRetransmission(
header, kConditionallyRetransmitHigherLayers,
kDefaultExpectedRetransmissionTimeMs));
}
TEST_P(RtpSenderVideoTest, RetransmissionTypesVP8BaseLayer) {
RTPVideoHeader header;
header.codec = kVideoCodecVP8;
auto& vp8_header = header.video_type_header.emplace<RTPVideoHeaderVP8>();
vp8_header.temporalIdx = 0;
EXPECT_FALSE(rtp_sender_video_.AllowRetransmission(
header, kRetransmitOff, kDefaultExpectedRetransmissionTimeMs));
EXPECT_TRUE(rtp_sender_video_.AllowRetransmission(
header, kRetransmitBaseLayer, kDefaultExpectedRetransmissionTimeMs));
EXPECT_FALSE(rtp_sender_video_.AllowRetransmission(
header, kRetransmitHigherLayers, kDefaultExpectedRetransmissionTimeMs));
EXPECT_TRUE(rtp_sender_video_.AllowRetransmission(
header, kRetransmitHigherLayers | kRetransmitBaseLayer,
kDefaultExpectedRetransmissionTimeMs));
EXPECT_FALSE(rtp_sender_video_.AllowRetransmission(
header, kConditionallyRetransmitHigherLayers,
kDefaultExpectedRetransmissionTimeMs));
EXPECT_TRUE(rtp_sender_video_.AllowRetransmission(
header, kRetransmitBaseLayer | kConditionallyRetransmitHigherLayers,
kDefaultExpectedRetransmissionTimeMs));
}
TEST_P(RtpSenderVideoTest, RetransmissionTypesVP8HigherLayers) {
RTPVideoHeader header;
header.codec = kVideoCodecVP8;
auto& vp8_header = header.video_type_header.emplace<RTPVideoHeaderVP8>();
for (int tid = 1; tid <= kMaxTemporalStreams; ++tid) {
vp8_header.temporalIdx = tid;
EXPECT_FALSE(rtp_sender_video_.AllowRetransmission(
header, kRetransmitOff, kDefaultExpectedRetransmissionTimeMs));
EXPECT_FALSE(rtp_sender_video_.AllowRetransmission(
header, kRetransmitBaseLayer, kDefaultExpectedRetransmissionTimeMs));
EXPECT_TRUE(rtp_sender_video_.AllowRetransmission(
header, kRetransmitHigherLayers, kDefaultExpectedRetransmissionTimeMs));
EXPECT_TRUE(rtp_sender_video_.AllowRetransmission(
header, kRetransmitHigherLayers | kRetransmitBaseLayer,
kDefaultExpectedRetransmissionTimeMs));
}
}
TEST_P(RtpSenderVideoTest, RetransmissionTypesVP9) {
RTPVideoHeader header;
header.codec = kVideoCodecVP9;
auto& vp9_header = header.video_type_header.emplace<RTPVideoHeaderVP9>();
for (int tid = 1; tid <= kMaxTemporalStreams; ++tid) {
vp9_header.temporal_idx = tid;
EXPECT_FALSE(rtp_sender_video_.AllowRetransmission(
header, kRetransmitOff, kDefaultExpectedRetransmissionTimeMs));
EXPECT_FALSE(rtp_sender_video_.AllowRetransmission(
header, kRetransmitBaseLayer, kDefaultExpectedRetransmissionTimeMs));
EXPECT_TRUE(rtp_sender_video_.AllowRetransmission(
header, kRetransmitHigherLayers, kDefaultExpectedRetransmissionTimeMs));
EXPECT_TRUE(rtp_sender_video_.AllowRetransmission(
header, kRetransmitHigherLayers | kRetransmitBaseLayer,
kDefaultExpectedRetransmissionTimeMs));
}
}
TEST_P(RtpSenderVideoTest, ConditionalRetransmit) {
const int64_t kFrameIntervalMs = 33;
const int64_t kRttMs = (kFrameIntervalMs * 3) / 2;
const uint8_t kSettings =
kRetransmitBaseLayer | kConditionallyRetransmitHigherLayers;
// Insert VP8 frames for all temporal layers, but stop before the final index.
RTPVideoHeader header;
header.codec = kVideoCodecVP8;
// Fill averaging window to prevent rounding errors.
constexpr int kNumRepetitions =
(RTPSenderVideo::kTLRateWindowSizeMs + (kFrameIntervalMs / 2)) /
kFrameIntervalMs;
constexpr int kPattern[] = {0, 2, 1, 2};
auto& vp8_header = header.video_type_header.emplace<RTPVideoHeaderVP8>();
for (size_t i = 0; i < arraysize(kPattern) * kNumRepetitions; ++i) {
vp8_header.temporalIdx = kPattern[i % arraysize(kPattern)];
rtp_sender_video_.AllowRetransmission(header, kSettings, kRttMs);
fake_clock_.AdvanceTimeMilliseconds(kFrameIntervalMs);
}
// Since we're at the start of the pattern, the next expected frame in TL0 is
// right now. We will wait at most one expected retransmission time before
// acknowledging that it did not arrive, which means this frame and the next
// will not be retransmitted.
vp8_header.temporalIdx = 1;
EXPECT_FALSE(
rtp_sender_video_.AllowRetransmission(header, kSettings, kRttMs));
fake_clock_.AdvanceTimeMilliseconds(kFrameIntervalMs);
EXPECT_FALSE(
rtp_sender_video_.AllowRetransmission(header, kSettings, kRttMs));
fake_clock_.AdvanceTimeMilliseconds(kFrameIntervalMs);
// The TL0 frame did not arrive. So allow retransmission.
EXPECT_TRUE(rtp_sender_video_.AllowRetransmission(header, kSettings, kRttMs));
fake_clock_.AdvanceTimeMilliseconds(kFrameIntervalMs);
// Insert a frame for TL2. We just had a frame in TL1, so the next one there
// is three frames away. TL0 is still too far in the past, so allow
// retransmission.
vp8_header.temporalIdx = 2;
EXPECT_TRUE(rtp_sender_video_.AllowRetransmission(header, kSettings, kRttMs));
fake_clock_.AdvanceTimeMilliseconds(kFrameIntervalMs);
// Another TL2, next in TL1 is two frames away. Allow again.
EXPECT_TRUE(rtp_sender_video_.AllowRetransmission(header, kSettings, kRttMs));
fake_clock_.AdvanceTimeMilliseconds(kFrameIntervalMs);
// Yet another TL2, next in TL1 is now only one frame away, so don't store
// for retransmission.
EXPECT_FALSE(
rtp_sender_video_.AllowRetransmission(header, kSettings, kRttMs));
}
TEST_P(RtpSenderVideoTest, ConditionalRetransmitLimit) {
const int64_t kFrameIntervalMs = 200;
const int64_t kRttMs = (kFrameIntervalMs * 3) / 2;
const int32_t kSettings =
kRetransmitBaseLayer | kConditionallyRetransmitHigherLayers;
// Insert VP8 frames for all temporal layers, but stop before the final index.
RTPVideoHeader header;
header.codec = kVideoCodecVP8;
// Fill averaging window to prevent rounding errors.
constexpr int kNumRepetitions =
(RTPSenderVideo::kTLRateWindowSizeMs + (kFrameIntervalMs / 2)) /
kFrameIntervalMs;
constexpr int kPattern[] = {0, 2, 2, 2};
auto& vp8_header = header.video_type_header.emplace<RTPVideoHeaderVP8>();
for (size_t i = 0; i < arraysize(kPattern) * kNumRepetitions; ++i) {
vp8_header.temporalIdx = kPattern[i % arraysize(kPattern)];
rtp_sender_video_.AllowRetransmission(header, kSettings, kRttMs);
fake_clock_.AdvanceTimeMilliseconds(kFrameIntervalMs);
}
// Since we're at the start of the pattern, the next expected frame will be
// right now in TL0. Put it in TL1 instead. Regular rules would dictate that
// we don't store for retransmission because we expect a frame in a lower
// layer, but that last frame in TL1 was a long time ago in absolute terms,
// so allow retransmission anyway.
vp8_header.temporalIdx = 1;
EXPECT_TRUE(rtp_sender_video_.AllowRetransmission(header, kSettings, kRttMs));
}
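// Once a video structure is set, a key frame should carry a dependency
// descriptor with the structure attached, and subsequent delta frames should
// be parsable against that structure.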
TEST_P(RtpSenderVideoTest, SendsDependencyDescriptorWhenVideoStructureIsSet) {
const int64_t kFrameId = 100000;
uint8_t kFrame[100];
rtp_module_->RegisterRtpHeaderExtension(
RtpDependencyDescriptorExtension::kUri, kDependencyDescriptorId);
FrameDependencyStructure video_structure;
video_structure.num_decode_targets = 2;
video_structure.templates = {
FrameDependencyTemplate().S(0).T(0).Dtis("SS"),
FrameDependencyTemplate().S(1).T(0).Dtis("-S"),
FrameDependencyTemplate().S(1).T(1).Dtis("-D"),
};
rtp_sender_video_.SetVideoStructure(&video_structure);
// Send key frame.
RTPVideoHeader hdr;
RTPVideoHeader::GenericDescriptorInfo& generic = hdr.generic.emplace();
generic.frame_id = kFrameId;
generic.temporal_index = 0;
generic.spatial_index = 0;
generic.decode_target_indications = {DecodeTargetIndication::kSwitch,
DecodeTargetIndication::kSwitch};
hdr.frame_type = VideoFrameType::kVideoFrameKey;
rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, nullptr,
hdr, kDefaultExpectedRetransmissionTimeMs);
ASSERT_EQ(transport_.packets_sent(), 1);
DependencyDescriptor descriptor_key;
ASSERT_TRUE(transport_.last_sent_packet()
.GetExtension<RtpDependencyDescriptorExtension>(
nullptr, &descriptor_key));
ASSERT_TRUE(descriptor_key.attached_structure);
EXPECT_EQ(descriptor_key.attached_structure->num_decode_targets, 2);
EXPECT_THAT(descriptor_key.attached_structure->templates, SizeIs(3));
EXPECT_EQ(descriptor_key.frame_number, kFrameId & 0xFFFF);
EXPECT_EQ(descriptor_key.frame_dependencies.spatial_id, 0);
EXPECT_EQ(descriptor_key.frame_dependencies.temporal_id, 0);
EXPECT_EQ(descriptor_key.frame_dependencies.decode_target_indications,
generic.decode_target_indications);
EXPECT_THAT(descriptor_key.frame_dependencies.frame_diffs, IsEmpty());
// Send delta frame.
generic.frame_id = kFrameId + 1;
generic.temporal_index = 1;
generic.spatial_index = 1;
generic.dependencies = {kFrameId, kFrameId - 500};
generic.decode_target_indications = {DecodeTargetIndication::kNotPresent,
DecodeTargetIndication::kRequired};
hdr.frame_type = VideoFrameType::kVideoFrameDelta;
rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, nullptr,
hdr, kDefaultExpectedRetransmissionTimeMs);
EXPECT_EQ(transport_.packets_sent(), 2);
DependencyDescriptor descriptor_delta;
ASSERT_TRUE(
transport_.last_sent_packet()
.GetExtension<RtpDependencyDescriptorExtension>(
descriptor_key.attached_structure.get(), &descriptor_delta));
EXPECT_EQ(descriptor_delta.attached_structure, nullptr);
EXPECT_EQ(descriptor_delta.frame_number, (kFrameId + 1) & 0xFFFF);
EXPECT_EQ(descriptor_delta.frame_dependencies.spatial_id, 1);
EXPECT_EQ(descriptor_delta.frame_dependencies.temporal_id, 1);
EXPECT_EQ(descriptor_delta.frame_dependencies.decode_target_indications,
generic.decode_target_indications);
EXPECT_THAT(descriptor_delta.frame_dependencies.frame_diffs,
ElementsAre(1, 501));
}
TEST_P(RtpSenderVideoTest, PropagatesChainDiffsIntoDependencyDescriptor) {
const int64_t kFrameId = 100000;
uint8_t kFrame[100];
rtp_module_->RegisterRtpHeaderExtension(
RtpDependencyDescriptorExtension::kUri, kDependencyDescriptorId);
FrameDependencyStructure video_structure;
video_structure.num_decode_targets = 2;
video_structure.num_chains = 1;
// The first decode target is protected by the only chain; the second one is not.
video_structure.decode_target_protected_by_chain = {0, 1};
video_structure.templates = {
FrameDependencyTemplate().S(0).T(0).Dtis("SS").ChainDiffs({1}),
};
rtp_sender_video_.SetVideoStructure(&video_structure);
RTPVideoHeader hdr;
RTPVideoHeader::GenericDescriptorInfo& generic = hdr.generic.emplace();
generic.frame_id = kFrameId;
generic.decode_target_indications = {DecodeTargetIndication::kSwitch,
DecodeTargetIndication::kSwitch};
generic.chain_diffs = {2};
hdr.frame_type = VideoFrameType::kVideoFrameKey;
rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, nullptr,
hdr, kDefaultExpectedRetransmissionTimeMs);
ASSERT_EQ(transport_.packets_sent(), 1);
DependencyDescriptor descriptor_key;
ASSERT_TRUE(transport_.last_sent_packet()
.GetExtension<RtpDependencyDescriptorExtension>(
nullptr, &descriptor_key));
EXPECT_THAT(descriptor_key.frame_dependencies.chain_diffs,
ContainerEq(generic.chain_diffs));
}
TEST_P(RtpSenderVideoTest,
PropagatesActiveDecodeTargetsIntoDependencyDescriptor) {
const int64_t kFrameId = 100000;
uint8_t kFrame[100];
rtp_module_->RegisterRtpHeaderExtension(
RtpDependencyDescriptorExtension::kUri, kDependencyDescriptorId);
FrameDependencyStructure video_structure;
video_structure.num_decode_targets = 2;
video_structure.num_chains = 1;
video_structure.decode_target_protected_by_chain = {0, 0};
video_structure.templates = {
FrameDependencyTemplate().S(0).T(0).Dtis("SS").ChainDiffs({1}),
};
rtp_sender_video_.SetVideoStructure(&video_structure);
RTPVideoHeader hdr;
RTPVideoHeader::GenericDescriptorInfo& generic = hdr.generic.emplace();
generic.frame_id = kFrameId;
generic.decode_target_indications = {DecodeTargetIndication::kSwitch,
DecodeTargetIndication::kSwitch};
generic.active_decode_targets = 0b01;
generic.chain_diffs = {1};
hdr.frame_type = VideoFrameType::kVideoFrameKey;
rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, nullptr,
hdr, kDefaultExpectedRetransmissionTimeMs);
ASSERT_EQ(transport_.packets_sent(), 1);
DependencyDescriptor descriptor_key;
ASSERT_TRUE(transport_.last_sent_packet()
.GetExtension<RtpDependencyDescriptorExtension>(
nullptr, &descriptor_key));
EXPECT_EQ(descriptor_key.active_decode_targets_bitmask, 0b01u);
}
TEST_P(RtpSenderVideoTest,
SetDiffentVideoStructureAvoidsCollisionWithThePreviousStructure) {
const int64_t kFrameId = 100000;
uint8_t kFrame[100];
rtp_module_->RegisterRtpHeaderExtension(
RtpDependencyDescriptorExtension::kUri, kDependencyDescriptorId);
FrameDependencyStructure video_structure1;
video_structure1.num_decode_targets = 2;
video_structure1.templates = {
FrameDependencyTemplate().S(0).T(0).Dtis("SS"),
FrameDependencyTemplate().S(0).T(1).Dtis("D-"),
};
FrameDependencyStructure video_structure2;
video_structure2.num_decode_targets = 2;
video_structure2.templates = {
FrameDependencyTemplate().S(0).T(0).Dtis("SS"),
FrameDependencyTemplate().S(0).T(1).Dtis("R-"),
};
// Send 1st key frame.
RTPVideoHeader hdr;
RTPVideoHeader::GenericDescriptorInfo& generic = hdr.generic.emplace();
generic.frame_id = kFrameId;
generic.decode_target_indications = {DecodeTargetIndication::kSwitch,
DecodeTargetIndication::kSwitch};
hdr.frame_type = VideoFrameType::kVideoFrameKey;
rtp_sender_video_.SetVideoStructure(&video_structure1);
rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, nullptr,
hdr, kDefaultExpectedRetransmissionTimeMs);
// Parse 1st extension.
ASSERT_EQ(transport_.packets_sent(), 1);
DependencyDescriptor descriptor_key1;
ASSERT_TRUE(transport_.last_sent_packet()
.GetExtension<RtpDependencyDescriptorExtension>(
nullptr, &descriptor_key1));
ASSERT_TRUE(descriptor_key1.attached_structure);
// Send the delta frame.
generic.frame_id = kFrameId + 1;
generic.temporal_index = 1;
generic.decode_target_indications = {DecodeTargetIndication::kDiscardable,
DecodeTargetIndication::kNotPresent};
hdr.frame_type = VideoFrameType::kVideoFrameDelta;
rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, nullptr,
hdr, kDefaultExpectedRetransmissionTimeMs);
ASSERT_EQ(transport_.packets_sent(), 2);
RtpPacket delta_packet = transport_.last_sent_packet();
// Send 2nd key frame.
generic.frame_id = kFrameId + 2;
generic.decode_target_indications = {DecodeTargetIndication::kSwitch,
DecodeTargetIndication::kSwitch};
hdr.frame_type = VideoFrameType::kVideoFrameKey;
rtp_sender_video_.SetVideoStructure(&video_structure2);
rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, nullptr,
hdr, kDefaultExpectedRetransmissionTimeMs);
// Parse the 2nd key frame.
ASSERT_EQ(transport_.packets_sent(), 3);
DependencyDescriptor descriptor_key2;
ASSERT_TRUE(transport_.last_sent_packet()
.GetExtension<RtpDependencyDescriptorExtension>(
nullptr, &descriptor_key2));
ASSERT_TRUE(descriptor_key2.attached_structure);
// Try to parse the 1st delta frame. It should be parsable using the structure
// from the 1st key frame, but not using the structure from the 2nd key frame.
DependencyDescriptor descriptor_delta;
EXPECT_TRUE(delta_packet.GetExtension<RtpDependencyDescriptorExtension>(
descriptor_key1.attached_structure.get(), &descriptor_delta));
EXPECT_FALSE(delta_packet.GetExtension<RtpDependencyDescriptorExtension>(
descriptor_key2.attached_structure.get(), &descriptor_delta));
}
TEST_P(RtpSenderVideoTest,
AuthenticateVideoHeaderWhenDependencyDescriptorExtensionIsUsed) {
static constexpr size_t kFrameSize = 100;
uint8_t kFrame[kFrameSize] = {1, 2, 3, 4};
rtp_module_->RegisterRtpHeaderExtension(
RtpDependencyDescriptorExtension::kUri, kDependencyDescriptorId);
rtc::scoped_refptr<MockFrameEncryptor> encryptor(
new rtc::RefCountedObject<NiceMock<MockFrameEncryptor>>);
ON_CALL(*encryptor, GetMaxCiphertextByteSize).WillByDefault(ReturnArg<1>());
ON_CALL(*encryptor, Encrypt)
.WillByDefault(WithArgs<3, 5>(
[](rtc::ArrayView<const uint8_t> frame, size_t* bytes_written) {
*bytes_written = frame.size();
return 0;
}));
RTPSenderVideo::Config config;
config.clock = &fake_clock_;
config.rtp_sender = rtp_module_->RtpSender();
config.field_trials = &field_trials_;
config.frame_encryptor = encryptor;
RTPSenderVideo rtp_sender_video(config);
FrameDependencyStructure video_structure;
video_structure.num_decode_targets = 1;
video_structure.templates = {FrameDependencyTemplate().Dtis("S")};
rtp_sender_video.SetVideoStructure(&video_structure);
// Send key frame.
RTPVideoHeader hdr;
hdr.frame_type = VideoFrameType::kVideoFrameKey;
hdr.generic.emplace().decode_target_indications =
video_structure.templates[0].decode_target_indications;
EXPECT_CALL(*encryptor,
Encrypt(_, _, Not(IsEmpty()), ElementsAreArray(kFrame), _, _));
rtp_sender_video.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, nullptr,
hdr, kDefaultExpectedRetransmissionTimeMs);
// Double-check that the packet with the dependency descriptor was sent.
ASSERT_EQ(transport_.packets_sent(), 1);
EXPECT_TRUE(transport_.last_sent_packet()
.HasExtension<RtpDependencyDescriptorExtension>());
}
TEST_P(RtpSenderVideoTest, PopulateGenericFrameDescriptor) {
const int64_t kFrameId = 100000;
uint8_t kFrame[100];
rtp_module_->RegisterRtpHeaderExtension(
RtpGenericFrameDescriptorExtension00::kUri, kGenericDescriptorId);
RTPVideoHeader hdr;
RTPVideoHeader::GenericDescriptorInfo& generic = hdr.generic.emplace();
generic.frame_id = kFrameId;
generic.temporal_index = 3;
generic.spatial_index = 2;
generic.dependencies.push_back(kFrameId - 1);
generic.dependencies.push_back(kFrameId - 500);
hdr.frame_type = VideoFrameType::kVideoFrameDelta;
rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, nullptr,
hdr, kDefaultExpectedRetransmissionTimeMs);
RtpGenericFrameDescriptor descriptor_wire;
EXPECT_EQ(1, transport_.packets_sent());
ASSERT_TRUE(transport_.last_sent_packet()
.GetExtension<RtpGenericFrameDescriptorExtension00>(
&descriptor_wire));
EXPECT_EQ(static_cast<uint16_t>(generic.frame_id), descriptor_wire.FrameId());
EXPECT_EQ(generic.temporal_index, descriptor_wire.TemporalLayer());
EXPECT_THAT(descriptor_wire.FrameDependenciesDiffs(), ElementsAre(1, 500));
EXPECT_EQ(descriptor_wire.SpatialLayersBitmask(), 0b0000'0100);
}
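// When the generic frame descriptor extension is in use, the VP8 payload
// descriptor should shrink to its minimal 1-byte form.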
void RtpSenderVideoTest::
UsesMinimalVp8DescriptorWhenGenericFrameDescriptorExtensionIsUsed(
int version) {
const int64_t kFrameId = 100000;
const size_t kFrameSize = 100;
uint8_t kFrame[kFrameSize];
rtp_module_->RegisterRtpHeaderExtension(
RtpGenericFrameDescriptorExtension00::kUri, kGenericDescriptorId);
RTPVideoHeader hdr;
hdr.codec = kVideoCodecVP8;
RTPVideoHeaderVP8& vp8 = hdr.video_type_header.emplace<RTPVideoHeaderVP8>();
vp8.pictureId = kFrameId % 0X7FFF;
vp8.tl0PicIdx = 13;
vp8.temporalIdx = 1;
vp8.keyIdx = 2;
RTPVideoHeader::GenericDescriptorInfo& generic = hdr.generic.emplace();
generic.frame_id = kFrameId;
hdr.frame_type = VideoFrameType::kVideoFrameDelta;
rtp_sender_video_.SendVideo(kPayload, VideoCodecType::kVideoCodecVP8,
kTimestamp, 0, kFrame, nullptr, hdr,
kDefaultExpectedRetransmissionTimeMs);
ASSERT_EQ(transport_.packets_sent(), 1);
// Expect that only the minimal 1-byte VP8 descriptor was generated.
EXPECT_EQ(transport_.last_sent_packet().payload_size(), 1 + kFrameSize);
}
TEST_P(RtpSenderVideoTest,
UsesMinimalVp8DescriptorWhenGenericFrameDescriptorExtensionIsUsed00) {
UsesMinimalVp8DescriptorWhenGenericFrameDescriptorExtensionIsUsed(0);
}
TEST_P(RtpSenderVideoTest,
UsesMinimalVp8DescriptorWhenGenericFrameDescriptorExtensionIsUsed01) {
UsesMinimalVp8DescriptorWhenGenericFrameDescriptorExtensionIsUsed(1);
}
TEST_P(RtpSenderVideoTest, AbsoluteCaptureTime) {
constexpr int64_t kAbsoluteCaptureTimestampMs = 12345678;
uint8_t kFrame[kMaxPacketLength];
rtp_module_->RegisterRtpHeaderExtension(AbsoluteCaptureTimeExtension::kUri,
kAbsoluteCaptureTimeExtensionId);
RTPVideoHeader hdr;
hdr.frame_type = VideoFrameType::kVideoFrameKey;
rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp,
kAbsoluteCaptureTimestampMs, kFrame, nullptr, hdr,
kDefaultExpectedRetransmissionTimeMs);
// It is expected that one and only one of the packets sent for this video
// frame has the absolute capture time header extension.
int packets_with_abs_capture_time = 0;
for (const RtpPacketReceived& packet : transport_.sent_packets()) {
auto absolute_capture_time =
packet.GetExtension<AbsoluteCaptureTimeExtension>();
if (absolute_capture_time) {
++packets_with_abs_capture_time;
EXPECT_EQ(absolute_capture_time->absolute_capture_timestamp,
Int64MsToUQ32x32(kAbsoluteCaptureTimestampMs + NtpOffsetMs()));
}
}
EXPECT_EQ(packets_with_abs_capture_time, 1);
}
Reland "Remove PlayoutDelayOracle and make RtpSenderVideo guarantee delivery" This is a reland of 4f68f5398d7fa3d47c449e99893c9bea07bf7ca2 Original change's description: > Remove PlayoutDelayOracle and make RtpSenderVideo guarantee delivery > > The PlayoutDelayOracle was responsible for making sure the PlayoutDelay > header extension was successfully propagated to the receiving side. Once > it was determined that the receiver had received a frame with the new > delay tag, it's no longer necessary to propagate. > > The issue with this implementation is that it is based on max > extended sequence number reported via RTCP, which makes it often slow > to react, could theoretically fail to produce desired outcome (max > received > X does not guarantee X was fully received and decoded), and > added a lot of code complexity. > > The guarantee of delivery can in fact be accomplished more reliably and > with less code by making sure to tag each frame until an undiscardable > frame is sent. > > This allows containing the logic fully within RTPSenderVideo. > > Bug: webrtc:11340 > Change-Id: I2d1d2b6b67f4f07b8b33336f8fcfcde724243eef > Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/168221 > Reviewed-by: Stefan Holmer <stefan@webrtc.org> > Reviewed-by: Sebastian Jansson <srte@webrtc.org> > Commit-Queue: Erik Språng <sprang@webrtc.org> > Cr-Commit-Position: refs/heads/master@{#30473} TBR=stefan@webrtc.org Bug: webrtc:11340 Change-Id: I2fdd0004121b13b96497b21e052359e31d0c477a Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/168305 Commit-Queue: Erik Språng <sprang@webrtc.org> Reviewed-by: Sebastian Jansson <srte@webrtc.org> Cr-Commit-Position: refs/heads/master@{#30479}
2020-02-06 17:10:08 +01:00
TEST_P(RtpSenderVideoTest, PopulatesPlayoutDelay) {
// Single packet frames.
constexpr size_t kPacketSize = 123;
uint8_t kFrame[kPacketSize];
rtp_module_->RegisterRtpHeaderExtension(PlayoutDelayLimits::kUri,
kPlayoutDelayExtensionId);
const PlayoutDelay kExpectedDelay = {10, 20};
// Send initial key-frame without playout delay.
RTPVideoHeader hdr;
hdr.frame_type = VideoFrameType::kVideoFrameKey;
hdr.codec = VideoCodecType::kVideoCodecVP8;
auto& vp8_header = hdr.video_type_header.emplace<RTPVideoHeaderVP8>();
vp8_header.temporalIdx = 0;
rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, nullptr,
hdr, kDefaultExpectedRetransmissionTimeMs);
EXPECT_FALSE(
transport_.last_sent_packet().HasExtension<PlayoutDelayLimits>());
// Set playout delay on a discardable frame.
hdr.playout_delay = kExpectedDelay;
hdr.frame_type = VideoFrameType::kVideoFrameDelta;
vp8_header.temporalIdx = 1;
rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, nullptr,
hdr, kDefaultExpectedRetransmissionTimeMs);
PlayoutDelay received_delay = PlayoutDelay::Noop();
ASSERT_TRUE(transport_.last_sent_packet().GetExtension<PlayoutDelayLimits>(
&received_delay));
EXPECT_EQ(received_delay, kExpectedDelay);
// Set playout delay on a non-discardable frame; the extension should still
// be populated since delivery wasn't guaranteed on the last one.
hdr.playout_delay = PlayoutDelay::Noop();  // Indicates "no change".
vp8_header.temporalIdx = 0;
rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, nullptr,
hdr, kDefaultExpectedRetransmissionTimeMs);
ASSERT_TRUE(transport_.last_sent_packet().GetExtension<PlayoutDelayLimits>(
&received_delay));
EXPECT_EQ(received_delay, kExpectedDelay);
// The next frame does not need the extension since its delivery has
// already been guaranteed.
rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, nullptr,
hdr, kDefaultExpectedRetransmissionTimeMs);
EXPECT_FALSE(
transport_.last_sent_packet().HasExtension<PlayoutDelayLimits>());
// Insert a key frame; we need to refresh the state here.
hdr.frame_type = VideoFrameType::kVideoFrameKey;
rtp_sender_video_.SendVideo(kPayload, kType, kTimestamp, 0, kFrame, nullptr,
hdr, kDefaultExpectedRetransmissionTimeMs);
ASSERT_TRUE(transport_.last_sent_packet().GetExtension<PlayoutDelayLimits>(
&received_delay));
EXPECT_EQ(received_delay, kExpectedDelay);
}
INSTANTIATE_TEST_SUITE_P(WithAndWithoutOverhead,
RtpSenderVideoTest,
::testing::Bool());
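// Fixture for the frame-transformer path: frames passed to SendEncodedImage()
// are handed to an injected FrameTransformerInterface before packetization.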
class RtpSenderVideoWithFrameTransformerTest : public ::testing::Test {
public:
RtpSenderVideoWithFrameTransformerTest()
: fake_clock_(kStartTime),
retransmission_rate_limiter_(&fake_clock_, 1000),
rtp_module_(ModuleRtpRtcpImpl2::Create([&] {
RtpRtcpInterface::Configuration config;
config.clock = &fake_clock_;
config.outgoing_transport = &transport_;
config.retransmission_rate_limiter = &retransmission_rate_limiter_;
config.field_trials = &field_trials_;
config.local_media_ssrc = kSsrc;
return config;
}())) {
rtp_module_->SetSequenceNumber(kSeqNum);
rtp_module_->SetStartTimestamp(0);
}
std::unique_ptr<RTPSenderVideo> CreateSenderWithFrameTransformer(
rtc::scoped_refptr<FrameTransformerInterface> transformer) {
RTPSenderVideo::Config config;
config.clock = &fake_clock_;
config.rtp_sender = rtp_module_->RtpSender();
config.field_trials = &field_trials_;
config.frame_transformer = transformer;
return std::make_unique<RTPSenderVideo>(config);
}
protected:
FieldTrialBasedConfig field_trials_;
SimulatedClock fake_clock_;
LoopbackTransportTest transport_;
RateLimiter retransmission_rate_limiter_;
std::unique_ptr<ModuleRtpRtcpImpl2> rtp_module_;
};
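// Builds a small EncodedImage with a 4-byte payload for the frame-transformer
// tests.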
std::unique_ptr<EncodedImage> CreateDefaultEncodedImage() {
const uint8_t data[] = {1, 2, 3, 4};
auto encoded_image = std::make_unique<EncodedImage>();
encoded_image->SetEncodedData(
webrtc::EncodedImageBuffer::Create(data, sizeof(data)));
return encoded_image;
}
TEST_F(RtpSenderVideoWithFrameTransformerTest,
CreateSenderRegistersFrameTransformer) {
rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer =
new rtc::RefCountedObject<NiceMock<MockFrameTransformer>>();
EXPECT_CALL(*mock_frame_transformer,
RegisterTransformedFrameSinkCallback(_, kSsrc));
std::unique_ptr<RTPSenderVideo> rtp_sender_video =
CreateSenderWithFrameTransformer(mock_frame_transformer);
}
TEST_F(RtpSenderVideoWithFrameTransformerTest,
DestroySenderUnregistersFrameTransformer) {
rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer =
new rtc::RefCountedObject<NiceMock<MockFrameTransformer>>();
std::unique_ptr<RTPSenderVideo> rtp_sender_video =
CreateSenderWithFrameTransformer(mock_frame_transformer);
EXPECT_CALL(*mock_frame_transformer,
UnregisterTransformedFrameSinkCallback(kSsrc));
rtp_sender_video = nullptr;
}
TEST_F(RtpSenderVideoWithFrameTransformerTest,
SendEncodedImageTransformsFrame) {
rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer =
new rtc::RefCountedObject<NiceMock<MockFrameTransformer>>();
std::unique_ptr<RTPSenderVideo> rtp_sender_video =
CreateSenderWithFrameTransformer(mock_frame_transformer);
auto encoded_image = CreateDefaultEncodedImage();
RTPVideoHeader video_header;
EXPECT_CALL(*mock_frame_transformer, Transform);
rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp,
*encoded_image, nullptr, video_header,
kDefaultExpectedRetransmissionTimeMs);
}
TEST_F(RtpSenderVideoWithFrameTransformerTest, OnTransformedFrameSendsVideo) {
rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer =
new rtc::RefCountedObject<NiceMock<MockFrameTransformer>>();
rtc::scoped_refptr<TransformedFrameCallback> callback;
EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameSinkCallback)
.WillOnce(SaveArg<0>(&callback));
std::unique_ptr<RTPSenderVideo> rtp_sender_video =
CreateSenderWithFrameTransformer(mock_frame_transformer);
ASSERT_TRUE(callback);
auto encoded_image = CreateDefaultEncodedImage();
RTPVideoHeader video_header;
video_header.frame_type = VideoFrameType::kVideoFrameKey;
ON_CALL(*mock_frame_transformer, Transform)
.WillByDefault(
[&callback](std::unique_ptr<TransformableFrameInterface> frame) {
callback->OnTransformedFrame(std::move(frame));
});
TaskQueueForTest encoder_queue;
encoder_queue.SendTask(
[&] {
rtp_sender_video->SendEncodedImage(
kPayload, kType, kTimestamp, *encoded_image, nullptr, video_header,
kDefaultExpectedRetransmissionTimeMs);
},
RTC_FROM_HERE);
encoder_queue.WaitForPreviouslyPostedTasks();
EXPECT_EQ(transport_.packets_sent(), 1);
}
TEST_F(RtpSenderVideoWithFrameTransformerTest,
TransformableFrameMetadataHasCorrectValue) {
rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer =
new rtc::RefCountedObject<NiceMock<MockFrameTransformer>>();
std::unique_ptr<RTPSenderVideo> rtp_sender_video =
CreateSenderWithFrameTransformer(mock_frame_transformer);
auto encoded_image = CreateDefaultEncodedImage();
RTPVideoHeader video_header;
video_header.width = 1280u;
video_header.height = 720u;
RTPVideoHeader::GenericDescriptorInfo& generic =
video_header.generic.emplace();
generic.frame_id = 10;
generic.temporal_index = 3;
generic.spatial_index = 2;
generic.decode_target_indications = {DecodeTargetIndication::kSwitch};
generic.dependencies = {5};
// Check that the transformable frame passed to the frame transformer has the
// correct metadata.
EXPECT_CALL(*mock_frame_transformer, Transform)
.WillOnce(
[](std::unique_ptr<TransformableFrameInterface> transformable_frame) {
auto frame =
absl::WrapUnique(static_cast<TransformableVideoFrameInterface*>(
transformable_frame.release()));
ASSERT_TRUE(frame);
auto metadata = frame->GetMetadata();
EXPECT_EQ(metadata.GetWidth(), 1280u);
EXPECT_EQ(metadata.GetHeight(), 720u);
EXPECT_EQ(metadata.GetFrameId(), 10);
EXPECT_EQ(metadata.GetTemporalIndex(), 3);
EXPECT_EQ(metadata.GetSpatialIndex(), 2);
EXPECT_THAT(metadata.GetFrameDependencies(), ElementsAre(5));
EXPECT_THAT(metadata.GetDecodeTargetIndications(),
ElementsAre(DecodeTargetIndication::kSwitch));
});
rtp_sender_video->SendEncodedImage(kPayload, kType, kTimestamp,
*encoded_image, nullptr, video_header,
kDefaultExpectedRetransmissionTimeMs);
}
} // namespace
} // namespace webrtc