// (Removed non-source artifacts left by a web code viewer:
//  "1326 lines / 38 KiB / C++ / Raw Normal View History".)

/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "rtp_utility.h"
#include <cstring> // memcpy
#include <cmath> // ceil
#include <cassert>
#include "trace.h"
#if defined(_WIN32)
#include <Windows.h> // FILETIME
#include <WinSock.h> // timeval
#include <MMSystem.h> //timeGetTime
#elif ((defined WEBRTC_LINUX) || (defined WEBRTC_MAC))
#include <sys/time.h> // gettimeofday
#include <time.h>
#endif
#if (defined(_DEBUG) && defined(_WIN32) && (_MSC_VER >= 1400))
#include <stdio.h>
#define DEBUG_PRINT(...) \
{ \
char msg[256]; \
sprintf(msg, __VA_ARGS__); \
OutputDebugString(msg); \
}
#else
// special fix for visual 2003
#define DEBUG_PRINT(exp) ((void)0)
#endif // defined(_DEBUG) && defined(_WIN32)
namespace webrtc {
/*
* Time routines.
*/
#if defined(_WIN32)
namespace ModuleRTPUtility {
// Extrapolates a FILETIME wall-clock reading from timeGetTime(), giving
// ~1 ms resolution (GetSystemTimeAsFileTime alone only advances at the
// scheduler tick). A matched (FILETIME, millisecond-counter) sample taken
// in synchronize() anchors the extrapolation.
class WindowsHelpTimer
{
public:
    // One matched pair of clock samples used as the extrapolation anchor.
    struct reference_point
    {
        FILETIME file_time;
        LARGE_INTEGER counterMS;
    };
    WindowsHelpTimer()
    {
        // set timer accuracy to 1 ms
        timeBeginPeriod(1);
        _timeInMs = 0;
        _numWrapTimeInMs = 0;
        synchronize();
    };
    virtual ~WindowsHelpTimer()
    {
        // Undo the timeBeginPeriod(1) requested in the constructor.
        timeEndPeriod(1);
    };
    // Fills `current_time` with the current wall clock as a FILETIME, built
    // from the reference point plus the milliseconds elapsed according to
    // timeGetTime() (including accumulated 32-bit wrap-arounds).
    void get_time(FILETIME& current_time)
    {
        // we can't use query performance counter due to speed stepping
        DWORD t = timeGetTime();
        // NOTE: we have a miss match in sign between _timeInMs(LONG) and
        // t(DWORD) however we only use it here without +- etc
        volatile LONG* timeInMsPtr = &_timeInMs;
        // make sure that we only inc wrapper once
        DWORD old = InterlockedExchange(timeInMsPtr, t);
        if(old > t)
        {
            // wrap
            _numWrapTimeInMs++;
        }
        // Combine the wrap count (high part) with the raw counter (low part)
        // into one 64-bit elapsed-milliseconds value relative to the anchor.
        LARGE_INTEGER elapsedMS;
        elapsedMS.HighPart = _numWrapTimeInMs;
        elapsedMS.LowPart = t;
        elapsedMS.QuadPart = elapsedMS.QuadPart - _ref_point.counterMS.QuadPart;
        //
        // Translate to 100-nanoseconds intervals (FILETIME resolution) and add to
        // reference FILETIME to get current FILETIME.
        //
        ULARGE_INTEGER filetime_ref_as_ul;
        filetime_ref_as_ul.HighPart = _ref_point.file_time.dwHighDateTime;
        filetime_ref_as_ul.LowPart = _ref_point.file_time.dwLowDateTime;
        filetime_ref_as_ul.QuadPart += (ULONGLONG)((elapsedMS.QuadPart)*1000*10);
        //
        // Copy to result
        //
        current_time.dwHighDateTime = filetime_ref_as_ul.HighPart;
        current_time.dwLowDateTime = filetime_ref_as_ul.LowPart;
    };
private:
    // Captures the (FILETIME, timeGetTime) reference pair. Spins until the
    // system FILETIME ticks over so the pair is aligned to a tick boundary.
    void synchronize()
    {
        FILETIME ft0 = { 0, 0 },
                 ft1 = { 0, 0 };
        //
        // Spin waiting for a change in system time. Get the matching
        // performance counter value for that time.
        //
        ::GetSystemTimeAsFileTime(&ft0);
        do
        {
            ::GetSystemTimeAsFileTime(&ft1);
            _ref_point.counterMS.QuadPart = ::timeGetTime();
            ::Sleep(0);
        }
        while ((ft0.dwHighDateTime == ft1.dwHighDateTime) &&
               (ft0.dwLowDateTime == ft1.dwLowDateTime));
        _ref_point.file_time = ft1;
    }
    // this needs to be long due to Windows, not an issue due to its usage
    volatile LONG _timeInMs;
    // Number of 32-bit timeGetTime() wrap-arounds seen (one per ~49.7 days).
    volatile WebRtc_UWord32 _numWrapTimeInMs;
    reference_point _ref_point;
};
// A clock reading times from the Windows API.
class WindowsSystemClock : public RtpRtcpClock {
public:
    WindowsSystemClock()
        : _helpTimer() {}
    virtual ~WindowsSystemClock() {}
    // Milliseconds since an arbitrary epoch (timeGetTime-based; wraps).
    virtual WebRtc_UWord32 GetTimeInMS();
    // Current wall clock as NTP seconds-since-1900 + 32-bit binary fraction.
    virtual void CurrentNTP(WebRtc_UWord32& secs, WebRtc_UWord32& frac);
private:
    WindowsHelpTimer _helpTimer;
};
}; // namespace ModuleRTPUtility
#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
namespace ModuleRTPUtility {
// A clock reading times from the POSIX API.
class UnixSystemClock : public RtpRtcpClock {
public:
    UnixSystemClock() {}
    virtual ~UnixSystemClock() {}
    // Milliseconds since the Unix epoch, truncated to 32 bits (gettimeofday).
    virtual WebRtc_UWord32 GetTimeInMS();
    // Current wall clock as NTP seconds-since-1900 + 32-bit binary fraction.
    virtual void CurrentNTP(WebRtc_UWord32& secs, WebRtc_UWord32& frac);
};
}; // namespace ModuleRTPUtility
#endif
namespace ModuleRTPUtility {
#if defined(_WIN32)
// Returns the current time in milliseconds from timeGetTime(); wraps every
// ~49.7 days. Resolution is 1 ms thanks to WindowsHelpTimer's
// timeBeginPeriod(1) call.
WebRtc_UWord32 WindowsSystemClock::GetTimeInMS()
{
    return timeGetTime();
}
// Fills `secs`/`frac` with the current NTP time (seconds since 1900 plus a
// 32-bit binary fraction). Uses the system time, roughly synchronised to the
// tick and extrapolated by the help timer — not the performance counter,
// which drifts with CPU speed stepping.
void WindowsSystemClock::CurrentNTP(WebRtc_UWord32& secs, WebRtc_UWord32& frac)
{
    // Offset in 100 ns units between the FILETIME epoch (1601-01-01) and the
    // Unix epoch (1970-01-01).
    const WebRtc_UWord64 FILETIME_1970 = 0x019db1ded53e8000;
    FILETIME StartTime;
    WebRtc_UWord64 Time;
    struct timeval tv;
    // we can't use query performance counter since they can change depending
    // on speed steping
    _helpTimer.get_time(StartTime);
    Time = (((WebRtc_UWord64) StartTime.dwHighDateTime) << 32) +
           (WebRtc_UWord64) StartTime.dwLowDateTime;
    // Convert the hecto-nano second time to tv format
    Time -= FILETIME_1970;
    tv.tv_sec = (WebRtc_UWord32) ( Time / (WebRtc_UWord64)10000000);
    tv.tv_usec = (WebRtc_UWord32) (( Time % (WebRtc_UWord64)10000000) / 10);
    double dtemp;
    // NTP seconds = Unix seconds + offset between 1900 and 1970 (NTP_JAN_1970).
    secs = tv.tv_sec + NTP_JAN_1970;
    dtemp = tv.tv_usec / 1e6;
    // Normalize the fractional part into [0, 1) before scaling. tv_usec is
    // already < 1e6 here, so these branches are defensive only.
    if (dtemp >= 1)
    {
        dtemp -= 1;
        secs++;
    } else if (dtemp < -1)
    {
        dtemp += 1;
        secs--;
    }
    // Scale the fractional second to the 32-bit NTP fraction field.
    dtemp *= NTP_FRAC;
    frac = (WebRtc_UWord32)dtemp;
}
#elif ((defined WEBRTC_LINUX) || (defined WEBRTC_MAC))
// Returns the current wall-clock time in milliseconds, truncated to 32 bits
// (so the value wraps roughly every 49.7 days).
WebRtc_UWord32 UnixSystemClock::GetTimeInMS()
{
    struct timeval now;
    struct timezone zone;
    gettimeofday(&now, &zone);
    const WebRtc_UWord32 milliseconds =
        (WebRtc_UWord32)(now.tv_sec * 1000 + now.tv_usec / 1000);
    return milliseconds;
}
// Use the system time.
// Fills `secs`/`frac` with the current NTP time: seconds since 1900 plus a
// 32-bit binary fraction, derived from gettimeofday().
void UnixSystemClock::CurrentNTP(WebRtc_UWord32& secs, WebRtc_UWord32& frac)
{
    struct timeval tv;
    struct timezone tz;
    tz.tz_minuteswest = 0;
    tz.tz_dsttime = 0;
    gettimeofday(&tv, &tz);
    // NTP seconds = Unix seconds + 1900-to-1970 offset.
    secs = tv.tv_sec + NTP_JAN_1970;
    double fraction = tv.tv_usec / 1e6;
    // Defensive normalization into [0, 1); tv_usec is already < 1e6 here.
    if (fraction >= 1)
    {
        fraction -= 1;
        secs++;
    }
    else if (fraction < -1)
    {
        fraction += 1;
        secs--;
    }
    // Scale the fractional second into the 32-bit NTP fraction field.
    fraction *= NTP_FRAC;
    frac = (WebRtc_UWord32)fraction;
}
#endif
// Returns the lazily-constructed, process-wide system clock for the current
// platform. Never returns NULL; the caller must not delete the result.
RtpRtcpClock* GetSystemClock()
{
    // TODO(hellner): violates the style guide (non-POD static instance).
#if defined(_WIN32)
    static WindowsSystemClock system_clock;
#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
    static UnixSystemClock system_clock;
#endif
    return &system_clock;
}
// Samples `clock` (or the shared system clock when `clock` is NULL) and
// converts the NTP reading to an RTP timestamp at `freq` ticks per second.
WebRtc_UWord32 GetCurrentRTP(RtpRtcpClock* clock, WebRtc_UWord32 freq)
{
    RtpRtcpClock* active_clock = clock;
    if (active_clock == NULL)
    {
        active_clock = GetSystemClock();
    }
    WebRtc_UWord32 ntp_secs = 0;
    WebRtc_UWord32 ntp_frac = 0;
    active_clock->CurrentNTP(ntp_secs, ntp_frac);
    return ConvertNTPTimeToRTP(ntp_secs, ntp_frac, freq);
}
// Converts an NTP timestamp (seconds + 32-bit binary fraction) to an RTP
// timestamp at `freq` ticks per second; the result wraps modulo 2^32.
WebRtc_UWord32 ConvertNTPTimeToRTP(WebRtc_UWord32 NTPsec,
                                   WebRtc_UWord32 NTPfrac,
                                   WebRtc_UWord32 freq)
{
    const float fractional_seconds = (float)NTPfrac/(float)NTP_FRAC;
    const WebRtc_UWord32 fractional_ticks =
        (WebRtc_UWord32)(fractional_seconds * freq);
    return NTPsec*freq + fractional_ticks;
}
// Converts an NTP timestamp (seconds + 32-bit binary fraction) to a
// millisecond count; the result wraps modulo 2^32.
WebRtc_UWord32 ConvertNTPTimeToMS(WebRtc_UWord32 NTPsec,
                                  WebRtc_UWord32 NTPfrac)
{
    int freq = 1000; // milliseconds per second
    const float fractional_seconds = (float)NTPfrac/(float)NTP_FRAC;
    const WebRtc_UWord32 fractional_ms =
        (WebRtc_UWord32)(fractional_seconds * freq);
    const WebRtc_UWord32 MStime = NTPsec*freq + fractional_ms;
    return MStime;
}
// Returns true when `newTimestamp` is older than `existingTimestamp`,
// accounting for 32-bit wrap-around. `*wrapped` is set to true when the two
// timestamps straddle the wrap point (one near 0, the other near 2^32).
bool OldTimestamp(uint32_t newTimestamp,
                  uint32_t existingTimestamp,
                  bool* wrapped)
{
    const bool straddlesWrap =
        (newTimestamp < 0x0000ffff && existingTimestamp > 0xffff0000) ||
        (newTimestamp > 0xffff0000 && existingTimestamp < 0x0000ffff);
    *wrapped = straddlesWrap;
    if (straddlesWrap)
    {
        // Around the wrap point the numerically smaller value is the newer one.
        return existingTimestamp < newTimestamp;
    }
    // No wrap: plain numeric comparison applies.
    return existingTimestamp > newTimestamp;
}
} // namespace ModuleRTPUtility
/*
* Misc utility routines
*/
#if defined(_WIN32)
// Case-insensitive comparison of the first `length` characters of two
// strings; returns true when they match. Windows CRT variant (_strnicmp).
bool ModuleRTPUtility::StringCompare(const WebRtc_Word8* str1,
                                     const WebRtc_Word8* str2,
                                     const WebRtc_UWord32 length)
{
    return (_strnicmp(str1, str2, length) == 0)?true: false;
}
#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
// Case-insensitive comparison of the first `length` characters of two
// strings; returns true when they match. POSIX variant (strncasecmp).
bool ModuleRTPUtility::StringCompare(const WebRtc_Word8* str1,
                                     const WebRtc_Word8* str2,
                                     const WebRtc_UWord32 length)
{
    const int result = strncasecmp(str1, str2, length);
    return result == 0;
}
#endif
#if !defined(WEBRTC_LITTLE_ENDIAN) && !defined(WEBRTC_BIG_ENDIAN)
#error Either WEBRTC_LITTLE_ENDIAN or WEBRTC_BIG_ENDIAN must be defined
#endif
/* for RTP/RTCP
All integer fields are carried in network byte order, that is, most
significant byte (octet) first. AKA big-endian.
*/
// Writes `value` to `dataBuffer` in network byte order (big-endian,
// RFC 3550), one octet at a time. The buffer must hold at least 4 bytes.
//
// Fix: the big-endian branch stored through a reinterpret_cast<UWord32*>,
// which is undefined behavior when `dataBuffer` is not 4-byte aligned (RTP
// packet buffers frequently are not) and violates strict aliasing. Byte
// stores produce the identical big-endian layout on every platform, so no
// endian #ifdef is needed.
void
ModuleRTPUtility::AssignUWord32ToBuffer(WebRtc_UWord8* dataBuffer, WebRtc_UWord32 value)
{
    dataBuffer[0] = static_cast<WebRtc_UWord8>(value>>24);
    dataBuffer[1] = static_cast<WebRtc_UWord8>(value>>16);
    dataBuffer[2] = static_cast<WebRtc_UWord8>(value>>8);
    dataBuffer[3] = static_cast<WebRtc_UWord8>(value);
}
// Writes the low 24 bits of `value` to `dataBuffer` in network byte order
// (big-endian). The buffer must hold at least 3 bytes.
//
// Fix: the big-endian branch wrote the bytes LSB-first (little-endian),
// contradicting both the file's "network byte order" contract and
// BufferToUWord24(), which unconditionally reads dataBuffer[0] as the most
// significant byte. Writing MSB-first on every platform makes the pair
// round-trip correctly everywhere.
void
ModuleRTPUtility::AssignUWord24ToBuffer(WebRtc_UWord8* dataBuffer, WebRtc_UWord32 value)
{
    dataBuffer[0] = static_cast<WebRtc_UWord8>(value>>16);
    dataBuffer[1] = static_cast<WebRtc_UWord8>(value>>8);
    dataBuffer[2] = static_cast<WebRtc_UWord8>(value);
}
// Writes `value` to `dataBuffer` in network byte order (big-endian). The
// buffer must hold at least 2 bytes.
//
// Fix: the big-endian branch stored through a reinterpret_cast<UWord16*>,
// which is undefined behavior for unaligned buffers and violates strict
// aliasing. Byte stores produce the identical layout on every platform.
void
ModuleRTPUtility::AssignUWord16ToBuffer(WebRtc_UWord8* dataBuffer, WebRtc_UWord16 value)
{
    dataBuffer[0] = static_cast<WebRtc_UWord8>(value>>8);
    dataBuffer[1] = static_cast<WebRtc_UWord8>(value);
}
// Reads a 16-bit value from `dataBuffer` in network byte order (big-endian).
//
// Fix: the big-endian branch loaded through a reinterpret_cast<const
// UWord16*>, which is undefined behavior for unaligned buffers and violates
// strict aliasing. Byte loads decode the same big-endian layout on every
// platform.
WebRtc_UWord16
ModuleRTPUtility::BufferToUWord16(const WebRtc_UWord8* dataBuffer)
{
    return static_cast<WebRtc_UWord16>((dataBuffer[0] << 8) + dataBuffer[1]);
}
// Reads a 24-bit value from `dataBuffer` in network byte order (big-endian).
WebRtc_UWord32
ModuleRTPUtility::BufferToUWord24(const WebRtc_UWord8* dataBuffer)
{
    WebRtc_UWord32 value = dataBuffer[0];
    value = (value << 8) + dataBuffer[1];
    value = (value << 8) + dataBuffer[2];
    return value;
}
// Reads a 32-bit value from `dataBuffer` in network byte order (big-endian).
//
// Fixes: (1) the big-endian branch loaded through a reinterpret_cast<const
// UWord32*> — undefined behavior for unaligned buffers and a strict-aliasing
// violation; (2) `dataBuffer[0] << 24` shifts an int-promoted byte into the
// sign bit when the byte is >= 0x80, which is undefined in C++ before
// C++14 — cast to unsigned before shifting. Byte loads decode the same
// big-endian layout on every platform.
WebRtc_UWord32
ModuleRTPUtility::BufferToUWord32(const WebRtc_UWord8* dataBuffer)
{
    return (static_cast<WebRtc_UWord32>(dataBuffer[0]) << 24) +
           (static_cast<WebRtc_UWord32>(dataBuffer[1]) << 16) +
           (static_cast<WebRtc_UWord32>(dataBuffer[2]) << 8) +
           static_cast<WebRtc_UWord32>(dataBuffer[3]);
}
// Returns 2 raised to `exp`. `exp` must be < 32.
//
// Fix: `1 << exp` shifts a signed int, so exp == 31 shifts into the sign
// bit (undefined behavior before C++14). Shift an unsigned 1 instead, and
// assert the precondition in debug builds.
WebRtc_UWord32
ModuleRTPUtility::pow2(WebRtc_UWord8 exp)
{
    assert(exp < 32);
    return static_cast<WebRtc_UWord32>(1) << exp;
}
// Records the codec type and zero-initializes the matching member of the
// `info` union so stale fields from a previously parsed packet cannot leak
// into the new parse result.
void
ModuleRTPUtility::RTPPayload::SetType(RtpVideoCodecTypes videoType)
{
    type = videoType;
    switch (type)
    {
    case kRtpNoVideo:
        // Generic payload: no codec-specific state to reset.
        break;
    case kRtpH263Video:
    case kRtpH2631998Video:
    {
        info.H263.hasPictureStartCode = false;
        info.H263.insert2byteStartCode = false;
        info.H263.hasPbit = false;
        info.H263.frameWidth = 0;
        info.H263.frameHeight = 0;
        info.H263.startBits = 0;
        info.H263.endBits = 0;
        info.H263.data = 0;
        info.H263.dataLength = 0;
        break;
    }
    case kRtpMpeg4Video:
    {
        info.MPEG4.isFirstPacket = false;
        info.MPEG4.data = 0;
        info.MPEG4.dataLength = 0;
        break;
    }
    case kRtpVp8Video:
    {
        info.VP8.nonReferenceFrame = false;
        info.VP8.beginningOfPartition = false;
        info.VP8.partitionID = 0;
        info.VP8.hasPictureID = false;
        info.VP8.hasTl0PicIdx = false;
        info.VP8.hasTID = false;
        info.VP8.hasKeyIdx = false;
        // -1 marks "not present in this packet" for the optional fields.
        info.VP8.pictureID = -1;
        info.VP8.tl0PicIdx = -1;
        info.VP8.tID = -1;
        info.VP8.layerSync = false;
        info.VP8.frameWidth = 0;
        info.VP8.frameHeight = 0;
        break;
    }
    default:
        break;
    }
}
// Wraps (does not copy) the packet buffer `rtpData` of `rtpDataLength`
// bytes; the buffer must outlive this parser. A NULL buffer yields an empty
// [begin, end) range so later length checks fail cleanly.
ModuleRTPUtility::RTPHeaderParser::RTPHeaderParser(const WebRtc_UWord8* rtpData,
                                                   const WebRtc_UWord32 rtpDataLength):
    _ptrRTPDataBegin(rtpData),
    _ptrRTPDataEnd(rtpData ? (rtpData + rtpDataLength) : NULL)
{
}
// The parser only views the caller's buffer; nothing to release.
ModuleRTPUtility::RTPHeaderParser::~RTPHeaderParser()
{
}
bool
ModuleRTPUtility::RTPHeaderParser::RTCP() const
{
// 72 to 76 is reserved for RTP
// 77 to 79 is not reserver but they are not assigned we will block them
// for RTCP 200 SR == marker bit + 72
// for RTCP 204 APP == marker bit + 76
/*
* RTCP
*
* FIR full INTRA-frame request 192 [RFC2032] supported
* NACK negative acknowledgement 193 [RFC2032]
* IJ Extended inter-arrival jitter report 195 [RFC-ietf-avt-rtp-toffset-07.txt] http://tools.ietf.org/html/draft-ietf-avt-rtp-toffset-07
* SR sender report 200 [RFC3551] supported
* RR receiver report 201 [RFC3551] supported
* SDES source description 202 [RFC3551] supported
* BYE goodbye 203 [RFC3551] supported
* APP application-defined 204 [RFC3551] ignored
* RTPFB Transport layer FB message 205 [RFC4585] supported
* PSFB Payload-specific FB message 206 [RFC4585] supported
* XR extended report 207 [RFC3611] supported
*/
/* 205 RFC 5104
* FMT 1 NACK supported
* FMT 2 reserved
* FMT 3 TMMBR supported
* FMT 4 TMMBN supported
*/
/* 206 RFC 5104
* FMT 1: Picture Loss Indication (PLI) supported
* FMT 2: Slice Lost Indication (SLI)
* FMT 3: Reference Picture Selection Indication (RPSI)
* FMT 4: Full Intra Request (FIR) Command supported
* FMT 5: Temporal-Spatial Trade-off Request (TSTR)
* FMT 6: Temporal-Spatial Trade-off Notification (TSTN)
* FMT 7: Video Back Channel Message (VBCM)
* FMT 15: Application layer FB message
*/
const WebRtc_UWord8 payloadType = _ptrRTPDataBegin[1];
bool RTCP = false;
// check if this is a RTCP packet
switch(payloadType)
{
case 192:
RTCP = true;
break;
case 193:
// not supported
// pass through and check for a potential RTP packet
break;
case 195:
case 200:
case 201:
case 202:
case 203:
case 204:
case 205:
case 206:
case 207:
RTCP = true;
break;
}
return RTCP;
}
// Parses the fixed RTP header (RFC 3550, section 5.1), the CSRC list, and —
// when the X bit is set — the header-extension block into `parsedPacket`.
// `ptrExtensionMap` (may be NULL) maps one-byte-header extension ids to
// known extension types. Returns false on truncated or non-version-2 input.
bool
ModuleRTPUtility::RTPHeaderParser::Parse(
    WebRtcRTPHeader& parsedPacket, RtpHeaderExtensionMap* ptrExtensionMap) const
{
    const ptrdiff_t length = _ptrRTPDataEnd - _ptrRTPDataBegin;
    // A valid RTP packet carries at least the 12-byte fixed header.
    if (length < 12)
    {
        return false;
    }
    const WebRtc_UWord8  V  = _ptrRTPDataBegin[0] >> 6 ;                          // Version
    const bool           P  = ((_ptrRTPDataBegin[0] & 0x20) == 0) ? false : true; // Padding
    const bool           X  = ((_ptrRTPDataBegin[0] & 0x10) == 0) ? false : true; // eXtension
    const WebRtc_UWord8  CC = _ptrRTPDataBegin[0] & 0x0f;                         // CSRC count
    const bool           M  = ((_ptrRTPDataBegin[1] & 0x80) == 0) ? false : true; // Marker
    const WebRtc_UWord8  PT = _ptrRTPDataBegin[1] & 0x7f;                         // Payload type
    const WebRtc_UWord16 sequenceNumber = (_ptrRTPDataBegin[2] << 8) + _ptrRTPDataBegin[3];
    const WebRtc_UWord8* ptr = &_ptrRTPDataBegin[4];
    // Timestamp and SSRC are big-endian 32-bit fields.
    WebRtc_UWord32 RTPTimestamp = *ptr++ << 24;
    RTPTimestamp += *ptr++ << 16;
    RTPTimestamp += *ptr++ << 8;
    RTPTimestamp += *ptr++;
    WebRtc_UWord32 SSRC = *ptr++ << 24;
    SSRC += *ptr++ << 16;
    SSRC += *ptr++ << 8;
    SSRC += *ptr++;
    // Only RTP version 2 is valid.
    if (V != 2)
    {
        return false;
    }
    const WebRtc_UWord8 CSRCocts = CC * 4; // CSRC list size in octets
    if ((ptr + CSRCocts) > _ptrRTPDataEnd)
    {
        return false;
    }
    parsedPacket.header.markerBit      = M;
    parsedPacket.header.payloadType    = PT;
    parsedPacket.header.sequenceNumber = sequenceNumber;
    parsedPacket.header.timestamp      = RTPTimestamp;
    parsedPacket.header.ssrc           = SSRC;
    parsedPacket.header.numCSRCs       = CC;
    // With the P bit set, the last octet of the packet holds the padding
    // length (RFC 3550, section 5.1).
    parsedPacket.header.paddingLength  = P ? *(_ptrRTPDataEnd - 1) : 0;
    for (unsigned int i = 0; i < CC; ++i)
    {
        // Each CSRC is a big-endian 32-bit field.
        WebRtc_UWord32 CSRC = *ptr++ << 24;
        CSRC += *ptr++ << 16;
        CSRC += *ptr++ << 8;
        CSRC += *ptr++;
        parsedPacket.header.arrOfCSRCs[i] = CSRC;
    }
    parsedPacket.type.Audio.numEnergy = parsedPacket.header.numCSRCs;
    parsedPacket.header.headerLength  = 12 + CSRCocts;
    // If in effect, MAY be omitted for those packets for which the offset
    // is zero.
    parsedPacket.extension.transmissionTimeOffset = 0;
    if (X)
    {
        /* RTP header extension, RFC 3550.
         0                   1                   2                   3
         0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
        +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
        |      defined by profile       |           length              |
        +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
        |                        header extension                       |
        |                             ....                              |
        */
        const ptrdiff_t remain = _ptrRTPDataEnd - ptr;
        if (remain < 4)
        {
            return false;
        }
        parsedPacket.header.headerLength += 4;
        WebRtc_UWord16 definedByProfile = *ptr++ << 8;
        definedByProfile += *ptr++;
        WebRtc_UWord16 XLen = *ptr++ << 8;
        XLen += *ptr++; // in 32 bit words
        XLen *= 4; // in octs
        if (remain < (4 + XLen))
        {
            return false;
        }
        // Only the RFC 5285 one-byte-header form is understood; other
        // profiles are skipped but still counted into headerLength.
        if (definedByProfile == RTP_ONE_BYTE_HEADER_EXTENSION)
        {
            const WebRtc_UWord8* ptrRTPDataExtensionEnd = ptr + XLen;
            ParseOneByteExtensionHeader(parsedPacket,
                                        ptrExtensionMap,
                                        ptrRTPDataExtensionEnd,
                                        ptr);
        }
        parsedPacket.header.headerLength += XLen;
    }
    return true;
}
// Walks an RFC 5285 one-byte-header extension block, filling in the
// extension fields of `parsedPacket` for ids the extension map recognises.
// Parsing stops (without failing the packet) on the reserved id 15, on an
// unmapped id, or on an extension type that is not implemented.
void ModuleRTPUtility::RTPHeaderParser::ParseOneByteExtensionHeader(
    WebRtcRTPHeader& parsedPacket,
    const RtpHeaderExtensionMap* ptrExtensionMap,
    const WebRtc_UWord8* ptrRTPDataExtensionEnd,
    const WebRtc_UWord8* ptr) const
{
    if (!ptrExtensionMap) {
        WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, -1, "No extension map.");
        return;
    }
    while (ptrRTPDataExtensionEnd - ptr > 0)
    {
        //  0
        //  0 1 2 3 4 5 6 7
        // +-+-+-+-+-+-+-+-+
        // |  ID   |  len  |
        // +-+-+-+-+-+-+-+-+
        const WebRtc_UWord8 id = (*ptr & 0xf0) >> 4;
        // NOTE: per RFC 5285 `len` encodes (data length - 1) bytes.
        const WebRtc_UWord8 len = (*ptr & 0x0f);
        ptr++;
        if (id == 15) {
            // Reserved id: the rest of the block must be ignored.
            WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, -1,
                         "Ext id: 15 encountered, parsing terminated.");
            return;
        }
        RTPExtensionType type;
        if (ptrExtensionMap->GetType(id, &type) != 0) {
            WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, -1,
                         "Failed to find extension id: %d", id);
            return;
        }
        switch (type)
        {
        case TRANSMISSION_TIME_OFFSET:
        {
            if (len != 2)
            {
                WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, -1,
                             "Incorrect transmission time offset len: %d", len);
                return;
            }
            //  0                   1                   2                   3
            //  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
            // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
            // |  ID   | len=2 |              transmission offset              |
            // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
            // 24-bit big-endian signed offset.
            WebRtc_Word32 transmissionTimeOffset = *ptr++ << 16;
            transmissionTimeOffset += *ptr++ << 8;
            transmissionTimeOffset += *ptr++;
            parsedPacket.extension.transmissionTimeOffset = transmissionTimeOffset;
            break;
        }
        //case RTP_AUDIO_LEVEL_ID;
        //{
        //   --- Only used for debugging ---
        //  0                   1                   2                   3
        //  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
        // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
        // |  ID   | len=0 |V|   level     |      0x00     |      0x00     |
        // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
        //
        // Parse out the fields but only use it for debugging for now.
        //const WebRtc_UWord8 V = (*ptr & 0x80) >> 7;
        //const WebRtc_UWord8 level = (*ptr & 0x7f);
        //DEBUG_PRINT("RTP_AUDIO_LEVEL_UNIQUE_ID: ID=%u, len=%u, V=%u, level=%u",
        //            ID, len, V, level);
        //}
        default:
        {
            WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, -1,
                         "Extension type not implemented.");
            return;
        }
        }
        // Skip any zero padding bytes before the next element header.
        WebRtc_UWord8 num_bytes = ParsePaddingBytes(ptrRTPDataExtensionEnd, ptr);
        ptr += num_bytes;
    }
}
// Counts consecutive zero (padding) bytes starting at `ptr`, stopping at the
// first non-zero byte or at `ptrRTPDataExtensionEnd`.
WebRtc_UWord8 ModuleRTPUtility::RTPHeaderParser::ParsePaddingBytes(
    const WebRtc_UWord8* ptrRTPDataExtensionEnd,
    const WebRtc_UWord8* ptr) const {
    WebRtc_UWord8 zero_count = 0;
    for (const WebRtc_UWord8* cursor = ptr;
         ptrRTPDataExtensionEnd - cursor > 0;
         ++cursor) {
        if (*cursor != 0) {
            break;
        }
        ++zero_count;
    }
    return zero_count;
}
// RTP payload parser: wraps (does not copy) one packet's payload for
// codec-specific parsing; the buffer must outlive the parser. `id` is only
// used to tag trace output.
ModuleRTPUtility::RTPPayloadParser::RTPPayloadParser(
    const RtpVideoCodecTypes videoType,
    const WebRtc_UWord8* payloadData,
    const WebRtc_UWord16 payloadDataLength,
    const WebRtc_Word32 id)
    :
    _id(id),
    _dataPtr(payloadData),
    _dataLength(payloadDataLength),
    _videoType(videoType)
{
}
// The parser only views the caller's payload buffer; nothing to release.
ModuleRTPUtility::RTPPayloadParser::~RTPPayloadParser()
{
}
// Resets the payload union for the configured codec, then dispatches to the
// codec-specific parser. Returns false for unknown codec types.
bool
ModuleRTPUtility::RTPPayloadParser::Parse( RTPPayload& parsedPacket) const
{
    parsedPacket.SetType(_videoType);
    if (_videoType == kRtpNoVideo)
    {
        return ParseGeneric(parsedPacket);
    }
    if (_videoType == kRtpH263Video)
    {
        return ParseH263(parsedPacket);
    }
    if (_videoType == kRtpH2631998Video)
    {
        return ParseH2631998(parsedPacket);
    }
    if (_videoType == kRtpMpeg4Video)
    {
        return ParseMPEG4(parsedPacket);
    }
    if (_videoType == kRtpVp8Video)
    {
        return ParseVP8(parsedPacket);
    }
    return false;
}
// Generic (non-codec) payloads have no structure this parser understands;
// always reports failure so the caller handles the payload as raw data.
bool
ModuleRTPUtility::RTPPayloadParser::ParseGeneric(RTPPayload& /*parsedPacket*/) const
{
    return false;
}
// Parses an RFC 2190 (H.263) payload header (Mode A/B/C), extracting frame
// type, SBIT/EBIT, optional picture start code dimensions, and the payload
// data pointer/length into `parsedPacket`.
bool
ModuleRTPUtility::RTPPayloadParser::ParseH263(RTPPayload& parsedPacket) const
{
    if(_dataLength <= 2)
    {
        // data length sanity check.
        return false;
    }
    const WebRtc_UWord8 header1 = _dataPtr[0];
    const WebRtc_UWord8 header2 = _dataPtr[1];
    // Mode A: the I bit lives in byte 2.
    parsedPacket.frameType = ((header2 & 0x10) == 0) ? kIFrame : kPFrame;
    unsigned int h263HeaderLength = 0;
    if ((header1 & 0x80) == 0)
    {
        // Mode A
        h263HeaderLength = 4;
    }
    else
    {
        // In Mode B and Mode C, I bit is in 5th byte of header
        // NOTE(review): _dataPtr[4] is read here while only _dataLength > 2
        // has been checked so far — looks like a potential over-read for
        // 3/4-byte Mode B/C packets; confirm upstream callers guarantee more.
        const WebRtc_UWord8 header5 = _dataPtr[4];
        parsedPacket.frameType = ((header5 & 0x80) == 0) ? kIFrame : kPFrame;
        if((header1 & 0x40) == 0)
        {
            // Mode B
            // IMPROVEMENT use the information in the H263 header?
            // GQuant of the first MB
            h263HeaderLength = 8;
        }else
        {
            // Mode C
            h263HeaderLength = 12;
        }
    }
    if (_dataLength < h263HeaderLength)
    {
        // Received empty H263 packet
        return false;
    }
    // Get SBIT and EBIT
    WebRtc_UWord8 sbit = 0;
    WebRtc_UWord8 ebit = 0;
    const WebRtc_UWord8 seBit = header1 & 0x3f;
    if (seBit)
    {
        // We got SBIT or EBIT
        sbit = (seBit >> 3) & 0x07;
        ebit = seBit & 0x07;
    }
    const bool isH263PictureStartCode = H263PictureStartCode(_dataPtr + h263HeaderLength);
    if (isH263PictureStartCode)
    {
        // parse out real size and inform the decoder
        WebRtc_UWord16 width = 0;
        WebRtc_UWord16 height = 0;
        GetH263FrameSize(_dataPtr + h263HeaderLength, width, height);
        parsedPacket.info.H263.hasPictureStartCode = true;
        parsedPacket.info.H263.frameWidth = width;
        parsedPacket.info.H263.frameHeight = height;
    }
    parsedPacket.info.H263.startBits = sbit;
    parsedPacket.info.H263.endBits = ebit;
    parsedPacket.info.H263.data = _dataPtr + h263HeaderLength;
    parsedPacket.info.H263.dataLength = _dataLength - h263HeaderLength;
    parsedPacket.info.H263.insert2byteStartCode = false; // not used in this mode
    parsedPacket.info.H263.hasPbit = true; // not used in this mode
    return true;
}
// Parses an RFC 4629 (H.263-1998) payload header: the P bit (picture start),
// VRC flag (rejected — not supported), and optional extra picture header of
// `pLen` bytes. Fills frame type, dimensions and data pointer/length.
bool
ModuleRTPUtility::RTPPayloadParser::ParseH2631998( RTPPayload& parsedPacket) const
{
    unsigned int h2631998HeaderLength = 2;
    if(_dataLength <= h2631998HeaderLength)
    {
        // Received empty H263 (1998) packet
        return false;
    }
    const WebRtc_UWord8 header1 = _dataPtr[0];
    const WebRtc_UWord8 header2 = _dataPtr[1];
    parsedPacket.frameType = kPFrame;
    WebRtc_UWord8 p = (header1 >> 2) & 0x01; // picture start or a picture segment
    WebRtc_UWord8 vrc = header1 & 0x02; // Video Redundancy Coding (VRC)
    // Length, in bytes, of the extra picture header
    WebRtc_UWord8 pLen = ((header1 & 0x01) << 5) + ((header2 >> 3) & 0x1f);
    //WebRtc_UWord8 peBit = (header2 & 0x07); // number of bits that shall be ignored in the last byte of the extra picture header
    if (vrc)
    {
        // VRC streams are not supported.
        return false;
    }
    if (pLen > 0)
    {
        h2631998HeaderLength += pLen;
        //get extra header
        // IMPROVEMENT we don't use the redundant picture header
    }
    if (_dataLength <= h2631998HeaderLength)
    {
        // Received empty H263 (1998) packet
        return false;
    }
    // if p == 0
    // it's a follow-on packet, hence it's not independently decodable
    // (With P set, the two zero start-code bytes were stripped by the
    // packetizer, so only the third start-code byte is checked.)
    const bool isH263PictureStartCode = H263PictureStartCode(_dataPtr + h2631998HeaderLength, (p>0)?true:false);
    if (isH263PictureStartCode)
    {
        // parse out real size and inform the decoder
        WebRtc_UWord16 width = 0;
        WebRtc_UWord16 height = 0;
        if(p)
        {
            // The "- 2" re-synthesizes the position of the stripped
            // start-code bytes expected by the header readers.
            parsedPacket.frameType = GetH263FrameType(_dataPtr + h2631998HeaderLength - 2);
            GetH263FrameSize(_dataPtr + h2631998HeaderLength - 2, width, height);
        }
        parsedPacket.info.H263.hasPictureStartCode = true;
        parsedPacket.info.H263.frameWidth = width;
        parsedPacket.info.H263.frameHeight = height;
    }
    parsedPacket.info.H263.hasPbit = (p>0)?true:false;
    parsedPacket.info.H263.insert2byteStartCode = (p>0)?true:false;
    parsedPacket.info.H263.data = _dataPtr + h2631998HeaderLength;
    parsedPacket.info.H263.dataLength = _dataLength - h2631998HeaderLength;
    return true;
}
// Parses an MPEG-4 video payload: detects a leading 00 00 01 start code
// (first packet of a frame) and derives the frame type from the VOP
// coding-type bit. The payload is passed through untouched.
bool
ModuleRTPUtility::RTPPayloadParser::ParseMPEG4(
    RTPPayload& parsedPacket) const
{
    // Need at least 6 bytes so the start-code check and the byte-4 read
    // below stay in bounds.
    if (_dataLength <= 5)
    {
        // Received empty MPEG4 packet
        return false;
    }
    parsedPacket.frameType = kPFrame;
    const bool hasStartCode =
        _dataPtr[0] == 0 && _dataPtr[1] == 0 && _dataPtr[2] == 1;
    if (hasStartCode)
    {
        parsedPacket.info.MPEG4.isFirstPacket = true;
        const bool interCoded = (_dataPtr[4] & 0x40) != 0;
        if (!interCoded)
        {
            parsedPacket.frameType = kIFrame;
        }
    }
    parsedPacket.info.MPEG4.data = _dataPtr;
    parsedPacket.info.MPEG4.dataLength = _dataLength;
    return true;
}
//
// VP8 format:
//
// Payload descriptor
// 0 1 2 3 4 5 6 7
// +-+-+-+-+-+-+-+-+
// |X|R|N|S|PartID | (REQUIRED)
// +-+-+-+-+-+-+-+-+
// X: |I|L|T|K| RSV | (OPTIONAL)
// +-+-+-+-+-+-+-+-+
// I: | PictureID | (OPTIONAL)
// +-+-+-+-+-+-+-+-+
// L: | TL0PICIDX | (OPTIONAL)
// +-+-+-+-+-+-+-+-+
// T/K: |TID:Y| KEYIDX | (OPTIONAL)
// +-+-+-+-+-+-+-+-+
//
// Payload header (considered part of the actual payload, sent to decoder)
// 0 1 2 3 4 5 6 7
// +-+-+-+-+-+-+-+-+
// |Size0|H| VER |P|
// +-+-+-+-+-+-+-+-+
// | ... |
// + +
// Parses the VP8 RTP payload descriptor (see the format diagram above):
// the mandatory first byte, the optional extension block when X is set, and
// the P bit of the payload header for frame-type detection. On success,
// `parsedPacket.info.VP8.data` points past the descriptor.
bool
ModuleRTPUtility::RTPPayloadParser::ParseVP8(RTPPayload& parsedPacket) const
{
    RTPPayloadVP8 *vp8 = &parsedPacket.info.VP8;
    const WebRtc_UWord8 *dataPtr = _dataPtr;
    int dataLength = _dataLength;
    // Parse mandatory first byte of payload descriptor
    bool extension = (*dataPtr & 0x80) ? true : false;            // X bit
    vp8->nonReferenceFrame = (*dataPtr & 0x20) ? true : false;    // N bit
    vp8->beginningOfPartition = (*dataPtr & 0x10) ? true : false; // S bit
    vp8->partitionID = (*dataPtr & 0x0F);                         // PartID field
    // Advance dataPtr and decrease remaining payload size
    dataPtr++;
    dataLength--;
    if (extension)
    {
        const int parsedBytes = ParseVP8Extension(vp8, dataPtr, dataLength);
        if (parsedBytes < 0) return false;
        dataPtr += parsedBytes;
        dataLength -= parsedBytes;
    }
    // The descriptor must be followed by at least one payload byte.
    if (dataLength <= 0)
    {
        WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
                     "Error parsing VP8 payload descriptor; payload too short");
        return false;
    }
    // Read P bit from payload header (only at beginning of first partition)
    if (dataLength > 0 && vp8->beginningOfPartition && vp8->partitionID == 0)
    {
        // P bit clear => key frame.
        parsedPacket.frameType = (*dataPtr & 0x01) ? kPFrame : kIFrame;
    }
    else
    {
        parsedPacket.frameType = kPFrame;
    }
    if (0 != ParseVP8FrameSize(parsedPacket, dataPtr, dataLength))
    {
        return false;
    }
    parsedPacket.info.VP8.data = dataPtr;
    parsedPacket.info.VP8.dataLength = dataLength;
    return true;
}
// Extracts frame width/height from the uncompressed VP8 key-frame header at
// `dataPtr`. No-op (returns 0) for non-key frames; returns -1 when a key
// frame is too short to contain the 10-byte uncompressed header.
int ModuleRTPUtility::RTPPayloadParser::ParseVP8FrameSize(
    RTPPayload &parsedPacket,
    const WebRtc_UWord8 *dataPtr,
    int dataLength) const
{
    if (parsedPacket.frameType != kIFrame)
    {
        // Included in payload header for I-frames.
        return 0;
    }
    if (dataLength < 10)
    {
        // For an I-frame we should always have the uncompressed VP8 header
        // in the beginning of the partition.
        return -1;
    }
    RTPPayloadVP8 *vp8 = &parsedPacket.info.VP8;
    // Dimensions are 14-bit little-endian fields at bytes 6-7 and 8-9.
    vp8->frameWidth = ((dataPtr[7] << 8) + dataPtr[6]) & 0x3FFF;
    vp8->frameHeight = ((dataPtr[9] << 8) + dataPtr[8]) & 0x3FFF;
    return 0;
}
// Parses the optional X-byte of the VP8 payload descriptor and the optional
// fields it announces (PictureID, TL0PICIDX, TID/KEYIDX). Returns the number
// of bytes consumed, or -1 if the data runs out mid-descriptor.
int ModuleRTPUtility::RTPPayloadParser::ParseVP8Extension(
    RTPPayloadVP8 *vp8,
    const WebRtc_UWord8 *dataPtr,
    int dataLength) const
{
    int parsedBytes = 0;
    if (dataLength <= 0) return -1;
    // Optional X field is present
    vp8->hasPictureID = (*dataPtr & 0x80) ? true : false; // I bit
    vp8->hasTl0PicIdx = (*dataPtr & 0x40) ? true : false; // L bit
    vp8->hasTID = (*dataPtr & 0x20) ? true : false;       // T bit
    vp8->hasKeyIdx = (*dataPtr & 0x10) ? true : false;    // K bit
    // Advance dataPtr and decrease remaining payload size
    dataPtr++;
    parsedBytes++;
    dataLength--;
    // The optional fields follow in the fixed I, L, T/K order.
    if (vp8->hasPictureID)
    {
        if (ParseVP8PictureID(vp8, &dataPtr, &dataLength, &parsedBytes) != 0)
        {
            return -1;
        }
    }
    if (vp8->hasTl0PicIdx)
    {
        if (ParseVP8Tl0PicIdx(vp8, &dataPtr, &dataLength, &parsedBytes) != 0)
        {
            return -1;
        }
    }
    if (vp8->hasTID || vp8->hasKeyIdx)
    {
        if (ParseVP8TIDAndKeyIdx(vp8, &dataPtr, &dataLength, &parsedBytes) != 0)
        {
            return -1;
        }
    }
    return parsedBytes;
}
// Parses the optional PictureID field: 7 bits, or 15 bits when the leading
// M bit is set. Advances all three cursors; returns -1 on truncation.
int
ModuleRTPUtility::RTPPayloadParser::ParseVP8PictureID(
    RTPPayloadVP8 *vp8,
    const WebRtc_UWord8 **dataPtr,
    int *dataLength,
    int *parsedBytes) const
{
    if (*dataLength <= 0) return -1;
    vp8->pictureID = (**dataPtr & 0x7F);
    if (**dataPtr & 0x80)
    {
        // M bit set: a second byte extends the id to 15 bits.
        (*dataPtr)++;
        (*parsedBytes)++;
        if (--(*dataLength) <= 0) return -1;
        // PictureID is 15 bits
        vp8->pictureID = (vp8->pictureID << 8) + **dataPtr;
    }
    (*dataPtr)++;
    (*parsedBytes)++;
    (*dataLength)--;
    return 0;
}
// Parses the one-byte TL0PICIDX field (present when the L bit is set).
// Advances all three cursors; returns -1 on truncation.
int
ModuleRTPUtility::RTPPayloadParser::ParseVP8Tl0PicIdx(
    RTPPayloadVP8 *vp8,
    const WebRtc_UWord8 **dataPtr,
    int *dataLength,
    int *parsedBytes) const
{
    if (*dataLength <= 0)
    {
        return -1;
    }
    vp8->tl0PicIdx = **dataPtr;
    ++(*dataPtr);
    ++(*parsedBytes);
    --(*dataLength);
    return 0;
}
// Parses the shared TID/Y/KEYIDX byte (present when the T or K bit is set).
// Only the sub-fields whose flag bits are set are stored. Advances all
// three cursors; returns -1 on truncation.
int ModuleRTPUtility::RTPPayloadParser::ParseVP8TIDAndKeyIdx(
    RTPPayloadVP8 *vp8,
    const WebRtc_UWord8 **dataPtr,
    int *dataLength,
    int *parsedBytes) const
{
    if (*dataLength <= 0) return -1;
    if (vp8->hasTID)
    {
        vp8->tID = ((**dataPtr >> 6) & 0x03);
        vp8->layerSync = (**dataPtr & 0x20) ? true : false; // Y bit
    }
    if (vp8->hasKeyIdx)
    {
        vp8->keyIdx = (**dataPtr & 0x1F);
    }
    (*dataPtr)++;
    (*parsedBytes)++;
    (*dataLength)--;
    return 0;
}
bool
ModuleRTPUtility::RTPPayloadParser::H263PictureStartCode(const WebRtc_UWord8* data, const bool skipFirst2bytes) const
{
// data is at least 3 bytes!
if(skipFirst2bytes)
{
const WebRtc_UWord8 h3 = *(data);
if((h3 & 0x7C) == 0 && (h3 & 0x80))
{
return true;
}
}else
{
// first part of the frame
const WebRtc_UWord8 h1 = *(data);
const WebRtc_UWord8 h2 = *(data+1);
const WebRtc_UWord8 h3 = *(data+2);
if(h1 == 0 && h2 == 0 && (h3 & 0x7C) == 0 && (h3 & 0x80))
{
return true;
}
}
return false;
}
void
ModuleRTPUtility::RTPPayloadParser::GetH263FrameSize(const WebRtc_UWord8* inputVideoBuffer,
WebRtc_UWord16& width,
WebRtc_UWord16& height) const
{
WebRtc_UWord8 uiH263PTypeFmt = (inputVideoBuffer[4] >> 2) & 0x07;
if (uiH263PTypeFmt == 7) //extended PTYPE (for QQVGA, QVGA, VGA)
{
const WebRtc_UWord8 uiH263PlusPTypeUFEP = ((inputVideoBuffer[4] & 0x03) << 1) + ((inputVideoBuffer[5] >> 7) & 0x01);
if (uiH263PlusPTypeUFEP == 1) //optional part included
{
WebRtc_UWord8 uiH263PlusPTypeFmt = (inputVideoBuffer[5] >> 4) & 0x07;
if(uiH263PlusPTypeFmt == 6) //custom picture format
{
const WebRtc_UWord16 uiH263PlusPTypeCPFmt_PWI = ((inputVideoBuffer[9] & 0x7F) << 2) + ((inputVideoBuffer[10] >> 6) & 0x03);
const WebRtc_UWord16 uiH263PlusPTypeCPFmt_PHI = ((inputVideoBuffer[10] & 0x1F) << 4) + ((inputVideoBuffer[11] >> 4) & 0x0F);
width = (uiH263PlusPTypeCPFmt_PWI + 1)*4;
width = uiH263PlusPTypeCPFmt_PHI*4;
}
else
{
switch (uiH263PlusPTypeFmt)
{
case 1: // SQCIF
width = 128;
height = 96;
break;
case 2: // QCIF
width = 176;
height = 144;
break;
case 3: // CIF
width = 352;
height = 288;
break;
case 4: // 4CIF
width = 704;
height = 576;
break;
case 5: // 16CIF
width = 1408;
height = 1152;
break;
default:
assert(false);
break;
}
}
}
}
else
{
switch (uiH263PTypeFmt)
{
case 1: // SQCIF
width = 128;
height = 96;
break;
case 2: // QCIF
width = 176;
height = 144;
break;
case 3: // CIF
width = 352;
height = 288;
break;
case 4: // 4CIF
width = 704;
height = 576;
break;
case 5: // 16CIF
width = 1408;
height = 1152;
break;
default:
assert(false);
break;
}
}
}
// Determines whether the H.263 picture header at `inputVideoBuffer` (which
// must begin with a picture start code) codes an I-frame or a P-frame, for
// both the base PTYPE layout and the H.263+ PLUSPTYPE layout.
ModuleRTPUtility::FrameTypes
ModuleRTPUtility::RTPPayloadParser::GetH263FrameType(
    const WebRtc_UWord8* inputVideoBuffer) const
{
    FrameTypes frameType = kPFrame;
    const WebRtc_UWord8 uiH263PTypeFmt = (inputVideoBuffer[4] >> 2) & 0x07;
    // Default to "not INTRA" so an unmatched UFEP value yields kPFrame.
    WebRtc_UWord8 pType = 1;
    if (uiH263PTypeFmt != 7)
    {
        // Base PTYPE: picture coding type is bit 9.
        pType = (inputVideoBuffer[4] >> 1) & 0x01;
    }
    else
    {
        // Extended PTYPE: the coding-type position depends on UFEP.
        const WebRtc_UWord8 uiH263PlusPTypeUFEP = ((inputVideoBuffer[4] & 0x03) << 1) + ((inputVideoBuffer[5] >> 7) & 0x01);
        if (uiH263PlusPTypeUFEP == 1)
        {
            pType = ((inputVideoBuffer[7] >> 2) & 0x07);
        }
        else if (uiH263PlusPTypeUFEP == 0)
        {
            pType = ((inputVideoBuffer[5] >> 4) & 0x07);
        }
    }
    // Coding type 0 means INTRA.
    if (pType == 0)
    {
        frameType = kIFrame;
    }
    return frameType;
}
} // namespace webrtc