Removing unused code.

Review URL: https://webrtc-codereview.appspot.com/349008

git-svn-id: http://webrtc.googlecode.com/svn/trunk@1442 4adac7df-926f-26a2-2b94-8c16560cd09d
pwestin@webrtc.org
2012-01-17 12:45:47 +00:00
parent e5297d2aaa
commit 5621057956
47 changed files with 25 additions and 3637 deletions
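The change strips the H.263/H.263-1998, MPEG-4 and unused H.264 codec-specific structs, payload types and code paths out of common_types, the RTP/RTCP module, the file utility and the video coding module; VP8, I420, RED and ULPFEC are what remain. As a rough sketch, reconstructed only from the hunks below (enum members not visible in the diff context are omitted), the trimmed codec type enums end up looking like this:

// Sketch of the enums after this commit, assuming no members other than
// those visible in the diff context below.
enum VideoCodecType
{
    kVideoCodecVP8,
    kVideoCodecI420,
    kVideoCodecRED,
    kVideoCodecULPFEC,
    // ... members outside the visible hunk are unchanged
};
enum RtpVideoCodecTypes
{
    kRtpNoVideo  = 0,
    kRtpFecVideo = 10,
    kRtpVp8Video = 11
};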

View File

@@ -451,19 +451,6 @@ enum { kPayloadNameSize = 32};
enum { kMaxSimulcastStreams = 4};
enum { kMaxTemporalStreams = 4};
// H.263 specific
struct VideoCodecH263
{
char quality;
};
// H.264 specific
enum H264Packetization
{
kH264SingleMode = 0,
kH264NonInterleavedMode = 1
};
enum VideoCodecComplexity
{
kComplexityNormal = 0,
@@ -489,20 +476,6 @@ enum VP8ResilienceMode {
// within a frame.
};
struct VideoCodecH264
{
H264Packetization packetization;
VideoCodecComplexity complexity;
VideoCodecProfile profile;
char level;
char quality;
bool useFMO;
unsigned char configParameters[kConfigParameterSize];
unsigned char configParametersSize;
};
// VP8 specific
struct VideoCodecVP8
{
@@ -513,14 +486,6 @@ struct VideoCodecVP8
unsigned char numberOfTemporalLayers;
};
// MPEG-4 specific
struct VideoCodecMPEG4
{
unsigned char configParameters[kConfigParameterSize];
unsigned char configParametersSize;
char level;
};
// Unknown specific
struct VideoCodecGeneric
{
@@ -529,10 +494,7 @@ struct VideoCodecGeneric
// Video codec types
enum VideoCodecType
{
kVideoCodecH263,
kVideoCodecH264,
kVideoCodecVP8,
kVideoCodecMPEG4,
kVideoCodecI420,
kVideoCodecRED,
kVideoCodecULPFEC,
@@ -541,10 +503,7 @@ enum VideoCodecType
union VideoCodecUnion
{
VideoCodecH263 H263;
VideoCodecH264 H264;
VideoCodecVP8 VP8;
VideoCodecMPEG4 MPEG4;
VideoCodecGeneric Generic;
};

View File

@@ -40,14 +40,6 @@ struct RTPAudioHeader
WebRtc_UWord8 channel; // number of channels 2 = stereo
};
struct RTPVideoHeaderH263
{
void InitRTPVideoHeaderH263() {};
bool independentlyDecodable; // H.263-1998: without the P bit it is not independently decodable
bool bits; // H.263 mode B: XOR the last byte of the previous packet with the
// first byte of this packet
};
enum {kNoPictureId = -1};
enum {kNoTl0PicIdx = -1};
enum {kNoTemporalIdx = -1};
@@ -87,15 +79,12 @@ struct RTPVideoHeaderVP8
};
union RTPVideoTypeHeader
{
RTPVideoHeaderH263 H263;
RTPVideoHeaderVP8 VP8;
};
enum RTPVideoCodecTypes
{
kRTPVideoGeneric = 0,
kRTPVideoH263 = 1,
kRTPVideoMPEG4 = 5,
kRTPVideoVP8 = 8,
kRTPVideoNoVideo = 10,
kRTPVideoFEC = 11,

View File

@@ -157,18 +157,6 @@ WebRtc_Word32 ModuleFileUtility::InitAviWriting(
bitMapInfoHeader.biSizeImage = bitMapInfoHeader.biWidth *
bitMapInfoHeader.biHeight * bitMapInfoHeader.biBitCount / 8;
if(videoCodecInst.codecType == kVideoCodecMPEG4)
{
if(_aviOutFile->CreateVideoStream(
videoStreamHeader,
bitMapInfoHeader,
videoCodecInst.codecSpecific.MPEG4.configParameters,
videoCodecInst.codecSpecific.MPEG4.configParametersSize) != 0)
{
return -1;
}
} else
{
if (_aviOutFile->CreateVideoStream(
videoStreamHeader,
bitMapInfoHeader,
@@ -177,7 +165,6 @@ WebRtc_Word32 ModuleFileUtility::InitAviWriting(
{
return -1;
}
}
if(!videoOnly)
{
@@ -349,35 +336,11 @@ WebRtc_Word32 ModuleFileUtility::InitAviReading(const WebRtc_Word8* filename,
videoInStreamHeader.dwRate);
const size_t plnameLen = sizeof(_videoCodec.plName) / sizeof(char);
if (bitmapInfo.biCompression == AviFile::MakeFourCc('M','4','S','2'))
{
strncpy(_videoCodec.plName, "MP4V-ES", plnameLen);
if (configLength > 0)
{
if (configLength < kConfigParameterSize)
{
_videoCodec.codecSpecific.MPEG4.configParametersSize =
(WebRtc_UWord8)configLength;
memcpy(_videoCodec.codecSpecific.MPEG4.configParameters,
&codecConfigParameters,
_videoCodec.codecSpecific.MPEG4.configParametersSize);
}
else
{
return -1;
}
}
}
else if (bitmapInfo.biCompression == AviFile::MakeFourCc('I','4','2','0'))
if (bitmapInfo.biCompression == AviFile::MakeFourCc('I','4','2','0'))
{
strncpy(_videoCodec.plName, "I420", plnameLen);
_videoCodec.codecType = kVideoCodecI420;
}
else if (bitmapInfo.biCompression == AviFile::MakeFourCc('H','2','6','3'))
{
strncpy(_videoCodec.plName, "H263", plnameLen);
_videoCodec.codecType = kVideoCodecH263;
}
else if (bitmapInfo.biCompression ==
AviFile::MakeFourCc('V', 'P', '8', '0'))
{

View File

@@ -1073,11 +1073,6 @@ public:
* return -1 on failure else 0
*/
virtual WebRtc_Word32 RequestKeyFrame(const FrameType frameType = kVideoFrameKey) = 0;
/*
* Only for H.263 to interop with bad endpoints
*/
virtual WebRtc_Word32 SetH263InverseLogic(const bool enable) = 0;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_RTCP_H_

View File

@@ -36,7 +36,6 @@ LOCAL_SRC_FILES := \
forward_error_correction.cc \
forward_error_correction_internal.cc \
overuse_detector.cc \
h263_information.cc \
remote_rate_control.cc \
receiver_fec.cc \
rtp_receiver_video.cc \

File diff suppressed because it is too large.

View File

@@ -1,182 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_H263_INFORMATION_H_
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_H263_INFORMATION_H_
#include "typedefs.h"
#include "video_codec_information.h"
#define MAX_NUMBER_OF_H263_GOB 32 // 5 bits
namespace webrtc {
class H263Info
{
public:
H263Info()
:
uiH263PTypeFmt(0),
codecBits(0),
pQuant(0),
numOfGOBs(0),
totalNumOfMBs(0),
cpmBit(0),
fType(0)
{
memset(ptrGOBbuffer, 0, sizeof(ptrGOBbuffer));
memset(ptrGOBbufferSBit, 0, sizeof(ptrGOBbufferSBit));
memset(ptrGQuant, 0, sizeof(ptrGQuant));
memset(ptrNumOfMBs, 0, sizeof(ptrNumOfMBs));
memset(ptrGroupNum, 0, sizeof(ptrGroupNum));
}
WebRtc_Word32 CalculateMBOffset(const WebRtc_UWord8 numOfGOB) const;
WebRtc_UWord8 uiH263PTypeFmt; // Defines frame size
WebRtc_UWord8 codecBits;
WebRtc_UWord8 pQuant;
WebRtc_UWord8 numOfGOBs; // Total number of GOBs of current frame
WebRtc_UWord16 totalNumOfMBs;
WebRtc_UWord8 cpmBit;
WebRtc_UWord8 fType; // 0 - intra frame, 1 - inter frame
WebRtc_UWord16 ptrNumOfMBs[MAX_NUMBER_OF_H263_GOB]; // Total number of MBs of current GOB
WebRtc_UWord32 ptrGOBbuffer[MAX_NUMBER_OF_H263_GOB]; // GOB buffer (start byte of GOBs)
WebRtc_UWord8 ptrGroupNum[MAX_NUMBER_OF_H263_GOB];
WebRtc_UWord8 ptrGOBbufferSBit[MAX_NUMBER_OF_H263_GOB]; // sBit buffer (number of start bits to ignore for corresponding GOB)
WebRtc_UWord8 ptrGQuant[MAX_NUMBER_OF_H263_GOB]; // quantizer information for GOBs
};
struct H263MBInfo
{
H263MBInfo()
:
bufferSize(0),
ptrBuffer(0),
ptrBufferHMV(0),
ptrBufferVMV(0)
{
}
WebRtc_UWord32 bufferSize; // Size of MB buffer
WebRtc_UWord32* ptrBuffer; // MB buffer
WebRtc_UWord8* ptrBufferHMV; // Horizontal motion vector for corresponding MB
WebRtc_UWord8* ptrBufferVMV; // Vertical motion vector for corresponding MB
};
class H263Information : public VideoCodecInformation
{
public:
H263Information();
~H263Information();
/*******************************************************************************
* void Reset();
*
* Resets the members to zero.
*
*/
virtual void Reset();
virtual RtpVideoCodecTypes Type();
/*******************************************************************************
* WebRtc_Word32 GetInfo(WebRtc_UWord8* ptrEncodedBuffer,
* WebRtc_UWord32 length,
* const H263Info*& ptrInfo);
*
* Gets information from an encoded stream.
*
* Input:
* - ptrEncodedBuffer : Pointer to encoded stream.
* - length : Length in bytes of encoded stream.
*
* Output:
* - ptrInfo : Pointer to struct with H263 info.
*
* Return value:
* - 0 : ok
* - (-1) : Error
*/
virtual WebRtc_Word32 GetInfo(const WebRtc_UWord8* ptrEncodedBuffer,
const WebRtc_UWord32 length,
const H263Info*& ptrInfo);
/*******************************************************************************
* WebRtc_Word32 GetMBInfo(const WebRtc_UWord8* ptrEncodedBuffer,
* WebRtc_UWord32 length,
* WebRtc_Word32 numOfGOB,
* const H263MBInfo*& ptrInfoMB);
*
* Gets macroblock positions for a GOB.
* Also, the horizontal and vertical motion vectors for each MB are returned.
*
* Input:
* - ptrEncodedBuffer : Pointer to encoded stream.
* - length : Length in bytes of encoded stream.
* - numOfGOB : Group number of current GOB.
*
* Output:
* - ptrInfoMB : Pointer to struct with MB positions in bits for a GOB.
* Horizontal and vertical motion vector for each MB.
*
* Return value:
* - 0 : ok
* - (-1) : Error
*/
WebRtc_Word32 GetMBInfo(const WebRtc_UWord8* ptrEncodedBuffer,
const WebRtc_UWord32 length,
const WebRtc_UWord8 numOfGOB,
const H263MBInfo*& ptrInfoMB);
protected:
bool HasInfo(const WebRtc_UWord32 length);
WebRtc_Word32 FindInfo(const WebRtc_UWord8* ptrEncodedBuffer, const WebRtc_UWord32 length);
bool PictureStartCode();
WebRtc_Word32 FindPTypeFMT();
void FindFType();
void FindCodecBits();
void FindPQUANT();
void FindCPMbit();
WebRtc_Word32 SetNumOfMBs();
WebRtc_Word32 FindGOBs(const WebRtc_UWord32 length);
// MB info
WebRtc_Word32 VerifyAndAllocateMB();
bool HasMBInfo(const WebRtc_UWord8 numOfGOB);
WebRtc_Word32 FindMBs(const WebRtc_UWord8* ptrEncodedBuffer,
const WebRtc_UWord8 numOfGOB,
const WebRtc_UWord32 length);
void FindGQUANT(WebRtc_Word32 numOfGOB);
WebRtc_Word32 FindMCBPC(WebRtc_Word32 &mbType, char *cbp);
WebRtc_Word32 FindCBPY(WebRtc_Word32 mbType, char *cbp);
WebRtc_Word32 FindMVD(WebRtc_Word32 numOfMB, WebRtc_Word32 verORhor, WebRtc_UWord8 *hmv1, WebRtc_UWord8 *vmv1);
WebRtc_Word32 FindTCOEF(WebRtc_Word32 &last);
bool IsGBSC();
WebRtc_UWord8 IsBitOne(const WebRtc_Word32 bitCnt) const;
void ByteAlignData(WebRtc_Word32 numOfBytes);
void OutputBits(WebRtc_Word32 length);
private:
WebRtc_Word32 _bitCnt;
const WebRtc_UWord8* _ptrData;
WebRtc_UWord8 _dataShifted[5];
H263Info _info;
H263MBInfo _infoMB;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_H263_INFORMATION_H_
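For reference, the sender code deleted further down in this commit (rtp_sender_video.cc) drove this parser roughly as follows; a minimal sketch assembled from the removed SendH263(), with error handling and fragmentation details trimmed:

// Sketch only: how the removed RTPSenderVideo::SendH263() used this class.
H263Information h263Information;
const H263Info* info = NULL;
if (h263Information.GetInfo(payloadData, payloadSize, info) == -1)
{
    return -1;
}
// info->numOfGOBs and info->ptrGOBbuffer[] give the GOB start offsets used to
// fragment the frame at GOB boundaries; when a single GOB exceeded the maximum
// payload size, SendH263() fell back to GetMBInfo() and mode B packetization
// at macroblock boundaries.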

View File

@@ -39,7 +39,6 @@ RTPReceiverVideo::RTPReceiverVideo(const WebRtc_Word32 id,
_estimatedBW(0),
_currentFecFrameDecoded(false),
_receiveFEC(NULL),
_h263InverseLogic(false),
_overUseDetector(),
_videoBitRate(),
_lastBitRateChange(0),
@@ -109,24 +108,9 @@ RTPReceiverVideo::RegisterReceiveVideoPayload(const WebRtc_Word8 payloadName[RTP
if (ModuleRTPUtility::StringCompare(payloadName, "VP8",3))
{
videoType = kRtpVp8Video;
} else if ((ModuleRTPUtility::StringCompare(payloadName, "H263-1998", 9)) ||
(ModuleRTPUtility::StringCompare(payloadName, "H263-2000", 9)))
{
videoType = kRtpH2631998Video;
} else if (ModuleRTPUtility::StringCompare(payloadName, "H263", 4))
{
videoType = kRtpH263Video;
} else if (ModuleRTPUtility::StringCompare(payloadName, "MP4V-ES", 7))
{
videoType = kRtpMpeg4Video;
} else if (ModuleRTPUtility::StringCompare(payloadName, "I420", 4))
{
videoType = kRtpNoVideo;
} else if (ModuleRTPUtility::StringCompare(payloadName, "ULPFEC", 6))
{
// store this
@@ -434,15 +418,6 @@ RTPReceiverVideo::SetCodecType(const RtpVideoCodecTypes videoType,
case kRtpVp8Video:
rtpHeader->type.Video.codec = kRTPVideoVP8;
break;
case kRtpH263Video:
rtpHeader->type.Video.codec = kRTPVideoH263;
break;
case kRtpH2631998Video:
rtpHeader->type.Video.codec = kRTPVideoH263;
break;
case kRtpMpeg4Video:
rtpHeader->type.Video.codec = kRTPVideoMPEG4;
break;
case kRtpFecVideo:
rtpHeader->type.Video.codec = kRTPVideoFEC;
break;
@@ -475,15 +450,6 @@ RTPReceiverVideo::ParseVideoCodecSpecificSwitch(WebRtcRTPHeader* rtpHeader,
case kRtpVp8Video:
retVal = ReceiveVp8Codec(rtpHeader, payloadData, payloadDataLength);
break;
case kRtpH263Video:
retVal = ReceiveH263Codec(rtpHeader, payloadData, payloadDataLength);
break;
case kRtpH2631998Video:
retVal = ReceiveH2631998Codec(rtpHeader,payloadData, payloadDataLength);
break;
case kRtpMpeg4Video:
retVal = ReceiveMPEG4Codec(rtpHeader,payloadData, payloadDataLength);
break;
default:
_criticalSectionReceiverVideo->Leave();
assert(((void)"ParseCodecSpecific videoType can not be unknown here!", false));
@@ -492,157 +458,6 @@ RTPReceiverVideo::ParseVideoCodecSpecificSwitch(WebRtcRTPHeader* rtpHeader,
return retVal;
}
WebRtc_Word32
RTPReceiverVideo::ReceiveH263Codec(WebRtcRTPHeader* rtpHeader,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadDataLength)
{
ModuleRTPUtility::RTPPayloadParser rtpPayloadParser(kRtpH263Video,
payloadData,
payloadDataLength,
_id);
ModuleRTPUtility::RTPPayload parsedPacket;
const bool success = rtpPayloadParser.Parse(parsedPacket);
// from here down we only work on local data
_criticalSectionReceiverVideo->Leave();
if (!success)
{
return -1;
}
if (IP_PACKET_SIZE < parsedPacket.info.H263.dataLength +
(parsedPacket.info.H263.insert2byteStartCode ? 2 : 0))
{
return -1;
}
return ReceiveH263CodecCommon(parsedPacket, rtpHeader);
}
WebRtc_Word32
RTPReceiverVideo::ReceiveH2631998Codec(WebRtcRTPHeader* rtpHeader,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadDataLength)
{
ModuleRTPUtility::RTPPayloadParser rtpPayloadParser(kRtpH2631998Video,
payloadData,
payloadDataLength,
_id);
ModuleRTPUtility::RTPPayload parsedPacket;
const bool success = rtpPayloadParser.Parse(parsedPacket);
if (!success)
{
_criticalSectionReceiverVideo->Leave();
return -1;
}
if (IP_PACKET_SIZE < parsedPacket.info.H263.dataLength +
(parsedPacket.info.H263.insert2byteStartCode ? 2 : 0))
{
_criticalSectionReceiverVideo->Leave();
return -1;
}
// from here down we only work on local data
_criticalSectionReceiverVideo->Leave();
return ReceiveH263CodecCommon(parsedPacket, rtpHeader);
}
WebRtc_Word32
RTPReceiverVideo::ReceiveH263CodecCommon(ModuleRTPUtility::RTPPayload& parsedPacket,
WebRtcRTPHeader* rtpHeader)
{
rtpHeader->frameType = (parsedPacket.frameType == ModuleRTPUtility::kIFrame) ? kVideoFrameKey : kVideoFrameDelta;
if (_h263InverseLogic) // Microsoft H263 bug
{
if (rtpHeader->frameType == kVideoFrameKey)
rtpHeader->frameType = kVideoFrameDelta;
else
rtpHeader->frameType = kVideoFrameKey;
}
rtpHeader->type.Video.isFirstPacket = parsedPacket.info.H263.hasPictureStartCode;
// if p == 0
// it's a follow-on packet, hence it's not independently decodable
rtpHeader->type.Video.codecHeader.H263.independentlyDecodable = parsedPacket.info.H263.hasPbit;
if (parsedPacket.info.H263.hasPictureStartCode)
{
rtpHeader->type.Video.width = parsedPacket.info.H263.frameWidth;
rtpHeader->type.Video.height = parsedPacket.info.H263.frameHeight;
} else
{
rtpHeader->type.Video.width = 0;
rtpHeader->type.Video.height = 0;
}
rtpHeader->type.Video.codecHeader.H263.bits = (parsedPacket.info.H263.startBits > 0)?true:false;
// copy to a local buffer
WebRtc_UWord8 dataBuffer[IP_PACKET_SIZE];
WebRtc_UWord16 dataLength = 0;
// we need to copy since we modify the first byte
if(parsedPacket.info.H263.insert2byteStartCode)
{
dataBuffer[0] = 0;
dataBuffer[1] = 0;
memcpy(dataBuffer+2, parsedPacket.info.H263.data, parsedPacket.info.H263.dataLength);
dataLength = 2 + parsedPacket.info.H263.dataLength;
} else
{
memcpy(dataBuffer, parsedPacket.info.H263.data, parsedPacket.info.H263.dataLength);
dataLength = parsedPacket.info.H263.dataLength;
}
if(parsedPacket.info.H263.dataLength > 0)
{
if(parsedPacket.info.H263.startBits > 0)
{
// make sure that the ignored start bits are zero
dataBuffer[0] &= (0xff >> parsedPacket.info.H263.startBits);
}
if(parsedPacket.info.H263.endBits > 0)
{
// make sure that the ignored end bits are zero
dataBuffer[parsedPacket.info.H263.dataLength -1] &= ((0xff << parsedPacket.info.H263.endBits) & 0xff);
}
}
return CallbackOfReceivedPayloadData(dataBuffer, dataLength, rtpHeader);
}
WebRtc_Word32
RTPReceiverVideo::ReceiveMPEG4Codec(WebRtcRTPHeader* rtpHeader,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadDataLength)
{
ModuleRTPUtility::RTPPayloadParser rtpPayloadParser(kRtpMpeg4Video,
payloadData,
payloadDataLength,
_id);
ModuleRTPUtility::RTPPayload parsedPacket;
const bool success = rtpPayloadParser.Parse(parsedPacket);
if (!success)
{
_criticalSectionReceiverVideo->Leave();
return -1;
}
// from here down we only work on local data
_criticalSectionReceiverVideo->Leave();
rtpHeader->frameType = (parsedPacket.frameType == ModuleRTPUtility::kIFrame) ? kVideoFrameKey : kVideoFrameDelta;
rtpHeader->type.Video.isFirstPacket = parsedPacket.info.MPEG4.isFirstPacket;
if(CallbackOfReceivedPayloadData(parsedPacket.info.MPEG4.data,
parsedPacket.info.MPEG4.dataLength,
rtpHeader) != 0)
{
return -1;
}
return 0;
}
WebRtc_Word32
RTPReceiverVideo::ReceiveVp8Codec(WebRtcRTPHeader* rtpHeader,
const WebRtc_UWord8* payloadData,
@@ -738,12 +553,6 @@ RTPReceiverVideo::ReceiveGenericCodec(WebRtcRTPHeader* rtpHeader,
return 0;
}
WebRtc_Word32 RTPReceiverVideo::SetH263InverseLogic(const bool enable)
{
_h263InverseLogic = enable;
return 0;
}
void RTPReceiverVideo::SetPacketOverHead(WebRtc_UWord16 packetOverHead)
{
_packetOverHead = packetOverHead;

View File

@@ -60,8 +60,6 @@ public:
const WebRtc_UWord16 incomingRtpPacketSize,
const WebRtc_Word64 nowMS);
WebRtc_Word32 SetH263InverseLogic(const bool enable);
WebRtc_Word32 ReceiveRecoveredPacketCallback(
WebRtcRTPHeader* rtpHeader,
const WebRtc_UWord8* payloadData,
@@ -105,22 +103,6 @@ protected:
const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadDataLength);
WebRtc_Word32 ReceiveH263Codec(WebRtcRTPHeader *rtpHeader,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadDataLength);
WebRtc_Word32 ReceiveH2631998Codec(WebRtcRTPHeader *rtpHeader,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadDataLength);
WebRtc_Word32 ReceiveH263CodecCommon(
ModuleRTPUtility::RTPPayload& parsedPacket,
WebRtcRTPHeader* rtpHeader);
WebRtc_Word32 ReceiveMPEG4Codec(WebRtcRTPHeader *rtpHeader,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadDataLength);
WebRtc_Word32 ReceiveVp8Codec(WebRtcRTPHeader *rtpHeader,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadDataLength);
@@ -147,9 +129,6 @@ private:
bool _currentFecFrameDecoded;
ReceiverFEC* _receiveFEC;
// H263
bool _h263InverseLogic;
// BWE
OverUseDetector _overUseDetector;
BitRateStats _videoBitRate;

View File

@@ -71,8 +71,6 @@
'forward_error_correction_internal.h',
'overuse_detector.cc',
'overuse_detector.h',
'h263_information.cc',
'h263_information.h',
'remote_rate_control.cc',
'remote_rate_control.h',
'rtp_packet_history.cc',

View File

@@ -3121,15 +3121,6 @@ WebRtc_Word32 ModuleRtpRtcpImpl::BoundingSet(bool &tmmbrOwner,
boundingSet);
}
WebRtc_Word32 ModuleRtpRtcpImpl::SetH263InverseLogic(const bool enable) {
WEBRTC_TRACE(kTraceModuleCall,
kTraceRtpRtcp,
_id,
"SetH263InverseLogic(%s)",
enable ? "true":"false");
return _rtpReceiver.SetH263InverseLogic(enable);
}
void ModuleRtpRtcpImpl::SendKeyFrame() {
WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, _id, "SendKeyFrame()");
OnReceivedIntraFrameRequest(0);

View File

@@ -486,8 +486,6 @@ public:
virtual WebRtc_Word32 SetFECUepProtection(const bool keyUseUepProtection,
const bool deltaUseUepProtection);
virtual WebRtc_Word32 SetH263InverseLogic(const bool enable);
virtual WebRtc_Word32 LastReceivedNTP(WebRtc_UWord32& NTPsecs,
WebRtc_UWord32& NTPfrac,
WebRtc_UWord32& remoteSR);

View File

@@ -19,7 +19,6 @@
#include <cassert> // assert
#include <cstdlib> // srand
#include "h263_information.h"
#include "rtp_format_vp8.h"
namespace webrtc {
@@ -50,12 +49,7 @@ RTPSenderVideo::RTPSenderVideo(const WebRtc_Word32 id,
_fecUseUepProtection(false),
_numberFirstPartition(0),
_fecOverheadRate(clock),
_videoBitrate(clock),
// H263
_savedByte(0),
_eBit(0)
{
_videoBitrate(clock) {
}
RTPSenderVideo::~RTPSenderVideo()
@@ -119,19 +113,6 @@ RTPSenderVideo::RegisterVideoPayload(
if (ModuleRTPUtility::StringCompare(payloadName, "VP8",3))
{
videoType = kRtpVp8Video;
}
else if ((ModuleRTPUtility::StringCompare(payloadName, "H263-1998", 9)) ||
(ModuleRTPUtility::StringCompare(payloadName, "H263-2000", 9)))
{
videoType = kRtpH2631998Video;
}
else if (ModuleRTPUtility::StringCompare(payloadName, "H263", 4))
{
videoType = kRtpH263Video;
}
else if (ModuleRTPUtility::StringCompare(payloadName, "MP4V-ES", 7))
{
videoType = kRtpMpeg4Video;
} else if (ModuleRTPUtility::StringCompare(payloadName, "I420", 4))
{
videoType = kRtpNoVideo;
@@ -461,18 +442,6 @@ RTPSenderVideo::SendVideo(const RtpVideoCodecTypes videoType,
retVal = SendGeneric(payloadType,captureTimeStamp, payloadData,
payloadSize);
break;
case kRtpH263Video:
retVal = SendH263(frameType,payloadType, captureTimeStamp, payloadData,
payloadSize, codecInfo);
break;
case kRtpH2631998Video: //RFC 4629
retVal = SendH2631998(frameType,payloadType, captureTimeStamp,
payloadData, payloadSize, codecInfo);
break;
case kRtpMpeg4Video: // RFC 3016
retVal = SendMPEG4(frameType,payloadType, captureTimeStamp,
payloadData, payloadSize);
break;
case kRtpVp8Video:
retVal = SendVP8(frameType, payloadType, captureTimeStamp,
payloadData, payloadSize, fragmentation, rtpTypeHdr);
@@ -549,114 +518,6 @@ RTPSenderVideo::SendGeneric(const WebRtc_Word8 payloadType,
return 0;
}
WebRtc_Word32
RTPSenderVideo::SendMPEG4(const FrameType frameType,
const WebRtc_Word8 payloadType,
const WebRtc_UWord32 captureTimeStamp,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize)
{
WebRtc_Word32 payloadBytesToSend = payloadSize;
WebRtc_UWord16 rtpHeaderLength = _rtpSender.RTPHeaderLength();
WebRtc_UWord16 maxLength = _rtpSender.MaxPayloadLength() -
FECPacketOverhead() - rtpHeaderLength;
WebRtc_UWord8 dataBuffer[IP_PACKET_SIZE];
// Fragment packet of max MaxPayloadLength bytes payload.
const WebRtc_UWord8* data = payloadData;
while (payloadBytesToSend > 0)
{
WebRtc_UWord16 payloadBytes = 0;
WebRtc_Word32 dataOffset = rtpHeaderLength;
do
{
WebRtc_Word32 size = 0;
bool markerBit = false;
if(payloadBytesToSend > maxLength)
{
size = FindMPEG4NALU(data, maxLength);
}else
{
// Last in frame
markerBit = true;
size = payloadBytesToSend;
}
if(size <= 0)
{
return -1;
}
if(size > maxLength)
{
// We need to fragment NALU
return -1;
}
if(payloadBytes == 0)
{
// Build RTP header
if(_rtpSender.BuildRTPheader(
dataBuffer,
payloadType,
markerBit,
captureTimeStamp) != rtpHeaderLength)
{
return -1;
}
}
if( size + payloadBytes <= maxLength)
{
// Put payload in packet
memcpy(&dataBuffer[dataOffset], data, size);
dataOffset += size; //advance frame ptr
data += size; //advance packet ptr
payloadBytes += (WebRtc_UWord16)size;
payloadBytesToSend -= size;
} else
{
// Send packet
break;
}
}while(payloadBytesToSend);
if (-1 == SendVideoPacket(frameType, dataBuffer, payloadBytes,
rtpHeaderLength, kAllowRetransmission))
{
return -1;
}
}
return 0;
}
WebRtc_Word32
RTPSenderVideo::FindMPEG4NALU(const WebRtc_UWord8* inData,
WebRtc_Word32 maxLength)
{
WebRtc_Word32 size = 0;
for (WebRtc_Word32 i = maxLength; i > 4; i-=2) // Find NAL
{
// Scan down
if (inData[i] == 0)
{
if (inData[i-1] == 0)
{
// i points at the last zero
size = i-1;
}else if(inData[i+1] == 0)
{
size = i;
}
if(size > 0)
{
return size;
}
}
}
return 0;
}
VideoCodecInformation*
RTPSenderVideo::CodecInformationVideo()
{
@@ -675,552 +536,6 @@ RTPSenderVideo::MaxConfiguredBitrateVideo() const
return _maxBitrate;
}
WebRtc_Word32
RTPSenderVideo::SendH263(const FrameType frameType,
const WebRtc_Word8 payloadType,
const WebRtc_UWord32 captureTimeStamp,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize,
VideoCodecInformation* codecInfo)
{
bool modeA = true;
WebRtc_UWord16 h263HeaderLength = 4;
WebRtc_UWord16 payloadBytesInPacket = 0;
WebRtc_Word32 payloadBytesToSend = payloadSize;
WebRtc_UWord16 rtpHeaderLength = _rtpSender.RTPHeaderLength();
// -2: one byte for a possible old eBit -> sBit,
// one byte for a new eBit if the next GOB header is not byte aligned
// (eventual sBit, eBit)
WebRtc_UWord16 maxPayloadLengthH263 = _rtpSender.MaxPayloadLength() -
FECPacketOverhead() - rtpHeaderLength - h263HeaderLength - 2;
// Fragment packet into packets of max MaxPayloadLength bytes payload.
WebRtc_UWord8 numOfGOB = 0;
WebRtc_UWord16 prevOK = 0;
WebRtc_UWord32 payloadBytesSent = 0;
WebRtc_UWord8 sbit = 0;
_eBit = 0;
H263Information* h263Information = NULL;
if(codecInfo)
{
// Another channel has already parsed this data
h263Information = static_cast<H263Information*>(codecInfo);
} else
{
if(_videoCodecInformation)
{
if(_videoCodecInformation->Type() != kRtpH263Video)
{
// Wrong codec
delete _videoCodecInformation;
_videoCodecInformation = new H263Information();
} else
{
_videoCodecInformation->Reset();
}
} else
{
_videoCodecInformation = new H263Information();
}
h263Information = static_cast<H263Information*>(_videoCodecInformation);
}
WebRtc_UWord8 dataBuffer[IP_PACKET_SIZE];
const WebRtc_UWord8* data = payloadData;
const H263Info* ptrH263Info = NULL;
if (h263Information->GetInfo(payloadData,payloadSize, ptrH263Info) == -1)
{
return -1;
}
while (payloadBytesToSend > 0)
{
prevOK = 0;
modeA = true;
if (payloadBytesToSend > maxPayloadLengthH263)
{
// Fragment packet at GOB boundary
for (; numOfGOB < ptrH263Info->numOfGOBs; numOfGOB++)
{
// Fit one or more GOBs into packet
if (WebRtc_Word32(ptrH263Info->ptrGOBbuffer[numOfGOB+1] -
payloadBytesSent) < maxPayloadLengthH263)
{
prevOK = static_cast<WebRtc_UWord16>(
ptrH263Info->ptrGOBbuffer[numOfGOB+1] -
payloadBytesSent);
}else
{
break;
}
}
if (!prevOK)
{
// GOB larger than max MaxPayloadLength bytes => Mode B required
// Fragment stream at MB boundaries
modeA = false;
// Get MB positions within GOB
const H263MBInfo* ptrInfoMB = NULL;
if (-1 == h263Information->GetMBInfo(payloadData, payloadSize,
numOfGOB, ptrInfoMB))
{
return -1;
}
WebRtc_Word32 offset = ptrH263Info->
CalculateMBOffset(numOfGOB);
if(offset < 0)
{
return -1;
}
// Send packets fragmented at MB boundaries
if (-1 == SendH263MBs(frameType, payloadType, captureTimeStamp,
dataBuffer, data, rtpHeaderLength,
numOfGOB, *ptrH263Info,*ptrInfoMB, offset))
{
return -1;
}
offset = ptrH263Info->CalculateMBOffset(numOfGOB+1);
if(offset < 0)
{
return -1;
}
WebRtc_Word32 numBytes = ptrInfoMB->ptrBuffer[offset-1] / 8;
WebRtc_Word32 numBytesRem = ptrInfoMB->ptrBuffer[offset-1] % 8;
if (numBytesRem)
{
// In case our GOB is not byte aligned
numBytes++;
}
payloadBytesToSend -= numBytes;
data += numBytes;
payloadBytesSent += numBytes;
numOfGOB++;
}
}
if (modeA)
{
h263HeaderLength = 4;
WebRtc_UWord16 rtpHeaderLength = _rtpSender.RTPHeaderLength();
// H.263 payload header (4 bytes)
// First bit 0 == mode A, (00 000 000)
dataBuffer[rtpHeaderLength] = 0;
dataBuffer[rtpHeaderLength+1] = ptrH263Info->uiH263PTypeFmt << 5;
// Last bit 0
dataBuffer[rtpHeaderLength + 1] += ptrH263Info->codecBits << 1;
// First 3 bits 0
dataBuffer[rtpHeaderLength + 2] = 0;
// No pb frame
dataBuffer[rtpHeaderLength + 3] = 0;
// Last packet eBit -> current packet sBit
sbit = (8 - _eBit) % 8;
if (payloadBytesToSend > maxPayloadLengthH263)
{
if (numOfGOB > 0)
{
// Check if GOB header is byte aligned
if(ptrH263Info->ptrGOBbufferSBit)
{
_eBit = (8 -
ptrH263Info->ptrGOBbufferSBit[numOfGOB - 1]) % 8;
} else
{
_eBit = 0;
}
}
if (_eBit)
{
// Next GOB header is not byte aligned,
// include this byte in packet
// Send the byte with eBits
prevOK++;
}
}
if (payloadBytesToSend > maxPayloadLengthH263)
{
payloadBytesInPacket = prevOK;
payloadBytesToSend -= payloadBytesInPacket;
_rtpSender.BuildRTPheader(dataBuffer, payloadType,
false, captureTimeStamp);
} else
{
payloadBytesInPacket = (WebRtc_UWord16)payloadBytesToSend;
payloadBytesToSend = 0;
_rtpSender.BuildRTPheader(dataBuffer, payloadType,
true, captureTimeStamp);
_eBit = 0;
}
if (sbit)
{
// Add last sent byte and put payload in packet
// Set sBit
dataBuffer[rtpHeaderLength] = dataBuffer[rtpHeaderLength] |
((sbit & 0x7) << 3);
memcpy(&dataBuffer[rtpHeaderLength + h263HeaderLength],
&_savedByte, 1);
memcpy(&dataBuffer[rtpHeaderLength + h263HeaderLength + 1],
data, payloadBytesInPacket);
h263HeaderLength++;
}else
{
// Put payload in packet
memcpy(&dataBuffer[rtpHeaderLength + h263HeaderLength], data,
payloadBytesInPacket);
}
if (_eBit)
{
// Save last byte to paste in next packet
// Set eBit
dataBuffer[rtpHeaderLength] |= (_eBit & 0x7);
_savedByte = dataBuffer[payloadBytesInPacket +
h263HeaderLength + rtpHeaderLength-1];
}
if (-1 == SendVideoPacket(frameType,
dataBuffer,
payloadBytesInPacket + h263HeaderLength,
rtpHeaderLength, kAllowRetransmission))
{
return -1;
}
payloadBytesSent += payloadBytesInPacket;
data += payloadBytesInPacket;
}
}
return 0;
}
WebRtc_Word32
RTPSenderVideo::SendH2631998(const FrameType frameType,
const WebRtc_Word8 payloadType,
const WebRtc_UWord32 captureTimeStamp,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize,
VideoCodecInformation* codecInfo)
{
const WebRtc_UWord16 h2631998HeaderLength = 2;
// No extra header included
const WebRtc_UWord8 pLen = 0;
const WebRtc_UWord8 peBit = 0;
bool fragment = false;
WebRtc_UWord16 payloadBytesInPacket = 0;
WebRtc_Word32 payloadBytesToSend = payloadSize;
WebRtc_UWord16 numPayloadBytesToSend = 0;
WebRtc_UWord16 rtpHeaderLength = _rtpSender.RTPHeaderLength();
// P is not set in all packets,
// only packets that have a PictureStart or a GOB header
WebRtc_UWord8 p = 2;
H263Information* h263Information = NULL;
if(codecInfo)
{
// Another channel has already parsed this data
h263Information = static_cast<H263Information*>(codecInfo);
} else
{
if(_videoCodecInformation)
{
if(_videoCodecInformation->Type() != kRtpH263Video)
{
// Wrong codec
delete _videoCodecInformation;
_videoCodecInformation = new H263Information();
} else
{
_videoCodecInformation->Reset();
}
} else
{
_videoCodecInformation = new H263Information();
}
h263Information = static_cast<H263Information*>(_videoCodecInformation);
}
const H263Info* ptrH263Info = NULL;
if (h263Information->GetInfo(payloadData,payloadSize, ptrH263Info) == -1)
{
return -1;
}
WebRtc_UWord8 dataBuffer[IP_PACKET_SIZE];
const WebRtc_UWord16 maxPayloadLengthH2631998 =
_rtpSender.MaxPayloadLength() - FECPacketOverhead() - rtpHeaderLength -
h2631998HeaderLength;
const WebRtc_UWord8* data = payloadData;
WebRtc_UWord8 numOfGOB = 0;
WebRtc_UWord32 payloadBytesSent = 0;
while(payloadBytesToSend > 0)
{
WebRtc_Word32 prevOK = 0;
// Fragment packets at GOB boundaries
for (; numOfGOB < ptrH263Info->numOfGOBs; numOfGOB++)
{
// Fit one or more GOBs into packet
if (static_cast<WebRtc_Word32>(
ptrH263Info->ptrGOBbuffer[numOfGOB+1] -
payloadBytesSent) <=
(maxPayloadLengthH2631998 + p))
{
prevOK = static_cast<WebRtc_UWord16>(
ptrH263Info->ptrGOBbuffer[numOfGOB+1] -
payloadBytesSent);
if(fragment)
{
// This is a fragment, send it
break;
}
}else
{
break;
}
}
if(!prevOK)
{
// GOB larger than MaxPayloadLength bytes
fragment = true;
numPayloadBytesToSend = maxPayloadLengthH2631998;
} else
{
fragment = false;
numPayloadBytesToSend = WebRtc_UWord16(prevOK - p);
}
dataBuffer[rtpHeaderLength] = (p << 1) + ((pLen >> 5) & 0x01);
dataBuffer[rtpHeaderLength+1] = ((pLen & 0x1F) << 3) + peBit;
if(p == 2)
{
// Increment data ptr
// (do not send first two bytes of picture or GOB start code)
data += 2;
payloadBytesToSend -= 2;
}
if(payloadBytesToSend > maxPayloadLengthH2631998)
{
payloadBytesInPacket = numPayloadBytesToSend;
payloadBytesToSend -= payloadBytesInPacket;
_rtpSender.BuildRTPheader(dataBuffer, payloadType,
false, captureTimeStamp);
}else
{
payloadBytesInPacket = (WebRtc_UWord16)payloadBytesToSend;
payloadBytesToSend = 0;
// MarkerBit is 1
_rtpSender.BuildRTPheader(dataBuffer, payloadType,
true, captureTimeStamp);
}
// Put payload in packet
memcpy(&dataBuffer[rtpHeaderLength + h2631998HeaderLength],
data, payloadBytesInPacket);
if(-1 == SendVideoPacket(frameType,
dataBuffer,
payloadBytesInPacket + h2631998HeaderLength,
rtpHeaderLength,
kAllowRetransmission))
{
return -1;
}
data += payloadBytesInPacket;
payloadBytesSent += payloadBytesInPacket + p;
if(fragment)
{
p = 0;
}else
{
p = 2;
}
}
return 0;
}
WebRtc_Word32
RTPSenderVideo::SendH263MBs(const FrameType frameType,
const WebRtc_Word8 payloadType,
const WebRtc_UWord32 captureTimeStamp,
WebRtc_UWord8* dataBuffer,
const WebRtc_UWord8 *data,
const WebRtc_UWord16 rtpHeaderLength,
const WebRtc_UWord8 numOfGOB,
const H263Info& info,
const H263MBInfo& infoMB,
const WebRtc_Word32 offset)
{
// Mode B
WebRtc_UWord32 *sizeOfMBs = &infoMB.ptrBuffer[offset];
WebRtc_UWord8 *hmv1 = &infoMB.ptrBufferHMV[offset];
WebRtc_UWord8 *vmv1 = &infoMB.ptrBufferVMV[offset];
WebRtc_UWord16 h263HeaderLength = 8;
WebRtc_UWord16 payloadBytesInPacket = 0;
WebRtc_Word32 payloadBytesToSend =
sizeOfMBs[info.ptrNumOfMBs[numOfGOB]-1] / 8;
WebRtc_UWord8 eBitLastByte = (WebRtc_UWord8)((8 -
(sizeOfMBs[info.ptrNumOfMBs[numOfGOB]-1] % 8)) % 8);
WebRtc_Word32 sBit = 0;
WebRtc_Word32 firstMB = 0;
WebRtc_UWord32 bitsRem = 0;
WebRtc_UWord32 payloadBytesSent = 0;
WebRtc_Word32 numOfMB = 0;
WebRtc_Word32 prevOK = 0;
// (Eventual sBit, eBit)
WebRtc_UWord16 maxPayloadLengthH263MB = _rtpSender.MaxPayloadLength() -
FECPacketOverhead() - rtpHeaderLength - h263HeaderLength - 2;
if (eBitLastByte)
{
payloadBytesToSend++;
}
// Fragment packet into packets of max MaxPayloadLength bytes payload.
while (payloadBytesToSend > 0)
{
prevOK = 0;
firstMB = numOfMB;
if (payloadBytesToSend > maxPayloadLengthH263MB)
{
// Fragment packet at MB boundary
for (; numOfMB < info.ptrNumOfMBs[numOfGOB]; numOfMB++)
{
// Fit one or more MBs into packet
if (WebRtc_Word32(sizeOfMBs[numOfMB] / 8 - payloadBytesSent) <
maxPayloadLengthH263MB)
{
prevOK = sizeOfMBs[numOfMB] / 8 - payloadBytesSent;
bitsRem = sizeOfMBs[numOfMB] % 8;
if (bitsRem)
{
prevOK++;
}
}else
{
break;
}
}
if (!prevOK)
{
// MB does not fit in packet
return -1;
}
}
// H.263 payload header (8 bytes)
h263HeaderLength = 8;
// First bit 1 == mode B, 10 000 000
dataBuffer[rtpHeaderLength] = (WebRtc_UWord8)0x80;
// Source format
dataBuffer[rtpHeaderLength + 1] = (info.uiH263PTypeFmt) << 5;
if (numOfGOB == 0)
{
// Quantization value for first MB in packet
dataBuffer[rtpHeaderLength + 1] += info.pQuant;
}
if (numOfGOB > 0 && firstMB > 0)
{
// Quantization value for first MB in packet
// (0 if packet begins w/ a GOB header)
dataBuffer[rtpHeaderLength + 1] += info.ptrGQuant[numOfGOB];
}
// GOB #
dataBuffer[rtpHeaderLength + 2] = numOfGOB << 3;
// First MB in the packet
dataBuffer[rtpHeaderLength + 2] += (WebRtc_UWord8)((firstMB >> 6)& 0x7);
dataBuffer[rtpHeaderLength + 3] = (WebRtc_UWord8)(firstMB << 2);
dataBuffer[rtpHeaderLength + 4] = (info.codecBits) << 4;
// Horizontal motion vector
dataBuffer[rtpHeaderLength + 4] += (hmv1[firstMB] & 0x7F) >> 3;
dataBuffer[rtpHeaderLength + 5] = hmv1[firstMB] << 5;
// Vertical motion vector
dataBuffer[rtpHeaderLength + 5] += (vmv1[firstMB] & 0x7F) >> 2;
dataBuffer[rtpHeaderLength + 6] = vmv1[firstMB] << 6;
dataBuffer[rtpHeaderLength + 7] = 0;
sBit = (8 - _eBit) % 8;
if (payloadBytesToSend > maxPayloadLengthH263MB)
{
payloadBytesInPacket = (WebRtc_UWord16)prevOK;
payloadBytesToSend -= payloadBytesInPacket;
_rtpSender.BuildRTPheader(dataBuffer, payloadType, false,
captureTimeStamp);
_eBit = (WebRtc_UWord8)((8 - bitsRem) % 8);
}
else
{
payloadBytesInPacket = (WebRtc_UWord16)payloadBytesToSend;
payloadBytesToSend = 0;
if (numOfGOB == (info.numOfGOBs - 1))
{
_rtpSender.BuildRTPheader(dataBuffer, payloadType, true,
captureTimeStamp);
_eBit = 0;
}
else
{
_rtpSender.BuildRTPheader(dataBuffer, payloadType, false,
captureTimeStamp);
_eBit = eBitLastByte;
}
}
if (sBit)
{
// Add last sent byte and put payload in packet
dataBuffer[rtpHeaderLength] |= ((sBit & 0x7) << 3);
dataBuffer[rtpHeaderLength + h263HeaderLength] = _savedByte;
memcpy(&dataBuffer[rtpHeaderLength + h263HeaderLength + 1],
data, payloadBytesInPacket);
h263HeaderLength++;
} else
{
// Put payload in packet
memcpy(&dataBuffer[rtpHeaderLength + h263HeaderLength],
data, payloadBytesInPacket);
}
if (_eBit)
{
// Save last byte to paste in next packet
dataBuffer[rtpHeaderLength] |= (_eBit & 0x7);
_savedByte = dataBuffer[rtpHeaderLength +
h263HeaderLength +
payloadBytesInPacket - 1];
}
if (-1 == SendVideoPacket(frameType,
dataBuffer,
payloadBytesInPacket + h263HeaderLength,
rtpHeaderLength,
kAllowRetransmission))
{
return -1;
}
data += payloadBytesInPacket;
payloadBytesSent += payloadBytesInPacket;
}
return 0;
}
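The trickiest part of the H.263 code removed above is the sBit/eBit byte gluing: when a fragment does not end on a byte boundary, the sender saves the partial last byte (_savedByte, _eBit) and resends it at the start of the next fragment, and the receiver ORs the two halves back together (see the deleted VCMSessionInfo::PrepareForDecode() later in this commit). A minimal, hypothetical illustration of that arithmetic, not taken verbatim from the source:

#include "typedefs.h"  // WebRtc_UWord8

// eBit is the number of unused trailing bits in the previous fragment's last
// byte; the matching sBit of the next fragment is (8 - eBit) % 8. The removed
// code masked the ignored bits to zero and merged the two copies with a
// bitwise OR.
WebRtc_UWord8 GlueSplitByte(WebRtc_UWord8 lastByteOfPrev,
                            WebRtc_UWord8 firstByteOfNext,
                            WebRtc_UWord8 eBit)
{
    const WebRtc_UWord8 sBit = (8 - eBit) % 8;
    lastByteOfPrev &= (0xff << eBit) & 0xff;   // keep the top (8 - eBit) bits
    firstByteOfNext &= 0xff >> sBit;           // keep the bottom eBit bits
    return lastByteOfPrev | firstByteOfNext;
}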
WebRtc_Word32
RTPSenderVideo::SendVP8(const FrameType frameType,
const WebRtc_Word8 payloadType,

View File

@@ -20,7 +20,6 @@
#include "list_wrapper.h"
#include "video_codec_information.h"
#include "h263_information.h"
#include "forward_error_correction.h"
#include "Bitrate.h"
@@ -104,26 +103,6 @@ private:
const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize);
WebRtc_Word32 SendH263(const FrameType frameType,
const WebRtc_Word8 payloadType,
const WebRtc_UWord32 captureTimeStamp,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize,
VideoCodecInformation* codecInfo);
WebRtc_Word32 SendH2631998(const FrameType frameType,
const WebRtc_Word8 payloadType,
const WebRtc_UWord32 captureTimeStamp,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize,
VideoCodecInformation* codecInfo);
WebRtc_Word32 SendMPEG4(const FrameType frameType,
const WebRtc_Word8 payloadType,
const WebRtc_UWord32 captureTimeStamp,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize);
WebRtc_Word32 SendVP8(const FrameType frameType,
const WebRtc_Word8 payloadType,
const WebRtc_UWord32 captureTimeStamp,
@@ -132,21 +111,6 @@ private:
const RTPFragmentationHeader* fragmentation,
const RTPVideoTypeHeader* rtpTypeHdr);
// MPEG 4
WebRtc_Word32 FindMPEG4NALU(const WebRtc_UWord8* inData ,WebRtc_Word32 MaxPayloadLength);
// H263
WebRtc_Word32 SendH263MBs(const FrameType frameType,
const WebRtc_Word8 payloadType,
const WebRtc_UWord32 captureTimeStamp,
WebRtc_UWord8* dataBuffer,
const WebRtc_UWord8 *data,
const WebRtc_UWord16 rtpHeaderLength,
const WebRtc_UWord8 numOfGOB,
const H263Info& info,
const H263MBInfo& infoMB,
const WebRtc_Word32 offset);
private:
WebRtc_Word32 _id;
RTPSenderInterface& _rtpSender;
@@ -176,10 +140,6 @@ private:
Bitrate _fecOverheadRate;
// Bitrate used for video payload and RTP headers
Bitrate _videoBitrate;
// H263
WebRtc_UWord8 _savedByte;
WebRtc_UWord8 _eBit;
};
} // namespace webrtc

View File

@@ -427,27 +427,6 @@ ModuleRTPUtility::RTPPayload::SetType(RtpVideoCodecTypes videoType)
{
case kRtpNoVideo:
break;
case kRtpH263Video:
case kRtpH2631998Video:
{
info.H263.hasPictureStartCode = false;
info.H263.insert2byteStartCode = false;
info.H263.hasPbit = false;
info.H263.frameWidth = 0;
info.H263.frameHeight = 0;
info.H263.startBits = 0;
info.H263.endBits = 0;
info.H263.data = 0;
info.H263.dataLength = 0;
break;
}
case kRtpMpeg4Video:
{
info.MPEG4.isFirstPacket = false;
info.MPEG4.data = 0;
info.MPEG4.dataLength = 0;
break;
}
case kRtpVp8Video:
{
info.VP8.nonReferenceFrame = false;
@@ -793,12 +772,6 @@ ModuleRTPUtility::RTPPayloadParser::Parse( RTPPayload& parsedPacket) const
{
case kRtpNoVideo:
return ParseGeneric(parsedPacket);
case kRtpH263Video:
return ParseH263(parsedPacket);
case kRtpH2631998Video:
return ParseH2631998(parsedPacket);
case kRtpMpeg4Video:
return ParseMPEG4(parsedPacket);
case kRtpVp8Video:
return ParseVP8(parsedPacket);
default:
@@ -812,177 +785,6 @@ ModuleRTPUtility::RTPPayloadParser::ParseGeneric(RTPPayload& /*parsedPacket*/) c
return false;
}
bool
ModuleRTPUtility::RTPPayloadParser::ParseH263(RTPPayload& parsedPacket) const
{
if(_dataLength <= 2)
{
// data length sanity check.
return false;
}
const WebRtc_UWord8 header1 = _dataPtr[0];
const WebRtc_UWord8 header2 = _dataPtr[1];
parsedPacket.frameType = ((header2 & 0x10) == 0) ? kIFrame : kPFrame;
unsigned int h263HeaderLength = 0;
if ((header1 & 0x80) == 0)
{
// Mode A
h263HeaderLength = 4;
}
else
{
// In Mode B and Mode C, I bit is in 5th byte of header
const WebRtc_UWord8 header5 = _dataPtr[4];
parsedPacket.frameType = ((header5 & 0x80) == 0) ? kIFrame : kPFrame;
if((header1 & 0x40) == 0)
{
// Mode B
// IMPROVEMENT use the information in the H263 header?
// GQuant of the first MB
h263HeaderLength = 8;
}else
{
// Mode C
h263HeaderLength = 12;
}
}
if (_dataLength < h263HeaderLength)
{
// Received empty H263 packet
return false;
}
// Get SBIT and EBIT
WebRtc_UWord8 sbit = 0;
WebRtc_UWord8 ebit = 0;
const WebRtc_UWord8 seBit = header1 & 0x3f;
if (seBit)
{
// We got SBIT or EBIT
sbit = (seBit >> 3) & 0x07;
ebit = seBit & 0x07;
}
const bool isH263PictureStartCode = H263PictureStartCode(_dataPtr + h263HeaderLength);
if (isH263PictureStartCode)
{
// parse out real size and inform the decoder
WebRtc_UWord16 width = 0;
WebRtc_UWord16 height = 0;
GetH263FrameSize(_dataPtr + h263HeaderLength, width, height);
parsedPacket.info.H263.hasPictureStartCode = true;
parsedPacket.info.H263.frameWidth = width;
parsedPacket.info.H263.frameHeight = height;
}
parsedPacket.info.H263.startBits = sbit;
parsedPacket.info.H263.endBits = ebit;
parsedPacket.info.H263.data = _dataPtr + h263HeaderLength;
parsedPacket.info.H263.dataLength = _dataLength - h263HeaderLength;
parsedPacket.info.H263.insert2byteStartCode = false; // not used in this mode
parsedPacket.info.H263.hasPbit = true; // not used in this mode
return true;
}
bool
ModuleRTPUtility::RTPPayloadParser::ParseH2631998( RTPPayload& parsedPacket) const
{
unsigned int h2631998HeaderLength = 2;
if(_dataLength <= h2631998HeaderLength)
{
// Received empty H263 (1998) packet
return false;
}
const WebRtc_UWord8 header1 = _dataPtr[0];
const WebRtc_UWord8 header2 = _dataPtr[1];
parsedPacket.frameType = kPFrame;
WebRtc_UWord8 p = (header1 >> 2) & 0x01; // picture start or a picture segment
WebRtc_UWord8 vrc = header1 & 0x02; // Video Redundancy Coding (VRC)
WebRtc_UWord8 pLen = ((header1 & 0x01) << 5) + ((header2 >> 3) & 0x1f); // Length, in bytes, of the extra picture header
//WebRtc_UWord8 peBit = (header2 & 0x07); // number of bits that shall be ignored in the last byte of the extra picture header
if (vrc)
{
return false;
}
if (pLen > 0)
{
h2631998HeaderLength += pLen;
//get extra header
// IMPROVEMENT we don't use the redundant picture header
}
if (_dataLength <= h2631998HeaderLength)
{
// Received empty H263 (1998) packet
return false;
}
// if p == 0
// it's a follow-on packet, hence it's not independently decodable
const bool isH263PictureStartCode = H263PictureStartCode(_dataPtr + h2631998HeaderLength, (p>0)?true:false);
if (isH263PictureStartCode)
{
// parse out real size and inform the decoder
WebRtc_UWord16 width = 0;
WebRtc_UWord16 height = 0;
if(p)
{
parsedPacket.frameType = GetH263FrameType(_dataPtr + h2631998HeaderLength - 2);
GetH263FrameSize(_dataPtr + h2631998HeaderLength - 2, width, height);
}
parsedPacket.info.H263.hasPictureStartCode = true;
parsedPacket.info.H263.frameWidth = width;
parsedPacket.info.H263.frameHeight = height;
}
parsedPacket.info.H263.hasPbit = (p>0)?true:false;
parsedPacket.info.H263.insert2byteStartCode = (p>0)?true:false;
parsedPacket.info.H263.data = _dataPtr + h2631998HeaderLength;
parsedPacket.info.H263.dataLength = _dataLength - h2631998HeaderLength;
return true;
}
bool
ModuleRTPUtility::RTPPayloadParser::ParseMPEG4(
RTPPayload& parsedPacket) const
{
if (_dataLength <= 5)
{
// Received empty MPEG4 packet
return false;
}
parsedPacket.frameType = kPFrame;
if (_dataPtr[0] == 0 && _dataPtr[1] == 0 && _dataPtr[2] == 1)
{
parsedPacket.info.MPEG4.isFirstPacket = true;
if (!(_dataPtr[4] & 0x40))
{
parsedPacket.frameType = kIFrame;
}
}
parsedPacket.info.MPEG4.data = _dataPtr;
parsedPacket.info.MPEG4.dataLength = _dataLength;
return true;
}
//
// VP8 format:
//
@@ -1183,144 +985,4 @@ int ModuleRTPUtility::RTPPayloadParser::ParseVP8TIDAndKeyIdx(
(*dataLength)--;
return 0;
}
bool
ModuleRTPUtility::RTPPayloadParser::H263PictureStartCode(const WebRtc_UWord8* data, const bool skipFirst2bytes) const
{
// data is at least 3 bytes!
if(skipFirst2bytes)
{
const WebRtc_UWord8 h3 = *(data);
if((h3 & 0x7C) == 0 && (h3 & 0x80))
{
return true;
}
}else
{
// first part of the frame
const WebRtc_UWord8 h1 = *(data);
const WebRtc_UWord8 h2 = *(data+1);
const WebRtc_UWord8 h3 = *(data+2);
if(h1 == 0 && h2 == 0 && (h3 & 0x7C) == 0 && (h3 & 0x80))
{
return true;
}
}
return false;
}
void
ModuleRTPUtility::RTPPayloadParser::GetH263FrameSize(const WebRtc_UWord8* inputVideoBuffer,
WebRtc_UWord16& width,
WebRtc_UWord16& height) const
{
WebRtc_UWord8 uiH263PTypeFmt = (inputVideoBuffer[4] >> 2) & 0x07;
if (uiH263PTypeFmt == 7) //extended PTYPE (for QQVGA, QVGA, VGA)
{
const WebRtc_UWord8 uiH263PlusPTypeUFEP = ((inputVideoBuffer[4] & 0x03) << 1) + ((inputVideoBuffer[5] >> 7) & 0x01);
if (uiH263PlusPTypeUFEP == 1) //optional part included
{
WebRtc_UWord8 uiH263PlusPTypeFmt = (inputVideoBuffer[5] >> 4) & 0x07;
if(uiH263PlusPTypeFmt == 6) //custom picture format
{
const WebRtc_UWord16 uiH263PlusPTypeCPFmt_PWI = ((inputVideoBuffer[9] & 0x7F) << 2) + ((inputVideoBuffer[10] >> 6) & 0x03);
const WebRtc_UWord16 uiH263PlusPTypeCPFmt_PHI = ((inputVideoBuffer[10] & 0x1F) << 4) + ((inputVideoBuffer[11] >> 4) & 0x0F);
width = (uiH263PlusPTypeCPFmt_PWI + 1)*4;
height = uiH263PlusPTypeCPFmt_PHI*4;
}
else
{
switch (uiH263PlusPTypeFmt)
{
case 1: // SQCIF
width = 128;
height = 96;
break;
case 2: // QCIF
width = 176;
height = 144;
break;
case 3: // CIF
width = 352;
height = 288;
break;
case 4: // 4CIF
width = 704;
height = 576;
break;
case 5: // 16CIF
width = 1408;
height = 1152;
break;
default:
assert(false);
break;
}
}
}
}
else
{
switch (uiH263PTypeFmt)
{
case 1: // SQCIF
width = 128;
height = 96;
break;
case 2: // QCIF
width = 176;
height = 144;
break;
case 3: // CIF
width = 352;
height = 288;
break;
case 4: // 4CIF
width = 704;
height = 576;
break;
case 5: // 16CIF
width = 1408;
height = 1152;
break;
default:
assert(false);
break;
}
}
}
ModuleRTPUtility::FrameTypes
ModuleRTPUtility::RTPPayloadParser::GetH263FrameType(
const WebRtc_UWord8* inputVideoBuffer) const
{
FrameTypes frameType = kPFrame;
const WebRtc_UWord8 uiH263PTypeFmt = (inputVideoBuffer[4] >> 2) & 0x07;
WebRtc_UWord8 pType = 1;
if (uiH263PTypeFmt != 7)
{
pType = (inputVideoBuffer[4] >> 1) & 0x01;
}
else
{
const WebRtc_UWord8 uiH263PlusPTypeUFEP = ((inputVideoBuffer[4] & 0x03) << 1) + ((inputVideoBuffer[5] >> 7) & 0x01);
if (uiH263PlusPTypeUFEP == 1)
{
pType = ((inputVideoBuffer[7] >> 2) & 0x07);
}
else if (uiH263PlusPTypeUFEP == 0)
{
pType = ((inputVideoBuffer[5] >> 4) & 0x07);
}
}
if (pType == 0)
{
frameType = kIFrame;
}
return frameType;
}
} // namespace webrtc
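As a worked note on the parsing helpers deleted above: H263PictureStartCode() matched the 22-bit H.263 picture start code, i.e. two zero bytes followed by a byte whose top bit is set and whose next five bits are clear.

// Byte pattern the removed H263PictureStartCode() tested for:
//   data[0] : 0000 0000
//   data[1] : 0000 0000
//   data[2] : 1000 00xx   -> (h3 & 0x80) != 0 and (h3 & 0x7C) == 0
// GetH263FrameSize() then read the PTYPE source-format bits (or the H.263+
// CPFMT fields) right after the start code to recover the frame dimensions.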

View File

@@ -22,9 +22,6 @@ namespace webrtc {
enum RtpVideoCodecTypes
{
kRtpNoVideo = 0,
kRtpH263Video = 1,
kRtpH2631998Video = 2,
kRtpMpeg4Video = 5,
kRtpFecVideo = 10,
kRtpVp8Video = 11
};
@@ -153,29 +150,6 @@ namespace ModuleRTPUtility
kPFrame // Delta frame
};
struct RTPPayloadH263
{
// H.263 and H.263+
bool hasPictureStartCode;
bool insert2byteStartCode;
bool hasPbit;
WebRtc_UWord16 frameWidth;
WebRtc_UWord16 frameHeight;
WebRtc_UWord8 endBits; // ignore last end bits
WebRtc_UWord8 startBits; // ignore first bits
const WebRtc_UWord8* data;
WebRtc_UWord16 dataLength;
};
struct RTPPayloadMPEG4
{
// MPEG4
bool isFirstPacket;
const WebRtc_UWord8* data;
WebRtc_UWord16 dataLength;
};
struct RTPPayloadVP8
{
bool nonReferenceFrame;
@@ -199,8 +173,6 @@ namespace ModuleRTPUtility
union RTPPayloadUnion
{
RTPPayloadH263 H263;
RTPPayloadMPEG4 MPEG4;
RTPPayloadVP8 VP8;
};
@@ -229,11 +201,6 @@ namespace ModuleRTPUtility
private:
bool ParseGeneric(RTPPayload& parsedPacket) const;
bool ParseH263(RTPPayload& parsedPacket) const;
bool ParseH2631998(RTPPayload& parsedPacket) const;
bool ParseMPEG4(RTPPayload& parsedPacket) const;
bool ParseVP8(RTPPayload& parsedPacket) const;
int ParseVP8Extension(RTPPayloadVP8 *vp8,
@@ -259,16 +226,6 @@ namespace ModuleRTPUtility
const WebRtc_UWord8 *dataPtr,
int dataLength) const;
// H.263
bool H263PictureStartCode(const WebRtc_UWord8* data,
const bool skipFirst2bytes = false) const;
void GetH263FrameSize(const WebRtc_UWord8* inputVideoBuffer,
WebRtc_UWord16& width,
WebRtc_UWord16& height) const;
FrameTypes GetH263FrameType(const WebRtc_UWord8* inputVideoBuffer) const;
private:
WebRtc_Word32 _id;
const WebRtc_UWord8* _dataPtr;

View File

@@ -696,18 +696,6 @@ WebRtc_Word32 VideoFilePlayerImpl::SetUpVideoDecoder()
return -1;
}
if(strncmp(video_codec_info_.plName, "MP4V-ES", 8) == 0)
{
if(_videoDecoder.SetCodecConfigParameters(
video_codec_info_.plType,
video_codec_info_.codecSpecific.MPEG4.configParameters,
video_codec_info_.codecSpecific.MPEG4.configParametersSize) !=
0)
{
return -1;
}
}
_frameLengthMS = 1000/video_codec_info_.maxFramerate;
// Size of unencoded data (I420) should be the largest possible frame size

View File

@@ -76,20 +76,6 @@ WebRtc_Word32 VideoCoder::SetDecodeCodec(VideoCodec& videoCodecInst,
return 0;
}
WebRtc_Word32 VideoCoder::CodecConfigParameters(WebRtc_UWord8* buffer,
WebRtc_Word32 size)
{
return _vcm->CodecConfigParameters(buffer, size);
}
WebRtc_Word32 VideoCoder::SetCodecConfigParameters(WebRtc_UWord8 payloadType,
const WebRtc_UWord8* buffer,
WebRtc_Word32 length)
{
return _vcm->SetCodecConfigParameters(payloadType, buffer, length);
}
WebRtc_Word32 VideoCoder::Decode(VideoFrame& decodedVideo,
const EncodedVideoData& encodedData)
{

View File

@@ -35,13 +35,6 @@ public:
WebRtc_Word32 SetDecodeCodec(VideoCodec& videoCodecInst,
WebRtc_Word32 numberOfCores);
WebRtc_Word32 CodecConfigParameters(WebRtc_UWord8* buffer,
WebRtc_Word32 size);
WebRtc_Word32 SetCodecConfigParameters(WebRtc_UWord8 payloadType,
const WebRtc_UWord8* buffer,
WebRtc_Word32 length);
WebRtc_Word32 Decode(VideoFrame& decodedVideo,
const EncodedVideoData& encodedData);

View File

@@ -251,14 +251,8 @@ const char* ExcludeFrameTypesToStr(ExcludeFrameTypes e) {
const char* VideoCodecTypeToStr(webrtc::VideoCodecType e) {
switch (e) {
case kVideoCodecH263:
return "H263";
case kVideoCodecH264:
return "H264";
case kVideoCodecVP8:
return "VP8";
case kVideoCodecMPEG4:
return "MPEG4";
case kVideoCodecI420:
return "I420";
case kVideoCodecRED:

View File

@@ -463,21 +463,6 @@ public:
WebRtc_UWord32 payloadLength,
const WebRtcRTPHeader& rtpInfo) = 0;
// Sets codec config parameters received out-of-band to the currently
// selected receive codec.
//
// Input:
// - payloadType : Payload type which specifies which codec to set these
// parameters to.
// - buffer : Codec config parameters.
// - length : Length of the parameter data.
//
// Return value : VCM_OK, on success.
// < 0, on error.
virtual WebRtc_Word32 SetCodecConfigParameters(WebRtc_UWord8 payloadType,
const WebRtc_UWord8* buffer,
WebRtc_Word32 length) = 0;
// Minimum playout delay (Used for lip-sync). This is the minimum delay required
// to sync with audio. Not included in VideoCodingModule::Delay()
// Defaults to 0 ms.

View File

@@ -36,10 +36,8 @@ namespace webrtc
#define VCM_ERROR_REQUEST_SLI -12
#define VCM_NOT_IMPLEMENTED -20
#define VCM_H263_PAYLOAD_TYPE 34
#define VCM_RED_PAYLOAD_TYPE 96
#define VCM_ULPFEC_PAYLOAD_TYPE 97
#define VCM_H263_1998_PAYLOAD_TYPE 121
#define VCM_VP8_PAYLOAD_TYPE 120
#define VCM_I420_PAYLOAD_TYPE 124
@@ -48,12 +46,6 @@ enum VCMNackProperties
kNackHistoryLength = 450
};
enum VCMH263FrameDrop
{
kDecodePFrames,
kDropPFrames
};
enum VCMVideoProtection
{
kProtectionNack, // Both send-side and receive-side

View File

@@ -274,11 +274,6 @@ VCMCodecDataBase::RegisterSendCodec(const VideoCodec* sendCodec,
{
return VCM_PARAMETER_ERROR;
}
if (strcmp(sendCodec->plName, "H263") == 0 &&
(sendCodec->plType != 34))
{
return VCM_PARAMETER_ERROR;
}
if (sendCodec->plType <= 0)
{
return VCM_PARAMETER_ERROR;
@@ -656,9 +651,6 @@ VCMCodecDataBase::CreateAndInitDecoder(WebRtc_UWord8 payloadType,
ReleaseDecoder(ptrDecoder);
return NULL;
}
SetCodecConfigParameters(*ptrDecoder, *decoderItem->_settings);
memcpy(&newCodec, decoderItem->_settings, sizeof(VideoCodec));
return ptrDecoder;
}
@@ -721,36 +713,6 @@ VCMCodecDataBase::ReleaseDecoder(VCMGenericDecoder* decoder) const
}
}
WebRtc_Word32
VCMCodecDataBase::SetCodecConfigParameters(WebRtc_UWord8 payloadType,
const WebRtc_UWord8* buffer,
WebRtc_Word32 length)
{
VCMDecoderMapItem* decItem = FindDecoderItem(payloadType);
if (decItem == NULL)
{
return VCM_PARAMETER_ERROR;
}
switch (decItem->_settings->codecType)
{
case kVideoCodecMPEG4:
{
memcpy(decItem->_settings->codecSpecific.MPEG4.configParameters, buffer, length);
decItem->_settings->codecSpecific.MPEG4.configParametersSize =
static_cast<WebRtc_UWord8>(length);
break;
}
default:
// This codec doesn't have codec config parameters
return VCM_GENERAL_ERROR;
}
if (_ptrDecoder != NULL && _receiveCodec.plType == decItem->_settings->plType)
{
return _ptrDecoder->SetCodecConfigParameters(buffer, length);
}
return VCM_OK;
}
VCMDecoderMapItem*
VCMCodecDataBase::FindDecoderItem(WebRtc_UWord8 payloadType) const
{
@@ -790,28 +752,4 @@ VCMCodecDataBase::CreateDecoder(VideoCodecType type) const
return NULL;
}
}
void
VCMCodecDataBase::SetCodecConfigParameters(VCMGenericDecoder& decoder,
const VideoCodec& settings)
{
switch (settings.codecType)
{
case kVideoCodecMPEG4:
{
if (settings.codecSpecific.MPEG4.configParametersSize > 0)
{
decoder.SetCodecConfigParameters(
settings.codecSpecific.MPEG4.configParameters,
settings.codecSpecific.MPEG4.configParametersSize);
}
break;
}
default:
// No codec config parameters for this codec
return;
}
return;
}
}

View File

@@ -174,10 +174,6 @@ public:
bool RenderTiming() const;
WebRtc_Word32 SetCodecConfigParameters(WebRtc_UWord8 payloadType,
const WebRtc_UWord8* buffer,
WebRtc_Word32 length);
protected:
/**
* Create an internal Encoder given a codec type
@@ -190,9 +186,6 @@ protected:
*/
VCMGenericDecoder* CreateDecoder(VideoCodecType type) const;
static void SetCodecConfigParameters(VCMGenericDecoder& decoder,
const VideoCodec& settings);
VCMDecoderMapItem* FindDecoderItem(WebRtc_UWord8 payloadType) const;
VCMExtDecoderMapItem* FindExternalDecoderItem(WebRtc_UWord8 payloadType) const;

View File

@@ -414,12 +414,6 @@ VCMFrameBuffer::PrepareForDecode()
_sessionInfo.BuildVP8FragmentationHeader(_buffer, _length,
&_fragmentation);
}
else
{
_length = _sessionInfo.PrepareForDecode(_buffer);
}
#else
_length = _sessionInfo.PrepareForDecode(_buffer);
#endif
}

View File

@@ -28,7 +28,6 @@ VCMPacket::VCMPacket()
isFirstPacket(false),
completeNALU(kNaluUnset),
insertStartCode(false),
bits(false),
codecSpecificHeader() {
}
@@ -47,7 +46,6 @@ VCMPacket::VCMPacket(const WebRtc_UWord8* ptr,
isFirstPacket(rtpHeader.type.Video.isFirstPacket),
completeNALU(kNaluComplete),
insertStartCode(false),
bits(false),
codecSpecificHeader(rtpHeader.type.Video)
{
CopyCodecSpecifics(rtpHeader.type.Video);
@@ -66,7 +64,6 @@ VCMPacket::VCMPacket(const WebRtc_UWord8* ptr, WebRtc_UWord32 size, WebRtc_UWord
isFirstPacket(false),
completeNALU(kNaluComplete),
insertStartCode(false),
bits(false),
codecSpecificHeader()
{}
@@ -82,7 +79,6 @@ void VCMPacket::Reset() {
isFirstPacket = false;
completeNALU = kNaluUnset;
insertStartCode = false;
bits = false;
memset(&codecSpecificHeader, 0, sizeof(RTPVideoHeader));
}

View File

@@ -47,10 +47,6 @@ public:
VCMNaluCompleteness completeNALU; // Default is kNaluIncomplete.
bool insertStartCode; // True if a start code should be inserted before this
// packet.
bool bits; // The first bits of this packet are zero and its first
// byte should be ORed with the last byte of the
// previous packet.
RTPVideoHeader codecSpecificHeader;
protected:
@@ -58,5 +54,4 @@ protected:
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_CODING_PACKET_H_

View File

@@ -108,7 +108,6 @@ int VCMSessionInfo::InsertBuffer(uint8_t* frame_buffer,
PacketIterator it;
int packet_size = packet.sizeBytes;
if (!packet.bits)
packet_size += (packet.insertStartCode ? kH264StartCodeLengthBytes : 0);
// Calculate the offset into the frame buffer for this packet.
@@ -529,7 +528,6 @@ int VCMSessionInfo::InsertPacket(const VCMPacket& packet,
uint8_t* frame_buffer,
bool enable_decodable_state,
int rtt_ms) {
assert(!packet.insertStartCode || !packet.bits);
// Check if this is first packet (only valid for some codecs)
if (packet.isFirstPacket) {
// The first packet in a frame signals the frame type.
@@ -584,72 +582,6 @@ void VCMSessionInfo::InformOfEmptyPacket(uint16_t seq_num) {
empty_seq_num_low_ = seq_num;
}
int VCMSessionInfo::PrepareForDecode(uint8_t* frame_buffer) {
int length = SessionLength();
int real_data_bytes = 0;
if (length == 0)
return length;
PacketIterator it = packets_.begin();
PacketIterator prev_it = it;
for (; it != packets_.end(); ++it) {
bool packet_loss = ((*prev_it).sizeBytes == 0 ||
!InSequence(it, prev_it));
if ((*it).bits) {
if (prev_it != it) { // Not the first packet.
uint8_t* ptr_first_byte =
const_cast<uint8_t*>((*it).dataPtr);
if (packet_loss) {
// It is better to throw away this packet if we are
// missing the previous packet.
memset(ptr_first_byte, 0, (*it).sizeBytes);
++packets_not_decodable_;
} else if ((*it).sizeBytes > 0) {
// Glue with previous byte.
// Move everything from [this packet start + 1, end of buffer] one
// byte to the left.
uint8_t* ptr_prev_byte =
const_cast<uint8_t*>((*prev_it).dataPtr) +
(*prev_it).sizeBytes - 1;
*ptr_prev_byte = (*ptr_prev_byte) | (*ptr_first_byte);
memmove(const_cast<uint8_t*>((*it).dataPtr),
(*it).dataPtr + 1, (*it).sizeBytes - 1);
ShiftSubsequentPackets(it, -1);
(*it).sizeBytes--;
length--;
real_data_bytes += (*it).sizeBytes;
}
} else {
memset(const_cast<uint8_t*>((*it).dataPtr), 0,
(*it).sizeBytes);
++packets_not_decodable_;
}
} else if (packet_loss &&
(*it).codecSpecificHeader.codec == kRTPVideoH263) {
// Pad H.263 packet losses with 10 zeros to make it easier
// for the decoder.
const int kPaddingLength = 10;
WebRtc_UWord8 padding_data[kPaddingLength] = {0};
// Make a padding packet based on a copy of this packet.
VCMPacket padding_packet(*it);
++padding_packet.seqNum;
padding_packet.dataPtr = padding_data;
padding_packet.sizeBytes = kPaddingLength;
length += InsertPacket(padding_packet, frame_buffer, false, 0);
} else {
real_data_bytes += (*it).sizeBytes;
}
prev_it = it;
}
if (real_data_bytes == 0) {
// Drop the frame since it contains nothing but zeros.
for (it = packets_.begin(); it != packets_.end(); ++it)
(*it).sizeBytes = 0;
length = 0;
}
return length;
}
int VCMSessionInfo::packets_not_decodable() const {
return packets_not_decodable_;
}
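A worked example of the boundary-byte merge removed above: if the previous packet's last byte is 0x07 (only its low three bits are payload) and the current "bits" packet's first byte is 0xF8 (only its high five bits are payload), the glued byte is 0x07 | 0xF8 == 0xFF and the current packet shrinks by one byte, so the session length drops by one. This is exactly what the ORingReorderedBitsPackets unit test and the jitter buffer "bits" test deleted later in this change assert.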

View File

@@ -69,7 +69,6 @@ class VCMSessionInfo {
bool LayerSync() const;
int Tl0PicId() const;
bool NonReference() const;
int PrepareForDecode(uint8_t* frame_buffer);
void SetPreviousFrameLoss() { previous_frame_loss_ = true; }
bool PreviousFrameLoss() const { return previous_frame_loss_; }

View File

@@ -32,7 +32,6 @@ class TestSessionInfo : public ::testing::Test {
packet_.dataPtr = packet_buffer_;
packet_.seqNum = 0;
packet_.timestamp = 0;
packet_.bits = false;
}
void FillPacket(uint8_t start_value) {
@@ -89,7 +88,7 @@ class TestNalUnits : public TestSessionInfo {
protected:
virtual void SetUp() {
TestSessionInfo::SetUp();
packet_.codec = kVideoCodecH264;
packet_.codec = kVideoCodecVP8;
}
bool VerifyNalu(int offset, int packets_expected, int start_value) {
@@ -915,72 +914,4 @@ TEST_F(TestNackList, LostAllButEmptyPackets) {
EXPECT_EQ(-2, seq_num_list_[3]);
EXPECT_EQ(4, seq_num_list_[4]);
}
TEST_F(TestSessionInfo, PacketPriorBitsPacketLost) {
packet_.seqNum = 0;
packet_.codecSpecificHeader.codec = kRTPVideoH263;
packet_.bits = true;
packet_.isFirstPacket = false;
packet_.markerBit = true;
FillPacket(1);
ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
kPacketBufferSize);
EXPECT_EQ(0, session_.PrepareForDecode(frame_buffer_));
EXPECT_EQ(0, session_.SessionLength());
EXPECT_EQ(1, session_.packets_not_decodable());
}
TEST_F(TestSessionInfo, MiddlePacketPriorBitsPacketLost) {
packet_.codecSpecificHeader.codec = kRTPVideoH263;
packet_.bits = false;
packet_.isFirstPacket = true;
packet_.seqNum = 0;
packet_.markerBit = false;
FillPacket(2);
ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
kPacketBufferSize);
packet_.bits = true;
packet_.isFirstPacket = false;
packet_.seqNum += 2;
packet_.markerBit = true;
FillPacket(2);
ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
kPacketBufferSize);
EXPECT_EQ(2 * kPacketBufferSize, session_.PrepareForDecode(frame_buffer_));
EXPECT_EQ(2 * kPacketBufferSize, session_.SessionLength());
EXPECT_EQ(1, session_.packets_not_decodable());
}
TEST_F(TestSessionInfo, ORingReorderedBitsPackets) {
const uint8_t kEndByte = 0x07;
const uint8_t kStartByte = 0xF8;
packet_.codecSpecificHeader.codec = kRTPVideoH263;
packet_.bits = true;
packet_.isFirstPacket = false;
packet_.seqNum = 1;
packet_.markerBit = true;
FillPacket(2);
packet_buffer_[0] = kStartByte;
ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
kPacketBufferSize);
packet_.bits = false;
packet_.isFirstPacket = true;
packet_.seqNum = 0;
packet_.markerBit = false;
FillPacket(1);
packet_buffer_[kPacketBufferSize - 1] = kEndByte;
ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
kPacketBufferSize);
EXPECT_EQ(2 * kPacketBufferSize - 1,
session_.PrepareForDecode(frame_buffer_));
EXPECT_EQ(2 * kPacketBufferSize - 1, session_.SessionLength());
EXPECT_EQ(kStartByte | kEndByte, frame_buffer_[kPacketBufferSize - 1]);
EXPECT_EQ(0, session_.packets_not_decodable());
}
} // namespace webrtc

View File

@@ -1405,32 +1405,6 @@ VideoCodingModuleImpl::IncomingPacket(const WebRtc_UWord8* incomingPayload,
return VCM_OK;
}
// Set codec config parameters
WebRtc_Word32
VideoCodingModuleImpl::SetCodecConfigParameters(WebRtc_UWord8 payloadType,
const WebRtc_UWord8* buffer,
WebRtc_Word32 length)
{
WEBRTC_TRACE(webrtc::kTraceModuleCall,
webrtc::kTraceVideoCoding,
VCMId(_id),
"SetCodecConfigParameters()");
CriticalSectionScoped cs(_receiveCritSect);
WebRtc_Word32 ret = _codecDataBase.SetCodecConfigParameters(payloadType,
buffer,
length);
if (ret < 0)
{
WEBRTC_TRACE(webrtc::kTraceError,
webrtc::kTraceVideoCoding,
VCMId(_id),
"SetCodecConfigParameters() failed, %d", ret);
return ret;
}
return VCM_OK;
}
// Minimum playout delay (used for lip-sync). This is the minimum delay required
// to sync with audio. Not included in VideoCodingModule::Delay()
// Defaults to 0 ms.

View File

@@ -228,11 +228,6 @@ public:
virtual WebRtc_Word32 DecodeFromStorage(
const EncodedVideoData& frameFromStorage);
// Set codec config parameters
virtual WebRtc_Word32 SetCodecConfigParameters(WebRtc_UWord8 payloadType,
const WebRtc_UWord8* buffer,
WebRtc_Word32 length);
// Minimum playout delay (used for lip-sync). This is the minimum delay
// required to sync with audio. Not included in VideoCodingModule::Delay()
// Defaults to 0 ms.
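A hedged usage sketch of the delay documented above (the setter name, the factory calls, and the 50 ms value are assumptions based on the VCM interface of this era, not shown in this hunk):

#include "video_coding.h"

void ConfigureLipSyncDelay() {
  webrtc::VideoCodingModule* vcm = webrtc::VideoCodingModule::Create(0);
  // Require at least 50 ms of buffering before rendering, to stay in
  // sync with audio; 0 ms (the default) disables the extra delay.
  vcm->SetMinimumPlayoutDelay(50);
  webrtc::VideoCodingModule::Destroy(vcm);
}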

View File

@@ -627,73 +627,9 @@ int JitterBufferTest(CmdArgs& args)
seqNum += 2;
//printf("DONE frame re-ordering 2 frames 2 packets\n");
//
// TEST H.263 bits
//
// -----------------
// | 1541 | 1542 |
// -----------------
// sBits
seqNum++;
timeStamp += 2*33*90;
packet.frameType = kVideoFrameDelta;
packet.isFirstPacket = true;
packet.markerBit = false;
packet.seqNum = seqNum;
packet.timestamp = timeStamp;
packet.bits = false;
packet.codec = kVideoCodecH263;
frameIn = jb.GetFrame(packet);
TEST(frameIn != 0);
// Insert a packet into a frame
TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
// get packet notification
TEST(timeStamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
// check incoming frame type
TEST(incomingFrameType == kVideoFrameDelta);
// get the frame
frameOut = jb.GetCompleteFrameForDecoding(10);
// it should not be complete
TEST(frameOut == 0);
seqNum++;
packet.isFirstPacket = false;
packet.markerBit = true;
packet.seqNum = seqNum;
packet.bits = true;
packet.dataPtr = &(data[9]);
frameIn = jb.GetFrame(packet);
TEST(frameIn != 0);
// Insert a packet into a frame
TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
TEST(timeStamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
// get the frame
frameOut = jb.GetCompleteFrameForDecoding(10);
TEST(CheckOutFrame(frameOut, (size*2)-1, false) == 0);
// check the frame type
TEST(frameOut->FrameType() == kVideoFrameDelta);
// Release frame (when done with decoding)
jb.ReleaseFrame(frameOut);
// restore
packet.dataPtr = data;
packet.bits = false;
packet.codec = kVideoCodecUnknown;
//printf("DONE H.263 frame 2 packets with bits\n");
//
// TEST duplicate packets
@@ -704,7 +640,7 @@ int JitterBufferTest(CmdArgs& args)
//
seqNum++;
timeStamp += 33*90;
timeStamp += 2*33*90;
packet.frameType = kVideoFrameDelta;
packet.isFirstPacket = true;
packet.markerBit = false;
@@ -824,7 +760,7 @@ int JitterBufferTest(CmdArgs& args)
WebRtc_UWord32 numKeyFrames = 0;
TEST(jb.GetFrameStatistics(numDeltaFrames, numKeyFrames) == 0);
TEST(numDeltaFrames == 9);
TEST(numDeltaFrames == 8);
TEST(numKeyFrames == 1);
WebRtc_UWord32 frameRate;
@@ -1983,86 +1919,11 @@ int JitterBufferTest(CmdArgs& args)
jb.Flush();
// Three reordered H263 packets with bits.
packet.codec = kVideoCodecH263;
packet.frameType = kVideoFrameDelta;
packet.isFirstPacket = false;
packet.markerBit = false;
packet.bits = true;
packet.seqNum += 1;
WebRtc_UWord8 oldData1 = data[0];
WebRtc_UWord8 oldData2 = data[packet.sizeBytes - 1];
unsigned char startByte = 0x07;
unsigned char endByte = 0xF8;
data[0] = startByte;
TEST(frameIn = jb.GetFrame(packet));
TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
frameOut = jb.GetFrameForDecoding();
TEST(frameOut == NULL);
packet.seqNum -= 1;
packet.isFirstPacket = true;
packet.bits = false;
data[0] = oldData1;
data[packet.sizeBytes - 1] = endByte;
TEST(frameIn = jb.GetFrame(packet));
TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
frameOut = jb.GetFrameForDecoding();
TEST(frameOut == NULL);
packet.seqNum += 2;
packet.isFirstPacket = false;
packet.markerBit = true;
data[packet.sizeBytes - 1] = oldData2;
TEST(frameIn = jb.GetFrame(packet));
TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
frameOut = jb.GetCompleteFrameForDecoding(0);
TEST(frameOut != NULL);
const WebRtc_UWord8* buf = frameOut->Buffer();
TEST(buf[packet.sizeBytes - 1] == (startByte | endByte));
jb.ReleaseFrame(frameOut);
// First packet lost, second packet with bits.
// The JB only outputs the frame if the next one arrives:
// Adding a dummy timestamp value so it won't interfere with the test.
packet.seqNum = 1;
packet.timestamp = timeStamp + 33 * 90 * 5;
packet.frameType = kVideoFrameDelta;
packet.isFirstPacket = false;
packet.completeNALU = kNaluStart;
packet.markerBit = false;
frameIn = jb.GetFrame(packet);
TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
packet.frameType = kVideoFrameDelta;
packet.isFirstPacket = false;
packet.markerBit = true;
packet.bits = true;
packet.seqNum += 2;
packet.timestamp = timeStamp + 33 * 90;
data[0] = 0x07;
data[packet.sizeBytes - 1] = 0xF8;
TEST(frameIn = jb.GetFrame(packet));
TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
frameOut = jb.GetFrameForDecoding();
TEST(frameOut != NULL);
TEST(frameOut->Length() == 0);
jb.ReleaseFrame(frameOut);
data[0] = oldData1;
data[packet.sizeBytes - 1] = oldData2;
packet.codec = kVideoCodecUnknown;
jb.Flush();
// Test that we cannot get incomplete frames from the JB if we haven't
// received the marker bit, unless we have received a packet from a later
// timestamp.
packet.seqNum += 2;
packet.bits = false;
packet.frameType = kVideoFrameDelta;
packet.isFirstPacket = false;
packet.markerBit = false;

View File

@@ -92,13 +92,6 @@ VCMNTEncodeCompleteCallback::SendData(
rtpInfo.type.Video.height = 0;
switch (_test.VideoType())
{
case kVideoCodecH263:
rtpInfo.type.Video.codec = kRTPVideoH263;
rtpInfo.type.Video.codecHeader.H263.bits = false;
rtpInfo.type.Video.codecHeader.H263.independentlyDecodable = false;
rtpInfo.type.Video.height = (WebRtc_UWord16)_test.Height();
rtpInfo.type.Video.width = (WebRtc_UWord16)_test.Width();
break;
case kVideoCodecVP8:
rtpInfo.type.Video.codec = kRTPVideoVP8;
rtpInfo.type.Video.codecHeader.VP8.InitRTPVideoHeaderVP8();
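For reference, a hedged sketch of the VP8-only path that remains in the test callback after this change (the WebRtcRTPHeader type, the include path, and the helper name are assumptions based on the surrounding code):

#include <string.h>
#include "module_common_types.h"  // assumed header for webrtc::WebRtcRTPHeader

static void FillVp8RtpInfo(webrtc::WebRtcRTPHeader* rtp_info,
                           WebRtc_UWord16 width, WebRtc_UWord16 height) {
  memset(rtp_info, 0, sizeof(*rtp_info));
  rtp_info->type.Video.codec = webrtc::kRTPVideoVP8;
  rtp_info->type.Video.isFirstPacket = true;
  rtp_info->type.Video.width = width;
  rtp_info->type.Video.height = height;
  rtp_info->type.Video.codecHeader.VP8.InitRTPVideoHeaderVP8();
  rtp_info->type.Video.codecHeader.VP8.nonReference = false;
}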

View File

@@ -68,10 +68,6 @@ VCMEncodeCompleteCallback::SendData(
rtpInfo.type.Video.width = (WebRtc_UWord16)_width;
switch (_codecType)
{
case webrtc::kRTPVideoH263:
rtpInfo.type.Video.codecHeader.H263.bits = false;
rtpInfo.type.Video.codecHeader.H263.independentlyDecodable = false;
break;
case webrtc::kRTPVideoVP8:
rtpInfo.type.Video.codecHeader.VP8.InitRTPVideoHeaderVP8();
rtpInfo.type.Video.codecHeader.VP8.nonReference =

View File

@@ -34,14 +34,6 @@ ConvertCodecType(const char* plname)
{
return kRTPVideoVP8;
}
else if (strncmp(plname,"H263" , 5) == 0)
{
return kRTPVideoH263;
}
else if (strncmp(plname, "H263-1998",10) == 0)
{
return kRTPVideoH263;
}
else if (strncmp(plname,"I420" , 5) == 0)
{
return kRTPVideoI420;

View File

@@ -90,10 +90,6 @@ int ParseArguments(int argc, char **argv, CmdArgs& args)
{
args.codecType = kVideoCodecI420;
}
else if (strncmp(argv[i+1], "H263", 4) == 0)
{
args.codecType = kVideoCodecH263;
}
else
return -1;

View File

@@ -162,10 +162,6 @@ class WEBRTC_DLLEXPORT ViECodec {
virtual int WaitForFirstKeyFrame(const int video_channel,
const bool wait) = 0;
// This function makes VideoEngine decode all incoming H.263 key frames as
// delta frames and all incoming delta frames as key frames.
virtual int SetInverseH263Logic(int video_channel, bool enable) = 0;
protected:
ViECodec() {}
virtual ~ViECodec() {}

View File

@@ -136,8 +136,7 @@ void TestCodecs(const TbInterfaces& interfaces,
for (int i = 0; i < codec_interface->NumberOfCodecs(); i++) {
EXPECT_EQ(0, codec_interface->GetCodec(i, video_codec));
if (video_codec.codecType == webrtc::kVideoCodecMPEG4 ||
video_codec.codecType == webrtc::kVideoCodecRED ||
if (video_codec.codecType == webrtc::kVideoCodecRED ||
video_codec.codecType == webrtc::kVideoCodecULPFEC) {
ViETest::Log("\t %d. %s not tested", i, video_codec.plName);
} else {

View File

@@ -122,9 +122,6 @@ void SetSuitableResolution(webrtc::VideoCodec* video_codec,
// I420 is very bandwidth heavy, so limit it here.
video_codec->width = 176;
video_codec->height = 144;
} else if (video_codec->codecType == webrtc::kVideoCodecH263) {
video_codec->width = 352;
video_codec->height = 288;
} else {
// Otherwise go with 640x480.
video_codec->width = 640;
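For scale, a rough back-of-the-envelope behind "I420 is very bandwidth heavy" (illustration only; uncompressed I420 carries 12 bits per pixel):

#include <stdio.h>

int main() {
  const double kBitsPerPixelI420 = 12.0;
  const double fps = 30.0;
  // 176x144 @ 30 fps: ~9.1 Mbit/s; 640x480 @ 30 fps: ~110.6 Mbit/s.
  const double qcif = 176 * 144 * kBitsPerPixelI420 * fps / 1e6;
  const double vga  = 640 * 480 * kBitsPerPixelI420 * fps / 1e6;
  printf("I420 @ 176x144x30: %.1f Mbit/s\n", qcif);
  printf("I420 @ 640x480x30: %.1f Mbit/s\n", vga);
  return 0;
}

Limiting raw I420 to 176x144 therefore keeps the test within a manageable rate, while the compressed codecs can run at 640x480.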

View File

@@ -100,9 +100,6 @@ void ViEAutoTest::PrintVideoCodec(const webrtc::VideoCodec videoCodec)
switch (videoCodec.codecType)
{
case webrtc::kVideoCodecH263:
ViETest::Log("\tcodecType: H263");
break;
case webrtc::kVideoCodecVP8:
ViETest::Log("\tcodecType: VP8");
break;

View File

@@ -100,8 +100,7 @@ void ViEAutoTest::ViECodecExtendedTest()
{
EXPECT_EQ(0, ptrViECodec->GetCodec(idx, videoCodec));
if (videoCodec.codecType != webrtc::kVideoCodecH263
&& videoCodec.codecType != webrtc::kVideoCodecI420)
if (videoCodec.codecType != webrtc::kVideoCodecI420)
{
videoCodec.width = 640;
videoCodec.height = 480;

View File

@@ -8,11 +8,11 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include <stdio.h>
#include "critical_section_wrapper.h"
#include "engine_configurations.h"
#include "rtp_rtcp.h"
#include "stdio.h"
#include "trace.h"
#include "video_coding.h"
#include "video_processing.h"

View File

@@ -2527,8 +2527,4 @@ void ViEChannel::OnIncomingCSRCChanged(const WebRtc_Word32 id,
}
}
WebRtc_Word32 ViEChannel::SetInverseH263Logic(const bool enable) {
return rtp_rtcp_.SetH263InverseLogic(enable);
}
} // namespace webrtc

View File

@@ -334,8 +334,6 @@ class ViEChannel
WebRtc_Word32 RegisterEffectFilter(ViEEffectFilter* effect_filter);
WebRtc_Word32 SetInverseH263Logic(const bool enable);
ViEFileRecorder& GetIncomingFileRecorder();
void ReleaseIncomingFileRecorder();

View File

@@ -613,25 +613,6 @@ int ViECodecImpl::WaitForFirstKeyFrame(const int video_channel,
return 0;
}
int ViECodecImpl::SetInverseH263Logic(int video_channel, bool enable) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(instance_id_),
"%s(video_channel: %d)", __FUNCTION__, video_channel);
ViEChannelManagerScoped cs(channel_manager_);
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(instance_id_, video_channel),
"%s: No channel %d", __FUNCTION__, video_channel);
SetLastError(kViECodecInvalidChannelId);
return -1;
}
if (vie_channel->SetInverseH263Logic(enable) != 0) {
SetLastError(kViECodecUnknownError);
return -1;
}
return 0;
}
bool ViECodecImpl::CodecValid(const VideoCodec& video_codec) {
// Check pl_name matches codec_type.
if (video_codec.codecType == kVideoCodecRED) {
@@ -658,16 +639,10 @@ bool ViECodecImpl::CodecValid(const VideoCodec& video_codec) {
WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
"Codec type doesn't match pl_name", video_codec.plType);
return false;
} else if ((video_codec.codecType == kVideoCodecH263 &&
strncmp(video_codec.plName, "H263", 4) == 0) ||
(video_codec.codecType == kVideoCodecH263 &&
strncmp(video_codec.plName, "H263-1998", 9) == 0) ||
(video_codec.codecType == kVideoCodecVP8 &&
} else if ((video_codec.codecType == kVideoCodecVP8 &&
strncmp(video_codec.plName, "VP8", 4) == 0) ||
(video_codec.codecType == kVideoCodecI420 &&
strncmp(video_codec.plName, "I420", 4) == 0) ||
(video_codec.codecType == kVideoCodecH264 &&
strncmp(video_codec.plName, "H264", 4) == 0)) {
strncmp(video_codec.plName, "I420", 4) == 0)) {
// OK.
} else {
WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
@@ -703,17 +678,6 @@ bool ViECodecImpl::CodecValid(const VideoCodec& video_codec) {
"Number of Simulcast streams can not be 1");
return false;
}
if (video_codec.codecType == kVideoCodecH263) {
if ((video_codec.width == 704 && video_codec.height == 576) ||
(video_codec.width == 352 && video_codec.height == 288) ||
(video_codec.width == 176 && video_codec.height == 144) ||
(video_codec.width == 128 && video_codec.height == 96)) {
// OK.
} else {
WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Invalid size for H.263");
return false;
}
}
return true;
}
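A hedged sketch of a codec configuration that passes the pl_name check that remains after this change (the include path and the payload type value are assumptions; field names follow webrtc::VideoCodec as used elsewhere in this change):

#include <string.h>
#include "common_types.h"  // assumed location of webrtc::VideoCodec

static webrtc::VideoCodec MakeVp8Codec() {
  webrtc::VideoCodec codec;
  memset(&codec, 0, sizeof(codec));
  codec.codecType = webrtc::kVideoCodecVP8;
  strncpy(codec.plName, "VP8", sizeof(codec.plName) - 1);
  codec.plType = 120;  // any dynamic payload type
  codec.width = 640;
  codec.height = 480;
  return codec;
}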

View File

@@ -63,7 +63,6 @@ class ViECodecImpl
virtual int DeregisterDecoderObserver(const int video_channel);
virtual int SendKeyFrame(const int video_channel);
virtual int WaitForFirstKeyFrame(const int video_channel, const bool wait);
virtual int SetInverseH263Logic(int video_channel, bool enable);
protected:
ViECodecImpl();