First implementation of simulcast: adds VP8 simulcast to the video engine.

Changed the API of the RTP module
Expanded the auto test with a simulcast test
Made the video codec tests compile
Added the vp8_simulcast files to this CL
Added the missing auto test file
Review URL: http://webrtc-codereview.appspot.com/188001

git-svn-id: http://webrtc.googlecode.com/svn/trunk@736 4adac7df-926f-26a2-2b94-8c16560cd09d
Author: pwestin@webrtc.org
Date:   2011-10-13 15:19:55 +00:00
Parent: 103f33b734
Commit: 1da1ce0da5
69 changed files with 3374 additions and 1617 deletions

View File

@@ -485,6 +485,7 @@ enum RawVideoType
// Video codec // Video codec
enum { kConfigParameterSize = 128}; enum { kConfigParameterSize = 128};
enum { kPayloadNameSize = 32}; enum { kPayloadNameSize = 32};
enum { kMaxSimulcastStreams = 4};
// H.263 specific // H.263 specific
struct VideoCodecH263 struct VideoCodecH263
@@ -530,9 +531,10 @@ struct VideoCodecH264
// VP8 specific // VP8 specific
struct VideoCodecVP8 struct VideoCodecVP8
{ {
bool pictureLossIndicationOn; bool pictureLossIndicationOn;
bool feedbackModeOn; bool feedbackModeOn;
VideoCodecComplexity complexity; VideoCodecComplexity complexity;
unsigned char numberOfTemporalLayers;
}; };
// MPEG-4 specific // MPEG-4 specific
@@ -570,6 +572,19 @@ union VideoCodecUnion
VideoCodecGeneric Generic; VideoCodecGeneric Generic;
}; };
/*
* Simulcast is when the same stream is encoded multiple times with different
* settings such as resolution.
*/
struct SimulcastStream
{
unsigned short width;
unsigned short height;
unsigned char numberOfTemporalLayers;
unsigned int maxBitrate;
unsigned int qpMax; // minimum quality
};
// Common video codec properties // Common video codec properties
struct VideoCodec struct VideoCodec
{ {
@@ -588,8 +603,8 @@ struct VideoCodec
VideoCodecUnion codecSpecific; VideoCodecUnion codecSpecific;
unsigned int qpMax; unsigned int qpMax;
unsigned char numberOfSimulcastStreams;
SimulcastStream simulcastStream[kMaxSimulcastStreams];
}; };
} // namespace webrtc } // namespace webrtc
#endif // WEBRTC_COMMON_TYPES_H #endif // WEBRTC_COMMON_TYPES_H
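The SimulcastStream array and the numberOfSimulcastStreams counter above are how a caller asks the engine for simulcast. A minimal, purely illustrative sketch of filling them in for three VP8 streams; the resolutions and bitrate values are made up, the maxBitrate unit is not stated in this hunk, and the remaining VideoCodec fields are assumed to be configured as before:

#include "common_types.h"  // webrtc::VideoCodec, webrtc::SimulcastStream

void ConfigureThreeSimulcastStreams(webrtc::VideoCodec* codec) {
  codec->numberOfSimulcastStreams = 3;  // must not exceed kMaxSimulcastStreams (4)
  const unsigned short widths[3]  = {160, 320, 640};
  const unsigned short heights[3] = {120, 240, 480};
  const unsigned int maxBitrates[3] = {100, 300, 1000};  // illustrative values
  for (int i = 0; i < 3; ++i) {
    codec->simulcastStream[i].width = widths[i];
    codec->simulcastStream[i].height = heights[i];
    codec->simulcastStream[i].numberOfTemporalLayers = 1;
    codec->simulcastStream[i].maxBitrate = maxBitrates[i];
    codec->simulcastStream[i].qpMax = codec->qpMax;  // reuse the codec-wide QP cap
  }
}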

View File

@@ -42,9 +42,12 @@ struct RTPVideoHeaderH263
bool bits; // H.263 mode B, Xor the lasy byte of previus packet with the bool bits; // H.263 mode B, Xor the lasy byte of previus packet with the
// first byte of this packet // first byte of this packet
}; };
enum {kNoPictureId = -1}; enum {kNoPictureId = -1};
enum {kNoTl0PicIdx = -1}; enum {kNoTl0PicIdx = -1};
enum {kNoTemporalIdx = -1}; enum {kNoTemporalIdx = -1};
enum {kNoSimulcastIdx = 0};
struct RTPVideoHeaderVP8 struct RTPVideoHeaderVP8
{ {
void InitRTPVideoHeaderVP8() void InitRTPVideoHeaderVP8()
@@ -89,6 +92,8 @@ struct RTPVideoHeader
WebRtc_UWord16 height; WebRtc_UWord16 height;
bool isFirstPacket; // first packet in frame bool isFirstPacket; // first packet in frame
WebRtc_UWord8 simulcastIdx; // Index of the simulcast encoder creating
// this frame, 0 if not using simulcast.
RTPVideoCodecTypes codec; RTPVideoCodecTypes codec;
RTPVideoTypeHeader codecHeader; RTPVideoTypeHeader codecHeader;
}; };
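The new simulcastIdx field is what ties an RTP packet back to the simulcast encoder that produced the frame. A sketch of a sender filling in the header for the second stream; the include name, the kRTPVideoVP8 enumerator and the VP8 union member are assumptions based on this module's usual conventions:

#include "rtp_rtcp_defines.h"  // include name assumed

void FillHeaderForSecondStream(webrtc::RTPVideoHeader* hdr) {
  hdr->width = 320;
  hdr->height = 240;
  hdr->isFirstPacket = true;
  hdr->simulcastIdx = 1;  // frame from the second simulcast encoder; 0 == no simulcast
  hdr->codec = webrtc::kRTPVideoVP8;             // enumerator name assumed
  hdr->codecHeader.VP8.InitRTPVideoHeaderVP8();  // reset the VP8-specific fields
}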

View File

@@ -161,22 +161,38 @@ public:
WebRtc_UWord8 &sampleTimeSeconds) = 0; WebRtc_UWord8 &sampleTimeSeconds) = 0;
/* /*
* set codec name and payload type * set voice codec name and payload type
*
* payloadName - payload name of codec
* payloadType - payload type of codec
* frequency - (audio specific) frequency of codec
* channels - (audio specific) number of channels in codec (1 = mono, 2 = stereo)
* rate - (audio) rate of codec
* (video) maxBitrate of codec, bits/sec
* *
* return -1 on failure else 0 * return -1 on failure else 0
*/ */
virtual WebRtc_Word32 RegisterReceivePayload(const WebRtc_Word8 payloadName[RTP_PAYLOAD_NAME_SIZE], virtual WebRtc_Word32 RegisterReceivePayload(
const WebRtc_Word8 payloadType, const CodecInst& voiceCodec) = 0;
const WebRtc_UWord32 frequency = 0,
const WebRtc_UWord8 channels = 1, /*
const WebRtc_UWord32 rate = 0) = 0; * set video codec name and payload type
*
* return -1 on failure else 0
*/
virtual WebRtc_Word32 RegisterReceivePayload(
const VideoCodec& videoCodec) = 0;
/*
* get payload type for a voice codec
*
* return -1 on failure else 0
*/
virtual WebRtc_Word32 ReceivePayloadType(
const CodecInst& voiceCodec,
WebRtc_Word8* plType) = 0;
/*
* get payload type for a video codec
*
* return -1 on failure else 0
*/
virtual WebRtc_Word32 ReceivePayloadType(
const VideoCodec& videoCodec,
WebRtc_Word8* plType) = 0;
/* /*
* Remove a registerd payload type from list of accepted payloads * Remove a registerd payload type from list of accepted payloads
@@ -185,40 +201,8 @@ public:
* *
* return -1 on failure else 0 * return -1 on failure else 0
*/ */
virtual WebRtc_Word32 DeRegisterReceivePayload(const WebRtc_Word8 payloadType) = 0; virtual WebRtc_Word32 DeRegisterReceivePayload(
const WebRtc_Word8 payloadType) = 0;
/*
* get configured payload type
*
* payloadName - payload name of codec
* frequency - frequency of codec, ignored for video
* payloadType - payload type of codec, ignored for video
* channels - number of channels in codec (1 = mono, 2 = stereo)
* rate - (audio) rate of codec (ignored if set to 0)
*
* return -1 on failure else 0
*/
virtual WebRtc_Word32 ReceivePayloadType(const WebRtc_Word8 payloadName[RTP_PAYLOAD_NAME_SIZE],
const WebRtc_UWord32 frequency,
const WebRtc_UWord8 channels,
WebRtc_Word8* payloadType,
const WebRtc_UWord32 rate = 0) const = 0;
/*
* get configured payload
*
* payloadType - payload type of codec
* payloadName - payload name of codec
* frequency - frequency of codec
* channels - number of channels in codec (1 = mono, 2 = stereo)
*
* return -1 on failure else 0
*/
virtual WebRtc_Word32 ReceivePayload(const WebRtc_Word8 payloadType,
WebRtc_Word8 payloadName[RTP_PAYLOAD_NAME_SIZE],
WebRtc_UWord32* frequency,
WebRtc_UWord8* channels,
WebRtc_UWord32* rate = NULL) const = 0;
/* /*
* Get last received remote timestamp * Get last received remote timestamp
@@ -248,21 +232,6 @@ public:
*/ */
virtual WebRtc_Word32 RemoteCSRCs( WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize]) const = 0; virtual WebRtc_Word32 RemoteCSRCs( WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize]) const = 0;
/*
* get Current incoming payload
*
* payloadName - payload name of codec
* payloadType - payload type of codec
* frequency - frequency of codec
* channels - number of channels in codec (2 = stereo)
*
* return -1 on failure else 0
*/
virtual WebRtc_Word32 RemotePayload(WebRtc_Word8 payloadName[RTP_PAYLOAD_NAME_SIZE],
WebRtc_Word8* payloadType,
WebRtc_UWord32* frequency,
WebRtc_UWord8* channels) const = 0;
/* /*
* get the currently configured SSRC filter * get the currently configured SSRC filter
* *
@@ -289,8 +258,8 @@ public:
* *
* return -1 on failure else 0 * return -1 on failure else 0
*/ */
virtual WebRtc_Word32 IncomingPacket( const WebRtc_UWord8* incomingPacket, virtual WebRtc_Word32 IncomingPacket(const WebRtc_UWord8* incomingPacket,
const WebRtc_UWord16 packetLength) = 0; const WebRtc_UWord16 packetLength) = 0;
/* /*
@@ -300,11 +269,11 @@ public:
* *
* return -1 on failure else 0 * return -1 on failure else 0
*/ */
virtual WebRtc_Word32 IncomingAudioNTP(const WebRtc_UWord32 audioReceivedNTPsecs, virtual WebRtc_Word32 IncomingAudioNTP(
const WebRtc_UWord32 audioReceivedNTPfrac, const WebRtc_UWord32 audioReceivedNTPsecs,
const WebRtc_UWord32 audioRTCPArrivalTimeSecs, const WebRtc_UWord32 audioReceivedNTPfrac,
const WebRtc_UWord32 audioRTCPArrivalTimeFrac) = 0; const WebRtc_UWord32 audioRTCPArrivalTimeSecs,
const WebRtc_UWord32 audioRTCPArrivalTimeFrac) = 0;
/************************************************************************** /**************************************************************************
* *
@@ -391,9 +360,10 @@ public:
* *
* return -1 on failure else 0 * return -1 on failure else 0
*/ */
virtual WebRtc_Word32 RTPKeepaliveStatus(bool* enable, virtual WebRtc_Word32 RTPKeepaliveStatus(
WebRtc_Word8* unknownPayloadType, bool* enable,
WebRtc_UWord16* deltaTransmitTimeMS) const = 0; WebRtc_Word8* unknownPayloadType,
WebRtc_UWord16* deltaTransmitTimeMS) const = 0;
/* /*
* check if RTPKeepaliveStatus is enabled * check if RTPKeepaliveStatus is enabled
@@ -403,20 +373,18 @@ public:
/* /*
* set codec name and payload type * set codec name and payload type
* *
* payloadName - payload name of codec * return -1 on failure else 0
* payloadType - payload type of codec */
* frequency - frequency of codec virtual WebRtc_Word32 RegisterSendPayload(
* channels - number of channels in codec (1 = mono, 2 = stereo) const CodecInst& voiceCodec) = 0;
* rate - (audio) rate of codec
* (video) maxBitrate of codec, bits/sec /*
* set codec name and payload type
* *
* return -1 on failure else 0 * return -1 on failure else 0
*/ */
virtual WebRtc_Word32 RegisterSendPayload(const WebRtc_Word8 payloadName[RTP_PAYLOAD_NAME_SIZE], virtual WebRtc_Word32 RegisterSendPayload(
const WebRtc_Word8 payloadType, const VideoCodec& videoCodec) = 0;
const WebRtc_UWord32 frequency = 0,
const WebRtc_UWord8 channels = 1,
const WebRtc_UWord32 rate = 0) = 0;
/* /*
* Unregister a send payload * Unregister a send payload
@@ -548,7 +516,7 @@ public:
const WebRtc_UWord8* payloadData, const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize, const WebRtc_UWord32 payloadSize,
const RTPFragmentationHeader* fragmentation = NULL, const RTPFragmentationHeader* fragmentation = NULL,
const RTPVideoTypeHeader* rtpTypeHdr = NULL) = 0; const RTPVideoHeader* rtpVideoHdr = NULL) = 0;
/************************************************************************** /**************************************************************************
* *
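With the struct-based registration above, callers hand the whole codec description to the module instead of separate name/type/frequency/rate parameters, and SendOutgoingData() now takes the full RTPVideoHeader (carrying simulcastIdx) instead of only the codec-specific part. A hedged sketch of the new registration flow; the payload type and bitrate are illustrative, and the plName/plType members of VideoCodec are assumed to be unchanged by this CL:

#include <cstring>

#include "common_types.h"  // webrtc::VideoCodec
#include "rtp_rtcp.h"      // webrtc::RtpRtcp

WebRtc_Word32 RegisterVp8(webrtc::RtpRtcp* rtpRtcp) {
  webrtc::VideoCodec vp8;
  memset(&vp8, 0, sizeof(vp8));
  strncpy(vp8.plName, "VP8", webrtc::kPayloadNameSize - 1);
  vp8.plType = 120;       // illustrative payload type
  vp8.maxBitrate = 1000;  // illustrative

  if (rtpRtcp->RegisterSendPayload(vp8) != 0 ||
      rtpRtcp->RegisterReceivePayload(vp8) != 0) {
    return -1;
  }
  WebRtc_Word8 plType = -1;
  return rtpRtcp->ReceivePayloadType(vp8, &plType);  // look the type back up
}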

View File

@@ -210,17 +210,14 @@ class RtpVideoFeedback
{ {
public: public:
// this function should call codec module to inform it about the request // this function should call codec module to inform it about the request
virtual void OnReceivedIntraFrameRequest( virtual void OnReceivedIntraFrameRequest(const WebRtc_Word32 id,
const WebRtc_Word32 id, const FrameType type,
const WebRtc_UWord8 message = 0) = 0; const WebRtc_UWord8 streamIdx) = 0;
virtual void OnNetworkChanged(const WebRtc_Word32 id, virtual void OnNetworkChanged(const WebRtc_Word32 id,
const WebRtc_UWord32 minBitrateBps, const WebRtc_UWord32 bitrateBps,
const WebRtc_UWord32 maxBitrateBps,
const WebRtc_UWord8 fractionLost, const WebRtc_UWord8 fractionLost,
const WebRtc_UWord16 roundTripTimeMs, const WebRtc_UWord16 roundTripTimeMs) = 0;
const WebRtc_UWord16 bwEstimateKbitMin,
const WebRtc_UWord16 bwEstimateKbitMax) = 0;
protected: protected:
virtual ~RtpVideoFeedback() {} virtual ~RtpVideoFeedback() {}
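Implementers of RtpVideoFeedback have to adapt to the slimmed-down callbacks: intra-frame requests now carry the simulcast stream index, and OnNetworkChanged() reports a single target bitrate instead of a min/max pair. A sketch of a hypothetical implementer (the class name and include name are made up):

#include "rtp_rtcp_defines.h"  // include name assumed

class MyVideoFeedback : public webrtc::RtpVideoFeedback {
 public:
  virtual void OnReceivedIntraFrameRequest(const WebRtc_Word32 id,
                                           const webrtc::FrameType type,
                                           const WebRtc_UWord8 streamIdx) {
    // Ask the encoder that produces simulcast stream streamIdx for a key frame.
  }
  virtual void OnNetworkChanged(const WebRtc_Word32 id,
                                const WebRtc_UWord32 bitrateBps,
                                const WebRtc_UWord8 fractionLost,
                                const WebRtc_UWord16 roundTripTimeMs) {
    // Redistribute bitrateBps across the configured simulcast streams.
  }
  virtual ~MyVideoFeedback() {}
};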

View File

@@ -18,25 +18,18 @@
namespace webrtc { namespace webrtc {
BandwidthManagement::BandwidthManagement(const WebRtc_Word32 id) : BandwidthManagement::BandwidthManagement(const WebRtc_Word32 id) :
_id(id), _id(id),
_critsect(*CriticalSectionWrapper::CreateCriticalSection()), _critsect(*CriticalSectionWrapper::CreateCriticalSection()),
_lastPacketLossExtendedHighSeqNum(0), _lastPacketLossExtendedHighSeqNum(0),
_lastReportAllLost(false), _lastReportAllLost(false),
_lastLoss(0), _lastLoss(0),
_accumulateLostPacketsQ8(0), _accumulateLostPacketsQ8(0),
_accumulateExpectedPackets(0), _accumulateExpectedPackets(0),
_bitRate(0), _bitRate(0),
_minBitRateConfigured(0), _minBitRateConfigured(0),
_maxBitRateConfigured(0), _maxBitRateConfigured(0),
_last_fraction_loss(0), _last_fraction_loss(0),
_last_round_trip_time(0), _last_round_trip_time(0),
// bandwidth estimate
_bwEstimateIncoming(0), _bwEstimateIncoming(0),
_bwEstimateIncomingMax(0),
_smoothedFractionLostQ4(-1), // indicate uninitialized _smoothedFractionLostQ4(-1), // indicate uninitialized
_sFLFactorQ4(14) // 0.875 in Q4 _sFLFactorQ4(14) // 0.875 in Q4
{ {
@@ -68,7 +61,7 @@ BandwidthManagement::SetSendBitrate(const WebRtc_UWord32 startBitrate,
} }
WebRtc_Word32 WebRtc_Word32
BandwidthManagement::MaxConfiguredBitrate(WebRtc_UWord16& maxBitrateKbit) BandwidthManagement::MaxConfiguredBitrate(WebRtc_UWord16* maxBitrateKbit)
{ {
CriticalSectionScoped cs(_critsect); CriticalSectionScoped cs(_critsect);
@@ -76,57 +69,48 @@ BandwidthManagement::MaxConfiguredBitrate(WebRtc_UWord16& maxBitrateKbit)
{ {
return -1; return -1;
} }
maxBitrateKbit = (WebRtc_UWord16)(_maxBitRateConfigured/1000); *maxBitrateKbit = (WebRtc_UWord16)(_maxBitRateConfigured/1000);
return 0; return 0;
} }
WebRtc_Word32 WebRtc_Word32
BandwidthManagement::UpdateBandwidthEstimate(const WebRtc_UWord16 bandWidthMinKbit, BandwidthManagement::UpdateBandwidthEstimate(const WebRtc_UWord16 bandWidthKbit,
const WebRtc_UWord16 bandWidthMaxKbit, WebRtc_UWord32* newBitrate,
WebRtc_UWord32& newBitrate, WebRtc_UWord8* fractionLost,
WebRtc_UWord8& fractionLost, WebRtc_UWord16* roundTripTime)
WebRtc_UWord16& roundTripTime)
{ {
newBitrate = 0; *newBitrate = 0;
CriticalSectionScoped cs(_critsect); CriticalSectionScoped cs(_critsect);
_bwEstimateIncoming = bandWidthMinKbit*1000; _bwEstimateIncoming = bandWidthKbit*1000;
_bwEstimateIncomingMax = bandWidthMaxKbit*1000;
if(_bitRate == 0) if(_bitRate == 0)
{ {
// BandwidthManagement off // BandwidthManagement off
return -1; return -1;
} }
if (_bwEstimateIncoming > 0 && _bitRate > _bwEstimateIncoming) if (_bwEstimateIncoming > 0 && _bitRate > _bwEstimateIncoming)
{ {
_bitRate = _bwEstimateIncoming; _bitRate = _bwEstimateIncoming;
} } else
else
{ {
return -1; return -1;
} }
newBitrate = _bitRate; *newBitrate = _bitRate;
fractionLost = _last_fraction_loss; *fractionLost = _last_fraction_loss;
roundTripTime = _last_round_trip_time; *roundTripTime = _last_round_trip_time;
return 0; return 0;
} }
WebRtc_Word32 WebRtc_Word32 BandwidthManagement::UpdatePacketLoss(
BandwidthManagement::UpdatePacketLoss(const WebRtc_UWord32 lastReceivedExtendedHighSeqNum, const WebRtc_UWord32 lastReceivedExtendedHighSeqNum,
const bool defaultCodec, const WebRtc_UWord16 rtt,
const WebRtc_UWord8 lossInput, WebRtc_UWord8* loss,
const WebRtc_UWord16 rtt, WebRtc_UWord32* newBitrate)
WebRtc_UWord32& newBitrate,
WebRtc_UWord16& bwEstimateKbitMin,
WebRtc_UWord16& bwEstimateKbitMax)
{ {
CriticalSectionScoped cs(_critsect); CriticalSectionScoped cs(_critsect);
WebRtc_UWord8 loss = lossInput; // Local copy to modify. _last_fraction_loss = *loss;
_last_fraction_loss = loss;
_last_round_trip_time = rtt; _last_round_trip_time = rtt;
if(_bitRate == 0) if(_bitRate == 0)
@@ -147,13 +131,13 @@ BandwidthManagement::UpdatePacketLoss(const WebRtc_UWord32 lastReceivedExtendedH
// Check if this report and the last was 100% loss, then report // Check if this report and the last was 100% loss, then report
// 100% loss even though seqNumDiff is small. // 100% loss even though seqNumDiff is small.
// If not, go on with the checks. // If not, go on with the checks.
if (!(_lastReportAllLost && loss == 255)) if (!(_lastReportAllLost && *loss == 255))
{ {
_lastReportAllLost = (loss == 255); _lastReportAllLost = (*loss == 255);
// Calculate number of lost packets. // Calculate number of lost packets.
// loss = 256 * numLostPackets / expectedPackets. // loss = 256 * numLostPackets / expectedPackets.
const int numLostPacketsQ8 = loss * seqNumDiff; const int numLostPacketsQ8 = *loss * seqNumDiff;
// Accumulate reports. // Accumulate reports.
_accumulateLostPacketsQ8 += numLostPacketsQ8; _accumulateLostPacketsQ8 += numLostPacketsQ8;
@@ -164,7 +148,7 @@ BandwidthManagement::UpdatePacketLoss(const WebRtc_UWord32 lastReceivedExtendedH
const int limitNumPackets = 10; const int limitNumPackets = 10;
if (_accumulateExpectedPackets >= limitNumPackets) if (_accumulateExpectedPackets >= limitNumPackets)
{ {
loss = _accumulateLostPacketsQ8 / _accumulateExpectedPackets; *loss = _accumulateLostPacketsQ8 / _accumulateExpectedPackets;
// Reset accumulators // Reset accumulators
_accumulateLostPacketsQ8 = 0; _accumulateLostPacketsQ8 = 0;
@@ -174,34 +158,24 @@ BandwidthManagement::UpdatePacketLoss(const WebRtc_UWord32 lastReceivedExtendedH
{ {
// Report same loss as before and keep the accumulators until // Report same loss as before and keep the accumulators until
// the next report. // the next report.
loss = _lastLoss; *loss = _lastLoss;
} }
} }
} }
// Keep for next time. // Keep for next time.
_lastLoss = loss; _lastLoss = *loss;
// Remember the sequence number until next time // Remember the sequence number until next time
_lastPacketLossExtendedHighSeqNum = lastReceivedExtendedHighSeqNum; _lastPacketLossExtendedHighSeqNum = lastReceivedExtendedHighSeqNum;
bwEstimateKbitMax = static_cast<WebRtc_UWord16>(_bwEstimateIncomingMax / 1000); WebRtc_UWord32 bitRate = ShapeSimple(*loss, rtt);
bwEstimateKbitMin = static_cast<WebRtc_UWord16>(_bwEstimateIncoming / 1000); if (bitRate == 0)
newBitrate = 0;
if (defaultCodec)
{
return 0;
}
WebRtc_UWord32 bitRate = ShapeSimple(loss, rtt);
if(bitRate == 0)
{ {
// no change // no change
return -1; return -1;
} }
_bitRate = bitRate; _bitRate = bitRate;
newBitrate = bitRate; *newBitrate = bitRate;
return 0; return 0;
} }
@@ -210,8 +184,9 @@ BandwidthManagement::UpdatePacketLoss(const WebRtc_UWord32 lastReceivedExtendedH
*/ */
// protected // protected
WebRtc_Word32 WebRtc_Word32 BandwidthManagement::CalcTFRCbps(WebRtc_Word16 avgPackSizeBytes,
BandwidthManagement::CalcTFRCbps(WebRtc_Word16 avgPackSizeBytes, WebRtc_Word32 rttMs, WebRtc_Word32 packetLoss) WebRtc_Word32 rttMs,
WebRtc_Word32 packetLoss)
{ {
if (avgPackSizeBytes <= 0 || rttMs <= 0 || packetLoss <= 0) if (avgPackSizeBytes <= 0 || rttMs <= 0 || packetLoss <= 0)
{ {
@@ -235,8 +210,8 @@ BandwidthManagement::CalcTFRCbps(WebRtc_Word16 avgPackSizeBytes, WebRtc_Word32 r
* Simple bandwidth estimation. Depends a lot on bwEstimateIncoming and packetLoss. * Simple bandwidth estimation. Depends a lot on bwEstimateIncoming and packetLoss.
*/ */
// protected // protected
WebRtc_UWord32 WebRtc_UWord32 BandwidthManagement::ShapeSimple(WebRtc_Word32 packetLoss,
BandwidthManagement::ShapeSimple(WebRtc_Word32 packetLoss, WebRtc_Word32 rtt) WebRtc_Word32 rtt)
{ {
WebRtc_UWord32 newBitRate = 0; WebRtc_UWord32 newBitRate = 0;
bool reducing = false; bool reducing = false;
@@ -292,18 +267,14 @@ BandwidthManagement::ShapeSimple(WebRtc_Word32 packetLoss, WebRtc_Word32 rtt)
{ {
newBitRate = _bwEstimateIncoming; newBitRate = _bwEstimateIncoming;
} }
if (newBitRate > _maxBitRateConfigured) if (newBitRate > _maxBitRateConfigured)
{ {
newBitRate = _maxBitRateConfigured; newBitRate = _maxBitRateConfigured;
} }
if (newBitRate < _minBitRateConfigured) if (newBitRate < _minBitRateConfigured)
{ {
newBitRate = _minBitRateConfigured; newBitRate = _minBitRateConfigured;
} }
return newBitRate; return newBitRate;
} }
} // namespace webrtc } // namespace webrtc

View File

@@ -26,27 +26,26 @@ public:
BandwidthManagement(const WebRtc_Word32 id); BandwidthManagement(const WebRtc_Word32 id);
~BandwidthManagement(); ~BandwidthManagement();
WebRtc_Word32 UpdateBandwidthEstimate(const WebRtc_UWord16 bandWidthMinKbit, // Call when we receive a RTCP message with TMMBR or REMB
const WebRtc_UWord16 bandWidthMaxKbit, WebRtc_Word32 UpdateBandwidthEstimate(const WebRtc_UWord16 bandWidthKbit,
WebRtc_UWord32& newBitrate, WebRtc_UWord32* newBitrate,
WebRtc_UWord8& fractionLost, WebRtc_UWord8* fractionLost,
WebRtc_UWord16& roundTripTime); WebRtc_UWord16* roundTripTime);
WebRtc_Word32 UpdatePacketLoss(const WebRtc_UWord32 lastReceivedExtendedHighSeqNum, // Call when we receive a RTCP message with a ReceiveBlock
const bool defaultCodec, WebRtc_Word32 UpdatePacketLoss(
const WebRtc_UWord8 lossInput, const WebRtc_UWord32 lastReceivedExtendedHighSeqNum,
const WebRtc_UWord16 rtt, const WebRtc_UWord16 rtt,
WebRtc_UWord32& newBitrate, WebRtc_UWord8* loss,
WebRtc_UWord16& bwEstimateKbitMin, WebRtc_UWord32* newBitrate);
WebRtc_UWord16& bwEstimateKbitMax);
WebRtc_Word32 AvailableBandwidth(WebRtc_UWord16& bandwidthKbit) const; WebRtc_Word32 AvailableBandwidth(WebRtc_UWord16* bandwidthKbit) const;
WebRtc_Word32 SetSendBitrate(const WebRtc_UWord32 startBitrate, WebRtc_Word32 SetSendBitrate(const WebRtc_UWord32 startBitrate,
const WebRtc_UWord16 minBitrateKbit, const WebRtc_UWord16 minBitrateKbit,
const WebRtc_UWord16 maxBitrateKbit); const WebRtc_UWord16 maxBitrateKbit);
WebRtc_Word32 MaxConfiguredBitrate(WebRtc_UWord16& maxBitrateKbit); WebRtc_Word32 MaxConfiguredBitrate(WebRtc_UWord16* maxBitrateKbit);
protected: protected:
WebRtc_UWord32 ShapeSimple(WebRtc_Word32 packetLoss, WebRtc_Word32 rtt); WebRtc_UWord32 ShapeSimple(WebRtc_Word32 packetLoss, WebRtc_Word32 rtt);
@@ -77,7 +76,6 @@ private:
// bandwidth estimate // bandwidth estimate
WebRtc_UWord32 _bwEstimateIncoming; WebRtc_UWord32 _bwEstimateIncoming;
WebRtc_UWord32 _bwEstimateIncomingMax;
WebRtc_Word16 _smoothedFractionLostQ4; WebRtc_Word16 _smoothedFractionLostQ4;
WebRtc_Word16 _sFLFactorQ4; // forgetting factor for _smoothedFractionLostQ4 WebRtc_Word16 _sFLFactorQ4; // forgetting factor for _smoothedFractionLostQ4
}; };
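BandwidthManagement now returns its results through pointer arguments rather than references, and UpdatePacketLoss() no longer takes the defaultCodec flag or hands back the min/max estimates. An illustrative caller; BandwidthManagement is an internal helper of the module, so this is only meant to show the in/out semantics of the loss argument:

#include "bandwidth_management.h"

void OnReceiveBlock(webrtc::BandwidthManagement* bwm,
                    WebRtc_UWord32 extendedHighSeqNum,
                    WebRtc_UWord16 rttMs,
                    WebRtc_UWord8 reportedFractionLost) {
  WebRtc_UWord8 loss = reportedFractionLost;  // in/out: may come back smoothed/accumulated
  WebRtc_UWord32 newBitrateBps = 0;
  if (bwm->UpdatePacketLoss(extendedHighSeqNum, rttMs, &loss,
                            &newBitrateBps) == 0) {
    // newBitrateBps holds the shaped target bitrate; hand it to the sender side.
  }
}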

View File

@@ -1155,7 +1155,7 @@ RTCPReceiver::HandleREMBItem(RTCPUtility::RTCPParserV2& rtcpParser,
rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpRemb; rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpRemb;
rtcpPacketInformation.receiverEstimatedMaxBitrate = rtcpPacket.REMB.BitRate; rtcpPacketInformation.receiverEstimatedMaxBitrate = rtcpPacket.REMB.BitRate;
// TODO send up SSRCs and do a sanity check // TODO(pwestin) send up SSRCs and do a sanity check
} }
// no need for critsect we have _criticalSectionRTCPReceiver // no need for critsect we have _criticalSectionRTCPReceiver
@@ -1235,13 +1235,14 @@ RTCPReceiver::HandleAPPItem(RTCPUtility::RTCPParserV2& rtcpParser,
} }
void void
RTCPReceiver::OnReceivedIntraFrameRequest(const WebRtc_UWord8 message) const RTCPReceiver::OnReceivedIntraFrameRequest(const FrameType frameType,
const WebRtc_UWord8 streamIdx) const
{ {
CriticalSectionScoped lock(_criticalSectionFeedbacks); CriticalSectionScoped lock(_criticalSectionFeedbacks);
if(_cbVideoFeedback) if(_cbVideoFeedback)
{ {
_cbVideoFeedback->OnReceivedIntraFrameRequest(_id, message); _cbVideoFeedback->OnReceivedIntraFrameRequest(_id, frameType, streamIdx);
} }
} }
@@ -1280,8 +1281,7 @@ RTCPReceiver::TriggerCallbacksFromRTCPPacket(RTCPPacketInformation& rtcpPacketIn
_rtpRtcp.OnPacketLossStatisticsUpdate( _rtpRtcp.OnPacketLossStatisticsUpdate(
rtcpPacketInformation.fractionLost, rtcpPacketInformation.fractionLost,
rtcpPacketInformation.roundTripTime, rtcpPacketInformation.roundTripTime,
rtcpPacketInformation.lastReceivedExtendedHighSeqNum, rtcpPacketInformation.lastReceivedExtendedHighSeqNum);
rtcpPacketInformation.jitter);
} }
} }
if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpSr) if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpSr)
@@ -1318,13 +1318,19 @@ RTCPReceiver::TriggerCallbacksFromRTCPPacket(RTCPPacketInformation& rtcpPacketIn
{ {
WEBRTC_TRACE(kTraceStateInfo, kTraceRtpRtcp, _id, "SIG [RTCP] Incoming FIR to id:%d", _id); WEBRTC_TRACE(kTraceStateInfo, kTraceRtpRtcp, _id, "SIG [RTCP] Incoming FIR to id:%d", _id);
} }
// we need use a bounce it up to handle default channel _rtpRtcp.OnReceivedIntraFrameRequest(&_rtpRtcp);
_rtpRtcp.OnReceivedIntraFrameRequest(0);
} }
if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpSli) if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpSli)
{ {
// we need use a bounce it up to handle default channel // we need use a bounce it up to handle default channel
_rtpRtcp.OnReceivedSliceLossIndication(rtcpPacketInformation.sliPictureId); _rtpRtcp.OnReceivedSliceLossIndication(
rtcpPacketInformation.sliPictureId);
}
if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpRemb)
{
// We need to bounce this to the default channel
_rtpRtcp.OnReceivedEstimatedMaxBitrate(
rtcpPacketInformation.receiverEstimatedMaxBitrate);
} }
if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpRpsi) if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpRpsi)
{ {
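The new kRtcpRemb branch bounces the receiver-estimated max bitrate up to the default module. The actual handler is part of the module implementation whose diff is suppressed below as too large; the sketch here only illustrates the implied data flow, under the assumption that the estimate ends up in BandwidthManagement::UpdateBandwidthEstimate():

#include "bandwidth_management.h"

void ForwardRembEstimate(webrtc::BandwidthManagement* bwm,
                         WebRtc_UWord32 rembBitrateBps) {
  WebRtc_UWord32 newBitrateBps = 0;
  WebRtc_UWord8 fractionLost = 0;
  WebRtc_UWord16 rttMs = 0;
  // UpdateBandwidthEstimate() caps the current send bitrate by the incoming
  // estimate and returns the resulting target plus the last loss/RTT stats.
  if (bwm->UpdateBandwidthEstimate(
          static_cast<WebRtc_UWord16>(rembBitrateBps / 1000),
          &newBitrateBps, &fractionLost, &rttMs) == 0) {
    // Propagate newBitrateBps, e.g. to the video encoder.
  }
}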

View File

@@ -72,24 +72,27 @@ public:
WebRtc_Word32 SenderInfoReceived(RTCPSenderInfo* senderInfo) const; WebRtc_Word32 SenderInfoReceived(RTCPSenderInfo* senderInfo) const;
void OnReceivedIntraFrameRequest(const WebRtc_UWord8 message) const; void OnReceivedIntraFrameRequest(const FrameType frameType,
const WebRtc_UWord8 streamIdx) const;
void OnReceivedSliceLossIndication(const WebRtc_UWord8 pitureID) const; void OnReceivedSliceLossIndication(const WebRtc_UWord8 pitureID) const;
void OnReceivedReferencePictureSelectionIndication(const WebRtc_UWord64 pitureID) const; void OnReceivedReferencePictureSelectionIndication(
const WebRtc_UWord64 pitureID) const;
// get statistics // get statistics
WebRtc_Word32 StatisticsReceived(const WebRtc_UWord32 remoteSSRC, WebRtc_Word32 StatisticsReceived(const WebRtc_UWord32 remoteSSRC,
RTCPReportBlock* receiveBlock) const; RTCPReportBlock* receiveBlock) const;
// Get TMMBR // Get TMMBR
WebRtc_Word32 TMMBRReceived(const WebRtc_UWord32 size, WebRtc_Word32 TMMBRReceived(const WebRtc_UWord32 size,
const WebRtc_UWord32 accNumCandidates, const WebRtc_UWord32 accNumCandidates,
TMMBRSet* candidateSet) const; TMMBRSet* candidateSet) const;
bool UpdateRTCPReceiveInformationTimers(); bool UpdateRTCPReceiveInformationTimers();
void UpdateBandwidthEstimate(const WebRtc_UWord16 bwEstimateKbit); void UpdateBandwidthEstimate(const WebRtc_UWord16 bwEstimateKbit);
WebRtc_Word32 BoundingSet(bool &tmmbrOwner, WebRtc_Word32 BoundingSet(bool &tmmbrOwner,
TMMBRSet*& boundingSetRec); TMMBRSet*& boundingSetRec);
WebRtc_Word32 SetPacketTimeout(const WebRtc_UWord32 timeoutMS); WebRtc_Word32 SetPacketTimeout(const WebRtc_UWord32 timeoutMS);
void PacketTimeout(); void PacketTimeout();

View File

@@ -318,11 +318,11 @@ int RtpFormatVp8::WriteTIDFields(WebRtc_UWord8* x_field,
int RtpFormatVp8::PayloadDescriptorExtraLength() const int RtpFormatVp8::PayloadDescriptorExtraLength() const
{ {
if (!beginning_) int length_bytes = 0;
if (beginning_)
{ {
return 0; length_bytes = PictureIdLength();
} }
int length_bytes = PictureIdLength();
if (TL0PicIdxFieldPresent()) ++length_bytes; if (TL0PicIdxFieldPresent()) ++length_bytes;
if (TIDFieldPresent()) ++length_bytes; if (TIDFieldPresent()) ++length_bytes;
if (length_bytes > 0) ++length_bytes; // Include the extension field. if (length_bytes > 0) ++length_bytes; // Include the extension field.
@@ -339,10 +339,7 @@ int RtpFormatVp8::PictureIdLength() const
{ {
return 1; return 1;
} }
else return 2;
{
return 2;
}
} }
bool RtpFormatVp8::XFieldPresent() const bool RtpFormatVp8::XFieldPresent() const
@@ -359,5 +356,4 @@ bool RtpFormatVp8::TL0PicIdxFieldPresent() const
{ {
return (hdr_info_.tl0PicIdx != kNoTl0PicIdx); return (hdr_info_.tl0PicIdx != kNoTl0PicIdx);
} }
} // namespace webrtc } // namespace webrtc

View File

@@ -512,12 +512,12 @@ RTPReceiver::DeRegisterReceivePayload(const WebRtc_Word8 payloadType)
return -1; return -1;
} }
WebRtc_Word32 WebRtc_Word32 RTPReceiver::ReceivePayloadType(
RTPReceiver::ReceivePayloadType(const WebRtc_Word8 payloadName[RTP_PAYLOAD_NAME_SIZE], const WebRtc_Word8 payloadName[RTP_PAYLOAD_NAME_SIZE],
const WebRtc_UWord32 frequency, const WebRtc_UWord32 frequency,
const WebRtc_UWord8 channels, const WebRtc_UWord8 channels,
WebRtc_Word8* payloadType, const WebRtc_UWord32 rate,
const WebRtc_UWord32 rate) const WebRtc_Word8* payloadType) const
{ {
if(payloadType == NULL) if(payloadType == NULL)
{ {

View File

@@ -59,17 +59,18 @@ public:
WebRtc_Word32 DeRegisterReceivePayload(const WebRtc_Word8 payloadType); WebRtc_Word32 DeRegisterReceivePayload(const WebRtc_Word8 payloadType);
WebRtc_Word32 ReceivePayloadType(const WebRtc_Word8 payloadName[RTP_PAYLOAD_NAME_SIZE], WebRtc_Word32 ReceivePayloadType(
const WebRtc_UWord32 frequency, const WebRtc_Word8 payloadName[RTP_PAYLOAD_NAME_SIZE],
const WebRtc_UWord8 channels, const WebRtc_UWord32 frequency,
WebRtc_Word8* payloadType, const WebRtc_UWord8 channels,
const WebRtc_UWord32 rate) const; const WebRtc_UWord32 rate,
WebRtc_Word8* payloadType) const;
WebRtc_Word32 ReceivePayload(const WebRtc_Word8 payloadType, WebRtc_Word32 ReceivePayload(const WebRtc_Word8 payloadType,
WebRtc_Word8 payloadName[RTP_PAYLOAD_NAME_SIZE], WebRtc_Word8 payloadName[RTP_PAYLOAD_NAME_SIZE],
WebRtc_UWord32* frequency, WebRtc_UWord32* frequency,
WebRtc_UWord8* channels, WebRtc_UWord8* channels,
WebRtc_UWord32* rate) const; WebRtc_UWord32* rate) const;
WebRtc_Word32 RemotePayload(WebRtc_Word8 payloadName[RTP_PAYLOAD_NAME_SIZE], WebRtc_Word32 RemotePayload(WebRtc_Word8 payloadName[RTP_PAYLOAD_NAME_SIZE],
WebRtc_Word8* payloadType, WebRtc_Word8* payloadType,

View File

@@ -86,17 +86,17 @@ RTPReceiverVideo::RegisterIncomingVideoCallback(RtpVideoFeedback* incomingMessag
} }
void void
RTPReceiverVideo::UpdateBandwidthManagement(const WebRtc_UWord32 minBitrateBps, RTPReceiverVideo::UpdateBandwidthManagement(const WebRtc_UWord32 bitrateBps,
const WebRtc_UWord32 maxBitrateBps,
const WebRtc_UWord8 fractionLost, const WebRtc_UWord8 fractionLost,
const WebRtc_UWord16 roundTripTimeMs, const WebRtc_UWord16 roundTripTimeMs)
const WebRtc_UWord16 bwEstimateKbitMin,
const WebRtc_UWord16 bwEstimateKbitMax)
{ {
CriticalSectionScoped lock(_criticalSectionFeedback); CriticalSectionScoped lock(_criticalSectionFeedback);
if(_cbVideoFeedback) if(_cbVideoFeedback)
{ {
_cbVideoFeedback->OnNetworkChanged(_id, minBitrateBps, maxBitrateBps, fractionLost, roundTripTimeMs, bwEstimateKbitMin, bwEstimateKbitMax); _cbVideoFeedback->OnNetworkChanged(_id,
bitrateBps,
fractionLost,
roundTripTimeMs);
} }
} }

View File

@@ -38,32 +38,33 @@ public:
WebRtc_Word32 Init(); WebRtc_Word32 Init();
WebRtc_Word32 RegisterIncomingVideoCallback(RtpVideoFeedback* incomingMessagesCallback); WebRtc_Word32 RegisterIncomingVideoCallback(
RtpVideoFeedback* incomingMessagesCallback);
void UpdateBandwidthManagement(const WebRtc_UWord32 minBitrateBps, void UpdateBandwidthManagement(const WebRtc_UWord32 bitrateBps,
const WebRtc_UWord32 maxBitrateBps,
const WebRtc_UWord8 fractionLost, const WebRtc_UWord8 fractionLost,
const WebRtc_UWord16 roundTripTimeMs, const WebRtc_UWord16 roundTripTimeMs);
const WebRtc_UWord16 bwEstimateKbitMin,
const WebRtc_UWord16 bwEstimateKbitMax);
ModuleRTPUtility::Payload* RegisterReceiveVideoPayload(const WebRtc_Word8 payloadName[RTP_PAYLOAD_NAME_SIZE], ModuleRTPUtility::Payload* RegisterReceiveVideoPayload(
const WebRtc_Word8 payloadType, const WebRtc_Word8 payloadName[RTP_PAYLOAD_NAME_SIZE],
const WebRtc_UWord32 maxRate); const WebRtc_Word8 payloadType,
const WebRtc_UWord32 maxRate);
WebRtc_Word32 ParseVideoCodecSpecific(WebRtcRTPHeader* rtpHeader, WebRtc_Word32 ParseVideoCodecSpecific(
const WebRtc_UWord8* payloadData, WebRtcRTPHeader* rtpHeader,
const WebRtc_UWord16 payloadDataLength, const WebRtc_UWord8* payloadData,
const RtpVideoCodecTypes videoType, const WebRtc_UWord16 payloadDataLength,
const bool isRED, const RtpVideoCodecTypes videoType,
const WebRtc_UWord8* incomingRtpPacket, const bool isRED,
const WebRtc_UWord16 incomingRtpPacketSize); const WebRtc_UWord8* incomingRtpPacket,
const WebRtc_UWord16 incomingRtpPacketSize);
WebRtc_Word32 SetH263InverseLogic(const bool enable); WebRtc_Word32 SetH263InverseLogic(const bool enable);
WebRtc_Word32 ReceiveRecoveredPacketCallback(WebRtcRTPHeader* rtpHeader, WebRtc_Word32 ReceiveRecoveredPacketCallback(
const WebRtc_UWord8* payloadData, WebRtcRTPHeader* rtpHeader,
const WebRtc_UWord16 payloadDataLength); const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadDataLength);
void SetPacketOverHead(WebRtc_UWord16 packetOverHead); void SetPacketOverHead(WebRtc_UWord16 packetOverHead);
@@ -72,54 +73,59 @@ protected:
WebRtc_UWord16 EstimateBandwidth( const WebRtc_UWord16 bufferLength); WebRtc_UWord16 EstimateBandwidth( const WebRtc_UWord16 bufferLength);
virtual WebRtc_Word32 CallbackOfReceivedPayloadData(const WebRtc_UWord8* payloadData, virtual WebRtc_Word32 CallbackOfReceivedPayloadData(
const WebRtc_UWord16 payloadSize, const WebRtc_UWord8* payloadData,
const WebRtcRTPHeader* rtpHeader) = 0; const WebRtc_UWord16 payloadSize,
const WebRtcRTPHeader* rtpHeader) = 0;
virtual WebRtc_UWord32 TimeStamp() const = 0; virtual WebRtc_UWord32 TimeStamp() const = 0;
virtual WebRtc_UWord16 SequenceNumber() const = 0; virtual WebRtc_UWord16 SequenceNumber() const = 0;
virtual WebRtc_UWord32 PayloadTypeToPayload(const WebRtc_UWord8 payloadType, virtual WebRtc_UWord32 PayloadTypeToPayload(
ModuleRTPUtility::Payload*& payload) const = 0; const WebRtc_UWord8 payloadType,
ModuleRTPUtility::Payload*& payload) const = 0;
virtual bool RetransmitOfOldPacket(const WebRtc_UWord16 sequenceNumber, virtual bool RetransmitOfOldPacket(
const WebRtc_UWord32 rtpTimeStamp) const = 0; const WebRtc_UWord16 sequenceNumber,
const WebRtc_UWord32 rtpTimeStamp) const = 0;
virtual WebRtc_Word8 REDPayloadType() const = 0; virtual WebRtc_Word8 REDPayloadType() const = 0;
WebRtc_Word32 SetCodecType(const RtpVideoCodecTypes videoType, WebRtc_Word32 SetCodecType(const RtpVideoCodecTypes videoType,
WebRtcRTPHeader* rtpHeader) const; WebRtcRTPHeader* rtpHeader) const;
WebRtc_Word32 ParseVideoCodecSpecificSwitch(WebRtcRTPHeader* rtpHeader, WebRtc_Word32 ParseVideoCodecSpecificSwitch(
const WebRtc_UWord8* payloadData, WebRtcRTPHeader* rtpHeader,
const WebRtc_UWord16 payloadDataLength, const WebRtc_UWord8* payloadData,
const RtpVideoCodecTypes videoType); const WebRtc_UWord16 payloadDataLength,
const RtpVideoCodecTypes videoType);
WebRtc_Word32 ReceiveGenericCodec(WebRtcRTPHeader *rtpHeader, WebRtc_Word32 ReceiveGenericCodec(WebRtcRTPHeader *rtpHeader,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadDataLength);
WebRtc_Word32 ReceiveH263Codec(WebRtcRTPHeader *rtpHeader,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadDataLength);
WebRtc_Word32 ReceiveH2631998Codec(WebRtcRTPHeader *rtpHeader,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadDataLength);
WebRtc_Word32 ReceiveH263CodecCommon(
ModuleRTPUtility::RTPPayload& parsedPacket,
WebRtcRTPHeader* rtpHeader);
WebRtc_Word32 ReceiveMPEG4Codec(WebRtcRTPHeader *rtpHeader,
const WebRtc_UWord8* payloadData, const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadDataLength); const WebRtc_UWord16 payloadDataLength);
WebRtc_Word32 ReceiveH263Codec(WebRtcRTPHeader *rtpHeader, WebRtc_Word32 ReceiveVp8Codec(WebRtcRTPHeader *rtpHeader,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadDataLength);
WebRtc_Word32 ReceiveH2631998Codec(WebRtcRTPHeader *rtpHeader,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadDataLength);
WebRtc_Word32 ReceiveH263CodecCommon(ModuleRTPUtility::RTPPayload& parsedPacket,
WebRtcRTPHeader* rtpHeader);
WebRtc_Word32 ReceiveMPEG4Codec(WebRtcRTPHeader *rtpHeader,
const WebRtc_UWord8* payloadData, const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadDataLength); const WebRtc_UWord16 payloadDataLength);
WebRtc_Word32 ReceiveVp8Codec(WebRtcRTPHeader *rtpHeader,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadDataLength);
WebRtc_Word32 BuildRTPheader(const WebRtcRTPHeader* rtpHeader, WebRtc_Word32 BuildRTPheader(const WebRtcRTPHeader* rtpHeader,
WebRtc_UWord8* dataBuffer) const; WebRtc_UWord8* dataBuffer) const;
private: private:
WebRtc_Word32 _id; WebRtc_Word32 _id;
@@ -128,24 +134,24 @@ private:
CriticalSectionWrapper& _criticalSectionFeedback; CriticalSectionWrapper& _criticalSectionFeedback;
RtpVideoFeedback* _cbVideoFeedback; RtpVideoFeedback* _cbVideoFeedback;
CriticalSectionWrapper& _criticalSectionReceiverVideo; CriticalSectionWrapper& _criticalSectionReceiverVideo;
// bandwidth // bandwidth
bool _completeFrame; bool _completeFrame;
WebRtc_UWord32 _packetStartTimeMs; WebRtc_UWord32 _packetStartTimeMs;
WebRtc_UWord16 _receivedBW[BW_HISTORY_SIZE]; WebRtc_UWord16 _receivedBW[BW_HISTORY_SIZE];
WebRtc_UWord16 _estimatedBW; WebRtc_UWord16 _estimatedBW;
// FEC // FEC
bool _currentFecFrameDecoded; bool _currentFecFrameDecoded;
ReceiverFEC* _receiveFEC; ReceiverFEC* _receiveFEC;
// H263 // H263
bool _h263InverseLogic; bool _h263InverseLogic;
// BWE // BWE
OverUseDetector _overUseDetector; OverUseDetector _overUseDetector;
BitRateStats _videoBitRate; BitRateStats _videoBitRate;
WebRtc_Word64 _lastBitRateChange; WebRtc_Word64 _lastBitRateChange;
WebRtc_UWord16 _packetOverHead; WebRtc_UWord16 _packetOverHead;
}; };

File diff suppressed because it is too large

View File

@@ -11,15 +11,14 @@
#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RTCP_IMPL_H_ #ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RTCP_IMPL_H_
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RTCP_IMPL_H_ #define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RTCP_IMPL_H_
#include "rtp_rtcp.h" #include <list>
#include "rtp_sender.h" #include "bandwidth_management.h"
#include "rtp_receiver.h"
#include "rtcp_receiver.h" #include "rtcp_receiver.h"
#include "rtcp_sender.h" #include "rtcp_sender.h"
#include "bandwidth_management.h" #include "rtp_receiver.h"
#include "rtp_rtcp.h"
#include "list_wrapper.h" #include "rtp_sender.h"
#ifdef MATLAB #ifdef MATLAB
class MatlabPlot; class MatlabPlot;
@@ -72,43 +71,30 @@ public:
// configure a timeout value // configure a timeout value
virtual WebRtc_Word32 SetPacketTimeout(const WebRtc_UWord32 RTPtimeoutMS, virtual WebRtc_Word32 SetPacketTimeout(const WebRtc_UWord32 RTPtimeoutMS,
const WebRtc_UWord32 RTCPtimeoutMS); const WebRtc_UWord32 RTCPtimeoutMS);
// Set periodic dead or alive notification // Set periodic dead or alive notification
virtual WebRtc_Word32 SetPeriodicDeadOrAliveStatus(const bool enable, virtual WebRtc_Word32 SetPeriodicDeadOrAliveStatus(
const WebRtc_UWord8 sampleTimeSeconds); const bool enable,
const WebRtc_UWord8 sampleTimeSeconds);
// Get periodic dead or alive notification status // Get periodic dead or alive notification status
virtual WebRtc_Word32 PeriodicDeadOrAliveStatus(bool &enable, virtual WebRtc_Word32 PeriodicDeadOrAliveStatus(
WebRtc_UWord8 &sampleTimeSeconds); bool &enable,
WebRtc_UWord8 &sampleTimeSeconds);
// set codec name and payload type virtual WebRtc_Word32 RegisterReceivePayload(const CodecInst& voiceCodec);
virtual WebRtc_Word32 RegisterReceivePayload( const WebRtc_Word8 payloadName[RTP_PAYLOAD_NAME_SIZE],
const WebRtc_Word8 payloadType,
const WebRtc_UWord32 frequency,
const WebRtc_UWord8 channels,
const WebRtc_UWord32 rate);
virtual WebRtc_Word32 DeRegisterReceivePayload(const WebRtc_Word8 payloadType); virtual WebRtc_Word32 RegisterReceivePayload(const VideoCodec& videoCodec);
// get configured payload type virtual WebRtc_Word32 ReceivePayloadType(const CodecInst& voiceCodec,
virtual WebRtc_Word32 ReceivePayloadType(const WebRtc_Word8 payloadName[RTP_PAYLOAD_NAME_SIZE], WebRtc_Word8* plType);
const WebRtc_UWord32 frequency,
const WebRtc_UWord8 channels,
WebRtc_Word8* payloadType,
const WebRtc_UWord32 rate = 0) const;
// get configured payload virtual WebRtc_Word32 ReceivePayloadType(const VideoCodec& videoCodec,
virtual WebRtc_Word32 ReceivePayload(const WebRtc_Word8 payloadType, WebRtc_Word8* plType);
WebRtc_Word8 payloadName[RTP_PAYLOAD_NAME_SIZE],
WebRtc_UWord32* frequency,
WebRtc_UWord8* channels,
WebRtc_UWord32* rate = NULL) const;
virtual WebRtc_Word32 RemotePayload(WebRtc_Word8 payloadName[RTP_PAYLOAD_NAME_SIZE], virtual WebRtc_Word32 DeRegisterReceivePayload(
WebRtc_Word8* payloadType, const WebRtc_Word8 payloadType);
WebRtc_UWord32* frequency,
WebRtc_UWord8* channels) const;
// get the currently configured SSRC filter // get the currently configured SSRC filter
virtual WebRtc_Word32 SSRCFilter(WebRtc_UWord32& allowedSSRC) const; virtual WebRtc_Word32 SSRCFilter(WebRtc_UWord32& allowedSSRC) const;
@@ -164,8 +150,12 @@ public:
virtual bool RTPKeepalive() const; virtual bool RTPKeepalive() const;
virtual WebRtc_Word32 RegisterSendPayload(const CodecInst& voiceCodec);
virtual WebRtc_Word32 RegisterSendPayload(const VideoCodec& videoCodec);
// set codec name and payload type // set codec name and payload type
virtual WebRtc_Word32 RegisterSendPayload(const WebRtc_Word8 payloadName[RTP_PAYLOAD_NAME_SIZE], WebRtc_Word32 RegisterSendPayload(const WebRtc_Word8 payloadName[RTP_PAYLOAD_NAME_SIZE],
const WebRtc_Word8 payloadType, const WebRtc_Word8 payloadType,
const WebRtc_UWord32 frequency, const WebRtc_UWord32 frequency,
const WebRtc_UWord8 channels, const WebRtc_UWord8 channels,
@@ -231,7 +221,7 @@ public:
const WebRtc_UWord8* payloadData, const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize, const WebRtc_UWord32 payloadSize,
const RTPFragmentationHeader* fragmentation = NULL, const RTPFragmentationHeader* fragmentation = NULL,
const RTPVideoTypeHeader* rtpTypeHdr = NULL); const RTPVideoHeader* rtpVideoHdr = NULL);
/* /*
* RTCP * RTCP
@@ -365,7 +355,7 @@ public:
// Send a Negative acknowledgement packet // Send a Negative acknowledgement packet
virtual WebRtc_Word32 SendNACK(const WebRtc_UWord16* nackList, virtual WebRtc_Word32 SendNACK(const WebRtc_UWord16* nackList,
const WebRtc_UWord16 size); const WebRtc_UWord16 size);
// Store the sent packets, needed to answer to a Negative acknowledgement requests // Store the sent packets, needed to answer to a Negative acknowledgement requests
virtual WebRtc_Word32 SetStorePacketsStatus(const bool enable, const WebRtc_UWord16 numberToStore = 200); virtual WebRtc_Word32 SetStorePacketsStatus(const bool enable, const WebRtc_UWord16 numberToStore = 200);
@@ -477,57 +467,60 @@ public:
virtual RateControlRegion OnOverUseStateUpdate(const RateControlInput& rateControlInput); virtual RateControlRegion OnOverUseStateUpdate(const RateControlInput& rateControlInput);
virtual void OnReceivedNTP() ;
// bw estimation
virtual void OnPacketLossStatisticsUpdate(const WebRtc_UWord8 fractionLost,
const WebRtc_UWord16 roundTripTime,
const WebRtc_UWord32 lastReceivedExtendedHighSeqNum,
const WebRtc_UWord32 jitter);
virtual void OnReceivedTMMBR();
virtual void OnBandwidthEstimateUpdate(WebRtc_UWord16 bandWidthKbit);
virtual void OnReceivedBandwidthEstimateUpdate(const WebRtc_UWord16 bwEstimateMinKbit,
const WebRtc_UWord16 bwEstimateMaxKbit);
// bad state of RTP receiver request a keyframe
virtual void OnRequestIntraFrame(const FrameType frameType);
// good state of RTP receiver inform sender // good state of RTP receiver inform sender
virtual WebRtc_Word32 SendRTCPReferencePictureSelection(const WebRtc_UWord64 pictureID); virtual WebRtc_Word32 SendRTCPReferencePictureSelection(const WebRtc_UWord64 pictureID);
virtual void OnReceivedIntraFrameRequest(const WebRtc_UWord8 message); virtual void OnBandwidthEstimateUpdate(WebRtc_UWord16 bandWidthKbit);
void OnReceivedNTP() ;
// bw estimation
void OnPacketLossStatisticsUpdate(
const WebRtc_UWord8 fractionLost,
const WebRtc_UWord16 roundTripTime,
const WebRtc_UWord32 lastReceivedExtendedHighSeqNum);
void OnReceivedTMMBR();
void OnReceivedEstimatedMaxBitrate(const WebRtc_UWord32 maxBitrate);
void OnReceivedBandwidthEstimateUpdate(const WebRtc_UWord16 bwEstimateKbit);
// bad state of RTP receiver request a keyframe
void OnRequestIntraFrame(const FrameType frameType);
void OnReceivedIntraFrameRequest(const RtpRtcp* caller);
// received a request for a new SLI // received a request for a new SLI
virtual void OnReceivedSliceLossIndication(const WebRtc_UWord8 pictureID); void OnReceivedSliceLossIndication(const WebRtc_UWord8 pictureID);
// received a new refereence frame // received a new refereence frame
virtual void OnReceivedReferencePictureSelectionIndication(const WebRtc_UWord64 pitureID); void OnReceivedReferencePictureSelectionIndication(
const WebRtc_UWord64 pitureID);
virtual void OnReceivedNACK(const WebRtc_UWord16 nackSequenceNumbersLength, void OnReceivedNACK(const WebRtc_UWord16 nackSequenceNumbersLength,
const WebRtc_UWord16* nackSequenceNumbers); const WebRtc_UWord16* nackSequenceNumbers);
virtual void OnRequestSendReport(); void OnRequestSendReport();
// only for internal testing
WebRtc_UWord32 LastSendReport(WebRtc_UWord32& lastRTCPTime);
protected: protected:
virtual void RegisterChildModule(RtpRtcp* module); void RegisterChildModule(RtpRtcp* module);
virtual void DeRegisterChildModule(RtpRtcp* module); void DeRegisterChildModule(RtpRtcp* module);
bool UpdateRTCPReceiveInformationTimers(); bool UpdateRTCPReceiveInformationTimers();
void ProcessDeadOrAliveTimer(); void ProcessDeadOrAliveTimer();
virtual WebRtc_UWord32 BitrateReceivedNow() const; WebRtc_UWord32 BitrateReceivedNow() const;
// Get remote SequenceNumber // Get remote SequenceNumber
virtual WebRtc_UWord16 RemoteSequenceNumber() const; WebRtc_UWord16 RemoteSequenceNumber() const;
virtual WebRtc_Word32 UpdateTMMBR(); WebRtc_Word32 UpdateTMMBR();
// only for internal testing
WebRtc_UWord32 LastSendReport(WebRtc_UWord32& lastRTCPTime);
RTPSender _rtpSender; RTPSender _rtpSender;
RTPReceiver _rtpReceiver; RTPReceiver _rtpReceiver;
@@ -543,37 +536,37 @@ private:
WebRtc_UWord32 _lastProcessTime; WebRtc_UWord32 _lastProcessTime;
WebRtc_UWord16 _packetOverHead; WebRtc_UWord16 _packetOverHead;
CriticalSectionWrapper& _criticalSectionModulePtrs; CriticalSectionWrapper& _criticalSectionModulePtrs;
CriticalSectionWrapper& _criticalSectionModulePtrsFeedback; CriticalSectionWrapper& _criticalSectionModulePtrsFeedback;
ModuleRtpRtcpImpl* _defaultModule; ModuleRtpRtcpImpl* _defaultModule;
ModuleRtpRtcpImpl* _audioModule; ModuleRtpRtcpImpl* _audioModule;
ModuleRtpRtcpImpl* _videoModule; ModuleRtpRtcpImpl* _videoModule;
ListWrapper _childModules; std::list<ModuleRtpRtcpImpl*> _childModules;
// Dead or alive // Dead or alive
bool _deadOrAliveActive; bool _deadOrAliveActive;
WebRtc_UWord32 _deadOrAliveTimeoutMS; WebRtc_UWord32 _deadOrAliveTimeoutMS;
WebRtc_UWord32 _deadOrAliveLastTimer; WebRtc_UWord32 _deadOrAliveLastTimer;
// receive side // receive side
BandwidthManagement _bandwidthManagement; BandwidthManagement _bandwidthManagement;
WebRtc_UWord32 _receivedNTPsecsAudio; WebRtc_UWord32 _receivedNTPsecsAudio;
WebRtc_UWord32 _receivedNTPfracAudio; WebRtc_UWord32 _receivedNTPfracAudio;
WebRtc_UWord32 _RTCPArrivalTimeSecsAudio; WebRtc_UWord32 _RTCPArrivalTimeSecsAudio;
WebRtc_UWord32 _RTCPArrivalTimeFracAudio; WebRtc_UWord32 _RTCPArrivalTimeFracAudio;
// send side // send side
NACKMethod _nackMethod; NACKMethod _nackMethod;
WebRtc_UWord32 _nackLastTimeSent; WebRtc_UWord32 _nackLastTimeSent;
WebRtc_UWord16 _nackLastSeqNumberSent; WebRtc_UWord16 _nackLastSeqNumberSent;
bool _simulcast;
VideoCodec _sendVideoCodec;
KeyFrameRequestMethod _keyFrameReqMethod; KeyFrameRequestMethod _keyFrameReqMethod;
WebRtc_UWord32 _lastChildBitrateUpdate;
#ifdef MATLAB #ifdef MATLAB
MatlabPlot* _plot1; MatlabPlot* _plot1;
#endif #endif
}; };
} // namespace webrtc } // namespace webrtc
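The new _simulcast flag, the stored _sendVideoCodec and the std::list of child modules suggest that, in simulcast mode, the default module fans frames and bitrate out to one child module per stream. The helper below is hypothetical (the real logic is in the suppressed implementation diff above) and only sketches that routing idea:

#include <cstddef>  // NULL
#include <list>

#include "rtp_rtcp_impl.h"  // include name inferred from the header guard

webrtc::ModuleRtpRtcpImpl* SelectChildForStream(
    std::list<webrtc::ModuleRtpRtcpImpl*>& childModules,
    WebRtc_UWord8 simulcastIdx) {
  std::list<webrtc::ModuleRtpRtcpImpl*>::iterator it = childModules.begin();
  for (WebRtc_UWord8 i = 0; it != childModules.end(); ++it, ++i) {
    if (i == simulcastIdx) {
      return *it;  // this child owns the RTP stream for that simulcast layer
    }
  }
  return NULL;  // fewer child modules than configured simulcast streams
}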

View File

@@ -149,7 +149,7 @@ WebRtc_Word32 VideoCoder::SendData(
const WebRtc_UWord8* payloadData, const WebRtc_UWord8* payloadData,
WebRtc_UWord32 payloadSize, WebRtc_UWord32 payloadSize,
const RTPFragmentationHeader& fragmentationHeader, const RTPFragmentationHeader& fragmentationHeader,
const RTPVideoTypeHeader* rtpTypeHdr) const RTPVideoHeader* /*rtpVideoHdr*/)
{ {
// Store the data in _videoEncodedData which is a pointer to videoFrame in // Store the data in _videoEncodedData which is a pointer to videoFrame in
// Encode(..) // Encode(..)

View File

@@ -64,7 +64,7 @@ private:
const WebRtc_UWord8* payloadData, const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize, const WebRtc_UWord32 payloadSize,
const RTPFragmentationHeader& /* fragmentationHeader*/, const RTPFragmentationHeader& /* fragmentationHeader*/,
const RTPVideoTypeHeader* rtpTypeHdr); const RTPVideoHeader* rtpTypeHdr);
WebRtc_UWord32 _instanceID; WebRtc_UWord32 _instanceID;
VideoCodingModule* _vcm; VideoCodingModule* _vcm;

View File

@@ -50,7 +50,7 @@ public:
virtual WebRtc_Word32 virtual WebRtc_Word32
Encode(const RawImage& inputImage, Encode(const RawImage& inputImage,
const CodecSpecificInfo* /*codecSpecificInfo*/, const CodecSpecificInfo* /*codecSpecificInfo*/,
VideoFrameType /*frameType*/); const VideoFrameType* /*frameTypes*/);
// Register an encode complete callback object. // Register an encode complete callback object.
// //

View File

@@ -119,7 +119,7 @@ I420Encoder::InitEncode(const VideoCodec* codecSettings,
WebRtc_Word32 WebRtc_Word32
I420Encoder::Encode(const RawImage& inputImage, I420Encoder::Encode(const RawImage& inputImage,
const CodecSpecificInfo* /*codecSpecificInfo*/, const CodecSpecificInfo* /*codecSpecificInfo*/,
VideoFrameType /*frameTypes*/) const VideoFrameType* /*frameTypes*/)
{ {
if (!_inited) if (!_inited)
{ {

View File

@@ -31,6 +31,8 @@ struct CodecSpecificInfoVP8
WebRtc_UWord64 pictureIdRPSI; WebRtc_UWord64 pictureIdRPSI;
WebRtc_Word16 pictureId; // negative value to skip pictureId WebRtc_Word16 pictureId; // negative value to skip pictureId
bool nonReference; bool nonReference;
WebRtc_UWord8 simulcastIdx;
WebRtc_UWord8 temporalIdx;
}; };
union CodecSpecificInfoUnion union CodecSpecificInfoUnion
@@ -106,8 +108,8 @@ public:
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise. // Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
virtual WebRtc_Word32 virtual WebRtc_Word32
Encode(const RawImage& inputImage, Encode(const RawImage& inputImage,
const CodecSpecificInfo* codecSpecificInfo = NULL, const CodecSpecificInfo* codecSpecificInfo,
VideoFrameType frameType = kDeltaFrame) = 0; const VideoFrameType* frameTypes) = 0;
// Register an encode complete callback object. // Register an encode complete callback object.
// //
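Encode() now takes a pointer to an array of frame types, one per configured (simulcast) stream, instead of a single value; the updated tests below simply pass the address of one local when simulcast is off. A sketch of the simulcast-side call (the include name and the webrtc:: qualification of the frame-type enumerators are assumptions):

#include "video_codec_interface.h"  // include name assumed; webrtc::VideoEncoder

WebRtc_Word32 EncodeAllStreams(webrtc::VideoEncoder* encoder,
                               const webrtc::RawImage& rawImage) {
  webrtc::VideoFrameType frameTypes[3] = {webrtc::kKeyFrame,  // key frame on stream 0
                                          webrtc::kDeltaFrame,
                                          webrtc::kDeltaFrame};
  // NULL: no codec-specific input. On encode-complete, the new
  // CodecSpecificInfoVP8 fields simulcastIdx and temporalIdx tell the RTP
  // layer which stream and temporal layer the frame belongs to.
  return encoder->Encode(rawImage, NULL, frameTypes);
}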

View File

@@ -447,7 +447,7 @@ NormalAsyncTest::Encode()
} }
webrtc::CodecSpecificInfo* codecSpecificInfo = CreateEncoderSpecificInfo(); webrtc::CodecSpecificInfo* codecSpecificInfo = CreateEncoderSpecificInfo();
int ret = _encoder->Encode(rawImage, codecSpecificInfo, frameType); int ret = _encoder->Encode(rawImage, codecSpecificInfo, &frameType);
if (codecSpecificInfo != NULL) if (codecSpecificInfo != NULL)
{ {
delete codecSpecificInfo; delete codecSpecificInfo;

View File

@@ -268,7 +268,7 @@ bool PerformanceTest::Encode()
frameType = kKeyFrame; frameType = kKeyFrame;
} }
webrtc::CodecSpecificInfo* codecSpecificInfo = CreateEncoderSpecificInfo(); webrtc::CodecSpecificInfo* codecSpecificInfo = CreateEncoderSpecificInfo();
int ret = _encoder->Encode(rawImage, codecSpecificInfo, frameType); int ret = _encoder->Encode(rawImage, codecSpecificInfo, &frameType);
if (codecSpecificInfo != NULL) if (codecSpecificInfo != NULL)
{ {
delete codecSpecificInfo; delete codecSpecificInfo;

View File

@@ -233,7 +233,8 @@ UnitTest::Setup()
// Ensures our initial parameters are valid. // Ensures our initial parameters are valid.
VIDEO_TEST(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK); VIDEO_TEST(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
_encoder->Encode(image, NULL); VideoFrameType videoFrameType = kDeltaFrame;
_encoder->Encode(image, NULL, &videoFrameType);
_refEncFrameLength = WaitForEncodedFrame(); _refEncFrameLength = WaitForEncodedFrame();
VIDEO_TEST_EXIT_ON_ERR(_refEncFrameLength > 0); VIDEO_TEST_EXIT_ON_ERR(_refEncFrameLength > 0);
_refEncFrame = new unsigned char[_refEncFrameLength]; _refEncFrame = new unsigned char[_refEncFrameLength];
@@ -262,7 +263,7 @@ UnitTest::Setup()
_inputVideoBuffer.SetWidth(_source->GetWidth()); _inputVideoBuffer.SetWidth(_source->GetWidth());
_inputVideoBuffer.SetHeight(_source->GetHeight()); _inputVideoBuffer.SetHeight(_source->GetHeight());
VideoBufferToRawImage(_inputVideoBuffer, image); VideoBufferToRawImage(_inputVideoBuffer, image);
_encoder->Encode(image, NULL); _encoder->Encode(image, NULL, &videoFrameType);
VIDEO_TEST_EXIT_ON_ERR(WaitForEncodedFrame() > 0); VIDEO_TEST_EXIT_ON_ERR(WaitForEncodedFrame() > 0);
} }
EncodedImage encodedImage; EncodedImage encodedImage;
@@ -358,6 +359,7 @@ UnitTest::Perform()
RawImage inputImage; RawImage inputImage;
EncodedImage encodedImage; EncodedImage encodedImage;
EventWrapper& sleepEvent = *EventWrapper::Create(); EventWrapper& sleepEvent = *EventWrapper::Create();
VideoFrameType videoFrameType = kDeltaFrame;
//----- Encoder parameter tests ----- //----- Encoder parameter tests -----
@@ -365,7 +367,7 @@ UnitTest::Perform()
// We want to revert the initialization done in Setup(). // We want to revert the initialization done in Setup().
VIDEO_TEST(_encoder->Release() == WEBRTC_VIDEO_CODEC_OK); VIDEO_TEST(_encoder->Release() == WEBRTC_VIDEO_CODEC_OK);
VideoBufferToRawImage(_inputVideoBuffer, inputImage); VideoBufferToRawImage(_inputVideoBuffer, inputImage);
VIDEO_TEST(_encoder->Encode(inputImage, NULL) VIDEO_TEST(_encoder->Encode(inputImage, NULL, &videoFrameType )
== WEBRTC_VIDEO_CODEC_UNINITIALIZED); == WEBRTC_VIDEO_CODEC_UNINITIALIZED);
VIDEO_TEST(_encoder->Reset() == WEBRTC_VIDEO_CODEC_UNINITIALIZED); VIDEO_TEST(_encoder->Reset() == WEBRTC_VIDEO_CODEC_UNINITIALIZED);
@@ -432,7 +434,7 @@ UnitTest::Perform()
// inputVideoBuffer unallocated. // inputVideoBuffer unallocated.
_inputVideoBuffer.Free(); _inputVideoBuffer.Free();
VideoBufferToRawImage(_inputVideoBuffer, inputImage); VideoBufferToRawImage(_inputVideoBuffer, inputImage);
VIDEO_TEST(_encoder->Encode(inputImage, NULL) == VIDEO_TEST(_encoder->Encode(inputImage, NULL, &videoFrameType) ==
WEBRTC_VIDEO_CODEC_ERR_PARAMETER); WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
_inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame); _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
_inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _refFrame); _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _refFrame);
@@ -444,7 +446,7 @@ UnitTest::Perform()
for (int i = 1; i <= 60; i++) for (int i = 1; i <= 60; i++)
{ {
VideoFrameType frameType = !(i % 2) ? kKeyFrame : kDeltaFrame; VideoFrameType frameType = !(i % 2) ? kKeyFrame : kDeltaFrame;
VIDEO_TEST(_encoder->Encode(inputImage, NULL, frameType) == VIDEO_TEST(_encoder->Encode(inputImage, NULL, &frameType) ==
WEBRTC_VIDEO_CODEC_OK); WEBRTC_VIDEO_CODEC_OK);
VIDEO_TEST(WaitForEncodedFrame() > 0); VIDEO_TEST(WaitForEncodedFrame() > 0);
sleepEvent.Wait(10); // Allow the encoder's queue to realize it's empty. sleepEvent.Wait(10); // Allow the encoder's queue to realize it's empty.
@@ -453,11 +455,12 @@ UnitTest::Perform()
// Init then encode. // Init then encode.
_encodedVideoBuffer.UpdateLength(0); _encodedVideoBuffer.UpdateLength(0);
_encodedVideoBuffer.Reset(); _encodedVideoBuffer.Reset();
VIDEO_TEST(_encoder->Encode(inputImage, NULL) == WEBRTC_VIDEO_CODEC_OK); VIDEO_TEST(_encoder->Encode(inputImage, NULL, &videoFrameType) ==
WEBRTC_VIDEO_CODEC_OK);
VIDEO_TEST(WaitForEncodedFrame() > 0); VIDEO_TEST(WaitForEncodedFrame() > 0);
VIDEO_TEST(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK); VIDEO_TEST(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
_encoder->Encode(inputImage, NULL); _encoder->Encode(inputImage, NULL, &videoFrameType);
frameLength = WaitForEncodedFrame(); frameLength = WaitForEncodedFrame();
VIDEO_TEST(frameLength > 0); VIDEO_TEST(frameLength > 0);
VIDEO_TEST(CheckIfBitExact(_refEncFrame, _refEncFrameLength, VIDEO_TEST(CheckIfBitExact(_refEncFrame, _refEncFrameLength,
@@ -466,11 +469,12 @@ UnitTest::Perform()
// Reset then encode. // Reset then encode.
_encodedVideoBuffer.UpdateLength(0); _encodedVideoBuffer.UpdateLength(0);
_encodedVideoBuffer.Reset(); _encodedVideoBuffer.Reset();
VIDEO_TEST(_encoder->Encode(inputImage, NULL) == WEBRTC_VIDEO_CODEC_OK); VIDEO_TEST(_encoder->Encode(inputImage, NULL, &videoFrameType) ==
WEBRTC_VIDEO_CODEC_OK);
WaitForEncodedFrame(); WaitForEncodedFrame();
VIDEO_TEST(_encoder->Reset() == WEBRTC_VIDEO_CODEC_OK); VIDEO_TEST(_encoder->Reset() == WEBRTC_VIDEO_CODEC_OK);
VIDEO_TEST(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK); VIDEO_TEST(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
_encoder->Encode(inputImage, NULL); _encoder->Encode(inputImage, NULL, &videoFrameType);
frameLength = WaitForEncodedFrame(); frameLength = WaitForEncodedFrame();
VIDEO_TEST(frameLength > 0); VIDEO_TEST(frameLength > 0);
VIDEO_TEST(CheckIfBitExact(_refEncFrame, _refEncFrameLength, VIDEO_TEST(CheckIfBitExact(_refEncFrame, _refEncFrameLength,
@@ -479,11 +483,12 @@ UnitTest::Perform()
// Release then encode. // Release then encode.
_encodedVideoBuffer.UpdateLength(0); _encodedVideoBuffer.UpdateLength(0);
_encodedVideoBuffer.Reset(); _encodedVideoBuffer.Reset();
VIDEO_TEST(_encoder->Encode(inputImage, NULL) == WEBRTC_VIDEO_CODEC_OK); VIDEO_TEST(_encoder->Encode(inputImage, NULL, &videoFrameType) ==
WEBRTC_VIDEO_CODEC_OK);
WaitForEncodedFrame(); WaitForEncodedFrame();
VIDEO_TEST(_encoder->Release() == WEBRTC_VIDEO_CODEC_OK); VIDEO_TEST(_encoder->Release() == WEBRTC_VIDEO_CODEC_OK);
VIDEO_TEST(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK); VIDEO_TEST(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
_encoder->Encode(inputImage, NULL); _encoder->Encode(inputImage, NULL, &videoFrameType);
frameLength = WaitForEncodedFrame(); frameLength = WaitForEncodedFrame();
VIDEO_TEST(frameLength > 0); VIDEO_TEST(frameLength > 0);
VIDEO_TEST(CheckIfBitExact(_refEncFrame, _refEncFrameLength, VIDEO_TEST(CheckIfBitExact(_refEncFrame, _refEncFrameLength,
@@ -605,7 +610,8 @@ UnitTest::Perform()
WEBRTC_VIDEO_CODEC_OK); WEBRTC_VIDEO_CODEC_OK);
RawImage tempInput(inputImage._buffer, inputImage._length/4, RawImage tempInput(inputImage._buffer, inputImage._length/4,
inputImage._size/4); inputImage._size/4);
_encoder->Encode(tempInput, NULL); VideoFrameType videoFrameType = kDeltaFrame;
_encoder->Encode(tempInput, NULL, &videoFrameType);
frameLength = WaitForEncodedFrame(); frameLength = WaitForEncodedFrame();
VIDEO_TEST(frameLength > 0); VIDEO_TEST(frameLength > 0);
@@ -623,7 +629,7 @@ UnitTest::Perform()
VIDEO_TEST(_encoder->Release() == WEBRTC_VIDEO_CODEC_OK); VIDEO_TEST(_encoder->Release() == WEBRTC_VIDEO_CODEC_OK);
VIDEO_TEST(_encoder->InitEncode(&_inst, 1, 1440) == VIDEO_TEST(_encoder->InitEncode(&_inst, 1, 1440) ==
WEBRTC_VIDEO_CODEC_OK); WEBRTC_VIDEO_CODEC_OK);
_encoder->Encode(inputImage, NULL); _encoder->Encode(inputImage, NULL, &videoFrameType);
frameLength = WaitForEncodedFrame(); frameLength = WaitForEncodedFrame();
VIDEO_TEST(frameLength > 0); VIDEO_TEST(frameLength > 0);
@@ -690,7 +696,10 @@ UnitTest::Perform()
_inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _sourceBuffer); _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _sourceBuffer);
_inputVideoBuffer.SetTimeStamp(frames); _inputVideoBuffer.SetTimeStamp(frames);
VideoBufferToRawImage(_inputVideoBuffer, inputImage); VideoBufferToRawImage(_inputVideoBuffer, inputImage);
VIDEO_TEST_EXIT_ON_ERR(_encoder->Encode(inputImage, NULL) == VideoFrameType videoFrameType = kDeltaFrame;
VIDEO_TEST_EXIT_ON_ERR(_encoder->Encode(inputImage,
NULL,
&videoFrameType) ==
WEBRTC_VIDEO_CODEC_OK); WEBRTC_VIDEO_CODEC_OK);
frameLength = WaitForEncodedFrame(); frameLength = WaitForEncodedFrame();
//VIDEO_TEST_EXIT_ON_ERR(frameLength); //VIDEO_TEST_EXIT_ON_ERR(frameLength);
@@ -774,8 +783,11 @@ UnitTest::RateControlTests()
static_cast<WebRtc_UWord32>(9e4 / static_cast<WebRtc_UWord32>(9e4 /
static_cast<float>(_inst.maxFramerate))); static_cast<float>(_inst.maxFramerate)));
VideoBufferToRawImage(_inputVideoBuffer, inputImage); VideoBufferToRawImage(_inputVideoBuffer, inputImage);
VIDEO_TEST_EXIT_ON_ERR(_encoder->Encode(inputImage, NULL) == VideoFrameType videoFrameType = kDeltaFrame;
WEBRTC_VIDEO_CODEC_OK); VIDEO_TEST_EXIT_ON_ERR(_encoder->Encode(inputImage,
NULL,
&videoFrameType) ==
WEBRTC_VIDEO_CODEC_OK);
frameLength = WaitForEncodedFrame(); frameLength = WaitForEncodedFrame();
VIDEO_TEST_EXIT_ON_ERR(frameLength > 0); VIDEO_TEST_EXIT_ON_ERR(frameLength > 0);
//VIDEO_TEST(frameLength > 0); //VIDEO_TEST(frameLength > 0);

View File

@@ -87,7 +87,7 @@ public:
virtual WebRtc_Word32 Encode(const RawImage& inputImage, virtual WebRtc_Word32 Encode(const RawImage& inputImage,
const CodecSpecificInfo* codecSpecificInfo, const CodecSpecificInfo* codecSpecificInfo,
VideoFrameType frameType); const VideoFrameType* frameTypes);
// Register an encode complete callback object. // Register an encode complete callback object.
// //
@@ -163,6 +163,7 @@ private:
bool _inited; bool _inited;
WebRtc_UWord32 _timeStamp; WebRtc_UWord32 _timeStamp;
WebRtc_UWord16 _pictureID; WebRtc_UWord16 _pictureID;
WebRtc_UWord8 _simulcastIdx;
bool _pictureLossIndicationOn; bool _pictureLossIndicationOn;
bool _feedbackModeOn; bool _feedbackModeOn;
bool _nextRefIsGolden; bool _nextRefIsGolden;

View File

@@ -0,0 +1,144 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vp8_simulcast.h
* WEBRTC VP8 simulcast wrapper interface
 * Creates up to kMaxSimulcastStreams VP8 encoders
 * Automatically scales the input frame to the correct size for each VP8 encoder
 * At runtime it divides the available bitrate between the VP8 encoders
*/
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_H_
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_H_
#include "interpolator.h"
#include "video_codec_interface.h"
#include "vp8.h"
namespace webrtc
{
class VP8SimulcastEncoder : public VideoEncoder
{
public:
VP8SimulcastEncoder();
virtual ~VP8SimulcastEncoder();
// Free encoder memory.
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
virtual WebRtc_Word32 Release();
// Reset encoder state and prepare for a new call.
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
// <0 - Errors:
// WEBRTC_VIDEO_CODEC_ERR_PARAMETER
// WEBRTC_VIDEO_CODEC_ERROR
virtual WebRtc_Word32 Reset();
// Initialize the encoder with the information from the codecSettings
//
// Input:
// - codecSettings : Codec settings
// - numberOfCores : Number of cores available for the encoder
// - maxPayloadSize : The maximum size each payload is allowed
// to have. Usually MTU - overhead.
//
// Return value : Set bit rate if OK
// <0 - Errors:
// WEBRTC_VIDEO_CODEC_ERR_PARAMETER
// WEBRTC_VIDEO_CODEC_ERR_SIZE
// WEBRTC_VIDEO_CODEC_LEVEL_EXCEEDED
// WEBRTC_VIDEO_CODEC_MEMORY
// WEBRTC_VIDEO_CODEC_ERROR
virtual WebRtc_Word32 InitEncode(const VideoCodec* codecSettings,
WebRtc_Word32 numberOfCores,
WebRtc_UWord32 maxPayloadSize);
// Encode an I420 image (as a part of a video stream). The encoded image
// will be returned to the user through the encode complete callback.
// It can encode multiple streams based on its configuration but not more than
// kMaxSimulcastStreams.
//
// Input:
// - inputImage : Image to be encoded
// - frameTypes : Pointer to the first frame type in an array of
// frame types, one per encoder, to be generated by
// the encoders. The caller is responsible for making
// the array no shorter than the number of configured
// encoders.
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK
// <0 - Errors:
// WEBRTC_VIDEO_CODEC_ERR_PARAMETER
// WEBRTC_VIDEO_CODEC_MEMORY
// WEBRTC_VIDEO_CODEC_ERROR
// WEBRTC_VIDEO_CODEC_TIMEOUT
virtual WebRtc_Word32 Encode(const RawImage& inputImage,
const CodecSpecificInfo* codecSpecificInfo,
const VideoFrameType* frameTypes);
// Register an encode complete callback object.
//
// Input:
// - callback : Callback object which handles encoded images.
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
virtual WebRtc_Word32 RegisterEncodeCompleteCallback(
EncodedImageCallback* callback);
// Inform the encoder of the new packet loss rate in the network
//
// - packetLoss : Fraction lost
// (loss rate in percent = 100 * packetLoss / 255)
// Return value : WEBRTC_VIDEO_CODEC_OK if OK
// <0 - Errors:
// WEBRTC_VIDEO_CODEC_ERROR
//
virtual WebRtc_Word32 SetPacketLoss(WebRtc_UWord32 packetLoss);
// Inform the encoder about the new target bit rate.
//
// - newBitRateKbit : New target bit rate in kbit/s
// - frameRate : The target frame rate
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
virtual WebRtc_Word32 SetRates(WebRtc_UWord32 newBitRateKbit,
WebRtc_UWord32 frameRate);
// Get version number for the codec.
//
// Input:
// - version : Pointer to allocated char buffer.
// - buflen : Length of provided char buffer.
//
// Output:
// - version : Version number string written to char buffer.
//
// Return value : >0 - Length of written string.
// <0 - WEBRTC_VIDEO_CODEC_ERR_SIZE
virtual WebRtc_Word32 Version(WebRtc_Word8 *version,
WebRtc_Word32 length) const;
static WebRtc_Word32 VersionStatic(WebRtc_Word8 *version,
WebRtc_Word32 length);
private:
VP8Encoder* encoder_[kMaxSimulcastStreams];
bool encode_stream_[kMaxSimulcastStreams];
VideoFrameType frame_type_[kMaxSimulcastStreams];
interpolator* interpolator_[kMaxSimulcastStreams];
RawImage video_frame_[kMaxSimulcastStreams];
VideoCodec video_codec_;
}; // end of VP8SimulcastEncoder class }; // end of VP8SimulcastEncoder class
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_H_
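For orientation, here is a minimal usage sketch of the wrapper declared above. It is not part of the CL; the function name, the two-stream configuration values, and the callback/image/info arguments are illustrative assumptions.

#include <string.h>
#include "common_types.h"
#include "vp8_simulcast.h"

// Sketch only: configure two simulcast streams and encode one frame.
// 'callback', 'rawImage' and 'info' are assumed to be prepared by the caller.
void ExampleSimulcastEncode(webrtc::EncodedImageCallback* callback,
                            const webrtc::RawImage& rawImage,
                            const webrtc::CodecSpecificInfo& info) {
  webrtc::VideoCodec codec;
  memset(&codec, 0, sizeof(codec));
  codec.codecType = webrtc::kVideoCodecVP8;
  codec.width = 640;
  codec.height = 480;
  codec.startBitrate = 500;                   // kbit/s
  codec.maxBitrate = 650;                     // kbit/s
  codec.maxFramerate = 30;
  codec.numberOfSimulcastStreams = 2;
  codec.simulcastStream[0].width = 320;
  codec.simulcastStream[0].height = 240;
  codec.simulcastStream[0].maxBitrate = 150;  // kbit/s
  codec.simulcastStream[0].qpMax = 56;
  codec.simulcastStream[1].width = 640;
  codec.simulcastStream[1].height = 480;
  codec.simulcastStream[1].maxBitrate = 500;  // kbit/s
  codec.simulcastStream[1].qpMax = 56;

  webrtc::VP8SimulcastEncoder encoder;
  encoder.RegisterEncodeCompleteCallback(callback);
  encoder.InitEncode(&codec, /*numberOfCores=*/2, /*maxPayloadSize=*/1440);

  // One frame type per configured stream; start both streams with a key frame.
  webrtc::VideoFrameType frameTypes[webrtc::kMaxSimulcastStreams] =
      { webrtc::kKeyFrame, webrtc::kKeyFrame };
  encoder.Encode(rawImage, &info, frameTypes);
}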

View File

@@ -43,6 +43,7 @@ VP8Encoder::VP8Encoder():
_inited(false), _inited(false),
_timeStamp(0), _timeStamp(0),
_pictureID(0), _pictureID(0),
_simulcastIdx(0),
_pictureLossIndicationOn(false), _pictureLossIndicationOn(false),
_feedbackModeOn(false), _feedbackModeOn(false),
_nextRefIsGolden(true), _nextRefIsGolden(true),
@@ -164,8 +165,20 @@ VP8Encoder::SetRates(WebRtc_UWord32 newBitRateKbit, WebRtc_UWord32 newFrameRate)
{ {
newBitRateKbit = _maxBitRateKbit; newBitRateKbit = _maxBitRateKbit;
} }
_cfg->rc_target_bitrate = newBitRateKbit; // in kbit/s
_cfg->rc_target_bitrate = newBitRateKbit; // in kbit/s
/* TODO(pwestin) use number of temporal layers config
int ids[3] = {0,1,2};
_cfg->ts_number_layers = 3;
_cfg->ts_periodicity = 3;
_cfg->ts_target_bitrate[0] = (newBitRateKbit*2/5);
_cfg->ts_target_bitrate[1] = (newBitRateKbit*3/5);
_cfg->ts_target_bitrate[2] = (newBitRateKbit);
_cfg->ts_rate_decimator[0] = 4;
_cfg->ts_rate_decimator[1] = 2;
_cfg->ts_rate_decimator[2] = 1;
memcpy(_cfg->ts_layer_id, ids, sizeof(ids));
*/
// update frame rate // update frame rate
if (newFrameRate != _maxFrameRate) if (newFrameRate != _maxFrameRate)
{ {
@@ -267,6 +280,18 @@ VP8Encoder::InitEncode(const VideoCodec* inst,
{ {
_cfg->rc_target_bitrate = inst->startBitrate; // in kbit/s _cfg->rc_target_bitrate = inst->startBitrate; // in kbit/s
} }
/* TODO(pwestin) use number of temporal layers config
int ids[3] = {0,1,2};
_cfg->ts_number_layers = 3;
_cfg->ts_periodicity = 3;
_cfg->ts_target_bitrate[0] = (inst->startBitrate*2/5);
_cfg->ts_target_bitrate[1] = (inst->startBitrate*3/5);
_cfg->ts_target_bitrate[2] = (inst->startBitrate);
_cfg->ts_rate_decimator[0] = 4;
_cfg->ts_rate_decimator[1] = 2;
_cfg->ts_rate_decimator[2] = 1;
memcpy(_cfg->ts_layer_id, ids, sizeof(ids));
*/
// setting the time base of the codec // setting the time base of the codec
_cfg->g_timebase.num = 1; _cfg->g_timebase.num = 1;
@@ -394,7 +419,7 @@ VP8Encoder::MaxIntraTarget(WebRtc_UWord32 optimalBuffersize)
WebRtc_Word32 WebRtc_Word32
VP8Encoder::Encode(const RawImage& inputImage, VP8Encoder::Encode(const RawImage& inputImage,
const CodecSpecificInfo* codecSpecificInfo, const CodecSpecificInfo* codecSpecificInfo,
VideoFrameType frameTypes) const VideoFrameType* frameTypes)
{ {
if (!_inited) if (!_inited)
{ {
@@ -408,14 +433,21 @@ VP8Encoder::Encode(const RawImage& inputImage,
{ {
return WEBRTC_VIDEO_CODEC_UNINITIALIZED; return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
} }
if (codecSpecificInfo)
{
_simulcastIdx = codecSpecificInfo->codecSpecific.VP8.simulcastIdx;
}
else
{
_simulcastIdx = 0;
}
// image in vpx_image_t format // image in vpx_image_t format
_raw->planes[PLANE_Y] = inputImage._buffer; _raw->planes[PLANE_Y] = inputImage._buffer;
_raw->planes[PLANE_U] = &inputImage._buffer[_height * _width]; _raw->planes[PLANE_U] = &inputImage._buffer[_height * _width];
_raw->planes[PLANE_V] = &inputImage._buffer[_height * _width * 5 >> 2]; _raw->planes[PLANE_V] = &inputImage._buffer[_height * _width * 5 >> 2];
int flags = 0; int flags = 0;
if (frameTypes == kKeyFrame) if (frameTypes && *frameTypes == kKeyFrame)
{ {
flags |= VPX_EFLAG_FORCE_KF; // will update both golden and altref flags |= VPX_EFLAG_FORCE_KF; // will update both golden and altref
_encodedImage._frameType = kKeyFrame; _encodedImage._frameType = kKeyFrame;
@@ -555,6 +587,8 @@ void VP8Encoder::PopulateCodecSpecific(CodecSpecificInfo* codec_specific,
codec_specific->codecType = kVideoCodecVP8; codec_specific->codecType = kVideoCodecVP8;
CodecSpecificInfoVP8 *vp8Info = &(codec_specific->codecSpecific.VP8); CodecSpecificInfoVP8 *vp8Info = &(codec_specific->codecSpecific.VP8);
vp8Info->pictureId = _pictureID; vp8Info->pictureId = _pictureID;
vp8Info->simulcastIdx = _simulcastIdx;
vp8Info->temporalIdx = kNoTemporalIdx; // TODO(pwestin) need to populate this
vp8Info->nonReference = (pkt.data.frame.flags & VPX_FRAME_IS_DROPPABLE); vp8Info->nonReference = (pkt.data.frame.flags & VPX_FRAME_IS_DROPPABLE);
_pictureID = (_pictureID + 1) % 0x7FFF; // prepare next _pictureID = (_pictureID + 1) % 0x7FFF; // prepare next
} }

View File

@@ -17,6 +17,7 @@
'include_dirs': [ 'include_dirs': [
'../interface', '../interface',
'../../../../../../common_video/interface', '../../../../../../common_video/interface',
'../../../../../../common_video/vplib/main/interface',
'../../../interface', '../../../interface',
'../../../../../interface', '../../../../../interface',
], ],
@@ -72,7 +73,9 @@
}, },
'sources': [ 'sources': [
'../interface/vp8.h', '../interface/vp8.h',
'../interface/vp8_simulcast.h',
'vp8.cc', 'vp8.cc',
'vp8_simulcast.cc',
], ],
}, },
], # targets ], # targets

View File

@@ -0,0 +1,317 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* vp8_simulcast.cc
* WEBRTC VP8 simulcast wrapper interface
*/
#include "vp8_simulcast.h"
#include <string.h>
#include "module_common_types.h"
#include "trace.h"
namespace webrtc {
VP8SimulcastEncoder::VP8SimulcastEncoder() {
for (int i = 0; i < kMaxSimulcastStreams; i++) {
encoder_[i] = NULL;
encode_stream_[i] = false;
frame_type_[i] = kKeyFrame;
interpolator_[i] = NULL;
}
}
VP8SimulcastEncoder::~VP8SimulcastEncoder() {
for (int i = 0; i < kMaxSimulcastStreams; i++) {
delete encoder_[i];
delete interpolator_[i];
delete [] video_frame_[i]._buffer;
}
}
WebRtc_Word32 VP8SimulcastEncoder::Release() {
for (int i = 0; i < kMaxSimulcastStreams; i++) {
delete encoder_[i];
encoder_[i] = NULL;
delete interpolator_[i];
interpolator_[i] = NULL;
delete [] video_frame_[i]._buffer;
video_frame_[i]._buffer = NULL;
video_frame_[i]._size = 0;
}
return 0;
}
WebRtc_Word32 VP8SimulcastEncoder::Reset() {
for (int i = 0; i < kMaxSimulcastStreams; i++) {
if (encoder_[i]) {
encoder_[i]->Reset();
}
}
return 0;
}
WebRtc_Word32 VP8SimulcastEncoder::InitEncode(const VideoCodec* codecSettings,
WebRtc_Word32 numberOfCores,
WebRtc_UWord32 maxPayloadSize) {
// Store a config copy
memcpy(&video_codec_, codecSettings, sizeof(VideoCodec));
// local copy
VideoCodec video_codec;
memcpy(&video_codec, codecSettings, sizeof(VideoCodec));
video_codec.numberOfSimulcastStreams = 0;
WebRtc_UWord32 bitrate_sum = 0;
WebRtc_Word32 ret_val = 0;
for (int i = 0; i < codecSettings->numberOfSimulcastStreams; i++) {
if (encoder_[i] == NULL) {
encoder_[i] = new VP8Encoder();
}
assert(encoder_[i]);
if (codecSettings->startBitrate > bitrate_sum) {
frame_type_[i] = kKeyFrame;
encode_stream_[i] = true;
} else {
// no more bits
encode_stream_[i] = false;
continue;
}
bitrate_sum += codecSettings->simulcastStream[i].maxBitrate;
if (codecSettings->startBitrate >= bitrate_sum) {
video_codec.startBitrate = codecSettings->simulcastStream[i].maxBitrate;
} else {
// The last stream will get whatever is left of the budget, up to its max
video_codec.startBitrate =
codecSettings->startBitrate -
(bitrate_sum -
codecSettings->simulcastStream[i].maxBitrate);
}
video_codec.maxBitrate = codecSettings->simulcastStream[i].maxBitrate;
video_codec.qpMax = codecSettings->simulcastStream[i].qpMax;
video_codec.width = codecSettings->simulcastStream[i].width;
video_codec.height = codecSettings->simulcastStream[i].height;
WebRtc_Word32 cores = 1;
if (video_codec.width > 640 &&
numberOfCores > codecSettings->numberOfSimulcastStreams) {
cores = 2;
}
ret_val = encoder_[i]->InitEncode(&video_codec,
cores,
maxPayloadSize);
if (ret_val != 0) {
WEBRTC_TRACE(webrtc::kTraceError,
webrtc::kTraceVideoCoding,
-1,
"Failed to initialize VP8 simulcast idx: %d.",
i);
return ret_val;
}
if (codecSettings->width != video_codec.width ||
codecSettings->height != video_codec.height) {
if (interpolator_[i] == NULL) {
interpolator_[i] = new interpolator();
}
interpolator_[i]->Set(
codecSettings->width,
codecSettings->height,
video_codec.width,
video_codec.height,
kI420,
kI420,
kBilinear);
if (video_frame_[i]._size <
(3u * video_codec.width * video_codec.height / 2u)) {
video_frame_[i]._size = 3 * video_codec.width * video_codec.height / 2;
delete [] video_frame_[i]._buffer;  // buffer was allocated with new []
video_frame_[i]._buffer = new WebRtc_UWord8[video_frame_[i]._size];
video_frame_[i]._length = 0;
}
}
}
return ret_val;
}
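The loop in InitEncode() above (and the matching loop in SetRates() below) hands out the send budget cumulatively: each stream that still fits in the budget is enabled and given its configured maxBitrate, the last stream that only partly fits gets whatever remains, and streams beyond the budget are disabled. The following standalone sketch, with hypothetical names, mirrors that rule; it is not part of the CL.

#include <cstddef>
#include <vector>

// Hypothetical helper: mirrors the cumulative allocation used by
// VP8SimulcastEncoder::InitEncode()/SetRates().
// Returns one bitrate (kbit/s) per stream; 0 means the stream is disabled.
std::vector<unsigned int> SplitSimulcastBitrate(
    unsigned int totalKbit,
    const std::vector<unsigned int>& maxKbitPerStream) {
  std::vector<unsigned int> allocation(maxKbitPerStream.size(), 0);
  unsigned int sum = 0;
  for (std::size_t i = 0; i < maxKbitPerStream.size(); ++i) {
    if (totalKbit <= sum) {
      break;  // budget exhausted; remaining streams stay disabled
    }
    sum += maxKbitPerStream[i];
    // Full max bitrate if the budget still covers it, otherwise the remainder.
    allocation[i] = (totalKbit >= sum)
                        ? maxKbitPerStream[i]
                        : totalKbit - (sum - maxKbitPerStream[i]);
  }
  return allocation;
}

// Example: SplitSimulcastBitrate(600, {150, 500, 1200}) yields {150, 450, 0}:
// stream 0 runs at its max, stream 1 gets the remaining 450 kbit/s, and
// stream 2 is not encoded until more bitrate becomes available.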
WebRtc_Word32 VP8SimulcastEncoder::Encode(
const RawImage& inputImage,
const CodecSpecificInfo* codecSpecificInfo,
const VideoFrameType* requestedFrameTypes) {
WebRtc_Word32 ret_val = -1;
// we need a local copy since we modify it
CodecSpecificInfo info = *codecSpecificInfo;
const int numberOfStreams = video_codec_.numberOfSimulcastStreams;
for (int i = 0; i < numberOfStreams; i++) {
if (encode_stream_[i]) {
video_frame_[i]._timeStamp = inputImage._timeStamp;
}
if (requestedFrameTypes[i] == kKeyFrame) {
// always do a keyframe if asked to
frame_type_[i] = kKeyFrame;
} else if (frame_type_[i] == kKeyFrame) {
// don't overwrite a previously requested keyframe
} else if (frame_type_[i] == kGoldenFrame) {
if (requestedFrameTypes[i] == kAltRefFrame) {
// requests for both AltRef and Golden are upgraded to a keyframe
frame_type_[i] = kKeyFrame;
}
} else if (frame_type_[i] == kAltRefFrame) {
if (requestedFrameTypes[i] == kGoldenFrame) {
// requests for both AltRef and Golden are upgraded to a keyframe
frame_type_[i] = kKeyFrame;
}
} else if (frame_type_[i] == kDeltaFrame) {
// if the current type is delta, take the requested type
frame_type_[i] = requestedFrameTypes[i];
}
}
for (int i = 0; i < numberOfStreams; i++) {
if (encoder_[i] && encode_stream_[i]) {
// need the simulcastIdx to keep track of which encoder encoded the frame
info.codecSpecific.VP8.simulcastIdx = i;
VideoFrameType requested_frame_type = frame_type_[i];
if (interpolator_[i]) {
interpolator_[i]->Interpolate(inputImage._buffer,
video_frame_[i]._buffer,
video_frame_[i]._size);
video_frame_[i]._length =
3 *
video_codec_.simulcastStream[i].width *
video_codec_.simulcastStream[i].height /
2;
ret_val = encoder_[i]->Encode(video_frame_[i],
&info,
&requested_frame_type);
} else {
ret_val = encoder_[i]->Encode(inputImage,
&info,
&requested_frame_type);
}
if (ret_val < 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCoding, -1,
"Encode error:%d on stream:%d", ret_val, i);
return ret_val;
}
frame_type_[i] = kDeltaFrame;
}
}
return ret_val;
}
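The first loop in Encode() above merges each stream's newly requested frame type with any type still pending from an earlier request: a pending key frame is never downgraded, a pending golden frame combined with an alt-ref request (or the reverse) is upgraded to a key frame, and a pending delta frame simply takes the request. A compact, hypothetical restatement of that rule (not part of the CL, assuming the webrtc VideoFrameType enumerators used above):

// Illustrative only: same merging rule as the first loop in Encode().
webrtc::VideoFrameType MergeFrameType(webrtc::VideoFrameType pending,
                                      webrtc::VideoFrameType requested) {
  if (requested == webrtc::kKeyFrame || pending == webrtc::kKeyFrame) {
    return webrtc::kKeyFrame;      // a key frame request always wins / sticks
  }
  if ((pending == webrtc::kGoldenFrame && requested == webrtc::kAltRefFrame) ||
      (pending == webrtc::kAltRefFrame && requested == webrtc::kGoldenFrame)) {
    return webrtc::kKeyFrame;      // golden + alt-ref together upgrade to key
  }
  if (pending == webrtc::kDeltaFrame) {
    return requested;              // nothing special pending, take the request
  }
  return pending;                  // keep the stronger pending request
}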
WebRtc_Word32 VP8SimulcastEncoder::RegisterEncodeCompleteCallback(
EncodedImageCallback* callback) {
WebRtc_Word32 ret_val = 0;
for (int i = 0; i < kMaxSimulcastStreams; i++) {
if (encoder_[i]) {
ret_val = encoder_[i]->RegisterEncodeCompleteCallback(callback);
if (ret_val < 0) {
WEBRTC_TRACE(webrtc::kTraceError,
webrtc::kTraceVideoCoding,
-1,
"RegisterEncodeCompleteCallback error:%d on stream:%d",
ret_val,
i);
return ret_val;
}
}
}
return ret_val;
}
WebRtc_Word32 VP8SimulcastEncoder::SetPacketLoss(WebRtc_UWord32 packetLoss) {
WebRtc_Word32 ret_val = 0;
for (int i = 0; i < kMaxSimulcastStreams; i++) {
if (encoder_[i]) {
ret_val = encoder_[i]->SetPacketLoss(packetLoss);
if (ret_val < 0) {
WEBRTC_TRACE(webrtc::kTraceError,
webrtc::kTraceVideoCoding,
-1,
"SetPacketLoss error:%d on stream:%d",
ret_val,
i);
return ret_val;
}
}
}
return ret_val;
}
WebRtc_Word32 VP8SimulcastEncoder::SetRates(WebRtc_UWord32 new_bitrate,
WebRtc_UWord32 frame_rate) {
WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCoding, -1,
"VP8 simulcast SetRates(%d,%d)", new_bitrate, frame_rate);
WebRtc_UWord32 bitrate_sum = 0;
WebRtc_Word32 ret_val = 0;
for (int i = 0; i < video_codec_.numberOfSimulcastStreams; i++) {
if (new_bitrate > bitrate_sum) {
if (!encode_stream_[i]) {
frame_type_[i] = kKeyFrame;
encode_stream_[i] = true;
}
} else {
// no more bits
encode_stream_[i] = false;
continue;
}
WebRtc_UWord32 stream_bitrate = 0;
bitrate_sum += video_codec_.simulcastStream[i].maxBitrate;
if (new_bitrate >= bitrate_sum) {
stream_bitrate = video_codec_.simulcastStream[i].maxBitrate;
} else {
stream_bitrate =
new_bitrate -
(bitrate_sum -
video_codec_.simulcastStream[i].maxBitrate);
}
ret_val = encoder_[i]->SetRates(stream_bitrate, frame_rate);
if (ret_val < 0) {
WEBRTC_TRACE(webrtc::kTraceError,
webrtc::kTraceVideoCoding,
-1,
"VP8 error stream:%d SetRates(%d,%d)",
i, stream_bitrate, frame_rate);
} else {
WEBRTC_TRACE(webrtc::kTraceStateInfo,
webrtc::kTraceVideoCoding,
-1,
"VP8 stream:%d SetRates(%d,%d)",
i, stream_bitrate, frame_rate);
}
}
return ret_val;
}
WebRtc_Word32 VP8SimulcastEncoder::VersionStatic(WebRtc_Word8 *version,
WebRtc_Word32 length) {
const WebRtc_Word8* str = "WebM/VP8 simulcast version 1.0.0\n";
WebRtc_Word32 verLen = (WebRtc_Word32)strlen(str);
if (verLen > length) {
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
strncpy(version, str, length);
return verLen;
}
WebRtc_Word32 VP8SimulcastEncoder::Version(WebRtc_Word8 *version,
WebRtc_Word32 length) const {
return VersionStatic(version, length);
}
} // namespace webrtc

View File

@@ -237,7 +237,7 @@ public:
// < 0, on error. // < 0, on error.
virtual WebRtc_Word32 AddVideoFrame( virtual WebRtc_Word32 AddVideoFrame(
const VideoFrame& videoFrame, const VideoFrame& videoFrame,
const VideoContentMetrics* _contentMetrics = NULL, const VideoContentMetrics* contentMetrics = NULL,
const CodecSpecificInfo* codecSpecificInfo = NULL) = 0; const CodecSpecificInfo* codecSpecificInfo = NULL) = 0;
// Next frame encoded should be of the type frameType. // Next frame encoded should be of the type frameType.
@@ -248,7 +248,8 @@ public:
// //
// Return value : VCM_OK, on success. // Return value : VCM_OK, on success.
// < 0, on error. // < 0, on error.
virtual WebRtc_Word32 FrameTypeRequest(FrameType frameType) = 0; virtual WebRtc_Word32 FrameTypeRequest(FrameType frameType,
WebRtc_UWord8 simulcastIdx) = 0;
// Frame Dropper enable. Can be used to disable the frame dropping when the encoder // Frame Dropper enable. Can be used to disable the frame dropping when the encoder
// over-uses its bit rate. This API is designed to be used when the encoded frames // over-uses its bit rate. This API is designed to be used when the encoded frames

View File

@@ -89,7 +89,7 @@ public:
const WebRtc_UWord8* payloadData, const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize, const WebRtc_UWord32 payloadSize,
const RTPFragmentationHeader& fragmentationHeader, const RTPFragmentationHeader& fragmentationHeader,
const RTPVideoTypeHeader* rtpTypeHdr) = 0; const RTPVideoHeader* rtpVideoHdr) = 0;
protected: protected:
virtual ~VCMPacketizationCallback() {} virtual ~VCMPacketizationCallback() {}
}; };

View File

@@ -23,6 +23,7 @@
// Supported codecs // Supported codecs
#ifdef VIDEOCODEC_VP8 #ifdef VIDEOCODEC_VP8
#include "vp8.h" #include "vp8.h"
#include "vp8_simulcast.h"
#endif #endif
#ifdef VIDEOCODEC_I420 #ifdef VIDEOCODEC_I420
#include "i420.h" #include "i420.h"
@@ -91,7 +92,7 @@ VCMCodecDataBase::Version(WebRtc_Word8* version,
{ {
return ret; return ret;
} }
encoder = CreateEncoder(settings.codecType); encoder = CreateEncoder(settings.codecType, false);
if (encoder == NULL) if (encoder == NULL)
{ {
return VCM_MEMORY; return VCM_MEMORY;
@@ -133,24 +134,29 @@ VCMCodecDataBase::ResetSender()
return VCM_OK; return VCM_OK;
} }
VCMGenericEncoder* VCMGenericEncoder* VCMCodecDataBase::CreateEncoder(
VCMCodecDataBase::CreateEncoder(VideoCodecType type) const const VideoCodecType type,
{ const bool simulcast) const {
switch(type) switch(type)
{ {
#ifdef VIDEOCODEC_VP8 #ifdef VIDEOCODEC_VP8
case kVideoCodecVP8: case kVideoCodecVP8:
return new VCMGenericEncoder(*(new VP8Encoder)); if (simulcast) {
break; return new VCMGenericEncoder(*(new VP8SimulcastEncoder));
} else {
return new VCMGenericEncoder(*(new VP8Encoder));
}
#endif #endif
#ifdef VIDEOCODEC_I420 #ifdef VIDEOCODEC_I420
case kVideoCodecI420: case kVideoCodecI420:
return new VCMGenericEncoder(*(new I420Encoder)); if (!simulcast) {
break; return new VCMGenericEncoder(*(new I420Encoder));
}
return NULL;
#endif #endif
default: default:
return NULL; return NULL;
break;
} }
} }
@@ -203,6 +209,8 @@ VCMCodecDataBase::Codec(WebRtc_UWord8 listId, VideoCodec *settings)
settings->maxFramerate = VCM_DEFAULT_FRAME_RATE; settings->maxFramerate = VCM_DEFAULT_FRAME_RATE;
settings->width = VCM_DEFAULT_CODEC_WIDTH; settings->width = VCM_DEFAULT_CODEC_WIDTH;
settings->height = VCM_DEFAULT_CODEC_HEIGHT; settings->height = VCM_DEFAULT_CODEC_HEIGHT;
settings->numberOfSimulcastStreams = 0;
settings->codecSpecific.VP8.numberOfTemporalLayers = 1;
break; break;
} }
#endif #endif
@@ -222,6 +230,7 @@ VCMCodecDataBase::Codec(WebRtc_UWord8 listId, VideoCodec *settings)
settings->width = VCM_DEFAULT_CODEC_WIDTH; settings->width = VCM_DEFAULT_CODEC_WIDTH;
settings->height = VCM_DEFAULT_CODEC_HEIGHT; settings->height = VCM_DEFAULT_CODEC_HEIGHT;
settings->minBitrate = VCM_MIN_BITRATE; settings->minBitrate = VCM_MIN_BITRATE;
settings->numberOfSimulcastStreams = 0;
break; break;
} }
#endif #endif
@@ -387,19 +396,32 @@ VCMCodecDataBase::SetEncoder(const VideoCodec* settings,
} }
else else
{ {
_ptrEncoder = CreateEncoder(settings->codecType); bool simulcast = false;
if (settings->numberOfSimulcastStreams > 1)
{
simulcast = true;
}
_ptrEncoder = CreateEncoder(settings->codecType, simulcast);
_currentEncIsExternal = false; _currentEncIsExternal = false;
} }
VCMencodedFrameCallback->SetPayloadType(settings->plType); VCMencodedFrameCallback->SetPayloadType(settings->plType);
if (_ptrEncoder == NULL) if (_ptrEncoder == NULL)
{ {
WEBRTC_TRACE(webrtc::kTraceError,
webrtc::kTraceVideoCoding,
VCMId(_id),
"Failed to create encoder: %s.",
settings->plName);
return NULL; return NULL;
} }
if (_ptrEncoder->InitEncode(settings, _numberOfCores, _maxPayloadSize) < 0) if (_ptrEncoder->InitEncode(settings, _numberOfCores, _maxPayloadSize) < 0)
{ {
WEBRTC_TRACE(webrtc::kTraceError,
webrtc::kTraceVideoCoding,
VCMId(_id),
"Failed to initialize encoder: %s.",
settings->plName);
DeleteEncoder(); DeleteEncoder();
return NULL; return NULL;
} }

View File

@@ -182,7 +182,8 @@ protected:
/** /**
* Create an internal Encoder given a codec type * Create an internal Encoder given a codec type
*/ */
VCMGenericEncoder* CreateEncoder(VideoCodecType type) const; VCMGenericEncoder* CreateEncoder(const VideoCodecType type,
const bool simulcast) const;
void DeleteEncoder(); void DeleteEncoder();
/* /*

View File

@@ -51,7 +51,9 @@ WebRtc_Word32 VCMGenericEncoder::Release()
} }
WebRtc_Word32 WebRtc_Word32
VCMGenericEncoder::InitEncode(const VideoCodec* settings, WebRtc_Word32 numberOfCores, WebRtc_UWord32 maxPayloadSize) VCMGenericEncoder::InitEncode(const VideoCodec* settings,
WebRtc_Word32 numberOfCores,
WebRtc_UWord32 maxPayloadSize)
{ {
_bitRate = settings->startBitrate; _bitRate = settings->startBitrate;
_frameRate = settings->maxFramerate; _frameRate = settings->maxFramerate;
@@ -66,16 +68,21 @@ VCMGenericEncoder::InitEncode(const VideoCodec* settings, WebRtc_Word32 numberOf
WebRtc_Word32 WebRtc_Word32
VCMGenericEncoder::Encode(const VideoFrame& inputFrame, VCMGenericEncoder::Encode(const VideoFrame& inputFrame,
const CodecSpecificInfo* codecSpecificInfo, const CodecSpecificInfo* codecSpecificInfo,
FrameType frameType) FrameType* frameType)
{ {
RawImage rawImage(inputFrame.Buffer(), inputFrame.Length(), inputFrame.Size()); RawImage rawImage(inputFrame.Buffer(),
inputFrame.Length(),
inputFrame.Size());
rawImage._width = inputFrame.Width(); rawImage._width = inputFrame.Width();
rawImage._height = inputFrame.Height(); rawImage._height = inputFrame.Height();
rawImage._timeStamp = inputFrame.TimeStamp(); rawImage._timeStamp = inputFrame.TimeStamp();
WebRtc_Word32 ret = _encoder.Encode(rawImage, codecSpecificInfo, VCMEncodedFrame::ConvertFrameType(frameType)); VideoFrameType videoFrameTypes[kMaxSimulcastStreams];
for (int i = 0; i < kMaxSimulcastStreams; i++)
return ret; {
videoFrameTypes[i] = VCMEncodedFrame::ConvertFrameType(frameType[i]);
}
return _encoder.Encode(rawImage, codecSpecificInfo, videoFrameTypes);
} }
WebRtc_Word32 WebRtc_Word32
@@ -125,10 +132,15 @@ VCMGenericEncoder::SetPeriodicKeyFrames(bool enable)
} }
WebRtc_Word32 WebRtc_Word32
VCMGenericEncoder::RequestFrame(FrameType frameType) VCMGenericEncoder::RequestFrame(FrameType* frameTypes)
{ {
RawImage image; RawImage image;
return _encoder.Encode(image, NULL, VCMEncodedFrame::ConvertFrameType(frameType)); VideoFrameType videoFrameTypes[kMaxSimulcastStreams];
for (int i = 0; i < kMaxSimulcastStreams; i++)
{
videoFrameTypes[i] = VCMEncodedFrame::ConvertFrameType(frameTypes[i]);
}
return _encoder.Encode(image, NULL, videoFrameTypes);
} }
WebRtc_Word32 WebRtc_Word32
@@ -186,22 +198,22 @@ VCMEncodedFrameCallback::Encoded(
WebRtc_UWord32 encodedBytes = 0; WebRtc_UWord32 encodedBytes = 0;
if (_sendCallback != NULL) if (_sendCallback != NULL)
{ {
encodedBytes = encodedImage._length; encodedBytes = encodedImage._length;
if (_bitStreamAfterEncoder != NULL) if (_bitStreamAfterEncoder != NULL)
{ {
fwrite(encodedImage._buffer, 1, encodedImage._length, _bitStreamAfterEncoder); fwrite(encodedImage._buffer, 1, encodedImage._length, _bitStreamAfterEncoder);
} }
RTPVideoTypeHeader rtpTypeHeader; RTPVideoHeader rtpVideoHeader;
RTPVideoTypeHeader* rtpTypeHeaderPtr = &rtpTypeHeader; RTPVideoHeader* rtpVideoHeaderPtr = &rtpVideoHeader;
if (codecSpecificInfo) if (codecSpecificInfo)
{ {
CopyCodecSpecific(*codecSpecificInfo, &rtpTypeHeaderPtr); CopyCodecSpecific(*codecSpecificInfo, &rtpVideoHeaderPtr);
} }
else else
{ {
rtpTypeHeaderPtr = NULL; rtpVideoHeaderPtr = NULL;
} }
WebRtc_Word32 callbackReturn = _sendCallback->SendData( WebRtc_Word32 callbackReturn = _sendCallback->SendData(
@@ -211,7 +223,7 @@ VCMEncodedFrameCallback::Encoded(
encodedImage._buffer, encodedImage._buffer,
encodedBytes, encodedBytes,
*fragmentationHeader, *fragmentationHeader,
rtpTypeHeaderPtr); rtpVideoHeaderPtr);
if (callbackReturn < 0) if (callbackReturn < 0)
{ {
return callbackReturn; return callbackReturn;
@@ -227,7 +239,6 @@ VCMEncodedFrameCallback::Encoded(
{ {
return _mediaOpt->DropFrame(); // Signal to encoder to drop next frame return _mediaOpt->DropFrame(); // Signal to encoder to drop next frame
} }
return VCM_OK; return VCM_OK;
} }
@@ -244,13 +255,17 @@ VCMEncodedFrameCallback::SetMediaOpt(VCMMediaOptimization *mediaOpt)
} }
void VCMEncodedFrameCallback::CopyCodecSpecific(const CodecSpecificInfo& info, void VCMEncodedFrameCallback::CopyCodecSpecific(const CodecSpecificInfo& info,
RTPVideoTypeHeader** rtp) { RTPVideoHeader** rtp) {
switch (info.codecType) switch (info.codecType) {
{
case kVideoCodecVP8: { case kVideoCodecVP8: {
(*rtp)->VP8.InitRTPVideoHeaderVP8(); (*rtp)->codecHeader.VP8.InitRTPVideoHeaderVP8();
(*rtp)->VP8.pictureId = info.codecSpecific.VP8.pictureId; (*rtp)->codecHeader.VP8.pictureId =
(*rtp)->VP8.nonReference = info.codecSpecific.VP8.nonReference; info.codecSpecific.VP8.pictureId;
(*rtp)->codecHeader.VP8.nonReference =
info.codecSpecific.VP8.nonReference;
(*rtp)->codecHeader.VP8.temporalIdx =
info.codecSpecific.VP8.temporalIdx;
(*rtp)->simulcastIdx = info.codecSpecific.VP8.simulcastIdx;
return; return;
} }
default: { default: {
@@ -258,8 +273,6 @@ void VCMEncodedFrameCallback::CopyCodecSpecific(const CodecSpecificInfo& info,
*rtp = NULL; *rtp = NULL;
return; return;
} }
} }
} }
} // namespace webrtc } // namespace webrtc

View File

@@ -59,7 +59,7 @@ private:
* in info, rtp is set to NULL. * in info, rtp is set to NULL.
*/ */
static void CopyCodecSpecific(const CodecSpecificInfo& info, static void CopyCodecSpecific(const CodecSpecificInfo& info,
RTPVideoTypeHeader** rtp); RTPVideoHeader** rtp);
VCMPacketizationCallback* _sendCallback; VCMPacketizationCallback* _sendCallback;
VCMMediaOptimization* _mediaOpt; VCMMediaOptimization* _mediaOpt;
@@ -103,7 +103,7 @@ public:
*/ */
WebRtc_Word32 Encode(const VideoFrame& inputFrame, WebRtc_Word32 Encode(const VideoFrame& inputFrame,
const CodecSpecificInfo* codecSpecificInfo, const CodecSpecificInfo* codecSpecificInfo,
FrameType frameType); FrameType* frameType);
/** /**
* Set new target bit rate and frame rate * Set new target bit rate and frame rate
* Return Value: new bit rate if OK, otherwise <0s * Return Value: new bit rate if OK, otherwise <0s
@@ -129,7 +129,7 @@ public:
WebRtc_Word32 SetPeriodicKeyFrames(bool enable); WebRtc_Word32 SetPeriodicKeyFrames(bool enable);
WebRtc_Word32 RequestFrame(FrameType frameType); WebRtc_Word32 RequestFrame(FrameType* frameTypes);
bool InternalSource() const; bool InternalSource() const;

View File

@@ -67,7 +67,6 @@ _scheduleKeyRequest(false),
_sendCritSect(*CriticalSectionWrapper::CreateCriticalSection()), _sendCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
_encoder(), _encoder(),
_encodedFrameCallback(), _encodedFrameCallback(),
_nextFrameType(kVideoFrameDelta),
_mediaOpt(id), _mediaOpt(id),
_sendCodecType(kVideoCodecUnknown), _sendCodecType(kVideoCodecUnknown),
_sendStatsCallback(NULL), _sendStatsCallback(NULL),
@@ -79,6 +78,10 @@ _sendStatsTimer(1000),
_retransmissionTimer(10), _retransmissionTimer(10),
_keyRequestTimer(500) _keyRequestTimer(500)
{ {
for (int i = 0; i < kMaxSimulcastStreams; i++)
{
_nextFrameType[i] = kVideoFrameDelta;
}
#ifdef DEBUG_DECODER_BIT_STREAM #ifdef DEBUG_DECODER_BIT_STREAM
_bitStreamBeforeDecoder = fopen("decoderBitStream.bit", "wb"); _bitStreamBeforeDecoder = fopen("decoderBitStream.bit", "wb");
#endif #endif
@@ -786,7 +789,7 @@ VideoCodingModuleImpl::SetVideoProtection(VCMVideoProtection videoProtection,
// Add one raw video frame to the encoder, blocking. // Add one raw video frame to the encoder, blocking.
WebRtc_Word32 WebRtc_Word32
VideoCodingModuleImpl::AddVideoFrame(const VideoFrame& videoFrame, VideoCodingModuleImpl::AddVideoFrame(const VideoFrame& videoFrame,
const VideoContentMetrics* _contentMetrics, const VideoContentMetrics* contentMetrics,
const CodecSpecificInfo* codecSpecificInfo) const CodecSpecificInfo* codecSpecificInfo)
{ {
WEBRTC_TRACE(webrtc::kTraceModuleCall, WEBRTC_TRACE(webrtc::kTraceModuleCall,
@@ -799,12 +802,10 @@ VideoCodingModuleImpl::AddVideoFrame(const VideoFrame& videoFrame,
{ {
return VCM_UNINITIALIZED; return VCM_UNINITIALIZED;
} }
if (_nextFrameType[0] == kFrameEmpty)
if (_nextFrameType == kFrameEmpty)
{ {
return VCM_OK; return VCM_OK;
} }
_mediaOpt.UpdateIncomingFrameRate(); _mediaOpt.UpdateIncomingFrameRate();
if (_mediaOpt.DropFrame()) if (_mediaOpt.DropFrame())
@@ -816,12 +817,10 @@ VideoCodingModuleImpl::AddVideoFrame(const VideoFrame& videoFrame,
} }
else else
{ {
_mediaOpt.updateContentData(_contentMetrics); _mediaOpt.updateContentData(contentMetrics);
const FrameType requestedFrameType = _nextFrameType;
_nextFrameType = kVideoFrameDelta; // default frame type
WebRtc_Word32 ret = _encoder->Encode(videoFrame, WebRtc_Word32 ret = _encoder->Encode(videoFrame,
codecSpecificInfo, codecSpecificInfo,
requestedFrameType); _nextFrameType);
if (_encoderInputFile != NULL) if (_encoderInputFile != NULL)
{ {
fwrite(videoFrame.Buffer(), 1, videoFrame.Length(), fwrite(videoFrame.Buffer(), 1, videoFrame.Length(),
@@ -829,36 +828,42 @@ VideoCodingModuleImpl::AddVideoFrame(const VideoFrame& videoFrame,
} }
if (ret < 0) if (ret < 0)
{ {
_nextFrameType = requestedFrameType;
WEBRTC_TRACE(webrtc::kTraceError, WEBRTC_TRACE(webrtc::kTraceError,
webrtc::kTraceVideoCoding, webrtc::kTraceVideoCoding,
VCMId(_id), VCMId(_id),
"Encode error: %d", ret); "Encode error: %d", ret);
return ret; return ret;
} }
for (int i = 0; i < kMaxSimulcastStreams; i++)
{
_nextFrameType[i] = kVideoFrameDelta; // default frame type
}
} }
return VCM_OK; return VCM_OK;
} }
// Next frame encoded should be of the type frameType // Next frame encoded should be of the type frameType
// Good for only one frame // Good for only one frame
WebRtc_Word32 WebRtc_Word32
VideoCodingModuleImpl::FrameTypeRequest(FrameType frameType) VideoCodingModuleImpl::FrameTypeRequest(FrameType frameType,
WebRtc_UWord8 simulcastIdx)
{ {
assert(simulcastIdx < kMaxSimulcastStreams);
WEBRTC_TRACE(webrtc::kTraceModuleCall, WEBRTC_TRACE(webrtc::kTraceModuleCall,
webrtc::kTraceVideoCoding, webrtc::kTraceVideoCoding,
VCMId(_id), VCMId(_id),
"FrameTypeRequest()"); "FrameTypeRequest()");
CriticalSectionScoped cs(_sendCritSect); CriticalSectionScoped cs(_sendCritSect);
_nextFrameType = frameType; _nextFrameType[simulcastIdx] = frameType;
if (_encoder != NULL && _encoder->InternalSource()) if (_encoder != NULL && _encoder->InternalSource())
{ {
// Try to request the frame if we have an external encoder with // Try to request the frame if we have an external encoder with
// internal source since AddVideoFrame never will be called. // internal source since AddVideoFrame never will be called.
if (_encoder->RequestFrame(_nextFrameType) == WEBRTC_VIDEO_CODEC_OK) if (_encoder->RequestFrame(_nextFrameType) == WEBRTC_VIDEO_CODEC_OK)
{ {
_nextFrameType = kVideoFrameDelta; _nextFrameType[simulcastIdx] = kVideoFrameDelta;
} }
} }
return VCM_OK; return VCM_OK;
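For illustration (assuming vcm is a pointer to an initialized VideoCodingModule; the stream index is arbitrary), a key-frame request now targets a single simulcast stream:

// Sketch: request a key frame on simulcast stream 1 only; the other
// streams keep their default kVideoFrameDelta until their own request.
vcm->FrameTypeRequest(webrtc::kVideoFrameKey, /*simulcastIdx=*/1);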

View File

@@ -150,7 +150,8 @@ public:
const CodecSpecificInfo* codecSpecificInfo = NULL); const CodecSpecificInfo* codecSpecificInfo = NULL);
// Next frame encoded should be of the type frameType. // Next frame encoded should be of the type frameType.
virtual WebRtc_Word32 FrameTypeRequest(FrameType frameType); virtual WebRtc_Word32 FrameTypeRequest(FrameType frameType,
WebRtc_UWord8 simulcastIdx);
//Enable frame dropper //Enable frame dropper
virtual WebRtc_Word32 EnableFrameDropper(bool enable); virtual WebRtc_Word32 EnableFrameDropper(bool enable);
@@ -277,10 +278,10 @@ private:
VCMKeyRequestMode _keyRequestMode; VCMKeyRequestMode _keyRequestMode;
bool _scheduleKeyRequest; bool _scheduleKeyRequest;
CriticalSectionWrapper& _sendCritSect; CriticalSectionWrapper& _sendCritSect; // Critical section for send side
VCMGenericEncoder* _encoder; VCMGenericEncoder* _encoder;
VCMEncodedFrameCallback _encodedFrameCallback; VCMEncodedFrameCallback _encodedFrameCallback;
FrameType _nextFrameType; FrameType _nextFrameType[kMaxSimulcastStreams];
VCMMediaOptimization _mediaOpt; VCMMediaOptimization _mediaOpt;
VideoCodecType _sendCodecType; VideoCodecType _sendCodecType;
VCMSendStatisticsCallback* _sendStatsCallback; VCMSendStatisticsCallback* _sendStatsCallback;
@@ -292,7 +293,5 @@ private:
VCMProcessTimer _retransmissionTimer; VCMProcessTimer _retransmissionTimer;
VCMProcessTimer _keyRequestTimer; VCMProcessTimer _keyRequestTimer;
}; };
} // namespace webrtc } // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_CODING_VIDEO_CODING_IMPL_H_ #endif // WEBRTC_MODULES_VIDEO_CODING_VIDEO_CODING_IMPL_H_

View File

@@ -241,7 +241,7 @@ CodecDataBaseTest::Perform(CmdArgs& args)
// Try to decode a delta frame. Should get a warning since we have enabled the "require key frame" setting // Try to decode a delta frame. Should get a warning since we have enabled the "require key frame" setting
// and because no frame type request callback has been registered. // and because no frame type request callback has been registered.
TEST(_vcm->Decode() == VCM_MISSING_CALLBACK); TEST(_vcm->Decode() == VCM_MISSING_CALLBACK);
TEST(_vcm->FrameTypeRequest(kVideoFrameKey) == VCM_OK); TEST(_vcm->FrameTypeRequest(kVideoFrameKey, 0) == VCM_OK);
TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK); TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
TEST(_vcm->Decode() == VCM_OK); TEST(_vcm->Decode() == VCM_OK);
@@ -251,7 +251,7 @@ CodecDataBaseTest::Perform(CmdArgs& args)
sendCodec.width = _width; sendCodec.width = _width;
sendCodec.height = _height; sendCodec.height = _height;
TEST(_vcm->RegisterReceiveCodec(&sendCodec, 1) == VCM_OK); TEST(_vcm->RegisterReceiveCodec(&sendCodec, 1) == VCM_OK);
TEST(_vcm->FrameTypeRequest(kVideoFrameKey) == VCM_OK); TEST(_vcm->FrameTypeRequest(kVideoFrameKey, 0) == VCM_OK);
waitEvent->Wait(33); waitEvent->Wait(33);
_timeStamp += (WebRtc_UWord32)(9e4 / _frameRate); _timeStamp += (WebRtc_UWord32)(9e4 / _frameRate);
sourceFrame.SetTimeStamp(_timeStamp); sourceFrame.SetTimeStamp(_timeStamp);
@@ -261,7 +261,7 @@ CodecDataBaseTest::Perform(CmdArgs& args)
waitEvent->Wait(33); waitEvent->Wait(33);
_timeStamp += (WebRtc_UWord32)(9e4 / _frameRate); _timeStamp += (WebRtc_UWord32)(9e4 / _frameRate);
sourceFrame.SetTimeStamp(_timeStamp); sourceFrame.SetTimeStamp(_timeStamp);
TEST(_vcm->FrameTypeRequest(kVideoFrameKey) == VCM_OK); TEST(_vcm->FrameTypeRequest(kVideoFrameKey, 0) == VCM_OK);
TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK); TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
TEST(_vcm->Decode() == VCM_OK); TEST(_vcm->Decode() == VCM_OK);
TEST(_vcm->ResetDecoder() == VCM_OK); TEST(_vcm->ResetDecoder() == VCM_OK);

View File

@@ -568,8 +568,8 @@ VCMEncComplete_KeyReqTest::SendData(
const WebRtc_UWord32 timeStamp, const WebRtc_UWord32 timeStamp,
const WebRtc_UWord8* payloadData, const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize, const WebRtc_UWord32 payloadSize,
const RTPFragmentationHeader& fragmentationHeader, const RTPFragmentationHeader& /*fragmentationHeader*/,
const webrtc::RTPVideoTypeHeader* videoTypeHdr) const webrtc::RTPVideoHeader* /*videoHdr*/)
{ {
WebRtcRTPHeader rtpInfo; WebRtcRTPHeader rtpInfo;
rtpInfo.header.markerBit = true; // end of frame rtpInfo.header.markerBit = true; // end of frame

View File

@@ -90,7 +90,7 @@ public:
const WebRtc_UWord8* payloadData, const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize, const WebRtc_UWord32 payloadSize,
const webrtc::RTPFragmentationHeader& fragmentationHeader, const webrtc::RTPFragmentationHeader& fragmentationHeader,
const webrtc::RTPVideoTypeHeader* videoTypeHdr); const webrtc::RTPVideoHeader* videoHdr);
private: private:
webrtc::VideoCodingModule& _vcm; webrtc::VideoCodingModule& _vcm;
WebRtc_UWord16 _seqNo; WebRtc_UWord16 _seqNo;

View File

@@ -209,15 +209,22 @@ MediaOptTest::GeneralSetup()
// Registering codecs for the RTP module // Registering codecs for the RTP module
// Register receive payload // Register receive and send payload
_rtp->RegisterReceivePayload("VP8", VCM_VP8_PAYLOAD_TYPE); VideoCodec videoCodec;
_rtp->RegisterReceivePayload("ULPFEC", VCM_ULPFEC_PAYLOAD_TYPE); strncpy(videoCodec.plName, "VP8", 32);
_rtp->RegisterReceivePayload("RED", VCM_RED_PAYLOAD_TYPE); videoCodec.plType = VCM_VP8_PAYLOAD_TYPE;
_rtp->RegisterReceivePayload(videoCodec);
_rtp->RegisterSendPayload(videoCodec);
// Register send payload strncpy(videoCodec.plName, "ULPFEC", 32);
_rtp->RegisterSendPayload("VP8", VCM_VP8_PAYLOAD_TYPE); videoCodec.plType = VCM_ULPFEC_PAYLOAD_TYPE;
_rtp->RegisterSendPayload("ULPFEC", VCM_ULPFEC_PAYLOAD_TYPE); _rtp->RegisterReceivePayload(videoCodec);
_rtp->RegisterSendPayload("RED", VCM_RED_PAYLOAD_TYPE); _rtp->RegisterSendPayload(videoCodec);
strncpy(videoCodec.plName, "RED", 32);
videoCodec.plType = VCM_RED_PAYLOAD_TYPE;
_rtp->RegisterReceivePayload(videoCodec);
_rtp->RegisterSendPayload(videoCodec);
if (_nackFecEnabled == 1) if (_nackFecEnabled == 1)
_rtp->SetGenericFECStatus(_nackFecEnabled, VCM_RED_PAYLOAD_TYPE, _rtp->SetGenericFECStatus(_nackFecEnabled, VCM_RED_PAYLOAD_TYPE,

View File

@@ -149,15 +149,24 @@ int MTRxTxTest(CmdArgs& args)
return -1; return -1;
} }
// registering codecs for the RTP module // registering codecs for the RTP module
TEST(rtp->RegisterReceivePayload("ULPFEC", VCM_ULPFEC_PAYLOAD_TYPE) == 0); VideoCodec videoCodec;
TEST(rtp->RegisterReceivePayload("RED", VCM_RED_PAYLOAD_TYPE) == 0); strncpy(videoCodec.plName, "ULPFEC", 32);
TEST(rtp->RegisterReceivePayload(args.codecName.c_str(), VCM_VP8_PAYLOAD_TYPE) == 0); videoCodec.plType = VCM_ULPFEC_PAYLOAD_TYPE;
TEST(rtp->RegisterReceivePayload(videoCodec) == 0);
strncpy(videoCodec.plName, "RED", 32);
videoCodec.plType = VCM_RED_PAYLOAD_TYPE;
TEST(rtp->RegisterReceivePayload(videoCodec) == 0);
strncpy(videoCodec.plName, args.codecName.c_str(), 32);
videoCodec.plType = VCM_VP8_PAYLOAD_TYPE;
videoCodec.maxBitrate = 10000;
TEST(rtp->RegisterReceivePayload(videoCodec) == 0);
TEST(rtp->RegisterSendPayload(videoCodec) == 0);
// inform RTP Module of error resilience features // inform RTP Module of error resilience features
TEST(rtp->SetGenericFECStatus(fecEnabled, VCM_RED_PAYLOAD_TYPE, VCM_ULPFEC_PAYLOAD_TYPE) == 0); TEST(rtp->SetGenericFECStatus(fecEnabled, VCM_RED_PAYLOAD_TYPE, VCM_ULPFEC_PAYLOAD_TYPE) == 0);
TEST(rtp->RegisterSendPayload(args.codecName.c_str(), VCM_VP8_PAYLOAD_TYPE, 90000, 1, 10000) == 0);
//VCM //VCM
VideoCodingModule* vcm = VideoCodingModule::Create(1); VideoCodingModule* vcm = VideoCodingModule::Create(1);
if (vcm->InitializeReceiver() < 0) if (vcm->InitializeReceiver() < 0)

View File

@@ -77,8 +77,8 @@ VCMNTEncodeCompleteCallback::SendData(
const WebRtc_UWord32 timeStamp, const WebRtc_UWord32 timeStamp,
const WebRtc_UWord8* payloadData, const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize, const WebRtc_UWord32 payloadSize,
const RTPFragmentationHeader& fragmentationHeader, const RTPFragmentationHeader& /*fragmentationHeader*/,
const webrtc::RTPVideoTypeHeader* videoTypeHdr) const webrtc::RTPVideoHeader* videoHdr)
{ {
// will call the VCMReceiver input packet // will call the VCMReceiver input packet
@@ -101,9 +101,9 @@ VCMNTEncodeCompleteCallback::SendData(
case kVideoCodecVP8: case kVideoCodecVP8:
rtpInfo.type.Video.codec = kRTPVideoVP8; rtpInfo.type.Video.codec = kRTPVideoVP8;
rtpInfo.type.Video.codecHeader.VP8.nonReference = rtpInfo.type.Video.codecHeader.VP8.nonReference =
videoTypeHdr->VP8.nonReference; videoHdr->codecHeader.VP8.nonReference;
rtpInfo.type.Video.codecHeader.VP8.pictureId = rtpInfo.type.Video.codecHeader.VP8.pictureId =
videoTypeHdr->VP8.pictureId; videoHdr->codecHeader.VP8.pictureId;
break; break;
case kVideoCodecI420: case kVideoCodecI420:
rtpInfo.type.Video.codec = kRTPVideoI420; rtpInfo.type.Video.codec = kRTPVideoI420;

View File

@@ -34,7 +34,7 @@ public:
const WebRtc_UWord8* payloadData, const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize, const WebRtc_UWord32 payloadSize,
const webrtc::RTPFragmentationHeader& fragmentationHeader, const webrtc::RTPFragmentationHeader& fragmentationHeader,
const webrtc::RTPVideoTypeHeader* videoTypeHdr); const webrtc::RTPVideoHeader* videoHdr);
// Register existing VCM. Currently - encode and decode with the same vcm module. // Register existing VCM. Currently - encode and decode with the same vcm module.
void RegisterReceiverVCM(webrtc::VideoCodingModule *vcm); void RegisterReceiverVCM(webrtc::VideoCodingModule *vcm);

View File

@@ -205,7 +205,10 @@ WebRtc_Word32 RTPPlayer::Initialize(const ListWrapper& payloadList)
PayloadCodecTuple* payloadType = static_cast<PayloadCodecTuple*>(item->GetItem()); PayloadCodecTuple* payloadType = static_cast<PayloadCodecTuple*>(item->GetItem());
if (payloadType != NULL) if (payloadType != NULL)
{ {
if (_rtpModule.RegisterReceivePayload(payloadType->name.c_str(), payloadType->payloadType) < 0) VideoCodec videoCodec;
strncpy(videoCodec.plName, payloadType->name.c_str(), 32);
videoCodec.plType = payloadType->payloadType;
if (_rtpModule.RegisterReceivePayload(videoCodec) < 0)
{ {
return -1; return -1;
} }

View File

@@ -51,7 +51,7 @@ VCMEncodeCompleteCallback::SendData(
const WebRtc_UWord8* payloadData, const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize, const WebRtc_UWord32 payloadSize,
const RTPFragmentationHeader& fragmentationHeader, const RTPFragmentationHeader& fragmentationHeader,
const RTPVideoTypeHeader* videoTypeHdr) const RTPVideoHeader* videoHdr)
{ {
// will call the VCMReceiver input packet // will call the VCMReceiver input packet
_frameType = frameType; _frameType = frameType;
@@ -71,9 +71,9 @@ VCMEncodeCompleteCallback::SendData(
break; break;
case webrtc::kRTPVideoVP8: case webrtc::kRTPVideoVP8:
rtpInfo.type.Video.codecHeader.VP8.nonReference = rtpInfo.type.Video.codecHeader.VP8.nonReference =
videoTypeHdr->VP8.nonReference; videoHdr->codecHeader.VP8.nonReference;
rtpInfo.type.Video.codecHeader.VP8.pictureId = rtpInfo.type.Video.codecHeader.VP8.pictureId =
videoTypeHdr->VP8.pictureId; videoHdr->codecHeader.VP8.pictureId;
break; break;
case webrtc::kRTPVideoI420: case webrtc::kRTPVideoI420:
break; break;
@@ -145,7 +145,7 @@ VCMRTPEncodeCompleteCallback::SendData(
const WebRtc_UWord8* payloadData, const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize, const WebRtc_UWord32 payloadSize,
const RTPFragmentationHeader& fragmentationHeader, const RTPFragmentationHeader& fragmentationHeader,
const RTPVideoTypeHeader* videoTypeHdr) const RTPVideoHeader* videoHdr)
{ {
_frameType = frameType; _frameType = frameType;
_encodedBytes+= payloadSize; _encodedBytes+= payloadSize;
@@ -156,7 +156,7 @@ VCMRTPEncodeCompleteCallback::SendData(
payloadData, payloadData,
payloadSize, payloadSize,
&fragmentationHeader, &fragmentationHeader,
videoTypeHdr); videoHdr);
} }
float float

View File

@@ -50,7 +50,7 @@ public:
const WebRtc_UWord8 payloadType, const WebRtc_UWord32 timeStamp, const WebRtc_UWord8 payloadType, const WebRtc_UWord32 timeStamp,
const WebRtc_UWord8* payloadData, const WebRtc_UWord32 payloadSize, const WebRtc_UWord8* payloadData, const WebRtc_UWord32 payloadSize,
const RTPFragmentationHeader& fragmentationHeader, const RTPFragmentationHeader& fragmentationHeader,
const RTPVideoTypeHeader* videoTypeHdr); const RTPVideoHeader* videoHdr);
// Register exisitng VCM. Currently - encode and decode under same module. // Register exisitng VCM. Currently - encode and decode under same module.
void RegisterReceiverVCM(VideoCodingModule *vcm) {_VCMReceiver = vcm;} void RegisterReceiverVCM(VideoCodingModule *vcm) {_VCMReceiver = vcm;}
// Return size of last encoded frame data (all frames in the sequence) // Return size of last encoded frame data (all frames in the sequence)
@@ -106,7 +106,7 @@ public:
const WebRtc_UWord8 payloadType, const WebRtc_UWord32 timeStamp, const WebRtc_UWord8 payloadType, const WebRtc_UWord32 timeStamp,
const WebRtc_UWord8* payloadData, const WebRtc_UWord32 payloadSize, const WebRtc_UWord8* payloadData, const WebRtc_UWord32 payloadSize,
const RTPFragmentationHeader& fragmentationHeader, const RTPFragmentationHeader& fragmentationHeader,
const RTPVideoTypeHeader* videoTypeHdr); const RTPVideoHeader* videoHdr);
// Return size of last encoded frame. Value good for one call // Return size of last encoded frame. Value good for one call
// (resets to zero after call to inform test of frame drop) // (resets to zero after call to inform test of frame drop)
float EncodedBytes(); float EncodedBytes();

View File

@@ -47,6 +47,12 @@ enum ViEKeyFrameRequestMethod
kViEKeyFrameRequestFirRtcp = 3 kViEKeyFrameRequestFirRtcp = 3
}; };
enum StreamType
{
kViEStreamTypeNormal = 0, // Normal media stream
kViEStreamTypeRtx = 1 // Retransmission media stream
};
// ---------------------------------------------------------------------------- // ----------------------------------------------------------------------------
// ViERTPObserver // ViERTPObserver
// ---------------------------------------------------------------------------- // ----------------------------------------------------------------------------
@@ -120,13 +126,21 @@ public:
// This function enables you to specify the RTP synchronization source // This function enables you to specify the RTP synchronization source
// identifier (SSRC) explicitly. // identifier (SSRC) explicitly.
virtual int SetLocalSSRC(const int videoChannel, virtual int SetLocalSSRC(const int videoChannel,
const unsigned int SSRC) = 0; const unsigned int SSRC,
const StreamType usage = kViEStreamTypeNormal,
const unsigned char simulcastIdx = 0) = 0;
// This function gets the SSRC for the outgoing RTP stream for the specified // This function gets the SSRC for the outgoing RTP stream for the specified
// channel. // channel.
virtual int GetLocalSSRC(const int videoChannel, virtual int GetLocalSSRC(const int videoChannel,
unsigned int& SSRC) const = 0; unsigned int& SSRC) const = 0;
// This function maps an incoming SSRC to a StreamType so that the engine
// knows which incoming stream is the normal media stream and which is the RTX
virtual int SetRemoteSSRCType(const int videoChannel,
const StreamType usage,
const unsigned int SSRC) const = 0;
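For illustration, a possible call sequence against this interface (a sketch only; rtp is assumed to be a pointer to the surrounding RTP/RTCP interface, and the channel and SSRC values are arbitrary):

// Sketch: label two outgoing simulcast streams with their own SSRCs ...
rtp->SetLocalSSRC(videoChannel, 0x1111, kViEStreamTypeNormal, 0);
rtp->SetLocalSSRC(videoChannel, 0x2222, kViEStreamTypeNormal, 1);
// ... and tell the engine which incoming SSRC carries retransmissions.
rtp->SetRemoteSSRCType(videoChannel, kViEStreamTypeRtx, 0x3333);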
// This function gets the SSRC for the incoming RTP stream for the specified // This function gets the SSRC for the incoming RTP stream for the specified
// channel. // channel.
virtual int GetRemoteSSRC(const int videoChannel, virtual int GetRemoteSSRC(const int videoChannel,

View File

@@ -219,7 +219,6 @@ int ViEBaseImpl::CreateChannel(int& videoChannel, int originalChannel)
SetLastError(kViEBaseInvalidChannelId); SetLastError(kViEBaseInvalidChannelId);
return -1; return -1;
} }
if (_channelManager.CreateChannel(videoChannel, if (_channelManager.CreateChannel(videoChannel,
originalChannel) == -1) originalChannel) == -1)
{ {

View File

@@ -910,8 +910,8 @@ WebRtc_Word32 ViECapturer::InitEncode(const VideoCodec* codecSettings,
*/ */
WebRtc_Word32 WebRtc_Word32
ViECapturer::Encode(const RawImage& inputImage, ViECapturer::Encode(const RawImage& inputImage,
const CodecSpecificInfo* codecSpecificInfo, /*= NULL,*/ const CodecSpecificInfo* codecSpecificInfo,
VideoFrameType frameType /*= kDeltaFrame*/) const VideoFrameType* frameTypes)
{ {
CriticalSectionScoped cs(_encodingCritsect); CriticalSectionScoped cs(_encodingCritsect);
@@ -919,14 +919,13 @@ ViECapturer::Encode(const RawImage& inputImage,
if (!_captureEncoder) if (!_captureEncoder)
return WEBRTC_VIDEO_CODEC_UNINITIALIZED; return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
if (frameType == kKeyFrame) if (*frameTypes == kKeyFrame)
return _captureEncoder->EncodeFrameType(kVideoFrameKey); return _captureEncoder->EncodeFrameType(kVideoFrameKey);
if (frameType == kSkipFrame) if (*frameTypes == kSkipFrame)
return _captureEncoder->EncodeFrameType(kFrameEmpty); return _captureEncoder->EncodeFrameType(kFrameEmpty);
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
} }
WebRtc_Word32 ViECapturer::RegisterEncodeCompleteCallback( EncodedImageCallback* callback) WebRtc_Word32 ViECapturer::RegisterEncodeCompleteCallback( EncodedImageCallback* callback)

View File

@@ -125,11 +125,10 @@ protected:
WebRtc_Word32 numberOfCores, WebRtc_Word32 numberOfCores,
WebRtc_UWord32 maxPayloadSize); WebRtc_UWord32 maxPayloadSize);
virtual WebRtc_Word32 Encode(const RawImage& inputImage, virtual WebRtc_Word32 Encode(const RawImage& inputImage,
const CodecSpecificInfo* codecSpecificInfo = const CodecSpecificInfo* codecSpecificInfo,
NULL, const VideoFrameType* frameTypes);
VideoFrameType frameType = kDeltaFrame);
virtual WebRtc_Word32 RegisterEncodeCompleteCallback( virtual WebRtc_Word32 RegisterEncodeCompleteCallback(
EncodedImageCallback* callback); EncodedImageCallback* callback);
virtual WebRtc_Word32 Release(); virtual WebRtc_Word32 Release();
virtual WebRtc_Word32 Reset(); virtual WebRtc_Word32 Reset();
virtual WebRtc_Word32 SetPacketLoss(WebRtc_UWord32 packetLoss); virtual WebRtc_Word32 SetPacketLoss(WebRtc_UWord32 packetLoss);

View File

@@ -51,7 +51,6 @@ ViEChannel::ViEChannel(WebRtc_Word32 channelId, WebRtc_Word32 engineId,
_numberOfCores(numberOfCores), _numberOfCores(numberOfCores),
_numSocketThreads(kViESocketThreads), _numSocketThreads(kViESocketThreads),
_callbackCritsect(*CriticalSectionWrapper::CreateCriticalSection()), _callbackCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
_dataCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
_rtpRtcp(*RtpRtcp::CreateRtpRtcp( _rtpRtcp(*RtpRtcp::CreateRtpRtcp(
ViEModuleId(engineId, channelId), false)), ViEModuleId(engineId, channelId), false)),
#ifndef WEBRTC_EXTERNAL_TRANSPORT #ifndef WEBRTC_EXTERNAL_TRANSPORT
@@ -61,8 +60,9 @@ ViEChannel::ViEChannel(WebRtc_Word32 channelId, WebRtc_Word32 engineId,
#endif #endif
_vcm(*VideoCodingModule::Create( _vcm(*VideoCodingModule::Create(
ViEModuleId(engineId, channelId))), ViEModuleId(engineId, channelId))),
_vieReceiver(*(new ViEReceiver(engineId, channelId, _rtpRtcp, _vcm))), _vieReceiver(*(new ViEReceiver(engineId, channelId, _rtpRtcp, _vcm))),
_vieSender(*(new ViESender(engineId, channelId, _rtpRtcp))), _vieSender(*(new ViESender(engineId, channelId))),
_vieSync(*(new ViESyncModule(ViEId(engineId, channelId), _vcm, _vieSync(*(new ViESyncModule(ViEId(engineId, channelId), _vcm,
_rtpRtcp))), _rtpRtcp))),
_moduleProcessThread(moduleProcessThread), _moduleProcessThread(moduleProcessThread),
@@ -161,7 +161,6 @@ WebRtc_Word32 ViEChannel::Init()
__FUNCTION__); __FUNCTION__);
return -1; return -1;
} }
if (_rtpRtcp.RegisterIncomingRTCPCallback(this) != 0) if (_rtpRtcp.RegisterIncomingRTCPCallback(this) != 0)
{ {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo,
@@ -216,8 +215,8 @@ WebRtc_Word32 ViEChannel::Init()
VideoCodec videoCodec; VideoCodec videoCodec;
if (_vcm.Codec(kVideoCodecVP8, &videoCodec) == VCM_OK) if (_vcm.Codec(kVideoCodecVP8, &videoCodec) == VCM_OK)
{ {
_rtpRtcp.RegisterSendPayload(videoCodec.plName, videoCodec.plType); _rtpRtcp.RegisterSendPayload(videoCodec);
_rtpRtcp.RegisterReceivePayload(videoCodec.plName, videoCodec.plType); _rtpRtcp.RegisterReceivePayload(videoCodec);
_vcm.RegisterReceiveCodec(&videoCodec, _numberOfCores); _vcm.RegisterReceiveCodec(&videoCodec, _numberOfCores);
_vcm.RegisterSendCodec(&videoCodec, _numberOfCores, _vcm.RegisterSendCodec(&videoCodec, _numberOfCores,
_rtpRtcp.MaxDataPayloadLength()); _rtpRtcp.MaxDataPayloadLength());
@@ -237,9 +236,11 @@ WebRtc_Word32 ViEChannel::Init()
ViEChannel::~ViEChannel() ViEChannel::~ViEChannel()
{ {
WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo, ViEId(_engineId, _channelId), WEBRTC_TRACE(webrtc::kTraceMemory,
"ViEChannel Destructor, channelId: %d, engineId: %d", webrtc::kTraceVideo,
_channelId, _engineId); ViEId(_engineId, _channelId),
"ViEChannel Destructor, channelId: %d, engineId: %d",
_channelId, _engineId);
// Make sure we don't get more callbacks from the RTP module. // Make sure we don't get more callbacks from the RTP module.
_rtpRtcp.RegisterIncomingRTPCallback(NULL); _rtpRtcp.RegisterIncomingRTPCallback(NULL);
@@ -250,7 +251,16 @@ ViEChannel::~ViEChannel()
_moduleProcessThread.DeRegisterModule(&_rtpRtcp); _moduleProcessThread.DeRegisterModule(&_rtpRtcp);
_moduleProcessThread.DeRegisterModule(&_vcm); _moduleProcessThread.DeRegisterModule(&_vcm);
_moduleProcessThread.DeRegisterModule(&_vieSync); _moduleProcessThread.DeRegisterModule(&_vieSync);
while (_simulcastRtpRtcp.size() > 0)
{
std::list<RtpRtcp*>::iterator it = _simulcastRtpRtcp.begin();
RtpRtcp* rtpRtcp = *it;
rtpRtcp->RegisterIncomingRTCPCallback(NULL);
rtpRtcp->RegisterSendTransport(NULL);
_moduleProcessThread.DeRegisterModule(rtpRtcp);
RtpRtcp::DestroyRtpRtcp(rtpRtcp);
_simulcastRtpRtcp.erase(it);
}
if (_ptrDecodeThread) if (_ptrDecodeThread)
{ {
StopDecodeThread(); StopDecodeThread();
@@ -261,14 +271,11 @@ ViEChannel::~ViEChannel()
delete &_vieSync; delete &_vieSync;
delete &_callbackCritsect; delete &_callbackCritsect;
delete &_dataCritsect;
// Release modules // Release modules
RtpRtcp::DestroyRtpRtcp(&_rtpRtcp); RtpRtcp::DestroyRtpRtcp(&_rtpRtcp);
#ifndef WEBRTC_EXTERNAL_TRANSPORT #ifndef WEBRTC_EXTERNAL_TRANSPORT
UdpTransport::Destroy( UdpTransport::Destroy(&_socketTransport);
&_socketTransport);
#endif #endif
VideoCodingModule::Destroy(&_vcm); VideoCodingModule::Destroy(&_vcm);
} }
@@ -287,8 +294,12 @@ ViEChannel::~ViEChannel()
WebRtc_Word32 ViEChannel::SetSendCodec(const VideoCodec& videoCodec, WebRtc_Word32 ViEChannel::SetSendCodec(const VideoCodec& videoCodec,
bool newStream) bool newStream)
{ {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _channelId), WEBRTC_TRACE(webrtc::kTraceInfo,
"%s: codecType: %d", __FUNCTION__, videoCodec.codecType); webrtc::kTraceVideo,
ViEId(_engineId, _channelId),
"%s: codecType: %d",
__FUNCTION__,
videoCodec.codecType);
if (videoCodec.codecType == kVideoCodecRED || if (videoCodec.codecType == kVideoCodecRED ||
videoCodec.codecType == kVideoCodecULPFEC) videoCodec.codecType == kVideoCodecULPFEC)
@@ -299,7 +310,15 @@ WebRtc_Word32 ViEChannel::SetSendCodec(const VideoCodec& videoCodec,
__FUNCTION__, videoCodec.codecType); __FUNCTION__, videoCodec.codecType);
return -1; return -1;
} }
if (kMaxSimulcastStreams < videoCodec.numberOfSimulcastStreams)
{
WEBRTC_TRACE(webrtc::kTraceError,
webrtc::kTraceVideo,
ViEId(_engineId, _channelId),
"%s: Too many simulcast streams",
__FUNCTION__);
return -1;
}
// Update the RTP module with the settings // Update the RTP module with the settings
// Stop and Start the RTP module -> trigger new SSRC // Stop and Start the RTP module -> trigger new SSRC
bool restartRtp = false; bool restartRtp = false;
@@ -308,15 +327,168 @@ WebRtc_Word32 ViEChannel::SetSendCodec(const VideoCodec& videoCodec,
restartRtp = true; restartRtp = true;
_rtpRtcp.SetSendingStatus(false); _rtpRtcp.SetSendingStatus(false);
} }
if (videoCodec.numberOfSimulcastStreams > 0)
if (_rtpRtcp.SetSendBitrate(videoCodec.startBitrate * 1000,
videoCodec.minBitrate, videoCodec.maxBitrate)
!= 0)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, WebRtc_UWord32 startBitrate = videoCodec.startBitrate * 1000;
ViEId(_engineId, _channelId), WebRtc_UWord32 streamBitrate = std::min(startBitrate,
"%s: could not set send bitrates", __FUNCTION__); videoCodec.simulcastStream[0].maxBitrate);
return -1; startBitrate -= streamBitrate;
// Set the correct bitrate for the base layer
if (_rtpRtcp.SetSendBitrate(
streamBitrate,
videoCodec.minBitrate,
videoCodec.simulcastStream[0].maxBitrate) != 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_engineId, _channelId),
"%s: could not set send bitrates",
__FUNCTION__);
return -1;
}
// Create our simulcast RTP modules
for (int i = _simulcastRtpRtcp.size();
i < videoCodec.numberOfSimulcastStreams - 1;
i++)
{
RtpRtcp* rtpRtcp = RtpRtcp::CreateRtpRtcp(
ViEModuleId(_engineId, _channelId),
false);
if (rtpRtcp->RegisterDefaultModule(_defaultRtpRtcp))
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_engineId, _channelId),
"%s: could not register default module",
__FUNCTION__);
return -1;
}
_simulcastRtpRtcp.push_back(rtpRtcp);
}
// Remove last in list if we have too many
for (int j = _simulcastRtpRtcp.size();
j > (videoCodec.numberOfSimulcastStreams - 1);
j--)
{
RtpRtcp* rtpRtcp = _simulcastRtpRtcp.back();
rtpRtcp->RegisterIncomingRTCPCallback(NULL);
rtpRtcp->RegisterSendTransport(NULL);
_moduleProcessThread.DeRegisterModule(rtpRtcp);
RtpRtcp::DestroyRtpRtcp(rtpRtcp);
_simulcastRtpRtcp.pop_back();
}
VideoCodec videoCodec;
if (_vcm.Codec(kVideoCodecVP8, &videoCodec) != VCM_OK)
{
WEBRTC_TRACE(webrtc::kTraceWarning,
webrtc::kTraceVideo,
ViEId(_engineId, _channelId),
"%s: VCM: failure geting default VP8 plType",
__FUNCTION__);
return -1;
}
WebRtc_UWord8 idx = 0;
// Configure all simulcast modules
for (std::list<RtpRtcp*>::iterator it = _simulcastRtpRtcp.begin();
it != _simulcastRtpRtcp.end();
it++)
{
idx++;
RtpRtcp* rtpRtcp = *it;
if (rtpRtcp->InitSender() != 0)
{
WEBRTC_TRACE(webrtc::kTraceError,
webrtc::kTraceVideo,
ViEId(_engineId, _channelId),
"%s: RTP::InitSender failure",
__FUNCTION__);
return -1;
}
if (rtpRtcp->InitReceiver() != 0)
{
WEBRTC_TRACE(webrtc::kTraceError,
webrtc::kTraceVideo,
ViEId(_engineId, _channelId),
"%s: RTP::InitReceiver failure",
__FUNCTION__);
return -1;
}
if (rtpRtcp->RegisterSendTransport((Transport*) &_vieSender) != 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_engineId, _channelId),
"%s: RTP::RegisterSendTransport failure",
__FUNCTION__);
return -1;
}
if (_moduleProcessThread.RegisterModule(rtpRtcp) != 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_engineId, _channelId),
"%s: RTP::RegisterModule failure", __FUNCTION__);
return -1;
}
if (rtpRtcp->SetRTCPStatus(_rtpRtcp.RTCP()) != 0)
{
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo,
ViEId(_engineId, _channelId),
"%s: RTP::SetRTCPStatus failure", __FUNCTION__);
}
rtpRtcp->DeRegisterSendPayload(videoCodec.plType);
if (rtpRtcp->RegisterSendPayload(videoCodec) != 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_engineId, _channelId),
"%s: could not register payload type",
__FUNCTION__);
return -1;
}
if (restartRtp)
{
rtpRtcp->SetSendingStatus(true);
}
// Configure min and max bitrates for each simulcast stream
const WebRtc_UWord32 streamBitrate = std::min(startBitrate,
videoCodec.simulcastStream[idx].maxBitrate);
startBitrate -= streamBitrate;
if (rtpRtcp->SetSendBitrate(
streamBitrate,
videoCodec.minBitrate,
videoCodec.simulcastStream[idx].maxBitrate) != 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_engineId, _channelId),
"%s: could not set send bitrates",
__FUNCTION__);
return -1;
}
}
_vieReceiver.RegisterSimulcastRtpRtcpModules(_simulcastRtpRtcp);
} else
{
if (!_simulcastRtpRtcp.empty())
{
// Delete all simulcast RTP modules
while (!_simulcastRtpRtcp.empty())
{
RtpRtcp* rtpRtcp = _simulcastRtpRtcp.back();
rtpRtcp->RegisterIncomingRTCPCallback(NULL);
rtpRtcp->RegisterSendTransport(NULL);
_moduleProcessThread.DeRegisterModule(rtpRtcp);
RtpRtcp::DestroyRtpRtcp(rtpRtcp);
_simulcastRtpRtcp.pop_back();
}
}
// Clear any previously registered modules
_vieReceiver.RegisterSimulcastRtpRtcpModules(_simulcastRtpRtcp);
if (_rtpRtcp.SetSendBitrate(videoCodec.startBitrate * 1000,
videoCodec.minBitrate,
videoCodec.maxBitrate) != 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_engineId, _channelId),
"%s: could not set send bitrates", __FUNCTION__);
return -1;
}
} }
/* TODO Enable this if H264 is available. /* TODO Enable this if H264 is available.
* This sets the wanted packetization mode. * This sets the wanted packetization mode.
@@ -336,17 +508,16 @@ WebRtc_Word32 ViEChannel::SetSendCodec(const VideoCodec& videoCodec,
} }
}*/ }*/
// Don't log this error, no way to chek in advance if this plType is // Don't log this error, no way to check in advance if this plType is
// registered or not... // registered or not...
_rtpRtcp.DeRegisterSendPayload(videoCodec.plType); _rtpRtcp.DeRegisterSendPayload(videoCodec.plType);
if (_rtpRtcp.RegisterSendPayload(videoCodec.plName, videoCodec.plType) != 0) if (_rtpRtcp.RegisterSendPayload(videoCodec) != 0)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_engineId, _channelId), ViEId(_engineId, _channelId),
"%s: could not register payload type", __FUNCTION__); "%s: could not register payload type", __FUNCTION__);
return -1; return -1;
} }
if (restartRtp) if (restartRtp)
{ {
_rtpRtcp.SetSendingStatus(true); _rtpRtcp.SetSendingStatus(true);
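The allocation policy in the simulcast branch above hands the start bitrate to the streams in list order: each stream is capped at its own maxBitrate and whatever remains cascades to the next stream. A minimal standalone sketch of the same arithmetic (function name is illustrative; units are whatever the RTP module expects):

    #include <algorithm>

    // Stream i gets min(remaining, streamMaxBps[i]); the leftover flows to i+1.
    void DistributeStartBitrate(unsigned int remainingBps,
                                const unsigned int* streamMaxBps,
                                unsigned int* streamAllocBps,
                                int numStreams)
    {
        for (int i = 0; i < numStreams; i++)
        {
            streamAllocBps[i] = std::min(remainingBps, streamMaxBps[i]);
            remainingBps -= streamAllocBps[i];
        }
    }

For example, with a start budget of 1000 and per-stream caps of 150, 500 and 1200, the three streams are allocated 150, 500 and 350 respectively.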
@@ -360,11 +531,12 @@ WebRtc_Word32 ViEChannel::SetSendCodec(const VideoCodec& videoCodec,
WebRtc_Word32 ViEChannel::SetReceiveCodec(const VideoCodec& videoCodec) WebRtc_Word32 ViEChannel::SetReceiveCodec(const VideoCodec& videoCodec)
{ {
// We will not receive simulcast streams, so there is no need to handle that use case.
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _channelId), WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _channelId),
"%s", __FUNCTION__); "%s", __FUNCTION__);
_rtpRtcp.DeRegisterReceivePayload(videoCodec.plType); _rtpRtcp.DeRegisterReceivePayload(videoCodec.plType);
if (_rtpRtcp.RegisterReceivePayload(videoCodec.plName, videoCodec.plType) if (_rtpRtcp.RegisterReceivePayload(videoCodec)
!= 0) != 0)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
@@ -598,9 +770,18 @@ WebRtc_Word32 ViEChannel::SetSignalPacketLossStatus(bool enable,
WebRtc_Word32 ViEChannel::SetRTCPMode(const RTCPMethod rtcpMode) WebRtc_Word32 ViEChannel::SetRTCPMode(const RTCPMethod rtcpMode)
{ {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _channelId), WEBRTC_TRACE(webrtc::kTraceInfo,
"%s: %d", __FUNCTION__, rtcpMode); webrtc::kTraceVideo,
ViEId(_engineId, _channelId),
"%s: %d", __FUNCTION__, rtcpMode);
for (std::list<RtpRtcp*>::iterator it = _simulcastRtpRtcp.begin();
it != _simulcastRtpRtcp.end();
it++)
{
RtpRtcp* rtpRtcp = *it;
rtpRtcp->SetRTCPStatus(rtcpMode);
}
return _rtpRtcp.SetRTCPStatus(rtcpMode); return _rtpRtcp.SetRTCPStatus(rtcpMode);
} }
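The loop over _simulcastRtpRtcp in SetRTCPMode is the first of several identical fan-outs; the NACK, FEC, MTU and start/stop-sending paths below repeat it. A hypothetical C++03-compatible helper that could factor the pattern out (the commit itself keeps the loops inline; it needs <list> and the RtpRtcp declaration, and assumes the targeted member function is not overloaded):

    // Applies a one-argument RtpRtcp member function to every simulcast module.
    template <typename Method, typename Arg>
    static void ForEachSimulcastModule(std::list<RtpRtcp*>& modules,
                                       Method method, Arg arg)
    {
        for (std::list<RtpRtcp*>::iterator it = modules.begin();
             it != modules.end(); ++it)
        {
            ((*it)->*method)(arg);
        }
    }

    // Usage sketch:
    // ForEachSimulcastModule(_simulcastRtpRtcp, &RtpRtcp::SetRTCPStatus, rtcpMode);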
@@ -682,17 +863,34 @@ WebRtc_Word32 ViEChannel::ProcessNACKRequest(const bool enable)
ViEId(_engineId, _channelId), ViEId(_engineId, _channelId),
"%s: Using NACK method %d", __FUNCTION__, nackMethod); "%s: Using NACK method %d", __FUNCTION__, nackMethod);
_rtpRtcp.SetStorePacketsStatus(true, kNackHistorySize); _rtpRtcp.SetStorePacketsStatus(true, kNackHistorySize);
_vcm.RegisterPacketRequestCallback(this); _vcm.RegisterPacketRequestCallback(this);
for (std::list<RtpRtcp*>::iterator it = _simulcastRtpRtcp.begin();
it != _simulcastRtpRtcp.end();
it++)
{
RtpRtcp* rtpRtcp = *it;
rtpRtcp->SetStorePacketsStatus(true, kNackHistorySize);
}
} }
else else
{ {
for (std::list<RtpRtcp*>::iterator it = _simulcastRtpRtcp.begin();
it != _simulcastRtpRtcp.end();
it++)
{
RtpRtcp* rtpRtcp = *it;
rtpRtcp->SetStorePacketsStatus(false);
}
_rtpRtcp.SetStorePacketsStatus(false); _rtpRtcp.SetStorePacketsStatus(false);
_vcm.RegisterPacketRequestCallback(NULL); _vcm.RegisterPacketRequestCallback(NULL);
if (_rtpRtcp.SetNACKStatus(kNackOff) != 0) if (_rtpRtcp.SetNACKStatus(kNackOff) != 0)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, WEBRTC_TRACE(webrtc::kTraceError,
ViEId(_engineId, _channelId), webrtc::kTraceVideo,
"%s: Could not turn off NACK", __FUNCTION__); ViEId(_engineId, _channelId),
"%s: Could not turn off NACK", __FUNCTION__);
return -1; return -1;
} }
} }
@@ -735,7 +933,13 @@ ViEChannel::ProcessFECRequest(const bool enable,
enable); enable);
return -1; return -1;
} }
for (std::list<RtpRtcp*>::iterator it = _simulcastRtpRtcp.begin();
it != _simulcastRtpRtcp.end();
it++)
{
RtpRtcp* rtpRtcp = *it;
rtpRtcp->SetGenericFECStatus(enable, payloadTypeRED, payloadTypeFEC);
}
return 0; return 0;
} }
@@ -820,12 +1024,32 @@ WebRtc_Word32 ViEChannel::EnableKeyFrameRequestCallback(const bool enable)
// Sets SSRC for outgoing stream // Sets SSRC for outgoing stream
// ---------------------------------------------------------------------------- // ----------------------------------------------------------------------------
WebRtc_Word32 ViEChannel::SetSSRC(const WebRtc_UWord32 SSRC) WebRtc_Word32 ViEChannel::SetSSRC(const WebRtc_UWord32 SSRC,
const StreamType /*usage*/,
const unsigned char simulcastIdx)
{ {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _channelId), // TODO(pwestin) add support for streamType when we add RTX
"%s(SSRC: %u)", __FUNCTION__, SSRC); WEBRTC_TRACE(webrtc::kTraceInfo,
webrtc::kTraceVideo,
ViEId(_engineId, _channelId),
"%s(SSRC: %u, idx:%u)",
__FUNCTION__, SSRC, simulcastIdx);
return _rtpRtcp.SetSSRC(SSRC); if (simulcastIdx == 0)
{
return _rtpRtcp.SetSSRC(SSRC);
}
std::list<RtpRtcp*>::const_iterator it = _simulcastRtpRtcp.begin();
for (int i = 1; i < simulcastIdx; i++)
{
it++;
if (it == _simulcastRtpRtcp.end())
{
return -1;
}
}
RtpRtcp* rtpRtcp = *it;
return rtpRtcp->SetSSRC(SSRC);
} }
// ---------------------------------------------------------------------------- // ----------------------------------------------------------------------------
@@ -1102,9 +1326,21 @@ WebRtc_Word32 ViEChannel::GetSendRtcpStatistics(WebRtc_UWord16& fractionLost,
WebRtc_UWord32& jitterSamples, WebRtc_UWord32& jitterSamples,
WebRtc_Word32& rttMs) WebRtc_Word32& rttMs)
{ {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _channelId), WEBRTC_TRACE(webrtc::kTraceInfo,
"%s", __FUNCTION__); webrtc::kTraceVideo,
ViEId(_engineId, _channelId),
"%s", __FUNCTION__);
/*
TODO(pwestin): how should this work for simulcast? Average across the
streams for all fields except cumulativeLost, which should be the sum?
for (std::list<RtpRtcp*>::const_iterator it = _simulcastRtpRtcp.begin();
it != _simulcastRtpRtcp.end();
it++)
{
RtpRtcp* rtpRtcp = *it;
}
*/
WebRtc_UWord32 remoteSSRC = _rtpRtcp.RemoteSSRC(); WebRtc_UWord32 remoteSSRC = _rtpRtcp.RemoteSSRC();
RTCPReportBlock remoteStat; RTCPReportBlock remoteStat;
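The TODO above is left open by this commit; the statistics returned here still come from the default module only. One plausible aggregation, sketched under stated assumptions: the per-module retrieval call (GetRemoteReportBlock) is hypothetical, and the policy of summing cumulativeLost, averaging fractionLost and taking the largest RTT is only one possible answer, not the committed behavior.

    WebRtc_UWord32 lostSum = 0;
    WebRtc_UWord32 fractionSum = 0;
    WebRtc_Word32 maxRttMs = 0;
    WebRtc_UWord32 numReports = 0;
    for (std::list<RtpRtcp*>::const_iterator it = _simulcastRtpRtcp.begin();
         it != _simulcastRtpRtcp.end(); ++it)
    {
        RTCPReportBlock stat;
        WebRtc_Word32 rtt = 0;
        if (GetRemoteReportBlock(*it, &stat, &rtt))  // hypothetical helper
        {
            lostSum += stat.cumulativeLost;
            fractionSum += stat.fractionLost;
            if (rtt > maxRttMs)
            {
                maxRttMs = rtt;
            }
            numReports++;
        }
    }
    // fractionLost = fractionSum / numReports (when numReports > 0),
    // cumulativeLost = lostSum, rttMs = maxRttMs.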
@@ -1179,20 +1415,37 @@ WebRtc_Word32 ViEChannel::GetReceivedRtcpStatistics(
// ---------------------------------------------------------------------------- // ----------------------------------------------------------------------------
WebRtc_Word32 ViEChannel::GetRtpStatistics( WebRtc_Word32 ViEChannel::GetRtpStatistics(
WebRtc_UWord32& bytesSent, WebRtc_UWord32& packetsSent, WebRtc_UWord32& bytesSent,
WebRtc_UWord32& bytesReceived, WebRtc_UWord32& packetsReceived) const WebRtc_UWord32& packetsSent,
WebRtc_UWord32& bytesReceived,
WebRtc_UWord32& packetsReceived) const
{ {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _channelId), WEBRTC_TRACE(webrtc::kTraceInfo,
"%s", __FUNCTION__); webrtc::kTraceVideo,
ViEId(_engineId, _channelId),
"%s", __FUNCTION__);
if (_rtpRtcp.DataCountersRTP(&bytesSent, &packetsSent, &bytesReceived, if (_rtpRtcp.DataCountersRTP(&bytesSent,
&packetsSent,
&bytesReceived,
&packetsReceived) != 0) &packetsReceived) != 0)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_engineId, _channelId), "%s: Could not get RTT", ViEId(_engineId, _channelId),
__FUNCTION__); "%s: Could not get counters", __FUNCTION__);
return -1; return -1;
} }
for (std::list<RtpRtcp*>::const_iterator it = _simulcastRtpRtcp.begin();
it != _simulcastRtpRtcp.end();
it++)
{
WebRtc_UWord32 bytesSentTemp = 0;
WebRtc_UWord32 packetsSentTemp = 0;
RtpRtcp* rtpRtcp = *it;
rtpRtcp->DataCountersRTP(&bytesSentTemp, &packetsSentTemp, NULL, NULL);
bytesSent += bytesSentTemp;
packetsSent += packetsSentTemp;
}
return 0; return 0;
} }
@@ -1706,6 +1959,14 @@ WebRtc_Word32 ViEChannel::StartSend()
"%s: Could not start sending RTP", __FUNCTION__); "%s: Could not start sending RTP", __FUNCTION__);
return -1; return -1;
} }
for (std::list<RtpRtcp*>::const_iterator it = _simulcastRtpRtcp.begin();
it != _simulcastRtpRtcp.end();
it++)
{
RtpRtcp* rtpRtcp = *it;
rtpRtcp->SetSendingMediaStatus(true);
rtpRtcp->SetSendingStatus(true);
}
return 0; return 0;
} }
@@ -1715,10 +1976,19 @@ WebRtc_Word32 ViEChannel::StartSend()
WebRtc_Word32 ViEChannel::StopSend() WebRtc_Word32 ViEChannel::StopSend()
{ {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _channelId), WEBRTC_TRACE(webrtc::kTraceInfo,
"%s", __FUNCTION__); webrtc::kTraceVideo,
ViEId(_engineId, _channelId),
"%s", __FUNCTION__);
_rtpRtcp.SetSendingMediaStatus(false); _rtpRtcp.SetSendingMediaStatus(false);
for (std::list<RtpRtcp*>::iterator it = _simulcastRtpRtcp.begin();
it != _simulcastRtpRtcp.end();
it++)
{
RtpRtcp* rtpRtcp = *it;
rtpRtcp->SetSendingMediaStatus(false);
}
if (_rtpRtcp.RTPKeepalive()) if (_rtpRtcp.RTPKeepalive())
{ {
// Don't turn off sending since we'll send keep alive packets // Don't turn off sending since we'll send keep alive packets
@@ -1740,6 +2010,14 @@ WebRtc_Word32 ViEChannel::StopSend()
"%s: could not stop RTP sending", __FUNCTION__); "%s: could not stop RTP sending", __FUNCTION__);
return -1; return -1;
} }
for (std::list<RtpRtcp*>::iterator it = _simulcastRtpRtcp.begin();
it != _simulcastRtpRtcp.end();
it++)
{
RtpRtcp* rtpRtcp = *it;
rtpRtcp->ResetSendDataCountersRTP();
rtpRtcp->SetSendingStatus(false);
}
return 0; return 0;
} }
@@ -2348,14 +2626,23 @@ WebRtc_Word32 ViEChannel::GetSendGQoS(bool& enabled,
WebRtc_Word32 ViEChannel::SetMTU(WebRtc_UWord16 mtu) WebRtc_Word32 ViEChannel::SetMTU(WebRtc_UWord16 mtu)
{ {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _channelId), WEBRTC_TRACE(webrtc::kTraceInfo,
"%s", __FUNCTION__); webrtc::kTraceVideo,
ViEId(_engineId, _channelId),
"%s", __FUNCTION__);
if (_rtpRtcp.SetMaxTransferUnit(mtu) != 0) if (_rtpRtcp.SetMaxTransferUnit(mtu) != 0)
{ {
// Logging done // Logging done
return -1; return -1;
} }
for (std::list<RtpRtcp*>::iterator it = _simulcastRtpRtcp.begin();
it != _simulcastRtpRtcp.end();
it++)
{
RtpRtcp* rtpRtcp = *it;
rtpRtcp->SetMaxTransferUnit(mtu);
}
return 0; return 0;
} }
@@ -2581,9 +2868,18 @@ WebRtc_Word32 ViEChannel::EnableColorEnhancement(bool enable)
WebRtc_Word32 ViEChannel::RegisterSendRtpRtcpModule( WebRtc_Word32 ViEChannel::RegisterSendRtpRtcpModule(
RtpRtcp& sendRtpRtcpModule) RtpRtcp& sendRtpRtcpModule)
{ {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _channelId), WEBRTC_TRACE(webrtc::kTraceInfo,
"%s", __FUNCTION__); webrtc::kTraceVideo,
return _rtpRtcp.RegisterDefaultModule(&sendRtpRtcpModule); ViEId(_engineId, _channelId),
"%s", __FUNCTION__);
WebRtc_Word32 retVal = _rtpRtcp.RegisterDefaultModule(&sendRtpRtcpModule);
if (retVal == 0)
{
// we need to store this for the SetSendCodec call
_defaultRtpRtcp = &sendRtpRtcpModule;
}
return retVal;
} }
// ---------------------------------------------------------------------------- // ----------------------------------------------------------------------------
@@ -2595,8 +2891,20 @@ WebRtc_Word32 ViEChannel::RegisterSendRtpRtcpModule(
WebRtc_Word32 ViEChannel::DeregisterSendRtpRtcpModule() WebRtc_Word32 ViEChannel::DeregisterSendRtpRtcpModule()
{ {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _channelId), WEBRTC_TRACE(webrtc::kTraceInfo,
"%s", __FUNCTION__); webrtc::kTraceVideo,
ViEId(_engineId, _channelId),
"%s", __FUNCTION__);
_defaultRtpRtcp = NULL;
for (std::list<RtpRtcp*>::const_iterator it = _simulcastRtpRtcp.begin();
it != _simulcastRtpRtcp.end();
it++)
{
RtpRtcp* rtpRtcp = *it;
rtpRtcp->DeRegisterDefaultModule();
}
return _rtpRtcp.DeRegisterDefaultModule(); return _rtpRtcp.DeRegisterDefaultModule();
} }

View File

@@ -15,11 +15,14 @@
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_CHANNEL_H_ #ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_CHANNEL_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_CHANNEL_H_ #define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_CHANNEL_H_
#include <list>
// Defines // Defines
#include "vie_defines.h" #include "vie_defines.h"
#include "typedefs.h" #include "typedefs.h"
#include "vie_network.h" #include "vie_network.h"
#include "vie_rtp_rtcp.h"
#include "rtp_rtcp_defines.h" #include "rtp_rtcp_defines.h"
#include "udp_transport.h" #include "udp_transport.h"
#include "video_coding_defines.h" #include "video_coding_defines.h"
@@ -117,14 +120,15 @@ public:
const unsigned char payloadTypeRED, const unsigned char payloadTypeRED,
const unsigned char payloadTypeFEC); const unsigned char payloadTypeFEC);
WebRtc_Word32 WebRtc_Word32 SetKeyFrameRequestMethod(const KeyFrameRequestMethod method);
SetKeyFrameRequestMethod(const KeyFrameRequestMethod method);
WebRtc_Word32 EnableTMMBR(const bool enable); WebRtc_Word32 EnableTMMBR(const bool enable);
WebRtc_Word32 EnableKeyFrameRequestCallback(const bool enable); WebRtc_Word32 EnableKeyFrameRequestCallback(const bool enable);
WebRtc_Word32 SetSSRC(const WebRtc_UWord32 SSRC); WebRtc_Word32 SetSSRC(const WebRtc_UWord32 SSRC,
const StreamType usage,
const unsigned char simulcastIdx);
WebRtc_Word32 GetLocalSSRC(WebRtc_UWord32& SSRC); WebRtc_Word32 GetLocalSSRC(WebRtc_UWord32& SSRC);
@@ -431,11 +435,11 @@ private:
// Critical sections // Critical sections
// Used for all registered callbacks except rendering. // Used for all registered callbacks except rendering.
CriticalSectionWrapper& _callbackCritsect; CriticalSectionWrapper& _callbackCritsect;
// Use the same as above instead a seperate?
CriticalSectionWrapper& _dataCritsect;
// Owned modules/classes // Owned modules/classes
RtpRtcp& _rtpRtcp; RtpRtcp& _rtpRtcp;
RtpRtcp* _defaultRtpRtcp;
std::list<RtpRtcp*> _simulcastRtpRtcp;
#ifndef WEBRTC_EXTERNAL_TRANSPORT #ifndef WEBRTC_EXTERNAL_TRANSPORT
UdpTransport& _socketTransport; UdpTransport& _socketTransport;
#endif #endif
@@ -480,7 +484,5 @@ private:
//Recording //Recording
ViEFileRecorder _fileRecorder; ViEFileRecorder _fileRecorder;
}; };
} // namespace webrtc } // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_CHANNEL_H_ #endif // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_CHANNEL_H_

View File

@@ -214,11 +214,28 @@ int ViEChannelManager::CreateChannel(int& channelId, int originalChannel)
if (vieEncoder == NULL) if (vieEncoder == NULL)
{ {
// The original channel doesn't exist // The original channel doesn't exist
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId), WEBRTC_TRACE(webrtc::kTraceError,
"%s: Original channel doesn't exist", __FUNCTION__, webrtc::kTraceVideo,
originalChannel); ViEId(_engineId),
"%s: Original channel doesn't exist",
__FUNCTION__,
originalChannel);
return -1; return -1;
} }
VideoCodec videoCodec;
if (vieEncoder->GetEncoder(videoCodec) == 0)
{
if (videoCodec.numberOfSimulcastStreams > 0)
{
WEBRTC_TRACE(webrtc::kTraceError,
webrtc::kTraceVideo,
ViEId(_engineId, originalChannel),
"%s: Can't share a simulcast encoder",
__FUNCTION__);
return -1;
}
}
// Get a free id for the new channel // Get a free id for the new channel
if (GetFreeChannelId(channelId) == false) if (GetFreeChannelId(channelId) == false)
{ {

View File

@@ -248,6 +248,19 @@ int ViECodecImpl::SetSendCodec(const int videoChannel,
} }
newRtpStream = true; newRtpStream = true;
} }
if (videoCodecInternal.numberOfSimulcastStreams > 1)
{
if (cs.ChannelUsingViEEncoder(videoChannel))
{
// We don't allow simulcast channels to share an encoder
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"%s: Can't share simulcast encoder",
__FUNCTION__);
SetLastError(kViECodecInUse);
return -1;
}
}
ViEInputManagerScoped is(_inputManager); ViEInputManagerScoped is(_inputManager);
ViEFrameProviderBase* frameProvider = NULL; ViEFrameProviderBase* frameProvider = NULL;
@@ -958,7 +971,12 @@ bool ViECodecImpl::CodecValid(const VideoCodec& videoCodec)
"Invalid minBitrate: %u", videoCodec.minBitrate); "Invalid minBitrate: %u", videoCodec.minBitrate);
return false; return false;
} }
if (videoCodec.numberOfSimulcastStreams == 1)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, -1,
"Number of Simulcast streams can not be 1");
return false;
}
if (videoCodec.codecType == kVideoCodecH263) if (videoCodec.codecType == kVideoCodecH263)
{ {
if ((videoCodec.width == 704 && videoCodec.height == 576) if ((videoCodec.width == 704 && videoCodec.height == 576)
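Taken together with the ViEChannel and ViEChannelManager checks above, a valid configuration uses either zero simulcast entries or between two and kMaxSimulcastStreams of them. An illustrative setup for three VP8 streams; all concrete values are made up, payload name/type are omitted (they would normally come from VideoCodingModule::Codec()), and memset needs <cstring>:

    VideoCodec codec;
    memset(&codec, 0, sizeof(codec));
    codec.codecType = kVideoCodecVP8;
    codec.width = 1280;
    codec.height = 720;
    codec.maxFramerate = 30;
    codec.startBitrate = 1000;   // kbit/s; split across streams by ViEChannel
    codec.minBitrate = 50;
    codec.maxBitrate = 2000;
    codec.numberOfSimulcastStreams = 3;
    // Per-stream caps in bits/s, matching the comparison against
    // startBitrate * 1000 in ViEChannel::SetSendCodec above.
    const unsigned short widths[3] = {320, 640, 1280};
    const unsigned short heights[3] = {180, 360, 720};
    const unsigned int maxRates[3] = {150000, 500000, 1200000};
    for (int i = 0; i < 3; i++)
    {
        codec.simulcastStream[i].width = widths[i];
        codec.simulcastStream[i].height = heights[i];
        codec.simulcastStream[i].maxBitrate = maxRates[i];
        codec.simulcastStream[i].qpMax = 56;
        codec.simulcastStream[i].numberOfTemporalLayers = 1;
    }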

View File

@@ -44,23 +44,27 @@ ViEEncoder::ViEEncoder(WebRtc_Word32 engineId, WebRtc_Word32 channelId,
channelId))), channelId))),
_vpm(*webrtc::VideoProcessingModule::Create(ViEModuleId(engineId, _vpm(*webrtc::VideoProcessingModule::Create(ViEModuleId(engineId,
channelId))), channelId))),
_rtpRtcp(*RtpRtcp::CreateRtpRtcp(ViEModuleId(engineId, _defaultRtpRtcp(*RtpRtcp::CreateRtpRtcp(
channelId), ViEModuleId(engineId, channelId), false)),
false)),
_callbackCritsect(*CriticalSectionWrapper::CreateCriticalSection()), _callbackCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
_dataCritsect(*CriticalSectionWrapper::CreateCriticalSection()), _dataCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
_paused(false), _timeLastIntraRequestMs(0), _paused(false),
_channelsDroppingDeltaFrames(0), _dropNextFrame(false), _channelsDroppingDeltaFrames(0), _dropNextFrame(false),
_fecEnabled(false), _nackEnabled(false), _codecObserver(NULL), _fecEnabled(false), _nackEnabled(false), _codecObserver(NULL),
_effectFilter(NULL), _moduleProcessThread(moduleProcessThread), _effectFilter(NULL), _moduleProcessThread(moduleProcessThread),
_hasReceivedSLI(false), _pictureIdSLI(0), _hasReceivedRPSI(false), _hasReceivedSLI(false), _pictureIdSLI(0), _hasReceivedRPSI(false),
_pictureIdRPSI(0), _fileRecorder(channelId) _pictureIdRPSI(0), _fileRecorder(channelId)
{ {
WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo, WEBRTC_TRACE(webrtc::kTraceMemory,
webrtc::kTraceVideo,
ViEId(engineId, channelId), ViEId(engineId, channelId),
"%s(engineId: %d) 0x%p - Constructor", __FUNCTION__, engineId, "%s(engineId: %d) 0x%p - Constructor", __FUNCTION__, engineId,
this); this);
for (int i = 0; i < kMaxSimulcastStreams; i++)
{
_timeLastIntraRequestMs[i] = 0;
}
_vcm.InitializeSender(); _vcm.InitializeSender();
_vpm.EnableTemporalDecimation(true); _vpm.EnableTemporalDecimation(true);
@@ -68,10 +72,10 @@ ViEEncoder::ViEEncoder(WebRtc_Word32 engineId, WebRtc_Word32 channelId,
_vpm.EnableContentAnalysis(false); _vpm.EnableContentAnalysis(false);
_moduleProcessThread.RegisterModule(&_vcm); _moduleProcessThread.RegisterModule(&_vcm);
_rtpRtcp.InitSender(); _defaultRtpRtcp.InitSender();
_rtpRtcp.RegisterIncomingVideoCallback(this); _defaultRtpRtcp.RegisterIncomingVideoCallback(this);
_rtpRtcp.RegisterIncomingRTCPCallback(this); _defaultRtpRtcp.RegisterIncomingRTCPCallback(this);
_moduleProcessThread.RegisterModule(&_rtpRtcp); _moduleProcessThread.RegisterModule(&_defaultRtpRtcp);
// //
_qmCallback = new QMTestVideoSettingsCallback(); _qmCallback = new QMTestVideoSettingsCallback();
@@ -84,8 +88,8 @@ ViEEncoder::ViEEncoder(WebRtc_Word32 engineId, WebRtc_Word32 channelId,
if (_vcm.Codec(webrtc::kVideoCodecVP8, &videoCodec) == VCM_OK) if (_vcm.Codec(webrtc::kVideoCodecVP8, &videoCodec) == VCM_OK)
{ {
_vcm.RegisterSendCodec(&videoCodec, _numberOfCores, _vcm.RegisterSendCodec(&videoCodec, _numberOfCores,
_rtpRtcp.MaxDataPayloadLength()); _defaultRtpRtcp.MaxDataPayloadLength());
_rtpRtcp.RegisterSendPayload(videoCodec.plName, videoCodec.plType); _defaultRtpRtcp.RegisterSendPayload(videoCodec);
} }
else else
{ {
@@ -96,8 +100,8 @@ ViEEncoder::ViEEncoder(WebRtc_Word32 engineId, WebRtc_Word32 channelId,
if (_vcm.Codec(webrtc::kVideoCodecI420, &videoCodec) == VCM_OK) if (_vcm.Codec(webrtc::kVideoCodecI420, &videoCodec) == VCM_OK)
{ {
_vcm.RegisterSendCodec(&videoCodec, _numberOfCores, _vcm.RegisterSendCodec(&videoCodec, _numberOfCores,
_rtpRtcp.MaxDataPayloadLength()); _defaultRtpRtcp.MaxDataPayloadLength());
_rtpRtcp.RegisterSendPayload(videoCodec.plName, videoCodec.plType); _defaultRtpRtcp.RegisterSendPayload(videoCodec);
} }
else else
{ {
@@ -136,21 +140,21 @@ ViEEncoder::~ViEEncoder()
ViEId(_engineId, _channelId), ViEId(_engineId, _channelId),
"ViEEncoder Destructor 0x%p, engineId: %d", this, _engineId); "ViEEncoder Destructor 0x%p, engineId: %d", this, _engineId);
if (_rtpRtcp.NumberChildModules() > 0) if (_defaultRtpRtcp.NumberChildModules() > 0)
{ {
assert(false); assert(false);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_engineId, _channelId), ViEId(_engineId, _channelId),
"Channels still attached %d, leaking memory", "Channels still attached %d, leaking memory",
_rtpRtcp.NumberChildModules()); _defaultRtpRtcp.NumberChildModules());
return; return;
} }
_moduleProcessThread.DeRegisterModule(&_vcm); _moduleProcessThread.DeRegisterModule(&_vcm);
_moduleProcessThread.DeRegisterModule(&_vpm); _moduleProcessThread.DeRegisterModule(&_vpm);
_moduleProcessThread.DeRegisterModule(&_rtpRtcp); _moduleProcessThread.DeRegisterModule(&_defaultRtpRtcp);
delete &_vcm; delete &_vcm;
delete &_vpm; delete &_vpm;
delete &_rtpRtcp; delete &_defaultRtpRtcp;
delete &_callbackCritsect; delete &_callbackCritsect;
delete &_dataCritsect; delete &_dataCritsect;
delete _qmCallback; delete _qmCallback;
@@ -291,7 +295,7 @@ WebRtc_Word32 ViEEncoder::DeRegisterExternalEncoder(WebRtc_UWord8 plType)
// If the external encoder is the current send codec use vcm internal encoder // If the external encoder is the current send codec use vcm internal encoder
if (currentSendCodec.plType == plType) if (currentSendCodec.plType == plType)
{ {
WebRtc_UWord16 maxDataPayloadLength = _rtpRtcp.MaxDataPayloadLength(); WebRtc_UWord16 maxDataPayloadLength = _defaultRtpRtcp.MaxDataPayloadLength();
if (_vcm.RegisterSendCodec(&currentSendCodec, _numberOfCores, if (_vcm.RegisterSendCodec(&currentSendCodec, _numberOfCores,
maxDataPayloadLength) != VCM_OK) maxDataPayloadLength) != VCM_OK)
{ {
@@ -308,26 +312,33 @@ WebRtc_Word32 ViEEncoder::DeRegisterExternalEncoder(WebRtc_UWord8 plType)
// SetEncoder // SetEncoder
// ---------------------------------------------------------------------------- // ----------------------------------------------------------------------------
WebRtc_Word32 ViEEncoder::SetEncoder(const webrtc::VideoCodec& videoCodec) WebRtc_Word32 ViEEncoder::SetEncoder(const webrtc::VideoCodec& videoCodec)
{ {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, WEBRTC_TRACE(webrtc::kTraceInfo,
webrtc::kTraceVideo,
ViEId(_engineId, _channelId), ViEId(_engineId, _channelId),
"%s: CodecType: %d, width: %u, height: %u", "%s: CodecType: %d, width: %u, height: %u",
__FUNCTION__, videoCodec.codecType, videoCodec.width, __FUNCTION__,
videoCodec.height); videoCodec.codecType,
videoCodec.width,
videoCodec.height);
// Multiply startBitrate by 1000 because RTP module changed in API. // Multiply startBitrate by 1000 because RTP module changed in API.
if (_rtpRtcp.SetSendBitrate(videoCodec.startBitrate * 1000, if (_defaultRtpRtcp.SetSendBitrate(videoCodec.startBitrate * 1000,
videoCodec.minBitrate, videoCodec.maxBitrate) != 0) videoCodec.minBitrate,
videoCodec.maxBitrate) != 0)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, WEBRTC_TRACE(webrtc::kTraceError,
ViEId(_engineId, _channelId), webrtc::kTraceVideo,
"Could not set RTP module bitrates"); ViEId(_engineId, _channelId),
"Could not set RTP module bitrates");
return -1; return -1;
} }
// Setting target width and height for VPM // Setting target width and height for VPM
if (_vpm.SetTargetResolution(videoCodec.width, videoCodec.height, videoCodec.maxFramerate) != VPM_OK) if (_vpm.SetTargetResolution(videoCodec.width,
videoCodec.height,
videoCodec.maxFramerate) != VPM_OK)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_engineId, _channelId), ViEId(_engineId, _channelId),
@@ -336,7 +347,7 @@ WebRtc_Word32 ViEEncoder::SetEncoder(const webrtc::VideoCodec& videoCodec)
} }
if (_rtpRtcp.RegisterSendPayload(videoCodec.plName, videoCodec.plType) != 0) if (_defaultRtpRtcp.RegisterSendPayload(videoCodec) != 0)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_engineId, _channelId), ViEId(_engineId, _channelId),
@@ -344,7 +355,8 @@ WebRtc_Word32 ViEEncoder::SetEncoder(const webrtc::VideoCodec& videoCodec)
return -1; return -1;
} }
WebRtc_UWord16 maxDataPayloadLength = _rtpRtcp.MaxDataPayloadLength(); WebRtc_UWord16 maxDataPayloadLength =
_defaultRtpRtcp.MaxDataPayloadLength();
// update QM with MaxDataPayloadLength // update QM with MaxDataPayloadLength
_qmCallback->SetMaxPayloadLength(maxDataPayloadLength); _qmCallback->SetMaxPayloadLength(maxDataPayloadLength);
@@ -363,9 +375,9 @@ WebRtc_Word32 ViEEncoder::SetEncoder(const webrtc::VideoCodec& videoCodec)
// Set this module as sending right away, let the // Set this module as sending right away, let the
// slave module in the channel start and stop sending... // slave module in the channel start and stop sending...
if (_rtpRtcp.Sending() == false) if (_defaultRtpRtcp.Sending() == false)
{ {
if (_rtpRtcp.SetSendingStatus(true) != 0) if (_defaultRtpRtcp.SetSendingStatus(true) != 0)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId,
_channelId), _channelId),
@@ -462,14 +474,13 @@ RtpRtcp* ViEEncoder::SendRtpRtcpModule()
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _channelId), WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _channelId),
"%s", __FUNCTION__); "%s", __FUNCTION__);
return &_rtpRtcp; return &_defaultRtpRtcp;
} }
//============================================================================= //=============================================================================
// Data flow // Data flow
//============================================================================= //=============================================================================
// ---------------------------------------------------------------------------- // ----------------------------------------------------------------------------
// DeliverFrame // DeliverFrame
// Implements ViEFrameCallback::DeliverFrame // Implements ViEFrameCallback::DeliverFrame
@@ -485,7 +496,7 @@ void ViEEncoder::DeliverFrame(int id, webrtc::VideoFrame& videoFrame,
{ {
CriticalSectionScoped cs(_dataCritsect); CriticalSectionScoped cs(_dataCritsect);
if (_paused || _rtpRtcp.SendingMedia() == false) if (_paused || _defaultRtpRtcp.SendingMedia() == false)
{ {
// We're paused or have no channels attached; don't encode // We're paused or have no channels attached; don't encode
return; return;
@@ -524,14 +535,14 @@ void ViEEncoder::DeliverFrame(int id, webrtc::VideoFrame& videoFrame,
{ {
if (CSRC[i] == 1) if (CSRC[i] == 1)
{ {
tempCSRC[i] = _rtpRtcp.SSRC(); tempCSRC[i] = _defaultRtpRtcp.SSRC();
} }
else else
{ {
tempCSRC[i] = CSRC[i]; tempCSRC[i] = CSRC[i];
} }
} }
_rtpRtcp.SetCSRCs(tempCSRC, (WebRtc_UWord8) numCSRCs); _defaultRtpRtcp.SetCSRCs(tempCSRC, (WebRtc_UWord8) numCSRCs);
} }
#ifdef VIDEOCODEC_VP8 #ifdef VIDEOCODEC_VP8
@@ -539,7 +550,6 @@ void ViEEncoder::DeliverFrame(int id, webrtc::VideoFrame& videoFrame,
{ {
webrtc::CodecSpecificInfo codecSpecificInfo; webrtc::CodecSpecificInfo codecSpecificInfo;
codecSpecificInfo.codecType = webrtc::kVideoCodecUnknown; codecSpecificInfo.codecType = webrtc::kVideoCodecUnknown;
if (_hasReceivedSLI || _hasReceivedRPSI) if (_hasReceivedSLI || _hasReceivedRPSI)
{ {
webrtc::VideoCodec currentSendCodec; webrtc::VideoCodec currentSendCodec;
@@ -565,9 +575,11 @@ void ViEEncoder::DeliverFrame(int id, webrtc::VideoFrame& videoFrame,
} }
else if (ret != VPM_OK) else if (ret != VPM_OK)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, _channelId), WEBRTC_TRACE(webrtc::kTraceError,
"%s: Error preprocessing frame %u", __FUNCTION__, webrtc::kTraceVideo,
videoFrame.TimeStamp()); ViEId(_engineId, _channelId),
"%s: Error preprocessing frame %u", __FUNCTION__,
videoFrame.TimeStamp());
return; return;
} }
@@ -615,7 +627,7 @@ void ViEEncoder::DelayChanged(int id, int frameDelay)
WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, ViEId(_engineId, _channelId), WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, ViEId(_engineId, _channelId),
"%s: %u", __FUNCTION__, frameDelay); "%s: %u", __FUNCTION__, frameDelay);
_rtpRtcp.SetCameraDelay(frameDelay); _defaultRtpRtcp.SetCameraDelay(frameDelay);
_fileRecorder.SetFrameDelay(frameDelay); _fileRecorder.SetFrameDelay(frameDelay);
} }
// ---------------------------------------------------------------------------- // ----------------------------------------------------------------------------
@@ -651,10 +663,12 @@ int ViEEncoder::GetPreferedFrameSettings(int &width, int &height,
WebRtc_Word32 ViEEncoder::SendKeyFrame() WebRtc_Word32 ViEEncoder::SendKeyFrame()
{ {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _channelId), WEBRTC_TRACE(webrtc::kTraceInfo,
"%s", __FUNCTION__); webrtc::kTraceVideo,
ViEId(_engineId, _channelId),
"%s", __FUNCTION__);
return _vcm.FrameTypeRequest(kVideoFrameKey); return _vcm.FrameTypeRequest(kVideoFrameKey, 0); // Simulcast idx = 0
} }
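SendKeyFrame() above hard-codes simulcast index 0, so only the first stream is refreshed. A hedged sketch of what a per-stream variant could look like, assuming the configured send codec is cached in the _sendCodec member declared in vie_encoder.h (this is not what the commit does):

    // Request a key frame on every configured simulcast stream; index 0 is
    // also used when simulcast is off.
    WebRtc_UWord8 numStreams = _sendCodec.numberOfSimulcastStreams;
    if (numStreams == 0)
    {
        numStreams = 1;
    }
    for (WebRtc_UWord8 idx = 0; idx < numStreams; idx++)
    {
        _vcm.FrameTypeRequest(kVideoFrameKey, idx);
    }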
// ---------------------------------------------------------------------------- // ----------------------------------------------------------------------------
@@ -700,7 +714,7 @@ WebRtc_Word32 ViEEncoder::UpdateProtectionMethod()
WebRtc_UWord8 dummyPTypeFEC = 0; WebRtc_UWord8 dummyPTypeFEC = 0;
// check if fec is enabled // check if fec is enabled
WebRtc_Word32 error = _rtpRtcp.GenericFECStatus(fecEnabled, dummyPTypeRed, WebRtc_Word32 error = _defaultRtpRtcp.GenericFECStatus(fecEnabled, dummyPTypeRed,
dummyPTypeFEC); dummyPTypeFEC);
if (error) if (error)
{ {
@@ -708,7 +722,7 @@ WebRtc_Word32 ViEEncoder::UpdateProtectionMethod()
} }
// check if nack is enabled // check if nack is enabled
bool nackEnabled = (_rtpRtcp.NACK() == kNackOff) ? false : true; bool nackEnabled = (_defaultRtpRtcp.NACK() == kNackOff) ? false : true;
if (_fecEnabled == fecEnabled && _nackEnabled == nackEnabled) if (_fecEnabled == fecEnabled && _nackEnabled == nackEnabled)
{ {
// no change to current state // no change to current state
@@ -740,7 +754,7 @@ WebRtc_Word32 ViEEncoder::UpdateProtectionMethod()
webrtc::VideoCodec codec; webrtc::VideoCodec codec;
if (_vcm.SendCodec(&codec) == 0) if (_vcm.SendCodec(&codec) == 0)
{ {
WebRtc_UWord16 maxPayLoad = _rtpRtcp.MaxDataPayloadLength(); WebRtc_UWord16 maxPayLoad = _defaultRtpRtcp.MaxDataPayloadLength();
codec.startBitrate = _vcm.Bitrate(); codec.startBitrate = _vcm.Bitrate();
if (_vcm.RegisterSendCodec(&codec, _numberOfCores, maxPayLoad) != 0) if (_vcm.RegisterSendCodec(&codec, _numberOfCores, maxPayLoad) != 0)
{ {
@@ -775,7 +789,7 @@ ViEEncoder::SendData(const FrameType frameType,
const WebRtc_UWord8* payloadData, const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize, const WebRtc_UWord32 payloadSize,
const webrtc::RTPFragmentationHeader& fragmentationHeader, const webrtc::RTPFragmentationHeader& fragmentationHeader,
const RTPVideoTypeHeader* rtpTypeHdr) const RTPVideoHeader* rtpVideoHdr)
{ {
{ {
CriticalSectionScoped cs(_dataCritsect); CriticalSectionScoped cs(_dataCritsect);
@@ -786,19 +800,22 @@ ViEEncoder::SendData(const FrameType frameType,
} }
if (_channelsDroppingDeltaFrames && frameType == webrtc::kVideoFrameKey) if (_channelsDroppingDeltaFrames && frameType == webrtc::kVideoFrameKey)
{ {
WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, ViEId(_engineId, WEBRTC_TRACE(webrtc::kTraceStream,
_channelId), webrtc::kTraceVideo,
"%s: Sending key frame, drop next frame", __FUNCTION__); ViEId(_engineId, _channelId),
"%s: Sending key frame, drop next frame",
__FUNCTION__);
_dropNextFrame = true; _dropNextFrame = true;
} }
} }
// New encoded data, hand over to the rtp module // New encoded data, hand over to the rtp module
WebRtc_Word32 retVal = _rtpRtcp.SendOutgoingData(frameType, payloadType, return _defaultRtpRtcp.SendOutgoingData(frameType,
timeStamp, payloadData, payloadType,
payloadSize, timeStamp,
&fragmentationHeader, payloadData,
rtpTypeHdr); payloadSize,
return retVal; &fragmentationHeader,
rtpVideoHdr);
} }
//============================================================================= //=============================================================================
@@ -822,13 +839,13 @@ WebRtc_Word32 ViEEncoder::ProtectionRequest(const WebRtc_UWord8 deltaFECRate,
deltaFECRate, keyFECRate, deltaUseUepProtection, deltaFECRate, keyFECRate, deltaUseUepProtection,
keyUseUepProtection, nack); keyUseUepProtection, nack);
if (_rtpRtcp.SetFECCodeRate(keyFECRate, deltaFECRate) != 0) if (_defaultRtpRtcp.SetFECCodeRate(keyFECRate, deltaFECRate) != 0)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(_engineId, _channelId), ViEId(_engineId, _channelId),
"%s: Could not update FEC code rate", __FUNCTION__); "%s: Could not update FEC code rate", __FUNCTION__);
} }
if (_rtpRtcp.SetFECUepProtection(keyUseUepProtection, if (_defaultRtpRtcp.SetFECUepProtection(keyUseUepProtection,
deltaUseUepProtection) != 0) deltaUseUepProtection) != 0)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
@@ -922,49 +939,48 @@ void ViEEncoder::OnRPSIReceived(const WebRtc_Word32 id,
// ---------------------------------------------------------------------------- // ----------------------------------------------------------------------------
void ViEEncoder::OnReceivedIntraFrameRequest(const WebRtc_Word32 id, void ViEEncoder::OnReceivedIntraFrameRequest(const WebRtc_Word32 id,
const WebRtc_UWord8 message) const FrameType type,
const WebRtc_UWord8 streamIdx)
{ {
// Key frame request from other side, signal to VCM assert(streamIdx < kMaxSimulcastStreams);
WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, ViEId(_engineId, _channelId),
"%s", __FUNCTION__);
if (_timeLastIntraRequestMs + kViEMinKeyRequestIntervalMs // Key frame request from other side, signal to VCM
> TickTime::MillisecondTimestamp()) WEBRTC_TRACE(webrtc::kTraceStateInfo,
webrtc::kTraceVideo,
ViEId(_engineId, _channelId),
"%s", __FUNCTION__);
WebRtc_Word64 now = TickTime::MillisecondTimestamp();
if (_timeLastIntraRequestMs[streamIdx] + kViEMinKeyRequestIntervalMs > now)
{ {
WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, WEBRTC_TRACE(webrtc::kTraceStream,
ViEId(_engineId, _channelId), webrtc::kTraceVideo,
"%s: Not not encoding new intra due to timing", __FUNCTION__); ViEId(_engineId, _channelId),
"%s: Not not encoding new intra due to timing",
__FUNCTION__);
return; return;
} }
// Default message == 0... _vcm.FrameTypeRequest(type, streamIdx);
if (message == 0) _timeLastIntraRequestMs[streamIdx] = now;
{
_vcm.FrameTypeRequest(kVideoFrameKey);
} else
{
_vcm.FrameTypeRequest((FrameType) message);
}
_timeLastIntraRequestMs = TickTime::MillisecondTimestamp();
return;
} }
// ---------------------------------------------------------------------------- // ----------------------------------------------------------------------------
// OnNetworkChanged // OnNetworkChanged
// ---------------------------------------------------------------------------- // ----------------------------------------------------------------------------
void ViEEncoder::OnNetworkChanged(const WebRtc_Word32 id, void ViEEncoder::OnNetworkChanged(const WebRtc_Word32 id,
const WebRtc_UWord32 minBitrateBps, const WebRtc_UWord32 bitrateBps,
const WebRtc_UWord32 maxBitrateBps,
const WebRtc_UWord8 fractionLost, const WebRtc_UWord8 fractionLost,
const WebRtc_UWord16 roundTripTimeMs, const WebRtc_UWord16 roundTripTimeMs)
const WebRtc_UWord16 bwEstimateKbitMin,
const WebRtc_UWord16 bwEstimateKbitMax)
{ {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _channelId), WEBRTC_TRACE(webrtc::kTraceInfo,
"%s(minBitrateBps: %u, maxBitrateBps: %u,fractionLost: %u, rttMs: %u, bwEstMinKbit: %u, bwEstMaxKbit: %u", webrtc::kTraceVideo,
__FUNCTION__, minBitrateBps, maxBitrateBps, fractionLost, ViEId(_engineId, _channelId),
roundTripTimeMs, bwEstimateKbitMin, bwEstimateKbitMax); "%s(bitrateBps: %u, fractionLost: %u, rttMs: %u",
_vcm.SetChannelParameters(minBitrateBps / 1000, fractionLost, roundTripTimeMs); __FUNCTION__, bitrateBps, fractionLost, roundTripTimeMs);
return;
_vcm.SetChannelParameters(bitrateBps / 1000,
fractionLost,
roundTripTimeMs);
} }
WebRtc_Word32 ViEEncoder::RegisterEffectFilter(ViEEffectFilter* effectFilter) WebRtc_Word32 ViEEncoder::RegisterEffectFilter(ViEEffectFilter* effectFilter)
@@ -974,22 +990,32 @@ WebRtc_Word32 ViEEncoder::RegisterEffectFilter(ViEEffectFilter* effectFilter)
{ {
if (_effectFilter == NULL) if (_effectFilter == NULL)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, WEBRTC_TRACE(webrtc::kTraceError,
_channelId), webrtc::kTraceVideo,
"%s: no effect filter added", __FUNCTION__); ViEId(_engineId,_channelId),
"%s: no effect filter added",
__FUNCTION__);
return -1; return -1;
} }
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _channelId), WEBRTC_TRACE(webrtc::kTraceInfo,
"%s: deregister effect filter", __FUNCTION__); webrtc::kTraceVideo,
ViEId(_engineId, _channelId),
"%s: deregister effect filter",
__FUNCTION__);
} else } else
{ {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _channelId), WEBRTC_TRACE(webrtc::kTraceInfo,
"%s: register effect", __FUNCTION__); webrtc::kTraceVideo,
ViEId(_engineId, _channelId),
"%s: register effect",
__FUNCTION__);
if (_effectFilter) if (_effectFilter)
{ {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(_engineId, WEBRTC_TRACE(webrtc::kTraceError,
_channelId), webrtc::kTraceVideo,
"%s: effect filter already added ", __FUNCTION__); ViEId(_engineId,_channelId),
"%s: effect filter already added ",
__FUNCTION__);
return -1; return -1;
} }
} }

View File

@@ -61,8 +61,8 @@ public:
WebRtc_Word32 GetEncoder(VideoCodec& videoCodec); WebRtc_Word32 GetEncoder(VideoCodec& videoCodec);
WebRtc_Word32 GetCodecConfigParameters( WebRtc_Word32 GetCodecConfigParameters(
unsigned char configParameters[kConfigParameterSize], unsigned char configParameters[kConfigParameterSize],
unsigned char& configParametersSize); unsigned char& configParametersSize);
// Scale or crop/pad image // Scale or crop/pad image
WebRtc_Word32 ScaleInputImage(bool enable); WebRtc_Word32 ScaleInputImage(bool enable);
@@ -86,14 +86,14 @@ public:
// Loss protection // Loss protection
WebRtc_Word32 UpdateProtectionMethod(); WebRtc_Word32 UpdateProtectionMethod();
// Implements VCMPacketizationCallback // Implements VCMPacketizationCallback
virtual WebRtc_Word32 virtual WebRtc_Word32 SendData(
SendData(const FrameType frameType, const FrameType frameType,
const WebRtc_UWord8 payloadType, const WebRtc_UWord8 payloadType,
const WebRtc_UWord32 timeStamp, const WebRtc_UWord32 timeStamp,
const WebRtc_UWord8* payloadData, const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize, const WebRtc_UWord32 payloadSize,
const RTPFragmentationHeader& fragmentationHeader, const RTPFragmentationHeader& fragmentationHeader,
const RTPVideoTypeHeader* rtpTypeHdr); const RTPVideoHeader* rtpVideoHdr);
// Implements VideoProtectionCallback // Implements VideoProtectionCallback
virtual WebRtc_Word32 ProtectionRequest(const WebRtc_UWord8 deltaFECRate, virtual WebRtc_Word32 ProtectionRequest(const WebRtc_UWord8 deltaFECRate,
const WebRtc_UWord8 keyFECRate, const WebRtc_UWord8 keyFECRate,
@@ -113,14 +113,13 @@ public:
// Implements RtpVideoFeedback // Implements RtpVideoFeedback
virtual void OnReceivedIntraFrameRequest(const WebRtc_Word32 id, virtual void OnReceivedIntraFrameRequest(const WebRtc_Word32 id,
const WebRtc_UWord8 message = 0); const FrameType type,
const WebRtc_UWord8 streamIdx);
virtual void OnNetworkChanged(const WebRtc_Word32 id, virtual void OnNetworkChanged(const WebRtc_Word32 id,
const WebRtc_UWord32 minBitrateBps, const WebRtc_UWord32 bitrateBps,
const WebRtc_UWord32 maxBitrateBps,
const WebRtc_UWord8 fractionLost, const WebRtc_UWord8 fractionLost,
const WebRtc_UWord16 roundTripTimeMs, const WebRtc_UWord16 roundTripTimeMs);
const WebRtc_UWord16 bwEstimateKbitMin,
const WebRtc_UWord16 bwEstimateKbitMax);
// Effect filter // Effect filter
WebRtc_Word32 RegisterEffectFilter(ViEEffectFilter* effectFilter); WebRtc_Word32 RegisterEffectFilter(ViEEffectFilter* effectFilter);
//Recording //Recording
@@ -156,13 +155,13 @@ private:
VideoCodingModule& _vcm; VideoCodingModule& _vcm;
VideoProcessingModule& _vpm; VideoProcessingModule& _vpm;
RtpRtcp& _rtpRtcp; RtpRtcp& _defaultRtpRtcp;
CriticalSectionWrapper& _callbackCritsect; CriticalSectionWrapper& _callbackCritsect;
CriticalSectionWrapper& _dataCritsect; CriticalSectionWrapper& _dataCritsect;
VideoCodec _sendCodec; VideoCodec _sendCodec;
bool _paused; bool _paused;
WebRtc_Word64 _timeLastIntraRequestMs; WebRtc_Word64 _timeLastIntraRequestMs[kMaxSimulcastStreams];
WebRtc_Word32 _channelsDroppingDeltaFrames; WebRtc_Word32 _channelsDroppingDeltaFrames;
bool _dropNextFrame; bool _dropNextFrame;
//Loss protection //Loss protection

View File

@@ -33,7 +33,9 @@ ViEReceiver::ViEReceiver(int engineId, int channelId,
RtpRtcp& moduleRtpRtcp, RtpRtcp& moduleRtpRtcp,
VideoCodingModule& moduleVcm) VideoCodingModule& moduleVcm)
: _receiveCritsect(*CriticalSectionWrapper::CreateCriticalSection()), : _receiveCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
_engineId(engineId), _channelId(channelId), _rtpRtcp(moduleRtpRtcp), _engineId(engineId),
_channelId(channelId),
_rtpRtcp(moduleRtpRtcp),
_vcm(moduleVcm), _vcm(moduleVcm),
#ifdef WEBRTC_SRTP #ifdef WEBRTC_SRTP
_ptrSrtp(NULL), _ptrSrtp(NULL),
@@ -44,7 +46,6 @@ ViEReceiver::ViEReceiver(int engineId, int channelId,
_ptrExternalDecryption(NULL), _ptrDecryptionBuffer(NULL), _ptrExternalDecryption(NULL), _ptrDecryptionBuffer(NULL),
_rtpDump(NULL), _receiving(false) _rtpDump(NULL), _receiving(false)
{ {
_rtpRtcp.RegisterIncomingVideoCallback(this);
} }
// ---------------------------------------------------------------------------- // ----------------------------------------------------------------------------
@@ -118,6 +119,19 @@ int ViEReceiver::DeregisterExternalDecryption()
return 0; return 0;
} }
void ViEReceiver::RegisterSimulcastRtpRtcpModules(
const std::list<RtpRtcp*>& rtpModules)
{
CriticalSectionScoped cs(_receiveCritsect);
_rtpRtcpSimulcast.clear();
if (!rtpModules.empty())
{
_rtpRtcpSimulcast.insert(_rtpRtcpSimulcast.begin(),
rtpModules.begin(),
rtpModules.end());
}
}
#ifdef WEBRTC_SRTP #ifdef WEBRTC_SRTP
// ---------------------------------------------------------------------------- // ----------------------------------------------------------------------------
// RegisterSRTPModule // RegisterSRTPModule
@@ -217,7 +231,6 @@ void ViEReceiver::IncomingRTPPacket(const WebRtc_Word8* incomingRtpPacket,
const WebRtc_UWord16 fromPort) const WebRtc_UWord16 fromPort)
{ {
InsertRTPPacket(incomingRtpPacket, incomingRtpPacketLength); InsertRTPPacket(incomingRtpPacket, incomingRtpPacketLength);
return;
} }
// ---------------------------------------------------------------------------- // ----------------------------------------------------------------------------
@@ -232,7 +245,6 @@ void ViEReceiver::IncomingRTCPPacket(const WebRtc_Word8* incomingRtcpPacket,
const WebRtc_UWord16 fromPort) const WebRtc_UWord16 fromPort)
{ {
InsertRTCPPacket(incomingRtcpPacket, incomingRtcpPacketLength); InsertRTCPPacket(incomingRtcpPacket, incomingRtcpPacketLength);
return;
} }
// ---------------------------------------------------------------------------- // ----------------------------------------------------------------------------
@@ -420,6 +432,15 @@ int ViEReceiver::InsertRTCPPacket(const WebRtc_Word8* rtcpPacket,
(WebRtc_UWord16) receivedPacketLength); (WebRtc_UWord16) receivedPacketLength);
} }
} }
{
CriticalSectionScoped cs(_receiveCritsect);
std::list<RtpRtcp*>::iterator it = _rtpRtcpSimulcast.begin();
while (it != _rtpRtcpSimulcast.end())
{
RtpRtcp* rtpRtcp = *it++;
rtpRtcp->IncomingPacket(receivedPacket, receivedPacketLength);
}
}
return _rtpRtcp.IncomingPacket(receivedPacket, receivedPacketLength); return _rtpRtcp.IncomingPacket(receivedPacket, receivedPacketLength);
} }
@@ -508,24 +529,4 @@ int ViEReceiver::StopRTPDump()
} }
return 0; return 0;
} }
// Implements RtpVideoFeedback
void ViEReceiver::OnReceivedIntraFrameRequest(const WebRtc_Word32 id,
const WebRtc_UWord8 message)
{
// Don't do anything, action trigged on default module
return;
}
void ViEReceiver::OnNetworkChanged(const WebRtc_Word32 id,
const WebRtc_UWord32 minBitrateBps,
const WebRtc_UWord32 maxBitrateBps,
const WebRtc_UWord8 fractionLost,
const WebRtc_UWord16 roundTripTimeMs,
const WebRtc_UWord16 bwEstimateKbitMin,
const WebRtc_UWord16 bwEstimateKbitMax)
{
// Called for default module
return;
}
} // namespace webrtc } // namespace webrtc

View File

@@ -15,12 +15,13 @@
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_RECEIVER_H_ #ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_RECEIVER_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_RECEIVER_H_ #define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_RECEIVER_H_
// Defines #include <list>
#include "engine_configurations.h" #include "engine_configurations.h"
#include "vie_defines.h" #include "rtp_rtcp_defines.h"
#include "typedefs.h" #include "typedefs.h"
#include "udp_transport.h" #include "udp_transport.h"
#include "rtp_rtcp_defines.h" #include "vie_defines.h"
#ifdef WEBRTC_SRTP #ifdef WEBRTC_SRTP
class SrtpModule; class SrtpModule;
@@ -35,9 +36,7 @@ class RtpRtcp;
class VideoCodingModule; class VideoCodingModule;
class Encryption; class Encryption;
class ViEReceiver: public UdpTransportData, class ViEReceiver: public UdpTransportData, public RtpData
public RtpData,
public RtpVideoFeedback
{ {
public: public:
ViEReceiver(int engineId, int channelId, RtpRtcp& moduleRtpRtcp, ViEReceiver(int engineId, int channelId, RtpRtcp& moduleRtpRtcp,
@@ -47,6 +46,8 @@ public:
int RegisterExternalDecryption(Encryption* decryption); int RegisterExternalDecryption(Encryption* decryption);
int DeregisterExternalDecryption(); int DeregisterExternalDecryption();
void RegisterSimulcastRtpRtcpModules(const std::list<RtpRtcp*>& rtpModules);
#ifdef WEBRTC_SRTP #ifdef WEBRTC_SRTP
int RegisterSRTPModule(SrtpModule* srtpModule); int RegisterSRTPModule(SrtpModule* srtpModule);
int DeregisterSRTPModule(); int DeregisterSRTPModule();
@@ -76,21 +77,10 @@ public:
int ReceivedRTCPPacket(const void* rtcpPacket, int rtcpPacketLength); int ReceivedRTCPPacket(const void* rtcpPacket, int rtcpPacketLength);
// From RtpData, callback for data from RTP module // From RtpData, callback for data from RTP module
virtual WebRtc_Word32 virtual WebRtc_Word32 OnReceivedPayloadData(
OnReceivedPayloadData(const WebRtc_UWord8* payloadData, const WebRtc_UWord8* payloadData,
const WebRtc_UWord16 payloadSize, const WebRtc_UWord16 payloadSize,
const WebRtcRTPHeader* rtpHeader); const WebRtcRTPHeader* rtpHeader);
// Implements RtpVideoFeedback
virtual void OnReceivedIntraFrameRequest(const WebRtc_Word32 id,
const WebRtc_UWord8 message = 0);
virtual void OnNetworkChanged(const WebRtc_Word32 id,
const WebRtc_UWord32 minBitrateBps,
const WebRtc_UWord32 maxBitrateBps,
const WebRtc_UWord8 fractionLost,
const WebRtc_UWord16 roundTripTimeMs,
const WebRtc_UWord16 bwEstimateKbitMin,
const WebRtc_UWord16 bwEstimateKbitMax);
private: private:
int InsertRTPPacket(const WebRtc_Word8* rtpPacket, int rtpPacketLength); int InsertRTPPacket(const WebRtc_Word8* rtpPacket, int rtpPacketLength);
int InsertRTCPPacket(const WebRtc_Word8* rtcpPacket, int rtcpPacketLength); int InsertRTCPPacket(const WebRtc_Word8* rtcpPacket, int rtcpPacketLength);
@@ -99,6 +89,7 @@ private:
int _engineId; int _engineId;
int _channelId; int _channelId;
RtpRtcp& _rtpRtcp; RtpRtcp& _rtpRtcp;
std::list<RtpRtcp*> _rtpRtcpSimulcast;
VideoCodingModule& _vcm; VideoCodingModule& _vcm;
#ifdef WEBRTC_SRTP #ifdef WEBRTC_SRTP

View File

@@ -107,10 +107,14 @@ ViERTP_RTCPImpl::~ViERTP_RTCPImpl()
// ---------------------------------------------------------------------------- // ----------------------------------------------------------------------------
int ViERTP_RTCPImpl::SetLocalSSRC(const int videoChannel, int ViERTP_RTCPImpl::SetLocalSSRC(const int videoChannel,
const unsigned int SSRC) const unsigned int SSRC,
const StreamType usage,
const unsigned char simulcastIdx)
{ {
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo, WEBRTC_TRACE(webrtc::kTraceApiCall,
ViEId(_instanceId, videoChannel), "%s(channel: %d, SSRC: %d)", webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel),
"%s(channel: %d, SSRC: %d)",
__FUNCTION__, videoChannel, SSRC); __FUNCTION__, videoChannel, SSRC);
// Get the channel // Get the channel
@@ -119,19 +123,19 @@ int ViERTP_RTCPImpl::SetLocalSSRC(const int videoChannel,
if (ptrViEChannel == NULL) if (ptrViEChannel == NULL)
{ {
// The channel doesn't exist // The channel doesn't exist
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, WEBRTC_TRACE(webrtc::kTraceError,
webrtc::kTraceVideo,
ViEId(_instanceId, videoChannel), ViEId(_instanceId, videoChannel),
"%s: Channel %d doesn't exist", __FUNCTION__, "%s: Channel %d doesn't exist",
videoChannel); __FUNCTION__, videoChannel);
SetLastError(kViERtpRtcpInvalidChannelId); SetLastError(kViERtpRtcpInvalidChannelId);
return -1; return -1;
} }
if (ptrViEChannel->SetSSRC(SSRC) != 0) if (ptrViEChannel->SetSSRC(SSRC, usage, simulcastIdx) != 0)
{ {
SetLastError(kViERtpRtcpUnknownError); SetLastError(kViERtpRtcpUnknownError);
return -1; return -1;
} }
return 0; return 0;
} }
@@ -171,6 +175,14 @@ int ViERTP_RTCPImpl::GetLocalSSRC(const int videoChannel,
} }
int ViERTP_RTCPImpl::SetRemoteSSRCType(const int videoChannel,
const StreamType usage,
const unsigned int SSRC) const
{
// TODO(pwestin) add support for RTX
return -1;
}
// ---------------------------------------------------------------------------- // ----------------------------------------------------------------------------
// GetRemoteSSRC // GetRemoteSSRC
// //
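As a usage sketch of the extended SetLocalSSRC(), each simulcast stream gets its own SSRC keyed by its simulcast index (the SSRC values here are illustrative; the simulcast autotest below does the same thing with SSRCs 1-3):

for (unsigned char idx = 0; idx < 3; idx++)
{
    if (ptrViERtpRtcp->SetLocalSSRC(videoChannel,
                                    1000 + idx,  // SSRC, illustrative value
                                    webrtc::kViEStreamTypeNormal,
                                    idx) == -1)  // simulcast index
    {
        // handle error
    }
}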

View File

@@ -38,10 +38,17 @@ public:
virtual int Release(); virtual int Release();
// SSRC/CSRC // SSRC/CSRC
virtual int SetLocalSSRC(const int videoChannel, const unsigned int SSRC); virtual int SetLocalSSRC(const int videoChannel,
const unsigned int SSRC,
const StreamType usage,
const unsigned char simulcastIdx);
virtual int GetLocalSSRC(const int videoChannel, unsigned int& SSRC) const; virtual int GetLocalSSRC(const int videoChannel, unsigned int& SSRC) const;
virtual int SetRemoteSSRCType(const int videoChannel,
const StreamType usage,
const unsigned int SSRC) const;
virtual int GetRemoteSSRC(const int videoChannel, unsigned int& SSRC) const; virtual int GetRemoteSSRC(const int videoChannel, unsigned int& SSRC) const;
virtual int GetRemoteCSRCs(const int videoChannel, virtual int GetRemoteCSRCs(const int videoChannel,

View File

@@ -11,11 +11,10 @@
/* /*
* vie_sender.cc * vie_sender.cc
*/ */
#include <cassert>
#include "vie_sender.h" #include "vie_sender.h"
#include "critical_section_wrapper.h" #include "critical_section_wrapper.h"
#include "rtp_rtcp.h"
#ifdef WEBRTC_SRTP #ifdef WEBRTC_SRTP
#include "SrtpModule.h" #include "SrtpModule.h"
#endif #endif
@@ -28,11 +27,9 @@ namespace webrtc {
// Constructor // Constructor
// ---------------------------------------------------------------------------- // ----------------------------------------------------------------------------
ViESender::ViESender(int engineId, int channelId, ViESender::ViESender(int engineId, int channelId)
RtpRtcp& rtpRtcpModule)
: _engineId(engineId), _channelId(channelId), : _engineId(engineId), _channelId(channelId),
_sendCritsect(*CriticalSectionWrapper::CreateCriticalSection()), _sendCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
_rtpRtcp(rtpRtcpModule),
#ifdef WEBRTC_SRTP #ifdef WEBRTC_SRTP
_ptrSrtp(NULL), _ptrSrtp(NULL),
_ptrSrtcp(NULL), _ptrSrtcp(NULL),

View File

@@ -30,14 +30,13 @@
namespace webrtc { namespace webrtc {
class CriticalSectionWrapper; class CriticalSectionWrapper;
class RtpDump; class RtpDump;
class RtpRtcp;
class Transport; class Transport;
class VideoCodingModule; class VideoCodingModule;
class ViESender: public Transport class ViESender: public Transport
{ {
public: public:
ViESender(int engineId, int channelId, RtpRtcp& rtpRtcpModule); ViESender(int engineId, int channelId);
~ViESender(); ~ViESender();
int RegisterExternalEncryption(Encryption* encryption); int RegisterExternalEncryption(Encryption* encryption);
@@ -65,7 +64,6 @@ private:
int _engineId; int _engineId;
int _channelId; int _channelId;
CriticalSectionWrapper& _sendCritsect; CriticalSectionWrapper& _sendCritsect;
RtpRtcp& _rtpRtcp;
#ifdef WEBRTC_SRTP #ifdef WEBRTC_SRTP
SrtpModule* _ptrSrtp; SrtpModule* _ptrSrtp;

View File

@@ -37,6 +37,7 @@ public:
WebRtc_Word32 SetPacketLoss(WebRtc_Word32 lossRate); // Rate in % WebRtc_Word32 SetPacketLoss(WebRtc_Word32 lossRate); // Rate in %
void SetNetworkDelay(WebRtc_Word64 delayMs); void SetNetworkDelay(WebRtc_Word64 delayMs);
void SetSSRCFilter(WebRtc_UWord32 SSRC);
void ClearStats(); void ClearStats();
void GetStats(WebRtc_Word32& numRtpPackets, void GetStats(WebRtc_Word32& numRtpPackets,
@@ -88,6 +89,8 @@ private:
bool _checkSSRC; bool _checkSSRC;
WebRtc_UWord32 _lastSSRC; WebRtc_UWord32 _lastSSRC;
bool _filterSSRC;
WebRtc_UWord32 _SSRC;
bool _checkSequenceNumber; bool _checkSequenceNumber;
WebRtc_UWord16 _firstSequenceNumber; WebRtc_UWord16 _firstSequenceNumber;
}; };
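A short usage sketch: the new filter lets a loopback test forward a single simulcast stream to the receiving channel at a time (transport setup as in the simulcast autotest; the SSRC value is illustrative):

tbExternalTransport extTransport(*ptrViENetwork);  // ptrViENetwork assumed valid
extTransport.SetSSRCFilter(2);  // drop every RTP packet whose SSRC is not 2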

View File

@@ -47,6 +47,7 @@ public:
int ViEExtendedTest(); int ViEExtendedTest();
int ViEAPITest(); int ViEAPITest();
int ViELoopbackCall(); int ViELoopbackCall();
int ViESimulcastCall();
// custom call and helper functions // custom call and helper functions
int ViECustomCall(); int ViECustomCall();

View File

@@ -12,24 +12,22 @@
// tb_external_transport.cc // tb_external_transport.cc
// //
#include <stdlib.h> // rand
#include "tb_external_transport.h" #include "tb_external_transport.h"
#include <stdlib.h> // rand
#if defined(WEBRTC_LINUX) || defined(__linux__)
#include <stdlib.h>
#include <string.h>
#endif
#if defined(WEBRTC_MAC)
#include <cstring>
#endif
#include "critical_section_wrapper.h" #include "critical_section_wrapper.h"
#include "event_wrapper.h" #include "event_wrapper.h"
#include "thread_wrapper.h" #include "thread_wrapper.h"
#include "tick_util.h" #include "tick_util.h"
#include "vie_network.h" #include "vie_network.h"
#include "tick_util.h"
#if defined(WEBRTC_LINUX) || defined(__linux__)
#include <stdlib.h>
#include <string.h>
#endif
#if defined(WEBRTC_MAC)
#include <cstring>
#endif
#if defined(_WIN32) #if defined(_WIN32)
#pragma warning(disable: 4355) // 'this' : used in base member initializer list #pragma warning(disable: 4355) // 'this' : used in base member initializer list
@@ -52,6 +50,8 @@ tbExternalTransport::tbExternalTransport(webrtc::ViENetwork& vieNetwork) :
_rtcpPackets(), _rtcpPackets(),
_checkSSRC(false), _checkSSRC(false),
_lastSSRC(0), _lastSSRC(0),
_filterSSRC(false),
_SSRC(0),
_checkSequenceNumber(0), _checkSequenceNumber(0),
_firstSequenceNumber(0) _firstSequenceNumber(0)
{ {
@@ -75,6 +75,18 @@ tbExternalTransport::~tbExternalTransport()
int tbExternalTransport::SendPacket(int channel, const void *data, int len) int tbExternalTransport::SendPacket(int channel, const void *data, int len)
{ {
if (_filterSSRC)
{
WebRtc_UWord8* ptr = (WebRtc_UWord8*)data;
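// Bytes 8-11 of the fixed RTP header carry the SSRC in network byte order.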
WebRtc_UWord32 ssrc = ptr[8] << 24;
ssrc += ptr[9] << 16;
ssrc += ptr[10] << 8;
ssrc += ptr[11];
if (ssrc != _SSRC)
{
return len; // return len to avoid an error in the trace file
}
}
_statCrit.Enter(); _statCrit.Enter();
_rtpCount++; _rtpCount++;
_statCrit.Leave(); _statCrit.Leave();
@@ -132,7 +144,13 @@ void tbExternalTransport::SetNetworkDelay(WebRtc_Word64 delayMs)
{ {
webrtc::CriticalSectionScoped cs(_crit); webrtc::CriticalSectionScoped cs(_crit);
_networkDelayMs = delayMs; _networkDelayMs = delayMs;
return; }
void tbExternalTransport::SetSSRCFilter(WebRtc_UWord32 ssrc)
{
webrtc::CriticalSectionScoped cs(_crit);
_filterSSRC = true;
_SSRC = ssrc;
} }
void tbExternalTransport::ClearStats() void tbExternalTransport::ClearStats()
@@ -141,7 +159,6 @@ void tbExternalTransport::ClearStats()
_rtpCount = 0; _rtpCount = 0;
_dropCount = 0; _dropCount = 0;
_rtcpCount = 0; _rtcpCount = 0;
return;
} }
void tbExternalTransport::GetStats(WebRtc_Word32& numRtpPackets, void tbExternalTransport::GetStats(WebRtc_Word32& numRtpPackets,
@@ -152,7 +169,6 @@ void tbExternalTransport::GetStats(WebRtc_Word32& numRtpPackets,
numRtpPackets = _rtpCount; numRtpPackets = _rtpCount;
numDroppedPackets = _dropCount; numDroppedPackets = _dropCount;
numRtcpPackets = _rtcpCount; numRtcpPackets = _rtcpCount;
return;
} }
void tbExternalTransport::EnableSSRCCheck() void tbExternalTransport::EnableSSRCCheck()
@@ -160,6 +176,7 @@ void tbExternalTransport::EnableSSRCCheck()
webrtc::CriticalSectionScoped cs(_statCrit); webrtc::CriticalSectionScoped cs(_statCrit);
_checkSSRC = true; _checkSSRC = true;
} }
unsigned int tbExternalTransport::ReceivedSSRC() unsigned int tbExternalTransport::ReceivedSSRC()
{ {
webrtc::CriticalSectionScoped cs(_statCrit); webrtc::CriticalSectionScoped cs(_statCrit);
@@ -258,8 +275,9 @@ bool tbExternalTransport::ViEExternalTransportProcess()
// Send to ViE // Send to ViE
if (packet) if (packet)
{ {
_vieNetwork.ReceivedRTPPacket(packet->channel, _vieNetwork.ReceivedRTCPPacket(
packet->packetBuffer, packet->length); packet->channel,
packet->packetBuffer, packet->length);
delete packet; delete packet;
packet = NULL; packet = NULL;
} }

View File

@@ -55,6 +55,7 @@ bool ViEAutoTestMain::BeginOSIndependentTesting()
ViETest::Log("\t 6. Specific extended test"); ViETest::Log("\t 6. Specific extended test");
ViETest::Log("\t 7. Simple loopback call"); ViETest::Log("\t 7. Simple loopback call");
ViETest::Log("\t 8. Custom configure a call"); ViETest::Log("\t 8. Custom configure a call");
ViETest::Log("\t 9. Simulcast in loopback");
ViETest::Log("Select type of test: "); ViETest::Log("Select type of test: ");
if (_useAnswerFile) if (_useAnswerFile)
@@ -73,13 +74,6 @@ bool ViEAutoTestMain::BeginOSIndependentTesting()
getchar(); getchar();
} }
ViETest::Log(""); ViETest::Log("");
if (testType < 0 || testType > 8)
{
ViETest::Log("ERROR: Invalid selection. Try again\n");
continue;
}
switch (testType) switch (testType)
{ {
case 0: case 0:
@@ -257,8 +251,12 @@ bool ViEAutoTestMain::BeginOSIndependentTesting()
case 8: case 8:
testErrors += vieAutoTest.ViECustomCall(); testErrors += vieAutoTest.ViECustomCall();
break; break;
default: case 9:
testErrors += vieAutoTest.ViESimulcastCall();
break; break;
default:
ViETest::Log("ERROR: Invalid selection. Try again\n");
continue;
} }
} while (testType != 0); } while (testType != 0);

View File

@@ -0,0 +1,554 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
//
// vie_autotest_simulcast.cc
//
// This code is also used as sample code for ViE 3.0
//
#include "vie_autotest_defines.h"
#include "vie_autotest.h"
// ===================================================================
//
// BEGIN: VideoEngine 3.0 Sample Code
//
#include "common_types.h"
#include "voe_base.h"
#include "vie_base.h"
#include "vie_capture.h"
#include "vie_codec.h"
#include "vie_network.h"
#include "vie_render.h"
#include "vie_rtp_rtcp.h"
#include <iostream>
#include "tb_external_transport.h"
#define VCM_RED_PAYLOAD_TYPE 96
#define VCM_ULPFEC_PAYLOAD_TYPE 97
int VideoEngineSimulcastTest(void* window1, void* window2)
{
//********************************************************
// Begin create/initialize Video Engine for testing
//********************************************************
int error = 0;
//
// Create a VideoEngine instance
//
webrtc::VideoEngine* ptrViE = NULL;
ptrViE = webrtc::VideoEngine::Create();
if (ptrViE == NULL)
{
printf("ERROR in VideoEngine::Create\n");
return -1;
}
error = ptrViE->SetTraceFilter(webrtc::kTraceAll);
if (error == -1)
{
printf("ERROR in VideoEngine::SetTraceLevel\n");
return -1;
}
#ifdef WEBRTC_ANDROID
error = ptrViE->SetTraceFile("/sdcard/ViETrace.txt");
if (error == -1)
{
printf("ERROR in VideoEngine::SetTraceFile\n");
return -1;
}
error = ptrViE->SetTraceFile("/sdcard/ViEEncryptedTrace.txt");
if (error == -1)
{
printf("ERROR in VideoEngine::SetTraceFile\n");
return -1;
}
#else
error = ptrViE->SetTraceFile("ViETrace.txt");
if (error == -1)
{
printf("ERROR in VideoEngine::SetTraceFile\n");
return -1;
}
#endif
//
// Init VideoEngine and create a channel
//
webrtc::ViEBase* ptrViEBase = webrtc::ViEBase::GetInterface(ptrViE);
if (ptrViEBase == NULL)
{
printf("ERROR in ViEBase::GetInterface\n");
return -1;
}
error = ptrViEBase->Init();
if (error == -1)
{
printf("ERROR in ViEBase::Init\n");
return -1;
}
int videoChannel = -1;
error = ptrViEBase->CreateChannel(videoChannel);
if (error == -1)
{
printf("ERROR in ViEBase::CreateChannel\n");
return -1;
}
//
// List available capture devices, allocate and connect.
//
webrtc::ViECapture* ptrViECapture =
webrtc::ViECapture::GetInterface(ptrViE);
if (ptrViECapture == NULL)
{
printf("ERROR in ViECapture::GetInterface\n");
return -1;
}
const unsigned int KMaxDeviceNameLength = 128;
const unsigned int KMaxUniqueIdLength = 256;
char deviceName[KMaxDeviceNameLength];
memset(deviceName, 0, KMaxDeviceNameLength);
char uniqueId[KMaxUniqueIdLength];
memset(uniqueId, 0, KMaxUniqueIdLength);
printf("Available capture devices:\n");
int captureIdx = 0;
for (captureIdx = 0;
captureIdx < ptrViECapture->NumberOfCaptureDevices();
captureIdx++)
{
memset(deviceName, 0, KMaxDeviceNameLength);
memset(uniqueId, 0, KMaxUniqueIdLength);
error = ptrViECapture->GetCaptureDevice(captureIdx, deviceName,
KMaxDeviceNameLength, uniqueId,
KMaxUniqueIdLength);
if (error == -1)
{
printf("ERROR in ViECapture::GetCaptureDevice\n");
return -1;
}
printf("\t %d. %s\n", captureIdx + 1, deviceName);
}
printf("\nChoose capture device: ");
#ifdef WEBRTC_ANDROID
captureIdx = 0;
printf("0\n");
#else
if (scanf("%d", &captureIdx) != 1)
{
printf("Error in scanf()\n");
return -1;
}
getchar();
captureIdx = captureIdx - 1; // Compensate for idx start at 1.
#endif
error = ptrViECapture->GetCaptureDevice(captureIdx, deviceName,
KMaxDeviceNameLength, uniqueId,
KMaxUniqueIdLength);
if (error == -1)
{
printf("ERROR in ViECapture::GetCaptureDevice\n");
return -1;
}
int captureId = 0;
error = ptrViECapture->AllocateCaptureDevice(uniqueId, KMaxUniqueIdLength,
captureId);
if (error == -1)
{
printf("ERROR in ViECapture::AllocateCaptureDevice\n");
return -1;
}
error = ptrViECapture->ConnectCaptureDevice(captureId, videoChannel);
if (error == -1)
{
printf("ERROR in ViECapture::ConnectCaptureDevice\n");
return -1;
}
error = ptrViECapture->StartCapture(captureId);
if (error == -1)
{
printf("ERROR in ViECapture::StartCapture\n");
return -1;
}
//
// RTP/RTCP settings
//
webrtc::ViERTP_RTCP* ptrViERtpRtcp =
webrtc::ViERTP_RTCP::GetInterface(ptrViE);
if (ptrViERtpRtcp == NULL)
{
printf("ERROR in ViERTP_RTCP::GetInterface\n");
return -1;
}
error = ptrViERtpRtcp->SetRTCPStatus(videoChannel,
webrtc::kRtcpCompound_RFC4585);
if (error == -1)
{
printf("ERROR in ViERTP_RTCP::SetRTCPStatus\n");
return -1;
}
error = ptrViERtpRtcp->SetKeyFrameRequestMethod(
videoChannel, webrtc::kViEKeyFrameRequestPliRtcp);
if (error == -1)
{
printf("ERROR in ViERTP_RTCP::SetKeyFrameRequestMethod\n");
return -1;
}
//
// Set up rendering
//
webrtc::ViERender* ptrViERender = webrtc::ViERender::GetInterface(ptrViE);
if (ptrViERender == NULL)
{
printf("ERROR in ViERender::GetInterface\n");
return -1;
}
error = ptrViERender->AddRenderer(captureId, window1, 0, 0.0, 0.0, 1.0, 1.0);
if (error == -1)
{
printf("ERROR in ViERender::AddRenderer\n");
return -1;
}
error = ptrViERender->StartRender(captureId);
if (error == -1)
{
printf("ERROR in ViERender::StartRender\n");
return -1;
}
error = ptrViERender->AddRenderer(videoChannel, window2, 1, 0.0, 0.0, 1.0,
1.0);
if (error == -1)
{
printf("ERROR in ViERender::AddRenderer\n");
return -1;
}
error = ptrViERender->StartRender(videoChannel);
if (error == -1)
{
printf("ERROR in ViERender::StartRender\n");
return -1;
}
//
// Setup codecs
//
webrtc::ViECodec* ptrViECodec = webrtc::ViECodec::GetInterface(ptrViE);
if (ptrViECodec == NULL)
{
printf("ERROR in ViECodec::GetInterface\n");
return -1;
}
// Check available codecs and prepare receive codecs
printf("\nAvailable codecs:\n");
webrtc::VideoCodec videoCodec;
memset(&videoCodec, 0, sizeof(webrtc::VideoCodec));
int codecIdx = 0;
for (codecIdx = 0; codecIdx < ptrViECodec->NumberOfCodecs(); codecIdx++)
{
error = ptrViECodec->GetCodec(codecIdx, videoCodec);
if (error == -1)
{
printf("ERROR in ViECodec::GetCodec\n");
return -1;
}
// Use VP8 only for this simulcast test.
if (videoCodec.codecType != webrtc::kVideoCodecVP8)
{
continue;
}
error = ptrViECodec->SetReceiveCodec(videoChannel, videoCodec);
if (error == -1)
{
printf("ERROR in ViECodec::SetReceiveCodec\n");
return -1;
}
if (videoCodec.codecType != webrtc::kVideoCodecRED
&& videoCodec.codecType != webrtc::kVideoCodecULPFEC)
{
printf("\t %d. %s\n", codecIdx + 1, videoCodec.plName);
}
break;
}
error = ptrViECodec->GetCodec(codecIdx, videoCodec);
if (error == -1)
{
printf("ERROR in ViECodec::GetCodec\n");
return -1;
}
// Set spatial resolution option
videoCodec.width = 1280;
videoCodec.height = 720;
// simulcast settings
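// The three streams form a quarter/half/full resolution ladder of the 1280x720
// capture with increasing maxBitrate; each entry reuses the codec-wide qpMax.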
videoCodec.numberOfSimulcastStreams = 3;
videoCodec.simulcastStream[0].width = 320;
videoCodec.simulcastStream[0].height = 180;
videoCodec.simulcastStream[0].numberOfTemporalLayers = 0;
videoCodec.simulcastStream[0].maxBitrate = 100;
videoCodec.simulcastStream[0].qpMax = videoCodec.qpMax;
videoCodec.simulcastStream[1].width = 640;
videoCodec.simulcastStream[1].height = 360;
videoCodec.simulcastStream[1].numberOfTemporalLayers = 0;
videoCodec.simulcastStream[1].maxBitrate = 500;
videoCodec.simulcastStream[1].qpMax = videoCodec.qpMax;
videoCodec.simulcastStream[2].width = 1280;
videoCodec.simulcastStream[2].height = 720;
videoCodec.simulcastStream[2].numberOfTemporalLayers = 0;
videoCodec.simulcastStream[2].maxBitrate = 1200;
videoCodec.simulcastStream[2].qpMax = videoCodec.qpMax;
// Set start bit rate
std::string str;
std::cout << std::endl;
std::cout << "Choose start rate (in kbps). Press enter for default: ";
std::getline(std::cin, str);
int startRate = atoi(str.c_str());
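// atoi() returns 0 for an empty line, so pressing enter keeps the default start bitrate.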
if (startRate != 0)
{
videoCodec.startBitrate = startRate;
}
error = ptrViECodec->SetSendCodec(videoChannel, videoCodec);
if (error == -1)
{
printf("ERROR in ViECodec::SetSendCodec\n");
return -1;
}
//
// Address settings
//
webrtc::ViENetwork* ptrViENetwork =
webrtc::ViENetwork::GetInterface(ptrViE);
if (ptrViENetwork == NULL)
{
printf("ERROR in ViENetwork::GetInterface\n");
return -1;
}
// Setting External transport
tbExternalTransport extTransport(*(ptrViENetwork));
error = ptrViENetwork->RegisterSendTransport(videoChannel,
extTransport);
if (error == -1)
{
printf("ERROR in ViECodec::RegisterSendTransport \n");
return -1;
}
extTransport.SetPacketLoss(0);
// Set network delay value
extTransport.SetNetworkDelay(10);
extTransport.SetSSRCFilter(3);
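// Initially forward only SSRC 3, i.e. the 1280x720 stream; the per-stream SSRCs 1-3 are assigned below.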
for (int idx = 0; idx < 3; idx++)
{
error = ptrViERtpRtcp->SetLocalSSRC(videoChannel,
idx+1, // SSRC
webrtc::kViEStreamTypeNormal,
idx);
if (error == -1)
{
printf("ERROR in ViERTP_RTCP::SetLocalSSRC(idx:%d)\n", idx);
return -1;
}
}
error = ptrViEBase->StartReceive(videoChannel);
if (error == -1)
{
printf("ERROR in ViENetwork::StartReceive\n");
return -1;
}
error = ptrViEBase->StartSend(videoChannel);
if (error == -1)
{
printf("ERROR in ViENetwork::StartSend\n");
return -1;
}
//********************************************************
// Engine started
//********************************************************
printf("\nSimulcast call started\n\n");
do
{
printf("Enter new SSRC filter 1,2 or 3\n");
printf("Press enter to stop...");
str.clear();
std::getline(std::cin, str);
if (!str.empty())
{
int ssrc = atoi(str.c_str());
if (ssrc > 0 && ssrc < 4)
{
extTransport.SetSSRCFilter(ssrc);
} else
{
printf("Invalid SSRC\n");
}
} else
{
break;
}
} while (true);
//********************************************************
// Testing finished. Tear down Video Engine
//********************************************************
error = ptrViEBase->StopReceive(videoChannel);
if (error == -1)
{
printf("ERROR in ViEBase::StopReceive\n");
return -1;
}
error = ptrViEBase->StopSend(videoChannel);
if (error == -1)
{
printf("ERROR in ViEBase::StopSend\n");
return -1;
}
error = ptrViERender->StopRender(captureId);
if (error == -1)
{
printf("ERROR in ViERender::StopRender\n");
return -1;
}
error = ptrViERender->RemoveRenderer(captureId);
if (error == -1)
{
printf("ERROR in ViERender::RemoveRenderer\n");
return -1;
}
error = ptrViERender->StopRender(videoChannel);
if (error == -1)
{
printf("ERROR in ViERender::StopRender\n");
return -1;
}
error = ptrViERender->RemoveRenderer(videoChannel);
if (error == -1)
{
printf("ERROR in ViERender::RemoveRenderer\n");
return -1;
}
error = ptrViECapture->StopCapture(captureId);
if (error == -1)
{
printf("ERROR in ViECapture::StopCapture\n");
return -1;
}
error = ptrViECapture->DisconnectCaptureDevice(videoChannel);
if (error == -1)
{
printf("ERROR in ViECapture::DisconnectCaptureDevice\n");
return -1;
}
error = ptrViECapture->ReleaseCaptureDevice(captureId);
if (error == -1)
{
printf("ERROR in ViECapture::ReleaseCaptureDevice\n");
return -1;
}
error = ptrViEBase->DeleteChannel(videoChannel);
if (error == -1)
{
printf("ERROR in ViEBase::DeleteChannel\n");
return -1;
}
int remainingInterfaces = 0;
remainingInterfaces = ptrViECodec->Release();
remainingInterfaces += ptrViECapture->Release();
remainingInterfaces += ptrViERtpRtcp->Release();
remainingInterfaces += ptrViERender->Release();
remainingInterfaces += ptrViENetwork->Release();
remainingInterfaces += ptrViEBase->Release();
if (remainingInterfaces > 0)
{
printf("ERROR: Could not release all interfaces\n");
return -1;
}
bool deleted = webrtc::VideoEngine::Delete(ptrViE);
if (deleted == false)
{
printf("ERROR in VideoEngine::Delete\n");
return -1;
}
return 0;
//
// END: VideoEngine 3.0 Sample Code
//
// ===================================================================
}
int ViEAutoTest::ViESimulcastCall()
{
ViETest::Log(" ");
ViETest::Log("========================================");
ViETest::Log(" ViE Autotest Simulcast Call\n");
if (VideoEngineSimulcastTest(_window1, _window2) == 0)
{
ViETest::Log(" ");
ViETest::Log(" ViE Autotest Simulcast Call Done");
ViETest::Log("========================================");
ViETest::Log(" ");
return 0;
}
ViETest::Log(" ");
ViETest::Log(" ViE Autotest Simulcast Call Failed");
ViETest::Log("========================================");
ViETest::Log(" ");
return 1;
}

View File

@@ -67,6 +67,7 @@
'source/vie_autotest_render.cc', 'source/vie_autotest_render.cc',
'source/vie_autotest_rtp_rtcp.cc', 'source/vie_autotest_rtp_rtcp.cc',
'source/vie_autotest_custom_call.cc', 'source/vie_autotest_custom_call.cc',
'source/vie_autotest_simulcast.cc',
# Platform dependent # Platform dependent
# Linux # Linux

View File

@@ -1485,11 +1485,7 @@ Channel::Init()
{ {
// Open up the RTP/RTCP receiver for all supported codecs // Open up the RTP/RTCP receiver for all supported codecs
if ((_audioCodingModule.Codec(idx, codec) == -1) || if ((_audioCodingModule.Codec(idx, codec) == -1) ||
(_rtpRtcpModule.RegisterReceivePayload(codec.plname, (_rtpRtcpModule.RegisterReceivePayload(codec) == -1))
codec.pltype,
codec.plfreq,
codec.channels,
codec.rate) == -1))
{ {
WEBRTC_TRACE(kTraceWarning, kTraceVoice, WEBRTC_TRACE(kTraceWarning, kTraceVoice,
VoEId(_instanceId,_channelId), VoEId(_instanceId,_channelId),
@@ -1517,10 +1513,7 @@ Channel::Init()
// Register default PT for outband 'telephone-event' // Register default PT for outband 'telephone-event'
if (!STR_CASE_CMP(codec.plname, "telephone-event")) if (!STR_CASE_CMP(codec.plname, "telephone-event"))
{ {
if ((_rtpRtcpModule.RegisterSendPayload(codec.plname, if ((_rtpRtcpModule.RegisterSendPayload(codec) == -1) ||
codec.pltype,
codec.plfreq,
codec.channels) == -1) ||
(_audioCodingModule.RegisterReceiveCodec(codec) == -1)) (_audioCodingModule.RegisterReceiveCodec(codec) == -1))
{ {
WEBRTC_TRACE(kTraceWarning, kTraceVoice, WEBRTC_TRACE(kTraceWarning, kTraceVoice,
@@ -1535,10 +1528,7 @@ Channel::Init()
{ {
if ((_audioCodingModule.RegisterSendCodec(codec) == -1) || if ((_audioCodingModule.RegisterSendCodec(codec) == -1) ||
(_audioCodingModule.RegisterReceiveCodec(codec) == -1) || (_audioCodingModule.RegisterReceiveCodec(codec) == -1) ||
(_rtpRtcpModule.RegisterSendPayload(codec.plname, (_rtpRtcpModule.RegisterSendPayload(codec) == -1))
codec.pltype,
codec.plfreq,
codec.channels) == -1))
{ {
WEBRTC_TRACE(kTraceWarning, kTraceVoice, WEBRTC_TRACE(kTraceWarning, kTraceVoice,
VoEId(_instanceId,_channelId), VoEId(_instanceId,_channelId),
@@ -2379,20 +2369,10 @@ Channel::SetSendCodec(const CodecInst& codec)
return -1; return -1;
} }
if (_rtpRtcpModule.RegisterSendPayload( if (_rtpRtcpModule.RegisterSendPayload(codec) != 0)
codec.plname,
codec.pltype,
codec.plfreq,
codec.channels,
(codec.rate < 0 ? 0 : codec.rate)) != 0)
{ {
_rtpRtcpModule.DeRegisterSendPayload(codec.pltype); _rtpRtcpModule.DeRegisterSendPayload(codec.pltype);
if (_rtpRtcpModule.RegisterSendPayload( if (_rtpRtcpModule.RegisterSendPayload(codec) != 0)
codec.plname,
codec.pltype,
codec.plfreq,
codec.channels,
(codec.rate < 0 ? 0 : codec.rate)) != 0)
{ {
WEBRTC_TRACE( WEBRTC_TRACE(
kTraceError, kTraceVoice, VoEId(_instanceId,_channelId), kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
@@ -2474,12 +2454,7 @@ Channel::SetRecPayloadType(const CodecInst& codec)
CodecInst rxCodec = codec; CodecInst rxCodec = codec;
// Get payload type for the given codec // Get payload type for the given codec
_rtpRtcpModule.ReceivePayloadType( _rtpRtcpModule.ReceivePayloadType(rxCodec, &pltype);
rxCodec.plname,
rxCodec.plfreq,
rxCodec.channels,
&pltype,
(rxCodec.rate < 0 ? 0 : rxCodec.rate));
rxCodec.pltype = pltype; rxCodec.pltype = pltype;
if (_rtpRtcpModule.DeRegisterReceivePayload(pltype) != 0) if (_rtpRtcpModule.DeRegisterReceivePayload(pltype) != 0)
@@ -2501,21 +2476,11 @@ Channel::SetRecPayloadType(const CodecInst& codec)
return 0; return 0;
} }
if (_rtpRtcpModule.RegisterReceivePayload( if (_rtpRtcpModule.RegisterReceivePayload(codec) != 0)
codec.plname,
codec.pltype,
codec.plfreq,
codec.channels,
(codec.rate < 0 ? 0 : codec.rate)) != 0)
{ {
// First attempt to register failed => de-register and try again // First attempt to register failed => de-register and try again
_rtpRtcpModule.DeRegisterReceivePayload(codec.pltype); _rtpRtcpModule.DeRegisterReceivePayload(codec.pltype);
if (_rtpRtcpModule.RegisterReceivePayload( if (_rtpRtcpModule.RegisterReceivePayload(codec) != 0)
codec.plname,
codec.pltype,
codec.plfreq,
codec.channels,
(codec.rate < 0 ? 0 : codec.rate)) != 0)
{ {
_engineStatisticsPtr->SetLastError( _engineStatisticsPtr->SetLastError(
VE_RTP_RTCP_MODULE_ERROR, kTraceError, VE_RTP_RTCP_MODULE_ERROR, kTraceError,
@@ -2543,12 +2508,7 @@ Channel::GetRecPayloadType(CodecInst& codec)
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId), WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::GetRecPayloadType()"); "Channel::GetRecPayloadType()");
WebRtc_Word8 payloadType(-1); WebRtc_Word8 payloadType(-1);
if (_rtpRtcpModule.ReceivePayloadType(( if (_rtpRtcpModule.ReceivePayloadType(codec, &payloadType) != 0)
const WebRtc_Word8*)codec.plname,
codec.plfreq,
codec.channels,
&payloadType,
(codec.rate < 0 ? 0 : codec.rate)) != 0)
{ {
_engineStatisticsPtr->SetLastError( _engineStatisticsPtr->SetLastError(
VE_RTP_RTCP_MODULE_ERROR, kTraceError, VE_RTP_RTCP_MODULE_ERROR, kTraceError,
@@ -2635,16 +2595,10 @@ Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency)
return -1; return -1;
} }
if (_rtpRtcpModule.RegisterSendPayload(codec.plname, if (_rtpRtcpModule.RegisterSendPayload(codec) != 0)
codec.pltype,
codec.plfreq,
codec.channels) != 0)
{ {
_rtpRtcpModule.DeRegisterSendPayload(codec.pltype); _rtpRtcpModule.DeRegisterSendPayload(codec.pltype);
if (_rtpRtcpModule.RegisterSendPayload(codec.plname, if (_rtpRtcpModule.RegisterSendPayload(codec) != 0)
codec.pltype,
codec.plfreq,
codec.channels) != 0)
{ {
_engineStatisticsPtr->SetLastError( _engineStatisticsPtr->SetLastError(
VE_RTP_RTCP_MODULE_ERROR, kTraceError, VE_RTP_RTCP_MODULE_ERROR, kTraceError,
@@ -4539,9 +4493,11 @@ Channel::SetSendTelephoneEventPayloadType(unsigned char type)
"SetSendTelephoneEventPayloadType() invalid type"); "SetSendTelephoneEventPayloadType() invalid type");
return -1; return -1;
} }
const WebRtc_Word8 payloadName[RTP_PAYLOAD_NAME_SIZE] = CodecInst codec;
"telephone-event"; codec.plfreq = 8000;
if (_rtpRtcpModule.RegisterSendPayload(payloadName, type, 8000) != 0) codec.pltype = type;
memcpy(codec.plname, "telephone-event", 16);
if (_rtpRtcpModule.RegisterSendPayload(codec) != 0)
{ {
_engineStatisticsPtr->SetLastError( _engineStatisticsPtr->SetLastError(
VE_RTP_RTCP_MODULE_ERROR, kTraceError, VE_RTP_RTCP_MODULE_ERROR, kTraceError,
@@ -6630,11 +6586,7 @@ Channel::RegisterReceiveCodecsToRTPModule()
{ {
// Open up the RTP/RTCP receiver for all supported codecs // Open up the RTP/RTCP receiver for all supported codecs
if ((_audioCodingModule.Codec(idx, codec) == -1) || if ((_audioCodingModule.Codec(idx, codec) == -1) ||
(_rtpRtcpModule.RegisterReceivePayload(codec.plname, (_rtpRtcpModule.RegisterReceivePayload(codec) == -1))
codec.pltype,
codec.plfreq,
codec.channels,
codec.rate) == -1))
{ {
WEBRTC_TRACE( WEBRTC_TRACE(
kTraceWarning, kTraceWarning,