Implemented NACK based reference picture selection.

This CL implements NACK based reference picture selection for VP8. A separate
class is used for keeping track of the references and managing the VP8 encode
flags. Appropriate tests have also been added.

BUG=
TEST=

Review URL: http://webrtc-codereview.appspot.com/284002

git-svn-id: http://webrtc.googlecode.com/svn/trunk@1082 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
stefan@webrtc.org 2011-12-02 08:34:05 +00:00
parent 4b00560a6e
commit a4a88f90c4
30 changed files with 964 additions and 314 deletions

View File

@ -84,10 +84,12 @@ class VideoCaptureModule: public RefCountedModule {
// - frameRate : The target frame rate.
virtual WebRtc_Word32 SetRates(WebRtc_Word32 newBitRate,
WebRtc_Word32 frameRate) = 0;
// Inform the encoder about the packet loss.
// Inform the encoder about the packet loss and the round-trip time.
// - packetLoss : Fraction lost
// (loss rate in percent = 100 * packetLoss / 255).
virtual WebRtc_Word32 SetPacketLoss(WebRtc_UWord32 packetLoss) = 0;
// - rtt : Round-trip time in milliseconds.
virtual WebRtc_Word32 SetChannelParameters(WebRtc_UWord32 packetLoss,
int rtt) = 0;
// Encode the next frame as key frame.
virtual WebRtc_Word32 EncodeFrameType(const FrameType type) = 0;

View File

@ -71,11 +71,17 @@ public:
// <0 - Error
virtual WebRtc_Word32 Reset();
virtual WebRtc_Word32 SetRates(WebRtc_UWord32 /*newBitRate*/, WebRtc_UWord32 /*frameRate*/) {return WEBRTC_VIDEO_CODEC_OK;}
virtual WebRtc_Word32 SetRates(WebRtc_UWord32 /*newBitRate*/,
WebRtc_UWord32 /*frameRate*/)
{return WEBRTC_VIDEO_CODEC_OK;}
virtual WebRtc_Word32 SetPacketLoss(WebRtc_UWord32 /*packetLoss*/){return WEBRTC_VIDEO_CODEC_OK;};
virtual WebRtc_Word32 SetChannelParameters(WebRtc_UWord32 /*packetLoss*/,
int /*rtt*/)
{return WEBRTC_VIDEO_CODEC_OK;}
virtual WebRtc_Word32 CodecConfigParameters(WebRtc_UWord8* /*buffer*/, WebRtc_Word32 /*size*/){return WEBRTC_VIDEO_CODEC_OK;};
virtual WebRtc_Word32 CodecConfigParameters(WebRtc_UWord8* /*buffer*/,
WebRtc_Word32 /*size*/)
{return WEBRTC_VIDEO_CODEC_OK;}
// Get version number for the codec.
//

View File

@ -130,14 +130,16 @@ public:
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
virtual WebRtc_Word32 Reset() = 0;
// Inform the encoder about the packet loss and round trip time on the network
// used to decide the best pattern and signaling.
// Inform the encoder about the packet loss and round trip time on the
// network used to decide the best pattern and signaling.
//
// - packetLoss : Fraction lost
// (loss rate in percent = 100 * packetLoss / 255)
// - packetLoss : Fraction lost (loss rate in percent =
// 100 * packetLoss / 255)
// - rtt : Round-trip time in milliseconds
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
virtual WebRtc_Word32 SetPacketLoss(WebRtc_UWord32 packetLoss) = 0;
virtual WebRtc_Word32 SetChannelParameters(WebRtc_UWord32 packetLoss,
int rtt) = 0;
// Inform the encoder about the new target bit rate.
//

View File

@ -38,7 +38,8 @@ class MockVideoEncoder : public VideoEncoder {
WebRtc_Word32(EncodedImageCallback* callback));
MOCK_METHOD0(Release, WebRtc_Word32());
MOCK_METHOD0(Reset, WebRtc_Word32());
MOCK_METHOD1(SetPacketLoss, WebRtc_Word32(WebRtc_UWord32 packetLoss));
MOCK_METHOD2(SetChannelParameters, WebRtc_Word32(WebRtc_UWord32 packetLoss,
int rtt));
MOCK_METHOD2(SetRates,
WebRtc_Word32(WebRtc_UWord32 newBitRate,
WebRtc_UWord32 frameRate));

View File

@ -36,6 +36,7 @@ _appendNext(false),
_missingFrames(false),
_rttFrames(0),
_hasReceivedSLI(false),
_hasReceivedRPSI(false),
_hasReceivedPLI(false),
_waitForKey(false)
{
@ -55,6 +56,7 @@ _appendNext(false),
_missingFrames(false),
_rttFrames(0),
_hasReceivedSLI(false),
_hasReceivedRPSI(false),
_hasReceivedPLI(false),
_waitForKey(false)
{
@ -75,6 +77,7 @@ _appendNext(false),
_missingFrames(false),
_rttFrames(0),
_hasReceivedSLI(false),
_hasReceivedRPSI(false),
_hasReceivedPLI(false),
_waitForKey(false)
{
@ -95,6 +98,7 @@ _appendNext(false),
_missingFrames(false),
_rttFrames(0),
_hasReceivedSLI(false),
_hasReceivedRPSI(false),
_hasReceivedPLI(false),
_waitForKey(false)
{
@ -116,6 +120,7 @@ _appendNext(false),
_missingFrames(false),
_rttFrames(rttFrames),
_hasReceivedSLI(false),
_hasReceivedRPSI(false),
_hasReceivedPLI(false),
_waitForKey(false)
{
@ -542,8 +547,7 @@ NormalAsyncTest::CopyCodecSpecificInfo(
const webrtc::CodecSpecificInfo* codecSpecificInfo) const
{
webrtc::CodecSpecificInfo* info = new webrtc::CodecSpecificInfo;
info->codecType = codecSpecificInfo->codecType;
info->codecSpecific = codecSpecificInfo->codecSpecific;
*info = *codecSpecificInfo;
return info;
}
@ -569,6 +573,19 @@ void NormalAsyncTest::CopyEncodedImage(TestVideoEncodedBuffer& dest,
dest.SetCaptureHeight((WebRtc_UWord16)src._encodedHeight);
dest.SetTimeStamp(src._timeStamp);
}
WebRtc_Word32 NormalAsyncTest::ReceivedDecodedReferenceFrame(
const WebRtc_UWord64 pictureId) {
_lastDecRefPictureId = pictureId;
return 0;
}
WebRtc_Word32 NormalAsyncTest::ReceivedDecodedFrame(
const WebRtc_UWord64 pictureId) {
_lastDecPictureId = pictureId;
return 0;
}
double
NormalAsyncTest::tGetTime()
{// return time in sec

View File

@ -93,10 +93,9 @@ public:
{
return NULL;
};
virtual WebRtc_Word32
ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId) { return 0;};
virtual WebRtc_Word32
ReceivedDecodedFrame(const WebRtc_UWord64 pictureId) { return 0;};
virtual WebRtc_Word32 ReceivedDecodedReferenceFrame(
const WebRtc_UWord64 pictureId);
virtual WebRtc_Word32 ReceivedDecodedFrame(const WebRtc_UWord64 pictureId);
protected:
virtual void Setup();
@ -131,7 +130,10 @@ protected:
std::list<fbSignal> _signalSLI;
int _rttFrames;
mutable bool _hasReceivedSLI;
mutable bool _hasReceivedRPSI;
WebRtc_UWord8 _pictureIdSLI;
WebRtc_UWord16 _pictureIdRPSI;
WebRtc_UWord64 _lastDecRefPictureId;
WebRtc_UWord64 _lastDecPictureId;
std::list<fbSignal> _signalPLI;
bool _hasReceivedPLI;

View File

@ -125,8 +125,8 @@ PacketLossTest::Setup()
std::string lossRateStr;
ss << _lossRate;
ss >> lossRateStr;
_encodedName = "../../" + source.GetName() + "-" + lossRateStr;
_outname = "../../out-" + source.GetName() + "-" + lossRateStr;
_encodedName = source.GetName() + "-" + lossRateStr;
_outname = "out-" + source.GetName() + "-" + lossRateStr;
if (_lossProbability != _lossRate)
{
@ -157,7 +157,11 @@ PacketLossTest::CodecSpecific_InitBitrate()
{
simulatedBitRate = _bitRate;
}
_encoder->SetPacketLoss((WebRtc_UWord32)(_lossProbability * 255.0));
int rtt = 0;
if (_inst.maxFramerate > 0)
rtt = _rttFrames * (1000 / _inst.maxFramerate);
_encoder->SetChannelParameters((WebRtc_UWord32)(_lossProbability * 255.0),
rtt);
_encoder->SetRates(simulatedBitRate, _inst.maxFramerate);
}
@ -169,7 +173,6 @@ int PacketLossTest::DoPacketLoss()
_sumChannelBytes += _frameToDecode->_frame->GetLength();
return 0;
}
//printf("Encoded: %d bytes\n", _encodedVideoBuffer.GetLength());
unsigned char *packet = NULL;
TestVideoEncodedBuffer newEncBuf;
newEncBuf.VerifyAndAllocate(_lengthSourceFrame);
@ -180,7 +183,7 @@ int PacketLossTest::DoPacketLoss()
int thrown = 0;
while ((size = NextPacket(1500, &packet)) > 0)
{
if (!PacketLoss(_lossProbability))
if (!PacketLoss(_lossProbability, thrown))
{
InsertPacket(&newEncBuf, packet, size);
kept++;
@ -207,6 +210,7 @@ int PacketLossTest::DoPacketLoss()
_sumChannelBytes += newEncBuf.GetLength();
_totalKept += kept;
_totalThrown += thrown;
return lossResult;
//printf("Threw away: %d out of %d packets\n", thrown, thrown + kept);
//printf("Encoded left: %d bytes\n", _encodedVideoBuffer.GetLength());

View File

@ -110,6 +110,8 @@ Test::CodecSettings(int width, int height, WebRtc_UWord32 frameRate /*=30*/, Web
{
_bitRate = 600;
}
_inst.codecType = kVideoCodecVP8;
_inst.codecSpecific.VP8.feedbackModeOn = true;
_inst.maxFramerate = (unsigned char)frameRate;
_inst.startBitrate = (int)_bitRate;
_inst.maxBitrate = 8000;
@ -494,7 +496,7 @@ double Test::ActualBitRate(int nFrames)
return 8.0 * _sumEncBytes / (nFrames / _inst.maxFramerate);
}
bool Test::PacketLoss(double lossRate)
bool Test::PacketLoss(double lossRate, int /*thrown*/)
{
return RandUniform() < lossRate;
}

View File

@ -51,7 +51,7 @@ protected:
static bool SSIMthread(void *ctx);
double ActualBitRate(int nFrames);
static bool PacketLoss(double lossRate);
virtual bool PacketLoss(double lossRate, int /*thrown*/);
static double RandUniform() { return (std::rand() + 1.0)/(RAND_MAX + 1.0); }
static void VideoBufferToRawImage(TestVideoBuffer& videoBuffer,
webrtc::RawImage &image);

View File

@ -30,6 +30,8 @@ struct vpx_codec_cx_pkt;
namespace webrtc
{
class ReferencePictureSelection;
/******************************/
/* VP8Encoder class */
/******************************/
@ -98,15 +100,18 @@ public:
virtual WebRtc_Word32 RegisterEncodeCompleteCallback(EncodedImageCallback*
callback);
// Inform the encoder of the new packet loss rate in the network
// Inform the encoder of the new packet loss rate and the round-trip time of the
// network.
//
// - packetLoss : Fraction lost
// (loss rate in percent = 100 * packetLoss / 255)
// - rtt : Round-trip time in milliseconds
// Return value : WEBRTC_VIDEO_CODEC_OK if OK
// <0 - Errors:
// WEBRTC_VIDEO_CODEC_ERROR
//
virtual WebRtc_Word32 SetPacketLoss(WebRtc_UWord32 packetLoss);
virtual WebRtc_Word32 SetChannelParameters(WebRtc_UWord32 packetLoss,
int rtt);
// Inform the encoder about the new target bit rate.
//
@ -154,30 +159,25 @@ private:
// percentage of the per frame bandwidth
WebRtc_UWord32 MaxIntraTarget(WebRtc_UWord32 optimalBuffersize);
EncodedImage _encodedImage;
EncodedImageCallback* _encodedCompleteCallback;
WebRtc_Word32 _width;
WebRtc_Word32 _height;
WebRtc_Word32 _maxBitRateKbit;
WebRtc_UWord32 _maxFrameRate;
bool _inited;
WebRtc_UWord32 _timeStamp;
WebRtc_UWord16 _pictureID;
WebRtc_UWord8 _simulcastIdx;
bool _pictureLossIndicationOn;
bool _feedbackModeOn;
bool _nextRefIsGolden;
bool _lastAcknowledgedIsGolden;
bool _haveReceivedAcknowledgement;
WebRtc_UWord16 _pictureIDLastSentRef;
WebRtc_UWord16 _pictureIDLastAcknowledgedRef;
int _cpuSpeed;
WebRtc_UWord32 _rcMaxIntraTarget;
int _tokenPartitions;
EncodedImage _encodedImage;
EncodedImageCallback* _encodedCompleteCallback;
WebRtc_Word32 _width;
WebRtc_Word32 _height;
WebRtc_Word32 _maxBitRateKbit;
WebRtc_UWord32 _maxFrameRate;
bool _inited;
WebRtc_UWord32 _timeStamp;
WebRtc_UWord16 _pictureID;
WebRtc_UWord8 _simulcastIdx;
bool _feedbackModeOn;
int _cpuSpeed;
WebRtc_UWord32 _rcMaxIntraTarget;
int _tokenPartitions;
ReferencePictureSelection* _rps;
vpx_codec_ctx_t* _encoder;
vpx_codec_enc_cfg_t* _cfg;
vpx_image_t* _raw;
vpx_codec_ctx_t* _encoder;
vpx_codec_enc_cfg_t* _cfg;
vpx_image_t* _raw;
};// end of VP8Encoder class
/******************************/
@ -269,6 +269,7 @@ private:
int _imageFormat;
vpx_ref_frame_t* _refFrame;
int _propagationCnt;
bool _latestKeyFrameComplete;
};// end of VP8Decoder class

View File

@ -96,15 +96,18 @@ public:
virtual WebRtc_Word32 RegisterEncodeCompleteCallback(
EncodedImageCallback* callback);
// Inform the encoder of the new packet loss rate in the network
// Inform the encoder of the new packet loss rate and round-trip time of the
// network
//
// - packetLoss : Fraction lost
// (loss rate in percent = 100 * packetLoss / 255)
// - rtt : Round-trip time in milliseconds
// Return value : WEBRTC_VIDEO_CODEC_OK if OK
// <0 - Errors:
// WEBRTC_VIDEO_CODEC_ERROR
//
virtual WebRtc_Word32 SetPacketLoss(WebRtc_UWord32 packetLoss);
virtual WebRtc_Word32 SetChannelParameters(WebRtc_UWord32 packetLoss,
int rtt);
// Inform the encoder about the new target bit rate.
//

View File

@ -0,0 +1,131 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "reference_picture_selection.h"
#include "typedefs.h"
#include "vpx/vpx_encoder.h"
#include "vpx/vp8cx.h"
namespace webrtc {
// Constructs the selector in its initial state.
// All mutable state is set by Init() so that construction and later
// re-initialization can never diverge (the original duplicated every
// assignment of Init() in the initializer list).
ReferencePictureSelection::ReferencePictureSelection()
    : kRttConfidence(1.33) {  // Safety margin applied to the measured RTT.
  Init();
}
// Restores the selector to its freshly-constructed state. Call before first
// use and whenever the encoder is re-initialized.
void ReferencePictureSelection::Init() {
  // Reference-buffer bookkeeping: start by targeting the golden buffer and
  // with no established reference.
  update_golden_next_ = true;
  established_golden_ = false;
  established_ref_picture_id_ = 0;
  // Acknowledgement state: nothing has been acked yet.
  received_ack_ = false;
  last_sent_ref_picture_id_ = 0;
  // Timing state, in RTP timestamp units.
  last_sent_ref_update_time_ = 0;
  last_refresh_time_ = 0;
  rtt_ = 0;
}
// Handles a reference picture selection indication from the remote decoder.
// If it acknowledges the last reference frame we sent, that frame becomes the
// new established reference and the golden/alt-ref roles swap for the next
// update.
void ReferencePictureSelection::ReceivedRPSI(int rpsi_picture_id) {
  // RPSI is assumed to carry the picture id in 14 bits, so compare modulo
  // 2^14.
  const int kPictureIdMask = 0x3fff;
  if ((rpsi_picture_id & kPictureIdMask) !=
      (last_sent_ref_picture_id_ & kPictureIdMask)) {
    // Acknowledgement for some other frame; nothing to do.
    return;
  }
  // Remote peer has received our last reference frame; switch frame type.
  received_ack_ = true;
  established_golden_ = update_golden_next_;
  update_golden_next_ = !update_golden_next_;
  established_ref_picture_id_ = last_sent_ref_picture_id_;
}
// Handles a slice loss indication. Returns true when a refresh frame (one
// predicting only from the established reference) should be encoded.
// |now_ts| is the RTP timestamp corresponding to the current time.
bool ReferencePictureSelection::ReceivedSLI(uint32_t now_ts) {
  // Rate-limit refreshes to at most one per round-trip time: the receiver
  // signals an SLI for every corrupt frame, so reacting to each one would
  // produce far too frequent refreshes.
  if (TimestampDiff(now_ts, last_refresh_time_) <= rtt_)
    return false;
  last_refresh_time_ = now_ts;
  return true;
}
// Computes the VP8 encoder flags for the next frame.
// |picture_id|  : picture id of the frame about to be encoded.
// |send_refresh|: true if a decoder refresh should be encoded (typically the
//                 result of ReceivedSLI()).
// |now_ts|      : RTP timestamp corresponding to the current time.
// Returns a bitmask of VP8_EFLAG_* values to hand to libvpx. May also update
// the internal bookkeeping of which frame was last sent as a reference.
int ReferencePictureSelection::EncodeFlags(int picture_id, bool send_refresh,
                                           uint32_t now_ts) {
  int flags = 0;
  // We can't refresh the decoder until we have established the key frame.
  if (send_refresh && received_ack_) {
    flags |= VP8_EFLAG_NO_REF_LAST;  // Don't reference the last frame.
    // Predict only from the established reference buffer.
    if (established_golden_)
      flags |= VP8_EFLAG_NO_REF_ARF;  // Don't reference the alt-ref frame.
    else
      flags |= VP8_EFLAG_NO_REF_GF;  // Don't reference the golden frame.
  }

  // Make sure we don't update the reference frames too often. We must wait
  // long enough for an RPSI to arrive after the decoder decoded the reference
  // frame. Ideally that should happen after one round-trip time. Add a margin
  // defined by |kRttConfidence|.
  uint32_t update_interval = kRttConfidence * rtt_;
  if (update_interval < kMinUpdateInterval)
    update_interval = kMinUpdateInterval;
  // Don't send reference frame updates until we have an established reference.
  if (TimestampDiff(now_ts, last_sent_ref_update_time_) > update_interval &&
      received_ack_) {
    flags |= VP8_EFLAG_NO_REF_LAST;  // Don't reference the last frame.
    if (update_golden_next_) {
      flags |= VP8_EFLAG_FORCE_GF;    // Update the golden reference.
      flags |= VP8_EFLAG_NO_UPD_ARF;  // Don't update alt-ref.
      flags |= VP8_EFLAG_NO_REF_GF;   // Don't reference the golden frame.
    } else {
      flags |= VP8_EFLAG_FORCE_ARF;   // Update the alt-ref reference.
      flags |= VP8_EFLAG_NO_UPD_GF;   // Don't update the golden frame.
      flags |= VP8_EFLAG_NO_REF_ARF;  // Don't reference the alt-ref frame.
    }
    // Remember which frame was propagated, so a later RPSI for it can be
    // matched in ReceivedRPSI().
    last_sent_ref_picture_id_ = picture_id;
    last_sent_ref_update_time_ = now_ts;
  } else {
    // No update of golden or alt-ref. We can therefore freely reference the
    // established reference frame and the last frame.
    if (established_golden_)
      flags |= VP8_EFLAG_NO_REF_ARF;  // Don't reference the alt-ref frame.
    else
      flags |= VP8_EFLAG_NO_REF_GF;   // Don't reference the golden frame.
    flags |= VP8_EFLAG_NO_UPD_GF;     // Don't update the golden frame.
    flags |= VP8_EFLAG_NO_UPD_ARF;    // Don't update the alt-ref frame.
  }
  return flags;
}
// Records that the frame with picture id |picture_id| was encoded as a key
// frame. A key frame updates every reference buffer, so any previous RPSI
// acknowledgement no longer applies and a new one must be awaited.
void ReferencePictureSelection::EncodedKeyFrame(int picture_id) {
  received_ack_ = false;
  last_sent_ref_picture_id_ = picture_id;
}
// Stores the sender-receiver round-trip time. |rtt| is given in milliseconds
// and is kept internally in RTP timestamp ticks so it can be compared
// directly against frame timestamps.
void ReferencePictureSelection::SetRtt(int rtt) {
  const int kTicksPerMs = 90;  // RTP video timestamps run at 90 kHz.
  rtt_ = kTicksPerMs * rtt;
}
// Returns the forward difference |new_ts| - |old_ts| between two RTP
// timestamps, correctly handling wrap-around of the 32-bit timestamp.
uint32_t ReferencePictureSelection::TimestampDiff(uint32_t new_ts,
                                                  uint32_t old_ts) {
  // Unsigned subtraction is defined modulo 2^32, so it already yields the
  // wrap-compensated difference; the original's explicit int64 "+ 2^32"
  // correction computed exactly this value and was redundant.
  return new_ts - old_ts;
}
} // namespace webrtc

View File

@ -0,0 +1,78 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* This file defines classes for doing reference picture selection, primarily
* with VP8.
*/
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_REFERENCE_PICTURE_SELECTION_H_
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_REFERENCE_PICTURE_SELECTION_H_
#include "typedefs.h"
namespace webrtc {
// Helper for VP8 reference picture selection (RPS): translates RPSI/SLI
// feedback from the remote decoder into libvpx encode flags. Not thread
// safe; intended to be owned and driven by a single encoder instance.
class ReferencePictureSelection {
 public:
  ReferencePictureSelection();
  // Resets all state. Must be called before first use and whenever the
  // owning encoder is (re)initialized.
  void Init();

  // Report a received reference picture selection indication. This will
  // introduce a new established reference if the received RPSI isn't too late.
  void ReceivedRPSI(int rpsi_picture_id);

  // Report a received slice loss indication. Returns true if a refresh frame
  // must be sent to the receiver, which is accomplished by only predicting
  // from the established reference.
  // |now_ts| is the RTP timestamp corresponding to the current time. Typically
  // the capture timestamp of the frame currently being processed.
  // Returns true if it's time to encode a decoder refresh, otherwise false.
  bool ReceivedSLI(uint32_t now_ts);

  // Returns the recommended VP8 encode flags needed. May refresh the decoder
  // and/or update the reference buffers.
  // |picture_id| picture id of the frame to be encoded.
  // |send_refresh| should be set to true if a decoder refresh should be
  // encoded, otherwise false.
  // |now_ts| is the RTP timestamp corresponding to the current time. Typically
  // the capture timestamp of the frame currently being processed.
  // Returns the flags to be given to the libvpx encoder when encoding the next
  // frame.
  int EncodeFlags(int picture_id, bool send_refresh, uint32_t now_ts);

  // Notify the RPS that the frame with picture id |picture_id| was encoded as
  // a key frame, effectively updating all reference buffers.
  void EncodedKeyFrame(int picture_id);

  // Set the round-trip time between the sender and the receiver to |rtt|
  // milliseconds.
  void SetRtt(int rtt);

 private:
  // Forward difference between two RTP timestamps, wrap-safe.
  static uint32_t TimestampDiff(uint32_t new_ts, uint32_t old_ts);

  // The minimum time between reference frame updates.
  enum { kMinUpdateInterval = 90 * 10 };  // Timestamp frequency
  // Margin multiplied onto the RTT before a new reference update is allowed.
  const double kRttConfidence;

  bool update_golden_next_;       // True if the next update targets golden.
  bool established_golden_;       // True if golden is the established ref.
  bool received_ack_;             // True once an RPSI acked our reference.
  int last_sent_ref_picture_id_;  // Picture id of the last sent reference.
  uint32_t last_sent_ref_update_time_;  // Timestamp of last reference update.
  int established_ref_picture_id_;      // Picture id of the established ref.
  uint32_t last_refresh_time_;          // Timestamp of last decoder refresh.
  uint32_t rtt_;                        // RTT in 90 kHz timestamp ticks.
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_REFERENCE_PICTURE_SELECTION_H_

View File

@ -0,0 +1,107 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "gtest/gtest.h"
#include "reference_picture_selection.h"
#include "vpx/vpx_encoder.h"
#include "vpx/vp8cx.h"
using webrtc::ReferencePictureSelection;
// The minimum time between reference frame updates, in milliseconds. Should
// match kMinUpdateInterval in reference_picture_selection.h, where it is
// expressed in 90 kHz timestamp ticks (90 * 10).
enum { kMinUpdateInterval = 10 };
// The simulated round-trip time in milliseconds, handed to the RPS via
// SetRtt(). Since refreshes are rate-limited to one per RTT, it is also the
// minimum time between decoder refreshes through restricted prediction.
enum { kRtt = 10 };

// Expected VP8 encode-flag combinations for the various RPS decisions.
enum {
  // Delta frame predicting from the established reference; no buffer updates.
  kNoPropagationGolden = VP8_EFLAG_NO_REF_ARF |
                         VP8_EFLAG_NO_UPD_GF |
                         VP8_EFLAG_NO_UPD_ARF,
  kNoPropagationAltRef = VP8_EFLAG_NO_REF_GF |
                         VP8_EFLAG_NO_UPD_GF |
                         VP8_EFLAG_NO_UPD_ARF,
  // Frame that becomes the next reference (golden or alt-ref update).
  kPropagateGolden = VP8_EFLAG_FORCE_GF |
                     VP8_EFLAG_NO_UPD_ARF |
                     VP8_EFLAG_NO_REF_GF |
                     VP8_EFLAG_NO_REF_LAST,
  kPropagateAltRef = VP8_EFLAG_FORCE_ARF |
                     VP8_EFLAG_NO_UPD_GF |
                     VP8_EFLAG_NO_REF_ARF |
                     VP8_EFLAG_NO_REF_LAST,
  // Decoder refresh: predict only from the established reference.
  kRefreshFromGolden = VP8_EFLAG_NO_REF_LAST |
                       VP8_EFLAG_NO_REF_ARF,
  kRefreshFromAltRef = VP8_EFLAG_NO_REF_LAST |
                       VP8_EFLAG_NO_REF_GF
};
// Fixture that starts each test from an established reference: a key frame
// with picture id 0 has been sent and acknowledged via RPSI, and the RTT is
// set to kRtt milliseconds.
class TestRPS : public ::testing::Test {
 protected:
  virtual void SetUp() {
    rps_.Init();
    // Initialize with sending a key frame and acknowledging it.
    rps_.EncodedKeyFrame(0);
    rps_.ReceivedRPSI(0);
    rps_.SetRtt(kRtt);
  }

  ReferencePictureSelection rps_;
};
// Each acknowledged reference propagation should swap which buffer (golden /
// alt-ref) is updated next; when not enough time has passed since the last
// propagation, no buffer update should be requested. Times below are in
// milliseconds and multiplied by 90 to form RTP timestamps.
TEST_F(TestRPS, TestPropagateReferenceFrames) {
  // Should propagate the alt-ref reference.
  uint32_t time = (4 * kMinUpdateInterval) / 3 + 1;
  EXPECT_EQ(rps_.EncodeFlags(1, false, 90 * time), kPropagateAltRef);
  rps_.ReceivedRPSI(1);
  time += (4 * (time + kMinUpdateInterval)) / 3 + 1;
  // Should propagate the golden reference.
  EXPECT_EQ(rps_.EncodeFlags(2, false, 90 * time), kPropagateGolden);
  rps_.ReceivedRPSI(2);
  // Should propagate the alt-ref reference.
  time = (4 * (time + kMinUpdateInterval)) / 3 + 1;
  EXPECT_EQ(rps_.EncodeFlags(3, false, 90 * time), kPropagateAltRef);
  rps_.ReceivedRPSI(3);
  // Shouldn't propagate any reference frames (except last), and the established
  // reference is alt-ref.
  time = time + kMinUpdateInterval;
  EXPECT_EQ(rps_.EncodeFlags(4, false, 90 * time), kNoPropagationAltRef);
}
// Decoder refreshes must be rate-limited to one per RTT, and refresh flags
// should combine with reference propagation only when enough time has passed
// since the last propagation.
TEST_F(TestRPS, TestDecoderRefresh) {
  uint32_t time = kRtt + 1;
  // No more than one refresh per RTT.
  EXPECT_EQ(rps_.ReceivedSLI(90 * time), true);
  time += 5;
  EXPECT_EQ(rps_.ReceivedSLI(90 * time), false);
  time += kRtt - 4;
  EXPECT_EQ(rps_.ReceivedSLI(90 * time), true);
  // Enough time have elapsed since the previous reference propagation, we will
  // therefore get both a refresh from golden and a propagation of alt-ref.
  EXPECT_EQ(rps_.EncodeFlags(5, true, 90 * time), kRefreshFromGolden |
            kPropagateAltRef);
  rps_.ReceivedRPSI(5);
  time += kRtt + 1;
  // Enough time for a new refresh, but not enough time for a reference
  // propagation.
  EXPECT_EQ(rps_.ReceivedSLI(90 * time), true);
  EXPECT_EQ(rps_.EncodeFlags(6, true, 90 * time), kRefreshFromAltRef |
            kNoPropagationAltRef);
}
// Timestamp comparisons must behave correctly across the 32-bit RTP
// timestamp wrap-around (0xffffffff -> 0).
TEST_F(TestRPS, TestWrap) {
  EXPECT_EQ(rps_.ReceivedSLI(0xffffffff), true);
  // Only a couple of ticks after the wrap: still within one RTT.
  EXPECT_EQ(rps_.ReceivedSLI(1), false);
  EXPECT_EQ(rps_.ReceivedSLI(90 * 100), true);
  EXPECT_EQ(rps_.EncodeFlags(7, false, 0xffffffff), kPropagateAltRef);
  EXPECT_EQ(rps_.EncodeFlags(8, false, 1), kNoPropagationGolden);
  EXPECT_EQ(rps_.EncodeFlags(10, false, 90 * 100), kPropagateAltRef);
}

View File

@ -15,21 +15,20 @@
*
*/
#include "vp8.h"
#include "tick_util.h"
#include "vpx/vpx_encoder.h"
#include "vpx/vpx_decoder.h"
#include "vpx/vp8cx.h"
#include "vpx/vp8dx.h"
#include <stdlib.h>
#include <string.h>
#include <time.h>
#include "module_common_types.h"
#include "reference_picture_selection.h"
#include "tick_util.h"
#include "vpx/vpx_encoder.h"
#include "vpx/vpx_decoder.h"
#include "vpx/vp8cx.h"
#include "vpx/vp8dx.h"
enum { kVp8ErrorPropagationTh = 30 };
//#define DEV_PIC_LOSS
namespace webrtc
{
@ -44,26 +43,23 @@ VP8Encoder::VP8Encoder():
_timeStamp(0),
_pictureID(0),
_simulcastIdx(0),
_pictureLossIndicationOn(false),
_feedbackModeOn(false),
_nextRefIsGolden(true),
_lastAcknowledgedIsGolden(true),
_haveReceivedAcknowledgement(false),
_pictureIDLastSentRef(0),
_pictureIDLastAcknowledgedRef(0),
_cpuSpeed(-6), // default value
_rcMaxIntraTarget(0),
_tokenPartitions(VP8_ONE_TOKENPARTITION),
_rps(new ReferencePictureSelection),
_encoder(NULL),
_cfg(NULL),
_raw(NULL)
{
srand((WebRtc_UWord32)TickTime::MillisecondTimestamp());
WebRtc_UWord32 seed = (WebRtc_UWord32)TickTime::MillisecondTimestamp();
srand(seed);
}
VP8Encoder::~VP8Encoder()
{
Release();
delete _rps;
}
WebRtc_Word32
@ -139,8 +135,8 @@ VP8Encoder::Reset()
}
_timeStamp = 0;
_encoder = new vpx_codec_ctx_t;
_rps->Init();
return InitAndSetControlSettings();
}
@ -218,11 +214,7 @@ VP8Encoder::InitEncode(const VideoCodec* inst,
{
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
#ifdef DEV_PIC_LOSS
// we need to know if we use feedback
_feedbackModeOn = inst->codecSpecific.VP8.feedbackModeOn;
_pictureLossIndicationOn = inst->codecSpecific.VP8.pictureLossIndicationOn;
#endif
WebRtc_Word32 retVal = Release();
if (retVal < 0)
@ -325,15 +317,13 @@ VP8Encoder::InitEncode(const VideoCodec* inst,
// set the maximum target size of any key-frame.
_rcMaxIntraTarget = MaxIntraTarget(_cfg->rc_buf_optimal_sz);
#ifdef DEV_PIC_LOSS
// this can only be off if we know we use feedback
if (_pictureLossIndicationOn)
if (_feedbackModeOn)
{
// don't generate key frame unless we tell you
// Disable periodic key frames if we get feedback from the decoder
// through SLI and RPSI.
_cfg->kf_mode = VPX_KF_DISABLED;
}
else
#endif
{
_cfg->kf_mode = VPX_KF_AUTO;
_cfg->kf_max_dist = 3000;
@ -363,6 +353,8 @@ VP8Encoder::InitEncode(const VideoCodec* inst,
}
}
_rps->Init();
return InitAndSetControlSettings();
@ -452,125 +444,23 @@ VP8Encoder::Encode(const RawImage& inputImage,
_raw->planes[PLANE_V] = &inputImage._buffer[_height * _width * 5 >> 2];
int flags = 0;
if (frameTypes && *frameTypes == kKeyFrame)
{
flags |= VPX_EFLAG_FORCE_KF; // will update both golden and altref
_encodedImage._frameType = kKeyFrame;
_pictureIDLastSentRef = _pictureID;
}
else
{
#ifdef DEV_PIC_LOSS
if (_feedbackModeOn && codecSpecificInfo)
{
const CodecSpecificInfo* info = static_cast<const
CodecSpecificInfo*>(codecSpecificInfo);
if (info->codecType == kVideoCodecVP8)
{
// codecSpecificInfo will contain received RPSI and SLI
// picture IDs. This will help us decide on when to switch type
// of reference frame
// if we receive SLI
// force using an old golden or altref as a reference
if (info->codecSpecific.VP8.hasReceivedSLI)
{
// if this is older than my last acked ref we can ignore it
// info->codecSpecific.VP8.pictureIdSLI valid 6 bits => 64 frames
// since picture id can wrap check if in between our last sent and last acked
bool sendRefresh = false;
// check for a wrap in picture ID
if ((_pictureIDLastAcknowledgedRef & 0x3f) > (_pictureID & 0x3f))
{
// we have a wrap
if ( info->codecSpecific.VP8.pictureIdSLI > (_pictureIDLastAcknowledgedRef&0x3f)||
info->codecSpecific.VP8.pictureIdSLI < (_pictureID & 0x3f))
{
sendRefresh = true;
}
}
else if (info->codecSpecific.VP8.pictureIdSLI > (_pictureIDLastAcknowledgedRef&0x3f)&&
info->codecSpecific.VP8.pictureIdSLI < (_pictureID & 0x3f))
{
sendRefresh = true;
}
// right now we could also ignore it if it's older than our last sent ref since
// last sent ref only refers back to last acked
// _pictureIDLastSentRef;
if (sendRefresh)
{
flags |= VP8_EFLAG_NO_REF_LAST; // Don't reference the last frame
if (_haveReceivedAcknowledgement)
{
// we cant set this if we refer to a key frame
if (_lastAcknowledgedIsGolden)
{
flags |= VP8_EFLAG_NO_REF_ARF; // Don't reference the alternate reference frame
}
else
{
flags |= VP8_EFLAG_NO_REF_GF; // Don't reference the golden frame
}
}
}
}
if (info->codecSpecific.VP8.hasReceivedRPSI)
{
if ((info->codecSpecific.VP8.pictureIdRPSI & 0x3fff) == (_pictureIDLastSentRef & 0x3fff)) // compare 14 bits
{
// remote peer have received our last reference frame
// switch frame type
_haveReceivedAcknowledgement = true;
_nextRefIsGolden = !_nextRefIsGolden;
_pictureIDLastAcknowledgedRef = _pictureIDLastSentRef;
}
}
}
const WebRtc_UWord16 periodX = 64; // we need a period X to decide on the distance between golden and altref
if (_pictureID % periodX == 0)
{
// only required if we have had a loss
// however we don't acknowledge a SLI so if that is lost it's no good
flags |= VP8_EFLAG_NO_REF_LAST; // Don't reference the last frame
if (_nextRefIsGolden)
{
flags |= VP8_EFLAG_FORCE_GF; // force a golden
flags |= VP8_EFLAG_NO_UPD_ARF; // don't update altref
if (_haveReceivedAcknowledgement)
{
// we can't set this if we refer to a key frame
// pw temporary as proof of concept
flags |= VP8_EFLAG_NO_REF_GF; // Don't reference the golden frame
}
}
else
{
flags |= VP8_EFLAG_FORCE_ARF; // force an altref
flags |= VP8_EFLAG_NO_UPD_GF; // Don't update golden
if (_haveReceivedAcknowledgement)
{
// we can't set this if we refer to a key frame
// pw temporary as proof of concept
flags |= VP8_EFLAG_NO_REF_ARF; // Don't reference the alternate reference frame
}
}
// remember our last reference frame
_pictureIDLastSentRef = _pictureID;
}
else
{
flags |= VP8_EFLAG_NO_UPD_GF; // don't update golden
flags |= VP8_EFLAG_NO_UPD_ARF; // don't update altref
}
if (frameTypes && *frameTypes == kKeyFrame) {
// Key frame request from caller.
// Will update both golden and alt-ref.
flags |= VPX_EFLAG_FORCE_KF;
} else if (_feedbackModeOn && codecSpecificInfo) {
// Handle RPSI and SLI messages and set up the appropriate encode flags.
bool sendRefresh = false;
if (codecSpecificInfo->codecType == kVideoCodecVP8) {
if (codecSpecificInfo->codecSpecific.VP8.hasReceivedRPSI) {
_rps->ReceivedRPSI(
codecSpecificInfo->codecSpecific.VP8.pictureIdRPSI);
}
#endif
_encodedImage._frameType = kDeltaFrame;
if (codecSpecificInfo->codecSpecific.VP8.hasReceivedSLI) {
sendRefresh = _rps->ReceivedSLI(inputImage._timeStamp);
}
}
flags = _rps->EncodeFlags(_pictureID, sendRefresh, inputImage._timeStamp);
}
// TODO(holmer): Ideally the duration should be the timestamp diff of this
@ -611,6 +501,7 @@ WebRtc_Word32
VP8Encoder::GetEncodedFrame(const RawImage& input_image)
{
vpx_codec_iter_t iter = NULL;
_encodedImage._frameType = kDeltaFrame;
const vpx_codec_cx_pkt_t *pkt= vpx_codec_get_cx_data(_encoder, &iter); // no lagging => 1 frame at a time
if (pkt == NULL && !_encoder->err)
{
@ -632,6 +523,7 @@ VP8Encoder::GetEncodedFrame(const RawImage& input_image)
if (pkt->data.frame.flags & VPX_FRAME_IS_KEY)
{
_encodedImage._frameType = kKeyFrame;
_rps->EncodedKeyFrame(_pictureID);
}
if (_encodedImage._length > 0)
@ -672,6 +564,7 @@ VP8Encoder::GetEncodedPartitions(const RawImage& input_image) {
vpx_codec_iter_t iter = NULL;
int part_idx = 0;
_encodedImage._length = 0;
_encodedImage._frameType = kDeltaFrame;
RTPFragmentationHeader frag_info;
frag_info.VerifyAndAllocateFragmentationHeader((1 << _tokenPartitions) + 1);
CodecSpecificInfo codecSpecific;
@ -701,6 +594,7 @@ VP8Encoder::GetEncodedPartitions(const RawImage& input_image) {
if (pkt->data.frame.flags & VPX_FRAME_IS_KEY)
{
_encodedImage._frameType = kKeyFrame;
_rps->EncodedKeyFrame(_pictureID);
}
PopulateCodecSpecific(&codecSpecific, *pkt);
break;
@ -718,9 +612,9 @@ VP8Encoder::GetEncodedPartitions(const RawImage& input_image) {
#endif
WebRtc_Word32
VP8Encoder::SetPacketLoss(WebRtc_UWord32 packetLoss)
{
return WEBRTC_VIDEO_CODEC_OK;
VP8Encoder::SetChannelParameters(WebRtc_UWord32 packetLoss, int rtt) {
_rps->SetRtt(rtt);
return WEBRTC_VIDEO_CODEC_OK;
}
WebRtc_Word32
@ -740,7 +634,8 @@ VP8Decoder::VP8Decoder():
_lastKeyFrame(),
_imageFormat(VPX_IMG_FMT_NONE),
_refFrame(NULL),
_propagationCnt(-1)
_propagationCnt(-1),
_latestKeyFrameComplete(false)
{
}
@ -768,6 +663,7 @@ VP8Decoder::Reset()
InitDecode(NULL, _numCores);
}
_propagationCnt = -1;
_latestKeyFrameComplete = false;
return WEBRTC_VIDEO_CODEC_OK;
}
@ -785,12 +681,9 @@ VP8Decoder::InitDecode(const VideoCodec* inst,
{
_decoder = new vpx_dec_ctx_t;
}
#ifdef DEV_PIC_LOSS
if(inst && inst->codecType == kVideoCodecVP8)
{
_feedbackModeOn = inst->codecSpecific.VP8.feedbackModeOn;
if (inst && inst->codecType == kVideoCodecVP8) {
_feedbackModeOn = inst->codecSpecific.VP8.feedbackModeOn;
}
#endif
vpx_codec_dec_cfg_t cfg;
// Setting number of threads to a constant value (1)
@ -805,9 +698,9 @@ VP8Decoder::InitDecode(const VideoCodec* inst,
#endif
#endif
flags |= VPX_CODEC_USE_POSTPROC;
flags |= VPX_CODEC_USE_POSTPROC;
if (vpx_codec_dec_init(_decoder, vpx_codec_vp8_dx(), NULL, flags))
if (vpx_codec_dec_init(_decoder, vpx_codec_vp8_dx(), &cfg, flags))
{
return WEBRTC_VIDEO_CODEC_MEMORY;
}
@ -828,6 +721,7 @@ VP8Decoder::InitDecode(const VideoCodec* inst,
}
_numCores = numberOfCores;
_propagationCnt = -1;
_latestKeyFrameComplete = false;
_inited = true;
return WEBRTC_VIDEO_CODEC_OK;
@ -848,16 +742,6 @@ VP8Decoder::Decode(const EncodedImage& inputImage,
{
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
if (inputImage._completeFrame == false)
{
// future improvement
// we can't decode this frame
if (_feedbackModeOn)
{
return WEBRTC_VIDEO_CODEC_ERR_REQUEST_SLI;
}
// otherwise allow for incomplete frames to be decoded.
}
if (inputImage._buffer == NULL && inputImage._length > 0)
{
// Reset to avoid requesting key frames too often.
@ -873,25 +757,28 @@ VP8Decoder::Decode(const EncodedImage& inputImage,
}
#endif
// Restrict error propagation
// Reset on a key frame refresh
if (inputImage._frameType == kKeyFrame && inputImage._completeFrame)
// Restrict error propagation using key frame requests. Disabled when
// the feedback mode is enabled (RPS).
// Reset on a key frame refresh.
if (!_feedbackModeOn) {
if (inputImage._frameType == kKeyFrame && inputImage._completeFrame)
_propagationCnt = -1;
// Start count on first loss
else if ((!inputImage._completeFrame || missingFrames) &&
_propagationCnt == -1)
// Start count on first loss.
else if ((!inputImage._completeFrame || missingFrames) &&
_propagationCnt == -1)
_propagationCnt = 0;
if (_propagationCnt >= 0)
if (_propagationCnt >= 0)
_propagationCnt++;
}
vpx_dec_iter_t iter = NULL;
vpx_image_t* img;
WebRtc_Word32 ret;
// check for missing frames
// Check for missing frames.
if (missingFrames)
{
// call decoder with zero data length to signal missing frames
// Call decoder with zero data length to signal missing frames.
if (vpx_codec_decode(_decoder, NULL, 0, 0, VPX_DL_REALTIME))
{
// Reset to avoid requesting key frames too often.
@ -915,7 +802,7 @@ VP8Decoder::Decode(const EncodedImage& inputImage,
WebRtc_UWord8* buffer = inputImage._buffer;
if (inputImage._length == 0)
{
buffer = NULL; // Triggers full frame concealment
buffer = NULL; // Triggers full frame concealment.
}
if (vpx_codec_decode(_decoder,
buffer,
@ -958,25 +845,6 @@ VP8Decoder::Decode(const EncodedImage& inputImage,
_lastKeyFrame._length = bytesToCopy;
}
int lastRefUpdates = 0;
#ifdef DEV_PIC_LOSS
if (vpx_codec_control(_decoder, VP8D_GET_LAST_REF_UPDATES, &lastRefUpdates))
{
// Reset to avoid requesting key frames too often.
if (_propagationCnt > 0)
_propagationCnt = 0;
return WEBRTC_VIDEO_CODEC_ERROR;
}
int corrupted = 0;
if (vpx_codec_control(_decoder, VP8D_GET_FRAME_CORRUPTED, &corrupted))
{
// Reset to avoid requesting key frames too often.
if (_propagationCnt > 0)
_propagationCnt = 0;
return WEBRTC_VIDEO_CODEC_ERROR;
}
#endif
img = vpx_codec_get_frame(_decoder, &iter);
ret = ReturnFrame(img, inputImage._timeStamp);
if (ret != 0)
@ -987,37 +855,49 @@ VP8Decoder::Decode(const EncodedImage& inputImage,
return ret;
}
// we need to communicate that we should send a RPSI with a specific picture ID
if (_feedbackModeOn) {
// Whenever we receive an incomplete key frame all reference buffers will
// be corrupt. If that happens we must request new key frames until we
// decode a complete one.
if (inputImage._frameType == kKeyFrame)
_latestKeyFrameComplete = inputImage._completeFrame;
if (!_latestKeyFrameComplete)
return WEBRTC_VIDEO_CODEC_ERROR;
// TODO(pw): How do we know whether it's a golden or alt reference frame?
// libvpx will provide an API; for now this is added temporarily.
WebRtc_Word16 pictureId = -1;
if (codecSpecificInfo) {
// Check for reference updates and last reference buffer corruption and
// signal successful reference propagation or frame corruption to the
// encoder.
int referenceUpdates = 0;
if (vpx_codec_control(_decoder, VP8D_GET_LAST_REF_UPDATES,
&referenceUpdates)) {
// Reset to avoid requesting key frames too often.
if (_propagationCnt > 0)
_propagationCnt = 0;
return WEBRTC_VIDEO_CODEC_ERROR;
}
int corrupted = 0;
if (vpx_codec_control(_decoder, VP8D_GET_FRAME_CORRUPTED, &corrupted)) {
// Reset to avoid requesting key frames too often.
if (_propagationCnt > 0)
_propagationCnt = 0;
return WEBRTC_VIDEO_CODEC_ERROR;
}
WebRtc_Word16 pictureId = -1;
if (codecSpecificInfo) {
pictureId = codecSpecificInfo->codecSpecific.VP8.pictureId;
}
if (pictureId > -1)
{
if ((lastRefUpdates & VP8_GOLD_FRAME)
|| (lastRefUpdates & VP8_ALTR_FRAME))
{
if (!missingFrames && (inputImage._completeFrame == true))
//if (!corrupted) // TODO(pw): Can we engage this line instead of
// the above?
{
_decodeCompleteCallback->ReceivedDecodedReferenceFrame(
pictureId);
}
}
if (pictureId > -1) {
if (((referenceUpdates & VP8_GOLD_FRAME) ||
(referenceUpdates & VP8_ALTR_FRAME)) && !corrupted) {
_decodeCompleteCallback->ReceivedDecodedReferenceFrame(pictureId);
}
_decodeCompleteCallback->ReceivedDecodedFrame(pictureId);
}
#ifdef DEV_PIC_LOSS
if (corrupted)
{
}
if (corrupted) {
// we can decode but with artifacts
return WEBRTC_VIDEO_CODEC_REQUEST_SLI;
}
}
#endif
// Check Vs. threshold
if (_propagationCnt > kVp8ErrorPropagationTh)

View File

@ -46,6 +46,8 @@
],
},
'sources': [
'reference_picture_selection.h',
'reference_picture_selection.cc',
'../interface/vp8.h',
'../interface/vp8_simulcast.h',
'vp8.cc',
@ -71,18 +73,36 @@
'sources': [
# header files
'../test/benchmark.h',
'../test/dual_decoder_test.h',
'../test/normal_async_test.h',
'../test/packet_loss_test.h',
'../test/rps_test.h',
'../test/unit_test.h',
'../test/dual_decoder_test.h',
# source files
'../test/benchmark.cc',
'../test/dual_decoder_test.cc',
'../test/normal_async_test.cc',
'../test/packet_loss_test.cc',
'../test/rps_test.cc',
'../test/tester.cc',
'../test/unit_test.cc',
'../test/dual_decoder_test.cc',
],
},
{
'target_name': 'vp8_unittests',
'type': 'executable',
'dependencies': [
'<(webrtc_root)/../test/test.gyp:test_support_main',
'<(webrtc_root)/../testing/gtest.gyp:gtest',
'<(webrtc_root)/../third_party/libvpx/libvpx.gyp:libvpx',
'webrtc_vp8',
],
'include_dirs': [
'<(webrtc_root)/../third_party/libvpx/source/libvpx',
],
'sources': [
'reference_picture_selection_unittest.cc',
],
},
], # targets

View File

@ -234,11 +234,13 @@ WebRtc_Word32 VP8SimulcastEncoder::RegisterEncodeCompleteCallback(
return ret_val;
}
WebRtc_Word32 VP8SimulcastEncoder::SetPacketLoss(WebRtc_UWord32 packetLoss) {
WebRtc_Word32 VP8SimulcastEncoder::SetChannelParameters(
WebRtc_UWord32 packetLoss,
int rtt) {
WebRtc_Word32 ret_val = 0;
for (int i = 0; i < kMaxSimulcastStreams; i++) {
if (encoder_[i]) {
ret_val = encoder_[i]->SetPacketLoss(packetLoss);
ret_val = encoder_[i]->SetChannelParameters(packetLoss, rtt);
if (ret_val < 0) {
WEBRTC_TRACE(webrtc::kTraceError,
webrtc::kTraceVideoCoding,

View File

@ -73,8 +73,10 @@ VP8NormalAsyncTest::CreateEncoderSpecificInfo() const
vp8CodecSpecificInfo->codecType = kVideoCodecVP8;
vp8CodecSpecificInfo->codecSpecific.VP8.hasReceivedRPSI = _hasReceivedRPSI;
vp8CodecSpecificInfo->codecSpecific.VP8.pictureIdRPSI = _pictureIdRPSI;
vp8CodecSpecificInfo->codecSpecific.VP8.hasReceivedSLI = false;
vp8CodecSpecificInfo->codecSpecific.VP8.hasReceivedSLI = _hasReceivedSLI;
vp8CodecSpecificInfo->codecSpecific.VP8.pictureIdSLI = _pictureIdSLI;
_hasReceivedSLI = false;
_hasReceivedRPSI = false;
return vp8CodecSpecificInfo;

View File

@ -23,29 +23,15 @@ PacketLossTest(name, description)
{
}
VP8PacketLossTest::VP8PacketLossTest(double lossRate, bool useNack)
VP8PacketLossTest::VP8PacketLossTest(double lossRate,
bool useNack,
int rttFrames)
:
PacketLossTest("VP8PacketLossTest", "Encode, remove lost packets, decode", lossRate, useNack)
PacketLossTest("VP8PacketLossTest", "Encode, remove lost packets, decode",
lossRate, useNack, rttFrames)
{
}
void
VP8PacketLossTest::CodecSpecific_InitBitrate()
{
assert(_bitRate > 0);
WebRtc_UWord32 simulatedBitRate;
if (_lossProbability != _lossRate)
{
// Simulating NACK
simulatedBitRate = (WebRtc_UWord32)(_bitRate / (1 + _lossRate));
}
else
{
simulatedBitRate = _bitRate;
}
_encoder->SetRates(simulatedBitRate, _inst.maxFramerate);
}
int VP8PacketLossTest::ByteLoss(int size, unsigned char* /* pkg */, int bytesToLose)
{
int retLength = size - bytesToLose;
@ -55,3 +41,34 @@ int VP8PacketLossTest::ByteLoss(int size, unsigned char* /* pkg */, int bytesToL
}
return retLength;
}
// Callback invoked when the decoder reports that a reference frame was
// decoded successfully. Records the picture ID so that the next encoded
// frame can acknowledge it via an RPSI message.
WebRtc_Word32 VP8PacketLossTest::ReceivedDecodedReferenceFrame(
    const WebRtc_UWord64 pictureId) {
  _hasReceivedRPSI = true;
  _pictureIdRPSI = pictureId;
  return 0;
}
// Builds the per-frame codec-specific info handed to the encoder,
// forwarding any pending RPSI/SLI feedback. The caller takes ownership
// of the returned object. The received-feedback flags are consumed
// (reset to false) by this call, so each feedback message is delivered
// to the encoder exactly once.
// NOTE(review): this is a const member function that assigns to
// _hasReceivedSLI/_hasReceivedRPSI — presumably those members are
// declared mutable; confirm in the class declaration.
webrtc::CodecSpecificInfo*
VP8PacketLossTest::CreateEncoderSpecificInfo() const
{
webrtc::CodecSpecificInfo* vp8CodecSpecificInfo =
new webrtc::CodecSpecificInfo();
vp8CodecSpecificInfo->codecType = webrtc::kVideoCodecVP8;
vp8CodecSpecificInfo->codecSpecific.VP8.hasReceivedRPSI = _hasReceivedRPSI;
vp8CodecSpecificInfo->codecSpecific.VP8.pictureIdRPSI = _pictureIdRPSI;
vp8CodecSpecificInfo->codecSpecific.VP8.hasReceivedSLI = _hasReceivedSLI;
vp8CodecSpecificInfo->codecSpecific.VP8.pictureIdSLI = _pictureIdSLI;
_hasReceivedSLI = false;
_hasReceivedRPSI = false;
return vp8CodecSpecificInfo;
}
// Decides whether the next packet of the current frame is lost.
// |lossRate| is the per-packet loss probability in [0, 1]; |numLosses|
// is the number of packets already lost in this frame. Once one packet
// of a frame is lost, all remaining packets of that frame are lost too.
bool VP8PacketLossTest::PacketLoss(double lossRate, int numLosses) {
  return (numLosses != 0) || (RandUniform() < lossRate);
}

View File

@ -17,12 +17,17 @@ class VP8PacketLossTest : public PacketLossTest
{
public:
VP8PacketLossTest();
VP8PacketLossTest(double lossRate, bool useNack);
VP8PacketLossTest(double lossRate, bool useNack, int rttFrames);
protected:
VP8PacketLossTest(std::string name, std::string description);
virtual void CodecSpecific_InitBitrate();
virtual int ByteLoss(int size, unsigned char *pkg, int bytesToLose);
WebRtc_Word32 ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId);
// |lossRate| is the probability of packet loss between 0 and 1.
// |numLosses| is the number of packets already lost in the current frame.
virtual bool PacketLoss(double lossRate, int numLosses);
webrtc::CodecSpecificInfo* CreateEncoderSpecificInfo() const;
};

View File

@ -0,0 +1,307 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "rps_test.h"
#include <assert.h>
#include <string.h> // memcmp
#include <time.h>
#include "vp8.h"
// Constructs an RPS test with an explicit target bit rate. A second
// decoder (decoder2_) is created so a lossy decode path can later be
// compared against the loss-free master decoder.
VP8RpsTest::VP8RpsTest(float bitRate)
: VP8NormalAsyncTest(bitRate),
decoder2_(new webrtc::VP8Decoder),
sli_(false) {
}
// Default constructor: registers the test under its descriptive name
// (test number 1) and creates the second, lossy decoder.
VP8RpsTest::VP8RpsTest()
: VP8NormalAsyncTest("VP8 Reference Picture Selection Test",
"VP8 Reference Picture Selection Test", 1),
decoder2_(new webrtc::VP8Decoder),
sli_(false) {
}
// Releases and deletes the second decoder (owned by this test) and
// frees its output frame buffer.
VP8RpsTest::~VP8RpsTest() {
if (decoder2_) {
decoder2_->Release();
delete decoder2_;
}
decoded_frame2_.Free();
}
// Runs the full RPS test: encodes foreman_cif.yuv with RPS feedback
// enabled and feeds every encoded frame to two decoders. The master
// decoder (_decoder) sees every frame intact, while the second decoder
// (decoder2_) suffers simulated losses inside Decode() and must
// resynchronize via RPSI/SLI feedback. The process exits on setup or
// decode failure; statistics are printed at the end.
void VP8RpsTest::Perform() {
_inname = "test/testFiles/foreman_cif.yuv";
CodecSettings(352, 288, 30, _bitRate);
Setup();
_inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
_decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
decoded_frame2_.VerifyAndAllocate(_lengthSourceFrame);
// Enable RPS functionality
_inst.codecSpecific.VP8.pictureLossIndicationOn = true;
_inst.codecSpecific.VP8.feedbackModeOn = true;
if(_encoder->InitEncode(&_inst, 4, 1460) < 0)
exit(EXIT_FAILURE);
_decoder->InitDecode(&_inst,1);
decoder2_->InitDecode(&_inst,1);
// Wire up the callbacks. Each decoder writes into its own output
// buffer so the two outputs can be compared bit-exactly in Decode().
// decCallback2 is also passed to Encode() so feedback from the lossy
// decoder drives the encoder's reference selection.
FrameQueue frameQueue;
VideoEncodeCompleteCallback encCallback(_encodedFile, &frameQueue, *this);
RpsDecodeCompleteCallback decCallback(&_decodedVideoBuffer);
RpsDecodeCompleteCallback decCallback2(&decoded_frame2_);
_encoder->RegisterEncodeCompleteCallback(&encCallback);
_decoder->RegisterDecodeCompleteCallback(&decCallback);
decoder2_->RegisterDecodeCompleteCallback(&decCallback2);
if (SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK)
exit(EXIT_FAILURE);
// Reset timing and frame counters before the main loop.
_totalEncodeTime = _totalDecodeTime = 0;
_totalEncodePipeTime = _totalDecodePipeTime = 0;
bool complete = false;
_framecnt = 0;
_encFrameCnt = 0;
_decFrameCnt = 0;
_sumEncBytes = 0;
_lengthEncFrame = 0;
double starttime = clock()/(double)CLOCKS_PER_SEC;
// Main loop: encode one source frame, then drain and decode every
// frame the encoder has queued. Encode() returns true at end of input.
while (!complete) {
CodecSpecific_InitBitrate();
complete = Encode(&decCallback2);
if (!frameQueue.Empty() || complete) {
while (!frameQueue.Empty()) {
_frameToDecode =
static_cast<FrameQueueTuple *>(frameQueue.PopFrame());
int lost = DoPacketLoss();
if (lost == 2) {
// Lost the whole frame, continue
_missingFrames = true;
delete _frameToDecode;
_frameToDecode = NULL;
continue;
}
int ret = Decode(lost);
delete _frameToDecode;
_frameToDecode = NULL;
if (ret < 0) {
fprintf(stderr,"\n\nError in decoder: %d\n\n", ret);
exit(EXIT_FAILURE);
}
else if (ret == 0) {
_framecnt++;
}
else {
fprintf(stderr,
"\n\nPositive return value from decode!\n\n");
}
}
}
}
// Report aggregate statistics to stdout and the test log.
// NOTE(review): the averages divide by _encFrameCnt/_decFrameCnt,
// which are zero if no frames were processed — confirm the input file
// is never empty in practice.
double endtime = clock()/(double)CLOCKS_PER_SEC;
double totalExecutionTime = endtime - starttime;
printf("Total execution time: %.1f s\n", totalExecutionTime);
_sumEncBytes = encCallback.EncodedBytes();
double actualBitRate = ActualBitRate(_encFrameCnt) / 1000.0;
double avgEncTime = _totalEncodeTime / _encFrameCnt;
double avgDecTime = _totalDecodeTime / _decFrameCnt;
printf("Actual bitrate: %f kbps\n", actualBitRate);
printf("Average encode time: %.1f ms\n", 1000 * avgEncTime);
printf("Average decode time: %.1f ms\n", 1000 * avgDecTime);
printf("Average encode pipeline time: %.1f ms\n",
1000 * _totalEncodePipeTime / _encFrameCnt);
printf("Average decode pipeline time: %.1f ms\n",
1000 * _totalDecodePipeTime / _decFrameCnt);
printf("Number of encoded frames: %u\n", _encFrameCnt);
printf("Number of decoded frames: %u\n", _decFrameCnt);
(*_log) << "Actual bitrate: " << actualBitRate << " kbps\tTarget: " <<
_bitRate << " kbps" << std::endl;
(*_log) << "Average encode time: " << avgEncTime << " s" << std::endl;
(*_log) << "Average decode time: " << avgDecTime << " s" << std::endl;
_encoder->Release();
_decoder->Release();
Teardown();
}
// Reads one raw frame from the source file and encodes it, attaching
// any pending RPSI/SLI feedback gathered from |decodeCallback| (the
// lossy decoder's callback). Returns true when the source file is
// exhausted, false otherwise. Encode time is accumulated into
// _totalEncodeTime.
bool VP8RpsTest::Encode(RpsDecodeCompleteCallback* decodeCallback) {
_lengthEncFrame = 0;
// NOTE(review): the fread return value is unchecked; end-of-input is
// detected via the feof() test below instead.
fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile);
_inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _sourceBuffer);
// Timestamp advances by 90000 / fps per frame, i.e. a 90 kHz clock.
_inputVideoBuffer.SetTimeStamp((unsigned int)
(_encFrameCnt * 9e4 / _inst.maxFramerate));
_inputVideoBuffer.SetWidth(_inst.width);
_inputVideoBuffer.SetHeight(_inst.height);
webrtc::RawImage rawImage;
VideoBufferToRawImage(_inputVideoBuffer, rawImage);
if (feof(_sourceFile) != 0) {
return true;
}
_encodeCompleteTime = 0;
_encodeTimes[rawImage._timeStamp] = tGetTime();
webrtc::VideoFrameType frameType = webrtc::kDeltaFrame;
// Attach RPSI feedback: the picture ID of the last reference frame the
// lossy decoder decoded, plus whether it changed since the last call.
webrtc::CodecSpecificInfo* codecSpecificInfo = CreateEncoderSpecificInfo();
codecSpecificInfo->codecSpecific.VP8.pictureIdRPSI =
decodeCallback->LastDecodedRefPictureId(
&codecSpecificInfo->codecSpecific.VP8.hasReceivedRPSI);
// Attach SLI feedback if the lossy decoder reported a corrupt frame.
if (sli_) {
codecSpecificInfo->codecSpecific.VP8.pictureIdSLI =
decodeCallback->LastDecodedPictureId();
codecSpecificInfo->codecSpecific.VP8.hasReceivedSLI = true;
sli_ = false;
}
printf("Encoding: %u\n", _framecnt);
int ret = _encoder->Encode(rawImage, codecSpecificInfo, &frameType);
if (codecSpecificInfo != NULL) {
delete codecSpecificInfo;
codecSpecificInfo = NULL;
}
// Prefer the time recorded by the encode-complete callback; fall back
// to wall-clock time if the callback has not fired yet.
if (_encodeCompleteTime > 0) {
_totalEncodeTime += _encodeCompleteTime -
_encodeTimes[rawImage._timeStamp];
}
else {
_totalEncodeTime += tGetTime() - _encodeTimes[rawImage._timeStamp];
}
assert(ret >= 0);
return false;
}
//#define FRAME_LOSS 1
// Decodes the current frame (_frameToDecode) with both decoders. The
// master decoder always receives the frame as-is; the second decoder is
// subjected to simulated loss: with FRAME_LOSS defined every 10th frame
// is dropped entirely, otherwise every 10th frame is truncated to a
// random length. On the frames that were delivered intact to both
// decoders, the two outputs must be bit-exact. Returns 0 on success,
// -1 on decoder error or output mismatch. |lossValue| != 0 marks the
// frame as incomplete for the master decoder.
// CAUTION: the if/else structure below is split across #if/#else
// preprocessor branches — braces opened in one branch are closed after
// #endif. Edit with care.
int VP8RpsTest::Decode(int lossValue) {
_sumEncBytes += _frameToDecode->_frame->GetLength();
webrtc::EncodedImage encodedImage;
VideoEncodedBufferToEncodedImage(*(_frameToDecode->_frame), encodedImage);
encodedImage._completeFrame = !lossValue;
_decodeCompleteTime = 0;
_decodeTimes[encodedImage._timeStamp] = clock()/(double)CLOCKS_PER_SEC;
int ret = _decoder->Decode(encodedImage, _missingFrames, NULL,
_frameToDecode->_codecSpecificInfo);
// Drop every 10th frame for the second decoder
#if FRAME_LOSS
if (_framecnt == 0 || _framecnt % 10 != 0) {
printf("Decoding: %u\n", _framecnt);
if (_framecnt > 1 && (_framecnt - 1) % 10 == 0)
_missingFrames = true;
#else
// No whole-frame drops: instead truncate every 10th frame to a random
// length to simulate packet loss within the frame.
if (true) {
if (_framecnt > 0 && _framecnt % 10 == 0) {
encodedImage._length = std::rand() % encodedImage._length;
printf("Decoding with loss: %u\n", _framecnt);
}
else
printf("Decoding: %u\n", _framecnt);
#endif
int ret2 = decoder2_->Decode(encodedImage, _missingFrames, NULL,
_frameToDecode->_codecSpecificInfo,
0 /* dummy */);
// check return values
if (ret < 0 || ret2 < 0) {
return -1;
} else if (ret2 == WEBRTC_VIDEO_CODEC_ERR_REQUEST_SLI ||
ret2 == WEBRTC_VIDEO_CODEC_REQUEST_SLI) {
// The lossy decoder wants an SLI sent; Encode() picks this up.
sli_ = true;
}
// compare decoded images
// NOTE(review): the fourth CheckIfBitExact argument reuses
// _decodedVideoBuffer.GetLength() for decoded_frame2_'s buffer —
// presumably the lengths are equal, but decoded_frame2_.GetLength()
// looks intended; confirm.
#if FRAME_LOSS
if (!_missingFrames) {
if (!CheckIfBitExact(_decodedVideoBuffer.GetBuffer(),
_decodedVideoBuffer.GetLength(),
decoded_frame2_.GetBuffer(), _decodedVideoBuffer.GetLength())) {
fprintf(stderr,"\n\nRPS decoder different from master: %u\n\n",
_framecnt);
return -1;
}
}
#else
if (_framecnt > 0 && _framecnt % 10 != 0) {
if (!CheckIfBitExact(_decodedVideoBuffer.GetBuffer(),
_decodedVideoBuffer.GetLength(),
decoded_frame2_.GetBuffer(), _decodedVideoBuffer.GetLength())) {
fprintf(stderr,"\n\nRPS decoder different from master: %u\n\n",
_framecnt);
return -1;
}
}
#endif
}
#if FRAME_LOSS
else
printf("Dropping %u\n", _framecnt);
#endif
_missingFrames = false;
return 0;
}
// Returns true iff the two buffers have identical length and identical
// contents.
bool VP8RpsTest::CheckIfBitExact(const void* ptrA, unsigned int aLengthBytes,
                                 const void* ptrB, unsigned int bLengthBytes) {
  return aLengthBytes == bLengthBytes &&
         memcmp(ptrA, ptrB, aLengthBytes) == 0;
}
// Constructs the callback. |buffer| is the (caller-owned) destination
// buffer into which each decoded frame is copied.
RpsDecodeCompleteCallback::RpsDecodeCompleteCallback(TestVideoBuffer* buffer)
: decoded_frame_(buffer),
decode_complete_(false),
last_decoded_picture_id_(0),
last_decoded_ref_picture_id_(0),
updated_ref_picture_id_(false) {
}
// Called by the decoder for each decoded frame. Copies the image data
// and metadata into the destination buffer and flags that a frame is
// ready (picked up via DecodeComplete()). Always returns 0.
WebRtc_Word32 RpsDecodeCompleteCallback::Decoded(webrtc::RawImage& image) {
decoded_frame_->VerifyAndAllocate(image._length);
decoded_frame_->CopyBuffer(image._length, image._buffer);
decoded_frame_->SetWidth(image._width);
decoded_frame_->SetHeight(image._height);
decoded_frame_->SetTimeStamp(image._timeStamp);
decode_complete_ = true;
return 0;
}
// Reports whether a frame has been decoded since the previous call and
// clears the flag, so each decoded frame is reported exactly once.
bool RpsDecodeCompleteCallback::DecodeComplete() {
  const bool frame_ready = decode_complete_;
  decode_complete_ = false;
  return frame_ready;
}
// Records the picture ID of a successfully decoded reference frame for
// later RPSI feedback, and marks that a fresh reference is available.
// The ID is masked to its low 15 bits (the VP8 picture ID width).
WebRtc_Word32 RpsDecodeCompleteCallback::ReceivedDecodedReferenceFrame(
    const WebRtc_UWord64 picture_id) {
  updated_ref_picture_id_ = true;
  last_decoded_ref_picture_id_ = picture_id & 0x7FFF;
  return 0;
}
// Records the picture ID of every decoded frame for later SLI feedback.
// The ID is masked to 6 bits — presumably matching the 6-bit picture ID
// carried in SLI messages (RFC 4585); confirm against the RTP feedback
// handling.
WebRtc_Word32 RpsDecodeCompleteCallback::ReceivedDecodedFrame(
const WebRtc_UWord64 picture_id) {
last_decoded_picture_id_ = picture_id & 0x3F;
return 0;
}
// Returns the (6-bit-masked) picture ID of the most recently decoded
// frame, used as the SLI payload.
WebRtc_UWord64 RpsDecodeCompleteCallback::LastDecodedPictureId() const {
return last_decoded_picture_id_;
}
// Returns the picture ID of the last decoded reference frame (the RPSI
// payload). If |updated| is non-null it is set to whether a new
// reference frame arrived since the previous call; the "new reference"
// flag is consumed by this call.
WebRtc_UWord64 RpsDecodeCompleteCallback::LastDecodedRefPictureId(
    bool* updated) {
  const bool has_new_reference = updated_ref_picture_id_;
  updated_ref_picture_id_ = false;
  if (updated) {
    *updated = has_new_reference;
  }
  return last_decoded_ref_picture_id_;
}

View File

@ -0,0 +1,57 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_RPS_TEST_H_
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_RPS_TEST_H_
#include "vp8.h"
#include "normal_async_test.h"
class RpsDecodeCompleteCallback;
// Test of VP8 NACK-based reference picture selection (RPS). Encodes a
// sequence with RPS feedback enabled and decodes it with two decoders:
// a loss-free master and a second decoder subjected to simulated loss.
// The test verifies the lossy decoder resynchronizes via RPSI/SLI
// feedback by comparing its output bit-exactly against the master on
// loss-free frames.
class VP8RpsTest : public VP8NormalAsyncTest {
 public:
  VP8RpsTest(float bitRate);
  VP8RpsTest();
  virtual ~VP8RpsTest();
  // Runs the complete test; exits the process on failure.
  virtual void Perform();
 private:
  VP8RpsTest(std::string name, std::string description, unsigned int testNo)
  : VP8NormalAsyncTest(name, description, testNo) {}
  // Encodes the next source frame, attaching RPSI/SLI feedback gathered
  // from |decodeCallback|. Returns true at end of input.
  virtual bool Encode(RpsDecodeCompleteCallback* decodeCallback);
  // Decodes the current frame with both decoders; |lossValue| != 0
  // marks the frame incomplete. Returns 0 on success, < 0 on error.
  virtual int Decode(int lossValue = 0);
  // True iff both buffers have the same length and contents.
  static bool CheckIfBitExact(const void *ptrA, unsigned int aLengthBytes,
                              const void *ptrB, unsigned int bLengthBytes);
  webrtc::VP8Decoder* decoder2_;    // Second (lossy) decoder; owned.
  TestVideoBuffer decoded_frame2_;  // Output buffer of decoder2_.
  bool sli_;  // True when an SLI should accompany the next encoded frame.
};
// Decode callback that copies each decoded frame into a caller-provided
// buffer and records picture IDs for generating RPSI/SLI feedback.
class RpsDecodeCompleteCallback : public webrtc::DecodedImageCallback {
 public:
  RpsDecodeCompleteCallback(TestVideoBuffer* buffer);
  // Stores |decodedImage| into the destination buffer.
  WebRtc_Word32 Decoded(webrtc::RawImage& decodedImage);
  // Returns true exactly once per decoded frame.
  bool DecodeComplete();
  // Records a successfully decoded reference frame (for RPSI).
  WebRtc_Word32 ReceivedDecodedReferenceFrame(const WebRtc_UWord64 picture_id);
  // Records every decoded frame's picture ID (for SLI).
  WebRtc_Word32 ReceivedDecodedFrame(const WebRtc_UWord64 picture_id);
  // Picture ID of the most recently decoded frame.
  WebRtc_UWord64 LastDecodedPictureId() const;
  // Picture ID of the last decoded reference frame; sets |*updated| to
  // whether a new reference arrived since the previous call.
  WebRtc_UWord64 LastDecodedRefPictureId(bool *updated);
 private:
  TestVideoBuffer* decoded_frame_;  // Not owned.
  bool decode_complete_;
  WebRtc_UWord64 last_decoded_picture_id_;
  WebRtc_UWord64 last_decoded_ref_picture_id_;
  bool updated_ref_picture_id_;
};
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_RPS_TEST_H_

View File

@ -17,6 +17,7 @@
#include "normal_async_test.h"
#include "packet_loss_test.h"
#include "unit_test.h"
#include "rps_test.h"
#include "testsupport/fileutils.h"
#include "vp8.h"
@ -24,11 +25,12 @@ using namespace webrtc;
void PopulateTests(std::vector<Test*>* tests)
{
tests->push_back(new VP8UnitTest());
// tests->push_back(new VP8RpsTest());
// tests->push_back(new VP8UnitTest());
// tests->push_back(new VP8DualDecoderTest());
// tests->push_back(new VP8Benchmark());
// tests->push_back(new VP8PacketLossTest());
// tests->push_back(new VP8NormalAsyncTest());
// tests->push_back(new VP8PacketLossTest(0.05, false, 5));
tests->push_back(new VP8NormalAsyncTest());
}
int main()

View File

@ -150,25 +150,25 @@ public:
// - availableBandWidth : Band width available for the VCM in kbit/s.
// - lossRate : Fractions of lost packets the past second.
// (loss rate in percent = 100 * packetLoss / 255)
// - RTT : Current round-trip time in ms.
// - rtt : Current round-trip time in ms.
//
// Return value : VCM_OK, on success.
// < 0, on error.
virtual WebRtc_Word32 SetChannelParameters(WebRtc_UWord32 availableBandWidth,
WebRtc_UWord8 lossRate,
WebRtc_UWord32 RTT) = 0;
WebRtc_UWord32 rtt) = 0;
// Sets the parameters describing the receive channel. These parameters are inputs to the
// Media Optimization inside the VCM.
//
// Input:
// - RTT : Current round-trip time in ms.
// - rtt : Current round-trip time in ms.
// with the most amount available bandwidth in a conference
// scenario
//
// Return value : VCM_OK, on success.
// < 0, on error.
virtual WebRtc_Word32 SetReceiveChannelParameters(WebRtc_UWord32 RTT) = 0;
virtual WebRtc_Word32 SetReceiveChannelParameters(WebRtc_UWord32 rtt) = 0;
// Register a transport callback which will be called to deliver the encoded data and
// side information.

View File

@ -86,9 +86,9 @@ VCMGenericEncoder::Encode(const VideoFrame& inputFrame,
}
WebRtc_Word32
VCMGenericEncoder::SetPacketLoss(WebRtc_Word32 packetLoss)
VCMGenericEncoder::SetChannelParameters(WebRtc_Word32 packetLoss, int rtt)
{
return _encoder.SetPacketLoss(packetLoss);
return _encoder.SetChannelParameters(packetLoss, rtt);
}
WebRtc_Word32

View File

@ -110,9 +110,9 @@ public:
*/
WebRtc_Word32 SetRates(WebRtc_UWord32 newBitRate, WebRtc_UWord32 frameRate);
/**
* Set a new packet loss rate
* Set a new packet loss rate and a new round-trip time in milliseconds.
*/
WebRtc_Word32 SetPacketLoss(WebRtc_Word32 packetLoss);
WebRtc_Word32 SetChannelParameters(WebRtc_Word32 packetLoss, int rtt);
WebRtc_Word32 CodecConfigParameters(WebRtc_UWord8* buffer, WebRtc_Word32 size);
/**
* Register a transport callback which will be called to deliver the encoded buffers

View File

@ -547,7 +547,7 @@ VideoCodingModuleImpl::FrameRate() const
WebRtc_Word32
VideoCodingModuleImpl::SetChannelParameters(WebRtc_UWord32 availableBandWidth,
WebRtc_UWord8 lossRate,
WebRtc_UWord32 RTT)
WebRtc_UWord32 rtt)
{
WEBRTC_TRACE(webrtc::kTraceModuleCall,
webrtc::kTraceVideoCoding,
@ -558,10 +558,10 @@ VideoCodingModuleImpl::SetChannelParameters(WebRtc_UWord32 availableBandWidth,
CriticalSectionScoped sendCs(_sendCritSect);
WebRtc_UWord32 targetRate = _mediaOpt.SetTargetRates(availableBandWidth,
lossRate,
RTT);
rtt);
if (_encoder != NULL)
{
ret = _encoder->SetPacketLoss(lossRate);
ret = _encoder->SetChannelParameters(lossRate, rtt);
if (ret < 0 )
{
return ret;
@ -582,14 +582,14 @@ VideoCodingModuleImpl::SetChannelParameters(WebRtc_UWord32 availableBandWidth,
}
WebRtc_Word32
VideoCodingModuleImpl::SetReceiveChannelParameters(WebRtc_UWord32 RTT)
VideoCodingModuleImpl::SetReceiveChannelParameters(WebRtc_UWord32 rtt)
{
WEBRTC_TRACE(webrtc::kTraceModuleCall,
webrtc::kTraceVideoCoding,
VCMId(_id),
"SetReceiveChannelParameters()");
CriticalSectionScoped receiveCs(_receiveCritSect);
_receiver.UpdateRtt(RTT);
_receiver.UpdateRtt(rtt);
return 0;
}

View File

@ -113,10 +113,10 @@ public:
virtual WebRtc_Word32 SetChannelParameters(
WebRtc_UWord32 availableBandWidth,
WebRtc_UWord8 lossRate,
WebRtc_UWord32 RTT);
WebRtc_UWord32 rtt);
// Set recieve channel parameters
virtual WebRtc_Word32 SetReceiveChannelParameters(WebRtc_UWord32 RTT);
virtual WebRtc_Word32 SetReceiveChannelParameters(WebRtc_UWord32 rtt);
// Register a transport callback which will be called to deliver the
// encoded buffers

View File

@ -1007,7 +1007,8 @@ WebRtc_Word32 ViECapturer::Reset()
return 0;
}
WebRtc_Word32 ViECapturer::SetPacketLoss(WebRtc_UWord32 packetLoss)
WebRtc_Word32 ViECapturer::SetChannelParameters(WebRtc_UWord32 packetLoss,
int rtt)
{
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(_engineId, _captureId),
"%s(captureDeviceId: %d)", __FUNCTION__, _captureId);
@ -1016,7 +1017,7 @@ WebRtc_Word32 ViECapturer::SetPacketLoss(WebRtc_UWord32 packetLoss)
if (!_captureEncoder)
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
return _captureEncoder->SetPacketLoss(packetLoss);
return _captureEncoder->SetChannelParameters(packetLoss, rtt);
}
WebRtc_Word32 ViECapturer::SetRates(WebRtc_UWord32 newBitRate,

View File

@ -136,7 +136,8 @@ protected:
EncodedImageCallback* callback);
virtual WebRtc_Word32 Release();
virtual WebRtc_Word32 Reset();
virtual WebRtc_Word32 SetPacketLoss(WebRtc_UWord32 packetLoss);
virtual WebRtc_Word32 SetChannelParameters(WebRtc_UWord32 packetLoss,
int rtt);
virtual WebRtc_Word32 SetRates(WebRtc_UWord32 newBitRate,
WebRtc_UWord32 frameRate);