Move src/ -> webrtc/
TBR=niklas.enbom@webrtc.org Review URL: https://webrtc-codereview.appspot.com/915006 git-svn-id: http://webrtc.googlecode.com/svn/trunk@2963 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
4
webrtc/modules/video_coding/codecs/OWNERS
Normal file
4
webrtc/modules/video_coding/codecs/OWNERS
Normal file
@@ -0,0 +1,4 @@
|
||||
stefan@webrtc.org
|
||||
mikhal@webrtc.org
|
||||
marpan@webrtc.org
|
||||
henrik.lundin@webrtc.org
|
||||
151
webrtc/modules/video_coding/codecs/i420/main/interface/i420.h
Normal file
151
webrtc/modules/video_coding/codecs/i420/main/interface/i420.h
Normal file
@@ -0,0 +1,151 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_I420_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_I420_H_
|
||||
|
||||
#include "video_codec_interface.h"
|
||||
#include "typedefs.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
class I420Encoder : public VideoEncoder {
|
||||
public:
|
||||
|
||||
I420Encoder();
|
||||
|
||||
virtual ~I420Encoder();
|
||||
|
||||
// Initialize the encoder with the information from the VideoCodec.
|
||||
//
|
||||
// Input:
|
||||
// - codecSettings : Codec settings.
|
||||
// - numberOfCores : Number of cores available for the encoder.
|
||||
// - maxPayloadSize : The maximum size each payload is allowed
|
||||
// to have. Usually MTU - overhead.
|
||||
//
|
||||
// Return value : WEBRTC_VIDEO_CODEC_OK if OK.
|
||||
// <0 - Error
|
||||
virtual int InitEncode(const VideoCodec* codecSettings,
|
||||
int /*numberOfCores*/,
|
||||
uint32_t /*maxPayloadSize*/);
|
||||
|
||||
// "Encode" an I420 image (as a part of a video stream). The encoded image
|
||||
// will be returned to the user via the encode complete callback.
|
||||
//
|
||||
// Input:
|
||||
// - inputImage : Image to be encoded.
|
||||
// - codecSpecificInfo : Pointer to codec specific data.
|
||||
// - frameType : Frame type to be sent (Key /Delta).
|
||||
//
|
||||
// Return value : WEBRTC_VIDEO_CODEC_OK if OK.
|
||||
// <0 - Error
|
||||
virtual int Encode(const VideoFrame& inputImage,
|
||||
const CodecSpecificInfo* /*codecSpecificInfo*/,
|
||||
const std::vector<VideoFrameType>* /*frame_types*/);
|
||||
|
||||
// Register an encode complete callback object.
|
||||
//
|
||||
// Input:
|
||||
// - callback : Callback object which handles encoded images.
|
||||
//
|
||||
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
|
||||
virtual int RegisterEncodeCompleteCallback(EncodedImageCallback* callback);
|
||||
|
||||
// Free encoder memory.
|
||||
//
|
||||
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
|
||||
virtual int Release();
|
||||
|
||||
virtual int SetRates(uint32_t /*newBitRate*/, uint32_t /*frameRate*/)
|
||||
{return WEBRTC_VIDEO_CODEC_OK;}
|
||||
|
||||
virtual int SetChannelParameters(uint32_t /*packetLoss*/, int /*rtt*/)
|
||||
{return WEBRTC_VIDEO_CODEC_OK;}
|
||||
|
||||
virtual int CodecConfigParameters(uint8_t* /*buffer*/, int /*size*/)
|
||||
{return WEBRTC_VIDEO_CODEC_OK;}
|
||||
|
||||
private:
|
||||
bool _inited;
|
||||
EncodedImage _encodedImage;
|
||||
EncodedImageCallback* _encodedCompleteCallback;
|
||||
|
||||
}; // end of WebRtcI420DEncoder class
|
||||
|
||||
class I420Decoder : public VideoDecoder {
|
||||
public:
|
||||
|
||||
I420Decoder();
|
||||
|
||||
virtual ~I420Decoder();
|
||||
|
||||
// Initialize the decoder.
|
||||
// The user must notify the codec of width and height values.
|
||||
//
|
||||
// Return value : WEBRTC_VIDEO_CODEC_OK.
|
||||
// <0 - Errors
|
||||
virtual int InitDecode(const VideoCodec* codecSettings,
|
||||
int /*numberOfCores*/);
|
||||
|
||||
virtual int SetCodecConfigParameters(const uint8_t* /*buffer*/, int /*size*/)
|
||||
{return WEBRTC_VIDEO_CODEC_OK;};
|
||||
|
||||
// Decode encoded image (as a part of a video stream). The decoded image
|
||||
// will be returned to the user through the decode complete callback.
|
||||
//
|
||||
// Input:
|
||||
// - inputImage : Encoded image to be decoded
|
||||
// - missingFrames : True if one or more frames have been lost
|
||||
// since the previous decode call.
|
||||
// - codecSpecificInfo : pointer to specific codec data
|
||||
// - renderTimeMs : Render time in Ms
|
||||
//
|
||||
// Return value : WEBRTC_VIDEO_CODEC_OK if OK
|
||||
// <0 - Error
|
||||
virtual int Decode(const EncodedImage& inputImage,
|
||||
bool missingFrames,
|
||||
const RTPFragmentationHeader* /*fragmentation*/,
|
||||
const CodecSpecificInfo* /*codecSpecificInfo*/,
|
||||
int64_t /*renderTimeMs*/);
|
||||
|
||||
// Register a decode complete callback object.
|
||||
//
|
||||
// Input:
|
||||
// - callback : Callback object which handles decoded images.
|
||||
//
|
||||
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
|
||||
virtual int RegisterDecodeCompleteCallback(DecodedImageCallback* callback);
|
||||
|
||||
// Free decoder memory.
|
||||
//
|
||||
// Return value : WEBRTC_VIDEO_CODEC_OK if OK.
|
||||
// <0 - Error
|
||||
virtual int Release();
|
||||
|
||||
// Reset decoder state and prepare for a new call.
|
||||
//
|
||||
// Return value : WEBRTC_VIDEO_CODEC_OK.
|
||||
// <0 - Error
|
||||
virtual int Reset();
|
||||
|
||||
private:
|
||||
|
||||
VideoFrame _decodedImage;
|
||||
int _width;
|
||||
int _height;
|
||||
bool _inited;
|
||||
DecodedImageCallback* _decodeCompleteCallback;
|
||||
|
||||
}; // End of WebRtcI420Decoder class.
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_I420_H_
|
||||
@@ -0,0 +1,41 @@
|
||||
# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
#
|
||||
# Use of this source code is governed by a BSD-style license
|
||||
# that can be found in the LICENSE file in the root of the source
|
||||
# tree. An additional intellectual property rights grant can be found
|
||||
# in the file PATENTS. All contributing project authors may
|
||||
# be found in the AUTHORS file in the root of the source tree.
|
||||
|
||||
LOCAL_PATH := $(call my-dir)
|
||||
|
||||
include $(CLEAR_VARS)
|
||||
|
||||
include $(LOCAL_PATH)/../../../../../../../android-webrtc.mk
|
||||
|
||||
LOCAL_MODULE_CLASS := STATIC_LIBRARIES
|
||||
LOCAL_MODULE := libwebrtc_i420
|
||||
LOCAL_MODULE_TAGS := optional
|
||||
LOCAL_CPP_EXTENSION := .cc
|
||||
LOCAL_SRC_FILES := i420.cc
|
||||
|
||||
# Flags passed to both C and C++ files.
|
||||
LOCAL_CFLAGS := \
|
||||
$(MY_WEBRTC_COMMON_DEFS)
|
||||
|
||||
# Include paths placed before CFLAGS/CPPFLAGS
|
||||
LOCAL_C_INCLUDES := \
|
||||
$(LOCAL_PATH)/../interface \
|
||||
$(LOCAL_PATH)/../../../interface \
|
||||
$(LOCAL_PATH)/../../../../../.. \
|
||||
$(LOCAL_PATH)/../../../../../../common_video/interface \
|
||||
$(LOCAL_PATH)/../../../../../../system_wrappers/interface
|
||||
|
||||
LOCAL_SHARED_LIBRARIES := \
|
||||
libcutils \
|
||||
libdl \
|
||||
libstlport
|
||||
|
||||
ifndef NDK_ROOT
|
||||
include external/stlport/libstlport.mk
|
||||
endif
|
||||
include $(BUILD_STATIC_LIBRARY)
|
||||
201
webrtc/modules/video_coding/codecs/i420/main/source/i420.cc
Normal file
201
webrtc/modules/video_coding/codecs/i420/main/source/i420.cc
Normal file
@@ -0,0 +1,201 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "modules/video_coding/codecs/i420/main/interface/i420.h"
|
||||
|
||||
#include <string.h>
|
||||
|
||||
#include "common_video/libyuv/include/webrtc_libyuv.h"
|
||||
|
||||
|
||||
namespace webrtc
|
||||
{
|
||||
|
||||
I420Encoder::I420Encoder():
|
||||
_inited(false),
|
||||
_encodedImage(),
|
||||
_encodedCompleteCallback(NULL)
|
||||
{}
|
||||
|
||||
I420Encoder::~I420Encoder() {
|
||||
_inited = false;
|
||||
if (_encodedImage._buffer != NULL) {
|
||||
delete [] _encodedImage._buffer;
|
||||
_encodedImage._buffer = NULL;
|
||||
}
|
||||
}
|
||||
|
||||
int I420Encoder::Release() {
|
||||
// Should allocate an encoded frame and then release it here, for that we
|
||||
// actually need an init flag.
|
||||
if (_encodedImage._buffer != NULL) {
|
||||
delete [] _encodedImage._buffer;
|
||||
_encodedImage._buffer = NULL;
|
||||
}
|
||||
_inited = false;
|
||||
return WEBRTC_VIDEO_CODEC_OK;
|
||||
}
|
||||
|
||||
int I420Encoder::InitEncode(const VideoCodec* codecSettings,
|
||||
int /*numberOfCores*/,
|
||||
uint32_t /*maxPayloadSize */) {
|
||||
if (codecSettings == NULL) {
|
||||
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
|
||||
}
|
||||
if (codecSettings->width < 1 || codecSettings->height < 1) {
|
||||
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
|
||||
}
|
||||
|
||||
// Allocating encoded memory.
|
||||
if (_encodedImage._buffer != NULL) {
|
||||
delete [] _encodedImage._buffer;
|
||||
_encodedImage._buffer = NULL;
|
||||
_encodedImage._size = 0;
|
||||
}
|
||||
const uint32_t newSize = CalcBufferSize(kI420,
|
||||
codecSettings->width,
|
||||
codecSettings->height);
|
||||
uint8_t* newBuffer = new uint8_t[newSize];
|
||||
if (newBuffer == NULL) {
|
||||
return WEBRTC_VIDEO_CODEC_MEMORY;
|
||||
}
|
||||
_encodedImage._size = newSize;
|
||||
_encodedImage._buffer = newBuffer;
|
||||
|
||||
// If no memory allocation, no point to init.
|
||||
_inited = true;
|
||||
return WEBRTC_VIDEO_CODEC_OK;
|
||||
}
|
||||
|
||||
|
||||
|
||||
int I420Encoder::Encode(const VideoFrame& inputImage,
|
||||
const CodecSpecificInfo* /*codecSpecificInfo*/,
|
||||
const std::vector<VideoFrameType>* /*frame_types*/) {
|
||||
if (!_inited) {
|
||||
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
|
||||
}
|
||||
if (_encodedCompleteCallback == NULL) {
|
||||
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
|
||||
}
|
||||
|
||||
_encodedImage._frameType = kKeyFrame; // No coding.
|
||||
_encodedImage._timeStamp = inputImage.TimeStamp();
|
||||
_encodedImage._encodedHeight = inputImage.Height();
|
||||
_encodedImage._encodedWidth = inputImage.Width();
|
||||
if (inputImage.Length() > _encodedImage._size) {
|
||||
|
||||
// Allocating encoded memory.
|
||||
if (_encodedImage._buffer != NULL) {
|
||||
delete [] _encodedImage._buffer;
|
||||
_encodedImage._buffer = NULL;
|
||||
_encodedImage._size = 0;
|
||||
}
|
||||
const uint32_t newSize = CalcBufferSize(kI420,
|
||||
_encodedImage._encodedWidth,
|
||||
_encodedImage._encodedHeight);
|
||||
uint8_t* newBuffer = new uint8_t[newSize];
|
||||
if (newBuffer == NULL) {
|
||||
return WEBRTC_VIDEO_CODEC_MEMORY;
|
||||
}
|
||||
_encodedImage._size = newSize;
|
||||
_encodedImage._buffer = newBuffer;
|
||||
}
|
||||
memcpy(_encodedImage._buffer, inputImage.Buffer(), inputImage.Length());
|
||||
_encodedImage._length = inputImage.Length();
|
||||
_encodedCompleteCallback->Encoded(_encodedImage);
|
||||
return WEBRTC_VIDEO_CODEC_OK;
|
||||
}
|
||||
|
||||
|
||||
int
|
||||
I420Encoder::RegisterEncodeCompleteCallback(EncodedImageCallback* callback) {
|
||||
_encodedCompleteCallback = callback;
|
||||
return WEBRTC_VIDEO_CODEC_OK;
|
||||
}
|
||||
|
||||
|
||||
I420Decoder::I420Decoder():
|
||||
_decodedImage(),
|
||||
_width(0),
|
||||
_height(0),
|
||||
_inited(false),
|
||||
_decodeCompleteCallback(NULL)
|
||||
{}
|
||||
|
||||
I420Decoder::~I420Decoder() {
|
||||
Release();
|
||||
}
|
||||
|
||||
int
|
||||
I420Decoder::Reset() {
|
||||
return WEBRTC_VIDEO_CODEC_OK;
|
||||
}
|
||||
|
||||
|
||||
int
|
||||
I420Decoder::InitDecode(const VideoCodec* codecSettings,
|
||||
int /*numberOfCores */) {
|
||||
if (codecSettings == NULL) {
|
||||
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
|
||||
} else if (codecSettings->width < 1 || codecSettings->height < 1) {
|
||||
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
|
||||
}
|
||||
_width = codecSettings->width;
|
||||
_height = codecSettings->height;
|
||||
_inited = true;
|
||||
return WEBRTC_VIDEO_CODEC_OK;
|
||||
}
|
||||
|
||||
int
|
||||
I420Decoder::Decode(const EncodedImage& inputImage,
|
||||
bool /*missingFrames*/,
|
||||
const RTPFragmentationHeader* /*fragmentation*/,
|
||||
const CodecSpecificInfo* /*codecSpecificInfo*/,
|
||||
int64_t /*renderTimeMs*/) {
|
||||
if (inputImage._buffer == NULL) {
|
||||
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
|
||||
}
|
||||
if (_decodeCompleteCallback == NULL) {
|
||||
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
|
||||
}
|
||||
if (inputImage._length <= 0) {
|
||||
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
|
||||
}
|
||||
if (!_inited) {
|
||||
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
|
||||
}
|
||||
|
||||
// Set decoded image parameters.
|
||||
if (_decodedImage.CopyFrame(inputImage._length, inputImage._buffer) < 0) {
|
||||
return WEBRTC_VIDEO_CODEC_MEMORY;
|
||||
}
|
||||
_decodedImage.SetHeight(_height);
|
||||
_decodedImage.SetWidth(_width);
|
||||
_decodedImage.SetTimeStamp(inputImage._timeStamp);
|
||||
|
||||
_decodeCompleteCallback->Decoded(_decodedImage);
|
||||
return WEBRTC_VIDEO_CODEC_OK;
|
||||
}
|
||||
|
||||
int
|
||||
I420Decoder::RegisterDecodeCompleteCallback(DecodedImageCallback* callback) {
|
||||
_decodeCompleteCallback = callback;
|
||||
return WEBRTC_VIDEO_CODEC_OK;
|
||||
}
|
||||
|
||||
int
|
||||
I420Decoder::Release() {
|
||||
_decodedImage.Free();
|
||||
_inited = false;
|
||||
return WEBRTC_VIDEO_CODEC_OK;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,40 @@
|
||||
# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
#
|
||||
# Use of this source code is governed by a BSD-style license
|
||||
# that can be found in the LICENSE file in the root of the source
|
||||
# tree. An additional intellectual property rights grant can be found
|
||||
# in the file PATENTS. All contributing project authors may
|
||||
# be found in the AUTHORS file in the root of the source tree.
|
||||
|
||||
{
|
||||
'targets': [
|
||||
{
|
||||
'target_name': 'webrtc_i420',
|
||||
'type': '<(library)',
|
||||
'dependencies': [
|
||||
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
|
||||
],
|
||||
'include_dirs': [
|
||||
'../interface',
|
||||
'../../../interface',
|
||||
'../../../../../../common_video/interface',
|
||||
],
|
||||
'direct_dependent_settings': {
|
||||
'include_dirs': [
|
||||
'../interface',
|
||||
'../../../../../../common_video/interface',
|
||||
],
|
||||
},
|
||||
'sources': [
|
||||
'../interface/i420.h',
|
||||
'i420.cc',
|
||||
],
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
# Local Variables:
|
||||
# tab-width:2
|
||||
# indent-tabs-mode:nil
|
||||
# End:
|
||||
# vim: set expandtab tabstop=2 shiftwidth=2:
|
||||
@@ -0,0 +1,89 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_MOCK_MOCK_VIDEO_CODEC_INTERFACE_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_MOCK_MOCK_VIDEO_CODEC_INTERFACE_H_
|
||||
|
||||
#include <string>
|
||||
|
||||
#include "gmock/gmock.h"
|
||||
#include "modules/video_coding/codecs/interface/video_codec_interface.h"
|
||||
#include "typedefs.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
class MockEncodedImageCallback : public EncodedImageCallback {
|
||||
public:
|
||||
MOCK_METHOD3(Encoded,
|
||||
WebRtc_Word32(EncodedImage& encodedImage,
|
||||
const CodecSpecificInfo* codecSpecificInfo,
|
||||
const RTPFragmentationHeader* fragmentation));
|
||||
};
|
||||
|
||||
class MockVideoEncoder : public VideoEncoder {
|
||||
public:
|
||||
MOCK_CONST_METHOD2(Version,
|
||||
WebRtc_Word32(WebRtc_Word8 *version,
|
||||
WebRtc_Word32 length));
|
||||
MOCK_METHOD3(InitEncode,
|
||||
WebRtc_Word32(const VideoCodec* codecSettings,
|
||||
WebRtc_Word32 numberOfCores,
|
||||
WebRtc_UWord32 maxPayloadSize));
|
||||
MOCK_METHOD3(Encode,
|
||||
WebRtc_Word32(const VideoFrame& inputImage,
|
||||
const CodecSpecificInfo* codecSpecificInfo,
|
||||
const std::vector<VideoFrameType>* frame_types));
|
||||
MOCK_METHOD1(RegisterEncodeCompleteCallback,
|
||||
WebRtc_Word32(EncodedImageCallback* callback));
|
||||
MOCK_METHOD0(Release, WebRtc_Word32());
|
||||
MOCK_METHOD0(Reset, WebRtc_Word32());
|
||||
MOCK_METHOD2(SetChannelParameters, WebRtc_Word32(WebRtc_UWord32 packetLoss,
|
||||
int rtt));
|
||||
MOCK_METHOD2(SetRates,
|
||||
WebRtc_Word32(WebRtc_UWord32 newBitRate,
|
||||
WebRtc_UWord32 frameRate));
|
||||
MOCK_METHOD1(SetPeriodicKeyFrames, WebRtc_Word32(bool enable));
|
||||
MOCK_METHOD2(CodecConfigParameters,
|
||||
WebRtc_Word32(WebRtc_UWord8* /*buffer*/, WebRtc_Word32));
|
||||
};
|
||||
|
||||
class MockDecodedImageCallback : public DecodedImageCallback {
|
||||
public:
|
||||
MOCK_METHOD1(Decoded,
|
||||
WebRtc_Word32(VideoFrame& decodedImage));
|
||||
MOCK_METHOD1(ReceivedDecodedReferenceFrame,
|
||||
WebRtc_Word32(const WebRtc_UWord64 pictureId));
|
||||
MOCK_METHOD1(ReceivedDecodedFrame,
|
||||
WebRtc_Word32(const WebRtc_UWord64 pictureId));
|
||||
};
|
||||
|
||||
class MockVideoDecoder : public VideoDecoder {
|
||||
public:
|
||||
MOCK_METHOD2(InitDecode,
|
||||
WebRtc_Word32(const VideoCodec* codecSettings,
|
||||
WebRtc_Word32 numberOfCores));
|
||||
MOCK_METHOD5(Decode,
|
||||
WebRtc_Word32(const EncodedImage& inputImage,
|
||||
bool missingFrames,
|
||||
const RTPFragmentationHeader* fragmentation,
|
||||
const CodecSpecificInfo* codecSpecificInfo,
|
||||
WebRtc_Word64 renderTimeMs));
|
||||
MOCK_METHOD1(RegisterDecodeCompleteCallback,
|
||||
WebRtc_Word32(DecodedImageCallback* callback));
|
||||
MOCK_METHOD0(Release, WebRtc_Word32());
|
||||
MOCK_METHOD0(Reset, WebRtc_Word32());
|
||||
MOCK_METHOD2(SetCodecConfigParameters,
|
||||
WebRtc_Word32(const WebRtc_UWord8* /*buffer*/, WebRtc_Word32));
|
||||
MOCK_METHOD0(Copy, VideoDecoder*());
|
||||
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_MOCK_MOCK_VIDEO_CODEC_INTERFACE_H_
|
||||
@@ -0,0 +1,251 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_CODEC_INTERFACE_H
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_CODEC_INTERFACE_H
|
||||
|
||||
#include <vector>
|
||||
|
||||
#include "common_types.h"
|
||||
#include "modules/interface/module_common_types.h"
|
||||
#include "modules/video_coding/codecs/interface/video_error_codes.h"
|
||||
#include "common_video/interface/video_image.h"
|
||||
#include "typedefs.h"
|
||||
|
||||
namespace webrtc
|
||||
{
|
||||
|
||||
class RTPFragmentationHeader; // forward declaration
|
||||
|
||||
// Note: if any pointers are added to this struct, it must be fitted
|
||||
// with a copy-constructor. See below.
|
||||
struct CodecSpecificInfoVP8
|
||||
{
|
||||
bool hasReceivedSLI;
|
||||
WebRtc_UWord8 pictureIdSLI;
|
||||
bool hasReceivedRPSI;
|
||||
WebRtc_UWord64 pictureIdRPSI;
|
||||
WebRtc_Word16 pictureId; // negative value to skip pictureId
|
||||
bool nonReference;
|
||||
WebRtc_UWord8 simulcastIdx;
|
||||
WebRtc_UWord8 temporalIdx;
|
||||
bool layerSync;
|
||||
int tl0PicIdx; // Negative value to skip tl0PicIdx
|
||||
WebRtc_Word8 keyIdx; // negative value to skip keyIdx
|
||||
};
|
||||
|
||||
union CodecSpecificInfoUnion
|
||||
{
|
||||
CodecSpecificInfoVP8 VP8;
|
||||
};
|
||||
|
||||
// Note: if any pointers are added to this struct or its sub-structs, it
|
||||
// must be fitted with a copy-constructor. This is because it is copied
|
||||
// in the copy-constructor of VCMEncodedFrame.
|
||||
struct CodecSpecificInfo
|
||||
{
|
||||
VideoCodecType codecType;
|
||||
CodecSpecificInfoUnion codecSpecific;
|
||||
};
|
||||
|
||||
class EncodedImageCallback
|
||||
{
|
||||
public:
|
||||
virtual ~EncodedImageCallback() {};
|
||||
|
||||
// Callback function which is called when an image has been encoded.
|
||||
//
|
||||
// Input:
|
||||
// - encodedImage : The encoded image
|
||||
//
|
||||
// Return value : > 0, signals to the caller that one or more future frames
|
||||
// should be dropped to keep bit rate or frame rate.
|
||||
// = 0, if OK.
|
||||
// < 0, on error.
|
||||
virtual WebRtc_Word32
|
||||
Encoded(EncodedImage& encodedImage,
|
||||
const CodecSpecificInfo* codecSpecificInfo = NULL,
|
||||
const RTPFragmentationHeader* fragmentation = NULL) = 0;
|
||||
};
|
||||
|
||||
class VideoEncoder
|
||||
{
|
||||
public:
|
||||
virtual ~VideoEncoder() {};
|
||||
|
||||
// Initialize the encoder with the information from the VideoCodec.
|
||||
//
|
||||
// Input:
|
||||
// - codecSettings : Codec settings
|
||||
// - numberOfCores : Number of cores available for the encoder
|
||||
// - maxPayloadSize : The maximum size each payload is allowed
|
||||
// to have. Usually MTU - overhead.
|
||||
//
|
||||
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
|
||||
virtual WebRtc_Word32 InitEncode(const VideoCodec* codecSettings, WebRtc_Word32 numberOfCores, WebRtc_UWord32 maxPayloadSize) = 0;
|
||||
|
||||
// Encode an I420 image (as a part of a video stream). The encoded image
|
||||
// will be returned to the user through the encode complete callback.
|
||||
//
|
||||
// Input:
|
||||
// - inputImage : Image to be encoded
|
||||
// - codecSpecificInfo : Pointer to codec specific data
|
||||
// - frame_types : The frame type to encode
|
||||
//
|
||||
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0
|
||||
// otherwise.
|
||||
virtual WebRtc_Word32 Encode(
|
||||
const VideoFrame& inputImage,
|
||||
const CodecSpecificInfo* codecSpecificInfo,
|
||||
const std::vector<VideoFrameType>* frame_types) = 0;
|
||||
|
||||
// Register an encode complete callback object.
|
||||
//
|
||||
// Input:
|
||||
// - callback : Callback object which handles encoded images.
|
||||
//
|
||||
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
|
||||
virtual WebRtc_Word32 RegisterEncodeCompleteCallback(EncodedImageCallback* callback) = 0;
|
||||
|
||||
// Free encoder memory.
|
||||
//
|
||||
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
|
||||
virtual WebRtc_Word32 Release() = 0;
|
||||
|
||||
// Inform the encoder about the packet loss and round trip time on the
|
||||
// network used to decide the best pattern and signaling.
|
||||
//
|
||||
// - packetLoss : Fraction lost (loss rate in percent =
|
||||
// 100 * packetLoss / 255)
|
||||
// - rtt : Round-trip time in milliseconds
|
||||
//
|
||||
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
|
||||
virtual WebRtc_Word32 SetChannelParameters(WebRtc_UWord32 packetLoss,
|
||||
int rtt) = 0;
|
||||
|
||||
// Inform the encoder about the new target bit rate.
|
||||
//
|
||||
// - newBitRate : New target bit rate
|
||||
// - frameRate : The target frame rate
|
||||
//
|
||||
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
|
||||
virtual WebRtc_Word32 SetRates(WebRtc_UWord32 newBitRate, WebRtc_UWord32 frameRate) = 0;
|
||||
|
||||
// Use this function to enable or disable periodic key frames. Can be useful for codecs
|
||||
// which have other ways of stopping error propagation.
|
||||
//
|
||||
// - enable : Enable or disable periodic key frames
|
||||
//
|
||||
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
|
||||
virtual WebRtc_Word32 SetPeriodicKeyFrames(bool enable) { return WEBRTC_VIDEO_CODEC_ERROR; }
|
||||
|
||||
// Codec configuration data to send out-of-band, i.e. in SIP call setup
|
||||
//
|
||||
// - buffer : Buffer pointer to where the configuration data
|
||||
// should be stored
|
||||
// - size : The size of the buffer in bytes
|
||||
//
|
||||
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
|
||||
virtual WebRtc_Word32 CodecConfigParameters(WebRtc_UWord8* /*buffer*/, WebRtc_Word32 /*size*/) { return WEBRTC_VIDEO_CODEC_ERROR; }
|
||||
};
|
||||
|
||||
class DecodedImageCallback
|
||||
{
|
||||
public:
|
||||
virtual ~DecodedImageCallback() {};
|
||||
|
||||
// Callback function which is called when an image has been decoded.
|
||||
//
|
||||
// Input:
|
||||
// - decodedImage : The decoded image.
|
||||
//
|
||||
// Return value : 0 if OK, < 0 otherwise.
|
||||
virtual WebRtc_Word32 Decoded(VideoFrame& decodedImage) = 0;
|
||||
|
||||
virtual WebRtc_Word32 ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId) {return -1;}
|
||||
|
||||
virtual WebRtc_Word32 ReceivedDecodedFrame(const WebRtc_UWord64 pictureId) {return -1;}
|
||||
};
|
||||
|
||||
class VideoDecoder
|
||||
{
|
||||
public:
|
||||
virtual ~VideoDecoder() {};
|
||||
|
||||
// Initialize the decoder with the information from the VideoCodec.
|
||||
//
|
||||
// Input:
|
||||
// - inst : Codec settings
|
||||
// - numberOfCores : Number of cores available for the decoder
|
||||
//
|
||||
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
|
||||
virtual WebRtc_Word32 InitDecode(const VideoCodec* codecSettings, WebRtc_Word32 numberOfCores) = 0;
|
||||
|
||||
// Decode encoded image (as a part of a video stream). The decoded image
|
||||
// will be returned to the user through the decode complete callback.
|
||||
//
|
||||
// Input:
|
||||
// - inputImage : Encoded image to be decoded
|
||||
// - missingFrames : True if one or more frames have been lost
|
||||
// since the previous decode call.
|
||||
// - fragmentation : Specifies where the encoded frame can be
|
||||
// split into separate fragments. The meaning
|
||||
// of fragment is codec specific, but often
|
||||
// means that each fragment is decodable by
|
||||
// itself.
|
||||
// - codecSpecificInfo : Pointer to codec specific data
|
||||
// - renderTimeMs : System time to render in milliseconds. Only
|
||||
// used by decoders with internal rendering.
|
||||
//
|
||||
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
|
||||
virtual WebRtc_Word32
|
||||
Decode(const EncodedImage& inputImage,
|
||||
bool missingFrames,
|
||||
const RTPFragmentationHeader* fragmentation,
|
||||
const CodecSpecificInfo* codecSpecificInfo = NULL,
|
||||
WebRtc_Word64 renderTimeMs = -1) = 0;
|
||||
|
||||
// Register an decode complete callback object.
|
||||
//
|
||||
// Input:
|
||||
// - callback : Callback object which handles decoded images.
|
||||
//
|
||||
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
|
||||
virtual WebRtc_Word32 RegisterDecodeCompleteCallback(DecodedImageCallback* callback) = 0;
|
||||
|
||||
// Free decoder memory.
|
||||
//
|
||||
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
|
||||
virtual WebRtc_Word32 Release() = 0;
|
||||
|
||||
// Reset decoder state and prepare for a new call.
|
||||
//
|
||||
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
|
||||
virtual WebRtc_Word32 Reset() = 0;
|
||||
|
||||
// Codec configuration data sent out-of-band, i.e. in SIP call setup
|
||||
//
|
||||
// Input/Output:
|
||||
// - buffer : Buffer pointer to the configuration data
|
||||
// - size : The size of the configuration data in
|
||||
// bytes
|
||||
//
|
||||
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
|
||||
virtual WebRtc_Word32 SetCodecConfigParameters(const WebRtc_UWord8* /*buffer*/, WebRtc_Word32 /*size*/) { return WEBRTC_VIDEO_CODEC_ERROR; }
|
||||
|
||||
// Create a copy of the codec and its internal state.
|
||||
//
|
||||
// Return value : A copy of the instance if OK, NULL otherwise.
|
||||
virtual VideoDecoder* Copy() { return NULL; }
|
||||
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_CODEC_INTERFACE_H
|
||||
@@ -0,0 +1,30 @@
|
||||
/*
|
||||
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_ERROR_CODES_H
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_ERROR_CODES_H
|
||||
|
||||
// NOTE: in sync with video_coding_module_defines.h
|
||||
|
||||
// Define return values
|
||||
|
||||
#define WEBRTC_VIDEO_CODEC_REQUEST_SLI 2
|
||||
#define WEBRTC_VIDEO_CODEC_NO_OUTPUT 1
|
||||
#define WEBRTC_VIDEO_CODEC_OK 0
|
||||
#define WEBRTC_VIDEO_CODEC_ERROR -1
|
||||
#define WEBRTC_VIDEO_CODEC_LEVEL_EXCEEDED -2
|
||||
#define WEBRTC_VIDEO_CODEC_MEMORY -3
|
||||
#define WEBRTC_VIDEO_CODEC_ERR_PARAMETER -4
|
||||
#define WEBRTC_VIDEO_CODEC_ERR_SIZE -5
|
||||
#define WEBRTC_VIDEO_CODEC_TIMEOUT -6
|
||||
#define WEBRTC_VIDEO_CODEC_UNINITIALIZED -7
|
||||
#define WEBRTC_VIDEO_CODEC_ERR_REQUEST_SLI -12
|
||||
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_ERROR_CODES_H
|
||||
@@ -0,0 +1,33 @@
|
||||
/*
|
||||
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_MOCK_MOCK_PACKET_MANIPULATOR_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_MOCK_MOCK_PACKET_MANIPULATOR_H_
|
||||
|
||||
#include "modules/video_coding/codecs/test/packet_manipulator.h"
|
||||
|
||||
#include <string>
|
||||
|
||||
#include "common_video/interface/video_image.h"
|
||||
#include "gmock/gmock.h"
|
||||
#include "typedefs.h"
|
||||
|
||||
namespace webrtc {
|
||||
namespace test {
|
||||
|
||||
class MockPacketManipulator : public PacketManipulator {
|
||||
public:
|
||||
MOCK_METHOD1(ManipulatePackets, int(webrtc::EncodedImage* encoded_image));
|
||||
};
|
||||
|
||||
} // namespace test
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_MOCK_MOCK_PACKET_MANIPULATOR_H_
|
||||
111
webrtc/modules/video_coding/codecs/test/packet_manipulator.cc
Normal file
111
webrtc/modules/video_coding/codecs/test/packet_manipulator.cc
Normal file
@@ -0,0 +1,111 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "modules/video_coding/codecs/test/packet_manipulator.h"
|
||||
|
||||
#include <cassert>
|
||||
#include <cstdio>
|
||||
|
||||
namespace webrtc {
|
||||
namespace test {
|
||||
|
||||
PacketManipulatorImpl::PacketManipulatorImpl(PacketReader* packet_reader,
|
||||
const NetworkingConfig& config,
|
||||
bool verbose)
|
||||
: packet_reader_(packet_reader),
|
||||
config_(config),
|
||||
active_burst_packets_(0),
|
||||
critsect_(CriticalSectionWrapper::CreateCriticalSection()),
|
||||
random_seed_(1),
|
||||
verbose_(verbose) {
|
||||
assert(packet_reader);
|
||||
}
|
||||
|
||||
PacketManipulatorImpl::~PacketManipulatorImpl() {
|
||||
delete critsect_;
|
||||
}
|
||||
|
||||
int PacketManipulatorImpl::ManipulatePackets(
|
||||
webrtc::EncodedImage* encoded_image) {
|
||||
assert(encoded_image);
|
||||
int nbr_packets_dropped = 0;
|
||||
// There's no need to build a copy of the image data since viewing an
|
||||
// EncodedImage object, setting the length to a new lower value represents
|
||||
// that everything is dropped after that position in the byte array.
|
||||
// EncodedImage._size is the allocated bytes.
|
||||
// EncodedImage._length is how many that are filled with data.
|
||||
int new_length = 0;
|
||||
packet_reader_->InitializeReading(encoded_image->_buffer,
|
||||
encoded_image->_length,
|
||||
config_.packet_size_in_bytes);
|
||||
WebRtc_UWord8* packet = NULL;
|
||||
int nbr_bytes_to_read;
|
||||
// keep track of if we've lost any packets, since then we shall loose
|
||||
// the remains of the current frame:
|
||||
bool packet_loss_has_occurred = false;
|
||||
while ((nbr_bytes_to_read = packet_reader_->NextPacket(&packet)) > 0) {
|
||||
// Check if we're currently in a packet loss burst that is not completed:
|
||||
if (active_burst_packets_ > 0) {
|
||||
active_burst_packets_--;
|
||||
nbr_packets_dropped++;
|
||||
} else if (RandomUniform() < config_.packet_loss_probability ||
|
||||
packet_loss_has_occurred) {
|
||||
packet_loss_has_occurred = true;
|
||||
nbr_packets_dropped++;
|
||||
if (config_.packet_loss_mode == kBurst) {
|
||||
// Initiate a new burst
|
||||
active_burst_packets_ = config_.packet_loss_burst_length - 1;
|
||||
}
|
||||
} else {
|
||||
new_length += nbr_bytes_to_read;
|
||||
}
|
||||
}
|
||||
encoded_image->_length = new_length;
|
||||
if (nbr_packets_dropped > 0) {
|
||||
// Must set completeFrame to false to inform the decoder about this:
|
||||
encoded_image->_completeFrame = false;
|
||||
if (verbose_) {
|
||||
printf("Dropped %d packets for frame %d (frame length: %d)\n",
|
||||
nbr_packets_dropped, encoded_image->_timeStamp,
|
||||
encoded_image->_length);
|
||||
}
|
||||
}
|
||||
return nbr_packets_dropped;
|
||||
}
|
||||
|
||||
void PacketManipulatorImpl::InitializeRandomSeed(unsigned int seed) {
|
||||
random_seed_ = seed;
|
||||
}
|
||||
|
||||
inline double PacketManipulatorImpl::RandomUniform() {
|
||||
// Use the previous result as new seed before each rand() call. Doing this
|
||||
// it doesn't matter if other threads are calling rand() since we'll always
|
||||
// get the same behavior as long as we're using a fixed initial seed.
|
||||
critsect_->Enter();
|
||||
srand(random_seed_);
|
||||
random_seed_ = std::rand();
|
||||
critsect_->Leave();
|
||||
return (random_seed_ + 1.0)/(RAND_MAX + 1.0);
|
||||
}
|
||||
|
||||
const char* PacketLossModeToStr(PacketLossMode e) {
|
||||
switch (e) {
|
||||
case kUniform:
|
||||
return "Uniform";
|
||||
case kBurst:
|
||||
return "Burst";
|
||||
default:
|
||||
assert(false);
|
||||
return "Unknown";
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace test
|
||||
} // namespace webrtcc
|
||||
113
webrtc/modules/video_coding/codecs/test/packet_manipulator.h
Normal file
113
webrtc/modules/video_coding/codecs/test/packet_manipulator.h
Normal file
@@ -0,0 +1,113 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_PACKET_MANIPULATOR_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_PACKET_MANIPULATOR_H_
|
||||
|
||||
#include <cstdlib>
|
||||
|
||||
#include "modules/video_coding/codecs/interface/video_codec_interface.h"
|
||||
#include "system_wrappers/interface/critical_section_wrapper.h"
|
||||
#include "testsupport/packet_reader.h"
|
||||
|
||||
namespace webrtc {
|
||||
namespace test {
|
||||
|
||||
// Which mode the packet loss shall be performed according to.
|
||||
enum PacketLossMode {
|
||||
// Drops packets with a configured probability independently for each packet
|
||||
kUniform,
|
||||
// Drops packets similar to uniform but when a packet is being dropped,
|
||||
// the number of lost packets in a row is equal to the configured burst
|
||||
// length.
|
||||
kBurst
|
||||
};
|
||||
// Returns a string representation of the enum value.
|
||||
const char* PacketLossModeToStr(PacketLossMode e);
|
||||
|
||||
// Contains configurations related to networking and simulation of
|
||||
// scenarios caused by network interference.
|
||||
struct NetworkingConfig {
|
||||
NetworkingConfig()
|
||||
: packet_size_in_bytes(1500), max_payload_size_in_bytes(1440),
|
||||
packet_loss_mode(kUniform), packet_loss_probability(0.0),
|
||||
packet_loss_burst_length(1) {
|
||||
}
|
||||
|
||||
// Packet size in bytes. Default: 1500 bytes.
|
||||
int packet_size_in_bytes;
|
||||
|
||||
// Encoder specific setting of maximum size in bytes of each payload.
|
||||
// Default: 1440 bytes.
|
||||
int max_payload_size_in_bytes;
|
||||
|
||||
// Packet loss mode. Two different packet loss models are supported:
|
||||
// uniform or burst. This setting has no effect unless
|
||||
// packet_loss_probability is >0.
|
||||
// Default: uniform.
|
||||
PacketLossMode packet_loss_mode;
|
||||
|
||||
// Packet loss probability. A value between 0.0 and 1.0 that defines the
|
||||
// probability of a packet being lost. 0.1 means 10% and so on.
|
||||
// Default: 0 (no loss).
|
||||
double packet_loss_probability;
|
||||
|
||||
// Packet loss burst length. Defines how many packets will be lost in a burst
|
||||
// when a packet has been decided to be lost. Must be >=1. Default: 1.
|
||||
int packet_loss_burst_length;
|
||||
};
|
||||
|
||||
// Class for simulating packet loss on the encoded frame data.
|
||||
// When a packet loss has occurred in a frame, the remaining data in that
|
||||
// frame is lost (even if burst length is only a single packet).
|
||||
// TODO(kjellander): Support discarding only individual packets in the frame
|
||||
// when CL 172001 has been submitted. This also requires a correct
|
||||
// fragmentation header to be passed to the decoder.
|
||||
//
|
||||
// To get a repeatable packet drop pattern, re-initialize the random seed
|
||||
// using InitializeRandomSeed before each test run.
|
||||
class PacketManipulator {
|
||||
public:
|
||||
virtual ~PacketManipulator() {}
|
||||
|
||||
// Manipulates the data of the encoded_image to simulate parts being lost
|
||||
// during transport.
|
||||
// If packets are dropped from frame data, the completedFrame field will be
|
||||
// set to false.
|
||||
// Returns the number of packets being dropped.
|
||||
virtual int
|
||||
ManipulatePackets(webrtc::EncodedImage* encoded_image) = 0;
|
||||
};
|
||||
|
||||
class PacketManipulatorImpl : public PacketManipulator {
|
||||
public:
|
||||
PacketManipulatorImpl(PacketReader* packet_reader,
|
||||
const NetworkingConfig& config,
|
||||
bool verbose);
|
||||
virtual ~PacketManipulatorImpl();
|
||||
virtual int ManipulatePackets(webrtc::EncodedImage* encoded_image);
|
||||
virtual void InitializeRandomSeed(unsigned int seed);
|
||||
protected:
|
||||
// Returns a uniformly distributed random value between 0.0 and 1.0
|
||||
virtual double RandomUniform();
|
||||
private:
|
||||
PacketReader* packet_reader_;
|
||||
const NetworkingConfig& config_;
|
||||
// Used to simulate a burst over several frames.
|
||||
int active_burst_packets_;
|
||||
CriticalSectionWrapper* critsect_;
|
||||
unsigned int random_seed_;
|
||||
bool verbose_;
|
||||
};
|
||||
|
||||
} // namespace test
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_PACKET_MANIPULATOR_H_
|
||||
@@ -0,0 +1,153 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "modules/video_coding/codecs/test/packet_manipulator.h"
|
||||
|
||||
#include <queue>
|
||||
|
||||
#include "gtest/gtest.h"
|
||||
#include "modules/video_coding/codecs/interface/video_codec_interface.h"
|
||||
#include "modules/video_coding/codecs/test/predictive_packet_manipulator.h"
|
||||
#include "testsupport/unittest_utils.h"
|
||||
#include "typedefs.h"
|
||||
|
||||
namespace webrtc {
|
||||
namespace test {
|
||||
|
||||
const double kNeverDropProbability = 0.0;
|
||||
const double kAlwaysDropProbability = 1.0;
|
||||
const int kBurstLength = 1;
|
||||
|
||||
class PacketManipulatorTest: public PacketRelatedTest {
|
||||
protected:
|
||||
PacketReader packet_reader_;
|
||||
EncodedImage image_;
|
||||
NetworkingConfig drop_config_;
|
||||
NetworkingConfig no_drop_config_;
|
||||
|
||||
PacketManipulatorTest() {
|
||||
image_._buffer = packet_data_;
|
||||
image_._length = kPacketDataLength;
|
||||
image_._size = kPacketDataLength;
|
||||
|
||||
drop_config_.packet_size_in_bytes = kPacketSizeInBytes;
|
||||
drop_config_.packet_loss_probability = kAlwaysDropProbability;
|
||||
drop_config_.packet_loss_burst_length = kBurstLength;
|
||||
drop_config_.packet_loss_mode = kUniform;
|
||||
|
||||
no_drop_config_.packet_size_in_bytes = kPacketSizeInBytes;
|
||||
no_drop_config_.packet_loss_probability = kNeverDropProbability;
|
||||
no_drop_config_.packet_loss_burst_length = kBurstLength;
|
||||
no_drop_config_.packet_loss_mode = kUniform;
|
||||
}
|
||||
|
||||
virtual ~PacketManipulatorTest() {}
|
||||
|
||||
void SetUp() {
|
||||
PacketRelatedTest::SetUp();
|
||||
}
|
||||
|
||||
void TearDown() {
|
||||
PacketRelatedTest::TearDown();
|
||||
}
|
||||
|
||||
void VerifyPacketLoss(int expected_nbr_packets_dropped,
|
||||
int actual_nbr_packets_dropped,
|
||||
int expected_packet_data_length,
|
||||
WebRtc_UWord8* expected_packet_data,
|
||||
EncodedImage& actual_image) {
|
||||
EXPECT_EQ(expected_nbr_packets_dropped, actual_nbr_packets_dropped);
|
||||
EXPECT_EQ(expected_packet_data_length, static_cast<int>(image_._length));
|
||||
EXPECT_EQ(0, memcmp(expected_packet_data, actual_image._buffer,
|
||||
expected_packet_data_length));
|
||||
}
|
||||
};
|
||||
|
||||
TEST_F(PacketManipulatorTest, Constructor) {
|
||||
PacketManipulatorImpl manipulator(&packet_reader_, no_drop_config_, false);
|
||||
}
|
||||
|
||||
TEST_F(PacketManipulatorTest, DropNone) {
|
||||
PacketManipulatorImpl manipulator(&packet_reader_, no_drop_config_, false);
|
||||
int nbr_packets_dropped = manipulator.ManipulatePackets(&image_);
|
||||
VerifyPacketLoss(0, nbr_packets_dropped, kPacketDataLength,
|
||||
packet_data_, image_);
|
||||
}
|
||||
|
||||
TEST_F(PacketManipulatorTest, UniformDropNoneSmallFrame) {
|
||||
int data_length = 400; // smaller than the packet size
|
||||
image_._length = data_length;
|
||||
PacketManipulatorImpl manipulator(&packet_reader_, no_drop_config_, false);
|
||||
int nbr_packets_dropped = manipulator.ManipulatePackets(&image_);
|
||||
|
||||
VerifyPacketLoss(0, nbr_packets_dropped, data_length,
|
||||
packet_data_, image_);
|
||||
}
|
||||
|
||||
TEST_F(PacketManipulatorTest, UniformDropAll) {
|
||||
PacketManipulatorImpl manipulator(&packet_reader_, drop_config_, false);
|
||||
int nbr_packets_dropped = manipulator.ManipulatePackets(&image_);
|
||||
VerifyPacketLoss(kPacketDataNumberOfPackets, nbr_packets_dropped,
|
||||
0, packet_data_, image_);
|
||||
}
|
||||
|
||||
// Use our customized test class to make the second packet being lost
|
||||
TEST_F(PacketManipulatorTest, UniformDropSinglePacket) {
|
||||
drop_config_.packet_loss_probability = 0.5;
|
||||
PredictivePacketManipulator manipulator(&packet_reader_, drop_config_);
|
||||
manipulator.AddRandomResult(1.0);
|
||||
manipulator.AddRandomResult(0.3); // less than 0.5 will cause packet loss
|
||||
manipulator.AddRandomResult(1.0);
|
||||
|
||||
// Execute the test target method:
|
||||
int nbr_packets_dropped = manipulator.ManipulatePackets(&image_);
|
||||
|
||||
// Since we setup the predictive packet manipulator, it will throw away the
|
||||
// second packet. The third packet is also lost because when we have lost one,
|
||||
// the remains shall also be discarded (in the current implementation).
|
||||
VerifyPacketLoss(2, nbr_packets_dropped, kPacketSizeInBytes, packet1_,
|
||||
image_);
|
||||
}
|
||||
|
||||
// Use our customized test class to make the second packet being lost
|
||||
TEST_F(PacketManipulatorTest, BurstDropNinePackets) {
|
||||
// Create a longer packet data structure (10 packets)
|
||||
const int kNbrPackets = 10;
|
||||
const int kDataLength = kPacketSizeInBytes * kNbrPackets;
|
||||
WebRtc_UWord8 data[kDataLength];
|
||||
WebRtc_UWord8* data_pointer = data;
|
||||
// Fill with 0s, 1s and so on to be able to easily verify which were dropped:
|
||||
for (int i = 0; i < kNbrPackets; ++i) {
|
||||
memset(data_pointer + i * kPacketSizeInBytes, i, kPacketSizeInBytes);
|
||||
}
|
||||
// Overwrite the defaults from the test fixture:
|
||||
image_._buffer = data;
|
||||
image_._length = kDataLength;
|
||||
image_._size = kDataLength;
|
||||
|
||||
drop_config_.packet_loss_probability = 0.5;
|
||||
drop_config_.packet_loss_burst_length = 5;
|
||||
drop_config_.packet_loss_mode = kBurst;
|
||||
PredictivePacketManipulator manipulator(&packet_reader_, drop_config_);
|
||||
manipulator.AddRandomResult(1.0);
|
||||
manipulator.AddRandomResult(0.3); // less than 0.5 will cause packet loss
|
||||
for (int i = 0; i < kNbrPackets - 2; ++i) {
|
||||
manipulator.AddRandomResult(1.0);
|
||||
}
|
||||
|
||||
// Execute the test target method:
|
||||
int nbr_packets_dropped = manipulator.ManipulatePackets(&image_);
|
||||
|
||||
// Should discard every packet after the first one.
|
||||
VerifyPacketLoss(9, nbr_packets_dropped, kPacketSizeInBytes, data, image_);
|
||||
}
|
||||
|
||||
} // namespace test
|
||||
} // namespace webrtc
|
||||
@@ -0,0 +1,48 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "modules/video_coding/codecs/test/predictive_packet_manipulator.h"
|
||||
|
||||
#include <cassert>
|
||||
#include <cstdio>
|
||||
|
||||
#include "testsupport/packet_reader.h"
|
||||
|
||||
namespace webrtc {
|
||||
namespace test {
|
||||
|
||||
PredictivePacketManipulator::PredictivePacketManipulator(
|
||||
PacketReader* packet_reader, const NetworkingConfig& config)
|
||||
: PacketManipulatorImpl(packet_reader, config, false) {
|
||||
}
|
||||
|
||||
PredictivePacketManipulator::~PredictivePacketManipulator() {
|
||||
}
|
||||
|
||||
|
||||
void PredictivePacketManipulator::AddRandomResult(double result) {
|
||||
assert(result >= 0.0 && result <= 1.0);
|
||||
random_results_.push(result);
|
||||
}
|
||||
|
||||
double PredictivePacketManipulator::RandomUniform() {
|
||||
if(random_results_.size() == 0u) {
|
||||
fprintf(stderr, "No more stored results, please make sure AddRandomResult()"
|
||||
"is called same amount of times you're going to invoke the "
|
||||
"RandomUniform() function, i.e. once per packet.\n");
|
||||
assert(false);
|
||||
}
|
||||
double result = random_results_.front();
|
||||
random_results_.pop();
|
||||
return result;
|
||||
}
|
||||
|
||||
} // namespace test
|
||||
} // namespace webrtcc
|
||||
@@ -0,0 +1,45 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_PREDICTIVE_PACKET_MANIPULATOR_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_PREDICTIVE_PACKET_MANIPULATOR_H_
|
||||
|
||||
#include <queue>
|
||||
|
||||
#include "modules/video_coding/codecs/test/packet_manipulator.h"
|
||||
#include "testsupport/packet_reader.h"
|
||||
|
||||
namespace webrtc {
|
||||
namespace test {
|
||||
|
||||
// Predictive packet manipulator that allows for setup of the result of
|
||||
// the random invocations.
|
||||
class PredictivePacketManipulator : public PacketManipulatorImpl {
|
||||
public:
|
||||
PredictivePacketManipulator(PacketReader* packet_reader,
|
||||
const NetworkingConfig& config);
|
||||
virtual ~PredictivePacketManipulator();
|
||||
// Adds a result. You must add at least the same number of results as the
|
||||
// expected calls to the RandomUniform method. The results are added to a
|
||||
// FIFO queue so they will be returned in the same order they were added.
|
||||
// Result parameter must be 0.0 to 1.0.
|
||||
void AddRandomResult(double result);
|
||||
protected:
|
||||
// Returns a uniformly distributed random value between 0.0 and 1.0
|
||||
virtual double RandomUniform();
|
||||
|
||||
private:
|
||||
std::queue<double> random_results_;
|
||||
};
|
||||
|
||||
} // namespace test
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_PREDICTIVE_PACKET_MANIPULATOR_H_
|
||||
172
webrtc/modules/video_coding/codecs/test/stats.cc
Normal file
172
webrtc/modules/video_coding/codecs/test/stats.cc
Normal file
@@ -0,0 +1,172 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "modules/video_coding/codecs/test/stats.h"
|
||||
|
||||
#include <algorithm> // min_element, max_element
|
||||
#include <cassert>
|
||||
#include <cstdio>
|
||||
|
||||
namespace webrtc {
|
||||
namespace test {
|
||||
|
||||
Stats::Stats() {}
|
||||
|
||||
Stats::~Stats() {}
|
||||
|
||||
bool LessForEncodeTime(const FrameStatistic& s1, const FrameStatistic& s2) {
|
||||
return s1.encode_time_in_us < s2.encode_time_in_us;
|
||||
}
|
||||
|
||||
bool LessForDecodeTime(const FrameStatistic& s1, const FrameStatistic& s2) {
|
||||
return s1.decode_time_in_us < s2.decode_time_in_us;
|
||||
}
|
||||
|
||||
bool LessForEncodedSize(const FrameStatistic& s1, const FrameStatistic& s2) {
|
||||
return s1.encoded_frame_length_in_bytes < s2.encoded_frame_length_in_bytes;
|
||||
}
|
||||
|
||||
bool LessForBitRate(const FrameStatistic& s1, const FrameStatistic& s2) {
|
||||
return s1.bit_rate_in_kbps < s2.bit_rate_in_kbps;
|
||||
}
|
||||
|
||||
FrameStatistic& Stats::NewFrame(int frame_number) {
|
||||
assert(frame_number >= 0);
|
||||
FrameStatistic stat;
|
||||
stat.frame_number = frame_number;
|
||||
stats_.push_back(stat);
|
||||
return stats_[frame_number];
|
||||
}
|
||||
|
||||
void Stats::PrintSummary() {
|
||||
printf("Processing summary:\n");
|
||||
if (stats_.size() == 0) {
|
||||
printf("No frame statistics have been logged yet.\n");
|
||||
return;
|
||||
}
|
||||
|
||||
// Calculate min, max, average and total encoding time
|
||||
int total_encoding_time_in_us = 0;
|
||||
int total_decoding_time_in_us = 0;
|
||||
int total_encoded_frames_lengths = 0;
|
||||
int total_encoded_key_frames_lengths = 0;
|
||||
int total_encoded_nonkey_frames_lengths = 0;
|
||||
int nbr_keyframes = 0;
|
||||
int nbr_nonkeyframes = 0;
|
||||
|
||||
for (FrameStatisticsIterator it = stats_.begin();
|
||||
it != stats_.end(); ++it) {
|
||||
total_encoding_time_in_us += it->encode_time_in_us;
|
||||
total_decoding_time_in_us += it->decode_time_in_us;
|
||||
total_encoded_frames_lengths += it->encoded_frame_length_in_bytes;
|
||||
if (it->frame_type == webrtc::kKeyFrame) {
|
||||
total_encoded_key_frames_lengths += it->encoded_frame_length_in_bytes;
|
||||
nbr_keyframes++;
|
||||
} else {
|
||||
total_encoded_nonkey_frames_lengths += it->encoded_frame_length_in_bytes;
|
||||
nbr_nonkeyframes++;
|
||||
}
|
||||
}
|
||||
|
||||
FrameStatisticsIterator frame;
|
||||
|
||||
// ENCODING
|
||||
printf("Encoding time:\n");
|
||||
frame = std::min_element(stats_.begin(),
|
||||
stats_.end(), LessForEncodeTime);
|
||||
printf(" Min : %7d us (frame %d)\n",
|
||||
frame->encode_time_in_us, frame->frame_number);
|
||||
|
||||
frame = std::max_element(stats_.begin(),
|
||||
stats_.end(), LessForEncodeTime);
|
||||
printf(" Max : %7d us (frame %d)\n",
|
||||
frame->encode_time_in_us, frame->frame_number);
|
||||
|
||||
printf(" Average : %7d us\n",
|
||||
static_cast<int>(total_encoding_time_in_us / stats_.size()));
|
||||
|
||||
// DECODING
|
||||
printf("Decoding time:\n");
|
||||
// only consider frames that were successfully decoded (packet loss may cause
|
||||
// failures)
|
||||
std::vector<FrameStatistic> decoded_frames;
|
||||
for (std::vector<FrameStatistic>::iterator it = stats_.begin();
|
||||
it != stats_.end(); ++it) {
|
||||
if (it->decoding_successful) {
|
||||
decoded_frames.push_back(*it);
|
||||
}
|
||||
}
|
||||
if (decoded_frames.size() == 0) {
|
||||
printf("No successfully decoded frames exist in this statistics.\n");
|
||||
} else {
|
||||
frame = std::min_element(decoded_frames.begin(),
|
||||
decoded_frames.end(), LessForDecodeTime);
|
||||
printf(" Min : %7d us (frame %d)\n",
|
||||
frame->decode_time_in_us, frame->frame_number);
|
||||
|
||||
frame = std::max_element(decoded_frames.begin(),
|
||||
decoded_frames.end(), LessForDecodeTime);
|
||||
printf(" Max : %7d us (frame %d)\n",
|
||||
frame->decode_time_in_us, frame->frame_number);
|
||||
|
||||
printf(" Average : %7d us\n",
|
||||
static_cast<int>(total_decoding_time_in_us / decoded_frames.size()));
|
||||
printf(" Failures: %d frames failed to decode.\n",
|
||||
static_cast<int>(stats_.size() - decoded_frames.size()));
|
||||
}
|
||||
|
||||
// SIZE
|
||||
printf("Frame sizes:\n");
|
||||
frame = std::min_element(stats_.begin(),
|
||||
stats_.end(), LessForEncodedSize);
|
||||
printf(" Min : %7d bytes (frame %d)\n",
|
||||
frame->encoded_frame_length_in_bytes, frame->frame_number);
|
||||
|
||||
frame = std::max_element(stats_.begin(),
|
||||
stats_.end(), LessForEncodedSize);
|
||||
printf(" Max : %7d bytes (frame %d)\n",
|
||||
frame->encoded_frame_length_in_bytes, frame->frame_number);
|
||||
|
||||
printf(" Average : %7d bytes\n",
|
||||
static_cast<int>(total_encoded_frames_lengths / stats_.size()));
|
||||
if (nbr_keyframes > 0) {
|
||||
printf(" Average key frame size : %7d bytes (%d keyframes)\n",
|
||||
total_encoded_key_frames_lengths / nbr_keyframes,
|
||||
nbr_keyframes);
|
||||
}
|
||||
if (nbr_nonkeyframes > 0) {
|
||||
printf(" Average non-key frame size: %7d bytes (%d frames)\n",
|
||||
total_encoded_nonkey_frames_lengths / nbr_nonkeyframes,
|
||||
nbr_nonkeyframes);
|
||||
}
|
||||
|
||||
// BIT RATE
|
||||
printf("Bit rates:\n");
|
||||
frame = std::min_element(stats_.begin(),
|
||||
stats_.end(), LessForBitRate);
|
||||
printf(" Min bit rate: %7d kbps (frame %d)\n",
|
||||
frame->bit_rate_in_kbps, frame->frame_number);
|
||||
|
||||
frame = std::max_element(stats_.begin(),
|
||||
stats_.end(), LessForBitRate);
|
||||
printf(" Max bit rate: %7d kbps (frame %d)\n",
|
||||
frame->bit_rate_in_kbps, frame->frame_number);
|
||||
|
||||
printf("\n");
|
||||
printf("Total encoding time : %7d ms.\n",
|
||||
total_encoding_time_in_us / 1000);
|
||||
printf("Total decoding time : %7d ms.\n",
|
||||
total_decoding_time_in_us / 1000);
|
||||
printf("Total processing time: %7d ms.\n",
|
||||
(total_encoding_time_in_us + total_decoding_time_in_us) / 1000);
|
||||
}
|
||||
|
||||
} // namespace test
|
||||
} // namespace webrtc
|
||||
76
webrtc/modules/video_coding/codecs/test/stats.h
Normal file
76
webrtc/modules/video_coding/codecs/test/stats.h
Normal file
@@ -0,0 +1,76 @@
|
||||
/*
|
||||
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_STATS_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_STATS_H_
|
||||
|
||||
#include <vector>
|
||||
|
||||
#include "common_video/interface/video_image.h"
|
||||
|
||||
namespace webrtc {
|
||||
namespace test {
|
||||
|
||||
// Contains statistics of a single frame that has been processed.
|
||||
struct FrameStatistic {
|
||||
FrameStatistic() :
|
||||
encoding_successful(false), decoding_successful(false),
|
||||
encode_return_code(0), decode_return_code(0),
|
||||
encode_time_in_us(0), decode_time_in_us(0),
|
||||
frame_number(0), packets_dropped(0), total_packets(0),
|
||||
bit_rate_in_kbps(0), encoded_frame_length_in_bytes(0),
|
||||
frame_type(kDeltaFrame) {
|
||||
};
|
||||
bool encoding_successful;
|
||||
bool decoding_successful;
|
||||
int encode_return_code;
|
||||
int decode_return_code;
|
||||
int encode_time_in_us;
|
||||
int decode_time_in_us;
|
||||
int frame_number;
|
||||
// How many packets were discarded of the encoded frame data (if any)
|
||||
int packets_dropped;
|
||||
int total_packets;
|
||||
|
||||
// Current bit rate. Calculated out of the size divided with the time
|
||||
// interval per frame.
|
||||
int bit_rate_in_kbps;
|
||||
|
||||
// Copied from EncodedImage
|
||||
int encoded_frame_length_in_bytes;
|
||||
webrtc::VideoFrameType frame_type;
|
||||
};
|
||||
|
||||
// Handles statistics from a single video processing run.
|
||||
// Contains calculation methods for interesting metrics from these stats.
|
||||
class Stats {
|
||||
public:
|
||||
typedef std::vector<FrameStatistic>::iterator FrameStatisticsIterator;
|
||||
|
||||
Stats();
|
||||
virtual ~Stats();
|
||||
|
||||
// Add a new statistic data object.
|
||||
// The frame number must be incrementing and start at zero in order to use
|
||||
// it as an index for the frame_statistics_ vector.
|
||||
// Returns the newly created statistic object.
|
||||
FrameStatistic& NewFrame(int frame_number);
|
||||
|
||||
// Prints a summary of all the statistics that have been gathered during the
|
||||
// processing
|
||||
void PrintSummary();
|
||||
|
||||
std::vector<FrameStatistic> stats_;
|
||||
};
|
||||
|
||||
} // namespace test
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_STATS_H_
|
||||
64
webrtc/modules/video_coding/codecs/test/stats_unittest.cc
Normal file
64
webrtc/modules/video_coding/codecs/test/stats_unittest.cc
Normal file
@@ -0,0 +1,64 @@
|
||||
/*
|
||||
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "modules/video_coding/codecs/test/stats.h"
|
||||
|
||||
#include "gtest/gtest.h"
|
||||
#include "typedefs.h"
|
||||
|
||||
namespace webrtc {
|
||||
namespace test {
|
||||
|
||||
class StatsTest: public testing::Test {
|
||||
protected:
|
||||
StatsTest() {
|
||||
}
|
||||
|
||||
virtual ~StatsTest() {
|
||||
}
|
||||
|
||||
void SetUp() {
|
||||
stats_ = new Stats();
|
||||
}
|
||||
|
||||
void TearDown() {
|
||||
delete stats_;
|
||||
}
|
||||
|
||||
Stats* stats_;
|
||||
};
|
||||
|
||||
// Test empty object
|
||||
TEST_F(StatsTest, Uninitialized) {
|
||||
EXPECT_EQ(0u, stats_->stats_.size());
|
||||
stats_->PrintSummary(); // should not crash
|
||||
}
|
||||
|
||||
// Add single frame stats and verify
|
||||
TEST_F(StatsTest, AddOne) {
|
||||
stats_->NewFrame(0u);
|
||||
FrameStatistic* frameStat = &stats_->stats_[0];
|
||||
EXPECT_EQ(0, frameStat->frame_number);
|
||||
}
|
||||
|
||||
// Add multiple frame stats and verify
|
||||
TEST_F(StatsTest, AddMany) {
|
||||
int nbr_of_frames = 1000;
|
||||
for (int i = 0; i < nbr_of_frames; ++i) {
|
||||
FrameStatistic& frameStat = stats_->NewFrame(i);
|
||||
EXPECT_EQ(i, frameStat.frame_number);
|
||||
}
|
||||
EXPECT_EQ(nbr_of_frames, static_cast<int>(stats_->stats_.size()));
|
||||
|
||||
stats_->PrintSummary(); // should not crash
|
||||
}
|
||||
|
||||
} // namespace test
|
||||
} // namespace webrtc
|
||||
@@ -0,0 +1,65 @@
|
||||
# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
#
|
||||
# Use of this source code is governed by a BSD-style license
|
||||
# that can be found in the LICENSE file in the root of the source
|
||||
# tree. An additional intellectual property rights grant can be found
|
||||
# in the file PATENTS. All contributing project authors may
|
||||
# be found in the AUTHORS file in the root of the source tree.
|
||||
|
||||
{
|
||||
'conditions': [
|
||||
['include_tests==1', {
|
||||
'targets': [
|
||||
{
|
||||
'target_name': 'video_codecs_test_framework',
|
||||
'type': '<(library)',
|
||||
'dependencies': [
|
||||
'<(webrtc_root)/test/test.gyp:test_support',
|
||||
],
|
||||
'sources': [
|
||||
'mock/mock_packet_manipulator.h',
|
||||
'packet_manipulator.h',
|
||||
'packet_manipulator.cc',
|
||||
'predictive_packet_manipulator.h',
|
||||
'predictive_packet_manipulator.cc',
|
||||
'stats.h',
|
||||
'stats.cc',
|
||||
'videoprocessor.h',
|
||||
'videoprocessor.cc',
|
||||
],
|
||||
},
|
||||
{
|
||||
'target_name': 'video_codecs_test_framework_unittests',
|
||||
'type': 'executable',
|
||||
'dependencies': [
|
||||
'video_codecs_test_framework',
|
||||
'webrtc_video_coding',
|
||||
'<(DEPTH)/testing/gmock.gyp:gmock',
|
||||
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
|
||||
'<(webrtc_root)/test/test.gyp:test_support_main',
|
||||
],
|
||||
'sources': [
|
||||
'packet_manipulator_unittest.cc',
|
||||
'stats_unittest.cc',
|
||||
'videoprocessor_unittest.cc',
|
||||
],
|
||||
},
|
||||
{
|
||||
'target_name': 'video_codecs_test_framework_integrationtests',
|
||||
'type': 'executable',
|
||||
'dependencies': [
|
||||
'video_codecs_test_framework',
|
||||
'webrtc_video_coding',
|
||||
'<(DEPTH)/testing/gtest.gyp:gtest',
|
||||
'<(webrtc_root)/test/metrics.gyp:metrics',
|
||||
'<(webrtc_root)/test/test.gyp:test_support_main',
|
||||
'<(webrtc_vp8_dir)/vp8.gyp:webrtc_vp8',
|
||||
],
|
||||
'sources': [
|
||||
'videoprocessor_integrationtest.cc',
|
||||
],
|
||||
},
|
||||
], # targets
|
||||
}], # include_tests
|
||||
], # conditions
|
||||
}
|
||||
388
webrtc/modules/video_coding/codecs/test/videoprocessor.cc
Normal file
388
webrtc/modules/video_coding/codecs/test/videoprocessor.cc
Normal file
@@ -0,0 +1,388 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "modules/video_coding/codecs/test/videoprocessor.h"
|
||||
|
||||
#include <cassert>
|
||||
#include <cstring>
|
||||
#include <limits>
|
||||
|
||||
#include "system_wrappers/interface/cpu_info.h"
|
||||
|
||||
namespace webrtc {
|
||||
namespace test {
|
||||
|
||||
// Constructs the processor. All collaborators are borrowed (not owned) and
// must outlive this object; they must all be non-NULL.
// NOTE: Init() must be called (and succeed) before any processing is done.
VideoProcessorImpl::VideoProcessorImpl(webrtc::VideoEncoder* encoder,
                                       webrtc::VideoDecoder* decoder,
                                       FrameReader* frame_reader,
                                       FrameWriter* frame_writer,
                                       PacketManipulator* packet_manipulator,
                                       const TestConfig& config,
                                       Stats* stats)
    : encoder_(encoder),
      decoder_(decoder),
      frame_reader_(frame_reader),
      frame_writer_(frame_writer),
      packet_manipulator_(packet_manipulator),
      config_(config),
      stats_(stats),
      encode_callback_(NULL),
      decode_callback_(NULL),
      source_buffer_(NULL),
      // Bug fix: this pointer was previously left uninitialized. The
      // destructor unconditionally calls delete[] on it, which is undefined
      // behavior if Init() (which allocates it) was never called.
      last_successful_frame_buffer_(NULL),
      first_key_frame_has_been_excluded_(false),
      last_frame_missing_(false),
      initialized_(false),
      encoded_frame_size_(0),
      prev_time_stamp_(0),
      num_dropped_frames_(0),
      num_spatial_resizes_(0),
      last_encoder_frame_width_(0),
      last_encoder_frame_height_(0),
      scaler_() {
  assert(encoder);
  assert(decoder);
  assert(frame_reader);
  assert(frame_writer);
  assert(packet_manipulator);
  assert(stats);
}
|
||||
|
||||
bool VideoProcessorImpl::Init() {
|
||||
// Calculate a factor used for bit rate calculations:
|
||||
bit_rate_factor_ = config_.codec_settings->maxFramerate * 0.001 * 8; // bits
|
||||
|
||||
int frame_length_in_bytes = frame_reader_->FrameLength();
|
||||
|
||||
// Initialize data structures used by the encoder/decoder APIs
|
||||
source_buffer_ = new WebRtc_UWord8[frame_length_in_bytes];
|
||||
last_successful_frame_buffer_ = new WebRtc_UWord8[frame_length_in_bytes];
|
||||
|
||||
// Set fixed properties common for all frames:
|
||||
source_frame_.SetWidth(config_.codec_settings->width);
|
||||
source_frame_.SetHeight(config_.codec_settings->height);
|
||||
source_frame_.VerifyAndAllocate(frame_length_in_bytes);
|
||||
source_frame_.SetLength(frame_length_in_bytes);
|
||||
|
||||
// To keep track of spatial resize actions by encoder.
|
||||
last_encoder_frame_width_ = config_.codec_settings->width;
|
||||
last_encoder_frame_height_ = config_.codec_settings->height;
|
||||
|
||||
// Setup required callbacks for the encoder/decoder:
|
||||
encode_callback_ = new VideoProcessorEncodeCompleteCallback(this);
|
||||
decode_callback_ = new VideoProcessorDecodeCompleteCallback(this);
|
||||
WebRtc_Word32 register_result =
|
||||
encoder_->RegisterEncodeCompleteCallback(encode_callback_);
|
||||
if (register_result != WEBRTC_VIDEO_CODEC_OK) {
|
||||
fprintf(stderr, "Failed to register encode complete callback, return code: "
|
||||
"%d\n", register_result);
|
||||
return false;
|
||||
}
|
||||
register_result = decoder_->RegisterDecodeCompleteCallback(decode_callback_);
|
||||
if (register_result != WEBRTC_VIDEO_CODEC_OK) {
|
||||
fprintf(stderr, "Failed to register decode complete callback, return code: "
|
||||
"%d\n", register_result);
|
||||
return false;
|
||||
}
|
||||
// Init the encoder and decoder
|
||||
WebRtc_UWord32 nbr_of_cores = 1;
|
||||
if (!config_.use_single_core) {
|
||||
nbr_of_cores = CpuInfo::DetectNumberOfCores();
|
||||
}
|
||||
WebRtc_Word32 init_result =
|
||||
encoder_->InitEncode(config_.codec_settings, nbr_of_cores,
|
||||
config_.networking_config.max_payload_size_in_bytes);
|
||||
if (init_result != WEBRTC_VIDEO_CODEC_OK) {
|
||||
fprintf(stderr, "Failed to initialize VideoEncoder, return code: %d\n",
|
||||
init_result);
|
||||
return false;
|
||||
}
|
||||
init_result = decoder_->InitDecode(config_.codec_settings, nbr_of_cores);
|
||||
if (init_result != WEBRTC_VIDEO_CODEC_OK) {
|
||||
fprintf(stderr, "Failed to initialize VideoDecoder, return code: %d\n",
|
||||
init_result);
|
||||
return false;
|
||||
}
|
||||
|
||||
if (config_.verbose) {
|
||||
printf("Video Processor:\n");
|
||||
printf(" #CPU cores used : %d\n", nbr_of_cores);
|
||||
printf(" Total # of frames: %d\n", frame_reader_->NumberOfFrames());
|
||||
printf(" Codec settings:\n");
|
||||
printf(" Start bitrate : %d kbps\n",
|
||||
config_.codec_settings->startBitrate);
|
||||
printf(" Width : %d\n", config_.codec_settings->width);
|
||||
printf(" Height : %d\n", config_.codec_settings->height);
|
||||
}
|
||||
initialized_ = true;
|
||||
return true;
|
||||
}
|
||||
|
||||
// Detaches the callbacks from the (borrowed) codecs and frees everything
// that Init() allocated. The codecs themselves are owned by the caller.
VideoProcessorImpl::~VideoProcessorImpl() {
  encoder_->RegisterEncodeCompleteCallback(NULL);
  decoder_->RegisterDecodeCompleteCallback(NULL);
  delete encode_callback_;
  delete decode_callback_;
  delete[] source_buffer_;
  delete[] last_successful_frame_buffer_;
}
|
||||
|
||||
|
||||
void VideoProcessorImpl::SetRates(int bit_rate, int frame_rate) {
|
||||
int set_rates_result = encoder_->SetRates(bit_rate, frame_rate);
|
||||
assert(set_rates_result >= 0);
|
||||
if (set_rates_result < 0) {
|
||||
fprintf(stderr, "Failed to update encoder with new rate %d, "
|
||||
"return code: %d\n", bit_rate, set_rates_result);
|
||||
}
|
||||
num_dropped_frames_ = 0;
|
||||
num_spatial_resizes_ = 0;
|
||||
}
|
||||
|
||||
int VideoProcessorImpl::EncodedFrameSize() {
|
||||
return encoded_frame_size_;
|
||||
}
|
||||
|
||||
int VideoProcessorImpl::NumberDroppedFrames() {
|
||||
return num_dropped_frames_;
|
||||
}
|
||||
|
||||
int VideoProcessorImpl::NumberSpatialResizes() {
|
||||
return num_spatial_resizes_;
|
||||
}
|
||||
|
||||
bool VideoProcessorImpl::ProcessFrame(int frame_number) {
|
||||
assert(frame_number >=0);
|
||||
if (!initialized_) {
|
||||
fprintf(stderr, "Attempting to use uninitialized VideoProcessor!\n");
|
||||
return false;
|
||||
}
|
||||
// |prev_time_stamp_| is used for getting number of dropped frames.
|
||||
if (frame_number == 0) {
|
||||
prev_time_stamp_ = -1;
|
||||
}
|
||||
if (frame_reader_->ReadFrame(source_buffer_)) {
|
||||
// Copy the source frame to the newly read frame data.
|
||||
// Length is common for all frames.
|
||||
source_frame_.CopyFrame(source_frame_.Length(), source_buffer_);
|
||||
|
||||
// Ensure we have a new statistics data object we can fill:
|
||||
FrameStatistic& stat = stats_->NewFrame(frame_number);
|
||||
|
||||
encode_start_ = TickTime::Now();
|
||||
// Use the frame number as "timestamp" to identify frames
|
||||
source_frame_.SetTimeStamp(frame_number);
|
||||
|
||||
// Decide if we're going to force a keyframe:
|
||||
std::vector<VideoFrameType> frame_types(1, kDeltaFrame);
|
||||
if (config_.keyframe_interval > 0 &&
|
||||
frame_number % config_.keyframe_interval == 0) {
|
||||
frame_types[0] = kKeyFrame;
|
||||
}
|
||||
|
||||
// For dropped frames, we regard them as zero size encoded frames.
|
||||
encoded_frame_size_ = 0;
|
||||
|
||||
WebRtc_Word32 encode_result = encoder_->Encode(source_frame_, NULL,
|
||||
&frame_types);
|
||||
|
||||
if (encode_result != WEBRTC_VIDEO_CODEC_OK) {
|
||||
fprintf(stderr, "Failed to encode frame %d, return code: %d\n",
|
||||
frame_number, encode_result);
|
||||
}
|
||||
stat.encode_return_code = encode_result;
|
||||
return true;
|
||||
} else {
|
||||
return false; // we've reached the last frame
|
||||
}
|
||||
}
|
||||
|
||||
// Invoked by the encode-complete callback for every successfully encoded
// (i.e. non-zero-length) frame. Records statistics, applies simulated packet
// loss, and hands the frame to the decoder.
void VideoProcessorImpl::FrameEncoded(EncodedImage* encoded_image) {
  // The timestamp is the frame number, so the gap to the previously encoded
  // frame gives the number of frames the encoder dropped in between.
  int frames_dropped = encoded_image->_timeStamp - prev_time_stamp_ - 1;
  num_dropped_frames_ += frames_dropped;
  prev_time_stamp_ = encoded_image->_timeStamp;
  if (frames_dropped > 0) {
    // For each dropped frame, write out the last decoded frame so the output
    // clip stays in sync with the source for PSNR/SSIM computation.
    for (int i = 0; i < frames_dropped; i++) {
      frame_writer_->WriteFrame(last_successful_frame_buffer_);
    }
  }
  // This callback only fires for non-zero-length frames, so this frame was
  // not dropped; record its encoded size.
  encoded_frame_size_ = encoded_image->_length;

  TickTime encode_stop = TickTime::Now();
  int frame_number = encoded_image->_timeStamp;
  FrameStatistic& stat = stats_->stats_[frame_number];
  stat.encode_time_in_us = GetElapsedTimeMicroseconds(encode_start_,
                                                      encode_stop);
  stat.encoding_successful = true;
  stat.encoded_frame_length_in_bytes = encoded_image->_length;
  stat.frame_number = encoded_image->_timeStamp;
  stat.frame_type = encoded_image->_frameType;
  stat.bit_rate_in_kbps = encoded_image->_length * bit_rate_factor_;
  stat.total_packets = encoded_image->_length /
      config_.networking_config.packet_size_in_bytes + 1;

  // Decide whether this frame is shielded from simulated packet loss; only
  // key frames can ever be excluded.
  bool frame_excluded = false;
  if (encoded_image->_frameType == kKeyFrame) {
    switch (config_.exclude_frame_types) {
      case kExcludeOnlyFirstKeyFrame:
        if (!first_key_frame_has_been_excluded_) {
          first_key_frame_has_been_excluded_ = true;
          frame_excluded = true;
        }
        break;
      case kExcludeAllKeyFrames:
        frame_excluded = true;
        break;
      default:
        assert(false);
    }
  }
  if (!frame_excluded) {
    stat.packets_dropped =
        packet_manipulator_->ManipulatePackets(encoded_image);
  }

  // Keep track of whether frames are lost due to packet loss so we can tell
  // this to the decoder (in the full stack this is handled by the RTP logic).
  decode_start_ = TickTime::Now();
  // TODO(kjellander): Pass fragmentation header to the decoder when
  // CL 172001 has been submitted and PacketManipulator supports this.
  WebRtc_Word32 decode_result = decoder_->Decode(*encoded_image,
                                                 last_frame_missing_, NULL);
  stat.decode_return_code = decode_result;
  if (decode_result != WEBRTC_VIDEO_CODEC_OK) {
    // Write the last successful frame to the output file to avoid getting it
    // out of sync with the source file for SSIM and PSNR comparisons.
    frame_writer_->WriteFrame(last_successful_frame_buffer_);
  }
  // Remember whether this frame was lost so the decoder can be informed when
  // the next frame arrives.
  last_frame_missing_ = encoded_image->_length == 0;
}
|
||||
|
||||
void VideoProcessorImpl::FrameDecoded(const VideoFrame& image) {
|
||||
TickTime decode_stop = TickTime::Now();
|
||||
int frame_number = image.TimeStamp();
|
||||
// Report stats
|
||||
FrameStatistic& stat = stats_->stats_[frame_number];
|
||||
stat.decode_time_in_us = GetElapsedTimeMicroseconds(decode_start_,
|
||||
decode_stop);
|
||||
stat.decoding_successful = true;
|
||||
|
||||
// Check for resize action (either down or up):
|
||||
if (static_cast<int>(image.Width()) != last_encoder_frame_width_ ||
|
||||
static_cast<int>(image.Height()) != last_encoder_frame_height_ ) {
|
||||
++num_spatial_resizes_;
|
||||
last_encoder_frame_width_ = image.Width();
|
||||
last_encoder_frame_height_ = image.Height();
|
||||
}
|
||||
// Check if codec size is different from native/original size, and if so,
|
||||
// upsample back to original size: needed for PSNR and SSIM computations.
|
||||
if (image.Width() != config_.codec_settings->width ||
|
||||
image.Height() != config_.codec_settings->height) {
|
||||
VideoFrame up_image;
|
||||
int ret_val = scaler_.Set(image.Width(), image.Height(),
|
||||
config_.codec_settings->width,
|
||||
config_.codec_settings->height,
|
||||
kI420, kI420, kScaleBilinear);
|
||||
assert(ret_val >= 0);
|
||||
if (ret_val < 0) {
|
||||
fprintf(stderr, "Failed to set scalar for frame: %d, return code: %d\n",
|
||||
frame_number, ret_val);
|
||||
}
|
||||
ret_val = scaler_.Scale(image, &up_image);
|
||||
assert(ret_val >= 0);
|
||||
if (ret_val < 0) {
|
||||
fprintf(stderr, "Failed to scale frame: %d, return code: %d\n",
|
||||
frame_number, ret_val);
|
||||
}
|
||||
// Update our copy of the last successful frame:
|
||||
memcpy(last_successful_frame_buffer_, up_image.Buffer(), up_image.Length());
|
||||
|
||||
bool write_success = frame_writer_->WriteFrame(up_image.Buffer());
|
||||
assert(write_success);
|
||||
if (!write_success) {
|
||||
fprintf(stderr, "Failed to write frame %d to disk!", frame_number);
|
||||
}
|
||||
up_image.Free();
|
||||
} else { // No resize.
|
||||
// Update our copy of the last successful frame:
|
||||
memcpy(last_successful_frame_buffer_, image.Buffer(), image.Length());
|
||||
|
||||
bool write_success = frame_writer_->WriteFrame(image.Buffer());
|
||||
assert(write_success);
|
||||
if (!write_success) {
|
||||
fprintf(stderr, "Failed to write frame %d to disk!", frame_number);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
int VideoProcessorImpl::GetElapsedTimeMicroseconds(
|
||||
const webrtc::TickTime& start, const webrtc::TickTime& stop) {
|
||||
WebRtc_UWord64 encode_time = (stop - start).Microseconds();
|
||||
assert(encode_time <
|
||||
static_cast<unsigned int>(std::numeric_limits<int>::max()));
|
||||
return static_cast<int>(encode_time);
|
||||
}
|
||||
|
||||
// Maps an ExcludeFrameTypes value to its human-readable name.
const char* ExcludeFrameTypesToStr(ExcludeFrameTypes e) {
  switch (e) {
    case kExcludeOnlyFirstKeyFrame:
      return "ExcludeOnlyFirstKeyFrame";
    case kExcludeAllKeyFrames:
      return "ExcludeAllKeyFrames";
    default:
      assert(false);  // Unhandled enum value.
      return "Unknown";
  }
}
|
||||
|
||||
// Maps a VideoCodecType value to its human-readable name.
const char* VideoCodecTypeToStr(webrtc::VideoCodecType e) {
  switch (e) {
    case kVideoCodecVP8:
      return "VP8";
    case kVideoCodecI420:
      return "I420";
    case kVideoCodecRED:
      return "RED";
    case kVideoCodecULPFEC:
      return "ULPFEC";
    case kVideoCodecUnknown:
      return "Unknown";
    default:
      assert(false);  // New codec type not handled here yet.
      return "Unknown";
  }
}
|
||||
|
||||
// Callbacks
|
||||
// Encode-complete callback: forwards the encoded frame to the owning
// VideoProcessorImpl. The codec-specific info and fragmentation header are
// not used here (see TODO in FrameEncoded).
WebRtc_Word32
VideoProcessorImpl::VideoProcessorEncodeCompleteCallback::Encoded(
    EncodedImage& encoded_image,
    const webrtc::CodecSpecificInfo* /*codec_specific_info*/,
    const webrtc::RTPFragmentationHeader* /*fragmentation*/) {
  video_processor_->FrameEncoded(&encoded_image);
  return 0;
}
|
||||
// Decode-complete callback: forwards the decoded frame to the owning
// VideoProcessorImpl.
WebRtc_Word32
VideoProcessorImpl::VideoProcessorDecodeCompleteCallback::Decoded(
    VideoFrame& image) {
  video_processor_->FrameDecoded(image);
  return 0;
}
|
||||
|
||||
} // namespace test
|
||||
} // namespace webrtc
|
||||
260
webrtc/modules/video_coding/codecs/test/videoprocessor.h
Normal file
260
webrtc/modules/video_coding/codecs/test/videoprocessor.h
Normal file
@@ -0,0 +1,260 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_VIDEOPROCESSOR_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_VIDEOPROCESSOR_H_
|
||||
|
||||
#include <string>
|
||||
|
||||
#include "common_video/libyuv/include/webrtc_libyuv.h"
|
||||
#include "common_video/libyuv/include/scaler.h"
|
||||
#include "modules/interface/module_common_types.h"
|
||||
#include "modules/video_coding/codecs/interface/video_codec_interface.h"
|
||||
#include "modules/video_coding/codecs/test/packet_manipulator.h"
|
||||
#include "modules/video_coding/codecs/test/stats.h"
|
||||
#include "system_wrappers/interface/tick_util.h"
|
||||
#include "testsupport/frame_reader.h"
|
||||
#include "testsupport/frame_writer.h"
|
||||
|
||||
namespace webrtc {
|
||||
namespace test {
|
||||
|
||||
// Defines which frame types shall be excluded from packet loss and when.
|
||||
// Defines which frame types shall be excluded from simulated packet loss,
// and when. See VideoProcessorImpl::FrameEncoded for where this is applied.
enum ExcludeFrameTypes {
  // Exclude only the first key frame in the video sequence from packet loss;
  // all following key frames are targeted for packet loss like delta frames.
  kExcludeOnlyFirstKeyFrame,
  // Exclude every key frame from packet loss, no matter where in the video
  // sequence it occurs.
  kExcludeAllKeyFrames
};
|
||||
// Returns a string representation of the enum value.
|
||||
const char* ExcludeFrameTypesToStr(ExcludeFrameTypes e);
|
||||
|
||||
// Test configuration for a test run
|
||||
struct TestConfig {
|
||||
TestConfig()
|
||||
: name(""), description(""), test_number(0),
|
||||
input_filename(""), output_filename(""), output_dir("out"),
|
||||
networking_config(), exclude_frame_types(kExcludeOnlyFirstKeyFrame),
|
||||
frame_length_in_bytes(-1), use_single_core(false), keyframe_interval(0),
|
||||
codec_settings(NULL), verbose(true) {
|
||||
};
|
||||
|
||||
// Name of the test. This is purely metadata and does not affect
|
||||
// the test in any way.
|
||||
std::string name;
|
||||
|
||||
// More detailed description of the test. This is purely metadata and does
|
||||
// not affect the test in any way.
|
||||
std::string description;
|
||||
|
||||
// Number of this test. Useful if multiple runs of the same test with
|
||||
// different configurations shall be managed.
|
||||
int test_number;
|
||||
|
||||
// File to process for the test. This must be a video file in the YUV format.
|
||||
std::string input_filename;
|
||||
|
||||
// File to write to during processing for the test. Will be a video file
|
||||
// in the YUV format.
|
||||
std::string output_filename;
|
||||
|
||||
// Path to the directory where encoded files will be put
|
||||
// (absolute or relative to the executable). Default: "out".
|
||||
std::string output_dir;
|
||||
|
||||
// Configurations related to networking.
|
||||
NetworkingConfig networking_config;
|
||||
|
||||
// Decides how the packet loss simulations shall exclude certain frames
|
||||
// from packet loss. Default: kExcludeOnlyFirstKeyFrame.
|
||||
ExcludeFrameTypes exclude_frame_types;
|
||||
|
||||
// The length of a single frame of the input video file. This value is
|
||||
// calculated out of the width and height according to the video format
|
||||
// specification. Must be set before processing.
|
||||
int frame_length_in_bytes;
|
||||
|
||||
// Force the encoder and decoder to use a single core for processing.
|
||||
// Using a single core is necessary to get a deterministic behavior for the
|
||||
// encoded frames - using multiple cores will produce different encoded frames
|
||||
// since multiple cores are competing to consume the byte budget for each
|
||||
// frame in parallel.
|
||||
// If set to false, the maximum number of available cores will be used.
|
||||
// Default: false.
|
||||
bool use_single_core;
|
||||
|
||||
// If set to a value >0 this setting forces the encoder to create a keyframe
|
||||
// every Nth frame. Note that the encoder may create a keyframe in other
|
||||
// locations in addition to the interval that is set using this parameter.
|
||||
// Forcing key frames may also affect encoder planning optimizations in
|
||||
// a negative way, since it will suddenly be forced to produce an expensive
|
||||
// key frame.
|
||||
// Default: 0.
|
||||
int keyframe_interval;
|
||||
|
||||
// The codec settings to use for the test (target bitrate, video size,
|
||||
// framerate and so on). This struct must be created and filled in using
|
||||
// the VideoCodingModule::Codec() method.
|
||||
webrtc::VideoCodec* codec_settings;
|
||||
|
||||
// If printing of information to stdout shall be performed during processing.
|
||||
bool verbose;
|
||||
};
|
||||
|
||||
// Returns a string representation of the enum value.
|
||||
const char* VideoCodecTypeToStr(webrtc::VideoCodecType e);
|
||||
|
||||
// Handles encoding/decoding of video using the VideoEncoder/VideoDecoder
|
||||
// interfaces. This is done in a sequential manner in order to be able to
|
||||
// measure times properly.
|
||||
// The class processes a frame at the time for the configured input file.
|
||||
// It maintains state of where in the source input file the processing is at.
|
||||
//
|
||||
// Regarding packet loss: Note that keyframes are excluded (first or all
|
||||
// depending on the ExcludeFrameTypes setting). This is because if key frames
|
||||
// would be altered, all the following delta frames would be pretty much
|
||||
// worthless. VP8 has an error-resilience feature that makes it able to handle
|
||||
// packet loss in key non-first keyframes, which is why only the first is
|
||||
// excluded by default.
|
||||
// Packet loss in such important frames is handled on a higher level in the
|
||||
// Video Engine, where signaling would request a retransmit of the lost packets,
|
||||
// since they're so important.
|
||||
//
|
||||
// Note this class is not thread safe in any way and is meant for simple testing
|
||||
// purposes.
|
||||
// Abstract interface for sequential encode/decode processing of a video clip.
// See the file-level comment above for the packet-loss exclusion rationale.
// Not thread safe; intended for simple testing purposes only.
class VideoProcessor {
 public:
  virtual ~VideoProcessor() {}

  // Performs initial calculations about frame size, sets up callbacks etc.
  // Returns false if an error has occurred, in addition to printing to stderr.
  virtual bool Init() = 0;

  // Processes a single frame. Returns true as long as there's more frames
  // available in the source clip.
  // Frame number must be an integer >= 0.
  virtual bool ProcessFrame(int frame_number) = 0;

  // Updates the encoder with the target bit rate (kbps) and the frame rate.
  virtual void SetRates(int bit_rate, int frame_rate) = 0;

  // Return the size of the encoded frame in bytes. Frames dropped by the
  // encoder are regarded as zero size.
  virtual int EncodedFrameSize() = 0;

  // Return the number of dropped frames.
  virtual int NumberDroppedFrames() = 0;

  // Return the number of spatial resizes.
  virtual int NumberSpatialResizes() = 0;
};
|
||||
|
||||
// Concrete VideoProcessor implementation. Drives one encoder/decoder pair
// over a clip supplied by |frame_reader|, applying simulated packet loss via
// |packet_manipulator| and writing decoded output through |frame_writer|.
// All constructor collaborators are borrowed, not owned, and must outlive
// this object. Init() must succeed before ProcessFrame() is called.
class VideoProcessorImpl : public VideoProcessor {
 public:
  VideoProcessorImpl(webrtc::VideoEncoder* encoder,
                     webrtc::VideoDecoder* decoder,
                     FrameReader* frame_reader,
                     FrameWriter* frame_writer,
                     PacketManipulator* packet_manipulator,
                     const TestConfig& config,
                     Stats* stats);
  virtual ~VideoProcessorImpl();
  virtual bool Init();
  virtual bool ProcessFrame(int frame_number);

 private:
  // Invoked by the callback when a frame has completed encoding.
  void FrameEncoded(webrtc::EncodedImage* encodedImage);
  // Invoked by the callback when a frame has completed decoding.
  void FrameDecoded(const webrtc::VideoFrame& image);
  // Used for getting a 32-bit integer representing time
  // (checks the size is within signed 32-bit bounds before casting it).
  int GetElapsedTimeMicroseconds(const webrtc::TickTime& start,
                                 const webrtc::TickTime& stop);
  // Updates the encoder with the target bit rate and the frame rate.
  // NOTE: these four override public pure virtuals from VideoProcessor and
  // so remain callable through a VideoProcessor pointer.
  void SetRates(int bit_rate, int frame_rate);
  // Return the size of the encoded frame in bytes.
  int EncodedFrameSize();
  // Return the number of dropped frames.
  int NumberDroppedFrames();
  // Return the number of spatial resizes.
  int NumberSpatialResizes();

  webrtc::VideoEncoder* encoder_;     // Not owned.
  webrtc::VideoDecoder* decoder_;     // Not owned.
  FrameReader* frame_reader_;         // Not owned.
  FrameWriter* frame_writer_;         // Not owned.
  PacketManipulator* packet_manipulator_;  // Not owned.
  const TestConfig& config_;
  Stats* stats_;                      // Not owned.

  EncodedImageCallback* encode_callback_;  // Owned; created in Init().
  DecodedImageCallback* decode_callback_;  // Owned; created in Init().
  // Buffer used for reading the source video file; allocated in Init().
  WebRtc_UWord8* source_buffer_;
  // Keep track of the last successful frame, since we need to write that
  // when decoding fails. Allocated in Init().
  WebRtc_UWord8* last_successful_frame_buffer_;
  webrtc::VideoFrame source_frame_;
  // To keep track of if we have excluded the first key frame from packet loss.
  bool first_key_frame_has_been_excluded_;
  // To tell the decoder previous frame have been dropped due to packet loss.
  bool last_frame_missing_;
  // If Init() has executed successfully.
  bool initialized_;
  // Size of the last encoded frame in bytes (0 if it was dropped).
  int encoded_frame_size_;
  // Timestamp of the previously encoded frame; used to count dropped frames.
  int prev_time_stamp_;
  int num_dropped_frames_;
  int num_spatial_resizes_;
  // Last frame dimensions seen from the encoder, for resize detection.
  int last_encoder_frame_width_;
  int last_encoder_frame_height_;
  Scaler scaler_;

  // Statistics
  double bit_rate_factor_;  // Multiply frame length with this to get bit rate.
  webrtc::TickTime encode_start_;
  webrtc::TickTime decode_start_;

  // Callback class required to implement according to the VideoEncoder API.
  class VideoProcessorEncodeCompleteCallback
    : public webrtc::EncodedImageCallback {
   public:
    explicit VideoProcessorEncodeCompleteCallback(VideoProcessorImpl* vp)
        : video_processor_(vp) {
    }
    WebRtc_Word32 Encoded(
        webrtc::EncodedImage& encoded_image,
        const webrtc::CodecSpecificInfo* codec_specific_info = NULL,
        const webrtc::RTPFragmentationHeader* fragmentation = NULL);

   private:
    VideoProcessorImpl* video_processor_;  // Not owned.
  };

  // Callback class required to implement according to the VideoDecoder API.
  class VideoProcessorDecodeCompleteCallback
    : public webrtc::DecodedImageCallback {
   public:
    explicit VideoProcessorDecodeCompleteCallback(VideoProcessorImpl* vp)
        : video_processor_(vp) {
    }
    WebRtc_Word32 Decoded(webrtc::VideoFrame& image);

   private:
    VideoProcessorImpl* video_processor_;  // Not owned.
  };
};
|
||||
|
||||
} // namespace test
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_VIDEOPROCESSOR_H_
|
||||
@@ -0,0 +1,750 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "gtest/gtest.h"
|
||||
|
||||
#include <math.h>
|
||||
|
||||
#include "modules/video_coding/codecs/interface/video_codec_interface.h"
|
||||
#include "modules/video_coding/codecs/test/packet_manipulator.h"
|
||||
#include "modules/video_coding/codecs/test/videoprocessor.h"
|
||||
#include "modules/video_coding/codecs/vp8/include/vp8.h"
|
||||
#include "modules/video_coding/codecs/vp8/include/vp8_common_types.h"
|
||||
#include "modules/video_coding/main/interface/video_coding.h"
|
||||
#include "testsupport/fileutils.h"
|
||||
#include "testsupport/frame_reader.h"
|
||||
#include "testsupport/frame_writer.h"
|
||||
#include "testsupport/metrics/video_metrics.h"
|
||||
#include "testsupport/packet_reader.h"
|
||||
#include "typedefs.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Maximum number of rate updates (i.e., calls to encoder to change bitrate
|
||||
// and/or frame rate) for the current tests.
|
||||
const int kMaxNumRateUpdates = 3;
|
||||
|
||||
const int kPercTargetvsActualMismatch = 20;
|
||||
|
||||
// Codec and network settings.
|
||||
// Codec and network settings for one integration-test case.
struct CodecConfigPars {
  float packet_loss;         // Packet loss probability fed to the simulator.
  int num_temporal_layers;   // Number of temporal layers for the encoder.
  int key_frame_interval;    // Forced key frame interval; 0 disables forcing.
  bool error_concealment_on;
  bool denoising_on;
  bool frame_dropper_on;
  bool spatial_resize_on;
};
|
||||
|
||||
// Quality metrics.
|
||||
// Minimum acceptable quality thresholds (PSNR/SSIM) for a test run.
struct QualityMetrics {
  double minimum_avg_psnr;  // Lowest acceptable average PSNR over the clip.
  double minimum_min_psnr;  // Lowest acceptable single-frame PSNR.
  double minimum_avg_ssim;  // Lowest acceptable average SSIM over the clip.
  double minimum_min_ssim;  // Lowest acceptable single-frame SSIM.
};
|
||||
|
||||
// The sequence of bitrate and frame rate changes for the encoder, the frame
|
||||
// number where the changes are made, and the total number of frames for the
|
||||
// test.
|
||||
// The sequence of bitrate and frame rate changes for the encoder, the frame
// number where the changes are made, and the total number of frames for the
// test.
struct RateProfile {
  int target_bit_rate[kMaxNumRateUpdates];   // kbps, one per update.
  int input_frame_rate[kMaxNumRateUpdates];  // fps, one per update.
  // Frame index at which each update takes effect (one extra entry marks
  // the end of the last interval).
  int frame_index_rate_update[kMaxNumRateUpdates + 1];
  int num_frames;  // Total number of frames processed in the test.
};
|
||||
|
||||
// Metrics for the rate control. The rate mismatch metrics are defined as
|
||||
// percentages.|max_time_hit_target| is defined as number of frames, after a
|
||||
// rate update is made to the encoder, for the encoder to reach within
|
||||
// |kPercTargetvsActualMismatch| of new target rate. The metrics are defined for
|
||||
// each rate update sequence.
|
||||
// Metrics for the rate control, defined per rate-update sequence. The rate
// mismatch metrics are percentages. |max_time_hit_target| is the number of
// frames allowed, after a rate update, for the encoder to reach within
// |kPercTargetvsActualMismatch| of the new target rate.
struct RateControlMetrics {
  int max_num_dropped_frames;
  int max_key_frame_size_mismatch;    // Percent.
  int max_delta_frame_size_mismatch;  // Percent.
  int max_encoding_rate_mismatch;     // Percent.
  int max_time_hit_target;            // In frames.
  int num_spatial_resizes;
};
|
||||
|
||||
|
||||
// Sequence used is foreman (CIF): may be better to use VGA for resize test.
|
||||
const int kCIFWidth = 352;
|
||||
const int kCIFHeight = 288;
|
||||
const int kNbrFramesShort = 100; // Some tests are run for shorter sequence.
|
||||
const int kNbrFramesLong = 299;
|
||||
|
||||
// Parameters from VP8 wrapper, which control target size of key frames.
|
||||
const float kInitialBufferSize = 0.5f;
|
||||
const float kOptimalBufferSize = 0.6f;
|
||||
const float kScaleKeyFrameSize = 0.5f;
|
||||
|
||||
// Integration test for video processor. Encodes+decodes a clip and
// writes it to the output directory. After completion, quality metrics
// (PSNR and SSIM) and rate control metrics are computed to verify that the
// quality and encoder response is acceptable. The rate control tests allow us
// to verify the behavior for changing bitrate, changing frame rate, frame
// dropping/spatial resize, and temporal layers. The limits for the rate
// control metrics are set to be fairly conservative, so failure should only
// happen when some significant regression or breakdown occurs.
class VideoProcessorIntegrationTest: public testing::Test {
 protected:
  // Owned components; allocated in SetUpCodecConfig() and freed in TearDown().
  VideoEncoder* encoder_;
  VideoDecoder* decoder_;
  webrtc::test::FrameReader* frame_reader_;
  webrtc::test::FrameWriter* frame_writer_;
  webrtc::test::PacketReader packet_reader_;
  webrtc::test::PacketManipulator* packet_manipulator_;
  webrtc::test::Stats stats_;
  webrtc::test::TestConfig config_;
  VideoCodec codec_settings_;
  webrtc::test::VideoProcessor* processor_;

  // Quantities defined/updated for every encoder rate update.
  // Some quantities defined per temporal layer (at most 3 layers in this test).
  int num_frames_per_update_[3];       // Frames encoded since the last update.
  float sum_frame_size_mismatch_[3];   // Accumulated relative size mismatch.
  float sum_encoded_frame_size_[3];    // Accumulated encoded size, in kbits.
  float encoding_bitrate_[3];          // Running encoding rate per layer.
  float per_frame_bandwidth_[3];       // Target size per frame, per layer.
  float bit_rate_layer_[3];            // Target bitrate per layer.
  float frame_rate_layer_[3];          // Frame rate per layer.
  int num_frames_total_;               // Frames since last rate update.
  float sum_encoded_frame_size_total_;
  float encoding_bitrate_total_;
  float perc_encoding_rate_mismatch_;  // |encoded - target| rate, in percent.
  int num_frames_to_hit_target_;
  bool encoding_rate_within_target_;
  int bit_rate_;                       // Current total target bitrate (kbps).
  int frame_rate_;                     // Current input frame rate (fps).
  int layer_;                          // Temporal layer of the current frame.
  float target_size_key_frame_initial_;
  float target_size_key_frame_;
  float sum_key_frame_size_mismatch_;
  int num_key_frames_;
  float start_bitrate_;

  // Codec and network settings.
  float packet_loss_;
  int num_temporal_layers_;
  int key_frame_interval_;
  bool error_concealment_on_;
  bool denoising_on_;
  bool frame_dropper_on_;
  bool spatial_resize_on_;

  VideoProcessorIntegrationTest() {}
  virtual ~VideoProcessorIntegrationTest() {}

  // Creates the encoder/decoder, frame reader/writer, packet manipulator and
  // video processor, configured from the member settings above. Must be
  // called after the codec/network members have been assigned.
  void SetUpCodecConfig() {
    encoder_ = VP8Encoder::Create();
    decoder_ = VP8Decoder::Create();

    // CIF is currently used for all tests below.
    // Setup the TestConfig struct for processing of a clip in CIF resolution.
    config_.input_filename =
        webrtc::test::ResourcePath("foreman_cif", "yuv");
    config_.output_filename = webrtc::test::OutputPath() +
        "foreman_cif_short_video_codecs_test_framework_integrationtests.yuv";
    // I420: 1.5 bytes per pixel.
    config_.frame_length_in_bytes = 3 * kCIFWidth * kCIFHeight / 2;
    config_.verbose = false;
    // Only allow encoder/decoder to use single core, for predictability.
    config_.use_single_core = true;
    // Key frame interval and packet loss are set for each test.
    config_.keyframe_interval = key_frame_interval_;
    config_.networking_config.packet_loss_probability = packet_loss_;

    // Get a codec configuration struct and configure it.
    VideoCodingModule::Codec(kVideoCodecVP8, &codec_settings_);
    config_.codec_settings = &codec_settings_;
    config_.codec_settings->startBitrate = start_bitrate_;
    config_.codec_settings->width = kCIFWidth;
    config_.codec_settings->height = kCIFHeight;
    // These features may be set depending on the test.
    config_.codec_settings->codecSpecific.VP8.errorConcealmentOn =
        error_concealment_on_;
    config_.codec_settings->codecSpecific.VP8.denoisingOn =
        denoising_on_;
    config_.codec_settings->codecSpecific.VP8.numberOfTemporalLayers =
        num_temporal_layers_;
    config_.codec_settings->codecSpecific.VP8.frameDroppingOn =
        frame_dropper_on_;
    config_.codec_settings->codecSpecific.VP8.automaticResizeOn =
        spatial_resize_on_;

    frame_reader_ =
        new webrtc::test::FrameReaderImpl(config_.input_filename,
                                          config_.frame_length_in_bytes);
    frame_writer_ =
        new webrtc::test::FrameWriterImpl(config_.output_filename,
                                          config_.frame_length_in_bytes);
    ASSERT_TRUE(frame_reader_->Init());
    ASSERT_TRUE(frame_writer_->Init());

    packet_manipulator_ = new webrtc::test::PacketManipulatorImpl(
        &packet_reader_, config_.networking_config, config_.verbose);
    processor_ = new webrtc::test::VideoProcessorImpl(encoder_, decoder_,
                                                      frame_reader_,
                                                      frame_writer_,
                                                      packet_manipulator_,
                                                      config_, &stats_);
    ASSERT_TRUE(processor_->Init());
  }

  // Reset quantities after each encoder update, update the target
  // per-frame bandwidth. |num_frames| is the frame index at which the next
  // rate update occurs (used as the target time to hit the encoding rate).
  void ResetRateControlMetrics(int num_frames) {
    for (int i = 0; i < num_temporal_layers_; i++) {
      num_frames_per_update_[i] = 0;
      sum_frame_size_mismatch_[i] = 0.0f;
      sum_encoded_frame_size_[i] = 0.0f;
      encoding_bitrate_[i] = 0.0f;
      // Update layer per-frame-bandwidth.
      per_frame_bandwidth_[i] = static_cast<float>(bit_rate_layer_[i]) /
          static_cast<float>(frame_rate_layer_[i]);
    }
    // Set maximum size of key frames, following setting in the VP8 wrapper.
    float max_key_size = kScaleKeyFrameSize * kOptimalBufferSize * frame_rate_;
    // We don't know exact target size of the key frames (except for first one),
    // but the minimum in libvpx is ~|3 * per_frame_bandwidth| and maximum is
    // set by |max_key_size_ * per_frame_bandwidth|. Take middle point/average
    // as reference for mismatch. Note key frames always correspond to base
    // layer frame in this test.
    target_size_key_frame_ = 0.5 * (3 + max_key_size) * per_frame_bandwidth_[0];
    num_frames_total_ = 0;
    sum_encoded_frame_size_total_ = 0.0f;
    encoding_bitrate_total_ = 0.0f;
    perc_encoding_rate_mismatch_ = 0.0f;
    num_frames_to_hit_target_ = num_frames;
    encoding_rate_within_target_ = false;
    sum_key_frame_size_mismatch_ = 0.0;
    num_key_frames_ = 0;
  }

  // For every encoded frame, update the rate control metrics.
  // |frame_num| is 1-based (caller increments before calling); frame_num == 1
  // is the very first (key) frame, which has its own size target.
  void UpdateRateControlMetrics(int frame_num, VideoFrameType frame_type) {
    int encoded_frame_size = processor_->EncodedFrameSize();
    float encoded_size_kbits = encoded_frame_size * 8.0f / 1000.0f;
    // Update layer data.
    // Update rate mismatch relative to per-frame bandwidth for delta frames.
    if (frame_type == kDeltaFrame) {
      // TODO(marpan): Should we count dropped (zero size) frames in mismatch?
      sum_frame_size_mismatch_[layer_] += fabs(encoded_size_kbits -
                                               per_frame_bandwidth_[layer_]) /
                                               per_frame_bandwidth_[layer_];
    } else {
      float target_size = (frame_num == 1) ? target_size_key_frame_initial_ :
          target_size_key_frame_;
      sum_key_frame_size_mismatch_ += fabs(encoded_size_kbits - target_size) /
          target_size;
      num_key_frames_ += 1;
    }
    sum_encoded_frame_size_[layer_] += encoded_size_kbits;
    // Encoding bitrate per layer: from the start of the update/run to the
    // current frame.
    encoding_bitrate_[layer_] = sum_encoded_frame_size_[layer_] *
        frame_rate_layer_[layer_] /
        num_frames_per_update_[layer_];
    // Total encoding rate: from the start of the update/run to current frame.
    sum_encoded_frame_size_total_ += encoded_size_kbits;
    encoding_bitrate_total_ = sum_encoded_frame_size_total_ * frame_rate_ /
        num_frames_total_;
    perc_encoding_rate_mismatch_ = 100 * fabs(encoding_bitrate_total_ -
                                              bit_rate_) / bit_rate_;
    if (perc_encoding_rate_mismatch_ < kPercTargetvsActualMismatch &&
        !encoding_rate_within_target_) {
      num_frames_to_hit_target_ = num_frames_total_;
      encoding_rate_within_target_ = true;
    }
  }

  // Verify expected behavior of rate control and print out data.
  void VerifyRateControl(int update_index,
                         int max_key_frame_size_mismatch,
                         int max_delta_frame_size_mismatch,
                         int max_encoding_rate_mismatch,
                         int max_time_hit_target,
                         int max_num_dropped_frames,
                         int num_spatial_resizes) {
    int num_dropped_frames = processor_->NumberDroppedFrames();
    int num_resize_actions = processor_->NumberSpatialResizes();
    printf("For update #: %d,\n "
        " Target Bitrate: %d,\n"
        " Encoding bitrate: %f,\n"
        " Frame rate: %d \n",
        update_index, bit_rate_, encoding_bitrate_total_, frame_rate_);
    printf(" Number of frames to approach target rate = %d, \n"
           " Number of dropped frames = %d, \n"
           " Number of spatial resizes = %d, \n",
           num_frames_to_hit_target_, num_dropped_frames, num_resize_actions);
    EXPECT_LE(perc_encoding_rate_mismatch_, max_encoding_rate_mismatch);
    if (num_key_frames_ > 0) {
      int perc_key_frame_size_mismatch = 100 * sum_key_frame_size_mismatch_ /
          num_key_frames_;
      printf(" Number of Key frames: %d \n"
             " Key frame rate mismatch: %d \n",
             num_key_frames_, perc_key_frame_size_mismatch);
      EXPECT_LE(perc_key_frame_size_mismatch, max_key_frame_size_mismatch);
    }
    printf("\n");
    printf("Rates statistics for Layer data \n");
    for (int i = 0; i < num_temporal_layers_; i++) {
      printf("Layer #%d \n", i);
      int perc_frame_size_mismatch = 100 * sum_frame_size_mismatch_[i] /
          num_frames_per_update_[i];
      int perc_encoding_rate_mismatch = 100 * fabs(encoding_bitrate_[i] -
                                                   bit_rate_layer_[i]) /
                                                   bit_rate_layer_[i];
      printf(" Target Layer Bit rate: %f \n"
             " Layer frame rate: %f, \n"
             " Layer per frame bandwidth: %f, \n"
             " Layer Encoding bit rate: %f, \n"
             " Layer Percent frame size mismatch: %d, \n"
             " Layer Percent encoding rate mismatch = %d, \n"
             " Number of frame processed per layer = %d \n",
             bit_rate_layer_[i], frame_rate_layer_[i], per_frame_bandwidth_[i],
             encoding_bitrate_[i], perc_frame_size_mismatch,
             perc_encoding_rate_mismatch, num_frames_per_update_[i]);
      EXPECT_LE(perc_frame_size_mismatch, max_delta_frame_size_mismatch);
      EXPECT_LE(perc_encoding_rate_mismatch, max_encoding_rate_mismatch);
    }
    printf("\n");
    EXPECT_LE(num_frames_to_hit_target_, max_time_hit_target);
    EXPECT_LE(num_dropped_frames, max_num_dropped_frames);
    EXPECT_EQ(num_resize_actions, num_spatial_resizes);
  }

  // Layer index corresponding to frame number, for up to 3 layers.
  // Stores the result in |layer_| (does not return it).
  void LayerIndexForFrame(int frame_number) {
    if (num_temporal_layers_ == 1) {
      layer_ = 0;
    } else if (num_temporal_layers_ == 2) {
      // layer 0:  0     2     4 ...
      // layer 1:     1     3
      if (frame_number % 2 == 0) {
        layer_ = 0;
      } else {
        layer_ = 1;
      }
    } else if (num_temporal_layers_ == 3) {
      // layer 0:  0            4            8 ...
      // layer 1:        2            6
      // layer 2:     1      3      5      7
      if (frame_number % 4 == 0) {
        layer_ = 0;
      } else if ((frame_number + 2) % 4 == 0) {
        layer_ = 1;
      } else if ((frame_number + 1) % 2 == 0) {
        layer_ = 2;
      }
    } else {
      assert(false);  // Only up to 3 layers.
    }
  }

  // Set the bitrate and frame rate per layer, for up to 3 layers.
  // Layer bitrates are the per-layer deltas of the cumulative allocation in
  // |kVp8LayerRateAlloction|; layer frame rates split |frame_rate_| across
  // layers (highest layer gets half the total rate for 3 layers).
  void SetLayerRates() {
    assert(num_temporal_layers_ <= 3);
    for (int i = 0; i < num_temporal_layers_; i++) {
      float bit_rate_ratio =
          kVp8LayerRateAlloction[num_temporal_layers_ - 1][i];
      if (i > 0) {
        float bit_rate_delta_ratio = kVp8LayerRateAlloction
            [num_temporal_layers_ - 1][i] -
            kVp8LayerRateAlloction[num_temporal_layers_ - 1][i - 1];
        bit_rate_layer_[i] = bit_rate_ * bit_rate_delta_ratio;
      } else {
        bit_rate_layer_[i] = bit_rate_ * bit_rate_ratio;
      }
      frame_rate_layer_[i] = frame_rate_ / static_cast<float>(
          1 << (num_temporal_layers_ - 1));
    }
    if (num_temporal_layers_ == 3) {
      frame_rate_layer_[2] = frame_rate_ / 2.0f;
    }
  }

  // Returns the expected frame type for frame |frame_number|: a key frame for
  // the first frame and at every |key_frame_interval_| (when positive),
  // otherwise a delta frame.
  VideoFrameType FrameType(int frame_number) {
    if (frame_number == 0 || ((frame_number) % key_frame_interval_ == 0 &&
        key_frame_interval_ > 0)) {
      return kKeyFrame;
    } else {
      return kDeltaFrame;
    }
  }

  // Releases everything allocated by SetUpCodecConfig().
  void TearDown() {
    delete processor_;
    delete packet_manipulator_;
    delete frame_writer_;
    delete frame_reader_;
    delete decoder_;
    delete encoder_;
  }

  // Processes all frames in the clip and verifies the result.
  // |rc_metrics| is an array with one entry per rate update in
  // |rate_profile|; each update segment is verified against its entry.
  void ProcessFramesAndVerify(QualityMetrics quality_metrics,
                              RateProfile rate_profile,
                              CodecConfigPars process,
                              RateControlMetrics* rc_metrics) {
    // Codec/config settings.
    start_bitrate_ = rate_profile.target_bit_rate[0];
    packet_loss_ = process.packet_loss;
    key_frame_interval_ = process.key_frame_interval;
    num_temporal_layers_ = process.num_temporal_layers;
    error_concealment_on_ = process.error_concealment_on;
    denoising_on_ = process.denoising_on;
    frame_dropper_on_ = process.frame_dropper_on;
    spatial_resize_on_ = process.spatial_resize_on;
    SetUpCodecConfig();
    // Update the layers and the codec with the initial rates.
    bit_rate_ = rate_profile.target_bit_rate[0];
    frame_rate_ = rate_profile.input_frame_rate[0];
    SetLayerRates();
    // Set the initial target size for key frame.
    target_size_key_frame_initial_ = 0.5 * kInitialBufferSize *
        bit_rate_layer_[0];
    processor_->SetRates(bit_rate_, frame_rate_);
    // Process each frame, up to |num_frames|.
    int num_frames = rate_profile.num_frames;
    int update_index = 0;
    ResetRateControlMetrics(
        rate_profile.frame_index_rate_update[update_index + 1]);
    int frame_number = 0;
    VideoFrameType frame_type = kDeltaFrame;
    while (processor_->ProcessFrame(frame_number) &&
        frame_number < num_frames) {
      // Get the layer index for the frame |frame_number|.
      LayerIndexForFrame(frame_number);
      frame_type = FrameType(frame_number);
      // Counter for whole sequence run.
      ++frame_number;
      // Counters for each rate update.
      ++num_frames_per_update_[layer_];
      ++num_frames_total_;
      UpdateRateControlMetrics(frame_number, frame_type);
      // If we hit another/next update, verify stats for current state and
      // update layers and codec with new rates.
      if (frame_number ==
          rate_profile.frame_index_rate_update[update_index + 1]) {
        VerifyRateControl(
            update_index,
            rc_metrics[update_index].max_key_frame_size_mismatch,
            rc_metrics[update_index].max_delta_frame_size_mismatch,
            rc_metrics[update_index].max_encoding_rate_mismatch,
            rc_metrics[update_index].max_time_hit_target,
            rc_metrics[update_index].max_num_dropped_frames,
            rc_metrics[update_index].num_spatial_resizes);
        // Update layer rates and the codec with new rates.
        ++update_index;
        bit_rate_ = rate_profile.target_bit_rate[update_index];
        frame_rate_ = rate_profile.input_frame_rate[update_index];
        SetLayerRates();
        ResetRateControlMetrics(rate_profile.
            frame_index_rate_update[update_index + 1]);
        processor_->SetRates(bit_rate_, frame_rate_);
      }
    }
    // Verify the final (still open) update segment.
    VerifyRateControl(
        update_index,
        rc_metrics[update_index].max_key_frame_size_mismatch,
        rc_metrics[update_index].max_delta_frame_size_mismatch,
        rc_metrics[update_index].max_encoding_rate_mismatch,
        rc_metrics[update_index].max_time_hit_target,
        rc_metrics[update_index].max_num_dropped_frames,
        rc_metrics[update_index].num_spatial_resizes);
    EXPECT_EQ(num_frames, frame_number);
    EXPECT_EQ(num_frames + 1, static_cast<int>(stats_.stats_.size()));

    // Release encoder and decoder to make sure they have finished processing:
    EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
    EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Release());
    // Close the files before we start using them for SSIM/PSNR calculations.
    frame_reader_->Close();
    frame_writer_->Close();

    // TODO(marpan): should compute these quality metrics per SetRates update.
    webrtc::test::QualityMetricsResult psnr_result, ssim_result;
    EXPECT_EQ(0, webrtc::test::I420MetricsFromFiles(
        config_.input_filename.c_str(),
        config_.output_filename.c_str(),
        config_.codec_settings->width,
        config_.codec_settings->height,
        &psnr_result,
        &ssim_result));
    printf("PSNR avg: %f, min: %f SSIM avg: %f, min: %f\n",
           psnr_result.average, psnr_result.min,
           ssim_result.average, ssim_result.min);
    stats_.PrintSummary();
    EXPECT_GT(psnr_result.average, quality_metrics.minimum_avg_psnr);
    EXPECT_GT(psnr_result.min, quality_metrics.minimum_min_psnr);
    EXPECT_GT(ssim_result.average, quality_metrics.minimum_avg_ssim);
    EXPECT_GT(ssim_result.min, quality_metrics.minimum_min_ssim);
  }
};
|
||||
|
||||
// Records one rate-update entry in |rate_profile|: at frame
// |frame_index_rate_update| the target bitrate becomes |bit_rate| (kbps) and
// the input frame rate becomes |frame_rate| (fps).
void SetRateProfilePars(RateProfile* rate_profile,
                        int update_index,
                        int bit_rate,
                        int frame_rate,
                        int frame_index_rate_update) {
  RateProfile& profile = *rate_profile;
  profile.target_bit_rate[update_index] = bit_rate;
  profile.input_frame_rate[update_index] = frame_rate;
  profile.frame_index_rate_update[update_index] = frame_index_rate_update;
}
|
||||
|
||||
// Fills in |process_settings| with the codec and network parameters used for
// a single test run (packet loss probability, key frame interval, number of
// temporal layers, and the VP8 feature toggles).
void SetCodecParameters(CodecConfigPars* process_settings,
                        float packet_loss,
                        int key_frame_interval,
                        int num_temporal_layers,
                        bool error_concealment_on,
                        bool denoising_on,
                        bool frame_dropper_on,
                        bool spatial_resize_on) {
  process_settings->packet_loss = packet_loss;
  process_settings->key_frame_interval = key_frame_interval;
  // Fixed: this statement previously ended with an accidental comma operator
  // ("... = num_temporal_layers,") chaining it to the next assignment.
  // Behavior was unchanged, but it was a latent typo.
  process_settings->num_temporal_layers = num_temporal_layers;
  process_settings->error_concealment_on = error_concealment_on;
  process_settings->denoising_on = denoising_on;
  process_settings->frame_dropper_on = frame_dropper_on;
  process_settings->spatial_resize_on = spatial_resize_on;
}
|
||||
|
||||
// Records the minimum acceptable quality thresholds (average and minimum
// PSNR/SSIM over the whole run) in |quality_metrics|.
void SetQualityMetrics(QualityMetrics* quality_metrics,
                       double minimum_avg_psnr,
                       double minimum_min_psnr,
                       double minimum_avg_ssim,
                       double minimum_min_ssim) {
  QualityMetrics& metrics = *quality_metrics;
  metrics.minimum_avg_psnr = minimum_avg_psnr;
  metrics.minimum_min_psnr = minimum_min_psnr;
  metrics.minimum_avg_ssim = minimum_avg_ssim;
  metrics.minimum_min_ssim = minimum_min_ssim;
}
|
||||
|
||||
// Records the rate control limits for update segment |update_index| in the
// |rc_metrics| array: maximum allowed frame drops, size/rate mismatches (in
// percent), time to hit the target rate, and the exact expected number of
// spatial resizes.
void SetRateControlMetrics(RateControlMetrics* rc_metrics,
                           int update_index,
                           int max_num_dropped_frames,
                           int max_key_frame_size_mismatch,
                           int max_delta_frame_size_mismatch,
                           int max_encoding_rate_mismatch,
                           int max_time_hit_target,
                           int num_spatial_resizes) {
  RateControlMetrics& entry = rc_metrics[update_index];
  entry.max_num_dropped_frames = max_num_dropped_frames;
  entry.max_key_frame_size_mismatch = max_key_frame_size_mismatch;
  entry.max_delta_frame_size_mismatch = max_delta_frame_size_mismatch;
  entry.max_encoding_rate_mismatch = max_encoding_rate_mismatch;
  entry.max_time_hit_target = max_time_hit_target;
  entry.num_spatial_resizes = num_spatial_resizes;
}
|
||||
|
||||
// Run with no packet loss and fixed bitrate. Quality should be very high.
// One key frame (first frame only) in sequence. Setting |key_frame_interval|
// to -1 below means no periodic key frames in test.
TEST_F(VideoProcessorIntegrationTest, ProcessZeroPacketLoss) {
  // Bitrate and frame rate profile: single segment, 500 kbps @ 30 fps.
  RateProfile rate_profile;
  SetRateProfilePars(&rate_profile, 0, 500, 30, 0);
  rate_profile.frame_index_rate_update[1] = kNbrFramesShort + 1;
  rate_profile.num_frames = kNbrFramesShort;
  // Codec/network settings: 0% loss, no periodic key frames, 1 temporal
  // layer; error concealment, denoising and frame dropper on; no resize.
  CodecConfigPars process_settings;
  SetCodecParameters(&process_settings, 0.0f, -1, 1, true, true, true, false);
  // Metrics for expected quality.
  QualityMetrics quality_metrics;
  SetQualityMetrics(&quality_metrics, 36.95, 33.0, 0.90, 0.90);
  // Metrics for rate control.
  RateControlMetrics rc_metrics[1];
  SetRateControlMetrics(rc_metrics, 0, 0, 40, 20, 10, 15, 0);
  ProcessFramesAndVerify(quality_metrics,
                         rate_profile,
                         process_settings,
                         rc_metrics);
}
|
||||
|
||||
// Run with 5% packet loss and fixed bitrate. Quality should be a bit lower.
// One key frame (first frame only) in sequence.
TEST_F(VideoProcessorIntegrationTest, Process5PercentPacketLoss) {
  // Bitrate and frame rate profile: single segment, 500 kbps @ 30 fps.
  RateProfile rate_profile;
  SetRateProfilePars(&rate_profile, 0, 500, 30, 0);
  rate_profile.frame_index_rate_update[1] = kNbrFramesShort + 1;
  rate_profile.num_frames = kNbrFramesShort;
  // Codec/network settings: identical to the zero-loss test except for the
  // 5% packet loss probability.
  CodecConfigPars process_settings;
  SetCodecParameters(&process_settings, 0.05f, -1, 1, true, true, true, false);
  // Metrics for expected quality (looser than the zero-loss thresholds).
  QualityMetrics quality_metrics;
  SetQualityMetrics(&quality_metrics, 20.0, 16.0, 0.60, 0.40);
  // Metrics for rate control.
  RateControlMetrics rc_metrics[1];
  SetRateControlMetrics(rc_metrics, 0, 0, 40, 20, 10, 15, 0);
  ProcessFramesAndVerify(quality_metrics,
                         rate_profile,
                         process_settings,
                         rc_metrics);
}
|
||||
|
||||
// Run with 10% packet loss and fixed bitrate. Quality should be even lower.
// One key frame (first frame only) in sequence.
TEST_F(VideoProcessorIntegrationTest, Process10PercentPacketLoss) {
  // Bitrate and frame rate profile: single segment, 500 kbps @ 30 fps.
  RateProfile rate_profile;
  SetRateProfilePars(&rate_profile, 0, 500, 30, 0);
  rate_profile.frame_index_rate_update[1] = kNbrFramesShort + 1;
  rate_profile.num_frames = kNbrFramesShort;
  // Codec/network settings: identical to the zero-loss test except for the
  // 10% packet loss probability.
  CodecConfigPars process_settings;
  SetCodecParameters(&process_settings, 0.1f, -1, 1, true, true, true, false);
  // Metrics for expected quality (lowest thresholds of the loss series).
  QualityMetrics quality_metrics;
  SetQualityMetrics(&quality_metrics, 19.0, 16.0, 0.50, 0.35);
  // Metrics for rate control.
  RateControlMetrics rc_metrics[1];
  SetRateControlMetrics(rc_metrics, 0, 0, 40, 20, 10, 15, 0);
  ProcessFramesAndVerify(quality_metrics,
                         rate_profile,
                         process_settings,
                         rc_metrics);
}
|
||||
|
||||
// Run with no packet loss, with varying bitrate (3 rate updates):
// low to high to medium. Check that quality and encoder response to the new
// target rate/per-frame bandwidth (for each rate update) is within limits.
// One key frame (first frame only) in sequence.
TEST_F(VideoProcessorIntegrationTest, ProcessNoLossChangeBitRate) {
  // Bitrate and frame rate profile: 200 -> 800 -> 500 kbps, 30 fps throughout.
  RateProfile rate_profile;
  SetRateProfilePars(&rate_profile, 0, 200, 30, 0);
  SetRateProfilePars(&rate_profile, 1, 800, 30, 100);
  SetRateProfilePars(&rate_profile, 2, 500, 30, 200);
  rate_profile.frame_index_rate_update[3] = kNbrFramesLong + 1;
  rate_profile.num_frames = kNbrFramesLong;
  // Codec/network settings.
  CodecConfigPars process_settings;
  SetCodecParameters(&process_settings, 0.0f, -1, 1, true, true, true, false);
  // Metrics for expected quality.
  QualityMetrics quality_metrics;
  SetQualityMetrics(&quality_metrics, 34.0, 32.0, 0.85, 0.80);
  // Metrics for rate control: one entry per rate update segment.
  RateControlMetrics rc_metrics[3];
  SetRateControlMetrics(rc_metrics, 0, 0, 45, 20, 10, 15, 0);
  SetRateControlMetrics(rc_metrics, 1, 0, 0, 25, 20, 10, 0);
  SetRateControlMetrics(rc_metrics, 2, 0, 0, 25, 15, 10, 0);
  ProcessFramesAndVerify(quality_metrics,
                         rate_profile,
                         process_settings,
                         rc_metrics);
}
|
||||
|
||||
// Run with no packet loss, with an update (decrease) in frame rate.
|
||||
// Lower frame rate means higher per-frame-bandwidth, so easier to encode.
|
||||
// At the bitrate in this test, this means better rate control after the
|
||||
// update(s) to lower frame rate. So expect less frame drops, and max values
|
||||
// for the rate control metrics can be lower. One key frame (first frame only).
|
||||
// Note: quality after update should be higher but we currently compute quality
|
||||
// metrics avergaed over whole sequence run.
|
||||
TEST_F(VideoProcessorIntegrationTest, ProcessNoLossChangeFrameRateFrameDrop) {
|
||||
config_.networking_config.packet_loss_probability = 0;
|
||||
// Bitrate and frame rate profile.
|
||||
RateProfile rate_profile;
|
||||
SetRateProfilePars(&rate_profile, 0, 80, 24, 0);
|
||||
SetRateProfilePars(&rate_profile, 1, 80, 15, 100);
|
||||
SetRateProfilePars(&rate_profile, 2, 80, 10, 200);
|
||||
rate_profile.frame_index_rate_update[3] = kNbrFramesLong + 1;
|
||||
rate_profile.num_frames = kNbrFramesLong;
|
||||
// Codec/network settings.
|
||||
CodecConfigPars process_settings;
|
||||
SetCodecParameters(&process_settings, 0.0f, -1, 1, true, true, true, false);
|
||||
// Metrics for expected quality.
|
||||
QualityMetrics quality_metrics;
|
||||
SetQualityMetrics(&quality_metrics, 31.0, 23.0, 0.80, 0.65);
|
||||
quality_metrics.minimum_avg_psnr = 31;
|
||||
quality_metrics.minimum_min_psnr = 23;
|
||||
quality_metrics.minimum_avg_ssim = 0.8;
|
||||
quality_metrics.minimum_min_ssim = 0.65;
|
||||
// Metrics for rate control.
|
||||
RateControlMetrics rc_metrics[3];
|
||||
SetRateControlMetrics(rc_metrics, 0, 40, 20, 75, 15, 60, 0);
|
||||
SetRateControlMetrics(rc_metrics, 1, 10, 0, 25, 10, 35, 0);
|
||||
SetRateControlMetrics(rc_metrics, 2, 0, 0, 20, 10, 15, 0);
|
||||
ProcessFramesAndVerify(quality_metrics,
|
||||
rate_profile,
|
||||
process_settings,
|
||||
rc_metrics);
|
||||
}
|
||||
|
||||
// Run with no packet loss, at low bitrate, then increase rate somewhat.
// Key frame is thrown in every 120 frames. Can expect some frame drops after
// key frame, even at high rate. The internal spatial resizer is on, so expect
// spatial resize down at first key frame, and back up at second key frame.
// Error_concealment is off in this test since there is a memory leak with
// resizing and error concealment.
TEST_F(VideoProcessorIntegrationTest, ProcessNoLossSpatialResizeFrameDrop) {
  // NOTE(review): this assignment appears redundant — SetUpCodecConfig()
  // (called from ProcessFramesAndVerify) overwrites it from
  // process_settings.packet_loss, which is 0.0f below.
  config_.networking_config.packet_loss_probability = 0;
  // Bitrate and frame rate profile: 100 -> 200 -> 200 kbps @ 30 fps.
  RateProfile rate_profile;
  SetRateProfilePars(&rate_profile, 0, 100, 30, 0);
  SetRateProfilePars(&rate_profile, 1, 200, 30, 120);
  SetRateProfilePars(&rate_profile, 2, 200, 30, 240);
  rate_profile.frame_index_rate_update[3] = kNbrFramesLong + 1;
  rate_profile.num_frames = kNbrFramesLong;
  // Codec/network settings: periodic key frames every 120 frames, error
  // concealment off, spatial resizer on.
  CodecConfigPars process_settings;
  SetCodecParameters(&process_settings, 0.0f, 120, 1, false, true, true, true);
  // Metrics for expected quality.: lower quality on average from up-sampling
  // the down-sampled portion of the run, in case resizer is on.
  QualityMetrics quality_metrics;
  SetQualityMetrics(&quality_metrics, 29.0, 20.0, 0.75, 0.60);
  // Metrics for rate control: exactly one resize expected in segments 1 and 2.
  RateControlMetrics rc_metrics[3];
  SetRateControlMetrics(rc_metrics, 0, 45, 30, 75, 20, 70, 0);
  SetRateControlMetrics(rc_metrics, 1, 20, 35, 30, 20, 15, 1);
  SetRateControlMetrics(rc_metrics, 2, 0, 30, 30, 15, 25, 1);
  ProcessFramesAndVerify(quality_metrics,
                         rate_profile,
                         process_settings,
                         rc_metrics);
}
|
||||
|
||||
// Run with no packet loss, with 3 temporal layers, with a rate update in the
// middle of the sequence. The max values for the frame size mismatch and
// encoding rate mismatch are applied to each layer.
// No dropped frames in this test, and internal spatial resizer is off.
// One key frame (first frame only) in sequence, so no spatial resizing.
TEST_F(VideoProcessorIntegrationTest, ProcessNoLossTemporalLayers) {
  // NOTE(review): this assignment appears redundant — SetUpCodecConfig()
  // (called from ProcessFramesAndVerify) overwrites it from
  // process_settings.packet_loss, which is 0.0f below.
  config_.networking_config.packet_loss_probability = 0;
  // Bitrate and frame rate profile: 200 -> 400 kbps @ 30 fps.
  RateProfile rate_profile;
  SetRateProfilePars(&rate_profile, 0, 200, 30, 0);
  SetRateProfilePars(&rate_profile, 1, 400, 30, 150);
  rate_profile.frame_index_rate_update[2] = kNbrFramesLong + 1;
  rate_profile.num_frames = kNbrFramesLong;
  // Codec/network settings: 3 temporal layers, no periodic key frames.
  CodecConfigPars process_settings;
  SetCodecParameters(&process_settings, 0.0f, -1, 3, true, true, true, false);
  // Metrics for expected quality.
  QualityMetrics quality_metrics;
  SetQualityMetrics(&quality_metrics, 32.5, 30.0, 0.85, 0.80);
  // Metrics for rate control: one entry per rate update segment.
  RateControlMetrics rc_metrics[2];
  SetRateControlMetrics(rc_metrics, 0, 0, 20, 30, 10, 10, 0);
  SetRateControlMetrics(rc_metrics, 1, 0, 0, 30, 15, 10, 0);
  ProcessFramesAndVerify(quality_metrics,
                         rate_profile,
                         process_settings,
                         rc_metrics);
}
|
||||
} // namespace webrtc
|
||||
@@ -0,0 +1,99 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "gtest/gtest.h"
|
||||
#include "gmock/gmock.h"
|
||||
#include "modules/video_coding/codecs/test/mock/mock_packet_manipulator.h"
|
||||
#include "modules/video_coding/codecs/test/videoprocessor.h"
|
||||
#include "modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h"
|
||||
#include "modules/video_coding/main/interface/video_coding.h"
|
||||
#include "testsupport/mock/mock_frame_reader.h"
|
||||
#include "testsupport/mock/mock_frame_writer.h"
|
||||
#include "testsupport/packet_reader.h"
|
||||
#include "testsupport/unittest_utils.h"
|
||||
#include "typedefs.h"
|
||||
|
||||
using ::testing::_;
|
||||
using ::testing::AtLeast;
|
||||
using ::testing::Return;
|
||||
|
||||
namespace webrtc {
|
||||
namespace test {
|
||||
|
||||
// Very basic testing for VideoProcessor. It's mostly tested by running the
// video_quality_measurement program.
class VideoProcessorTest: public testing::Test {
 protected:
  // All collaborators are gmock mocks: no real encoding, decoding or file
  // I/O happens in these tests.
  MockVideoEncoder encoder_mock_;
  MockVideoDecoder decoder_mock_;
  MockFrameReader frame_reader_mock_;
  MockFrameWriter frame_writer_mock_;
  MockPacketManipulator packet_manipulator_mock_;
  Stats stats_;
  TestConfig config_;
  VideoCodec codec_settings_;

  VideoProcessorTest() {}
  virtual ~VideoProcessorTest() {}
  void SetUp() {
    // Get a codec configuration struct and configure it.
    VideoCodingModule::Codec(kVideoCodecVP8, &codec_settings_);
    config_.codec_settings = &codec_settings_;
    config_.codec_settings->startBitrate = 100;
    config_.codec_settings->width = 352;   // CIF resolution.
    config_.codec_settings->height = 288;
  }
  void TearDown() {}

  // Sets the mock expectations that VideoProcessorImpl::Init() must satisfy:
  // both codecs initialized with callbacks registered, and the frame reader
  // queried for frame count and frame length.
  void ExpectInit() {
    EXPECT_CALL(encoder_mock_, InitEncode(_, _, _))
        .Times(1);
    EXPECT_CALL(encoder_mock_, RegisterEncodeCompleteCallback(_))
        .Times(AtLeast(1));
    EXPECT_CALL(decoder_mock_, InitDecode(_, _))
        .Times(1);
    EXPECT_CALL(decoder_mock_, RegisterDecodeCompleteCallback(_))
        .Times(AtLeast(1));
    EXPECT_CALL(frame_reader_mock_, NumberOfFrames())
        .WillOnce(Return(1));
    EXPECT_CALL(frame_reader_mock_, FrameLength())
        .WillOnce(Return(150000));
  }
};
|
||||
|
||||
// Verifies that Init() succeeds and performs exactly the expected calls on
// the mocked encoder, decoder and frame reader.
TEST_F(VideoProcessorTest, Init) {
  ExpectInit();
  VideoProcessorImpl video_processor(&encoder_mock_, &decoder_mock_,
                                     &frame_reader_mock_,
                                     &frame_writer_mock_,
                                     &packet_manipulator_mock_, config_,
                                     &stats_);
  ASSERT_TRUE(video_processor.Init());
}
|
||||
|
||||
// Verifies that ProcessFrame(0) reads a frame and forwards it to the encoder.
TEST_F(VideoProcessorTest, ProcessFrame) {
  ExpectInit();
  EXPECT_CALL(encoder_mock_, Encode(_, _, _))
      .Times(1);
  EXPECT_CALL(frame_reader_mock_, ReadFrame(_))
      .WillOnce(Return(true));
  // Since we don't return any callback from the mock, the decoder will not
  // be more than initialized...
  VideoProcessorImpl video_processor(&encoder_mock_, &decoder_mock_,
                                     &frame_reader_mock_,
                                     &frame_writer_mock_,
                                     &packet_manipulator_mock_, config_,
                                     &stats_);
  ASSERT_TRUE(video_processor.Init());
  video_processor.ProcessFrame(0);
}
|
||||
|
||||
} // namespace test
|
||||
} // namespace webrtc
|
||||
309
webrtc/modules/video_coding/codecs/test_framework/benchmark.cc
Normal file
309
webrtc/modules/video_coding/codecs/test_framework/benchmark.cc
Normal file
@@ -0,0 +1,309 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "benchmark.h"
|
||||
|
||||
#include <cassert>
|
||||
#include <iostream>
|
||||
#include <sstream>
|
||||
#include <vector>
|
||||
#if defined(_WIN32)
|
||||
#include <windows.h>
|
||||
#endif
|
||||
|
||||
#include "common_video/libyuv/include/webrtc_libyuv.h"
|
||||
#include "system_wrappers/interface/event_wrapper.h"
|
||||
#include "modules/video_coding/codecs/test_framework/video_source.h"
|
||||
#include "testsupport/fileutils.h"
|
||||
#include "testsupport/metrics/video_metrics.h"
|
||||
|
||||
#define SSIM_CALC 0 // by default, don't compute SSIM
|
||||
|
||||
using namespace webrtc;
|
||||
|
||||
// Default benchmark: test number 6, results written to "benchmark.txt" in the
// standard output directory, codec name left as a placeholder.
Benchmark::Benchmark()
:
NormalAsyncTest("Benchmark", "Codec benchmark over a range of test cases", 6),
_resultsFileName(webrtc::test::OutputPath() + "benchmark.txt"),
_codecName("Default")
{
}
|
||||
|
||||
// Benchmark with a custom name/description; results file and codec name keep
// their defaults.
Benchmark::Benchmark(std::string name, std::string description)
:
NormalAsyncTest(name, description, 6),
_resultsFileName(webrtc::test::OutputPath() + "benchmark.txt"),
_codecName("Default")
{
}
|
||||
|
||||
// Fully parameterized benchmark: caller chooses where results are written and
// which codec name is recorded in the results file.
Benchmark::Benchmark(std::string name, std::string description, std::string resultsFileName, std::string codecName)
:
NormalAsyncTest(name, description, 6),
_resultsFileName(resultsFileName),
_codecName(codecName)
{
}
|
||||
|
||||
void
|
||||
Benchmark::Perform()
|
||||
{
|
||||
std::vector<const VideoSource*> sources;
|
||||
std::vector<const VideoSource*>::iterator it;
|
||||
|
||||
// Configuration --------------------------
|
||||
sources.push_back(new const VideoSource(webrtc::test::ProjectRootPath() +
|
||||
"resources/foreman_cif.yuv", kCIF));
|
||||
// sources.push_back(new const VideoSource(webrtc::test::ProjectRootPath() +
|
||||
// "resources/akiyo_cif.yuv", kCIF));
|
||||
|
||||
const VideoSize size[] = {kQCIF, kCIF};
|
||||
const int frameRate[] = {10, 15, 30};
|
||||
// Specifies the framerates for which to perform a speed test.
|
||||
const bool speedTestMask[] = {false, false, false};
|
||||
const int bitRate[] = {50, 100, 200, 300, 400, 500, 600, 1000};
|
||||
// Determines the number of iterations to perform to arrive at the speed result.
|
||||
enum { kSpeedTestIterations = 10 };
|
||||
// ----------------------------------------
|
||||
|
||||
const int nFrameRates = sizeof(frameRate)/sizeof(*frameRate);
|
||||
assert(sizeof(speedTestMask)/sizeof(*speedTestMask) == nFrameRates);
|
||||
const int nBitrates = sizeof(bitRate)/sizeof(*bitRate);
|
||||
int testIterations = 10;
|
||||
|
||||
webrtc::test::QualityMetricsResult psnr[nBitrates];
|
||||
webrtc::test::QualityMetricsResult ssim[nBitrates];
|
||||
double fps[nBitrates];
|
||||
double totalEncodeTime[nBitrates];
|
||||
double totalDecodeTime[nBitrates];
|
||||
|
||||
_results.open(_resultsFileName.c_str(), std::fstream::out);
|
||||
_results << GetMagicStr() << std::endl;
|
||||
_results << _codecName << std::endl;
|
||||
|
||||
for (it = sources.begin() ; it < sources.end(); it++)
|
||||
{
|
||||
for (int i = 0; i < static_cast<int>(sizeof(size)/sizeof(*size)); i++)
|
||||
{
|
||||
for (int j = 0; j < nFrameRates; j++)
|
||||
{
|
||||
std::stringstream ss;
|
||||
std::string strFrameRate;
|
||||
std::string outFileName;
|
||||
ss << frameRate[j];
|
||||
ss >> strFrameRate;
|
||||
outFileName = (*it)->GetFilePath() + "/" + (*it)->GetName() + "_" +
|
||||
VideoSource::GetSizeString(size[i]) + "_" + strFrameRate + ".yuv";
|
||||
|
||||
_target = new const VideoSource(outFileName, size[i], frameRate[j]);
|
||||
(*it)->Convert(*_target);
|
||||
if (VideoSource::FileExists(outFileName.c_str()))
|
||||
{
|
||||
_inname = outFileName;
|
||||
}
|
||||
else
|
||||
{
|
||||
_inname = (*it)->GetFileName();
|
||||
}
|
||||
|
||||
std::cout << (*it)->GetName() << ", " << VideoSource::GetSizeString(size[i])
|
||||
<< ", " << frameRate[j] << " fps" << std::endl << "Bitrate [kbps]:";
|
||||
_results << (*it)->GetName() << "," << VideoSource::GetSizeString(size[i])
|
||||
<< "," << frameRate[j] << " fps" << std::endl << "Bitrate [kbps]";
|
||||
|
||||
if (speedTestMask[j])
|
||||
{
|
||||
testIterations = kSpeedTestIterations;
|
||||
}
|
||||
else
|
||||
{
|
||||
testIterations = 1;
|
||||
}
|
||||
|
||||
for (int k = 0; k < nBitrates; k++)
|
||||
{
|
||||
_bitRate = (bitRate[k]);
|
||||
double avgFps = 0.0;
|
||||
totalEncodeTime[k] = 0;
|
||||
totalDecodeTime[k] = 0;
|
||||
|
||||
for (int l = 0; l < testIterations; l++)
|
||||
{
|
||||
PerformNormalTest();
|
||||
_appendNext = false;
|
||||
|
||||
avgFps += _framecnt / (_totalEncodeTime + _totalDecodeTime);
|
||||
totalEncodeTime[k] += _totalEncodeTime;
|
||||
totalDecodeTime[k] += _totalDecodeTime;
|
||||
|
||||
}
|
||||
avgFps /= testIterations;
|
||||
totalEncodeTime[k] /= testIterations;
|
||||
totalDecodeTime[k] /= testIterations;
|
||||
|
||||
double actualBitRate = ActualBitRate(_framecnt) / 1000.0;
|
||||
std::cout << " " << actualBitRate;
|
||||
_results << "," << actualBitRate;
|
||||
webrtc::test::QualityMetricsResult psnr_result;
|
||||
I420PSNRFromFiles(_inname.c_str(), _outname.c_str(),
|
||||
_inst.width, _inst.height, &psnr[k]);
|
||||
if (SSIM_CALC)
|
||||
{
|
||||
webrtc::test::QualityMetricsResult ssim_result;
|
||||
I420SSIMFromFiles(_inname.c_str(), _outname.c_str(),
|
||||
_inst.width, _inst.height, &ssim[k]);
|
||||
|
||||
}
|
||||
fps[k] = avgFps;
|
||||
}
|
||||
std::cout << std::endl << "Y-PSNR [dB]:";
|
||||
_results << std::endl << "Y-PSNR [dB]";
|
||||
for (int k = 0; k < nBitrates; k++)
|
||||
{
|
||||
std::cout << " " << psnr[k].average;
|
||||
_results << "," << psnr[k].average;
|
||||
|
||||
}
|
||||
if (SSIM_CALC)
|
||||
{
|
||||
std::cout << std::endl << "SSIM: ";
|
||||
_results << std::endl << "SSIM ";
|
||||
for (int k = 0; k < nBitrates; k++)
|
||||
{
|
||||
std::cout << " " << ssim[k].average;
|
||||
_results << "," << ssim[k].average;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
std::cout << std::endl << "Encode Time[ms]:";
|
||||
_results << std::endl << "Encode Time[ms]";
|
||||
for (int k = 0; k < nBitrates; k++)
|
||||
{
|
||||
std::cout << " " << totalEncodeTime[k];
|
||||
_results << "," << totalEncodeTime[k];
|
||||
|
||||
}
|
||||
|
||||
std::cout << std::endl << "Decode Time[ms]:";
|
||||
_results << std::endl << "Decode Time[ms]";
|
||||
for (int k = 0; k < nBitrates; k++)
|
||||
{
|
||||
std::cout << " " << totalDecodeTime[k];
|
||||
_results << "," << totalDecodeTime[k];
|
||||
|
||||
}
|
||||
|
||||
if (speedTestMask[j])
|
||||
{
|
||||
std::cout << std::endl << "Speed [fps]:";
|
||||
_results << std::endl << "Speed [fps]";
|
||||
for (int k = 0; k < nBitrates; k++)
|
||||
{
|
||||
std::cout << " " << static_cast<int>(fps[k] + 0.5);
|
||||
_results << "," << static_cast<int>(fps[k] + 0.5);
|
||||
}
|
||||
}
|
||||
std::cout << std::endl << std::endl;
|
||||
_results << std::endl << std::endl;
|
||||
|
||||
delete _target;
|
||||
}
|
||||
}
|
||||
delete *it;
|
||||
}
|
||||
_results.close();
|
||||
}
|
||||
|
||||
// Runs one complete encode/decode pass over the current target clip at the
// current _bitRate, accumulating timing and frame statistics. Frames are
// encoded asynchronously; completed frames are pulled off frameQueue,
// optionally packet-lossed, and decoded.
void
Benchmark::PerformNormalTest()
{
    _encoder = GetNewEncoder();
    _decoder = GetNewDecoder();
    CodecSettings(_target->GetWidth(), _target->GetHeight(), _target->GetFrameRate(), _bitRate);
    Setup();
    EventWrapper* waitEvent = EventWrapper::Create();

    _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
    _decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
    // 4 cores, 1440-byte max payload.
    _encoder->InitEncode(&_inst, 4, 1440);
    CodecSpecific_InitBitrate();
    _decoder->InitDecode(&_inst,1);

    FrameQueue frameQueue;
    VideoEncodeCompleteCallback encCallback(_encodedFile, &frameQueue, *this);
    VideoDecodeCompleteCallback decCallback(_decodedFile, *this);
    _encoder->RegisterEncodeCompleteCallback(&encCallback);
    _decoder->RegisterDecodeCompleteCallback(&decCallback);

    SetCodecSpecificParameters();

    // Reset per-run counters before the encode/decode pump.
    _totalEncodeTime = _totalDecodeTime = 0;
    _totalEncodePipeTime = _totalDecodePipeTime = 0;
    bool complete = false;
    _framecnt = 0;
    _encFrameCnt = 0;
    _sumEncBytes = 0;
    _lengthEncFrame = 0;
    while (!complete)
    {
        complete = Encode();
        // Drain every encoded frame that has completed so far.
        if (!frameQueue.Empty() || complete)
        {
            while (!frameQueue.Empty())
            {
                _frameToDecode = static_cast<FrameQueueTuple *>(frameQueue.PopFrame());
                DoPacketLoss();
                int ret = Decode();
                delete _frameToDecode;
                _frameToDecode = NULL;
                if (ret < 0)
                {
                    fprintf(stderr,"\n\nError in decoder: %d\n\n", ret);
                    exit(EXIT_FAILURE);
                }
                else if (ret == 0)
                {
                    _framecnt++;
                }
                else
                {
                    fprintf(stderr, "\n\nPositive return value from decode!\n\n");
                }
            }
        }
        // Give the asynchronous codec a moment to make progress.
        waitEvent->Wait(5);
    }

    _inputVideoBuffer.Free();
    _encodedVideoBuffer.Free();
    _decodedVideoBuffer.Free();

    _encoder->Release();
    _decoder->Release();
    delete waitEvent;
    delete _encoder;
    delete _decoder;
    Teardown();
}
|
||||
|
||||
void
|
||||
Benchmark::CodecSpecific_InitBitrate()
|
||||
{
|
||||
if (_bitRate == 0)
|
||||
{
|
||||
_encoder->SetRates(600, _inst.maxFramerate);
|
||||
}
|
||||
else
|
||||
{
|
||||
_encoder->SetRates(_bitRate, _inst.maxFramerate);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,40 @@
|
||||
/*
|
||||
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAWEWORK_BENCHMARK_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAWEWORK_BENCHMARK_H_
|
||||
|
||||
#include "normal_async_test.h"
|
||||
|
||||
class VideoSource;
|
||||
|
||||
// Abstract codec benchmark. Subclasses supply concrete encoder/decoder
// factories; Perform() sweeps clips, sizes, frame rates and bit rates and
// writes the results to a results file.
class Benchmark : public NormalAsyncTest
{
public:
    Benchmark();
    virtual void Perform();

protected:
    Benchmark(std::string name, std::string description);
    Benchmark(std::string name, std::string description, std::string resultsFileName, std::string codecName);
    // Factory methods implemented by codec-specific subclasses; the caller
    // (PerformNormalTest) takes ownership of the returned objects.
    virtual webrtc::VideoEncoder* GetNewEncoder() = 0;
    virtual webrtc::VideoDecoder* GetNewDecoder() = 0;
    virtual void PerformNormalTest();
    virtual void CodecSpecific_InitBitrate();
    // First line of the results file; identifies its format/version.
    static const char* GetMagicStr() { return "#!benchmark1.0"; }

    const VideoSource* _target;       // Clip converted to the size/rate under test.
    std::string _resultsFileName;     // Destination for benchmark output.
    std::ofstream _results;           // Open results stream during Perform().
    std::string _codecName;           // Recorded in the results file header.
};
|
||||
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAWEWORK_BENCHMARK_H_
|
||||
|
||||
500
webrtc/modules/video_coding/codecs/test_framework/exportfig.m
Normal file
500
webrtc/modules/video_coding/codecs/test_framework/exportfig.m
Normal file
@@ -0,0 +1,500 @@
|
||||
function exportfig(varargin)
%EXPORTFIG Export a figure to Encapsulated Postscript.
%   EXPORTFIG(H, FILENAME) writes the figure H to FILENAME.  H is
%   a figure handle and FILENAME is a string that specifies the
%   name of the output file.
%
%   EXPORTFIG(...,PARAM1,VAL1,PARAM2,VAL2,...) specifies
%   parameters that control various characteristics of the output
%   file.
%
%   Format Paramter:
%     'Format'  one of the strings 'eps','eps2','jpeg','png','preview'
%          specifies the output format. Defaults to 'eps'.
%          The output format 'preview' does not generate an output
%          file but instead creates a new figure window with a
%          preview of the exported figure. In this case the
%          FILENAME parameter is ignored.
%
%     'Preview' one of the strings 'none', 'tiff'
%          specifies a preview for EPS files. Defaults to 'none'.
%
%   Size Parameters:
%     'Width'   a positive scalar
%          specifies the width in the figure's PaperUnits
%     'Height'  a positive scalar
%          specifies the height in the figure's PaperUnits
%
%     Specifying only one dimension sets the other dimension
%     so that the exported aspect ratio is the same as the
%     figure's current aspect ratio.
%     If neither dimension is specified the size defaults to
%     the width and height from the figure's PaperPosition.
%
%   Rendering Parameters:
%     'Color'     one of the strings 'bw', 'gray', 'cmyk'
%         'bw' specifies that lines and text are exported in
%         black and all other objects in grayscale
%         'gray' specifies that all objects are exported in grayscale
%         'cmyk' specifies that all objects are exported in color
%         using the CMYK color space
%     'Renderer'  one of the strings 'painters', 'zbuffer', 'opengl'
%         specifies the renderer to use
%     'Resolution'   a positive scalar
%         specifies the resolution in dots-per-inch.
%
%     The default color setting is 'bw'.
%
%   Font Parameters:
%     'FontMode'     one of the strings 'scaled', 'fixed'
%     'FontSize'     a positive scalar
%          in 'scaled' mode multiplies with the font size of each
%          text object to obtain the exported font size
%          in 'fixed' mode specifies the font size of all text
%          objects in points
%     'FontEncoding' one of the strings 'latin1', 'adobe'
%          specifies the character encoding of the font
%
%     If FontMode is 'scaled' but FontSize is not specified then a
%     scaling factor is computed from the ratio of the size of the
%     exported figure to the size of the actual figure. The minimum
%     font size allowed after scaling is 5 points.
%     If FontMode is 'fixed' but FontSize is not specified then the
%     exported font sizes of all text objects is 7 points.
%
%     The default 'FontMode' setting is 'scaled'.
%
%   Line Width Parameters:
%     'LineMode'     one of the strings 'scaled', 'fixed'
%     'LineWidth'    a positive scalar
%          the semantics of LineMode and LineWidth are exactly the
%          same as FontMode and FontSize, except that they apply
%          to line widths instead of font sizes. The minumum line
%          width allowed after scaling is 0.5 points.
%          If LineMode is 'fixed' but LineWidth is not specified
%          then the exported line width of all line objects is 1
%          point.
%
%   Examples:
%     exportfig(gcf,'fig1.eps','height',3);
%       Exports the current figure to the file named 'fig1.eps' with
%       a height of 3 inches (assuming the figure's PaperUnits is
%       inches) and an aspect ratio the same as the figure's aspect
%       ratio on screen.
%
%     exportfig(gcf, 'fig2.eps', 'FontMode', 'fixed',...
%               'FontSize', 10, 'color', 'cmyk' );
%       Exports the current figure to 'fig2.eps' in color with all
%       text in 10 point fonts. The size of the exported figure is
%       the figure's PaperPostion width and height.

% --- Validate the two required arguments. ---
if (nargin < 2)
  error('Too few input arguments');
end

% exportfig(H, filename, ...)
H = varargin{1};
if ~ishandle(H) | ~strcmp(get(H,'type'), 'figure')
  error('First argument must be a handle to a figure.');
end
filename = varargin{2};
if ~ischar(filename)
  error('Second argument must be a string.');
end
paramPairs = varargin(3:end);

% Do some validity checking on param-value pairs
if (rem(length(paramPairs),2) ~= 0)
  error(['Invalid input syntax. Optional parameters and values' ...
         ' must be in pairs.']);
end

% --- Defaults for all optional parameters. -1 / [] mean "not specified". ---
format = 'eps';
preview = 'none';
width = -1;
height = -1;
color = 'bw';
fontsize = -1;
fontmode='scaled';
linewidth = -1;
linemode=[];
fontencoding = 'latin1';
renderer = [];
resolution = [];

% Process param-value pairs
args = {};
for k = 1:2:length(paramPairs)
  param = lower(paramPairs{k});
  if (~ischar(param))
    error('Optional parameter names must be strings');
  end
  value = paramPairs{k+1};

  switch (param)
   case 'format'
    format = value;
    if (~strcmp(format,{'eps','eps2','jpeg','png','preview'}))
      error(['Format must be ''eps'', ''eps2'', ''jpeg'', ''png'' or' ...
             ' ''preview''.']);
    end
   case 'preview'
    preview = value;
    if (~strcmp(preview,{'none','tiff'}))
      error('Preview must be ''none'' or ''tiff''.');
    end
   case 'width'
    width = LocalToNum(value);
    if(~LocalIsPositiveScalar(width))
      error('Width must be a numeric scalar > 0');
    end
   case 'height'
    height = LocalToNum(value);
    if(~LocalIsPositiveScalar(height))
      error('Height must be a numeric scalar > 0');
    end
   case 'color'
    color = lower(value);
    if (~strcmp(color,{'bw','gray','cmyk'}))
      error('Color must be ''bw'', ''gray'' or ''cmyk''.');
    end
   case 'fontmode'
    fontmode = lower(value);
    if (~strcmp(fontmode,{'scaled','fixed'}))
      error('FontMode must be ''scaled'' or ''fixed''.');
    end
   case 'fontsize'
    fontsize = LocalToNum(value);
    if(~LocalIsPositiveScalar(fontsize))
      error('FontSize must be a numeric scalar > 0');
    end
   case 'fontencoding'
    fontencoding = lower(value);
    if (~strcmp(fontencoding,{'latin1','adobe'}))
      error('FontEncoding must be ''latin1'' or ''adobe''.');
    end
   case 'linemode'
    linemode = lower(value);
    if (~strcmp(linemode,{'scaled','fixed'}))
      error('LineMode must be ''scaled'' or ''fixed''.');
    end
   case 'linewidth'
    linewidth = LocalToNum(value);
    if(~LocalIsPositiveScalar(linewidth))
      error('LineWidth must be a numeric scalar > 0');
    end
   case 'renderer'
    renderer = lower(value);
    if (~strcmp(renderer,{'painters','zbuffer','opengl'}))
      error('Renderer must be ''painters'', ''zbuffer'' or ''opengl''.');
    end
   case 'resolution'
    resolution = LocalToNum(value);
    if ~(isnumeric(value) & (prod(size(value)) == 1) & (value >= 0));
      error('Resolution must be a numeric scalar >= 0');
    end
   otherwise
    error(['Unrecognized option ' param '.']);
  end
end

% --- Collect handles of all objects whose properties may be modified. ---
allLines  = findall(H, 'type', 'line');
allText   = findall(H, 'type', 'text');
allAxes   = findall(H, 'type', 'axes');
allImages = findall(H, 'type', 'image');
allLights = findall(H, 'type', 'light');
allPatch  = findall(H, 'type', 'patch');
allSurf   = findall(H, 'type', 'surface');
allRect   = findall(H, 'type', 'rectangle');
allFont   = [allText; allAxes];
allColor  = [allLines; allText; allAxes; allLights];
allMarker = [allLines; allPatch; allSurf];
allEdge   = [allPatch; allSurf];
allCData  = [allImages; allPatch; allSurf];

% Every property changed below is recorded in `old` so it can be restored
% after printing (see the loop near the end).
old.objs = {};
old.prop = {};
old.values = {};

% Process format and preview parameter
showPreview = strcmp(format,'preview');
if showPreview
  format = 'png';
  filename = [tempName '.png'];
end
if strncmp(format,'eps',3) & ~strcmp(preview,'none')
  args = {args{:}, ['-' preview]};
end

hadError = 0;
try
  % Process size parameters
  paperPos = get(H, 'PaperPosition');
  old = LocalPushOldData(old, H, 'PaperPosition', paperPos);
  figureUnits = get(H, 'Units');
  set(H, 'Units', get(H,'PaperUnits'));
  figurePos = get(H, 'Position');
  aspectRatio = figurePos(3)/figurePos(4);
  set(H, 'Units', figureUnits);
  % Derive the missing dimension (if any) from the on-screen aspect ratio.
  if (width == -1) & (height == -1)
    width = paperPos(3);
    height = paperPos(4);
  elseif (width == -1)
    width = height * aspectRatio;
  elseif (height == -1)
    height = width / aspectRatio;
  end
  set(H, 'PaperPosition', [0 0 width height]);
  paperPosMode = get(H, 'PaperPositionMode');
  old = LocalPushOldData(old, H, 'PaperPositionMode', paperPosMode);
  set(H, 'PaperPositionMode', 'manual');

  % Process rendering parameters
  switch (color)
   case {'bw', 'gray'}
    if ~strcmp(color,'bw') & strncmp(format,'eps',3)
      format = [format 'c'];
    end
    args = {args{:}, ['-d' format]};

    %compute and set gray colormap
    oldcmap = get(H,'Colormap');
    newgrays = 0.30*oldcmap(:,1) + 0.59*oldcmap(:,2) + 0.11*oldcmap(:,3);
    newcmap = [newgrays newgrays newgrays];
    old = LocalPushOldData(old, H, 'Colormap', oldcmap);
    set(H, 'Colormap', newcmap);

    %compute and set ColorSpec and CData properties
    old = LocalUpdateColors(allColor, 'color', old);
    old = LocalUpdateColors(allAxes, 'xcolor', old);
    old = LocalUpdateColors(allAxes, 'ycolor', old);
    old = LocalUpdateColors(allAxes, 'zcolor', old);
    old = LocalUpdateColors(allMarker, 'MarkerEdgeColor', old);
    old = LocalUpdateColors(allMarker, 'MarkerFaceColor', old);
    old = LocalUpdateColors(allEdge, 'EdgeColor', old);
    old = LocalUpdateColors(allEdge, 'FaceColor', old);
    old = LocalUpdateColors(allCData, 'CData', old);

   case 'cmyk'
    if strncmp(format,'eps',3)
      format = [format 'c'];
      args = {args{:}, ['-d' format], '-cmyk'};
    else
      args = {args{:}, ['-d' format]};
    end
   otherwise
    error('Invalid Color parameter');
  end
  if (~isempty(renderer))
    args = {args{:}, ['-' renderer]};
  end
  % Bitmap formats always need an explicit resolution flag.
  if (~isempty(resolution)) | ~strncmp(format,'eps',3)
    if isempty(resolution)
      resolution = 0;
    end
    args = {args{:}, ['-r' int2str(resolution)]};
  end

  % Process font parameters
  if (~isempty(fontmode))
    oldfonts = LocalGetAsCell(allFont,'FontSize');
    switch (fontmode)
     case 'fixed'
      oldfontunits = LocalGetAsCell(allFont,'FontUnits');
      old = LocalPushOldData(old, allFont, {'FontUnits'}, oldfontunits);
      set(allFont,'FontUnits','points');
      if (fontsize == -1)
        set(allFont,'FontSize',7);
      else
        set(allFont,'FontSize',fontsize);
      end
     case 'scaled'
      if (fontsize == -1)
        wscale = width/figurePos(3);
        hscale = height/figurePos(4);
        scale = min(wscale, hscale);
      else
        scale = fontsize;
      end
      newfonts = LocalScale(oldfonts,scale,5);
      set(allFont,{'FontSize'},newfonts);
     otherwise
      error('Invalid FontMode parameter');
    end
    % make sure we push the size after the units
    old = LocalPushOldData(old, allFont, {'FontSize'}, oldfonts);
  end
  if strcmp(fontencoding,'adobe') & strncmp(format,'eps',3)
    args = {args{:}, '-adobecset'};
  end

  % Process linewidth parameters
  if (~isempty(linemode))
    oldlines = LocalGetAsCell(allMarker,'LineWidth');
    old = LocalPushOldData(old, allMarker, {'LineWidth'}, oldlines);
    switch (linemode)
     case 'fixed'
      if (linewidth == -1)
        set(allMarker,'LineWidth',1);
      else
        set(allMarker,'LineWidth',linewidth);
      end
     case 'scaled'
      if (linewidth == -1)
        wscale = width/figurePos(3);
        hscale = height/figurePos(4);
        scale = min(wscale, hscale);
      else
        scale = linewidth;
      end
      newlines = LocalScale(oldlines, scale, 0.5);
      set(allMarker,{'LineWidth'},newlines);
     otherwise
      error('Invalid LineMode parameter');
    end
  end

  % Export
  print(H, filename, args{:});

catch
  hadError = 1;
end

% Restore figure settings
for n=1:length(old.objs)
  set(old.objs{n}, old.prop{n}, old.values{n});
end

% Re-raise any error only after the figure has been restored.
if hadError
  error(deblank(lasterr));
end

% Show preview if requested
if showPreview
  X = imread(filename,'png');
  delete(filename);
  f = figure( 'Name', 'Preview', ...
              'Menubar', 'none', ...
              'NumberTitle', 'off', ...
              'Visible', 'off');
  image(X);
  axis image;
  ax = findobj(f, 'type', 'axes');
  set(ax, 'Units', get(H,'PaperUnits'), ...
          'Position', [0 0 width height], ...
          'Visible', 'off');
  set(ax, 'Units', 'pixels');
  axesPos = get(ax,'Position');
  figPos = get(f,'Position');
  rootSize = get(0,'ScreenSize');
  figPos(3:4) = axesPos(3:4);
  % Keep the preview window fully on screen.
  if figPos(1) + figPos(3) > rootSize(3)
    figPos(1) = rootSize(3) - figPos(3) - 50;
  end
  if figPos(2) + figPos(4) > rootSize(4)
    figPos(2) = rootSize(4) - figPos(4) - 50;
  end
  set(f, 'Position',figPos, ...
         'Visible', 'on');
end
|
||||
|
||||
%
|
||||
% Local Functions
|
||||
%
|
||||
|
||||
function outData = LocalPushOldData(inData, objs, prop, values)
% Append one (objects, property, values) record to the restore stack so the
% original property values can be reinstated after printing.
outData.objs = {inData.objs{:}, objs};
outData.prop = {inData.prop{:}, prop};
outData.values = {inData.values{:}, values};
|
||||
|
||||
function cellArray = LocalGetAsCell(fig,prop);
% Get a property from one or more handles, always returning a cell array
% (get() returns a bare value for a single handle).
cellArray = get(fig,prop);
if (~isempty(cellArray)) & (~iscell(cellArray))
  cellArray = {cellArray};
end
|
||||
|
||||
function newArray = LocalScale(inArray, scale, minValue)
% Multiply each (cell-wrapped) value by `scale`, clamping the result from
% below at `minValue`. Only the first element of each entry is used.
n = length(inArray);
newArray = cell(n,1);
for k=1:n
  newArray{k} = max(minValue,scale*inArray{k}(1));
end
|
||||
|
||||
function newArray = LocalMapToGray(inArray);
% Convert a cell array of ColorSpecs to gray: single-letter color names are
% first expanded to RGB triples, then RGB is reduced to luminance
% (0.30R + 0.59G + 0.11B) replicated across all three channels.
% Non-color strings (e.g. 'none', 'auto') are passed through unchanged.
n = length(inArray);
newArray = cell(n,1);
for k=1:n
  color = inArray{k};
  if (~isempty(color))
    if ischar(color)
      switch color(1)
       case 'y'
        color = [1 1 0];
       case 'm'
        color = [1 0 1];
       case 'c'
        color = [0 1 1];
       case 'r'
        color = [1 0 0];
       case 'g'
        color = [0 1 0];
       case 'b'
        color = [0 0 1];
       case 'w'
        color = [1 1 1];
       case 'k'
        color = [0 0 0];
       otherwise
        % Not a recognized color letter: keep the string as-is.
        newArray{k} = color;
      end
    end
    if ~ischar(color)
      % Collapse RGB to a single luminance value.
      color = 0.30*color(1) + 0.59*color(2) + 0.11*color(3);
    end
  end
  if isempty(color) | ischar(color)
    newArray{k} = color;
  else
    newArray{k} = [color color color];
  end
end
|
||||
|
||||
function newArray = LocalMapCData(inArray);
% Convert true-color (MxNx3 double) CData entries to grayscale by writing the
% luminance into all three channels. Indexed CData is left untouched.
n = length(inArray);
newArray = cell(n,1);
for k=1:n
  color = inArray{k};
  if (ndims(color) == 3) & isa(color,'double')
    gray = 0.30*color(:,:,1) + 0.59*color(:,:,2) + 0.11*color(:,:,3);
    color(:,:,1) = gray;
    color(:,:,2) = gray;
    color(:,:,3) = gray;
  end
  newArray{k} = color;
end
|
||||
|
||||
function outData = LocalUpdateColors(inArray, prop, inData)
% Save the current values of `prop` on the restore stack, then overwrite them
% with grayscale equivalents (CData gets per-pixel mapping, everything else
% ColorSpec mapping).
value = LocalGetAsCell(inArray,prop);
outData.objs = {inData.objs{:}, inArray};
outData.prop = {inData.prop{:}, {prop}};
outData.values = {inData.values{:}, value};
if (~isempty(value))
  if strcmp(prop,'CData')
    value = LocalMapCData(value);
  else
    value = LocalMapToGray(value);
  end
  set(inArray,{prop},value);
end
|
||||
|
||||
function bool = LocalIsPositiveScalar(value)
% True iff value is a numeric scalar strictly greater than zero.
% NOTE(review): uses elementwise & (not &&), so a non-scalar or empty input
% yields a non-scalar/empty result rather than a plain false — callers rely
% on MATLAB's "if" semantics to still treat that as invalid.
bool = isnumeric(value) & ...
       prod(size(value)) == 1 & ...
       value > 0;
|
||||
|
||||
function value = LocalToNum(value)
% Convert a string parameter value to numeric; numeric input passes through.
if ischar(value)
  value = str2num(value);
end
|
||||
@@ -0,0 +1,600 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "normal_async_test.h"
|
||||
|
||||
#include <assert.h>
|
||||
#include <string.h>
|
||||
#include <queue>
|
||||
#include <sstream>
|
||||
#include <vector>
|
||||
|
||||
#include "gtest/gtest.h"
|
||||
#include "tick_util.h"
|
||||
#include "testsupport/fileutils.h"
|
||||
#include "typedefs.h"
|
||||
|
||||
using namespace webrtc;
|
||||
|
||||
// Default-constructed async test: test number 1.
//
// Fix: the base-class constructor was being passed the member `_testNo`,
// which is not yet initialized when the base class is constructed (members
// are initialized only after all base classes) — an uninitialized read.
// Pass the literal value 1 that `_testNo` is initialized to instead.
// Also initialize `_lengthEncFrame`, which the other constructors zero but
// this one left uninitialized.
NormalAsyncTest::NormalAsyncTest()
:
NormalTest("Async Normal Test 1", "A test of normal execution of the codec",
           1),
_decodeCompleteTime(0),
_encodeCompleteTime(0),
_encFrameCnt(0),
_decFrameCnt(0),
_requestKeyFrame(false),
_testNo(1),
_lengthEncFrame(0),
_appendNext(false),
_missingFrames(false),
_rttFrames(0),
_hasReceivedSLI(false),
_hasReceivedRPSI(false),
_hasReceivedPLI(false),
_waitForKey(false)
{
}
|
||||
|
||||
// Async test with an explicit target bit rate; test number 1.
//
// Fix: as in the default constructor, the base class was handed the
// uninitialized member `_testNo`; pass the literal 1 instead. Also zero
// `_lengthEncFrame` for consistency with the other constructors.
NormalAsyncTest::NormalAsyncTest(WebRtc_UWord32 bitRate)
:
NormalTest("Async Normal Test 1", "A test of normal execution of the codec",
           bitRate, 1),
_decodeCompleteTime(0),
_encodeCompleteTime(0),
_encFrameCnt(0),
_decFrameCnt(0),
_requestKeyFrame(false),
_testNo(1),
_lengthEncFrame(0),
_appendNext(false),
_missingFrames(false),
_rttFrames(0),
_hasReceivedSLI(false),
_hasReceivedRPSI(false),
_hasReceivedPLI(false),
_waitForKey(false)
{
}
|
||||
|
||||
// Async test with a custom name, description and test number.
//
// Fix: the base class was handed the member `_testNo` before it was
// initialized (members initialize after bases) — pass the `testNo`
// parameter, which is the value `_testNo` is set to.
NormalAsyncTest::NormalAsyncTest(std::string name, std::string description,
                                 unsigned int testNo)
:
NormalTest(name, description, testNo),
_decodeCompleteTime(0),
_encodeCompleteTime(0),
_encFrameCnt(0),
_decFrameCnt(0),
_requestKeyFrame(false),
_testNo(testNo),
_lengthEncFrame(0),
_appendNext(false),
_missingFrames(false),
_rttFrames(0),
_hasReceivedSLI(false),
_hasReceivedRPSI(false),
_hasReceivedPLI(false),
_waitForKey(false)
{
}
|
||||
|
||||
// Async test with custom name/description, target bit rate and test number.
//
// Fix: pass the `testNo` parameter to the base class instead of the
// not-yet-initialized member `_testNo` (uninitialized read).
NormalAsyncTest::NormalAsyncTest(std::string name, std::string description,
                                 WebRtc_UWord32 bitRate, unsigned int testNo)
:
NormalTest(name, description, bitRate, testNo),
_decodeCompleteTime(0),
_encodeCompleteTime(0),
_encFrameCnt(0),
_decFrameCnt(0),
_requestKeyFrame(false),
_testNo(testNo),
_lengthEncFrame(0),
_appendNext(false),
_missingFrames(false),
_rttFrames(0),
_hasReceivedSLI(false),
_hasReceivedRPSI(false),
_hasReceivedPLI(false),
_waitForKey(false)
{
}
|
||||
|
||||
// Async test with an additional simulated round-trip time expressed in
// frames (`rttFrames`).
//
// Fix: pass the `testNo` parameter to the base class instead of the
// not-yet-initialized member `_testNo` (uninitialized read).
NormalAsyncTest::NormalAsyncTest(std::string name, std::string description,
                                 WebRtc_UWord32 bitRate, unsigned int testNo,
                                 unsigned int rttFrames)
:
NormalTest(name, description, bitRate, testNo),
_decodeCompleteTime(0),
_encodeCompleteTime(0),
_encFrameCnt(0),
_decFrameCnt(0),
_requestKeyFrame(false),
_testNo(testNo),
_lengthEncFrame(0),
_appendNext(false),
_missingFrames(false),
_rttFrames(rttFrames),
_hasReceivedSLI(false),
_hasReceivedRPSI(false),
_hasReceivedPLI(false),
_waitForKey(false)
{
}
|
||||
|
||||
// Prepares the test run: derives default output file names from the test
// number and opens the source, encoded-output and decoded-output files.
// Exits the process on any file error. After the first call, subsequent
// calls append to the decoded file (_appendNext is set at the end).
void
NormalAsyncTest::Setup()
{
    CodecTest::Setup();
    std::stringstream ss;
    std::string strTestNo;
    ss << _testNo;
    ss >> strTestNo;

    // Check if settings exist. Otherwise use defaults.
    if (_outname == "")
    {
        _outname = webrtc::test::OutputPath() + "out_normaltest" + strTestNo +
            ".yuv";
    }

    if (_encodedName == "")
    {
        _encodedName = webrtc::test::OutputPath() + "encoded_normaltest" +
            strTestNo + ".yuv";
    }

    if ((_sourceFile = fopen(_inname.c_str(), "rb")) == NULL)
    {
        printf("Cannot read file %s.\n", _inname.c_str());
        exit(1);
    }

    if ((_encodedFile = fopen(_encodedName.c_str(), "wb")) == NULL)
    {
        printf("Cannot write encoded file.\n");
        exit(1);
    }

    // The first call truncates the decoded file ("wb"); later calls append
    // ("ab") so multi-pass tests accumulate output in one file.
    char mode[3] = "wb";
    if (_appendNext)
    {
        strncpy(mode, "ab", 3);
    }

    if ((_decodedFile = fopen(_outname.c_str(), mode)) == NULL)
    {
        printf("Cannot write file %s.\n", _outname.c_str());
        exit(1);
    }

    _appendNext = true;
}
|
||||
|
||||
// Closes the three files opened in Setup() and runs the base-class teardown.
void
NormalAsyncTest::Teardown()
{
    CodecTest::Teardown();
    fclose(_sourceFile);
    fclose(_encodedFile);
    fclose(_decodedFile);
}
|
||||
|
||||
// Releases the frame and the codec-specific info owned by this tuple.
// delete on a null pointer is a no-op, so the original explicit NULL
// checks were redundant and have been removed.
FrameQueueTuple::~FrameQueueTuple()
{
    delete _codecSpecificInfo;
    delete _frame;
}
|
||||
|
||||
// Appends a frame (plus optional codec-specific info) to the queue.
// Takes ownership of both pointers; the FrameQueueTuple destructor deletes
// them. Thread-safe via the queue's write lock.
void FrameQueue::PushFrame(VideoFrame *frame,
                           webrtc::CodecSpecificInfo* codecSpecificInfo)
{
    WriteLockScoped cs(_queueRWLock);
    _frameBufferQueue.push(new FrameQueueTuple(frame, codecSpecificInfo));
}
|
||||
|
||||
// Removes and returns the oldest queued tuple, or NULL when the queue is
// empty. The caller takes ownership of the returned tuple.
FrameQueueTuple* FrameQueue::PopFrame()
{
    WriteLockScoped cs(_queueRWLock);
    FrameQueueTuple* oldest = NULL;
    if (!_frameBufferQueue.empty())
    {
        oldest = _frameBufferQueue.front();
        _frameBufferQueue.pop();
    }
    return oldest;
}
|
||||
|
||||
// Returns true when no frames are queued. Thread-safe via the read lock.
bool FrameQueue::Empty()
{
    ReadLockScoped cs(_queueRWLock);
    return _frameBufferQueue.empty();
}
|
||||
|
||||
// Returns the total number of encoded bytes seen by this callback so far.
WebRtc_UWord32 VideoEncodeCompleteCallback::EncodedBytes()
{
    return _encodedBytes;
}
|
||||
|
||||
// Encode-complete handler: notifies the test, copies the encoded image into
// a newly allocated frame, optionally writes it to the encoded-output file,
// and pushes it (with a copy of the codec-specific info) onto the frame
// queue for later decoding.
//
// Returns 0 on success, -1 on a short write to the encoded file.
WebRtc_Word32
VideoEncodeCompleteCallback::Encoded(EncodedImage& encodedImage,
                                     const webrtc::CodecSpecificInfo*
                                         codecSpecificInfo,
                                     const webrtc::RTPFragmentationHeader*
                                         fragmentation)
{
    _test.Encoded(encodedImage);
    VideoFrame* newBuffer = new VideoFrame();
    newBuffer->VerifyAndAllocate(encodedImage._size);
    _encodedBytes += encodedImage._length;
    // If _frameQueue would have been a fixed sized buffer we could have asked
    // it for an empty frame and then just do:
    // emptyFrame->SwapBuffers(encodedBuffer);
    // This is how it should be done in Video Engine to save in on memcpys
    webrtc::CodecSpecificInfo* codecSpecificInfoCopy =
        _test.CopyCodecSpecificInfo(codecSpecificInfo);
    _test.CopyEncodedImage(*newBuffer, encodedImage, codecSpecificInfoCopy);
    if (_encodedFile != NULL)
    {
        if (fwrite(newBuffer->Buffer(), 1, newBuffer->Length(),
                   _encodedFile) != newBuffer->Length()) {
            // Fixed: the original leaked both allocations on this error
            // path, since ownership only transfers via PushFrame() below.
            delete newBuffer;
            delete codecSpecificInfoCopy;
            return -1;
        }
    }
    _frameQueue->PushFrame(newBuffer, codecSpecificInfoCopy);
    return 0;
}
|
||||
|
||||
// Returns the total number of decoded bytes seen by this callback so far.
WebRtc_UWord32 VideoDecodeCompleteCallback::DecodedBytes()
{
    return _decodedBytes;
}
|
||||
|
||||
// Decode-complete handler: notifies the test, accumulates the byte count,
// and optionally writes the raw decoded frame to the output file.
// Returns 0 on success, -1 on a short write.
WebRtc_Word32
VideoDecodeCompleteCallback::Decoded(VideoFrame& image)
{
    _test.Decoded(image);
    _decodedBytes += image.Length();
    if (_decodedFile != NULL)
    {
        if (fwrite(image.Buffer(), 1, image.Length(),
                   _decodedFile) != image.Length()) {
            return -1;
        }
    }
    return 0;
}
|
||||
|
||||
// Forwards the reference-frame notification from the decoder to the test.
WebRtc_Word32
VideoDecodeCompleteCallback::ReceivedDecodedReferenceFrame(
    const WebRtc_UWord64 pictureId)
{
    return _test.ReceivedDecodedReferenceFrame(pictureId);
}
|
||||
|
||||
// Forwards the decoded-frame notification from the decoder to the test.
WebRtc_Word32
VideoDecodeCompleteCallback::ReceivedDecodedFrame(
    const WebRtc_UWord64 pictureId)
{
    return _test.ReceivedDecodedFrame(pictureId);
}
|
||||
|
||||
// Records the encode completion time, bumps the encoded-frame counter, and
// accumulates the encode pipeline latency for this frame's timestamp
// (start time was stored in _encodeTimes by Encode()).
void
NormalAsyncTest::Encoded(const EncodedImage& encodedImage)
{
    _encodeCompleteTime = tGetTime();
    _encFrameCnt++;
    _totalEncodePipeTime += _encodeCompleteTime -
        _encodeTimes[encodedImage._timeStamp];
}
|
||||
|
||||
// Records the decode completion time, bumps the decoded-frame counter,
// accumulates the decode pipeline latency for this frame's timestamp
// (start time was stored in _decodeTimes by Decode()), and remembers the
// dimensions of the last decoded frame.
void
NormalAsyncTest::Decoded(const VideoFrame& decodedImage)
{
    _decodeCompleteTime = tGetTime();
    _decFrameCnt++;
    _totalDecodePipeTime += _decodeCompleteTime -
        _decodeTimes[decodedImage.TimeStamp()];
    _decodedWidth = decodedImage.Width();
    _decodedHeight = decodedImage.Height();
}
|
||||
|
||||
// Runs the whole encode/decode pipeline over foreman_cif.yuv (CIF, 30 fps):
// initializes both codecs, encodes frames one at a time, drains the queue
// of encoded frames through (optional) packet loss and the decoder, then
// prints and logs timing/bitrate statistics and releases the codecs.
// Exits the process on codec initialization or decode errors.
void
NormalAsyncTest::Perform()
{
    _inname = webrtc::test::ProjectRootPath() + "resources/foreman_cif.yuv";
    CodecSettings(352, 288, 30, _bitRate);
    Setup();
    _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
    _decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
    if(_encoder->InitEncode(&_inst, 1, 1440) < 0)
    {
        exit(EXIT_FAILURE);
    }
    _decoder->InitDecode(&_inst, 1);
    FrameQueue frameQueue;
    VideoEncodeCompleteCallback encCallback(_encodedFile, &frameQueue, *this);
    VideoDecodeCompleteCallback decCallback(_decodedFile, *this);
    _encoder->RegisterEncodeCompleteCallback(&encCallback);
    _decoder->RegisterDecodeCompleteCallback(&decCallback);
    if (SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK)
    {
        exit(EXIT_FAILURE);
    }
    // Reset all counters and accumulators for this run.
    _totalEncodeTime = _totalDecodeTime = 0;
    _totalEncodePipeTime = _totalDecodePipeTime = 0;
    bool complete = false;
    _framecnt = 0;
    _encFrameCnt = 0;
    _decFrameCnt = 0;
    _sumEncBytes = 0;
    _lengthEncFrame = 0;
    double starttime = tGetTime();
    while (!complete)
    {
        CodecSpecific_InitBitrate();
        complete = Encode();  // true once the source file is exhausted
        if (!frameQueue.Empty() || complete)
        {
            // Drain every encoded frame produced so far through the decoder.
            while (!frameQueue.Empty())
            {
                _frameToDecode =
                    static_cast<FrameQueueTuple *>(frameQueue.PopFrame());
                // DoPacketLoss() == 2 means the entire frame was dropped.
                int lost = DoPacketLoss();
                if (lost == 2)
                {
                    // Lost the whole frame, continue
                    _missingFrames = true;
                    delete _frameToDecode;
                    _frameToDecode = NULL;
                    continue;
                }
                int ret = Decode(lost);
                delete _frameToDecode;
                _frameToDecode = NULL;
                if (ret < 0)
                {
                    fprintf(stderr,"\n\nError in decoder: %d\n\n", ret);
                    exit(EXIT_FAILURE);
                }
                else if (ret == 0)
                {
                    _framecnt++;
                }
                else
                {
                    fprintf(stderr,
                        "\n\nPositive return value from decode!\n\n");
                }
            }
        }
    }
    double endtime = tGetTime();
    double totalExecutionTime = endtime - starttime;
    printf("Total execution time: %.1f s\n", totalExecutionTime);
    _sumEncBytes = encCallback.EncodedBytes();
    // NOTE(review): the averages below assume at least one frame was encoded
    // and decoded; with an empty source file these divide by zero.
    double actualBitRate = ActualBitRate(_encFrameCnt) / 1000.0;
    double avgEncTime = _totalEncodeTime / _encFrameCnt;
    double avgDecTime = _totalDecodeTime / _decFrameCnt;
    printf("Actual bitrate: %f kbps\n", actualBitRate);
    printf("Average encode time: %.1f ms\n", 1000 * avgEncTime);
    printf("Average decode time: %.1f ms\n", 1000 * avgDecTime);
    printf("Average encode pipeline time: %.1f ms\n",
           1000 * _totalEncodePipeTime / _encFrameCnt);
    printf("Average decode pipeline time: %.1f ms\n",
           1000 * _totalDecodePipeTime / _decFrameCnt);
    printf("Number of encoded frames: %u\n", _encFrameCnt);
    printf("Number of decoded frames: %u\n", _decFrameCnt);
    (*_log) << "Actual bitrate: " << actualBitRate << " kbps\tTarget: " <<
        _bitRate << " kbps" << std::endl;
    (*_log) << "Average encode time: " << avgEncTime << " s" << std::endl;
    (*_log) << "Average decode time: " << avgDecTime << " s" << std::endl;
    _encoder->Release();
    _decoder->Release();
    Teardown();
}
|
||||
|
||||
// Reads one source frame, processes pending SLI/PLI feedback (simulating a
// round-trip delay of _rttFrames frames), and submits the frame to the
// encoder — as a key frame when a PLI has arrived.
//
// Returns true when the source file is exhausted (end of test), false
// otherwise. The actual encode result arrives asynchronously via Encoded().
bool
NormalAsyncTest::Encode()
{
    _lengthEncFrame = 0;
    EXPECT_GT(fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile), 0u);
    _inputVideoBuffer.CopyFrame(_lengthSourceFrame, _sourceBuffer);
    // Timestamp in 90 kHz RTP units, derived from the frame count.
    _inputVideoBuffer.SetTimeStamp((unsigned int)
        (_encFrameCnt * 9e4 / _inst.maxFramerate));
    _inputVideoBuffer.SetWidth(_inst.width);
    _inputVideoBuffer.SetHeight(_inst.height);
    if (feof(_sourceFile) != 0)
    {
        return true;
    }
    _encodeCompleteTime = 0;
    _encodeTimes[_inputVideoBuffer.TimeStamp()] = tGetTime();
    std::vector<VideoFrameType> frame_types(1, kDeltaFrame);

    // check SLI queue
    _hasReceivedSLI = false;
    while (!_signalSLI.empty() && _signalSLI.front().delay == 0)
    {
        // SLI message has arrived at sender side
        _hasReceivedSLI = true;
        _pictureIdSLI = _signalSLI.front().id;
        _signalSLI.pop_front();
    }
    // decrement SLI queue times
    for (std::list<fbSignal>::iterator it = _signalSLI.begin();
         it !=_signalSLI.end(); it++)
    {
        (*it).delay--;
    }

    // check PLI queue
    _hasReceivedPLI = false;
    while (!_signalPLI.empty() && _signalPLI.front().delay == 0)
    {
        // PLI message has arrived at sender side
        _hasReceivedPLI = true;
        _signalPLI.pop_front();
    }
    // decrement PLI queue times
    for (std::list<fbSignal>::iterator it = _signalPLI.begin();
         it != _signalPLI.end(); it++)
    {
        (*it).delay--;
    }

    if (_hasReceivedPLI)
    {
        // respond to PLI by encoding a key frame
        frame_types[0] = kKeyFrame;
        _hasReceivedPLI = false;
        _hasReceivedSLI = false; // don't trigger both at once
    }

    webrtc::CodecSpecificInfo* codecSpecificInfo = CreateEncoderSpecificInfo();
    int ret = _encoder->Encode(_inputVideoBuffer,
                               codecSpecificInfo, &frame_types);
    EXPECT_EQ(ret, WEBRTC_VIDEO_CODEC_OK);
    if (codecSpecificInfo != NULL)
    {
        delete codecSpecificInfo;
        codecSpecificInfo = NULL;
    }
    // If Encoded() already ran (synchronous completion) use the recorded
    // completion time; otherwise approximate with the current time.
    if (_encodeCompleteTime > 0)
    {
        _totalEncodeTime += _encodeCompleteTime -
            _encodeTimes[_inputVideoBuffer.TimeStamp()];
    }
    else
    {
        _totalEncodeTime += tGetTime() -
            _encodeTimes[_inputVideoBuffer.TimeStamp()];
    }
    assert(ret >= 0);
    return false;
}
|
||||
|
||||
// Decodes the frame currently held in _frameToDecode. |lossValue| != 0 marks
// the frame as incomplete (packet loss). Decoder feedback codes are turned
// into delayed SLI/PLI signals for the encoder (simulated with _rttFrames
// frames of round-trip delay); on a decode error the test waits for the
// next key frame. Returns a WEBRTC_VIDEO_CODEC_* status (OK after feedback
// codes have been handled).
int
NormalAsyncTest::Decode(int lossValue)
{
    _sumEncBytes += _frameToDecode->_frame->Length();
    EncodedImage encodedImage;
    VideoEncodedBufferToEncodedImage(*(_frameToDecode->_frame), encodedImage);
    encodedImage._completeFrame = !lossValue;
    _decodeCompleteTime = 0;
    _decodeTimes[encodedImage._timeStamp] = tGetTime();
    int ret = WEBRTC_VIDEO_CODEC_OK;
    // While waiting for a key frame (after an error), skip delta frames.
    if (!_waitForKey || encodedImage._frameType == kKeyFrame)
    {
        _waitForKey = false;
        ret = _decoder->Decode(encodedImage, _missingFrames, NULL,
                               _frameToDecode->_codecSpecificInfo);

        if (ret >= 0)
        {
            _missingFrames = false;
        }
    }

    // check for SLI
    if (ret == WEBRTC_VIDEO_CODEC_REQUEST_SLI)
    {
        // add an SLI feedback to the feedback "queue"
        // to be delivered to encoder with _rttFrames delay
        _signalSLI.push_back(fbSignal(_rttFrames,
            static_cast<WebRtc_UWord8>((_lastDecPictureId) & 0x3f))); // 6 lsb

        ret = WEBRTC_VIDEO_CODEC_OK;
    }
    else if (ret == WEBRTC_VIDEO_CODEC_ERR_REQUEST_SLI)
    {
        // add an SLI feedback to the feedback "queue"
        // to be delivered to encoder with _rttFrames delay
        _signalSLI.push_back(fbSignal(_rttFrames,
            static_cast<WebRtc_UWord8>((_lastDecPictureId + 1) & 0x3f)));//6 lsb

        ret = WEBRTC_VIDEO_CODEC_OK;
    }
    else if (ret == WEBRTC_VIDEO_CODEC_ERROR)
    {
        // wait for new key frame
        // add an PLI feedback to the feedback "queue"
        // to be delivered to encoder with _rttFrames delay
        _signalPLI.push_back(fbSignal(_rttFrames, 0 /* picId not used*/));
        _waitForKey = true;

        ret = WEBRTC_VIDEO_CODEC_OK;
    }

    // If Decoded() already ran use the recorded completion time; otherwise
    // approximate with the current time.
    if (_decodeCompleteTime > 0)
    {
        _totalDecodeTime += _decodeCompleteTime -
            _decodeTimes[encodedImage._timeStamp];
    }
    else
    {
        _totalDecodeTime += tGetTime() - _decodeTimes[encodedImage._timeStamp];
    }
    return ret;
}
|
||||
|
||||
// Returns a heap-allocated copy of |codecSpecificInfo| owned by the caller,
// or NULL when the input is NULL.
webrtc::CodecSpecificInfo*
NormalAsyncTest::CopyCodecSpecificInfo(
    const webrtc::CodecSpecificInfo* codecSpecificInfo) const
{
    if (codecSpecificInfo == NULL)
    {
        // Fixed: the encode-complete callback's codecSpecificInfo parameter
        // defaults to NULL, so the unconditional dereference below could
        // read through a null pointer.
        return NULL;
    }
    webrtc::CodecSpecificInfo* info = new webrtc::CodecSpecificInfo;
    *info = *codecSpecificInfo;
    return info;
}
|
||||
|
||||
void NormalAsyncTest::CodecSpecific_InitBitrate()
|
||||
{
|
||||
if (_bitRate == 0)
|
||||
{
|
||||
_encoder->SetRates(600, _inst.maxFramerate);
|
||||
}
|
||||
else
|
||||
{
|
||||
_encoder->SetRates(_bitRate, _inst.maxFramerate);
|
||||
}
|
||||
}
|
||||
|
||||
// Copies the encoded payload, dimensions and timestamp from |src| into
// |dest|. The codec-specific info argument is unused in this base class.
void NormalAsyncTest::CopyEncodedImage(VideoFrame& dest,
                                       EncodedImage& src,
                                       void* /*codecSpecificInfo*/) const
{
    dest.CopyFrame(src._length, src._buffer);
    //dest.SetFrameType(src._frameType);
    dest.SetWidth((WebRtc_UWord16)src._encodedWidth);
    dest.SetHeight((WebRtc_UWord16)src._encodedHeight);
    dest.SetTimeStamp(src._timeStamp);
}
|
||||
|
||||
// Remembers the picture id of the last decoded reference frame.
WebRtc_Word32 NormalAsyncTest::ReceivedDecodedReferenceFrame(
    const WebRtc_UWord64 pictureId) {
    _lastDecRefPictureId = pictureId;
    return 0;
}
|
||||
|
||||
// Remembers the picture id of the last decoded frame (used for SLI).
WebRtc_Word32 NormalAsyncTest::ReceivedDecodedFrame(
    const WebRtc_UWord64 pictureId) {
    _lastDecPictureId = pictureId;
    return 0;
}
|
||||
|
||||
double
|
||||
NormalAsyncTest::tGetTime()
|
||||
{// return time in sec
|
||||
return ((double) (TickTime::MillisecondTimestamp())/1000);
|
||||
}
|
||||
@@ -0,0 +1,187 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_NORMAL_ASYNC_TEST_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_NORMAL_ASYNC_TEST_H_
|
||||
|
||||
#include "common_types.h"
|
||||
|
||||
#include "normal_test.h"
|
||||
#include "rw_lock_wrapper.h"
|
||||
#include <list>
|
||||
#include <map>
|
||||
#include <queue>
|
||||
|
||||
// Owning pair of an encoded frame and its optional codec-specific info,
// passed through a FrameQueue from the encode callback to the decoder.
class FrameQueueTuple
{
public:
    // Takes ownership of both pointers; they are deleted in the destructor.
    FrameQueueTuple(webrtc::VideoFrame *frame,
                    const webrtc::CodecSpecificInfo* codecSpecificInfo = NULL)
    :
        _frame(frame),
        _codecSpecificInfo(codecSpecificInfo)
    {};
    ~FrameQueueTuple();
    webrtc::VideoFrame*          _frame;              // owned
    const webrtc::CodecSpecificInfo* _codecSpecificInfo;  // owned, may be NULL
};
|
||||
|
||||
// FIFO of encoded frames guarded by a reader/writer lock, connecting the
// asynchronous encode callback to the decode loop.
class FrameQueue
{
public:
    FrameQueue()
    :
        // The wrapper factory returns a heap object; it is stored as a
        // reference and released with "delete &" in the destructor.
        _queueRWLock(*webrtc::RWLockWrapper::CreateRWLock())
    {
    }

    ~FrameQueue()
    {
        delete &_queueRWLock;
    }

    // Appends a frame; takes ownership of both pointers.
    void PushFrame(webrtc::VideoFrame *frame,
                   webrtc::CodecSpecificInfo* codecSpecificInfo = NULL);
    // Removes and returns the oldest tuple (caller owns), or NULL if empty.
    FrameQueueTuple* PopFrame();
    // True when no frames are queued.
    bool Empty();

private:
    webrtc::RWLockWrapper&          _queueRWLock;
    std::queue<FrameQueueTuple *>   _frameBufferQueue;
};
|
||||
|
||||
// Feedback signal (SLI/PLI) travelling back to the encoder, delivered
// after |delay| more encoded frames to simulate a round-trip time.
struct fbSignal
{
    fbSignal(int d, WebRtc_UWord8 pid) : delay(d), id(pid) {};
    int delay;          // frames remaining before the signal arrives
    WebRtc_UWord8 id;   // picture id the signal refers to (6 LSBs for SLI)
};
|
||||
|
||||
// Asynchronous (callback-driven) variant of NormalTest: frames are encoded,
// queued by the encode callback, then decoded, with optional packet loss
// and SLI/PLI feedback simulation delayed by a configurable number of
// frames (rttFrames).
class NormalAsyncTest : public NormalTest
{
public:
    NormalAsyncTest();
    NormalAsyncTest(WebRtc_UWord32 bitRate);
    NormalAsyncTest(std::string name, std::string description,
                    unsigned int testNo);
    NormalAsyncTest(std::string name, std::string description,
                    WebRtc_UWord32 bitRate, unsigned int testNo);
    NormalAsyncTest(std::string name, std::string description,
                    WebRtc_UWord32 bitRate, unsigned int testNo,
                    unsigned int rttFrames);
    virtual ~NormalAsyncTest() {};
    // Runs the complete encode/decode pipeline and prints statistics.
    virtual void Perform();
    // Encode-complete hook: updates counters and pipeline timing.
    virtual void Encoded(const webrtc::EncodedImage& encodedImage);
    // Decode-complete hook: updates counters, timing and frame dimensions.
    virtual void Decoded(const webrtc::VideoFrame& decodedImage);
    // Returns a caller-owned copy of the codec-specific info.
    virtual webrtc::CodecSpecificInfo*
    CopyCodecSpecificInfo(
        const webrtc::CodecSpecificInfo* codecSpecificInfo) const;
    virtual void CopyEncodedImage(webrtc::VideoFrame& dest,
                                  webrtc::EncodedImage& src,
                                  void* /*codecSpecificInfo*/) const;
    // Subclasses may supply encoder-specific info; base returns NULL.
    virtual webrtc::CodecSpecificInfo* CreateEncoderSpecificInfo() const
    {
        return NULL;
    };
    virtual WebRtc_Word32 ReceivedDecodedReferenceFrame(
        const WebRtc_UWord64 pictureId);
    virtual WebRtc_Word32 ReceivedDecodedFrame(const WebRtc_UWord64 pictureId);

protected:
    virtual void Setup();
    virtual void Teardown();
    // Encodes one source frame; returns true at end of file.
    virtual bool Encode();
    // Decodes _frameToDecode; lossValue != 0 marks the frame incomplete.
    virtual int Decode(int lossValue = 0);
    virtual void CodecSpecific_InitBitrate();
    virtual int SetCodecSpecificParameters() {return 0;};
    double tGetTime();// return time in sec

    FILE*                   _sourceFile;
    FILE*                   _decodedFile;
    WebRtc_UWord32          _decodedWidth;
    WebRtc_UWord32          _decodedHeight;
    double                  _totalEncodeTime;
    double                  _totalDecodeTime;
    double                  _decodeCompleteTime;
    double                  _encodeCompleteTime;
    double                  _totalEncodePipeTime;
    double                  _totalDecodePipeTime;
    int                     _framecnt;
    int                     _encFrameCnt;
    int                     _decFrameCnt;
    bool                    _requestKeyFrame;
    unsigned int            _testNo;
    unsigned int            _lengthEncFrame;
    FrameQueueTuple*        _frameToDecode;
    bool                    _appendNext;
    // Per-timestamp start times used to compute pipeline latencies.
    std::map<WebRtc_UWord32, double> _encodeTimes;
    std::map<WebRtc_UWord32, double> _decodeTimes;
    bool                    _missingFrames;
    // Pending SLI/PLI feedback, delivered after _rttFrames frames.
    std::list<fbSignal>     _signalSLI;
    int                     _rttFrames;
    mutable bool            _hasReceivedSLI;
    mutable bool            _hasReceivedRPSI;
    WebRtc_UWord8           _pictureIdSLI;
    WebRtc_UWord16          _pictureIdRPSI;
    WebRtc_UWord64          _lastDecRefPictureId;
    WebRtc_UWord64          _lastDecPictureId;
    std::list<fbSignal>     _signalPLI;
    bool                    _hasReceivedPLI;
    bool                    _waitForKey;
};
|
||||
|
||||
// Encode-complete callback: forwards each encoded image to the test,
// optionally writes it to a file, and queues it for decoding.
class VideoEncodeCompleteCallback : public webrtc::EncodedImageCallback
{
public:
    // Does not take ownership of the file, queue or test.
    VideoEncodeCompleteCallback(FILE* encodedFile, FrameQueue *frameQueue,
                                NormalAsyncTest& test)
    :
        _encodedFile(encodedFile),
        _frameQueue(frameQueue),
        _test(test),
        _encodedBytes(0)
    {}

    WebRtc_Word32
    Encoded(webrtc::EncodedImage& encodedImage,
            const webrtc::CodecSpecificInfo* codecSpecificInfo = NULL,
            const webrtc::RTPFragmentationHeader* fragmentation = NULL);
    // Total encoded bytes observed so far.
    WebRtc_UWord32 EncodedBytes();
private:
    FILE*             _encodedFile;   // not owned, may be NULL
    FrameQueue*       _frameQueue;    // not owned
    NormalAsyncTest&  _test;
    WebRtc_UWord32    _encodedBytes;
};
|
||||
|
||||
// Decode-complete callback: forwards each decoded frame to the test and
// optionally writes the raw frame to a file.
class VideoDecodeCompleteCallback : public webrtc::DecodedImageCallback
{
public:
    // Does not take ownership of the file or test.
    VideoDecodeCompleteCallback(FILE* decodedFile, NormalAsyncTest& test)
    :
        _decodedFile(decodedFile),
        _test(test),
        _decodedBytes(0)
    {}

    virtual WebRtc_Word32 Decoded(webrtc::VideoFrame& decodedImage);
    virtual WebRtc_Word32
    ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId);
    virtual WebRtc_Word32 ReceivedDecodedFrame(const WebRtc_UWord64 pictureId);

    // Total decoded bytes observed so far.
    WebRtc_UWord32 DecodedBytes();
private:
    FILE*             _decodedFile;   // not owned, may be NULL
    NormalAsyncTest&  _test;
    WebRtc_UWord32    _decodedBytes;
};
|
||||
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_NORMAL_ASYNC_TEST_H_
|
||||
259
webrtc/modules/video_coding/codecs/test_framework/normal_test.cc
Normal file
259
webrtc/modules/video_coding/codecs/test_framework/normal_test.cc
Normal file
@@ -0,0 +1,259 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "normal_test.h"
|
||||
|
||||
#include <time.h>
|
||||
#include <sstream>
|
||||
#include <string.h>
|
||||
|
||||
#include "gtest/gtest.h"
|
||||
#include "testsupport/fileutils.h"
|
||||
|
||||
// Default test: normal codec execution with test number 1.
NormalTest::NormalTest()
:
CodecTest("Normal Test 1", "A test of normal execution of the codec"),
_requestKeyFrame(false),  // Fixed: was left uninitialized here, although
                          // every other constructor initializes it.
_testNo(1),
_lengthEncFrame(0),
_appendNext(false)
{
}
|
||||
|
||||
// Named test; |testNo| selects the output file names used by Setup().
NormalTest::NormalTest(std::string name, std::string description,
                       unsigned int testNo)
:
CodecTest(name, description),
_requestKeyFrame(false),
_testNo(testNo),
_lengthEncFrame(0),
_appendNext(false)
{
}
|
||||
|
||||
// Named test with an explicit target bit rate.
NormalTest::NormalTest(std::string name, std::string description,
                       WebRtc_UWord32 bitRate, unsigned int testNo)
:
CodecTest(name, description, bitRate),
_requestKeyFrame(false),
_testNo(testNo),
_lengthEncFrame(0),
_appendNext(false)
{
}
|
||||
|
||||
// Prepares the test run: derives default output file names from the test
// number and opens the source, encoded-output and decoded-output files.
// Exits the process on any file error. After the first call, subsequent
// calls append to the decoded file (_appendNext is set at the end).
void
NormalTest::Setup()
{
    CodecTest::Setup();
    std::stringstream ss;
    std::string strTestNo;
    ss << _testNo;
    ss >> strTestNo;

    // Check if settings exist. Otherwise use defaults.
    if (_outname == "")
    {
        _outname = webrtc::test::OutputPath() + "out_normaltest" + strTestNo +
            ".yuv";
    }

    if (_encodedName == "")
    {
        _encodedName = webrtc::test::OutputPath() + "encoded_normaltest" +
            strTestNo + ".yuv";
    }

    if ((_sourceFile = fopen(_inname.c_str(), "rb")) == NULL)
    {
        printf("Cannot read file %s.\n", _inname.c_str());
        exit(1);
    }

    if ((_encodedFile = fopen(_encodedName.c_str(), "wb")) == NULL)
    {
        printf("Cannot write encoded file.\n");
        exit(1);
    }

    // The first call truncates the decoded file ("wb"); later calls append
    // ("ab") so multi-pass tests accumulate output in one file.
    char mode[3] = "wb";
    if (_appendNext)
    {
        strncpy(mode, "ab", 3);
    }

    if ((_decodedFile = fopen(_outname.c_str(), mode)) == NULL)
    {
        printf("Cannot write file %s.\n", _outname.c_str());
        exit(1);
    }

    _appendNext = true;
}
|
||||
|
||||
void
|
||||
NormalTest::Teardown()
|
||||
{
|
||||
CodecTest::Teardown();
|
||||
fclose(_sourceFile);
|
||||
fclose(_decodedFile);
|
||||
}
|
||||
|
||||
// Synchronous encode/decode loop over foreman_cif.yuv (CIF, 30 fps):
// encodes each frame, applies optional packet loss, writes the encoded and
// decoded data to file, drains remaining decoder output, then prints and
// logs statistics and releases the codecs. Exits on decode errors.
void
NormalTest::Perform()
{
    _inname = webrtc::test::ProjectRootPath() + "resources/foreman_cif.yuv";
    CodecSettings(352, 288, 30, _bitRate);
    Setup();

    _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
    _decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
    _encodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);

    _encoder->InitEncode(&_inst, 1, 1460);
    CodecSpecific_InitBitrate();
    _decoder->InitDecode(&_inst,1);

    _totalEncodeTime = _totalDecodeTime = 0;
    _framecnt = 0;
    _sumEncBytes = 0;
    _lengthEncFrame = 0;
    int decodeLength = 0;
    while (!Encode())  // Encode() returns true at end of the source file.
    {
        DoPacketLoss();
        _encodedVideoBuffer.SetLength(_encodedVideoBuffer.Length());
        if (fwrite(_encodedVideoBuffer.Buffer(), 1,
                   _encodedVideoBuffer.Length(),
                   _encodedFile) != _encodedVideoBuffer.Length()) {
            return;
        }
        decodeLength = Decode();
        if (decodeLength < 0)
        {
            fprintf(stderr,"\n\nError in decoder: %d\n\n", decodeLength);
            exit(EXIT_FAILURE);
        }
        if (fwrite(_decodedVideoBuffer.Buffer(), 1, decodeLength,
                   _decodedFile) != static_cast<unsigned int>(decodeLength)) {
            return;
        }
        CodecSpecific_InitBitrate();
        _framecnt++;
    }

    // Ensure we empty the decoding queue.
    while (decodeLength > 0)
    {
        decodeLength = Decode();
        if (decodeLength < 0)
        {
            fprintf(stderr,"\n\nError in decoder: %d\n\n", decodeLength);
            exit(EXIT_FAILURE);
        }
        if (fwrite(_decodedVideoBuffer.Buffer(), 1, decodeLength,
                   _decodedFile) != static_cast<unsigned int>(decodeLength)) {
            return;
        }
    }

    // NOTE(review): the averages below assume _framecnt > 0; an empty
    // source file would divide by zero.
    double actualBitRate = ActualBitRate(_framecnt) / 1000.0;
    double avgEncTime = _totalEncodeTime / _framecnt;
    double avgDecTime = _totalDecodeTime / _framecnt;
    printf("Actual bitrate: %f kbps\n", actualBitRate);
    printf("Average encode time: %f s\n", avgEncTime);
    printf("Average decode time: %f s\n", avgDecTime);
    (*_log) << "Actual bitrate: " << actualBitRate << " kbps\tTarget: " << _bitRate << " kbps" << std::endl;
    (*_log) << "Average encode time: " << avgEncTime << " s" << std::endl;
    (*_log) << "Average decode time: " << avgDecTime << " s" << std::endl;

    _inputVideoBuffer.Free();

    _encoder->Release();
    _decoder->Release();

    Teardown();
}
|
||||
|
||||
// Reads one source frame and attempts to encode it, retrying up to 50 times
// (see the comment below about VP7 behavior). Returns true when the source
// file is exhausted, false otherwise. Exits on encoder errors or when the
// retry budget is exceeded.
// NOTE(review): the actual encode call is commented out below, so
// _lengthEncFrame stays 0 and the retry loop never terminates normally in
// this form — presumably subclasses or a later revision supply the call.
bool
NormalTest::Encode()
{
    _lengthEncFrame = 0;
    EXPECT_GT(fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile), 0u);
    if (feof(_sourceFile) != 0)
    {
        return true;
    }
    _inputVideoBuffer.CopyFrame(_lengthSourceFrame, _sourceBuffer);
    _inputVideoBuffer.SetTimeStamp(_framecnt);

    // This multiple attempt ridiculousness is to accomodate VP7:
    // 1. The wrapper can unilaterally reduce the framerate for low bitrates.
    // 2. The codec inexplicably likes to reject some frames. Perhaps there
    //    is a good reason for this...
    int encodingAttempts = 0;
    double starttime = 0;
    double endtime = 0;
    while (_lengthEncFrame == 0)
    {
        starttime = clock()/(double)CLOCKS_PER_SEC;

        _inputVideoBuffer.SetWidth(_inst.width);
        _inputVideoBuffer.SetHeight(_inst.height);
        //_lengthEncFrame = _encoder->Encode(_inputVideoBuffer, _encodedVideoBuffer, _frameInfo,
        //    _inst.frameRate, _requestKeyFrame && !(_framecnt%50));

        endtime = clock()/(double)CLOCKS_PER_SEC;

        _encodedVideoBuffer.SetHeight(_inst.height);
        _encodedVideoBuffer.SetWidth(_inst.width);
        if (_lengthEncFrame < 0)
        {
            (*_log) << "Error in encoder: " << _lengthEncFrame << std::endl;
            fprintf(stderr,"\n\nError in encoder: %d\n\n", _lengthEncFrame);
            exit(EXIT_FAILURE);
        }
        _sumEncBytes += _lengthEncFrame;

        encodingAttempts++;
        if (encodingAttempts > 50)
        {
            (*_log) << "Unable to encode frame: " << _framecnt << std::endl;
            fprintf(stderr,"\n\nUnable to encode frame: %d\n\n", _framecnt);
            exit(EXIT_FAILURE);
        }
    }
    // Only the last (successful) attempt's duration is accumulated.
    _totalEncodeTime += endtime - starttime;

    if (encodingAttempts > 1)
    {
        (*_log) << encodingAttempts << " attempts required to encode frame: " <<
            _framecnt + 1 << std::endl;
        fprintf(stderr,"\n%d attempts required to encode frame: %d\n", encodingAttempts,
            _framecnt + 1);
    }

    return false;
}
|
||||
|
||||
// Decodes the current encoded buffer and returns the decoded length in
// bytes (negative on error). |lossValue| is accepted for interface
// compatibility but unused here.
// NOTE(review): the actual decode call is commented out below, so this
// always returns 0 in its current form — presumably filled in elsewhere.
int
NormalTest::Decode(int lossValue)
{
    _encodedVideoBuffer.SetWidth(_inst.width);
    _encodedVideoBuffer.SetHeight(_inst.height);
    int lengthDecFrame = 0;
    //int lengthDecFrame = _decoder->Decode(_encodedVideoBuffer, _decodedVideoBuffer);
    //_totalDecodeTime += (double)((clock()/(double)CLOCKS_PER_SEC) - starttime);
    if (lengthDecFrame < 0)
    {
        return lengthDecFrame;
    }
    // Mark the encoded buffer as consumed.
    _encodedVideoBuffer.SetLength(0);
    return lengthDecFrame;
}
|
||||
@@ -0,0 +1,46 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_NORMAL_TEST_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_NORMAL_TEST_H_
|
||||
|
||||
#include "test.h"
|
||||
|
||||
// Base test of normal (synchronous) codec execution: encodes a source clip
// frame by frame, decodes it, and reports bitrate/timing statistics.
class NormalTest : public CodecTest
{
public:
    NormalTest();
    NormalTest(std::string name, std::string description, unsigned int testNo);
    NormalTest(std::string name, std::string description, WebRtc_UWord32 bitRate, unsigned int testNo);
    virtual ~NormalTest() {};
    // Runs the complete encode/decode loop and prints statistics.
    virtual void Perform();

protected:
    virtual void Setup();
    virtual void Teardown();
    // Encodes one source frame; returns true at end of file.
    virtual bool Encode();
    // Decodes the current buffer; returns decoded length (<0 on error).
    virtual int Decode(int lossValue = 0);
    virtual void CodecSpecific_InitBitrate()=0;
    // Hook for loss simulation; 0 means no loss was applied.
    virtual int DoPacketLoss() {return 0;};

    FILE*                   _sourceFile;
    FILE*                   _decodedFile;
    FILE*                   _encodedFile;
    double                  _totalEncodeTime;
    double                  _totalDecodeTime;
    unsigned int            _framecnt;
    bool                    _requestKeyFrame;
    unsigned int            _testNo;     // selects output file names
    int                     _lengthEncFrame;
    bool                    _appendNext; // append to decoded file on re-Setup
};
|
||||
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_NORMAL_TEST_H_
|
||||
|
||||
@@ -0,0 +1,254 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "packet_loss_test.h"
|
||||
#include "video_source.h"
|
||||
#include <sstream>
|
||||
#include <cassert>
|
||||
#include <string.h>
|
||||
|
||||
using namespace webrtc;
|
||||
|
||||
PacketLossTest::PacketLossTest()
|
||||
:
|
||||
NormalAsyncTest("PacketLossTest", "Encode, remove lost packets, decode", 300,
|
||||
5),
|
||||
_lossRate(0.1),
|
||||
_lossProbability(0.1),
|
||||
_lastFrame(NULL),
|
||||
_lastFrameLength(0)
|
||||
{
|
||||
}
|
||||
|
||||
PacketLossTest::PacketLossTest(std::string name, std::string description)
|
||||
:
|
||||
NormalAsyncTest(name, description, 300, 5),
|
||||
_lossRate(0.1),
|
||||
_lossProbability(0.1),
|
||||
_lastFrame(NULL),
|
||||
_lastFrameLength(0)
|
||||
{
|
||||
}
|
||||
|
||||
PacketLossTest::PacketLossTest(std::string name, std::string description, double lossRate, bool useNack, unsigned int rttFrames /* = 0*/)
|
||||
:
|
||||
NormalAsyncTest(name, description, 300, 5, rttFrames),
|
||||
_lossRate(lossRate),
|
||||
_lastFrame(NULL),
|
||||
_lastFrameLength(0)
|
||||
{
|
||||
assert(lossRate >= 0 && lossRate <= 1);
|
||||
if (useNack)
|
||||
{
|
||||
_lossProbability = 0;
|
||||
}
|
||||
else
|
||||
{
|
||||
_lossProbability = lossRate;
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
PacketLossTest::Encoded(const EncodedImage& encodedImage)
|
||||
{
|
||||
// push timestamp to queue
|
||||
_frameQueue.push_back(encodedImage._timeStamp);
|
||||
NormalAsyncTest::Encoded(encodedImage);
|
||||
}
|
||||
|
||||
void
|
||||
PacketLossTest::Decoded(const VideoFrame& decodedImage)
|
||||
{
|
||||
// check the frame queue if any frames have gone missing
|
||||
assert(!_frameQueue.empty()); // decoded frame is not in the queue
|
||||
while(_frameQueue.front() < decodedImage.TimeStamp())
|
||||
{
|
||||
// this frame is missing
|
||||
// write previous decoded frame again (frame freeze)
|
||||
if (_decodedFile && _lastFrame)
|
||||
{
|
||||
if (fwrite(_lastFrame, 1, _lastFrameLength,
|
||||
_decodedFile) != _lastFrameLength) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// remove frame from queue
|
||||
_frameQueue.pop_front();
|
||||
}
|
||||
// Decoded frame is not in the queue.
|
||||
assert(_frameQueue.front() == decodedImage.TimeStamp());
|
||||
|
||||
// pop the current frame
|
||||
_frameQueue.pop_front();
|
||||
|
||||
// save image for future freeze-frame
|
||||
if (_lastFrameLength < decodedImage.Length())
|
||||
{
|
||||
if (_lastFrame) delete [] _lastFrame;
|
||||
|
||||
_lastFrame = new WebRtc_UWord8[decodedImage.Length()];
|
||||
}
|
||||
memcpy(_lastFrame, decodedImage.Buffer(), decodedImage.Length());
|
||||
_lastFrameLength = decodedImage.Length();
|
||||
|
||||
NormalAsyncTest::Decoded(decodedImage);
|
||||
}
|
||||
|
||||
void
|
||||
PacketLossTest::Teardown()
|
||||
{
|
||||
if (_totalKept + _totalThrown > 0)
|
||||
{
|
||||
printf("Target packet loss rate: %.4f\n", _lossProbability);
|
||||
printf("Actual packet loss rate: %.4f\n", (_totalThrown * 1.0f) / (_totalKept + _totalThrown));
|
||||
printf("Channel rate: %.2f kbps\n",
|
||||
0.001 * 8.0 * _sumChannelBytes / ((_framecnt * 1.0f) / _inst.maxFramerate));
|
||||
}
|
||||
else
|
||||
{
|
||||
printf("No packet losses inflicted\n");
|
||||
}
|
||||
|
||||
NormalAsyncTest::Teardown();
|
||||
}
|
||||
|
||||
void
|
||||
PacketLossTest::Setup()
|
||||
{
|
||||
const VideoSource source(_inname, _inst.width, _inst.height, _inst.maxFramerate);
|
||||
|
||||
std::stringstream ss;
|
||||
std::string lossRateStr;
|
||||
ss << _lossRate;
|
||||
ss >> lossRateStr;
|
||||
_encodedName = source.GetName() + "-" + lossRateStr;
|
||||
_outname = "out-" + source.GetName() + "-" + lossRateStr;
|
||||
|
||||
if (_lossProbability != _lossRate)
|
||||
{
|
||||
_encodedName += "-nack";
|
||||
_outname += "-nack";
|
||||
}
|
||||
_encodedName += ".vp8";
|
||||
_outname += ".yuv";
|
||||
|
||||
_totalKept = 0;
|
||||
_totalThrown = 0;
|
||||
_sumChannelBytes = 0;
|
||||
|
||||
NormalAsyncTest::Setup();
|
||||
}
|
||||
|
||||
void
|
||||
PacketLossTest::CodecSpecific_InitBitrate()
|
||||
{
|
||||
assert(_bitRate > 0);
|
||||
WebRtc_UWord32 simulatedBitRate;
|
||||
if (_lossProbability != _lossRate)
|
||||
{
|
||||
// Simulating NACK
|
||||
simulatedBitRate = WebRtc_UWord32(_bitRate / (1 + _lossRate));
|
||||
}
|
||||
else
|
||||
{
|
||||
simulatedBitRate = _bitRate;
|
||||
}
|
||||
int rtt = 0;
|
||||
if (_inst.maxFramerate > 0)
|
||||
rtt = _rttFrames * (1000 / _inst.maxFramerate);
|
||||
_encoder->SetChannelParameters((WebRtc_UWord32)(_lossProbability * 255.0),
|
||||
rtt);
|
||||
_encoder->SetRates(simulatedBitRate, _inst.maxFramerate);
|
||||
}
|
||||
|
||||
int PacketLossTest::DoPacketLoss()
|
||||
{
|
||||
// Only packet loss for delta frames
|
||||
// TODO(mikhal): Identify delta frames
|
||||
// First frame so never a delta frame.
|
||||
if (_frameToDecode->_frame->Length() == 0 || _sumChannelBytes == 0)
|
||||
{
|
||||
_sumChannelBytes += _frameToDecode->_frame->Length();
|
||||
return 0;
|
||||
}
|
||||
unsigned char *packet = NULL;
|
||||
VideoFrame newEncBuf;
|
||||
newEncBuf.VerifyAndAllocate(_lengthSourceFrame);
|
||||
_inBufIdx = 0;
|
||||
_outBufIdx = 0;
|
||||
int size = 1;
|
||||
int kept = 0;
|
||||
int thrown = 0;
|
||||
while ((size = NextPacket(1500, &packet)) > 0)
|
||||
{
|
||||
if (!PacketLoss(_lossProbability, thrown))
|
||||
{
|
||||
InsertPacket(&newEncBuf, packet, size);
|
||||
kept++;
|
||||
}
|
||||
else
|
||||
{
|
||||
// Use the ByteLoss function if you want to lose only
|
||||
// parts of a packet, and not the whole packet.
|
||||
|
||||
//int size2 = ByteLoss(size, packet, 15);
|
||||
thrown++;
|
||||
//if (size2 != size)
|
||||
//{
|
||||
// InsertPacket(&newEncBuf, packet, size2);
|
||||
//}
|
||||
}
|
||||
}
|
||||
int lossResult = (thrown!=0); // 0 = no loss 1 = loss(es)
|
||||
if (lossResult)
|
||||
{
|
||||
lossResult += (kept==0); // 2 = all lost = full frame
|
||||
}
|
||||
_frameToDecode->_frame->CopyFrame(newEncBuf.Length(), newEncBuf.Buffer());
|
||||
_sumChannelBytes += newEncBuf.Length();
|
||||
_totalKept += kept;
|
||||
_totalThrown += thrown;
|
||||
|
||||
return lossResult;
|
||||
//printf("Threw away: %d out of %d packets\n", thrown, thrown + kept);
|
||||
//printf("Encoded left: %d bytes\n", _encodedVideoBuffer.Length());
|
||||
}
|
||||
|
||||
int PacketLossTest::NextPacket(int mtu, unsigned char **pkg)
|
||||
{
|
||||
unsigned char *buf = _frameToDecode->_frame->Buffer();
|
||||
*pkg = buf + _inBufIdx;
|
||||
if (static_cast<long>(_frameToDecode->_frame->Length()) - _inBufIdx <= mtu)
|
||||
{
|
||||
int size = _frameToDecode->_frame->Length() - _inBufIdx;
|
||||
_inBufIdx = _frameToDecode->_frame->Length();
|
||||
return size;
|
||||
}
|
||||
_inBufIdx += mtu;
|
||||
return mtu;
|
||||
}
|
||||
|
||||
int PacketLossTest::ByteLoss(int size, unsigned char *pkg, int bytesToLose)
|
||||
{
|
||||
return size;
|
||||
}
|
||||
|
||||
void PacketLossTest::InsertPacket(VideoFrame *buf, unsigned char *pkg, int size)
|
||||
{
|
||||
if (static_cast<long>(buf->Size()) - _outBufIdx < size)
|
||||
{
|
||||
printf("InsertPacket error!\n");
|
||||
return;
|
||||
}
|
||||
memcpy(buf->Buffer() + _outBufIdx, pkg, size);
|
||||
buf->SetLength(buf->Length() + size);
|
||||
_outBufIdx += size;
|
||||
}
|
||||
@@ -0,0 +1,60 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_PACKET_LOSS_TEST_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_PACKET_LOSS_TEST_H_
|
||||
|
||||
#include <list>
|
||||
|
||||
#include "normal_async_test.h"
|
||||
|
||||
class PacketLossTest : public NormalAsyncTest
|
||||
{
|
||||
public:
|
||||
PacketLossTest();
|
||||
virtual ~PacketLossTest() {if(_lastFrame) {delete [] _lastFrame; _lastFrame = NULL;}}
|
||||
virtual void Encoded(const webrtc::EncodedImage& encodedImage);
|
||||
virtual void Decoded(const webrtc::VideoFrame& decodedImage);
|
||||
protected:
|
||||
PacketLossTest(std::string name, std::string description);
|
||||
PacketLossTest(std::string name,
|
||||
std::string description,
|
||||
double lossRate,
|
||||
bool useNack,
|
||||
unsigned int rttFrames = 0);
|
||||
|
||||
virtual void Setup();
|
||||
virtual void Teardown();
|
||||
virtual void CodecSpecific_InitBitrate();
|
||||
virtual int DoPacketLoss();
|
||||
virtual int NextPacket(int size, unsigned char **pkg);
|
||||
virtual int ByteLoss(int size, unsigned char *pkg, int bytesToLose);
|
||||
virtual void InsertPacket(webrtc::VideoFrame *buf, unsigned char *pkg,
|
||||
int size);
|
||||
int _inBufIdx;
|
||||
int _outBufIdx;
|
||||
|
||||
// When NACK is being simulated _lossProbabilty is zero,
|
||||
// otherwise it is set equal to _lossRate.
|
||||
// Desired channel loss rate.
|
||||
double _lossRate;
|
||||
// Probability used to simulate packet drops.
|
||||
double _lossProbability;
|
||||
|
||||
int _totalKept;
|
||||
int _totalThrown;
|
||||
int _sumChannelBytes;
|
||||
std::list<WebRtc_UWord32> _frameQueue;
|
||||
WebRtc_UWord8* _lastFrame;
|
||||
WebRtc_UWord32 _lastFrameLength;
|
||||
};
|
||||
|
||||
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_PACKET_LOSS_TEST_H_
|
||||
@@ -0,0 +1,427 @@
|
||||
function plotBenchmark(fileNames, export)
|
||||
%PLOTBENCHMARK Plots and exports video codec benchmarking results.
|
||||
% PLOTBENCHMARK(FILENAMES, EXPORT) parses the video codec benchmarking result
|
||||
% files given by the cell array of strings FILENAME. It plots the results and
|
||||
% optionally exports each plot to an appropriately named file.
|
||||
%
|
||||
% EXPORT parameter:
|
||||
% 'none' No file exports.
|
||||
% 'eps' Exports to eps files (default).
|
||||
% 'pdf' Exports to eps files and uses the command-line utility
|
||||
% epstopdf to obtain pdf files.
|
||||
%
|
||||
% Example:
|
||||
% plotBenchmark({'H264Benchmark.txt' 'LSVXBenchmark.txt'}, 'pdf')
|
||||
|
||||
if (nargin < 1)
|
||||
error('Too few input arguments');
|
||||
elseif (nargin < 2)
|
||||
export = 'eps';
|
||||
end
|
||||
|
||||
if ~iscell(fileNames)
|
||||
if ischar(fileNames)
|
||||
% one single file name as a string is ok
|
||||
if size(fileNames,1) > 1
|
||||
% this is a char matrix, not ok
|
||||
error('First argument must not be a char matrix');
|
||||
end
|
||||
% wrap in a cell array
|
||||
fileNames = {fileNames};
|
||||
else
|
||||
error('First argument must be a cell array of strings');
|
||||
end
|
||||
end
|
||||
|
||||
if ~ischar(export)
|
||||
error('Second argument must be a string');
|
||||
end
|
||||
|
||||
outpath = 'BenchmarkPlots';
|
||||
[status, errMsg] = mkdir(outpath);
|
||||
if status == 0
|
||||
error(errMsg);
|
||||
end
|
||||
|
||||
nCases = 0;
|
||||
testCases = [];
|
||||
% Read each test result file
|
||||
for fileIdx = 1:length(fileNames)
|
||||
if ~isstr(fileNames{fileIdx})
|
||||
error('First argument must be a cell array of strings');
|
||||
end
|
||||
|
||||
fid = fopen(fileNames{fileIdx}, 'rt');
|
||||
if fid == -1
|
||||
error(['Unable to open ' fileNames{fileIdx}]);
|
||||
end
|
||||
|
||||
version = '1.0';
|
||||
if ~strcmp(fgetl(fid), ['#!benchmark' version])
|
||||
fclose(fid);
|
||||
error(['Requires benchmark file format version ' version]);
|
||||
end
|
||||
|
||||
% Parse results file into testCases struct
|
||||
codec = fgetl(fid);
|
||||
tline = fgetl(fid);
|
||||
while(tline ~= -1)
|
||||
nCases = nCases + 1;
|
||||
|
||||
delim = strfind(tline, ',');
|
||||
name = tline(1:delim(1)-1);
|
||||
% Drop underscored suffix from name
|
||||
underscore = strfind(name, '_');
|
||||
if ~isempty(underscore)
|
||||
name = name(1:underscore(1)-1);
|
||||
end
|
||||
|
||||
resolution = tline(delim(1)+1:delim(2)-1);
|
||||
frameRate = tline(delim(2)+1:end);
|
||||
|
||||
tline = fgetl(fid);
|
||||
delim = strfind(tline, ',');
|
||||
bitrateLabel = tline(1:delim(1)-1);
|
||||
bitrate = sscanf(tline(delim(1):end),',%f');
|
||||
|
||||
tline = fgetl(fid);
|
||||
delim = strfind(tline, ',');
|
||||
psnrLabel = tline(1:delim(1)-1);
|
||||
psnr = sscanf(tline(delim(1):end),',%f');
|
||||
|
||||
|
||||
% Default data for the optional lines
|
||||
speedLabel = 'Default';
|
||||
speed = 0;
|
||||
ssimLabel = 'Default';
|
||||
ssim = 0;
|
||||
|
||||
tline = fgetl(fid);
|
||||
delim = strfind(tline, ',');
|
||||
|
||||
while ~isempty(delim)
|
||||
% More data
|
||||
% Check type of data
|
||||
if strncmp(lower(tline), 'speed', 5)
|
||||
% Speed data included
|
||||
speedLabel = tline(1:delim(1)-1);
|
||||
speed = sscanf(tline(delim(1):end), ',%f');
|
||||
|
||||
tline = fgetl(fid);
|
||||
|
||||
elseif strncmp(lower(tline), 'encode time', 11)
|
||||
% Encode and decode times included
|
||||
% TODO: take care of the data
|
||||
|
||||
% pop two lines from file
|
||||
tline = fgetl(fid);
|
||||
tline = fgetl(fid);
|
||||
|
||||
elseif strncmp(tline, 'SSIM', 4)
|
||||
% SSIM data included
|
||||
ssimLabel = tline(1:delim(1)-1);
|
||||
ssim = sscanf(tline(delim(1):end), ',%f');
|
||||
|
||||
tline = fgetl(fid);
|
||||
end
|
||||
delim = strfind(tline, ',');
|
||||
end
|
||||
|
||||
testCases = [testCases struct('codec', codec, 'name', name, 'resolution', ...
|
||||
resolution, 'frameRate', frameRate, 'bitrate', bitrate, 'psnr', psnr, ...
|
||||
'speed', speed, 'bitrateLabel', bitrateLabel, 'psnrLabel', psnrLabel, ...
|
||||
'speedLabel', speedLabel, ...
|
||||
'ssim', ssim, 'ssimLabel', ssimLabel)];
|
||||
|
||||
tline = fgetl(fid);
|
||||
end
|
||||
|
||||
fclose(fid);
|
||||
end
|
||||
|
||||
i = 0;
|
||||
casesPsnr = testCases;
|
||||
while ~isempty(casesPsnr)
|
||||
i = i + 1;
|
||||
casesPsnr = plotOnePsnr(casesPsnr, i, export, outpath);
|
||||
end
|
||||
|
||||
casesSSIM = testCases;
|
||||
while ~isempty(casesSSIM)
|
||||
i = i + 1;
|
||||
casesSSIM = plotOneSSIM(casesSSIM, i, export, outpath);
|
||||
end
|
||||
|
||||
casesSpeed = testCases;
|
||||
while ~isempty(casesSpeed)
|
||||
if casesSpeed(1).speed == 0
|
||||
casesSpeed = casesSpeed(2:end);
|
||||
else
|
||||
i = i + 1;
|
||||
casesSpeed = plotOneSpeed(casesSpeed, i, export, outpath);
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
%%%%%%%%%%%%%%%%%%
|
||||
%% SUBFUNCTIONS %%
|
||||
%%%%%%%%%%%%%%%%%%
|
||||
|
||||
function casesOut = plotOnePsnr(cases, num, export, outpath)
|
||||
% Find matching specs
|
||||
plotIdx = 1;
|
||||
for i = 2:length(cases)
|
||||
if strcmp(cases(1).resolution, cases(i).resolution) & ...
|
||||
strcmp(cases(1).frameRate, cases(i).frameRate)
|
||||
plotIdx = [plotIdx i];
|
||||
end
|
||||
end
|
||||
|
||||
% Return unplotted cases
|
||||
casesOut = cases(setdiff(1:length(cases), plotIdx));
|
||||
cases = cases(plotIdx);
|
||||
|
||||
% Prune similar results
|
||||
for i = 1:length(cases)
|
||||
simIndx = find(abs(cases(i).bitrate - [cases(i).bitrate(2:end) ; 0]) < 10);
|
||||
while ~isempty(simIndx)
|
||||
diffIndx = setdiff(1:length(cases(i).bitrate), simIndx);
|
||||
cases(i).psnr = cases(i).psnr(diffIndx);
|
||||
cases(i).bitrate = cases(i).bitrate(diffIndx);
|
||||
simIndx = find(abs(cases(i).bitrate - [cases(i).bitrate(2:end) ; 0]) < 10);
|
||||
end
|
||||
end
|
||||
|
||||
% Prepare figure with axis labels and so on
|
||||
hFig = figure(num);
|
||||
clf;
|
||||
hold on;
|
||||
grid on;
|
||||
axis([0 1100 20 50]);
|
||||
set(gca, 'XTick', 0:200:1000);
|
||||
set(gca, 'YTick', 20:10:60);
|
||||
xlabel(cases(1).bitrateLabel);
|
||||
ylabel(cases(1).psnrLabel);
|
||||
res = cases(1).resolution;
|
||||
frRate = cases(1).frameRate;
|
||||
title([res ', ' frRate]);
|
||||
|
||||
hLines = [];
|
||||
codecs = {};
|
||||
sequences = {};
|
||||
i = 0;
|
||||
while ~isempty(cases)
|
||||
i = i + 1;
|
||||
[cases, hLine, codec, sequences] = plotOneCodec(cases, 'bitrate', 'psnr', i, sequences, 1);
|
||||
|
||||
% Stored to generate the legend
|
||||
hLines = [hLines ; hLine];
|
||||
codecs = {codecs{:} codec};
|
||||
end
|
||||
legend(hLines, codecs, 4);
|
||||
hold off;
|
||||
|
||||
if ~strcmp(export, 'none')
|
||||
% Export figure to an eps file
|
||||
res = stripws(res);
|
||||
frRate = stripws(frRate);
|
||||
exportName = [outpath '/psnr-' res '-' frRate];
|
||||
exportfig(hFig, exportName, 'Format', 'eps2', 'Color', 'cmyk');
|
||||
end
|
||||
|
||||
if strcmp(export, 'pdf')
|
||||
% Use the epstopdf utility to convert to pdf
|
||||
system(['epstopdf ' exportName '.eps']);
|
||||
end
|
||||
|
||||
|
||||
function casesOut = plotOneSSIM(cases, num, export, outpath)
|
||||
% Find matching specs
|
||||
plotIdx = 1;
|
||||
for i = 2:length(cases)
|
||||
if strcmp(cases(1).resolution, cases(i).resolution) & ...
|
||||
strcmp(cases(1).frameRate, cases(i).frameRate)
|
||||
plotIdx = [plotIdx i];
|
||||
end
|
||||
end
|
||||
|
||||
% Return unplotted cases
|
||||
casesOut = cases(setdiff(1:length(cases), plotIdx));
|
||||
cases = cases(plotIdx);
|
||||
|
||||
% Prune similar results
|
||||
for i = 1:length(cases)
|
||||
simIndx = find(abs(cases(i).bitrate - [cases(i).bitrate(2:end) ; 0]) < 10);
|
||||
while ~isempty(simIndx)
|
||||
diffIndx = setdiff(1:length(cases(i).bitrate), simIndx);
|
||||
cases(i).ssim = cases(i).ssim(diffIndx);
|
||||
cases(i).bitrate = cases(i).bitrate(diffIndx);
|
||||
simIndx = find(abs(cases(i).bitrate - [cases(i).bitrate(2:end) ; 0]) < 10);
|
||||
end
|
||||
end
|
||||
|
||||
% Prepare figure with axis labels and so on
|
||||
hFig = figure(num);
|
||||
clf;
|
||||
hold on;
|
||||
grid on;
|
||||
axis([0 1100 0.5 1]); % y-limit are set to 'auto' below
|
||||
set(gca, 'XTick', 0:200:1000);
|
||||
%set(gca, 'YTick', 20:10:60);
|
||||
xlabel(cases(1).bitrateLabel);
|
||||
ylabel(cases(1).ssimLabel);
|
||||
res = cases(1).resolution;
|
||||
frRate = cases(1).frameRate;
|
||||
title([res ', ' frRate]);
|
||||
|
||||
hLines = [];
|
||||
codecs = {};
|
||||
sequences = {};
|
||||
i = 0;
|
||||
while ~isempty(cases)
|
||||
i = i + 1;
|
||||
[cases, hLine, codec, sequences] = plotOneCodec(cases, 'bitrate', 'ssim', i, sequences, 1);
|
||||
|
||||
% Stored to generate the legend
|
||||
hLines = [hLines ; hLine];
|
||||
codecs = {codecs{:} codec};
|
||||
end
|
||||
%set(gca,'YLimMode','auto')
|
||||
set(gca,'YLim',[0.5 1])
|
||||
set(gca,'YScale','log')
|
||||
legend(hLines, codecs, 4);
|
||||
hold off;
|
||||
|
||||
if ~strcmp(export, 'none')
|
||||
% Export figure to an eps file
|
||||
res = stripws(res);
|
||||
frRate = stripws(frRate);
|
||||
exportName = [outpath '/psnr-' res '-' frRate];
|
||||
exportfig(hFig, exportName, 'Format', 'eps2', 'Color', 'cmyk');
|
||||
end
|
||||
|
||||
if strcmp(export, 'pdf')
|
||||
% Use the epstopdf utility to convert to pdf
|
||||
system(['epstopdf ' exportName '.eps']);
|
||||
end
|
||||
|
||||
|
||||
function casesOut = plotOneSpeed(cases, num, export, outpath)
|
||||
% Find matching specs
|
||||
plotIdx = 1;
|
||||
for i = 2:length(cases)
|
||||
if strcmp(cases(1).resolution, cases(i).resolution) & ...
|
||||
strcmp(cases(1).frameRate, cases(i).frameRate) & ...
|
||||
strcmp(cases(1).name, cases(i).name)
|
||||
plotIdx = [plotIdx i];
|
||||
end
|
||||
end
|
||||
|
||||
% Return unplotted cases
|
||||
casesOut = cases(setdiff(1:length(cases), plotIdx));
|
||||
cases = cases(plotIdx);
|
||||
|
||||
% Prune similar results
|
||||
for i = 1:length(cases)
|
||||
simIndx = find(abs(cases(i).psnr - [cases(i).psnr(2:end) ; 0]) < 0.25);
|
||||
while ~isempty(simIndx)
|
||||
diffIndx = setdiff(1:length(cases(i).psnr), simIndx);
|
||||
cases(i).psnr = cases(i).psnr(diffIndx);
|
||||
cases(i).speed = cases(i).speed(diffIndx);
|
||||
simIndx = find(abs(cases(i).psnr - [cases(i).psnr(2:end) ; 0]) < 0.25);
|
||||
end
|
||||
end
|
||||
|
||||
hFig = figure(num);
|
||||
clf;
|
||||
hold on;
|
||||
%grid on;
|
||||
xlabel(cases(1).psnrLabel);
|
||||
ylabel(cases(1).speedLabel);
|
||||
res = cases(1).resolution;
|
||||
name = cases(1).name;
|
||||
frRate = cases(1).frameRate;
|
||||
title([name ', ' res ', ' frRate]);
|
||||
|
||||
hLines = [];
|
||||
codecs = {};
|
||||
sequences = {};
|
||||
i = 0;
|
||||
while ~isempty(cases)
|
||||
i = i + 1;
|
||||
[cases, hLine, codec, sequences] = plotOneCodec(cases, 'psnr', 'speed', i, sequences, 0);
|
||||
|
||||
% Stored to generate the legend
|
||||
hLines = [hLines ; hLine];
|
||||
codecs = {codecs{:} codec};
|
||||
end
|
||||
legend(hLines, codecs, 1);
|
||||
hold off;
|
||||
|
||||
if ~strcmp(export, 'none')
|
||||
% Export figure to an eps file
|
||||
res = stripws(res);
|
||||
frRate = stripws(frRate);
|
||||
exportName = [outpath '/speed-' name '-' res '-' frRate];
|
||||
exportfig(hFig, exportName, 'Format', 'eps2', 'Color', 'cmyk');
|
||||
end
|
||||
|
||||
if strcmp(export, 'pdf')
|
||||
% Use the epstopdf utility to convert to pdf
|
||||
system(['epstopdf ' exportName '.eps']);
|
||||
end
|
||||
|
||||
|
||||
function [casesOut, hLine, codec, sequences] = plotOneCodec(cases, xfield, yfield, num, sequences, annotatePlot)
|
||||
plotStr = {'gx-', 'bo-', 'r^-', 'kd-', 'cx-', 'go--', 'b^--'};
|
||||
% Find matching codecs
|
||||
plotIdx = 1;
|
||||
for i = 2:length(cases)
|
||||
if strcmp(cases(1).codec, cases(i).codec)
|
||||
plotIdx = [plotIdx i];
|
||||
end
|
||||
end
|
||||
|
||||
% Return unplotted cases
|
||||
casesOut = cases(setdiff(1:length(cases), plotIdx));
|
||||
cases = cases(plotIdx);
|
||||
|
||||
for i = 1:length(cases)
|
||||
% Plot a single case
|
||||
hLine = plot(getfield(cases(i), xfield), getfield(cases(i), yfield), plotStr{num}, ...
|
||||
'LineWidth', 1.1, 'MarkerSize', 6);
|
||||
end
|
||||
|
||||
% hLine handle and codec are returned to construct the legend afterwards
|
||||
codec = cases(1).codec;
|
||||
|
||||
if annotatePlot == 0
|
||||
return;
|
||||
end
|
||||
|
||||
for i = 1:length(cases)
|
||||
% Print the codec name as a text label
|
||||
% Ensure each codec is only printed once
|
||||
sequencePlotted = 0;
|
||||
for j = 1:length(sequences)
|
||||
if strcmp(cases(i).name, sequences{j})
|
||||
sequencePlotted = 1;
|
||||
break;
|
||||
end
|
||||
end
|
||||
|
||||
if sequencePlotted == 0
|
||||
text(getfield(cases(i), xfield, {1}), getfield(cases(i), yfield, {1}), ...
|
||||
[' ' cases(i).name]);
|
||||
sequences = {sequences{:} cases(i).name};
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
% Strip whitespace from string
|
||||
function str = stripws(str)
|
||||
if ~isstr(str)
|
||||
error('String required');
|
||||
end
|
||||
str = str(setdiff(1:length(str), find(isspace(str) == 1)));
|
||||
160
webrtc/modules/video_coding/codecs/test_framework/test.cc
Normal file
160
webrtc/modules/video_coding/codecs/test_framework/test.cc
Normal file
@@ -0,0 +1,160 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "test.h"
|
||||
|
||||
#include <cstring>
|
||||
#include <iostream>
|
||||
|
||||
#include "testsupport/metrics/video_metrics.h"
|
||||
|
||||
using namespace webrtc;
|
||||
|
||||
long filesize(const char *filename); // local function defined at end of file
|
||||
|
||||
CodecTest::CodecTest(std::string name, std::string description)
|
||||
:
|
||||
_bitRate(0),
|
||||
_inname(""),
|
||||
_outname(""),
|
||||
_encodedName(""),
|
||||
_name(name),
|
||||
_description(description)
|
||||
{
|
||||
memset(&_inst, 0, sizeof(_inst));
|
||||
unsigned int seed = static_cast<unsigned int>(0);
|
||||
std::srand(seed);
|
||||
}
|
||||
|
||||
CodecTest::CodecTest(std::string name, std::string description,
|
||||
WebRtc_UWord32 bitRate)
|
||||
:
|
||||
_bitRate(bitRate),
|
||||
_inname(""),
|
||||
_outname(""),
|
||||
_encodedName(""),
|
||||
_name(name),
|
||||
_description(description)
|
||||
{
|
||||
memset(&_inst, 0, sizeof(_inst));
|
||||
unsigned int seed = static_cast<unsigned int>(0);
|
||||
std::srand(seed);
|
||||
}
|
||||
|
||||
void
|
||||
CodecTest::Print()
|
||||
{
|
||||
std::cout << _name << " completed!" << std::endl;
|
||||
(*_log) << _name << std::endl;
|
||||
(*_log) << _description << std::endl;
|
||||
(*_log) << "Input file: " << _inname << std::endl;
|
||||
(*_log) << "Output file: " << _outname << std::endl;
|
||||
webrtc::test::QualityMetricsResult psnr;
|
||||
webrtc::test::QualityMetricsResult ssim;
|
||||
I420PSNRFromFiles(_inname.c_str(), _outname.c_str(), _inst.width,
|
||||
_inst.height, &psnr);
|
||||
I420SSIMFromFiles(_inname.c_str(), _outname.c_str(), _inst.width,
|
||||
_inst.height, &ssim);
|
||||
|
||||
(*_log) << "PSNR: " << psnr.average << std::endl;
|
||||
std::cout << "PSNR: " << psnr.average << std::endl << std::endl;
|
||||
(*_log) << "SSIM: " << ssim.average << std::endl;
|
||||
std::cout << "SSIM: " << ssim.average << std::endl << std::endl;
|
||||
(*_log) << std::endl;
|
||||
}
|
||||
|
||||
void
|
||||
CodecTest::Setup()
|
||||
{
|
||||
int widhei = _inst.width*_inst.height;
|
||||
_lengthSourceFrame = 3*widhei/2;
|
||||
_sourceBuffer = new unsigned char[_lengthSourceFrame];
|
||||
}
|
||||
|
||||
void
|
||||
CodecTest::CodecSettings(int width, int height,
|
||||
WebRtc_UWord32 frameRate /*=30*/,
|
||||
WebRtc_UWord32 bitRate /*=0*/)
|
||||
{
|
||||
if (bitRate > 0)
|
||||
{
|
||||
_bitRate = bitRate;
|
||||
}
|
||||
else if (_bitRate == 0)
|
||||
{
|
||||
_bitRate = 600;
|
||||
}
|
||||
_inst.codecType = kVideoCodecVP8;
|
||||
_inst.codecSpecific.VP8.feedbackModeOn = true;
|
||||
_inst.maxFramerate = (unsigned char)frameRate;
|
||||
_inst.startBitrate = (int)_bitRate;
|
||||
_inst.maxBitrate = 8000;
|
||||
_inst.width = width;
|
||||
_inst.height = height;
|
||||
}
|
||||
|
||||
void
|
||||
CodecTest::Teardown()
|
||||
{
|
||||
delete [] _sourceBuffer;
|
||||
}
|
||||
|
||||
void
|
||||
CodecTest::SetEncoder(webrtc::VideoEncoder*encoder)
|
||||
{
|
||||
_encoder = encoder;
|
||||
}
|
||||
|
||||
void
|
||||
CodecTest::SetDecoder(VideoDecoder*decoder)
|
||||
{
|
||||
_decoder = decoder;
|
||||
}
|
||||
|
||||
void
|
||||
CodecTest::SetLog(std::fstream* log)
|
||||
{
|
||||
_log = log;
|
||||
}
|
||||
|
||||
double CodecTest::ActualBitRate(int nFrames)
|
||||
{
|
||||
return 8.0 * _sumEncBytes / (nFrames / _inst.maxFramerate);
|
||||
}
|
||||
|
||||
bool CodecTest::PacketLoss(double lossRate, int /*thrown*/)
|
||||
{
|
||||
return RandUniform() < lossRate;
|
||||
}
|
||||
|
||||
void
|
||||
CodecTest::VideoEncodedBufferToEncodedImage(VideoFrame& videoBuffer,
|
||||
EncodedImage &image)
|
||||
{
|
||||
image._buffer = videoBuffer.Buffer();
|
||||
image._length = videoBuffer.Length();
|
||||
image._size = videoBuffer.Size();
|
||||
//image._frameType = static_cast<VideoFrameType>
|
||||
// (videoBuffer.GetFrameType());
|
||||
image._timeStamp = videoBuffer.TimeStamp();
|
||||
image._encodedWidth = videoBuffer.Width();
|
||||
image._encodedHeight = videoBuffer.Height();
|
||||
image._completeFrame = true;
|
||||
}
|
||||
|
||||
long filesize(const char *filename)
|
||||
{
|
||||
FILE *f = fopen(filename,"rb"); /* open the file in read only */
|
||||
long size = 0;
|
||||
if (fseek(f,0,SEEK_END)==0) /* seek was successful */
|
||||
size = ftell(f);
|
||||
fclose(f);
|
||||
return size;
|
||||
}
|
||||
70
webrtc/modules/video_coding/codecs/test_framework/test.h
Normal file
70
webrtc/modules/video_coding/codecs/test_framework/test.h
Normal file
@@ -0,0 +1,70 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAWEWORK_TEST_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAWEWORK_TEST_H_
|
||||
|
||||
#include "modules/interface/module_common_types.h"
|
||||
#include "video_codec_interface.h"
|
||||
#include <string>
|
||||
#include <fstream>
|
||||
#include <cstdlib>
|
||||
|
||||
class CodecTest
|
||||
{
|
||||
public:
|
||||
CodecTest(std::string name, std::string description);
|
||||
CodecTest(std::string name, std::string description,
|
||||
WebRtc_UWord32 bitRate);
|
||||
virtual ~CodecTest() {};
|
||||
virtual void Perform()=0;
|
||||
virtual void Print();
|
||||
void SetEncoder(webrtc::VideoEncoder *encoder);
|
||||
void SetDecoder(webrtc::VideoDecoder *decoder);
|
||||
void SetLog(std::fstream* log);
|
||||
|
||||
protected:
|
||||
virtual void Setup();
|
||||
virtual void CodecSettings(int width,
|
||||
int height,
|
||||
WebRtc_UWord32 frameRate=30,
|
||||
WebRtc_UWord32 bitRate=0);
|
||||
virtual void Teardown();
|
||||
double ActualBitRate(int nFrames);
|
||||
virtual bool PacketLoss(double lossRate, int /*thrown*/);
|
||||
static double RandUniform() { return (std::rand() + 1.0)/(RAND_MAX + 1.0); }
|
||||
static void VideoEncodedBufferToEncodedImage(
|
||||
webrtc::VideoFrame& videoBuffer,
|
||||
webrtc::EncodedImage &image);
|
||||
|
||||
webrtc::VideoEncoder* _encoder;
|
||||
webrtc::VideoDecoder* _decoder;
|
||||
WebRtc_UWord32 _bitRate;
|
||||
unsigned int _lengthSourceFrame;
|
||||
unsigned char* _sourceBuffer;
|
||||
webrtc::VideoFrame _inputVideoBuffer;
|
||||
// TODO(mikhal): For now using VideoFrame for encodedBuffer, should use a
|
||||
// designated class.
|
||||
webrtc::VideoFrame _encodedVideoBuffer;
|
||||
webrtc::VideoFrame _decodedVideoBuffer;
|
||||
webrtc::VideoCodec _inst;
|
||||
std::fstream* _log;
|
||||
std::string _inname;
|
||||
std::string _outname;
|
||||
std::string _encodedName;
|
||||
int _sumEncBytes;
|
||||
|
||||
private:
|
||||
std::string _name;
|
||||
std::string _description;
|
||||
|
||||
};
|
||||
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAWEWORK_TEST_H_
|
||||
@@ -0,0 +1,60 @@
|
||||
# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
#
|
||||
# Use of this source code is governed by a BSD-style license
|
||||
# that can be found in the LICENSE file in the root of the source
|
||||
# tree. An additional intellectual property rights grant can be found
|
||||
# in the file PATENTS. All contributing project authors may
|
||||
# be found in the AUTHORS file in the root of the source tree.
|
||||
|
||||
{
|
||||
'conditions': [
|
||||
['include_tests==1', {
|
||||
'targets': [
|
||||
{
|
||||
'target_name': 'test_framework',
|
||||
'type': '<(library)',
|
||||
|
||||
'dependencies': [
|
||||
'<(DEPTH)/testing/gtest.gyp:gtest',
|
||||
'<(webrtc_root)/common_video/common_video.gyp:common_video',
|
||||
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
|
||||
'<(webrtc_root)/test/metrics.gyp:metrics',
|
||||
'<(webrtc_root)/test/test.gyp:test_support',
|
||||
],
|
||||
|
||||
'include_dirs': [
|
||||
'../interface',
|
||||
'<(DEPTH)/testing/gtest/include',
|
||||
'../../../../common_video/interface',
|
||||
],
|
||||
|
||||
'direct_dependent_settings': {
|
||||
'include_dirs': [
|
||||
'../interface',
|
||||
],
|
||||
},
|
||||
|
||||
'sources': [
|
||||
# header files
|
||||
'benchmark.h',
|
||||
'normal_async_test.h',
|
||||
'normal_test.h',
|
||||
'packet_loss_test.h',
|
||||
'test.h',
|
||||
'unit_test.h',
|
||||
'video_source.h',
|
||||
|
||||
# source files
|
||||
'benchmark.cc',
|
||||
'normal_async_test.cc',
|
||||
'normal_test.cc',
|
||||
'packet_loss_test.cc',
|
||||
'test.cc',
|
||||
'unit_test.cc',
|
||||
'video_source.cc',
|
||||
],
|
||||
},
|
||||
], # targets
|
||||
}], # include_tests
|
||||
], # conditions
|
||||
}
|
||||
764
webrtc/modules/video_coding/codecs/test_framework/unit_test.cc
Normal file
764
webrtc/modules/video_coding/codecs/test_framework/unit_test.cc
Normal file
@@ -0,0 +1,764 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include <math.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
|
||||
#include <cassert>
|
||||
|
||||
#include "gtest/gtest.h"
|
||||
#include "testsupport/fileutils.h"
|
||||
#include "tick_util.h"
|
||||
#include "unit_test.h"
|
||||
#include "video_source.h"
|
||||
|
||||
using namespace webrtc;
|
||||
|
||||
UnitTest::UnitTest()
|
||||
:
|
||||
CodecTest("UnitTest", "Unit test"),
|
||||
_tests(0),
|
||||
_errors(0),
|
||||
_source(NULL),
|
||||
_refFrame(NULL),
|
||||
_refEncFrame(NULL),
|
||||
_refDecFrame(NULL),
|
||||
_refEncFrameLength(0),
|
||||
_sourceFile(NULL),
|
||||
_encodeCompleteCallback(NULL),
|
||||
_decodeCompleteCallback(NULL)
|
||||
{
|
||||
}
|
||||
|
||||
UnitTest::UnitTest(std::string name, std::string description)
|
||||
:
|
||||
CodecTest(name, description),
|
||||
_tests(0),
|
||||
_errors(0),
|
||||
_source(NULL),
|
||||
_refFrame(NULL),
|
||||
_refEncFrame(NULL),
|
||||
_refDecFrame(NULL),
|
||||
_refEncFrameLength(0),
|
||||
_sourceFile(NULL),
|
||||
_encodeCompleteCallback(NULL),
|
||||
_decodeCompleteCallback(NULL)
|
||||
{
|
||||
}
|
||||
|
||||
UnitTest::~UnitTest()
|
||||
{
|
||||
if (_encodeCompleteCallback) {
|
||||
delete _encodeCompleteCallback;
|
||||
}
|
||||
|
||||
if (_decodeCompleteCallback) {
|
||||
delete _decodeCompleteCallback;
|
||||
}
|
||||
|
||||
if (_source) {
|
||||
delete _source;
|
||||
}
|
||||
|
||||
if (_refFrame) {
|
||||
delete [] _refFrame;
|
||||
}
|
||||
|
||||
if (_refDecFrame) {
|
||||
delete [] _refDecFrame;
|
||||
}
|
||||
|
||||
if (_sourceBuffer) {
|
||||
delete [] _sourceBuffer;
|
||||
}
|
||||
|
||||
if (_sourceFile) {
|
||||
fclose(_sourceFile);
|
||||
}
|
||||
|
||||
if (_refEncFrame) {
|
||||
delete [] _refEncFrame;
|
||||
}
|
||||
}
|
||||
|
||||
WebRtc_Word32
|
||||
UnitTestEncodeCompleteCallback::Encoded(EncodedImage& encodedImage,
|
||||
const webrtc::CodecSpecificInfo* codecSpecificInfo,
|
||||
const webrtc::RTPFragmentationHeader*
|
||||
fragmentation)
|
||||
{
|
||||
_encodedVideoBuffer->VerifyAndAllocate(encodedImage._size);
|
||||
_encodedVideoBuffer->CopyFrame(encodedImage._size, encodedImage._buffer);
|
||||
_encodedVideoBuffer->SetLength(encodedImage._length);
|
||||
// _encodedVideoBuffer->SetFrameType(encodedImage._frameType);
|
||||
_encodedVideoBuffer->SetWidth(
|
||||
(WebRtc_UWord16)encodedImage._encodedWidth);
|
||||
_encodedVideoBuffer->SetHeight(
|
||||
(WebRtc_UWord16)encodedImage._encodedHeight);
|
||||
_encodedVideoBuffer->SetTimeStamp(encodedImage._timeStamp);
|
||||
_encodeComplete = true;
|
||||
_encodedFrameType = encodedImage._frameType;
|
||||
return 0;
|
||||
}
|
||||
|
||||
WebRtc_Word32 UnitTestDecodeCompleteCallback::Decoded(VideoFrame& image)
|
||||
{
|
||||
_decodedVideoBuffer->CopyFrame(image.Length(), image.Buffer());
|
||||
_decodedVideoBuffer->SetWidth(image.Width());
|
||||
_decodedVideoBuffer->SetHeight(image.Height());
|
||||
_decodedVideoBuffer->SetTimeStamp(image.TimeStamp());
|
||||
_decodeComplete = true;
|
||||
return 0;
|
||||
}
|
||||
|
||||
bool
|
||||
UnitTestEncodeCompleteCallback::EncodeComplete()
|
||||
{
|
||||
if (_encodeComplete)
|
||||
{
|
||||
_encodeComplete = false;
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
VideoFrameType
|
||||
UnitTestEncodeCompleteCallback::EncodedFrameType() const
|
||||
{
|
||||
return _encodedFrameType;
|
||||
}
|
||||
|
||||
bool
|
||||
UnitTestDecodeCompleteCallback::DecodeComplete()
|
||||
{
|
||||
if (_decodeComplete)
|
||||
{
|
||||
_decodeComplete = false;
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
WebRtc_UWord32
|
||||
UnitTest::WaitForEncodedFrame() const
|
||||
{
|
||||
WebRtc_Word64 startTime = TickTime::MillisecondTimestamp();
|
||||
while (TickTime::MillisecondTimestamp() - startTime < kMaxWaitEncTimeMs)
|
||||
{
|
||||
if (_encodeCompleteCallback->EncodeComplete())
|
||||
{
|
||||
return _encodedVideoBuffer.Length();
|
||||
}
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
WebRtc_UWord32
|
||||
UnitTest::WaitForDecodedFrame() const
|
||||
{
|
||||
WebRtc_Word64 startTime = TickTime::MillisecondTimestamp();
|
||||
while (TickTime::MillisecondTimestamp() - startTime < kMaxWaitDecTimeMs)
|
||||
{
|
||||
if (_decodeCompleteCallback->DecodeComplete())
|
||||
{
|
||||
return _decodedVideoBuffer.Length();
|
||||
}
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
WebRtc_UWord32
|
||||
UnitTest::CodecSpecific_SetBitrate(WebRtc_UWord32 bitRate,
|
||||
WebRtc_UWord32 /* frameRate */)
|
||||
{
|
||||
return _encoder->SetRates(bitRate, _inst.maxFramerate);
|
||||
}
|
||||
|
||||
void
|
||||
UnitTest::Setup()
|
||||
{
|
||||
// Use _sourceFile as a check to prevent multiple Setup() calls.
|
||||
if (_sourceFile != NULL)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
if (_encodeCompleteCallback == NULL)
|
||||
{
|
||||
_encodeCompleteCallback =
|
||||
new UnitTestEncodeCompleteCallback(&_encodedVideoBuffer);
|
||||
}
|
||||
if (_decodeCompleteCallback == NULL)
|
||||
{
|
||||
_decodeCompleteCallback =
|
||||
new UnitTestDecodeCompleteCallback(&_decodedVideoBuffer);
|
||||
}
|
||||
|
||||
_encoder->RegisterEncodeCompleteCallback(_encodeCompleteCallback);
|
||||
_decoder->RegisterDecodeCompleteCallback(_decodeCompleteCallback);
|
||||
|
||||
_source = new VideoSource(webrtc::test::ProjectRootPath() +
|
||||
"resources/foreman_cif.yuv", kCIF);
|
||||
|
||||
_lengthSourceFrame = _source->GetFrameLength();
|
||||
_refFrame = new unsigned char[_lengthSourceFrame];
|
||||
_refDecFrame = new unsigned char[_lengthSourceFrame];
|
||||
_sourceBuffer = new unsigned char [_lengthSourceFrame];
|
||||
_sourceFile = fopen(_source->GetFileName().c_str(), "rb");
|
||||
ASSERT_TRUE(_sourceFile != NULL);
|
||||
|
||||
_inst.maxFramerate = _source->GetFrameRate();
|
||||
_bitRate = 300;
|
||||
_inst.startBitrate = 300;
|
||||
_inst.maxBitrate = 4000;
|
||||
_inst.width = _source->GetWidth();
|
||||
_inst.height = _source->GetHeight();
|
||||
_inst.codecSpecific.VP8.denoisingOn = true;
|
||||
|
||||
// Get input frame.
|
||||
_inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
|
||||
ASSERT_TRUE(fread(_refFrame, 1, _lengthSourceFrame, _sourceFile)
|
||||
== _lengthSourceFrame);
|
||||
_inputVideoBuffer.CopyFrame(_lengthSourceFrame, _refFrame);
|
||||
_inputVideoBuffer.SetWidth(_source->GetWidth());
|
||||
_inputVideoBuffer.SetHeight(_source->GetHeight());
|
||||
rewind(_sourceFile);
|
||||
|
||||
// Get a reference encoded frame.
|
||||
_encodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
|
||||
|
||||
// Ensures our initial parameters are valid.
|
||||
EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
|
||||
_encoder->Encode(_inputVideoBuffer, NULL, NULL);
|
||||
_refEncFrameLength = WaitForEncodedFrame();
|
||||
ASSERT_TRUE(_refEncFrameLength > 0);
|
||||
_refEncFrame = new unsigned char[_refEncFrameLength];
|
||||
memcpy(_refEncFrame, _encodedVideoBuffer.Buffer(), _refEncFrameLength);
|
||||
|
||||
// Get a reference decoded frame.
|
||||
_decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
|
||||
EXPECT_TRUE(_decoder->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK);
|
||||
ASSERT_FALSE(SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK);
|
||||
|
||||
unsigned int frameLength = 0;
|
||||
int i=0;
|
||||
while (frameLength == 0)
|
||||
{
|
||||
if (i > 0)
|
||||
{
|
||||
// Insert yet another frame
|
||||
_inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
|
||||
ASSERT_TRUE(fread(_refFrame, 1, _lengthSourceFrame,
|
||||
_sourceFile) == _lengthSourceFrame);
|
||||
_inputVideoBuffer.CopyFrame(_lengthSourceFrame, _refFrame);
|
||||
_inputVideoBuffer.SetWidth(_source->GetWidth());
|
||||
_inputVideoBuffer.SetHeight(_source->GetHeight());
|
||||
_encoder->Encode(_inputVideoBuffer, NULL, NULL);
|
||||
ASSERT_TRUE(WaitForEncodedFrame() > 0);
|
||||
}
|
||||
EncodedImage encodedImage;
|
||||
VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
|
||||
ASSERT_TRUE(_decoder->Decode(encodedImage, 0, NULL)
|
||||
== WEBRTC_VIDEO_CODEC_OK);
|
||||
frameLength = WaitForDecodedFrame();
|
||||
_encodedVideoBuffer.SetLength(0);
|
||||
i++;
|
||||
}
|
||||
rewind(_sourceFile);
|
||||
EXPECT_TRUE(frameLength == _lengthSourceFrame);
|
||||
memcpy(_refDecFrame, _decodedVideoBuffer.Buffer(), _lengthSourceFrame);
|
||||
}
|
||||
|
||||
void
|
||||
UnitTest::Teardown()
|
||||
{
|
||||
// Use _sourceFile as a check to prevent multiple Teardown() calls.
|
||||
if (_sourceFile == NULL)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
_encoder->Release();
|
||||
_decoder->Release();
|
||||
|
||||
fclose(_sourceFile);
|
||||
_sourceFile = NULL;
|
||||
delete [] _refFrame;
|
||||
_refFrame = NULL;
|
||||
delete [] _refEncFrame;
|
||||
_refEncFrame = NULL;
|
||||
delete [] _refDecFrame;
|
||||
_refDecFrame = NULL;
|
||||
delete [] _sourceBuffer;
|
||||
_sourceBuffer = NULL;
|
||||
}
|
||||
|
||||
void
|
||||
UnitTest::Print()
|
||||
{
|
||||
}
|
||||
|
||||
int
|
||||
UnitTest::DecodeWithoutAssert()
|
||||
{
|
||||
EncodedImage encodedImage;
|
||||
VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
|
||||
int ret = _decoder->Decode(encodedImage, 0, NULL);
|
||||
int frameLength = WaitForDecodedFrame();
|
||||
_encodedVideoBuffer.SetLength(0);
|
||||
return ret == WEBRTC_VIDEO_CODEC_OK ? frameLength : ret;
|
||||
}
|
||||
|
||||
int
|
||||
UnitTest::Decode()
|
||||
{
|
||||
EncodedImage encodedImage;
|
||||
VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
|
||||
if (encodedImage._length == 0)
|
||||
{
|
||||
return WEBRTC_VIDEO_CODEC_OK;
|
||||
}
|
||||
int ret = _decoder->Decode(encodedImage, 0, NULL);
|
||||
unsigned int frameLength = WaitForDecodedFrame();
|
||||
assert(ret == WEBRTC_VIDEO_CODEC_OK && (frameLength == 0 || frameLength
|
||||
== _lengthSourceFrame));
|
||||
EXPECT_TRUE(ret == WEBRTC_VIDEO_CODEC_OK && (frameLength == 0 || frameLength
|
||||
== _lengthSourceFrame));
|
||||
_encodedVideoBuffer.SetLength(0);
|
||||
return ret == WEBRTC_VIDEO_CODEC_OK ? frameLength : ret;
|
||||
}
|
||||
|
||||
// Test pure virtual VideoEncoder and VideoDecoder APIs.
|
||||
void
|
||||
UnitTest::Perform()
|
||||
{
|
||||
UnitTest::Setup();
|
||||
int frameLength;
|
||||
VideoFrame inputImage;
|
||||
EncodedImage encodedImage;
|
||||
|
||||
//----- Encoder parameter tests -----
|
||||
|
||||
//-- Calls before InitEncode() --
|
||||
// We want to revert the initialization done in Setup().
|
||||
EXPECT_TRUE(_encoder->Release() == WEBRTC_VIDEO_CODEC_OK);
|
||||
EXPECT_TRUE(_encoder->Encode(_inputVideoBuffer, NULL, NULL)
|
||||
== WEBRTC_VIDEO_CODEC_UNINITIALIZED);
|
||||
|
||||
//-- InitEncode() errors --
|
||||
// Null pointer.
|
||||
EXPECT_TRUE(_encoder->InitEncode(NULL, 1, 1440) ==
|
||||
WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
|
||||
// bit rate exceeds max bit rate
|
||||
WebRtc_Word32 tmpBitRate = _inst.startBitrate;
|
||||
WebRtc_Word32 tmpMaxBitRate = _inst.maxBitrate;
|
||||
_inst.startBitrate = 4000;
|
||||
_inst.maxBitrate = 3000;
|
||||
EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) ==
|
||||
WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
|
||||
_inst.startBitrate = tmpBitRate;
|
||||
_inst.maxBitrate = tmpMaxBitRate; //unspecified value
|
||||
|
||||
// Bad framerate.
|
||||
_inst.maxFramerate = 0;
|
||||
EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) ==
|
||||
WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
|
||||
// Seems like we should allow any framerate in range [0, 255].
|
||||
//_inst.frameRate = 100;
|
||||
//EXPECT_TRUE(_encoder->InitEncode(&_inst, 1) == -1); // FAILS
|
||||
_inst.maxFramerate = 30;
|
||||
|
||||
// Bad bitrate.
|
||||
_inst.startBitrate = -1;
|
||||
EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) ==
|
||||
WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
|
||||
_inst.maxBitrate = _inst.startBitrate - 1;
|
||||
EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) ==
|
||||
WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
|
||||
_inst.maxBitrate = 0;
|
||||
_inst.startBitrate = 300;
|
||||
|
||||
// Bad maxBitRate.
|
||||
_inst.maxBitrate = 200;
|
||||
EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) ==
|
||||
WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
|
||||
_inst.maxBitrate = 4000;
|
||||
|
||||
// Bad width.
|
||||
_inst.width = 0;
|
||||
EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) < 0);
|
||||
_inst.width = _source->GetWidth();
|
||||
|
||||
// Bad height.
|
||||
_inst.height = 0;
|
||||
EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) < 0);
|
||||
_inst.height = _source->GetHeight();
|
||||
|
||||
// Bad number of cores.
|
||||
EXPECT_TRUE(_encoder->InitEncode(&_inst, -1, 1440) ==
|
||||
WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
|
||||
|
||||
EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
|
||||
|
||||
//-- Encode() errors --
|
||||
|
||||
// inputVideoBuffer unallocated.
|
||||
_inputVideoBuffer.Free();
|
||||
inputImage.Free();
|
||||
EXPECT_TRUE(_encoder->Encode(inputImage, NULL, NULL) ==
|
||||
WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
|
||||
_inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
|
||||
_inputVideoBuffer.CopyFrame(_lengthSourceFrame, _refFrame);
|
||||
_inputVideoBuffer.SetWidth(_source->GetWidth());
|
||||
_inputVideoBuffer.SetHeight(_source->GetHeight());
|
||||
|
||||
//----- Encoder stress tests -----
|
||||
|
||||
// Vary frame rate and I-frame request.
|
||||
for (int i = 1; i <= 60; i++)
|
||||
{
|
||||
VideoFrameType frame_type = !(i % 2) ? kKeyFrame : kDeltaFrame;
|
||||
std::vector<VideoFrameType> frame_types(1, frame_type);
|
||||
EXPECT_TRUE(_encoder->Encode(_inputVideoBuffer, NULL, &frame_types) ==
|
||||
WEBRTC_VIDEO_CODEC_OK);
|
||||
EXPECT_TRUE(WaitForEncodedFrame() > 0);
|
||||
}
|
||||
|
||||
// Init then encode.
|
||||
_encodedVideoBuffer.SetLength(0);
|
||||
EXPECT_TRUE(_encoder->Encode(_inputVideoBuffer, NULL, NULL) ==
|
||||
WEBRTC_VIDEO_CODEC_OK);
|
||||
EXPECT_TRUE(WaitForEncodedFrame() > 0);
|
||||
|
||||
EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
|
||||
_encoder->Encode(_inputVideoBuffer, NULL, NULL);
|
||||
frameLength = WaitForEncodedFrame();
|
||||
EXPECT_TRUE(frameLength > 0);
|
||||
EXPECT_TRUE(CheckIfBitExact(_refEncFrame, _refEncFrameLength,
|
||||
_encodedVideoBuffer.Buffer(), frameLength) == true);
|
||||
|
||||
// Reset then encode.
|
||||
_encodedVideoBuffer.SetLength(0);
|
||||
EXPECT_TRUE(_encoder->Encode(_inputVideoBuffer, NULL, NULL) ==
|
||||
WEBRTC_VIDEO_CODEC_OK);
|
||||
WaitForEncodedFrame();
|
||||
EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
|
||||
_encoder->Encode(_inputVideoBuffer, NULL, NULL);
|
||||
frameLength = WaitForEncodedFrame();
|
||||
EXPECT_TRUE(frameLength > 0);
|
||||
EXPECT_TRUE(CheckIfBitExact(_refEncFrame, _refEncFrameLength,
|
||||
_encodedVideoBuffer.Buffer(), frameLength) == true);
|
||||
|
||||
// Release then encode.
|
||||
_encodedVideoBuffer.SetLength(0);
|
||||
EXPECT_TRUE(_encoder->Encode(_inputVideoBuffer, NULL, NULL) ==
|
||||
WEBRTC_VIDEO_CODEC_OK);
|
||||
WaitForEncodedFrame();
|
||||
EXPECT_TRUE(_encoder->Release() == WEBRTC_VIDEO_CODEC_OK);
|
||||
EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
|
||||
_encoder->Encode(_inputVideoBuffer, NULL, NULL);
|
||||
frameLength = WaitForEncodedFrame();
|
||||
EXPECT_TRUE(frameLength > 0);
|
||||
EXPECT_TRUE(CheckIfBitExact(_refEncFrame, _refEncFrameLength,
|
||||
_encodedVideoBuffer.Buffer(), frameLength) == true);
|
||||
|
||||
//----- Decoder parameter tests -----
|
||||
|
||||
//-- Calls before InitDecode() --
|
||||
// We want to revert the initialization done in Setup().
|
||||
EXPECT_TRUE(_decoder->Release() == WEBRTC_VIDEO_CODEC_OK);
|
||||
VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
|
||||
EXPECT_TRUE(_decoder->Decode(encodedImage, false, NULL) ==
|
||||
WEBRTC_VIDEO_CODEC_UNINITIALIZED);
|
||||
WaitForDecodedFrame();
|
||||
EXPECT_TRUE(_decoder->Reset() == WEBRTC_VIDEO_CODEC_UNINITIALIZED);
|
||||
EXPECT_TRUE(_decoder->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK);
|
||||
ASSERT_FALSE(SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK);
|
||||
|
||||
//-- Decode() errors --
|
||||
// Unallocated encodedVideoBuffer.
|
||||
_encodedVideoBuffer.Free();
|
||||
VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
|
||||
encodedImage._length = 10; // Buffer NULL but length > 0
|
||||
EXPECT_EQ(_decoder->Decode(encodedImage, false, NULL),
|
||||
WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
|
||||
_encodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
|
||||
|
||||
//----- Decoder stress tests -----
|
||||
unsigned char* tmpBuf = new unsigned char[_lengthSourceFrame];
|
||||
|
||||
// "Random" and zero data.
|
||||
// We either expect an error, or at the least, no output.
|
||||
// This relies on the codec's ability to detect an erroneous bitstream.
|
||||
EXPECT_TRUE(_decoder->Reset() == WEBRTC_VIDEO_CODEC_OK);
|
||||
EXPECT_TRUE(_decoder->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK);
|
||||
ASSERT_FALSE(SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK);
|
||||
for (int i = 0; i < 100; i++)
|
||||
{
|
||||
ASSERT_TRUE(fread(tmpBuf, 1, _refEncFrameLength, _sourceFile)
|
||||
== _refEncFrameLength);
|
||||
_encodedVideoBuffer.CopyFrame(_refEncFrameLength, tmpBuf);
|
||||
VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
|
||||
int ret = _decoder->Decode(encodedImage, false, NULL);
|
||||
EXPECT_TRUE(ret <= 0);
|
||||
if (ret == 0)
|
||||
{
|
||||
EXPECT_TRUE(WaitForDecodedFrame() == 0);
|
||||
}
|
||||
|
||||
memset(tmpBuf, 0, _refEncFrameLength);
|
||||
_encodedVideoBuffer.CopyFrame(_refEncFrameLength, tmpBuf);
|
||||
VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
|
||||
ret = _decoder->Decode(encodedImage, false, NULL);
|
||||
EXPECT_TRUE(ret <= 0);
|
||||
if (ret == 0)
|
||||
{
|
||||
EXPECT_TRUE(WaitForDecodedFrame() == 0);
|
||||
}
|
||||
}
|
||||
rewind(_sourceFile);
|
||||
|
||||
_encodedVideoBuffer.SetLength(_refEncFrameLength);
|
||||
_encodedVideoBuffer.CopyFrame(_refEncFrameLength, _refEncFrame);
|
||||
|
||||
// Init then decode.
|
||||
EXPECT_TRUE(_decoder->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK);
|
||||
ASSERT_FALSE(SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK);
|
||||
frameLength = 0;
|
||||
VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
|
||||
while (frameLength == 0)
|
||||
{
|
||||
_decoder->Decode(encodedImage, false, NULL);
|
||||
frameLength = WaitForDecodedFrame();
|
||||
}
|
||||
EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.Buffer(), frameLength,
|
||||
_refDecFrame, _lengthSourceFrame) == true);
|
||||
|
||||
// Reset then decode.
|
||||
EXPECT_TRUE(_decoder->Reset() == WEBRTC_VIDEO_CODEC_OK);
|
||||
frameLength = 0;
|
||||
VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
|
||||
while (frameLength == 0)
|
||||
{
|
||||
_decoder->Decode(encodedImage, false, NULL);
|
||||
frameLength = WaitForDecodedFrame();
|
||||
}
|
||||
EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.Buffer(), frameLength,
|
||||
_refDecFrame, _lengthSourceFrame) == true);
|
||||
|
||||
// Decode with other size, reset, then decode with original size again
|
||||
// to verify that decoder is reset to a "fresh" state upon Reset().
|
||||
{
|
||||
// Assert that input frame size is a factor of two, so that we can use
|
||||
// quarter size below.
|
||||
EXPECT_TRUE((_inst.width % 2 == 0) && (_inst.height % 2 == 0));
|
||||
|
||||
VideoCodec tempInst;
|
||||
memcpy(&tempInst, &_inst, sizeof(VideoCodec));
|
||||
tempInst.width /= 2;
|
||||
tempInst.height /= 2;
|
||||
|
||||
// Encode reduced (quarter) frame size.
|
||||
EXPECT_TRUE(_encoder->Release() == WEBRTC_VIDEO_CODEC_OK);
|
||||
EXPECT_TRUE(_encoder->InitEncode(&tempInst, 1, 1440) ==
|
||||
WEBRTC_VIDEO_CODEC_OK);
|
||||
VideoFrame tempInput;
|
||||
unsigned int tmpLength = _inputVideoBuffer.Length() / 4;
|
||||
tempInput.CopyFrame(tmpLength, _inputVideoBuffer.Buffer());
|
||||
tempInput.SetWidth(tempInst.width);
|
||||
tempInput.SetHeight(tempInst.height);
|
||||
_encoder->Encode(tempInput, NULL, NULL);
|
||||
frameLength = WaitForEncodedFrame();
|
||||
EXPECT_TRUE(frameLength > 0);
|
||||
tempInput.Free();
|
||||
// Reset then decode.
|
||||
EXPECT_TRUE(_decoder->Reset() == WEBRTC_VIDEO_CODEC_OK);
|
||||
frameLength = 0;
|
||||
VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
|
||||
while (frameLength == 0)
|
||||
{
|
||||
_decoder->Decode(encodedImage, false, NULL);
|
||||
frameLength = WaitForDecodedFrame();
|
||||
}
|
||||
|
||||
// Encode original frame again
|
||||
EXPECT_TRUE(_encoder->Release() == WEBRTC_VIDEO_CODEC_OK);
|
||||
EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) ==
|
||||
WEBRTC_VIDEO_CODEC_OK);
|
||||
_encoder->Encode(_inputVideoBuffer, NULL, NULL);
|
||||
frameLength = WaitForEncodedFrame();
|
||||
EXPECT_TRUE(frameLength > 0);
|
||||
|
||||
// Reset then decode original frame again.
|
||||
EXPECT_TRUE(_decoder->Reset() == WEBRTC_VIDEO_CODEC_OK);
|
||||
frameLength = 0;
|
||||
VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
|
||||
while (frameLength == 0)
|
||||
{
|
||||
_decoder->Decode(encodedImage, false, NULL);
|
||||
frameLength = WaitForDecodedFrame();
|
||||
}
|
||||
|
||||
// check that decoded frame matches with reference
|
||||
EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.Buffer(), frameLength,
|
||||
_refDecFrame, _lengthSourceFrame) == true);
|
||||
|
||||
}
|
||||
|
||||
// Release then decode.
|
||||
EXPECT_TRUE(_decoder->Release() == WEBRTC_VIDEO_CODEC_OK);
|
||||
EXPECT_TRUE(_decoder->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK);
|
||||
ASSERT_FALSE(SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK);
|
||||
frameLength = 0;
|
||||
VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
|
||||
while (frameLength == 0)
|
||||
{
|
||||
_decoder->Decode(encodedImage, false, NULL);
|
||||
frameLength = WaitForDecodedFrame();
|
||||
}
|
||||
EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.Buffer(), frameLength,
|
||||
_refDecFrame, _lengthSourceFrame) == true);
|
||||
_encodedVideoBuffer.SetLength(0);
|
||||
|
||||
delete [] tmpBuf;
|
||||
|
||||
//----- Function tests -----
|
||||
int frames = 0;
|
||||
// Do not specify maxBitRate (as in ViE).
|
||||
_inst.maxBitrate = 0;
|
||||
|
||||
//-- Timestamp propagation --
|
||||
EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
|
||||
EXPECT_TRUE(_decoder->Reset() == WEBRTC_VIDEO_CODEC_OK);
|
||||
EXPECT_TRUE(_decoder->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK);
|
||||
ASSERT_FALSE(SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK);
|
||||
|
||||
frames = 0;
|
||||
int frameDelay = 0;
|
||||
int encTimeStamp;
|
||||
_decodedVideoBuffer.SetTimeStamp(0);
|
||||
while (fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile) ==
|
||||
_lengthSourceFrame)
|
||||
{
|
||||
_inputVideoBuffer.CopyFrame(_lengthSourceFrame, _sourceBuffer);
|
||||
_inputVideoBuffer.SetTimeStamp(frames);
|
||||
ASSERT_TRUE(_encoder->Encode(_inputVideoBuffer, NULL, NULL) ==
|
||||
WEBRTC_VIDEO_CODEC_OK);
|
||||
frameLength = WaitForEncodedFrame();
|
||||
//ASSERT_TRUE(frameLength);
|
||||
EXPECT_TRUE(frameLength > 0);
|
||||
encTimeStamp = _encodedVideoBuffer.TimeStamp();
|
||||
EXPECT_TRUE(_inputVideoBuffer.TimeStamp() ==
|
||||
static_cast<unsigned>(encTimeStamp));
|
||||
|
||||
frameLength = Decode();
|
||||
if (frameLength == 0)
|
||||
{
|
||||
frameDelay++;
|
||||
}
|
||||
|
||||
encTimeStamp -= frameDelay;
|
||||
if (encTimeStamp < 0)
|
||||
{
|
||||
encTimeStamp = 0;
|
||||
}
|
||||
EXPECT_TRUE(_decodedVideoBuffer.TimeStamp() ==
|
||||
static_cast<unsigned>(encTimeStamp));
|
||||
frames++;
|
||||
}
|
||||
ASSERT_TRUE(feof(_sourceFile) != 0);
|
||||
rewind(_sourceFile);
|
||||
|
||||
RateControlTests();
|
||||
inputImage.Free();
|
||||
|
||||
Teardown();
|
||||
}
|
||||
|
||||
void
|
||||
UnitTest::RateControlTests()
|
||||
{
|
||||
int frames = 0;
|
||||
VideoFrame inputImage;
|
||||
WebRtc_UWord32 frameLength;
|
||||
|
||||
// Do not specify maxBitRate (as in ViE).
|
||||
_inst.maxBitrate = 0;
|
||||
//-- Verify rate control --
|
||||
EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
|
||||
EXPECT_TRUE(_decoder->Reset() == WEBRTC_VIDEO_CODEC_OK);
|
||||
EXPECT_TRUE(_decoder->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK);
|
||||
// add: should also be 0, and 1
|
||||
const int bitRate[] = {30, 100, 500, 1000, 2000};
|
||||
const int nBitrates = sizeof(bitRate)/sizeof(*bitRate);
|
||||
|
||||
printf("\nRate control test\n");
|
||||
for (int i = 0; i < nBitrates; i++)
|
||||
{
|
||||
_bitRate = bitRate[i];
|
||||
int totalBytes = 0;
|
||||
_inst.startBitrate = _bitRate;
|
||||
_encoder->InitEncode(&_inst, 4, 1440);
|
||||
_decoder->Reset();
|
||||
_decoder->InitDecode(&_inst, 1);
|
||||
frames = 0;
|
||||
|
||||
if (_bitRate > _inst.maxBitrate)
|
||||
{
|
||||
CodecSpecific_SetBitrate(_bitRate, _inst.maxFramerate);
|
||||
}
|
||||
else
|
||||
{
|
||||
CodecSpecific_SetBitrate(_bitRate, _inst.maxFramerate);
|
||||
}
|
||||
|
||||
while (fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile) ==
|
||||
_lengthSourceFrame)
|
||||
{
|
||||
_inputVideoBuffer.CopyFrame(_lengthSourceFrame, _sourceBuffer);
|
||||
_inputVideoBuffer.SetTimeStamp(_inputVideoBuffer.TimeStamp() +
|
||||
static_cast<WebRtc_UWord32>(9e4 /
|
||||
static_cast<float>(_inst.maxFramerate)));
|
||||
ASSERT_EQ(_encoder->Encode(_inputVideoBuffer, NULL, NULL),
|
||||
WEBRTC_VIDEO_CODEC_OK);
|
||||
frameLength = WaitForEncodedFrame();
|
||||
ASSERT_GE(frameLength, 0u);
|
||||
totalBytes += frameLength;
|
||||
frames++;
|
||||
|
||||
_encodedVideoBuffer.SetLength(0);
|
||||
}
|
||||
WebRtc_UWord32 actualBitrate =
|
||||
(totalBytes / frames * _inst.maxFramerate * 8)/1000;
|
||||
printf("Target bitrate: %d kbps, actual bitrate: %d kbps\n", _bitRate,
|
||||
actualBitrate);
|
||||
// Test for close match over reasonable range.
|
||||
if (_bitRate >= 100 && _bitRate <= 2500)
|
||||
{
|
||||
EXPECT_TRUE(abs(WebRtc_Word32(actualBitrate - _bitRate)) <
|
||||
0.12 * _bitRate); // for VP8
|
||||
}
|
||||
ASSERT_TRUE(feof(_sourceFile) != 0);
|
||||
rewind(_sourceFile);
|
||||
}
|
||||
}
|
||||
|
||||
bool
|
||||
UnitTest::CheckIfBitExact(const void* ptrA, unsigned int aLengthBytes,
|
||||
const void* ptrB, unsigned int bLengthBytes)
|
||||
{
|
||||
if (aLengthBytes != bLengthBytes)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
return memcmp(ptrA, ptrB, aLengthBytes) == 0;
|
||||
}
|
||||
107
webrtc/modules/video_coding/codecs/test_framework/unit_test.h
Normal file
107
webrtc/modules/video_coding/codecs/test_framework/unit_test.h
Normal file
@@ -0,0 +1,107 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_UNIT_TEST_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_UNIT_TEST_H_
|
||||
|
||||
#include "test.h"
|
||||
#include "event_wrapper.h"
|
||||
|
||||
// Disable "conditional expression is constant" warnings on the perfectly
|
||||
// acceptable
|
||||
// do { ... } while (0) constructions below.
|
||||
// Refer to http://stackoverflow.com/questions/1946445/
|
||||
// is-there-better-way-to-write-do-while0-construct-to-avoid-compiler-warnings
|
||||
// for some discussion of the issue.
|
||||
#ifdef _WIN32
|
||||
#pragma warning(disable : 4127)
|
||||
#endif
|
||||
|
||||
class VideoSource;
|
||||
class UnitTestEncodeCompleteCallback;
|
||||
class UnitTestDecodeCompleteCallback;
|
||||
|
||||
class UnitTest : public CodecTest
|
||||
{
|
||||
public:
|
||||
UnitTest();
|
||||
virtual ~UnitTest();
|
||||
virtual void Perform();
|
||||
virtual void Print();
|
||||
|
||||
protected:
|
||||
UnitTest(std::string name, std::string description);
|
||||
virtual WebRtc_UWord32 CodecSpecific_SetBitrate(
|
||||
WebRtc_UWord32 bitRate,
|
||||
WebRtc_UWord32 /* frameRate */);
|
||||
virtual void Setup();
|
||||
virtual void Teardown();
|
||||
virtual void RateControlTests();
|
||||
virtual int Decode();
|
||||
virtual int DecodeWithoutAssert();
|
||||
virtual int SetCodecSpecificParameters() {return 0;};
|
||||
|
||||
virtual bool CheckIfBitExact(const void *ptrA, unsigned int aLengthBytes,
|
||||
const void *ptrB, unsigned int bLengthBytes);
|
||||
|
||||
WebRtc_UWord32 WaitForEncodedFrame() const;
|
||||
WebRtc_UWord32 WaitForDecodedFrame() const;
|
||||
|
||||
int _tests;
|
||||
int _errors;
|
||||
|
||||
VideoSource* _source;
|
||||
unsigned char* _refFrame;
|
||||
unsigned char* _refEncFrame;
|
||||
unsigned char* _refDecFrame;
|
||||
unsigned int _refEncFrameLength;
|
||||
FILE* _sourceFile;
|
||||
|
||||
UnitTestEncodeCompleteCallback* _encodeCompleteCallback;
|
||||
UnitTestDecodeCompleteCallback* _decodeCompleteCallback;
|
||||
enum { kMaxWaitEncTimeMs = 100 };
|
||||
enum { kMaxWaitDecTimeMs = 25 };
|
||||
};
|
||||
|
||||
class UnitTestEncodeCompleteCallback : public webrtc::EncodedImageCallback
|
||||
{
|
||||
public:
|
||||
UnitTestEncodeCompleteCallback(webrtc::VideoFrame* buffer,
|
||||
WebRtc_UWord32 decoderSpecificSize = 0,
|
||||
void* decoderSpecificInfo = NULL) :
|
||||
_encodedVideoBuffer(buffer),
|
||||
_encodeComplete(false) {}
|
||||
WebRtc_Word32 Encoded(webrtc::EncodedImage& encodedImage,
|
||||
const webrtc::CodecSpecificInfo* codecSpecificInfo,
|
||||
const webrtc::RTPFragmentationHeader*
|
||||
fragmentation = NULL);
|
||||
bool EncodeComplete();
|
||||
// Note that this only makes sense if an encode has been completed
|
||||
webrtc::VideoFrameType EncodedFrameType() const;
|
||||
private:
|
||||
webrtc::VideoFrame* _encodedVideoBuffer;
|
||||
bool _encodeComplete;
|
||||
webrtc::VideoFrameType _encodedFrameType;
|
||||
};
|
||||
|
||||
class UnitTestDecodeCompleteCallback : public webrtc::DecodedImageCallback
|
||||
{
|
||||
public:
|
||||
UnitTestDecodeCompleteCallback(webrtc::VideoFrame* buffer) :
|
||||
_decodedVideoBuffer(buffer), _decodeComplete(false) {}
|
||||
WebRtc_Word32 Decoded(webrtc::VideoFrame& image);
|
||||
bool DecodeComplete();
|
||||
private:
|
||||
webrtc::VideoFrame* _decodedVideoBuffer;
|
||||
bool _decodeComplete;
|
||||
};
|
||||
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_UNIT_TEST_H_
|
||||
|
||||
@@ -0,0 +1,425 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "video_source.h"
|
||||
|
||||
#include <stdio.h>
|
||||
|
||||
#include "gtest/gtest.h"
|
||||
#include "testsupport/fileutils.h"
|
||||
|
||||
// Default source: the foreman CIF sequence from the test resources
// directory, 352x288 I420 at 30 fps.
VideoSource::VideoSource()
    : _fileName(webrtc::test::ProjectRootPath() + "resources/foreman_cif.yuv"),
      _width(352),
      _height(288),
      _type(webrtc::kI420),
      _frameRate(30)
{
}
|
||||
|
||||
// Constructs a source from a well-known VideoSize; the pixel dimensions
// are looked up in the static size table.
VideoSource::VideoSource(std::string fileName, VideoSize size,
                         int frameRate /*= 30*/,
                         webrtc::VideoType type /*= webrtc::kI420*/)
    : _fileName(fileName),
      _type(type),
      _frameRate(frameRate)
{
    assert(size != kUndefined && size != kNumberOfVideoSizes);
    assert(type != webrtc::kUnknown);
    assert(frameRate > 0);
    // _width and _height are filled in from the size table here.
    if (GetWidthHeight(size, _width, _height) != 0)
    {
        assert(false);
    }
}
|
||||
|
||||
// Constructs a source with explicit pixel dimensions.
VideoSource::VideoSource(std::string fileName, int width, int height,
                         int frameRate /*= 30*/,
                         webrtc::VideoType type /*= webrtc::kI420*/)
    : _fileName(fileName),
      _width(width),
      _height(height),
      _type(type),
      _frameRate(frameRate)
{
    assert(width > 0);
    assert(height > 0);
    assert(type != webrtc::kUnknown);
    assert(frameRate > 0);
}
|
||||
|
||||
// Maps this source's dimensions to the VideoSize enumeration by
// delegating to the static lookup.
VideoSize
VideoSource::GetSize() const
{
    return GetSize(_width, _height);
}
|
||||
|
||||
// Maps a (width, height) pair to its VideoSize enumerator.
//
// Implemented by probing GetWidthHeight() for every defined size, so this
// function can never drift out of sync with the dimension table (the
// previous hand-written if-chain was missing several sizes that
// GetWidthHeight knows about, e.g. kW288p and k4SIF). The first matching
// enumerator in declaration order wins, which preserves every mapping the
// old chain returned.
//
// Returns kUndefined when no defined size matches.
VideoSize
VideoSource::GetSize(WebRtc_UWord16 width, WebRtc_UWord16 height)
{
    for (int i = kUndefined + 1; i < kNumberOfVideoSizes; i++)
    {
        const VideoSize size = static_cast<VideoSize>(i);
        int sizeWidth = 0;
        int sizeHeight = 0;
        if (GetWidthHeight(size, sizeWidth, sizeHeight) == 0 &&
            sizeWidth == width && sizeHeight == height)
        {
            return size;
        }
    }
    return kUndefined;
}
|
||||
|
||||
unsigned int
|
||||
VideoSource::GetFrameLength() const
|
||||
{
|
||||
return webrtc::CalcBufferSize(_type, _width, _height);
|
||||
}
|
||||
|
||||
const char*
|
||||
VideoSource::GetMySizeString() const
|
||||
{
|
||||
return VideoSource::GetSizeString(GetSize());
|
||||
}
|
||||
|
||||
const char*
|
||||
VideoSource::GetSizeString(VideoSize size)
|
||||
{
|
||||
switch (size)
|
||||
{
|
||||
case kSQCIF:
|
||||
return "SQCIF";
|
||||
case kQQVGA:
|
||||
return "QQVGA";
|
||||
case kQCIF:
|
||||
return "QCIF";
|
||||
case kQVGA:
|
||||
return "QVGA";
|
||||
case kCIF:
|
||||
return "CIF";
|
||||
case kVGA:
|
||||
return "VGA";
|
||||
case kNTSC:
|
||||
return "NTSC";
|
||||
case k4CIF:
|
||||
return "4CIF";
|
||||
case kSVGA:
|
||||
return "SVGA";
|
||||
case kHD:
|
||||
return "HD";
|
||||
case kXGA:
|
||||
return "XGA";
|
||||
case kFullHD:
|
||||
return "Full_HD";
|
||||
case kWQVGA:
|
||||
return "WQVGA";
|
||||
case kWHD:
|
||||
return "WHD";
|
||||
case kWFullHD:
|
||||
return "WFull_HD";
|
||||
default:
|
||||
return "Undefined";
|
||||
}
|
||||
}
|
||||
|
||||
std::string
|
||||
VideoSource::GetFilePath() const
|
||||
{
|
||||
size_t slashPos = _fileName.find_last_of("/\\");
|
||||
if (slashPos == std::string::npos)
|
||||
{
|
||||
return ".";
|
||||
}
|
||||
|
||||
return _fileName.substr(0, slashPos);
|
||||
}
|
||||
|
||||
std::string
|
||||
VideoSource::GetName() const
|
||||
{
|
||||
// Remove path.
|
||||
size_t slashPos = _fileName.find_last_of("/\\");
|
||||
if (slashPos == std::string::npos)
|
||||
{
|
||||
slashPos = 0;
|
||||
}
|
||||
else
|
||||
{
|
||||
slashPos++;
|
||||
}
|
||||
|
||||
// Remove extension and underscored suffix if it exists.
|
||||
return _fileName.substr(slashPos, std::min(_fileName.find_last_of("_"),
|
||||
_fileName.find_last_of(".")) - slashPos);
|
||||
}
|
||||
|
||||
void
|
||||
VideoSource::Convert(const VideoSource &target, bool force /* = false */) const
|
||||
{
|
||||
// Ensure target rate is less than or equal to source
|
||||
// (i.e. we are only temporally downsampling).
|
||||
ASSERT_TRUE(target.GetFrameRate() <= _frameRate);
|
||||
// Only supports YUV420 currently.
|
||||
ASSERT_TRUE(_type == webrtc::kI420 && target.GetType() == webrtc::kI420);
|
||||
if (!force && (FileExists(target.GetFileName().c_str()) ||
|
||||
(target.GetWidth() == _width && target.GetHeight() == _height && target.GetFrameRate() == _frameRate)))
|
||||
{
|
||||
// Assume that the filename uniquely defines the content.
|
||||
// If the file already exists, it is the correct file.
|
||||
return;
|
||||
}
|
||||
FILE *inFile = NULL;
|
||||
FILE *outFile = NULL;
|
||||
|
||||
inFile = fopen(_fileName.c_str(), "rb");
|
||||
ASSERT_TRUE(inFile != NULL);
|
||||
|
||||
outFile = fopen(target.GetFileName().c_str(), "wb");
|
||||
ASSERT_TRUE(outFile != NULL);
|
||||
|
||||
FrameDropper fd;
|
||||
fd.SetFrameRate(target.GetFrameRate(), _frameRate);
|
||||
|
||||
const size_t lengthOutFrame = webrtc::CalcBufferSize(target.GetType(),
|
||||
target.GetWidth(), target.GetHeight());
|
||||
ASSERT_TRUE(lengthOutFrame > 0);
|
||||
unsigned char *outFrame = new unsigned char[lengthOutFrame];
|
||||
|
||||
const size_t lengthInFrame = webrtc::CalcBufferSize(_type, _width, _height);
|
||||
ASSERT_TRUE(lengthInFrame > 0);
|
||||
unsigned char *inFrame = new unsigned char[lengthInFrame];
|
||||
|
||||
while (fread(inFrame, 1, lengthInFrame, inFile) == lengthInFrame)
|
||||
{
|
||||
if (!fd.DropFrame())
|
||||
{
|
||||
ASSERT_TRUE(target.GetWidth() == _width &&
|
||||
target.GetHeight() == _height);
|
||||
// Add video interpolator here!
|
||||
if (fwrite(outFrame, 1, lengthOutFrame,
|
||||
outFile) != lengthOutFrame) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
delete inFrame;
|
||||
delete outFrame;
|
||||
fclose(inFile);
|
||||
fclose(outFile);
|
||||
}
|
||||
|
||||
// Returns true if |fileName| exists and can be opened for reading.
bool VideoSource::FileExists(const char* fileName)
{
    FILE* fp = fopen(fileName, "rb");
    if (fp == NULL)
    {
        return false;
    }
    fclose(fp);
    return true;
}
|
||||
|
||||
|
||||
int
|
||||
VideoSource::GetWidthHeight( VideoSize size, int & width, int& height)
|
||||
{
|
||||
switch(size)
|
||||
{
|
||||
case kSQCIF:
|
||||
width = 128;
|
||||
height = 96;
|
||||
return 0;
|
||||
case kQQVGA:
|
||||
width = 160;
|
||||
height = 120;
|
||||
return 0;
|
||||
case kQCIF:
|
||||
width = 176;
|
||||
height = 144;
|
||||
return 0;
|
||||
case kCGA:
|
||||
width = 320;
|
||||
height = 200;
|
||||
return 0;
|
||||
case kQVGA:
|
||||
width = 320;
|
||||
height = 240;
|
||||
return 0;
|
||||
case kSIF:
|
||||
width = 352;
|
||||
height = 240;
|
||||
return 0;
|
||||
case kWQVGA:
|
||||
width = 400;
|
||||
height = 240;
|
||||
return 0;
|
||||
case kCIF:
|
||||
width = 352;
|
||||
height = 288;
|
||||
return 0;
|
||||
case kW288p:
|
||||
width = 512;
|
||||
height = 288;
|
||||
return 0;
|
||||
case k448p:
|
||||
width = 576;
|
||||
height = 448;
|
||||
return 0;
|
||||
case kVGA:
|
||||
width = 640;
|
||||
height = 480;
|
||||
return 0;
|
||||
case k432p:
|
||||
width = 720;
|
||||
height = 432;
|
||||
return 0;
|
||||
case kW432p:
|
||||
width = 768;
|
||||
height = 432;
|
||||
return 0;
|
||||
case k4SIF:
|
||||
width = 704;
|
||||
height = 480;
|
||||
return 0;
|
||||
case kW448p:
|
||||
width = 768;
|
||||
height = 448;
|
||||
return 0;
|
||||
case kNTSC:
|
||||
width = 720;
|
||||
height = 480;
|
||||
return 0;
|
||||
case kFW448p:
|
||||
width = 800;
|
||||
height = 448;
|
||||
return 0;
|
||||
case kWVGA:
|
||||
width = 800;
|
||||
height = 480;
|
||||
return 0;
|
||||
case k4CIF:
|
||||
width = 704;
|
||||
height = 576;
|
||||
return 0;
|
||||
case kSVGA:
|
||||
width = 800;
|
||||
height = 600;
|
||||
return 0;
|
||||
case kW544p:
|
||||
width = 960;
|
||||
height = 544;
|
||||
return 0;
|
||||
case kW576p:
|
||||
width = 1024;
|
||||
height = 576;
|
||||
return 0;
|
||||
case kHD:
|
||||
width = 960;
|
||||
height = 720;
|
||||
return 0;
|
||||
case kXGA:
|
||||
width = 1024;
|
||||
height = 768;
|
||||
return 0;
|
||||
case kFullHD:
|
||||
width = 1440;
|
||||
height = 1080;
|
||||
return 0;
|
||||
case kWHD:
|
||||
width = 1280;
|
||||
height = 720;
|
||||
return 0;
|
||||
case kWFullHD:
|
||||
width = 1920;
|
||||
height = 1080;
|
||||
return 0;
|
||||
default:
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
|
||||
// Starts in the "keep every frame" state (no drops configured).
FrameDropper::FrameDropper()
    : _dropsBetweenRenders(0),
      _frameCounter(0)
{
}
|
||||
|
||||
bool
|
||||
FrameDropper::DropFrame()
|
||||
{
|
||||
_frameCounter++;
|
||||
if (_frameCounter > _dropsBetweenRenders)
|
||||
{
|
||||
_frameCounter = 0;
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
unsigned int
|
||||
FrameDropper::DropsBetweenRenders()
|
||||
{
|
||||
return _dropsBetweenRenders;
|
||||
}
|
||||
|
||||
void
|
||||
FrameDropper::SetFrameRate(double frameRate, double maxFrameRate)
|
||||
{
|
||||
if (frameRate >= 1.0)
|
||||
{
|
||||
_dropsBetweenRenders = static_cast<unsigned int>(maxFrameRate / frameRate + 0.5) - 1;
|
||||
}
|
||||
else
|
||||
{
|
||||
_dropsBetweenRenders = 0;
|
||||
}
|
||||
}
|
||||
109
webrtc/modules/video_coding/codecs/test_framework/video_source.h
Normal file
109
webrtc/modules/video_coding/codecs/test_framework/video_source.h
Normal file
@@ -0,0 +1,109 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_VIDEO_SOURCE_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_VIDEO_SOURCE_H_
|
||||
|
||||
#include <string>
|
||||
#include "common_video/libyuv/include/webrtc_libyuv.h"
|
||||
|
||||
enum VideoSize
|
||||
{
|
||||
kUndefined,
|
||||
kSQCIF, // 128*96 = 12 288
|
||||
kQQVGA, // 160*120 = 19 200
|
||||
kQCIF, // 176*144 = 25 344
|
||||
kCGA, // 320*200 = 64 000
|
||||
kQVGA, // 320*240 = 76 800
|
||||
kSIF, // 352*240 = 84 480
|
||||
kWQVGA, // 400*240 = 96 000
|
||||
kCIF, // 352*288 = 101 376
|
||||
kW288p, // 512*288 = 147 456 (WCIF)
|
||||
k448p, // 576*448 = 281 088
|
||||
kVGA, // 640*480 = 307 200
|
||||
k432p, // 720*432 = 311 040
|
||||
kW432p, // 768*432 = 331 776
|
||||
k4SIF, // 704*480 = 337 920
|
||||
kW448p, // 768*448 = 344 064
|
||||
kNTSC, // 720*480 = 345 600
|
||||
kFW448p, // 800*448 = 358 400
|
||||
kWVGA, // 800*480 = 384 000
|
||||
    k4CIF,     // 704*576  = 405 504
|
||||
kSVGA, // 800*600 = 480 000
|
||||
kW544p, // 960*544 = 522 240
|
||||
kW576p, // 1024*576 = 589 824 (W4CIF)
|
||||
kHD, // 960*720 = 691 200
|
||||
kXGA, // 1024*768 = 786 432
|
||||
kWHD, // 1280*720 = 921 600
|
||||
kFullHD, // 1440*1080 = 1 555 200
|
||||
kWFullHD, // 1920*1080 = 2 073 600
|
||||
|
||||
kNumberOfVideoSizes
|
||||
};
|
||||
|
||||
class VideoSource
|
||||
{
|
||||
public:
|
||||
VideoSource();
|
||||
VideoSource(std::string fileName, VideoSize size, int frameRate = 30,
|
||||
webrtc::VideoType type = webrtc::kI420);
|
||||
VideoSource(std::string fileName, int width, int height, int frameRate = 30,
|
||||
webrtc::VideoType type = webrtc::kI420);
|
||||
|
||||
std::string GetFileName() const { return _fileName; }
|
||||
int GetWidth() const { return _width; }
|
||||
int GetHeight() const { return _height; }
|
||||
webrtc::VideoType GetType() const { return _type; }
|
||||
int GetFrameRate() const { return _frameRate; }
|
||||
|
||||
// Returns the file path without a trailing slash.
|
||||
std::string GetFilePath() const;
|
||||
|
||||
// Returns the filename with the path (including the leading slash) removed.
|
||||
std::string GetName() const;
|
||||
|
||||
VideoSize GetSize() const;
|
||||
static VideoSize GetSize(WebRtc_UWord16 width, WebRtc_UWord16 height);
|
||||
unsigned int GetFrameLength() const;
|
||||
|
||||
// Returns a human-readable size string.
|
||||
static const char* GetSizeString(VideoSize size);
|
||||
const char* GetMySizeString() const;
|
||||
|
||||
// Opens the video source, converting and writing to the specified target.
|
||||
// If force is true, the conversion will be done even if the target file
|
||||
// already exists.
|
||||
void Convert(const VideoSource& target, bool force = false) const;
|
||||
static bool FileExists(const char* fileName);
|
||||
private:
|
||||
static int GetWidthHeight( VideoSize size, int& width, int& height);
|
||||
std::string _fileName;
|
||||
int _width;
|
||||
int _height;
|
||||
webrtc::VideoType _type;
|
||||
int _frameRate;
|
||||
};
|
||||
|
||||
// Utility for temporal downsampling: given a target and a maximum frame
// rate, decides frame-by-frame whether the current frame should be
// dropped.
class FrameDropper
{
public:
    FrameDropper();
    // True when the current frame should be dropped.
    bool DropFrame();
    // Number of frames dropped between two kept frames.
    unsigned int DropsBetweenRenders();
    // Configures the drop pattern for keeping frameRate out of maxFrameRate.
    void SetFrameRate(double frameRate, double maxFrameRate);

private:
    unsigned int _dropsBetweenRenders;
    unsigned int _frameCounter;
};
|
||||
|
||||
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_VIDEO_SOURCE_H_
|
||||
|
||||
@@ -0,0 +1,30 @@
|
||||
# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
#
|
||||
# Use of this source code is governed by a BSD-style license
|
||||
# that can be found in the LICENSE file in the root of the source
|
||||
# tree. An additional intellectual property rights grant can be found
|
||||
# in the file PATENTS. All contributing project authors may
|
||||
# be found in the AUTHORS file in the root of the source tree.
|
||||
|
||||
{
|
||||
'conditions': [
|
||||
['include_tests==1', {
|
||||
'targets': [
|
||||
{
|
||||
'target_name': 'video_quality_measurement',
|
||||
'type': 'executable',
|
||||
'dependencies': [
|
||||
'video_codecs_test_framework',
|
||||
'webrtc_video_coding',
|
||||
'<(DEPTH)/third_party/google-gflags/google-gflags.gyp:google-gflags',
|
||||
'<(webrtc_root)/test/metrics.gyp:metrics',
|
||||
'<(webrtc_vp8_dir)/vp8.gyp:webrtc_vp8',
|
||||
],
|
||||
'sources': [
|
||||
'video_quality_measurement.cc',
|
||||
],
|
||||
},
|
||||
], # targets
|
||||
}], # include_tests
|
||||
], # conditions
|
||||
}
|
||||
@@ -0,0 +1,526 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include <stdarg.h>
|
||||
#include <sys/stat.h> // To check for directory existence.
|
||||
|
||||
#include <cassert>
|
||||
#include <cstdio>
|
||||
#include <ctime>
|
||||
|
||||
#ifndef S_ISDIR // Not defined in stat.h on Windows.
|
||||
#define S_ISDIR(mode) (((mode) & S_IFMT) == S_IFDIR)
|
||||
#endif
|
||||
|
||||
#include "common_types.h"
|
||||
#include "google/gflags.h"
|
||||
#include "modules/video_coding/codecs/test/packet_manipulator.h"
|
||||
#include "modules/video_coding/codecs/test/stats.h"
|
||||
#include "modules/video_coding/codecs/test/videoprocessor.h"
|
||||
#include "modules/video_coding/codecs/vp8/include/vp8.h"
|
||||
#include "modules/video_coding/main/interface/video_coding.h"
|
||||
#include "system_wrappers/interface/trace.h"
|
||||
#include "testsupport/frame_reader.h"
|
||||
#include "testsupport/frame_writer.h"
|
||||
#include "testsupport/metrics/video_metrics.h"
|
||||
#include "testsupport/packet_reader.h"
|
||||
|
||||
DEFINE_string(test_name, "Quality test", "The name of the test to run. ");
|
||||
DEFINE_string(test_description, "", "A more detailed description about what "
|
||||
"the current test is about.");
|
||||
DEFINE_string(input_filename, "", "Input file. "
|
||||
"The source video file to be encoded and decoded. Must be in "
|
||||
".yuv format");
|
||||
DEFINE_int32(width, -1, "Width in pixels of the frames in the input file.");
|
||||
DEFINE_int32(height, -1, "Height in pixels of the frames in the input file.");
|
||||
DEFINE_int32(framerate, 30, "Frame rate of the input file, in FPS "
|
||||
"(frames-per-second). ");
|
||||
DEFINE_string(output_dir, ".", "Output directory. "
|
||||
"The directory where the output file will be put. Must already "
|
||||
"exist.");
|
||||
DEFINE_bool(use_single_core, false, "Force using a single core. If set to "
|
||||
"true, only one core will be used for processing. Using a single "
|
||||
"core is necessary to get a deterministic behavior for the"
|
||||
"encoded frames - using multiple cores will produce different "
|
||||
"encoded frames since multiple cores are competing to consume the "
|
||||
"byte budget for each frame in parallel. If set to false, "
|
||||
"the maximum detected number of cores will be used. ");
|
||||
DEFINE_bool(disable_fixed_random_seed , false, "Set this flag to disable the"
|
||||
"usage of a fixed random seed for the random generator used "
|
||||
"for packet loss. Disabling this will cause consecutive runs "
|
||||
"loose packets at different locations, which is bad for "
|
||||
"reproducibility.");
|
||||
DEFINE_string(output_filename, "", "Output file. "
|
||||
"The name of the output video file resulting of the processing "
|
||||
"of the source file. By default this is the same name as the "
|
||||
"input file with '_out' appended before the extension.");
|
||||
DEFINE_int32(bitrate, 500, "Bit rate in kilobits/second.");
|
||||
DEFINE_int32(keyframe_interval, 0, "Forces a keyframe every Nth frame. "
|
||||
"0 means the encoder decides when to insert keyframes. Note that "
|
||||
"the encoder may create a keyframe in other locations in addition "
|
||||
"to the interval that is set using this parameter.");
|
||||
DEFINE_int32(temporal_layers, 0, "The number of temporal layers to use "
|
||||
"(VP8 specific codec setting). Must be 0-4.");
|
||||
DEFINE_int32(packet_size, 1500, "Simulated network packet size in bytes (MTU). "
|
||||
"Used for packet loss simulation.");
|
||||
DEFINE_int32(max_payload_size, 1440, "Max payload size in bytes for the "
|
||||
"encoder.");
|
||||
DEFINE_string(packet_loss_mode, "uniform", "Packet loss mode. Two different "
|
||||
"packet loss models are supported: uniform or burst. This "
|
||||
"setting has no effect unless packet_loss_rate is >0. ");
|
||||
DEFINE_double(packet_loss_probability, 0.0, "Packet loss probability. A value "
|
||||
"between 0.0 and 1.0 that defines the probability of a packet "
|
||||
"being lost. 0.1 means 10% and so on.");
|
||||
DEFINE_int32(packet_loss_burst_length, 1, "Packet loss burst length. Defines "
|
||||
"how many packets will be lost in a burst when a packet has been "
|
||||
"decided to be lost. Must be >=1.");
|
||||
DEFINE_bool(csv, false, "CSV output. Enabling this will output all frame "
|
||||
"statistics at the end of execution. Recommended to run combined "
|
||||
"with --noverbose to avoid mixing output.");
|
||||
DEFINE_bool(python, false, "Python output. Enabling this will output all frame "
|
||||
"statistics as a Python script at the end of execution. "
|
||||
"Recommended to run combine with --noverbose to avoid mixing "
|
||||
"output.");
|
||||
DEFINE_bool(verbose, true, "Verbose mode. Prints a lot of debugging info. "
|
||||
"Suitable for tracking progress but not for capturing output. "
|
||||
"Disable with --noverbose flag.");
|
||||
|
||||
// Custom log method that only prints if the verbose flag is given.
|
||||
// Supports all the standard printf parameters and formatting (just forwarded).
|
||||
int Log(const char *format, ...) {
|
||||
int result = 0;
|
||||
if (FLAGS_verbose) {
|
||||
va_list args;
|
||||
va_start(args, format);
|
||||
result = vprintf(format, args);
|
||||
va_end(args);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
// Validates the arguments given as command line flags and fills in the
|
||||
// TestConfig struct with all configurations needed for video processing.
|
||||
// Returns 0 if everything is OK, otherwise an exit code.
|
||||
// Validates the arguments given as command line flags and fills in the
// TestConfig struct with all configurations needed for video processing.
// Returns 0 if everything is OK, otherwise a non-zero exit code that
// identifies which validation failed.
//
// Fixes vs. the original: the filename-start index is a size_t (the
// find_last_of result type) instead of int, and the dead no-op
// "if (startIndex == 0) startIndex = 0;" has been removed.
int HandleCommandLineFlags(webrtc::test::TestConfig* config) {
  // Validate the mandatory flags:
  if (FLAGS_input_filename == "" || FLAGS_width == -1 || FLAGS_height == -1) {
    printf("%s\n", google::ProgramUsage());
    return 1;
  }
  config->name = FLAGS_test_name;
  config->description = FLAGS_test_description;

  // Verify the input file exists and is readable.
  FILE* test_file;
  test_file = fopen(FLAGS_input_filename.c_str(), "rb");
  if (test_file == NULL) {
    fprintf(stderr, "Cannot read the specified input file: %s\n",
            FLAGS_input_filename.c_str());
    return 2;
  }
  fclose(test_file);
  config->input_filename = FLAGS_input_filename;

  // Verify the output dir exists.
  struct stat dir_info;
  if (!(stat(FLAGS_output_dir.c_str(), &dir_info) == 0 &&
      S_ISDIR(dir_info.st_mode))) {
    fprintf(stderr, "Cannot find output directory: %s\n",
            FLAGS_output_dir.c_str());
    return 3;
  }
  config->output_dir = FLAGS_output_dir;

  // Manufacture an output filename if none was given.
  if (FLAGS_output_filename == "") {
    // Cut out the filename without extension from the given input file
    // (which may include a path). find_last_of returns npos when there is
    // no '/', and npos + 1 wraps to 0, i.e. the start of the string.
    // NOTE(review): only '/' is treated as a separator here — confirm
    // whether '\\' should also be handled on Windows.
    size_t startIndex = FLAGS_input_filename.find_last_of("/") + 1;
    FLAGS_output_filename =
        FLAGS_input_filename.substr(startIndex,
                                    FLAGS_input_filename.find_last_of(".")
                                    - startIndex) + "_out.yuv";
  }

  // Verify output file can be written.
  if (FLAGS_output_dir == ".") {
    config->output_filename = FLAGS_output_filename;
  } else {
    config->output_filename = FLAGS_output_dir + "/"+ FLAGS_output_filename;
  }
  test_file = fopen(config->output_filename.c_str(), "wb");
  if (test_file == NULL) {
    fprintf(stderr, "Cannot write output file: %s\n",
            config->output_filename.c_str());
    return 4;
  }
  fclose(test_file);

  // Check single core flag.
  config->use_single_core = FLAGS_use_single_core;

  // Get codec specific configuration.
  webrtc::VideoCodingModule::Codec(webrtc::kVideoCodecVP8,
                                   config->codec_settings);

  // Check the temporal layers.
  if (FLAGS_temporal_layers < 0 ||
      FLAGS_temporal_layers > webrtc::kMaxTemporalStreams) {
    fprintf(stderr, "Temporal layers number must be 0-4, was: %d\n",
            FLAGS_temporal_layers);
    return 13;
  }
  config->codec_settings->codecSpecific.VP8.numberOfTemporalLayers =
      FLAGS_temporal_layers;

  // Check the bit rate.
  if (FLAGS_bitrate <= 0) {
    fprintf(stderr, "Bit rate must be >0 kbps, was: %d\n", FLAGS_bitrate);
    return 5;
  }
  config->codec_settings->startBitrate = FLAGS_bitrate;

  // Check the keyframe interval.
  if (FLAGS_keyframe_interval < 0) {
    fprintf(stderr, "Keyframe interval must be >=0, was: %d\n",
            FLAGS_keyframe_interval);
    return 6;
  }
  config->keyframe_interval = FLAGS_keyframe_interval;

  // Check packet size and max payload size.
  if (FLAGS_packet_size <= 0) {
    fprintf(stderr, "Packet size must be >0 bytes, was: %d\n",
            FLAGS_packet_size);
    return 7;
  }
  config->networking_config.packet_size_in_bytes = FLAGS_packet_size;

  if (FLAGS_max_payload_size <= 0) {
    fprintf(stderr, "Max payload size must be >0 bytes, was: %d\n",
            FLAGS_max_payload_size);
    return 8;
  }
  config->networking_config.max_payload_size_in_bytes =
      FLAGS_max_payload_size;

  // Check the width and height
  if (FLAGS_width <= 0 || FLAGS_height <= 0) {
    fprintf(stderr, "Width and height must be >0.");
    return 9;
  }
  config->codec_settings->width = FLAGS_width;
  config->codec_settings->height = FLAGS_height;
  config->codec_settings->maxFramerate = FLAGS_framerate;

  // Calculate the size of each frame to read (according to YUV spec).
  config->frame_length_in_bytes =
      3 * config->codec_settings->width * config->codec_settings->height / 2;

  // Check packet loss settings
  if (FLAGS_packet_loss_mode != "uniform" &&
      FLAGS_packet_loss_mode != "burst") {
    fprintf(stderr, "Unsupported packet loss mode, must be 'uniform' or "
            "'burst'\n.");
    return 10;
  }
  config->networking_config.packet_loss_mode = webrtc::test::kUniform;
  if (FLAGS_packet_loss_mode == "burst") {
    config->networking_config.packet_loss_mode = webrtc::test::kBurst;
  }

  if (FLAGS_packet_loss_probability < 0.0 ||
      FLAGS_packet_loss_probability > 1.0) {
    fprintf(stderr, "Invalid packet loss probability. Must be 0.0 - 1.0, "
            "was: %f\n", FLAGS_packet_loss_probability);
    return 11;
  }
  config->networking_config.packet_loss_probability =
      FLAGS_packet_loss_probability;

  if (FLAGS_packet_loss_burst_length < 1) {
    fprintf(stderr, "Invalid packet loss burst length, must be >=1, "
            "was: %d\n", FLAGS_packet_loss_burst_length);
    return 12;
  }
  config->networking_config.packet_loss_burst_length =
      FLAGS_packet_loss_burst_length;
  config->verbose = FLAGS_verbose;
  return 0;
}
|
||||
|
||||
// Runs an SSIM comparison between the input and the processed output
// sequence and logs a summary (average/min/max) of the result.
void CalculateSsimVideoMetrics(webrtc::test::TestConfig* config,
                               webrtc::test::QualityMetricsResult* result) {
  Log("Calculating SSIM...\n");
  I420SSIMFromFiles(config->input_filename.c_str(),
                    config->output_filename.c_str(),
                    config->codec_settings->width,
                    config->codec_settings->height, result);
  Log(" Average: %3.2f\n", result->average);
  Log(" Min : %3.2f (frame %d)\n", result->min, result->min_frame_number);
  Log(" Max : %3.2f (frame %d)\n", result->max, result->max_frame_number);
}
|
||||
|
||||
// Runs a PSNR comparison between the input and the processed output
// sequence and logs a summary (average/min/max) of the result.
// NOTE(review): intentionally parallel to CalculateSsimVideoMetrics.
void CalculatePsnrVideoMetrics(webrtc::test::TestConfig* config,
                               webrtc::test::QualityMetricsResult* result) {
  Log("Calculating PSNR...\n");
  I420PSNRFromFiles(config->input_filename.c_str(),
                    config->output_filename.c_str(),
                    config->codec_settings->width,
                    config->codec_settings->height, result);
  Log(" Average: %3.2f\n", result->average);
  Log(" Min : %3.2f (frame %d)\n", result->min, result->min_frame_number);
  Log(" Max : %3.2f (frame %d)\n", result->max, result->max_frame_number);
}
|
||||
|
||||
// Logs a human-readable summary of the test configuration (only printed
// when the --verbose flag is set, since it goes through Log()).
void PrintConfigurationSummary(const webrtc::test::TestConfig& config) {
  Log("Quality test with parameters:\n");
  Log(" Test name : %s\n", config.name.c_str());
  Log(" Description : %s\n", config.description.c_str());
  Log(" Input filename : %s\n", config.input_filename.c_str());
  Log(" Output directory : %s\n", config.output_dir.c_str());
  Log(" Output filename : %s\n", config.output_filename.c_str());
  Log(" Frame length : %d bytes\n", config.frame_length_in_bytes);
  Log(" Packet size : %d bytes\n",
      config.networking_config.packet_size_in_bytes);
  Log(" Max payload size : %d bytes\n",
      config.networking_config.max_payload_size_in_bytes);
  Log(" Packet loss:\n");
  Log(" Mode : %s\n",
      PacketLossModeToStr(config.networking_config.packet_loss_mode));
  Log(" Probability : %2.1f\n",
      config.networking_config.packet_loss_probability);
  Log(" Burst length : %d packets\n",
      config.networking_config.packet_loss_burst_length);
}
|
||||
|
||||
// Prints all per-frame statistics as comma-separated values to stdout.
// Assumes ssim_result and psnr_result hold one entry per frame in stats.
void PrintCsvOutput(const webrtc::test::Stats& stats,
                    const webrtc::test::QualityMetricsResult& ssim_result,
                    const webrtc::test::QualityMetricsResult& psnr_result) {
  Log("\nCSV output (recommended to run with --noverbose to skip the "
      "above output)\n");
  printf("frame_number encoding_successful decoding_successful "
         "encode_return_code decode_return_code "
         "encode_time_in_us decode_time_in_us "
         "bit_rate_in_kbps encoded_frame_length_in_bytes frame_type "
         "packets_dropped total_packets "
         "ssim psnr\n");

  for (size_t i = 0; i < stats.stats_.size(); ++i) {
    const webrtc::test::FrameStatistic& f = stats.stats_[i];
    const webrtc::test::FrameResult& ssim = ssim_result.frames[i];
    const webrtc::test::FrameResult& psnr = psnr_result.frames[i];
    printf("%4d, %d, %d, %2d, %2d, %6d, %6d, %5d, %7d, %d, %2d, %2d, "
           "%5.3f, %5.2f\n",
           f.frame_number,
           f.encoding_successful,
           f.decoding_successful,
           f.encode_return_code,
           f.decode_return_code,
           f.encode_time_in_us,
           f.decode_time_in_us,
           f.bit_rate_in_kbps,
           f.encoded_frame_length_in_bytes,
           f.frame_type,
           f.packets_dropped,
           f.total_packets,
           ssim.value,
           psnr.value);
  }
}
|
||||
|
||||
// Prints the configuration and all per-frame statistics as an executable
// Python script to stdout (three variables: test_configuration,
// frame_data_types, frame_data).
//
// Fix vs. the original: the emitted dict key was 'keyframe_interval;'
// with a stray semicolon inside the Python string, producing a malformed
// key for downstream consumers; it is now 'keyframe_interval'.
void PrintPythonOutput(const webrtc::test::TestConfig& config,
                       const webrtc::test::Stats& stats,
                       const webrtc::test::QualityMetricsResult& ssim_result,
                       const webrtc::test::QualityMetricsResult& psnr_result) {
  Log("\nPython output (recommended to run with --noverbose to skip the "
      "above output)\n");
  printf("test_configuration = ["
         "{'name': 'name', 'value': '%s'},\n"
         "{'name': 'description', 'value': '%s'},\n"
         "{'name': 'test_number', 'value': '%d'},\n"
         "{'name': 'input_filename', 'value': '%s'},\n"
         "{'name': 'output_filename', 'value': '%s'},\n"
         "{'name': 'output_dir', 'value': '%s'},\n"
         "{'name': 'packet_size_in_bytes', 'value': '%d'},\n"
         "{'name': 'max_payload_size_in_bytes', 'value': '%d'},\n"
         "{'name': 'packet_loss_mode', 'value': '%s'},\n"
         "{'name': 'packet_loss_probability', 'value': '%f'},\n"
         "{'name': 'packet_loss_burst_length', 'value': '%d'},\n"
         "{'name': 'exclude_frame_types', 'value': '%s'},\n"
         "{'name': 'frame_length_in_bytes', 'value': '%d'},\n"
         "{'name': 'use_single_core', 'value': '%s'},\n"
         "{'name': 'keyframe_interval', 'value': '%d'},\n"
         "{'name': 'video_codec_type', 'value': '%s'},\n"
         "{'name': 'width', 'value': '%d'},\n"
         "{'name': 'height', 'value': '%d'},\n"
         "{'name': 'bit_rate_in_kbps', 'value': '%d'},\n"
         "]\n",
         config.name.c_str(),
         config.description.c_str(),
         config.test_number,
         config.input_filename.c_str(),
         config.output_filename.c_str(),
         config.output_dir.c_str(),
         config.networking_config.packet_size_in_bytes,
         config.networking_config.max_payload_size_in_bytes,
         PacketLossModeToStr(config.networking_config.packet_loss_mode),
         config.networking_config.packet_loss_probability,
         config.networking_config.packet_loss_burst_length,
         ExcludeFrameTypesToStr(config.exclude_frame_types),
         config.frame_length_in_bytes,
         config.use_single_core ? "True " : "False",
         config.keyframe_interval,
         webrtc::test::VideoCodecTypeToStr(config.codec_settings->codecType),
         config.codec_settings->width,
         config.codec_settings->height,
         config.codec_settings->startBitrate);
  printf("frame_data_types = {"
         "'frame_number': ('number', 'Frame number'),\n"
         "'encoding_successful': ('boolean', 'Encoding successful?'),\n"
         "'decoding_successful': ('boolean', 'Decoding successful?'),\n"
         "'encode_time': ('number', 'Encode time (us)'),\n"
         "'decode_time': ('number', 'Decode time (us)'),\n"
         "'encode_return_code': ('number', 'Encode return code'),\n"
         "'decode_return_code': ('number', 'Decode return code'),\n"
         "'bit_rate': ('number', 'Bit rate (kbps)'),\n"
         "'encoded_frame_length': "
         "('number', 'Encoded frame length (bytes)'),\n"
         "'frame_type': ('string', 'Frame type'),\n"
         "'packets_dropped': ('number', 'Packets dropped'),\n"
         "'total_packets': ('number', 'Total packets'),\n"
         "'ssim': ('number', 'SSIM'),\n"
         "'psnr': ('number', 'PSNR (dB)'),\n"
         "}\n");
  printf("frame_data = [");
  for (unsigned int i = 0; i < stats.stats_.size(); ++i) {
    const webrtc::test::FrameStatistic& f = stats.stats_[i];
    const webrtc::test::FrameResult& ssim = ssim_result.frames[i];
    const webrtc::test::FrameResult& psnr = psnr_result.frames[i];
    printf("{'frame_number': %d, "
           "'encoding_successful': %s, 'decoding_successful': %s, "
           "'encode_time': %d, 'decode_time': %d, "
           "'encode_return_code': %d, 'decode_return_code': %d, "
           "'bit_rate': %d, 'encoded_frame_length': %d, 'frame_type': %s, "
           "'packets_dropped': %d, 'total_packets': %d, "
           "'ssim': %f, 'psnr': %f},\n",
           f.frame_number,
           f.encoding_successful ? "True " : "False",
           f.decoding_successful ? "True " : "False",
           f.encode_time_in_us,
           f.decode_time_in_us,
           f.encode_return_code,
           f.decode_return_code,
           f.bit_rate_in_kbps,
           f.encoded_frame_length_in_bytes,
           f.frame_type == webrtc::kDeltaFrame ? "'Delta'" : "'Other'",
           f.packets_dropped,
           f.total_packets,
           ssim.value,
           psnr.value);
  }
  printf("]\n");
}
|
||||
|
||||
// Runs a quality measurement on the input file supplied to the program.
|
||||
// The input file must be in YUV format.
|
||||
int main(int argc, char* argv[]) {
|
||||
std::string program_name = argv[0];
|
||||
std::string usage = "Quality test application for video comparisons.\n"
|
||||
"Run " + program_name + " --helpshort for usage.\n"
|
||||
"Example usage:\n" + program_name +
|
||||
" --input_filename=filename.yuv --width=352 --height=288\n";
|
||||
google::SetUsageMessage(usage);
|
||||
|
||||
google::ParseCommandLineFlags(&argc, &argv, true);
|
||||
|
||||
// Create TestConfig and codec settings struct.
|
||||
webrtc::test::TestConfig config;
|
||||
webrtc::VideoCodec codec_settings;
|
||||
config.codec_settings = &codec_settings;
|
||||
|
||||
int return_code = HandleCommandLineFlags(&config);
|
||||
// Exit if an invalid argument is supplied.
|
||||
if (return_code != 0) {
|
||||
return return_code;
|
||||
}
|
||||
|
||||
PrintConfigurationSummary(config);
|
||||
|
||||
webrtc::VP8Encoder* encoder = webrtc::VP8Encoder::Create();
|
||||
webrtc::VP8Decoder* decoder = webrtc::VP8Decoder::Create();
|
||||
webrtc::test::Stats stats;
|
||||
webrtc::test::FrameReaderImpl frame_reader(config.input_filename,
|
||||
config.frame_length_in_bytes);
|
||||
webrtc::test::FrameWriterImpl frame_writer(config.output_filename,
|
||||
config.frame_length_in_bytes);
|
||||
frame_reader.Init();
|
||||
frame_writer.Init();
|
||||
webrtc::test::PacketReader packet_reader;
|
||||
|
||||
webrtc::test::PacketManipulatorImpl packet_manipulator(
|
||||
&packet_reader, config.networking_config, config.verbose);
|
||||
// By default the packet manipulator is seeded with a fixed random.
|
||||
// If disabled we must generate a new seed.
|
||||
if (FLAGS_disable_fixed_random_seed) {
|
||||
packet_manipulator.InitializeRandomSeed(time(NULL));
|
||||
}
|
||||
webrtc::test::VideoProcessor* processor =
|
||||
new webrtc::test::VideoProcessorImpl(encoder, decoder,
|
||||
&frame_reader,
|
||||
&frame_writer,
|
||||
&packet_manipulator,
|
||||
config, &stats);
|
||||
processor->Init();
|
||||
|
||||
int frame_number = 0;
|
||||
while (processor->ProcessFrame(frame_number)) {
|
||||
if (frame_number % 80 == 0) {
|
||||
Log("\n"); // make the output a bit nicer.
|
||||
}
|
||||
Log(".");
|
||||
frame_number++;
|
||||
}
|
||||
Log("\n");
|
||||
Log("Processed %d frames\n", frame_number);
|
||||
|
||||
// Release encoder and decoder to make sure they have finished processing.
|
||||
encoder->Release();
|
||||
decoder->Release();
|
||||
|
||||
// Verify statistics are correct:
|
||||
assert(frame_number == static_cast<int>(stats.stats_.size()));
|
||||
|
||||
// Close the files before we start using them for SSIM/PSNR calculations.
|
||||
frame_reader.Close();
|
||||
frame_writer.Close();
|
||||
|
||||
stats.PrintSummary();
|
||||
|
||||
webrtc::test::QualityMetricsResult ssim_result;
|
||||
CalculateSsimVideoMetrics(&config, &ssim_result);
|
||||
webrtc::test::QualityMetricsResult psnr_result;
|
||||
CalculatePsnrVideoMetrics(&config, &psnr_result);
|
||||
|
||||
if (FLAGS_csv) {
|
||||
PrintCsvOutput(stats, ssim_result, psnr_result);
|
||||
}
|
||||
if (FLAGS_python) {
|
||||
PrintPythonOutput(config, stats, ssim_result, psnr_result);
|
||||
}
|
||||
delete processor;
|
||||
delete encoder;
|
||||
delete decoder;
|
||||
Log("Quality test finished!");
|
||||
return 0;
|
||||
}
|
||||
47
webrtc/modules/video_coding/codecs/vp8/Android.mk
Normal file
47
webrtc/modules/video_coding/codecs/vp8/Android.mk
Normal file
@@ -0,0 +1,47 @@
|
||||
# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
#
|
||||
# Use of this source code is governed by a BSD-style license
|
||||
# that can be found in the LICENSE file in the root of the source
|
||||
# tree. An additional intellectual property rights grant can be found
|
||||
# in the file PATENTS. All contributing project authors may
|
||||
# be found in the AUTHORS file in the root of the source tree.
|
||||
|
||||
LOCAL_PATH := $(call my-dir)
|
||||
|
||||
include $(CLEAR_VARS)
|
||||
|
||||
include $(LOCAL_PATH)/../../../../../../../android-webrtc.mk
|
||||
|
||||
LOCAL_MODULE_CLASS := STATIC_LIBRARIES
|
||||
LOCAL_MODULE := libwebrtc_vp8
|
||||
LOCAL_MODULE_TAGS := optional
|
||||
LOCAL_CPP_EXTENSION := .cc
|
||||
LOCAL_SRC_FILES := \
|
||||
reference_picture_selection.cc \
|
||||
vp8_impl.cc
|
||||
|
||||
# Flags passed to both C and C++ files.
|
||||
LOCAL_CFLAGS := \
|
||||
$(MY_WEBRTC_COMMON_DEFS)
|
||||
# TODO(leozwang) Enable WEBRTC_LIBVPX_VERSION after libvpx is updateed
|
||||
# to a new version and also add temporal_layers.cc
|
||||
|
||||
LOCAL_C_INCLUDES := \
|
||||
$(LOCAL_PATH)/../interface \
|
||||
$(LOCAL_PATH)/../../../interface \
|
||||
$(LOCAL_PATH)/../../../../../.. \
|
||||
$(LOCAL_PATH)/../../../../../../common_video/interface \
|
||||
$(LOCAL_PATH)/../../../../../../common_video/vplib/main/interface \
|
||||
$(LOCAL_PATH)/../../../../../../modules/interface \
|
||||
$(LOCAL_PATH)/../../../../../../system_wrappers/interface \
|
||||
external/libvpx
|
||||
|
||||
LOCAL_SHARED_LIBRARIES := \
|
||||
libcutils \
|
||||
libdl \
|
||||
libstlport
|
||||
|
||||
ifndef NDK_ROOT
|
||||
include external/stlport/libstlport.mk
|
||||
endif
|
||||
include $(BUILD_STATIC_LIBRARY)
|
||||
36
webrtc/modules/video_coding/codecs/vp8/include/vp8.h
Normal file
36
webrtc/modules/video_coding/codecs/vp8/include/vp8.h
Normal file
@@ -0,0 +1,36 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*
|
||||
* WEBRTC VP8 wrapper interface
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_INCLUDE_VP8_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_INCLUDE_VP8_H_
|
||||
|
||||
#include "modules/video_coding/codecs/interface/video_codec_interface.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
class VP8Encoder : public VideoEncoder {
|
||||
public:
|
||||
static VP8Encoder* Create();
|
||||
|
||||
virtual ~VP8Encoder() {};
|
||||
}; // end of VP8Encoder class
|
||||
|
||||
|
||||
class VP8Decoder : public VideoDecoder {
|
||||
public:
|
||||
static VP8Decoder* Create();
|
||||
|
||||
virtual ~VP8Decoder() {};
|
||||
}; // end of VP8Decoder class
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_INCLUDE_VP8_H_
|
||||
@@ -0,0 +1,29 @@
|
||||
/*
|
||||
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_COMMON_TYPES_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_COMMON_TYPES_H_
|
||||
|
||||
#include "common_types.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Ratio allocation between temporal streams:
|
||||
// Values as required for the VP8 codec (accumulating).
|
||||
static const float
|
||||
kVp8LayerRateAlloction[kMaxTemporalStreams][kMaxTemporalStreams] = {
|
||||
{1.0f, 0, 0, 0}, // 1 layer
|
||||
{0.6f, 1.0f , 0 , 0}, // 2 layers {60%, 40%}
|
||||
{0.4f, 0.6f , 1.0f, 0}, // 3 layers {40%, 20%, 40%}
|
||||
{0.25f, 0.4f, 0.6f, 1.0f} // 4 layers {25%, 15%, 20%, 40%}
|
||||
};
|
||||
|
||||
} // namespace webrtc
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_COMMON_TYPES_H_
|
||||
@@ -0,0 +1,131 @@
|
||||
/*
|
||||
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "reference_picture_selection.h"
|
||||
|
||||
#include "typedefs.h"
|
||||
#include "vpx/vpx_encoder.h"
|
||||
#include "vpx/vp8cx.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
ReferencePictureSelection::ReferencePictureSelection()
|
||||
: kRttConfidence(1.33),
|
||||
update_golden_next_(true),
|
||||
established_golden_(false),
|
||||
received_ack_(false),
|
||||
last_sent_ref_picture_id_(0),
|
||||
last_sent_ref_update_time_(0),
|
||||
established_ref_picture_id_(0),
|
||||
last_refresh_time_(0),
|
||||
rtt_(0) {
|
||||
}
|
||||
|
||||
void ReferencePictureSelection::Init() {
|
||||
update_golden_next_ = true;
|
||||
established_golden_ = false;
|
||||
received_ack_ = false;
|
||||
last_sent_ref_picture_id_ = 0;
|
||||
last_sent_ref_update_time_ = 0;
|
||||
established_ref_picture_id_ = 0;
|
||||
last_refresh_time_ = 0;
|
||||
rtt_ = 0;
|
||||
}
|
||||
|
||||
void ReferencePictureSelection::ReceivedRPSI(int rpsi_picture_id) {
|
||||
// Assume RPSI is signaled with 14 bits.
|
||||
if ((rpsi_picture_id & 0x3fff) == (last_sent_ref_picture_id_ & 0x3fff)) {
|
||||
// Remote peer has received our last reference frame, switch frame type.
|
||||
received_ack_ = true;
|
||||
established_golden_ = update_golden_next_;
|
||||
update_golden_next_ = !update_golden_next_;
|
||||
established_ref_picture_id_ = last_sent_ref_picture_id_;
|
||||
}
|
||||
}
|
||||
|
||||
bool ReferencePictureSelection::ReceivedSLI(uint32_t now_ts) {
|
||||
bool send_refresh = false;
|
||||
// Don't send a refresh more than once per round-trip time.
|
||||
// This is to avoid too frequent refreshes, since the receiver
|
||||
// will signal an SLI for every corrupt frame.
|
||||
if (TimestampDiff(now_ts, last_refresh_time_) > rtt_) {
|
||||
send_refresh = true;
|
||||
last_refresh_time_ = now_ts;
|
||||
}
|
||||
return send_refresh;
|
||||
}
|
||||
|
||||
int ReferencePictureSelection::EncodeFlags(int picture_id, bool send_refresh,
|
||||
uint32_t now_ts) {
|
||||
int flags = 0;
|
||||
// We can't refresh the decoder until we have established the key frame.
|
||||
if (send_refresh && received_ack_) {
|
||||
flags |= VP8_EFLAG_NO_REF_LAST; // Don't reference the last frame
|
||||
if (established_golden_)
|
||||
flags |= VP8_EFLAG_NO_REF_ARF; // Don't reference the alt-ref frame.
|
||||
else
|
||||
flags |= VP8_EFLAG_NO_REF_GF; // Don't reference the golden frame
|
||||
}
|
||||
|
||||
// Make sure we don't update the reference frames too often. We must wait long
|
||||
// enough for an RPSI to arrive after the decoder decoded the reference frame.
|
||||
// Ideally that should happen after one round-trip time.
|
||||
// Add a margin defined by |kRttConfidence|.
|
||||
uint32_t update_interval = kRttConfidence * rtt_;
|
||||
if (update_interval < kMinUpdateInterval)
|
||||
update_interval = kMinUpdateInterval;
|
||||
// Don't send reference frame updates until we have an established reference.
|
||||
if (TimestampDiff(now_ts, last_sent_ref_update_time_) > update_interval &&
|
||||
received_ack_) {
|
||||
flags |= VP8_EFLAG_NO_REF_LAST; // Don't reference the last frame.
|
||||
if (update_golden_next_) {
|
||||
flags |= VP8_EFLAG_FORCE_GF; // Update the golden reference.
|
||||
flags |= VP8_EFLAG_NO_UPD_ARF; // Don't update alt-ref.
|
||||
flags |= VP8_EFLAG_NO_REF_GF; // Don't reference the golden frame.
|
||||
} else {
|
||||
flags |= VP8_EFLAG_FORCE_ARF; // Update the alt-ref reference.
|
||||
flags |= VP8_EFLAG_NO_UPD_GF; // Don't update the golden frame.
|
||||
flags |= VP8_EFLAG_NO_REF_ARF; // Don't reference the alt-ref frame.
|
||||
}
|
||||
last_sent_ref_picture_id_ = picture_id;
|
||||
last_sent_ref_update_time_ = now_ts;
|
||||
} else {
|
||||
// No update of golden or alt-ref. We can therefore freely reference the
|
||||
// established reference frame and the last frame.
|
||||
if (established_golden_)
|
||||
flags |= VP8_EFLAG_NO_REF_ARF; // Don't reference the alt-ref frame.
|
||||
else
|
||||
flags |= VP8_EFLAG_NO_REF_GF; // Don't reference the golden frame.
|
||||
flags |= VP8_EFLAG_NO_UPD_GF; // Don't update the golden frame.
|
||||
flags |= VP8_EFLAG_NO_UPD_ARF; // Don't update the alt-ref frame.
|
||||
}
|
||||
return flags;
|
||||
}
|
||||
|
||||
void ReferencePictureSelection::EncodedKeyFrame(int picture_id) {
|
||||
last_sent_ref_picture_id_ = picture_id;
|
||||
received_ack_ = false;
|
||||
}
|
||||
|
||||
void ReferencePictureSelection::SetRtt(int rtt) {
|
||||
// Convert from milliseconds to timestamp frequency.
|
||||
rtt_ = 90 * rtt;
|
||||
}
|
||||
|
||||
uint32_t ReferencePictureSelection::TimestampDiff(uint32_t new_ts,
|
||||
uint32_t old_ts) {
|
||||
if (old_ts > new_ts) {
|
||||
// Assuming this is a wrap, doing a compensated subtraction.
|
||||
return (new_ts + (static_cast<int64_t>(1) << 32)) - old_ts;
|
||||
}
|
||||
return new_ts - old_ts;
|
||||
}
|
||||
|
||||
} // namespace webrtc
|
||||
@@ -0,0 +1,78 @@
|
||||
/*
|
||||
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
/*
|
||||
* This file defines classes for doing reference picture selection, primarily
|
||||
* with VP8.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_REFERENCE_PICTURE_SELECTION_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_REFERENCE_PICTURE_SELECTION_H_
|
||||
|
||||
#include "typedefs.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
class ReferencePictureSelection {
|
||||
public:
|
||||
ReferencePictureSelection();
|
||||
void Init();
|
||||
|
||||
// Report a received reference picture selection indication. This will
|
||||
// introduce a new established reference if the received RPSI isn't too late.
|
||||
void ReceivedRPSI(int rpsi_picture_id);
|
||||
|
||||
// Report a received slice loss indication. Returns true if a refresh frame
|
||||
// must be sent to the receiver, which is accomplished by only predicting
|
||||
// from the established reference.
|
||||
// |now_ts| is the RTP timestamp corresponding to the current time. Typically
|
||||
// the capture timestamp of the frame currently being processed.
|
||||
// Returns true if it's time to encode a decoder refresh, otherwise false.
|
||||
bool ReceivedSLI(uint32_t now_ts);
|
||||
|
||||
// Returns the recommended VP8 encode flags needed. May refresh the decoder
|
||||
// and/or update the reference buffers.
|
||||
// |picture_id| picture id of the frame to be encoded.
|
||||
// |send_refresh| should be set to true if a decoder refresh should be
|
||||
// encoded, otherwise false.
|
||||
// |now_ts| is the RTP timestamp corresponding to the current time. Typically
|
||||
// the capture timestamp of the frame currently being processed.
|
||||
// Returns the flags to be given to the libvpx encoder when encoding the next
|
||||
// frame.
|
||||
int EncodeFlags(int picture_id, bool send_refresh, uint32_t now_ts);
|
||||
|
||||
// Notify the RPS that the frame with picture id |picture_id| was encoded as
|
||||
// a key frame, effectively updating all reference buffers.
|
||||
void EncodedKeyFrame(int picture_id);
|
||||
|
||||
// Set the round-trip time between the sender and the receiver to |rtt|
|
||||
// milliseconds.
|
||||
void SetRtt(int rtt);
|
||||
|
||||
private:
|
||||
static uint32_t TimestampDiff(uint32_t new_ts, uint32_t old_ts);
|
||||
|
||||
// The minimum time between reference frame updates.
|
||||
enum { kMinUpdateInterval = 90 * 10 }; // Timestamp frequency
|
||||
const double kRttConfidence;
|
||||
|
||||
bool update_golden_next_;
|
||||
bool established_golden_;
|
||||
bool received_ack_;
|
||||
int last_sent_ref_picture_id_;
|
||||
uint32_t last_sent_ref_update_time_;
|
||||
int established_ref_picture_id_;
|
||||
uint32_t last_refresh_time_;
|
||||
uint32_t rtt_;
|
||||
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_REFERENCE_PICTURE_SELECTION_H_
|
||||
@@ -0,0 +1,107 @@
|
||||
/*
|
||||
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "gtest/gtest.h"
|
||||
#include "reference_picture_selection.h"
|
||||
#include "vpx/vpx_encoder.h"
|
||||
#include "vpx/vp8cx.h"
|
||||
|
||||
using webrtc::ReferencePictureSelection;
|
||||
|
||||
// The minimum time between reference frame updates. Should match the values
|
||||
// set in reference_picture_selection.h
|
||||
enum { kMinUpdateInterval = 10 };
|
||||
// The minimum time between decoder refreshes through restricted prediction.
|
||||
// Should match the values set in reference_picture_selection.h
|
||||
enum { kRtt = 10 };
|
||||
|
||||
enum {
|
||||
kNoPropagationGolden = VP8_EFLAG_NO_REF_ARF |
|
||||
VP8_EFLAG_NO_UPD_GF |
|
||||
VP8_EFLAG_NO_UPD_ARF,
|
||||
kNoPropagationAltRef = VP8_EFLAG_NO_REF_GF |
|
||||
VP8_EFLAG_NO_UPD_GF |
|
||||
VP8_EFLAG_NO_UPD_ARF,
|
||||
kPropagateGolden = VP8_EFLAG_FORCE_GF |
|
||||
VP8_EFLAG_NO_UPD_ARF |
|
||||
VP8_EFLAG_NO_REF_GF |
|
||||
VP8_EFLAG_NO_REF_LAST,
|
||||
kPropagateAltRef = VP8_EFLAG_FORCE_ARF |
|
||||
VP8_EFLAG_NO_UPD_GF |
|
||||
VP8_EFLAG_NO_REF_ARF |
|
||||
VP8_EFLAG_NO_REF_LAST,
|
||||
kRefreshFromGolden = VP8_EFLAG_NO_REF_LAST |
|
||||
VP8_EFLAG_NO_REF_ARF,
|
||||
kRefreshFromAltRef = VP8_EFLAG_NO_REF_LAST |
|
||||
VP8_EFLAG_NO_REF_GF
|
||||
};
|
||||
|
||||
class TestRPS : public ::testing::Test {
|
||||
protected:
|
||||
virtual void SetUp() {
|
||||
rps_.Init();
|
||||
// Initialize with sending a key frame and acknowledging it.
|
||||
rps_.EncodedKeyFrame(0);
|
||||
rps_.ReceivedRPSI(0);
|
||||
rps_.SetRtt(kRtt);
|
||||
}
|
||||
|
||||
ReferencePictureSelection rps_;
|
||||
};
|
||||
|
||||
TEST_F(TestRPS, TestPropagateReferenceFrames) {
|
||||
// Should propagate the alt-ref reference.
|
||||
uint32_t time = (4 * kMinUpdateInterval) / 3 + 1;
|
||||
EXPECT_EQ(rps_.EncodeFlags(1, false, 90 * time), kPropagateAltRef);
|
||||
rps_.ReceivedRPSI(1);
|
||||
time += (4 * (time + kMinUpdateInterval)) / 3 + 1;
|
||||
// Should propagate the golden reference.
|
||||
EXPECT_EQ(rps_.EncodeFlags(2, false, 90 * time), kPropagateGolden);
|
||||
rps_.ReceivedRPSI(2);
|
||||
// Should propagate the alt-ref reference.
|
||||
time = (4 * (time + kMinUpdateInterval)) / 3 + 1;
|
||||
EXPECT_EQ(rps_.EncodeFlags(3, false, 90 * time), kPropagateAltRef);
|
||||
rps_.ReceivedRPSI(3);
|
||||
// Shouldn't propagate any reference frames (except last), and the established
|
||||
// reference is alt-ref.
|
||||
time = time + kMinUpdateInterval;
|
||||
EXPECT_EQ(rps_.EncodeFlags(4, false, 90 * time), kNoPropagationAltRef);
|
||||
}
|
||||
|
||||
TEST_F(TestRPS, TestDecoderRefresh) {
|
||||
uint32_t time = kRtt + 1;
|
||||
// No more than one refresh per RTT.
|
||||
EXPECT_EQ(rps_.ReceivedSLI(90 * time), true);
|
||||
time += 5;
|
||||
EXPECT_EQ(rps_.ReceivedSLI(90 * time), false);
|
||||
time += kRtt - 4;
|
||||
EXPECT_EQ(rps_.ReceivedSLI(90 * time), true);
|
||||
// Enough time have elapsed since the previous reference propagation, we will
|
||||
// therefore get both a refresh from golden and a propagation of alt-ref.
|
||||
EXPECT_EQ(rps_.EncodeFlags(5, true, 90 * time), kRefreshFromGolden |
|
||||
kPropagateAltRef);
|
||||
rps_.ReceivedRPSI(5);
|
||||
time += kRtt + 1;
|
||||
// Enough time for a new refresh, but not enough time for a reference
|
||||
// propagation.
|
||||
EXPECT_EQ(rps_.ReceivedSLI(90 * time), true);
|
||||
EXPECT_EQ(rps_.EncodeFlags(6, true, 90 * time), kRefreshFromAltRef |
|
||||
kNoPropagationAltRef);
|
||||
}
|
||||
|
||||
TEST_F(TestRPS, TestWrap) {
|
||||
EXPECT_EQ(rps_.ReceivedSLI(0xffffffff), true);
|
||||
EXPECT_EQ(rps_.ReceivedSLI(1), false);
|
||||
EXPECT_EQ(rps_.ReceivedSLI(90 * 100), true);
|
||||
|
||||
EXPECT_EQ(rps_.EncodeFlags(7, false, 0xffffffff), kPropagateAltRef);
|
||||
EXPECT_EQ(rps_.EncodeFlags(8, false, 1), kNoPropagationGolden);
|
||||
EXPECT_EQ(rps_.EncodeFlags(10, false, 90 * 100), kPropagateAltRef);
|
||||
}
|
||||
249
webrtc/modules/video_coding/codecs/vp8/temporal_layers.cc
Normal file
249
webrtc/modules/video_coding/codecs/vp8/temporal_layers.cc
Normal file
@@ -0,0 +1,249 @@
|
||||
/* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "temporal_layers.h"
|
||||
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include <cassert>
|
||||
|
||||
#include "modules/interface/module_common_types.h"
|
||||
#include "modules/video_coding/codecs/interface/video_codec_interface.h"
|
||||
#include "modules/video_coding/codecs/vp8/include/vp8_common_types.h"
|
||||
|
||||
#include "vpx/vpx_encoder.h"
|
||||
#include "vpx/vp8cx.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
TemporalLayers::TemporalLayers(int numberOfTemporalLayers)
|
||||
: number_of_temporal_layers_(numberOfTemporalLayers),
|
||||
temporal_ids_length_(0),
|
||||
temporal_pattern_length_(0),
|
||||
tl0_pic_idx_(rand()),
|
||||
pattern_idx_(255),
|
||||
timestamp_(0) {
|
||||
assert(kMaxTemporalStreams >= numberOfTemporalLayers);
|
||||
memset(temporal_ids_, 0, sizeof(temporal_ids_));
|
||||
memset(temporal_pattern_, 0, sizeof(temporal_pattern_));
|
||||
}
|
||||
|
||||
bool TemporalLayers::ConfigureBitrates(int bitrateKbit,
|
||||
vpx_codec_enc_cfg_t* cfg) {
|
||||
switch (number_of_temporal_layers_) {
|
||||
case 0:
|
||||
case 1:
|
||||
// Do nothing.
|
||||
break;
|
||||
case 2:
|
||||
temporal_ids_length_ = 2;
|
||||
temporal_ids_[0] = 0;
|
||||
temporal_ids_[1] = 1;
|
||||
cfg->ts_number_layers = number_of_temporal_layers_;
|
||||
cfg->ts_periodicity = temporal_ids_length_;
|
||||
// Split stream 60% 40%.
|
||||
// Bitrate API for VP8 is the agregated bitrate for all lower layers.
|
||||
cfg->ts_target_bitrate[0] = bitrateKbit * kVp8LayerRateAlloction[1][0];
|
||||
cfg->ts_target_bitrate[1] = bitrateKbit;
|
||||
cfg->ts_rate_decimator[0] = 2;
|
||||
cfg->ts_rate_decimator[1] = 1;
|
||||
memcpy(cfg->ts_layer_id,
|
||||
temporal_ids_,
|
||||
sizeof(unsigned int) * temporal_ids_length_);
|
||||
temporal_pattern_length_ = 8;
|
||||
temporal_pattern_[0] = kTemporalUpdateLastAndGoldenRefAltRef;
|
||||
temporal_pattern_[1] = kTemporalUpdateGoldenWithoutDependencyRefAltRef;
|
||||
temporal_pattern_[2] = kTemporalUpdateLastRefAltRef;
|
||||
temporal_pattern_[3] = kTemporalUpdateGoldenRefAltRef;
|
||||
temporal_pattern_[4] = kTemporalUpdateLastRefAltRef;
|
||||
temporal_pattern_[5] = kTemporalUpdateGoldenRefAltRef;
|
||||
temporal_pattern_[6] = kTemporalUpdateLastRefAltRef;
|
||||
temporal_pattern_[7] = kTemporalUpdateNone;
|
||||
break;
|
||||
case 3:
|
||||
temporal_ids_length_ = 4;
|
||||
temporal_ids_[0] = 0;
|
||||
temporal_ids_[1] = 2;
|
||||
temporal_ids_[2] = 1;
|
||||
temporal_ids_[3] = 2;
|
||||
cfg->ts_number_layers = number_of_temporal_layers_;
|
||||
cfg->ts_periodicity = temporal_ids_length_;
|
||||
// Split stream 40% 20% 40%.
|
||||
// Bitrate API for VP8 is the agregated bitrate for all lower layers.
|
||||
cfg->ts_target_bitrate[0] = bitrateKbit * kVp8LayerRateAlloction[2][0];
|
||||
cfg->ts_target_bitrate[1] = bitrateKbit * kVp8LayerRateAlloction[2][1];
|
||||
cfg->ts_target_bitrate[2] = bitrateKbit;
|
||||
cfg->ts_rate_decimator[0] = 4;
|
||||
cfg->ts_rate_decimator[1] = 2;
|
||||
cfg->ts_rate_decimator[2] = 1;
|
||||
memcpy(cfg->ts_layer_id,
|
||||
temporal_ids_,
|
||||
sizeof(unsigned int) * temporal_ids_length_);
|
||||
temporal_pattern_length_ = 8;
|
||||
temporal_pattern_[0] = kTemporalUpdateLastAndGoldenRefAltRef;
|
||||
temporal_pattern_[1] = kTemporalUpdateNoneNoRefGoldenRefAltRef;
|
||||
temporal_pattern_[2] = kTemporalUpdateGoldenWithoutDependencyRefAltRef;
|
||||
temporal_pattern_[3] = kTemporalUpdateNone;
|
||||
temporal_pattern_[4] = kTemporalUpdateLastRefAltRef;
|
||||
temporal_pattern_[5] = kTemporalUpdateNone;
|
||||
temporal_pattern_[6] = kTemporalUpdateGoldenRefAltRef;
|
||||
temporal_pattern_[7] = kTemporalUpdateNone;
|
||||
break;
|
||||
case 4:
|
||||
temporal_ids_length_ = 8;
|
||||
temporal_ids_[0] = 0;
|
||||
temporal_ids_[1] = 3;
|
||||
temporal_ids_[2] = 2;
|
||||
temporal_ids_[3] = 3;
|
||||
temporal_ids_[4] = 1;
|
||||
temporal_ids_[5] = 3;
|
||||
temporal_ids_[6] = 2;
|
||||
temporal_ids_[7] = 3;
|
||||
// Split stream 25% 15% 20% 40%.
|
||||
// Bitrate API for VP8 is the agregated bitrate for all lower layers.
|
||||
cfg->ts_number_layers = 4;
|
||||
cfg->ts_periodicity = temporal_ids_length_;
|
||||
cfg->ts_target_bitrate[0] = bitrateKbit * kVp8LayerRateAlloction[3][0];
|
||||
cfg->ts_target_bitrate[1] = bitrateKbit * kVp8LayerRateAlloction[3][1];
|
||||
cfg->ts_target_bitrate[2] = bitrateKbit * kVp8LayerRateAlloction[3][2];
|
||||
cfg->ts_target_bitrate[3] = bitrateKbit;
|
||||
cfg->ts_rate_decimator[0] = 8;
|
||||
cfg->ts_rate_decimator[1] = 4;
|
||||
cfg->ts_rate_decimator[2] = 2;
|
||||
cfg->ts_rate_decimator[3] = 1;
|
||||
memcpy(cfg->ts_layer_id,
|
||||
temporal_ids_,
|
||||
sizeof(unsigned int) * temporal_ids_length_);
|
||||
temporal_pattern_length_ = 16;
|
||||
temporal_pattern_[0] = kTemporalUpdateLast;
|
||||
temporal_pattern_[1] = kTemporalUpdateNone;
|
||||
temporal_pattern_[2] = kTemporalUpdateAltrefWithoutDependency;
|
||||
temporal_pattern_[3] = kTemporalUpdateNone;
|
||||
temporal_pattern_[4] = kTemporalUpdateGoldenWithoutDependency;
|
||||
temporal_pattern_[5] = kTemporalUpdateNone;
|
||||
temporal_pattern_[6] = kTemporalUpdateAltref;
|
||||
temporal_pattern_[7] = kTemporalUpdateNone;
|
||||
temporal_pattern_[8] = kTemporalUpdateLast;
|
||||
temporal_pattern_[9] = kTemporalUpdateNone;
|
||||
temporal_pattern_[10] = kTemporalUpdateAltref;
|
||||
temporal_pattern_[11] = kTemporalUpdateNone;
|
||||
temporal_pattern_[12] = kTemporalUpdateGolden;
|
||||
temporal_pattern_[13] = kTemporalUpdateNone;
|
||||
temporal_pattern_[14] = kTemporalUpdateAltref;
|
||||
temporal_pattern_[15] = kTemporalUpdateNone;
|
||||
break;
|
||||
default:
|
||||
assert(false);
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
int TemporalLayers::EncodeFlags() {
|
||||
assert(number_of_temporal_layers_ > 1);
|
||||
assert(kMaxTemporalPattern >= temporal_pattern_length_);
|
||||
assert(0 < temporal_pattern_length_);
|
||||
|
||||
int flags = 0;
|
||||
int patternIdx = ++pattern_idx_ % temporal_pattern_length_;
|
||||
assert(kMaxTemporalPattern >= patternIdx);
|
||||
switch (temporal_pattern_[patternIdx]) {
|
||||
case kTemporalUpdateLast:
|
||||
flags |= VP8_EFLAG_NO_UPD_GF;
|
||||
flags |= VP8_EFLAG_NO_UPD_ARF;
|
||||
flags |= VP8_EFLAG_NO_REF_GF;
|
||||
flags |= VP8_EFLAG_NO_REF_ARF;
|
||||
break;
|
||||
case kTemporalUpdateGoldenWithoutDependency:
|
||||
flags |= VP8_EFLAG_NO_REF_GF;
|
||||
// Deliberately no break here.
|
||||
case kTemporalUpdateGolden:
|
||||
flags |= VP8_EFLAG_NO_REF_ARF;
|
||||
flags |= VP8_EFLAG_NO_UPD_ARF;
|
||||
flags |= VP8_EFLAG_NO_UPD_LAST;
|
||||
break;
|
||||
case kTemporalUpdateAltrefWithoutDependency:
|
||||
flags |= VP8_EFLAG_NO_REF_ARF;
|
||||
flags |= VP8_EFLAG_NO_REF_GF;
|
||||
// Deliberately no break here.
|
||||
case kTemporalUpdateAltref:
|
||||
flags |= VP8_EFLAG_NO_UPD_GF;
|
||||
flags |= VP8_EFLAG_NO_UPD_LAST;
|
||||
break;
|
||||
case kTemporalUpdateNoneNoRefAltref:
|
||||
flags |= VP8_EFLAG_NO_REF_ARF;
|
||||
// Deliberately no break here.
|
||||
case kTemporalUpdateNone:
|
||||
flags |= VP8_EFLAG_NO_UPD_GF;
|
||||
flags |= VP8_EFLAG_NO_UPD_ARF;
|
||||
flags |= VP8_EFLAG_NO_UPD_LAST;
|
||||
flags |= VP8_EFLAG_NO_UPD_ENTROPY;
|
||||
break;
|
||||
case kTemporalUpdateNoneNoRefGoldenRefAltRef:
|
||||
flags |= VP8_EFLAG_NO_REF_GF;
|
||||
flags |= VP8_EFLAG_NO_UPD_GF;
|
||||
flags |= VP8_EFLAG_NO_UPD_ARF;
|
||||
flags |= VP8_EFLAG_NO_UPD_LAST;
|
||||
flags |= VP8_EFLAG_NO_UPD_ENTROPY;
|
||||
break;
|
||||
case kTemporalUpdateGoldenWithoutDependencyRefAltRef:
|
||||
flags |= VP8_EFLAG_NO_REF_GF;
|
||||
flags |= VP8_EFLAG_NO_UPD_ARF;
|
||||
flags |= VP8_EFLAG_NO_UPD_LAST;
|
||||
break;
|
||||
case kTemporalUpdateLastRefAltRef:
|
||||
flags |= VP8_EFLAG_NO_UPD_GF;
|
||||
flags |= VP8_EFLAG_NO_UPD_ARF;
|
||||
flags |= VP8_EFLAG_NO_REF_GF;
|
||||
break;
|
||||
case kTemporalUpdateGoldenRefAltRef:
|
||||
flags |= VP8_EFLAG_NO_UPD_ARF;
|
||||
flags |= VP8_EFLAG_NO_UPD_LAST;
|
||||
break;
|
||||
case kTemporalUpdateLastAndGoldenRefAltRef:
|
||||
flags |= VP8_EFLAG_NO_UPD_ARF;
|
||||
flags |= VP8_EFLAG_NO_REF_GF;
|
||||
break;
|
||||
}
|
||||
return flags;
|
||||
}
|
||||
|
||||
void TemporalLayers::PopulateCodecSpecific(bool key_frame,
|
||||
CodecSpecificInfoVP8 *vp8_info,
|
||||
uint32_t timestamp) {
|
||||
assert(number_of_temporal_layers_ > 1);
|
||||
assert(0 < temporal_ids_length_);
|
||||
|
||||
if (key_frame) {
|
||||
// Keyframe is always temporal layer 0
|
||||
vp8_info->temporalIdx = 0;
|
||||
} else {
|
||||
vp8_info->temporalIdx = temporal_ids_[pattern_idx_ % temporal_ids_length_];
|
||||
}
|
||||
TemporalReferences temporal_reference =
|
||||
temporal_pattern_[pattern_idx_ % temporal_pattern_length_];
|
||||
|
||||
if (temporal_reference == kTemporalUpdateAltrefWithoutDependency ||
|
||||
temporal_reference == kTemporalUpdateGoldenWithoutDependency ||
|
||||
temporal_reference == kTemporalUpdateGoldenWithoutDependencyRefAltRef ||
|
||||
temporal_reference == kTemporalUpdateNoneNoRefGoldenRefAltRef ||
|
||||
(temporal_reference == kTemporalUpdateNone &&
|
||||
number_of_temporal_layers_ == 4)) {
|
||||
vp8_info->layerSync = true;
|
||||
} else {
|
||||
vp8_info->layerSync = false;
|
||||
}
|
||||
if (vp8_info->temporalIdx == 0 && timestamp != timestamp_) {
|
||||
timestamp_ = timestamp;
|
||||
tl0_pic_idx_++;
|
||||
}
|
||||
vp8_info->tl0PicIdx = tl0_pic_idx_;
|
||||
}
|
||||
} // namespace webrtc
|
||||
|
||||
81
webrtc/modules/video_coding/codecs/vp8/temporal_layers.h
Normal file
81
webrtc/modules/video_coding/codecs/vp8/temporal_layers.h
Normal file
@@ -0,0 +1,81 @@
|
||||
/* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
/*
|
||||
* This file defines classes for doing temporal layers with VP8.
|
||||
*/
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_TEMPORAL_LAYERS_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_TEMPORAL_LAYERS_H_
|
||||
|
||||
#include <typedefs.h>
|
||||
|
||||
// VPX forward declaration
|
||||
typedef struct vpx_codec_enc_cfg vpx_codec_enc_cfg_t;
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
struct CodecSpecificInfoVP8;
|
||||
|
||||
class TemporalLayers {
|
||||
public:
|
||||
TemporalLayers(int number_of_temporal_layers);
|
||||
|
||||
// Returns the recommended VP8 encode flags needed. May refresh the decoder
|
||||
// and/or update the reference buffers.
|
||||
int EncodeFlags();
|
||||
|
||||
bool ConfigureBitrates(int bitrate_kbit, vpx_codec_enc_cfg_t* cfg);
|
||||
|
||||
void PopulateCodecSpecific(bool key_frame, CodecSpecificInfoVP8 *vp8_info,
|
||||
uint32_t timestamp);
|
||||
|
||||
private:
|
||||
enum TemporalReferences {
|
||||
// First base layer frame for 3 temporal layers, which updates last and
|
||||
// golden with alt ref dependency.
|
||||
kTemporalUpdateLastAndGoldenRefAltRef = 11,
|
||||
// First enhancement layer with alt ref dependency.
|
||||
kTemporalUpdateGoldenRefAltRef = 10,
|
||||
// First enhancement layer with alt ref dependency.
|
||||
kTemporalUpdateGoldenWithoutDependencyRefAltRef = 9,
|
||||
// Base layer with alt ref dependency.
|
||||
kTemporalUpdateLastRefAltRef = 8,
|
||||
// Highest enhacement layer without dependency on golden with alt ref
|
||||
// dependency.
|
||||
kTemporalUpdateNoneNoRefGoldenRefAltRef = 7,
|
||||
// Second layer and last frame in cycle, for 2 layers.
|
||||
kTemporalUpdateNoneNoRefAltref = 6,
|
||||
// Highest enhancement layer.
|
||||
kTemporalUpdateNone = 5,
|
||||
// Second enhancement layer.
|
||||
kTemporalUpdateAltref = 4,
|
||||
// Second enhancement layer without dependency on previous frames in
|
||||
// the second enhancement layer.
|
||||
kTemporalUpdateAltrefWithoutDependency = 3,
|
||||
// First enhancement layer.
|
||||
kTemporalUpdateGolden = 2,
|
||||
// First enhancement layer without dependency on previous frames in
|
||||
// the first enhancement layer.
|
||||
kTemporalUpdateGoldenWithoutDependency = 1,
|
||||
// Base layer.
|
||||
kTemporalUpdateLast = 0,
|
||||
};
|
||||
enum { kMaxTemporalPattern = 16 };
|
||||
|
||||
int number_of_temporal_layers_;
|
||||
int temporal_ids_length_;
|
||||
int temporal_ids_[kMaxTemporalPattern];
|
||||
int temporal_pattern_length_;
|
||||
TemporalReferences temporal_pattern_[kMaxTemporalPattern];
|
||||
uint8_t tl0_pic_idx_;
|
||||
uint8_t pattern_idx_;
|
||||
uint32_t timestamp_;
|
||||
};
|
||||
} // namespace webrtc
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_TEMPORAL_LAYERS_H_
|
||||
|
||||
@@ -0,0 +1,213 @@
|
||||
/*
|
||||
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
|
||||
#include "gtest/gtest.h"
|
||||
#include "temporal_layers.h"
|
||||
#include "video_codec_interface.h"
|
||||
|
||||
#include "vpx/vpx_encoder.h"
|
||||
#include "vpx/vp8cx.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
enum {
|
||||
kTemporalUpdateLast = VP8_EFLAG_NO_UPD_GF |
|
||||
VP8_EFLAG_NO_UPD_ARF |
|
||||
VP8_EFLAG_NO_REF_GF |
|
||||
VP8_EFLAG_NO_REF_ARF,
|
||||
kTemporalUpdateGoldenWithoutDependency = VP8_EFLAG_NO_REF_GF |
|
||||
VP8_EFLAG_NO_REF_ARF |
|
||||
VP8_EFLAG_NO_UPD_ARF |
|
||||
VP8_EFLAG_NO_UPD_LAST,
|
||||
kTemporalUpdateGolden = VP8_EFLAG_NO_REF_ARF |
|
||||
VP8_EFLAG_NO_UPD_ARF |
|
||||
VP8_EFLAG_NO_UPD_LAST,
|
||||
kTemporalUpdateAltrefWithoutDependency = VP8_EFLAG_NO_REF_ARF |
|
||||
VP8_EFLAG_NO_REF_GF |
|
||||
VP8_EFLAG_NO_UPD_GF |
|
||||
VP8_EFLAG_NO_UPD_LAST,
|
||||
kTemporalUpdateAltref = VP8_EFLAG_NO_UPD_GF |
|
||||
VP8_EFLAG_NO_UPD_LAST,
|
||||
kTemporalUpdateNone = VP8_EFLAG_NO_UPD_GF |
|
||||
VP8_EFLAG_NO_UPD_ARF |
|
||||
VP8_EFLAG_NO_UPD_LAST |
|
||||
VP8_EFLAG_NO_UPD_ENTROPY,
|
||||
kTemporalUpdateNoneNoRefAltRef = VP8_EFLAG_NO_REF_ARF |
|
||||
VP8_EFLAG_NO_UPD_GF |
|
||||
VP8_EFLAG_NO_UPD_ARF |
|
||||
VP8_EFLAG_NO_UPD_LAST |
|
||||
VP8_EFLAG_NO_UPD_ENTROPY,
|
||||
kTemporalUpdateNoneNoRefGolden = VP8_EFLAG_NO_REF_GF |
|
||||
VP8_EFLAG_NO_UPD_GF |
|
||||
VP8_EFLAG_NO_UPD_ARF |
|
||||
VP8_EFLAG_NO_UPD_LAST |
|
||||
VP8_EFLAG_NO_UPD_ENTROPY,
|
||||
kTemporalUpdateGoldenWithoutDependencyRefAltRef = VP8_EFLAG_NO_REF_GF |
|
||||
VP8_EFLAG_NO_UPD_ARF |
|
||||
VP8_EFLAG_NO_UPD_LAST,
|
||||
kTemporalUpdateGoldenRefAltRef = VP8_EFLAG_NO_UPD_ARF |
|
||||
VP8_EFLAG_NO_UPD_LAST,
|
||||
kTemporalUpdateLastRefAltRef = VP8_EFLAG_NO_UPD_GF |
|
||||
VP8_EFLAG_NO_UPD_ARF |
|
||||
VP8_EFLAG_NO_REF_GF,
|
||||
kTemporalUpdateLastAndGoldenRefAltRef = VP8_EFLAG_NO_UPD_ARF |
|
||||
VP8_EFLAG_NO_REF_GF,
|
||||
};
|
||||
|
||||
// Two temporal layers: an 8-frame pattern alternating base/enhancement,
// verified over two full cycles.
TEST(TemporalLayersTest, 2Layers) {
  TemporalLayers tl(2);
  vpx_codec_enc_cfg_t cfg;
  CodecSpecificInfoVP8 vp8_info;
  tl.ConfigureBitrates(500, &cfg);

  int expected_flags[16] = {
      kTemporalUpdateLastAndGoldenRefAltRef,
      kTemporalUpdateGoldenWithoutDependencyRefAltRef,
      kTemporalUpdateLastRefAltRef,
      kTemporalUpdateGoldenRefAltRef,
      kTemporalUpdateLastRefAltRef,
      kTemporalUpdateGoldenRefAltRef,
      kTemporalUpdateLastRefAltRef,
      kTemporalUpdateNone,
      kTemporalUpdateLastAndGoldenRefAltRef,
      kTemporalUpdateGoldenWithoutDependencyRefAltRef,
      kTemporalUpdateLastRefAltRef,
      kTemporalUpdateGoldenRefAltRef,
      kTemporalUpdateLastRefAltRef,
      kTemporalUpdateGoldenRefAltRef,
      kTemporalUpdateLastRefAltRef,
      kTemporalUpdateNone,
  };
  int expected_temporal_idx[16] =
      {0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1};
  bool expected_layer_sync[16] =
      {false, true, false, false, false, false, false, false,
       false, true, false, false, false, false, false, false};

  for (int i = 0; i < 16; ++i) {
    EXPECT_EQ(expected_flags[i], tl.EncodeFlags());
    tl.PopulateCodecSpecific(false, &vp8_info, 0);
    EXPECT_EQ(expected_temporal_idx[i], vp8_info.temporalIdx);
    EXPECT_EQ(expected_layer_sync[i], vp8_info.layerSync);
  }
}
|
||||
|
||||
// Three temporal layers: 8-frame pattern, verified over two full cycles.
TEST(TemporalLayersTest, 3Layers) {
  TemporalLayers tl(3);
  vpx_codec_enc_cfg_t cfg;
  CodecSpecificInfoVP8 vp8_info;
  tl.ConfigureBitrates(500, &cfg);

  int expected_flags[16] = {
      kTemporalUpdateLastAndGoldenRefAltRef,
      kTemporalUpdateNoneNoRefGolden,
      kTemporalUpdateGoldenWithoutDependencyRefAltRef,
      kTemporalUpdateNone,
      kTemporalUpdateLastRefAltRef,
      kTemporalUpdateNone,
      kTemporalUpdateGoldenRefAltRef,
      kTemporalUpdateNone,
      kTemporalUpdateLastAndGoldenRefAltRef,
      kTemporalUpdateNoneNoRefGolden,
      kTemporalUpdateGoldenWithoutDependencyRefAltRef,
      kTemporalUpdateNone,
      kTemporalUpdateLastRefAltRef,
      kTemporalUpdateNone,
      kTemporalUpdateGoldenRefAltRef,
      kTemporalUpdateNone,
  };
  int expected_temporal_idx[16] =
      {0, 2, 1, 2, 0, 2, 1, 2, 0, 2, 1, 2, 0, 2, 1, 2};
  bool expected_layer_sync[16] =
      {false, true, true, false, false, false, false, false,
       false, true, true, false, false, false, false, false};

  for (int i = 0; i < 16; ++i) {
    EXPECT_EQ(expected_flags[i], tl.EncodeFlags());
    tl.PopulateCodecSpecific(false, &vp8_info, 0);
    EXPECT_EQ(expected_temporal_idx[i], vp8_info.temporalIdx);
    EXPECT_EQ(expected_layer_sync[i], vp8_info.layerSync);
  }
}
|
||||
|
||||
// Four temporal layers: the full 16-frame pattern (one complete cycle).
TEST(TemporalLayersTest, 4Layers) {
  TemporalLayers tl(4);
  vpx_codec_enc_cfg_t cfg;
  CodecSpecificInfoVP8 vp8_info;
  tl.ConfigureBitrates(500, &cfg);

  int expected_flags[16] = {
      kTemporalUpdateLast,
      kTemporalUpdateNone,
      kTemporalUpdateAltrefWithoutDependency,
      kTemporalUpdateNone,
      kTemporalUpdateGoldenWithoutDependency,
      kTemporalUpdateNone,
      kTemporalUpdateAltref,
      kTemporalUpdateNone,
      kTemporalUpdateLast,
      kTemporalUpdateNone,
      kTemporalUpdateAltref,
      kTemporalUpdateNone,
      kTemporalUpdateGolden,
      kTemporalUpdateNone,
      kTemporalUpdateAltref,
      kTemporalUpdateNone,
  };
  int expected_temporal_idx[16] =
      {0, 3, 2, 3, 1, 3, 2, 3, 0, 3, 2, 3, 1, 3, 2, 3};
  bool expected_layer_sync[16] =
      {false, true, true, true, true, true, false, true,
       false, true, false, true, false, true, false, true};

  for (int i = 0; i < 16; ++i) {
    EXPECT_EQ(expected_flags[i], tl.EncodeFlags());
    tl.PopulateCodecSpecific(false, &vp8_info, 0);
    EXPECT_EQ(expected_temporal_idx[i], vp8_info.temporalIdx);
    EXPECT_EQ(expected_layer_sync[i], vp8_info.layerSync);
  }
}
|
||||
|
||||
// Key frames must always be reported as temporal layer 0, regardless of
// where the pattern index currently points.
TEST(TemporalLayersTest, KeyFrame) {
  TemporalLayers tl(3);
  vpx_codec_enc_cfg_t cfg;
  CodecSpecificInfoVP8 vp8_info;
  tl.ConfigureBitrates(500, &cfg);

  int expected_flags[8] = {
      kTemporalUpdateLastAndGoldenRefAltRef,
      kTemporalUpdateNoneNoRefGolden,
      kTemporalUpdateGoldenWithoutDependencyRefAltRef,
      kTemporalUpdateNone,
      kTemporalUpdateLastRefAltRef,
      kTemporalUpdateNone,
      kTemporalUpdateGoldenRefAltRef,
      kTemporalUpdateNone,
  };
  int expected_temporal_idx[8] = {0, 0, 0, 0, 0, 0, 0, 2};
  bool expected_layer_sync[8] =
      {false, true, true, false, false, false, false, false};

  // The first seven frames are flagged as key frames: temporalIdx stays 0.
  for (int i = 0; i < 7; ++i) {
    EXPECT_EQ(expected_flags[i], tl.EncodeFlags());
    tl.PopulateCodecSpecific(true, &vp8_info, 0);
    EXPECT_EQ(expected_temporal_idx[i], vp8_info.temporalIdx);
    EXPECT_EQ(expected_layer_sync[i], vp8_info.layerSync);
  }
  // The eighth frame is a delta frame and follows the regular pattern.
  EXPECT_EQ(expected_flags[7], tl.EncodeFlags());
  tl.PopulateCodecSpecific(false, &vp8_info, 0);
  EXPECT_EQ(expected_temporal_idx[7], vp8_info.temporalIdx);
  EXPECT_EQ(expected_layer_sync[7], vp8_info.layerSync);
}
|
||||
} // namespace webrtc
|
||||
|
||||
39
webrtc/modules/video_coding/codecs/vp8/test/benchmark.cc
Normal file
39
webrtc/modules/video_coding/codecs/vp8/test/benchmark.cc
Normal file
@@ -0,0 +1,39 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "benchmark.h"
|
||||
#include "testsupport/fileutils.h"
|
||||
#include "vp8.h"
|
||||
|
||||
using namespace webrtc;
|
||||
|
||||
VP8Benchmark::VP8Benchmark()
|
||||
: Benchmark("VP8Benchmark", "VP8 benchmark over a range of test cases",
|
||||
webrtc::test::OutputPath() + "VP8Benchmark.txt", "VP8") {
|
||||
}
|
||||
|
||||
VP8Benchmark::VP8Benchmark(std::string name, std::string description)
|
||||
: Benchmark(name, description,
|
||||
webrtc::test::OutputPath() + "VP8Benchmark.txt",
|
||||
"VP8") {
|
||||
}
|
||||
|
||||
VP8Benchmark::VP8Benchmark(std::string name, std::string description,
|
||||
std::string resultsFileName)
|
||||
: Benchmark(name, description, resultsFileName, "VP8") {
|
||||
}
|
||||
|
||||
VideoEncoder* VP8Benchmark::GetNewEncoder() {
|
||||
return VP8Encoder::Create();
|
||||
}
|
||||
|
||||
VideoDecoder* VP8Benchmark::GetNewDecoder() {
|
||||
return VP8Decoder::Create();
|
||||
}
|
||||
28
webrtc/modules/video_coding/codecs/vp8/test/benchmark.h
Normal file
28
webrtc/modules/video_coding/codecs/vp8/test/benchmark.h
Normal file
@@ -0,0 +1,28 @@
|
||||
/*
|
||||
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_BENCHMARK_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_BENCHMARK_H_
|
||||
|
||||
#include "modules/video_coding/codecs/test_framework/benchmark.h"
|
||||
|
||||
class VP8Benchmark : public Benchmark
|
||||
{
|
||||
public:
|
||||
VP8Benchmark();
|
||||
VP8Benchmark(std::string name, std::string description);
|
||||
VP8Benchmark(std::string name, std::string description, std::string resultsFileName);
|
||||
|
||||
protected:
|
||||
virtual webrtc::VideoEncoder* GetNewEncoder();
|
||||
virtual webrtc::VideoDecoder* GetNewDecoder();
|
||||
};
|
||||
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_BENCHMARK_H_
|
||||
221
webrtc/modules/video_coding/codecs/vp8/test/dual_decoder_test.cc
Normal file
221
webrtc/modules/video_coding/codecs/vp8/test/dual_decoder_test.cc
Normal file
@@ -0,0 +1,221 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "dual_decoder_test.h"
|
||||
|
||||
#include <assert.h>
|
||||
#include <string.h> // memcmp
|
||||
#include <time.h>
|
||||
|
||||
#include "testsupport/fileutils.h"
|
||||
|
||||
// Runs the normal async test while feeding every encoded frame to a second,
// cloned decoder and checking that both produce bit-exact output.
// Fix: the float-bitrate constructor assigned _decoder2 in the body while
// the default constructor used the member-init-list; both now use the
// init-list (same behavior, consistent style).
VP8DualDecoderTest::VP8DualDecoderTest(float bitRate)
    : VP8NormalAsyncTest(bitRate),
      _decoder2(NULL) {
}

VP8DualDecoderTest::VP8DualDecoderTest()
    : VP8NormalAsyncTest("VP8 Dual Decoder Test", "Tests VP8 dual decoder", 1),
      _decoder2(NULL) {
}

VP8DualDecoderTest::~VP8DualDecoderTest() {
  // The clone decoder (if it was created during Perform()) is owned here.
  if (_decoder2) {
    _decoder2->Release();
    delete _decoder2;
  }
  _decodedVideoBuffer2.Free();
}
|
||||
|
||||
void
|
||||
VP8DualDecoderTest::Perform()
|
||||
{
|
||||
_inname = webrtc::test::ProjectRootPath() + "resources/foreman_cif.yuv";
|
||||
CodecSettings(352, 288, 30, _bitRate);
|
||||
Setup();
|
||||
_inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
|
||||
_decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
|
||||
_decodedVideoBuffer2.VerifyAndAllocate(_lengthSourceFrame);
|
||||
if(_encoder->InitEncode(&_inst, 4, 1460) < 0)
|
||||
{
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
_decoder->InitDecode(&_inst,1);
|
||||
|
||||
FrameQueue frameQueue;
|
||||
VideoEncodeCompleteCallback encCallback(_encodedFile, &frameQueue, *this);
|
||||
DualDecoderCompleteCallback decCallback(&_decodedVideoBuffer);
|
||||
DualDecoderCompleteCallback decCallback2(&_decodedVideoBuffer2);
|
||||
_encoder->RegisterEncodeCompleteCallback(&encCallback);
|
||||
_decoder->RegisterDecodeCompleteCallback(&decCallback);
|
||||
if (SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK)
|
||||
{
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
_totalEncodeTime = _totalDecodeTime = 0;
|
||||
_totalEncodePipeTime = _totalDecodePipeTime = 0;
|
||||
bool complete = false;
|
||||
_framecnt = 0;
|
||||
_encFrameCnt = 0;
|
||||
_decFrameCnt = 0;
|
||||
_sumEncBytes = 0;
|
||||
_lengthEncFrame = 0;
|
||||
double starttime = clock()/(double)CLOCKS_PER_SEC;
|
||||
while (!complete)
|
||||
{
|
||||
if (_encFrameCnt == 10)
|
||||
{
|
||||
// initialize second decoder and copy state
|
||||
_decoder2 = static_cast<webrtc::VP8Decoder *>(_decoder->Copy());
|
||||
assert(_decoder2 != NULL);
|
||||
_decoder2->RegisterDecodeCompleteCallback(&decCallback2);
|
||||
}
|
||||
CodecSpecific_InitBitrate();
|
||||
complete = Encode();
|
||||
if (!frameQueue.Empty() || complete)
|
||||
{
|
||||
while (!frameQueue.Empty())
|
||||
{
|
||||
_frameToDecode =
|
||||
static_cast<FrameQueueTuple *>(frameQueue.PopFrame());
|
||||
int lost = DoPacketLoss();
|
||||
if (lost == 2)
|
||||
{
|
||||
// Lost the whole frame, continue
|
||||
_missingFrames = true;
|
||||
delete _frameToDecode;
|
||||
_frameToDecode = NULL;
|
||||
continue;
|
||||
}
|
||||
int ret = Decode(lost);
|
||||
delete _frameToDecode;
|
||||
_frameToDecode = NULL;
|
||||
if (ret < 0)
|
||||
{
|
||||
fprintf(stderr,"\n\nError in decoder: %d\n\n", ret);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
else if (ret == 0)
|
||||
{
|
||||
_framecnt++;
|
||||
}
|
||||
else
|
||||
{
|
||||
fprintf(stderr,
|
||||
"\n\nPositive return value from decode!\n\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
double endtime = clock()/(double)CLOCKS_PER_SEC;
|
||||
double totalExecutionTime = endtime - starttime;
|
||||
printf("Total execution time: %.1f s\n", totalExecutionTime);
|
||||
_sumEncBytes = encCallback.EncodedBytes();
|
||||
double actualBitRate = ActualBitRate(_encFrameCnt) / 1000.0;
|
||||
double avgEncTime = _totalEncodeTime / _encFrameCnt;
|
||||
double avgDecTime = _totalDecodeTime / _decFrameCnt;
|
||||
printf("Actual bitrate: %f kbps\n", actualBitRate);
|
||||
printf("Average encode time: %.1f ms\n", 1000 * avgEncTime);
|
||||
printf("Average decode time: %.1f ms\n", 1000 * avgDecTime);
|
||||
printf("Average encode pipeline time: %.1f ms\n",
|
||||
1000 * _totalEncodePipeTime / _encFrameCnt);
|
||||
printf("Average decode pipeline time: %.1f ms\n",
|
||||
1000 * _totalDecodePipeTime / _decFrameCnt);
|
||||
printf("Number of encoded frames: %u\n", _encFrameCnt);
|
||||
printf("Number of decoded frames: %u\n", _decFrameCnt);
|
||||
(*_log) << "Actual bitrate: " << actualBitRate << " kbps\tTarget: " <<
|
||||
_bitRate << " kbps" << std::endl;
|
||||
(*_log) << "Average encode time: " << avgEncTime << " s" << std::endl;
|
||||
(*_log) << "Average decode time: " << avgDecTime << " s" << std::endl;
|
||||
_encoder->Release();
|
||||
_decoder->Release();
|
||||
Teardown();
|
||||
}
|
||||
|
||||
|
||||
int
|
||||
VP8DualDecoderTest::Decode(int lossValue)
|
||||
{
|
||||
_sumEncBytes += _frameToDecode->_frame->Length();
|
||||
webrtc::EncodedImage encodedImage;
|
||||
VideoEncodedBufferToEncodedImage(*(_frameToDecode->_frame), encodedImage);
|
||||
encodedImage._completeFrame = !lossValue;
|
||||
_decodeCompleteTime = 0;
|
||||
_decodeTimes[encodedImage._timeStamp] = clock()/(double)CLOCKS_PER_SEC;
|
||||
int ret = _decoder->Decode(encodedImage, _missingFrames, NULL,
|
||||
_frameToDecode->_codecSpecificInfo);
|
||||
// second decoder
|
||||
if (_decoder2)
|
||||
{
|
||||
int ret2 = _decoder2->Decode(encodedImage, _missingFrames, NULL,
|
||||
_frameToDecode->_codecSpecificInfo,
|
||||
0 /* dummy */);
|
||||
|
||||
// check return values
|
||||
if (ret < 0 || ret2 < 0 || ret2 != ret)
|
||||
{
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
// compare decoded images
|
||||
if (!CheckIfBitExact(_decodedVideoBuffer.Buffer(),
|
||||
_decodedVideoBuffer.Length(),
|
||||
_decodedVideoBuffer2.Buffer(), _decodedVideoBuffer.Length()))
|
||||
{
|
||||
fprintf(stderr,"\n\nClone output different from master.\n\n");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
_missingFrames = false;
|
||||
return ret;
|
||||
}
|
||||
|
||||
|
||||
bool
|
||||
VP8DualDecoderTest::CheckIfBitExact(const void* ptrA, unsigned int aLengthBytes,
|
||||
const void* ptrB, unsigned int bLengthBytes)
|
||||
{
|
||||
if (aLengthBytes != bLengthBytes)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
return memcmp(ptrA, ptrB, aLengthBytes) == 0;
|
||||
}
|
||||
|
||||
WebRtc_Word32 DualDecoderCompleteCallback::Decoded(webrtc::VideoFrame& image)
|
||||
{
|
||||
_decodedVideoBuffer->VerifyAndAllocate(image.Length());
|
||||
_decodedVideoBuffer->CopyFrame(image.Length(), image.Buffer());
|
||||
_decodedVideoBuffer->SetWidth(image.Width());
|
||||
_decodedVideoBuffer->SetHeight(image.Height());
|
||||
_decodedVideoBuffer->SetTimeStamp(image.TimeStamp());
|
||||
_decodeComplete = true;
|
||||
return 0;
|
||||
}
|
||||
|
||||
bool DualDecoderCompleteCallback::DecodeComplete()
|
||||
{
|
||||
if (_decodeComplete)
|
||||
{
|
||||
_decodeComplete = false;
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
@@ -0,0 +1,52 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_DUAL_DECODER_TEST_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_DUAL_DECODER_TEST_H_
|
||||
|
||||
#include "vp8.h"
|
||||
#include "normal_async_test.h"
|
||||
|
||||
class DualDecoderCompleteCallback;
|
||||
|
||||
class VP8DualDecoderTest : public VP8NormalAsyncTest
|
||||
{
|
||||
public:
|
||||
VP8DualDecoderTest(float bitRate);
|
||||
VP8DualDecoderTest();
|
||||
virtual ~VP8DualDecoderTest();
|
||||
virtual void Perform();
|
||||
protected:
|
||||
VP8DualDecoderTest(std::string name, std::string description,
|
||||
unsigned int testNo)
|
||||
: VP8NormalAsyncTest(name, description, testNo) {}
|
||||
virtual int Decode(int lossValue = 0);
|
||||
|
||||
webrtc::VP8Decoder* _decoder2;
|
||||
webrtc::VideoFrame _decodedVideoBuffer2;
|
||||
static bool CheckIfBitExact(const void *ptrA, unsigned int aLengthBytes,
|
||||
const void *ptrB, unsigned int bLengthBytes);
|
||||
private:
|
||||
};
|
||||
|
||||
class DualDecoderCompleteCallback : public webrtc::DecodedImageCallback
|
||||
{
|
||||
public:
|
||||
DualDecoderCompleteCallback(webrtc::VideoFrame* buffer)
|
||||
: _decodedVideoBuffer(buffer), _decodeComplete(false) {}
|
||||
WebRtc_Word32 Decoded(webrtc::VideoFrame& decodedImage);
|
||||
bool DecodeComplete();
|
||||
private:
|
||||
webrtc::VideoFrame* _decodedVideoBuffer;
|
||||
bool _decodeComplete;
|
||||
};
|
||||
|
||||
|
||||
#endif
|
||||
@@ -0,0 +1,83 @@
|
||||
/*
|
||||
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "normal_async_test.h"
|
||||
|
||||
using namespace webrtc;
|
||||
|
||||
VP8NormalAsyncTest::VP8NormalAsyncTest(WebRtc_UWord32 bitRate) :
|
||||
NormalAsyncTest("VP8 Normal Test 1", "Tests VP8 normal execution", bitRate, 1),
|
||||
_hasReceivedRPSI(false)
|
||||
{
|
||||
}
|
||||
|
||||
VP8NormalAsyncTest::VP8NormalAsyncTest(WebRtc_UWord32 bitRate, unsigned int testNo):
|
||||
NormalAsyncTest("VP8 Normal Test 1", "Tests VP8 normal execution", bitRate, testNo),
|
||||
_hasReceivedRPSI(false)
|
||||
{
|
||||
}
|
||||
|
||||
void
|
||||
VP8NormalAsyncTest::CodecSettings(int width, int height, WebRtc_UWord32 frameRate /*=30*/, WebRtc_UWord32 bitRate /*=0*/)
|
||||
{
|
||||
if (bitRate > 0)
|
||||
{
|
||||
_bitRate = bitRate;
|
||||
|
||||
}else if (_bitRate == 0)
|
||||
{
|
||||
_bitRate = 600;
|
||||
}
|
||||
_inst.codecType = kVideoCodecVP8;
|
||||
_inst.codecSpecific.VP8.feedbackModeOn = true;
|
||||
_inst.codecSpecific.VP8.pictureLossIndicationOn = true;
|
||||
_inst.codecSpecific.VP8.complexity = kComplexityNormal;
|
||||
_inst.maxFramerate = (unsigned char)frameRate;
|
||||
_inst.startBitrate = _bitRate;
|
||||
_inst.maxBitrate = 8000;
|
||||
_inst.width = width;
|
||||
_inst.height = height;
|
||||
}
|
||||
|
||||
void
|
||||
VP8NormalAsyncTest::CodecSpecific_InitBitrate()
|
||||
{
|
||||
if (_bitRate == 0)
|
||||
{
|
||||
_encoder->SetRates(600, _inst.maxFramerate);
|
||||
}else
|
||||
{
|
||||
_encoder->SetRates(_bitRate, _inst.maxFramerate);
|
||||
}
|
||||
}
|
||||
|
||||
WebRtc_Word32
|
||||
VP8NormalAsyncTest::ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId)
|
||||
{
|
||||
_pictureIdRPSI = pictureId;
|
||||
_hasReceivedRPSI = true;
|
||||
return 0;
|
||||
}
|
||||
|
||||
CodecSpecificInfo*
|
||||
VP8NormalAsyncTest::CreateEncoderSpecificInfo() const
|
||||
{
|
||||
CodecSpecificInfo* vp8CodecSpecificInfo = new CodecSpecificInfo();
|
||||
vp8CodecSpecificInfo->codecType = kVideoCodecVP8;
|
||||
vp8CodecSpecificInfo->codecSpecific.VP8.hasReceivedRPSI = _hasReceivedRPSI;
|
||||
vp8CodecSpecificInfo->codecSpecific.VP8.pictureIdRPSI = _pictureIdRPSI;
|
||||
vp8CodecSpecificInfo->codecSpecific.VP8.hasReceivedSLI = _hasReceivedSLI;
|
||||
vp8CodecSpecificInfo->codecSpecific.VP8.pictureIdSLI = _pictureIdSLI;
|
||||
|
||||
_hasReceivedSLI = false;
|
||||
_hasReceivedRPSI = false;
|
||||
|
||||
return vp8CodecSpecificInfo;
|
||||
}
|
||||
@@ -0,0 +1,33 @@
|
||||
/*
|
||||
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_NORMAL_ASYNC_TEST_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_NORMAL_ASYNC_TEST_H_
|
||||
|
||||
#include "modules/video_coding/codecs/test_framework/normal_async_test.h"
|
||||
|
||||
class VP8NormalAsyncTest : public NormalAsyncTest
|
||||
{
|
||||
public:
|
||||
VP8NormalAsyncTest(WebRtc_UWord32 bitRate);
|
||||
VP8NormalAsyncTest(WebRtc_UWord32 bitRate, unsigned int testNo);
|
||||
VP8NormalAsyncTest() : NormalAsyncTest("VP8 Normal Test 1", "Tests VP8 normal execution", 1) {}
|
||||
protected:
|
||||
VP8NormalAsyncTest(std::string name, std::string description, unsigned int testNo) : NormalAsyncTest(name, description, testNo) {}
|
||||
virtual void CodecSpecific_InitBitrate();
|
||||
virtual void CodecSettings(int width, int height, WebRtc_UWord32 frameRate=30, WebRtc_UWord32 bitRate=0);
|
||||
virtual webrtc::CodecSpecificInfo* CreateEncoderSpecificInfo() const;
|
||||
virtual WebRtc_Word32 ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId);
|
||||
private:
|
||||
mutable bool _hasReceivedRPSI;
|
||||
WebRtc_UWord64 _pictureIdRPSI;
|
||||
};
|
||||
|
||||
#endif
|
||||
@@ -0,0 +1,74 @@
|
||||
/*
|
||||
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "packet_loss_test.h"
|
||||
#include <cassert>
|
||||
|
||||
VP8PacketLossTest::VP8PacketLossTest()
|
||||
:
|
||||
PacketLossTest("VP8PacketLossTest", "Encode, remove lost packets, decode")
|
||||
{
|
||||
}
|
||||
|
||||
VP8PacketLossTest::VP8PacketLossTest(std::string name, std::string description)
|
||||
:
|
||||
PacketLossTest(name, description)
|
||||
{
|
||||
}
|
||||
|
||||
VP8PacketLossTest::VP8PacketLossTest(double lossRate,
|
||||
bool useNack,
|
||||
int rttFrames)
|
||||
:
|
||||
PacketLossTest("VP8PacketLossTest", "Encode, remove lost packets, decode",
|
||||
lossRate, useNack, rttFrames)
|
||||
{
|
||||
}
|
||||
|
||||
int VP8PacketLossTest::ByteLoss(int size, unsigned char* /* pkg */, int bytesToLose)
|
||||
{
|
||||
int retLength = size - bytesToLose;
|
||||
if (retLength < 4)
|
||||
{
|
||||
retLength = 4;
|
||||
}
|
||||
return retLength;
|
||||
}
|
||||
|
||||
WebRtc_Word32
|
||||
VP8PacketLossTest::ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId)
|
||||
{
|
||||
_pictureIdRPSI = pictureId;
|
||||
_hasReceivedRPSI = true;
|
||||
return 0;
|
||||
}
|
||||
|
||||
webrtc::CodecSpecificInfo*
|
||||
VP8PacketLossTest::CreateEncoderSpecificInfo() const
|
||||
{
|
||||
webrtc::CodecSpecificInfo* vp8CodecSpecificInfo =
|
||||
new webrtc::CodecSpecificInfo();
|
||||
vp8CodecSpecificInfo->codecType = webrtc::kVideoCodecVP8;
|
||||
vp8CodecSpecificInfo->codecSpecific.VP8.hasReceivedRPSI = _hasReceivedRPSI;
|
||||
vp8CodecSpecificInfo->codecSpecific.VP8.pictureIdRPSI = _pictureIdRPSI;
|
||||
vp8CodecSpecificInfo->codecSpecific.VP8.hasReceivedSLI = _hasReceivedSLI;
|
||||
vp8CodecSpecificInfo->codecSpecific.VP8.pictureIdSLI = _pictureIdSLI;
|
||||
|
||||
_hasReceivedSLI = false;
|
||||
_hasReceivedRPSI = false;
|
||||
|
||||
return vp8CodecSpecificInfo;
|
||||
}
|
||||
|
||||
bool VP8PacketLossTest::PacketLoss(double lossRate, int numLosses) {
|
||||
if (numLosses)
|
||||
return true;
|
||||
return RandUniform() < lossRate;
|
||||
}
|
||||
@@ -0,0 +1,34 @@
|
||||
/*
|
||||
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_PACKET_LOSS_TEST_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_PACKET_LOSS_TEST_H_
|
||||
|
||||
#include "modules/video_coding/codecs/test_framework/packet_loss_test.h"
|
||||
|
||||
class VP8PacketLossTest : public PacketLossTest
|
||||
{
|
||||
public:
|
||||
VP8PacketLossTest();
|
||||
VP8PacketLossTest(double lossRate, bool useNack, int rttFrames);
|
||||
|
||||
protected:
|
||||
VP8PacketLossTest(std::string name, std::string description);
|
||||
virtual int ByteLoss(int size, unsigned char *pkg, int bytesToLose);
|
||||
WebRtc_Word32 ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId);
|
||||
// |lossRate| is the probability of packet loss between 0 and 1.
|
||||
// |numLosses| is the number of packets already lost in the current frame.
|
||||
virtual bool PacketLoss(double lossRate, int numLosses);
|
||||
|
||||
webrtc::CodecSpecificInfo* CreateEncoderSpecificInfo() const;
|
||||
|
||||
};
|
||||
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_PACKET_LOSS_TEST_H_
|
||||
304
webrtc/modules/video_coding/codecs/vp8/test/rps_test.cc
Normal file
304
webrtc/modules/video_coding/codecs/vp8/test/rps_test.cc
Normal file
@@ -0,0 +1,304 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "rps_test.h"
|
||||
|
||||
#include <assert.h>
|
||||
#include <string.h> // memcmp
|
||||
#include <time.h>
|
||||
|
||||
#include "vp8.h"
|
||||
|
||||
VP8RpsTest::VP8RpsTest(float bitRate)
|
||||
: VP8NormalAsyncTest(bitRate),
|
||||
decoder2_(webrtc::VP8Decoder::Create()),
|
||||
sli_(false) {
|
||||
}
|
||||
|
||||
VP8RpsTest::VP8RpsTest()
|
||||
: VP8NormalAsyncTest("VP8 Reference Picture Selection Test",
|
||||
"VP8 Reference Picture Selection Test", 1),
|
||||
decoder2_(webrtc::VP8Decoder::Create()),
|
||||
sli_(false) {
|
||||
}
|
||||
|
||||
VP8RpsTest::~VP8RpsTest() {
|
||||
if (decoder2_) {
|
||||
decoder2_->Release();
|
||||
delete decoder2_;
|
||||
}
|
||||
decoded_frame2_.Free();
|
||||
}
|
||||
|
||||
void VP8RpsTest::Perform() {
|
||||
_inname = "test/testFiles/foreman_cif.yuv";
|
||||
CodecSettings(352, 288, 30, _bitRate);
|
||||
Setup();
|
||||
_inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
|
||||
_decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
|
||||
decoded_frame2_.VerifyAndAllocate(_lengthSourceFrame);
|
||||
|
||||
// Enable RPS functionality
|
||||
_inst.codecSpecific.VP8.pictureLossIndicationOn = true;
|
||||
_inst.codecSpecific.VP8.feedbackModeOn = true;
|
||||
|
||||
if(_encoder->InitEncode(&_inst, 4, 1460) < 0)
|
||||
exit(EXIT_FAILURE);
|
||||
|
||||
_decoder->InitDecode(&_inst,1);
|
||||
decoder2_->InitDecode(&_inst,1);
|
||||
|
||||
FrameQueue frameQueue;
|
||||
VideoEncodeCompleteCallback encCallback(_encodedFile, &frameQueue, *this);
|
||||
RpsDecodeCompleteCallback decCallback(&_decodedVideoBuffer);
|
||||
RpsDecodeCompleteCallback decCallback2(&decoded_frame2_);
|
||||
_encoder->RegisterEncodeCompleteCallback(&encCallback);
|
||||
_decoder->RegisterDecodeCompleteCallback(&decCallback);
|
||||
decoder2_->RegisterDecodeCompleteCallback(&decCallback2);
|
||||
|
||||
if (SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK)
|
||||
exit(EXIT_FAILURE);
|
||||
|
||||
_totalEncodeTime = _totalDecodeTime = 0;
|
||||
_totalEncodePipeTime = _totalDecodePipeTime = 0;
|
||||
bool complete = false;
|
||||
_framecnt = 0;
|
||||
_encFrameCnt = 0;
|
||||
_decFrameCnt = 0;
|
||||
_sumEncBytes = 0;
|
||||
_lengthEncFrame = 0;
|
||||
double starttime = clock()/(double)CLOCKS_PER_SEC;
|
||||
while (!complete) {
|
||||
CodecSpecific_InitBitrate();
|
||||
complete = EncodeRps(&decCallback2);
|
||||
if (!frameQueue.Empty() || complete) {
|
||||
while (!frameQueue.Empty()) {
|
||||
_frameToDecode =
|
||||
static_cast<FrameQueueTuple *>(frameQueue.PopFrame());
|
||||
int lost = DoPacketLoss();
|
||||
if (lost == 2) {
|
||||
// Lost the whole frame, continue
|
||||
_missingFrames = true;
|
||||
delete _frameToDecode;
|
||||
_frameToDecode = NULL;
|
||||
continue;
|
||||
}
|
||||
int ret = Decode(lost);
|
||||
delete _frameToDecode;
|
||||
_frameToDecode = NULL;
|
||||
if (ret < 0) {
|
||||
fprintf(stderr,"\n\nError in decoder: %d\n\n", ret);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
else if (ret == 0) {
|
||||
_framecnt++;
|
||||
}
|
||||
else {
|
||||
fprintf(stderr,
|
||||
"\n\nPositive return value from decode!\n\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
double endtime = clock()/(double)CLOCKS_PER_SEC;
|
||||
double totalExecutionTime = endtime - starttime;
|
||||
printf("Total execution time: %.1f s\n", totalExecutionTime);
|
||||
_sumEncBytes = encCallback.EncodedBytes();
|
||||
double actualBitRate = ActualBitRate(_encFrameCnt) / 1000.0;
|
||||
double avgEncTime = _totalEncodeTime / _encFrameCnt;
|
||||
double avgDecTime = _totalDecodeTime / _decFrameCnt;
|
||||
printf("Actual bitrate: %f kbps\n", actualBitRate);
|
||||
printf("Average encode time: %.1f ms\n", 1000 * avgEncTime);
|
||||
printf("Average decode time: %.1f ms\n", 1000 * avgDecTime);
|
||||
printf("Average encode pipeline time: %.1f ms\n",
|
||||
1000 * _totalEncodePipeTime / _encFrameCnt);
|
||||
printf("Average decode pipeline time: %.1f ms\n",
|
||||
1000 * _totalDecodePipeTime / _decFrameCnt);
|
||||
printf("Number of encoded frames: %u\n", _encFrameCnt);
|
||||
printf("Number of decoded frames: %u\n", _decFrameCnt);
|
||||
(*_log) << "Actual bitrate: " << actualBitRate << " kbps\tTarget: " <<
|
||||
_bitRate << " kbps" << std::endl;
|
||||
(*_log) << "Average encode time: " << avgEncTime << " s" << std::endl;
|
||||
(*_log) << "Average decode time: " << avgDecTime << " s" << std::endl;
|
||||
_encoder->Release();
|
||||
_decoder->Release();
|
||||
Teardown();
|
||||
}
|
||||
|
||||
bool VP8RpsTest::EncodeRps(RpsDecodeCompleteCallback* decodeCallback) {
|
||||
_lengthEncFrame = 0;
|
||||
size_t bytes_read = fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile);
|
||||
if (bytes_read < _lengthSourceFrame)
|
||||
return true;
|
||||
_inputVideoBuffer.CopyFrame(_lengthSourceFrame, _sourceBuffer);
|
||||
_inputVideoBuffer.SetTimeStamp((unsigned int)
|
||||
(_encFrameCnt * 9e4 / _inst.maxFramerate));
|
||||
_inputVideoBuffer.SetWidth(_inst.width);
|
||||
_inputVideoBuffer.SetHeight(_inst.height);
|
||||
if (feof(_sourceFile) != 0) {
|
||||
return true;
|
||||
}
|
||||
_encodeCompleteTime = 0;
|
||||
_encodeTimes[_inputVideoBuffer.TimeStamp()] = tGetTime();
|
||||
|
||||
webrtc::CodecSpecificInfo* codecSpecificInfo = CreateEncoderSpecificInfo();
|
||||
codecSpecificInfo->codecSpecific.VP8.pictureIdRPSI =
|
||||
decodeCallback->LastDecodedRefPictureId(
|
||||
&codecSpecificInfo->codecSpecific.VP8.hasReceivedRPSI);
|
||||
if (sli_) {
|
||||
codecSpecificInfo->codecSpecific.VP8.pictureIdSLI =
|
||||
decodeCallback->LastDecodedPictureId();
|
||||
codecSpecificInfo->codecSpecific.VP8.hasReceivedSLI = true;
|
||||
sli_ = false;
|
||||
}
|
||||
printf("Encoding: %u\n", _framecnt);
|
||||
int ret = _encoder->Encode(_inputVideoBuffer, codecSpecificInfo, NULL);
|
||||
if (ret < 0)
|
||||
printf("Failed to encode: %u\n", _framecnt);
|
||||
|
||||
if (codecSpecificInfo != NULL) {
|
||||
delete codecSpecificInfo;
|
||||
codecSpecificInfo = NULL;
|
||||
}
|
||||
if (_encodeCompleteTime > 0) {
|
||||
_totalEncodeTime += _encodeCompleteTime -
|
||||
_encodeTimes[_inputVideoBuffer.TimeStamp()];
|
||||
}
|
||||
else {
|
||||
_totalEncodeTime += tGetTime() -
|
||||
_encodeTimes[_inputVideoBuffer.TimeStamp()];
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
//#define FRAME_LOSS 1
|
||||
|
||||
int VP8RpsTest::Decode(int lossValue) {
|
||||
_sumEncBytes += _frameToDecode->_frame->Length();
|
||||
webrtc::EncodedImage encodedImage;
|
||||
VideoEncodedBufferToEncodedImage(*(_frameToDecode->_frame), encodedImage);
|
||||
encodedImage._completeFrame = !lossValue;
|
||||
_decodeCompleteTime = 0;
|
||||
_decodeTimes[encodedImage._timeStamp] = clock()/(double)CLOCKS_PER_SEC;
|
||||
int ret = _decoder->Decode(encodedImage, _missingFrames, NULL,
|
||||
_frameToDecode->_codecSpecificInfo);
|
||||
// Drop every 10th frame for the second decoder
|
||||
#if FRAME_LOSS
|
||||
if (_framecnt == 0 || _framecnt % 10 != 0) {
|
||||
printf("Decoding: %u\n", _framecnt);
|
||||
if (_framecnt > 1 && (_framecnt - 1) % 10 == 0)
|
||||
_missingFrames = true;
|
||||
#else
|
||||
if (true) {
|
||||
if (_framecnt > 0 && _framecnt % 10 == 0) {
|
||||
encodedImage._length = std::rand() % encodedImage._length;
|
||||
printf("Decoding with loss: %u\n", _framecnt);
|
||||
}
|
||||
else
|
||||
printf("Decoding: %u\n", _framecnt);
|
||||
#endif
|
||||
int ret2 = decoder2_->Decode(encodedImage, _missingFrames, NULL,
|
||||
_frameToDecode->_codecSpecificInfo,
|
||||
0 /* dummy */);
|
||||
|
||||
// check return values
|
||||
if (ret < 0 || ret2 < 0) {
|
||||
return -1;
|
||||
} else if (ret2 == WEBRTC_VIDEO_CODEC_ERR_REQUEST_SLI ||
|
||||
ret2 == WEBRTC_VIDEO_CODEC_REQUEST_SLI) {
|
||||
sli_ = true;
|
||||
}
|
||||
|
||||
// compare decoded images
|
||||
#if FRAME_LOSS
|
||||
if (!_missingFrames) {
|
||||
if (!CheckIfBitExact(_decodedVideoBuffer.GetBuffer(),
|
||||
_decodedVideoBuffer.GetLength(),
|
||||
decoded_frame2_.GetBuffer(), _decodedVideoBuffer.GetLength())) {
|
||||
fprintf(stderr,"\n\nRPS decoder different from master: %u\n\n",
|
||||
_framecnt);
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
#else
|
||||
if (_framecnt > 0 && _framecnt % 10 != 0) {
|
||||
if (!CheckIfBitExact(_decodedVideoBuffer.Buffer(),
|
||||
_decodedVideoBuffer.Length(),
|
||||
decoded_frame2_.Buffer(), _decodedVideoBuffer.Length())) {
|
||||
fprintf(stderr,"\n\nRPS decoder different from master: %u\n\n",
|
||||
_framecnt);
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
#endif
|
||||
}
|
||||
#if FRAME_LOSS
|
||||
else
|
||||
printf("Dropping %u\n", _framecnt);
|
||||
#endif
|
||||
_missingFrames = false;
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
bool
|
||||
VP8RpsTest::CheckIfBitExact(const void* ptrA, unsigned int aLengthBytes,
|
||||
const void* ptrB, unsigned int bLengthBytes) {
|
||||
if (aLengthBytes != bLengthBytes)
|
||||
return false;
|
||||
return memcmp(ptrA, ptrB, aLengthBytes) == 0;
|
||||
}
|
||||
|
||||
RpsDecodeCompleteCallback::RpsDecodeCompleteCallback(webrtc::VideoFrame* buffer)
|
||||
: decoded_frame_(buffer),
|
||||
decode_complete_(false),
|
||||
last_decoded_picture_id_(0),
|
||||
last_decoded_ref_picture_id_(0),
|
||||
updated_ref_picture_id_(false) {
|
||||
}
|
||||
|
||||
WebRtc_Word32 RpsDecodeCompleteCallback::Decoded(webrtc::VideoFrame& image) {
|
||||
return decoded_frame_->CopyFrame(image);
|
||||
decode_complete_ = true;
|
||||
}
|
||||
|
||||
bool RpsDecodeCompleteCallback::DecodeComplete() {
|
||||
if (decode_complete_)
|
||||
{
|
||||
decode_complete_ = false;
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
WebRtc_Word32 RpsDecodeCompleteCallback::ReceivedDecodedReferenceFrame(
|
||||
const WebRtc_UWord64 picture_id) {
|
||||
last_decoded_ref_picture_id_ = picture_id & 0x7FFF;
|
||||
updated_ref_picture_id_ = true;
|
||||
return 0;
|
||||
}
|
||||
|
||||
WebRtc_Word32 RpsDecodeCompleteCallback::ReceivedDecodedFrame(
|
||||
const WebRtc_UWord64 picture_id) {
|
||||
last_decoded_picture_id_ = picture_id & 0x3F;
|
||||
return 0;
|
||||
}
|
||||
|
||||
WebRtc_UWord64 RpsDecodeCompleteCallback::LastDecodedPictureId() const {
|
||||
return last_decoded_picture_id_;
|
||||
}
|
||||
|
||||
WebRtc_UWord64 RpsDecodeCompleteCallback::LastDecodedRefPictureId(
|
||||
bool *updated) {
|
||||
if (updated)
|
||||
*updated = updated_ref_picture_id_;
|
||||
updated_ref_picture_id_ = false;
|
||||
return last_decoded_ref_picture_id_;
|
||||
}
|
||||
57
webrtc/modules/video_coding/codecs/vp8/test/rps_test.h
Normal file
57
webrtc/modules/video_coding/codecs/vp8/test/rps_test.h
Normal file
@@ -0,0 +1,57 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_RPS_TEST_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_RPS_TEST_H_
|
||||
|
||||
#include "vp8.h"
|
||||
#include "normal_async_test.h"
|
||||
|
||||
class RpsDecodeCompleteCallback;
|
||||
|
||||
class VP8RpsTest : public VP8NormalAsyncTest {
|
||||
public:
|
||||
VP8RpsTest(float bitRate);
|
||||
VP8RpsTest();
|
||||
virtual ~VP8RpsTest();
|
||||
virtual void Perform();
|
||||
private:
|
||||
VP8RpsTest(std::string name, std::string description, unsigned int testNo)
|
||||
: VP8NormalAsyncTest(name, description, testNo) {}
|
||||
virtual bool EncodeRps(RpsDecodeCompleteCallback* decodeCallback);
|
||||
virtual int Decode(int lossValue = 0);
|
||||
|
||||
static bool CheckIfBitExact(const void *ptrA, unsigned int aLengthBytes,
|
||||
const void *ptrB, unsigned int bLengthBytes);
|
||||
|
||||
webrtc::VP8Decoder* decoder2_;
|
||||
webrtc::VideoFrame decoded_frame2_;
|
||||
bool sli_;
|
||||
};
|
||||
|
||||
class RpsDecodeCompleteCallback : public webrtc::DecodedImageCallback {
|
||||
public:
|
||||
RpsDecodeCompleteCallback(webrtc::VideoFrame* buffer);
|
||||
WebRtc_Word32 Decoded(webrtc::VideoFrame& decodedImage);
|
||||
bool DecodeComplete();
|
||||
WebRtc_Word32 ReceivedDecodedReferenceFrame(const WebRtc_UWord64 picture_id);
|
||||
WebRtc_Word32 ReceivedDecodedFrame(const WebRtc_UWord64 picture_id);
|
||||
WebRtc_UWord64 LastDecodedPictureId() const;
|
||||
WebRtc_UWord64 LastDecodedRefPictureId(bool *updated);
|
||||
|
||||
private:
|
||||
webrtc::VideoFrame* decoded_frame_;
|
||||
bool decode_complete_;
|
||||
WebRtc_UWord64 last_decoded_picture_id_;
|
||||
WebRtc_UWord64 last_decoded_ref_picture_id_;
|
||||
bool updated_ref_picture_id_;
|
||||
};
|
||||
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_RPS_TEST_H_
|
||||
62
webrtc/modules/video_coding/codecs/vp8/test/tester.cc
Normal file
62
webrtc/modules/video_coding/codecs/vp8/test/tester.cc
Normal file
@@ -0,0 +1,62 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include <fstream>
|
||||
#include <iostream>
|
||||
#include <vector>
|
||||
|
||||
#include "benchmark.h"
|
||||
#include "dual_decoder_test.h"
|
||||
#include "gtest/gtest.h"
|
||||
#include "normal_async_test.h"
|
||||
#include "packet_loss_test.h"
|
||||
#include "vp8_unittest.h"
|
||||
#include "rps_test.h"
|
||||
#include "testsupport/fileutils.h"
|
||||
#include "vp8.h"
|
||||
|
||||
using namespace webrtc;
|
||||
|
||||
void PopulateTests(std::vector<CodecTest*>* tests)
|
||||
{
|
||||
// tests->push_back(new VP8RpsTest());
|
||||
tests->push_back(new VP8UnitTest());
|
||||
// tests->push_back(new VP8DualDecoderTest());
|
||||
// tests->push_back(new VP8Benchmark());
|
||||
// tests->push_back(new VP8PacketLossTest(0.05, false, 5));
|
||||
// tests->push_back(new VP8NormalAsyncTest());
|
||||
}
|
||||
|
||||
TEST(Vp8WrapperTest, RunAllTests)
|
||||
{
|
||||
VP8Encoder* enc;
|
||||
VP8Decoder* dec;
|
||||
std::vector<CodecTest*> tests;
|
||||
PopulateTests(&tests);
|
||||
std::fstream log;
|
||||
std::string log_file = webrtc::test::OutputPath() + "VP8_test_log.txt";
|
||||
log.open(log_file.c_str(), std::fstream::out | std::fstream::app);
|
||||
std::vector<CodecTest*>::iterator it;
|
||||
for (it = tests.begin() ; it < tests.end(); it++)
|
||||
{
|
||||
enc = VP8Encoder::Create();
|
||||
dec = VP8Decoder::Create();
|
||||
(*it)->SetEncoder(enc);
|
||||
(*it)->SetDecoder(dec);
|
||||
(*it)->SetLog(&log);
|
||||
(*it)->Perform();
|
||||
(*it)->Print();
|
||||
delete enc;
|
||||
delete dec;
|
||||
delete *it;
|
||||
}
|
||||
log.close();
|
||||
tests.pop_back();
|
||||
}
|
||||
119
webrtc/modules/video_coding/codecs/vp8/test/vp8_unittest.cc
Normal file
119
webrtc/modules/video_coding/codecs/vp8/test/vp8_unittest.cc
Normal file
@@ -0,0 +1,119 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "vp8_unittest.h"
|
||||
|
||||
#include <string.h>
|
||||
|
||||
#include "modules/video_coding/codecs/test_framework/video_source.h"
|
||||
#include "gtest/gtest.h"
|
||||
#include "testsupport/fileutils.h"
|
||||
#include "vp8.h"
|
||||
|
||||
using namespace webrtc;
|
||||
|
||||
VP8UnitTest::VP8UnitTest()
|
||||
:
|
||||
UnitTest("VP8UnitTest", "Unit test")
|
||||
{
|
||||
}
|
||||
|
||||
VP8UnitTest::VP8UnitTest(std::string name, std::string description)
|
||||
:
|
||||
UnitTest(name, description)
|
||||
{
|
||||
}
|
||||
|
||||
WebRtc_UWord32
|
||||
VP8UnitTest::CodecSpecific_SetBitrate(WebRtc_UWord32 bitRate,
|
||||
WebRtc_UWord32 /*frameRate*/)
|
||||
{
|
||||
int rate = _encoder->SetRates(bitRate, _inst.maxFramerate);
|
||||
EXPECT_TRUE(rate >= 0);
|
||||
return rate;
|
||||
}
|
||||
|
||||
void
|
||||
VP8UnitTest::Perform()
|
||||
{
|
||||
Setup();
|
||||
VP8Encoder* enc = (VP8Encoder*)_encoder;
|
||||
VP8Decoder* dec = (VP8Decoder*)_decoder;
|
||||
|
||||
//----- Encoder parameter tests -----
|
||||
//-- Calls before InitEncode() --
|
||||
EXPECT_EQ(enc->Release(), WEBRTC_VIDEO_CODEC_OK);
|
||||
EXPECT_EQ(enc->SetRates(_bitRate, _inst.maxFramerate),
|
||||
WEBRTC_VIDEO_CODEC_UNINITIALIZED);
|
||||
|
||||
EXPECT_EQ(enc->SetRates(_bitRate, _inst.maxFramerate),
|
||||
WEBRTC_VIDEO_CODEC_UNINITIALIZED);
|
||||
|
||||
VideoCodec codecInst;
|
||||
memset(&codecInst, 0, sizeof(codecInst));
|
||||
strncpy(codecInst.plName, "VP8", 31);
|
||||
codecInst.plType = 126;
|
||||
codecInst.maxBitrate = 0;
|
||||
codecInst.minBitrate = 0;
|
||||
codecInst.width = 1440;
|
||||
codecInst.height = 1080;
|
||||
codecInst.maxFramerate = 30;
|
||||
codecInst.startBitrate = 300;
|
||||
codecInst.codecSpecific.VP8.complexity = kComplexityNormal;
|
||||
codecInst.codecSpecific.VP8.numberOfTemporalLayers = 1;
|
||||
EXPECT_EQ(enc->InitEncode(&codecInst, 1, 1440), WEBRTC_VIDEO_CODEC_OK);
|
||||
|
||||
|
||||
//-- Test two problematic level settings --
|
||||
strncpy(codecInst.plName, "VP8", 31);
|
||||
codecInst.plType = 126;
|
||||
codecInst.maxBitrate = 0;
|
||||
codecInst.minBitrate = 0;
|
||||
codecInst.width = 352;
|
||||
codecInst.height = 288;
|
||||
codecInst.maxFramerate = 30;
|
||||
codecInst.codecSpecific.VP8.complexity = kComplexityNormal;
|
||||
codecInst.startBitrate = 300;
|
||||
EXPECT_EQ(enc->InitEncode(&codecInst, 1, 1440), WEBRTC_VIDEO_CODEC_OK);
|
||||
|
||||
// Settings not correct for this profile
|
||||
strncpy(codecInst.plName, "VP8", 31);
|
||||
codecInst.plType = 126;
|
||||
codecInst.maxBitrate = 0;
|
||||
codecInst.minBitrate = 0;
|
||||
codecInst.width = 176;
|
||||
codecInst.height = 144;
|
||||
codecInst.maxFramerate = 15;
|
||||
codecInst.codecSpecific.VP8.complexity = kComplexityNormal;
|
||||
codecInst.startBitrate = 300;
|
||||
ASSERT_EQ(enc->InitEncode(&_inst, 1, 1440), WEBRTC_VIDEO_CODEC_OK);
|
||||
|
||||
|
||||
//-- ProcessNewBitrate() errors --
|
||||
// Bad bitrate.
|
||||
EXPECT_EQ(enc->SetRates(_inst.maxBitrate + 1, _inst.maxFramerate),
|
||||
WEBRTC_VIDEO_CODEC_OK);
|
||||
|
||||
//----- Decoder parameter tests -----
|
||||
//-- Calls before InitDecode() --
|
||||
EXPECT_TRUE(dec->Release() == 0);
|
||||
ASSERT_TRUE(dec->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK);
|
||||
|
||||
//-- SetCodecConfigParameters() errors --
|
||||
unsigned char tmpBuf[128];
|
||||
EXPECT_TRUE(dec->SetCodecConfigParameters(NULL, sizeof(tmpBuf)) == -1);
|
||||
EXPECT_TRUE(dec->SetCodecConfigParameters(tmpBuf, 1) == -1);
|
||||
// Garbage data.
|
||||
EXPECT_TRUE(dec->SetCodecConfigParameters(tmpBuf, sizeof(tmpBuf)) == -1);
|
||||
|
||||
UnitTest::Perform();
|
||||
Teardown();
|
||||
|
||||
}
|
||||
29
webrtc/modules/video_coding/codecs/vp8/test/vp8_unittest.h
Normal file
29
webrtc/modules/video_coding/codecs/vp8/test/vp8_unittest.h
Normal file
@@ -0,0 +1,29 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_VP8_UNITTEST_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_VP8_UNITTEST_H_
|
||||
|
||||
#include "modules/video_coding/codecs/test_framework/unit_test.h"
|
||||
|
||||
class VP8UnitTest : public UnitTest
|
||||
{
|
||||
public:
|
||||
VP8UnitTest();
|
||||
VP8UnitTest(std::string name, std::string description);
|
||||
virtual void Perform();
|
||||
|
||||
protected:
|
||||
virtual WebRtc_UWord32 CodecSpecific_SetBitrate(
|
||||
WebRtc_UWord32 bitRate,
|
||||
WebRtc_UWord32 /*frameRate*/);
|
||||
};
|
||||
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_VP8_UNITTEST_H_
|
||||
128
webrtc/modules/video_coding/codecs/vp8/vp8.gyp
Normal file
128
webrtc/modules/video_coding/codecs/vp8/vp8.gyp
Normal file
@@ -0,0 +1,128 @@
|
||||
# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
#
|
||||
# Use of this source code is governed by a BSD-style license
|
||||
# that can be found in the LICENSE file in the root of the source
|
||||
# tree. An additional intellectual property rights grant can be found
|
||||
# in the file PATENTS. All contributing project authors may
|
||||
# be found in the AUTHORS file in the root of the source tree.
|
||||
|
||||
{
|
||||
'includes': [
|
||||
'../../../../build/common.gypi',
|
||||
'../test_framework/test_framework.gypi'
|
||||
],
|
||||
'targets': [
|
||||
{
|
||||
'target_name': 'webrtc_vp8',
|
||||
'type': '<(library)',
|
||||
'dependencies': [
|
||||
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
|
||||
'<(webrtc_root)/common_video/common_video.gyp:common_video',
|
||||
],
|
||||
'include_dirs': [
|
||||
'include',
|
||||
'<(webrtc_root)/common_video/interface',
|
||||
'<(webrtc_root)/modules/video_coding/codecs/interface',
|
||||
'<(webrtc_root)/modules/interface',
|
||||
],
|
||||
'conditions': [
|
||||
['build_libvpx==1', {
|
||||
'dependencies': [
|
||||
'<(DEPTH)/third_party/libvpx/libvpx.gyp:libvpx',
|
||||
],
|
||||
}],
|
||||
# TODO(mikhal): Investigate this mechanism for handling differences
|
||||
# between the Chromium and standalone builds.
|
||||
# http://code.google.com/p/webrtc/issues/detail?id=201
|
||||
['build_with_chromium==1', {
|
||||
'defines': [
|
||||
'WEBRTC_LIBVPX_VERSION=960' # Bali
|
||||
],
|
||||
}, {
|
||||
'defines': [
|
||||
'WEBRTC_LIBVPX_VERSION=971' # Cayuga
|
||||
],
|
||||
'sources': [
|
||||
'temporal_layers.h',
|
||||
'temporal_layers.cc',
|
||||
],
|
||||
}],
|
||||
],
|
||||
'direct_dependent_settings': {
|
||||
'include_dirs': [
|
||||
'include',
|
||||
'<(webrtc_root)/common_video/interface',
|
||||
'<(webrtc_root)/modules/video_coding/codecs/interface',
|
||||
],
|
||||
},
|
||||
'sources': [
|
||||
'reference_picture_selection.h',
|
||||
'reference_picture_selection.cc',
|
||||
'include/vp8.h',
|
||||
'include/vp8_common_types.h',
|
||||
'vp8_impl.cc',
|
||||
],
|
||||
},
|
||||
], # targets
|
||||
'conditions': [
|
||||
['include_tests==1', {
|
||||
'targets': [
|
||||
{
|
||||
'target_name': 'vp8_integrationtests',
|
||||
'type': 'executable',
|
||||
'dependencies': [
|
||||
'test_framework',
|
||||
'webrtc_vp8',
|
||||
'<(webrtc_root)/common_video/common_video.gyp:common_video',
|
||||
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
|
||||
'<(webrtc_root)/test/test.gyp:test_support',
|
||||
'<(webrtc_root)/test/test.gyp:test_support_main',
|
||||
'<(DEPTH)/testing/gtest.gyp:gtest',
|
||||
],
|
||||
'sources': [
|
||||
# header files
|
||||
'test/benchmark.h',
|
||||
'test/dual_decoder_test.h',
|
||||
'test/normal_async_test.h',
|
||||
'test/packet_loss_test.h',
|
||||
'test/rps_test.h',
|
||||
'test/vp8_unittest.h',
|
||||
|
||||
# source files
|
||||
'test/benchmark.cc',
|
||||
'test/dual_decoder_test.cc',
|
||||
'test/normal_async_test.cc',
|
||||
'test/packet_loss_test.cc',
|
||||
'test/rps_test.cc',
|
||||
'test/tester.cc',
|
||||
'test/vp8_unittest.cc',
|
||||
],
|
||||
},
|
||||
{
|
||||
'target_name': 'vp8_unittests',
|
||||
'type': 'executable',
|
||||
'dependencies': [
|
||||
'webrtc_vp8',
|
||||
'<(DEPTH)/testing/gmock.gyp:gmock',
|
||||
'<(DEPTH)/testing/gtest.gyp:gtest',
|
||||
'<(webrtc_root)/test/test.gyp:test_support_main',
|
||||
],
|
||||
'include_dirs': [
|
||||
'<(DEPTH)/third_party/libvpx/source/libvpx',
|
||||
],
|
||||
'sources': [
|
||||
'reference_picture_selection_unittest.cc',
|
||||
'temporal_layers_unittest.cc',
|
||||
],
|
||||
'conditions': [
|
||||
['build_libvpx==1', {
|
||||
'dependencies': [
|
||||
'<(DEPTH)/third_party/libvpx/libvpx.gyp:libvpx',
|
||||
],
|
||||
}],
|
||||
],
|
||||
},
|
||||
], # targets
|
||||
}], # include_tests
|
||||
],
|
||||
}
|
||||
1033
webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
Normal file
1033
webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
Normal file
File diff suppressed because it is too large
Load Diff
237
webrtc/modules/video_coding/codecs/vp8/vp8_impl.h
Normal file
237
webrtc/modules/video_coding/codecs/vp8/vp8_impl.h
Normal file
@@ -0,0 +1,237 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*
|
||||
* WEBRTC VP8 wrapper interface
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_IMPL_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_IMPL_H_
|
||||
|
||||
#include "modules/video_coding/codecs/vp8/include/vp8.h"
|
||||
|
||||
// VPX forward declaration
|
||||
typedef struct vpx_codec_ctx vpx_codec_ctx_t;
|
||||
typedef struct vpx_codec_ctx vpx_dec_ctx_t;
|
||||
typedef struct vpx_codec_enc_cfg vpx_codec_enc_cfg_t;
|
||||
typedef struct vpx_image vpx_image_t;
|
||||
typedef struct vpx_ref_frame vpx_ref_frame_t;
|
||||
struct vpx_codec_cx_pkt;
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
class TemporalLayers;
|
||||
class ReferencePictureSelection;
|
||||
|
||||
class VP8EncoderImpl : public VP8Encoder {
|
||||
public:
|
||||
VP8EncoderImpl();
|
||||
|
||||
virtual ~VP8EncoderImpl();
|
||||
|
||||
// Free encoder memory.
|
||||
//
|
||||
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
|
||||
virtual int Release();
|
||||
|
||||
// Initialize the encoder with the information from the codecSettings
|
||||
//
|
||||
// Input:
|
||||
// - codec_settings : Codec settings
|
||||
// - number_of_cores : Number of cores available for the encoder
|
||||
// - max_payload_size : The maximum size each payload is allowed
|
||||
// to have. Usually MTU - overhead.
|
||||
//
|
||||
// Return value : Set bit rate if OK
|
||||
// <0 - Errors:
|
||||
// WEBRTC_VIDEO_CODEC_ERR_PARAMETER
|
||||
// WEBRTC_VIDEO_CODEC_ERR_SIZE
|
||||
// WEBRTC_VIDEO_CODEC_LEVEL_EXCEEDED
|
||||
// WEBRTC_VIDEO_CODEC_MEMORY
|
||||
// WEBRTC_VIDEO_CODEC_ERROR
|
||||
virtual int InitEncode(const VideoCodec* codec_settings,
|
||||
int number_of_cores,
|
||||
uint32_t max_payload_size);
|
||||
|
||||
// Encode an I420 image (as a part of a video stream). The encoded image
|
||||
// will be returned to the user through the encode complete callback.
|
||||
//
|
||||
// Input:
|
||||
// - input_image : Image to be encoded
|
||||
// - frame_types : Frame type to be generated by the encoder.
|
||||
//
|
||||
// Return value : WEBRTC_VIDEO_CODEC_OK if OK
|
||||
// <0 - Errors:
|
||||
// WEBRTC_VIDEO_CODEC_ERR_PARAMETER
|
||||
// WEBRTC_VIDEO_CODEC_MEMORY
|
||||
// WEBRTC_VIDEO_CODEC_ERROR
|
||||
// WEBRTC_VIDEO_CODEC_TIMEOUT
|
||||
|
||||
virtual int Encode(const VideoFrame& input_image,
|
||||
const CodecSpecificInfo* codec_specific_info,
|
||||
const std::vector<VideoFrameType>* frame_types);
|
||||
|
||||
// Register an encode complete callback object.
|
||||
//
|
||||
// Input:
|
||||
// - callback : Callback object which handles encoded images.
|
||||
//
|
||||
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
|
||||
virtual int RegisterEncodeCompleteCallback(EncodedImageCallback* callback);
|
||||
|
||||
// Inform the encoder of the new packet loss rate and the round-trip time of
|
||||
// the network.
|
||||
//
|
||||
// - packet_loss : Fraction lost
|
||||
// (loss rate in percent = 100 * packetLoss / 255)
|
||||
// - rtt : Round-trip time in milliseconds
|
||||
// Return value : WEBRTC_VIDEO_CODEC_OK if OK
|
||||
// <0 - Errors: WEBRTC_VIDEO_CODEC_ERROR
|
||||
//
|
||||
virtual int SetChannelParameters(uint32_t packet_loss, int rtt);
|
||||
|
||||
// Inform the encoder about the new target bit rate.
|
||||
//
|
||||
// - new_bitrate_kbit : New target bit rate
|
||||
// - frame_rate : The target frame rate
|
||||
//
|
||||
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
|
||||
virtual int SetRates(uint32_t new_bitrate_kbit, uint32_t frame_rate);
|
||||
|
||||
private:
|
||||
// Call encoder initialize function and set control settings.
|
||||
int InitAndSetControlSettings(const VideoCodec* inst);
|
||||
|
||||
// Update frame size for codec.
|
||||
int UpdateCodecFrameSize(WebRtc_UWord32 input_image_width,
|
||||
WebRtc_UWord32 input_image_height);
|
||||
|
||||
void PopulateCodecSpecific(CodecSpecificInfo* codec_specific,
|
||||
const vpx_codec_cx_pkt& pkt,
|
||||
uint32_t timestamp);
|
||||
|
||||
int GetEncodedFrame(const VideoFrame& input_image);
|
||||
|
||||
int GetEncodedPartitions(const VideoFrame& input_image);
|
||||
|
||||
// Determine maximum target for Intra frames
|
||||
//
|
||||
// Input:
|
||||
// - optimal_buffer_size : Optimal buffer size
|
||||
// Return Value : Max target size for Intra frames represented as
|
||||
// percentage of the per frame bandwidth
|
||||
uint32_t MaxIntraTarget(uint32_t optimal_buffer_size);
|
||||
|
||||
EncodedImage encoded_image_;
|
||||
EncodedImageCallback* encoded_complete_callback_;
|
||||
VideoCodec codec_;
|
||||
bool inited_;
|
||||
int64_t timestamp_;
|
||||
uint16_t picture_id_;
|
||||
bool feedback_mode_;
|
||||
int cpu_speed_;
|
||||
uint32_t rc_max_intra_target_;
|
||||
int token_partitions_;
|
||||
ReferencePictureSelection* rps_;
|
||||
TemporalLayers* temporal_layers_;
|
||||
vpx_codec_ctx_t* encoder_;
|
||||
vpx_codec_enc_cfg_t* config_;
|
||||
vpx_image_t* raw_;
|
||||
}; // end of VP8Encoder class
|
||||
|
||||
|
||||
class VP8DecoderImpl : public VP8Decoder {
|
||||
public:
|
||||
VP8DecoderImpl();
|
||||
|
||||
virtual ~VP8DecoderImpl();
|
||||
|
||||
// Initialize the decoder.
|
||||
//
|
||||
// Return value : WEBRTC_VIDEO_CODEC_OK.
|
||||
// <0 - Errors:
|
||||
// WEBRTC_VIDEO_CODEC_ERROR
|
||||
virtual int InitDecode(const VideoCodec* inst, int number_of_cores);
|
||||
|
||||
// Decode encoded image (as a part of a video stream). The decoded image
|
||||
// will be returned to the user through the decode complete callback.
|
||||
//
|
||||
// Input:
|
||||
// - input_image : Encoded image to be decoded
|
||||
// - missing_frames : True if one or more frames have been lost
|
||||
// since the previous decode call.
|
||||
// - fragmentation : Specifies the start and length of each VP8
|
||||
// partition.
|
||||
// - codec_specific_info : pointer to specific codec data
|
||||
// - render_time_ms : Render time in Ms
|
||||
//
|
||||
// Return value : WEBRTC_VIDEO_CODEC_OK if OK
|
||||
// <0 - Errors:
|
||||
// WEBRTC_VIDEO_CODEC_ERROR
|
||||
// WEBRTC_VIDEO_CODEC_ERR_PARAMETER
|
||||
virtual int Decode(const EncodedImage& input_image,
|
||||
bool missing_frames,
|
||||
const RTPFragmentationHeader* fragmentation,
|
||||
const CodecSpecificInfo* codec_specific_info,
|
||||
int64_t /*render_time_ms*/);
|
||||
|
||||
// Register a decode complete callback object.
|
||||
//
|
||||
// Input:
|
||||
// - callback : Callback object which handles decoded images.
|
||||
//
|
||||
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
|
||||
virtual int RegisterDecodeCompleteCallback(DecodedImageCallback* callback);
|
||||
|
||||
// Free decoder memory.
|
||||
//
|
||||
// Return value : WEBRTC_VIDEO_CODEC_OK if OK
|
||||
// <0 - Errors:
|
||||
// WEBRTC_VIDEO_CODEC_ERROR
|
||||
virtual int Release();
|
||||
|
||||
// Reset decoder state and prepare for a new call.
|
||||
//
|
||||
// Return value : WEBRTC_VIDEO_CODEC_OK.
|
||||
// <0 - Errors:
|
||||
// WEBRTC_VIDEO_CODEC_UNINITIALIZED
|
||||
// WEBRTC_VIDEO_CODEC_ERROR
|
||||
virtual int Reset();
|
||||
|
||||
// Create a copy of the codec and its internal state.
|
||||
//
|
||||
// Return value : A copy of the instance if OK, NULL otherwise.
|
||||
virtual VideoDecoder* Copy();
|
||||
|
||||
private:
|
||||
// Copy reference image from this _decoder to the _decoder in copyTo. Set
|
||||
// which frame type to copy in _refFrame->frame_type before the call to
|
||||
// this function.
|
||||
int CopyReference(VP8Decoder* copy);
|
||||
|
||||
int DecodePartitions(const EncodedImage& input_image,
|
||||
const RTPFragmentationHeader* fragmentation);
|
||||
|
||||
int ReturnFrame(const vpx_image_t* img, uint32_t timeStamp);
|
||||
|
||||
VideoFrame decoded_image_;
|
||||
DecodedImageCallback* decode_complete_callback_;
|
||||
bool inited_;
|
||||
bool feedback_mode_;
|
||||
vpx_dec_ctx_t* decoder_;
|
||||
VideoCodec codec_;
|
||||
EncodedImage last_keyframe_;
|
||||
int image_format_;
|
||||
vpx_ref_frame_t* ref_frame_;
|
||||
int propagation_cnt_;
|
||||
bool latest_keyframe_complete_;
|
||||
bool mfqe_enabled_;
|
||||
}; // end of VP8Decoder class
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_IMPL_H_
|
||||
4
webrtc/modules/video_coding/main/OWNERS
Normal file
4
webrtc/modules/video_coding/main/OWNERS
Normal file
@@ -0,0 +1,4 @@
|
||||
stefan@webrtc.org
|
||||
mikhal@webrtc.org
|
||||
marpan@webrtc.org
|
||||
henrik.lundin@webrtc.org
|
||||
@@ -0,0 +1,34 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_MAIN_INTERFACE_MOCK_MOCK_VCM_CALLBACKS_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_MAIN_INTERFACE_MOCK_MOCK_VCM_CALLBACKS_H_
|
||||
|
||||
#include "gmock/gmock.h"
|
||||
#include "typedefs.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
class MockVCMFrameTypeCallback : public VCMFrameTypeCallback {
|
||||
public:
|
||||
MOCK_METHOD0(RequestKeyFrame, int32_t());
|
||||
MOCK_METHOD1(SliceLossIndicationRequest,
|
||||
WebRtc_Word32(const WebRtc_UWord64 pictureId));
|
||||
};
|
||||
|
||||
class MockPacketRequestCallback : public VCMPacketRequestCallback {
|
||||
public:
|
||||
MOCK_METHOD2(ResendPackets, int32_t(const uint16_t* sequenceNumbers,
|
||||
uint16_t length));
|
||||
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_MAIN_INTERFACE_MOCK_MOCK_VCM_CALLBACKS_H_
|
||||
557
webrtc/modules/video_coding/main/interface/video_coding.h
Normal file
557
webrtc/modules/video_coding/main/interface/video_coding.h
Normal file
@@ -0,0 +1,557 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_INTERFACE_VIDEO_CODING_H_
|
||||
#define WEBRTC_MODULES_INTERFACE_VIDEO_CODING_H_
|
||||
|
||||
#include "modules/interface/module.h"
|
||||
#include "modules/interface/module_common_types.h"
|
||||
#include "modules/video_coding/main/interface/video_coding_defines.h"
|
||||
|
||||
namespace webrtc
|
||||
{
|
||||
|
||||
class TickTimeBase;
|
||||
class VideoEncoder;
|
||||
class VideoDecoder;
|
||||
struct CodecSpecificInfo;
|
||||
|
||||
class VideoCodingModule : public Module
|
||||
{
|
||||
public:
|
||||
enum SenderNackMode {
|
||||
kNackNone,
|
||||
kNackAll,
|
||||
kNackSelective
|
||||
};
|
||||
|
||||
enum ReceiverRobustness {
|
||||
kNone,
|
||||
kHardNack,
|
||||
kSoftNack,
|
||||
kDualDecoder,
|
||||
kReferenceSelection
|
||||
};
|
||||
|
||||
enum DecodeErrors {
|
||||
kNoDecodeErrors,
|
||||
kAllowDecodeErrors
|
||||
};
|
||||
|
||||
static VideoCodingModule* Create(const WebRtc_Word32 id);
|
||||
|
||||
static VideoCodingModule* Create(const WebRtc_Word32 id,
|
||||
TickTimeBase* clock);
|
||||
|
||||
static void Destroy(VideoCodingModule* module);
|
||||
|
||||
// Get number of supported codecs
|
||||
//
|
||||
// Return value : Number of supported codecs
|
||||
static WebRtc_UWord8 NumberOfCodecs();
|
||||
|
||||
// Get supported codec settings with using id
|
||||
//
|
||||
// Input:
|
||||
// - listId : Id or index of the codec to look up
|
||||
// - codec : Memory where the codec settings will be stored
|
||||
//
|
||||
// Return value : VCM_OK, on success
|
||||
// VCM_PARAMETER_ERROR if codec not supported or id too high
|
||||
static WebRtc_Word32 Codec(const WebRtc_UWord8 listId, VideoCodec* codec);
|
||||
|
||||
// Get supported codec settings using codec type
|
||||
//
|
||||
// Input:
|
||||
// - codecType : The codec type to get settings for
|
||||
// - codec : Memory where the codec settings will be stored
|
||||
//
|
||||
// Return value : VCM_OK, on success
|
||||
// VCM_PARAMETER_ERROR if codec not supported
|
||||
static WebRtc_Word32 Codec(VideoCodecType codecType, VideoCodec* codec);
|
||||
|
||||
/*
|
||||
* Sender
|
||||
*/
|
||||
|
||||
// Any encoder-related state of VCM will be initialized to the
|
||||
// same state as when the VCM was created. This will not interrupt
|
||||
// or effect decoding functionality of VCM. VCM will lose all the
|
||||
// encoding-related settings by calling this function.
|
||||
// For instance, a send codec has to be registered again.
|
||||
//
|
||||
// Return value : VCM_OK, on success.
|
||||
// < 0, on error.
|
||||
virtual WebRtc_Word32 InitializeSender() = 0;
|
||||
|
||||
// Registers a codec to be used for encoding. Calling this
|
||||
// API multiple times overwrites any previously registered codecs.
|
||||
//
|
||||
// Input:
|
||||
// - sendCodec : Settings for the codec to be registered.
|
||||
// - numberOfCores : The number of cores the codec is allowed
|
||||
// to use.
|
||||
// - maxPayloadSize : The maximum size each payload is allowed
|
||||
// to have. Usually MTU - overhead.
|
||||
//
|
||||
// Return value : VCM_OK, on success.
|
||||
// < 0, on error.
|
||||
virtual WebRtc_Word32 RegisterSendCodec(const VideoCodec* sendCodec,
|
||||
WebRtc_UWord32 numberOfCores,
|
||||
WebRtc_UWord32 maxPayloadSize) = 0;
|
||||
|
||||
// API to get the current send codec in use.
|
||||
//
|
||||
// Input:
|
||||
// - currentSendCodec : Address where the sendCodec will be written.
|
||||
//
|
||||
// Return value : VCM_OK, on success.
|
||||
// < 0, on error.
|
||||
virtual WebRtc_Word32 SendCodec(VideoCodec* currentSendCodec) const = 0;
|
||||
|
||||
// API to get the current send codec type
|
||||
//
|
||||
// Return value : Codec type, on success.
|
||||
// kVideoCodecUnknown, on error or if no send codec is set
|
||||
virtual VideoCodecType SendCodec() const = 0;
|
||||
|
||||
// Register an external encoder object. This can not be used together with
|
||||
// external decoder callbacks.
|
||||
//
|
||||
// Input:
|
||||
// - externalEncoder : Encoder object to be used for encoding frames inserted
|
||||
// with the AddVideoFrame API.
|
||||
// - payloadType : The payload type bound which this encoder is bound to.
|
||||
//
|
||||
// Return value : VCM_OK, on success.
|
||||
// < 0, on error.
|
||||
virtual WebRtc_Word32 RegisterExternalEncoder(VideoEncoder* externalEncoder,
|
||||
WebRtc_UWord8 payloadType,
|
||||
bool internalSource = false) = 0;
|
||||
|
||||
// API to get codec config parameters to be sent out-of-band to a receiver.
|
||||
//
|
||||
// Input:
|
||||
// - buffer : Memory where the codec config parameters should be written.
|
||||
// - size : Size of the memory available.
|
||||
//
|
||||
// Return value : Number of bytes written, on success.
|
||||
// < 0, on error.
|
||||
virtual WebRtc_Word32 CodecConfigParameters(WebRtc_UWord8* buffer, WebRtc_Word32 size) = 0;
|
||||
|
||||
// API to get currently configured encoder target bitrate in kbit/s.
|
||||
//
|
||||
// Return value : 0, on success.
|
||||
// < 0, on error.
|
||||
virtual int Bitrate(unsigned int* bitrate) const = 0;
|
||||
|
||||
// API to get currently configured encoder target frame rate.
|
||||
//
|
||||
// Return value : 0, on success.
|
||||
// < 0, on error.
|
||||
virtual int FrameRate(unsigned int* framerate) const = 0;
|
||||
|
||||
// Sets the parameters describing the send channel. These parameters are inputs to the
|
||||
// Media Optimization inside the VCM and also specifies the target bit rate for the
|
||||
// encoder. Bit rate used by NACK should already be compensated for by the user.
|
||||
//
|
||||
// Input:
|
||||
// - availableBandWidth : Band width available for the VCM in kbit/s.
|
||||
// - lossRate : Fractions of lost packets the past second.
|
||||
// (loss rate in percent = 100 * packetLoss / 255)
|
||||
// - rtt : Current round-trip time in ms.
|
||||
//
|
||||
// Return value : VCM_OK, on success.
|
||||
// < 0, on error.
|
||||
virtual WebRtc_Word32 SetChannelParameters(WebRtc_UWord32 availableBandWidth,
|
||||
WebRtc_UWord8 lossRate,
|
||||
WebRtc_UWord32 rtt) = 0;
|
||||
|
||||
// Sets the parameters describing the receive channel. These parameters are inputs to the
|
||||
// Media Optimization inside the VCM.
|
||||
//
|
||||
// Input:
|
||||
// - rtt : Current round-trip time in ms.
|
||||
// with the most amount available bandwidth in a conference
|
||||
// scenario
|
||||
//
|
||||
// Return value : VCM_OK, on success.
|
||||
// < 0, on error.
|
||||
virtual WebRtc_Word32 SetReceiveChannelParameters(WebRtc_UWord32 rtt) = 0;
|
||||
|
||||
// Register a transport callback which will be called to deliver the encoded data and
|
||||
// side information.
|
||||
//
|
||||
// Input:
|
||||
// - transport : The callback object to register.
|
||||
//
|
||||
// Return value : VCM_OK, on success.
|
||||
// < 0, on error.
|
||||
virtual WebRtc_Word32 RegisterTransportCallback(VCMPacketizationCallback* transport) = 0;
|
||||
|
||||
// Register video output information callback which will be called to deliver information
|
||||
// about the video stream produced by the encoder, for instance the average frame rate and
|
||||
// bit rate.
|
||||
//
|
||||
// Input:
|
||||
// - outputInformation : The callback object to register.
|
||||
//
|
||||
// Return value : VCM_OK, on success.
|
||||
// < 0, on error.
|
||||
virtual WebRtc_Word32 RegisterSendStatisticsCallback(
|
||||
VCMSendStatisticsCallback* sendStats) = 0;
|
||||
|
||||
// Register a video quality settings callback which will be called when
|
||||
// frame rate/dimensions need to be updated for video quality optimization
|
||||
//
|
||||
// Input:
|
||||
// - videoQMSettings : The callback object to register.
|
||||
//
|
||||
// Return value : VCM_OK, on success.
|
||||
// < 0, on error
|
||||
virtual WebRtc_Word32 RegisterVideoQMCallback(VCMQMSettingsCallback* videoQMSettings) = 0;
|
||||
|
||||
// Register a video protection callback which will be called to deliver
|
||||
// the requested FEC rate and NACK status (on/off).
|
||||
//
|
||||
// Input:
|
||||
// - protection : The callback object to register.
|
||||
//
|
||||
// Return value : VCM_OK, on success.
|
||||
// < 0, on error.
|
||||
virtual WebRtc_Word32 RegisterProtectionCallback(VCMProtectionCallback* protection) = 0;
|
||||
|
||||
// Enable or disable a video protection method.
|
||||
//
|
||||
// Input:
|
||||
// - videoProtection : The method to enable or disable.
|
||||
// - enable : True if the method should be enabled, false if
|
||||
// it should be disabled.
|
||||
//
|
||||
// Return value : VCM_OK, on success.
|
||||
// < 0, on error.
|
||||
virtual WebRtc_Word32 SetVideoProtection(VCMVideoProtection videoProtection,
|
||||
bool enable) = 0;
|
||||
|
||||
// Add one raw video frame to the encoder. This function does all the necessary
|
||||
// processing, then decides what frame type to encode, or if the frame should be
|
||||
// dropped. If the frame should be encoded it passes the frame to the encoder
|
||||
// before it returns.
|
||||
//
|
||||
// Input:
|
||||
// - videoFrame : Video frame to encode.
|
||||
// - codecSpecificInfo : Extra codec information, e.g., pre-parsed in-band signaling.
|
||||
//
|
||||
// Return value : VCM_OK, on success.
|
||||
// < 0, on error.
|
||||
virtual WebRtc_Word32 AddVideoFrame(
|
||||
const VideoFrame& videoFrame,
|
||||
const VideoContentMetrics* contentMetrics = NULL,
|
||||
const CodecSpecificInfo* codecSpecificInfo = NULL) = 0;
|
||||
|
||||
// Next frame encoded should be an intra frame (keyframe).
|
||||
//
|
||||
// Return value : VCM_OK, on success.
|
||||
// < 0, on error.
|
||||
virtual WebRtc_Word32 IntraFrameRequest(int stream_index) = 0;
|
||||
|
||||
// Frame Dropper enable. Can be used to disable the frame dropping when the encoder
|
||||
// over-uses its bit rate. This API is designed to be used when the encoded frames
|
||||
// are supposed to be stored to an AVI file, or when the I420 codec is used and the
|
||||
// target bit rate shouldn't affect the frame rate.
|
||||
//
|
||||
// Input:
|
||||
// - enable : True to enable the setting, false to disable it.
|
||||
//
|
||||
// Return value : VCM_OK, on success.
|
||||
// < 0, on error.
|
||||
virtual WebRtc_Word32 EnableFrameDropper(bool enable) = 0;
|
||||
|
||||
// Sent frame counters
|
||||
virtual WebRtc_Word32 SentFrameCount(VCMFrameCount& frameCount) const = 0;
|
||||
|
||||
/*
|
||||
* Receiver
|
||||
*/
|
||||
|
||||
// The receiver state of the VCM will be initialized to the
|
||||
// same state as when the VCM was created. This will not interrupt
|
||||
// or effect the send side functionality of VCM. VCM will lose all the
|
||||
// decoding-related settings by calling this function. All frames
|
||||
// inside the jitter buffer are flushed and the delay is reset.
|
||||
// For instance, a receive codec has to be registered again.
|
||||
//
|
||||
// Return value : VCM_OK, on success.
|
||||
// < 0, on error.
|
||||
virtual WebRtc_Word32 InitializeReceiver() = 0;
|
||||
|
||||
// Register possible receive codecs, can be called multiple times for different codecs.
|
||||
// The module will automatically switch between registered codecs depending on the
|
||||
// payload type of incoming frames. The actual decoder will be created when needed.
|
||||
//
|
||||
// Input:
|
||||
// - receiveCodec : Settings for the codec to be registered.
|
||||
// - numberOfCores : Number of CPU cores that the decoder is allowed to use.
|
||||
// - requireKeyFrame : Set this to true if you don't want any delta frames
|
||||
// to be decoded until the first key frame has been decoded.
|
||||
//
|
||||
// Return value : VCM_OK, on success.
|
||||
// < 0, on error.
|
||||
virtual WebRtc_Word32 RegisterReceiveCodec(const VideoCodec* receiveCodec,
|
||||
WebRtc_Word32 numberOfCores,
|
||||
bool requireKeyFrame = false) = 0;
|
||||
|
||||
// Register an externally defined decoder/renderer object. Can be a decoder only or a
|
||||
// decoder coupled with a renderer. Note that RegisterReceiveCodec must be called to
|
||||
// be used for decoding incoming streams.
|
||||
//
|
||||
// Input:
|
||||
// - externalDecoder : The external decoder/renderer object.
|
||||
// - payloadType : The payload type which this decoder should be
|
||||
// registered to.
|
||||
// - internalRenderTiming : True if the internal renderer (if any) of the decoder
|
||||
// object can make sure to render at a given time in ms.
|
||||
//
|
||||
// Return value : VCM_OK, on success.
|
||||
// < 0, on error.
|
||||
virtual WebRtc_Word32 RegisterExternalDecoder(VideoDecoder* externalDecoder,
|
||||
WebRtc_UWord8 payloadType,
|
||||
bool internalRenderTiming) = 0;
|
||||
|
||||
// Register a receive callback. Will be called whenever there is a new frame ready
|
||||
// for rendering.
|
||||
//
|
||||
// Input:
|
||||
// - receiveCallback : The callback object to be used by the module when a
|
||||
// frame is ready for rendering.
|
||||
// De-register with a NULL pointer.
|
||||
//
|
||||
// Return value : VCM_OK, on success.
|
||||
// < 0, on error.
|
||||
virtual WebRtc_Word32 RegisterReceiveCallback(VCMReceiveCallback* receiveCallback) = 0;
|
||||
|
||||
// Register a receive statistics callback which will be called to deliver information
|
||||
// about the video stream received by the receiving side of the VCM, for instance the
|
||||
// average frame rate and bit rate.
|
||||
//
|
||||
// Input:
|
||||
// - receiveStats : The callback object to register.
|
||||
//
|
||||
// Return value : VCM_OK, on success.
|
||||
// < 0, on error.
|
||||
virtual WebRtc_Word32 RegisterReceiveStatisticsCallback(
|
||||
VCMReceiveStatisticsCallback* receiveStats) = 0;
|
||||
|
||||
// Register a frame type request callback. This callback will be called when the
|
||||
// module needs to request specific frame types from the send side.
|
||||
//
|
||||
// Input:
|
||||
// - frameTypeCallback : The callback object to be used by the module when
|
||||
// requesting a specific type of frame from the send side.
|
||||
// De-register with a NULL pointer.
|
||||
//
|
||||
// Return value : VCM_OK, on success.
|
||||
// < 0, on error.
|
||||
virtual WebRtc_Word32 RegisterFrameTypeCallback(
|
||||
VCMFrameTypeCallback* frameTypeCallback) = 0;
|
||||
|
||||
// Register a frame storage callback. This callback will be called right before an
|
||||
// encoded frame is given to the decoder. Useful for recording the incoming video sequence.
|
||||
//
|
||||
// Input:
|
||||
// - frameStorageCallback : The callback object used by the module
|
||||
// to store a received encoded frame.
|
||||
//
|
||||
// Return value : VCM_OK, on success.
|
||||
// < 0, on error.
|
||||
virtual WebRtc_Word32 RegisterFrameStorageCallback(
|
||||
VCMFrameStorageCallback* frameStorageCallback) = 0;
|
||||
|
||||
// Registers a callback which is called whenever the receive side of the VCM
|
||||
// encounters holes in the packet sequence and needs packets to be retransmitted.
|
||||
//
|
||||
// Input:
|
||||
// - callback : The callback to be registered in the VCM.
|
||||
//
|
||||
// Return value : VCM_OK, on success.
|
||||
// <0, on error.
|
||||
virtual WebRtc_Word32 RegisterPacketRequestCallback(
|
||||
VCMPacketRequestCallback* callback) = 0;
|
||||
|
||||
// Waits for the next frame in the jitter buffer to become complete
|
||||
// (waits no longer than maxWaitTimeMs), then passes it to the decoder for decoding.
|
||||
// Should be called as often as possible to get the most out of the decoder.
|
||||
//
|
||||
// Return value : VCM_OK, on success.
|
||||
// < 0, on error.
|
||||
virtual WebRtc_Word32 Decode(WebRtc_UWord16 maxWaitTimeMs = 200) = 0;
|
||||
|
||||
// Waits for the next frame in the dual jitter buffer to become complete
|
||||
// (waits no longer than maxWaitTimeMs), then passes it to the dual decoder
|
||||
// for decoding. This will never trigger a render callback. Should be
|
||||
// called frequently, and as long as it returns 1 it should be called again
|
||||
// as soon as possible.
|
||||
//
|
||||
// Return value : 1, if a frame was decoded
|
||||
// 0, if no frame was decoded
|
||||
// < 0, on error.
|
||||
virtual WebRtc_Word32 DecodeDualFrame(WebRtc_UWord16 maxWaitTimeMs = 200) = 0;
|
||||
|
||||
// Decodes a frame and sets an appropriate render time in ms relative to the system time.
|
||||
// Should be used in conjunction with VCMFrameStorageCallback.
|
||||
//
|
||||
// Input:
|
||||
// - frameFromStorage : Encoded frame read from file or received through
|
||||
// the VCMFrameStorageCallback callback.
|
||||
//
|
||||
// Return value: : VCM_OK, on success
|
||||
// < 0, on error
|
||||
virtual WebRtc_Word32 DecodeFromStorage(const EncodedVideoData& frameFromStorage) = 0;
|
||||
|
||||
// Reset the decoder state to the initial state.
|
||||
//
|
||||
// Return value : VCM_OK, on success.
|
||||
// < 0, on error.
|
||||
virtual WebRtc_Word32 ResetDecoder() = 0;
|
||||
|
||||
// API to get the codec which is currently used for decoding by the module.
|
||||
//
|
||||
// Input:
|
||||
// - currentReceiveCodec : Settings for the codec to be registered.
|
||||
//
|
||||
// Return value : VCM_OK, on success.
|
||||
// < 0, on error.
|
||||
virtual WebRtc_Word32 ReceiveCodec(VideoCodec* currentReceiveCodec) const = 0;
|
||||
|
||||
// API to get the codec type currently used for decoding by the module.
|
||||
//
|
||||
// Return value : codecy type, on success.
|
||||
// kVideoCodecUnknown, on error or if no receive codec is registered
|
||||
virtual VideoCodecType ReceiveCodec() const = 0;
|
||||
|
||||
// Insert a parsed packet into the receiver side of the module. Will be placed in the
|
||||
// jitter buffer waiting for the frame to become complete. Returns as soon as the packet
|
||||
// has been placed in the jitter buffer.
|
||||
//
|
||||
// Input:
|
||||
// - incomingPayload : Payload of the packet.
|
||||
// - payloadLength : Length of the payload.
|
||||
// - rtpInfo : The parsed header.
|
||||
//
|
||||
// Return value : VCM_OK, on success.
|
||||
// < 0, on error.
|
||||
virtual WebRtc_Word32 IncomingPacket(const WebRtc_UWord8* incomingPayload,
|
||||
WebRtc_UWord32 payloadLength,
|
||||
const WebRtcRTPHeader& rtpInfo) = 0;
|
||||
|
||||
// Minimum playout delay (Used for lip-sync). This is the minimum delay required
|
||||
// to sync with audio. Not included in VideoCodingModule::Delay()
|
||||
// Defaults to 0 ms.
|
||||
//
|
||||
// Input:
|
||||
// - minPlayoutDelayMs : Additional delay in ms.
|
||||
//
|
||||
// Return value : VCM_OK, on success.
|
||||
// < 0, on error.
|
||||
virtual WebRtc_Word32 SetMinimumPlayoutDelay(WebRtc_UWord32 minPlayoutDelayMs) = 0;
|
||||
|
||||
// Set the time required by the renderer to render a frame.
|
||||
//
|
||||
// Input:
|
||||
// - timeMS : The time in ms required by the renderer to render a frame.
|
||||
//
|
||||
// Return value : VCM_OK, on success.
|
||||
// < 0, on error.
|
||||
virtual WebRtc_Word32 SetRenderDelay(WebRtc_UWord32 timeMS) = 0;
|
||||
|
||||
// The total delay desired by the VCM. Can be less than the minimum
|
||||
// delay set with SetMinimumPlayoutDelay.
|
||||
//
|
||||
// Return value : Total delay in ms, on success.
|
||||
// < 0, on error.
|
||||
virtual WebRtc_Word32 Delay() const = 0;
|
||||
|
||||
// Get the received frame counters. Keeps track of the number of each frame type
|
||||
// received since the start of the call.
|
||||
//
|
||||
// Output:
|
||||
// - frameCount : Struct to be filled with the number of frames received.
|
||||
//
|
||||
// Return value : VCM_OK, on success.
|
||||
// <0, on error.
|
||||
virtual WebRtc_Word32 ReceivedFrameCount(VCMFrameCount& frameCount) const = 0;
|
||||
|
||||
// Returns the number of packets discarded by the jitter buffer due to being
|
||||
// too late. This can include duplicated packets which arrived after the
|
||||
// frame was sent to the decoder. Therefore packets which were prematurely
|
||||
// NACKed will be counted.
|
||||
virtual WebRtc_UWord32 DiscardedPackets() const = 0;
|
||||
|
||||
|
||||
// Robustness APIs
|
||||
|
||||
// Set the sender RTX/NACK mode.
|
||||
// Input:
|
||||
// - mode : the selected NACK mode.
|
||||
//
|
||||
// Return value : VCM_OK, on success;
|
||||
// < 0, on error.
|
||||
virtual int SetSenderNackMode(SenderNackMode mode) = 0;
|
||||
|
||||
// Set the sender reference picture selection (RPS) mode.
|
||||
// Input:
|
||||
// - enable : true or false, for enable and disable, respectively.
|
||||
//
|
||||
// Return value : VCM_OK, on success;
|
||||
// < 0, on error.
|
||||
virtual int SetSenderReferenceSelection(bool enable) = 0;
|
||||
|
||||
// Set the sender forward error correction (FEC) mode.
|
||||
// Input:
|
||||
// - enable : true or false, for enable and disable, respectively.
|
||||
//
|
||||
// Return value : VCM_OK, on success;
|
||||
// < 0, on error.
|
||||
virtual int SetSenderFEC(bool enable) = 0;
|
||||
|
||||
// Set the key frame period, or disable periodic key frames (I-frames).
|
||||
// Input:
|
||||
// - periodMs : period in ms; <= 0 to disable periodic key frames.
|
||||
//
|
||||
// Return value : VCM_OK, on success;
|
||||
// < 0, on error.
|
||||
virtual int SetSenderKeyFramePeriod(int periodMs) = 0;
|
||||
|
||||
// Set the receiver robustness mode. The mode decides how the receiver
|
||||
// responds to losses in the stream. The type of counter-measure (soft or
|
||||
// hard NACK, dual decoder, RPS, etc.) is selected through the
|
||||
// robustnessMode parameter. The errorMode parameter decides if it is
|
||||
// allowed to display frames corrupted by losses. Note that not all
|
||||
// combinations of the two parameters are feasible. An error will be
|
||||
// returned for invalid combinations.
|
||||
// Input:
|
||||
// - robustnessMode : selected robustness mode.
|
||||
// - errorMode : selected error mode.
|
||||
//
|
||||
// Return value : VCM_OK, on success;
|
||||
// < 0, on error.
|
||||
virtual int SetReceiverRobustnessMode(ReceiverRobustness robustnessMode,
|
||||
DecodeErrors errorMode) = 0;
|
||||
|
||||
// Enables recording of debugging information.
|
||||
virtual int StartDebugRecording(const char* file_name_utf8) = 0;
|
||||
|
||||
// Disables recording of debugging information.
|
||||
virtual int StopDebugRecording() = 0;
|
||||
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_MODULES_INTERFACE_VIDEO_CODING_H_
|
||||
@@ -0,0 +1,190 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_INTERFACE_VIDEO_CODING_DEFINES_H_
|
||||
#define WEBRTC_MODULES_INTERFACE_VIDEO_CODING_DEFINES_H_
|
||||
|
||||
#include "typedefs.h"
|
||||
#include "modules/interface/module_common_types.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Error codes
|
||||
#define VCM_FRAME_NOT_READY 3
|
||||
#define VCM_REQUEST_SLI 2
|
||||
#define VCM_MISSING_CALLBACK 1
|
||||
#define VCM_OK 0
|
||||
#define VCM_GENERAL_ERROR -1
|
||||
#define VCM_LEVEL_EXCEEDED -2
|
||||
#define VCM_MEMORY -3
|
||||
#define VCM_PARAMETER_ERROR -4
|
||||
#define VCM_UNKNOWN_PAYLOAD -5
|
||||
#define VCM_CODEC_ERROR -6
|
||||
#define VCM_UNINITIALIZED -7
|
||||
#define VCM_NO_CODEC_REGISTERED -8
|
||||
#define VCM_JITTER_BUFFER_ERROR -9
|
||||
#define VCM_OLD_PACKET_ERROR -10
|
||||
#define VCM_NO_FRAME_DECODED -11
|
||||
#define VCM_ERROR_REQUEST_SLI -12
|
||||
#define VCM_NOT_IMPLEMENTED -20
|
||||
|
||||
#define VCM_RED_PAYLOAD_TYPE 96
|
||||
#define VCM_ULPFEC_PAYLOAD_TYPE 97
|
||||
#define VCM_VP8_PAYLOAD_TYPE 120
|
||||
#define VCM_I420_PAYLOAD_TYPE 124
|
||||
|
||||
enum VCMNackProperties {
|
||||
kNackHistoryLength = 450
|
||||
};
|
||||
|
||||
enum VCMVideoProtection {
|
||||
kProtectionNack, // Both send-side and receive-side
|
||||
kProtectionNackSender, // Send-side only
|
||||
kProtectionNackReceiver, // Receive-side only
|
||||
kProtectionDualDecoder,
|
||||
kProtectionFEC,
|
||||
kProtectionNackFEC,
|
||||
kProtectionKeyOnLoss,
|
||||
kProtectionKeyOnKeyLoss,
|
||||
kProtectionPeriodicKeyFrames
|
||||
};
|
||||
|
||||
enum VCMTemporalDecimation {
|
||||
kBitrateOverUseDecimation,
|
||||
};
|
||||
|
||||
struct VCMFrameCount {
|
||||
WebRtc_UWord32 numKeyFrames;
|
||||
WebRtc_UWord32 numDeltaFrames;
|
||||
};
|
||||
|
||||
// Callback class used for sending data ready to be packetized
|
||||
class VCMPacketizationCallback {
|
||||
public:
|
||||
virtual WebRtc_Word32 SendData(
|
||||
FrameType frameType,
|
||||
WebRtc_UWord8 payloadType,
|
||||
WebRtc_UWord32 timeStamp,
|
||||
int64_t capture_time_ms,
|
||||
const WebRtc_UWord8* payloadData,
|
||||
WebRtc_UWord32 payloadSize,
|
||||
const RTPFragmentationHeader& fragmentationHeader,
|
||||
const RTPVideoHeader* rtpVideoHdr) = 0;
|
||||
protected:
|
||||
virtual ~VCMPacketizationCallback() {
|
||||
}
|
||||
};
|
||||
|
||||
// Callback class used for passing decoded frames which are ready to be rendered.
|
||||
class VCMFrameStorageCallback {
|
||||
public:
|
||||
virtual WebRtc_Word32 StoreReceivedFrame(
|
||||
const EncodedVideoData& frameToStore) = 0;
|
||||
|
||||
protected:
|
||||
virtual ~VCMFrameStorageCallback() {
|
||||
}
|
||||
};
|
||||
|
||||
// Callback class used for passing decoded frames which are ready to be rendered.
|
||||
class VCMReceiveCallback {
|
||||
public:
|
||||
virtual WebRtc_Word32 FrameToRender(VideoFrame& videoFrame) = 0;
|
||||
virtual WebRtc_Word32 ReceivedDecodedReferenceFrame(
|
||||
const WebRtc_UWord64 pictureId) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
protected:
|
||||
virtual ~VCMReceiveCallback() {
|
||||
}
|
||||
};
|
||||
|
||||
// Callback class used for informing the user of the bit rate and frame rate produced by the
|
||||
// encoder.
|
||||
class VCMSendStatisticsCallback {
|
||||
public:
|
||||
virtual WebRtc_Word32 SendStatistics(const WebRtc_UWord32 bitRate,
|
||||
const WebRtc_UWord32 frameRate) = 0;
|
||||
|
||||
protected:
|
||||
virtual ~VCMSendStatisticsCallback() {
|
||||
}
|
||||
};
|
||||
|
||||
// Callback class used for informing the user of the incoming bit rate and frame rate.
|
||||
class VCMReceiveStatisticsCallback {
|
||||
public:
|
||||
virtual WebRtc_Word32 ReceiveStatistics(const WebRtc_UWord32 bitRate,
|
||||
const WebRtc_UWord32 frameRate) = 0;
|
||||
|
||||
protected:
|
||||
virtual ~VCMReceiveStatisticsCallback() {
|
||||
}
|
||||
};
|
||||
|
||||
// Callback class used for telling the user about how to configure the FEC,
|
||||
// and the rates sent the last second is returned to the VCM.
|
||||
class VCMProtectionCallback {
|
||||
public:
|
||||
virtual int ProtectionRequest(const FecProtectionParams* delta_params,
|
||||
const FecProtectionParams* key_params,
|
||||
uint32_t* sent_video_rate_bps,
|
||||
uint32_t* sent_nack_rate_bps,
|
||||
uint32_t* sent_fec_rate_bps) = 0;
|
||||
|
||||
protected:
|
||||
virtual ~VCMProtectionCallback() {
|
||||
}
|
||||
};
|
||||
|
||||
// Callback class used for telling the user about what frame type needed to continue decoding.
|
||||
// Typically a key frame when the stream has been corrupted in some way.
|
||||
class VCMFrameTypeCallback {
|
||||
public:
|
||||
virtual WebRtc_Word32 RequestKeyFrame() = 0;
|
||||
virtual WebRtc_Word32 SliceLossIndicationRequest(
|
||||
const WebRtc_UWord64 pictureId) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
protected:
|
||||
virtual ~VCMFrameTypeCallback() {
|
||||
}
|
||||
};
|
||||
|
||||
// Callback class used for telling the user about which packet sequence numbers are currently
|
||||
// missing and need to be resent.
|
||||
class VCMPacketRequestCallback {
|
||||
public:
|
||||
virtual WebRtc_Word32 ResendPackets(const WebRtc_UWord16* sequenceNumbers,
|
||||
WebRtc_UWord16 length) = 0;
|
||||
|
||||
protected:
|
||||
virtual ~VCMPacketRequestCallback() {
|
||||
}
|
||||
};
|
||||
|
||||
// Callback used to inform the user of the the desired resolution
|
||||
// as subscribed by Media Optimization (Quality Modes)
|
||||
class VCMQMSettingsCallback {
|
||||
public:
|
||||
virtual WebRtc_Word32 SetVideoQMSettings(const WebRtc_UWord32 frameRate,
|
||||
const WebRtc_UWord32 width,
|
||||
const WebRtc_UWord32 height) = 0;
|
||||
|
||||
protected:
|
||||
virtual ~VCMQMSettingsCallback() {
|
||||
}
|
||||
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_MODULES_INTERFACE_VIDEO_CODING_DEFINES_H_
|
||||
70
webrtc/modules/video_coding/main/source/Android.mk
Normal file
70
webrtc/modules/video_coding/main/source/Android.mk
Normal file
@@ -0,0 +1,70 @@
|
||||
# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
#
|
||||
# Use of this source code is governed by a BSD-style license
|
||||
# that can be found in the LICENSE file in the root of the source
|
||||
# tree. An additional intellectual property rights grant can be found
|
||||
# in the file PATENTS. All contributing project authors may
|
||||
# be found in the AUTHORS file in the root of the source tree.
|
||||
|
||||
LOCAL_PATH := $(call my-dir)
|
||||
|
||||
include $(CLEAR_VARS)
|
||||
|
||||
include $(LOCAL_PATH)/../../../../../android-webrtc.mk
|
||||
|
||||
LOCAL_ARM_MODE := arm
|
||||
LOCAL_MODULE_CLASS := STATIC_LIBRARIES
|
||||
LOCAL_MODULE := libwebrtc_video_coding
|
||||
LOCAL_MODULE_TAGS := optional
|
||||
LOCAL_CPP_EXTENSION := .cc
|
||||
LOCAL_SRC_FILES := \
|
||||
codec_database.cc \
|
||||
codec_timer.cc \
|
||||
content_metrics_processing.cc \
|
||||
decoding_state.cc \
|
||||
encoded_frame.cc \
|
||||
exp_filter.cc \
|
||||
frame_buffer.cc \
|
||||
frame_dropper.cc \
|
||||
generic_decoder.cc \
|
||||
generic_encoder.cc \
|
||||
inter_frame_delay.cc \
|
||||
jitter_buffer.cc \
|
||||
jitter_buffer_common.cc \
|
||||
jitter_estimator.cc \
|
||||
media_opt_util.cc \
|
||||
media_optimization.cc \
|
||||
packet.cc \
|
||||
qm_select.cc \
|
||||
receiver.cc \
|
||||
rtt_filter.cc \
|
||||
session_info.cc \
|
||||
timestamp_extrapolator.cc \
|
||||
timestamp_map.cc \
|
||||
timing.cc \
|
||||
video_coding_impl.cc
|
||||
|
||||
# Flags passed to both C and C++ files.
|
||||
LOCAL_CFLAGS := \
|
||||
$(MY_WEBRTC_COMMON_DEFS)
|
||||
|
||||
LOCAL_C_INCLUDES := \
|
||||
$(LOCAL_PATH)/../interface \
|
||||
$(LOCAL_PATH)/../../codecs/interface \
|
||||
$(LOCAL_PATH)/../../codecs/i420/main/interface \
|
||||
$(LOCAL_PATH)/../../codecs/vp8/main/interface \
|
||||
$(LOCAL_PATH)/../../../interface \
|
||||
$(LOCAL_PATH)/../../../.. \
|
||||
$(LOCAL_PATH)/../../../../common_video/vplib/main/interface \
|
||||
$(LOCAL_PATH)/../../../../common_video/interface \
|
||||
$(LOCAL_PATH)/../../../../system_wrappers/interface
|
||||
|
||||
LOCAL_SHARED_LIBRARIES := \
|
||||
libcutils \
|
||||
libdl \
|
||||
libstlport
|
||||
|
||||
ifndef NDK_ROOT
|
||||
include external/stlport/libstlport.mk
|
||||
endif
|
||||
include $(BUILD_STATIC_LIBRARY)
|
||||
575
webrtc/modules/video_coding/main/source/codec_database.cc
Normal file
575
webrtc/modules/video_coding/main/source/codec_database.cc
Normal file
@@ -0,0 +1,575 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "modules/video_coding/main/source/codec_database.h"
|
||||
|
||||
#include <assert.h>
|
||||
|
||||
#include "engine_configurations.h"
|
||||
#ifdef VIDEOCODEC_I420
|
||||
#include "modules/video_coding/codecs/i420/main/interface/i420.h"
|
||||
#endif
|
||||
#ifdef VIDEOCODEC_VP8
|
||||
#include "modules/video_coding/codecs/vp8/include/vp8.h"
|
||||
#endif
|
||||
#include "modules/video_coding/main/source/internal_defines.h"
|
||||
#include "system_wrappers/interface/trace.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
VCMDecoderMapItem::VCMDecoderMapItem(VideoCodec* settings,
|
||||
int number_of_cores,
|
||||
bool require_key_frame)
|
||||
: settings(settings),
|
||||
number_of_cores(number_of_cores),
|
||||
require_key_frame(require_key_frame) {
|
||||
assert(number_of_cores >= 0);
|
||||
}
|
||||
|
||||
VCMExtDecoderMapItem::VCMExtDecoderMapItem(
|
||||
VideoDecoder* external_decoder_instance,
|
||||
uint8_t payload_type,
|
||||
bool internal_render_timing)
|
||||
: payload_type(payload_type),
|
||||
external_decoder_instance(external_decoder_instance),
|
||||
internal_render_timing(internal_render_timing) {
|
||||
}
|
||||
|
||||
VCMCodecDataBase::VCMCodecDataBase(int id)
|
||||
: id_(id),
|
||||
number_of_cores_(0),
|
||||
max_payload_size_(kDefaultPayloadSize),
|
||||
periodic_key_frames_(false),
|
||||
current_enc_is_external_(false),
|
||||
send_codec_(),
|
||||
receive_codec_(),
|
||||
external_payload_type_(0),
|
||||
external_encoder_(NULL),
|
||||
internal_source_(false),
|
||||
ptr_encoder_(NULL),
|
||||
ptr_decoder_(NULL),
|
||||
current_dec_is_external_(false),
|
||||
dec_map_(),
|
||||
dec_external_map_() {
|
||||
}
|
||||
|
||||
VCMCodecDataBase::~VCMCodecDataBase() {
|
||||
ResetSender();
|
||||
ResetReceiver();
|
||||
}
|
||||
|
||||
int VCMCodecDataBase::NumberOfCodecs() {
|
||||
return VCM_NUM_VIDEO_CODECS_AVAILABLE;
|
||||
}
|
||||
|
||||
bool VCMCodecDataBase::Codec(int list_id,
|
||||
VideoCodec* settings) {
|
||||
if (!settings) {
|
||||
return false;
|
||||
}
|
||||
if (list_id >= VCM_NUM_VIDEO_CODECS_AVAILABLE) {
|
||||
return false;
|
||||
}
|
||||
memset(settings, 0, sizeof(VideoCodec));
|
||||
switch (list_id) {
|
||||
#ifdef VIDEOCODEC_VP8
|
||||
case VCM_VP8_IDX: {
|
||||
strncpy(settings->plName, "VP8", 4);
|
||||
settings->codecType = kVideoCodecVP8;
|
||||
// 96 to 127 dynamic payload types for video codecs.
|
||||
settings->plType = VCM_VP8_PAYLOAD_TYPE;
|
||||
settings->startBitrate = 100;
|
||||
settings->minBitrate = VCM_MIN_BITRATE;
|
||||
settings->maxBitrate = 0;
|
||||
settings->maxFramerate = VCM_DEFAULT_FRAME_RATE;
|
||||
settings->width = VCM_DEFAULT_CODEC_WIDTH;
|
||||
settings->height = VCM_DEFAULT_CODEC_HEIGHT;
|
||||
settings->numberOfSimulcastStreams = 0;
|
||||
settings->codecSpecific.VP8.resilience = kResilientStream;
|
||||
settings->codecSpecific.VP8.numberOfTemporalLayers = 1;
|
||||
settings->codecSpecific.VP8.denoisingOn = true;
|
||||
settings->codecSpecific.VP8.errorConcealmentOn = false;
|
||||
settings->codecSpecific.VP8.automaticResizeOn = false;
|
||||
settings->codecSpecific.VP8.frameDroppingOn = true;
|
||||
return true;
|
||||
}
|
||||
#endif
|
||||
#ifdef VIDEOCODEC_I420
|
||||
case VCM_I420_IDX: {
|
||||
strncpy(settings->plName, "I420", 5);
|
||||
settings->codecType = kVideoCodecI420;
|
||||
// 96 to 127 dynamic payload types for video codecs.
|
||||
settings->plType = VCM_I420_PAYLOAD_TYPE;
|
||||
// Bitrate needed for this size and framerate.
|
||||
settings->startBitrate = 3 * VCM_DEFAULT_CODEC_WIDTH *
|
||||
VCM_DEFAULT_CODEC_HEIGHT * 8 *
|
||||
VCM_DEFAULT_FRAME_RATE / 1000 / 2;
|
||||
settings->maxBitrate = settings->startBitrate;
|
||||
settings->maxFramerate = VCM_DEFAULT_FRAME_RATE;
|
||||
settings->width = VCM_DEFAULT_CODEC_WIDTH;
|
||||
settings->height = VCM_DEFAULT_CODEC_HEIGHT;
|
||||
settings->minBitrate = VCM_MIN_BITRATE;
|
||||
settings->numberOfSimulcastStreams = 0;
|
||||
return true;
|
||||
}
|
||||
#endif
|
||||
default: {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
bool VCMCodecDataBase::Codec(VideoCodecType codec_type,
|
||||
VideoCodec* settings) {
|
||||
for (int i = 0; i < VCMCodecDataBase::NumberOfCodecs(); i++) {
|
||||
const bool ret = VCMCodecDataBase::Codec(i, settings);
|
||||
if (!ret) {
|
||||
return false;
|
||||
}
|
||||
if (codec_type == settings->codecType) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
void VCMCodecDataBase::ResetSender() {
|
||||
DeleteEncoder();
|
||||
periodic_key_frames_ = false;
|
||||
}
|
||||
|
||||
// Assuming only one registered encoder - since only one used, no need for more.
|
||||
bool VCMCodecDataBase::RegisterSendCodec(
|
||||
const VideoCodec* send_codec,
|
||||
int number_of_cores,
|
||||
int max_payload_size) {
|
||||
if (!send_codec) {
|
||||
return false;
|
||||
}
|
||||
if (max_payload_size <= 0) {
|
||||
max_payload_size = kDefaultPayloadSize;
|
||||
}
|
||||
if (number_of_cores < 0 || number_of_cores > 32) {
|
||||
return false;
|
||||
}
|
||||
if (send_codec->plType <= 0) {
|
||||
return false;
|
||||
}
|
||||
// Make sure the start bit rate is sane...
|
||||
if (send_codec->startBitrate > 1000000) {
|
||||
return false;
|
||||
}
|
||||
if (send_codec->codecType == kVideoCodecUnknown) {
|
||||
return false;
|
||||
}
|
||||
number_of_cores_ = number_of_cores;
|
||||
max_payload_size_ = max_payload_size;
|
||||
|
||||
memcpy(&send_codec_, send_codec, sizeof(VideoCodec));
|
||||
|
||||
if (send_codec_.maxBitrate == 0) {
|
||||
// max is one bit per pixel
|
||||
send_codec_.maxBitrate = (static_cast<int>(send_codec_.height) *
|
||||
static_cast<int>(send_codec_.width) *
|
||||
static_cast<int>(send_codec_.maxFramerate)) / 1000;
|
||||
if (send_codec_.startBitrate > send_codec_.maxBitrate) {
|
||||
// But if the user tries to set a higher start bit rate we will
|
||||
// increase the max accordingly.
|
||||
send_codec_.maxBitrate = send_codec_.startBitrate;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool VCMCodecDataBase::SendCodec(VideoCodec* current_send_codec) const {
|
||||
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideoCoding, VCMId(id_),
|
||||
"SendCodec");
|
||||
if (!ptr_encoder_) {
|
||||
return false;
|
||||
}
|
||||
memcpy(current_send_codec, &send_codec_, sizeof(VideoCodec));
|
||||
return true;
|
||||
}
|
||||
|
||||
VideoCodecType VCMCodecDataBase::SendCodec() const {
|
||||
WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideoCoding, VCMId(id_),
|
||||
"SendCodec type");
|
||||
if (!ptr_encoder_) {
|
||||
return kVideoCodecUnknown;
|
||||
}
|
||||
return send_codec_.codecType;
|
||||
}
|
||||
|
||||
bool VCMCodecDataBase::DeregisterExternalEncoder(
|
||||
uint8_t payload_type, bool* was_send_codec) {
|
||||
assert(was_send_codec);
|
||||
*was_send_codec = false;
|
||||
if (external_payload_type_ != payload_type) {
|
||||
return false;
|
||||
}
|
||||
if (send_codec_.plType == payload_type) {
|
||||
// De-register as send codec if needed.
|
||||
DeleteEncoder();
|
||||
memset(&send_codec_, 0, sizeof(VideoCodec));
|
||||
current_enc_is_external_ = false;
|
||||
*was_send_codec = true;
|
||||
}
|
||||
external_payload_type_ = 0;
|
||||
external_encoder_ = NULL;
|
||||
internal_source_ = false;
|
||||
return true;
|
||||
}
|
||||
|
||||
void VCMCodecDataBase::RegisterExternalEncoder(
|
||||
VideoEncoder* external_encoder,
|
||||
uint8_t payload_type,
|
||||
bool internal_source) {
|
||||
// Since only one encoder can be used at a given time, only one external
|
||||
// encoder can be registered/used.
|
||||
external_encoder_ = external_encoder;
|
||||
external_payload_type_ = payload_type;
|
||||
internal_source_ = internal_source;
|
||||
}
|
||||
|
||||
VCMGenericEncoder* VCMCodecDataBase::GetEncoder(
|
||||
const VideoCodec* settings,
|
||||
VCMEncodedFrameCallback* encoded_frame_callback) {
|
||||
// If encoder exists, will destroy it and create new one.
|
||||
DeleteEncoder();
|
||||
if (settings->plType == external_payload_type_) {
|
||||
// External encoder.
|
||||
ptr_encoder_ = new VCMGenericEncoder(*external_encoder_, internal_source_);
|
||||
current_enc_is_external_ = true;
|
||||
} else {
|
||||
ptr_encoder_ = CreateEncoder(settings->codecType);
|
||||
current_enc_is_external_ = false;
|
||||
}
|
||||
encoded_frame_callback->SetPayloadType(settings->plType);
|
||||
if (!ptr_encoder_) {
|
||||
WEBRTC_TRACE(webrtc::kTraceError,
|
||||
webrtc::kTraceVideoCoding,
|
||||
VCMId(id_),
|
||||
"Failed to create encoder: %s.",
|
||||
settings->plName);
|
||||
return NULL;
|
||||
}
|
||||
if (ptr_encoder_->InitEncode(settings, number_of_cores_, max_payload_size_) <
|
||||
0) {
|
||||
WEBRTC_TRACE(webrtc::kTraceError,
|
||||
webrtc::kTraceVideoCoding,
|
||||
VCMId(id_),
|
||||
"Failed to initialize encoder: %s.",
|
||||
settings->plName);
|
||||
DeleteEncoder();
|
||||
return NULL;
|
||||
} else if (ptr_encoder_->RegisterEncodeCallback(encoded_frame_callback) <
|
||||
0) {
|
||||
DeleteEncoder();
|
||||
return NULL;
|
||||
}
|
||||
// Intentionally don't check return value since the encoder registration
|
||||
// shouldn't fail because the codec doesn't support changing the periodic key
|
||||
// frame setting.
|
||||
ptr_encoder_->SetPeriodicKeyFrames(periodic_key_frames_);
|
||||
return ptr_encoder_;
|
||||
}
|
||||
|
||||
bool VCMCodecDataBase::SetPeriodicKeyFrames(bool enable) {
|
||||
periodic_key_frames_ = enable;
|
||||
if (ptr_encoder_) {
|
||||
return (ptr_encoder_->SetPeriodicKeyFrames(periodic_key_frames_) == 0);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
void VCMCodecDataBase::ResetReceiver() {
|
||||
ReleaseDecoder(ptr_decoder_);
|
||||
ptr_decoder_ = NULL;
|
||||
memset(&receive_codec_, 0, sizeof(VideoCodec));
|
||||
while (!dec_map_.empty()) {
|
||||
DecoderMap::iterator it = dec_map_.begin();
|
||||
delete (*it).second;
|
||||
dec_map_.erase(it);
|
||||
}
|
||||
while (!dec_external_map_.empty()) {
|
||||
ExternalDecoderMap::iterator external_it = dec_external_map_.begin();
|
||||
delete (*external_it).second;
|
||||
dec_external_map_.erase(external_it);
|
||||
}
|
||||
current_dec_is_external_ = false;
|
||||
}
|
||||
|
||||
bool VCMCodecDataBase::DeregisterExternalDecoder(uint8_t payload_type) {
|
||||
ExternalDecoderMap::iterator it = dec_external_map_.find(payload_type);
|
||||
if (it == dec_external_map_.end()) {
|
||||
// Not found
|
||||
return false;
|
||||
}
|
||||
if (receive_codec_.plType == payload_type) {
|
||||
// Release it if it was registered and in use.
|
||||
ReleaseDecoder(ptr_decoder_);
|
||||
ptr_decoder_ = NULL;
|
||||
}
|
||||
DeregisterReceiveCodec(payload_type);
|
||||
delete (*it).second;
|
||||
dec_external_map_.erase(it);
|
||||
return true;
|
||||
}
|
||||
|
||||
// Add the external encoder object to the list of external decoders.
|
||||
// Won't be registered as a receive codec until RegisterReceiveCodec is called.
|
||||
bool VCMCodecDataBase::RegisterExternalDecoder(
|
||||
VideoDecoder* external_decoder,
|
||||
uint8_t payload_type,
|
||||
bool internal_render_timing) {
|
||||
// Check if payload value already exists, if so - erase old and insert new.
|
||||
VCMExtDecoderMapItem* ext_decoder = new VCMExtDecoderMapItem(
|
||||
external_decoder, payload_type, internal_render_timing);
|
||||
if (!ext_decoder) {
|
||||
return false;
|
||||
}
|
||||
DeregisterExternalDecoder(payload_type);
|
||||
dec_external_map_[payload_type] = ext_decoder;
|
||||
return true;
|
||||
}
|
||||
|
||||
bool VCMCodecDataBase::DecoderRegistered() const {
|
||||
return !dec_map_.empty();
|
||||
}
|
||||
|
||||
bool VCMCodecDataBase::RegisterReceiveCodec(
|
||||
const VideoCodec* receive_codec,
|
||||
int number_of_cores,
|
||||
bool require_key_frame) {
|
||||
if (number_of_cores < 0) {
|
||||
return false;
|
||||
}
|
||||
WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCoding, VCMId(id_),
|
||||
"Codec: %s, Payload type %d, Height %d, Width %d, Bitrate %d,"
|
||||
"Framerate %d.",
|
||||
receive_codec->plName, receive_codec->plType,
|
||||
receive_codec->height, receive_codec->width,
|
||||
receive_codec->startBitrate, receive_codec->maxFramerate);
|
||||
// Check if payload value already exists, if so - erase old and insert new.
|
||||
DeregisterReceiveCodec(receive_codec->plType);
|
||||
if (receive_codec->codecType == kVideoCodecUnknown) {
|
||||
return false;
|
||||
}
|
||||
VideoCodec* new_receive_codec = new VideoCodec(*receive_codec);
|
||||
dec_map_[receive_codec->plType] = new VCMDecoderMapItem(new_receive_codec,
|
||||
number_of_cores,
|
||||
require_key_frame);
|
||||
return true;
|
||||
}
|
||||
|
||||
bool VCMCodecDataBase::DeregisterReceiveCodec(
|
||||
uint8_t payload_type) {
|
||||
DecoderMap::iterator it = dec_map_.find(payload_type);
|
||||
if (it == dec_map_.end()) {
|
||||
return false;
|
||||
}
|
||||
VCMDecoderMapItem* dec_item = (*it).second;
|
||||
delete dec_item;
|
||||
dec_map_.erase(it);
|
||||
if (receive_codec_.plType == payload_type) {
|
||||
// This codec is currently in use.
|
||||
memset(&receive_codec_, 0, sizeof(VideoCodec));
|
||||
current_dec_is_external_ = false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
bool VCMCodecDataBase::ReceiveCodec(VideoCodec* current_receive_codec) const {
|
||||
assert(current_receive_codec);
|
||||
if (!ptr_decoder_) {
|
||||
return false;
|
||||
}
|
||||
memcpy(current_receive_codec, &receive_codec_, sizeof(VideoCodec));
|
||||
return true;
|
||||
}
|
||||
|
||||
VideoCodecType VCMCodecDataBase::ReceiveCodec() const {
|
||||
if (!ptr_decoder_) {
|
||||
return kVideoCodecUnknown;
|
||||
}
|
||||
return receive_codec_.codecType;
|
||||
}
|
||||
|
||||
VCMGenericDecoder* VCMCodecDataBase::GetDecoder(
|
||||
uint8_t payload_type, VCMDecodedFrameCallback* decoded_frame_callback) {
|
||||
if (payload_type == receive_codec_.plType || payload_type == 0) {
|
||||
return ptr_decoder_;
|
||||
}
|
||||
// Check for exisitng decoder, if exists - delete.
|
||||
if (ptr_decoder_) {
|
||||
ReleaseDecoder(ptr_decoder_);
|
||||
ptr_decoder_ = NULL;
|
||||
memset(&receive_codec_, 0, sizeof(VideoCodec));
|
||||
}
|
||||
ptr_decoder_ = CreateAndInitDecoder(payload_type, &receive_codec_,
|
||||
¤t_dec_is_external_);
|
||||
if (!ptr_decoder_) {
|
||||
return NULL;
|
||||
}
|
||||
if (ptr_decoder_->RegisterDecodeCompleteCallback(decoded_frame_callback)
|
||||
< 0) {
|
||||
ReleaseDecoder(ptr_decoder_);
|
||||
ptr_decoder_ = NULL;
|
||||
memset(&receive_codec_, 0, sizeof(VideoCodec));
|
||||
return NULL;
|
||||
}
|
||||
return ptr_decoder_;
|
||||
}
|
||||
|
||||
VCMGenericDecoder* VCMCodecDataBase::CreateDecoderCopy() const {
|
||||
if (!ptr_decoder_) {
|
||||
return NULL;
|
||||
}
|
||||
VideoDecoder* decoder_copy = ptr_decoder_->_decoder.Copy();
|
||||
if (!decoder_copy) {
|
||||
return NULL;
|
||||
}
|
||||
return new VCMGenericDecoder(*decoder_copy, id_, ptr_decoder_->External());
|
||||
}
|
||||
|
||||
void VCMCodecDataBase::ReleaseDecoder(VCMGenericDecoder* decoder) const {
|
||||
if (decoder) {
|
||||
assert(&decoder->_decoder);
|
||||
decoder->Release();
|
||||
if (!decoder->External()) {
|
||||
delete &decoder->_decoder;
|
||||
}
|
||||
delete decoder;
|
||||
}
|
||||
}
|
||||
|
||||
void VCMCodecDataBase::CopyDecoder(const VCMGenericDecoder& decoder) {
|
||||
VideoDecoder* decoder_copy = decoder._decoder.Copy();
|
||||
if (decoder_copy) {
|
||||
VCMDecodedFrameCallback* cb = ptr_decoder_->_callback;
|
||||
ReleaseDecoder(ptr_decoder_);
|
||||
ptr_decoder_ = new VCMGenericDecoder(*decoder_copy, id_,
|
||||
decoder.External());
|
||||
if (cb && ptr_decoder_->RegisterDecodeCompleteCallback(cb)) {
|
||||
assert(false);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
bool VCMCodecDataBase::SupportsRenderScheduling() const {
|
||||
bool render_timing = true;
|
||||
if (current_dec_is_external_) {
|
||||
const VCMExtDecoderMapItem* ext_item = FindExternalDecoderItem(
|
||||
receive_codec_.plType);
|
||||
render_timing = ext_item->internal_render_timing;
|
||||
}
|
||||
return render_timing;
|
||||
}
|
||||
|
||||
VCMGenericDecoder* VCMCodecDataBase::CreateAndInitDecoder(
|
||||
uint8_t payload_type,
|
||||
VideoCodec* new_codec,
|
||||
bool* external) const {
|
||||
assert(external);
|
||||
assert(new_codec);
|
||||
const VCMDecoderMapItem* decoder_item = FindDecoderItem(payload_type);
|
||||
if (!decoder_item) {
|
||||
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCoding, VCMId(id_),
|
||||
"Unknown payload type: %u", payload_type);
|
||||
return NULL;
|
||||
}
|
||||
VCMGenericDecoder* ptr_decoder = NULL;
|
||||
const VCMExtDecoderMapItem* external_dec_item = FindExternalDecoderItem(
|
||||
payload_type);
|
||||
if (external_dec_item) {
|
||||
// External codec.
|
||||
ptr_decoder = new VCMGenericDecoder(
|
||||
*external_dec_item->external_decoder_instance, id_, true);
|
||||
*external = true;
|
||||
} else {
|
||||
// Create decoder.
|
||||
ptr_decoder = CreateDecoder(decoder_item->settings->codecType);
|
||||
*external = false;
|
||||
}
|
||||
if (!ptr_decoder) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if (ptr_decoder->InitDecode(decoder_item->settings.get(),
|
||||
decoder_item->number_of_cores,
|
||||
decoder_item->require_key_frame) < 0) {
|
||||
ReleaseDecoder(ptr_decoder);
|
||||
return NULL;
|
||||
}
|
||||
memcpy(new_codec, decoder_item->settings.get(), sizeof(VideoCodec));
|
||||
return ptr_decoder;
|
||||
}
|
||||
|
||||
VCMGenericEncoder* VCMCodecDataBase::CreateEncoder(
|
||||
const VideoCodecType type) const {
|
||||
switch (type) {
|
||||
#ifdef VIDEOCODEC_VP8
|
||||
case kVideoCodecVP8:
|
||||
return new VCMGenericEncoder(*(VP8Encoder::Create()));
|
||||
#endif
|
||||
#ifdef VIDEOCODEC_I420
|
||||
case kVideoCodecI420:
|
||||
return new VCMGenericEncoder(*(new I420Encoder));
|
||||
#endif
|
||||
default:
|
||||
return NULL;
|
||||
}
|
||||
}
|
||||
|
||||
void VCMCodecDataBase::DeleteEncoder() {
|
||||
if (ptr_encoder_) {
|
||||
ptr_encoder_->Release();
|
||||
if (!current_enc_is_external_) {
|
||||
delete &ptr_encoder_->_encoder;
|
||||
}
|
||||
delete ptr_encoder_;
|
||||
ptr_encoder_ = NULL;
|
||||
}
|
||||
}
|
||||
|
||||
VCMGenericDecoder* VCMCodecDataBase::CreateDecoder(VideoCodecType type) const {
|
||||
switch (type) {
|
||||
#ifdef VIDEOCODEC_VP8
|
||||
case kVideoCodecVP8:
|
||||
return new VCMGenericDecoder(*(VP8Decoder::Create()), id_);
|
||||
#endif
|
||||
#ifdef VIDEOCODEC_I420
|
||||
case kVideoCodecI420:
|
||||
return new VCMGenericDecoder(*(new I420Decoder), id_);
|
||||
#endif
|
||||
default:
|
||||
return NULL;
|
||||
}
|
||||
}
|
||||
|
||||
const VCMDecoderMapItem* VCMCodecDataBase::FindDecoderItem(
|
||||
uint8_t payload_type) const {
|
||||
DecoderMap::const_iterator it = dec_map_.find(payload_type);
|
||||
if (it != dec_map_.end()) {
|
||||
return (*it).second;
|
||||
}
|
||||
return NULL;
|
||||
}
|
||||
|
||||
const VCMExtDecoderMapItem* VCMCodecDataBase::FindExternalDecoderItem(
|
||||
uint8_t payload_type) const {
|
||||
ExternalDecoderMap::const_iterator it = dec_external_map_.find(payload_type);
|
||||
if (it != dec_external_map_.end()) {
|
||||
return (*it).second;
|
||||
}
|
||||
return NULL;
|
||||
}
|
||||
} // namespace webrtc
|
||||
200
webrtc/modules/video_coding/main/source/codec_database.h
Normal file
200
webrtc/modules/video_coding/main/source/codec_database.h
Normal file
@@ -0,0 +1,200 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_CODEC_DATABASE_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_CODEC_DATABASE_H_
|
||||
|
||||
#include <map>
|
||||
|
||||
#include "modules/video_coding/codecs/interface/video_codec_interface.h"
|
||||
#include "modules/video_coding/main/interface/video_coding.h"
|
||||
#include "modules/video_coding/main/source/generic_decoder.h"
|
||||
#include "modules/video_coding/main/source/generic_encoder.h"
|
||||
#include "system_wrappers/interface/scoped_ptr.h"
|
||||
#include "typedefs.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
enum VCMCodecDBProperties {
|
||||
kDefaultPayloadSize = 1440
|
||||
};
|
||||
|
||||
struct VCMDecoderMapItem {
|
||||
public:
|
||||
VCMDecoderMapItem(VideoCodec* settings,
|
||||
int number_of_cores,
|
||||
bool require_key_frame);
|
||||
|
||||
scoped_ptr<VideoCodec> settings;
|
||||
int number_of_cores;
|
||||
bool require_key_frame;
|
||||
};
|
||||
|
||||
struct VCMExtDecoderMapItem {
|
||||
public:
|
||||
VCMExtDecoderMapItem(VideoDecoder* external_decoder_instance,
|
||||
uint8_t payload_type,
|
||||
bool internal_render_timing);
|
||||
|
||||
uint8_t payload_type;
|
||||
VideoDecoder* external_decoder_instance;
|
||||
bool internal_render_timing;
|
||||
};
|
||||
|
||||
class VCMCodecDataBase {
|
||||
public:
|
||||
explicit VCMCodecDataBase(int id);
|
||||
~VCMCodecDataBase();
|
||||
|
||||
// Sender Side
|
||||
// Returns the number of supported codecs (or -1 in case of error).
|
||||
static int NumberOfCodecs();
|
||||
|
||||
// Returns the default settings for the codec with id |list_id|.
|
||||
static bool Codec(int list_id, VideoCodec* settings);
|
||||
|
||||
// Returns the default settings for the codec with type |codec_type|.
|
||||
static bool Codec(VideoCodecType codec_type, VideoCodec* settings);
|
||||
|
||||
void ResetSender();
|
||||
|
||||
// Sets the sender side codec and initiates the desired codec given the
|
||||
// VideoCodec struct.
|
||||
// Returns true if the codec was successfully registered, false otherwise.
|
||||
bool RegisterSendCodec(const VideoCodec* send_codec,
|
||||
int number_of_cores,
|
||||
int max_payload_size);
|
||||
|
||||
// Gets the current send codec. Relevant for internal codecs only.
|
||||
// Returns true if there is a send codec, false otherwise.
|
||||
bool SendCodec(VideoCodec* current_send_codec) const;
|
||||
|
||||
// Gets current send side codec type. Relevant for internal codecs only.
|
||||
// Returns kVideoCodecUnknown if there is no send codec.
|
||||
VideoCodecType SendCodec() const;
|
||||
|
||||
// Registers and initializes an external encoder object.
|
||||
// |internal_source| should be set to true if the codec has an internal
|
||||
// video source and doesn't need the user to provide it with frames via
|
||||
// the Encode() method.
|
||||
void RegisterExternalEncoder(VideoEncoder* external_encoder,
|
||||
uint8_t payload_type,
|
||||
bool internal_source);
|
||||
|
||||
// Deregisters an external encoder. Returns true if the encoder was
|
||||
// found and deregistered, false otherwise. |was_send_codec| is set to true
|
||||
// if the external encoder was the send codec before being deregistered.
|
||||
bool DeregisterExternalEncoder(uint8_t payload_type, bool* was_send_codec);
|
||||
|
||||
// Returns an encoder specified by the payload type in |settings|. The
|
||||
// encoded frame callback of the encoder is set to |encoded_frame_callback|.
|
||||
// If no such encoder already exists an instance will be created and
|
||||
// initialized using |settings|.
|
||||
// NULL is returned if no encoder with the specified payload type was found
|
||||
// and the function failed to create one.
|
||||
VCMGenericEncoder* GetEncoder(
|
||||
const VideoCodec* settings,
|
||||
VCMEncodedFrameCallback* encoded_frame_callback);
|
||||
|
||||
bool SetPeriodicKeyFrames(bool enable);
|
||||
|
||||
// Receiver Side
|
||||
void ResetReceiver();
|
||||
|
||||
// Deregisters an external decoder object specified by |payload_type|.
|
||||
bool DeregisterExternalDecoder(uint8_t payload_type);
|
||||
|
||||
// Registers an external decoder object to the payload type |payload_type|.
|
||||
// |internal_render_timing| is set to true if the |external_decoder| has
|
||||
// built in rendering which is able to obey the render timestamps of the
|
||||
// encoded frames.
|
||||
bool RegisterExternalDecoder(VideoDecoder* external_decoder,
|
||||
uint8_t payload_type,
|
||||
bool internal_render_timing);
|
||||
|
||||
bool DecoderRegistered() const;
|
||||
|
||||
bool RegisterReceiveCodec(const VideoCodec* receive_codec,
|
||||
int number_of_cores,
|
||||
bool require_key_frame);
|
||||
|
||||
bool DeregisterReceiveCodec(uint8_t payload_type);
|
||||
|
||||
// Get current receive side codec. Relevant for internal codecs only.
|
||||
bool ReceiveCodec(VideoCodec* current_receive_codec) const;
|
||||
|
||||
// Get current receive side codec type. Relevant for internal codecs only.
|
||||
VideoCodecType ReceiveCodec() const;
|
||||
|
||||
// Returns a decoder specified by |payload_type|. The decoded frame callback
|
||||
// of the encoder is set to |decoded_frame_callback|. If no such decoder
|
||||
// already exists an instance will be created and initialized.
|
||||
// NULL is returned if no encoder with the specified payload type was found
|
||||
// and the function failed to create one.
|
||||
VCMGenericDecoder* GetDecoder(
|
||||
uint8_t payload_type, VCMDecodedFrameCallback* decoded_frame_callback);
|
||||
|
||||
// Returns a deep copy of the currently active decoder.
|
||||
VCMGenericDecoder* CreateDecoderCopy() const;
|
||||
|
||||
// Deletes the memory of the decoder instance |decoder|. Used to delete
|
||||
// deep copies returned by CreateDecoderCopy().
|
||||
void ReleaseDecoder(VCMGenericDecoder* decoder) const;
|
||||
|
||||
// Creates a deep copy of |decoder| and replaces the currently used decoder
|
||||
// with it.
|
||||
void CopyDecoder(const VCMGenericDecoder& decoder);
|
||||
|
||||
// Returns true if the currently active decoder supports render scheduling,
|
||||
// that is, it is able to render frames according to the render timestamp of
|
||||
// the encoded frames.
|
||||
bool SupportsRenderScheduling() const;
|
||||
|
||||
private:
|
||||
typedef std::map<uint8_t, VCMDecoderMapItem*> DecoderMap;
|
||||
typedef std::map<uint8_t, VCMExtDecoderMapItem*> ExternalDecoderMap;
|
||||
|
||||
VCMGenericDecoder* CreateAndInitDecoder(uint8_t payload_type,
|
||||
VideoCodec* new_codec,
|
||||
bool* external) const;
|
||||
|
||||
// Create an internal encoder given a codec type.
|
||||
VCMGenericEncoder* CreateEncoder(const VideoCodecType type) const;
|
||||
|
||||
void DeleteEncoder();
|
||||
|
||||
// Create an internal Decoder given a codec type
|
||||
VCMGenericDecoder* CreateDecoder(VideoCodecType type) const;
|
||||
|
||||
const VCMDecoderMapItem* FindDecoderItem(uint8_t payload_type) const;
|
||||
|
||||
const VCMExtDecoderMapItem* FindExternalDecoderItem(
|
||||
uint8_t payload_type) const;
|
||||
|
||||
int id_;
|
||||
int number_of_cores_;
|
||||
int max_payload_size_;
|
||||
bool periodic_key_frames_;
|
||||
bool current_enc_is_external_;
|
||||
VideoCodec send_codec_;
|
||||
VideoCodec receive_codec_;
|
||||
uint8_t external_payload_type_;
|
||||
VideoEncoder* external_encoder_;
|
||||
bool internal_source_;
|
||||
VCMGenericEncoder* ptr_encoder_;
|
||||
VCMGenericDecoder* ptr_decoder_;
|
||||
bool current_dec_is_external_;
|
||||
DecoderMap dec_map_;
|
||||
ExternalDecoderMap dec_external_map_;
|
||||
}; // VCMCodecDataBase
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_CODEC_DATABASE_H_
|
||||
133
webrtc/modules/video_coding/main/source/codec_timer.cc
Normal file
133
webrtc/modules/video_coding/main/source/codec_timer.cc
Normal file
@@ -0,0 +1,133 @@
|
||||
/*
|
||||
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "codec_timer.h"
|
||||
|
||||
#include <assert.h>
|
||||
|
||||
namespace webrtc
|
||||
{
|
||||
|
||||
VCMCodecTimer::VCMCodecTimer()
|
||||
:
|
||||
_filteredMax(0),
|
||||
_firstDecodeTime(true),
|
||||
_shortMax(0),
|
||||
_history()
|
||||
{
|
||||
Reset();
|
||||
}
|
||||
|
||||
WebRtc_Word32 VCMCodecTimer::StopTimer(WebRtc_Word64 startTimeMs, WebRtc_Word64 nowMs)
|
||||
{
|
||||
const WebRtc_Word32 timeDiff = static_cast<WebRtc_Word32>(nowMs - startTimeMs);
|
||||
MaxFilter(timeDiff, nowMs);
|
||||
return timeDiff;
|
||||
}
|
||||
|
||||
void VCMCodecTimer::Reset()
|
||||
{
|
||||
_filteredMax = 0;
|
||||
_firstDecodeTime = true;
|
||||
_shortMax = 0;
|
||||
for (int i=0; i < MAX_HISTORY_SIZE; i++)
|
||||
{
|
||||
_history[i].shortMax = 0;
|
||||
_history[i].timeMs = -1;
|
||||
}
|
||||
}
|
||||
|
||||
// Update the max-value filter
|
||||
void VCMCodecTimer::MaxFilter(WebRtc_Word32 decodeTime, WebRtc_Word64 nowMs)
|
||||
{
|
||||
if (!_firstDecodeTime)
|
||||
{
|
||||
UpdateMaxHistory(decodeTime, nowMs);
|
||||
ProcessHistory(nowMs);
|
||||
}
|
||||
else
|
||||
{
|
||||
_firstDecodeTime = false;
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
VCMCodecTimer::UpdateMaxHistory(WebRtc_Word32 decodeTime, WebRtc_Word64 now)
|
||||
{
|
||||
if (_history[0].timeMs >= 0 &&
|
||||
now - _history[0].timeMs < SHORT_FILTER_MS)
|
||||
{
|
||||
if (decodeTime > _shortMax)
|
||||
{
|
||||
_shortMax = decodeTime;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
// Only add a new value to the history once a second
|
||||
if(_history[0].timeMs == -1)
|
||||
{
|
||||
// First, no shift
|
||||
_shortMax = decodeTime;
|
||||
}
|
||||
else
|
||||
{
|
||||
// Shift
|
||||
for(int i = (MAX_HISTORY_SIZE - 2); i >= 0 ; i--)
|
||||
{
|
||||
_history[i+1].shortMax = _history[i].shortMax;
|
||||
_history[i+1].timeMs = _history[i].timeMs;
|
||||
}
|
||||
}
|
||||
if (_shortMax == 0)
|
||||
{
|
||||
_shortMax = decodeTime;
|
||||
}
|
||||
|
||||
_history[0].shortMax = _shortMax;
|
||||
_history[0].timeMs = now;
|
||||
_shortMax = 0;
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
VCMCodecTimer::ProcessHistory(WebRtc_Word64 nowMs)
|
||||
{
|
||||
_filteredMax = _shortMax;
|
||||
if (_history[0].timeMs == -1)
|
||||
{
|
||||
return;
|
||||
}
|
||||
for (int i=0; i < MAX_HISTORY_SIZE; i++)
|
||||
{
|
||||
if (_history[i].timeMs == -1)
|
||||
{
|
||||
break;
|
||||
}
|
||||
if (nowMs - _history[i].timeMs > MAX_HISTORY_SIZE * SHORT_FILTER_MS)
|
||||
{
|
||||
// This sample (and all samples after this) is too old
|
||||
break;
|
||||
}
|
||||
if (_history[i].shortMax > _filteredMax)
|
||||
{
|
||||
// This sample is the largest one this far into the history
|
||||
_filteredMax = _history[i].shortMax;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Get the maximum observed time within a time window
|
||||
WebRtc_Word32 VCMCodecTimer::RequiredDecodeTimeMs(FrameType /*frameType*/) const
|
||||
{
|
||||
return _filteredMax;
|
||||
}
|
||||
|
||||
}
|
||||
61
webrtc/modules/video_coding/main/source/codec_timer.h
Normal file
61
webrtc/modules/video_coding/main/source/codec_timer.h
Normal file
@@ -0,0 +1,61 @@
|
||||
/*
|
||||
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODEC_TIMER_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_CODEC_TIMER_H_
|
||||
|
||||
#include "typedefs.h"
|
||||
#include "module_common_types.h"
|
||||
|
||||
namespace webrtc
|
||||
{
|
||||
|
||||
// MAX_HISTORY_SIZE * SHORT_FILTER_MS defines the window size in milliseconds
|
||||
#define MAX_HISTORY_SIZE 20
|
||||
#define SHORT_FILTER_MS 1000
|
||||
|
||||
class VCMShortMaxSample
|
||||
{
|
||||
public:
|
||||
VCMShortMaxSample() : shortMax(0), timeMs(-1) {};
|
||||
|
||||
WebRtc_Word32 shortMax;
|
||||
WebRtc_Word64 timeMs;
|
||||
};
|
||||
|
||||
class VCMCodecTimer
|
||||
{
|
||||
public:
|
||||
VCMCodecTimer();
|
||||
|
||||
// Updates and returns the max filtered decode time.
|
||||
WebRtc_Word32 StopTimer(WebRtc_Word64 startTimeMs, WebRtc_Word64 nowMs);
|
||||
|
||||
// Empty the list of timers.
|
||||
void Reset();
|
||||
|
||||
// Get the required decode time in ms.
|
||||
WebRtc_Word32 RequiredDecodeTimeMs(FrameType frameType) const;
|
||||
|
||||
private:
|
||||
void UpdateMaxHistory(WebRtc_Word32 decodeTime, WebRtc_Word64 now);
|
||||
void MaxFilter(WebRtc_Word32 newTime, WebRtc_Word64 nowMs);
|
||||
void ProcessHistory(WebRtc_Word64 nowMs);
|
||||
|
||||
WebRtc_Word32 _filteredMax;
|
||||
bool _firstDecodeTime;
|
||||
WebRtc_Word32 _shortMax;
|
||||
VCMShortMaxSample _history[MAX_HISTORY_SIZE];
|
||||
|
||||
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_CODEC_TIMER_H_
|
||||
@@ -0,0 +1,125 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "modules/video_coding/main/source/content_metrics_processing.h"
|
||||
|
||||
#include <math.h>
|
||||
|
||||
#include "modules/interface/module_common_types.h"
|
||||
#include "modules/video_coding/main/interface/video_coding_defines.h"
|
||||
|
||||
namespace webrtc {
|
||||
//////////////////////////////////
|
||||
/// VCMContentMetricsProcessing //
|
||||
//////////////////////////////////
|
||||
|
||||
VCMContentMetricsProcessing::VCMContentMetricsProcessing()
|
||||
: recursive_avg_factor_(1 / 150.0f), // matched to 30fps.
|
||||
frame_cnt_uniform_avg_(0),
|
||||
avg_motion_level_(0.0f),
|
||||
avg_spatial_level_(0.0f) {
|
||||
recursive_avg_ = new VideoContentMetrics();
|
||||
uniform_avg_ = new VideoContentMetrics();
|
||||
}
|
||||
|
||||
VCMContentMetricsProcessing::~VCMContentMetricsProcessing() {
|
||||
delete recursive_avg_;
|
||||
delete uniform_avg_;
|
||||
}
|
||||
|
||||
int VCMContentMetricsProcessing::Reset() {
|
||||
recursive_avg_->Reset();
|
||||
uniform_avg_->Reset();
|
||||
frame_cnt_uniform_avg_ = 0;
|
||||
avg_motion_level_ = 0.0f;
|
||||
avg_spatial_level_ = 0.0f;
|
||||
return VCM_OK;
|
||||
}
|
||||
|
||||
void VCMContentMetricsProcessing::UpdateFrameRate(uint32_t frameRate) {
|
||||
// Update factor for recursive averaging.
|
||||
recursive_avg_factor_ = static_cast<float> (1000.0f) /
|
||||
static_cast<float>(frameRate * kQmMinIntervalMs);
|
||||
}
|
||||
|
||||
VideoContentMetrics* VCMContentMetricsProcessing::LongTermAvgData() {
|
||||
return recursive_avg_;
|
||||
}
|
||||
|
||||
VideoContentMetrics* VCMContentMetricsProcessing::ShortTermAvgData() {
|
||||
if (frame_cnt_uniform_avg_ == 0) {
|
||||
return NULL;
|
||||
}
|
||||
// Two metrics are used: motion and spatial level.
|
||||
uniform_avg_->motion_magnitude = avg_motion_level_ /
|
||||
static_cast<float>(frame_cnt_uniform_avg_);
|
||||
uniform_avg_->spatial_pred_err = avg_spatial_level_ /
|
||||
static_cast<float>(frame_cnt_uniform_avg_);
|
||||
return uniform_avg_;
|
||||
}
|
||||
|
||||
void VCMContentMetricsProcessing::ResetShortTermAvgData() {
|
||||
// Reset.
|
||||
avg_motion_level_ = 0.0f;
|
||||
avg_spatial_level_ = 0.0f;
|
||||
frame_cnt_uniform_avg_ = 0;
|
||||
}
|
||||
|
||||
int VCMContentMetricsProcessing::UpdateContentData(
|
||||
const VideoContentMetrics *contentMetrics) {
|
||||
if (contentMetrics == NULL) {
|
||||
return VCM_OK;
|
||||
}
|
||||
return ProcessContent(contentMetrics);
|
||||
}
|
||||
|
||||
int VCMContentMetricsProcessing::ProcessContent(
|
||||
const VideoContentMetrics *contentMetrics) {
|
||||
// Update the recursive averaged metrics: average is over longer window
|
||||
// of time: over QmMinIntervalMs ms.
|
||||
UpdateRecursiveAvg(contentMetrics);
|
||||
// Update the uniform averaged metrics: average is over shorter window
|
||||
// of time: based on ~RTCP reports.
|
||||
UpdateUniformAvg(contentMetrics);
|
||||
return VCM_OK;
|
||||
}
|
||||
|
||||
void VCMContentMetricsProcessing::UpdateUniformAvg(
|
||||
const VideoContentMetrics *contentMetrics) {
|
||||
// Update frame counter.
|
||||
frame_cnt_uniform_avg_ += 1;
|
||||
// Update averaged metrics: motion and spatial level are used.
|
||||
avg_motion_level_ += contentMetrics->motion_magnitude;
|
||||
avg_spatial_level_ += contentMetrics->spatial_pred_err;
|
||||
return;
|
||||
}
|
||||
|
||||
void VCMContentMetricsProcessing::UpdateRecursiveAvg(
|
||||
const VideoContentMetrics *contentMetrics) {
|
||||
|
||||
// Spatial metrics: 2x2, 1x2(H), 2x1(V).
|
||||
recursive_avg_->spatial_pred_err = (1 - recursive_avg_factor_) *
|
||||
recursive_avg_->spatial_pred_err +
|
||||
recursive_avg_factor_ * contentMetrics->spatial_pred_err;
|
||||
|
||||
recursive_avg_->spatial_pred_err_h = (1 - recursive_avg_factor_) *
|
||||
recursive_avg_->spatial_pred_err_h +
|
||||
recursive_avg_factor_ * contentMetrics->spatial_pred_err_h;
|
||||
|
||||
recursive_avg_->spatial_pred_err_v = (1 - recursive_avg_factor_) *
|
||||
recursive_avg_->spatial_pred_err_v +
|
||||
recursive_avg_factor_ * contentMetrics->spatial_pred_err_v;
|
||||
|
||||
// Motion metric: Derived from NFD (normalized frame difference).
|
||||
recursive_avg_->motion_magnitude = (1 - recursive_avg_factor_) *
|
||||
recursive_avg_->motion_magnitude +
|
||||
recursive_avg_factor_ * contentMetrics->motion_magnitude;
|
||||
}
|
||||
} // end of namespace
|
||||
@@ -0,0 +1,76 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_CONTENT_METRICS_PROCESSING_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_CONTENT_METRICS_PROCESSING_H_
|
||||
|
||||
#include "typedefs.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
struct VideoContentMetrics;
|
||||
|
||||
// QM interval time (in ms)
|
||||
enum {
|
||||
kQmMinIntervalMs = 10000
|
||||
};
|
||||
|
||||
// Flag for NFD metric vs motion metric
|
||||
enum {
|
||||
kNfdMetric = 1
|
||||
};
|
||||
|
||||
/**********************************/
|
||||
/* Content Metrics Processing */
|
||||
/**********************************/
|
||||
class VCMContentMetricsProcessing {
|
||||
public:
|
||||
VCMContentMetricsProcessing();
|
||||
~VCMContentMetricsProcessing();
|
||||
|
||||
// Update class with latest metrics.
|
||||
int UpdateContentData(const VideoContentMetrics *contentMetrics);
|
||||
|
||||
// Reset the short-term averaged content data.
|
||||
void ResetShortTermAvgData();
|
||||
|
||||
// Initialize.
|
||||
int Reset();
|
||||
|
||||
// Inform class of current frame rate.
|
||||
void UpdateFrameRate(uint32_t frameRate);
|
||||
|
||||
// Returns the long-term averaged content data: recursive average over longer
|
||||
// time scale.
|
||||
VideoContentMetrics* LongTermAvgData();
|
||||
|
||||
// Returns the short-term averaged content data: uniform average over
|
||||
// shorter time scalE.
|
||||
VideoContentMetrics* ShortTermAvgData();
|
||||
|
||||
private:
|
||||
// Compute working average.
|
||||
int ProcessContent(const VideoContentMetrics *contentMetrics);
|
||||
|
||||
// Update the recursive averaged metrics: longer time average (~5/10 secs).
|
||||
void UpdateRecursiveAvg(const VideoContentMetrics *contentMetrics);
|
||||
|
||||
// Update the uniform averaged metrics: shorter time average (~RTCP report).
|
||||
void UpdateUniformAvg(const VideoContentMetrics *contentMetrics);
|
||||
|
||||
VideoContentMetrics* recursive_avg_;
|
||||
VideoContentMetrics* uniform_avg_;
|
||||
float recursive_avg_factor_;
|
||||
uint32_t frame_cnt_uniform_avg_;
|
||||
float avg_motion_level_;
|
||||
float avg_spatial_level_;
|
||||
};
|
||||
} // namespace webrtc
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_CONTENT_METRICS_PROCESSING_H_
|
||||
206
webrtc/modules/video_coding/main/source/decoding_state.cc
Normal file
206
webrtc/modules/video_coding/main/source/decoding_state.cc
Normal file
@@ -0,0 +1,206 @@
|
||||
/*
|
||||
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "modules/video_coding/main/source/decoding_state.h"
|
||||
|
||||
#include "modules/video_coding/main/source/frame_buffer.h"
|
||||
#include "modules/video_coding/main/source/jitter_buffer_common.h"
|
||||
#include "modules/video_coding/main/source/packet.h"
|
||||
#include "modules/interface/module_common_types.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
VCMDecodingState::VCMDecodingState()
|
||||
: sequence_num_(0),
|
||||
time_stamp_(0),
|
||||
picture_id_(kNoPictureId),
|
||||
temporal_id_(kNoTemporalIdx),
|
||||
tl0_pic_id_(kNoTl0PicIdx),
|
||||
full_sync_(true),
|
||||
init_(true) {}
|
||||
|
||||
VCMDecodingState::~VCMDecodingState() {}
|
||||
|
||||
void VCMDecodingState::Reset() {
|
||||
// TODO(mikhal): Verify - not always would want to reset the sync
|
||||
sequence_num_ = 0;
|
||||
time_stamp_ = 0;
|
||||
picture_id_ = kNoPictureId;
|
||||
temporal_id_ = kNoTemporalIdx;
|
||||
tl0_pic_id_ = kNoTl0PicIdx;
|
||||
full_sync_ = true;
|
||||
init_ = true;
|
||||
}
|
||||
|
||||
uint32_t VCMDecodingState::time_stamp() const {
|
||||
return time_stamp_;
|
||||
}
|
||||
|
||||
uint16_t VCMDecodingState::sequence_num() const {
|
||||
return sequence_num_;
|
||||
}
|
||||
|
||||
bool VCMDecodingState::IsOldFrame(const VCMFrameBuffer* frame) const {
|
||||
assert(frame != NULL);
|
||||
if (init_)
|
||||
return false;
|
||||
return (LatestTimestamp(time_stamp_, frame->TimeStamp(), NULL)
|
||||
== time_stamp_);
|
||||
}
|
||||
|
||||
bool VCMDecodingState::IsOldPacket(const VCMPacket* packet) const {
|
||||
assert(packet != NULL);
|
||||
if (init_)
|
||||
return false;
|
||||
return (LatestTimestamp(time_stamp_, packet->timestamp, NULL)
|
||||
== time_stamp_);
|
||||
}
|
||||
|
||||
void VCMDecodingState::SetState(const VCMFrameBuffer* frame) {
|
||||
assert(frame != NULL && frame->GetHighSeqNum() >= 0);
|
||||
UpdateSyncState(frame);
|
||||
sequence_num_ = static_cast<uint16_t>(frame->GetHighSeqNum());
|
||||
time_stamp_ = frame->TimeStamp();
|
||||
picture_id_ = frame->PictureId();
|
||||
temporal_id_ = frame->TemporalId();
|
||||
tl0_pic_id_ = frame->Tl0PicId();
|
||||
init_ = false;
|
||||
}
|
||||
|
||||
void VCMDecodingState::SetStateOneBack(const VCMFrameBuffer* frame) {
|
||||
assert(frame != NULL && frame->GetHighSeqNum() >= 0);
|
||||
sequence_num_ = static_cast<uint16_t>(frame->GetHighSeqNum()) - 1u;
|
||||
time_stamp_ = frame->TimeStamp() - 1u;
|
||||
temporal_id_ = frame->TemporalId();
|
||||
if (frame->PictureId() != kNoPictureId) {
|
||||
if (frame->PictureId() == 0)
|
||||
picture_id_ = 0x7FFF;
|
||||
else
|
||||
picture_id_ = frame->PictureId() - 1;
|
||||
}
|
||||
if (frame->Tl0PicId() != kNoTl0PicIdx) {
|
||||
if (frame->Tl0PicId() == 0)
|
||||
tl0_pic_id_ = 0x00FF;
|
||||
else
|
||||
tl0_pic_id_ = frame->Tl0PicId() - 1;
|
||||
}
|
||||
init_ = false;
|
||||
}
|
||||
|
||||
void VCMDecodingState::UpdateOldPacket(const VCMPacket* packet) {
|
||||
assert(packet != NULL);
|
||||
if (packet->timestamp == time_stamp_) {
|
||||
// Late packet belonging to the last decoded frame - make sure we update the
|
||||
// last decoded sequence number.
|
||||
sequence_num_ = LatestSequenceNumber(packet->seqNum, sequence_num_, NULL);
|
||||
}
|
||||
}
|
||||
|
||||
void VCMDecodingState::SetSeqNum(uint16_t new_seq_num) {
|
||||
sequence_num_ = new_seq_num;
|
||||
}
|
||||
|
||||
bool VCMDecodingState::init() const {
|
||||
return init_;
|
||||
}
|
||||
|
||||
bool VCMDecodingState::full_sync() const {
|
||||
return full_sync_;
|
||||
}
|
||||
|
||||
void VCMDecodingState::UpdateSyncState(const VCMFrameBuffer* frame) {
|
||||
if (init_)
|
||||
return;
|
||||
if (frame->TemporalId() == kNoTemporalIdx ||
|
||||
frame->Tl0PicId() == kNoTl0PicIdx) {
|
||||
full_sync_ = true;
|
||||
} else if (frame->FrameType() == kVideoFrameKey || frame->LayerSync()) {
|
||||
full_sync_ = true;
|
||||
} else if (full_sync_) {
|
||||
// Verify that we are still in sync.
|
||||
// Sync will be broken if continuity is true for layers but not for the
|
||||
// other methods (PictureId and SeqNum).
|
||||
if (UsingPictureId(frame)) {
|
||||
full_sync_ = ContinuousPictureId(frame->PictureId());
|
||||
} else {
|
||||
full_sync_ = ContinuousSeqNum(static_cast<uint16_t>(
|
||||
frame->GetLowSeqNum()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
bool VCMDecodingState::ContinuousFrame(const VCMFrameBuffer* frame) const {
|
||||
// Check continuity based on the following hierarchy:
|
||||
// - Temporal layers (stop here if out of sync).
|
||||
// - Picture Id when available.
|
||||
// - Sequence numbers.
|
||||
// Return true when in initial state.
|
||||
// Note that when a method is not applicable it will return false.
|
||||
assert(frame != NULL);
|
||||
if (init_)
|
||||
return true;
|
||||
|
||||
if (!ContinuousLayer(frame->TemporalId(), frame->Tl0PicId())) {
|
||||
// Base layers are not continuous or temporal layers are inactive.
|
||||
// In the presence of temporal layers, check for Picture ID/sequence number
|
||||
// continuity if sync can be restored by this frame.
|
||||
if (!full_sync_ && !frame->LayerSync())
|
||||
return false;
|
||||
else if (UsingPictureId(frame)) {
|
||||
return ContinuousPictureId(frame->PictureId());
|
||||
} else {
|
||||
return ContinuousSeqNum(static_cast<uint16_t>(frame->GetLowSeqNum()));
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
bool VCMDecodingState::ContinuousPictureId(int picture_id) const {
|
||||
int next_picture_id = picture_id_ + 1;
|
||||
if (picture_id < picture_id_) {
|
||||
// Wrap
|
||||
if (picture_id_ >= 0x80) {
|
||||
// 15 bits used for picture id
|
||||
return ((next_picture_id & 0x7FFF) == picture_id);
|
||||
} else {
|
||||
// 7 bits used for picture id
|
||||
return ((next_picture_id & 0x7F) == picture_id);
|
||||
}
|
||||
}
|
||||
// No wrap
|
||||
return (next_picture_id == picture_id);
|
||||
}
|
||||
|
||||
bool VCMDecodingState::ContinuousSeqNum(uint16_t seq_num) const {
|
||||
return (seq_num == static_cast<uint16_t>(sequence_num_ + 1));
|
||||
}
|
||||
|
||||
bool VCMDecodingState::ContinuousLayer(int temporal_id,
|
||||
int tl0_pic_id) const {
|
||||
// First, check if applicable.
|
||||
if (temporal_id == kNoTemporalIdx || tl0_pic_id == kNoTl0PicIdx)
|
||||
return false;
|
||||
// If this is the first frame to use temporal layers, make sure we start
|
||||
// from base.
|
||||
else if (tl0_pic_id_ == kNoTl0PicIdx && temporal_id_ == kNoTemporalIdx &&
|
||||
temporal_id == 0)
|
||||
return true;
|
||||
|
||||
// Current implementation: Look for base layer continuity.
|
||||
if (temporal_id != 0)
|
||||
return false;
|
||||
return (static_cast<uint8_t>(tl0_pic_id_ + 1) == tl0_pic_id);
|
||||
}
|
||||
|
||||
bool VCMDecodingState::UsingPictureId(const VCMFrameBuffer* frame) const {
|
||||
return (frame->PictureId() != kNoPictureId && picture_id_ != kNoPictureId);
|
||||
}
|
||||
|
||||
} // namespace webrtc
|
||||
70
webrtc/modules/video_coding/main/source/decoding_state.h
Normal file
70
webrtc/modules/video_coding/main/source/decoding_state.h
Normal file
@@ -0,0 +1,70 @@
|
||||
/*
|
||||
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_DECODING_STATE_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_DECODING_STATE_H_
|
||||
|
||||
#include "typedefs.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Forward declarations
|
||||
class VCMFrameBuffer;
|
||||
class VCMPacket;
|
||||
|
||||
class VCMDecodingState {
|
||||
public:
|
||||
VCMDecodingState();
|
||||
~VCMDecodingState();
|
||||
// Check for old frame
|
||||
bool IsOldFrame(const VCMFrameBuffer* frame) const;
|
||||
// Check for old packet
|
||||
bool IsOldPacket(const VCMPacket* packet) const;
|
||||
// Check for frame continuity based on current decoded state. Use best method
|
||||
// possible, i.e. temporal info, picture ID or sequence number.
|
||||
bool ContinuousFrame(const VCMFrameBuffer* frame) const;
|
||||
void SetState(const VCMFrameBuffer* frame);
|
||||
// Set the decoding state one frame back.
|
||||
void SetStateOneBack(const VCMFrameBuffer* frame);
|
||||
// Update the sequence number if the timestamp matches current state and the
|
||||
// sequence number is higher than the current one. This accounts for packets
|
||||
// arriving late.
|
||||
void UpdateOldPacket(const VCMPacket* packet);
|
||||
void SetSeqNum(uint16_t new_seq_num);
|
||||
void Reset();
|
||||
uint32_t time_stamp() const;
|
||||
uint16_t sequence_num() const;
|
||||
// Return true if at initial state.
|
||||
bool init() const;
|
||||
// Return true when sync is on - decode all layers.
|
||||
bool full_sync() const;
|
||||
|
||||
private:
|
||||
void UpdateSyncState(const VCMFrameBuffer* frame);
|
||||
// Designated continuity functions
|
||||
bool ContinuousPictureId(int picture_id) const;
|
||||
bool ContinuousSeqNum(uint16_t seq_num) const;
|
||||
bool ContinuousLayer(int temporal_id, int tl0_pic_id) const;
|
||||
bool UsingPictureId(const VCMFrameBuffer* frame) const;
|
||||
|
||||
// Keep state of last decoded frame.
|
||||
// TODO(mikhal/stefan): create designated classes to handle these types.
|
||||
uint16_t sequence_num_;
|
||||
uint32_t time_stamp_;
|
||||
int picture_id_;
|
||||
int temporal_id_;
|
||||
int tl0_pic_id_;
|
||||
bool full_sync_; // Sync flag when temporal layers are used.
|
||||
bool init_;
|
||||
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_DECODING_STATE_H_
|
||||
@@ -0,0 +1,462 @@
|
||||
/*
|
||||
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include <string.h>
|
||||
|
||||
#include "modules/video_coding/main/source/decoding_state.h"
|
||||
#include "modules/video_coding/main/source/frame_buffer.h"
|
||||
#include "gtest/gtest.h"
|
||||
#include "modules/video_coding/main/source/jitter_buffer_common.h"
|
||||
#include "modules/interface/module_common_types.h"
|
||||
#include "modules/video_coding/main/source/packet.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
|
||||
TEST(TestDecodingState, Sanity) {
|
||||
VCMDecodingState dec_state;
|
||||
dec_state.Reset();
|
||||
EXPECT_TRUE(dec_state.init());
|
||||
EXPECT_TRUE(dec_state.full_sync());
|
||||
}
|
||||
|
||||
TEST(TestDecodingState, FrameContinuity) {
|
||||
VCMDecodingState dec_state;
|
||||
// Check that makes decision based on correct method.
|
||||
VCMFrameBuffer frame;
|
||||
frame.SetState(kStateEmpty);
|
||||
VCMPacket* packet = new VCMPacket();
|
||||
packet->isFirstPacket = 1;
|
||||
packet->timestamp = 1;
|
||||
packet->seqNum = 0xffff;
|
||||
packet->frameType = kVideoFrameDelta;
|
||||
packet->codecSpecificHeader.codec = kRTPVideoVP8;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.pictureId = 0x007F;
|
||||
frame.InsertPacket(*packet, 0, false, 0);
|
||||
// Should return true on init.
|
||||
dec_state.Reset();
|
||||
EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
|
||||
dec_state.SetState(&frame);
|
||||
frame.Reset();
|
||||
// Use pictureId
|
||||
packet->codecSpecificHeader.codecHeader.VP8.pictureId = 0x0002;
|
||||
frame.InsertPacket(*packet, 0, false, 0);
|
||||
EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
|
||||
frame.Reset();
|
||||
frame.SetState(kStateEmpty);
|
||||
packet->codecSpecificHeader.codecHeader.VP8.pictureId = 0;
|
||||
packet->seqNum = 10;
|
||||
frame.InsertPacket(*packet, 0, false, 0);
|
||||
EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
|
||||
|
||||
// Use sequence numbers.
|
||||
packet->codecSpecificHeader.codecHeader.VP8.pictureId = kNoPictureId;
|
||||
frame.Reset();
|
||||
frame.SetState(kStateEmpty);
|
||||
packet->seqNum = dec_state.sequence_num() - 1u;
|
||||
frame.InsertPacket(*packet, 0, false, 0);
|
||||
EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
|
||||
frame.Reset();
|
||||
frame.SetState(kStateEmpty);
|
||||
packet->seqNum = dec_state.sequence_num() + 1u;
|
||||
frame.InsertPacket(*packet, 0, false, 0);
|
||||
// Insert another packet to this frame
|
||||
packet->seqNum++;
|
||||
frame.InsertPacket(*packet, 0, false, 0);
|
||||
// Verify wrap.
|
||||
EXPECT_EQ(dec_state.sequence_num(), 0xffff);
|
||||
EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
|
||||
dec_state.SetState(&frame);
|
||||
|
||||
// Insert packet with temporal info.
|
||||
dec_state.Reset();
|
||||
frame.Reset();
|
||||
frame.SetState(kStateEmpty);
|
||||
packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.pictureId = 0;
|
||||
packet->seqNum = 1;
|
||||
packet->timestamp = 1;
|
||||
EXPECT_TRUE(dec_state.full_sync());
|
||||
frame.InsertPacket(*packet, 0, false, 0);
|
||||
dec_state.SetState(&frame);
|
||||
EXPECT_TRUE(dec_state.full_sync());
|
||||
frame.Reset();
|
||||
frame.SetState(kStateEmpty);
|
||||
// 1 layer up - still good.
|
||||
packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 1;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.pictureId = 1;
|
||||
packet->seqNum = 2;
|
||||
packet->timestamp = 2;
|
||||
frame.InsertPacket(*packet, 0, false, 0);
|
||||
EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
|
||||
dec_state.SetState(&frame);
|
||||
EXPECT_TRUE(dec_state.full_sync());
|
||||
frame.Reset();
|
||||
frame.SetState(kStateEmpty);
|
||||
// Lost non-base layer packet => should update sync parameter.
|
||||
packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 3;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.pictureId = 3;
|
||||
packet->seqNum = 4;
|
||||
packet->timestamp = 4;
|
||||
frame.InsertPacket(*packet, 0, false, 0);
|
||||
EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
|
||||
// Now insert the next non-base layer (belonging to a next tl0PicId).
|
||||
frame.Reset();
|
||||
frame.SetState(kStateEmpty);
|
||||
packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 1;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 2;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.pictureId = 4;
|
||||
packet->seqNum = 5;
|
||||
packet->timestamp = 5;
|
||||
frame.InsertPacket(*packet, 0, false, 0);
|
||||
// Checking continuity and not updating the state - this should not trigger
|
||||
// an update of sync state.
|
||||
EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
|
||||
EXPECT_TRUE(dec_state.full_sync());
|
||||
// Next base layer (dropped interim non-base layers) - should update sync.
|
||||
frame.Reset();
|
||||
frame.SetState(kStateEmpty);
|
||||
packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 1;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.pictureId = 5;
|
||||
packet->seqNum = 6;
|
||||
packet->timestamp = 6;
|
||||
frame.InsertPacket(*packet, 0, false, 0);
|
||||
EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
|
||||
dec_state.SetState(&frame);
|
||||
EXPECT_FALSE(dec_state.full_sync());
|
||||
|
||||
// Check wrap for temporal layers.
|
||||
frame.Reset();
|
||||
frame.SetState(kStateEmpty);
|
||||
packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0x00FF;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.pictureId = 6;
|
||||
packet->seqNum = 7;
|
||||
packet->timestamp = 7;
|
||||
frame.InsertPacket(*packet, 0, false, 0);
|
||||
dec_state.SetState(&frame);
|
||||
EXPECT_FALSE(dec_state.full_sync());
|
||||
frame.Reset();
|
||||
frame.SetState(kStateEmpty);
|
||||
packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0x0000;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.pictureId = 7;
|
||||
packet->seqNum = 8;
|
||||
packet->timestamp = 8;
|
||||
frame.InsertPacket(*packet, 0, false, 0);
|
||||
EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
|
||||
// The current frame is not continuous
|
||||
dec_state.SetState(&frame);
|
||||
EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
|
||||
delete packet;
|
||||
}
|
||||
|
||||
TEST(TestDecodingState, SetStateOneBack) {
|
||||
VCMDecodingState dec_state;
|
||||
VCMFrameBuffer frame;
|
||||
frame.SetState(kStateEmpty);
|
||||
VCMPacket* packet = new VCMPacket();
|
||||
// Based on PictureId.
|
||||
packet->frameType = kVideoFrameDelta;
|
||||
packet->codecSpecificHeader.codec = kRTPVideoVP8;
|
||||
packet->timestamp = 0;
|
||||
packet->seqNum = 0;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.pictureId = 0;
|
||||
packet->frameType = kVideoFrameDelta;
|
||||
frame.InsertPacket(*packet, 0, false, 0);
|
||||
dec_state.SetStateOneBack(&frame);
|
||||
EXPECT_EQ(dec_state.sequence_num(), 0xFFFF);
|
||||
// Check continuity.
|
||||
EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
|
||||
|
||||
// Based on Temporal layers.
|
||||
packet->timestamp = 0;
|
||||
packet->seqNum = 0;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.pictureId = kNoPictureId;
|
||||
packet->frameType = kVideoFrameDelta;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
|
||||
frame.InsertPacket(*packet, 0, false, 0);
|
||||
dec_state.SetStateOneBack(&frame);
|
||||
// Check continuity
|
||||
EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
|
||||
delete packet;
|
||||
}
|
||||
|
||||
TEST(TestDecodingState, UpdateOldPacket) {
|
||||
VCMDecodingState dec_state;
|
||||
// Update only if zero size and newer than previous.
|
||||
// Should only update if the timeStamp match.
|
||||
VCMFrameBuffer frame;
|
||||
frame.SetState(kStateEmpty);
|
||||
VCMPacket* packet = new VCMPacket();
|
||||
packet->timestamp = 1;
|
||||
packet->seqNum = 1;
|
||||
packet->frameType = kVideoFrameDelta;
|
||||
frame.InsertPacket(*packet, 0, false, 0);
|
||||
dec_state.SetState(&frame);
|
||||
EXPECT_EQ(dec_state.sequence_num(), 1);
|
||||
// Insert an empty packet that does not belong to the same frame.
|
||||
// => Sequence num should be the same.
|
||||
packet->timestamp = 2;
|
||||
dec_state.UpdateOldPacket(packet);
|
||||
EXPECT_EQ(dec_state.sequence_num(), 1);
|
||||
// Now insert empty packet belonging to the same frame.
|
||||
packet->timestamp = 1;
|
||||
packet->seqNum = 2;
|
||||
packet->frameType = kFrameEmpty;
|
||||
packet->sizeBytes = 0;
|
||||
dec_state.UpdateOldPacket(packet);
|
||||
EXPECT_EQ(dec_state.sequence_num(), 2);
|
||||
// Now insert delta packet belonging to the same frame.
|
||||
packet->timestamp = 1;
|
||||
packet->seqNum = 3;
|
||||
packet->frameType = kVideoFrameDelta;
|
||||
packet->sizeBytes = 1400;
|
||||
dec_state.UpdateOldPacket(packet);
|
||||
EXPECT_EQ(dec_state.sequence_num(), 3);
|
||||
// Insert a packet belonging to an older timestamp - should not update the
|
||||
// sequence number.
|
||||
packet->timestamp = 0;
|
||||
packet->seqNum = 4;
|
||||
packet->frameType = kFrameEmpty;
|
||||
packet->sizeBytes = 0;
|
||||
dec_state.UpdateOldPacket(packet);
|
||||
EXPECT_EQ(dec_state.sequence_num(), 3);
|
||||
|
||||
delete packet;
|
||||
}
|
||||
|
||||
TEST(TestDecodingState, MultiLayerBehavior) {
|
||||
// Identify sync/non-sync when more than one layer.
|
||||
VCMDecodingState dec_state;
|
||||
// Identify packets belonging to old frames/packets.
|
||||
// Set state for current frames.
|
||||
// tl0PicIdx 0, temporal id 0.
|
||||
VCMFrameBuffer frame;
|
||||
VCMPacket* packet = new VCMPacket();
|
||||
packet->frameType = kVideoFrameDelta;
|
||||
packet->codecSpecificHeader.codec = kRTPVideoVP8;
|
||||
frame.SetState(kStateEmpty);
|
||||
packet->timestamp = 0;
|
||||
packet->seqNum = 0;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.pictureId = 0;
|
||||
frame.InsertPacket(*packet, 0, false, 0);
|
||||
dec_state.SetState(&frame);
|
||||
// tl0PicIdx 0, temporal id 1.
|
||||
frame.Reset();
|
||||
frame.SetState(kStateEmpty);
|
||||
packet->timestamp = 1;
|
||||
packet->seqNum = 1;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 1;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.pictureId = 1;
|
||||
frame.InsertPacket(*packet, 0, false, 0);
|
||||
EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
|
||||
dec_state.SetState(&frame);
|
||||
EXPECT_TRUE(dec_state.full_sync());
|
||||
// Lost tl0PicIdx 0, temporal id 2.
|
||||
// Insert tl0PicIdx 0, temporal id 3.
|
||||
frame.Reset();
|
||||
frame.SetState(kStateEmpty);
|
||||
packet->timestamp = 3;
|
||||
packet->seqNum = 3;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 3;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.pictureId = 3;
|
||||
frame.InsertPacket(*packet, 0, false, 0);
|
||||
EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
|
||||
dec_state.SetState(&frame);
|
||||
EXPECT_FALSE(dec_state.full_sync());
|
||||
// Insert next base layer
|
||||
frame.Reset();
|
||||
frame.SetState(kStateEmpty);
|
||||
packet->timestamp = 4;
|
||||
packet->seqNum = 4;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 1;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.pictureId = 4;
|
||||
frame.InsertPacket(*packet, 0, false, 0);
|
||||
EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
|
||||
dec_state.SetState(&frame);
|
||||
EXPECT_FALSE(dec_state.full_sync());
|
||||
// Insert key frame - should update sync value.
|
||||
// A key frame is always a base layer.
|
||||
frame.Reset();
|
||||
frame.SetState(kStateEmpty);
|
||||
packet->frameType = kVideoFrameKey;
|
||||
packet->isFirstPacket = 1;
|
||||
packet->timestamp = 5;
|
||||
packet->seqNum = 5;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 2;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.pictureId = 5;
|
||||
frame.InsertPacket(*packet, 0, false, 0);
|
||||
EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
|
||||
dec_state.SetState(&frame);
|
||||
EXPECT_TRUE(dec_state.full_sync());
|
||||
// After sync, a continuous PictureId is required
|
||||
// (continuous base layer is not enough )
|
||||
frame.Reset();
|
||||
frame.SetState(kStateEmpty);
|
||||
packet->frameType = kVideoFrameDelta;
|
||||
packet->timestamp = 6;
|
||||
packet->seqNum = 6;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 3;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.pictureId = 6;
|
||||
frame.InsertPacket(*packet, 0, false, 0);
|
||||
EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
|
||||
EXPECT_TRUE(dec_state.full_sync());
|
||||
frame.Reset();
|
||||
frame.SetState(kStateEmpty);
|
||||
packet->frameType = kVideoFrameDelta;
|
||||
packet->isFirstPacket = 1;
|
||||
packet->timestamp = 8;
|
||||
packet->seqNum = 8;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 4;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.pictureId = 8;
|
||||
frame.InsertPacket(*packet, 0, false, 0);
|
||||
EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
|
||||
EXPECT_TRUE(dec_state.full_sync());
|
||||
dec_state.SetState(&frame);
|
||||
EXPECT_FALSE(dec_state.full_sync());
|
||||
|
||||
// Insert a non-ref frame - should update sync value.
|
||||
frame.Reset();
|
||||
frame.SetState(kStateEmpty);
|
||||
packet->frameType = kVideoFrameDelta;
|
||||
packet->isFirstPacket = 1;
|
||||
packet->timestamp = 9;
|
||||
packet->seqNum = 9;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 4;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 2;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.pictureId = 9;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.layerSync = true;
|
||||
frame.InsertPacket(*packet, 0, false, 0);
|
||||
dec_state.SetState(&frame);
|
||||
EXPECT_TRUE(dec_state.full_sync());
|
||||
|
||||
// The following test will verify the sync flag behavior after a loss.
|
||||
// Create the following pattern:
|
||||
// Update base layer, lose packet 1 (sync flag on, layer 2), insert packet 3
|
||||
// (sync flag on, layer 2) check continuity and sync flag after inserting
|
||||
// packet 2 (sync flag on, layer 1).
|
||||
// Base layer.
|
||||
frame.Reset();
|
||||
dec_state.Reset();
|
||||
frame.SetState(kStateEmpty);
|
||||
packet->frameType = kVideoFrameDelta;
|
||||
packet->isFirstPacket = 1;
|
||||
packet->markerBit = 1;
|
||||
packet->timestamp = 0;
|
||||
packet->seqNum = 0;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.pictureId = 0;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.layerSync = false;
|
||||
frame.InsertPacket(*packet, 0, false, 0);
|
||||
dec_state.SetState(&frame);
|
||||
EXPECT_TRUE(dec_state.full_sync());
|
||||
// Layer 2 - 2 packets (insert one, lose one).
|
||||
frame.Reset();
|
||||
frame.SetState(kStateEmpty);
|
||||
packet->frameType = kVideoFrameDelta;
|
||||
packet->isFirstPacket = 1;
|
||||
packet->markerBit = 0;
|
||||
packet->timestamp = 1;
|
||||
packet->seqNum = 1;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 2;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.pictureId = 1;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.layerSync = true;
|
||||
frame.InsertPacket(*packet, 0, false, 0);
|
||||
EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
|
||||
// Layer 1
|
||||
frame.Reset();
|
||||
frame.SetState(kStateEmpty);
|
||||
packet->frameType = kVideoFrameDelta;
|
||||
packet->isFirstPacket = 1;
|
||||
packet->markerBit = 1;
|
||||
packet->timestamp = 2;
|
||||
packet->seqNum = 3;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 1;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.pictureId = 2;
|
||||
packet->codecSpecificHeader.codecHeader.VP8.layerSync = true;
|
||||
frame.InsertPacket(*packet, 0, false, 0);
|
||||
EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
|
||||
EXPECT_TRUE(dec_state.full_sync());
|
||||
|
||||
delete packet;
|
||||
}
|
||||
|
||||
TEST(TestDecodingState, DiscontinuousPicIdContinuousSeqNum) {
|
||||
VCMDecodingState dec_state;
|
||||
VCMFrameBuffer frame;
|
||||
VCMPacket packet;
|
||||
frame.Reset();
|
||||
frame.SetState(kStateEmpty);
|
||||
packet.frameType = kVideoFrameKey;
|
||||
packet.codecSpecificHeader.codec = kRTPVideoVP8;
|
||||
packet.timestamp = 0;
|
||||
packet.seqNum = 0;
|
||||
packet.codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
|
||||
packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
|
||||
packet.codecSpecificHeader.codecHeader.VP8.pictureId = 0;
|
||||
frame.InsertPacket(packet, 0, false, 0);
|
||||
dec_state.SetState(&frame);
|
||||
EXPECT_TRUE(dec_state.full_sync());
|
||||
|
||||
// Continuous sequence number but discontinuous picture id. This implies a
|
||||
// a loss and we have to fall back to only decoding the base layer.
|
||||
frame.Reset();
|
||||
frame.SetState(kStateEmpty);
|
||||
packet.frameType = kVideoFrameDelta;
|
||||
packet.timestamp += 3000;
|
||||
++packet.seqNum;
|
||||
packet.codecSpecificHeader.codecHeader.VP8.temporalIdx = 1;
|
||||
packet.codecSpecificHeader.codecHeader.VP8.pictureId = 2;
|
||||
frame.InsertPacket(packet, 0, false, 0);
|
||||
EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
|
||||
dec_state.SetState(&frame);
|
||||
EXPECT_FALSE(dec_state.full_sync());
|
||||
}
|
||||
|
||||
TEST(TestDecodingState, OldInput) {
|
||||
VCMDecodingState dec_state;
|
||||
// Identify packets belonging to old frames/packets.
|
||||
// Set state for current frames.
|
||||
VCMFrameBuffer frame;
|
||||
frame.SetState(kStateEmpty);
|
||||
VCMPacket* packet = new VCMPacket();
|
||||
packet->timestamp = 10;
|
||||
packet->seqNum = 1;
|
||||
frame.InsertPacket(*packet, 0, false, 0);
|
||||
dec_state.SetState(&frame);
|
||||
packet->timestamp = 9;
|
||||
EXPECT_TRUE(dec_state.IsOldPacket(packet));
|
||||
// Check for old frame
|
||||
frame.Reset();
|
||||
frame.InsertPacket(*packet, 0, false, 0);
|
||||
EXPECT_TRUE(dec_state.IsOldFrame(&frame));
|
||||
|
||||
|
||||
delete packet;
|
||||
}
|
||||
|
||||
} // namespace webrtc
|
||||
256
webrtc/modules/video_coding/main/source/encoded_frame.cc
Normal file
256
webrtc/modules/video_coding/main/source/encoded_frame.cc
Normal file
@@ -0,0 +1,256 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "encoded_frame.h"
|
||||
#include "generic_encoder.h"
|
||||
#include "jitter_buffer_common.h"
|
||||
#include "video_coding_defines.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
VCMEncodedFrame::VCMEncodedFrame()
|
||||
:
|
||||
webrtc::EncodedImage(),
|
||||
_renderTimeMs(-1),
|
||||
_payloadType(0),
|
||||
_missingFrame(false),
|
||||
_codec(kVideoCodecUnknown),
|
||||
_fragmentation()
|
||||
{
|
||||
_codecSpecificInfo.codecType = kVideoCodecUnknown;
|
||||
}
|
||||
|
||||
VCMEncodedFrame::VCMEncodedFrame(const webrtc::EncodedImage& rhs)
|
||||
:
|
||||
webrtc::EncodedImage(rhs),
|
||||
_renderTimeMs(-1),
|
||||
_payloadType(0),
|
||||
_missingFrame(false),
|
||||
_codec(kVideoCodecUnknown),
|
||||
_fragmentation()
|
||||
{
|
||||
_codecSpecificInfo.codecType = kVideoCodecUnknown;
|
||||
_buffer = NULL;
|
||||
_size = 0;
|
||||
_length = 0;
|
||||
if (rhs._buffer != NULL)
|
||||
{
|
||||
VerifyAndAllocate(rhs._length);
|
||||
memcpy(_buffer, rhs._buffer, rhs._length);
|
||||
}
|
||||
}
|
||||
|
||||
VCMEncodedFrame::VCMEncodedFrame(const VCMEncodedFrame& rhs)
|
||||
:
|
||||
webrtc::EncodedImage(rhs),
|
||||
_renderTimeMs(rhs._renderTimeMs),
|
||||
_payloadType(rhs._payloadType),
|
||||
_missingFrame(rhs._missingFrame),
|
||||
_codecSpecificInfo(rhs._codecSpecificInfo),
|
||||
_codec(rhs._codec),
|
||||
_fragmentation() {
|
||||
_buffer = NULL;
|
||||
_size = 0;
|
||||
_length = 0;
|
||||
if (rhs._buffer != NULL)
|
||||
{
|
||||
VerifyAndAllocate(rhs._length);
|
||||
memcpy(_buffer, rhs._buffer, rhs._length);
|
||||
_length = rhs._length;
|
||||
}
|
||||
// Deep operator=
|
||||
_fragmentation = rhs._fragmentation;
|
||||
}
|
||||
|
||||
VCMEncodedFrame::~VCMEncodedFrame()
|
||||
{
|
||||
Free();
|
||||
}
|
||||
|
||||
void VCMEncodedFrame::Free()
|
||||
{
|
||||
Reset();
|
||||
if (_buffer != NULL)
|
||||
{
|
||||
delete [] _buffer;
|
||||
_buffer = NULL;
|
||||
}
|
||||
}
|
||||
|
||||
void VCMEncodedFrame::Reset()
|
||||
{
|
||||
_renderTimeMs = -1;
|
||||
_timeStamp = 0;
|
||||
_payloadType = 0;
|
||||
_frameType = kDeltaFrame;
|
||||
_encodedWidth = 0;
|
||||
_encodedHeight = 0;
|
||||
_completeFrame = false;
|
||||
_missingFrame = false;
|
||||
_length = 0;
|
||||
_codecSpecificInfo.codecType = kVideoCodecUnknown;
|
||||
_codec = kVideoCodecUnknown;
|
||||
}
|
||||
|
||||
void VCMEncodedFrame::CopyCodecSpecific(const RTPVideoHeader* header)
|
||||
{
|
||||
if (header)
|
||||
{
|
||||
switch (header->codec)
|
||||
{
|
||||
case kRTPVideoVP8:
|
||||
{
|
||||
if (_codecSpecificInfo.codecType != kVideoCodecVP8)
|
||||
{
|
||||
// This is the first packet for this frame.
|
||||
_codecSpecificInfo.codecSpecific.VP8.pictureId = -1;
|
||||
_codecSpecificInfo.codecSpecific.VP8.temporalIdx = 0;
|
||||
_codecSpecificInfo.codecSpecific.VP8.layerSync = false;
|
||||
_codecSpecificInfo.codecSpecific.VP8.keyIdx = -1;
|
||||
_codecSpecificInfo.codecType = kVideoCodecVP8;
|
||||
}
|
||||
_codecSpecificInfo.codecSpecific.VP8.nonReference =
|
||||
header->codecHeader.VP8.nonReference;
|
||||
if (header->codecHeader.VP8.pictureId != kNoPictureId)
|
||||
{
|
||||
_codecSpecificInfo.codecSpecific.VP8.pictureId =
|
||||
header->codecHeader.VP8.pictureId;
|
||||
}
|
||||
if (header->codecHeader.VP8.temporalIdx != kNoTemporalIdx)
|
||||
{
|
||||
_codecSpecificInfo.codecSpecific.VP8.temporalIdx =
|
||||
header->codecHeader.VP8.temporalIdx;
|
||||
_codecSpecificInfo.codecSpecific.VP8.layerSync =
|
||||
header->codecHeader.VP8.layerSync;
|
||||
}
|
||||
if (header->codecHeader.VP8.keyIdx != kNoKeyIdx)
|
||||
{
|
||||
_codecSpecificInfo.codecSpecific.VP8.keyIdx =
|
||||
header->codecHeader.VP8.keyIdx;
|
||||
}
|
||||
break;
|
||||
}
|
||||
default:
|
||||
{
|
||||
_codecSpecificInfo.codecType = kVideoCodecUnknown;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const RTPFragmentationHeader* VCMEncodedFrame::FragmentationHeader() const {
|
||||
return &_fragmentation;
|
||||
}
|
||||
|
||||
WebRtc_Word32
|
||||
VCMEncodedFrame::Store(VCMFrameStorageCallback& storeCallback) const
|
||||
{
|
||||
EncodedVideoData frameToStore;
|
||||
frameToStore.codec = _codec;
|
||||
if (_buffer != NULL)
|
||||
{
|
||||
frameToStore.VerifyAndAllocate(_length);
|
||||
memcpy(frameToStore.payloadData, _buffer, _length);
|
||||
frameToStore.payloadSize = _length;
|
||||
}
|
||||
frameToStore.completeFrame = _completeFrame;
|
||||
frameToStore.encodedWidth = _encodedWidth;
|
||||
frameToStore.encodedHeight = _encodedHeight;
|
||||
frameToStore.frameType = ConvertFrameType(_frameType);
|
||||
frameToStore.missingFrame = _missingFrame;
|
||||
frameToStore.payloadType = _payloadType;
|
||||
frameToStore.renderTimeMs = _renderTimeMs;
|
||||
frameToStore.timeStamp = _timeStamp;
|
||||
storeCallback.StoreReceivedFrame(frameToStore);
|
||||
return VCM_OK;
|
||||
}
|
||||
|
||||
WebRtc_Word32
|
||||
VCMEncodedFrame::VerifyAndAllocate(const WebRtc_UWord32 minimumSize)
|
||||
{
|
||||
if(minimumSize > _size)
|
||||
{
|
||||
// create buffer of sufficient size
|
||||
WebRtc_UWord8* newBuffer = new WebRtc_UWord8[minimumSize];
|
||||
if (newBuffer == NULL)
|
||||
{
|
||||
return -1;
|
||||
}
|
||||
if(_buffer)
|
||||
{
|
||||
// copy old data
|
||||
memcpy(newBuffer, _buffer, _size);
|
||||
delete [] _buffer;
|
||||
}
|
||||
_buffer = newBuffer;
|
||||
_size = minimumSize;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
webrtc::FrameType VCMEncodedFrame::ConvertFrameType(VideoFrameType frameType)
|
||||
{
|
||||
switch(frameType)
|
||||
{
|
||||
case kKeyFrame:
|
||||
{
|
||||
return kVideoFrameKey;
|
||||
}
|
||||
case kDeltaFrame:
|
||||
{
|
||||
return kVideoFrameDelta;
|
||||
}
|
||||
case kGoldenFrame:
|
||||
{
|
||||
return kVideoFrameGolden;
|
||||
}
|
||||
case kAltRefFrame:
|
||||
{
|
||||
return kVideoFrameAltRef;
|
||||
}
|
||||
case kSkipFrame:
|
||||
{
|
||||
return kFrameEmpty;
|
||||
}
|
||||
default:
|
||||
{
|
||||
return kVideoFrameDelta;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
VideoFrameType VCMEncodedFrame::ConvertFrameType(webrtc::FrameType frame_type) {
|
||||
switch (frame_type) {
|
||||
case kVideoFrameKey:
|
||||
return kKeyFrame;
|
||||
case kVideoFrameDelta:
|
||||
return kDeltaFrame;
|
||||
case kVideoFrameGolden:
|
||||
return kGoldenFrame;
|
||||
case kVideoFrameAltRef:
|
||||
return kAltRefFrame;
|
||||
default:
|
||||
assert(false);
|
||||
return kDeltaFrame;
|
||||
}
|
||||
}
|
||||
|
||||
void VCMEncodedFrame::ConvertFrameTypes(
|
||||
const std::vector<webrtc::FrameType>& frame_types,
|
||||
std::vector<VideoFrameType>* video_frame_types) {
|
||||
assert(video_frame_types);
|
||||
video_frame_types->reserve(frame_types.size());
|
||||
for (size_t i = 0; i < frame_types.size(); ++i) {
|
||||
(*video_frame_types)[i] = ConvertFrameType(frame_types[i]);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
125
webrtc/modules/video_coding/main/source/encoded_frame.h
Normal file
125
webrtc/modules/video_coding/main/source/encoded_frame.h
Normal file
@@ -0,0 +1,125 @@
|
||||
/*
|
||||
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_ENCODED_FRAME_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_ENCODED_FRAME_H_
|
||||
|
||||
#include <vector>
|
||||
|
||||
#include "common_types.h"
|
||||
#include "common_video/interface/video_image.h"
|
||||
#include "modules/interface/module_common_types.h"
|
||||
#include "modules/video_coding/codecs/interface/video_codec_interface.h"
|
||||
#include "modules/video_coding/main/interface/video_coding_defines.h"
|
||||
|
||||
namespace webrtc
|
||||
{
|
||||
|
||||
class VCMEncodedFrame : protected EncodedImage
|
||||
{
|
||||
public:
|
||||
VCMEncodedFrame();
|
||||
VCMEncodedFrame(const webrtc::EncodedImage& rhs);
|
||||
VCMEncodedFrame(const VCMEncodedFrame& rhs);
|
||||
|
||||
~VCMEncodedFrame();
|
||||
/**
|
||||
* Delete VideoFrame and resets members to zero
|
||||
*/
|
||||
void Free();
|
||||
/**
|
||||
* Set render time in milliseconds
|
||||
*/
|
||||
void SetRenderTime(const WebRtc_Word64 renderTimeMs) {_renderTimeMs = renderTimeMs;}
|
||||
|
||||
/**
|
||||
* Set the encoded frame size
|
||||
*/
|
||||
void SetEncodedSize(WebRtc_UWord32 width, WebRtc_UWord32 height)
|
||||
{ _encodedWidth = width; _encodedHeight = height; }
|
||||
/**
|
||||
* Get the encoded image
|
||||
*/
|
||||
const webrtc::EncodedImage& EncodedImage() const
|
||||
{ return static_cast<const webrtc::EncodedImage&>(*this); }
|
||||
/**
|
||||
* Get pointer to frame buffer
|
||||
*/
|
||||
const WebRtc_UWord8* Buffer() const {return _buffer;}
|
||||
/**
|
||||
* Get frame length
|
||||
*/
|
||||
WebRtc_UWord32 Length() const {return _length;}
|
||||
/**
|
||||
* Get frame timestamp (90kHz)
|
||||
*/
|
||||
WebRtc_UWord32 TimeStamp() const {return _timeStamp;}
|
||||
/**
|
||||
* Get render time in milliseconds
|
||||
*/
|
||||
WebRtc_Word64 RenderTimeMs() const {return _renderTimeMs;}
|
||||
/**
|
||||
* Get frame type
|
||||
*/
|
||||
webrtc::FrameType FrameType() const {return ConvertFrameType(_frameType);}
|
||||
/**
|
||||
* True if this frame is complete, false otherwise
|
||||
*/
|
||||
bool Complete() const { return _completeFrame; }
|
||||
/**
|
||||
* True if there's a frame missing before this frame
|
||||
*/
|
||||
bool MissingFrame() const { return _missingFrame; }
|
||||
/**
|
||||
* Payload type of the encoded payload
|
||||
*/
|
||||
WebRtc_UWord8 PayloadType() const { return _payloadType; }
|
||||
/**
|
||||
* Get codec specific info.
|
||||
* The returned pointer is only valid as long as the VCMEncodedFrame
|
||||
* is valid. Also, VCMEncodedFrame owns the pointer and will delete
|
||||
* the object.
|
||||
*/
|
||||
const CodecSpecificInfo* CodecSpecific() const {return &_codecSpecificInfo;}
|
||||
|
||||
const RTPFragmentationHeader* FragmentationHeader() const;
|
||||
|
||||
WebRtc_Word32 Store(VCMFrameStorageCallback& storeCallback) const;
|
||||
|
||||
static webrtc::FrameType ConvertFrameType(VideoFrameType frameType);
|
||||
static VideoFrameType ConvertFrameType(webrtc::FrameType frameType);
|
||||
static void ConvertFrameTypes(
|
||||
const std::vector<webrtc::FrameType>& frame_types,
|
||||
std::vector<VideoFrameType>* video_frame_types);
|
||||
|
||||
protected:
|
||||
/**
|
||||
* Verifies that current allocated buffer size is larger than or equal to the input size.
|
||||
* If the current buffer size is smaller, a new allocation is made and the old buffer data
|
||||
* is copied to the new buffer.
|
||||
* Buffer size is updated to minimumSize.
|
||||
*/
|
||||
WebRtc_Word32 VerifyAndAllocate(const WebRtc_UWord32 minimumSize);
|
||||
|
||||
void Reset();
|
||||
|
||||
void CopyCodecSpecific(const RTPVideoHeader* header);
|
||||
|
||||
WebRtc_Word64 _renderTimeMs;
|
||||
WebRtc_UWord8 _payloadType;
|
||||
bool _missingFrame;
|
||||
CodecSpecificInfo _codecSpecificInfo;
|
||||
webrtc::VideoCodecType _codec;
|
||||
RTPFragmentationHeader _fragmentation;
|
||||
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_ENCODED_FRAME_H_
|
||||
38742
webrtc/modules/video_coding/main/source/er_tables_xor.h
Normal file
38742
webrtc/modules/video_coding/main/source/er_tables_xor.h
Normal file
File diff suppressed because it is too large
Load Diff
63
webrtc/modules/video_coding/main/source/event.h
Normal file
63
webrtc/modules/video_coding/main/source/event.h
Normal file
@@ -0,0 +1,63 @@
|
||||
/*
|
||||
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_EVENT_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_EVENT_H_
|
||||
|
||||
#include "event_wrapper.h"
|
||||
|
||||
namespace webrtc
|
||||
{
|
||||
|
||||
//#define EVENT_DEBUG
|
||||
|
||||
class VCMEvent : public EventWrapper
|
||||
{
|
||||
public:
|
||||
VCMEvent() : _event(*EventWrapper::Create()) {};
|
||||
|
||||
virtual ~VCMEvent() { delete &_event; };
|
||||
|
||||
/**
|
||||
* Release waiting threads
|
||||
*/
|
||||
bool Set() { return _event.Set(); };
|
||||
|
||||
bool Reset() { return _event.Reset(); };
|
||||
|
||||
/**
|
||||
* Wait for this event
|
||||
*/
|
||||
EventTypeWrapper Wait(unsigned long maxTime)
|
||||
{
|
||||
#ifdef EVENT_DEBUG
|
||||
return kEventTimeout;
|
||||
#else
|
||||
return _event.Wait(maxTime);
|
||||
#endif
|
||||
};
|
||||
|
||||
/**
|
||||
* Start a timer
|
||||
*/
|
||||
bool StartTimer(bool periodic, unsigned long time)
|
||||
{ return _event.StartTimer(periodic, time); };
|
||||
/**
|
||||
* Stop the timer
|
||||
*/
|
||||
bool StopTimer() { return _event.StopTimer(); };
|
||||
|
||||
private:
|
||||
EventWrapper& _event;
|
||||
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_EVENT_H_
|
||||
60
webrtc/modules/video_coding/main/source/exp_filter.cc
Normal file
60
webrtc/modules/video_coding/main/source/exp_filter.cc
Normal file
@@ -0,0 +1,60 @@
|
||||
/*
|
||||
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "exp_filter.h"
|
||||
|
||||
#include <math.h>
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
void
|
||||
VCMExpFilter::Reset(float alpha)
|
||||
{
|
||||
_alpha = alpha;
|
||||
_filtered = -1.0;
|
||||
}
|
||||
|
||||
float
|
||||
VCMExpFilter::Apply(float exp, float sample)
|
||||
{
|
||||
if (_filtered == -1.0)
|
||||
{
|
||||
// Initialize filtered bit rates
|
||||
_filtered = sample;
|
||||
}
|
||||
else if (exp == 1.0)
|
||||
{
|
||||
_filtered = _alpha * _filtered + (1 - _alpha) * sample;
|
||||
}
|
||||
else
|
||||
{
|
||||
float alpha = pow(_alpha, exp);
|
||||
_filtered = alpha * _filtered + (1 - alpha) * sample;
|
||||
}
|
||||
if (_max != -1 && _filtered > _max)
|
||||
{
|
||||
_filtered = _max;
|
||||
}
|
||||
return _filtered;
|
||||
}
|
||||
|
||||
void
|
||||
VCMExpFilter::UpdateBase(float alpha)
|
||||
{
|
||||
_alpha = alpha;
|
||||
}
|
||||
|
||||
float
|
||||
VCMExpFilter::Value() const
|
||||
{
|
||||
return _filtered;
|
||||
}
|
||||
|
||||
}
|
||||
58
webrtc/modules/video_coding/main/source/exp_filter.h
Normal file
58
webrtc/modules/video_coding/main/source/exp_filter.h
Normal file
@@ -0,0 +1,58 @@
|
||||
/*
|
||||
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_EXP_FILTER_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_EXP_FILTER_H_
|
||||
|
||||
namespace webrtc
|
||||
{
|
||||
|
||||
/**********************/
|
||||
/* ExpFilter class */
|
||||
/**********************/
|
||||
|
||||
class VCMExpFilter
|
||||
{
|
||||
public:
|
||||
VCMExpFilter(float alpha, float max = -1.0) : _alpha(alpha), _filtered(-1.0), _max(max) {}
|
||||
|
||||
// Resets the filter to its initial state, and resets alpha to the given value
|
||||
//
|
||||
// Input:
|
||||
// - alpha : the new value of the filter factor base.
|
||||
void Reset(float alpha);
|
||||
|
||||
// Applies the filter with the given exponent on the provided sample
|
||||
//
|
||||
// Input:
|
||||
// - exp : Exponent T in y(k) = alpha^T * y(k-1) + (1 - alpha^T) * x(k)
|
||||
// - sample : x(k) in the above filter equation
|
||||
float Apply(float exp, float sample);
|
||||
|
||||
// Return current filtered value: y(k)
|
||||
//
|
||||
// Return value : The current filter output
|
||||
float Value() const;
|
||||
|
||||
// Change the filter factor base
|
||||
//
|
||||
// Input:
|
||||
// - alpha : The new filter factor base.
|
||||
void UpdateBase(float alpha);
|
||||
|
||||
private:
|
||||
float _alpha; // Filter factor base
|
||||
float _filtered; // Current filter output
|
||||
const float _max;
|
||||
}; // end of ExpFilter class
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_EXP_FILTER_H_
|
||||
6481
webrtc/modules/video_coding/main/source/fec_tables_xor.h
Normal file
6481
webrtc/modules/video_coding/main/source/fec_tables_xor.h
Normal file
File diff suppressed because it is too large
Load Diff
410
webrtc/modules/video_coding/main/source/frame_buffer.cc
Normal file
410
webrtc/modules/video_coding/main/source/frame_buffer.cc
Normal file
@@ -0,0 +1,410 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "frame_buffer.h"
|
||||
#include "packet.h"
|
||||
|
||||
#include <cassert>
|
||||
#include <string.h>
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
VCMFrameBuffer::VCMFrameBuffer()
|
||||
:
|
||||
_state(kStateFree),
|
||||
_frameCounted(false),
|
||||
_nackCount(0),
|
||||
_latestPacketTimeMs(-1) {
|
||||
}
|
||||
|
||||
VCMFrameBuffer::~VCMFrameBuffer() {
|
||||
}
|
||||
|
||||
VCMFrameBuffer::VCMFrameBuffer(VCMFrameBuffer& rhs)
|
||||
:
|
||||
VCMEncodedFrame(rhs),
|
||||
_state(rhs._state),
|
||||
_frameCounted(rhs._frameCounted),
|
||||
_sessionInfo(),
|
||||
_nackCount(rhs._nackCount),
|
||||
_latestPacketTimeMs(rhs._latestPacketTimeMs)
|
||||
{
|
||||
_sessionInfo = rhs._sessionInfo;
|
||||
_sessionInfo.UpdateDataPointers(rhs._buffer, _buffer);
|
||||
}
|
||||
|
||||
webrtc::FrameType
|
||||
VCMFrameBuffer::FrameType() const
|
||||
{
|
||||
return _sessionInfo.FrameType();
|
||||
}
|
||||
|
||||
void
|
||||
VCMFrameBuffer::SetPreviousFrameLoss()
|
||||
{
|
||||
_sessionInfo.SetPreviousFrameLoss();
|
||||
}
|
||||
|
||||
WebRtc_Word32
|
||||
VCMFrameBuffer::GetLowSeqNum() const
|
||||
{
|
||||
return _sessionInfo.LowSequenceNumber();
|
||||
}
|
||||
|
||||
WebRtc_Word32
|
||||
VCMFrameBuffer::GetHighSeqNum() const
|
||||
{
|
||||
return _sessionInfo.HighSequenceNumber();
|
||||
}
|
||||
|
||||
int VCMFrameBuffer::PictureId() const {
|
||||
return _sessionInfo.PictureId();
|
||||
}
|
||||
|
||||
int VCMFrameBuffer::TemporalId() const {
|
||||
return _sessionInfo.TemporalId();
|
||||
}
|
||||
|
||||
bool VCMFrameBuffer::LayerSync() const {
|
||||
return _sessionInfo.LayerSync();
|
||||
}
|
||||
|
||||
int VCMFrameBuffer::Tl0PicId() const {
|
||||
return _sessionInfo.Tl0PicId();
|
||||
}
|
||||
|
||||
bool VCMFrameBuffer::NonReference() const {
|
||||
return _sessionInfo.NonReference();
|
||||
}
|
||||
|
||||
bool
|
||||
VCMFrameBuffer::IsSessionComplete() const
|
||||
{
|
||||
return _sessionInfo.complete();
|
||||
}
|
||||
|
||||
// Insert packet
|
||||
VCMFrameBufferEnum
|
||||
VCMFrameBuffer::InsertPacket(const VCMPacket& packet, WebRtc_Word64 timeInMs,
|
||||
bool enableDecodableState, WebRtc_UWord32 rttMS)
|
||||
{
|
||||
if (_state == kStateDecoding)
|
||||
{
|
||||
// Do not insert packet
|
||||
return kNoError;
|
||||
}
|
||||
|
||||
// Sanity to check if the frame has been freed. (Too old for example)
|
||||
if (_state == kStateFree)
|
||||
{
|
||||
return kStateError;
|
||||
}
|
||||
|
||||
// is this packet part of this frame
|
||||
if (TimeStamp() && (TimeStamp() != packet.timestamp))
|
||||
{
|
||||
return kTimeStampError;
|
||||
}
|
||||
|
||||
// sanity checks
|
||||
if (_size + packet.sizeBytes +
|
||||
(packet.insertStartCode ? kH264StartCodeLengthBytes : 0 )
|
||||
> kMaxJBFrameSizeBytes)
|
||||
{
|
||||
return kSizeError;
|
||||
}
|
||||
if (NULL == packet.dataPtr && packet.sizeBytes > 0)
|
||||
{
|
||||
return kSizeError;
|
||||
}
|
||||
if (packet.dataPtr != NULL)
|
||||
{
|
||||
_payloadType = packet.payloadType;
|
||||
}
|
||||
|
||||
if (kStateEmpty == _state)
|
||||
{
|
||||
// First packet (empty and/or media) inserted into this frame.
|
||||
// store some info and set some initial values.
|
||||
_timeStamp = packet.timestamp;
|
||||
_codec = packet.codec;
|
||||
if (packet.frameType != kFrameEmpty)
|
||||
{
|
||||
// first media packet
|
||||
SetState(kStateIncomplete);
|
||||
}
|
||||
}
|
||||
|
||||
WebRtc_UWord32 requiredSizeBytes = Length() + packet.sizeBytes +
|
||||
(packet.insertStartCode ? kH264StartCodeLengthBytes : 0);
|
||||
if (requiredSizeBytes >= _size)
|
||||
{
|
||||
const WebRtc_UWord8* prevBuffer = _buffer;
|
||||
const WebRtc_UWord32 increments = requiredSizeBytes /
|
||||
kBufferIncStepSizeBytes +
|
||||
(requiredSizeBytes %
|
||||
kBufferIncStepSizeBytes > 0);
|
||||
const WebRtc_UWord32 newSize = _size +
|
||||
increments * kBufferIncStepSizeBytes;
|
||||
if (newSize > kMaxJBFrameSizeBytes)
|
||||
{
|
||||
return kSizeError;
|
||||
}
|
||||
if (VerifyAndAllocate(newSize) == -1)
|
||||
{
|
||||
return kSizeError;
|
||||
}
|
||||
_sessionInfo.UpdateDataPointers(prevBuffer, _buffer);
|
||||
}
|
||||
|
||||
CopyCodecSpecific(&packet.codecSpecificHeader);
|
||||
|
||||
int retVal = _sessionInfo.InsertPacket(packet, _buffer,
|
||||
enableDecodableState,
|
||||
rttMS);
|
||||
if (retVal == -1)
|
||||
{
|
||||
return kSizeError;
|
||||
}
|
||||
else if (retVal == -2)
|
||||
{
|
||||
return kDuplicatePacket;
|
||||
}
|
||||
// update length
|
||||
_length = Length() + static_cast<WebRtc_UWord32>(retVal);
|
||||
|
||||
_latestPacketTimeMs = timeInMs;
|
||||
|
||||
if (_sessionInfo.complete()) {
|
||||
return kCompleteSession;
|
||||
} else if (_sessionInfo.decodable()) {
|
||||
SetState(kStateDecodable);
|
||||
return kDecodableSession;
|
||||
} else {
|
||||
// this layer is not complete
|
||||
if (_state == kStateComplete) {
|
||||
// we already have a complete layer
|
||||
// wait for all independent layers belonging to the same frame
|
||||
_state = kStateIncomplete;
|
||||
}
|
||||
}
|
||||
return kIncomplete;
|
||||
}
|
||||
|
||||
WebRtc_Word64
|
||||
VCMFrameBuffer::LatestPacketTimeMs() const
|
||||
{
|
||||
return _latestPacketTimeMs;
|
||||
}
|
||||
|
||||
// Build hard NACK list:Zero out all entries in list up to and including the
|
||||
// (first) entry equal to _lowSeqNum.
|
||||
int VCMFrameBuffer::BuildHardNackList(int* list, int num) {
|
||||
if (_sessionInfo.BuildHardNackList(list, num) != 0) {
|
||||
return -1;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Build selective NACK list: Create a soft (selective) list of entries to zero
|
||||
// out up to and including the (first) entry equal to _lowSeqNum.
|
||||
int VCMFrameBuffer::BuildSoftNackList(int* list, int num, int rttMs) {
|
||||
return _sessionInfo.BuildSoftNackList(list, num, rttMs);
|
||||
}
|
||||
|
||||
void
|
||||
VCMFrameBuffer::IncrementNackCount()
|
||||
{
|
||||
_nackCount++;
|
||||
}
|
||||
|
||||
WebRtc_Word16
|
||||
VCMFrameBuffer::GetNackCount() const
|
||||
{
|
||||
return _nackCount;
|
||||
}
|
||||
|
||||
bool
|
||||
VCMFrameBuffer::HaveLastPacket() const
|
||||
{
|
||||
return _sessionInfo.HaveLastPacket();
|
||||
}
|
||||
|
||||
void
|
||||
VCMFrameBuffer::Reset()
|
||||
{
|
||||
_length = 0;
|
||||
_timeStamp = 0;
|
||||
_sessionInfo.Reset();
|
||||
_frameCounted = false;
|
||||
_payloadType = 0;
|
||||
_nackCount = 0;
|
||||
_latestPacketTimeMs = -1;
|
||||
_state = kStateFree;
|
||||
VCMEncodedFrame::Reset();
|
||||
}
|
||||
|
||||
// Makes sure the session contains a decodable stream.
|
||||
void
|
||||
VCMFrameBuffer::MakeSessionDecodable()
|
||||
{
|
||||
WebRtc_UWord32 retVal;
|
||||
#ifdef INDEPENDENT_PARTITIONS
|
||||
if (_codec != kVideoCodecVP8) {
|
||||
retVal = _sessionInfo.MakeDecodable();
|
||||
_length -= retVal;
|
||||
}
|
||||
#else
|
||||
retVal = _sessionInfo.MakeDecodable();
|
||||
_length -= retVal;
|
||||
#endif
|
||||
}
|
||||
|
||||
// Set state of frame
|
||||
void
|
||||
VCMFrameBuffer::SetState(VCMFrameBufferStateEnum state)
|
||||
{
|
||||
if (_state == state)
|
||||
{
|
||||
return;
|
||||
}
|
||||
switch (state)
|
||||
{
|
||||
case kStateFree:
|
||||
// Reset everything
|
||||
// We can go to this state from all other states.
|
||||
// The one setting the state to free must ensure
|
||||
// that the frame is removed from the timestamp
|
||||
// ordered frame list in the jb.
|
||||
Reset();
|
||||
break;
|
||||
|
||||
case kStateIncomplete:
|
||||
// we can go to this state from state kStateEmpty
|
||||
assert(_state == kStateEmpty ||
|
||||
_state == kStateDecoding);
|
||||
|
||||
// Do nothing, we received a packet
|
||||
break;
|
||||
|
||||
case kStateComplete:
|
||||
assert(_state == kStateEmpty ||
|
||||
_state == kStateIncomplete ||
|
||||
_state == kStateDecodable);
|
||||
|
||||
break;
|
||||
|
||||
case kStateEmpty:
|
||||
assert(_state == kStateFree);
|
||||
// Do nothing
|
||||
break;
|
||||
|
||||
case kStateDecoding:
|
||||
// A frame might have received empty packets, or media packets might
|
||||
// have been removed when making the frame decodable. The frame can
|
||||
// still be set to decodable since it can be used to inform the
|
||||
// decoder of a frame loss.
|
||||
assert(_state == kStateComplete || _state == kStateIncomplete ||
|
||||
_state == kStateDecodable || _state == kStateEmpty);
|
||||
// Transfer frame information to EncodedFrame and create any codec
|
||||
// specific information
|
||||
RestructureFrameInformation();
|
||||
break;
|
||||
|
||||
case kStateDecodable:
|
||||
assert(_state == kStateEmpty ||
|
||||
_state == kStateIncomplete);
|
||||
break;
|
||||
}
|
||||
_state = state;
|
||||
}
|
||||
|
||||
void
|
||||
VCMFrameBuffer::RestructureFrameInformation()
|
||||
{
|
||||
PrepareForDecode();
|
||||
_frameType = ConvertFrameType(_sessionInfo.FrameType());
|
||||
_completeFrame = _sessionInfo.complete();
|
||||
_missingFrame = _sessionInfo.PreviousFrameLoss();
|
||||
}
|
||||
|
||||
WebRtc_Word32
|
||||
VCMFrameBuffer::ExtractFromStorage(const EncodedVideoData& frameFromStorage)
|
||||
{
|
||||
_frameType = ConvertFrameType(frameFromStorage.frameType);
|
||||
_timeStamp = frameFromStorage.timeStamp;
|
||||
_payloadType = frameFromStorage.payloadType;
|
||||
_encodedWidth = frameFromStorage.encodedWidth;
|
||||
_encodedHeight = frameFromStorage.encodedHeight;
|
||||
_missingFrame = frameFromStorage.missingFrame;
|
||||
_completeFrame = frameFromStorage.completeFrame;
|
||||
_renderTimeMs = frameFromStorage.renderTimeMs;
|
||||
_codec = frameFromStorage.codec;
|
||||
const WebRtc_UWord8 *prevBuffer = _buffer;
|
||||
if (VerifyAndAllocate(frameFromStorage.payloadSize) < 0)
|
||||
{
|
||||
return VCM_MEMORY;
|
||||
}
|
||||
_sessionInfo.UpdateDataPointers(prevBuffer, _buffer);
|
||||
memcpy(_buffer, frameFromStorage.payloadData, frameFromStorage.payloadSize);
|
||||
_length = frameFromStorage.payloadSize;
|
||||
return VCM_OK;
|
||||
}
|
||||
|
||||
int VCMFrameBuffer::NotDecodablePackets() const {
|
||||
return _sessionInfo.packets_not_decodable();
|
||||
}
|
||||
|
||||
// Set counted status (as counted by JB or not)
|
||||
void VCMFrameBuffer::SetCountedFrame(bool frameCounted)
|
||||
{
|
||||
_frameCounted = frameCounted;
|
||||
}
|
||||
|
||||
bool VCMFrameBuffer::GetCountedFrame() const
|
||||
{
|
||||
return _frameCounted;
|
||||
}
|
||||
|
||||
// Get current state of frame
|
||||
VCMFrameBufferStateEnum
|
||||
VCMFrameBuffer::GetState() const
|
||||
{
|
||||
return _state;
|
||||
}
|
||||
|
||||
// Get current state of frame
|
||||
VCMFrameBufferStateEnum
|
||||
VCMFrameBuffer::GetState(WebRtc_UWord32& timeStamp) const
|
||||
{
|
||||
timeStamp = TimeStamp();
|
||||
return GetState();
|
||||
}
|
||||
|
||||
bool
|
||||
VCMFrameBuffer::IsRetransmitted() const
|
||||
{
|
||||
return _sessionInfo.session_nack();
|
||||
}
|
||||
|
||||
void
|
||||
VCMFrameBuffer::PrepareForDecode()
|
||||
{
|
||||
#ifdef INDEPENDENT_PARTITIONS
|
||||
if (_codec == kVideoCodecVP8)
|
||||
{
|
||||
_length =
|
||||
_sessionInfo.BuildVP8FragmentationHeader(_buffer, _length,
|
||||
&_fragmentation);
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
}
|
||||
102
webrtc/modules/video_coding/main/source/frame_buffer.h
Normal file
102
webrtc/modules/video_coding/main/source/frame_buffer.h
Normal file
@@ -0,0 +1,102 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_FRAME_BUFFER_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_FRAME_BUFFER_H_
|
||||
|
||||
#include "modules/interface/module_common_types.h"
|
||||
#include "modules/video_coding/main/source/encoded_frame.h"
|
||||
#include "modules/video_coding/main/source/jitter_buffer_common.h"
|
||||
#include "modules/video_coding/main/source/session_info.h"
|
||||
#include "typedefs.h"
|
||||
|
||||
namespace webrtc
|
||||
{
|
||||
|
||||
class VCMFrameBuffer : public VCMEncodedFrame
|
||||
{
|
||||
public:
|
||||
VCMFrameBuffer();
|
||||
virtual ~VCMFrameBuffer();
|
||||
|
||||
VCMFrameBuffer(VCMFrameBuffer& rhs);
|
||||
|
||||
virtual void Reset();
|
||||
|
||||
VCMFrameBufferEnum InsertPacket(const VCMPacket& packet,
|
||||
WebRtc_Word64 timeInMs,
|
||||
bool enableDecodableState,
|
||||
WebRtc_UWord32 rttMs);
|
||||
|
||||
// State
|
||||
// Get current state of frame
|
||||
VCMFrameBufferStateEnum GetState() const;
|
||||
// Get current state and timestamp of frame
|
||||
VCMFrameBufferStateEnum GetState(WebRtc_UWord32& timeStamp) const;
|
||||
void SetState(VCMFrameBufferStateEnum state); // Set state of frame
|
||||
|
||||
bool IsRetransmitted() const;
|
||||
bool IsSessionComplete() const;
|
||||
bool HaveLastPacket() const;
|
||||
// Makes sure the session contain a decodable stream.
|
||||
void MakeSessionDecodable();
|
||||
|
||||
// Sequence numbers
|
||||
// Get lowest packet sequence number in frame
|
||||
WebRtc_Word32 GetLowSeqNum() const;
|
||||
// Get highest packet sequence number in frame
|
||||
WebRtc_Word32 GetHighSeqNum() const;
|
||||
|
||||
int PictureId() const;
|
||||
int TemporalId() const;
|
||||
bool LayerSync() const;
|
||||
int Tl0PicId() const;
|
||||
bool NonReference() const;
|
||||
|
||||
// Set counted status (as counted by JB or not)
|
||||
void SetCountedFrame(bool frameCounted);
|
||||
bool GetCountedFrame() const;
|
||||
|
||||
// NACK - Building the NACK lists.
|
||||
// Build hard NACK list: Zero out all entries in list up to and including
|
||||
// _lowSeqNum.
|
||||
int BuildHardNackList(int* list, int num);
|
||||
// Build soft NACK list: Zero out only a subset of the packets, discard
|
||||
// empty packets.
|
||||
int BuildSoftNackList(int* list, int num, int rttMs);
|
||||
void IncrementNackCount();
|
||||
WebRtc_Word16 GetNackCount() const;
|
||||
|
||||
WebRtc_Word64 LatestPacketTimeMs() const;
|
||||
|
||||
webrtc::FrameType FrameType() const;
|
||||
void SetPreviousFrameLoss();
|
||||
|
||||
WebRtc_Word32 ExtractFromStorage(const EncodedVideoData& frameFromStorage);
|
||||
|
||||
// The number of packets discarded because the decoder can't make use of
|
||||
// them.
|
||||
int NotDecodablePackets() const;
|
||||
|
||||
protected:
|
||||
void RestructureFrameInformation();
|
||||
void PrepareForDecode();
|
||||
|
||||
private:
|
||||
VCMFrameBufferStateEnum _state; // Current state of the frame
|
||||
bool _frameCounted; // Was this frame counted by JB?
|
||||
VCMSessionInfo _sessionInfo;
|
||||
WebRtc_UWord16 _nackCount;
|
||||
WebRtc_Word64 _latestPacketTimeMs;
|
||||
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_FRAME_BUFFER_H_
|
||||
350
webrtc/modules/video_coding/main/source/frame_dropper.cc
Normal file
350
webrtc/modules/video_coding/main/source/frame_dropper.cc
Normal file
@@ -0,0 +1,350 @@
|
||||
/*
|
||||
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "frame_dropper.h"
|
||||
#include "internal_defines.h"
|
||||
#include "trace.h"
|
||||
|
||||
namespace webrtc
|
||||
{
|
||||
|
||||
VCMFrameDropper::VCMFrameDropper(WebRtc_Word32 vcmId)
|
||||
:
|
||||
_vcmId(vcmId),
|
||||
_keyFrameSizeAvgKbits(0.9f),
|
||||
_keyFrameRatio(0.99f),
|
||||
_dropRatio(0.9f, 0.96f)
|
||||
{
|
||||
Reset();
|
||||
}
|
||||
|
||||
void
|
||||
VCMFrameDropper::Reset()
|
||||
{
|
||||
_keyFrameRatio.Reset(0.99f);
|
||||
_keyFrameRatio.Apply(1.0f, 1.0f/300.0f); // 1 key frame every 10th second in 30 fps
|
||||
_keyFrameSizeAvgKbits.Reset(0.9f);
|
||||
_keyFrameCount = 0;
|
||||
_accumulator = 0.0f;
|
||||
_accumulatorMax = 150.0f; // assume 300 kb/s and 0.5 s window
|
||||
_targetBitRate = 300.0f;
|
||||
_incoming_frame_rate = 30;
|
||||
_keyFrameSpreadFrames = 0.5f * _incoming_frame_rate;
|
||||
_dropNext = false;
|
||||
_dropRatio.Reset(0.9f);
|
||||
_dropRatio.Apply(0.0f, 0.0f); // Initialize to 0
|
||||
_dropCount = 0;
|
||||
_windowSize = 0.5f;
|
||||
_wasBelowMax = true;
|
||||
_enabled = true;
|
||||
_fastMode = false; // start with normal (non-aggressive) mode
|
||||
// Cap for the encoder buffer level/accumulator, in secs.
|
||||
_cap_buffer_size = 3.0f;
|
||||
// Cap on maximum amount of dropped frames between kept frames, in secs.
|
||||
_max_time_drops = 4.0f;
|
||||
}
|
||||
|
||||
void
|
||||
VCMFrameDropper::Enable(bool enable)
|
||||
{
|
||||
_enabled = enable;
|
||||
}
|
||||
|
||||
void
|
||||
VCMFrameDropper::Fill(WebRtc_UWord32 frameSizeBytes, bool deltaFrame)
|
||||
{
|
||||
if (!_enabled)
|
||||
{
|
||||
return;
|
||||
}
|
||||
float frameSizeKbits = 8.0f * static_cast<float>(frameSizeBytes) / 1000.0f;
|
||||
if (!deltaFrame && !_fastMode) // fast mode does not treat key-frames any different
|
||||
{
|
||||
_keyFrameSizeAvgKbits.Apply(1, frameSizeKbits);
|
||||
_keyFrameRatio.Apply(1.0, 1.0);
|
||||
if (frameSizeKbits > _keyFrameSizeAvgKbits.Value())
|
||||
{
|
||||
// Remove the average key frame size since we
|
||||
// compensate for key frames when adding delta
|
||||
// frames.
|
||||
frameSizeKbits -= _keyFrameSizeAvgKbits.Value();
|
||||
}
|
||||
else
|
||||
{
|
||||
// Shouldn't be negative, so zero is the lower bound.
|
||||
frameSizeKbits = 0;
|
||||
}
|
||||
if (_keyFrameRatio.Value() > 1e-5 && 1 / _keyFrameRatio.Value() < _keyFrameSpreadFrames)
|
||||
{
|
||||
// We are sending key frames more often than our upper bound for
|
||||
// how much we allow the key frame compensation to be spread
|
||||
// out in time. Therefor we must use the key frame ratio rather
|
||||
// than keyFrameSpreadFrames.
|
||||
_keyFrameCount = static_cast<WebRtc_Word32>(1 / _keyFrameRatio.Value() + 0.5);
|
||||
}
|
||||
else
|
||||
{
|
||||
// Compensate for the key frame the following frames
|
||||
_keyFrameCount = static_cast<WebRtc_Word32>(_keyFrameSpreadFrames + 0.5);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
// Decrease the keyFrameRatio
|
||||
_keyFrameRatio.Apply(1.0, 0.0);
|
||||
}
|
||||
// Change the level of the accumulator (bucket)
|
||||
_accumulator += frameSizeKbits;
|
||||
CapAccumulator();
|
||||
}
|
||||
|
||||
void
|
||||
VCMFrameDropper::Leak(WebRtc_UWord32 inputFrameRate)
|
||||
{
|
||||
if (!_enabled)
|
||||
{
|
||||
return;
|
||||
}
|
||||
if (inputFrameRate < 1)
|
||||
{
|
||||
return;
|
||||
}
|
||||
if (_targetBitRate < 0.0f)
|
||||
{
|
||||
return;
|
||||
}
|
||||
_keyFrameSpreadFrames = 0.5f * inputFrameRate;
|
||||
// T is the expected bits per frame (target). If all frames were the same size,
|
||||
// we would get T bits per frame. Notice that T is also weighted to be able to
|
||||
// force a lower frame rate if wanted.
|
||||
float T = _targetBitRate / inputFrameRate;
|
||||
if (_keyFrameCount > 0)
|
||||
{
|
||||
// Perform the key frame compensation
|
||||
if (_keyFrameRatio.Value() > 0 && 1 / _keyFrameRatio.Value() < _keyFrameSpreadFrames)
|
||||
{
|
||||
T -= _keyFrameSizeAvgKbits.Value() * _keyFrameRatio.Value();
|
||||
}
|
||||
else
|
||||
{
|
||||
T -= _keyFrameSizeAvgKbits.Value() / _keyFrameSpreadFrames;
|
||||
}
|
||||
_keyFrameCount--;
|
||||
}
|
||||
_accumulator -= T;
|
||||
UpdateRatio();
|
||||
}
|
||||
|
||||
void
|
||||
VCMFrameDropper::UpdateNack(WebRtc_UWord32 nackBytes)
|
||||
{
|
||||
if (!_enabled)
|
||||
{
|
||||
return;
|
||||
}
|
||||
_accumulator += static_cast<float>(nackBytes) * 8.0f / 1000.0f;
|
||||
}
|
||||
|
||||
void
|
||||
VCMFrameDropper::FillBucket(float inKbits, float outKbits)
|
||||
{
|
||||
_accumulator += (inKbits - outKbits);
|
||||
}
|
||||
|
||||
void
|
||||
VCMFrameDropper::UpdateRatio()
|
||||
{
|
||||
if (_accumulator > 1.3f * _accumulatorMax)
|
||||
{
|
||||
// Too far above accumulator max, react faster
|
||||
_dropRatio.UpdateBase(0.8f);
|
||||
}
|
||||
else
|
||||
{
|
||||
// Go back to normal reaction
|
||||
_dropRatio.UpdateBase(0.9f);
|
||||
}
|
||||
if (_accumulator > _accumulatorMax)
|
||||
{
|
||||
// We are above accumulator max, and should ideally
|
||||
// drop a frame. Increase the dropRatio and drop
|
||||
// the frame later.
|
||||
if (_wasBelowMax)
|
||||
{
|
||||
_dropNext = true;
|
||||
}
|
||||
if (_fastMode)
|
||||
{
|
||||
// always drop in aggressive mode
|
||||
_dropNext = true;
|
||||
}
|
||||
|
||||
_dropRatio.Apply(1.0f, 1.0f);
|
||||
_dropRatio.UpdateBase(0.9f);
|
||||
}
|
||||
else
|
||||
{
|
||||
_dropRatio.Apply(1.0f, 0.0f);
|
||||
}
|
||||
if (_accumulator < 0.0f)
|
||||
{
|
||||
_accumulator = 0.0f;
|
||||
}
|
||||
_wasBelowMax = _accumulator < _accumulatorMax;
|
||||
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId), "FrameDropper: dropRatio = %f accumulator = %f, accumulatorMax = %f", _dropRatio.Value(), _accumulator, _accumulatorMax);
|
||||
}
|
||||
|
||||
// This function signals when to drop frames to the caller. It makes use of the dropRatio
|
||||
// to smooth out the drops over time.
|
||||
bool
|
||||
VCMFrameDropper::DropFrame()
|
||||
{
|
||||
if (!_enabled)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
if (_dropNext)
|
||||
{
|
||||
_dropNext = false;
|
||||
_dropCount = 0;
|
||||
}
|
||||
|
||||
if (_dropRatio.Value() >= 0.5f) // Drops per keep
|
||||
{
|
||||
// limit is the number of frames we should drop between each kept frame
|
||||
// to keep our drop ratio. limit is positive in this case.
|
||||
float denom = 1.0f - _dropRatio.Value();
|
||||
if (denom < 1e-5)
|
||||
{
|
||||
denom = (float)1e-5;
|
||||
}
|
||||
WebRtc_Word32 limit = static_cast<WebRtc_Word32>(1.0f / denom - 1.0f + 0.5f);
|
||||
// Put a bound on the max amount of dropped frames between each kept
|
||||
// frame, in terms of frame rate and window size (secs).
|
||||
int max_limit = static_cast<int>(_incoming_frame_rate *
|
||||
_max_time_drops);
|
||||
if (limit > max_limit) {
|
||||
limit = max_limit;
|
||||
}
|
||||
if (_dropCount < 0)
|
||||
{
|
||||
// Reset the _dropCount since it was negative and should be positive.
|
||||
if (_dropRatio.Value() > 0.4f)
|
||||
{
|
||||
_dropCount = -_dropCount;
|
||||
}
|
||||
else
|
||||
{
|
||||
_dropCount = 0;
|
||||
}
|
||||
}
|
||||
if (_dropCount < limit)
|
||||
{
|
||||
// As long we are below the limit we should drop frames.
|
||||
_dropCount++;
|
||||
return true;
|
||||
}
|
||||
else
|
||||
{
|
||||
// Only when we reset _dropCount a frame should be kept.
|
||||
_dropCount = 0;
|
||||
return false;
|
||||
}
|
||||
}
|
||||
else if (_dropRatio.Value() > 0.0f && _dropRatio.Value() < 0.5f) // Keeps per drop
|
||||
{
|
||||
// limit is the number of frames we should keep between each drop
|
||||
// in order to keep the drop ratio. limit is negative in this case,
|
||||
// and the _dropCount is also negative.
|
||||
float denom = _dropRatio.Value();
|
||||
if (denom < 1e-5)
|
||||
{
|
||||
denom = (float)1e-5;
|
||||
}
|
||||
WebRtc_Word32 limit = -static_cast<WebRtc_Word32>(1.0f / denom - 1.0f + 0.5f);
|
||||
if (_dropCount > 0)
|
||||
{
|
||||
// Reset the _dropCount since we have a positive
|
||||
// _dropCount, and it should be negative.
|
||||
if (_dropRatio.Value() < 0.6f)
|
||||
{
|
||||
_dropCount = -_dropCount;
|
||||
}
|
||||
else
|
||||
{
|
||||
_dropCount = 0;
|
||||
}
|
||||
}
|
||||
if (_dropCount > limit)
|
||||
{
|
||||
if (_dropCount == 0)
|
||||
{
|
||||
// Drop frames when we reset _dropCount.
|
||||
_dropCount--;
|
||||
return true;
|
||||
}
|
||||
else
|
||||
{
|
||||
// Keep frames as long as we haven't reached limit.
|
||||
_dropCount--;
|
||||
return false;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
_dropCount = 0;
|
||||
return false;
|
||||
}
|
||||
}
|
||||
_dropCount = 0;
|
||||
return false;
|
||||
|
||||
// A simpler version, unfiltered and quicker
|
||||
//bool dropNext = _dropNext;
|
||||
//_dropNext = false;
|
||||
//return dropNext;
|
||||
}
|
||||
|
||||
void
|
||||
VCMFrameDropper::SetRates(float bitRate, float incoming_frame_rate)
|
||||
{
|
||||
// Bit rate of -1 means infinite bandwidth.
|
||||
_accumulatorMax = bitRate * _windowSize; // bitRate * windowSize (in seconds)
|
||||
if (_targetBitRate > 0.0f && bitRate < _targetBitRate && _accumulator > _accumulatorMax)
|
||||
{
|
||||
// Rescale the accumulator level if the accumulator max decreases
|
||||
_accumulator = bitRate / _targetBitRate * _accumulator;
|
||||
}
|
||||
_targetBitRate = bitRate;
|
||||
CapAccumulator();
|
||||
_incoming_frame_rate = incoming_frame_rate;
|
||||
}
|
||||
|
||||
float
|
||||
VCMFrameDropper::ActualFrameRate(WebRtc_UWord32 inputFrameRate) const
|
||||
{
|
||||
if (!_enabled)
|
||||
{
|
||||
return static_cast<float>(inputFrameRate);
|
||||
}
|
||||
return inputFrameRate * (1.0f - _dropRatio.Value());
|
||||
}
|
||||
|
||||
// Put a cap on the accumulator, i.e., don't let it grow beyond some level.
|
||||
// This is a temporary fix for screencasting where very large frames from
|
||||
// encoder will cause very slow response (too many frame drops).
|
||||
void VCMFrameDropper::CapAccumulator() {
|
||||
float max_accumulator = _targetBitRate * _cap_buffer_size;
|
||||
if (_accumulator > max_accumulator) {
|
||||
_accumulator = max_accumulator;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
98
webrtc/modules/video_coding/main/source/frame_dropper.h
Normal file
98
webrtc/modules/video_coding/main/source/frame_dropper.h
Normal file
@@ -0,0 +1,98 @@
|
||||
/*
|
||||
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_FRAME_DROPPER_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_FRAME_DROPPER_H_
|
||||
|
||||
#include "exp_filter.h"
|
||||
#include "typedefs.h"
|
||||
|
||||
namespace webrtc
|
||||
{
|
||||
|
||||
/******************************/
|
||||
/* VCMFrameDropper class */
|
||||
/****************************/
|
||||
// The Frame Dropper implements a variant of the leaky bucket algorithm
|
||||
// for keeping track of when to drop frames to avoid bit rate
|
||||
// over use when the encoder can't keep its bit rate.
|
||||
class VCMFrameDropper
|
||||
{
|
||||
public:
|
||||
VCMFrameDropper(WebRtc_Word32 vcmId = 0);
|
||||
// Resets the FrameDropper to its initial state.
|
||||
// This means that the frameRateWeight is set to its
|
||||
// default value as well.
|
||||
void Reset();
|
||||
|
||||
void Enable(bool enable);
|
||||
// Answers the question if it's time to drop a frame
|
||||
// if we want to reach a given frame rate. Must be
|
||||
// called for every frame.
|
||||
//
|
||||
// Return value : True if we should drop the current frame
|
||||
bool DropFrame();
|
||||
// Updates the FrameDropper with the size of the latest encoded
|
||||
// frame. The FrameDropper calculates a new drop ratio (can be
|
||||
// seen as the probability to drop a frame) and updates its
|
||||
// internal statistics.
|
||||
//
|
||||
// Input:
|
||||
// - frameSizeBytes : The size of the latest frame
|
||||
// returned from the encoder.
|
||||
// - deltaFrame : True if the encoder returned
|
||||
// a key frame.
|
||||
void Fill(WebRtc_UWord32 frameSizeBytes, bool deltaFrame);
|
||||
|
||||
void Leak(WebRtc_UWord32 inputFrameRate);
|
||||
|
||||
void UpdateNack(WebRtc_UWord32 nackBytes);
|
||||
|
||||
// Sets the target bit rate and the frame rate produced by
|
||||
// the camera.
|
||||
//
|
||||
// Input:
|
||||
// - bitRate : The target bit rate
|
||||
void SetRates(float bitRate, float incoming_frame_rate);
|
||||
|
||||
// Return value : The current average frame rate produced
|
||||
// if the DropFrame() function is used as
|
||||
// instruction of when to drop frames.
|
||||
float ActualFrameRate(WebRtc_UWord32 inputFrameRate) const;
|
||||
|
||||
|
||||
private:
|
||||
void FillBucket(float inKbits, float outKbits);
|
||||
void UpdateRatio();
|
||||
void CapAccumulator();
|
||||
|
||||
WebRtc_Word32 _vcmId;
|
||||
VCMExpFilter _keyFrameSizeAvgKbits;
|
||||
VCMExpFilter _keyFrameRatio;
|
||||
float _keyFrameSpreadFrames;
|
||||
WebRtc_Word32 _keyFrameCount;
|
||||
float _accumulator;
|
||||
float _accumulatorMax;
|
||||
float _targetBitRate;
|
||||
bool _dropNext;
|
||||
VCMExpFilter _dropRatio;
|
||||
WebRtc_Word32 _dropCount;
|
||||
float _windowSize;
|
||||
float _incoming_frame_rate;
|
||||
bool _wasBelowMax;
|
||||
bool _enabled;
|
||||
bool _fastMode;
|
||||
float _cap_buffer_size;
|
||||
float _max_time_drops;
|
||||
}; // end of VCMFrameDropper class
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_FRAME_DROPPER_H_
|
||||
221
webrtc/modules/video_coding/main/source/generic_decoder.cc
Normal file
221
webrtc/modules/video_coding/main/source/generic_decoder.cc
Normal file
@@ -0,0 +1,221 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "video_coding.h"
|
||||
#include "trace.h"
|
||||
#include "generic_decoder.h"
|
||||
#include "internal_defines.h"
|
||||
#include "tick_time_base.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
VCMDecodedFrameCallback::VCMDecodedFrameCallback(VCMTiming& timing,
|
||||
TickTimeBase* clock)
|
||||
:
|
||||
_critSect(CriticalSectionWrapper::CreateCriticalSection()),
|
||||
_clock(clock),
|
||||
_receiveCallback(NULL),
|
||||
_timing(timing),
|
||||
_timestampMap(kDecoderFrameMemoryLength),
|
||||
_lastReceivedPictureID(0)
|
||||
{
|
||||
}
|
||||
|
||||
VCMDecodedFrameCallback::~VCMDecodedFrameCallback()
|
||||
{
|
||||
delete _critSect;
|
||||
}
|
||||
|
||||
void VCMDecodedFrameCallback::SetUserReceiveCallback(
|
||||
VCMReceiveCallback* receiveCallback)
|
||||
{
|
||||
CriticalSectionScoped cs(_critSect);
|
||||
_receiveCallback = receiveCallback;
|
||||
}
|
||||
|
||||
WebRtc_Word32 VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage)
|
||||
{
|
||||
// TODO(holmer): We should improve this so that we can handle multiple
|
||||
// callbacks from one call to Decode().
|
||||
CriticalSectionScoped cs(_critSect);
|
||||
VCMFrameInformation* frameInfo = static_cast<VCMFrameInformation*>(
|
||||
_timestampMap.Pop(decodedImage.TimeStamp()));
|
||||
if (frameInfo == NULL)
|
||||
{
|
||||
// The map should never be empty or full if this callback is called.
|
||||
return WEBRTC_VIDEO_CODEC_ERROR;
|
||||
}
|
||||
|
||||
_timing.StopDecodeTimer(
|
||||
decodedImage.TimeStamp(),
|
||||
frameInfo->decodeStartTimeMs,
|
||||
_clock->MillisecondTimestamp());
|
||||
|
||||
if (_receiveCallback != NULL)
|
||||
{
|
||||
_frame.SwapFrame(decodedImage);
|
||||
_frame.SetRenderTime(frameInfo->renderTimeMs);
|
||||
WebRtc_Word32 callbackReturn = _receiveCallback->FrameToRender(_frame);
|
||||
if (callbackReturn < 0)
|
||||
{
|
||||
WEBRTC_TRACE(webrtc::kTraceDebug,
|
||||
webrtc::kTraceVideoCoding,
|
||||
-1,
|
||||
"Render callback returned error: %d", callbackReturn);
|
||||
}
|
||||
}
|
||||
return WEBRTC_VIDEO_CODEC_OK;
|
||||
}
|
||||
|
||||
WebRtc_Word32
|
||||
VCMDecodedFrameCallback::ReceivedDecodedReferenceFrame(
|
||||
const WebRtc_UWord64 pictureId)
|
||||
{
|
||||
CriticalSectionScoped cs(_critSect);
|
||||
if (_receiveCallback != NULL)
|
||||
{
|
||||
return _receiveCallback->ReceivedDecodedReferenceFrame(pictureId);
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
WebRtc_Word32
|
||||
VCMDecodedFrameCallback::ReceivedDecodedFrame(const WebRtc_UWord64 pictureId)
|
||||
{
|
||||
_lastReceivedPictureID = pictureId;
|
||||
return 0;
|
||||
}
|
||||
|
||||
WebRtc_UWord64 VCMDecodedFrameCallback::LastReceivedPictureID() const
|
||||
{
|
||||
return _lastReceivedPictureID;
|
||||
}
|
||||
|
||||
WebRtc_Word32 VCMDecodedFrameCallback::Map(WebRtc_UWord32 timestamp, VCMFrameInformation* frameInfo)
|
||||
{
|
||||
CriticalSectionScoped cs(_critSect);
|
||||
return _timestampMap.Add(timestamp, frameInfo);
|
||||
}
|
||||
|
||||
WebRtc_Word32 VCMDecodedFrameCallback::Pop(WebRtc_UWord32 timestamp)
|
||||
{
|
||||
CriticalSectionScoped cs(_critSect);
|
||||
if (_timestampMap.Pop(timestamp) == NULL)
|
||||
{
|
||||
return VCM_GENERAL_ERROR;
|
||||
}
|
||||
return VCM_OK;
|
||||
}
|
||||
|
||||
VCMGenericDecoder::VCMGenericDecoder(VideoDecoder& decoder, WebRtc_Word32 id, bool isExternal)
|
||||
:
|
||||
_id(id),
|
||||
_callback(NULL),
|
||||
_frameInfos(),
|
||||
_nextFrameInfoIdx(0),
|
||||
_decoder(decoder),
|
||||
_codecType(kVideoCodecUnknown),
|
||||
_isExternal(isExternal),
|
||||
_requireKeyFrame(false),
|
||||
_keyFrameDecoded(false)
|
||||
{
|
||||
}
|
||||
|
||||
VCMGenericDecoder::~VCMGenericDecoder()
|
||||
{
|
||||
}
|
||||
|
||||
// Initializes the wrapped decoder with |settings|. |requireKeyFrame|
// enables the rule that no delta frame may be decoded before a key
// frame. Returns the decoder's InitDecode() result.
WebRtc_Word32 VCMGenericDecoder::InitDecode(const VideoCodec* settings,
                                            WebRtc_Word32 numberOfCores,
                                            bool requireKeyFrame) {
    _requireKeyFrame = requireKeyFrame;
    _keyFrameDecoded = false;
    _codecType = settings->codecType;
    return _decoder.InitDecode(settings, numberOfCores);
}
|
||||
|
||||
// Decodes |frame|. Registers per-frame timing info keyed by RTP timestamp
// before decoding (so the async decode-complete callback can find it), and
// unregisters it again on error or when the decoder produced no output.
// Returns the decoder's result code.
WebRtc_Word32 VCMGenericDecoder::Decode(const VCMEncodedFrame& frame,
                                        int64_t nowMs)
{
    if (_requireKeyFrame &&
        !_keyFrameDecoded &&
        frame.FrameType() != kVideoFrameKey &&
        frame.FrameType() != kVideoFrameGolden)
    {
        // Require key frame is enabled, meaning that one key frame must be decoded
        // before we can decode delta frames.
        return VCM_CODEC_ERROR;
    }
    // Store timing info in a fixed-size ring buffer (kDecoderFrameMemoryLength
    // entries) and map it by timestamp for the decode-complete callback.
    _frameInfos[_nextFrameInfoIdx].decodeStartTimeMs = nowMs;
    _frameInfos[_nextFrameInfoIdx].renderTimeMs = frame.RenderTimeMs();
    _callback->Map(frame.TimeStamp(), &_frameInfos[_nextFrameInfoIdx]);

    WEBRTC_TRACE(webrtc::kTraceDebug,
                 webrtc::kTraceVideoCoding,
                 VCMId(_id),
                 "Decoding timestamp %u", frame.TimeStamp());

    _nextFrameInfoIdx = (_nextFrameInfoIdx + 1) % kDecoderFrameMemoryLength;

    WebRtc_Word32 ret = _decoder.Decode(frame.EncodedImage(),
                                        frame.MissingFrame(),
                                        frame.FragmentationHeader(),
                                        frame.CodecSpecific(),
                                        frame.RenderTimeMs());

    if (ret < WEBRTC_VIDEO_CODEC_OK)
    {
        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCoding, VCMId(_id), "Decoder error: %d\n", ret);
        // No decoded image will arrive for this timestamp; drop its entry.
        _callback->Pop(frame.TimeStamp());
        return ret;
    }
    else if (ret == WEBRTC_VIDEO_CODEC_NO_OUTPUT ||
             ret == WEBRTC_VIDEO_CODEC_REQUEST_SLI)
    {
        // No output
        _callback->Pop(frame.TimeStamp());
    }
    // Update the key frame decoded variable so that we know whether or not we've decoded a key frame since reset.
    _keyFrameDecoded = (frame.FrameType() == kVideoFrameKey || frame.FrameType() == kVideoFrameGolden);
    return ret;
}
|
||||
|
||||
// Frees decoder resources and clears the key-frame-seen state.
WebRtc_Word32 VCMGenericDecoder::Release() {
    _keyFrameDecoded = false;
    return _decoder.Release();
}
|
||||
|
||||
// Resets decoder state for a new session; a key frame is required again
// before delta frames (when that rule is enabled).
WebRtc_Word32 VCMGenericDecoder::Reset() {
    _keyFrameDecoded = false;
    return _decoder.Reset();
}
|
||||
|
||||
// Forwards out-of-band codec configuration data (e.g. from SIP setup)
// straight to the wrapped decoder.
WebRtc_Word32 VCMGenericDecoder::SetCodecConfigParameters(
    const WebRtc_UWord8* buffer, WebRtc_Word32 size) {
    return _decoder.SetCodecConfigParameters(buffer, size);
}
|
||||
|
||||
// Stores |callback| for the timestamp Map/Pop bookkeeping and registers
// it with the wrapped decoder as the decode-complete sink.
WebRtc_Word32 VCMGenericDecoder::RegisterDecodeCompleteCallback(
    VCMDecodedFrameCallback* callback) {
    _callback = callback;
    return _decoder.RegisterDecodeCompleteCallback(callback);
}
|
||||
|
||||
bool VCMGenericDecoder::External() const
|
||||
{
|
||||
return _isExternal;
|
||||
}
|
||||
|
||||
} // namespace
|
||||
121
webrtc/modules/video_coding/main/source/generic_decoder.h
Normal file
121
webrtc/modules/video_coding/main/source/generic_decoder.h
Normal file
@@ -0,0 +1,121 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_GENERIC_DECODER_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_GENERIC_DECODER_H_
|
||||
|
||||
#include "timing.h"
|
||||
#include "timestamp_map.h"
|
||||
#include "video_codec_interface.h"
|
||||
#include "encoded_frame.h"
|
||||
#include "module_common_types.h"
|
||||
|
||||
namespace webrtc
|
||||
{
|
||||
|
||||
class VCMReceiveCallback;
|
||||
|
||||
enum { kDecoderFrameMemoryLength = 10 };
|
||||
|
||||
struct VCMFrameInformation
|
||||
{
|
||||
WebRtc_Word64 renderTimeMs;
|
||||
WebRtc_Word64 decodeStartTimeMs;
|
||||
void* userData;
|
||||
};
|
||||
|
||||
// Receives decoded images from a VideoDecoder, attaches the timing
// information previously registered via Map(), and forwards the frame to
// the user's VCMReceiveCallback. All public methods are guarded by
// |_critSect|.
class VCMDecodedFrameCallback : public DecodedImageCallback
{
public:
    VCMDecodedFrameCallback(VCMTiming& timing, TickTimeBase* clock);
    virtual ~VCMDecodedFrameCallback();
    // Sets (or clears, with NULL) the user callback that receives frames.
    void SetUserReceiveCallback(VCMReceiveCallback* receiveCallback);

    // DecodedImageCallback implementation.
    virtual WebRtc_Word32 Decoded(VideoFrame& decodedImage);
    virtual WebRtc_Word32 ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId);
    virtual WebRtc_Word32 ReceivedDecodedFrame(const WebRtc_UWord64 pictureId);

    // Picture ID of the most recently reported decoded frame.
    WebRtc_UWord64 LastReceivedPictureID() const;

    // Associates / removes per-frame timing info keyed by RTP timestamp.
    WebRtc_Word32 Map(WebRtc_UWord32 timestamp, VCMFrameInformation* frameInfo);
    WebRtc_Word32 Pop(WebRtc_UWord32 timestamp);

private:
    CriticalSectionWrapper* _critSect;   // Guards all members below.
    TickTimeBase* _clock;                // Not owned.
    VideoFrame _frame;
    VCMReceiveCallback* _receiveCallback;  // User sink; may be NULL.
    VCMTiming& _timing;
    VCMTimestampMap _timestampMap;       // timestamp -> VCMFrameInformation.
    WebRtc_UWord64 _lastReceivedPictureID;
};
|
||||
|
||||
|
||||
// Codec-agnostic wrapper around a VideoDecoder instance. Tracks per-frame
// decode timing through a small ring buffer and enforces the optional
// "key frame before delta frames" rule.
class VCMGenericDecoder
{
    friend class VCMCodecDataBase;
public:
    VCMGenericDecoder(VideoDecoder& decoder, WebRtc_Word32 id = 0, bool isExternal = false);
    ~VCMGenericDecoder();

    /**
    * Initialize the decoder with the information from the VideoCodec
    */
    WebRtc_Word32 InitDecode(const VideoCodec* settings,
                             WebRtc_Word32 numberOfCores,
                             bool requireKeyFrame);

    /**
    * Decode to a raw I420 frame,
    *
    * inputVideoBuffer reference to encoded video frame
    */
    WebRtc_Word32 Decode(const VCMEncodedFrame& inputFrame, int64_t nowMs);

    /**
    * Free the decoder memory
    */
    WebRtc_Word32 Release();

    /**
    * Reset the decoder state, prepare for a new call
    */
    WebRtc_Word32 Reset();

    /**
    * Codec configuration data sent out-of-band, i.e. in SIP call setup
    *
    * buffer pointer to the configuration data
    * size the size of the configuration data in bytes
    */
    WebRtc_Word32 SetCodecConfigParameters(const WebRtc_UWord8* /*buffer*/,
                                           WebRtc_Word32 /*size*/);

    // Registers the decode-complete sink used for timestamp bookkeeping.
    WebRtc_Word32 RegisterDecodeCompleteCallback(VCMDecodedFrameCallback* callback);

    // True when the decoder was registered from outside the VCM.
    bool External() const;

protected:

    WebRtc_Word32 _id;                   // Trace identifier.
    VCMDecodedFrameCallback* _callback;  // Not owned; set via Register...().
    // Ring buffer of per-frame timing info, indexed by _nextFrameInfoIdx.
    VCMFrameInformation _frameInfos[kDecoderFrameMemoryLength];
    WebRtc_UWord32 _nextFrameInfoIdx;
    VideoDecoder& _decoder;              // Wrapped decoder; not owned.
    VideoCodecType _codecType;
    bool _isExternal;
    bool _requireKeyFrame;               // Refuse deltas until a key frame.
    bool _keyFrameDecoded;               // Key frame seen since last reset.

};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_GENERIC_DECODER_H_
|
||||
278
webrtc/modules/video_coding/main/source/generic_encoder.cc
Normal file
278
webrtc/modules/video_coding/main/source/generic_encoder.cc
Normal file
@@ -0,0 +1,278 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "encoded_frame.h"
|
||||
#include "generic_encoder.h"
|
||||
#include "media_optimization.h"
|
||||
#include "../../../../engine_configurations.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
//#define DEBUG_ENCODER_BIT_STREAM
|
||||
|
||||
// Wraps |encoder| behind a codec-agnostic interface. |internalSource| is
// true when the encoder produces its own input frames (no external
// capture), which changes how frame requests and drops are handled.
//
// Bug fix: the member initializer previously hard-coded
// _internalSource(false), silently discarding the constructor argument,
// so InternalSource() always returned false.
VCMGenericEncoder::VCMGenericEncoder(VideoEncoder& encoder, bool internalSource /*= false*/)
:
_encoder(encoder),
_codecType(kVideoCodecUnknown),
_VCMencodedFrameCallback(NULL),
_bitRate(0),
_frameRate(0),
_internalSource(internalSource)
{
}
|
||||
|
||||
|
||||
// The wrapped encoder is not owned here, so nothing to release.
VCMGenericEncoder::~VCMGenericEncoder() {}
|
||||
|
||||
// Clears cached rate settings and the callback pointer, then releases
// the wrapped encoder's resources.
WebRtc_Word32 VCMGenericEncoder::Release() {
    _bitRate = 0;
    _frameRate = 0;
    _VCMencodedFrameCallback = NULL;
    return _encoder.Release();
}
|
||||
|
||||
// Caches the target rates and codec type from |settings|, pushes the
// codec type to an already-registered callback, and initializes the
// wrapped encoder.
WebRtc_Word32 VCMGenericEncoder::InitEncode(const VideoCodec* settings,
                                            WebRtc_Word32 numberOfCores,
                                            WebRtc_UWord32 maxPayloadSize) {
    _bitRate = settings->startBitrate;
    _frameRate = settings->maxFramerate;
    _codecType = settings->codecType;
    if (_VCMencodedFrameCallback != NULL) {
        _VCMencodedFrameCallback->SetCodecType(_codecType);
    }
    return _encoder.InitEncode(settings, numberOfCores, maxPayloadSize);
}
|
||||
|
||||
// Encodes |inputFrame|, translating the requested FrameTypes into the
// encoder-level VideoFrameType values. A NULL |frameTypes| means "no
// explicit request": a single delta frame is assumed.
//
// Bug fix: the original dereferenced frameTypes->size() BEFORE the NULL
// check that follows, crashing on a NULL argument.
WebRtc_Word32
VCMGenericEncoder::Encode(const VideoFrame& inputFrame,
                          const CodecSpecificInfo* codecSpecificInfo,
                          const std::vector<FrameType>* frameTypes) {
  std::vector<VideoFrameType> video_frame_types(
      frameTypes ? frameTypes->size() : 1, kDeltaFrame);
  if (frameTypes) {
    VCMEncodedFrame::ConvertFrameTypes(*frameTypes, &video_frame_types);
  }
  return _encoder.Encode(inputFrame, codecSpecificInfo, &video_frame_types);
}
|
||||
|
||||
// Forwards the current packet-loss estimate and round-trip time (ms) to
// the wrapped encoder.
WebRtc_Word32 VCMGenericEncoder::SetChannelParameters(WebRtc_Word32 packetLoss,
                                                      int rtt) {
    return _encoder.SetChannelParameters(packetLoss, rtt);
}
|
||||
|
||||
// Applies new target bit rate and frame rate to the wrapped encoder,
// caching them only when the encoder accepts the change. Returns VCM_OK
// on success, the encoder's error code otherwise.
WebRtc_Word32 VCMGenericEncoder::SetRates(WebRtc_UWord32 newBitRate,
                                          WebRtc_UWord32 frameRate) {
    const WebRtc_Word32 result = _encoder.SetRates(newBitRate, frameRate);
    if (result < 0) {
        return result;
    }
    _bitRate = newBitRate;
    _frameRate = frameRate;
    return VCM_OK;
}
|
||||
|
||||
// Retrieves out-of-band codec configuration data from the wrapped
// encoder into |buffer|. The encoder's return value (error or byte
// count) is forwarded unchanged — the original's separate negative-check
// branch returned the same value in both paths.
WebRtc_Word32 VCMGenericEncoder::CodecConfigParameters(WebRtc_UWord8* buffer,
                                                       WebRtc_Word32 size) {
    return _encoder.CodecConfigParameters(buffer, size);
}
|
||||
|
||||
// Last bit rate accepted by SetRates()/InitEncode().
WebRtc_UWord32 VCMGenericEncoder::BitRate() const {
    return _bitRate;
}
|
||||
|
||||
// Last frame rate accepted by SetRates()/InitEncode().
WebRtc_UWord32 VCMGenericEncoder::FrameRate() const {
    return _frameRate;
}
|
||||
|
||||
// Enables/disables periodic key-frame generation in the wrapped encoder.
WebRtc_Word32 VCMGenericEncoder::SetPeriodicKeyFrames(bool enable) {
    return _encoder.SetPeriodicKeyFrames(enable);
}
|
||||
|
||||
// Asks an internal-source encoder to produce the requested frame types.
// The VideoFrame passed is empty: internal-source encoders generate their
// own input. Returns 0 when |frame_types| is NULL (nothing requested).
//
// Bug fix: the original constructed
//   std::vector<VideoFrameType> video_frame_types(kVideoFrameDelta);
// which uses the enum value as the vector SIZE (default-initialized
// elements), not as the fill value, and ignored the requested count.
// It also re-checked |frame_types| for NULL after the early return.
WebRtc_Word32 VCMGenericEncoder::RequestFrame(
    const std::vector<FrameType>* frame_types) {
  if (!frame_types) {
    return 0;
  }
  VideoFrame image;
  std::vector<VideoFrameType> video_frame_types(frame_types->size(),
                                                kDeltaFrame);
  VCMEncodedFrame::ConvertFrameTypes(*frame_types, &video_frame_types);
  return _encoder.Encode(image, NULL, &video_frame_types);
}
|
||||
|
||||
// Stores the encode-complete callback, primes it with the current codec
// type and internal-source flag, and registers it with the wrapped
// encoder.
WebRtc_Word32 VCMGenericEncoder::RegisterEncodeCallback(
    VCMEncodedFrameCallback* VCMencodedFrameCallback) {
    _VCMencodedFrameCallback = VCMencodedFrameCallback;
    _VCMencodedFrameCallback->SetCodecType(_codecType);
    _VCMencodedFrameCallback->SetInternalSource(_internalSource);
    return _encoder.RegisterEncodeCompleteCallback(_VCMencodedFrameCallback);
}
|
||||
|
||||
bool
|
||||
VCMGenericEncoder::InternalSource() const
|
||||
{
|
||||
return _internalSource;
|
||||
}
|
||||
|
||||
/***************************
|
||||
* Callback Implementation
|
||||
***************************/
|
||||
/***************************
* Callback Implementation
***************************/
// Default-constructs the callback with no transport and no media
// optimizer attached; both are wired up later via SetTransportCallback()
// and SetMediaOpt(). When DEBUG_ENCODER_BIT_STREAM is defined, the raw
// encoder output is additionally dumped to "encoderBitStream.bit".
VCMEncodedFrameCallback::VCMEncodedFrameCallback():
_sendCallback(),
_mediaOpt(NULL),
_encodedBytes(0),
_payloadType(0),
_codecType(kVideoCodecUnknown),
_internalSource(false)
#ifdef DEBUG_ENCODER_BIT_STREAM
, _bitStreamAfterEncoder(NULL)
#endif
{
#ifdef DEBUG_ENCODER_BIT_STREAM
    _bitStreamAfterEncoder = fopen("encoderBitStream.bit", "wb");
#endif
}
|
||||
|
||||
// Closes the debug bitstream dump file when that build option is on.
// NOTE(review): fclose(NULL) is undefined behavior — this assumes the
// fopen in the constructor succeeded; verify if the debug path matters.
VCMEncodedFrameCallback::~VCMEncodedFrameCallback()
{
#ifdef DEBUG_ENCODER_BIT_STREAM
    fclose(_bitStreamAfterEncoder);
#endif
}
|
||||
|
||||
// Attaches the packetization/transport sink that receives encoded data.
WebRtc_Word32 VCMEncodedFrameCallback::SetTransportCallback(
    VCMPacketizationCallback* transport) {
    _sendCallback = transport;
    return VCM_OK;
}
|
||||
|
||||
// Encode-complete callback: builds the RTP video header from the codec-
// specific info, hands the encoded payload to the transport callback, and
// reports the encoded size to the media optimizer. Returns the transport's
// error code on failure, VCM_UNINITIALIZED when no transport is attached,
// and otherwise VCM_OK — or, for internal-source encoders, the media
// optimizer's drop decision for the next frame.
WebRtc_Word32
VCMEncodedFrameCallback::Encoded(
    EncodedImage &encodedImage,
    const CodecSpecificInfo* codecSpecificInfo,
    const RTPFragmentationHeader* fragmentationHeader)
{
    FrameType frameType = VCMEncodedFrame::ConvertFrameType(encodedImage._frameType);

    WebRtc_UWord32 encodedBytes = 0;
    if (_sendCallback != NULL)
    {
        encodedBytes = encodedImage._length;

#ifdef DEBUG_ENCODER_BIT_STREAM
        if (_bitStreamAfterEncoder != NULL)
        {
            fwrite(encodedImage._buffer, 1, encodedImage._length, _bitStreamAfterEncoder);
        }
#endif

        // Populate the RTP header from codec-specific data; a NULL header
        // pointer tells the packetizer there is no such data.
        RTPVideoHeader rtpVideoHeader;
        RTPVideoHeader* rtpVideoHeaderPtr = &rtpVideoHeader;
        if (codecSpecificInfo)
        {
            CopyCodecSpecific(*codecSpecificInfo, &rtpVideoHeaderPtr);
        }
        else
        {
            rtpVideoHeaderPtr = NULL;
        }

        WebRtc_Word32 callbackReturn = _sendCallback->SendData(
            frameType,
            _payloadType,
            encodedImage._timeStamp,
            encodedImage.capture_time_ms_,
            encodedImage._buffer,
            encodedBytes,
            *fragmentationHeader,
            rtpVideoHeaderPtr);
        if (callbackReturn < 0)
        {
            return callbackReturn;
        }
    }
    else
    {
        return VCM_UNINITIALIZED;
    }
    _encodedBytes = encodedBytes;
    if (_mediaOpt != NULL) {
      _mediaOpt->UpdateWithEncodedData(_encodedBytes, frameType);
      if (_internalSource)
      {
          return _mediaOpt->DropFrame(); // Signal to encoder to drop next frame
      }
    }
    return VCM_OK;
}
|
||||
|
||||
// Size in bytes of the most recently delivered encoded frame.
WebRtc_UWord32 VCMEncodedFrameCallback::EncodedBytes() {
    return _encodedBytes;
}
|
||||
|
||||
void
|
||||
VCMEncodedFrameCallback::SetMediaOpt(VCMMediaOptimization *mediaOpt)
|
||||
{
|
||||
_mediaOpt = mediaOpt;
|
||||
}
|
||||
|
||||
// Copies codec-specific fields from |info| into the RTP header that
// |*rtp| points to. For codecs with no RTP-level specifics, *rtp is set
// to NULL so the packetizer skips the header entirely.
void VCMEncodedFrameCallback::CopyCodecSpecific(const CodecSpecificInfo& info,
                                                RTPVideoHeader** rtp) {
  switch (info.codecType) {
    case kVideoCodecVP8: {
      (*rtp)->codecHeader.VP8.InitRTPVideoHeaderVP8();
      (*rtp)->codecHeader.VP8.pictureId =
          info.codecSpecific.VP8.pictureId;
      (*rtp)->codecHeader.VP8.nonReference =
          info.codecSpecific.VP8.nonReference;
      (*rtp)->codecHeader.VP8.temporalIdx =
          info.codecSpecific.VP8.temporalIdx;
      (*rtp)->codecHeader.VP8.layerSync =
          info.codecSpecific.VP8.layerSync;
      (*rtp)->codecHeader.VP8.tl0PicIdx =
          info.codecSpecific.VP8.tl0PicIdx;
      (*rtp)->codecHeader.VP8.keyIdx =
          info.codecSpecific.VP8.keyIdx;
      (*rtp)->simulcastIdx = info.codecSpecific.VP8.simulcastIdx;
      return;
    }
    default: {
      // No codec specific info. Change RTP header pointer to NULL.
      *rtp = NULL;
      return;
    }
  }
}
|
||||
} // namespace webrtc
|
||||
145
webrtc/modules/video_coding/main/source/generic_encoder.h
Normal file
145
webrtc/modules/video_coding/main/source/generic_encoder.h
Normal file
@@ -0,0 +1,145 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_GENERIC_ENCODER_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_GENERIC_ENCODER_H_
|
||||
|
||||
#include "video_codec_interface.h"
|
||||
|
||||
#include <stdio.h>
|
||||
|
||||
namespace webrtc
|
||||
{
|
||||
|
||||
class VCMMediaOptimization;
|
||||
|
||||
/*************************************/
|
||||
/* VCMEncodeFrameCallback class */
|
||||
/***********************************/
|
||||
// Receives encoded images from a VideoEncoder, builds the RTP video
// header, and forwards the payload to the registered packetization
// (transport) callback. Also feeds encoded sizes to the media optimizer.
class VCMEncodedFrameCallback : public EncodedImageCallback
{
public:
    VCMEncodedFrameCallback();
    virtual ~VCMEncodedFrameCallback();

    /*
    * Callback implementation - codec encode complete
    */
    WebRtc_Word32 Encoded(
        EncodedImage& encodedImage,
        const CodecSpecificInfo* codecSpecificInfo = NULL,
        const RTPFragmentationHeader* fragmentationHeader = NULL);
    /*
    * Get number of encoded bytes
    */
    WebRtc_UWord32 EncodedBytes();
    /*
    * Callback implementation - generic encoder encode complete
    */
    WebRtc_Word32 SetTransportCallback(VCMPacketizationCallback* transport);
    /**
    * Set media Optimization
    */
    void SetMediaOpt (VCMMediaOptimization* mediaOpt);

    // Simple setters for per-stream state consulted inside Encoded().
    void SetPayloadType(WebRtc_UWord8 payloadType) { _payloadType = payloadType; };
    void SetCodecType(VideoCodecType codecType) {_codecType = codecType;};
    void SetInternalSource(bool internalSource) { _internalSource = internalSource; };

private:
    /*
    * Map information from info into rtp. If no relevant information is found
    * in info, rtp is set to NULL.
    */
    static void CopyCodecSpecific(const CodecSpecificInfo& info,
                                  RTPVideoHeader** rtp);

    VCMPacketizationCallback* _sendCallback;  // Transport sink; not owned.
    VCMMediaOptimization* _mediaOpt;          // Not owned; may be NULL.
    WebRtc_UWord32 _encodedBytes;             // Size of the last frame sent.
    WebRtc_UWord8 _payloadType;
    VideoCodecType _codecType;
    bool _internalSource;                     // Encoder supplies its own input.
#ifdef DEBUG_ENCODER_BIT_STREAM
    FILE* _bitStreamAfterEncoder;             // Debug dump of raw encoder output.
#endif
};// end of VCMEncodeFrameCallback class
|
||||
|
||||
|
||||
/******************************/
|
||||
/* VCMGenericEncoder class */
|
||||
/******************************/
|
||||
// Codec-agnostic wrapper around a VideoEncoder instance. Caches the
// current rate settings and routes encode-complete events through a
// VCMEncodedFrameCallback.
class VCMGenericEncoder
{
    friend class VCMCodecDataBase;
public:
    VCMGenericEncoder(VideoEncoder& encoder, bool internalSource = false);
    ~VCMGenericEncoder();
    /**
    * Free encoder memory
    */
    WebRtc_Word32 Release();
    /**
    * Initialize the encoder with the information from the VideoCodec
    */
    WebRtc_Word32 InitEncode(const VideoCodec* settings,
                             WebRtc_Word32 numberOfCores,
                             WebRtc_UWord32 maxPayloadSize);
    /**
    * Encode raw image
    * inputFrame : Frame containing raw image
    * codecSpecificInfo : Specific codec data
    * cameraFrameRate : request or information from the remote side
    * frameType : The requested frame type to encode
    */
    WebRtc_Word32 Encode(const VideoFrame& inputFrame,
                         const CodecSpecificInfo* codecSpecificInfo,
                         const std::vector<FrameType>* frameTypes);
    /**
    * Set new target bit rate and frame rate
    * Return Value: new bit rate if OK, otherwise <0s
    */
    WebRtc_Word32 SetRates(WebRtc_UWord32 newBitRate, WebRtc_UWord32 frameRate);
    /**
    * Set a new packet loss rate and a new round-trip time in milliseconds.
    */
    WebRtc_Word32 SetChannelParameters(WebRtc_Word32 packetLoss, int rtt);
    WebRtc_Word32 CodecConfigParameters(WebRtc_UWord8* buffer, WebRtc_Word32 size);
    /**
    * Register a transport callback which will be called to deliver the encoded buffers
    */
    WebRtc_Word32 RegisterEncodeCallback(VCMEncodedFrameCallback* VCMencodedFrameCallback);
    /**
    * Get encoder bit rate
    */
    WebRtc_UWord32 BitRate() const;
    /**
    * Get encoder frame rate
    */
    WebRtc_UWord32 FrameRate() const;

    // Enables/disables periodic key frames in the underlying encoder.
    WebRtc_Word32 SetPeriodicKeyFrames(bool enable);

    // Asks an internal-source encoder to produce the given frame types.
    WebRtc_Word32 RequestFrame(const std::vector<FrameType>* frame_types);

    // True when the encoder generates its own input frames.
    bool InternalSource() const;

private:
    VideoEncoder& _encoder;                            // Wrapped; not owned.
    VideoCodecType _codecType;
    VCMEncodedFrameCallback* _VCMencodedFrameCallback; // Not owned.
    WebRtc_UWord32 _bitRate;                           // Last accepted bit rate.
    WebRtc_UWord32 _frameRate;                         // Last accepted frame rate.
    bool _internalSource;
}; // end of VCMGenericEncoder class
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_GENERIC_ENCODER_H_
|
||||
114
webrtc/modules/video_coding/main/source/inter_frame_delay.cc
Normal file
114
webrtc/modules/video_coding/main/source/inter_frame_delay.cc
Normal file
@@ -0,0 +1,114 @@
|
||||
/*
|
||||
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "inter_frame_delay.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Starts a fresh delay estimate anchored at |currentWallClock|.
VCMInterFrameDelay::VCMInterFrameDelay(int64_t currentWallClock) {
    Reset(currentWallClock);
}
|
||||
|
||||
// Resets the delay estimate
|
||||
void
|
||||
VCMInterFrameDelay::Reset(int64_t currentWallClock)
|
||||
{
|
||||
_zeroWallClock = currentWallClock;
|
||||
_wrapArounds = 0;
|
||||
_prevWallClock = 0;
|
||||
_prevTimestamp = 0;
|
||||
_dTS = 0;
|
||||
}
|
||||
|
||||
// Calculates the delay of a frame with the given timestamp.
|
||||
// This method is called when the frame is complete.
|
||||
// Calculates the inter-frame delay for a completed frame: the difference
// between the wall-clock interval and the (wrap-compensated) RTP
// timestamp interval since the previous frame. Returns false (and a zero
// delay) for reordered timestamps; true otherwise.
bool
VCMInterFrameDelay::CalculateDelay(WebRtc_UWord32 timestamp,
                                   WebRtc_Word64 *delay,
                                   int64_t currentWallClock)
{
    if (_prevWallClock == 0)
    {
        // First set of data, initialization, wait for next frame
        _prevWallClock = currentWallClock;
        _prevTimestamp = timestamp;
        *delay = 0;
        return true;
    }

    WebRtc_Word32 prevWrapArounds = _wrapArounds;
    CheckForWrapArounds(timestamp);

    // This will be -1 for backward wrap arounds and +1 for forward wrap arounds
    WebRtc_Word32 wrapAroundsSincePrev = _wrapArounds - prevWrapArounds;

    // Account for reordering in jitter variance estimate in the future?
    // Note that this also captures incomplete frames which are grabbed
    // for decoding after a later frame has been complete, i.e. real
    // packet losses.
    if ((wrapAroundsSincePrev == 0 && timestamp < _prevTimestamp) || wrapAroundsSincePrev < 0)
    {
        *delay = 0;
        return false;
    }

    // Compute the compensated timestamp difference and convert it to ms and
    // round it to closest integer. (90 kHz RTP clock: 90 ticks per ms.)
    _dTS = static_cast<WebRtc_Word64>((timestamp + wrapAroundsSincePrev *
        (static_cast<WebRtc_Word64>(1)<<32) - _prevTimestamp) / 90.0 + 0.5);

    // frameDelay is the difference of dT and dTS -- i.e. the difference of
    // the wall clock time difference and the timestamp difference between
    // two following frames.
    *delay = static_cast<WebRtc_Word64>(currentWallClock - _prevWallClock - _dTS);

    _prevTimestamp = timestamp;
    _prevWallClock = currentWallClock;

    return true;
}
|
||||
|
||||
// Returns the current difference between incoming timestamps
|
||||
// Latest wrap-compensated timestamp difference in ms; clamps a negative
// stored value to 0 before the unsigned conversion.
WebRtc_UWord32 VCMInterFrameDelay::CurrentTimeStampDiffMs() const {
    return (_dTS < 0) ? 0 : static_cast<WebRtc_UWord32>(_dTS);
}
|
||||
|
||||
// Investigates if the timestamp clock has overflowed since the last timestamp and
|
||||
// keeps track of the number of wrap arounds since reset.
|
||||
// Investigates if the timestamp clock has overflowed since the last timestamp and
// keeps track of the number of wrap arounds since reset. Relies on the
// modular arithmetic of the unsigned 32-bit subtraction reinterpreted as
// signed to distinguish a genuine wrap from simple reordering.
void
VCMInterFrameDelay::CheckForWrapArounds(WebRtc_UWord32 timestamp)
{
    if (timestamp < _prevTimestamp)
    {
        // This difference will probably be less than -2^31 if we have had a wrap around
        // (e.g. timestamp = 1, _previousTimestamp = 2^32 - 1). Since it is cast to a Word32,
        // it should be positive.
        if (static_cast<WebRtc_Word32>(timestamp - _prevTimestamp) > 0)
        {
            // Forward wrap around
            _wrapArounds++;
        }
    }
    // This difference will probably be less than -2^31 if we have had a backward wrap around.
    // Since it is cast to a Word32, it should be positive.
    else if (static_cast<WebRtc_Word32>(_prevTimestamp - timestamp) > 0)
    {
        // Backward wrap around
        _wrapArounds--;
    }
}
|
||||
|
||||
}
|
||||
66
webrtc/modules/video_coding/main/source/inter_frame_delay.h
Normal file
66
webrtc/modules/video_coding/main/source/inter_frame_delay.h
Normal file
@@ -0,0 +1,66 @@
|
||||
/*
|
||||
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_INTER_FRAME_DELAY_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_INTER_FRAME_DELAY_H_
|
||||
|
||||
#include "typedefs.h"
|
||||
|
||||
namespace webrtc
|
||||
{
|
||||
|
||||
// Estimates the per-frame delay: the difference between the wall-clock
// interval and the RTP-timestamp interval between two consecutive
// complete frames, compensating for 32-bit timestamp wrap-arounds.
class VCMInterFrameDelay
{
public:
    VCMInterFrameDelay(int64_t currentWallClock);

    // Resets the estimate. Zeros are given as parameters.
    void Reset(int64_t currentWallClock);

    // Calculates the delay of a frame with the given timestamp.
    // This method is called when the frame is complete.
    //
    // Input:
    //          - timestamp         : RTP timestamp of a received frame
    //          - *delay            : Pointer to memory where the result should be stored
    //          - currentWallClock  : The current time in milliseconds.
    //                                Should be -1 for normal operation, only used for testing.
    // Return value                 : true if OK, false when reordered timestamps
    bool CalculateDelay(WebRtc_UWord32 timestamp,
                        WebRtc_Word64 *delay,
                        int64_t currentWallClock);

    // Returns the current difference between incoming timestamps
    //
    // Return value                 : Wrap-around compensated difference between incoming
    //                                timestamps.
    WebRtc_UWord32 CurrentTimeStampDiffMs() const;

private:
    // Controls if the RTP timestamp counter has had a wrap around
    // between the current and the previously received frame.
    //
    // Input:
    //          - timestmap         : RTP timestamp of the current frame.
    void CheckForWrapArounds(WebRtc_UWord32 timestamp);

    WebRtc_Word64         _zeroWallClock; // Local timestamp of the first video packet received
    WebRtc_Word32         _wrapArounds;   // Number of wrapArounds detected
    // The previous timestamp passed to the delay estimate
    WebRtc_UWord32        _prevTimestamp;
    // The previous wall clock timestamp used by the delay estimate
    WebRtc_Word64         _prevWallClock;
    // Wrap-around compensated difference between incoming timestamps
    WebRtc_Word64         _dTS;
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_INTER_FRAME_DELAY_H_
|
||||
58
webrtc/modules/video_coding/main/source/internal_defines.h
Normal file
58
webrtc/modules/video_coding/main/source/internal_defines.h
Normal file
@@ -0,0 +1,58 @@
|
||||
/*
|
||||
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_SOURCE_INTERNAL_DEFINES_H_
#define WEBRTC_MODULES_VIDEO_CODING_SOURCE_INTERNAL_DEFINES_H_

#include "typedefs.h"

namespace webrtc
{

// Keeps only the low 32 bits of x.
#define MASK_32_BITS(x) (0xFFFFFFFF & (x))

// Truncates a 64-bit value to its low 32 bits.
inline WebRtc_UWord32 MaskWord64ToUWord32(WebRtc_Word64 w64)
{
    return static_cast<WebRtc_UWord32>(MASK_32_BITS(w64));
}

#define VCM_MAX(a, b) (((a) > (b)) ? (a) : (b))
#define VCM_MIN(a, b) (((a) < (b)) ? (a) : (b))

// Fallback codec settings used when none are supplied (CIF @ 30 fps).
#define VCM_DEFAULT_CODEC_WIDTH 352
#define VCM_DEFAULT_CODEC_HEIGHT 288
#define VCM_DEFAULT_FRAME_RATE 30
#define VCM_MIN_BITRATE 30
#define VCM_FLUSH_INDICATOR 4

// Helper macros for creating the static codec list: each enabled codec
// gets the next consecutive index, disabled codecs reuse the previous one.
#define VCM_NO_CODEC_IDX -1
#ifdef VIDEOCODEC_VP8
#define VCM_VP8_IDX VCM_NO_CODEC_IDX + 1
#else
#define VCM_VP8_IDX VCM_NO_CODEC_IDX
#endif
#ifdef VIDEOCODEC_I420
#define VCM_I420_IDX VCM_VP8_IDX + 1
#else
#define VCM_I420_IDX VCM_VP8_IDX
#endif
#define VCM_NUM_VIDEO_CODECS_AVAILABLE VCM_I420_IDX + 1

#define VCM_NO_RECEIVER_ID 0

// Combines a VCM id and a receiver id into a single trace identifier
// (VCM id in the high 16 bits).
inline WebRtc_Word32 VCMId(const WebRtc_Word32 vcmId, const WebRtc_Word32 receiverId = 0)
{
    return static_cast<WebRtc_Word32>((vcmId << 16) + receiverId);
}

} // namespace webrtc

#endif // WEBRTC_MODULES_VIDEO_CODING_SOURCE_INTERNAL_DEFINES_H_
|
||||
1432
webrtc/modules/video_coding/main/source/jitter_buffer.cc
Normal file
1432
webrtc/modules/video_coding/main/source/jitter_buffer.cc
Normal file
File diff suppressed because it is too large
Load Diff
266
webrtc/modules/video_coding/main/source/jitter_buffer.h
Normal file
266
webrtc/modules/video_coding/main/source/jitter_buffer.h
Normal file
@@ -0,0 +1,266 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_JITTER_BUFFER_H_
|
||||
#define WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_JITTER_BUFFER_H_
|
||||
|
||||
#include <list>
|
||||
|
||||
#include "modules/interface/module_common_types.h"
|
||||
#include "modules/video_coding/main/interface/video_coding_defines.h"
|
||||
#include "modules/video_coding/main/source/decoding_state.h"
|
||||
#include "modules/video_coding/main/source/event.h"
|
||||
#include "modules/video_coding/main/source/inter_frame_delay.h"
|
||||
#include "modules/video_coding/main/source/jitter_buffer_common.h"
|
||||
#include "modules/video_coding/main/source/jitter_estimator.h"
|
||||
#include "system_wrappers/interface/constructor_magic.h"
|
||||
#include "system_wrappers/interface/critical_section_wrapper.h"
|
||||
#include "typedefs.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// NACK (retransmission request) policy for the jitter buffer.
enum VCMNackMode {
  kNackInfinite,  // Always request retransmission of missing packets.
  kNackHybrid,    // Combine NACK with error-concealment/FEC decisions.
  kNoNack         // Never request retransmission.
};
|
||||
|
||||
// List of pointers to the frame buffers currently tracked by the jitter
// buffer (see VCMJitterBuffer::frame_list_).
typedef std::list<VCMFrameBuffer*> FrameList;

// Forward declarations.
class TickTimeBase;
class VCMFrameBuffer;
class VCMPacket;
class VCMEncodedFrame;
|
||||
|
||||
// Per-frame data fed to the jitter estimator (see
// VCMJitterBuffer::UpdateJitterEstimate()): the frame's RTP timestamp, its
// size, and the arrival time of its most recent packet.
struct VCMJitterSample {
  VCMJitterSample() : timestamp(0), frame_size(0), latest_packet_time(-1) {}
  uint32_t timestamp;          // RTP timestamp of the frame.
  uint32_t frame_size;         // Accumulated frame size (presumably bytes —
                               // confirm in jitter_buffer.cc).
  int64_t latest_packet_time;  // Arrival time of the latest packet of the
                               // frame; -1 until a packet has been received.
};
|
||||
|
||||
class VCMJitterBuffer {
|
||||
public:
|
||||
VCMJitterBuffer(TickTimeBase* clock, int vcm_id = -1, int receiver_id = -1,
|
||||
bool master = true);
|
||||
virtual ~VCMJitterBuffer();
|
||||
|
||||
// Makes |this| a deep copy of |rhs|.
|
||||
void CopyFrom(const VCMJitterBuffer& rhs);
|
||||
|
||||
// Initializes and starts jitter buffer.
|
||||
void Start();
|
||||
|
||||
// Signals all internal events and stops the jitter buffer.
|
||||
void Stop();
|
||||
|
||||
// Returns true if the jitter buffer is running.
|
||||
bool Running() const;
|
||||
|
||||
// Empty the jitter buffer of all its data.
|
||||
void Flush();
|
||||
|
||||
// Get the number of received key and delta frames since the jitter buffer
|
||||
// was started.
|
||||
void FrameStatistics(uint32_t* received_delta_frames,
|
||||
uint32_t* received_key_frames) const;
|
||||
|
||||
// The number of packets discarded by the jitter buffer because the decoder
|
||||
// won't be able to decode them.
|
||||
int num_not_decodable_packets() const;
|
||||
|
||||
// Gets number of packets discarded by the jitter buffer.
|
||||
int num_discarded_packets() const;
|
||||
|
||||
// Statistics, Calculate frame and bit rates.
|
||||
void IncomingRateStatistics(unsigned int* framerate,
|
||||
unsigned int* bitrate);
|
||||
|
||||
// Waits for the first packet in the next frame to arrive and then returns
|
||||
// the timestamp of that frame. |incoming_frame_type| and |render_time_ms| are
|
||||
// set to the frame type and render time of the next frame.
|
||||
// Blocks for up to |max_wait_time_ms| ms. Returns -1 if no packet has arrived
|
||||
// after |max_wait_time_ms| ms.
|
||||
int64_t NextTimestamp(uint32_t max_wait_time_ms,
|
||||
FrameType* incoming_frame_type,
|
||||
int64_t* render_time_ms);
|
||||
|
||||
// Checks if the packet sequence will be complete if the next frame would be
|
||||
// grabbed for decoding. That is, if a frame has been lost between the
|
||||
// last decoded frame and the next, or if the next frame is missing one
|
||||
// or more packets.
|
||||
bool CompleteSequenceWithNextFrame();
|
||||
|
||||
// TODO(mikhal/stefan): Merge all GetFrameForDecoding into one.
|
||||
// Wait |max_wait_time_ms| for a complete frame to arrive. After timeout NULL
|
||||
// is returned.
|
||||
VCMEncodedFrame* GetCompleteFrameForDecoding(uint32_t max_wait_time_ms);
|
||||
|
||||
// Get a frame for decoding (even an incomplete) without delay.
|
||||
VCMEncodedFrame* GetFrameForDecoding();
|
||||
|
||||
// Releases a frame returned from the jitter buffer, should be called when
|
||||
// done with decoding.
|
||||
void ReleaseFrame(VCMEncodedFrame* frame);
|
||||
|
||||
// Returns the frame assigned to this timestamp.
|
||||
int GetFrame(const VCMPacket& packet, VCMEncodedFrame*&);
|
||||
VCMEncodedFrame* GetFrame(const VCMPacket& packet); // Deprecated.
|
||||
|
||||
// Returns the time in ms when the latest packet was inserted into the frame.
|
||||
// Retransmitted is set to true if any of the packets belonging to the frame
|
||||
// has been retransmitted.
|
||||
int64_t LastPacketTime(VCMEncodedFrame* frame, bool* retransmitted) const;
|
||||
|
||||
// Inserts a packet into a frame returned from GetFrame().
|
||||
VCMFrameBufferEnum InsertPacket(VCMEncodedFrame* frame,
|
||||
const VCMPacket& packet);
|
||||
|
||||
// Returns the estimated jitter in milliseconds.
|
||||
uint32_t EstimatedJitterMs();
|
||||
|
||||
// Updates the round-trip time estimate.
|
||||
void UpdateRtt(uint32_t rtt_ms);
|
||||
|
||||
// Set the NACK mode. |highRttNackThreshold| is an RTT threshold in ms above
|
||||
// which NACK will be disabled if the NACK mode is |kNackHybrid|, -1 meaning
|
||||
// that NACK is always enabled in the hybrid mode.
|
||||
// |lowRttNackThreshold| is an RTT threshold in ms below which we expect to
|
||||
// rely on NACK only, and therefore are using larger buffers to have time to
|
||||
// wait for retransmissions.
|
||||
void SetNackMode(VCMNackMode mode, int low_rtt_nack_threshold_ms,
|
||||
int high_rtt_nack_threshold_ms);
|
||||
|
||||
// Returns the current NACK mode.
|
||||
VCMNackMode nack_mode() const;
|
||||
|
||||
// Creates a list of missing sequence numbers.
|
||||
uint16_t* CreateNackList(uint16_t* nack_list_size, bool* list_extended);
|
||||
|
||||
int64_t LastDecodedTimestamp() const;
|
||||
|
||||
private:
|
||||
// In NACK-only mode this function doesn't return or release non-complete
|
||||
// frames unless we have a complete key frame. In hybrid mode, we may release
|
||||
// "decodable", incomplete frames.
|
||||
VCMEncodedFrame* GetFrameForDecodingNACK();
|
||||
|
||||
void ReleaseFrameIfNotDecoding(VCMFrameBuffer* frame);
|
||||
|
||||
// Gets an empty frame, creating a new frame if necessary (i.e. increases
|
||||
// jitter buffer size).
|
||||
VCMFrameBuffer* GetEmptyFrame();
|
||||
|
||||
// Recycles oldest frames until a key frame is found. Used if jitter buffer is
|
||||
// completely full. Returns true if a key frame was found.
|
||||
bool RecycleFramesUntilKeyFrame();
|
||||
|
||||
// Sets the state of |frame| to complete if it's not too old to be decoded.
|
||||
// Also updates the frame statistics. Signals the |frame_event| if this is
|
||||
// the next frame to be decoded.
|
||||
VCMFrameBufferEnum UpdateFrameState(VCMFrameBuffer* frame);
|
||||
|
||||
// Finds the oldest complete frame, used for getting next frame to decode.
|
||||
// Can return a decodable, incomplete frame if |enable_decodable| is true.
|
||||
FrameList::iterator FindOldestCompleteContinuousFrame(bool enable_decodable);
|
||||
|
||||
void CleanUpOldFrames();
|
||||
|
||||
// Sets the "decodable" and "frame loss" flags of a frame depending on which
|
||||
// packets have been received and which are missing.
|
||||
// A frame is "decodable" if enough packets of that frame has been received
|
||||
// for it to be usable by the decoder.
|
||||
// A frame has the "frame loss" flag set if packets are missing after the
|
||||
// last decoded frame and before |frame|.
|
||||
void VerifyAndSetPreviousFrameLost(VCMFrameBuffer* frame);
|
||||
|
||||
// Returns true if |packet| is likely to have been retransmitted.
|
||||
bool IsPacketRetransmitted(const VCMPacket& packet) const;
|
||||
|
||||
// The following three functions update the jitter estimate with the
|
||||
// payload size, receive time and RTP timestamp of a frame.
|
||||
void UpdateJitterEstimate(const VCMJitterSample& sample,
|
||||
bool incomplete_frame);
|
||||
void UpdateJitterEstimate(const VCMFrameBuffer& frame, bool incomplete_frame);
|
||||
void UpdateJitterEstimate(int64_t latest_packet_time_ms,
|
||||
uint32_t timestamp,
|
||||
unsigned int frame_size,
|
||||
bool incomplete_frame);
|
||||
|
||||
// Returns the lowest and highest known sequence numbers, where the lowest is
|
||||
// the last decoded sequence number if a frame has been decoded.
|
||||
// -1 is returned if a sequence number cannot be determined.
|
||||
void GetLowHighSequenceNumbers(int32_t* low_seq_num,
|
||||
int32_t* high_seq_num) const;
|
||||
|
||||
// Returns true if we should wait for retransmissions, false otherwise.
|
||||
bool WaitForRetransmissions();
|
||||
|
||||
int vcm_id_;
|
||||
int receiver_id_;
|
||||
TickTimeBase* clock_;
|
||||
// If we are running (have started) or not.
|
||||
bool running_;
|
||||
CriticalSectionWrapper* crit_sect_;
|
||||
bool master_;
|
||||
// Event to signal when we have a frame ready for decoder.
|
||||
VCMEvent frame_event_;
|
||||
// Event to signal when we have received a packet.
|
||||
VCMEvent packet_event_;
|
||||
// Number of allocated frames.
|
||||
int max_number_of_frames_;
|
||||
// Array of pointers to the frames in jitter buffer.
|
||||
VCMFrameBuffer* frame_buffers_[kMaxNumberOfFrames];
|
||||
FrameList frame_list_;
|
||||
VCMDecodingState last_decoded_state_;
|
||||
bool first_packet_;
|
||||
|
||||
// Statistics.
|
||||
int num_not_decodable_packets_;
|
||||
// Frame counter for each type (key, delta, golden, key-delta).
|
||||
unsigned int receive_statistics_[4];
|
||||
// Latest calculated frame rates of incoming stream.
|
||||
unsigned int incoming_frame_rate_;
|
||||
unsigned int incoming_frame_count_;
|
||||
int64_t time_last_incoming_frame_count_;
|
||||
unsigned int incoming_bit_count_;
|
||||
unsigned int incoming_bit_rate_;
|
||||
unsigned int drop_count_; // Frame drop counter.
|
||||
// Number of frames in a row that have been too old.
|
||||
int num_consecutive_old_frames_;
|
||||
// Number of packets in a row that have been too old.
|
||||
int num_consecutive_old_packets_;
|
||||
// Number of packets discarded by the jitter buffer.
|
||||
int num_discarded_packets_;
|
||||
|
||||
// Jitter estimation.
|
||||
// Filter for estimating jitter.
|
||||
VCMJitterEstimator jitter_estimate_;
|
||||
// Calculates network delays used for jitter calculations.
|
||||
VCMInterFrameDelay inter_frame_delay_;
|
||||
VCMJitterSample waiting_for_completion_;
|
||||
WebRtc_UWord32 rtt_ms_;
|
||||
|
||||
// NACK and retransmissions.
|
||||
VCMNackMode nack_mode_;
|
||||
int low_rtt_nack_threshold_ms_;
|
||||
int high_rtt_nack_threshold_ms_;
|
||||
// Holds the internal NACK list (the missing sequence numbers).
|
||||
int32_t nack_seq_nums_internal_[kNackHistoryLength];
|
||||
uint16_t nack_seq_nums_[kNackHistoryLength];
|
||||
unsigned int nack_seq_nums_length_;
|
||||
bool waiting_for_key_frame_;
|
||||
|
||||
DISALLOW_COPY_AND_ASSIGN(VCMJitterBuffer);
|
||||
};
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_JITTER_BUFFER_H_
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user