Revert r3952 "VCM: Updating receiver logic"
TBR=phoglund@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/1410005

git-svn-id: http://webrtc.googlecode.com/svn/trunk@3963 4adac7df-926f-26a2-2b94-8c16560cd09d
commit 4ce19b1664
parent 273759048c
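For orientation before the diff: this revert swaps the jitter buffer's frame-fetch API back from the two-step timestamp/extract flow introduced in r3952 to single calls that hand back a frame directly. A minimal caller-side sketch of the two shapes, using only method signatures that appear in the jitter_buffer.h hunks below; the include path and the helper function names are illustrative, not part of this commit:

    #include "modules/video_coding/main/source/jitter_buffer.h"  // path assumed

    namespace webrtc {

    // Shape removed by this revert: ask for a timestamp, then extract the frame.
    VCMEncodedFrame* FetchCompleteTwoStep(VCMJitterBuffer* jb, uint32_t max_wait_ms) {
      uint32_t timestamp = 0;
      if (!jb->NextCompleteTimestamp(max_wait_ms, &timestamp))
        return NULL;                              // no complete frame arrived in time
      return jb->ExtractAndSetDecode(timestamp);  // frame is moved to the decoding state
    }

    // Shape restored by this revert: one blocking call returns the frame or NULL.
    VCMEncodedFrame* FetchCompleteSingleCall(VCMJitterBuffer* jb, uint32_t max_wait_ms) {
      return jb->GetCompleteFrameForDecoding(max_wait_ms);
    }

    }  // namespace webrtc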
@@ -343,6 +343,54 @@ void VCMJitterBuffer::IncomingRateStatistics(unsigned int* framerate,
   TRACE_COUNTER1("webrtc", "JBIncomingBitrate", incoming_bit_rate_);
 }
 
+// Wait for the first packet in the next frame to arrive.
+int64_t VCMJitterBuffer::NextTimestamp(uint32_t max_wait_time_ms,
+                                       FrameType* incoming_frame_type,
+                                       int64_t* render_time_ms) {
+  assert(incoming_frame_type);
+  assert(render_time_ms);
+  if (!running_) {
+    return -1;
+  }
+
+  crit_sect_->Enter();
+
+  // Finding oldest frame ready for decoder, check sequence number and size.
+  CleanUpOldOrEmptyFrames();
+
+  FrameList::iterator it = frame_list_.begin();
+
+  if (it == frame_list_.end()) {
+    packet_event_->Reset();
+    crit_sect_->Leave();
+
+    if (packet_event_->Wait(max_wait_time_ms) == kEventSignaled) {
+      // are we closing down the Jitter buffer
+      if (!running_) {
+        return -1;
+      }
+      crit_sect_->Enter();
+
+      CleanUpOldOrEmptyFrames();
+      it = frame_list_.begin();
+    } else {
+      crit_sect_->Enter();
+    }
+  }
+
+  if (it == frame_list_.end()) {
+    crit_sect_->Leave();
+    return -1;
+  }
+  // We have a frame.
+  *incoming_frame_type = (*it)->FrameType();
+  *render_time_ms = (*it)->RenderTimeMs();
+  const uint32_t timestamp = (*it)->TimeStamp();
+  crit_sect_->Leave();
+
+  return timestamp;
+}
+
 // Answers the question:
 // Will the packet sequence be complete if the next frame is grabbed for
 // decoding right now? That is, have we lost a frame between the last decoded
@@ -382,12 +430,12 @@ bool VCMJitterBuffer::CompleteSequenceWithNextFrame() {
 
 // Returns immediately or a |max_wait_time_ms| ms event hang waiting for a
 // complete frame, |max_wait_time_ms| decided by caller.
-bool VCMJitterBuffer::NextCompleteTimestamp(
-    uint32_t max_wait_time_ms, uint32_t* timestamp) {
-  TRACE_EVENT0("webrtc", "JB::NextCompleteTimestamp");
+VCMEncodedFrame* VCMJitterBuffer::GetCompleteFrameForDecoding(
+    uint32_t max_wait_time_ms) {
+  TRACE_EVENT0("webrtc", "JB::GetCompleteFrame");
   crit_sect_->Enter();
   if (!running_) {
-    return 0;
+    return NULL;
   }
   CleanUpOldOrEmptyFrames();
 
@@ -405,7 +453,7 @@ bool VCMJitterBuffer::NextCompleteTimestamp(
       // Are we closing down the Jitter buffer?
       if (!running_) {
         crit_sect_->Leave();
-        return false;
+        return NULL;
       }
 
       // Finding oldest frame ready for decoder, but check
@@ -419,7 +467,7 @@ bool VCMJitterBuffer::NextCompleteTimestamp(
       }
     } else {
       crit_sect_->Leave();
-      return false;
+      return NULL;
     }
   }
   // Inside |crit_sect_|.
@@ -437,85 +485,81 @@ bool VCMJitterBuffer::NextCompleteTimestamp(
 
   if (it == frame_list_.end()) {
     crit_sect_->Leave();
-    return false;
+    return NULL;
   }
 
   VCMFrameBuffer* oldest_frame = *it;
 
-  *timestamp = oldest_frame->TimeStamp();
+  it = frame_list_.erase(it);
+  if (frame_list_.empty()) {
+    TRACE_EVENT_INSTANT1("webrtc", "JB::FrameListEmptied",
+                         "type", "GetCompleteFrameForDecoding");
+  }
+
+  // Update jitter estimate.
+  const bool retransmitted = (oldest_frame->GetNackCount() > 0);
+  if (retransmitted) {
+    jitter_estimate_.FrameNacked();
+  } else if (oldest_frame->Length() > 0) {
+    // Ignore retransmitted and empty frames.
+    UpdateJitterEstimate(*oldest_frame, false);
+  }
+
+  oldest_frame->SetState(kStateDecoding);
+
+  // We have a frame - update decoded state with frame info.
+  last_decoded_state_.SetState(oldest_frame);
+  DropPacketsFromNackList(last_decoded_state_.sequence_num());
+
   crit_sect_->Leave();
-  return true;
+  return oldest_frame;
 }
 
-bool VCMJitterBuffer::NextMaybeIncompleteTimestamp(
-    uint32_t* timestamp) {
-  TRACE_EVENT0("webrtc", "JB::NextMaybeIncompleteTimestamp");
+VCMEncodedFrame* VCMJitterBuffer::MaybeGetIncompleteFrameForDecoding() {
+  TRACE_EVENT0("webrtc", "JB::MaybeGetIncompleteFrameForDecoding");
   CriticalSectionScoped cs(crit_sect_);
   if (!running_) {
-    return false;
+    return NULL;
   }
   if (!decode_with_errors_) {
     // No point to continue, as we are not decoding with errors.
-    return false;
+    return NULL;
   }
 
   CleanUpOldOrEmptyFrames();
 
   if (frame_list_.empty()) {
-    return false;
+    return NULL;
   }
 
   VCMFrameBuffer* oldest_frame = frame_list_.front();
   // If we have only one frame in the buffer, release it only if it is complete.
   if (frame_list_.size() <= 1 && oldest_frame->GetState() != kStateComplete) {
-    return false;
+    return NULL;
   }
 
   // Always start with a key frame.
   if (last_decoded_state_.in_initial_state() &&
       oldest_frame->FrameType() != kVideoFrameKey) {
-    return false;
-  }
-
-  *timestamp = oldest_frame->TimeStamp();
-  return true;
-}
-
-VCMEncodedFrame* VCMJitterBuffer::ExtractAndSetDecode(uint32_t timestamp) {
-  TRACE_EVENT0("webrtc", "JB::ExtractAndSetDecode");
-  CriticalSectionScoped cs(crit_sect_);
-
-  if (!running_) {
     return NULL;
   }
-  // Extract the frame with the desired timestamp.
-  FrameList::iterator it = std::find_if(
-      frame_list_.begin(),
-      frame_list_.end(),
-      FrameEqualTimestamp(timestamp));
 
-  if (it == frame_list_.end()) {
-    return NULL;
-  }
-  // We got the frame.
-  VCMFrameBuffer* frame = *it;
-
-  // Frame pulled out from jitter buffer,
+  // Incomplete frame pulled out from jitter buffer,
   // update the jitter estimate with what we currently know.
-  const bool retransmitted = (frame->GetNackCount() > 0);
+  const bool retransmitted = (oldest_frame->GetNackCount() > 0);
   if (retransmitted) {
     jitter_estimate_.FrameNacked();
-  } else if (frame->Length() > 0) {
+  } else if (oldest_frame->Length() > 0) {
     // Ignore retransmitted and empty frames.
     // Update with the previous incomplete frame first
    if (waiting_for_completion_.latest_packet_time >= 0) {
      UpdateJitterEstimate(waiting_for_completion_, true);
    }
    // Then wait for this one to get complete
-    waiting_for_completion_.frame_size = frame->Length();
+    waiting_for_completion_.frame_size = oldest_frame->Length();
     waiting_for_completion_.latest_packet_time =
-        frame->LatestPacketTimeMs();
-    waiting_for_completion_.timestamp = frame->TimeStamp();
+        oldest_frame->LatestPacketTimeMs();
+    waiting_for_completion_.timestamp = oldest_frame->TimeStamp();
   }
   frame_list_.erase(frame_list_.begin());
   if (frame_list_.empty()) {
@@ -523,21 +567,21 @@ VCMEncodedFrame* VCMJitterBuffer::ExtractAndSetDecode(uint32_t timestamp) {
                          "type", "GetFrameForDecoding");
   }
 
-  // Look for previous frame loss.
-  VerifyAndSetPreviousFrameLost(frame);
+  // Look for previous frame loss
+  VerifyAndSetPreviousFrameLost(oldest_frame);
 
   // The state must be changed to decoding before cleaning up zero sized
   // frames to avoid empty frames being cleaned up and then given to the
   // decoder.
   // Set as decoding. Propagates the missing_frame bit.
-  frame->SetState(kStateDecoding);
+  oldest_frame->SetState(kStateDecoding);
 
-  num_not_decodable_packets_ += frame->NotDecodablePackets();
+  num_not_decodable_packets_ += oldest_frame->NotDecodablePackets();
 
   // We have a frame - update decoded state with frame info.
-  last_decoded_state_.SetState(frame);
+  last_decoded_state_.SetState(oldest_frame);
   DropPacketsFromNackList(last_decoded_state_.sequence_num());
-  return frame;
+  return oldest_frame;
 }
 
 // Release frame when done with decoding. Should never be used to release
@@ -723,8 +767,6 @@ VCMFrameBufferEnum VCMJitterBuffer::InsertPacket(VCMEncodedFrame* encoded_frame,
       break;
     }
     case kCompleteSession: {
-      // Don't let the first packet be overridden by a complete session.
-      ret = kCompleteSession;
       // Only update return value for a JB flush indicator.
       if (UpdateFrameState(frame) == kFlushIndicator)
         ret = kFlushIndicator;
@@ -941,15 +983,11 @@ int64_t VCMJitterBuffer::LastDecodedTimestamp() const {
   return last_decoded_state_.time_stamp();
 }
 
-void VCMJitterBuffer::RenderBufferSize(
-    uint32_t* timestamp_start, uint32_t* timestamp_end) {
+int VCMJitterBuffer::RenderBufferSizeMs() {
   CriticalSectionScoped cs(crit_sect_);
   CleanUpOldOrEmptyFrames();
-  *timestamp_start = 0u;
-  *timestamp_end = 0u;
-
   if (frame_list_.empty()) {
-    return;
+    return 0;
   }
   FrameList::iterator frame_it = frame_list_.begin();
   VCMFrameBuffer* current_frame = *frame_it;
@@ -961,16 +999,16 @@ void VCMJitterBuffer::RenderBufferSize(
     frame_it = find_if(frame_list_.begin(), frame_list_.end(),
                        CompleteKeyFrameCriteria());
     if (frame_it == frame_list_.end()) {
-      return;
+      return 0;
    }
-    *timestamp_start = last_decoded_state_.time_stamp();
    current_frame = *frame_it;
    previous_state.SetState(current_frame);
-    ++frame_it;
  } else {
    previous_state.CopyFrom(last_decoded_state_);
  }
  bool continuous_complete = true;
+  int64_t start_render = current_frame->RenderTimeMs();
+  ++frame_it;
  while (frame_it != frame_list_.end() && continuous_complete) {
    current_frame = *frame_it;
    continuous_complete = current_frame->IsSessionComplete() &&
@@ -981,7 +1019,8 @@ void VCMJitterBuffer::RenderBufferSize(
   // Desired frame is the previous one.
   --frame_it;
   current_frame = *frame_it;
-  *timestamp_end = current_frame->TimeStamp();
+  // Got the frame, now compute the time delta.
+  return static_cast<int>(current_frame->RenderTimeMs() - start_render);
 }
 
 // Set the frame state to free and remove it from the sorted
@@ -89,25 +89,31 @@ class VCMJitterBuffer {
   void IncomingRateStatistics(unsigned int* framerate,
                               unsigned int* bitrate);
 
+  // Waits for the first packet in the next frame to arrive and then returns
+  // the timestamp of that frame. |incoming_frame_type| and |render_time_ms| are
+  // set to the frame type and render time of the next frame.
+  // Blocks for up to |max_wait_time_ms| ms. Returns -1 if no packet has arrived
+  // after |max_wait_time_ms| ms.
+  int64_t NextTimestamp(uint32_t max_wait_time_ms,
+                        FrameType* incoming_frame_type,
+                        int64_t* render_time_ms);
 
   // Checks if the packet sequence will be complete if the next frame would be
   // grabbed for decoding. That is, if a frame has been lost between the
   // last decoded frame and the next, or if the next frame is missing one
   // or more packets.
   bool CompleteSequenceWithNextFrame();
 
-  // Wait |max_wait_time_ms| for a complete frame to arrive.
-  // The function returns true once such a frame is found, its corresponding
-  // timestamp is returned. Otherwise, returns false.
-  bool NextCompleteTimestamp(uint32_t max_wait_time_ms, uint32_t* timestamp);
+  // Returns a complete frame ready for decoding. Allows max_wait_time_ms to
+  // wait for such a frame, if one is unavailable.
+  // Always starts with a key frame.
+  VCMEncodedFrame* GetCompleteFrameForDecoding(uint32_t max_wait_time_ms);
 
-  // Locates a frame for decoding (even an incomplete) without delay.
-  // The function returns true once such a frame is found, its corresponding
-  // timestamp is returned. Otherwise, returns false.
-  bool NextMaybeIncompleteTimestamp(uint32_t* timestamp);
-  // Extract frame corresponding to input timestamp.
-  // Frame will be set to a decoding state.
-  VCMEncodedFrame* ExtractAndSetDecode(uint32_t timestamp);
+  // Get next frame for decoding without delay. If decoding with errors is not
+  // enabled, will return NULL. Actual returned frame will be the next one in
+  // the list, either complete or not.
+  // TODO(mikhal): Consider only allowing decodable/complete.
+  VCMEncodedFrame* MaybeGetIncompleteFrameForDecoding();
 
   // Releases a frame returned from the jitter buffer, should be called when
   // done with decoding.
@@ -127,7 +133,8 @@ class VCMJitterBuffer {
                     const VCMPacket& packet);
 
   // Enable a max filter on the jitter estimate by setting an initial
-  // non-zero delay.
+  // non-zero delay. When set to zero (default), the last jitter
+  // estimate will be used.
   void SetMaxJitterEstimate(bool enable);
 
   // Returns the estimated jitter in milliseconds.
@@ -159,9 +166,8 @@ class VCMJitterBuffer {
   int64_t LastDecodedTimestamp() const;
   bool decode_with_errors() const {return decode_with_errors_;}
 
-  // Used to compute time of complete continuous frames. Returns the timestamps
-  // corresponding to the start and end of the continuous complete buffer.
-  void RenderBufferSize(uint32_t* timestamp_start, uint32_t* timestamp_end);
+  // Returns size in time (milliseconds) of complete continuous frames.
+  int RenderBufferSizeMs();
 
  private:
  class SequenceNumberLessThan {
@@ -101,23 +101,15 @@ class TestRunningJitterBuffer : public ::testing::Test {
   }
 
   bool DecodeCompleteFrame() {
-    uint32_t timestamp = 0;
-    bool found_frame = jitter_buffer_->NextCompleteTimestamp(0, &timestamp);
-    if (!found_frame)
-      return false;
-
-    VCMEncodedFrame* frame = jitter_buffer_->ExtractAndSetDecode(timestamp);
+    VCMEncodedFrame* frame = jitter_buffer_->GetCompleteFrameForDecoding(0);
     bool ret = (frame != NULL);
     jitter_buffer_->ReleaseFrame(frame);
     return ret;
   }
 
   bool DecodeIncompleteFrame() {
-    uint32_t timestamp = 0;
-    bool found_frame = jitter_buffer_->NextMaybeIncompleteTimestamp(&timestamp);
-    if (!found_frame)
-      return false;
-    VCMEncodedFrame* frame = jitter_buffer_->ExtractAndSetDecode(timestamp);
+    VCMEncodedFrame* frame =
+        jitter_buffer_->MaybeGetIncompleteFrameForDecoding();
     bool ret = (frame != NULL);
     jitter_buffer_->ReleaseFrame(frame);
     return ret;
@@ -405,8 +405,7 @@ VCMJitterEstimator::UpdateMaxFrameSize(uint32_t frameSizeBytes)
     }
 }
 
-void VCMJitterEstimator::SetMaxJitterEstimate(bool enable)
-{
+void VCMJitterEstimator::SetMaxJitterEstimate(bool enable) {
   if (enable) {
     _jitterEstimateMode = kMaxEstimate;
   } else {
@@ -103,9 +103,41 @@ int32_t VCMReceiver::InsertPacket(const VCMPacket& packet,
                  packet.seqNum, packet.timestamp,
                  MaskWord64ToUWord32(clock_->TimeInMilliseconds()));
   }
-  // First packet received belonging to this frame.
-  if (buffer->Length() == 0 && master_) {
   const int64_t now_ms = clock_->TimeInMilliseconds();
+
+  int64_t render_time_ms = timing_->RenderTimeMs(packet.timestamp, now_ms);
+
+  if (render_time_ms < 0) {
+    // Render time error. Assume that this is due to some change in the
+    // incoming video stream and reset the JB and the timing.
+    jitter_buffer_.Flush();
+    timing_->Reset(clock_->TimeInMilliseconds());
+    return VCM_FLUSH_INDICATOR;
+  } else if (render_time_ms < now_ms - max_video_delay_ms_) {
+    WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding,
+                 VCMId(vcm_id_, receiver_id_),
+                 "This frame should have been rendered more than %u ms ago."
+                 "Flushing jitter buffer and resetting timing.",
+                 max_video_delay_ms_);
+    jitter_buffer_.Flush();
+    timing_->Reset(clock_->TimeInMilliseconds());
+    return VCM_FLUSH_INDICATOR;
+  } else if (static_cast<int>(timing_->TargetVideoDelay()) >
+             max_video_delay_ms_) {
+    WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding,
+                 VCMId(vcm_id_, receiver_id_),
+                 "More than %u ms target delay. Flushing jitter buffer and"
+                 "resetting timing.", max_video_delay_ms_);
+    jitter_buffer_.Flush();
+    timing_->Reset(clock_->TimeInMilliseconds());
+    return VCM_FLUSH_INDICATOR;
+  }
+
+  // First packet received belonging to this frame.
+  if (buffer->Length() == 0) {
+    const int64_t now_ms = clock_->TimeInMilliseconds();
+    if (master_) {
      // Only trace the primary receiver to make it possible to parse and plot
      // the trace file.
      WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
@@ -113,21 +145,17 @@ int32_t VCMReceiver::InsertPacket(const VCMPacket& packet,
                   "First packet of frame %u at %u", packet.timestamp,
                   MaskWord64ToUWord32(now_ms));
     }
+    render_time_ms = timing_->RenderTimeMs(packet.timestamp, now_ms);
+    if (render_time_ms >= 0) {
+      buffer->SetRenderTime(render_time_ms);
+    } else {
+      buffer->SetRenderTime(now_ms);
+    }
+  }
 
   // Insert packet into the jitter buffer both media and empty packets.
   const VCMFrameBufferEnum
       ret = jitter_buffer_.InsertPacket(buffer, packet);
-  if (ret == kCompleteSession) {
-    bool retransmitted = false;
-    const int64_t last_packet_time_ms =
-        jitter_buffer_.LastPacketTime(buffer, &retransmitted);
-    if (last_packet_time_ms >= 0 && !retransmitted) {
-      // We don't want to include timestamps which have suffered from
-      // retransmission here, since we compensate with extra retransmission
-      // delay within the jitter estimate.
-      timing_->IncomingTimestamp(packet.timestamp, last_packet_time_ms);
-    }
-  }
   if (ret == kFlushIndicator) {
     return VCM_FLUSH_INDICATOR;
   } else if (ret < 0) {
@@ -147,14 +175,78 @@ VCMEncodedFrame* VCMReceiver::FrameForDecoding(
     bool render_timing,
     VCMReceiver* dual_receiver) {
   TRACE_EVENT0("webrtc", "Recv::FrameForDecoding");
+  // No need to enter the critical section here since the jitter buffer
+  // is thread-safe.
+  FrameType incoming_frame_type = kVideoFrameDelta;
+  next_render_time_ms = -1;
   const int64_t start_time_ms = clock_->TimeInMilliseconds();
-  uint32_t frame_timestamp = 0;
-  // Exhaust wait time to get a complete frame for decoding.
-  bool found_frame = jitter_buffer_.NextCompleteTimestamp(
-      max_wait_time_ms, &frame_timestamp);
+  int64_t ret = jitter_buffer_.NextTimestamp(max_wait_time_ms,
                                             &incoming_frame_type,
                                             &next_render_time_ms);
+  if (ret < 0) {
+    // No timestamp in jitter buffer at the moment.
+    return NULL;
+  }
+  const uint32_t time_stamp = static_cast<uint32_t>(ret);
 
-  if (!found_frame) {
-    // Get an incomplete frame when enabled.
+  // Update the timing.
+  timing_->SetRequiredDelay(jitter_buffer_.EstimatedJitterMs());
+  timing_->UpdateCurrentDelay(time_stamp);
+
+  const int32_t temp_wait_time = max_wait_time_ms -
+      static_cast<int32_t>(clock_->TimeInMilliseconds() - start_time_ms);
+  uint16_t new_max_wait_time = static_cast<uint16_t>(VCM_MAX(temp_wait_time,
+                                                             0));
+
+  VCMEncodedFrame* frame = NULL;
+
+  if (render_timing) {
+    frame = FrameForDecoding(new_max_wait_time, next_render_time_ms,
+                             dual_receiver);
+  } else {
+    frame = FrameForRendering(new_max_wait_time, next_render_time_ms,
+                              dual_receiver);
+  }
+
+  if (frame != NULL) {
+    bool retransmitted = false;
+    const int64_t last_packet_time_ms =
+        jitter_buffer_.LastPacketTime(frame, &retransmitted);
+    if (last_packet_time_ms >= 0 && !retransmitted) {
+      // We don't want to include timestamps which have suffered from
+      // retransmission here, since we compensate with extra retransmission
+      // delay within the jitter estimate.
+      timing_->IncomingTimestamp(time_stamp, last_packet_time_ms);
+    }
+    if (dual_receiver != NULL) {
+      dual_receiver->UpdateState(*frame);
+    }
+  }
+  return frame;
+}
+
+VCMEncodedFrame* VCMReceiver::FrameForDecoding(
+    uint16_t max_wait_time_ms,
+    int64_t next_render_time_ms,
+    VCMReceiver* dual_receiver) {
+  TRACE_EVENT1("webrtc", "FrameForDecoding",
+               "max_wait", max_wait_time_ms);
+  // How long can we wait until we must decode the next frame.
+  uint32_t wait_time_ms = timing_->MaxWaitingTime(
+      next_render_time_ms, clock_->TimeInMilliseconds());
+
+  // Try to get a complete frame from the jitter buffer.
+  VCMEncodedFrame* frame = jitter_buffer_.GetCompleteFrameForDecoding(0);
+
+  if (frame == NULL && max_wait_time_ms == 0 && wait_time_ms > 0) {
+    // If we're not allowed to wait for frames to get complete we must
+    // calculate if it's time to decode, and if it's not we will just return
+    // for now.
+    return NULL;
+  }
+
+  if (frame == NULL && VCM_MIN(wait_time_ms, max_wait_time_ms) == 0) {
+    // No time to wait for a complete frame, check if we have an incomplete.
     const bool dual_receiver_enabled_and_passive = (dual_receiver != NULL &&
        dual_receiver->State() == kPassive &&
        dual_receiver->NackMode() == kNack);
@@ -163,58 +255,48 @@ VCMEncodedFrame* VCMReceiver::FrameForDecoding(
      // Jitter buffer state might get corrupt with this frame.
      dual_receiver->CopyJitterBufferStateFromReceiver(*this);
    }
-    found_frame = jitter_buffer_.NextMaybeIncompleteTimestamp(
-        &frame_timestamp);
+    frame = jitter_buffer_.MaybeGetIncompleteFrameForDecoding();
  }
-  if (!found_frame) {
+  if (frame == NULL) {
+    // Wait for a complete frame.
+    frame = jitter_buffer_.GetCompleteFrameForDecoding(max_wait_time_ms);
+  }
+  if (frame == NULL) {
+    // Get an incomplete frame.
+    if (timing_->MaxWaitingTime(next_render_time_ms,
+                                clock_->TimeInMilliseconds()) > 0) {
+      // Still time to wait for a complete frame.
      return NULL;
    }
 
-  // We have a frame - Set timing and render timestamp.
-  timing_->SetRequiredDelay(jitter_buffer_.EstimatedJitterMs());
-  const int64_t now_ms = clock_->TimeInMilliseconds();
-  timing_->UpdateCurrentDelay(frame_timestamp);
-  next_render_time_ms = timing_->RenderTimeMs(frame_timestamp, now_ms);
-  // Check render timing.
-  bool timing_error = false;
-  // Assume that render timing errors are due to changes in the video stream.
-  if (next_render_time_ms < 0) {
-    timing_error = true;
-  } else if (next_render_time_ms < now_ms - max_video_delay_ms_) {
-    WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding,
-                 VCMId(vcm_id_, receiver_id_),
-                 "This frame should have been rendered more than %u ms ago."
-                 "Flushing jitter buffer and resetting timing.",
-                 max_video_delay_ms_);
-    timing_error = true;
-  } else if (static_cast<int>(timing_->TargetVideoDelay()) >
-             max_video_delay_ms_) {
-    WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding,
-                 VCMId(vcm_id_, receiver_id_),
-                 "More than %u ms target delay. Flushing jitter buffer and"
-                 "resetting timing.", max_video_delay_ms_);
-    timing_error = true;
+    // No time left to wait, we must decode this frame now.
+    const bool dual_receiver_enabled_and_passive = (dual_receiver != NULL &&
+        dual_receiver->State() == kPassive &&
+        dual_receiver->NackMode() == kNack);
+    if (dual_receiver_enabled_and_passive &&
+        !jitter_buffer_.CompleteSequenceWithNextFrame()) {
+      // Jitter buffer state might get corrupt with this frame.
+      dual_receiver->CopyJitterBufferStateFromReceiver(*this);
    }
 
-  if (timing_error) {
-    // Timing error => reset timing and flush the jitter buffer.
-    jitter_buffer_.Flush();
-    timing_->Reset(clock_->TimeInMilliseconds());
-    return NULL;
+    frame = jitter_buffer_.MaybeGetIncompleteFrameForDecoding();
  }
+  return frame;
+}
 
-  if (!render_timing) {
-    // Decode frame as close as possible to the render timestamp.
+VCMEncodedFrame* VCMReceiver::FrameForRendering(uint16_t max_wait_time_ms,
+                                                int64_t next_render_time_ms,
+                                                VCMReceiver* dual_receiver) {
   TRACE_EVENT0("webrtc", "FrameForRendering");
-    const int32_t available_wait_time = max_wait_time_ms -
-        static_cast<int32_t>(clock_->TimeInMilliseconds() - start_time_ms);
-    uint16_t new_max_wait_time = static_cast<uint16_t>(
-        VCM_MAX(available_wait_time, 0));
+  // How long MUST we wait until we must decode the next frame. This is
+  // different for the case where we have a renderer which can render at a
+  // specified time. Here we must wait as long as possible before giving the
+  // frame to the decoder, which will render the frame as soon as it has been
+  // decoded.
  uint32_t wait_time_ms = timing_->MaxWaitingTime(
      next_render_time_ms, clock_->TimeInMilliseconds());
-  if (new_max_wait_time < wait_time_ms) {
-    // We're not allowed to wait until the frame is supposed to be rendered,
+  if (max_wait_time_ms < wait_time_ms) {
+    // If we're not allowed to wait until the frame is supposed to be rendered,
    // waiting as long as we're allowed to avoid busy looping, and then return
    // NULL. Next call to this function might return the frame.
    render_wait_event_->Wait(max_wait_time_ms);
@@ -222,26 +304,26 @@ VCMEncodedFrame* VCMReceiver::FrameForDecoding(
  }
  // Wait until it's time to render.
  render_wait_event_->Wait(wait_time_ms);
+
+  // Get a complete frame if possible.
+  // Note: This might cause us to wait more than a total of |max_wait_time_ms|.
+  // This is necessary to avoid a possible busy loop if no complete frame
+  // has been received.
+  VCMEncodedFrame* frame = jitter_buffer_.GetCompleteFrameForDecoding(
+      max_wait_time_ms);
+
+  if (frame == NULL) {
+    // Get an incomplete frame.
+    const bool dual_receiver_enabled_and_passive = (dual_receiver != NULL &&
+        dual_receiver->State() == kPassive &&
+        dual_receiver->NackMode() == kNack);
+    if (dual_receiver_enabled_and_passive &&
+        !jitter_buffer_.CompleteSequenceWithNextFrame()) {
+      // Jitter buffer state might get corrupt with this frame.
+      dual_receiver->CopyJitterBufferStateFromReceiver(*this);
    }
 
-  // Extract the frame from the jitter buffer and set the render time.
-  VCMEncodedFrame* frame = jitter_buffer_.ExtractAndSetDecode(frame_timestamp);
-  assert(frame);
-  frame->SetRenderTime(next_render_time_ms);
-  if (dual_receiver != NULL) {
-    dual_receiver->UpdateState(*frame);
-  }
-  if (!frame->Complete()) {
-    // Update stats for incomplete frames.
-    bool retransmitted = false;
-    const int64_t last_packet_time_ms =
-        jitter_buffer_.LastPacketTime(frame, &retransmitted);
-    if (last_packet_time_ms >= 0 && !retransmitted) {
-      // We don't want to include timestamps which have suffered from
-      // retransmission here, since we compensate with extra retransmission
-      // delay within the jitter estimate.
-      timing_->IncomingTimestamp(frame_timestamp, last_packet_time_ms);
-    }
+    frame = jitter_buffer_.MaybeGetIncompleteFrameForDecoding();
  }
  return frame;
 }
@@ -348,6 +430,7 @@ int VCMReceiver::SetMinReceiverDelay(int desired_delay_ms) {
   if (desired_delay_ms < 0 || desired_delay_ms > kMaxReceiverDelayMs) {
     return -1;
   }
+  // Enable a max filter on the jitter estimate for non-zero delays.
   jitter_buffer_.SetMaxJitterEstimate(desired_delay_ms > 0);
   max_video_delay_ms_ = desired_delay_ms + kMaxVideoDelayMs;
   // Initializing timing to the desired delay.
@@ -356,21 +439,7 @@ int VCMReceiver::SetMinReceiverDelay(int desired_delay_ms) {
 }
 
 int VCMReceiver::RenderBufferSizeMs() {
-  uint32_t timestamp_start = 0u;
-  uint32_t timestamp_end = 0u;
-  // Render timestamps are computed just prior to decoding. Therefore this is
-  // only an estimate based on frames' timestamps and current timing state.
-  jitter_buffer_.RenderBufferSize(&timestamp_start, &timestamp_end);
-  if (timestamp_start == timestamp_end) {
-    return 0;
-  }
-  // Update timing.
-  const int64_t now_ms = clock_->TimeInMilliseconds();
-  timing_->SetRequiredDelay(jitter_buffer_.EstimatedJitterMs());
-  // Get render timestamps.
-  uint32_t render_start = timing_->RenderTimeMs(timestamp_start, now_ms);
-  uint32_t render_end = timing_->RenderTimeMs(timestamp_end, now_ms);
-  return render_end - render_start;
+  return jitter_buffer_.RenderBufferSizeMs();
 }
 
 void VCMReceiver::UpdateState(VCMReceiverState new_state) {
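The receiver-side RenderBufferSizeMs() body removed above mapped the first and last continuous-complete timestamps to render times and returned their difference; the restored version delegates straight to the jitter buffer. A standalone sketch of the arithmetic behind that removed computation, assuming a 90 kHz RTP clock (the rate the StreamGenerator hunk below uses) and ignoring the smoothing that VCMTiming applies:

    #include <cstdint>
    #include <iostream>

    // Illustrative only: converts an RTP timestamp span to milliseconds the way
    // the removed VCMReceiver::RenderBufferSizeMs() did conceptually, i.e. by
    // turning a start and an end timestamp into render times and subtracting.
    // Real code goes through VCMTiming::RenderTimeMs(); here we only use the
    // 90 kHz clock rate.
    int RenderSpanMs(uint32_t timestamp_start, uint32_t timestamp_end) {
      if (timestamp_start == timestamp_end)
        return 0;
      // Unsigned subtraction handles RTP timestamp wrap-around.
      uint32_t ticks = timestamp_end - timestamp_start;
      return static_cast<int>(ticks / 90);  // 90 ticks per millisecond
    }

    int main() {
      // Ten frames at 30 fps: nine inter-frame gaps of 3000 ticks -> ~300 ms.
      std::cout << RenderSpanMs(123 * 90, 123 * 90 + 9 * 3000) << " ms\n";
      return 0;
    }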
@@ -81,11 +81,16 @@ class VCMReceiver {
   bool DecodeWithErrors() const;
 
   // Returns size in time (milliseconds) of complete continuous frames in the
-  // jitter buffer. The render time is estimated based on the render delay at
-  // the time this function is called.
+  // jitter buffer.
   int RenderBufferSizeMs();
 
  private:
+  VCMEncodedFrame* FrameForDecoding(uint16_t max_wait_time_ms,
+                                    int64_t nextrender_time_ms,
+                                    VCMReceiver* dual_receiver);
+  VCMEncodedFrame* FrameForRendering(uint16_t max_wait_time_ms,
+                                     int64_t nextrender_time_ms,
+                                     VCMReceiver* dual_receiver);
   void CopyJitterBufferStateFromReceiver(const VCMReceiver& receiver);
   void UpdateState(VCMReceiverState new_state);
   void UpdateState(const VCMEncodedFrame& frame);
@@ -97,9 +97,10 @@ TEST_F(TestVCMReceiver, RenderBufferSize_NotAllComplete) {
   EXPECT_EQ(0, receiver_.RenderBufferSizeMs());
   EXPECT_GE(InsertFrame(kVideoFrameKey, true), kNoError);
   int num_of_frames = 10;
-  for (int i = 1; i < num_of_frames; ++i) {
+  for (int i = 0; i < num_of_frames; ++i) {
     EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
   }
+  num_of_frames++;
   EXPECT_GE(InsertFrame(kVideoFrameDelta, false), kNoError);
   for (int i = 0; i < num_of_frames; ++i) {
     EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
@@ -114,10 +115,6 @@ TEST_F(TestVCMReceiver, RenderBufferSize_NoKeyFrame) {
   for (int i = 0; i < num_of_frames; ++i) {
     EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
   }
-  int64_t next_render_time_ms = 0;
-  VCMEncodedFrame* frame = receiver_.FrameForDecoding(10, next_render_time_ms);
-  EXPECT_TRUE(frame == NULL);
-  receiver_.ReleaseFrame(frame);
   EXPECT_GE(InsertFrame(kVideoFrameDelta, false), kNoError);
   for (int i = 0; i < num_of_frames; ++i) {
     EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
|
@ -43,7 +43,7 @@ void StreamGenerator::GenerateFrame(FrameType type,
|
|||||||
int num_media_packets,
|
int num_media_packets,
|
||||||
int num_empty_packets,
|
int num_empty_packets,
|
||||||
int64_t current_time) {
|
int64_t current_time) {
|
||||||
timestamp_ = 90 * (current_time - start_time_);
|
timestamp_ += 90 * (current_time - start_time_);
|
||||||
// Move the sequence number counter if all packets from the previous frame
|
// Move the sequence number counter if all packets from the previous frame
|
||||||
// wasn't collected.
|
// wasn't collected.
|
||||||
sequence_number_ += packets_.size();
|
sequence_number_ += packets_.size();
|
||||||
|
@ -34,7 +34,8 @@ _renderDelayMs(kDefaultRenderDelayMs),
|
|||||||
_minTotalDelayMs(0),
|
_minTotalDelayMs(0),
|
||||||
_requiredDelayMs(0),
|
_requiredDelayMs(0),
|
||||||
_currentDelayMs(0),
|
_currentDelayMs(0),
|
||||||
_prevFrameTimestamp(0)
|
_prevFrameTimestamp(0),
|
||||||
|
_maxVideoDelayMs(kMaxVideoDelayMs)
|
||||||
{
|
{
|
||||||
if (masterTiming == NULL)
|
if (masterTiming == NULL)
|
||||||
{
|
{
|
||||||
@ -218,6 +219,10 @@ VCMTiming::RenderTimeMs(uint32_t frameTimestamp, int64_t nowMs) const
|
|||||||
{
|
{
|
||||||
CriticalSectionScoped cs(_critSect);
|
CriticalSectionScoped cs(_critSect);
|
||||||
const int64_t renderTimeMs = RenderTimeMsInternal(frameTimestamp, nowMs);
|
const int64_t renderTimeMs = RenderTimeMsInternal(frameTimestamp, nowMs);
|
||||||
|
if (renderTimeMs < 0)
|
||||||
|
{
|
||||||
|
return renderTimeMs;
|
||||||
|
}
|
||||||
if (_master)
|
if (_master)
|
||||||
{
|
{
|
||||||
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _timingId),
|
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _timingId),
|
||||||
@ -234,6 +239,16 @@ VCMTiming::RenderTimeMsInternal(uint32_t frameTimestamp, int64_t nowMs) const
|
|||||||
{
|
{
|
||||||
int64_t estimatedCompleteTimeMs =
|
int64_t estimatedCompleteTimeMs =
|
||||||
_tsExtrapolator->ExtrapolateLocalTime(frameTimestamp);
|
_tsExtrapolator->ExtrapolateLocalTime(frameTimestamp);
|
||||||
|
if (estimatedCompleteTimeMs - nowMs > _maxVideoDelayMs)
|
||||||
|
{
|
||||||
|
if (_master)
|
||||||
|
{
|
||||||
|
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _timingId),
|
||||||
|
"Timestamp arrived 2 seconds early, reset statistics",
|
||||||
|
frameTimestamp, estimatedCompleteTimeMs);
|
||||||
|
}
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
if (_master)
|
if (_master)
|
||||||
{
|
{
|
||||||
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _timingId),
|
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _timingId),
|
||||||
@ -300,6 +315,12 @@ VCMTiming::EnoughTimeToDecode(uint32_t availableProcessingTimeMs) const
|
|||||||
return static_cast<int32_t>(availableProcessingTimeMs) - maxDecodeTimeMs > 0;
|
return static_cast<int32_t>(availableProcessingTimeMs) - maxDecodeTimeMs > 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
void VCMTiming::SetMaxVideoDelay(int maxVideoDelayMs)
|
||||||
|
{
|
||||||
|
CriticalSectionScoped cs(_critSect);
|
||||||
|
_maxVideoDelayMs = maxVideoDelayMs;
|
||||||
|
}
|
||||||
|
|
||||||
uint32_t
|
uint32_t
|
||||||
VCMTiming::TargetVideoDelay() const
|
VCMTiming::TargetVideoDelay() const
|
||||||
{
|
{
|
||||||
|
@ -62,8 +62,8 @@ public:
|
|||||||
int64_t startTimeMs,
|
int64_t startTimeMs,
|
||||||
int64_t nowMs);
|
int64_t nowMs);
|
||||||
|
|
||||||
// Used to report that a frame is passed to decoding. Updates the timestamp
|
// Used to report that a frame is passed to decoding. Updates the timestamp filter
|
||||||
// filter which is used to map between timestamps and receiver system time.
|
// which is used to map between timestamps and receiver system time.
|
||||||
void IncomingTimestamp(uint32_t timeStamp, int64_t lastPacketTimeMs);
|
void IncomingTimestamp(uint32_t timeStamp, int64_t lastPacketTimeMs);
|
||||||
|
|
||||||
// Returns the receiver system time when the frame with timestamp frameTimestamp
|
// Returns the receiver system time when the frame with timestamp frameTimestamp
|
||||||
@ -82,12 +82,16 @@ public:
|
|||||||
// certain amount of processing time.
|
// certain amount of processing time.
|
||||||
bool EnoughTimeToDecode(uint32_t availableProcessingTimeMs) const;
|
bool EnoughTimeToDecode(uint32_t availableProcessingTimeMs) const;
|
||||||
|
|
||||||
|
// Set the max allowed video delay.
|
||||||
|
void SetMaxVideoDelay(int maxVideoDelayMs);
|
||||||
|
|
||||||
enum { kDefaultRenderDelayMs = 10 };
|
enum { kDefaultRenderDelayMs = 10 };
|
||||||
enum { kDelayMaxChangeMsPerS = 100 };
|
enum { kDelayMaxChangeMsPerS = 100 };
|
||||||
|
|
||||||
protected:
|
protected:
|
||||||
int32_t MaxDecodeTimeMs(FrameType frameType = kVideoFrameDelta) const;
|
int32_t MaxDecodeTimeMs(FrameType frameType = kVideoFrameDelta) const;
|
||||||
int64_t RenderTimeMsInternal(uint32_t frameTimestamp, int64_t nowMs) const;
|
int64_t RenderTimeMsInternal(uint32_t frameTimestamp,
|
||||||
|
int64_t nowMs) const;
|
||||||
uint32_t TargetDelayInternal() const;
|
uint32_t TargetDelayInternal() const;
|
||||||
|
|
||||||
private:
|
private:
|
||||||
@ -103,6 +107,7 @@ private:
|
|||||||
uint32_t _requiredDelayMs;
|
uint32_t _requiredDelayMs;
|
||||||
uint32_t _currentDelayMs;
|
uint32_t _currentDelayMs;
|
||||||
uint32_t _prevFrameTimestamp;
|
uint32_t _prevFrameTimestamp;
|
||||||
|
int _maxVideoDelayMs;
|
||||||
};
|
};
|
||||||
|
|
||||||
} // namespace webrtc
|
} // namespace webrtc
|
||||||
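The timing hunks above reintroduce a _maxVideoDelayMs guard in RenderTimeMsInternal(): when the extrapolated completion time lands further in the future than the allowed delay, the function returns -1 and the receiver flushes. A standalone sketch of that guard; the kMaxVideoDelayMs value and the render-delay addition are assumptions for illustration and are not shown in this diff:

    #include <cstdint>
    #include <iostream>

    const int kMaxVideoDelayMs = 10000;  // assumed value; not defined in this diff

    // Returns the estimated render time, or -1 when the estimate is implausibly
    // far in the future, mirroring the guard restored in RenderTimeMsInternal().
    int64_t RenderTimeOrError(int64_t estimated_complete_time_ms, int64_t now_ms,
                              int max_video_delay_ms, int render_delay_ms) {
      if (estimated_complete_time_ms - now_ms > max_video_delay_ms)
        return -1;  // the caller treats this as a timing error and flushes
      return estimated_complete_time_ms + render_delay_ms;
    }

    int main() {
      std::cout << RenderTimeOrError(5000, 1000, kMaxVideoDelayMs, 10) << "\n";   // 5010
      std::cout << RenderTimeOrError(20000, 1000, kMaxVideoDelayMs, 10) << "\n";  // -1
      return 0;
    }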
|
@ -107,12 +107,10 @@ TEST_F(VCMRobustnessTest, TestHardNack) {
|
|||||||
InsertPacket(0, 0, true, false, kVideoFrameKey);
|
InsertPacket(0, 0, true, false, kVideoFrameKey);
|
||||||
InsertPacket(0, 1, false, false, kVideoFrameKey);
|
InsertPacket(0, 1, false, false, kVideoFrameKey);
|
||||||
InsertPacket(0, 2, false, true, kVideoFrameKey);
|
InsertPacket(0, 2, false, true, kVideoFrameKey);
|
||||||
clock_->AdvanceTimeMilliseconds(1000 / 30);
|
|
||||||
|
|
||||||
InsertPacket(3000, 3, true, false, kVideoFrameDelta);
|
InsertPacket(3000, 3, true, false, kVideoFrameDelta);
|
||||||
InsertPacket(3000, 4, false, false, kVideoFrameDelta);
|
InsertPacket(3000, 4, false, false, kVideoFrameDelta);
|
||||||
InsertPacket(3000, 5, false, true, kVideoFrameDelta);
|
InsertPacket(3000, 5, false, true, kVideoFrameDelta);
|
||||||
clock_->AdvanceTimeMilliseconds(1000 / 30);
|
|
||||||
|
|
||||||
ASSERT_EQ(VCM_OK, vcm_->Decode(0));
|
ASSERT_EQ(VCM_OK, vcm_->Decode(0));
|
||||||
ASSERT_EQ(VCM_OK, vcm_->Decode(0));
|
ASSERT_EQ(VCM_OK, vcm_->Decode(0));
|
||||||
|
@@ -52,6 +52,7 @@
           '../test/codec_database_test.cc',
           '../test/decode_from_storage_test.cc',
           '../test/generic_codec_test.cc',
+          '../test/jitter_buffer_test.cc',
           '../test/media_opt_test.cc',
           '../test/mt_test_common.cc',
           '../test/mt_rx_tx_test.cc',
@ -90,23 +90,6 @@ int CheckOutFrame(VCMEncodedFrame* frameOut, unsigned int size, bool startCode)
|
|||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
VCMEncodedFrame* DecodeCompleteFrame(uint32_t max_wait_time_ms) {
|
|
||||||
uint32_t timestamp = 0;
|
|
||||||
bool found_frame = jb.NextCompleteTimestamp(max_wait_time_ms, ×tamp);
|
|
||||||
if (!found_frame)
|
|
||||||
return NULL;
|
|
||||||
|
|
||||||
return jb.ExtractAndSetDecode(timestamp);
|
|
||||||
}
|
|
||||||
|
|
||||||
VCMEncodedFrame* DecodeIncompleteFrame() {
|
|
||||||
uint32_t timestamp = 0;
|
|
||||||
bool found_frame =
|
|
||||||
jb.MaybeGetIncompleteFrameTimestampForDecoding(×tamp);
|
|
||||||
if (!found_frame)
|
|
||||||
return NULL;
|
|
||||||
return frame = jb.ExtractAndSetDecode(timestamp);
|
|
||||||
}
|
|
||||||
|
|
||||||
int JitterBufferTest(CmdArgs& args)
|
int JitterBufferTest(CmdArgs& args)
|
||||||
{
|
{
|
||||||
@ -124,7 +107,9 @@ int JitterBufferTest(CmdArgs& args)
|
|||||||
|
|
||||||
seqNum = 1234;
|
seqNum = 1234;
|
||||||
timeStamp = 123*90;
|
timeStamp = 123*90;
|
||||||
|
FrameType incomingFrameType(kVideoFrameKey);
|
||||||
VCMEncodedFrame* frameOut=NULL;
|
VCMEncodedFrame* frameOut=NULL;
|
||||||
|
int64_t renderTimeMs = 0;
|
||||||
packet.timestamp = timeStamp;
|
packet.timestamp = timeStamp;
|
||||||
packet.seqNum = seqNum;
|
packet.seqNum = seqNum;
|
||||||
|
|
||||||
@ -149,8 +134,9 @@ int JitterBufferTest(CmdArgs& args)
|
|||||||
|
|
||||||
// Not started
|
// Not started
|
||||||
TEST(0 == jb.GetFrame(packet));
|
TEST(0 == jb.GetFrame(packet));
|
||||||
TEST(0 == DecodeCompleteFrame(10));
|
TEST(-1 == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
|
||||||
TEST(0 == DecodeIncompleteFrame());
|
TEST(0 == jb.GetCompleteFrameForDecoding(10));
|
||||||
|
TEST(0 == jb.MaybeGetIncompleteFrameForDecoding());
|
||||||
|
|
||||||
// Start
|
// Start
|
||||||
jb.Start();
|
jb.Start();
|
||||||
@ -163,7 +149,7 @@ int JitterBufferTest(CmdArgs& args)
|
|||||||
TEST(frameIn != 0);
|
TEST(frameIn != 0);
|
||||||
|
|
||||||
// No packets inserted
|
// No packets inserted
|
||||||
TEST(0 == DecodeCompleteFrame(10));
|
TEST(0 == jb.GetCompleteFrameForDecoding(10));
|
||||||
|
|
||||||
|
|
||||||
//
|
//
|
||||||
@ -181,20 +167,26 @@ int JitterBufferTest(CmdArgs& args)
|
|||||||
// packet.isFirstPacket;
|
// packet.isFirstPacket;
|
||||||
// packet.markerBit;
|
// packet.markerBit;
|
||||||
//
|
//
|
||||||
packet.frameType = kVideoFrameKey;
|
packet.frameType = kVideoFrameDelta;
|
||||||
packet.isFirstPacket = true;
|
packet.isFirstPacket = true;
|
||||||
packet.markerBit = true;
|
packet.markerBit = true;
|
||||||
|
|
||||||
// Insert a packet into a frame.
|
// Insert a packet into a frame
|
||||||
TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
|
TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
|
||||||
|
|
||||||
// Get the frame (always starts with a key frame).
|
// get packet notification
|
||||||
frameOut = DecodeCompleteFrame(10);
|
TEST(timeStamp == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
|
||||||
|
|
||||||
|
// check incoming frame type
|
||||||
|
TEST(incomingFrameType == kVideoFrameDelta);
|
||||||
|
|
||||||
|
// get the frame
|
||||||
|
frameOut = jb.GetCompleteFrameForDecoding(10);
|
||||||
|
|
||||||
TEST(CheckOutFrame(frameOut, size, false) == 0);
|
TEST(CheckOutFrame(frameOut, size, false) == 0);
|
||||||
|
|
||||||
// check the frame type
|
// check the frame type
|
||||||
TEST(frameOut->FrameType() == kVideoFrameKey);
|
TEST(frameOut->FrameType() == kVideoFrameDelta);
|
||||||
|
|
||||||
// Release frame (when done with decoding)
|
// Release frame (when done with decoding)
|
||||||
jb.ReleaseFrame(frameOut);
|
jb.ReleaseFrame(frameOut);
|
||||||
@ -223,8 +215,14 @@ int JitterBufferTest(CmdArgs& args)
|
|||||||
// Insert a packet into a frame
|
// Insert a packet into a frame
|
||||||
TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
|
TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
|
||||||
|
|
||||||
|
// get packet notification
|
||||||
|
TEST(timeStamp == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
|
||||||
|
|
||||||
|
// check incoming frame type
|
||||||
|
TEST(incomingFrameType == kVideoFrameDelta);
|
||||||
|
|
||||||
// get the frame
|
// get the frame
|
||||||
frameOut = DecodeCompleteFrame(10);
|
frameOut = jb.GetCompleteFrameForDecoding(10);
|
||||||
|
|
||||||
// it should not be complete
|
// it should not be complete
|
||||||
TEST(frameOut == 0);
|
TEST(frameOut == 0);
|
||||||
@ -241,7 +239,7 @@ int JitterBufferTest(CmdArgs& args)
|
|||||||
TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
|
TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
|
||||||
|
|
||||||
// get the frame
|
// get the frame
|
||||||
frameOut = DecodeCompleteFrame(10);
|
frameOut = jb.GetCompleteFrameForDecoding(10);
|
||||||
|
|
||||||
TEST(CheckOutFrame(frameOut, size*2, false) == 0);
|
TEST(CheckOutFrame(frameOut, size*2, false) == 0);
|
||||||
|
|
||||||
@ -276,8 +274,14 @@ int JitterBufferTest(CmdArgs& args)
|
|||||||
// Insert a packet into a frame
|
// Insert a packet into a frame
|
||||||
TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
|
TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
|
||||||
|
|
||||||
|
// get packet notification
|
||||||
|
TEST(timeStamp == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
|
||||||
|
|
||||||
|
// check incoming frame type
|
||||||
|
TEST(incomingFrameType == kVideoFrameKey);
|
||||||
|
|
||||||
// get the frame
|
// get the frame
|
||||||
frameOut = DecodeCompleteFrame(10);
|
frameOut = jb.GetCompleteFrameForDecoding(10);
|
||||||
|
|
||||||
// it should not be complete
|
// it should not be complete
|
||||||
TEST(frameOut == 0);
|
TEST(frameOut == 0);
|
||||||
@@ -312,7 +316,7 @@ int JitterBufferTest(CmdArgs& args)
 TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));

 // get the frame
-frameOut = DecodeCompleteFrame(10);
+frameOut = jb.GetCompleteFrameForDecoding(10);

 TEST(CheckOutFrame(frameOut, size*100, false) == 0);

@@ -346,8 +350,14 @@ int JitterBufferTest(CmdArgs& args)
 // Insert a packet into a frame
 TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));

+// get packet notification
+TEST(timeStamp == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
+
+// check incoming frame type
+TEST(incomingFrameType == kVideoFrameDelta);
+
 // get the frame
-frameOut = DecodeCompleteFrame(10);
+frameOut = jb.GetCompleteFrameForDecoding(10);

 // it should not be complete
 TEST(frameOut == 0);

@@ -382,7 +392,7 @@ int JitterBufferTest(CmdArgs& args)
 TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));

 // get the frame
-frameOut = DecodeCompleteFrame(10);
+frameOut = jb.GetCompleteFrameForDecoding(10);

 TEST(CheckOutFrame(frameOut, size*100, false) == 0);

@@ -417,8 +427,14 @@ int JitterBufferTest(CmdArgs& args)
 // Insert a packet into a frame
 TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));

+// get packet notification
+TEST(timeStamp == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
+
+// check incoming frame type
+TEST(incomingFrameType == kVideoFrameDelta);
+
 // get the frame
-frameOut = DecodeCompleteFrame(10);
+frameOut = jb.GetCompleteFrameForDecoding(10);

 // it should not be complete
 TEST(frameOut == 0);

@@ -453,7 +469,7 @@ int JitterBufferTest(CmdArgs& args)
 TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));

 // get the frame
-frameOut = DecodeCompleteFrame(10);
+frameOut = jb.GetCompleteFrameForDecoding(10);

 TEST(CheckOutFrame(frameOut, size*100, false) == 0);

@@ -488,8 +504,14 @@ int JitterBufferTest(CmdArgs& args)
 // Insert a packet into a frame
 TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));

+// get packet notification
+TEST(timeStamp == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
+
+// check incoming frame type
+TEST(incomingFrameType == kVideoFrameDelta);
+
 // get the frame
-frameOut = DecodeCompleteFrame(10);
+frameOut = jb.GetCompleteFrameForDecoding(10);

 // it should not be complete
 TEST(frameOut == 0);

@@ -506,7 +528,7 @@ int JitterBufferTest(CmdArgs& args)
 TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));

 // check that we fail to get frame since seqnum is not continuous
-frameOut = DecodeCompleteFrame(10);
+frameOut = jb.GetCompleteFrameForDecoding(10);
 TEST(frameOut == 0);

 seqNum -= 3;

@@ -523,6 +545,12 @@ int JitterBufferTest(CmdArgs& args)
 // Insert a packet into a frame
 TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));

+// get packet notification
+TEST(timeStamp == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
+
+// check incoming frame type
+TEST(incomingFrameType == kVideoFrameDelta);
+
 // get the frame
 frameOut = jb.GetCompleteFrameForDecoding(10);

@@ -541,7 +569,7 @@ int JitterBufferTest(CmdArgs& args)
 TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));

 // get the frame
-frameOut = DecodeCompleteFrame(10);
+frameOut = jb.GetCompleteFrameForDecoding(10);

 TEST(CheckOutFrame(frameOut, size*2, false) == 0);

@@ -552,7 +580,7 @@ int JitterBufferTest(CmdArgs& args)
 jb.ReleaseFrame(frameOut);

 // get the frame
-frameOut = DecodeCompleteFrame(10);
+frameOut = jb.GetCompleteFrameForDecoding(10);

 TEST(CheckOutFrame(frameOut, size*2, false) == 0);

@@ -591,8 +619,14 @@ int JitterBufferTest(CmdArgs& args)
 // Insert a packet into a frame
 TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));

+// get packet notification
+TEST(timeStamp == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
+
+// check incoming frame type
+TEST(incomingFrameType == kVideoFrameDelta);
+
 // get the frame
-frameOut = DecodeCompleteFrame(10);
+frameOut = jb.GetCompleteFrameForDecoding(10);

 // it should not be complete
 TEST(frameOut == 0);

@@ -612,7 +646,7 @@ int JitterBufferTest(CmdArgs& args)
 TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));

 // get the frame
-frameOut = DecodeCompleteFrame(10);
+frameOut = jb.GetCompleteFrameForDecoding(10);

 TEST(CheckOutFrame(frameOut, size*2, false) == 0);

@@ -647,8 +681,14 @@ int JitterBufferTest(CmdArgs& args)
 // Insert a packet into a frame
 TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));

+// get packet notification
+TEST(timeStamp == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
+
+// check incoming frame type
+TEST(incomingFrameType == kVideoFrameDelta);
+
 // get the frame
-frameOut = DecodeCompleteFrame(10);
+frameOut = jb.GetCompleteFrameForDecoding(10);

 // it should not be complete
 TEST(frameOut == 0);

@@ -665,7 +705,7 @@ int JitterBufferTest(CmdArgs& args)
 TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));

 // get the frame
-frameOut = DecodeCompleteFrame(10);
+frameOut = jb.GetCompleteFrameForDecoding(10);

 TEST(CheckOutFrame(frameOut, size * 2 + 4 * 2, true) == 0);

@@ -723,8 +763,22 @@ int JitterBufferTest(CmdArgs& args)
 // Insert a packet into a frame
 TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));

+// Get packet notification
+TEST(timeStamp - 33 * 90 == jb.NextTimestamp(10, &incomingFrameType,
+                                             &renderTimeMs));
+
+// Check incoming frame type
+if (i == 0)
+{
+    TEST(incomingFrameType == kVideoFrameKey);
+}
+else
+{
+    TEST(incomingFrameType == frametype);
+}
+
 // Get the frame
-frameOut = DecodeCompleteFrame(10);
+frameOut = jb.GetCompleteFrameForDecoding(10);

 // Should not be complete
 TEST(frameOut == 0);

@@ -757,7 +811,7 @@ int JitterBufferTest(CmdArgs& args)
 TEST(kIncomplete == jb.InsertPacket(frameIn, packet));

 // Get the frame
-frameOut = DecodeIncompleteFrame();
+frameOut = jb.MaybeGetIncompleteFrameForDecoding();

 // One of the packets has been discarded by the jitter buffer.
 // Last frame can't be extracted yet.

@@ -827,7 +881,7 @@ int JitterBufferTest(CmdArgs& args)
 // insert first packet
 timeStamp += 33*90;
 seqNum = 0xfff0;
-packet.frameType = kVideoFrameKey;
+packet.frameType = kVideoFrameDelta;
 packet.isFirstPacket = true;
 packet.markerBit = false;
 packet.seqNum = seqNum;
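The timeStamp += 33*90 steps throughout the test come from the RTP video clock: video RTP timestamps tick at 90 kHz, so one frame at roughly 30 fps (about 33 ms) advances the timestamp by 33 * 90 = 2970 ticks. A small standalone sanity check of that arithmetic:

// Sanity check for the 90 kHz RTP timestamp increments used in the test.
#include <cassert>
#include <cstdint>

int main() {
  const uint32_t kRtpTicksPerMs = 90;     // video RTP clock runs at 90 kHz
  const uint32_t kFrameDurationMs = 33;   // ~30 fps
  const uint32_t kTicksPerFrame = kFrameDurationMs * kRtpTicksPerMs;

  assert(kTicksPerFrame == 33 * 90);      // 2970 ticks per frame
  assert(kTicksPerFrame == 2970u);

  // Ten frames later the timestamp has advanced by ten frame intervals.
  uint32_t timeStamp = 3000;
  for (int i = 0; i < 10; ++i) timeStamp += kTicksPerFrame;
  assert(timeStamp == 3000 + 10 * 2970);
  return 0;
}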
@@ -839,13 +893,19 @@ int JitterBufferTest(CmdArgs& args)
 // Insert a packet into a frame
 TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));

+// get packet notification
+TEST(timeStamp == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
+
+// check incoming frame type
+TEST(incomingFrameType == kVideoFrameDelta);
+
 // get the frame
-frameOut = DecodeCompleteFrame(10);
+frameOut = jb.GetCompleteFrameForDecoding(10);

 // it should not be complete
 TEST(frameOut == 0);

-// Insert 98 packets.
+// insert 98 packets
 loop = 0;
 do
 {

@@ -860,8 +920,15 @@ int JitterBufferTest(CmdArgs& args)
 // Insert a packet into a frame
 TEST(kIncomplete == jb.InsertPacket(frameIn, packet));

+// get packet notification
+TEST(timeStamp == jb.NextTimestamp(2, &incomingFrameType,
+                                   &renderTimeMs));
+
+// check incoming frame type
+TEST(incomingFrameType == kVideoFrameDelta);
+
 // get the frame
-frameOut = DecodeCompleteFrame(2);
+frameOut = jb.GetCompleteFrameForDecoding(2);

 // it should not be complete
 TEST(frameOut == 0);
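The second argument passed to NextTimestamp and GetCompleteFrameForDecoding in these tests (10 ms, or only 2 ms in the tight loop above) is a maximum wait time: if nothing suitable is buffered, the call may block briefly for a packet to arrive before giving up, which is why the short waits above are still expected to return no frame. The standalone sketch below illustrates that bounded-wait idea with a condition variable; it is only an illustration of the timeout semantics, not the WebRTC implementation.

// Illustration of a bounded wait: give a frame up to max_wait_ms to show up,
// then return "nothing" -- the pattern the 2 ms / 10 ms arguments rely on.
#include <chrono>
#include <condition_variable>
#include <cstdio>
#include <mutex>

struct FrameSlot {
  std::mutex m;
  std::condition_variable cv;
  bool has_frame = false;

  // Returns true if a frame became available within max_wait_ms.
  bool WaitForFrame(int max_wait_ms) {
    std::unique_lock<std::mutex> lock(m);
    return cv.wait_for(lock, std::chrono::milliseconds(max_wait_ms),
                       [this] { return has_frame; });
  }
};

int main() {
  FrameSlot slot;
  // Nothing is inserted, so a 2 ms wait times out and the caller gets
  // no frame -- just like TEST(frameOut == 0) after the short waits above.
  bool got_frame = slot.WaitForFrame(2);
  std::printf("frame available: %s\n", got_frame ? "yes" : "no");
  return 0;
}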
@@ -882,12 +949,12 @@ int JitterBufferTest(CmdArgs& args)
 TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));

 // get the frame
-frameOut = DecodeCompleteFrame(10);
+frameOut = jb.GetCompleteFrameForDecoding(10);

 TEST(CheckOutFrame(frameOut, size*100, false) == 0);

 // check the frame type
-TEST(frameOut->FrameType() == kVideoFrameKey);
+TEST(frameOut->FrameType() == kVideoFrameDelta);

 // Release frame (when done with decoding)
 jb.ReleaseFrame(frameOut);

@@ -908,7 +975,7 @@ int JitterBufferTest(CmdArgs& args)
 // insert "first" packet last seqnum
 timeStamp += 33*90;
 seqNum = 10;
-packet.frameType = kVideoFrameKey;
+packet.frameType = kVideoFrameDelta;
 packet.isFirstPacket = false;
 packet.markerBit = true;
 packet.seqNum = seqNum;

@@ -920,8 +987,14 @@ int JitterBufferTest(CmdArgs& args)
 // Insert a packet into a frame
 TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));

+// get packet notification
+TEST(timeStamp == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
+
+// check incoming frame type
+TEST(incomingFrameType == kVideoFrameDelta);
+
 // get the frame
-frameOut = DecodeIncompleteFrame();
+frameOut = jb.GetCompleteFrameForDecoding(10);

 // it should not be complete
 TEST(frameOut == 0);

@@ -941,8 +1014,15 @@ int JitterBufferTest(CmdArgs& args)
 // Insert a packet into a frame
 TEST(kIncomplete == jb.InsertPacket(frameIn, packet));

+// get packet notification
+TEST(timeStamp == jb.NextTimestamp(2, &incomingFrameType,
+                                   &renderTimeMs));
+
+// check incoming frame type
+TEST(incomingFrameType == kVideoFrameDelta);
+
 // get the frame
-frameOut = DecodeCompleteFrame(2);
+frameOut = jb.GetCompleteFrameForDecoding(2);

 // it should not be complete
 TEST(frameOut == 0);

@@ -963,7 +1043,7 @@ int JitterBufferTest(CmdArgs& args)
 TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));

 // get the frame
-frameOut = DecodeIncompleteFrame();
+frameOut = jb.GetCompleteFrameForDecoding(10);

 TEST(CheckOutFrame(frameOut, size*100, false) == 0);

@@ -988,7 +1068,7 @@ int JitterBufferTest(CmdArgs& args)
 // insert "first" packet last seqnum
 timeStamp += 33*90;
 seqNum = 1;
-packet.frameType = kVideoFrameKey;
+packet.frameType = kVideoFrameDelta;
 packet.isFirstPacket = false;
 packet.markerBit = true;
 packet.seqNum = seqNum;

@@ -1000,8 +1080,14 @@ int JitterBufferTest(CmdArgs& args)
 // Insert a packet into a frame
 TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));

+// get packet notification
+TEST(timeStamp == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
+
+// check incoming frame type
+TEST(incomingFrameType == kVideoFrameDelta);
+
 // get the frame
-frameOut = DecodeCompleteFrame(10);
+frameOut = jb.GetCompleteFrameForDecoding(10);

 // it should not be complete
 TEST(frameOut == 0);

@@ -1018,8 +1104,14 @@ int JitterBufferTest(CmdArgs& args)
 // Insert a packet into a frame
 TEST(kIncomplete == jb.InsertPacket(frameIn, packet));

+// get packet notification
+TEST(timeStamp == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
+
+// check incoming frame type
+TEST(incomingFrameType == kVideoFrameDelta);
+
 // get the frame
-frameOut = DecodeIncompleteFrame();
+frameOut = jb.GetCompleteFrameForDecoding(10);

 // it should not be complete
 TEST(frameOut == 0);

@@ -1036,7 +1128,7 @@ int JitterBufferTest(CmdArgs& args)
 TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));

 // get the frame
-frameOut = DecodeIncompleteFrame();
+frameOut = jb.GetCompleteFrameForDecoding(10);

 TEST(CheckOutFrame(frameOut, size*3, false) == 0);

@@ -1073,8 +1165,12 @@ int JitterBufferTest(CmdArgs& args)
 // Insert a packet into a frame
 TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));

+// get packet notification
+TEST(3000 == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
+TEST(kVideoFrameDelta == incomingFrameType);
+
 // Get the frame
-frameOut = DecodeCompleteFrame();
+frameOut = jb.GetCompleteFrameForDecoding(10);
 TEST(3000 == frameOut->TimeStamp());

 TEST(CheckOutFrame(frameOut, size, false) == 0);

@@ -1123,8 +1219,12 @@ int JitterBufferTest(CmdArgs& args)
 // Insert a packet into a frame
 TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));

+// get packet notification
+TEST(timeStamp == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
+TEST(kVideoFrameDelta == incomingFrameType);
+
 // Get the frame
-frameOut = DecodeIncompleteFrame();
+frameOut = jb.GetCompleteFrameForDecoding(10);
 TEST(timeStamp == frameOut->TimeStamp());

 TEST(CheckOutFrame(frameOut, size, false) == 0);

@@ -1170,8 +1270,14 @@ int JitterBufferTest(CmdArgs& args)
 // Insert a packet into a frame
 TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));

+// get packet notification
+TEST(timeStamp == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
+
+// check incoming frame type
+TEST(incomingFrameType == kVideoFrameDelta);
+
 // get the frame
-frameOut = DecodeCompleteFrame(10);
+frameOut = jb.GetCompleteFrameForDecoding(10);

 // it should not be complete
 TEST(frameOut == 0);

@@ -1187,7 +1293,7 @@ int JitterBufferTest(CmdArgs& args)
 // Insert a packet into a frame
 TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));

-frameOut = DecodeIncompleteFrame();
+frameOut = jb.GetCompleteFrameForDecoding(10);

 TEST(CheckOutFrame(frameOut, size*2, false) == 0);

@@ -1207,8 +1313,14 @@ int JitterBufferTest(CmdArgs& args)
 // Insert a packet into a frame
 TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));

+// get packet notification
+TEST(timeStamp == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
+
+// check incoming frame type
+TEST(incomingFrameType == kVideoFrameDelta);
+
 // get the frame
-frameOut = DecodeCompleteFrame(10);
+frameOut = jb.GetCompleteFrameForDecoding(10);

 // it should not be complete
 TEST(frameOut == 0);

@@ -1225,7 +1337,7 @@ int JitterBufferTest(CmdArgs& args)
 TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));

 // get the frame
-frameOut = DecodeIncompleteFrame();
+frameOut = jb.GetCompleteFrameForDecoding(10);

 TEST(CheckOutFrame(frameOut, size*2, false) == 0);

@@ -1261,6 +1373,10 @@ int JitterBufferTest(CmdArgs& args)
 // Insert first frame
 TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));

+// Get packet notification
+TEST(0xffffff00 == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
+TEST(kVideoFrameDelta == incomingFrameType);
+
 // Insert next frame
 seqNum++;
 timeStamp = 2700;

@@ -1276,8 +1392,12 @@ int JitterBufferTest(CmdArgs& args)
 // Insert a packet into a frame
 TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));

+// Get packet notification
+TEST(0xffffff00 == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
+TEST(kVideoFrameDelta == incomingFrameType);
+
 // Get frame
-frameOut = jb.GetFrameForDecoding();
+frameOut = jb.GetCompleteFrameForDecoding(10);
 TEST(0xffffff00 == frameOut->TimeStamp());

 TEST(CheckOutFrame(frameOut, size, false) == 0);

@@ -1285,8 +1405,12 @@ int JitterBufferTest(CmdArgs& args)
 // check the frame type
 TEST(frameOut->FrameType() == kVideoFrameDelta);

+// Get packet notification
+TEST(2700 == jb.NextTimestamp(0, &incomingFrameType, &renderTimeMs));
+TEST(kVideoFrameDelta == incomingFrameType);
+
 // Get frame
-VCMEncodedFrame* frameOut2 = DecodeIncompleteFrame();
+VCMEncodedFrame* frameOut2 = jb.GetCompleteFrameForDecoding(10);
 TEST(2700 == frameOut2->TimeStamp());

 TEST(CheckOutFrame(frameOut2, size, false) == 0);
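The 0xffffff00 / 2700 timestamps in the surrounding hunks exercise RTP timestamp wraparound: 0xffffff00 is 256 ticks below the 32-bit limit, so a frame stamped 2700 is newer once the counter wraps, and the jitter buffer is expected to hand out the frame stamped 0xffffff00 first. A common wraparound-aware comparison (a sketch of the idea, not the exact WebRTC helper) looks like this:

// Wraparound-aware ordering of 32-bit RTP timestamps.
#include <cassert>
#include <cstdint>

// `a` is newer than `b` if their unsigned difference is less than half the
// 32-bit range; this keeps the ordering correct across the wrap.
static bool IsNewerTimestamp(uint32_t a, uint32_t b) {
  return a != b && static_cast<uint32_t>(a - b) < 0x80000000u;
}

int main() {
  // 2700 follows 0xffffff00 once the counter wraps (256 + 2700 ticks later),
  // so the frame stamped 0xffffff00 is the older one and is decoded first.
  assert(IsNewerTimestamp(2700u, 0xffffff00u));
  assert(!IsNewerTimestamp(0xffffff00u, 2700u));
  return 0;
}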
@@ -1324,6 +1448,10 @@ int JitterBufferTest(CmdArgs& args)
 // Insert first frame
 TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));

+// Get packet notification
+TEST(2700 == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
+TEST(kVideoFrameDelta == incomingFrameType);
+
 // Insert second frame
 seqNum--;
 timeStamp = 0xffffff00;

@@ -1339,8 +1467,12 @@ int JitterBufferTest(CmdArgs& args)
 // Insert a packet into a frame
 TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));

+// Get packet notification
+TEST(0xffffff00 == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
+TEST(kVideoFrameDelta == incomingFrameType);
+
 // Get frame
-frameOut = jb.GetFrameForDecoding();
+frameOut = jb.GetCompleteFrameForDecoding(10);
 TEST(0xffffff00 == frameOut->TimeStamp());

 TEST(CheckOutFrame(frameOut, size, false) == 0);

@@ -1348,8 +1480,12 @@ int JitterBufferTest(CmdArgs& args)
 // check the frame type
 TEST(frameOut->FrameType() == kVideoFrameDelta);

+// get packet notification
+TEST(2700 == jb.NextTimestamp(0, &incomingFrameType, &renderTimeMs));
+TEST(kVideoFrameDelta == incomingFrameType);
+
 // Get frame
-frameOut2 = DecodeIncompleteFrame();
+frameOut2 = jb.GetCompleteFrameForDecoding(10);
 TEST(2700 == frameOut2->TimeStamp());

 TEST(CheckOutFrame(frameOut2, size, false) == 0);

@@ -1394,6 +1530,13 @@ int JitterBufferTest(CmdArgs& args)
 TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
 }

+// get packet notification
+TEST(packet.timestamp == jb.NextTimestamp(10, &incomingFrameType,
+                                          &renderTimeMs));
+
+// check incoming frame type
+TEST(incomingFrameType == kVideoFrameDelta);
+
 loop++;
 } while (loop < kMaxPacketsInSession);

@@ -1411,7 +1554,7 @@ int JitterBufferTest(CmdArgs& args)
 // Insert the packet -> frame recycled
 TEST(kSizeError == jb.InsertPacket(frameIn, packet));

-TEST(0 == DecodeIncompleteFrame());
+TEST(0 == jb.GetCompleteFrameForDecoding(10));

 //printf("DONE fill frame - packets > max number of packets\n");

@@ -1428,6 +1571,8 @@ int JitterBufferTest(CmdArgs& args)

 loop = 0;
 seqNum = 65485;
+uint32_t timeStampStart = timeStamp + 33*90;
+uint32_t timeStampFirstKey = 0;
 VCMEncodedFrame* ptrLastDeltaFrame = NULL;
 VCMEncodedFrame* ptrFirstKeyFrame = NULL;
 // insert MAX_NUMBER_OF_FRAMES frames

@@ -1451,11 +1596,19 @@ int JitterBufferTest(CmdArgs& args)
 {
     ptrFirstKeyFrame = frameIn;
     packet.frameType = kVideoFrameKey;
+    timeStampFirstKey = packet.timestamp;
 }

 // Insert frame
 TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));

+// Get packet notification, should be first inserted frame
+TEST(timeStampStart == jb.NextTimestamp(10, &incomingFrameType,
+                                        &renderTimeMs));
+
+// check incoming frame type
+TEST(incomingFrameType == kVideoFrameDelta);
+
 loop++;
 } while (loop < kMaxNumberOfFrames);

@@ -1477,8 +1630,15 @@ int JitterBufferTest(CmdArgs& args)
 // Insert frame
 TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));

+// First inserted key frame should be oldest in buffer
+TEST(timeStampFirstKey == jb.NextTimestamp(10, &incomingFrameType,
+                                           &renderTimeMs));
+
+// check incoming frame type
+TEST(incomingFrameType == kVideoFrameKey);
+
 // get the first key frame
-frameOut = jb.GetFrameForDecoding();
+frameOut = jb.GetCompleteFrameForDecoding(10);
 TEST(ptrFirstKeyFrame == frameOut);

 TEST(CheckOutFrame(frameOut, size, false) == 0);

@@ -1584,6 +1744,9 @@ int JitterBufferTest(CmdArgs& args)
 frameIn = jb.GetFrame(packet);
 TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));

+// Get packet notification
+TEST(timeStamp == jb.NextTimestamp(10, &incomingFrameType,
+                                   &renderTimeMs));
 frameOut = jb.MaybeGetIncompleteFrameForDecoding();

 // We can decode everything from a NALU until a packet has been lost.

@@ -1700,7 +1863,7 @@ int JitterBufferTest(CmdArgs& args)
 TEST(kCompleteSession == jb.InsertPacket(frameIn, emptypacket));

 // get the frame
-frameOut = jb.GetFrameForDecoding();
+frameOut = jb.GetCompleteFrameForDecoding(10);
 // Only last NALU is complete
 TEST(CheckOutFrame(frameOut, packet.sizeBytes, false) == 0);

@@ -98,6 +98,7 @@ int main(int argc, char **argv) {
 ret = NormalTest::RunTest(args);
 ret |= CodecDataBaseTest::RunTest(args);
 ret |= ReceiverTimingTests(args);
+ret |= JitterBufferTest(args);
 break;
 case 1:
 ret = NormalTest::RunTest(args);

@@ -125,9 +126,12 @@ int main(int argc, char **argv) {
 ret = RtpPlayMT(args);
 break;
 case 9:
-ret = DecodeFromStorageTest(args);
+ret = JitterBufferTest(args);
 break;
 case 10:
+ret = DecodeFromStorageTest(args);
+break;
+case 11:
 qualityModeTest(args);
 break;
 default: