Relanding r3952: VCM: Updating receiver logic

BUG=r1734
R=stefan@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/1433004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@3970 4adac7df-926f-26a2-2b94-8c16560cd09d
Author: mikhal@webrtc.org
Date: 2013-05-07 16:36:00 +00:00
Parent: 9c7685f9a6
Commit: 759b041019
14 changed files with 298 additions and 597 deletions


@ -343,54 +343,6 @@ void VCMJitterBuffer::IncomingRateStatistics(unsigned int* framerate,
TRACE_COUNTER1("webrtc", "JBIncomingBitrate", incoming_bit_rate_);
}
// Wait for the first packet in the next frame to arrive.
int64_t VCMJitterBuffer::NextTimestamp(uint32_t max_wait_time_ms,
FrameType* incoming_frame_type,
int64_t* render_time_ms) {
assert(incoming_frame_type);
assert(render_time_ms);
if (!running_) {
return -1;
}
crit_sect_->Enter();
// Finding oldest frame ready for decoder, check sequence number and size.
CleanUpOldOrEmptyFrames();
FrameList::iterator it = frame_list_.begin();
if (it == frame_list_.end()) {
packet_event_->Reset();
crit_sect_->Leave();
if (packet_event_->Wait(max_wait_time_ms) == kEventSignaled) {
// are we closing down the Jitter buffer
if (!running_) {
return -1;
}
crit_sect_->Enter();
CleanUpOldOrEmptyFrames();
it = frame_list_.begin();
} else {
crit_sect_->Enter();
}
}
if (it == frame_list_.end()) {
crit_sect_->Leave();
return -1;
}
// We have a frame.
*incoming_frame_type = (*it)->FrameType();
*render_time_ms = (*it)->RenderTimeMs();
const uint32_t timestamp = (*it)->TimeStamp();
crit_sect_->Leave();
return timestamp;
}
// Answers the question:
// Will the packet sequence be complete if the next frame is grabbed for
// decoding right now? That is, have we lost a frame between the last decoded
@ -430,12 +382,12 @@ bool VCMJitterBuffer::CompleteSequenceWithNextFrame() {
// Returns immediately or a |max_wait_time_ms| ms event hang waiting for a
// complete frame, |max_wait_time_ms| decided by caller.
VCMEncodedFrame* VCMJitterBuffer::GetCompleteFrameForDecoding(
uint32_t max_wait_time_ms) {
bool VCMJitterBuffer::NextCompleteTimestamp(
uint32_t max_wait_time_ms, uint32_t* timestamp) {
TRACE_EVENT0("webrtc", "JB::GetCompleteFrame");
TRACE_EVENT0("webrtc", "JB::NextCompleteTimestamp");
crit_sect_->Enter();
if (!running_) {
return NULL;
return false;
}
CleanUpOldOrEmptyFrames();
@ -453,9 +405,8 @@ VCMEncodedFrame* VCMJitterBuffer::GetCompleteFrameForDecoding(
// Are we closing down the Jitter buffer?
if (!running_) {
crit_sect_->Leave();
return NULL;
return false;
}
// Finding oldest frame ready for decoder, but check
// sequence number and size
CleanUpOldOrEmptyFrames();
@ -484,81 +435,85 @@ VCMEncodedFrame* VCMJitterBuffer::GetCompleteFrameForDecoding(
if (it == frame_list_.end()) {
crit_sect_->Leave();
return NULL;
return false;
}
VCMFrameBuffer* oldest_frame = *it;
it = frame_list_.erase(it);
*timestamp = oldest_frame->TimeStamp();
if (frame_list_.empty()) {
TRACE_EVENT_INSTANT1("webrtc", "JB::FrameListEmptied",
"type", "GetCompleteFrameForDecoding");
}
// Update jitter estimate.
const bool retransmitted = (oldest_frame->GetNackCount() > 0);
if (retransmitted) {
jitter_estimate_.FrameNacked();
} else if (oldest_frame->Length() > 0) {
// Ignore retransmitted and empty frames.
UpdateJitterEstimate(*oldest_frame, false);
}
oldest_frame->SetState(kStateDecoding);
// We have a frame - update decoded state with frame info.
last_decoded_state_.SetState(oldest_frame);
DropPacketsFromNackList(last_decoded_state_.sequence_num());
crit_sect_->Leave();
return oldest_frame;
return true;
}
VCMEncodedFrame* VCMJitterBuffer::MaybeGetIncompleteFrameForDecoding() {
TRACE_EVENT0("webrtc", "JB::MaybeGetIncompleteFrameForDecoding");
bool VCMJitterBuffer::NextMaybeIncompleteTimestamp(
uint32_t* timestamp) {
TRACE_EVENT0("webrtc", "JB::NextMaybeIncompleteTimestamp");
CriticalSectionScoped cs(crit_sect_);
if (!running_) {
return NULL;
return false;
}
if (!decode_with_errors_) {
// No point to continue, as we are not decoding with errors.
return NULL;
return false;
}
CleanUpOldOrEmptyFrames();
if (frame_list_.empty()) {
return NULL;
return false;
}
VCMFrameBuffer* oldest_frame = frame_list_.front();
// If we have only one frame in the buffer, release it only if it is complete.
if (frame_list_.size() <= 1 && oldest_frame->GetState() != kStateComplete) {
return NULL;
return false;
}
// Always start with a key frame.
if (last_decoded_state_.in_initial_state() &&
oldest_frame->FrameType() != kVideoFrameKey) {
return NULL;
return false;
}
// Incomplete frame pulled out from jitter buffer,
*timestamp = oldest_frame->TimeStamp();
return true;
}
VCMEncodedFrame* VCMJitterBuffer::ExtractAndSetDecode(uint32_t timestamp) {
TRACE_EVENT0("webrtc", "JB::ExtractAndSetDecode");
CriticalSectionScoped cs(crit_sect_);
if (!running_) {
return NULL;
}
// Extract the frame with the desired timestamp.
FrameList::iterator it = std::find_if(
frame_list_.begin(),
frame_list_.end(),
FrameEqualTimestamp(timestamp));
if (it == frame_list_.end()) {
return NULL;
}
// We got the frame.
VCMFrameBuffer* frame = *it;
// Frame pulled out from jitter buffer,
// update the jitter estimate with what we currently know.
const bool retransmitted = (oldest_frame->GetNackCount() > 0);
const bool retransmitted = (frame->GetNackCount() > 0);
if (retransmitted) {
jitter_estimate_.FrameNacked();
} else if (oldest_frame->Length() > 0) {
} else if (frame->Length() > 0) {
// Ignore retransmitted and empty frames.
// Update with the previous incomplete frame first
if (waiting_for_completion_.latest_packet_time >= 0) {
UpdateJitterEstimate(waiting_for_completion_, true);
}
// Then wait for this one to get complete
waiting_for_completion_.frame_size = oldest_frame->Length();
waiting_for_completion_.frame_size = frame->Length();
waiting_for_completion_.latest_packet_time =
oldest_frame->LatestPacketTimeMs();
frame->LatestPacketTimeMs();
waiting_for_completion_.timestamp = oldest_frame->TimeStamp();
waiting_for_completion_.timestamp = frame->TimeStamp();
}
frame_list_.erase(frame_list_.begin());
if (frame_list_.empty()) {
@ -566,21 +521,21 @@ VCMEncodedFrame* VCMJitterBuffer::MaybeGetIncompleteFrameForDecoding() {
"type", "MaybeGetIncompleteFrameForDecoding");
}
// Look for previous frame loss
// Look for previous frame loss.
VerifyAndSetPreviousFrameLost(oldest_frame);
VerifyAndSetPreviousFrameLost(frame);
// The state must be changed to decoding before cleaning up zero sized
// frames to avoid empty frames being cleaned up and then given to the
// decoder.
// Set as decoding. Propagates the missing_frame bit.
oldest_frame->SetState(kStateDecoding);
frame->SetState(kStateDecoding);
num_not_decodable_packets_ += oldest_frame->NotDecodablePackets();
num_not_decodable_packets_ += frame->NotDecodablePackets();
// We have a frame - update decoded state with frame info.
last_decoded_state_.SetState(oldest_frame);
last_decoded_state_.SetState(frame);
DropPacketsFromNackList(last_decoded_state_.sequence_num());
return oldest_frame;
return frame;
}
// Release frame when done with decoding. Should never be used to release
@ -766,6 +721,8 @@ VCMFrameBufferEnum VCMJitterBuffer::InsertPacket(VCMEncodedFrame* encoded_frame,
break;
}
case kCompleteSession: {
// Don't let the first packet be overridden by a complete session.
ret = kCompleteSession;
// Only update return value for a JB flush indicator.
if (UpdateFrameState(frame) == kFlushIndicator)
ret = kFlushIndicator;
@ -982,11 +939,15 @@ int64_t VCMJitterBuffer::LastDecodedTimestamp() const {
return last_decoded_state_.time_stamp();
}
int VCMJitterBuffer::RenderBufferSizeMs() {
void VCMJitterBuffer::RenderBufferSize(
uint32_t* timestamp_start, uint32_t* timestamp_end) {
CriticalSectionScoped cs(crit_sect_);
CleanUpOldOrEmptyFrames();
*timestamp_start = 0u;
*timestamp_end = 0u;
if (frame_list_.empty()) {
return 0;
return;
}
FrameList::iterator frame_it = frame_list_.begin();
VCMFrameBuffer* current_frame = *frame_it;
@ -998,16 +959,16 @@ int VCMJitterBuffer::RenderBufferSizeMs() {
frame_it = find_if(frame_list_.begin(), frame_list_.end(),
CompleteKeyFrameCriteria());
if (frame_it == frame_list_.end()) {
return 0;
return;
}
*timestamp_start = last_decoded_state_.time_stamp();
current_frame = *frame_it;
previous_state.SetState(current_frame);
++frame_it;
} else {
previous_state.CopyFrom(last_decoded_state_);
}
bool continuous_complete = true;
int64_t start_render = current_frame->RenderTimeMs();
++frame_it;
while (frame_it != frame_list_.end() && continuous_complete) {
current_frame = *frame_it;
continuous_complete = current_frame->IsSessionComplete() &&
@ -1018,8 +979,7 @@ int VCMJitterBuffer::RenderBufferSizeMs() {
// Desired frame is the previous one.
--frame_it;
current_frame = *frame_it;
// Got the frame, now compute the time delta.
*timestamp_end = current_frame->TimeStamp();
return static_cast<int>(current_frame->RenderTimeMs() - start_render);
}
// Set the frame state to free and remove it from the sorted // Set the frame state to free and remove it from the sorted


@ -89,31 +89,25 @@ class VCMJitterBuffer {
void IncomingRateStatistics(unsigned int* framerate,
unsigned int* bitrate);
// Waits for the first packet in the next frame to arrive and then returns
// the timestamp of that frame. |incoming_frame_type| and |render_time_ms| are
// set to the frame type and render time of the next frame.
// Blocks for up to |max_wait_time_ms| ms. Returns -1 if no packet has arrived
// after |max_wait_time_ms| ms.
int64_t NextTimestamp(uint32_t max_wait_time_ms,
FrameType* incoming_frame_type,
int64_t* render_time_ms);
// Checks if the packet sequence will be complete if the next frame would be
// grabbed for decoding. That is, if a frame has been lost between the
// last decoded frame and the next, or if the next frame is missing one
// or more packets.
bool CompleteSequenceWithNextFrame();
// Returns a complete frame ready for decoding. Allows max_wait_time_ms to
// wait for such a frame, if one is unavailable.
// Always starts with a key frame.
VCMEncodedFrame* GetCompleteFrameForDecoding(uint32_t max_wait_time_ms);
// Wait |max_wait_time_ms| for a complete frame to arrive.
// The function returns true once such a frame is found, its corresponding
// timestamp is returned. Otherwise, returns false.
bool NextCompleteTimestamp(uint32_t max_wait_time_ms, uint32_t* timestamp);
// Get next frame for decoding without delay. If decoding with errors is not
// enabled, will return NULL. Actual returned frame will be the next one in
// the list, either complete or not.
// TODO(mikhal): Consider only allowing decodable/complete.
// Locates a frame for decoding (even an incomplete) without delay.
// The function returns true once such a frame is found, its corresponding
// timestamp is returned. Otherwise, returns false.
bool NextMaybeIncompleteTimestamp(uint32_t* timestamp);
VCMEncodedFrame* MaybeGetIncompleteFrameForDecoding();
// Extract frame corresponding to input timestamp.
// Frame will be set to a decoding state.
VCMEncodedFrame* ExtractAndSetDecode(uint32_t timestamp);
// Releases a frame returned from the jitter buffer, should be called when
// done with decoding.
@ -133,8 +127,7 @@ class VCMJitterBuffer {
const VCMPacket& packet);
// Enable a max filter on the jitter estimate by setting an initial
// non-zero delay. When set to zero (default), the last jitter
// estimate will be used.
// non-zero delay.
void SetMaxJitterEstimate(bool enable);
// Returns the estimated jitter in milliseconds.
@ -166,8 +159,9 @@ class VCMJitterBuffer {
int64_t LastDecodedTimestamp() const;
bool decode_with_errors() const {return decode_with_errors_;}
// Returns size in time (milliseconds) of complete continuous frames.
int RenderBufferSizeMs();
// Used to compute time of complete continuous frames. Returns the timestamps
// corresponding to the start and end of the continuous complete buffer.
void RenderBufferSize(uint32_t* timestamp_start, uint32_t* timestamp_end);
private:
class SequenceNumberLessThan {
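
For orientation, this is roughly how a caller is expected to drive the new timestamp-based API introduced by this change: first ask the jitter buffer for the timestamp of the next decodable frame, then extract that frame for decoding. The sketch below mirrors the DecodeCompleteFrame()/DecodeIncompleteFrame() helpers added to the unit tests in this commit and the new flow in VCMReceiver::FrameForDecoding; the wrapper function name and the assumption that the caller owns the jitter buffer pointer and later releases the frame are illustrative, not part of the patch.

// Minimal sketch of the new two-step extraction flow (assumed caller context).
VCMEncodedFrame* GetNextFrameForDecoding(VCMJitterBuffer* jitter_buffer,
                                         uint32_t max_wait_time_ms) {
  uint32_t timestamp = 0;
  // Prefer a complete frame; wait up to |max_wait_time_ms| for one.
  bool found = jitter_buffer->NextCompleteTimestamp(max_wait_time_ms, &timestamp);
  if (!found) {
    // Fall back to an incomplete frame. This only succeeds when decoding with
    // errors is enabled and the buffer can start with a key frame.
    found = jitter_buffer->NextMaybeIncompleteTimestamp(&timestamp);
  }
  if (!found)
    return NULL;
  // Pull the frame out of the buffer and mark it as being decoded.
  return jitter_buffer->ExtractAndSetDecode(timestamp);
}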


@ -101,15 +101,23 @@ class TestRunningJitterBuffer : public ::testing::Test {
}
bool DecodeCompleteFrame() {
VCMEncodedFrame* frame = jitter_buffer_->GetCompleteFrameForDecoding(0);
uint32_t timestamp = 0;
bool found_frame = jitter_buffer_->NextCompleteTimestamp(0, &timestamp);
if (!found_frame)
return false;
VCMEncodedFrame* frame = jitter_buffer_->ExtractAndSetDecode(timestamp);
bool ret = (frame != NULL);
jitter_buffer_->ReleaseFrame(frame);
return ret;
}
bool DecodeIncompleteFrame() {
VCMEncodedFrame* frame =
jitter_buffer_->MaybeGetIncompleteFrameForDecoding();
uint32_t timestamp = 0;
bool found_frame = jitter_buffer_->NextMaybeIncompleteTimestamp(&timestamp);
if (!found_frame)
return false;
VCMEncodedFrame* frame = jitter_buffer_->ExtractAndSetDecode(timestamp);
bool ret = (frame != NULL);
jitter_buffer_->ReleaseFrame(frame);
return ret;
@ -470,7 +478,7 @@ TEST_F(TestJitterBufferNack, NormalOperationWrap2) {
clock_->AdvanceTimeMilliseconds(kDefaultFramePeriodMs);
for (int i = 0; i < 5; ++i) {
if (stream_generator_->NextSequenceNumber() != 65535) {
EXPECT_EQ(kFirstPacket, InsertPacketAndPop(0));
EXPECT_EQ(kCompleteSession, InsertPacketAndPop(0));
EXPECT_FALSE(request_key_frame);
} else {
stream_generator_->NextPacket(NULL); // Drop packet
@ -479,7 +487,7 @@ TEST_F(TestJitterBufferNack, NormalOperationWrap2) {
clock_->TimeInMilliseconds());
clock_->AdvanceTimeMilliseconds(kDefaultFramePeriodMs);
}
EXPECT_EQ(kFirstPacket, InsertPacketAndPop(0));
EXPECT_EQ(kCompleteSession, InsertPacketAndPop(0));
EXPECT_FALSE(request_key_frame);
uint16_t nack_list_size = 0;
bool extended = false;


@ -405,7 +405,8 @@ VCMJitterEstimator::UpdateMaxFrameSize(uint32_t frameSizeBytes)
}
}
void VCMJitterEstimator::SetMaxJitterEstimate(bool enable) {
void VCMJitterEstimator::SetMaxJitterEstimate(bool enable)
{
if (enable) {
_jitterEstimateMode = kMaxEstimate;
} else {


@ -103,41 +103,9 @@ int32_t VCMReceiver::InsertPacket(const VCMPacket& packet,
packet.seqNum, packet.timestamp,
MaskWord64ToUWord32(clock_->TimeInMilliseconds()));
}
const int64_t now_ms = clock_->TimeInMilliseconds();
int64_t render_time_ms = timing_->RenderTimeMs(packet.timestamp, now_ms);
if (render_time_ms < 0) {
// Render time error. Assume that this is due to some change in the
// incoming video stream and reset the JB and the timing.
jitter_buffer_.Flush();
timing_->Reset(clock_->TimeInMilliseconds());
return VCM_FLUSH_INDICATOR;
} else if (render_time_ms < now_ms - max_video_delay_ms_) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding,
VCMId(vcm_id_, receiver_id_),
"This frame should have been rendered more than %u ms ago."
"Flushing jitter buffer and resetting timing.",
max_video_delay_ms_);
jitter_buffer_.Flush();
timing_->Reset(clock_->TimeInMilliseconds());
return VCM_FLUSH_INDICATOR;
} else if (static_cast<int>(timing_->TargetVideoDelay()) >
max_video_delay_ms_) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding,
VCMId(vcm_id_, receiver_id_),
"More than %u ms target delay. Flushing jitter buffer and"
"resetting timing.", max_video_delay_ms_);
jitter_buffer_.Flush();
timing_->Reset(clock_->TimeInMilliseconds());
return VCM_FLUSH_INDICATOR;
}
// First packet received belonging to this frame.
if (buffer->Length() == 0) {
if (buffer->Length() == 0 && master_) {
const int64_t now_ms = clock_->TimeInMilliseconds();
if (master_) {
// Only trace the primary receiver to make it possible to parse and plot
// the trace file.
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
@ -145,17 +113,21 @@ int32_t VCMReceiver::InsertPacket(const VCMPacket& packet,
"First packet of frame %u at %u", packet.timestamp,
MaskWord64ToUWord32(now_ms));
}
render_time_ms = timing_->RenderTimeMs(packet.timestamp, now_ms);
if (render_time_ms >= 0) {
buffer->SetRenderTime(render_time_ms);
} else {
buffer->SetRenderTime(now_ms);
}
}
// Insert packet into the jitter buffer both media and empty packets.
const VCMFrameBufferEnum
ret = jitter_buffer_.InsertPacket(buffer, packet);
if (ret == kCompleteSession) {
bool retransmitted = false;
const int64_t last_packet_time_ms =
jitter_buffer_.LastPacketTime(buffer, &retransmitted);
if (last_packet_time_ms >= 0 && !retransmitted) {
// We don't want to include timestamps which have suffered from
// retransmission here, since we compensate with extra retransmission
// delay within the jitter estimate.
timing_->IncomingTimestamp(packet.timestamp, last_packet_time_ms);
}
}
if (ret == kFlushIndicator) {
return VCM_FLUSH_INDICATOR;
} else if (ret < 0) {
@ -175,128 +147,74 @@ VCMEncodedFrame* VCMReceiver::FrameForDecoding(
bool render_timing, bool render_timing,
VCMReceiver* dual_receiver) { VCMReceiver* dual_receiver) {
TRACE_EVENT0("webrtc", "Recv::FrameForDecoding"); TRACE_EVENT0("webrtc", "Recv::FrameForDecoding");
// No need to enter the critical section here since the jitter buffer
// is thread-safe.
FrameType incoming_frame_type = kVideoFrameDelta;
next_render_time_ms = -1;
const int64_t start_time_ms = clock_->TimeInMilliseconds(); const int64_t start_time_ms = clock_->TimeInMilliseconds();
int64_t ret = jitter_buffer_.NextTimestamp(max_wait_time_ms, uint32_t frame_timestamp = 0;
&incoming_frame_type, // Exhaust wait time to get a complete frame for decoding.
&next_render_time_ms); bool found_frame = jitter_buffer_.NextCompleteTimestamp(
if (ret < 0) { max_wait_time_ms, &frame_timestamp);
// No timestamp in jitter buffer at the moment.
if (!found_frame) {
// Get an incomplete frame when enabled.
const bool dual_receiver_enabled_and_passive = (dual_receiver != NULL &&
dual_receiver->State() == kPassive &&
dual_receiver->NackMode() == kNack);
if (dual_receiver_enabled_and_passive &&
!jitter_buffer_.CompleteSequenceWithNextFrame()) {
// Jitter buffer state might get corrupt with this frame.
dual_receiver->CopyJitterBufferStateFromReceiver(*this);
}
found_frame = jitter_buffer_.NextMaybeIncompleteTimestamp(
&frame_timestamp);
}
if (!found_frame) {
return NULL; return NULL;
} }
const uint32_t time_stamp = static_cast<uint32_t>(ret);
// Update the timing. // We have a frame - Set timing and render timestamp.
timing_->SetRequiredDelay(jitter_buffer_.EstimatedJitterMs()); timing_->SetRequiredDelay(jitter_buffer_.EstimatedJitterMs());
timing_->UpdateCurrentDelay(time_stamp); const int64_t now_ms = clock_->TimeInMilliseconds();
timing_->UpdateCurrentDelay(frame_timestamp);
const int32_t temp_wait_time = max_wait_time_ms - next_render_time_ms = timing_->RenderTimeMs(frame_timestamp, now_ms);
static_cast<int32_t>(clock_->TimeInMilliseconds() - start_time_ms); // Check render timing.
uint16_t new_max_wait_time = static_cast<uint16_t>(VCM_MAX(temp_wait_time, bool timing_error = false;
0)); // Assume that render timing errors are due to changes in the video stream.
if (next_render_time_ms < 0) {
VCMEncodedFrame* frame = NULL; timing_error = true;
} else if (next_render_time_ms < now_ms - max_video_delay_ms_) {
if (render_timing) { WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding,
frame = FrameForDecoding(new_max_wait_time, next_render_time_ms, VCMId(vcm_id_, receiver_id_),
dual_receiver); "This frame should have been rendered more than %u ms ago."
} else { "Flushing jitter buffer and resetting timing.",
frame = FrameForRendering(new_max_wait_time, next_render_time_ms, max_video_delay_ms_);
dual_receiver); timing_error = true;
} else if (static_cast<int>(timing_->TargetVideoDelay()) >
max_video_delay_ms_) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding,
VCMId(vcm_id_, receiver_id_),
"More than %u ms target delay. Flushing jitter buffer and"
"resetting timing.", max_video_delay_ms_);
timing_error = true;
} }
if (frame != NULL) { if (timing_error) {
bool retransmitted = false; // Timing error => reset timing and flush the jitter buffer.
const int64_t last_packet_time_ms = jitter_buffer_.Flush();
jitter_buffer_.LastPacketTime(frame, &retransmitted); timing_->Reset(clock_->TimeInMilliseconds());
if (last_packet_time_ms >= 0 && !retransmitted) {
// We don't want to include timestamps which have suffered from
// retransmission here, since we compensate with extra retransmission
// delay within the jitter estimate.
timing_->IncomingTimestamp(time_stamp, last_packet_time_ms);
}
if (dual_receiver != NULL) {
dual_receiver->UpdateState(*frame);
}
}
return frame;
}
VCMEncodedFrame* VCMReceiver::FrameForDecoding(
uint16_t max_wait_time_ms,
int64_t next_render_time_ms,
VCMReceiver* dual_receiver) {
TRACE_EVENT1("webrtc", "FrameForDecoding",
"max_wait", max_wait_time_ms);
// How long can we wait until we must decode the next frame.
uint32_t wait_time_ms = timing_->MaxWaitingTime(
next_render_time_ms, clock_->TimeInMilliseconds());
// Try to get a complete frame from the jitter buffer.
VCMEncodedFrame* frame = jitter_buffer_.GetCompleteFrameForDecoding(0);
if (frame == NULL && max_wait_time_ms == 0 && wait_time_ms > 0) {
// If we're not allowed to wait for frames to get complete we must
// calculate if it's time to decode, and if it's not we will just return
// for now.
return NULL; return NULL;
} }
if (frame == NULL && VCM_MIN(wait_time_ms, max_wait_time_ms) == 0) { if (!render_timing) {
// No time to wait for a complete frame, check if we have an incomplete. // Decode frame as close as possible to the render timestamp.
const bool dual_receiver_enabled_and_passive = (dual_receiver != NULL &&
dual_receiver->State() == kPassive &&
dual_receiver->NackMode() == kNack);
if (dual_receiver_enabled_and_passive &&
!jitter_buffer_.CompleteSequenceWithNextFrame()) {
// Jitter buffer state might get corrupt with this frame.
dual_receiver->CopyJitterBufferStateFromReceiver(*this);
}
frame = jitter_buffer_.MaybeGetIncompleteFrameForDecoding();
}
if (frame == NULL) {
// Wait for a complete frame.
frame = jitter_buffer_.GetCompleteFrameForDecoding(max_wait_time_ms);
}
if (frame == NULL) {
// Get an incomplete frame.
if (timing_->MaxWaitingTime(next_render_time_ms,
clock_->TimeInMilliseconds()) > 0) {
// Still time to wait for a complete frame.
return NULL;
}
// No time left to wait, we must decode this frame now.
const bool dual_receiver_enabled_and_passive = (dual_receiver != NULL &&
dual_receiver->State() == kPassive &&
dual_receiver->NackMode() == kNack);
if (dual_receiver_enabled_and_passive &&
!jitter_buffer_.CompleteSequenceWithNextFrame()) {
// Jitter buffer state might get corrupt with this frame.
dual_receiver->CopyJitterBufferStateFromReceiver(*this);
}
frame = jitter_buffer_.MaybeGetIncompleteFrameForDecoding();
}
return frame;
}
VCMEncodedFrame* VCMReceiver::FrameForRendering(uint16_t max_wait_time_ms,
int64_t next_render_time_ms,
VCMReceiver* dual_receiver) {
TRACE_EVENT0("webrtc", "FrameForRendering"); TRACE_EVENT0("webrtc", "FrameForRendering");
// How long MUST we wait until we must decode the next frame. This is const int32_t available_wait_time = max_wait_time_ms -
// different for the case where we have a renderer which can render at a static_cast<int32_t>(clock_->TimeInMilliseconds() - start_time_ms);
// specified time. Here we must wait as long as possible before giving the uint16_t new_max_wait_time = static_cast<uint16_t>(
// frame to the decoder, which will render the frame as soon as it has been VCM_MAX(available_wait_time, 0));
// decoded.
uint32_t wait_time_ms = timing_->MaxWaitingTime( uint32_t wait_time_ms = timing_->MaxWaitingTime(
next_render_time_ms, clock_->TimeInMilliseconds()); next_render_time_ms, clock_->TimeInMilliseconds());
if (max_wait_time_ms < wait_time_ms) { if (new_max_wait_time < wait_time_ms) {
// If we're not allowed to wait until the frame is supposed to be rendered, // We're not allowed to wait until the frame is supposed to be rendered,
// waiting as long as we're allowed to avoid busy looping, and then return // waiting as long as we're allowed to avoid busy looping, and then return
// NULL. Next call to this function might return the frame. // NULL. Next call to this function might return the frame.
render_wait_event_->Wait(max_wait_time_ms); render_wait_event_->Wait(max_wait_time_ms);
@ -304,26 +222,26 @@ VCMEncodedFrame* VCMReceiver::FrameForRendering(uint16_t max_wait_time_ms,
} }
// Wait until it's time to render. // Wait until it's time to render.
render_wait_event_->Wait(wait_time_ms); render_wait_event_->Wait(wait_time_ms);
// Get a complete frame if possible.
// Note: This might cause us to wait more than a total of |max_wait_time_ms|.
// This is necessary to avoid a possible busy loop if no complete frame
// has been received.
VCMEncodedFrame* frame = jitter_buffer_.GetCompleteFrameForDecoding(
max_wait_time_ms);
if (frame == NULL) {
// Get an incomplete frame.
const bool dual_receiver_enabled_and_passive = (dual_receiver != NULL &&
dual_receiver->State() == kPassive &&
dual_receiver->NackMode() == kNack);
if (dual_receiver_enabled_and_passive &&
!jitter_buffer_.CompleteSequenceWithNextFrame()) {
// Jitter buffer state might get corrupt with this frame.
dual_receiver->CopyJitterBufferStateFromReceiver(*this);
} }
frame = jitter_buffer_.MaybeGetIncompleteFrameForDecoding(); // Extract the frame from the jitter buffer and set the render time.
VCMEncodedFrame* frame = jitter_buffer_.ExtractAndSetDecode(frame_timestamp);
assert(frame);
frame->SetRenderTime(next_render_time_ms);
if (dual_receiver != NULL) {
dual_receiver->UpdateState(*frame);
}
if (!frame->Complete()) {
// Update stats for incomplete frames.
bool retransmitted = false;
const int64_t last_packet_time_ms =
jitter_buffer_.LastPacketTime(frame, &retransmitted);
if (last_packet_time_ms >= 0 && !retransmitted) {
// We don't want to include timestamps which have suffered from
// retransmission here, since we compensate with extra retransmission
// delay within the jitter estimate.
timing_->IncomingTimestamp(frame_timestamp, last_packet_time_ms);
}
} }
return frame; return frame;
} }
@ -430,7 +348,6 @@ int VCMReceiver::SetMinReceiverDelay(int desired_delay_ms) {
if (desired_delay_ms < 0 || desired_delay_ms > kMaxReceiverDelayMs) {
return -1;
}
// Enable a max filter on the jitter estimate for non-zero delays.
jitter_buffer_.SetMaxJitterEstimate(desired_delay_ms > 0);
max_video_delay_ms_ = desired_delay_ms + kMaxVideoDelayMs;
// Initializing timing to the desired delay.
@ -439,7 +356,21 @@ int VCMReceiver::SetMinReceiverDelay(int desired_delay_ms) {
}
int VCMReceiver::RenderBufferSizeMs() {
return jitter_buffer_.RenderBufferSizeMs();
uint32_t timestamp_start = 0u;
uint32_t timestamp_end = 0u;
// Render timestamps are computed just prior to decoding. Therefore this is
// only an estimate based on frames' timestamps and current timing state.
jitter_buffer_.RenderBufferSize(&timestamp_start, &timestamp_end);
if (timestamp_start == timestamp_end) {
return 0;
}
// Update timing.
const int64_t now_ms = clock_->TimeInMilliseconds();
timing_->SetRequiredDelay(jitter_buffer_.EstimatedJitterMs());
// Get render timestamps.
uint32_t render_start = timing_->RenderTimeMs(timestamp_start, now_ms);
uint32_t render_end = timing_->RenderTimeMs(timestamp_end, now_ms);
return render_end - render_start;
}
void VCMReceiver::UpdateState(VCMReceiverState new_state) {


@ -81,16 +81,11 @@ class VCMReceiver {
bool DecodeWithErrors() const;
// Returns size in time (milliseconds) of complete continuous frames in the
// jitter buffer.
// jitter buffer. The render time is estimated based on the render delay at
// the time this function is called.
int RenderBufferSizeMs();
private:
VCMEncodedFrame* FrameForDecoding(uint16_t max_wait_time_ms,
int64_t nextrender_time_ms,
VCMReceiver* dual_receiver);
VCMEncodedFrame* FrameForRendering(uint16_t max_wait_time_ms,
int64_t nextrender_time_ms,
VCMReceiver* dual_receiver);
void CopyJitterBufferStateFromReceiver(const VCMReceiver& receiver);
void UpdateState(VCMReceiverState new_state);
void UpdateState(const VCMEncodedFrame& frame);
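
As a rough illustration of the comment on RenderBufferSizeMs() above: the new implementation (see VCMReceiver::RenderBufferSizeMs earlier in this commit) asks the jitter buffer for the first and last timestamps of the continuous, complete run of frames and maps both through the timing module to get a duration in milliseconds. The sketch below mirrors that code; the standalone function name and the pointer parameters are assumptions made for illustration only.

// Sketch: converting the jitter buffer's RTP timestamp span into milliseconds
// of buffered, continuously decodable video (mirrors the new implementation).
int RenderBufferSizeMsSketch(VCMJitterBuffer* jitter_buffer, VCMTiming* timing,
                             Clock* clock) {
  uint32_t timestamp_start = 0u;
  uint32_t timestamp_end = 0u;
  jitter_buffer->RenderBufferSize(&timestamp_start, &timestamp_end);
  if (timestamp_start == timestamp_end)
    return 0;  // Nothing continuous and complete is buffered.
  const int64_t now_ms = clock->TimeInMilliseconds();
  timing->SetRequiredDelay(jitter_buffer->EstimatedJitterMs());
  // RenderTimeMs() extrapolates local render times from RTP timestamps.
  uint32_t render_start = timing->RenderTimeMs(timestamp_start, now_ms);
  uint32_t render_end = timing->RenderTimeMs(timestamp_end, now_ms);
  return render_end - render_start;
}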


@ -115,6 +115,10 @@ TEST_F(TestVCMReceiver, RenderBufferSize_NoKeyFrame) {
for (int i = 0; i < num_of_frames; ++i) {
EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
}
int64_t next_render_time_ms = 0;
VCMEncodedFrame* frame = receiver_.FrameForDecoding(10, next_render_time_ms);
EXPECT_TRUE(frame == NULL);
receiver_.ReleaseFrame(frame);
EXPECT_GE(InsertFrame(kVideoFrameDelta, false), kNoError);
for (int i = 0; i < num_of_frames; ++i) {
EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);


@ -43,7 +43,7 @@ void StreamGenerator::GenerateFrame(FrameType type,
int num_media_packets,
int num_empty_packets,
int64_t current_time) {
timestamp_ += 90 * (current_time - start_time_);
timestamp_ = 90 * (current_time - start_time_);
// Move the sequence number counter if all packets from the previous frame
// wasn't collected.
sequence_number_ += packets_.size();


@ -34,8 +34,7 @@ _renderDelayMs(kDefaultRenderDelayMs),
_minTotalDelayMs(0),
_requiredDelayMs(0),
_currentDelayMs(0),
_prevFrameTimestamp(0),
_maxVideoDelayMs(kMaxVideoDelayMs)
_prevFrameTimestamp(0)
{
if (masterTiming == NULL)
{
@ -219,10 +218,6 @@ VCMTiming::RenderTimeMs(uint32_t frameTimestamp, int64_t nowMs) const
{
CriticalSectionScoped cs(_critSect);
const int64_t renderTimeMs = RenderTimeMsInternal(frameTimestamp, nowMs);
if (renderTimeMs < 0)
{
return renderTimeMs;
}
if (_master)
{
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _timingId),
@ -239,16 +234,6 @@ VCMTiming::RenderTimeMsInternal(uint32_t frameTimestamp, int64_t nowMs) const
{
int64_t estimatedCompleteTimeMs =
_tsExtrapolator->ExtrapolateLocalTime(frameTimestamp);
if (estimatedCompleteTimeMs - nowMs > _maxVideoDelayMs)
{
if (_master)
{
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _timingId),
"Timestamp arrived 2 seconds early, reset statistics",
frameTimestamp, estimatedCompleteTimeMs);
}
return -1;
}
if (_master)
{
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _timingId),
@ -315,12 +300,6 @@ VCMTiming::EnoughTimeToDecode(uint32_t availableProcessingTimeMs) const
return static_cast<int32_t>(availableProcessingTimeMs) - maxDecodeTimeMs > 0;
}
void VCMTiming::SetMaxVideoDelay(int maxVideoDelayMs)
{
CriticalSectionScoped cs(_critSect);
_maxVideoDelayMs = maxVideoDelayMs;
}
uint32_t
VCMTiming::TargetVideoDelay() const
{


@ -62,8 +62,8 @@ public:
int64_t startTimeMs,
int64_t nowMs);
// Used to report that a frame is passed to decoding. Updates the timestamp filter
// which is used to map between timestamps and receiver system time.
// Used to report that a frame is passed to decoding. Updates the timestamp
// filter which is used to map between timestamps and receiver system time.
void IncomingTimestamp(uint32_t timeStamp, int64_t lastPacketTimeMs);
// Returns the receiver system time when the frame with timestamp frameTimestamp
@ -82,16 +82,12 @@ public:
// certain amount of processing time.
bool EnoughTimeToDecode(uint32_t availableProcessingTimeMs) const;
// Set the max allowed video delay.
void SetMaxVideoDelay(int maxVideoDelayMs);
enum { kDefaultRenderDelayMs = 10 };
enum { kDelayMaxChangeMsPerS = 100 };
protected:
int32_t MaxDecodeTimeMs(FrameType frameType = kVideoFrameDelta) const;
int64_t RenderTimeMsInternal(uint32_t frameTimestamp,
int64_t nowMs) const;
int64_t RenderTimeMsInternal(uint32_t frameTimestamp, int64_t nowMs) const;
uint32_t TargetDelayInternal() const;
private:
@ -107,7 +103,6 @@ private:
uint32_t _requiredDelayMs;
uint32_t _currentDelayMs;
uint32_t _prevFrameTimestamp;
int _maxVideoDelayMs;
};
} // namespace webrtc


@ -107,10 +107,12 @@ TEST_F(VCMRobustnessTest, TestHardNack) {
InsertPacket(0, 0, true, false, kVideoFrameKey);
InsertPacket(0, 1, false, false, kVideoFrameKey);
InsertPacket(0, 2, false, true, kVideoFrameKey);
clock_->AdvanceTimeMilliseconds(1000 / 30);
InsertPacket(3000, 3, true, false, kVideoFrameDelta);
InsertPacket(3000, 4, false, false, kVideoFrameDelta);
InsertPacket(3000, 5, false, true, kVideoFrameDelta);
clock_->AdvanceTimeMilliseconds(1000 / 30);
ASSERT_EQ(VCM_OK, vcm_->Decode(0));
ASSERT_EQ(VCM_OK, vcm_->Decode(0));


@ -52,7 +52,6 @@
'../test/codec_database_test.cc',
'../test/decode_from_storage_test.cc',
'../test/generic_codec_test.cc',
'../test/jitter_buffer_test.cc',
'../test/media_opt_test.cc',
'../test/mt_test_common.cc',
'../test/mt_rx_tx_test.cc',

View File

@ -90,6 +90,23 @@ int CheckOutFrame(VCMEncodedFrame* frameOut, unsigned int size, bool startCode)
return 0;
}
VCMEncodedFrame* DecodeCompleteFrame(uint32_t max_wait_time_ms) {
uint32_t timestamp = 0;
bool found_frame = jb.NextCompleteTimestamp(max_wait_time_ms, &timestamp);
if (!found_frame)
return NULL;
return jb.ExtractAndSetDecode(timestamp);
}
VCMEncodedFrame* DecodeIncompleteFrame() {
uint32_t timestamp = 0;
bool found_frame =
jb.NextMaybeIncompleteTimestamp(&timestamp);
if (!found_frame)
return NULL;
return jb.ExtractAndSetDecode(timestamp);
}
int JitterBufferTest(CmdArgs& args) int JitterBufferTest(CmdArgs& args)
{ {
@ -107,9 +124,7 @@ int JitterBufferTest(CmdArgs& args)
seqNum = 1234; seqNum = 1234;
timeStamp = 123*90; timeStamp = 123*90;
FrameType incomingFrameType(kVideoFrameKey);
VCMEncodedFrame* frameOut=NULL; VCMEncodedFrame* frameOut=NULL;
int64_t renderTimeMs = 0;
packet.timestamp = timeStamp; packet.timestamp = timeStamp;
packet.seqNum = seqNum; packet.seqNum = seqNum;
@ -134,9 +149,8 @@ int JitterBufferTest(CmdArgs& args)
// Not started // Not started
TEST(0 == jb.GetFrame(packet)); TEST(0 == jb.GetFrame(packet));
TEST(-1 == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs)); TEST(0 == DecodeCompleteFrame(10));
TEST(0 == jb.GetCompleteFrameForDecoding(10)); TEST(0 == DecodeIncompleteFrame());
TEST(0 == jb.MaybeGetIncompleteFrameForDecoding());
// Start // Start
jb.Start(); jb.Start();
@ -149,7 +163,7 @@ int JitterBufferTest(CmdArgs& args)
TEST(frameIn != 0); TEST(frameIn != 0);
// No packets inserted // No packets inserted
TEST(0 == jb.GetCompleteFrameForDecoding(10)); TEST(0 == DecodeCompleteFrame(10));
// //
@ -167,26 +181,20 @@ int JitterBufferTest(CmdArgs& args)
// packet.isFirstPacket; // packet.isFirstPacket;
// packet.markerBit; // packet.markerBit;
// //
packet.frameType = kVideoFrameDelta; packet.frameType = kVideoFrameKey;
packet.isFirstPacket = true; packet.isFirstPacket = true;
packet.markerBit = true; packet.markerBit = true;
// Insert a packet into a frame // Insert a packet into a frame.
TEST(kFirstPacket == jb.InsertPacket(frameIn, packet)); TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
// get packet notification // Get the frame (always starts with a key frame).
TEST(timeStamp == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs)); frameOut = DecodeCompleteFrame(10);
// check incoming frame type
TEST(incomingFrameType == kVideoFrameDelta);
// get the frame
frameOut = jb.GetCompleteFrameForDecoding(10);
TEST(CheckOutFrame(frameOut, size, false) == 0); TEST(CheckOutFrame(frameOut, size, false) == 0);
// check the frame type // check the frame type
TEST(frameOut->FrameType() == kVideoFrameDelta); TEST(frameOut->FrameType() == kVideoFrameKey);
// Release frame (when done with decoding) // Release frame (when done with decoding)
jb.ReleaseFrame(frameOut); jb.ReleaseFrame(frameOut);
@ -215,14 +223,8 @@ int JitterBufferTest(CmdArgs& args)
// Insert a packet into a frame // Insert a packet into a frame
TEST(kFirstPacket == jb.InsertPacket(frameIn, packet)); TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
// get packet notification
TEST(timeStamp == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
// check incoming frame type
TEST(incomingFrameType == kVideoFrameDelta);
// get the frame // get the frame
frameOut = jb.GetCompleteFrameForDecoding(10); frameOut = DecodeCompleteFrame(10);
// it should not be complete // it should not be complete
TEST(frameOut == 0); TEST(frameOut == 0);
@ -239,7 +241,7 @@ int JitterBufferTest(CmdArgs& args)
TEST(kCompleteSession == jb.InsertPacket(frameIn, packet)); TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
// get the frame // get the frame
frameOut = jb.GetCompleteFrameForDecoding(10); frameOut = DecodeCompleteFrame(10);
TEST(CheckOutFrame(frameOut, size*2, false) == 0); TEST(CheckOutFrame(frameOut, size*2, false) == 0);
@ -274,14 +276,8 @@ int JitterBufferTest(CmdArgs& args)
// Insert a packet into a frame // Insert a packet into a frame
TEST(kFirstPacket == jb.InsertPacket(frameIn, packet)); TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
// get packet notification
TEST(timeStamp == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
// check incoming frame type
TEST(incomingFrameType == kVideoFrameKey);
// get the frame // get the frame
frameOut = jb.GetCompleteFrameForDecoding(10); frameOut = DecodeCompleteFrame(10);
// it should not be complete // it should not be complete
TEST(frameOut == 0); TEST(frameOut == 0);
@ -316,7 +312,7 @@ int JitterBufferTest(CmdArgs& args)
TEST(kCompleteSession == jb.InsertPacket(frameIn, packet)); TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
// get the frame // get the frame
frameOut = jb.GetCompleteFrameForDecoding(10); frameOut = DecodeCompleteFrame(10);
TEST(CheckOutFrame(frameOut, size*100, false) == 0); TEST(CheckOutFrame(frameOut, size*100, false) == 0);
@ -350,14 +346,8 @@ int JitterBufferTest(CmdArgs& args)
// Insert a packet into a frame // Insert a packet into a frame
TEST(kFirstPacket == jb.InsertPacket(frameIn, packet)); TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
// get packet notification
TEST(timeStamp == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
// check incoming frame type
TEST(incomingFrameType == kVideoFrameDelta);
// get the frame // get the frame
frameOut = jb.GetCompleteFrameForDecoding(10); frameOut = DecodeCompleteFrame(10);
// it should not be complete // it should not be complete
TEST(frameOut == 0); TEST(frameOut == 0);
@ -392,7 +382,7 @@ int JitterBufferTest(CmdArgs& args)
TEST(kCompleteSession == jb.InsertPacket(frameIn, packet)); TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
// get the frame // get the frame
frameOut = jb.GetCompleteFrameForDecoding(10); frameOut = DecodeCompleteFrame(10);
TEST(CheckOutFrame(frameOut, size*100, false) == 0); TEST(CheckOutFrame(frameOut, size*100, false) == 0);
@ -427,14 +417,8 @@ int JitterBufferTest(CmdArgs& args)
// Insert a packet into a frame // Insert a packet into a frame
TEST(kFirstPacket == jb.InsertPacket(frameIn, packet)); TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
// get packet notification
TEST(timeStamp == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
// check incoming frame type
TEST(incomingFrameType == kVideoFrameDelta);
// get the frame // get the frame
frameOut = jb.GetCompleteFrameForDecoding(10); frameOut = DecodeCompleteFrame(10);
// it should not be complete // it should not be complete
TEST(frameOut == 0); TEST(frameOut == 0);
@ -469,7 +453,7 @@ int JitterBufferTest(CmdArgs& args)
TEST(kCompleteSession == jb.InsertPacket(frameIn, packet)); TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
// get the frame // get the frame
frameOut = jb.GetCompleteFrameForDecoding(10); frameOut = DecodeCompleteFrame(10);
TEST(CheckOutFrame(frameOut, size*100, false) == 0); TEST(CheckOutFrame(frameOut, size*100, false) == 0);
@ -504,14 +488,8 @@ int JitterBufferTest(CmdArgs& args)
// Insert a packet into a frame // Insert a packet into a frame
TEST(kFirstPacket == jb.InsertPacket(frameIn, packet)); TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
// get packet notification
TEST(timeStamp == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
// check incoming frame type
TEST(incomingFrameType == kVideoFrameDelta);
// get the frame // get the frame
frameOut = jb.GetCompleteFrameForDecoding(10); frameOut = DecodeCompleteFrame(10);
// it should not be complete // it should not be complete
TEST(frameOut == 0); TEST(frameOut == 0);
@ -528,7 +506,7 @@ int JitterBufferTest(CmdArgs& args)
TEST(kCompleteSession == jb.InsertPacket(frameIn, packet)); TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
// check that we fail to get frame since seqnum is not continuous // check that we fail to get frame since seqnum is not continuous
frameOut = jb.GetCompleteFrameForDecoding(10); frameOut = DecodeCompleteFrame(10);
TEST(frameOut == 0); TEST(frameOut == 0);
seqNum -= 3; seqNum -= 3;
@ -545,12 +523,6 @@ int JitterBufferTest(CmdArgs& args)
// Insert a packet into a frame // Insert a packet into a frame
TEST(kFirstPacket == jb.InsertPacket(frameIn, packet)); TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
// get packet notification
TEST(timeStamp == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
// check incoming frame type
TEST(incomingFrameType == kVideoFrameDelta);
// get the frame // get the frame
frameOut = jb.GetCompleteFrameForDecoding(10); frameOut = jb.GetCompleteFrameForDecoding(10);
@ -569,7 +541,7 @@ int JitterBufferTest(CmdArgs& args)
TEST(kCompleteSession == jb.InsertPacket(frameIn, packet)); TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
// get the frame // get the frame
frameOut = jb.GetCompleteFrameForDecoding(10); frameOut = DecodeCompleteFrame(10);
TEST(CheckOutFrame(frameOut, size*2, false) == 0); TEST(CheckOutFrame(frameOut, size*2, false) == 0);
@ -580,7 +552,7 @@ int JitterBufferTest(CmdArgs& args)
jb.ReleaseFrame(frameOut); jb.ReleaseFrame(frameOut);
// get the frame // get the frame
frameOut = jb.GetCompleteFrameForDecoding(10); frameOut = DecodeCompleteFrame(10);
TEST(CheckOutFrame(frameOut, size*2, false) == 0); TEST(CheckOutFrame(frameOut, size*2, false) == 0);
@ -619,14 +591,8 @@ int JitterBufferTest(CmdArgs& args)
// Insert a packet into a frame // Insert a packet into a frame
TEST(kFirstPacket == jb.InsertPacket(frameIn, packet)); TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
// get packet notification
TEST(timeStamp == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
// check incoming frame type
TEST(incomingFrameType == kVideoFrameDelta);
// get the frame // get the frame
frameOut = jb.GetCompleteFrameForDecoding(10); frameOut = DecodeCompleteFrame(10);
// it should not be complete // it should not be complete
TEST(frameOut == 0); TEST(frameOut == 0);
@ -646,7 +612,7 @@ int JitterBufferTest(CmdArgs& args)
TEST(kCompleteSession == jb.InsertPacket(frameIn, packet)); TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
// get the frame // get the frame
frameOut = jb.GetCompleteFrameForDecoding(10); frameOut = DecodeCompleteFrame(10);
TEST(CheckOutFrame(frameOut, size*2, false) == 0); TEST(CheckOutFrame(frameOut, size*2, false) == 0);
@ -681,14 +647,8 @@ int JitterBufferTest(CmdArgs& args)
// Insert a packet into a frame // Insert a packet into a frame
TEST(kFirstPacket == jb.InsertPacket(frameIn, packet)); TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
// get packet notification
TEST(timeStamp == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
// check incoming frame type
TEST(incomingFrameType == kVideoFrameDelta);
// get the frame // get the frame
frameOut = jb.GetCompleteFrameForDecoding(10); frameOut = DecodeCompleteFrame(10);
// it should not be complete // it should not be complete
TEST(frameOut == 0); TEST(frameOut == 0);
@ -705,7 +665,7 @@ int JitterBufferTest(CmdArgs& args)
TEST(kCompleteSession == jb.InsertPacket(frameIn, packet)); TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
// get the frame // get the frame
frameOut = jb.GetCompleteFrameForDecoding(10); frameOut = DecodeCompleteFrame(10);
TEST(CheckOutFrame(frameOut, size * 2 + 4 * 2, true) == 0); TEST(CheckOutFrame(frameOut, size * 2 + 4 * 2, true) == 0);
@ -763,22 +723,8 @@ int JitterBufferTest(CmdArgs& args)
// Insert a packet into a frame // Insert a packet into a frame
TEST(kFirstPacket == jb.InsertPacket(frameIn, packet)); TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
// Get packet notification
TEST(timeStamp - 33 * 90 == jb.NextTimestamp(10, &incomingFrameType,
&renderTimeMs));
// Check incoming frame type
if (i == 0)
{
TEST(incomingFrameType == kVideoFrameKey);
}
else
{
TEST(incomingFrameType == frametype);
}
// Get the frame // Get the frame
frameOut = jb.GetCompleteFrameForDecoding(10); frameOut = DecodeCompleteFrame(10);
// Should not be complete // Should not be complete
TEST(frameOut == 0); TEST(frameOut == 0);
@ -811,7 +757,7 @@ int JitterBufferTest(CmdArgs& args)
TEST(kIncomplete == jb.InsertPacket(frameIn, packet)); TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
// Get the frame // Get the frame
frameOut = jb.MaybeGetIncompleteFrameForDecoding(); frameOut = DecodeIncompleteFrame();
// One of the packets has been discarded by the jitter buffer. // One of the packets has been discarded by the jitter buffer.
// Last frame can't be extracted yet. // Last frame can't be extracted yet.
@ -881,7 +827,7 @@ int JitterBufferTest(CmdArgs& args)
// insert first packet // insert first packet
timeStamp += 33*90; timeStamp += 33*90;
seqNum = 0xfff0; seqNum = 0xfff0;
packet.frameType = kVideoFrameDelta; packet.frameType = kVideoFrameKey;
packet.isFirstPacket = true; packet.isFirstPacket = true;
packet.markerBit = false; packet.markerBit = false;
packet.seqNum = seqNum; packet.seqNum = seqNum;
@ -893,19 +839,13 @@ int JitterBufferTest(CmdArgs& args)
// Insert a packet into a frame // Insert a packet into a frame
TEST(kFirstPacket == jb.InsertPacket(frameIn, packet)); TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
// get packet notification
TEST(timeStamp == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
// check incoming frame type
TEST(incomingFrameType == kVideoFrameDelta);
// get the frame // get the frame
frameOut = jb.GetCompleteFrameForDecoding(10); frameOut = DecodeCompleteFrame(10);
// it should not be complete // it should not be complete
TEST(frameOut == 0); TEST(frameOut == 0);
// insert 98 packets // Insert 98 packets.
loop = 0; loop = 0;
do do
{ {
@ -920,15 +860,8 @@ int JitterBufferTest(CmdArgs& args)
// Insert a packet into a frame // Insert a packet into a frame
TEST(kIncomplete == jb.InsertPacket(frameIn, packet)); TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
// get packet notification
TEST(timeStamp == jb.NextTimestamp(2, &incomingFrameType,
&renderTimeMs));
// check incoming frame type
TEST(incomingFrameType == kVideoFrameDelta);
// get the frame // get the frame
frameOut = jb.GetCompleteFrameForDecoding(2); frameOut = DecodeCompleteFrame(2);
// it should not be complete // it should not be complete
TEST(frameOut == 0); TEST(frameOut == 0);
@ -949,12 +882,12 @@ int JitterBufferTest(CmdArgs& args)
TEST(kCompleteSession == jb.InsertPacket(frameIn, packet)); TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
// get the frame // get the frame
frameOut = jb.GetCompleteFrameForDecoding(10); frameOut = DecodeCompleteFrame(10);
TEST(CheckOutFrame(frameOut, size*100, false) == 0); TEST(CheckOutFrame(frameOut, size*100, false) == 0);
// check the frame type // check the frame type
TEST(frameOut->FrameType() == kVideoFrameDelta); TEST(frameOut->FrameType() == kVideoFrameKey);
// Release frame (when done with decoding) // Release frame (when done with decoding)
jb.ReleaseFrame(frameOut); jb.ReleaseFrame(frameOut);
@ -975,7 +908,7 @@ int JitterBufferTest(CmdArgs& args)
// insert "first" packet last seqnum // insert "first" packet last seqnum
timeStamp += 33*90; timeStamp += 33*90;
seqNum = 10; seqNum = 10;
packet.frameType = kVideoFrameDelta; packet.frameType = kVideoFrameKey;
packet.isFirstPacket = false; packet.isFirstPacket = false;
packet.markerBit = true; packet.markerBit = true;
packet.seqNum = seqNum; packet.seqNum = seqNum;
@ -987,14 +920,8 @@ int JitterBufferTest(CmdArgs& args)
// Insert a packet into a frame // Insert a packet into a frame
TEST(kFirstPacket == jb.InsertPacket(frameIn, packet)); TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
// get packet notification
TEST(timeStamp == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
// check incoming frame type
TEST(incomingFrameType == kVideoFrameDelta);
// get the frame // get the frame
frameOut = jb.GetCompleteFrameForDecoding(10); frameOut = DecodeIncompleteFrame();
// it should not be complete // it should not be complete
TEST(frameOut == 0); TEST(frameOut == 0);
@ -1014,15 +941,8 @@ int JitterBufferTest(CmdArgs& args)
// Insert a packet into a frame // Insert a packet into a frame
TEST(kIncomplete == jb.InsertPacket(frameIn, packet)); TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
// get packet notification
TEST(timeStamp == jb.NextTimestamp(2, &incomingFrameType,
&renderTimeMs));
// check incoming frame type
TEST(incomingFrameType == kVideoFrameDelta);
// get the frame // get the frame
frameOut = jb.GetCompleteFrameForDecoding(2); frameOut = DecodeCompleteFrame(2);
// it should not be complete // it should not be complete
TEST(frameOut == 0); TEST(frameOut == 0);
@@ -1043,7 +963,7 @@ int JitterBufferTest(CmdArgs& args)
TEST(kCompleteSession == jb.InsertPacket(frameIn, packet)); TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
// get the frame // get the frame
frameOut = jb.GetCompleteFrameForDecoding(10); frameOut = DecodeIncompleteFrame();
TEST(CheckOutFrame(frameOut, size*100, false) == 0); TEST(CheckOutFrame(frameOut, size*100, false) == 0);
@@ -1068,7 +988,7 @@ int JitterBufferTest(CmdArgs& args)
// insert "first" packet last seqnum // insert "first" packet last seqnum
timeStamp += 33*90; timeStamp += 33*90;
seqNum = 1; seqNum = 1;
packet.frameType = kVideoFrameDelta; packet.frameType = kVideoFrameKey;
packet.isFirstPacket = false; packet.isFirstPacket = false;
packet.markerBit = true; packet.markerBit = true;
packet.seqNum = seqNum; packet.seqNum = seqNum;
@@ -1080,14 +1000,8 @@ int JitterBufferTest(CmdArgs& args)
// Insert a packet into a frame // Insert a packet into a frame
TEST(kFirstPacket == jb.InsertPacket(frameIn, packet)); TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
// get packet notification
TEST(timeStamp == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
// check incoming frame type
TEST(incomingFrameType == kVideoFrameDelta);
// get the frame // get the frame
frameOut = jb.GetCompleteFrameForDecoding(10); frameOut = DecodeCompleteFrame(10);
// it should not be complete // it should not be complete
TEST(frameOut == 0); TEST(frameOut == 0);
@@ -1104,14 +1018,8 @@ int JitterBufferTest(CmdArgs& args)
// Insert a packet into a frame // Insert a packet into a frame
TEST(kIncomplete == jb.InsertPacket(frameIn, packet)); TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
// get packet notification
TEST(timeStamp == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
// check incoming frame type
TEST(incomingFrameType == kVideoFrameDelta);
// get the frame // get the frame
frameOut = jb.GetCompleteFrameForDecoding(10); frameOut = DecodeIncompleteFrame();
// it should not be complete // it should not be complete
TEST(frameOut == 0); TEST(frameOut == 0);
@@ -1128,7 +1036,7 @@ int JitterBufferTest(CmdArgs& args)
TEST(kCompleteSession == jb.InsertPacket(frameIn, packet)); TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
// get the frame // get the frame
frameOut = jb.GetCompleteFrameForDecoding(10); frameOut = DecodeIncompleteFrame();
TEST(CheckOutFrame(frameOut, size*3, false) == 0); TEST(CheckOutFrame(frameOut, size*3, false) == 0);
@@ -1165,12 +1073,8 @@ int JitterBufferTest(CmdArgs& args)
// Insert a packet into a frame // Insert a packet into a frame
TEST(kFirstPacket == jb.InsertPacket(frameIn, packet)); TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
// get packet notification
TEST(3000 == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
TEST(kVideoFrameDelta == incomingFrameType);
// Get the frame // Get the frame
frameOut = jb.GetCompleteFrameForDecoding(10); frameOut = DecodeCompleteFrame();
TEST(3000 == frameOut->TimeStamp()); TEST(3000 == frameOut->TimeStamp());
TEST(CheckOutFrame(frameOut, size, false) == 0); TEST(CheckOutFrame(frameOut, size, false) == 0);
@@ -1219,12 +1123,8 @@ int JitterBufferTest(CmdArgs& args)
// Insert a packet into a frame // Insert a packet into a frame
TEST(kFirstPacket == jb.InsertPacket(frameIn, packet)); TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
// get packet notification
TEST(timeStamp == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
TEST(kVideoFrameDelta == incomingFrameType);
// Get the frame // Get the frame
frameOut = jb.GetCompleteFrameForDecoding(10); frameOut = DecodeIncompleteFrame();
TEST(timeStamp == frameOut->TimeStamp()); TEST(timeStamp == frameOut->TimeStamp());
TEST(CheckOutFrame(frameOut, size, false) == 0); TEST(CheckOutFrame(frameOut, size, false) == 0);
@@ -1270,14 +1170,8 @@ int JitterBufferTest(CmdArgs& args)
// Insert a packet into a frame // Insert a packet into a frame
TEST(kFirstPacket == jb.InsertPacket(frameIn, packet)); TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
// get packet notification
TEST(timeStamp == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
// check incoming frame type
TEST(incomingFrameType == kVideoFrameDelta);
// get the frame // get the frame
frameOut = jb.GetCompleteFrameForDecoding(10); frameOut = DecodeCompleteFrame(10);
// it should not be complete // it should not be complete
TEST(frameOut == 0); TEST(frameOut == 0);
@@ -1293,7 +1187,7 @@ int JitterBufferTest(CmdArgs& args)
// Insert a packet into a frame // Insert a packet into a frame
TEST(kCompleteSession == jb.InsertPacket(frameIn, packet)); TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
frameOut = jb.GetCompleteFrameForDecoding(10); frameOut = DecodeIncompleteFrame();
TEST(CheckOutFrame(frameOut, size*2, false) == 0); TEST(CheckOutFrame(frameOut, size*2, false) == 0);
@@ -1313,14 +1207,8 @@ int JitterBufferTest(CmdArgs& args)
// Insert a packet into a frame // Insert a packet into a frame
TEST(kFirstPacket == jb.InsertPacket(frameIn, packet)); TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
// get packet notification
TEST(timeStamp == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
// check incoming frame type
TEST(incomingFrameType == kVideoFrameDelta);
// get the frame // get the frame
frameOut = jb.GetCompleteFrameForDecoding(10); frameOut = DecodeCompleteFrame(10);
// it should not be complete // it should not be complete
TEST(frameOut == 0); TEST(frameOut == 0);
@@ -1337,7 +1225,7 @@ int JitterBufferTest(CmdArgs& args)
TEST(kCompleteSession == jb.InsertPacket(frameIn, packet)); TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
// get the frame // get the frame
frameOut = jb.GetCompleteFrameForDecoding(10); frameOut = DecodeIncompleteFrame();
TEST(CheckOutFrame(frameOut, size*2, false) == 0); TEST(CheckOutFrame(frameOut, size*2, false) == 0);
@@ -1373,10 +1261,6 @@ int JitterBufferTest(CmdArgs& args)
// Insert first frame // Insert first frame
TEST(kFirstPacket == jb.InsertPacket(frameIn, packet)); TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
// Get packet notification
TEST(0xffffff00 == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
TEST(kVideoFrameDelta == incomingFrameType);
// Insert next frame // Insert next frame
seqNum++; seqNum++;
timeStamp = 2700; timeStamp = 2700;
@@ -1392,12 +1276,8 @@ int JitterBufferTest(CmdArgs& args)
// Insert a packet into a frame // Insert a packet into a frame
TEST(kFirstPacket == jb.InsertPacket(frameIn, packet)); TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
// Get packet notification
TEST(0xffffff00 == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
TEST(kVideoFrameDelta == incomingFrameType);
// Get frame // Get frame
frameOut = jb.GetCompleteFrameForDecoding(10); frameOut = jb.GetFrameForDecoding();
TEST(0xffffff00 == frameOut->TimeStamp()); TEST(0xffffff00 == frameOut->TimeStamp());
TEST(CheckOutFrame(frameOut, size, false) == 0); TEST(CheckOutFrame(frameOut, size, false) == 0);
@@ -1405,12 +1285,8 @@ int JitterBufferTest(CmdArgs& args)
// check the frame type // check the frame type
TEST(frameOut->FrameType() == kVideoFrameDelta); TEST(frameOut->FrameType() == kVideoFrameDelta);
// Get packet notification
TEST(2700 == jb.NextTimestamp(0, &incomingFrameType, &renderTimeMs));
TEST(kVideoFrameDelta == incomingFrameType);
// Get frame // Get frame
VCMEncodedFrame* frameOut2 = jb.GetCompleteFrameForDecoding(10); VCMEncodedFrame* frameOut2 = DecodeIncompleteFrame();
TEST(2700 == frameOut2->TimeStamp()); TEST(2700 == frameOut2->TimeStamp());
TEST(CheckOutFrame(frameOut2, size, false) == 0); TEST(CheckOutFrame(frameOut2, size, false) == 0);
@@ -1448,10 +1324,6 @@ int JitterBufferTest(CmdArgs& args)
// Insert first frame // Insert first frame
TEST(kFirstPacket == jb.InsertPacket(frameIn, packet)); TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
// Get packet notification
TEST(2700 == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
TEST(kVideoFrameDelta == incomingFrameType);
// Insert second frame // Insert second frame
seqNum--; seqNum--;
timeStamp = 0xffffff00; timeStamp = 0xffffff00;
@@ -1467,12 +1339,8 @@ int JitterBufferTest(CmdArgs& args)
// Insert a packet into a frame // Insert a packet into a frame
TEST(kFirstPacket == jb.InsertPacket(frameIn, packet)); TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
// Get packet notification
TEST(0xffffff00 == jb.NextTimestamp(10, &incomingFrameType, &renderTimeMs));
TEST(kVideoFrameDelta == incomingFrameType);
// Get frame // Get frame
frameOut = jb.GetCompleteFrameForDecoding(10); frameOut = jb.GetFrameForDecoding();
TEST(0xffffff00 == frameOut->TimeStamp()); TEST(0xffffff00 == frameOut->TimeStamp());
TEST(CheckOutFrame(frameOut, size, false) == 0); TEST(CheckOutFrame(frameOut, size, false) == 0);
@@ -1480,12 +1348,8 @@ int JitterBufferTest(CmdArgs& args)
// check the frame type // check the frame type
TEST(frameOut->FrameType() == kVideoFrameDelta); TEST(frameOut->FrameType() == kVideoFrameDelta);
// get packet notification
TEST(2700 == jb.NextTimestamp(0, &incomingFrameType, &renderTimeMs));
TEST(kVideoFrameDelta == incomingFrameType);
// Get frame // Get frame
frameOut2 = jb.GetCompleteFrameForDecoding(10); frameOut2 = DecodeIncompleteFrame();
TEST(2700 == frameOut2->TimeStamp()); TEST(2700 == frameOut2->TimeStamp());
TEST(CheckOutFrame(frameOut2, size, false) == 0); TEST(CheckOutFrame(frameOut2, size, false) == 0);
@@ -1530,13 +1394,6 @@ int JitterBufferTest(CmdArgs& args)
TEST(kIncomplete == jb.InsertPacket(frameIn, packet)); TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
} }
// get packet notification
TEST(packet.timestamp == jb.NextTimestamp(10, &incomingFrameType,
&renderTimeMs));
// check incoming frame type
TEST(incomingFrameType == kVideoFrameDelta);
loop++; loop++;
} while (loop < kMaxPacketsInSession); } while (loop < kMaxPacketsInSession);
@@ -1554,7 +1411,7 @@ int JitterBufferTest(CmdArgs& args)
// Insert the packet -> frame recycled // Insert the packet -> frame recycled
TEST(kSizeError == jb.InsertPacket(frameIn, packet)); TEST(kSizeError == jb.InsertPacket(frameIn, packet));
TEST(0 == jb.GetCompleteFrameForDecoding(10)); TEST(0 == DecodeIncompleteFrame());
//printf("DONE fill frame - packets > max number of packets\n"); //printf("DONE fill frame - packets > max number of packets\n");
@@ -1571,8 +1428,6 @@ int JitterBufferTest(CmdArgs& args)
loop = 0; loop = 0;
seqNum = 65485; seqNum = 65485;
uint32_t timeStampStart = timeStamp + 33*90;
uint32_t timeStampFirstKey = 0;
VCMEncodedFrame* ptrLastDeltaFrame = NULL; VCMEncodedFrame* ptrLastDeltaFrame = NULL;
VCMEncodedFrame* ptrFirstKeyFrame = NULL; VCMEncodedFrame* ptrFirstKeyFrame = NULL;
// insert MAX_NUMBER_OF_FRAMES frames // insert MAX_NUMBER_OF_FRAMES frames
@@ -1596,19 +1451,11 @@ int JitterBufferTest(CmdArgs& args)
{ {
ptrFirstKeyFrame = frameIn; ptrFirstKeyFrame = frameIn;
packet.frameType = kVideoFrameKey; packet.frameType = kVideoFrameKey;
timeStampFirstKey = packet.timestamp;
} }
// Insert frame // Insert frame
TEST(kFirstPacket == jb.InsertPacket(frameIn, packet)); TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
// Get packet notification, should be first inserted frame
TEST(timeStampStart == jb.NextTimestamp(10, &incomingFrameType,
&renderTimeMs));
// check incoming frame type
TEST(incomingFrameType == kVideoFrameDelta);
loop++; loop++;
} while (loop < kMaxNumberOfFrames); } while (loop < kMaxNumberOfFrames);
@@ -1630,15 +1477,8 @@ int JitterBufferTest(CmdArgs& args)
// Insert frame // Insert frame
TEST(kFirstPacket == jb.InsertPacket(frameIn, packet)); TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
// First inserted key frame should be oldest in buffer
TEST(timeStampFirstKey == jb.NextTimestamp(10, &incomingFrameType,
&renderTimeMs));
// check incoming frame type
TEST(incomingFrameType == kVideoFrameKey);
// get the first key frame // get the first key frame
frameOut = jb.GetCompleteFrameForDecoding(10); frameOut = jb.GetFrameForDecoding();
TEST(ptrFirstKeyFrame == frameOut); TEST(ptrFirstKeyFrame == frameOut);
TEST(CheckOutFrame(frameOut, size, false) == 0); TEST(CheckOutFrame(frameOut, size, false) == 0);
@@ -1744,9 +1584,6 @@ int JitterBufferTest(CmdArgs& args)
frameIn = jb.GetFrame(packet); frameIn = jb.GetFrame(packet);
TEST(kFirstPacket == jb.InsertPacket(frameIn, packet)); TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
// Get packet notification
TEST(timeStamp == jb.NextTimestamp(10, &incomingFrameType,
&renderTimeMs));
frameOut = jb.MaybeGetIncompleteFrameForDecoding(); frameOut = jb.MaybeGetIncompleteFrameForDecoding();
// We can decode everything from a NALU until a packet has been lost. // We can decode everything from a NALU until a packet has been lost.
@@ -1863,7 +1700,7 @@ int JitterBufferTest(CmdArgs& args)
TEST(kCompleteSession == jb.InsertPacket(frameIn, emptypacket)); TEST(kCompleteSession == jb.InsertPacket(frameIn, emptypacket));
// get the frame // get the frame
frameOut = jb.GetCompleteFrameForDecoding(10); frameOut = jb.GetFrameForDecoding();
// Only last NALU is complete // Only last NALU is complete
TEST(CheckOutFrame(frameOut, packet.sizeBytes, false) == 0); TEST(CheckOutFrame(frameOut, packet.sizeBytes, false) == 0);
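As a note on the helper names introduced on the right-hand side above: DecodeCompleteFrame() and DecodeIncompleteFrame() stand in for the removed NextTimestamp()/GetCompleteFrameForDecoding() pairing. Below is a minimal sketch of what such a helper might wrap; it is an illustration only, includes are omitted, and NextCompleteTimestamp() and ExtractAndSetDecode() are assumptions about the updated VCMJitterBuffer interface rather than a quote of the real test code.

// Hypothetical test helper, not the actual code from this change.
// Assumes the jitter buffer declarations are visible and that it exposes:
//   bool NextCompleteTimestamp(uint32_t max_wait_time_ms, uint32_t* timestamp);
//   VCMEncodedFrame* ExtractAndSetDecode(uint32_t timestamp);
static VCMEncodedFrame* DecodeCompleteFrame(VCMJitterBuffer* jb,
                                            uint32_t max_wait_time_ms) {
  uint32_t timestamp = 0;
  // Wait up to max_wait_time_ms for a complete, decodable frame.
  if (!jb->NextCompleteTimestamp(max_wait_time_ms, &timestamp)) {
    return NULL;  // Timed out; the tests above cover this with TEST(frameOut == 0).
  }
  // Extract the frame; the caller later hands it back with jb->ReleaseFrame().
  return jb->ExtractAndSetDecode(timestamp);
}

The tests call the helper without passing the buffer (frameOut = DecodeCompleteFrame(10)), so the real helper presumably reaches the jitter buffer from the surrounding test scope; the explicit parameter here only keeps the sketch self-contained.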

View File

@@ -98,7 +98,6 @@ int main(int argc, char **argv) {
ret = NormalTest::RunTest(args); ret = NormalTest::RunTest(args);
ret |= CodecDataBaseTest::RunTest(args); ret |= CodecDataBaseTest::RunTest(args);
ret |= ReceiverTimingTests(args); ret |= ReceiverTimingTests(args);
ret |= JitterBufferTest(args);
break; break;
case 1: case 1:
ret = NormalTest::RunTest(args); ret = NormalTest::RunTest(args);
@@ -126,12 +125,9 @@ int main(int argc, char **argv) {
ret = RtpPlayMT(args); ret = RtpPlayMT(args);
break; break;
case 9: case 9:
ret = JitterBufferTest(args);
break;
case 10:
ret = DecodeFromStorageTest(args); ret = DecodeFromStorageTest(args);
break; break;
case 11: case 10:
qualityModeTest(args); qualityModeTest(args);
break; break;
default: default:
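Since the renumbering is easy to miss in the paired rows above, the tail of the tester's switch statement after this change reads as follows, reconstructed from those rows (indentation approximate):

    case 9:
      ret = DecodeFromStorageTest(args);
      break;
    case 10:
      qualityModeTest(args);
      break;
    default:
      // (default handling as before)

and, as the first hunk of this file shows, the batch run no longer includes JitterBufferTest(args).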