Create VideoReceiver with external VCMTiming object.

In order for the VCMTiming object to be correctly updated with decoding
timings when running the WebRTC-NewVideoJitterBuffer experiment, the
VCMTiming object has to be available to both the VideoReceiver and the
video_coding::FrameBuffer classes. The VCMTiming object is therefore created
in VideoReceiveStream and then passed to VideoReceiver and
video_coding::FrameBuffer as they are constructed.
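
The ownership pattern this implies, sketched below with illustrative stand-in
classes (the real VideoReceiveStream, VideoReceiver and FrameBuffer
constructors take more parameters than shown), is roughly: the stream creates
the timing object once and hands the same non-owning pointer to both consumers.

#include <memory>

// Illustrative stand-ins only; names and signatures are simplified.
struct Timing {
  int decode_ms = 0;
};

class Receiver {
 public:
  explicit Receiver(Timing* timing) : timing_(timing) {}
  // Writes into the shared timing object.
  void OnFrameDecoded(int ms) { timing_->decode_ms = ms; }
 private:
  Timing* const timing_;  // not owned
};

class FrameBuffer {
 public:
  explicit FrameBuffer(Timing* timing) : timing_(timing) {}
  // Reads the value written by Receiver, since both hold the same pointer.
  int LastDecodeMs() const { return timing_->decode_ms; }
 private:
  Timing* const timing_;  // not owned
};

class ReceiveStream {
 public:
  ReceiveStream()
      : timing_(new Timing()),
        receiver_(timing_.get()),
        frame_buffer_(timing_.get()) {}
 private:
  // Declared first so it is constructed before, and destroyed after, the two
  // members that keep raw pointers to it.
  std::unique_ptr<Timing> timing_;
  Receiver receiver_;
  FrameBuffer frame_buffer_;
};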

BUG=webrtc:5514

Review-Url: https://codereview.webrtc.org/2575473004
Cr-Commit-Position: refs/heads/master@{#15638}
Authored by philipel on 2016-12-15 07:10:57 -08:00; committed by Commit bot.
parent ac8d5164f0
commit 721d402d71
6 changed files with 28 additions and 18 deletions

@@ -26,16 +26,17 @@ namespace vcm {
 VideoReceiver::VideoReceiver(Clock* clock,
                              EventFactory* event_factory,
                              EncodedImageCallback* pre_decode_image_callback,
+                             VCMTiming* timing,
                              NackSender* nack_sender,
                              KeyFrameRequestSender* keyframe_request_sender)
     : clock_(clock),
-      _timing(clock_),
-      _receiver(&_timing,
+      _timing(timing),
+      _receiver(_timing,
                 clock_,
                 event_factory,
                 nack_sender,
                 keyframe_request_sender),
-      _decodedFrameCallback(&_timing, clock_),
+      _decodedFrameCallback(_timing, clock_),
       _frameTypeCallback(nullptr),
       _receiveStatsCallback(nullptr),
       _decoderTimingCallback(nullptr),
@@ -73,9 +74,9 @@ void VideoReceiver::Process() {
     int jitter_buffer_ms;
     int min_playout_delay_ms;
     int render_delay_ms;
-    _timing.GetTimings(&decode_ms, &max_decode_ms, &current_delay_ms,
-                       &target_delay_ms, &jitter_buffer_ms,
-                       &min_playout_delay_ms, &render_delay_ms);
+    _timing->GetTimings(&decode_ms, &max_decode_ms, &current_delay_ms,
+                        &target_delay_ms, &jitter_buffer_ms,
+                        &min_playout_delay_ms, &render_delay_ms);
     _decoderTimingCallback->OnDecoderTiming(
         decode_ms, max_decode_ms, current_delay_ms, target_delay_ms,
         jitter_buffer_ms, min_playout_delay_ms, render_delay_ms);
@@ -276,8 +277,8 @@ int32_t VideoReceiver::Decode(uint16_t maxWaitTimeMs) {
     rtc::CritScope cs(&receive_crit_);
     // If this frame was too late, we should adjust the delay accordingly
-    _timing.UpdateCurrentDelay(frame->RenderTimeMs(),
-                               clock_->TimeInMilliseconds());
+    _timing->UpdateCurrentDelay(frame->RenderTimeMs(),
+                                clock_->TimeInMilliseconds());

     if (first_frame_received_()) {
       LOG(LS_INFO) << "Received first "
@@ -440,20 +441,20 @@ int32_t VideoReceiver::IncomingPacket(const uint8_t* incomingPayload,
 // to sync with audio. Not included in VideoCodingModule::Delay()
 // Defaults to 0 ms.
 int32_t VideoReceiver::SetMinimumPlayoutDelay(uint32_t minPlayoutDelayMs) {
-  _timing.set_min_playout_delay(minPlayoutDelayMs);
+  _timing->set_min_playout_delay(minPlayoutDelayMs);
   return VCM_OK;
 }

 // The estimated delay caused by rendering, defaults to
 // kDefaultRenderDelayMs = 10 ms
 int32_t VideoReceiver::SetRenderDelay(uint32_t timeMS) {
-  _timing.set_render_delay(timeMS);
+  _timing->set_render_delay(timeMS);
   return VCM_OK;
 }

 // Current video delay
 int32_t VideoReceiver::Delay() const {
-  return _timing.TargetVideoDelay();
+  return _timing->TargetVideoDelay();
 }

 uint32_t VideoReceiver::DiscardedPackets() const {
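
The dot-to-arrow conversions above imply a matching change in the receiver's
header, where _timing switches from a value member to a non-owning pointer.
That declaration is not part of the hunks shown here, so the snippet below is
only an assumed reconstruction.

// Assumed header-side counterpart (not shown in this diff): VideoReceiver now
// borrows the VCMTiming created by VideoReceiveStream instead of owning one.
// Was:
//   VCMTiming _timing;
// Now (exact qualifiers are an assumption):
VCMTiming* _timing;  // not owned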