Make the new jitter buffer the default jitter buffer.
This CL contains only the changes necessary to switch to the new jitter buffer; clean-up will be done in follow-up CLs.

In this CL:
- Removed the WebRTC-NewVideoJitterBuffer experiment and made the new video jitter buffer the default one.
- Moved WebRTC.Video.KeyFramesReceivedInPermille and WebRTC.Video.JitterBufferDelayInMs to the ReceiveStatisticsProxy.

BUG=webrtc:5514
Review-Url: https://codereview.webrtc.org/2627463004
Cr-Commit-Position: refs/heads/master@{#16114}
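As background for the histogram move: WebRTC.Video.KeyFramesReceivedInPermille is just the rounded key-frame share of all complete frames. The standalone sketch below is a hypothetical helper (not part of the CL) that mirrors the arithmetic added to ReceiveStatisticsProxy::UpdateHistograms() in the diff.

#include <cstdio>

// Minimal sketch of the permille computation used for
// WebRTC.Video.KeyFramesReceivedInPermille: key frames / total frames,
// scaled to permille and rounded to the nearest integer.
int KeyFramesPermille(int key_frames, int delta_frames) {
  float num_key_frames = key_frames;
  float num_total_frames = key_frames + delta_frames;
  if (num_total_frames == 0.0f)
    return 0;  // The real code only reports once at least one frame was seen.
  return static_cast<int>(num_key_frames * 1000.0f / num_total_frames + 0.5f);
}

int main() {
  printf("%d\n", KeyFramesPermille(3, 97));  // 3 of 100 frames -> 30 permille.
  return 0;
}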
@@ -16,6 +16,7 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/modules/video_coding/include/video_coding_defines.h"
#include "webrtc/modules/video_coding/jitter_estimator.h"
#include "webrtc/modules/video_coding/timing.h"
#include "webrtc/system_wrappers/include/clock.h"
@@ -34,7 +35,8 @@ constexpr int kMaxFramesHistory = 50;
FrameBuffer::FrameBuffer(Clock* clock,
VCMJitterEstimator* jitter_estimator,
VCMTiming* timing)
VCMTiming* timing,
VCMReceiveStatisticsCallback* stats_callback)
: clock_(clock),
new_countinuous_frame_event_(false, false),
jitter_estimator_(jitter_estimator),
@@ -45,11 +47,10 @@ FrameBuffer::FrameBuffer(Clock* clock,
num_frames_history_(0),
num_frames_buffered_(0),
stopped_(false),
protection_mode_(kProtectionNack) {}
protection_mode_(kProtectionNack),
stats_callback_(stats_callback) {}

FrameBuffer::~FrameBuffer() {
UpdateHistograms();
}
FrameBuffer::~FrameBuffer() {}

FrameBuffer::ReturnReason FrameBuffer::NextFrame(
int64_t max_wait_time_ms,
@@ -162,9 +163,8 @@ int FrameBuffer::InsertFrame(std::unique_ptr<FrameObject> frame) {
rtc::CritScope lock(&crit_);
RTC_DCHECK(frame);

++num_total_frames_;
if (frame->num_references == 0)
++num_key_frames_;
if (stats_callback_)
stats_callback_->OnCompleteFrame(frame->num_references == 0, frame->size());

FrameKey key(frame->picture_id, frame->spatial_layer);
int last_continuous_picture_id =
@@ -365,28 +365,22 @@ bool FrameBuffer::UpdateFrameInfoWithIncomingFrame(const FrameObject& frame,
}

void FrameBuffer::UpdateJitterDelay() {
int unused;
int delay;
timing_->GetTimings(&unused, &unused, &unused, &unused, &delay, &unused,
&unused);
if (!stats_callback_)
return;

accumulated_delay_ += delay;
++accumulated_delay_samples_;
}

void FrameBuffer::UpdateHistograms() const {
rtc::CritScope lock(&crit_);
if (num_total_frames_ > 0) {
int key_frames_permille = (static_cast<float>(num_key_frames_) * 1000.0f /
static_cast<float>(num_total_frames_) +
0.5f);
RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.KeyFramesReceivedInPermille",
key_frames_permille);
}

if (accumulated_delay_samples_ > 0) {
RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.JitterBufferDelayInMs",
accumulated_delay_ / accumulated_delay_samples_);
int decode_ms;
int max_decode_ms;
int current_delay_ms;
int target_delay_ms;
int jitter_buffer_ms;
int min_playout_delay_ms;
int render_delay_ms;
if (timing_->GetTimings(&decode_ms, &max_decode_ms, &current_delay_ms,
&target_delay_ms, &jitter_buffer_ms,
&min_playout_delay_ms, &render_delay_ms)) {
stats_callback_->OnFrameBufferTimingsUpdated(
decode_ms, max_decode_ms, current_delay_ms, target_delay_ms,
jitter_buffer_ms, min_playout_delay_ms, render_delay_ms);
}
}
@@ -28,6 +28,7 @@
namespace webrtc {

class Clock;
class VCMReceiveStatisticsCallback;
class VCMJitterEstimator;
class VCMTiming;
@@ -39,7 +40,8 @@ class FrameBuffer {
FrameBuffer(Clock* clock,
VCMJitterEstimator* jitter_estimator,
VCMTiming* timing);
VCMTiming* timing,
VCMReceiveStatisticsCallback* stats_proxy);

virtual ~FrameBuffer();
@@ -141,8 +143,6 @@ class FrameBuffer {
void UpdateJitterDelay() EXCLUSIVE_LOCKS_REQUIRED(crit_);

void UpdateHistograms() const;

FrameMap frames_ GUARDED_BY(crit_);

rtc::CriticalSection crit_;
@@ -157,16 +157,9 @@ class FrameBuffer {
int num_frames_buffered_ GUARDED_BY(crit_);
bool stopped_ GUARDED_BY(crit_);
VCMVideoProtection protection_mode_ GUARDED_BY(crit_);
VCMReceiveStatisticsCallback* const stats_callback_;

RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(FrameBuffer);

// For WebRTC.Video.JitterBufferDelayInMs metric.
int64_t accumulated_delay_ = 0;
int64_t accumulated_delay_samples_ = 0;

// For WebRTC.Video.KeyFramesReceivedInPermille metric.
int64_t num_total_frames_ = 0;
int64_t num_key_frames_ = 0;
};

} // namespace video_coding
@@ -25,6 +25,9 @@
#include "webrtc/test/gmock.h"
#include "webrtc/test/gtest.h"

using testing::_;
using testing::Return;

namespace webrtc {
namespace video_coding {
@@ -54,6 +57,16 @@ class VCMTimingFake : public VCMTiming {
return std::max<int>(0, render_time_ms - now_ms - kDecodeTime);
}

bool GetTimings(int* decode_ms,
int* max_decode_ms,
int* current_delay_ms,
int* target_delay_ms,
int* jitter_buffer_ms,
int* min_playout_delay_ms,
int* render_delay_ms) const override {
return true;
}

private:
static constexpr int kDelayMs = 50;
static constexpr int kDecodeTime = kDelayMs / 2;
@@ -82,6 +95,27 @@ class FrameObjectFake : public FrameObject {
int64_t ReceivedTime() const override { return 0; }

int64_t RenderTime() const override { return _renderTimeMs; }

// In EncodedImage |_length| is used to describe its size and |_size| to
// describe its capacity.
void SetSize(int size) { _length = size; }
};

class VCMReceiveStatisticsCallbackMock : public VCMReceiveStatisticsCallback {
public:
MOCK_METHOD2(OnReceiveRatesUpdated,
void(uint32_t bitRate, uint32_t frameRate));
MOCK_METHOD2(OnCompleteFrame, void(bool is_keyframe, size_t size_bytes));
MOCK_METHOD1(OnDiscardedPacketsUpdated, void(int discarded_packets));
MOCK_METHOD1(OnFrameCountsUpdated, void(const FrameCounts& frame_counts));
MOCK_METHOD7(OnFrameBufferTimingsUpdated,
void(int decode_ms,
int max_decode_ms,
int current_delay_ms,
int target_delay_ms,
int jitter_buffer_ms,
int min_playout_delay_ms,
int render_delay_ms));
};

class TestFrameBuffer2 : public ::testing::Test {
@@ -95,7 +129,7 @@ class TestFrameBuffer2 : public ::testing::Test {
: clock_(0),
timing_(&clock_),
jitter_estimator_(&clock_),
buffer_(&clock_, &jitter_estimator_, &timing_),
buffer_(&clock_, &jitter_estimator_, &timing_, &stats_callback_),
rand_(0x34678213),
tear_down_(false),
extract_thread_(&ExtractLoop, this, "Extract Thread"),
@@ -190,6 +224,7 @@ class TestFrameBuffer2 : public ::testing::Test {
FrameBuffer buffer_;
std::vector<std::unique_ptr<FrameObject>> frames_;
Random rand_;
::testing::NiceMock<VCMReceiveStatisticsCallbackMock> stats_callback_;

int64_t max_wait_time_;
bool tear_down_;
@@ -419,5 +454,30 @@ TEST_F(TestFrameBuffer2, LastContinuousFrameTwoLayers) {
EXPECT_EQ(pid + 3, InsertFrame(pid + 3, 1, ts, true, pid + 2));
}

TEST_F(TestFrameBuffer2, StatsCallback) {
uint16_t pid = Rand();
uint32_t ts = Rand();
const int kFrameSize = 5000;

EXPECT_CALL(stats_callback_, OnCompleteFrame(true, kFrameSize));
EXPECT_CALL(stats_callback_,
OnFrameBufferTimingsUpdated(_, _, _, _, _, _, _));

{
std::unique_ptr<FrameObjectFake> frame(new FrameObjectFake());
frame->SetSize(kFrameSize);
frame->picture_id = pid;
frame->spatial_layer = 0;
frame->timestamp = ts;
frame->num_references = 0;
frame->inter_layer_predicted = false;

EXPECT_EQ(buffer_.InsertFrame(std::move(frame)), pid);
}

ExtractFrame();
CheckFrame(0, pid, 0);
}

} // namespace video_coding
} // namespace webrtc
@@ -90,8 +90,16 @@ class VCMSendStatisticsCallback {
class VCMReceiveStatisticsCallback {
public:
virtual void OnReceiveRatesUpdated(uint32_t bitRate, uint32_t frameRate) = 0;
virtual void OnCompleteFrame(bool is_keyframe, size_t size_bytes) = 0;
virtual void OnDiscardedPacketsUpdated(int discarded_packets) = 0;
virtual void OnFrameCountsUpdated(const FrameCounts& frame_counts) = 0;
virtual void OnFrameBufferTimingsUpdated(int decode_ms,
int max_decode_ms,
int current_delay_ms,
int target_delay_ms,
int jitter_buffer_ms,
int min_playout_delay_ms,
int render_delay_ms) = 0;

protected:
virtual ~VCMReceiveStatisticsCallback() {}
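The expanded VCMReceiveStatisticsCallback interface above gains OnCompleteFrame() and an RTT-free OnFrameBufferTimingsUpdated(). For an implementer that only cares about some of the notifications, no-op overrides are enough; a minimal hypothetical sketch (assuming the declarations from this header) could look like the following. The real implementers in this CL are ReceiveStatisticsProxy and VideoStreamDecoder, which stubs several of these methods out.

// Hypothetical null implementation of the expanded callback interface,
// shown only for illustration; it is not part of the CL.
class NullReceiveStatisticsCallback : public VCMReceiveStatisticsCallback {
 public:
  void OnReceiveRatesUpdated(uint32_t bitRate, uint32_t frameRate) override {}
  void OnCompleteFrame(bool is_keyframe, size_t size_bytes) override {}
  void OnDiscardedPacketsUpdated(int discarded_packets) override {}
  void OnFrameCountsUpdated(const FrameCounts& frame_counts) override {}
  void OnFrameBufferTimingsUpdated(int decode_ms,
                                   int max_decode_ms,
                                   int current_delay_ms,
                                   int target_delay_ms,
                                   int jitter_buffer_ms,
                                   int min_playout_delay_ms,
                                   int render_delay_ms) override {}
};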
@@ -94,13 +94,13 @@ class VCMTiming {

// Return current timing information. Returns true if the first frame has been
// decoded, false otherwise.
bool GetTimings(int* decode_ms,
int* max_decode_ms,
int* current_delay_ms,
int* target_delay_ms,
int* jitter_buffer_ms,
int* min_playout_delay_ms,
int* render_delay_ms) const;
virtual bool GetTimings(int* decode_ms,
int* max_decode_ms,
int* current_delay_ms,
int* target_delay_ms,
int* jitter_buffer_ms,
int* min_playout_delay_ms,
int* render_delay_ms) const;

enum { kDefaultRenderDelayMs = 10 };
enum { kDelayMaxChangeMsPerS = 100 };
@@ -56,31 +56,14 @@ VideoReceiver::~VideoReceiver() {}

void VideoReceiver::Process() {
// Receive-side statistics

// TODO(philipel): Remove this if block when we know what to do with
// ReceiveStatisticsProxy::QualitySample.
if (_receiveStatsTimer.TimeUntilProcess() == 0) {
_receiveStatsTimer.Processed();
rtc::CritScope cs(&process_crit_);
if (_receiveStatsCallback != nullptr) {
uint32_t bitRate;
uint32_t frameRate;
_receiver.ReceiveStatistics(&bitRate, &frameRate);
_receiveStatsCallback->OnReceiveRatesUpdated(bitRate, frameRate);
}

if (_decoderTimingCallback != nullptr) {
int decode_ms;
int max_decode_ms;
int current_delay_ms;
int target_delay_ms;
int jitter_buffer_ms;
int min_playout_delay_ms;
int render_delay_ms;
if (_timing->GetTimings(&decode_ms, &max_decode_ms, &current_delay_ms,
&target_delay_ms, &jitter_buffer_ms,
&min_playout_delay_ms, &render_delay_ms)) {
_decoderTimingCallback->OnDecoderTiming(
decode_ms, max_decode_ms, current_delay_ms, target_delay_ms,
jitter_buffer_ms, min_playout_delay_ms, render_delay_ms);
}
_receiveStatsCallback->OnReceiveRatesUpdated(0, 0);
}
}

@@ -292,7 +275,7 @@ int32_t VideoReceiver::Decode(uint16_t maxWaitTimeMs) {
return ret;
}

// Used for the WebRTC-NewVideoJitterBuffer experiment.
// Used for the new jitter buffer.
// TODO(philipel): Clean up among the Decode functions as we replace
// VCMEncodedFrame with FrameObject.
int32_t VideoReceiver::Decode(const webrtc::VCMEncodedFrame* frame) {

@@ -1227,9 +1227,6 @@ TEST_P(EndToEndTest, ReceivesPliAndRecoversWithNack) {
}

TEST_P(EndToEndTest, ReceivesPliAndRecoversWithoutNack) {
// This test makes no sense for the new video jitter buffer.
if (GetParam() == new_jb_enabled)
return;
ReceivesPliAndRecovers(0);
}

@@ -3029,10 +3026,6 @@ TEST_P(EndToEndTest, GetStats) {
ReceiveStreamRenderer receive_stream_renderer_;
} test;

// TODO(philipel): Implement statistics for the new video jitter buffer.
if (GetParam() == new_jb_enabled)
return;

RunBaseTest(&test);
}
@@ -10,7 +10,9 @@

#include "webrtc/video/receive_statistics_proxy.h"

#include <algorithm>
#include <cmath>
#include <utility>

#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
@@ -40,6 +42,9 @@ const int kLowQpThresholdVp8 = 60;
const int kHighQpThresholdVp8 = 70;
const int kLowVarianceThreshold = 1;
const int kHighVarianceThreshold = 2;

// How large window we use to calculate the framerate/bitrate.
const int kRateStatisticsWindowSizeMs = 1000;
} // namespace

ReceiveStatisticsProxy::ReceiveStatisticsProxy(
@@ -69,7 +74,9 @@ ReceiveStatisticsProxy::ReceiveStatisticsProxy(
render_fps_tracker_(100, 10u),
render_pixel_tracker_(100, 10u),
freq_offset_counter_(clock, nullptr, kFreqOffsetProcessIntervalMs),
first_report_block_time_ms_(-1) {
first_report_block_time_ms_(-1),
avg_rtt_ms_(0),
frame_window_accumulated_bytes_(0) {
stats_.ssrc = config_.rtp.remote_ssrc;
for (auto it : config_.rtp.rtx)
rtx_stats_[it.second.ssrc] = StreamDataCounters();
@@ -121,6 +128,17 @@ void ReceiveStatisticsProxy::UpdateHistograms() {
<< freq_offset_stats.ToString();
}

if (stats_.frame_counts.key_frames > 0 ||
stats_.frame_counts.delta_frames > 0) {
float num_key_frames = stats_.frame_counts.key_frames;
float num_total_frames =
stats_.frame_counts.key_frames + stats_.frame_counts.delta_frames;
int key_frames_permille =
(num_key_frames * 1000.0f / num_total_frames + 0.5f);
RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.KeyFramesReceivedInPermille",
key_frames_permille);
}

int qp = qp_counters_.vp8.Avg(kMinRequiredSamples);
if (qp != -1)
RTC_HISTOGRAM_COUNTS_200("WebRTC.Video.Decoded.Vp8.Qp", qp);
@@ -132,15 +150,12 @@ void ReceiveStatisticsProxy::UpdateHistograms() {
if (decode_ms != -1)
RTC_HISTOGRAM_COUNTS_1000("WebRTC.Video.DecodeTimeInMs", decode_ms);

if (field_trial::FindFullName("WebRTC-NewVideoJitterBuffer") !=
"Enabled") {
int jb_delay_ms =
jitter_buffer_delay_counter_.Avg(kMinRequiredDecodeSamples);
if (jb_delay_ms != -1) {
RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.JitterBufferDelayInMs",
jb_delay_ms);
}
int jb_delay_ms = jitter_buffer_delay_counter_.Avg(kMinRequiredDecodeSamples);
if (jb_delay_ms != -1) {
RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.JitterBufferDelayInMs",
jb_delay_ms);
}

int target_delay_ms = target_delay_counter_.Avg(kMinRequiredDecodeSamples);
if (target_delay_ms != -1) {
RTC_HISTOGRAM_COUNTS_10000("WebRTC.Video.TargetDelayInMs", target_delay_ms);
@@ -297,8 +312,25 @@ void ReceiveStatisticsProxy::QualitySample() {
}
}

void ReceiveStatisticsProxy::UpdateFrameAndBitrate(int64_t now_ms) const {
int64_t old_frames_ms = now_ms - kRateStatisticsWindowSizeMs;
while (!frame_window_.empty() &&
frame_window_.begin()->first < old_frames_ms) {
frame_window_accumulated_bytes_ -= frame_window_.begin()->second;
frame_window_.erase(frame_window_.begin());
}

size_t framerate =
(frame_window_.size() * 1000 + 500) / kRateStatisticsWindowSizeMs;
size_t bitrate_bps =
frame_window_accumulated_bytes_ * 8000 / kRateStatisticsWindowSizeMs;
stats_.network_frame_rate = static_cast<int>(framerate);
stats_.total_bitrate_bps = static_cast<int>(bitrate_bps);
}

VideoReceiveStream::Stats ReceiveStatisticsProxy::GetStats() const {
rtc::CritScope lock(&crit_);
UpdateFrameAndBitrate(clock_->TimeInMilliseconds());
return stats_;
}
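Since OnIncomingRate() no longer drives the receive-side rates, network_frame_rate and total_bitrate_bps are derived from the complete frames reported in the last kRateStatisticsWindowSizeMs (1000 ms). The self-contained sketch below (hypothetical names, not part of the CL) reproduces that window arithmetic; with a single 1000-byte frame in the window it yields 1 fps and 8000 bps, matching the GetStatsReportsOnCompleteFrame unit test further down in this diff.

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <map>

// Hypothetical standalone version of the 1-second sliding window used by
// ReceiveStatisticsProxy::UpdateFrameAndBitrate() in this CL.
constexpr int64_t kWindowMs = 1000;

void WindowRates(std::map<int64_t, size_t>* frames,  // receive time -> bytes
                 size_t* accumulated_bytes,
                 int64_t now_ms,
                 int* framerate_fps,
                 int* bitrate_bps) {
  // Drop frames that fell out of the window and their byte contribution.
  const int64_t old_frames_ms = now_ms - kWindowMs;
  while (!frames->empty() && frames->begin()->first < old_frames_ms) {
    *accumulated_bytes -= frames->begin()->second;
    frames->erase(frames->begin());
  }
  // Round the frame count to fps over the window; bytes * 8000 / window -> bps.
  *framerate_fps = static_cast<int>((frames->size() * 1000 + 500) / kWindowMs);
  *bitrate_bps = static_cast<int>(*accumulated_bytes * 8000 / kWindowMs);
}

int main() {
  std::map<int64_t, size_t> frames;
  size_t bytes = 0;
  // One complete 1000-byte key frame received at t = 0, queried at t = 0.
  frames.insert({0, 1000});
  bytes += 1000;
  int fps, bps;
  WindowRates(&frames, &bytes, 0, &fps, &bps);
  printf("%d fps, %d bps\n", fps, bps);  // 1 fps, 8000 bps.
  return 0;
}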
@@ -317,18 +349,16 @@ void ReceiveStatisticsProxy::OnIncomingRate(unsigned int framerate,
rtc::CritScope lock(&crit_);
if (stats_.rtp_stats.first_packet_time_ms != -1)
QualitySample();
stats_.network_frame_rate = framerate;
stats_.total_bitrate_bps = bitrate_bps;
}

void ReceiveStatisticsProxy::OnDecoderTiming(int decode_ms,
int max_decode_ms,
int current_delay_ms,
int target_delay_ms,
int jitter_buffer_ms,
int min_playout_delay_ms,
int render_delay_ms,
int64_t rtt_ms) {
void ReceiveStatisticsProxy::OnFrameBufferTimingsUpdated(
int decode_ms,
int max_decode_ms,
int current_delay_ms,
int target_delay_ms,
int jitter_buffer_ms,
int min_playout_delay_ms,
int render_delay_ms) {
rtc::CritScope lock(&crit_);
stats_.decode_ms = decode_ms;
stats_.max_decode_ms = max_decode_ms;
@@ -343,7 +373,7 @@ void ReceiveStatisticsProxy::OnDecoderTiming(int decode_ms,
current_delay_counter_.Add(current_delay_ms);
// Network delay (rtt/2) + target_delay_ms (jitter delay + decode time +
// render delay).
delay_counter_.Add(target_delay_ms + rtt_ms / 2);
delay_counter_.Add(target_delay_ms + avg_rtt_ms_ / 2);
}

void ReceiveStatisticsProxy::RtcpPacketTypesCounterUpdated(
@@ -447,6 +477,20 @@ void ReceiveStatisticsProxy::OnReceiveRatesUpdated(uint32_t bitRate,
uint32_t frameRate) {
}

void ReceiveStatisticsProxy::OnCompleteFrame(bool is_keyframe,
size_t size_bytes) {
rtc::CritScope lock(&crit_);
if (is_keyframe)
++stats_.frame_counts.key_frames;
else
++stats_.frame_counts.delta_frames;

int64_t now_ms = clock_->TimeInMilliseconds();
frame_window_accumulated_bytes_ += size_bytes;
frame_window_.insert(std::make_pair(now_ms, size_bytes));
UpdateFrameAndBitrate(now_ms);
}

void ReceiveStatisticsProxy::OnFrameCountsUpdated(
const FrameCounts& frame_counts) {
rtc::CritScope lock(&crit_);
@@ -488,4 +532,10 @@ void ReceiveStatisticsProxy::SampleCounter::Reset() {
sum = 0;
}

void ReceiveStatisticsProxy::OnRttUpdate(int64_t avg_rtt_ms,
int64_t max_rtt_ms) {
rtc::CritScope lock(&crit_);
avg_rtt_ms_ = avg_rtt_ms;
}

} // namespace webrtc
@@ -37,7 +37,8 @@ struct CodecSpecificInfo;
class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback,
public RtcpStatisticsCallback,
public RtcpPacketTypeCounterObserver,
public StreamDataCountersCallback {
public StreamDataCountersCallback,
public CallStatsObserver {
public:
ReceiveStatisticsProxy(const VideoReceiveStream::Config* config,
Clock* clock);
@@ -51,14 +52,6 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback,
void OnIncomingPayloadType(int payload_type);
void OnDecoderImplementationName(const char* implementation_name);
void OnIncomingRate(unsigned int framerate, unsigned int bitrate_bps);
void OnDecoderTiming(int decode_ms,
int max_decode_ms,
int current_delay_ms,
int target_delay_ms,
int jitter_buffer_ms,
int min_playout_delay_ms,
int render_delay_ms,
int64_t rtt_ms);

void OnPreDecode(const EncodedImage& encoded_image,
const CodecSpecificInfo* codec_specific_info);
@@ -67,6 +60,14 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback,
void OnReceiveRatesUpdated(uint32_t bitRate, uint32_t frameRate) override;
void OnFrameCountsUpdated(const FrameCounts& frame_counts) override;
void OnDiscardedPacketsUpdated(int discarded_packets) override;
void OnCompleteFrame(bool is_keyframe, size_t size_bytes) override;
void OnFrameBufferTimingsUpdated(int decode_ms,
int max_decode_ms,
int current_delay_ms,
int target_delay_ms,
int jitter_buffer_ms,
int min_playout_delay_ms,
int render_delay_ms) override;

// Overrides RtcpStatisticsCallback.
void StatisticsUpdated(const webrtc::RtcpStatistics& statistics,
@@ -81,6 +82,9 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback,
void DataCountersUpdated(const webrtc::StreamDataCounters& counters,
uint32_t ssrc) override;

// Implements CallStatsObserver.
void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) override;

private:
struct SampleCounter {
SampleCounter() : sum(0), num_samples(0) {}
@@ -100,6 +104,10 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback,

void QualitySample() EXCLUSIVE_LOCKS_REQUIRED(crit_);

// Removes info about old frames and then updates the framerate/bitrate.
void UpdateFrameAndBitrate(int64_t now_ms) const
EXCLUSIVE_LOCKS_REQUIRED(crit_);

Clock* const clock_;
// Ownership of this object lies with the owner of the ReceiveStatisticsProxy
// instance. Lifetime is guaranteed to outlive |this|.
@@ -119,7 +127,7 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback,
SampleCounter qp_sample_ GUARDED_BY(crit_);
int num_bad_states_ GUARDED_BY(crit_);
int num_certain_states_ GUARDED_BY(crit_);
VideoReceiveStream::Stats stats_ GUARDED_BY(crit_);
mutable VideoReceiveStream::Stats stats_ GUARDED_BY(crit_);
RateStatistics decode_fps_estimator_ GUARDED_BY(crit_);
RateStatistics renders_fps_estimator_ GUARDED_BY(crit_);
rtc::RateTracker render_fps_tracker_ GUARDED_BY(crit_);
@@ -138,6 +146,9 @@ class ReceiveStatisticsProxy : public VCMReceiveStatisticsCallback,
ReportBlockStats report_block_stats_ GUARDED_BY(crit_);
QpCounters qp_counters_; // Only accessed on the decoding thread.
std::map<uint32_t, StreamDataCounters> rtx_stats_ GUARDED_BY(crit_);
int64_t avg_rtt_ms_ GUARDED_BY(crit_);
mutable std::map<int64_t, size_t> frame_window_ GUARDED_BY(&crit_);
mutable size_t frame_window_accumulated_bytes_ GUARDED_BY(&crit_);
};

} // namespace webrtc
@@ -74,12 +74,14 @@ TEST_F(ReceiveStatisticsProxyTest, GetStatsReportsDecoderImplementationName) {
kName, statistics_proxy_->GetStats().decoder_implementation_name.c_str());
}

TEST_F(ReceiveStatisticsProxyTest, GetStatsReportsIncomingRate) {
const int kFramerate = 28;
const int kBitrateBps = 311000;
statistics_proxy_->OnIncomingRate(kFramerate, kBitrateBps);
EXPECT_EQ(kFramerate, statistics_proxy_->GetStats().network_frame_rate);
EXPECT_EQ(kBitrateBps, statistics_proxy_->GetStats().total_bitrate_bps);
TEST_F(ReceiveStatisticsProxyTest, GetStatsReportsOnCompleteFrame) {
const int kFrameSizeBytes = 1000;
statistics_proxy_->OnCompleteFrame(true, kFrameSizeBytes);
VideoReceiveStream::Stats stats = statistics_proxy_->GetStats();
EXPECT_EQ(1, stats.network_frame_rate);
EXPECT_EQ(kFrameSizeBytes * 8, stats.total_bitrate_bps);
EXPECT_EQ(1, stats.frame_counts.key_frames);
EXPECT_EQ(0, stats.frame_counts.delta_frames);
}

TEST_F(ReceiveStatisticsProxyTest, GetStatsReportsDecodeTimingStats) {
@@ -91,9 +93,10 @@ TEST_F(ReceiveStatisticsProxyTest, GetStatsReportsDecodeTimingStats) {
const int kMinPlayoutDelayMs = 6;
const int kRenderDelayMs = 7;
const int64_t kRttMs = 8;
statistics_proxy_->OnDecoderTiming(
statistics_proxy_->OnRttUpdate(kRttMs, 0);
statistics_proxy_->OnFrameBufferTimingsUpdated(
kDecodeMs, kMaxDecodeMs, kCurrentDelayMs, kTargetDelayMs, kJitterBufferMs,
kMinPlayoutDelayMs, kRenderDelayMs, kRttMs);
kMinPlayoutDelayMs, kRenderDelayMs);
VideoReceiveStream::Stats stats = statistics_proxy_->GetStats();
EXPECT_EQ(kDecodeMs, stats.decode_ms);
EXPECT_EQ(kMaxDecodeMs, stats.max_decode_ms);
@@ -199,25 +199,21 @@ RtpStreamReceiver::RtpStreamReceiver(
process_thread_->RegisterModule(rtp_rtcp_.get());

jitter_buffer_experiment_ =
field_trial::FindFullName("WebRTC-NewVideoJitterBuffer") == "Enabled";
nack_module_.reset(
new NackModule(clock_, nack_sender, keyframe_request_sender));
if (config_.rtp.nack.rtp_history_ms == 0)
nack_module_->Stop();
process_thread_->RegisterModule(nack_module_.get());

if (jitter_buffer_experiment_) {
nack_module_.reset(
new NackModule(clock_, nack_sender, keyframe_request_sender));
process_thread_->RegisterModule(nack_module_.get());

packet_buffer_ = video_coding::PacketBuffer::Create(
clock_, kPacketBufferStartSize, kPacketBufferMaxSixe, this);
reference_finder_.reset(new video_coding::RtpFrameReferenceFinder(this));
}
packet_buffer_ = video_coding::PacketBuffer::Create(
clock_, kPacketBufferStartSize, kPacketBufferMaxSixe, this);
reference_finder_.reset(new video_coding::RtpFrameReferenceFinder(this));
}

RtpStreamReceiver::~RtpStreamReceiver() {
process_thread_->DeRegisterModule(rtp_rtcp_.get());

if (jitter_buffer_experiment_)
process_thread_->DeRegisterModule(nack_module_.get());
process_thread_->DeRegisterModule(nack_module_.get());

packet_router_->RemoveRtpModule(rtp_rtcp_.get());
rtp_rtcp_->SetREMBStatus(false);
@@ -261,43 +257,35 @@ int32_t RtpStreamReceiver::OnReceivedPayloadData(
WebRtcRTPHeader rtp_header_with_ntp = *rtp_header;
rtp_header_with_ntp.ntp_time_ms =
ntp_estimator_.Estimate(rtp_header->header.timestamp);
if (jitter_buffer_experiment_) {
VCMPacket packet(payload_data, payload_size, rtp_header_with_ntp);
timing_->IncomingTimestamp(packet.timestamp, clock_->TimeInMilliseconds());
packet.timesNacked = nack_module_->OnReceivedPacket(packet);
VCMPacket packet(payload_data, payload_size, rtp_header_with_ntp);
timing_->IncomingTimestamp(packet.timestamp, clock_->TimeInMilliseconds());
packet.timesNacked = nack_module_->OnReceivedPacket(packet);

if (packet.codec == kVideoCodecH264) {
// Only when we start to receive packets will we know what payload type
// that will be used. When we know the payload type insert the correct
// sps/pps into the tracker.
if (packet.payloadType != last_payload_type_) {
last_payload_type_ = packet.payloadType;
InsertSpsPpsIntoTracker(packet.payloadType);
}

switch (tracker_.CopyAndFixBitstream(&packet)) {
case video_coding::H264SpsPpsTracker::kRequestKeyframe:
keyframe_request_sender_->RequestKeyFrame();
FALLTHROUGH();
case video_coding::H264SpsPpsTracker::kDrop:
return 0;
case video_coding::H264SpsPpsTracker::kInsert:
break;
}
} else {
uint8_t* data = new uint8_t[packet.sizeBytes];
memcpy(data, packet.dataPtr, packet.sizeBytes);
packet.dataPtr = data;
if (packet.codec == kVideoCodecH264) {
// Only when we start to receive packets will we know what payload type
// that will be used. When we know the payload type insert the correct
// sps/pps into the tracker.
if (packet.payloadType != last_payload_type_) {
last_payload_type_ = packet.payloadType;
InsertSpsPpsIntoTracker(packet.payloadType);
}

packet_buffer_->InsertPacket(&packet);
switch (tracker_.CopyAndFixBitstream(&packet)) {
case video_coding::H264SpsPpsTracker::kRequestKeyframe:
keyframe_request_sender_->RequestKeyFrame();
FALLTHROUGH();
case video_coding::H264SpsPpsTracker::kDrop:
return 0;
case video_coding::H264SpsPpsTracker::kInsert:
break;
}
} else {
if (video_receiver_->IncomingPacket(payload_data, payload_size,
rtp_header_with_ntp) != 0) {
// Check this...
return -1;
}
uint8_t* data = new uint8_t[packet.sizeBytes];
memcpy(data, packet.dataPtr, packet.sizeBytes);
packet.dataPtr = data;
}

packet_buffer_->InsertPacket(&packet);
return 0;
}

@@ -434,8 +422,7 @@ void RtpStreamReceiver::OnCompleteFrame(
}

void RtpStreamReceiver::OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) {
if (jitter_buffer_experiment_)
nack_module_->UpdateRtt(max_rtt_ms);
nack_module_->UpdateRtt(max_rtt_ms);
}

bool RtpStreamReceiver::ReceivePacket(const uint8_t* packet,
@@ -563,36 +550,32 @@ bool RtpStreamReceiver::DeliverRtcp(const uint8_t* rtcp_packet,
}

void RtpStreamReceiver::FrameContinuous(uint16_t picture_id) {
if (jitter_buffer_experiment_) {
int seq_num = -1;
{
rtc::CritScope lock(&last_seq_num_cs_);
auto seq_num_it = last_seq_num_for_pic_id_.find(picture_id);
if (seq_num_it != last_seq_num_for_pic_id_.end())
seq_num = seq_num_it->second;
}
if (seq_num != -1)
nack_module_->ClearUpTo(seq_num);
int seq_num = -1;
{
rtc::CritScope lock(&last_seq_num_cs_);
auto seq_num_it = last_seq_num_for_pic_id_.find(picture_id);
if (seq_num_it != last_seq_num_for_pic_id_.end())
seq_num = seq_num_it->second;
}
if (seq_num != -1)
nack_module_->ClearUpTo(seq_num);
}

void RtpStreamReceiver::FrameDecoded(uint16_t picture_id) {
if (jitter_buffer_experiment_) {
int seq_num = -1;
{
rtc::CritScope lock(&last_seq_num_cs_);
auto seq_num_it = last_seq_num_for_pic_id_.find(picture_id);
if (seq_num_it != last_seq_num_for_pic_id_.end()) {
seq_num = seq_num_it->second;
last_seq_num_for_pic_id_.erase(last_seq_num_for_pic_id_.begin(),
++seq_num_it);
}
}
if (seq_num != -1) {
packet_buffer_->ClearTo(seq_num);
reference_finder_->ClearTo(seq_num);
int seq_num = -1;
{
rtc::CritScope lock(&last_seq_num_cs_);
auto seq_num_it = last_seq_num_for_pic_id_.find(picture_id);
if (seq_num_it != last_seq_num_for_pic_id_.end()) {
seq_num = seq_num_it->second;
last_seq_num_for_pic_id_.erase(last_seq_num_for_pic_id_.begin(),
++seq_num_it);
}
}
if (seq_num != -1) {
packet_buffer_->ClearTo(seq_num);
reference_finder_->ClearTo(seq_num);
}
}

void RtpStreamReceiver::SignalNetworkState(NetworkState state) {

@@ -189,7 +189,6 @@ class RtpStreamReceiver : public RtpData,
const std::unique_ptr<RtpRtcp> rtp_rtcp_;

// Members for the new jitter buffer experiment.
bool jitter_buffer_experiment_;
video_coding::OnCompleteFrameCallback* complete_frame_callback_;
KeyFrameRequestSender* keyframe_request_sender_;
VCMTiming* timing_;
@@ -223,10 +223,7 @@ VideoReceiveStream::VideoReceiveStream(
this, // KeyFrameRequestSender
this, // OnCompleteFrameCallback
timing_.get()),
rtp_stream_sync_(&video_receiver_, &rtp_stream_receiver_),
jitter_buffer_experiment_(
field_trial::FindFullName("WebRTC-NewVideoJitterBuffer") ==
"Enabled") {
rtp_stream_sync_(&video_receiver_, &rtp_stream_receiver_) {
LOG(LS_INFO) << "VideoReceiveStream: " << config_.ToString();

RTC_DCHECK(process_thread_);
@@ -246,11 +243,9 @@ VideoReceiveStream::VideoReceiveStream(

video_receiver_.SetRenderDelay(config.render_delay_ms);

if (jitter_buffer_experiment_) {
jitter_estimator_.reset(new VCMJitterEstimator(clock_));
frame_buffer_.reset(new video_coding::FrameBuffer(
clock_, jitter_estimator_.get(), timing_.get()));
}
jitter_estimator_.reset(new VCMJitterEstimator(clock_));
frame_buffer_.reset(new video_coding::FrameBuffer(
clock_, jitter_estimator_.get(), timing_.get(), &stats_proxy_));

process_thread_->RegisterModule(&video_receiver_);
process_thread_->RegisterModule(&rtp_stream_sync_);
@@ -290,15 +285,15 @@ bool VideoReceiveStream::OnRecoveredPacket(const uint8_t* packet,
void VideoReceiveStream::Start() {
if (decode_thread_.IsRunning())
return;
if (jitter_buffer_experiment_) {
frame_buffer_->Start();
call_stats_->RegisterStatsObserver(&rtp_stream_receiver_);

if (rtp_stream_receiver_.IsRetransmissionsEnabled() &&
rtp_stream_receiver_.IsUlpfecEnabled()) {
frame_buffer_->SetProtectionMode(kProtectionNackFEC);
}
frame_buffer_->Start();
call_stats_->RegisterStatsObserver(&rtp_stream_receiver_);

if (rtp_stream_receiver_.IsRetransmissionsEnabled() &&
rtp_stream_receiver_.IsUlpfecEnabled()) {
frame_buffer_->SetProtectionMode(kProtectionNackFEC);
}

transport_adapter_.Enable();
rtc::VideoSinkInterface<VideoFrame>* renderer = nullptr;
if (config_.renderer) {
@@ -343,10 +338,8 @@ void VideoReceiveStream::Stop() {
// before joining the decoder thread.
video_receiver_.TriggerDecoderShutdown();

if (jitter_buffer_experiment_) {
frame_buffer_->Stop();
call_stats_->DeregisterStatsObserver(&rtp_stream_receiver_);
}
frame_buffer_->Stop();
call_stats_->DeregisterStatsObserver(&rtp_stream_receiver_);

if (decode_thread_.IsRunning()) {
decode_thread_.Stop();
@@ -442,26 +435,21 @@ bool VideoReceiveStream::DecodeThreadFunction(void* ptr) {
}

void VideoReceiveStream::Decode() {
static const int kMaxDecodeWaitTimeMs = 50;
if (jitter_buffer_experiment_) {
static const int kMaxWaitForFrameMs = 3000;
std::unique_ptr<video_coding::FrameObject> frame;
video_coding::FrameBuffer::ReturnReason res =
frame_buffer_->NextFrame(kMaxWaitForFrameMs, &frame);
static const int kMaxWaitForFrameMs = 3000;
std::unique_ptr<video_coding::FrameObject> frame;
video_coding::FrameBuffer::ReturnReason res =
frame_buffer_->NextFrame(kMaxWaitForFrameMs, &frame);

if (res == video_coding::FrameBuffer::ReturnReason::kStopped)
return;
if (res == video_coding::FrameBuffer::ReturnReason::kStopped)
return;

if (frame) {
if (video_receiver_.Decode(frame.get()) == VCM_OK)
rtp_stream_receiver_.FrameDecoded(frame->picture_id);
} else {
LOG(LS_WARNING) << "No decodable frame in " << kMaxWaitForFrameMs
<< " ms, requesting keyframe.";
RequestKeyFrame();
}
if (frame) {
if (video_receiver_.Decode(frame.get()) == VCM_OK)
rtp_stream_receiver_.FrameDecoded(frame->picture_id);
} else {
video_receiver_.Decode(kMaxDecodeWaitTimeMs);
LOG(LS_WARNING) << "No decodable frame in " << kMaxWaitForFrameMs
<< " ms, requesting keyframe.";
RequestKeyFrame();
}
}

@@ -130,7 +130,6 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream,
std::unique_ptr<IvfFileWriter> ivf_writer_ GUARDED_BY(ivf_writer_lock_);

// Members for the new jitter buffer experiment.
const bool jitter_buffer_experiment_;
std::unique_ptr<VCMJitterEstimator> jitter_estimator_;
std::unique_ptr<video_coding::FrameBuffer> frame_buffer_;
};
@@ -122,17 +122,17 @@ void VideoStreamDecoder::OnDecoderTiming(int decode_ms,
int target_delay_ms,
int jitter_buffer_ms,
int min_playout_delay_ms,
int render_delay_ms) {
int last_rtt = -1;
{
rtc::CritScope lock(&crit_);
last_rtt = last_rtt_ms_;
}
int render_delay_ms) {}

receive_stats_callback_->OnDecoderTiming(
decode_ms, max_decode_ms, current_delay_ms, target_delay_ms,
jitter_buffer_ms, min_playout_delay_ms, render_delay_ms, last_rtt);
}
void VideoStreamDecoder::OnFrameBufferTimingsUpdated(int decode_ms,
int max_decode_ms,
int current_delay_ms,
int target_delay_ms,
int jitter_buffer_ms,
int min_playout_delay_ms,
int render_delay_ms) {}

void VideoStreamDecoder::OnCompleteFrame(bool is_keyframe, size_t size_bytes) {}

void VideoStreamDecoder::OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) {
video_receiver_->SetReceiveChannelParameters(max_rtt_ms);

@@ -69,6 +69,14 @@ class VideoStreamDecoder : public VCMReceiveCallback,
void OnReceiveRatesUpdated(uint32_t bit_rate, uint32_t frame_rate) override;
void OnDiscardedPacketsUpdated(int discarded_packets) override;
void OnFrameCountsUpdated(const FrameCounts& frame_counts) override;
void OnCompleteFrame(bool is_keyframe, size_t size_bytes) override;
void OnFrameBufferTimingsUpdated(int decode_ms,
int max_decode_ms,
int current_delay_ms,
int target_delay_ms,
int jitter_buffer_ms,
int min_playout_delay_ms,
int render_delay_ms) override;

// Implements VCMDecoderTimingCallback.
void OnDecoderTiming(int decode_ms,