From ac287ee8b57a487cdea1a2a1f220949f073c5664 Mon Sep 17 00:00:00 2001
From: danilchap
Date: Mon, 29 Feb 2016 12:17:04 -0800
Subject: [PATCH] VideoCaptureInput: enforce that VideoFrame::render_time is
 generated by the webrtc clock.

The render_time field (which, on the sender side, means capture time) is
used by the RTCP SenderReport to calculate the offset since the last frame
and to estimate the RTP timestamp for the time at which the SenderReport
should be sent. The mapping between RTP timestamp and NTP time in the
SenderReport is used for stream synchronization.

The calculation of rtp_timestamp (using the ntp_time of the incoming video
frame) for RTP packets is unchanged.

BUG=webrtc:5433, webrtc:5504, webrtc:5505

Review URL: https://codereview.webrtc.org/1693443002

Cr-Commit-Position: refs/heads/master@{#11820}
---
 webrtc/call/call_perf_tests.cc               |  4 +--
 webrtc/video/video_capture_input.cc          | 33 +++++++++++---------
 webrtc/video/video_capture_input.h           |  2 ++
 webrtc/video/video_capture_input_unittest.cc |  4 +--
 webrtc/video/video_send_stream_tests.cc      |  2 --
 5 files changed, 22 insertions(+), 23 deletions(-)

diff --git a/webrtc/call/call_perf_tests.cc b/webrtc/call/call_perf_tests.cc
index 2bb836aecb..e27b1fefd2 100644
--- a/webrtc/call/call_perf_tests.cc
+++ b/webrtc/call/call_perf_tests.cc
@@ -383,9 +383,7 @@ void CallPerfTest::TestAudioVideoSync(FecMode fec,
   VoiceEngine::Delete(voice_engine);
 }
 
-// TODO(danilchap): Reenable after adding support for frame capture clock
-// that is not in sync with local TickTime clock.
-TEST_F(CallPerfTest, DISABLED_PlaysOutAudioAndVideoInSyncWithVideoNtpDrift) {
+TEST_F(CallPerfTest, PlaysOutAudioAndVideoInSyncWithVideoNtpDrift) {
   TestAudioVideoSync(FecMode::kOff, CreateOrder::kAudioFirst,
                      DriftingClock::PercentsFaster(10.0f),
                      DriftingClock::kNoDrift, DriftingClock::kNoDrift);
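To make the motivation concrete, here is a standalone sketch (illustrative names only, not the WebRTC API) of the timestamp relationship the SenderReport depends on: an NTP-to-local delta is sampled once, every frame's render time comes from the local clock, and the RTP timestamp is derived from the NTP capture time at 90 kHz.

```cpp
// Standalone sketch with hypothetical names; not WebRTC code.
#include <cstdint>
#include <iostream>

// Video RTP timestamps tick at 90 kHz, i.e. 90 ticks per millisecond.
constexpr int kMsToRtpTimestamp = 90;

int main() {
  // Sampled once at startup: offset between NTP wall-clock time and the
  // local monotonic clock (mirrors delta_ntp_internal_ms_ in the patch).
  const int64_t ntp_start_ms = 3665000000000;  // Example NTP time.
  const int64_t local_start_ms = 1000;         // Example local time.
  const int64_t delta_ntp_internal_ms = ntp_start_ms - local_start_ms;

  // A frame captured 33 ms later, stamped with the local clock, maps to a
  // single NTP capture time and from there to an RTP timestamp.
  const int64_t render_time_ms = local_start_ms + 33;
  const int64_t capture_ntp_time_ms = render_time_ms + delta_ntp_internal_ms;
  const uint32_t rtp_timestamp =
      static_cast<uint32_t>(capture_ntp_time_ms * kMsToRtpTimestamp);

  std::cout << "capture_ntp_time_ms=" << capture_ntp_time_ms
            << " rtp_timestamp=" << rtp_timestamp << "\n";
  return 0;
}
```

Because the render time and the delta now come from the same clock, the inter-frame offset the SenderReport computes is free of cross-clock drift, which is what allows the NTP-drift sync test above to be re-enabled.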
diff --git a/webrtc/video/video_capture_input.cc b/webrtc/video/video_capture_input.cc
index dfdf5ae482..5f8015b21b 100644
--- a/webrtc/video/video_capture_input.cc
+++ b/webrtc/video/video_capture_input.cc
@@ -17,8 +17,6 @@
 #include "webrtc/modules/video_capture/video_capture_factory.h"
 #include "webrtc/modules/video_processing/include/video_processing.h"
 #include "webrtc/modules/video_render/video_render_defines.h"
-#include "webrtc/system_wrappers/include/clock.h"
-#include "webrtc/system_wrappers/include/tick_util.h"
 #include "webrtc/video/overuse_frame_detector.h"
 #include "webrtc/video/send_statistics_proxy.h"
 #include "webrtc/video/vie_encoder.h"
@@ -36,10 +34,13 @@ VideoCaptureInput::VideoCaptureInput(VideoCaptureCallback* frame_callback,
       encoder_thread_(EncoderThreadFunction, this, "EncoderThread"),
       capture_event_(false, false),
       stop_(0),
+      // TODO(danilchap): Pass the clock in from outside to ensure it is the
+      // same clock the rtcp module uses to calculate the offset since the
+      // last captured frame when estimating the SenderReport rtp timestamp.
+      clock_(Clock::GetRealTimeClock()),
       last_captured_timestamp_(0),
-      delta_ntp_internal_ms_(
-          Clock::GetRealTimeClock()->CurrentNtpInMilliseconds() -
-          TickTime::MillisecondTimestamp()),
+      delta_ntp_internal_ms_(clock_->CurrentNtpInMilliseconds() -
+                             clock_->TimeInMilliseconds()),
       overuse_detector_(overuse_detector) {
   encoder_thread_.Start();
   encoder_thread_.SetPriority(rtc::kHighPriority);
@@ -62,18 +63,20 @@ void VideoCaptureInput::IncomingCapturedFrame(const VideoFrame& video_frame) {
 
   VideoFrame incoming_frame = video_frame;
 
-  if (incoming_frame.ntp_time_ms() != 0) {
-    // If a NTP time stamp is set, this is the time stamp we will use.
-    incoming_frame.set_render_time_ms(incoming_frame.ntp_time_ms() -
-                                      delta_ntp_internal_ms_);
-  } else {  // NTP time stamp not set.
-    int64_t render_time = incoming_frame.render_time_ms() != 0
-                              ? incoming_frame.render_time_ms()
-                              : TickTime::MillisecondTimestamp();
+  // Local time in webrtc time base.
+  int64_t current_time = clock_->TimeInMilliseconds();
+  incoming_frame.set_render_time_ms(current_time);
 
-    incoming_frame.set_render_time_ms(render_time);
-    incoming_frame.set_ntp_time_ms(render_time + delta_ntp_internal_ms_);
+  // Capture time may come from a clock with an offset and drift from clock_.
+  int64_t capture_ntp_time_ms;
+  if (video_frame.ntp_time_ms() != 0) {
+    capture_ntp_time_ms = video_frame.ntp_time_ms();
+  } else if (video_frame.render_time_ms() != 0) {
+    capture_ntp_time_ms = video_frame.render_time_ms() + delta_ntp_internal_ms_;
+  } else {
+    capture_ntp_time_ms = current_time + delta_ntp_internal_ms_;
   }
+  incoming_frame.set_ntp_time_ms(capture_ntp_time_ms);
 
   // Convert NTP time, in ms, to RTP timestamp.
   const int kMsToRtpTimestamp = 90;
diff --git a/webrtc/video/video_capture_input.h b/webrtc/video/video_capture_input.h
index 87b6452538..be265bf9a8 100644
--- a/webrtc/video/video_capture_input.h
+++ b/webrtc/video/video_capture_input.h
@@ -23,6 +23,7 @@
 #include "webrtc/modules/video_coding/include/video_codec_interface.h"
 #include "webrtc/modules/video_coding/include/video_coding.h"
 #include "webrtc/modules/video_processing/include/video_processing.h"
+#include "webrtc/system_wrappers/include/clock.h"
 #include "webrtc/typedefs.h"
 #include "webrtc/video_send_stream.h"
 
@@ -68,6 +69,7 @@ class VideoCaptureInput : public webrtc::VideoCaptureInput {
   volatile int stop_;
 
   VideoFrame captured_frame_ GUARDED_BY(crit_);
+  Clock* const clock_;
   // Used to make sure incoming time stamp is increasing for every frame.
   int64_t last_captured_timestamp_;
   // Delta used for translating between NTP and internal timestamps.
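The new branching in IncomingCapturedFrame() reduces to a small pure function. The sketch below is a simplified restatement with a stand-in Frame struct (not the real webrtc::VideoFrame), showing the preference order: explicit NTP time, then explicit render time plus the fixed delta, then "now" plus the delta.

```cpp
// Simplified restatement with stand-in types; not webrtc::VideoFrame.
#include <cassert>
#include <cstdint>

struct Frame {
  int64_t ntp_time_ms = 0;     // Set by some capturers (wall-clock based).
  int64_t render_time_ms = 0;  // Set by others, in the local time base.
};

// Preference order for the NTP capture time: an explicit NTP stamp wins,
// then an explicit render time shifted by the fixed NTP/local delta,
// and finally "now" shifted by the same delta.
int64_t CaptureNtpTimeMs(const Frame& frame,
                         int64_t current_time_ms,
                         int64_t delta_ntp_internal_ms) {
  if (frame.ntp_time_ms != 0)
    return frame.ntp_time_ms;
  if (frame.render_time_ms != 0)
    return frame.render_time_ms + delta_ntp_internal_ms;
  return current_time_ms + delta_ntp_internal_ms;
}

int main() {
  const int64_t delta = 3665000000000 - 1000;  // NTP "now" minus local "now".

  Frame no_stamps;  // Neither timestamp set: fall back to the local clock.
  assert(CaptureNtpTimeMs(no_stamps, 1033, delta) == 1033 + delta);

  Frame local_stamp;
  local_stamp.render_time_ms = 1033;  // Local-clock capture time provided.
  assert(CaptureNtpTimeMs(local_stamp, 2000, delta) == 1033 + delta);
  return 0;
}
```

Whichever branch is taken, render_time_ms itself is always rewritten from clock_, so downstream consumers of render time see a single consistent time base regardless of how the capturer stamped the frame.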
diff --git a/webrtc/video/video_capture_input_unittest.cc b/webrtc/video/video_capture_input_unittest.cc
index 86b701bf12..d20b999c2b 100644
--- a/webrtc/video/video_capture_input_unittest.cc
+++ b/webrtc/video/video_capture_input_unittest.cc
@@ -249,8 +249,7 @@ bool EqualFrames(const VideoFrame& frame1, const VideoFrame& frame2) {
 bool EqualTextureFrames(const VideoFrame& frame1, const VideoFrame& frame2) {
   return ((frame1.native_handle() == frame2.native_handle()) &&
           (frame1.width() == frame2.width()) &&
-          (frame1.height() == frame2.height()) &&
-          (frame1.render_time_ms() == frame2.render_time_ms()));
+          (frame1.height() == frame2.height()));
 }
 
 bool EqualBufferFrames(const VideoFrame& frame1, const VideoFrame& frame2) {
@@ -259,7 +258,6 @@ bool EqualBufferFrames(const VideoFrame& frame1, const VideoFrame& frame2) {
           (frame1.stride(kYPlane) == frame2.stride(kYPlane)) &&
           (frame1.stride(kUPlane) == frame2.stride(kUPlane)) &&
           (frame1.stride(kVPlane) == frame2.stride(kVPlane)) &&
-          (frame1.render_time_ms() == frame2.render_time_ms()) &&
           (frame1.allocated_size(kYPlane) == frame2.allocated_size(kYPlane)) &&
           (frame1.allocated_size(kUPlane) == frame2.allocated_size(kUPlane)) &&
           (frame1.allocated_size(kVPlane) == frame2.allocated_size(kVPlane)) &&
diff --git a/webrtc/video/video_send_stream_tests.cc b/webrtc/video/video_send_stream_tests.cc
index c54bb4300e..e8f1101606 100644
--- a/webrtc/video/video_send_stream_tests.cc
+++ b/webrtc/video/video_send_stream_tests.cc
@@ -1257,7 +1257,6 @@ void ExpectEqualTextureFrames(const VideoFrame& frame1,
   EXPECT_EQ(frame1.native_handle(), frame2.native_handle());
   EXPECT_EQ(frame1.width(), frame2.width());
   EXPECT_EQ(frame1.height(), frame2.height());
-  EXPECT_EQ(frame1.render_time_ms(), frame2.render_time_ms());
 }
 
 void ExpectEqualBufferFrames(const VideoFrame& frame1,
@@ -1267,7 +1266,6 @@ void ExpectEqualBufferFrames(const VideoFrame& frame1,
   EXPECT_EQ(frame1.stride(kYPlane), frame2.stride(kYPlane));
   EXPECT_EQ(frame1.stride(kUPlane), frame2.stride(kUPlane));
   EXPECT_EQ(frame1.stride(kVPlane), frame2.stride(kVPlane));
-  EXPECT_EQ(frame1.render_time_ms(), frame2.render_time_ms());
   ASSERT_EQ(frame1.allocated_size(kYPlane), frame2.allocated_size(kYPlane));
   EXPECT_EQ(0, memcmp(frame1.buffer(kYPlane),
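The test changes follow directly from the new behavior: because VideoCaptureInput now overwrites render_time_ms with its own clock, input/output frame comparisons can no longer expect that field to round-trip. A hypothetical reduced helper illustrating the idea (not the actual test code):

```cpp
// Hypothetical reduced example of why render_time_ms is excluded from
// frame equality checks after this patch; not the real WebRTC helpers.
#include <cassert>
#include <cstdint>

struct FrameStamps {
  int64_t render_time_ms;
  int64_t ntp_time_ms;
};

// Compare only fields expected to pass through VideoCaptureInput untouched;
// render_time_ms is rewritten from the sender's own clock.
bool EqualIgnoringRenderTime(const FrameStamps& in, const FrameStamps& out) {
  return in.ntp_time_ms == out.ntp_time_ms;
}

int main() {
  FrameStamps input{1234, 3665000000033};
  FrameStamps output{5678 /* rewritten by the capture input */, 3665000000033};
  assert(EqualIgnoringRenderTime(input, output));
  return 0;
}
```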