Remove frame time scheduling in IncomingVideoStream

This is part of the project to make RTC rendering smoother. We
have already finished development of the frame selection algorithm
in WebMediaPlayerMS: it manages a frame pool and, based on the
vsync interval, actively selects the best frame to render in order
to maximize rendering smoothness.

Thus the frame timing control in IncomingVideoStream is no longer
needed: with the frame selection algorithm in WebMediaPlayerMS in
place, the timing control in IncomingVideoStream does nothing but
add extra delay.
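
For embedders whose renderer already schedules frames (as
WebMediaPlayerMS now does on the Chromium side), the pass-through
path is requested via the new RTCConfiguration flag. A minimal
sketch of the intended use; the include path and the factory call
are assumptions about the surrounding API, only the flag itself is
added by this change:

  #include "talk/app/webrtc/peerconnectioninterface.h"

  // The renderer does its own vsync-driven frame selection, so ask
  // WebRTC to hand frames over immediately instead of smoothing them.
  webrtc::PeerConnectionInterface::RTCConfiguration config;
  config.disable_prerenderer_smoothing = true;

  // Pass |config| to PeerConnectionFactoryInterface::CreatePeerConnection()
  // as usual. WebRtcSession copies the flag into
  // VideoOptions::disable_prerenderer_smoothing, and IncomingVideoStream
  // then delivers each decoded frame without render-time scheduling.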

BUG=514873

Review URL: https://codereview.webrtc.org/1419673014

Cr-Commit-Position: refs/heads/master@{#10781}
Author:    qiangchen
Date:      2015-11-24 18:07:56 -08:00
Committer: Commit bot
Commit:    444682acf9
Parent:    953eabc027

11 changed files with 109 additions and 61 deletions

View File

@@ -256,7 +256,7 @@ class PeerConnectionInterface : public rtc::RefCountInterface {
     int ice_connection_receiving_timeout;
     ContinualGatheringPolicy continual_gathering_policy;
     std::vector<rtc::scoped_refptr<rtc::RTCCertificate>> certificates;
+    bool disable_prerenderer_smoothing;
     RTCConfiguration()
         : type(kAll),
           bundle_policy(kBundlePolicyBalanced),
@@ -265,7 +265,8 @@ class PeerConnectionInterface : public rtc::RefCountInterface {
           audio_jitter_buffer_max_packets(kAudioJitterBufferMaxPackets),
           audio_jitter_buffer_fast_accelerate(false),
           ice_connection_receiving_timeout(kUndefined),
-          continual_gathering_policy(GATHER_ONCE) {}
+          continual_gathering_policy(GATHER_ONCE),
+          disable_prerenderer_smoothing(false) {}
   };

   struct RTCOfferAnswerOptions {

View File

@@ -593,6 +593,8 @@ bool WebRtcSession::Initialize(
     const PeerConnectionInterface::RTCConfiguration& rtc_configuration) {
   bundle_policy_ = rtc_configuration.bundle_policy;
   rtcp_mux_policy_ = rtc_configuration.rtcp_mux_policy;
+  video_options_.disable_prerenderer_smoothing =
+      rtc::Optional<bool>(rtc_configuration.disable_prerenderer_smoothing);
   transport_controller_->SetSslMaxProtocolVersion(options.ssl_max_version);

   // Obtain a certificate from RTCConfiguration if any were provided (optional).

View File

@@ -267,6 +267,8 @@ struct VideoOptions {
             change.unsignalled_recv_stream_limit);
     SetFrom(&use_simulcast_adapter, change.use_simulcast_adapter);
     SetFrom(&screencast_min_bitrate, change.screencast_min_bitrate);
+    SetFrom(&disable_prerenderer_smoothing,
+            change.disable_prerenderer_smoothing);
   }

   bool operator==(const VideoOptions& o) const {
@@ -293,7 +295,8 @@ struct VideoOptions {
            suspend_below_min_bitrate == o.suspend_below_min_bitrate &&
            unsignalled_recv_stream_limit == o.unsignalled_recv_stream_limit &&
            use_simulcast_adapter == o.use_simulcast_adapter &&
-           screencast_min_bitrate == o.screencast_min_bitrate;
+           screencast_min_bitrate == o.screencast_min_bitrate &&
+           disable_prerenderer_smoothing == o.disable_prerenderer_smoothing;
   }

   std::string ToString() const {
@@ -379,6 +382,13 @@ struct VideoOptions {
   rtc::Optional<bool> use_simulcast_adapter;
   // Force screencast to use a minimum bitrate
   rtc::Optional<int> screencast_min_bitrate;
+  // Set to true if the renderer has its own frame selection algorithm.
+  // If true, WebRTC hands each frame over as soon as possible, without
+  // any delay, and rendering smoothness is entirely the renderer's
+  // responsibility.
+  // If false, WebRTC delays frame release in order to increase
+  // rendering smoothness.
+  rtc::Optional<bool> disable_prerenderer_smoothing;

  private:
   template <typename T>

View File

@@ -1225,7 +1225,7 @@ bool WebRtcVideoChannel2::AddRecvStream(const StreamParams& sp,
   receive_streams_[ssrc] = new WebRtcVideoReceiveStream(
       call_, sp, config, external_decoder_factory_, default_stream,
-      recv_codecs_);
+      recv_codecs_, options_.disable_prerenderer_smoothing.value_or(false));

   return true;
 }
@@ -2335,7 +2335,8 @@ WebRtcVideoChannel2::WebRtcVideoReceiveStream::WebRtcVideoReceiveStream(
     const webrtc::VideoReceiveStream::Config& config,
     WebRtcVideoDecoderFactory* external_decoder_factory,
     bool default_stream,
-    const std::vector<VideoCodecSettings>& recv_codecs)
+    const std::vector<VideoCodecSettings>& recv_codecs,
+    bool disable_prerenderer_smoothing)
     : call_(call),
       ssrcs_(sp.ssrcs),
       ssrc_groups_(sp.ssrc_groups),
@@ -2343,6 +2344,7 @@ WebRtcVideoChannel2::WebRtcVideoReceiveStream::WebRtcVideoReceiveStream(
       default_stream_(default_stream),
       config_(config),
       external_decoder_factory_(external_decoder_factory),
+      disable_prerenderer_smoothing_(disable_prerenderer_smoothing),
       renderer_(NULL),
       last_width_(-1),
       last_height_(-1),
@@ -2558,6 +2560,11 @@ bool WebRtcVideoChannel2::WebRtcVideoReceiveStream::IsTextureSupported() const {
   return true;
 }

+bool WebRtcVideoChannel2::WebRtcVideoReceiveStream::SmoothsRenderedFrames()
+    const {
+  return disable_prerenderer_smoothing_;
+}
+
 bool WebRtcVideoChannel2::WebRtcVideoReceiveStream::IsDefaultStream() const {
   return default_stream_;
 }

View File

@@ -394,7 +394,8 @@ class WebRtcVideoChannel2 : public rtc::MessageHandler,
                             const webrtc::VideoReceiveStream::Config& config,
                             WebRtcVideoDecoderFactory* external_decoder_factory,
                             bool default_stream,
-                            const std::vector<VideoCodecSettings>& recv_codecs);
+                            const std::vector<VideoCodecSettings>& recv_codecs,
+                            bool disable_prerenderer_smoothing);
     ~WebRtcVideoReceiveStream();

     const std::vector<uint32_t>& GetSsrcs() const;
@@ -409,6 +410,7 @@ class WebRtcVideoChannel2 : public rtc::MessageHandler,
     void RenderFrame(const webrtc::VideoFrame& frame,
                      int time_to_render_ms) override;
     bool IsTextureSupported() const override;
+    bool SmoothsRenderedFrames() const override;
     bool IsDefaultStream() const;

     void SetRenderer(cricket::VideoRenderer* renderer);
@@ -449,6 +451,8 @@ class WebRtcVideoChannel2 : public rtc::MessageHandler,
     WebRtcVideoDecoderFactory* const external_decoder_factory_;
     std::vector<AllocatedDecoder> allocated_decoders_;

+    const bool disable_prerenderer_smoothing_;
+
     rtc::CriticalSection renderer_lock_;
     cricket::VideoRenderer* renderer_ GUARDED_BY(renderer_lock_);
     int last_width_ GUARDED_BY(renderer_lock_);

View File

@@ -19,6 +19,7 @@ namespace webrtc {
 class CriticalSectionWrapper;
 class EventTimerWrapper;
 class PlatformThread;
+class VideoRenderer;

 class VideoRenderCallback {
  public:
@@ -31,7 +32,7 @@ class VideoRenderCallback {
 class IncomingVideoStream : public VideoRenderCallback {
  public:
-  explicit IncomingVideoStream(uint32_t stream_id);
+  IncomingVideoStream(uint32_t stream_id, bool disable_prerenderer_smoothing);
   ~IncomingVideoStream();

   // Get callback to deliver frames to the module.
@@ -72,7 +73,10 @@ class IncomingVideoStream : public VideoRenderCallback {
   enum { kEventMaxWaitTimeMs = 100 };
   enum { kFrameRatePeriodMs = 1000 };

+  void DeliverFrame(const VideoFrame& video_frame);
+
   uint32_t const stream_id_;
+  const bool disable_prerenderer_smoothing_;
   // Critsects in allowed to enter order.
   const rtc::scoped_ptr<CriticalSectionWrapper> stream_critsect_;
   const rtc::scoped_ptr<CriticalSectionWrapper> thread_critsect_;

View File

@@ -28,11 +28,14 @@
 #include "webrtc/system_wrappers/include/event_wrapper.h"
 #include "webrtc/system_wrappers/include/tick_util.h"
 #include "webrtc/system_wrappers/include/trace.h"
+#include "webrtc/video_renderer.h"

 namespace webrtc {

-IncomingVideoStream::IncomingVideoStream(uint32_t stream_id)
+IncomingVideoStream::IncomingVideoStream(uint32_t stream_id,
+                                         bool disable_prerenderer_smoothing)
     : stream_id_(stream_id),
+      disable_prerenderer_smoothing_(disable_prerenderer_smoothing),
       stream_critsect_(CriticalSectionWrapper::CreateCriticalSection()),
       thread_critsect_(CriticalSectionWrapper::CreateCriticalSection()),
       buffer_critsect_(CriticalSectionWrapper::CreateCriticalSection()),
@@ -49,8 +52,7 @@ IncomingVideoStream::IncomingVideoStream(uint32_t stream_id)
       temp_frame_(),
       start_image_(),
       timeout_image_(),
-      timeout_time_() {
-}
+      timeout_time_() {}

 IncomingVideoStream::~IncomingVideoStream() {
   Stop();
@@ -80,11 +82,15 @@ int32_t IncomingVideoStream::RenderFrame(const uint32_t stream_id,
     last_rate_calculation_time_ms_ = now_ms;
   }

-  // Insert frame.
-  CriticalSectionScoped csB(buffer_critsect_.get());
-  if (render_buffers_->AddFrame(video_frame) == 1)
-    deliver_buffer_event_->Set();
+  // Hand over or insert frame.
+  if (disable_prerenderer_smoothing_) {
+    DeliverFrame(video_frame);
+  } else {
+    CriticalSectionScoped csB(buffer_critsect_.get());
+    if (render_buffers_->AddFrame(video_frame) == 1) {
+      deliver_buffer_event_->Set();
+    }
+  }

   return 0;
 }
@@ -128,6 +134,7 @@ int32_t IncomingVideoStream::Start() {
     return 0;
   }

+  if (!disable_prerenderer_smoothing_) {
     CriticalSectionScoped csT(thread_critsect_.get());
     assert(incoming_render_thread_ == NULL);
@@ -143,7 +150,7 @@ int32_t IncomingVideoStream::Start() {
     }
     incoming_render_thread_->SetPriority(kRealtimePriority);
     deliver_buffer_event_->StartTimer(false, kEventStartupTimeMs);
-
+  }
   running_ = true;
   return 0;
 }
@@ -205,6 +212,7 @@ bool IncomingVideoStream::IncomingVideoStreamProcess() {
       // Terminating
       return false;
     }
+
     // Get a new frame to render and the time for the frame after this one.
     VideoFrame frame_to_render;
     uint32_t wait_time;
@@ -220,7 +228,14 @@ bool IncomingVideoStream::IncomingVideoStreamProcess() {
     }
     deliver_buffer_event_->StartTimer(false, wait_time);

-    if (frame_to_render.IsZeroSize()) {
+    DeliverFrame(frame_to_render);
+  }
+  return true;
+}
+
+void IncomingVideoStream::DeliverFrame(const VideoFrame& video_frame) {
+  CriticalSectionScoped cs(thread_critsect_.get());
+  if (video_frame.IsZeroSize()) {
     if (render_callback_) {
       if (last_render_time_ms_ == 0 && !start_image_.IsZeroSize()) {
         // We have not rendered anything and have a start image.
@@ -236,21 +251,18 @@
     }

     // No frame.
-    return true;
+    return;
   }

   // Send frame for rendering.
   if (external_callback_) {
-    external_callback_->RenderFrame(stream_id_, frame_to_render);
+    external_callback_->RenderFrame(stream_id_, video_frame);
   } else if (render_callback_) {
-    render_callback_->RenderFrame(stream_id_, frame_to_render);
+    render_callback_->RenderFrame(stream_id_, video_frame);
   }

   // We're done with this frame.
-  if (!frame_to_render.IsZeroSize())
-    last_render_time_ms_ = frame_to_render.render_time_ms();
-  }
-  return true;
 }

 }  // namespace webrtc

View File

@@ -197,7 +197,8 @@ ModuleVideoRenderImpl::AddIncomingRenderStream(const uint32_t streamId,
   }

   // Create platform independant code
-  IncomingVideoStream* ptrIncomingStream = new IncomingVideoStream(streamId);
+  IncomingVideoStream* ptrIncomingStream =
+      new IncomingVideoStream(streamId, false);
   ptrIncomingStream->SetRenderCallback(ptrRenderCallback);

   VideoRenderCallback* moduleCallback = ptrIncomingStream->ModuleCallback();

View File

@@ -420,7 +420,8 @@ ModuleVideoRenderImpl::AddIncomingRenderStream(const uint32_t streamId,
   }

   // Create platform independant code
-  IncomingVideoStream* ptrIncomingStream = new IncomingVideoStream(streamId);
+  IncomingVideoStream* ptrIncomingStream =
+      new IncomingVideoStream(streamId, false);
   ptrIncomingStream->SetRenderCallback(ptrRenderCallback);

   VideoRenderCallback* moduleCallback = ptrIncomingStream->ModuleCallback();

View File

@@ -272,7 +272,8 @@ VideoReceiveStream::VideoReceiveStream(
     RTC_CHECK_EQ(0, vie_channel_->SetReceiveCodec(codec));
   }

-  incoming_video_stream_.reset(new IncomingVideoStream(0));
+  incoming_video_stream_.reset(new IncomingVideoStream(
+      0, config.renderer ? config.renderer->SmoothsRenderedFrames() : false));
   incoming_video_stream_->SetExpectedRenderDelay(config.render_delay_ms);
   incoming_video_stream_->SetExternalCallback(this);
   vie_channel_->SetIncomingVideoStream(incoming_video_stream_.get());

View File

@@ -25,6 +25,11 @@ class VideoRenderer {

   virtual bool IsTextureSupported() const = 0;

+  // Returns true if WebRTC should not delay frames for smoothing; in that
+  // case the renderer schedules frames itself in order to optimize
+  // smoothness.
+  virtual bool SmoothsRenderedFrames() const { return false; }
+
  protected:
   virtual ~VideoRenderer() {}
 };
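
For renderers implemented directly against webrtc::VideoRenderer, the
same behaviour is opted into by overriding the new hook. A minimal
sketch; the class name and the frame handling are illustrative, while
the overridden virtuals are taken from this header:

  #include "webrtc/video_renderer.h"

  // Renderer that schedules frames itself, so it asks WebRTC not to
  // delay frames before hand-over.
  class SelfSmoothingRenderer : public webrtc::VideoRenderer {
   public:
    void RenderFrame(const webrtc::VideoFrame& frame,
                     int time_to_render_ms) override {
      // Hand the frame to the application's own frame pool / vsync-driven
      // selection logic (what WebMediaPlayerMS does in Chromium).
    }

    bool IsTextureSupported() const override { return false; }

    // Report that smoothing happens downstream; VideoReceiveStream then
    // constructs its IncomingVideoStream with
    // disable_prerenderer_smoothing set to true.
    bool SmoothsRenderedFrames() const override { return true; }
  };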