Remove frame time scheduling in IncomingVideoStream

This is part of the project that makes RTC rendering more
smooth. We've already finished the development of the
frame selection algorithm in WebMediaPlayerMS, where we
managed a frame pool, and based on the vsync interval, we
actively select the best frame to render in order to
maximize the rendering smoothness.

Thus the frame timeline control in IncomingVideoStream is
no longer needed, because with sophisticated frame
selection algorithm in WebMediaPlayerMS, the time control
in IncomingVideoStream will do nothing but add some extra
delay.

BUG=514873

Review URL: https://codereview.webrtc.org/1419673014

Cr-Commit-Position: refs/heads/master@{#10781}
This commit is contained in:
qiangchen
2015-11-24 18:07:56 -08:00
committed by Commit bot
parent 953eabc027
commit 444682acf9
11 changed files with 109 additions and 61 deletions

View File

@ -256,7 +256,7 @@ class PeerConnectionInterface : public rtc::RefCountInterface {
int ice_connection_receiving_timeout;
ContinualGatheringPolicy continual_gathering_policy;
std::vector<rtc::scoped_refptr<rtc::RTCCertificate>> certificates;
bool disable_prerenderer_smoothing;
RTCConfiguration()
: type(kAll),
bundle_policy(kBundlePolicyBalanced),
@ -265,7 +265,8 @@ class PeerConnectionInterface : public rtc::RefCountInterface {
audio_jitter_buffer_max_packets(kAudioJitterBufferMaxPackets),
audio_jitter_buffer_fast_accelerate(false),
ice_connection_receiving_timeout(kUndefined),
continual_gathering_policy(GATHER_ONCE) {}
continual_gathering_policy(GATHER_ONCE),
disable_prerenderer_smoothing(false) {}
};
struct RTCOfferAnswerOptions {

View File

@ -593,6 +593,8 @@ bool WebRtcSession::Initialize(
const PeerConnectionInterface::RTCConfiguration& rtc_configuration) {
bundle_policy_ = rtc_configuration.bundle_policy;
rtcp_mux_policy_ = rtc_configuration.rtcp_mux_policy;
video_options_.disable_prerenderer_smoothing =
rtc::Optional<bool>(rtc_configuration.disable_prerenderer_smoothing);
transport_controller_->SetSslMaxProtocolVersion(options.ssl_max_version);
// Obtain a certificate from RTCConfiguration if any were provided (optional).

View File

@ -267,6 +267,8 @@ struct VideoOptions {
change.unsignalled_recv_stream_limit);
SetFrom(&use_simulcast_adapter, change.use_simulcast_adapter);
SetFrom(&screencast_min_bitrate, change.screencast_min_bitrate);
SetFrom(&disable_prerenderer_smoothing,
change.disable_prerenderer_smoothing);
}
bool operator==(const VideoOptions& o) const {
@ -293,7 +295,8 @@ struct VideoOptions {
suspend_below_min_bitrate == o.suspend_below_min_bitrate &&
unsignalled_recv_stream_limit == o.unsignalled_recv_stream_limit &&
use_simulcast_adapter == o.use_simulcast_adapter &&
screencast_min_bitrate == o.screencast_min_bitrate;
screencast_min_bitrate == o.screencast_min_bitrate &&
disable_prerenderer_smoothing == o.disable_prerenderer_smoothing;
}
std::string ToString() const {
@ -379,6 +382,13 @@ struct VideoOptions {
rtc::Optional<bool> use_simulcast_adapter;
// Force screencast to use a minimum bitrate
rtc::Optional<int> screencast_min_bitrate;
// Set to true if the renderer has an algorithm of frame selection.
// If the value is true, then WebRTC will hand over a frame as soon as
// possible without delay, and rendering smoothness is completely the duty
// of the renderer;
// If the value is false, then WebRTC is responsible for delaying frame release
// in order to increase rendering smoothness.
rtc::Optional<bool> disable_prerenderer_smoothing;
private:
template <typename T>

View File

@ -1225,7 +1225,7 @@ bool WebRtcVideoChannel2::AddRecvStream(const StreamParams& sp,
receive_streams_[ssrc] = new WebRtcVideoReceiveStream(
call_, sp, config, external_decoder_factory_, default_stream,
recv_codecs_);
recv_codecs_, options_.disable_prerenderer_smoothing.value_or(false));
return true;
}
@ -2335,7 +2335,8 @@ WebRtcVideoChannel2::WebRtcVideoReceiveStream::WebRtcVideoReceiveStream(
const webrtc::VideoReceiveStream::Config& config,
WebRtcVideoDecoderFactory* external_decoder_factory,
bool default_stream,
const std::vector<VideoCodecSettings>& recv_codecs)
const std::vector<VideoCodecSettings>& recv_codecs,
bool disable_prerenderer_smoothing)
: call_(call),
ssrcs_(sp.ssrcs),
ssrc_groups_(sp.ssrc_groups),
@ -2343,6 +2344,7 @@ WebRtcVideoChannel2::WebRtcVideoReceiveStream::WebRtcVideoReceiveStream(
default_stream_(default_stream),
config_(config),
external_decoder_factory_(external_decoder_factory),
disable_prerenderer_smoothing_(disable_prerenderer_smoothing),
renderer_(NULL),
last_width_(-1),
last_height_(-1),
@ -2558,6 +2560,11 @@ bool WebRtcVideoChannel2::WebRtcVideoReceiveStream::IsTextureSupported() const {
return true;
}
// Overrides VideoRenderer::SmoothsRenderedFrames(). Returns the value of the
// channel's disable_prerenderer_smoothing option; when true, the downstream
// IncomingVideoStream hands frames over immediately instead of delaying them
// for smoothness (the renderer does its own frame scheduling).
bool WebRtcVideoChannel2::WebRtcVideoReceiveStream::SmoothsRenderedFrames()
const {
return disable_prerenderer_smoothing_;
}
// Returns whether this receive stream was created as a "default" stream
// (see the default_stream constructor argument; presumably for unsignalled
// SSRCs — confirm against the AddRecvStream caller).
bool WebRtcVideoChannel2::WebRtcVideoReceiveStream::IsDefaultStream() const {
return default_stream_;
}

View File

@ -394,7 +394,8 @@ class WebRtcVideoChannel2 : public rtc::MessageHandler,
const webrtc::VideoReceiveStream::Config& config,
WebRtcVideoDecoderFactory* external_decoder_factory,
bool default_stream,
const std::vector<VideoCodecSettings>& recv_codecs);
const std::vector<VideoCodecSettings>& recv_codecs,
bool disable_prerenderer_smoothing);
~WebRtcVideoReceiveStream();
const std::vector<uint32_t>& GetSsrcs() const;
@ -409,6 +410,7 @@ class WebRtcVideoChannel2 : public rtc::MessageHandler,
void RenderFrame(const webrtc::VideoFrame& frame,
int time_to_render_ms) override;
bool IsTextureSupported() const override;
bool SmoothsRenderedFrames() const override;
bool IsDefaultStream() const;
void SetRenderer(cricket::VideoRenderer* renderer);
@ -449,6 +451,8 @@ class WebRtcVideoChannel2 : public rtc::MessageHandler,
WebRtcVideoDecoderFactory* const external_decoder_factory_;
std::vector<AllocatedDecoder> allocated_decoders_;
const bool disable_prerenderer_smoothing_;
rtc::CriticalSection renderer_lock_;
cricket::VideoRenderer* renderer_ GUARDED_BY(renderer_lock_);
int last_width_ GUARDED_BY(renderer_lock_);

View File

@ -19,6 +19,7 @@ namespace webrtc {
class CriticalSectionWrapper;
class EventTimerWrapper;
class PlatformThread;
class VideoRenderer;
class VideoRenderCallback {
public:
@ -31,7 +32,7 @@ class VideoRenderCallback {
class IncomingVideoStream : public VideoRenderCallback {
public:
explicit IncomingVideoStream(uint32_t stream_id);
IncomingVideoStream(uint32_t stream_id, bool disable_prerenderer_smoothing);
~IncomingVideoStream();
// Get callback to deliver frames to the module.
@ -72,7 +73,10 @@ class IncomingVideoStream : public VideoRenderCallback {
enum { kEventMaxWaitTimeMs = 100 };
enum { kFrameRatePeriodMs = 1000 };
void DeliverFrame(const VideoFrame& video_frame);
uint32_t const stream_id_;
const bool disable_prerenderer_smoothing_;
// Critsects in allowed to enter order.
const rtc::scoped_ptr<CriticalSectionWrapper> stream_critsect_;
const rtc::scoped_ptr<CriticalSectionWrapper> thread_critsect_;

View File

@ -28,11 +28,14 @@
#include "webrtc/system_wrappers/include/event_wrapper.h"
#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/system_wrappers/include/trace.h"
#include "webrtc/video_renderer.h"
namespace webrtc {
IncomingVideoStream::IncomingVideoStream(uint32_t stream_id)
IncomingVideoStream::IncomingVideoStream(uint32_t stream_id,
bool disable_prerenderer_smoothing)
: stream_id_(stream_id),
disable_prerenderer_smoothing_(disable_prerenderer_smoothing),
stream_critsect_(CriticalSectionWrapper::CreateCriticalSection()),
thread_critsect_(CriticalSectionWrapper::CreateCriticalSection()),
buffer_critsect_(CriticalSectionWrapper::CreateCriticalSection()),
@ -49,8 +52,7 @@ IncomingVideoStream::IncomingVideoStream(uint32_t stream_id)
temp_frame_(),
start_image_(),
timeout_image_(),
timeout_time_() {
}
timeout_time_() {}
IncomingVideoStream::~IncomingVideoStream() {
Stop();
@ -80,11 +82,15 @@ int32_t IncomingVideoStream::RenderFrame(const uint32_t stream_id,
last_rate_calculation_time_ms_ = now_ms;
}
// Insert frame.
CriticalSectionScoped csB(buffer_critsect_.get());
if (render_buffers_->AddFrame(video_frame) == 1)
deliver_buffer_event_->Set();
// Hand over or insert frame.
if (disable_prerenderer_smoothing_) {
DeliverFrame(video_frame);
} else {
CriticalSectionScoped csB(buffer_critsect_.get());
if (render_buffers_->AddFrame(video_frame) == 1) {
deliver_buffer_event_->Set();
}
}
return 0;
}
@ -128,22 +134,23 @@ int32_t IncomingVideoStream::Start() {
return 0;
}
CriticalSectionScoped csT(thread_critsect_.get());
assert(incoming_render_thread_ == NULL);
if (!disable_prerenderer_smoothing_) {
CriticalSectionScoped csT(thread_critsect_.get());
assert(incoming_render_thread_ == NULL);
incoming_render_thread_ = PlatformThread::CreateThread(
IncomingVideoStreamThreadFun, this, "IncomingVideoStreamThread");
if (!incoming_render_thread_) {
return -1;
incoming_render_thread_ = PlatformThread::CreateThread(
IncomingVideoStreamThreadFun, this, "IncomingVideoStreamThread");
if (!incoming_render_thread_) {
return -1;
}
if (incoming_render_thread_->Start()) {
} else {
return -1;
}
incoming_render_thread_->SetPriority(kRealtimePriority);
deliver_buffer_event_->StartTimer(false, kEventStartupTimeMs);
}
if (incoming_render_thread_->Start()) {
} else {
return -1;
}
incoming_render_thread_->SetPriority(kRealtimePriority);
deliver_buffer_event_->StartTimer(false, kEventStartupTimeMs);
running_ = true;
return 0;
}
@ -205,6 +212,7 @@ bool IncomingVideoStream::IncomingVideoStreamProcess() {
// Terminating
return false;
}
// Get a new frame to render and the time for the frame after this one.
VideoFrame frame_to_render;
uint32_t wait_time;
@ -220,37 +228,41 @@ bool IncomingVideoStream::IncomingVideoStreamProcess() {
}
deliver_buffer_event_->StartTimer(false, wait_time);
if (frame_to_render.IsZeroSize()) {
if (render_callback_) {
if (last_render_time_ms_ == 0 && !start_image_.IsZeroSize()) {
// We have not rendered anything and have a start image.
temp_frame_.CopyFrame(start_image_);
render_callback_->RenderFrame(stream_id_, temp_frame_);
} else if (!timeout_image_.IsZeroSize() &&
last_render_time_ms_ + timeout_time_ <
TickTime::MillisecondTimestamp()) {
// Render a timeout image.
temp_frame_.CopyFrame(timeout_image_);
render_callback_->RenderFrame(stream_id_, temp_frame_);
}
}
// No frame.
return true;
}
// Send frame for rendering.
if (external_callback_) {
external_callback_->RenderFrame(stream_id_, frame_to_render);
} else if (render_callback_) {
render_callback_->RenderFrame(stream_id_, frame_to_render);
}
// We're done with this frame.
if (!frame_to_render.IsZeroSize())
last_render_time_ms_ = frame_to_render.render_time_ms();
DeliverFrame(frame_to_render);
}
return true;
}
// Delivers a single frame to the registered callback. Called either directly
// from RenderFrame() (when pre-renderer smoothing is disabled) or from the
// render thread after the scheduling delay. A zero-size frame means "no frame
// available": in that case a start or timeout image is rendered instead, if
// one is configured.
void IncomingVideoStream::DeliverFrame(const VideoFrame& video_frame) {
// Serializes delivery against thread start/stop (thread_critsect_ also
// guards the callback pointers and image state used below).
CriticalSectionScoped cs(thread_critsect_.get());
if (video_frame.IsZeroSize()) {
if (render_callback_) {
if (last_render_time_ms_ == 0 && !start_image_.IsZeroSize()) {
// We have not rendered anything and have a start image.
temp_frame_.CopyFrame(start_image_);
render_callback_->RenderFrame(stream_id_, temp_frame_);
} else if (!timeout_image_.IsZeroSize() &&
last_render_time_ms_ + timeout_time_ <
TickTime::MillisecondTimestamp()) {
// Render a timeout image.
temp_frame_.CopyFrame(timeout_image_);
render_callback_->RenderFrame(stream_id_, temp_frame_);
}
}
// No frame.
return;
}
// Send frame for rendering. The external callback, when set, takes
// precedence over the module render callback.
if (external_callback_) {
external_callback_->RenderFrame(stream_id_, video_frame);
} else if (render_callback_) {
render_callback_->RenderFrame(stream_id_, video_frame);
}
// We're done with this frame.
last_render_time_ms_ = video_frame.render_time_ms();
}
} // namespace webrtc

View File

@ -197,7 +197,8 @@ ModuleVideoRenderImpl::AddIncomingRenderStream(const uint32_t streamId,
}
// Create platform-independent code
IncomingVideoStream* ptrIncomingStream = new IncomingVideoStream(streamId);
IncomingVideoStream* ptrIncomingStream =
new IncomingVideoStream(streamId, false);
ptrIncomingStream->SetRenderCallback(ptrRenderCallback);
VideoRenderCallback* moduleCallback = ptrIncomingStream->ModuleCallback();

View File

@ -420,7 +420,8 @@ ModuleVideoRenderImpl::AddIncomingRenderStream(const uint32_t streamId,
}
// Create platform-independent code
IncomingVideoStream* ptrIncomingStream = new IncomingVideoStream(streamId);
IncomingVideoStream* ptrIncomingStream =
new IncomingVideoStream(streamId, false);
ptrIncomingStream->SetRenderCallback(ptrRenderCallback);
VideoRenderCallback* moduleCallback = ptrIncomingStream->ModuleCallback();

View File

@ -272,7 +272,8 @@ VideoReceiveStream::VideoReceiveStream(
RTC_CHECK_EQ(0, vie_channel_->SetReceiveCodec(codec));
}
incoming_video_stream_.reset(new IncomingVideoStream(0));
incoming_video_stream_.reset(new IncomingVideoStream(
0, config.renderer ? config.renderer->SmoothsRenderedFrames() : false));
incoming_video_stream_->SetExpectedRenderDelay(config.render_delay_ms);
incoming_video_stream_->SetExternalCallback(this);
vie_channel_->SetIncomingVideoStream(incoming_video_stream_.get());

View File

@ -25,6 +25,11 @@ class VideoRenderer {
virtual bool IsTextureSupported() const = 0;
// This function returns true if WebRTC should not delay frames for
// smoothness. In general, this case means the renderer can schedule frames to
// optimize smoothness.
virtual bool SmoothsRenderedFrames() const { return false; }
protected:
virtual ~VideoRenderer() {}
};