This is an initial cleanup step, aiming to delete the
webrtc::VideoRenderer class and replace it with rtc::VideoSinkInterface.

The next step is to convert all places where a renderer is attached
over to rtc::VideoSourceInterface; at that point, the
SmoothsRenderedFrames method can be replaced by a flag,
rtc::VideoSinkWants::smoothed_frames.

Delete unused method IsTextureSupported.
Delete unused time argument to RenderFrame.
Let webrtc::VideoRenderer inherit rtc::VideoSinkInterface. Rename RenderFrame --> OnFrame.

TBR=kjellander@webrtc.org
BUG=webrtc:5426

Review URL: https://codereview.webrtc.org/1814763002

Cr-Commit-Position: refs/heads/master@{#12070}
Author: nisse
Date: 2016-03-21 01:27:56 -07:00
Committed by: Commit bot
Parent: 105831ef4a
Commit: eb83a1a10f

22 changed files with 40 additions and 105 deletions
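For a downstream renderer the migration is mechanical: with webrtc::VideoRenderer now an rtc::VideoSinkInterface&lt;VideoFrame&gt;, an implementation overrides only OnFrame(). A minimal sketch (hypothetical class, not part of this CL, assuming SmoothsRenderedFrames() keeps a default implementation):

#include "webrtc/video_frame.h"
#include "webrtc/video_renderer.h"

// Hypothetical sink used for illustration: counts frames instead of drawing.
class CountingRenderer : public webrtc::VideoRenderer {
 public:
  // OnFrame() replaces RenderFrame(frame, time_to_render_ms); the render
  // delay argument is gone, so timing must come from the frame itself,
  // e.g. frame.render_time_ms(). IsTextureSupported() no longer exists.
  void OnFrame(const webrtc::VideoFrame& frame) override { ++num_frames_; }

  int num_frames() const { return num_frames_; }

 private:
  int num_frames_ = 0;
};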


@@ -44,4 +44,7 @@ specific_include_rules = {
   "video_frame\.h": [
     "+webrtc/common_video",
   ],
+  "video_renderer\.h": [
+    "+webrtc/media/base",
+  ],
 }


@@ -88,8 +88,7 @@ class VideoRtcpAndSyncObserver : public test::RtpRtcpObserver,
         first_time_in_sync_(-1),
         receive_stream_(nullptr) {}
 
-  void RenderFrame(const VideoFrame& video_frame,
-                   int time_to_render_ms) override {
+  void OnFrame(const VideoFrame& video_frame) override {
     VideoReceiveStream::Stats stats;
     {
       rtc::CritScope lock(&crit_);
@@ -129,8 +128,6 @@ class VideoRtcpAndSyncObserver : public test::RtpRtcpObserver,
     }
   }
 
-  bool IsTextureSupported() const override { return false; }
-
   void set_receive_stream(VideoReceiveStream* receive_stream) {
     rtc::CritScope lock(&crit_);
     receive_stream_ = receive_stream;
@@ -377,8 +374,7 @@ void CallPerfTest::TestCaptureNtpTime(const FakeNetworkPipe::Config& net_config,
           nullptr, this, test::PacketTransport::kReceiver, net_config_);
     }
 
-    void RenderFrame(const VideoFrame& video_frame,
-                     int time_to_render_ms) override {
+    void OnFrame(const VideoFrame& video_frame) override {
       rtc::CritScope lock(&crit_);
       if (video_frame.ntp_time_ms() <= 0) {
         // Haven't got enough RTCP SR in order to calculate the capture ntp
@@ -417,8 +413,6 @@ void CallPerfTest::TestCaptureNtpTime(const FakeNetworkPipe::Config& net_config,
       EXPECT_TRUE(std::abs(time_offset_ms) < threshold_ms_);
     }
 
-    bool IsTextureSupported() const override { return false; }
-
     virtual Action OnSendRtp(const uint8_t* packet, size_t length) {
       rtc::CritScope lock(&crit_);
       RTPHeader header;


@@ -202,9 +202,8 @@ bool FakeVideoReceiveStream::IsReceiving() const {
   return receiving_;
 }
 
-void FakeVideoReceiveStream::InjectFrame(const webrtc::VideoFrame& frame,
-                                         int time_to_render_ms) {
-  config_.renderer->RenderFrame(frame, time_to_render_ms);
+void FakeVideoReceiveStream::InjectFrame(const webrtc::VideoFrame& frame) {
+  config_.renderer->OnFrame(frame);
 }
 
 webrtc::VideoReceiveStream::Stats FakeVideoReceiveStream::GetStats() const {


@@ -159,7 +159,7 @@ class FakeVideoReceiveStream final : public webrtc::VideoReceiveStream {
   bool IsReceiving() const;
 
-  void InjectFrame(const webrtc::VideoFrame& frame, int time_to_render_ms);
+  void InjectFrame(const webrtc::VideoFrame& frame);
 
   void SetStats(const webrtc::VideoReceiveStream::Stats& stats);


@@ -2358,9 +2358,8 @@ void WebRtcVideoChannel2::WebRtcVideoReceiveStream::ClearDecoders(
   allocated_decoders->clear();
 }
 
-void WebRtcVideoChannel2::WebRtcVideoReceiveStream::RenderFrame(
-    const webrtc::VideoFrame& frame,
-    int time_to_render_ms) {
+void WebRtcVideoChannel2::WebRtcVideoReceiveStream::OnFrame(
+    const webrtc::VideoFrame& frame) {
   rtc::CritScope crit(&sink_lock_);
 
   if (first_frame_timestamp_ < 0)
@@ -2387,10 +2386,6 @@ void WebRtcVideoChannel2::WebRtcVideoReceiveStream::RenderFrame(
   sink_->OnFrame(render_frame);
 }
 
-bool WebRtcVideoChannel2::WebRtcVideoReceiveStream::IsTextureSupported() const {
-  return true;
-}
-
 bool WebRtcVideoChannel2::WebRtcVideoReceiveStream::SmoothsRenderedFrames()
     const {
   return disable_prerenderer_smoothing_;


@@ -417,9 +417,7 @@ class WebRtcVideoChannel2 : public VideoMediaChannel, public webrtc::Transport {
                            webrtc::RtcpMode rtcp_mode);
     void SetRecvParameters(const ChangedRecvParameters& recv_params);
 
-    void RenderFrame(const webrtc::VideoFrame& frame,
-                     int time_to_render_ms) override;
-    bool IsTextureSupported() const override;
+    void OnFrame(const webrtc::VideoFrame& frame) override;
     bool SmoothsRenderedFrames() const override;
     bool IsDefaultStream() const;


@@ -2068,7 +2068,7 @@ TEST_F(WebRtcVideoChannel2Test, EstimatesNtpStartTimeCorrectly) {
   video_frame.set_timestamp(kInitialTimestamp);
   // Initial NTP time is not available on the first frame, but should still be
   // able to be estimated.
-  stream->InjectFrame(video_frame, 0);
+  stream->InjectFrame(video_frame);
 
   EXPECT_EQ(1, renderer.num_rendered_frames());
@@ -2077,7 +2077,7 @@ TEST_F(WebRtcVideoChannel2Test, EstimatesNtpStartTimeCorrectly) {
   // here.
   video_frame.set_timestamp(kFrameOffsetMs * 90 - 1);
   video_frame.set_ntp_time_ms(kInitialNtpTimeMs + kFrameOffsetMs);
-  stream->InjectFrame(video_frame, 0);
+  stream->InjectFrame(video_frame);
 
   EXPECT_EQ(2, renderer.num_rendered_frames());


@@ -69,8 +69,7 @@ void GlRenderer::ResizeVideo(size_t width, size_t height) {
                GL_UNSIGNED_INT_8_8_8_8, static_cast<GLvoid*>(buffer_));
 }
 
-void GlRenderer::RenderFrame(const webrtc::VideoFrame& frame,
-                             int /*render_delay_ms*/) {
+void GlRenderer::OnFrame(const webrtc::VideoFrame& frame) {
   assert(is_init_);
 
   if (static_cast<size_t>(frame.width()) != width_ ||


@@ -26,8 +26,7 @@ namespace test {
 
 class GlRenderer : public VideoRenderer {
  public:
-  void RenderFrame(const webrtc::VideoFrame& frame,
-                   int time_to_render_ms) override;
+  void OnFrame(const webrtc::VideoFrame& frame) override;
 
  protected:
   GlRenderer();


@@ -144,8 +144,7 @@ void GlxRenderer::Resize(size_t width, size_t height) {
   XConfigureWindow(display_, window_, CWWidth | CWHeight, &wc);
 }
 
-void GlxRenderer::RenderFrame(const webrtc::VideoFrame& frame,
-                              int /*render_delay_ms*/) {
+void GlxRenderer::OnFrame(const webrtc::VideoFrame& frame) {
   if (static_cast<size_t>(frame.width()) != width_ ||
       static_cast<size_t>(frame.height()) != height_) {
     Resize(static_cast<size_t>(frame.width()),
@@ -168,7 +167,7 @@ void GlxRenderer::RenderFrame(const webrtc::VideoFrame& frame,
     }
   }
 
-  GlRenderer::RenderFrame(frame, 0);
+  GlRenderer::OnFrame(frame);
   glXSwapBuffers(display_, window_);
 
   if (!glXMakeCurrent(display_, None, NULL)) {


@@ -26,8 +26,7 @@ class GlxRenderer : public GlRenderer {
               size_t height);
   virtual ~GlxRenderer();
 
-  void RenderFrame(const webrtc::VideoFrame& frame, int delta) override;
-  bool IsTextureSupported() const override { return false; }
+  void OnFrame(const webrtc::VideoFrame& frame) override;
 
  private:
   GlxRenderer(size_t width, size_t height);


@@ -27,8 +27,7 @@ class MacRenderer : public GlRenderer {
   bool Init(const char* window_title, int width, int height);
 
   // Implements GlRenderer.
-  void RenderFrame(const VideoFrame& frame, int delta) override;
-  bool IsTextureSupported() const override { return false; }
+  void OnFrame(const VideoFrame& frame) override;
 
  private:
   CocoaWindow* window_;


@@ -125,9 +125,9 @@ bool MacRenderer::Init(const char* window_title, int width, int height) {
   return true;
 }
 
-void MacRenderer::RenderFrame(const VideoFrame& frame, int /*delta*/) {
+void MacRenderer::OnFrame(const VideoFrame& frame) {
   [window_ makeCurrentContext];
-  GlRenderer::RenderFrame(frame, 0);
+  GlRenderer::OnFrame(frame);
 }
 
 }  // test


@@ -17,9 +17,7 @@ namespace webrtc {
 namespace test {
 
 class NullRenderer : public VideoRenderer {
-  void RenderFrame(const VideoFrame& video_frame,
-                   int time_to_render_ms) override {}
-  bool IsTextureSupported() const override { return false; }
+  void OnFrame(const VideoFrame& video_frame) override {}
 };
 
 VideoRenderer* VideoRenderer::Create(const char* window_title,


@@ -191,8 +191,7 @@ void D3dRenderer::Resize(size_t width, size_t height) {
   vertex_buffer_->Unlock();
 }
 
-void D3dRenderer::RenderFrame(const webrtc::VideoFrame& frame,
-                              int /*render_delay_ms*/) {
+void D3dRenderer::OnFrame(const webrtc::VideoFrame& frame) {
   if (static_cast<size_t>(frame.width()) != width_ ||
       static_cast<size_t>(frame.height()) != height_) {
     Resize(static_cast<size_t>(frame.width()),


@@ -27,8 +27,7 @@ class D3dRenderer : public VideoRenderer {
               size_t height);
   virtual ~D3dRenderer();
 
-  void RenderFrame(const webrtc::VideoFrame& frame, int delta) override;
-  bool IsTextureSupported() const override { return false; }
+  void OnFrame(const webrtc::VideoFrame& frame) override;
 
  private:
   D3dRenderer(size_t width, size_t height);


@@ -127,12 +127,7 @@ TEST_F(EndToEndTest, RendersSingleDelayedFrame) {
    public:
     Renderer() : event_(false, false) {}
 
-    void RenderFrame(const VideoFrame& video_frame,
-                     int /*time_to_render_ms*/) override {
-      event_.Set();
-    }
-
-    bool IsTextureSupported() const override { return false; }
+    void OnFrame(const VideoFrame& video_frame) override { event_.Set(); }
 
     bool Wait() { return event_.Wait(kDefaultTimeoutMs); }
@@ -195,11 +190,7 @@ TEST_F(EndToEndTest, TransmitsFirstFrame) {
    public:
     Renderer() : event_(false, false) {}
 
-    void RenderFrame(const VideoFrame& video_frame,
-                     int /*time_to_render_ms*/) override {
-      event_.Set();
-    }
-    bool IsTextureSupported() const override { return false; }
+    void OnFrame(const VideoFrame& video_frame) override { event_.Set(); }
 
     bool Wait() { return event_.Wait(kDefaultTimeoutMs); }
@@ -272,15 +263,12 @@ TEST_F(EndToEndTest, SendsAndReceivesVP9) {
       (*receive_configs)[0].decoders[0].decoder = decoder_.get();
     }
 
-    void RenderFrame(const VideoFrame& video_frame,
-                     int time_to_render_ms) override {
+    void OnFrame(const VideoFrame& video_frame) override {
       const int kRequiredFrames = 500;
       if (++frame_counter_ == kRequiredFrames)
         observation_complete_.Set();
     }
 
-    bool IsTextureSupported() const override { return false; }
-
    private:
     std::unique_ptr<webrtc::VideoEncoder> encoder_;
     std::unique_ptr<webrtc::VideoDecoder> decoder_;
@@ -328,15 +316,12 @@ TEST_F(EndToEndTest, SendsAndReceivesH264) {
       (*receive_configs)[0].decoders[0].decoder = decoder_.get();
     }
 
-    void RenderFrame(const VideoFrame& video_frame,
-                     int time_to_render_ms) override {
+    void OnFrame(const VideoFrame& video_frame) override {
      const int kRequiredFrames = 500;
       if (++frame_counter_ == kRequiredFrames)
         observation_complete_.Set();
     }
 
-    bool IsTextureSupported() const override { return false; }
-
    private:
     std::unique_ptr<webrtc::VideoEncoder> encoder_;
     std::unique_ptr<webrtc::VideoDecoder> decoder_;
@@ -521,8 +506,7 @@ TEST_F(EndToEndTest, CanReceiveFec) {
       return SEND_PACKET;
     }
 
-    void RenderFrame(const VideoFrame& video_frame,
-                     int time_to_render_ms) override {
+    void OnFrame(const VideoFrame& video_frame) override {
       rtc::CritScope lock(&crit_);
       // Rendering frame with timestamp of packet that was dropped -> FEC
       // protection worked.
@@ -530,8 +514,6 @@ TEST_F(EndToEndTest, CanReceiveFec) {
         observation_complete_.Set();
     }
 
-    bool IsTextureSupported() const override { return false; }
-
     enum {
       kFirstPacket,
       kDropEveryOtherPacketUntilFec,
@@ -850,16 +832,13 @@ TEST_F(EndToEndTest, UsesFrameCallbacks) {
    public:
     Renderer() : event_(false, false) {}
 
-    void RenderFrame(const VideoFrame& video_frame,
-                     int /*time_to_render_ms*/) override {
+    void OnFrame(const VideoFrame& video_frame) override {
       EXPECT_EQ(0, *video_frame.buffer(kYPlane))
           << "Rendered frame should have zero luma which is applied by the "
             "pre-render callback.";
       event_.Set();
     }
 
-    bool IsTextureSupported() const override { return false; }
-
     bool Wait() { return event_.Wait(kDefaultTimeoutMs); }
     rtc::Event event_;
   } renderer;
@@ -997,8 +976,7 @@ void EndToEndTest::ReceivesPliAndRecovers(int rtp_history_ms) {
       return SEND_PACKET;
     }
 
-    void RenderFrame(const VideoFrame& video_frame,
-                     int time_to_render_ms) override {
+    void OnFrame(const VideoFrame& video_frame) override {
      rtc::CritScope lock(&crit_);
      if (received_pli_ &&
          video_frame.timestamp() > highest_dropped_timestamp_) {
@@ -1008,8 +986,6 @@ void EndToEndTest::ReceivesPliAndRecovers(int rtp_history_ms) {
       frames_to_drop_ = kPacketsToDrop;
     }
 
-    bool IsTextureSupported() const override { return false; }
-
     void ModifyVideoConfigs(
         VideoSendStream::Config* send_config,
         std::vector<VideoReceiveStream::Config>* receive_configs,
@@ -1323,8 +1299,7 @@ TEST_F(EndToEndTest, SendsAndReceivesMultipleStreams) {
           frame_generator_(frame_generator),
           done_(false, false) {}
 
-    void RenderFrame(const VideoFrame& video_frame,
-                     int time_to_render_ms) override {
+    void OnFrame(const VideoFrame& video_frame) override {
       EXPECT_EQ(settings_.width, video_frame.width());
       EXPECT_EQ(settings_.height, video_frame.height());
       (*frame_generator_)->Stop();
@@ -1333,8 +1308,6 @@ TEST_F(EndToEndTest, SendsAndReceivesMultipleStreams) {
 
     uint32_t Ssrc() { return ssrc_; }
 
-    bool IsTextureSupported() const override { return false; }
-
     bool Wait() { return done_.Wait(kDefaultTimeoutMs); }
 
    private:


@@ -152,10 +152,9 @@ class FileRenderPassthrough : public VideoRenderer {
   }
 
  private:
-  void RenderFrame(const VideoFrame& video_frame,
-                   int time_to_render_ms) override {
+  void OnFrame(const VideoFrame& video_frame) override {
     if (renderer_ != nullptr)
-      renderer_->RenderFrame(video_frame, time_to_render_ms);
+      renderer_->OnFrame(video_frame);
     if (basename_.empty())
       return;
     if (last_width_ != video_frame.width() ||
@@ -182,8 +181,6 @@ class FileRenderPassthrough : public VideoRenderer {
     PrintVideoFrame(video_frame, file_);
   }
 
-  bool IsTextureSupported() const override { return false; }
-
   const std::string basename_;
   VideoRenderer* const renderer_;
   FILE* file_;


@@ -47,7 +47,7 @@ void VideoCaptureInput::IncomingCapturedFrame(const VideoFrame& video_frame) {
   // TODO(pbos): Remove local rendering, it should be handled by the client code
   // if required.
   if (local_renderer_)
-    local_renderer_->RenderFrame(video_frame, 0);
+    local_renderer_->OnFrame(video_frame);
 
   stats_proxy_->OnIncomingFrame(video_frame.width(), video_frame.height());


@@ -205,8 +205,7 @@ class VideoAnalyzer : public PacketReceiver,
     encoded_frame_size_.AddSample(frame.length_);
   }
 
-  void RenderFrame(const VideoFrame& video_frame,
-                   int time_to_render_ms) override {
+  void OnFrame(const VideoFrame& video_frame) override {
     int64_t render_time_ms =
         Clock::GetRealTimeClock()->CurrentNtpInMilliseconds();
@@ -246,8 +245,6 @@ class VideoAnalyzer : public PacketReceiver,
     last_rendered_frame_ = video_frame;
   }
 
-  bool IsTextureSupported() const override { return false; }
-
   void Wait() {
     // Frame comparisons can be very expensive. Wait for test to be done, but
     // at time-out check if frames_processed is going up. If so, give it more


@@ -388,14 +388,8 @@ int VideoReceiveStream::RenderFrame(const uint32_t /*stream_id*/,
   if (vie_sync_.GetStreamSyncOffsetInMs(video_frame, &sync_offset_ms))
     stats_proxy_.OnSyncOffsetUpdated(sync_offset_ms);
 
-  // TODO(pbos): Wire up config_.render->IsTextureSupported() and convert if not
-  // supported. Or provide methods for converting a texture frame in
-  // VideoFrame.
   if (config_.renderer != nullptr)
-    config_.renderer->RenderFrame(
-        video_frame,
-        video_frame.render_time_ms() - clock_->TimeInMilliseconds());
+    config_.renderer->OnFrame(video_frame);
 
   stats_proxy_.OnRenderedFrame(video_frame);


@@ -11,20 +11,14 @@
 #ifndef WEBRTC_VIDEO_RENDERER_H_
 #define WEBRTC_VIDEO_RENDERER_H_
 
+#include "webrtc/media/base/videosinkinterface.h"
+
 namespace webrtc {
 
 class VideoFrame;
 
-class VideoRenderer {
+class VideoRenderer : public rtc::VideoSinkInterface<VideoFrame> {
  public:
-  // This function should return as soon as possible and not block until it's
-  // time to render the frame.
-  // TODO(mflodman) Remove time_to_render_ms when VideoFrame contains NTP.
-  virtual void RenderFrame(const VideoFrame& video_frame,
-                           int time_to_render_ms) = 0;
-  virtual bool IsTextureSupported() const = 0;
-
   // This function returns true if WebRTC should not delay frames for
   // smoothness. In general, this case means the renderer can schedule frames to
   // optimize smoothness.
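Once renderers are attached via rtc::VideoSourceInterface (the next step described in the commit message), the smoothing preference can travel with the sink registration instead of a virtual method. A speculative sketch of that follow-up; the smoothed_frames field is only proposed here and does not yet exist in rtc::VideoSinkWants, and header paths are assumed from this revision:

#include "webrtc/media/base/videosinkinterface.h"
#include "webrtc/media/base/videosourceinterface.h"

namespace webrtc {
class VideoFrame;
}

// Speculative: wires up the proposed rtc::VideoSinkWants::smoothed_frames
// flag, which would replace VideoRenderer::SmoothsRenderedFrames().
void AttachSmoothedRenderer(
    rtc::VideoSourceInterface<webrtc::VideoFrame>* source,
    rtc::VideoSinkInterface<webrtc::VideoFrame>* renderer) {
  rtc::VideoSinkWants wants;
  wants.smoothed_frames = true;  // Proposed flag, not yet in VideoSinkWants.
  source->AddOrUpdateSink(renderer, wants);
}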