Deleted VideoCapturer::screencast_max_pixels, together with
VideoChannel::GetScreencastMaxPixels and VideoChannel::GetScreencastFps.

Unused in webrtc, and also unused in everything indexed by Google and Chromium code search, with the exception of the magicflute plugin, which I'm told doesn't matter.

Review URL: https://codereview.webrtc.org/1532133002

Cr-Commit-Position: refs/heads/master@{#11108}
nisse
2015-12-21 13:18:13 -08:00
committed by Commit bot
parent db8cf50c59
commit e6bf587259
5 changed files with 5 additions and 95 deletions

View File

@@ -121,7 +121,6 @@ void VideoCapturer::Construct() {
SignalFrameCaptured.connect(this, &VideoCapturer::OnFrameCaptured);
scaled_width_ = 0;
scaled_height_ = 0;
screencast_max_pixels_ = 0;
muted_ = false;
black_frame_count_down_ = kNumBlackFramesOnMute;
enable_video_adapter_ = true;
@@ -360,16 +359,11 @@ void VideoCapturer::OnFrameCaptured(VideoCapturer*,
if (IsScreencast()) {
int scaled_width, scaled_height;
if (screencast_max_pixels_ > 0) {
ComputeScaleMaxPixels(captured_frame->width, captured_frame->height,
screencast_max_pixels_, &scaled_width, &scaled_height);
} else {
int desired_screencast_fps = capture_format_.get() ?
VideoFormat::IntervalToFps(capture_format_->interval) :
kDefaultScreencastFps;
ComputeScale(captured_frame->width, captured_frame->height,
desired_screencast_fps, &scaled_width, &scaled_height);
}
if (FOURCC_ARGB == captured_frame->fourcc &&
(scaled_width != captured_frame->width ||
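
The branch deleted above is the whole feature on the capture path: a positive screencast_max_pixels_ routed the frame through ComputeScaleMaxPixels with that hard cap, while zero (the default) fell through to the fps-based ComputeScale path that is all that remains after this CL. Below is a standalone, simplified sketch of that decision; the square-root scaling is illustrative only (the real ComputeScaleMaxPixels/ComputeScale pick binary-fraction scale factors), and the 1280*720 cap and the 2560x1440 input are made-up values.

#include <algorithm>
#include <cmath>
#include <cstdio>

// Illustrative stand-in for ComputeScaleMaxPixels(): shrink width x height so
// the frame carries at most max_pixels pixels, keeping the aspect ratio.
static void ScaleToMaxPixels(int width, int height, int max_pixels,
                             int* scaled_width, int* scaled_height) {
  const double pixels = static_cast<double>(width) * height;
  const double factor =
      pixels > max_pixels ? std::sqrt(max_pixels / pixels) : 1.0;
  *scaled_width = std::max(1, static_cast<int>(width * factor));
  *scaled_height = std::max(1, static_cast<int>(height * factor));
}

int main() {
  const int screencast_max_pixels = 1280 * 720;  // hypothetical embedder cap
  int scaled_width = 0;
  int scaled_height = 0;
  if (screencast_max_pixels > 0) {
    // Pre-CL branch: honor the explicit cap.
    ScaleToMaxPixels(2560, 1440, screencast_max_pixels,
                     &scaled_width, &scaled_height);
  } else {
    // Post-CL behavior: only the fps-driven ComputeScale() path is left;
    // the sketch just passes the frame through here to stay short.
    scaled_width = 2560;
    scaled_height = 1440;
  }
  std::printf("scaled to %dx%d\n", scaled_width, scaled_height);
  return 0;
}

With the cap set to 1280*720, the 2560x1440 input comes out as 1280x720; with the cap at zero, this sketch leaves the frame untouched.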

View File

@@ -262,17 +262,6 @@ class VideoCapturer
sigslot::signal2<VideoCapturer*, const VideoFrame*,
sigslot::multi_threaded_local> SignalVideoFrame;
// If 'screencast_max_pixels' is set greater than zero, screencasts will be
// scaled to be no larger than this value.
// If set to zero, the max pixels will be limited to
// Retina MacBookPro 15" resolution of 2880 x 1800.
// For high fps, maximum pixels limit is set based on common 24" monitor
// resolution of 2048 x 1280.
int screencast_max_pixels() const { return screencast_max_pixels_; }
void set_screencast_max_pixels(int p) {
screencast_max_pixels_ = std::max(0, p);
}
// If true, run video adaptation. By default, video adaptation is enabled
// and users must call video_adapter()->OnOutputFormatRequest()
// to receive frames.
@@ -369,7 +358,6 @@ class VideoCapturer
bool square_pixel_aspect_ratio_; // Enable scaling to square pixels.
int scaled_width_; // Current output size from ComputeScale.
int scaled_height_;
int screencast_max_pixels_; // Downscale screencasts further if requested.
bool muted_;
int black_frame_count_down_;
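
The comment removed in this hunk is the only place the fallback limits were spelled out: with the knob left at zero, screencasts were bounded by roughly a 2880x1800 budget (Retina MacBookPro 15") at normal fps and 2048x1280 (common 24" monitor) at high fps. A minimal standalone sketch of those defaults follows; the 10 fps cutoff is an assumption for illustration, since the deleted comment names the resolutions but not the threshold.

#include <cstdio>

// Pixel budgets quoted in the deleted comment. Only the two resolutions come
// from the source; the fps cutoff below is an assumed placeholder.
static int DefaultScreencastPixelBudget(int fps) {
  const int kLowFpsBudget = 2880 * 1800;   // Retina MacBookPro 15"
  const int kHighFpsBudget = 2048 * 1280;  // common 24" monitor
  const int kAssumedHighFpsCutoff = 10;
  return fps >= kAssumedHighFpsCutoff ? kHighFpsBudget : kLowFpsBudget;
}

int main() {
  std::printf("5 fps  -> budget %d pixels\n", DefaultScreencastPixelBudget(5));
  std::printf("30 fps -> budget %d pixels\n", DefaultScreencastPixelBudget(30));
  return 0;
}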

View File

@@ -196,39 +196,6 @@ TEST_F(VideoCapturerTest, CameraOffOnMute) {
EXPECT_EQ(33, video_frames_received());
}
TEST_F(VideoCapturerTest, ScreencastScaledMaxPixels) {
capturer_.SetScreencast(true);
int kWidth = 1280;
int kHeight = 720;
// Screencasts usually have large weird dimensions and are ARGB.
std::vector<cricket::VideoFormat> formats;
formats.push_back(cricket::VideoFormat(kWidth, kHeight,
cricket::VideoFormat::FpsToInterval(5), cricket::FOURCC_ARGB));
formats.push_back(cricket::VideoFormat(2 * kWidth, 2 * kHeight,
cricket::VideoFormat::FpsToInterval(5), cricket::FOURCC_ARGB));
capturer_.ResetSupportedFormats(formats);
EXPECT_EQ(0, capturer_.screencast_max_pixels());
EXPECT_EQ(cricket::CS_RUNNING, capturer_.Start(cricket::VideoFormat(
2 * kWidth,
2 * kHeight,
cricket::VideoFormat::FpsToInterval(30),
cricket::FOURCC_ARGB)));
EXPECT_TRUE(capturer_.IsRunning());
EXPECT_EQ(0, renderer_.num_rendered_frames());
renderer_.SetSize(2 * kWidth, 2 * kHeight, 0);
EXPECT_TRUE(capturer_.CaptureFrame());
EXPECT_EQ(1, renderer_.num_rendered_frames());
capturer_.set_screencast_max_pixels(kWidth * kHeight);
renderer_.SetSize(kWidth, kHeight, 0);
EXPECT_TRUE(capturer_.CaptureFrame());
EXPECT_EQ(2, renderer_.num_rendered_frames());
}
TEST_F(VideoCapturerTest, ScreencastScaledOddWidth) {
capturer_.SetScreencast(true);

View File

@@ -114,15 +114,6 @@ struct DataChannelErrorMessageData : public rtc::MessageData {
DataMediaChannel::Error error;
};
struct VideoChannel::ScreencastDetailsData {
explicit ScreencastDetailsData(uint32_t s)
: ssrc(s), fps(0), screencast_max_pixels(0) {}
uint32_t ssrc;
int fps;
int screencast_max_pixels;
};
static const char* PacketType(bool rtcp) {
return (!rtcp) ? "RTP" : "RTCP";
}
@@ -1716,20 +1707,6 @@ bool VideoChannel::IsScreencasting() {
return InvokeOnWorker(Bind(&VideoChannel::IsScreencasting_w, this));
}
int VideoChannel::GetScreencastFps(uint32_t ssrc) {
ScreencastDetailsData data(ssrc);
worker_thread()->Invoke<void>(Bind(
&VideoChannel::GetScreencastDetails_w, this, &data));
return data.fps;
}
int VideoChannel::GetScreencastMaxPixels(uint32_t ssrc) {
ScreencastDetailsData data(ssrc);
worker_thread()->Invoke<void>(Bind(
&VideoChannel::GetScreencastDetails_w, this, &data));
return data.screencast_max_pixels;
}
bool VideoChannel::SendIntraFrame() {
worker_thread()->Invoke<void>(Bind(
&VideoMediaChannel::SendIntraFrame, media_channel()));
@@ -1942,18 +1919,6 @@ bool VideoChannel::IsScreencasting_w() const {
return !screencast_capturers_.empty();
}
void VideoChannel::GetScreencastDetails_w(
ScreencastDetailsData* data) const {
ScreencastMap::const_iterator iter = screencast_capturers_.find(data->ssrc);
if (iter == screencast_capturers_.end()) {
return;
}
VideoCapturer* capturer = iter->second;
const VideoFormat* video_format = capturer->GetCaptureFormat();
data->fps = VideoFormat::IntervalToFps(video_format->interval);
data->screencast_max_pixels = capturer->screencast_max_pixels();
}
void VideoChannel::OnScreencastWindowEvent_s(uint32_t ssrc,
rtc::WindowEvent we) {
ASSERT(signaling_thread() == rtc::Thread::Current());
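
Both removed getters were thin synchronous wrappers: each blocked on a worker-thread Invoke() that filled a ScreencastDetailsData from the capturer's current format, then returned a single field. For context, a hypothetical pre-CL call site is sketched below; it is not code from this change (includes omitted, `channel` and `ssrc` assumed to come from the embedder), and it stops compiling once this CL lands.

// Hypothetical embedder code against the pre-CL API. Each getter is a
// blocking round-trip to the worker thread, so this does two hops for
// one capturer.
void LogScreencastDetails(cricket::VideoChannel* channel, uint32_t ssrc) {
  int fps = channel->GetScreencastFps(ssrc);               // removed by this CL
  int max_pixels = channel->GetScreencastMaxPixels(ssrc);  // removed by this CL
  LOG(LS_INFO) << "screencast " << ssrc << ": " << fps << " fps, max "
               << max_pixels << " pixels";
}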

View File

@@ -467,8 +467,6 @@ class VideoChannel : public BaseChannel {
// True if we've added a screencast. Doesn't matter if the capturer
// has been started or not.
bool IsScreencasting();
int GetScreencastFps(uint32_t ssrc);
int GetScreencastMaxPixels(uint32_t ssrc);
// Get statistics about the current media session.
bool GetStats(VideoMediaInfo* stats);
@@ -487,7 +485,6 @@
private:
typedef std::map<uint32_t, VideoCapturer*> ScreencastMap;
struct ScreencastDetailsData;
// overrides from BaseChannel
virtual void ChangeState();
@@ -504,7 +501,6 @@
bool RemoveScreencast_w(uint32_t ssrc);
void OnScreencastWindowEvent_s(uint32_t ssrc, rtc::WindowEvent we);
bool IsScreencasting_w() const;
void GetScreencastDetails_w(ScreencastDetailsData* d) const;
bool GetStats_w(VideoMediaInfo* stats);
virtual void OnMessage(rtc::Message* pmsg);