Delete test VideoSendStreamTest.CapturesTextureAndVideoFrames.

This test was added in
https://webrtc-codereview.appspot.com/15789004/, which appears to date
from early in the work to support capture and encode with native
textures on Android. Since then, the VideoFrame/VideoFrameBuffer
interfaces have emerged and texture support has changed substantially
(native frames are now represented as a VideoFrameBuffer with type
kNative).

The test only exercises the part of the video pipeline up to
VideoStreamEncoder::pre_encode_callback_, and that part of the pipeline
does not care about the type of the underlying VideoFrameBuffer. That
gives the test little value, and since it blocks removal of
pre_encode_callback, delete this old test.

Bug: webrtc:9864, chromium:362437
Change-Id: I2eb6c4c48557883309fd6431bc25528441c83078
Reviewed-on: https://webrtc-review.googlesource.com/c/115411
Reviewed-by: Erik Språng <sprang@webrtc.org>
Commit-Queue: Niels Möller <nisse@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#26086}
Author: Niels Möller
Date: 2018-12-21 11:49:18 +01:00
Committed by: Commit Bot
Parent: 91085072fa
Commit: cf85e24661

@@ -78,8 +78,6 @@ enum VideoFormat {
};
} // namespace
void ExpectEqualFramesVector(const std::vector<VideoFrame>& frames1,
                             const std::vector<VideoFrame>& frames2);
VideoFrame CreateVideoFrame(int width, int height, uint8_t data);
class VideoSendStreamTest : public test::CallTest,
@@ -2226,91 +2224,6 @@ TEST_P(VideoSendStreamTest, VideoSendStreamUpdateActiveSimulcastLayers) {
    DestroyCalls();
  });
}
TEST_P(VideoSendStreamTest, CapturesTextureAndVideoFrames) {
  class FrameObserver : public rtc::VideoSinkInterface<VideoFrame> {
   public:
    void OnFrame(const VideoFrame& video_frame) override {
      output_frames_.push_back(video_frame);
      output_frame_event_.Set();
    }

    void WaitOutputFrame() {
      const int kWaitFrameTimeoutMs = 3000;
      EXPECT_TRUE(output_frame_event_.Wait(kWaitFrameTimeoutMs))
          << "Timeout while waiting for output frames.";
    }

    const std::vector<VideoFrame>& output_frames() const {
      return output_frames_;
    }

   private:
    // Delivered output frames.
    std::vector<VideoFrame> output_frames_;

    // Indicate an output frame has arrived.
    rtc::Event output_frame_event_;
  };

  test::NullTransport transport;
  FrameObserver observer;
  std::vector<VideoFrame> input_frames;

  task_queue_.SendTask([this, &transport, &observer, &input_frames]() {
    // Initialize send stream.
    CreateSenderCall();
    CreateSendConfig(1, 0, 0, &transport);
    GetVideoSendConfig()->pre_encode_callback = &observer;
    CreateVideoStreams();

    // Prepare five input frames. Send ordinary VideoFrame and texture frames
    // alternatively.
    int width = 168;
    int height = 132;
    input_frames.push_back(test::FakeNativeBuffer::CreateFrame(
        width, height, 1, 1, kVideoRotation_0));
    input_frames.push_back(test::FakeNativeBuffer::CreateFrame(
        width, height, 2, 2, kVideoRotation_0));
    input_frames.push_back(CreateVideoFrame(width, height, 3));
    input_frames.push_back(CreateVideoFrame(width, height, 4));
    input_frames.push_back(test::FakeNativeBuffer::CreateFrame(
        width, height, 5, 5, kVideoRotation_0));

    GetVideoSendStream()->Start();
    test::FrameForwarder forwarder;
    GetVideoSendStream()->SetSource(&forwarder,
                                    DegradationPreference::MAINTAIN_FRAMERATE);
    for (size_t i = 0; i < input_frames.size(); i++) {
      forwarder.IncomingCapturedFrame(input_frames[i]);
      // Wait until the output frame is received before sending the next input
      // frame. Or the previous input frame may be replaced without delivering.
      observer.WaitOutputFrame();
    }
    GetVideoSendStream()->Stop();
    GetVideoSendStream()->SetSource(nullptr,
                                    DegradationPreference::MAINTAIN_FRAMERATE);
  });

  // Test if the input and output frames are the same. render_time_ms and
  // timestamp are not compared because capturer sets those values.
  ExpectEqualFramesVector(input_frames, observer.output_frames());

  task_queue_.SendTask([this]() {
    DestroyStreams();
    DestroyCalls();
  });
}
void ExpectEqualFramesVector(const std::vector<VideoFrame>& frames1,
                             const std::vector<VideoFrame>& frames2) {
  EXPECT_EQ(frames1.size(), frames2.size());
  for (size_t i = 0; i < std::min(frames1.size(), frames2.size()); ++i)
    // Compare frame buffers, since we don't care about differing timestamps.
    EXPECT_TRUE(test::FrameBufsEqual(frames1[i].video_frame_buffer(),
                                     frames2[i].video_frame_buffer()));
}

VideoFrame CreateVideoFrame(int width, int height, uint8_t data) {
  const int kSizeY = width * height * 2;