Delete VideoSendStream::EnableEncodedFrameRecording.

Its use in VideoQualityTest is replaced by wrapping the encoder in a frame-dumping encoder class.

Bug: None
Change-Id: I5c5519e147ca7ddb97696b0d6958a8a1f5cc6e83
Reviewed-on: https://webrtc-review.googlesource.com/94152
Commit-Queue: Niels Moller <nisse@webrtc.org>
Reviewed-by: Erik Språng <sprang@webrtc.org>
Reviewed-by: Magnus Jedvert <magjed@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#24533}
Niels Möller authored on 2018-09-03 14:21:44 +02:00 (committed by Commit Bot)
parent 3bc0166a4e
commit 1beef1a97a
11 changed files with 119 additions and 124 deletions
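For orientation, here is a minimal sketch (not part of the change itself) of how a test can now dump encoded frames by wrapping the encoder it hands to the stream, mirroring the FrameDumpingEncoder and VideoQualityTest::CreateVideoEncoder changes below; the function name, factory parameter, output file name, and include paths are illustrative assumptions.

#include <memory>
#include <utility>
#include <vector>

#include "absl/memory/memory.h"
#include "api/video_codecs/sdp_video_format.h"
#include "api/video_codecs/video_encoder.h"
#include "api/video_codecs/video_encoder_factory.h"
#include "rtc_base/platform_file.h"

// FrameDumpingEncoder is the wrapper class added in the diff below.
std::unique_ptr<webrtc::VideoEncoder> CreateDumpingEncoder(
    webrtc::VideoEncoderFactory* factory,
    const webrtc::SdpVideoFormat& format) {
  std::unique_ptr<webrtc::VideoEncoder> encoder =
      factory->CreateVideoEncoder(format);
  // Wrap the real encoder; the wrapper forwards every VideoEncoder call and
  // additionally writes each encoded frame to one IVF file per simulcast
  // layer, which is what EnableEncodedFrameRecording used to do internally.
  return absl::make_unique<FrameDumpingEncoder>(
      std::move(encoder),
      std::vector<rtc::PlatformFile>(
          {rtc::CreatePlatformFile("encoded_frames.0.send.1.ivf")}));
}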

View File

@@ -178,22 +178,6 @@ class VideoSendStream {
virtual Stats GetStats() = 0;
// Takes ownership of each file, is responsible for closing them later.
// Calling this method will close and finalize any current logs.
// Some codecs produce multiple streams (VP8 only at present), each of these
// streams will log to a separate file. kMaxSimulcastStreams in common_types.h
// gives the max number of such streams. If there is no file for a stream, or
// the file is rtc::kInvalidPlatformFileValue, frames from that stream will
// not be logged.
// If a frame to be written would make the log too large the write fails and
// the log is closed and finalized. A |byte_limit| of 0 means no limit.
virtual void EnableEncodedFrameRecording(
const std::vector<rtc::PlatformFile>& files,
size_t byte_limit) = 0;
inline void DisableEncodedFrameRecording() {
EnableEncodedFrameRecording(std::vector<rtc::PlatformFile>(), 0);
}
protected:
virtual ~VideoSendStream() {}
};
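For reference, a minimal sketch of how the removed API was used, modeled on LogEndToEndTest and VideoQualityTest::StartEncodedFrameLogs further down in this change; the file names and byte limit are illustrative, and the include paths reflect WebRTC at roughly this revision.

#include <vector>

#include "call/video_send_stream.h"
#include "rtc_base/platform_file.h"

void StartSendLogs(webrtc::VideoSendStream* send_stream) {
  // One file per simulcast stream; a stream without a file (or given
  // rtc::kInvalidPlatformFileValue) is simply not logged. A byte_limit of 0
  // means no limit; exceeding the limit closes and finalizes the log.
  send_stream->EnableEncodedFrameRecording(
      std::vector<rtc::PlatformFile>(
          {rtc::CreatePlatformFile("send.1.ivf"),
           rtc::CreatePlatformFile("send.2.ivf"),
           rtc::CreatePlatformFile("send.3.ivf")}),
      /*byte_limit=*/100000000);
}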

View File

@@ -218,13 +218,6 @@ webrtc::VideoSendStream::Stats FakeVideoSendStream::GetStats() {
return stats_;
}
void FakeVideoSendStream::EnableEncodedFrameRecording(
const std::vector<rtc::PlatformFile>& files,
size_t byte_limit) {
for (rtc::PlatformFile file : files)
rtc::ClosePlatformFile(file);
}
void FakeVideoSendStream::ReconfigureVideoEncoder(
webrtc::VideoEncoderConfig config) {
int width, height;

View File

@@ -143,9 +143,6 @@ class FakeVideoSendStream final
return num_encoder_reconfigurations_;
}
void EnableEncodedFrameRecording(const std::vector<rtc::PlatformFile>& files,
size_t byte_limit) override;
bool resolution_scaling_enabled() const {
return resolution_scaling_enabled_;
}

View File

@@ -95,6 +95,9 @@ rtc_static_library("video") {
"../modules/rtp_rtcp",
"../modules/utility",
"../modules/video_coding",
# TODO(nisse): Drop this dep, when EnableEncodedFrameRecording is
# deleted also from VideoReceiveStream.
"../modules/video_coding:video_coding_utility",
"../modules/video_processing",
"../rtc_base:rtc_base_approved",
@@ -217,6 +220,7 @@ if (rtc_include_tests) {
"../modules/audio_mixer:audio_mixer_impl",
"../modules/rtp_rtcp",
"../modules/video_coding:video_coding",
"../modules/video_coding:video_coding_utility",
"../modules/video_coding:webrtc_h264",
"../modules/video_coding:webrtc_multiplex",
"../modules/video_coding:webrtc_vp8",

View File

@@ -34,14 +34,6 @@ class LogEndToEndTest : public test::CallTest {
return rtc::OpenPlatformFile(paths_[idx]);
}
void LogSend(bool open) {
if (open) {
GetVideoSendStream()->EnableEncodedFrameRecording(
std::vector<rtc::PlatformFile>(1, OpenFile(AddFile())), 0);
} else {
GetVideoSendStream()->DisableEncodedFrameRecording();
}
}
void LogReceive(bool open) {
if (open) {
video_receive_streams_[0]->EnableEncodedFrameRecording(
@@ -66,7 +58,6 @@ TEST_F(LogEndToEndTest, LogsEncodedFramesWhenRequested) {
recorded_frames_(0) {}
void PerformTest() override {
fixture_->LogSend(true);
fixture_->LogReceive(true);
ASSERT_TRUE(Wait()) << "Timed out while waiting for frame logging.";
}
@@ -93,14 +84,11 @@ TEST_F(LogEndToEndTest, LogsEncodedFramesWhenRequested) {
void EncodedFrameCallback(const EncodedFrame& encoded_frame) override {
rtc::CritScope lock(&crit_);
if (recorded_frames_++ > kNumFramesToRecord) {
fixture_->LogSend(false);
fixture_->LogReceive(false);
rtc::File send_file(fixture_->OpenFile(0));
rtc::File receive_file(fixture_->OpenFile(1));
rtc::File receive_file(fixture_->OpenFile(0));
uint8_t out[100];
// If logging has worked correctly neither file should be empty, i.e.
// If logging has worked correctly the file shouldn't be empty, i.e.
// we should be able to read something from it.
EXPECT_LT(0u, send_file.Read(out, 100));
EXPECT_LT(0u, receive_file.Read(out, 100));
observation_complete_.Set();
}

View File

@@ -30,6 +30,8 @@
#include "modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h"
#include "modules/video_coding/codecs/vp8/include/vp8.h"
#include "modules/video_coding/codecs/vp9/include/vp9.h"
#include "modules/video_coding/utility/ivf_file_writer.h"
#include "rtc_base/strings/string_builder.h"
#include "test/run_loop.h"
#include "test/testsupport/fileutils.h"
#include "test/video_renderer.h"
@@ -38,6 +40,8 @@
#include "modules/audio_device/include/audio_device_factory.h"
#endif
namespace webrtc {
namespace {
constexpr char kSyncGroup[] = "av_sync";
constexpr int kOpusMinBitrateBps = 6000;
@@ -49,18 +53,18 @@ constexpr uint32_t kThumbnailRtxSsrcStart = 0xF0000;
constexpr int kDefaultMaxQp = cricket::WebRtcVideoChannel::kDefaultQpMax;
class VideoStreamFactory
: public webrtc::VideoEncoderConfig::VideoStreamFactoryInterface {
: public VideoEncoderConfig::VideoStreamFactoryInterface {
public:
explicit VideoStreamFactory(const std::vector<webrtc::VideoStream>& streams)
explicit VideoStreamFactory(const std::vector<VideoStream>& streams)
: streams_(streams) {}
private:
std::vector<webrtc::VideoStream> CreateEncoderStreams(
std::vector<VideoStream> CreateEncoderStreams(
int width,
int height,
const webrtc::VideoEncoderConfig& encoder_config) override {
const VideoEncoderConfig& encoder_config) override {
// The highest layer must match the incoming resolution.
std::vector<webrtc::VideoStream> streams = streams_;
std::vector<VideoStream> streams = streams_;
streams[streams_.size() - 1].height = height;
streams[streams_.size() - 1].width = width;
@@ -68,22 +72,118 @@ class VideoStreamFactory
return streams;
}
std::vector<webrtc::VideoStream> streams_;
std::vector<VideoStream> streams_;
};
} // namespace
namespace webrtc {
// An encoder wrapper that writes the encoded frames to file, one per simulcast
// layer.
class FrameDumpingEncoder : public VideoEncoder, private EncodedImageCallback {
public:
FrameDumpingEncoder(std::unique_ptr<VideoEncoder> encoder,
std::vector<rtc::PlatformFile> files)
: encoder_(std::move(encoder)) {
for (rtc::PlatformFile file : files) {
writers_.push_back(
IvfFileWriter::Wrap(rtc::File(file), 100000000 /* byte_limit */));
}
}
// Implement VideoEncoder
int32_t InitEncode(const VideoCodec* codec_settings,
int32_t number_of_cores,
size_t max_payload_size) override {
return encoder_->InitEncode(codec_settings, number_of_cores,
max_payload_size);
}
int32_t RegisterEncodeCompleteCallback(
EncodedImageCallback* callback) override {
callback_ = callback;
return encoder_->RegisterEncodeCompleteCallback(this);
}
int32_t Release() override { return encoder_->Release(); }
int32_t Encode(const VideoFrame& frame,
const CodecSpecificInfo* codec_specific_info,
const std::vector<FrameType>* frame_types) override {
return encoder_->Encode(frame, codec_specific_info, frame_types);
}
int32_t SetChannelParameters(uint32_t packet_loss, int64_t rtt) override {
return encoder_->SetChannelParameters(packet_loss, rtt);
}
int32_t SetRates(uint32_t bitrate, uint32_t framerate) override {
return encoder_->SetRates(bitrate, framerate);
}
int32_t SetRateAllocation(const VideoBitrateAllocation& allocation,
uint32_t framerate) override {
return encoder_->SetRateAllocation(allocation, framerate);
}
ScalingSettings GetScalingSettings() const override {
return encoder_->GetScalingSettings();
}
bool SupportsNativeHandle() const override {
return encoder_->SupportsNativeHandle();
}
const char* ImplementationName() const override {
return encoder_->ImplementationName();
}
private:
// Implement EncodedImageCallback
Result OnEncodedImage(const EncodedImage& encoded_image,
const CodecSpecificInfo* codec_specific_info,
const RTPFragmentationHeader* fragmentation) override {
if (codec_specific_info) {
int simulcast_index;
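// Note added for clarity (not in the original change): VP9 encodes its
// spatial layers within a single stream, so all frames go to the first
// writer; for other codecs SpatialIndex() carries the simulcast index here.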
if (codec_specific_info->codecType == kVideoCodecVP9) {
simulcast_index = 0;
} else {
simulcast_index = encoded_image.SpatialIndex().value_or(0);
}
RTC_DCHECK_GE(simulcast_index, 0);
if (static_cast<size_t>(simulcast_index) < writers_.size()) {
writers_[simulcast_index]->WriteFrame(encoded_image,
codec_specific_info->codecType);
}
}
return callback_->OnEncodedImage(encoded_image, codec_specific_info,
fragmentation);
}
void OnDroppedFrame(DropReason reason) override {
callback_->OnDroppedFrame(reason);
}
std::unique_ptr<VideoEncoder> encoder_;
EncodedImageCallback* callback_ = nullptr;
std::vector<std::unique_ptr<IvfFileWriter>> writers_;
};
} // namespace
std::unique_ptr<VideoEncoder> VideoQualityTest::CreateVideoEncoder(
const SdpVideoFormat& format) {
std::unique_ptr<VideoEncoder> encoder;
if (format.name == "VP8") {
return absl::make_unique<VP8EncoderSimulcastProxy>(
encoder = absl::make_unique<VP8EncoderSimulcastProxy>(
&internal_encoder_factory_, format);
} else if (format.name == "multiplex") {
return absl::make_unique<MultiplexEncoderAdapter>(
encoder = absl::make_unique<MultiplexEncoderAdapter>(
&internal_encoder_factory_, SdpVideoFormat(cricket::kVp9CodecName));
} else {
encoder = internal_encoder_factory_.CreateVideoEncoder(format);
}
return internal_encoder_factory_.CreateVideoEncoder(format);
if (!params_.logging.encoded_frame_base_path.empty()) {
char ss_buf[100];
rtc::SimpleStringBuilder sb(ss_buf);
sb << send_logs_++;
std::string prefix =
params_.logging.encoded_frame_base_path + "." + sb.str() + ".send.";
encoder = absl::make_unique<FrameDumpingEncoder>(
std::move(encoder), std::vector<rtc::PlatformFile>(
{rtc::CreatePlatformFile(prefix + "1.ivf"),
rtc::CreatePlatformFile(prefix + "2.ivf"),
rtc::CreatePlatformFile(prefix + "3.ivf")}));
}
return encoder;
}
VideoQualityTest::VideoQualityTest(
@@ -969,7 +1069,6 @@ void VideoQualityTest::RunWithAnalyzer(const Params& params) {
video_capturers_[video_idx].get(), degradation_preference_);
}
StartEncodedFrameLogs(GetVideoSendStream());
StartEncodedFrameLogs(
video_receive_streams_[params_.ss[0].selected_stream]);
@@ -1186,7 +1285,6 @@ void VideoQualityTest::RunWithRenderers(const Params& params) {
for (VideoReceiveStream* receive_stream : video_receive_streams_)
StartEncodedFrameLogs(receive_stream);
StartEncodedFrameLogs(GetVideoSendStream());
Start();
});
@@ -1207,21 +1305,6 @@ void VideoQualityTest::RunWithRenderers(const Params& params) {
});
}
void VideoQualityTest::StartEncodedFrameLogs(VideoSendStream* stream) {
if (!params_.logging.encoded_frame_base_path.empty()) {
std::ostringstream str;
str << send_logs_++;
std::string prefix =
params_.logging.encoded_frame_base_path + "." + str.str() + ".send.";
stream->EnableEncodedFrameRecording(
std::vector<rtc::PlatformFile>(
{rtc::CreatePlatformFile(prefix + "1.ivf"),
rtc::CreatePlatformFile(prefix + "2.ivf"),
rtc::CreatePlatformFile(prefix + "3.ivf")}),
100000000);
}
}
void VideoQualityTest::StartEncodedFrameLogs(VideoReceiveStream* stream) {
if (!params_.logging.encoded_frame_base_path.empty()) {
std::ostringstream str;

View File

@@ -89,7 +89,6 @@ class VideoQualityTest :
bool use_real_adm);
void SetupAudio(Transport* transport);
void StartEncodedFrameLogs(VideoSendStream* stream);
void StartEncodedFrameLogs(VideoReceiveStream* stream);
virtual std::unique_ptr<test::LayerFilteringTransport> CreateSendTransport();

View File

@@ -210,11 +210,5 @@ bool VideoSendStream::DeliverRtcp(const uint8_t* packet, size_t length) {
return send_stream_->DeliverRtcp(packet, length);
}
void VideoSendStream::EnableEncodedFrameRecording(
const std::vector<rtc::PlatformFile>& files,
size_t byte_limit) {
send_stream_->EnableEncodedFrameRecording(files, byte_limit);
}
} // namespace internal
} // namespace webrtc

View File

@@ -85,15 +85,6 @@ class VideoSendStream : public webrtc::VideoSendStream {
void ReconfigureVideoEncoder(VideoEncoderConfig) override;
Stats GetStats() override;
// Takes ownership of each file, is responsible for closing them later.
// Calling this method will close and finalize any current logs.
// Giving rtc::kInvalidPlatformFileValue in any position disables logging
// for the corresponding stream.
// If a frame to be written would make the log too large the write fails and
// the log is closed and finalized. A |byte_limit| of 0 means no limit.
void EnableEncodedFrameRecording(const std::vector<rtc::PlatformFile>& files,
size_t byte_limit) override;
void StopPermanentlyAndGetRtpStates(RtpStateMap* rtp_state_map,
RtpPayloadStateMap* payload_state_map);

View File

@@ -541,19 +541,8 @@ EncodedImageCallback::Result VideoSendStreamImpl::OnEncodedImage(
fec_controller_->UpdateWithEncodedData(encoded_image._length,
encoded_image._frameType);
EncodedImageCallback::Result result = rtp_video_sender_->OnEncodedImage(
encoded_image, codec_specific_info, fragmentation);
{
rtc::CritScope lock(&ivf_writers_crit_);
if (file_writers_[simulcast_idx].get()) {
bool ok = file_writers_[simulcast_idx]->WriteFrame(
encoded_image, codec_specific_info->codecType);
RTC_DCHECK(ok);
}
}
return result;
return rtp_video_sender_->OnEncodedImage(encoded_image, codec_specific_info,
fragmentation);
}
std::map<uint32_t, RtpState> VideoSendStreamImpl::GetRtpStates() const {
@@ -617,27 +606,6 @@ uint32_t VideoSendStreamImpl::OnBitrateUpdated(uint32_t bitrate_bps,
return protection_bitrate;
}
void VideoSendStreamImpl::EnableEncodedFrameRecording(
const std::vector<rtc::PlatformFile>& files,
size_t byte_limit) {
{
rtc::CritScope lock(&ivf_writers_crit_);
for (unsigned int i = 0; i < kMaxSimulcastStreams; ++i) {
if (i < files.size()) {
file_writers_[i] = IvfFileWriter::Wrap(rtc::File(files[i]), byte_limit);
} else {
file_writers_[i].reset();
}
}
}
if (!files.empty()) {
// Make a keyframe appear as early as possible in the logs, to give actually
// decodable output.
video_stream_encoder_->SendKeyFrame();
}
}
int VideoSendStreamImpl::ProtectionRequest(
const FecProtectionParams* delta_params,
const FecProtectionParams* key_params,

View File

@@ -21,7 +21,6 @@
#include "call/rtp_video_sender_interface.h"
#include "common_types.h" // NOLINT(build/include)
#include "modules/utility/include/process_thread.h"
#include "modules/video_coding/utility/ivf_file_writer.h"
#include "rtc_base/weak_ptr.h"
#include "video/call_stats.h"
#include "video/encoder_rtcp_feedback.h"
@@ -82,9 +81,6 @@ class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver,
std::map<uint32_t, RtpPayloadState> GetRtpPayloadStates() const;
void EnableEncodedFrameRecording(const std::vector<rtc::PlatformFile>& files,
size_t byte_limit);
void SetTransportOverhead(size_t transport_overhead_per_packet);
absl::optional<float> configured_pacing_factor_;
@@ -156,8 +152,6 @@ class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver,
BitrateAllocatorInterface* const bitrate_allocator_;
rtc::CriticalSection ivf_writers_crit_;
std::unique_ptr<IvfFileWriter>
file_writers_[kMaxSimulcastStreams] RTC_GUARDED_BY(ivf_writers_crit_);
int max_padding_bitrate_;
int encoder_min_bitrate_bps_;