Fix heap use overrun in FakeEncoder

By removing unnecessary fixed size buffer.

BUG=webrtc:10276

Change-Id: I303303d8c4aa356372875abe6db5711cd10bcc71
Reviewed-on: https://webrtc-review.googlesource.com/c/120811
Reviewed-by: Ilya Nikolaevskiy <ilnik@webrtc.org>
Reviewed-by: Niels Moller <nisse@webrtc.org>
Commit-Queue: Per Kjellander <perkj@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#26509}
This commit is contained in:
Per Kjellander
2019-02-01 11:27:43 +01:00
committed by Commit Bot
parent 4f6c539105
commit 65cc52ebca
3 changed files with 6 additions and 18 deletions

View File

@@ -34,8 +34,7 @@ FakeVideoEncoderFactory::FakeVideoEncoderFactory() = default;
// static
std::unique_ptr<VideoEncoder> FakeVideoEncoderFactory::CreateVideoEncoder() {
return absl::make_unique<test::FakeEncoder>(Clock::GetRealTimeClock(),
10000000);
return absl::make_unique<test::FakeEncoder>(Clock::GetRealTimeClock());
}
std::vector<SdpVideoFormat> FakeVideoEncoderFactory::GetSupportedFormats()
@@ -51,8 +50,7 @@ VideoEncoderFactory::CodecInfo FakeVideoEncoderFactory::QueryVideoEncoder(
std::unique_ptr<VideoEncoder> FakeVideoEncoderFactory::CreateVideoEncoder(
const SdpVideoFormat& format) {
return absl::make_unique<test::FakeEncoder>(Clock::GetRealTimeClock(),
10000000);
return absl::make_unique<test::FakeEncoder>(Clock::GetRealTimeClock());
}
FakeVideoDecoderFactory::FakeVideoDecoderFactory() = default;

View File

@@ -47,9 +47,7 @@ void WriteCounter(unsigned char* payload, uint32_t counter) {
}; // namespace
FakeEncoder::FakeEncoder(Clock* clock) : FakeEncoder(clock, 100000) {}
FakeEncoder::FakeEncoder(Clock* clock, size_t buffer_size)
FakeEncoder::FakeEncoder(Clock* clock)
: clock_(clock),
callback_(nullptr),
configured_input_framerate_(-1),
@@ -57,11 +55,6 @@ FakeEncoder::FakeEncoder(Clock* clock, size_t buffer_size)
pending_keyframe_(true),
counter_(0),
debt_bytes_(0) {
// Generate some arbitrary not-all-zero data
encoded_buffer_.resize(buffer_size);
for (size_t i = 0; i < encoded_buffer_.size(); ++i) {
encoded_buffer_[i] = static_cast<uint8_t>(i);
}
for (bool& used : used_layers_) {
used = false;
}
@@ -133,12 +126,12 @@ int32_t FakeEncoder::Encode(const VideoFrame& input_image,
specifics.codecType = kVideoCodecGeneric;
std::unique_ptr<uint8_t[]> encoded_buffer(
new uint8_t[frame_info.layers[i].size]);
memcpy(encoded_buffer.get(), encoded_buffer_.data(),
frame_info.layers[i].size - 4);
// Fill the buffer with arbitrary data. Write something to make ASan happy.
memset(encoded_buffer.get(), 9, frame_info.layers[i].size);
// Write a counter to the image to make each frame unique.
WriteCounter(encoded_buffer.get() + frame_info.layers[i].size - 4, counter);
EncodedImage encoded(encoded_buffer.get(), frame_info.layers[i].size,
encoded_buffer_.size());
frame_info.layers[i].size);
encoded.SetTimestamp(input_image.timestamp());
encoded.capture_time_ms_ = input_image.render_time_ms();
encoded._frameType =

View File

@@ -36,7 +36,6 @@ namespace test {
class FakeEncoder : public VideoEncoder {
public:
explicit FakeEncoder(Clock* clock);
FakeEncoder(Clock* clock, size_t buffer_size);
virtual ~FakeEncoder() = default;
// Sets max bitrate. Not thread-safe, call before registering the encoder.
@@ -91,8 +90,6 @@ class FakeEncoder : public VideoEncoder {
bool pending_keyframe_ RTC_GUARDED_BY(crit_sect_);
uint32_t counter_ RTC_GUARDED_BY(crit_sect_);
rtc::CriticalSection crit_sect_;
std::vector<uint8_t> encoded_buffer_;
bool used_layers_[kMaxSimulcastStreams];
// Current byte debt to be payed over a number of frames.