New FrameBuffer3.

FrameBuffer3 keeps track of the order, decodability and continuity of the
inserted frames. Compared to FrameBuffer2, which schedules frames for decoding
and is thread safe, FrameBuffer3 does not schedule decoding and is not thread
safe.

Bug: webrtc:13343
Change-Id: Ic3bd540c4f69cec26fce53a40425f3bcd9afe085
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/238985
Reviewed-by: Danil Chapovalov <danilchap@webrtc.org>
Reviewed-by: Ilya Nikolaevskiy <ilnik@webrtc.org>
Reviewed-by: Evan Shrubsole <eshr@webrtc.org>
Commit-Queue: Philip Eliasson <philipel@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#35494}
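For orientation, the sketch below shows how a caller might drive the new buffer. It is illustrative only and not part of this change; OnAssembledFrame is an assumed entry point rather than a WebRTC API. Since FrameBuffer3 does no decode scheduling, the caller inserts frames as they arrive and then decides itself when to pull out decodable temporal units.

#include <memory>
#include <utility>

#include "api/video/encoded_frame.h"
#include "modules/video_coding/frame_buffer3.h"

// Hypothetical entry point, called for every frame assembled from the network.
void OnAssembledFrame(webrtc::FrameBuffer& buffer,
                      std::unique_ptr<webrtc::EncodedFrame> frame) {
  // Frames may arrive out of order; the buffer orders them by frame ID and
  // groups them into temporal units by RTP timestamp.
  buffer.InsertFrame(std::move(frame));

  // The buffer does not schedule decoding, so the caller polls it and decides
  // when to extract the next decodable temporal unit.
  while (buffer.NextDecodableTemporalUnitRtpTimestamp()) {
    for (auto& unit_frame : buffer.ExtractNextDecodableTemporalUnit()) {
      // Hand each frame of the temporal unit to the decoder here.
      (void)unit_frame;
    }
  }
}

A real receive pipeline would likely drive the extraction from its own decode timing rather than inline from the insert path; that scheduling is exactly what FrameBuffer3 leaves to its caller.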
modules/video_coding/BUILD.gn
@@ -148,6 +148,26 @@ rtc_library("h264_packet_buffer") {
   absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
 }
 
+rtc_library("frame_buffer") {
+  sources = [
+    "frame_buffer3.cc",
+    "frame_buffer3.h",
+  ]
+  deps = [
+    ":video_coding_utility",
+    "../../api/units:timestamp",
+    "../../api/video:encoded_frame",
+    "../../rtc_base:logging",
+    "../../rtc_base:rtc_numerics",
+    "../../system_wrappers:field_trial",
+  ]
+  absl_deps = [
+    "//third_party/abseil-cpp/absl/algorithm:container",
+    "//third_party/abseil-cpp/absl/container:inlined_vector",
+    "//third_party/abseil-cpp/absl/types:optional",
+  ]
+}
+
 rtc_library("video_coding") {
   visibility = [ "*" ]
   sources = [
@@ -206,6 +226,7 @@ rtc_library("video_coding") {
   deps = [
     ":codec_globals_headers",
     ":encoded_frame",
+    ":frame_buffer",
     ":packet_buffer",
     ":video_codec_interface",
     ":video_coding_utility",
@@ -1011,6 +1032,7 @@ if (rtc_include_tests) {
       "decoding_state_unittest.cc",
       "fec_controller_unittest.cc",
       "frame_buffer2_unittest.cc",
+      "frame_buffer3_unittest.cc",
       "frame_dependencies_calculator_unittest.cc",
       "generic_decoder_unittest.cc",
       "h264_packet_buffer_unittest.cc",
@@ -1057,6 +1079,7 @@ if (rtc_include_tests) {
       ":chain_diff_calculator",
      ":codec_globals_headers",
      ":encoded_frame",
+      ":frame_buffer",
      ":frame_dependencies_calculator",
      ":h264_packet_buffer",
      ":nack_requester",
modules/video_coding/frame_buffer3.cc (new file, 277 lines)
@@ -0,0 +1,277 @@
/*
 * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "modules/video_coding/frame_buffer3.h"

#include <algorithm>
#include <iterator>
#include <queue>
#include <utility>

#include "absl/algorithm/container.h"
#include "absl/container/inlined_vector.h"
#include "rtc_base/logging.h"
#include "rtc_base/numerics/sequence_number_util.h"
#include "system_wrappers/include/field_trial.h"

namespace webrtc {
namespace {
bool ValidReferences(const EncodedFrame& frame) {
  // All references must point backwards, and duplicates are not allowed.
  for (size_t i = 0; i < frame.num_references; ++i) {
    if (frame.references[i] >= frame.Id())
      return false;

    for (size_t j = i + 1; j < frame.num_references; ++j) {
      if (frame.references[i] == frame.references[j])
        return false;
    }
  }

  return true;
}

// Since FrameBuffer::FrameInfo is private it can't be used in the function
// signature, hence the FrameIteratorT type.
template <typename FrameIteratorT>
rtc::ArrayView<const int64_t> GetReferences(const FrameIteratorT& it) {
  return {it->second.encoded_frame->references,
          std::min<size_t>(it->second.encoded_frame->num_references,
                           EncodedFrame::kMaxFrameReferences)};
}

template <typename FrameIteratorT>
int64_t GetFrameId(const FrameIteratorT& it) {
  return it->first;
}

template <typename FrameIteratorT>
int64_t GetTimestamp(const FrameIteratorT& it) {
  return it->second.encoded_frame->Timestamp();
}

template <typename FrameIteratorT>
bool IsLastFrameInTemporalUnit(const FrameIteratorT& it) {
  return it->second.encoded_frame->is_last_spatial_layer;
}
}  // namespace

FrameBuffer::FrameBuffer(int max_size, int max_decode_history)
    : legacy_frame_id_jump_behavior_(
          field_trial::IsEnabled("WebRTC-LegacyFrameIdJumpBehavior")),
      max_size_(max_size),
      decoded_frame_history_(max_decode_history) {}

void FrameBuffer::InsertFrame(std::unique_ptr<EncodedFrame> frame) {
  if (!ValidReferences(*frame)) {
    RTC_DLOG(LS_WARNING) << "Frame " << frame->Id()
                         << " has invalid references, dropping frame.";
    return;
  }

  if (frame->Id() <= decoded_frame_history_.GetLastDecodedFrameId()) {
    if (legacy_frame_id_jump_behavior_ && frame->is_keyframe() &&
        AheadOf(frame->Timestamp(),
                *decoded_frame_history_.GetLastDecodedFrameTimestamp())) {
      RTC_DLOG(LS_WARNING)
          << "Keyframe " << frame->Id()
          << " has newer timestamp but older picture id, clearing buffer.";
      Clear();
    } else {
      // Already decoded past this frame.
      return;
    }
  }

  if (frames_.size() == max_size_) {
    if (frame->is_keyframe()) {
      RTC_DLOG(LS_WARNING) << "Keyframe " << frame->Id()
                           << " inserted into full buffer, clearing buffer.";
      Clear();
    } else {
      // No space for this frame.
      return;
    }
  }

  const int64_t frame_id = frame->Id();
  auto insert_res = frames_.emplace(frame_id, FrameInfo{std::move(frame)});
  if (!insert_res.second) {
    // Frame has already been inserted.
    return;
  }

  if (frames_.size() == max_size_) {
    RTC_DLOG(LS_WARNING) << "Frame " << frame_id
                         << " inserted, buffer is now full.";
  }

  PropagateContinuity(insert_res.first);
  FindNextAndLastDecodableTemporalUnit();
}

absl::InlinedVector<std::unique_ptr<EncodedFrame>, 4>
FrameBuffer::ExtractNextDecodableTemporalUnit() {
  absl::InlinedVector<std::unique_ptr<EncodedFrame>, 4> res;
  if (!next_decodable_temporal_unit_) {
    return res;
  }

  auto end_it = std::next(next_decodable_temporal_unit_->last_frame);
  for (auto it = next_decodable_temporal_unit_->first_frame; it != end_it;
       ++it) {
    decoded_frame_history_.InsertDecoded(GetFrameId(it), GetTimestamp(it));
    res.push_back(std::move(it->second.encoded_frame));
  }

  DropNextDecodableTemporalUnit();
  return res;
}

void FrameBuffer::DropNextDecodableTemporalUnit() {
  if (!next_decodable_temporal_unit_) {
    return;
  }

  auto end_it = std::next(next_decodable_temporal_unit_->last_frame);
  num_dropped_frames_ += std::count_if(
      frames_.begin(), end_it,
      [](const auto& f) { return f.second.encoded_frame != nullptr; });

  frames_.erase(frames_.begin(), end_it);
  FindNextAndLastDecodableTemporalUnit();
}

absl::optional<int64_t> FrameBuffer::LastContinuousFrameId() const {
  return last_continuous_frame_id_;
}

absl::optional<int64_t> FrameBuffer::LastContinuousTemporalUnitFrameId() const {
  return last_continuous_temporal_unit_frame_id_;
}

absl::optional<uint32_t> FrameBuffer::NextDecodableTemporalUnitRtpTimestamp()
    const {
  if (!next_decodable_temporal_unit_) {
    return absl::nullopt;
  }
  return GetTimestamp(next_decodable_temporal_unit_->first_frame);
}

absl::optional<uint32_t> FrameBuffer::LastDecodableTemporalUnitRtpTimestamp()
    const {
  return last_decodable_temporal_unit_timestamp_;
}

int FrameBuffer::GetTotalNumberOfContinuousTemporalUnits() const {
  return num_continuous_temporal_units_;
}
int FrameBuffer::GetTotalNumberOfDroppedFrames() const {
  return num_dropped_frames_;
}

bool FrameBuffer::IsContinuous(const FrameIterator& it) const {
  for (int64_t reference : GetReferences(it)) {
    if (decoded_frame_history_.WasDecoded(reference)) {
      continue;
    }

    auto reference_frame_it = frames_.find(reference);
    if (reference_frame_it != frames_.end() &&
        reference_frame_it->second.continuous) {
      continue;
    }

    return false;
  }

  return true;
}

void FrameBuffer::PropagateContinuity(const FrameIterator& frame_it) {
  for (auto it = frame_it; it != frames_.end(); ++it) {
    if (!it->second.continuous) {
      if (IsContinuous(it)) {
        it->second.continuous = true;
        if (last_continuous_frame_id_ < GetFrameId(it)) {
          last_continuous_frame_id_ = GetFrameId(it);
        }
        if (IsLastFrameInTemporalUnit(it)) {
          num_continuous_temporal_units_++;
          if (last_continuous_temporal_unit_frame_id_ < GetFrameId(it)) {
            last_continuous_temporal_unit_frame_id_ = GetFrameId(it);
          }
        }
      }
    }
  }
}

void FrameBuffer::FindNextAndLastDecodableTemporalUnit() {
  next_decodable_temporal_unit_.reset();
  last_decodable_temporal_unit_timestamp_.reset();

  if (!last_continuous_temporal_unit_frame_id_) {
    return;
  }

  FrameIterator first_frame_it = frames_.begin();
  FrameIterator last_frame_it = frames_.begin();
  absl::InlinedVector<int64_t, 4> frames_in_temporal_unit;
  for (auto frame_it = frames_.begin(); frame_it != frames_.end();) {
    if (GetFrameId(frame_it) > *last_continuous_temporal_unit_frame_id_) {
      break;
    }

    if (GetTimestamp(frame_it) != GetTimestamp(first_frame_it)) {
      frames_in_temporal_unit.clear();
      first_frame_it = frame_it;
    }

    frames_in_temporal_unit.push_back(GetFrameId(frame_it));

    last_frame_it = frame_it++;

    if (IsLastFrameInTemporalUnit(last_frame_it)) {
      bool temporal_unit_decodable = true;
      for (auto it = first_frame_it; it != frame_it && temporal_unit_decodable;
           ++it) {
        for (int64_t reference : GetReferences(it)) {
          if (!decoded_frame_history_.WasDecoded(reference) &&
              !absl::c_linear_search(frames_in_temporal_unit, reference)) {
            // A frame in the temporal unit has a non-decoded reference outside
            // the temporal unit, so it's not yet ready to be decoded.
            temporal_unit_decodable = false;
            break;
          }
        }
      }

      if (temporal_unit_decodable) {
        if (!next_decodable_temporal_unit_) {
          next_decodable_temporal_unit_ = {first_frame_it, last_frame_it};
        }

        last_decodable_temporal_unit_timestamp_ = GetTimestamp(first_frame_it);
      }
    }
  }
}

void FrameBuffer::Clear() {
  frames_.clear();
  next_decodable_temporal_unit_.reset();
  last_decodable_temporal_unit_timestamp_.reset();
  last_continuous_frame_id_.reset();
  last_continuous_temporal_unit_frame_id_.reset();
  decoded_frame_history_.Clear();
}

}  // namespace webrtc
modules/video_coding/frame_buffer3.h (new file, 96 lines)
@@ -0,0 +1,96 @@
/*
 * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#ifndef MODULES_VIDEO_CODING_FRAME_BUFFER3_H_
#define MODULES_VIDEO_CODING_FRAME_BUFFER3_H_

#include <map>
#include <memory>
#include <utility>

#include "absl/container/inlined_vector.h"
#include "absl/types/optional.h"
#include "api/units/timestamp.h"
#include "api/video/encoded_frame.h"
#include "modules/video_coding/utility/decoded_frames_history.h"

namespace webrtc {
// The high level idea of the FrameBuffer is to order frames received from the
// network into a decodable stream. Frames are ordered by frame ID, and grouped
// into temporal units by timestamp. A temporal unit is decodable after all
// referenced frames outside the unit have been decoded, and a temporal unit is
// continuous if all referenced frames are directly or indirectly decodable.
// The FrameBuffer is thread-unsafe.
class FrameBuffer {
 public:
  // `max_size` determines the maximum number of frames the buffer will store,
  // and `max_decode_history` determines how far back (by frame ID) the buffer
  // will remember whether a frame was decoded or not.
  FrameBuffer(int max_size, int max_decode_history);
  FrameBuffer(const FrameBuffer&) = delete;
  FrameBuffer& operator=(const FrameBuffer&) = delete;
  ~FrameBuffer() = default;

  // Inserted frames may only reference backwards, and must have no duplicate
  // references.
  void InsertFrame(std::unique_ptr<EncodedFrame> frame);

  // Marks all frames belonging to the next decodable temporal unit as decoded
  // and returns them.
  absl::InlinedVector<std::unique_ptr<EncodedFrame>, 4>
  ExtractNextDecodableTemporalUnit();

  // Drops all frames in the next decodable temporal unit.
  void DropNextDecodableTemporalUnit();

  absl::optional<int64_t> LastContinuousFrameId() const;
  absl::optional<int64_t> LastContinuousTemporalUnitFrameId() const;
  absl::optional<uint32_t> NextDecodableTemporalUnitRtpTimestamp() const;
  absl::optional<uint32_t> LastDecodableTemporalUnitRtpTimestamp() const;

  int GetTotalNumberOfContinuousTemporalUnits() const;
  int GetTotalNumberOfDroppedFrames() const;

 private:
  struct FrameInfo {
    std::unique_ptr<EncodedFrame> encoded_frame;
    bool continuous = false;
  };

  using FrameMap = std::map<int64_t, FrameInfo>;
  using FrameIterator = FrameMap::iterator;

  struct TemporalUnit {
    // Both first and last are inclusive.
    FrameIterator first_frame;
    FrameIterator last_frame;
  };

  bool IsContinuous(const FrameIterator& it) const;
  void PropagateContinuity(const FrameIterator& frame_it);
  void FindNextAndLastDecodableTemporalUnit();
  void Clear();

  const bool legacy_frame_id_jump_behavior_;
  const size_t max_size_;
  FrameMap frames_;
  absl::optional<TemporalUnit> next_decodable_temporal_unit_;
  absl::optional<uint32_t> last_decodable_temporal_unit_timestamp_;
  absl::optional<int64_t> last_continuous_frame_id_;
  absl::optional<int64_t> last_continuous_temporal_unit_frame_id_;
  video_coding::DecodedFramesHistory decoded_frame_history_;

  int num_continuous_temporal_units_ = 0;
  int num_dropped_frames_ = 0;
};

}  // namespace webrtc

#endif  // MODULES_VIDEO_CODING_FRAME_BUFFER3_H_
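To make the "continuous" versus "decodable" terms in the class comment concrete, here is a hedged walk-through of a small insertion sequence. It is a sketch, not part of the change; TestFrame and BuildFrame are assumed helpers for illustration (the unit test below defines an equivalent Builder).

#include <memory>
#include <vector>

#include "api/video/encoded_frame.h"
#include "modules/video_coding/frame_buffer3.h"

namespace {

class TestFrame : public webrtc::EncodedFrame {
 public:
  int64_t ReceivedTime() const override { return 0; }
  int64_t RenderTime() const override { return 0; }
};

// Hypothetical helper: builds a frame with the given ID, RTP timestamp and
// references, marked as the last spatial layer of its temporal unit.
std::unique_ptr<webrtc::EncodedFrame> BuildFrame(int64_t id,
                                                 uint32_t rtp_timestamp,
                                                 std::vector<int64_t> refs) {
  auto frame = std::make_unique<TestFrame>();
  frame->SetId(id);
  frame->SetTimestamp(rtp_timestamp);
  frame->is_last_spatial_layer = true;
  for (int64_t ref : refs)
    frame->references[frame->num_references++] = ref;
  return frame;
}

void Walkthrough() {
  webrtc::FrameBuffer buffer(/*max_size=*/10, /*max_decode_history=*/100);

  // Keyframe 1 references nothing: it is continuous right away and its
  // temporal unit is decodable right away.
  buffer.InsertFrame(BuildFrame(/*id=*/1, /*rtp_timestamp=*/10, {}));

  // Frame 3 references frame 2, which has not arrived yet, so frame 3 is
  // neither continuous nor part of a decodable temporal unit.
  buffer.InsertFrame(BuildFrame(/*id=*/3, /*rtp_timestamp=*/30, {2}));

  // Once frame 2 (referencing frame 1) arrives, continuity propagates: frames
  // 2 and 3 become continuous because their references are directly or
  // indirectly decodable. Only frame 1's temporal unit is decodable at this
  // point; extracting units in order makes each following unit decodable.
  buffer.InsertFrame(BuildFrame(/*id=*/2, /*rtp_timestamp=*/20, {1}));
}

}  // namespace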
modules/video_coding/frame_buffer3_unittest.cc (new file, 344 lines)
@@ -0,0 +1,344 @@
/*
 * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */
#include "modules/video_coding/frame_buffer3.h"

#include <vector>

#include "api/video/encoded_frame.h"
#include "test/field_trial.h"
#include "test/gmock.h"
#include "test/gtest.h"

namespace webrtc {
namespace {

using ::testing::ElementsAre;
using ::testing::Eq;
using ::testing::IsEmpty;
using ::testing::Matches;

MATCHER_P(FrameWithId, id, "") {
  return Matches(Eq(id))(arg->Id());
}

class FakeEncodedFrame : public EncodedFrame {
 public:
  int64_t ReceivedTime() const override { return 0; }
  int64_t RenderTime() const override { return 0; }
};

class Builder {
 public:
  Builder& Time(uint32_t rtp_timestamp) {
    rtp_timestamp_ = rtp_timestamp;
    return *this;
  }
  Builder& Id(int64_t frame_id) {
    frame_id_ = frame_id;
    return *this;
  }
  Builder& AsLast() {
    last_spatial_layer_ = true;
    return *this;
  }
  Builder& Refs(const std::vector<int64_t>& references) {
    references_ = references;
    return *this;
  }

  std::unique_ptr<FakeEncodedFrame> Build() {
    RTC_CHECK_LE(references_.size(), EncodedFrame::kMaxFrameReferences);
    RTC_CHECK(rtp_timestamp_.has_value());
    RTC_CHECK(frame_id_.has_value());

    auto frame = std::make_unique<FakeEncodedFrame>();
    frame->SetTimestamp(*rtp_timestamp_);
    frame->SetId(*frame_id_);
    frame->is_last_spatial_layer = last_spatial_layer_;

    for (int64_t ref : references_) {
      frame->references[frame->num_references] = ref;
      frame->num_references++;
    }

    return frame;
  }

 private:
  absl::optional<uint32_t> rtp_timestamp_;
  absl::optional<int64_t> frame_id_;
  bool last_spatial_layer_ = false;
  std::vector<int64_t> references_;
};

TEST(FrameBuffer3Test, RejectInvalidRefs) {
  FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100);
  // Ref must be less than the id of this frame.
  buffer.InsertFrame(Builder().Time(0).Id(0).Refs({0}).AsLast().Build());
  EXPECT_THAT(buffer.LastContinuousFrameId(), Eq(absl::nullopt));

  // Duplicate ids are also invalid.
  buffer.InsertFrame(Builder().Time(10).Id(1).AsLast().Build());
  buffer.InsertFrame(Builder().Time(20).Id(2).Refs({1, 1}).AsLast().Build());
  EXPECT_THAT(buffer.LastContinuousFrameId(), Eq(1));
}

TEST(FrameBuffer3Test, LastContinuousUpdatesOnInsertedFrames) {
  FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100);
  EXPECT_THAT(buffer.LastContinuousFrameId(), Eq(absl::nullopt));
  EXPECT_THAT(buffer.LastContinuousTemporalUnitFrameId(), Eq(absl::nullopt));

  buffer.InsertFrame(Builder().Time(10).Id(1).Build());
  EXPECT_THAT(buffer.LastContinuousFrameId(), Eq(1));
  EXPECT_THAT(buffer.LastContinuousTemporalUnitFrameId(), Eq(absl::nullopt));

  buffer.InsertFrame(Builder().Time(10).Id(2).Refs({1}).AsLast().Build());
  EXPECT_THAT(buffer.LastContinuousFrameId(), Eq(2));
  EXPECT_THAT(buffer.LastContinuousTemporalUnitFrameId(), Eq(2));
}

TEST(FrameBuffer3Test, LastContinuousFrameReordering) {
  FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100);

  buffer.InsertFrame(Builder().Time(10).Id(1).AsLast().Build());
  buffer.InsertFrame(Builder().Time(30).Id(3).Refs({2}).AsLast().Build());
  EXPECT_THAT(buffer.LastContinuousFrameId(), Eq(1));

  buffer.InsertFrame(Builder().Time(20).Id(2).Refs({1}).AsLast().Build());
  EXPECT_THAT(buffer.LastContinuousFrameId(), Eq(3));
}

TEST(FrameBuffer3Test, LastContinuousTemporalUnit) {
  FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100);

  buffer.InsertFrame(Builder().Time(10).Id(1).Build());
  EXPECT_THAT(buffer.LastContinuousTemporalUnitFrameId(), Eq(absl::nullopt));
  buffer.InsertFrame(Builder().Time(10).Id(2).Refs({1}).AsLast().Build());
  EXPECT_THAT(buffer.LastContinuousTemporalUnitFrameId(), Eq(2));
}

TEST(FrameBuffer3Test, LastContinuousTemporalUnitReordering) {
  FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100);

  buffer.InsertFrame(Builder().Time(10).Id(1).Build());
  buffer.InsertFrame(Builder().Time(20).Id(3).Refs({1}).Build());
  buffer.InsertFrame(Builder().Time(20).Id(4).Refs({2, 3}).AsLast().Build());
  EXPECT_THAT(buffer.LastContinuousTemporalUnitFrameId(), Eq(absl::nullopt));

  buffer.InsertFrame(Builder().Time(10).Id(2).Refs({1}).AsLast().Build());
  EXPECT_THAT(buffer.LastContinuousTemporalUnitFrameId(), Eq(4));
}

TEST(FrameBuffer3Test, NextDecodable) {
  FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100);

  EXPECT_THAT(buffer.NextDecodableTemporalUnitRtpTimestamp(),
              Eq(absl::nullopt));
  buffer.InsertFrame(Builder().Time(10).Id(1).AsLast().Build());
  EXPECT_THAT(buffer.NextDecodableTemporalUnitRtpTimestamp(), Eq(10U));
}

TEST(FrameBuffer3Test, AdvanceNextDecodableOnExtraction) {
  FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100);

  buffer.InsertFrame(Builder().Time(10).Id(1).AsLast().Build());
  buffer.InsertFrame(Builder().Time(20).Id(2).AsLast().Build());
  buffer.InsertFrame(Builder().Time(30).Id(3).Refs({2}).AsLast().Build());
  EXPECT_THAT(buffer.NextDecodableTemporalUnitRtpTimestamp(), Eq(10U));

  EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
              ElementsAre(FrameWithId(1)));
  EXPECT_THAT(buffer.NextDecodableTemporalUnitRtpTimestamp(), Eq(20U));
  EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
              ElementsAre(FrameWithId(2)));
  EXPECT_THAT(buffer.NextDecodableTemporalUnitRtpTimestamp(), Eq(30U));
  EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
              ElementsAre(FrameWithId(3)));
}

TEST(FrameBuffer3Test, AdvanceLastDecodableOnExtraction) {
  FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100);

  buffer.InsertFrame(Builder().Time(10).Id(1).AsLast().Build());
  buffer.InsertFrame(Builder().Time(20).Id(2).Refs({1}).AsLast().Build());
  buffer.InsertFrame(Builder().Time(30).Id(3).Refs({1}).AsLast().Build());
  EXPECT_THAT(buffer.LastDecodableTemporalUnitRtpTimestamp(), Eq(10U));

  EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
              ElementsAre(FrameWithId(1)));
  EXPECT_THAT(buffer.LastDecodableTemporalUnitRtpTimestamp(), Eq(30U));
}

TEST(FrameBuffer3Test, FrameUpdatesNextDecodable) {
  FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100);

  buffer.InsertFrame(Builder().Time(20).Id(2).AsLast().Build());
  EXPECT_THAT(buffer.NextDecodableTemporalUnitRtpTimestamp(), Eq(20U));

  buffer.InsertFrame(Builder().Time(10).Id(1).AsLast().Build());
  EXPECT_THAT(buffer.NextDecodableTemporalUnitRtpTimestamp(), Eq(10U));
}

TEST(FrameBuffer3Test, KeyframeClearsFullBuffer) {
  FrameBuffer buffer(/*max_frame_slots=*/5, /*max_decode_history=*/10);
  buffer.InsertFrame(Builder().Time(10).Id(1).AsLast().Build());
  buffer.InsertFrame(Builder().Time(20).Id(2).Refs({1}).AsLast().Build());
  buffer.InsertFrame(Builder().Time(30).Id(3).Refs({2}).AsLast().Build());
  buffer.InsertFrame(Builder().Time(40).Id(4).Refs({3}).AsLast().Build());
  buffer.InsertFrame(Builder().Time(50).Id(5).Refs({4}).AsLast().Build());
  EXPECT_THAT(buffer.LastContinuousFrameId(), Eq(5));

  // Frame buffer is full
  buffer.InsertFrame(Builder().Time(60).Id(6).Refs({5}).AsLast().Build());
  EXPECT_THAT(buffer.LastContinuousFrameId(), Eq(5));

  buffer.InsertFrame(Builder().Time(70).Id(7).AsLast().Build());
  EXPECT_THAT(buffer.LastContinuousFrameId(), Eq(7));
}

TEST(FrameBuffer3Test, DropNextDecodableTemporalUnit) {
  FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100);
  buffer.InsertFrame(Builder().Time(10).Id(1).AsLast().Build());
  buffer.InsertFrame(Builder().Time(20).Id(2).Refs({1}).AsLast().Build());
  buffer.InsertFrame(Builder().Time(30).Id(3).Refs({1}).AsLast().Build());

  buffer.ExtractNextDecodableTemporalUnit();
  buffer.DropNextDecodableTemporalUnit();
  EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
              ElementsAre(FrameWithId(3)));
}

TEST(FrameBuffer3Test, OldFramesAreIgnored) {
  FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100);
  buffer.InsertFrame(Builder().Time(10).Id(1).AsLast().Build());
  buffer.InsertFrame(Builder().Time(20).Id(2).Refs({1}).AsLast().Build());

  buffer.ExtractNextDecodableTemporalUnit();
  buffer.ExtractNextDecodableTemporalUnit();

  buffer.InsertFrame(Builder().Time(10).Id(1).AsLast().Build());
  buffer.InsertFrame(Builder().Time(20).Id(2).Refs({1}).AsLast().Build());
  buffer.InsertFrame(Builder().Time(30).Id(3).Refs({1}).AsLast().Build());

  EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
              ElementsAre(FrameWithId(3)));
}

TEST(FrameBuffer3Test, ReturnFullTemporalUnitKSVC) {
  FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100);
  buffer.InsertFrame(Builder().Time(10).Id(1).Build());
  buffer.InsertFrame(Builder().Time(10).Id(2).Refs({1}).Build());
  buffer.InsertFrame(Builder().Time(10).Id(3).Refs({2}).AsLast().Build());
  EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
              ElementsAre(FrameWithId(1), FrameWithId(2), FrameWithId(3)));

  buffer.InsertFrame(Builder().Time(20).Id(4).Refs({3}).AsLast().Build());
  EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
              ElementsAre(FrameWithId(4)));
}

TEST(FrameBuffer3Test, InterleavedStream) {
  FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100);
  buffer.InsertFrame(Builder().Time(10).Id(1).AsLast().Build());
  buffer.InsertFrame(Builder().Time(20).Id(2).Refs({1}).AsLast().Build());
  buffer.InsertFrame(Builder().Time(30).Id(3).Refs({1}).AsLast().Build());
  buffer.InsertFrame(Builder().Time(40).Id(4).Refs({2}).AsLast().Build());
  buffer.InsertFrame(Builder().Time(50).Id(5).Refs({3}).AsLast().Build());

  EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
              ElementsAre(FrameWithId(1)));
  EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
              ElementsAre(FrameWithId(2)));
  EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
              ElementsAre(FrameWithId(3)));
  EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
              ElementsAre(FrameWithId(4)));
  EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
              ElementsAre(FrameWithId(5)));

  buffer.InsertFrame(Builder().Time(70).Id(7).Refs({5}).AsLast().Build());
  EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
              ElementsAre(FrameWithId(7)));
  buffer.InsertFrame(Builder().Time(60).Id(6).Refs({4}).AsLast().Build());
  EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(), IsEmpty());
  buffer.InsertFrame(Builder().Time(90).Id(9).Refs({7}).AsLast().Build());
  EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
              ElementsAre(FrameWithId(9)));
}

TEST(FrameBuffer3Test, LegacyFrameIdJumpBehavior) {
  {
    // WebRTC-LegacyFrameIdJumpBehavior is disabled by default.
    FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100);

    buffer.InsertFrame(Builder().Time(20).Id(3).AsLast().Build());
    EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
                ElementsAre(FrameWithId(3)));
    buffer.InsertFrame(Builder().Time(30).Id(2).AsLast().Build());
    EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(), IsEmpty());
  }

  {
    test::ScopedFieldTrials field_trial(
        "WebRTC-LegacyFrameIdJumpBehavior/Enabled/");
    FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100);

    buffer.InsertFrame(Builder().Time(20).Id(3).AsLast().Build());
    EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
                ElementsAre(FrameWithId(3)));
    buffer.InsertFrame(Builder().Time(30).Id(2).Refs({1}).AsLast().Build());
    EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(), IsEmpty());
    buffer.InsertFrame(Builder().Time(40).Id(1).AsLast().Build());
    EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
                ElementsAre(FrameWithId(1)));
  }
}

TEST(FrameBuffer3Test, TotalNumberOfContinuousTemporalUnits) {
  FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100);
  EXPECT_THAT(buffer.GetTotalNumberOfContinuousTemporalUnits(), Eq(0));

  buffer.InsertFrame(Builder().Time(10).Id(1).AsLast().Build());
  EXPECT_THAT(buffer.GetTotalNumberOfContinuousTemporalUnits(), Eq(1));

  buffer.InsertFrame(Builder().Time(20).Id(2).Refs({1}).Build());
  EXPECT_THAT(buffer.GetTotalNumberOfContinuousTemporalUnits(), Eq(1));

  buffer.InsertFrame(Builder().Time(40).Id(4).Refs({2}).Build());
  buffer.InsertFrame(Builder().Time(40).Id(5).Refs({3, 4}).AsLast().Build());
  EXPECT_THAT(buffer.GetTotalNumberOfContinuousTemporalUnits(), Eq(1));

  // Reordered
  buffer.InsertFrame(Builder().Time(20).Id(3).Refs({2}).AsLast().Build());
  EXPECT_THAT(buffer.GetTotalNumberOfContinuousTemporalUnits(), Eq(3));
}

TEST(FrameBuffer3Test, TotalNumberOfDroppedFrames) {
  FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100);
  EXPECT_THAT(buffer.GetTotalNumberOfDroppedFrames(), Eq(0));

  buffer.InsertFrame(Builder().Time(10).Id(1).AsLast().Build());
  buffer.InsertFrame(Builder().Time(20).Id(2).Refs({1}).Build());
  buffer.InsertFrame(Builder().Time(20).Id(3).Refs({2}).AsLast().Build());
  buffer.InsertFrame(Builder().Time(40).Id(4).Refs({1}).Build());
  buffer.InsertFrame(Builder().Time(40).Id(5).Refs({4}).AsLast().Build());

  buffer.ExtractNextDecodableTemporalUnit();
  EXPECT_THAT(buffer.GetTotalNumberOfDroppedFrames(), Eq(0));

  buffer.DropNextDecodableTemporalUnit();
  EXPECT_THAT(buffer.GetTotalNumberOfDroppedFrames(), Eq(2));

  buffer.ExtractNextDecodableTemporalUnit();
  EXPECT_THAT(buffer.GetTotalNumberOfDroppedFrames(), Eq(2));
}

}  // namespace
}  // namespace webrtc
test/fuzzers/BUILD.gn
@@ -590,6 +590,16 @@ webrtc_fuzzer_test("frame_buffer2_fuzzer") {
   ]
 }
 
+webrtc_fuzzer_test("frame_buffer3_fuzzer") {
+  sources = [ "frame_buffer3_fuzzer.cc" ]
+  deps = [
+    ":fuzz_data_helper",
+    "../../api:array_view",
+    "../../api/video:encoded_frame",
+    "../../modules/video_coding:frame_buffer",
+  ]
+}
+
 webrtc_fuzzer_test("field_trial_fuzzer") {
   sources = [ "field_trial_fuzzer.cc" ]
   deps = [ "../../system_wrappers:field_trial" ]
test/fuzzers/frame_buffer3_fuzzer.cc (new file, 81 lines)
@@ -0,0 +1,81 @@
/*
 * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "api/array_view.h"
#include "api/video/encoded_frame.h"
#include "modules/video_coding/frame_buffer3.h"
#include "test/fuzzers/fuzz_data_helper.h"

namespace webrtc {
namespace {
class FuzzyFrameObject : public EncodedFrame {
 public:
  int64_t ReceivedTime() const override { return 0; }
  int64_t RenderTime() const override { return 0; }
};
}  // namespace

void FuzzOneInput(const uint8_t* data, size_t size) {
  if (size > 10000) {
    return;
  }

  FrameBuffer buffer(/*max_frame_slots=*/100, /*max_decode_history=*/1000);
  test::FuzzDataHelper helper(rtc::MakeArrayView(data, size));

  while (helper.BytesLeft() > 0) {
    int action = helper.ReadOrDefaultValue<uint8_t>(0) % 7;

    switch (action) {
      case 0: {
        buffer.LastContinuousFrameId();
        break;
      }
      case 1: {
        buffer.LastContinuousTemporalUnitFrameId();
        break;
      }
      case 2: {
        buffer.NextDecodableTemporalUnitRtpTimestamp();
        break;
      }
      case 3: {
        buffer.LastDecodableTemporalUnitRtpTimestamp();
        break;
      }
      case 4: {
        buffer.ExtractNextDecodableTemporalUnit();
        break;
      }
      case 5: {
        buffer.DropNextDecodableTemporalUnit();
        break;
      }
      case 6: {
        auto frame = std::make_unique<FuzzyFrameObject>();
        frame->SetTimestamp(helper.ReadOrDefaultValue<uint32_t>(0));
        frame->SetId(helper.ReadOrDefaultValue<int64_t>(0));
        frame->is_last_spatial_layer = helper.ReadOrDefaultValue<bool>(false);

        frame->num_references = helper.ReadOrDefaultValue<uint8_t>(0) %
                                EncodedFrame::kMaxFrameReferences;

        for (uint8_t i = 0; i < frame->num_references; ++i) {
          frame->references[i] = helper.ReadOrDefaultValue<int64_t>(0);
        }

        buffer.InsertFrame(std::move(frame));
        break;
      }
    }
  }
}

}  // namespace webrtc