Moving src/webrtc into src/.

In order to eliminate the WebRTC Subtree mirror in Chromium, 
WebRTC is moving the content of the src/webrtc directory up
to the src/ directory.

NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
TBR=tommi@webrtc.org

Bug: chromium:611808
Change-Id: Iac59c5b51b950f174119565bac87955a7994bc38
Reviewed-on: https://webrtc-review.googlesource.com/1560
Commit-Queue: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Henrik Kjellander <kjellander@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#19845}
This commit is contained in:
Mirko Bonadei
2017-09-15 06:15:48 +02:00
committed by Commit Bot
parent 6674846b4a
commit bb547203bf
4576 changed files with 1092 additions and 1196 deletions

View File

@ -0,0 +1,46 @@
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/video_coding/utility/default_video_bitrate_allocator.h"
#include <stdint.h>
namespace webrtc {
// Stores a copy of |codec|; its minBitrate/maxBitrate (kbps) bound every
// subsequent allocation.
DefaultVideoBitrateAllocator::DefaultVideoBitrateAllocator(
const VideoCodec& codec)
: codec_(codec) {}
DefaultVideoBitrateAllocator::~DefaultVideoBitrateAllocator() {}
// Produces a single-stream allocation: |total_bitrate_bps| clamped to the
// codec's [minBitrate, maxBitrate] range (the upper bound only applies when
// maxBitrate is configured > 0). A zero target means "off" and yields an
// empty allocation. |framerate| is unused here.
BitrateAllocation DefaultVideoBitrateAllocator::GetAllocation(
    uint32_t total_bitrate_bps,
    uint32_t framerate) {
  BitrateAllocation allocation;
  if (total_bitrate_bps == 0)
    return allocation;

  // Codec bitrate fields are in kbps; scale to bps before comparing.
  const uint32_t min_bps = codec_.minBitrate * 1000;
  const uint32_t max_bps = codec_.maxBitrate * 1000;
  uint32_t allocated_bps = total_bitrate_bps;
  if (allocated_bps < min_bps) {
    allocated_bps = min_bps;
  } else if (codec_.maxBitrate > 0 && allocated_bps > max_bps) {
    allocated_bps = max_bps;
  }
  allocation.SetBitrate(0, 0, allocated_bps);
  return allocation;
}
// The preferred rate is simply the configured maximum, routed through
// GetAllocation() so it is subject to the same clamping as any other target.
uint32_t DefaultVideoBitrateAllocator::GetPreferredBitrateBps(
    uint32_t framerate) {
  const uint32_t max_bitrate_bps = codec_.maxBitrate * 1000;
  return GetAllocation(max_bitrate_bps, framerate).get_sum_bps();
}
} // namespace webrtc

View File

@ -0,0 +1,33 @@
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_CODING_UTILITY_DEFAULT_VIDEO_BITRATE_ALLOCATOR_H_
#define WEBRTC_MODULES_VIDEO_CODING_UTILITY_DEFAULT_VIDEO_BITRATE_ALLOCATOR_H_
#include "webrtc/common_video/include/video_bitrate_allocator.h"
namespace webrtc {
// Bitrate allocator that performs no layer splitting: the full target rate is
// assigned to a single stream (spatial index 0, temporal index 0), clamped to
// the codec's configured [minBitrate, maxBitrate] range.
class DefaultVideoBitrateAllocator : public VideoBitrateAllocator {
 public:
  explicit DefaultVideoBitrateAllocator(const VideoCodec& codec);
  ~DefaultVideoBitrateAllocator() override;

  // Returns |total_bitrate_bps| clamped to the codec's bitrate bounds as a
  // single-stream allocation; zero means "off". |framerate| is unused.
  // Parameter name matches the definition in the .cc file (bits per second).
  BitrateAllocation GetAllocation(uint32_t total_bitrate_bps,
                                  uint32_t framerate) override;

  // Allocation resulting from the codec's configured maximum bitrate.
  uint32_t GetPreferredBitrateBps(uint32_t framerate) override;

 private:
  // Immutable snapshot of the codec settings taken at construction.
  const VideoCodec codec_;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_CODING_UTILITY_DEFAULT_VIDEO_BITRATE_ALLOCATOR_H_

View File

@ -0,0 +1,80 @@
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <limits>
#include <memory>
#include "webrtc/modules/video_coding/utility/default_video_bitrate_allocator.h"
#include "webrtc/test/gtest.h"
namespace webrtc {
namespace {
// Codec configuration shared by all tests: bitrate bounds in bps and the
// maximum framerate. VideoCodec stores bitrates in kbps, hence the /1000
// scaling applied in SetUp(). Made const: these are fixed test parameters.
const uint32_t kMaxBitrateBps = 1000000;
const uint32_t kMinBitrateBps = 50000;
const uint32_t kMaxFramerate = 30;
}  // namespace
// Fixture: configures a VP8 VideoCodec with the test bitrate bounds and
// builds a fresh DefaultVideoBitrateAllocator before each test.
class DefaultVideoBitrateAllocatorTest : public ::testing::Test {
public:
DefaultVideoBitrateAllocatorTest() {}
virtual ~DefaultVideoBitrateAllocatorTest() {}
void SetUp() override {
codec_.codecType = kVideoCodecVP8;
// VideoCodec bitrate fields are in kbps, hence the /1000 scaling.
codec_.minBitrate = kMinBitrateBps / 1000;
codec_.maxBitrate = kMaxBitrateBps / 1000;
// Target is the midpoint of [min, max], expressed in kbps.
codec_.targetBitrate = (kMinBitrateBps + kMaxBitrateBps) / 2000;
codec_.maxFramerate = kMaxFramerate;
allocator_.reset(new DefaultVideoBitrateAllocator(codec_));
}
protected:
VideoCodec codec_;
std::unique_ptr<DefaultVideoBitrateAllocator> allocator_;
};
// A zero target bitrate must produce an empty allocation (video "off"),
// not a clamp up to the minimum.
TEST_F(DefaultVideoBitrateAllocatorTest, ZeroIsOff) {
BitrateAllocation allocation = allocator_->GetAllocation(0, kMaxFramerate);
EXPECT_EQ(0u, allocation.get_sum_bps());
}
// Any nonzero target below the configured minimum is raised to the minimum.
TEST_F(DefaultVideoBitrateAllocatorTest, CapsToMin) {
BitrateAllocation allocation = allocator_->GetAllocation(1, kMaxFramerate);
EXPECT_EQ(kMinBitrateBps, allocation.get_sum_bps());
allocation = allocator_->GetAllocation(kMinBitrateBps - 1, kMaxFramerate);
EXPECT_EQ(kMinBitrateBps, allocation.get_sum_bps());
allocation = allocator_->GetAllocation(kMinBitrateBps, kMaxFramerate);
EXPECT_EQ(kMinBitrateBps, allocation.get_sum_bps());
}
// Targets above the configured maximum are lowered to the maximum,
// including the extreme uint32_t boundary.
TEST_F(DefaultVideoBitrateAllocatorTest, CapsToMax) {
BitrateAllocation allocation =
allocator_->GetAllocation(kMaxBitrateBps, kMaxFramerate);
EXPECT_EQ(kMaxBitrateBps, allocation.get_sum_bps());
allocation = allocator_->GetAllocation(kMaxBitrateBps + 1, kMaxFramerate);
EXPECT_EQ(kMaxBitrateBps, allocation.get_sum_bps());
allocation = allocator_->GetAllocation(std::numeric_limits<uint32_t>::max(),
kMaxFramerate);
EXPECT_EQ(kMaxBitrateBps, allocation.get_sum_bps());
}
// Targets strictly inside (min, max) pass through unchanged.
TEST_F(DefaultVideoBitrateAllocatorTest, GoodInBetween) {
BitrateAllocation allocation =
allocator_->GetAllocation(kMinBitrateBps + 1, kMaxFramerate);
EXPECT_EQ(kMinBitrateBps + 1, allocation.get_sum_bps());
allocation = allocator_->GetAllocation(kMaxBitrateBps - 1, kMaxFramerate);
EXPECT_EQ(kMaxBitrateBps - 1, allocation.get_sum_bps());
}
} // namespace webrtc

View File

@ -0,0 +1,279 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/video_coding/utility/frame_dropper.h"
#include <algorithm>
#include "webrtc/rtc_base/logging.h"
namespace webrtc {
namespace {
// Smoothing factors passed to rtc::ExpFilter for the per-quantity averages
// below (higher alpha = slower-moving average).
const float kDefaultFrameSizeAlpha = 0.9f;
const float kDefaultKeyFrameRatioAlpha = 0.99f;
// 1 key frame every 10th second in 30 fps.
const float kDefaultKeyFrameRatioValue = 1 / 300.0f;
const float kDefaultDropRatioAlpha = 0.9f;
// Initial drop ratio applied in Reset() (seed value, not a steady state).
const float kDefaultDropRatioValue = 0.96f;
// Maximum duration over which frames are continuously dropped.
const float kDefaultMaxDropDurationSecs = 4.0f;
// Default target bitrate.
// TODO(isheriff): Should this be higher to avoid dropping too many packets when
// the bandwidth is unknown at the start ?
const float kDefaultTargetBitrateKbps = 300.0f;
const float kDefaultIncomingFrameRate = 30;
// Leaky-bucket depth expressed in seconds of target bitrate.
const float kLeakyBucketSizeSeconds = 0.5f;
// A delta frame that is bigger than |kLargeDeltaFactor| times the average
// delta frame is a large frame that is spread out for accumulation.
const int kLargeDeltaFactor = 3;
// Cap on the frame size accumulator to prevent excessive drops.
const float kAccumulatorCapBufferSizeSecs = 3.0f;
} // namespace
// Default-constructs with kDefaultMaxDropDurationSecs. Delegates to the
// single-parameter constructor so the member-init list (previously duplicated
// verbatim across both constructors) exists in exactly one place.
FrameDropper::FrameDropper() : FrameDropper(kDefaultMaxDropDurationSecs) {}
// Constructs a dropper whose continuous-drop window is capped at
// |max_drop_duration_secs| seconds; all other state starts from Reset().
FrameDropper::FrameDropper(float max_drop_duration_secs)
: key_frame_ratio_(kDefaultKeyFrameRatioAlpha),
delta_frame_size_avg_kbits_(kDefaultFrameSizeAlpha),
drop_ratio_(kDefaultDropRatioAlpha, kDefaultDropRatioValue),
enabled_(true),
max_drop_duration_secs_(max_drop_duration_secs) {
Reset();
}
void FrameDropper::Reset() {
key_frame_ratio_.Reset(kDefaultKeyFrameRatioAlpha);
key_frame_ratio_.Apply(1.0f, kDefaultKeyFrameRatioValue);
delta_frame_size_avg_kbits_.Reset(kDefaultFrameSizeAlpha);
accumulator_ = 0.0f;
accumulator_max_ = kDefaultTargetBitrateKbps / 2;
target_bitrate_ = kDefaultTargetBitrateKbps;
incoming_frame_rate_ = kDefaultIncomingFrameRate;
large_frame_accumulation_count_ = 0;
large_frame_accumulation_chunk_size_ = 0;
large_frame_accumulation_spread_ = 0.5 * kDefaultIncomingFrameRate;
drop_next_ = false;
drop_ratio_.Reset(0.9f);
drop_ratio_.Apply(0.0f, 0.0f);
drop_count_ = 0;
was_below_max_ = true;
}
// Turns the dropper on/off. When disabled, Fill()/Leak() become no-ops and
// DropFrame() always returns false.
void FrameDropper::Enable(bool enable) {
enabled_ = enable;
}
// Adds one encoded frame to the leaky bucket. Key frames (and unusually
// large delta frames) are not added immediately: their size is spread over
// several future Leak() calls via the large_frame_accumulation_* members so
// a single big frame does not overflow the bucket and trigger a drop burst.
void FrameDropper::Fill(size_t framesize_bytes, bool delta_frame) {
if (!enabled_) {
return;
}
// Convert bytes to kilobits to match the kbps bucket arithmetic.
float framesize_kbits = 8.0f * static_cast<float>(framesize_bytes) / 1000.0f;
if (!delta_frame) {
key_frame_ratio_.Apply(1.0, 1.0);
// Do not spread if we are already doing it (or we risk dropping bits that
// need accumulation). Given we compute the key
// frame ratio and spread based on that, this should not normally happen.
if (large_frame_accumulation_count_ == 0) {
// Spread over roughly the inter-key-frame interval, capped at
// |large_frame_accumulation_spread_| frames.
if (key_frame_ratio_.filtered() > 1e-5 &&
1 / key_frame_ratio_.filtered() < large_frame_accumulation_spread_) {
large_frame_accumulation_count_ =
static_cast<int32_t>(1 / key_frame_ratio_.filtered() + 0.5);
} else {
large_frame_accumulation_count_ =
static_cast<int32_t>(large_frame_accumulation_spread_ + 0.5);
}
large_frame_accumulation_chunk_size_ =
framesize_kbits / large_frame_accumulation_count_;
// Zeroed so the frame is not also added to the bucket below.
framesize_kbits = 0;
}
} else {
// Identify if it is an unusually large delta frame and spread accumulation
// if that is the case.
if (delta_frame_size_avg_kbits_.filtered() != -1 &&
(framesize_kbits >
kLargeDeltaFactor * delta_frame_size_avg_kbits_.filtered()) &&
large_frame_accumulation_count_ == 0) {
large_frame_accumulation_count_ =
static_cast<int32_t>(large_frame_accumulation_spread_ + 0.5);
large_frame_accumulation_chunk_size_ =
framesize_kbits / large_frame_accumulation_count_;
framesize_kbits = 0;
} else {
// Normal-sized delta frame: fold it into the running size average.
delta_frame_size_avg_kbits_.Apply(1, framesize_kbits);
}
key_frame_ratio_.Apply(1.0, 0.0);
}
// Change the level of the accumulator (bucket)
accumulator_ += framesize_kbits;
CapAccumulator();
}
// Drains the leaky bucket by one frame interval's worth of budget
// (target_bitrate_ / input_framerate, in kbits) and pays off one chunk of any
// pending large-frame accumulation, then refreshes the drop ratio.
void FrameDropper::Leak(uint32_t input_framerate) {
if (!enabled_) {
return;
}
if (input_framerate < 1) {
return;
}
// Negative target bitrate is treated as "no budget known"; see SetRates().
if (target_bitrate_ < 0.0f) {
return;
}
// Add lower bound for large frame accumulation spread.
large_frame_accumulation_spread_ = std::max(0.5 * input_framerate, 5.0);
// Expected bits per frame based on current input frame rate.
float expected_bits_per_frame = target_bitrate_ / input_framerate;
if (large_frame_accumulation_count_ > 0) {
// Part of this interval's budget goes to the pending large frame.
expected_bits_per_frame -= large_frame_accumulation_chunk_size_;
--large_frame_accumulation_count_;
}
accumulator_ -= expected_bits_per_frame;
if (accumulator_ < 0.0f) {
accumulator_ = 0.0f;
}
UpdateRatio();
}
// Recomputes the filtered drop ratio from the current bucket level:
// above |accumulator_max_| pushes the ratio toward 1 (drop), below pushes it
// toward 0 (keep). A crossing from below to above also forces an immediate
// drop via |drop_next_|.
void FrameDropper::UpdateRatio() {
if (accumulator_ > 1.3f * accumulator_max_) {
// Too far above accumulator max, react faster
drop_ratio_.UpdateBase(0.8f);
} else {
// Go back to normal reaction
drop_ratio_.UpdateBase(0.9f);
}
if (accumulator_ > accumulator_max_) {
// We are above accumulator max, and should ideally
// drop a frame. Increase the dropRatio and drop
// the frame later.
if (was_below_max_) {
drop_next_ = true;
}
drop_ratio_.Apply(1.0f, 1.0f);
drop_ratio_.UpdateBase(0.9f);
} else {
drop_ratio_.Apply(1.0f, 0.0f);
}
was_below_max_ = accumulator_ < accumulator_max_;
}
// This function signals when to drop frames to the caller. It makes use of
// the drop ratio to smooth the drops out over time: a ratio >= 0.5 means
// "drop several frames per kept frame" (positive |drop_count_|), a ratio in
// (0, 0.5) means "keep several frames per dropped frame" (negative
// |drop_count_|). |drop_count_| sign-encodes which regime it was last in.
bool FrameDropper::DropFrame() {
if (!enabled_) {
return false;
}
// A forced drop (bucket just crossed its max) restarts the drop cycle.
if (drop_next_) {
drop_next_ = false;
drop_count_ = 0;
}
if (drop_ratio_.filtered() >= 0.5f) { // Drops per keep
// limit is the number of frames we should drop between each kept frame
// to keep our drop ratio. limit is positive in this case.
float denom = 1.0f - drop_ratio_.filtered();
if (denom < 1e-5) {
denom = 1e-5f;
}
int32_t limit = static_cast<int32_t>(1.0f / denom - 1.0f + 0.5f);
// Put a bound on the max amount of dropped frames between each kept
// frame, in terms of frame rate and window size (secs).
int max_limit =
static_cast<int>(incoming_frame_rate_ * max_drop_duration_secs_);
if (limit > max_limit) {
limit = max_limit;
}
if (drop_count_ < 0) {
// Reset the drop_count_ since it was negative and should be positive.
drop_count_ = -drop_count_;
}
if (drop_count_ < limit) {
// As long we are below the limit we should drop frames.
drop_count_++;
return true;
} else {
// Only when we reset drop_count_ a frame should be kept.
drop_count_ = 0;
return false;
}
} else if (drop_ratio_.filtered() > 0.0f &&
drop_ratio_.filtered() < 0.5f) { // Keeps per drop
// limit is the number of frames we should keep between each drop
// in order to keep the drop ratio. limit is negative in this case,
// and the drop_count_ is also negative.
float denom = drop_ratio_.filtered();
if (denom < 1e-5) {
denom = 1e-5f;
}
int32_t limit = -static_cast<int32_t>(1.0f / denom - 1.0f + 0.5f);
if (drop_count_ > 0) {
// Reset the drop_count_ since we have a positive
// drop_count_, and it should be negative.
drop_count_ = -drop_count_;
}
if (drop_count_ > limit) {
if (drop_count_ == 0) {
// Drop frames when we reset drop_count_.
drop_count_--;
return true;
} else {
// Keep frames as long as we haven't reached limit.
drop_count_--;
return false;
}
} else {
drop_count_ = 0;
return false;
}
}
// Ratio is exactly 0 (or negative): never drop.
drop_count_ = 0;
return false;
}
// Updates the target bitrate (kbps) and the camera frame rate. The bucket
// depth is |kLeakyBucketSizeSeconds| worth of the new bitrate; the current
// bucket level is rescaled proportionally when the budget shrinks.
void FrameDropper::SetRates(float bitrate, float incoming_frame_rate) {
// Bit rate of -1 means infinite bandwidth. (A negative target makes Leak()
// a no-op rather than being special-cased here; see Leak().)
accumulator_max_ = bitrate * kLeakyBucketSizeSeconds;
if (target_bitrate_ > 0.0f && bitrate < target_bitrate_ &&
accumulator_ > accumulator_max_) {
// Rescale the accumulator level if the accumulator max decreases
accumulator_ = bitrate / target_bitrate_ * accumulator_;
}
target_bitrate_ = bitrate;
CapAccumulator();
incoming_frame_rate_ = incoming_frame_rate;
}
// Put a cap on the accumulator, i.e., don't let it grow beyond some level.
// This is a temporary fix for screencasting where very large frames from
// encoder will cause very slow response (too many frame drops).
// TODO(isheriff): Remove this now that large delta frames are also spread out ?
void FrameDropper::CapAccumulator() {
  const float max_accumulator = target_bitrate_ * kAccumulatorCapBufferSizeSecs;
  accumulator_ = std::min(accumulator_, max_accumulator);
}
} // namespace webrtc

View File

@ -0,0 +1,99 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_CODING_UTILITY_FRAME_DROPPER_H_
#define WEBRTC_MODULES_VIDEO_CODING_UTILITY_FRAME_DROPPER_H_
#include <cstddef>
#include "webrtc/rtc_base/numerics/exp_filter.h"
#include "webrtc/typedefs.h"
namespace webrtc {
// The Frame Dropper implements a variant of the leaky bucket algorithm
// for keeping track of when to drop frames to avoid bit rate
// over use when the encoder can't keep its bit rate.
class FrameDropper {
public:
FrameDropper();
// |max_time_drops| caps the continuous-drop window, in seconds.
explicit FrameDropper(float max_time_drops);
virtual ~FrameDropper() {}
// Resets the FrameDropper to its initial state (filters, bucket level and
// drop bookkeeping all return to their startup values).
virtual void Reset();
// Enables/disables the dropper. When disabled, Fill()/Leak() are no-ops
// and DropFrame() always returns false.
virtual void Enable(bool enable);
// Answers the question if it's time to drop a frame
// if we want to reach a given frame rate. Must be
// called for every frame.
//
// Return value : True if we should drop the current frame
virtual bool DropFrame();
// Updates the FrameDropper with the size of the latest encoded
// frame. The FrameDropper calculates a new drop ratio (can be
// seen as the probability to drop a frame) and updates its
// internal statistics.
//
// Input:
// - frameSizeBytes : The size of the latest frame
// returned from the encoder.
// - deltaFrame : True if the encoder returned a delta
// frame; false if it was a key frame.
// (The implementation treats deltaFrame ==
// false as the key-frame case.)
virtual void Fill(size_t frameSizeBytes, bool deltaFrame);
// Drains one frame interval's worth of budget from the leaky bucket, based
// on |inputFrameRate| and the current target bitrate.
virtual void Leak(uint32_t inputFrameRate);
// Sets the target bit rate and the frame rate produced by
// the camera.
//
// Input:
// - bitRate : The target bit rate (kbps)
virtual void SetRates(float bitRate, float incoming_frame_rate);
private:
void UpdateRatio();
void CapAccumulator();
// EWMA of the fraction of frames that are key frames.
rtc::ExpFilter key_frame_ratio_;
// EWMA of the delta-frame size in kbits.
rtc::ExpFilter delta_frame_size_avg_kbits_;
// Key frames and large delta frames are not immediately accumulated in the
// bucket since they can immediately overflow the bucket leading to large
// drops on the following packets that may be much smaller. Instead these
// large frames are accumulated over several frames when the bucket leaks.
// |large_frame_accumulation_spread_| represents the number of frames over
// which a large frame is accumulated.
float large_frame_accumulation_spread_;
// |large_frame_accumulation_count_| represents the number of frames left
// to finish accumulating a large frame.
int large_frame_accumulation_count_;
// |large_frame_accumulation_chunk_size_| represents the size of a single
// chunk for large frame accumulation.
float large_frame_accumulation_chunk_size_;
// Leaky-bucket level (kbits) and its cap.
float accumulator_;
float accumulator_max_;
// Target bitrate in kbps; negative disables leaking (see Leak()).
float target_bitrate_;
// Forces the next DropFrame() call to report a drop.
bool drop_next_;
// EWMA of the drop decision; ~probability of dropping a frame.
rtc::ExpFilter drop_ratio_;
// Run-length counter; sign encodes drops-per-keep vs keeps-per-drop mode.
int drop_count_;
float incoming_frame_rate_;
bool was_below_max_;
bool enabled_;
const float max_drop_duration_secs_;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_CODING_UTILITY_FRAME_DROPPER_H_

View File

@ -0,0 +1,161 @@
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/video_coding/utility/frame_dropper.h"
#include "webrtc/rtc_base/logging.h"
#include "webrtc/test/gtest.h"
namespace webrtc {
namespace {
const float kTargetBitRateKbps = 300;
const float kIncomingFrameRate = 30;
// 1250 bytes/frame == 300 kbps at 30 fps (300000 / 30 / 8).
const size_t kFrameSizeBytes = 1250;
// 20x the nominal frame size; used as the oversized key/delta frame.
const size_t kLargeFrameSizeBytes = 25000;
const bool kIncludeKeyFrame = true;
const bool kDoNotIncludeKeyFrame = false;
} // namespace
// Fixture: a FrameDropper configured with the test target bitrate/framerate
// plus helpers to overflow its bucket and to validate drop behavior against
// synthetic frame streams.
class FrameDropperTest : public ::testing::Test {
protected:
void SetUp() override {
frame_dropper_.SetRates(kTargetBitRateKbps, kIncomingFrameRate);
}
// Fills one second's worth of frames but leaks only once, leaving the
// leaky bucket well above its max.
void OverflowLeakyBucket() {
// Overflow bucket in frame dropper.
for (int i = 0; i < kIncomingFrameRate; ++i) {
frame_dropper_.Fill(kFrameSizeBytes, true);
}
frame_dropper_.Leak(kIncomingFrameRate);
}
// Feeds 5 seconds of traffic that exactly meets the target bitrate —
// |large_frame_rate| large frames per second (key or delta per
// |is_large_frame_delta|) interleaved with smaller frames — and expects no
// frame to be dropped at any point.
void ValidateNoDropsAtTargetBitrate(int large_frame_size_bytes,
int large_frame_rate,
bool is_large_frame_delta) {
// Smaller frame size is computed to meet |kTargetBitRateKbps|.
int small_frame_size_bytes =
kFrameSizeBytes -
(large_frame_size_bytes * large_frame_rate) / kIncomingFrameRate;
for (int i = 1; i <= 5 * large_frame_rate; ++i) {
// Large frame. First frame is always a key frame.
frame_dropper_.Fill(large_frame_size_bytes,
(i == 1) ? false : is_large_frame_delta);
frame_dropper_.Leak(kIncomingFrameRate);
EXPECT_FALSE(frame_dropper_.DropFrame());
// Smaller frames.
for (int j = 1; j < kIncomingFrameRate / large_frame_rate; ++j) {
frame_dropper_.Fill(small_frame_size_bytes, true);
frame_dropper_.Leak(kIncomingFrameRate);
EXPECT_FALSE(frame_dropper_.DropFrame());
}
}
}
// Offers |bitrate_kbps| worth of frames (above the configured target) for
// 1000 simulated seconds and checks that the kept frames' throughput
// matches the target bitrate to within 0.1%.
void ValidateThroughputMatchesTargetBitrate(int bitrate_kbps,
bool include_keyframe) {
int delta_frame_size;
int total_bytes = 0;
// Size delta frames so that one second of traffic totals |bitrate_kbps|,
// accounting for one large key frame per second when requested.
if (include_keyframe) {
delta_frame_size = ((1000.0 / 8 * bitrate_kbps) - kLargeFrameSizeBytes) /
(kIncomingFrameRate - 1);
} else {
delta_frame_size = bitrate_kbps * 1000.0 / (8 * kIncomingFrameRate);
}
const int kNumIterations = 1000;
for (int i = 1; i <= kNumIterations; ++i) {
int j = 0;
if (include_keyframe) {
if (!frame_dropper_.DropFrame()) {
frame_dropper_.Fill(kLargeFrameSizeBytes, false);
total_bytes += kLargeFrameSizeBytes;
}
frame_dropper_.Leak(kIncomingFrameRate);
j++;
}
for (; j < kIncomingFrameRate; ++j) {
if (!frame_dropper_.DropFrame()) {
frame_dropper_.Fill(delta_frame_size, true);
total_bytes += delta_frame_size;
}
frame_dropper_.Leak(kIncomingFrameRate);
}
}
float throughput_kbps = total_bytes * 8.0 / (1000 * kNumIterations);
float deviation_from_target =
(throughput_kbps - kTargetBitRateKbps) * 100.0 / kTargetBitRateKbps;
if (deviation_from_target < 0) {
deviation_from_target = -deviation_from_target;
}
// Variation is < 0.1%
EXPECT_LE(deviation_from_target, 0.1);
}
FrameDropper frame_dropper_;
};
// A disabled dropper never signals a drop, even with an overflowing bucket.
TEST_F(FrameDropperTest, NoDropsWhenDisabled) {
frame_dropper_.Enable(false);
OverflowLeakyBucket();
EXPECT_FALSE(frame_dropper_.DropFrame());
}
// An enabled dropper signals a drop once the bucket overflows.
TEST_F(FrameDropperTest, DropsByDefaultWhenBucketOverflows) {
OverflowLeakyBucket();
EXPECT_TRUE(frame_dropper_.DropFrame());
}
// Frames arriving exactly at the target bitrate are never dropped.
TEST_F(FrameDropperTest, NoDropsWhenFillRateMatchesLeakRate) {
for (int i = 0; i < 5 * kIncomingFrameRate; ++i) {
frame_dropper_.Fill(kFrameSizeBytes, true);
frame_dropper_.Leak(kIncomingFrameRate);
EXPECT_FALSE(frame_dropper_.DropFrame());
}
}
// Oversized key frames at various rates must be spread out (not dropped)
// while the stream stays at the target bitrate.
TEST_F(FrameDropperTest, LargeKeyFrames) {
ValidateNoDropsAtTargetBitrate(kLargeFrameSizeBytes, 1, false);
frame_dropper_.Reset();
ValidateNoDropsAtTargetBitrate(kLargeFrameSizeBytes / 2, 2, false);
frame_dropper_.Reset();
ValidateNoDropsAtTargetBitrate(kLargeFrameSizeBytes / 4, 4, false);
frame_dropper_.Reset();
ValidateNoDropsAtTargetBitrate(kLargeFrameSizeBytes / 8, 8, false);
}
// Same as above, but the oversized frames are delta frames.
TEST_F(FrameDropperTest, LargeDeltaFrames) {
ValidateNoDropsAtTargetBitrate(kLargeFrameSizeBytes, 1, true);
frame_dropper_.Reset();
ValidateNoDropsAtTargetBitrate(kLargeFrameSizeBytes / 2, 2, true);
frame_dropper_.Reset();
ValidateNoDropsAtTargetBitrate(kLargeFrameSizeBytes / 4, 4, true);
frame_dropper_.Reset();
ValidateNoDropsAtTargetBitrate(kLargeFrameSizeBytes / 8, 8, true);
}
// When offered traffic exceeds the target, kept throughput converges on the
// target bitrate regardless of key-frame presence.
TEST_F(FrameDropperTest, TrafficVolumeAboveAvailableBandwidth) {
ValidateThroughputMatchesTargetBitrate(700, kIncludeKeyFrame);
ValidateThroughputMatchesTargetBitrate(700, kDoNotIncludeKeyFrame);
ValidateThroughputMatchesTargetBitrate(600, kIncludeKeyFrame);
ValidateThroughputMatchesTargetBitrate(600, kDoNotIncludeKeyFrame);
ValidateThroughputMatchesTargetBitrate(500, kIncludeKeyFrame);
ValidateThroughputMatchesTargetBitrate(500, kDoNotIncludeKeyFrame);
}
} // namespace webrtc

View File

@ -0,0 +1,200 @@
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/video_coding/utility/ivf_file_writer.h"
#include <string>
#include <utility>
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
#include "webrtc/rtc_base/checks.h"
#include "webrtc/rtc_base/logging.h"
// TODO(palmkvist): make logging more informative in the absence of a file name
// (or get one)
namespace webrtc {
// Size in bytes of the fixed IVF ("DKIF") file header written by WriteHeader().
const size_t kIvfHeaderSize = 32;
// Takes ownership of |file| and will write at most |byte_limit| bytes
// (0 = unlimited). Codec type, resolution and timestamp mode are filled in
// lazily from the first written frame; see InitFromFirstFrame().
IvfFileWriter::IvfFileWriter(rtc::File file, size_t byte_limit)
: codec_type_(kVideoCodecUnknown),
bytes_written_(0),
byte_limit_(byte_limit),
num_frames_(0),
width_(0),
height_(0),
last_timestamp_(-1),
using_capture_timestamps_(false),
file_(std::move(file)) {
RTC_DCHECK(byte_limit == 0 || kIvfHeaderSize <= byte_limit)
<< "The byte_limit is too low, not even the header will fit.";
}
// Finalizes the header and closes the file if still open.
IvfFileWriter::~IvfFileWriter() {
Close();
}
// Factory wrapping |file| in a writer. Uses bare |new| because the
// constructor is private and therefore not reachable from make_unique.
std::unique_ptr<IvfFileWriter> IvfFileWriter::Wrap(rtc::File file,
size_t byte_limit) {
return std::unique_ptr<IvfFileWriter>(
new IvfFileWriter(std::move(file), byte_limit));
}
// Seeks to the start of the file and (re)writes the 32-byte IVF "DKIF"
// header reflecting the current codec fourcc, resolution, clock rate and
// frame count. Called on init and again from Close() to patch in the final
// frame count. Returns false on seek/write failure or unknown codec.
bool IvfFileWriter::WriteHeader() {
if (!file_.Seek(0)) {
LOG(LS_WARNING) << "Unable to rewind ivf output file.";
return false;
}
uint8_t ivf_header[kIvfHeaderSize] = {0};
// Bytes 0-3: "DKIF" signature.
ivf_header[0] = 'D';
ivf_header[1] = 'K';
ivf_header[2] = 'I';
ivf_header[3] = 'F';
ByteWriter<uint16_t>::WriteLittleEndian(&ivf_header[4], 0); // Version.
ByteWriter<uint16_t>::WriteLittleEndian(&ivf_header[6], 32); // Header size.
// Bytes 8-11: codec fourcc.
switch (codec_type_) {
case kVideoCodecVP8:
ivf_header[8] = 'V';
ivf_header[9] = 'P';
ivf_header[10] = '8';
ivf_header[11] = '0';
break;
case kVideoCodecVP9:
ivf_header[8] = 'V';
ivf_header[9] = 'P';
ivf_header[10] = '9';
ivf_header[11] = '0';
break;
case kVideoCodecH264:
ivf_header[8] = 'H';
ivf_header[9] = '2';
ivf_header[10] = '6';
ivf_header[11] = '4';
break;
default:
LOG(LS_ERROR) << "Unknown CODEC type: " << codec_type_;
return false;
}
ByteWriter<uint16_t>::WriteLittleEndian(&ivf_header[12], width_);
ByteWriter<uint16_t>::WriteLittleEndian(&ivf_header[14], height_);
// Render timestamps are in ms (1/1000 scale), while RTP timestamps use a
// 90kHz clock.
ByteWriter<uint32_t>::WriteLittleEndian(
&ivf_header[16], using_capture_timestamps_ ? 1000 : 90000);
ByteWriter<uint32_t>::WriteLittleEndian(&ivf_header[20], 1);
ByteWriter<uint32_t>::WriteLittleEndian(&ivf_header[24],
static_cast<uint32_t>(num_frames_));
ByteWriter<uint32_t>::WriteLittleEndian(&ivf_header[28], 0); // Reserved.
if (file_.Write(ivf_header, kIvfHeaderSize) < kIvfHeaderSize) {
LOG(LS_ERROR) << "Unable to write IVF header for ivf output file.";
return false;
}
// Account for the header only once, even when rewritten by Close().
if (bytes_written_ < kIvfHeaderSize) {
bytes_written_ = kIvfHeaderSize;
}
return true;
}
// Captures stream parameters (resolution, codec, timestamp mode) from the
// first frame and writes the initial file header.
bool IvfFileWriter::InitFromFirstFrame(const EncodedImage& encoded_image,
VideoCodecType codec_type) {
width_ = encoded_image._encodedWidth;
height_ = encoded_image._encodedHeight;
RTC_CHECK_GT(width_, 0);
RTC_CHECK_GT(height_, 0);
// Heuristic: a zero RTP timestamp on the first frame is taken to mean that
// capture times (ms, 1 kHz) are used instead of 90 kHz RTP timestamps.
using_capture_timestamps_ = encoded_image._timeStamp == 0;
codec_type_ = codec_type;
if (!WriteHeader())
return false;
const char* codec_name =
CodecTypeToPayloadString(codec_type_);
// NOTE(review): informational message, but logged at warning level.
LOG(LS_WARNING) << "Created IVF file for codec data of type " << codec_name
<< " at resolution " << width_ << " x " << height_
<< ", using " << (using_capture_timestamps_ ? "1" : "90")
<< "kHz clock resolution.";
return true;
}
bool IvfFileWriter::WriteFrame(const EncodedImage& encoded_image,
VideoCodecType codec_type) {
if (!file_.IsOpen())
return false;
if (num_frames_ == 0 && !InitFromFirstFrame(encoded_image, codec_type))
return false;
RTC_DCHECK_EQ(codec_type_, codec_type);
if ((encoded_image._encodedWidth > 0 || encoded_image._encodedHeight > 0) &&
(encoded_image._encodedHeight != height_ ||
encoded_image._encodedWidth != width_)) {
LOG(LS_WARNING)
<< "Incomig frame has diffferent resolution then previous: (" << width_
<< "x" << height_ << ") -> (" << encoded_image._encodedWidth << "x"
<< encoded_image._encodedHeight << ")";
}
int64_t timestamp = using_capture_timestamps_
? encoded_image.capture_time_ms_
: wrap_handler_.Unwrap(encoded_image._timeStamp);
if (last_timestamp_ != -1 && timestamp <= last_timestamp_) {
LOG(LS_WARNING) << "Timestamp no increasing: " << last_timestamp_ << " -> "
<< timestamp;
}
last_timestamp_ = timestamp;
const size_t kFrameHeaderSize = 12;
if (byte_limit_ != 0 &&
bytes_written_ + kFrameHeaderSize + encoded_image._length > byte_limit_) {
LOG(LS_WARNING) << "Closing IVF file due to reaching size limit: "
<< byte_limit_ << " bytes.";
Close();
return false;
}
uint8_t frame_header[kFrameHeaderSize] = {};
ByteWriter<uint32_t>::WriteLittleEndian(
&frame_header[0], static_cast<uint32_t>(encoded_image._length));
ByteWriter<uint64_t>::WriteLittleEndian(&frame_header[4], timestamp);
if (file_.Write(frame_header, kFrameHeaderSize) < kFrameHeaderSize ||
file_.Write(encoded_image._buffer, encoded_image._length) <
encoded_image._length) {
LOG(LS_ERROR) << "Unable to write frame to file.";
return false;
}
bytes_written_ += kFrameHeaderSize + encoded_image._length;
++num_frames_;
return true;
}
// Rewrites the header with the final frame count and closes the file.
// Returns false if the file was already closed or the header rewrite failed.
// If no frames were ever written, the file is closed without writing a
// header (nothing was written to it).
bool IvfFileWriter::Close() {
if (!file_.IsOpen())
return false;
if (num_frames_ == 0) {
file_.Close();
return true;
}
bool ret = WriteHeader();
file_.Close();
return ret;
}
} // namespace webrtc

View File

@ -0,0 +1,60 @@
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_CODING_UTILITY_IVF_FILE_WRITER_H_
#define WEBRTC_MODULES_VIDEO_CODING_UTILITY_IVF_FILE_WRITER_H_
#include <memory>
#include <string>
#include "webrtc/common_video/include/video_frame.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/rtc_base/constructormagic.h"
#include "webrtc/rtc_base/file.h"
#include "webrtc/rtc_base/timeutils.h"
namespace webrtc {
// Writes encoded video frames to a file in the IVF container format.
class IvfFileWriter {
public:
// Takes ownership of the file, which will be closed either through
// Close or ~IvfFileWriter. If writing a frame would take the file above the
// |byte_limit| the file will be closed, the write (and all future writes)
// will fail. A |byte_limit| of 0 is equivalent to no limit.
static std::unique_ptr<IvfFileWriter> Wrap(rtc::File file, size_t byte_limit);
~IvfFileWriter();
// Appends one frame; the first frame determines codec, resolution and
// timestamp mode for the whole file. Returns false on failure.
bool WriteFrame(const EncodedImage& encoded_image, VideoCodecType codec_type);
// Finalizes the header (frame count) and closes the file.
bool Close();
private:
explicit IvfFileWriter(rtc::File file, size_t byte_limit);
bool WriteHeader();
bool InitFromFirstFrame(const EncodedImage& encoded_image,
VideoCodecType codec_type);
// Locked in from the first written frame.
VideoCodecType codec_type_;
size_t bytes_written_;
size_t byte_limit_;
size_t num_frames_;
uint16_t width_;
uint16_t height_;
// Last frame timestamp written; -1 until the first frame.
int64_t last_timestamp_;
// True when capture times (ms) are written instead of RTP timestamps.
bool using_capture_timestamps_;
// Unwraps the 32-bit RTP timestamp into a monotonically growing int64.
rtc::TimestampWrapAroundHandler wrap_handler_;
rtc::File file_;
RTC_DISALLOW_COPY_AND_ASSIGN(IvfFileWriter);
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_CODING_UTILITY_IVF_FILE_WRITER_H_

View File

@ -0,0 +1,183 @@
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/video_coding/utility/ivf_file_writer.h"
#include <memory>
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
#include "webrtc/rtc_base/helpers.h"
#include "webrtc/rtc_base/logging.h"
#include "webrtc/rtc_base/thread.h"
#include "webrtc/rtc_base/timeutils.h"
#include "webrtc/test/gtest.h"
#include "webrtc/test/testsupport/fileutils.h"
namespace webrtc {
namespace {
// Sizes (bytes) of the fixed IVF file header and of each per-frame header.
// |static| dropped: it is redundant inside an anonymous namespace, which
// already gives internal linkage.
const int kHeaderSize = 32;
const int kFrameHeaderSize = 12;
// Non-const on purpose: assigned to EncodedImage::_buffer (mutable uint8_t*).
uint8_t dummy_payload[4] = {0, 1, 2, 3};
}  // namespace
class IvfFileWriterTest : public ::testing::Test {
protected:
// Generates a unique temp-file path for each test; the file itself is
// created when the writer opens it.
void SetUp() override {
file_name_ =
webrtc::test::TempFilename(webrtc::test::OutputPath(), "test_file");
}
// Deletes the temp file produced by the test.
void TearDown() override { rtc::RemoveFile(file_name_); }
// Writes |num_frames| frames from |dummy_payload| with cycling lengths
// (i % 4 bytes), timestamped either with capture times (ms) or RTP
// timestamps. Returns false as soon as a write fails.
bool WriteDummyTestFrames(VideoCodecType codec_type,
int width,
int height,
int num_frames,
bool use_capture_tims_ms) {
EncodedImage frame;
frame._buffer = dummy_payload;
frame._encodedWidth = width;
frame._encodedHeight = height;
for (int i = 1; i <= num_frames; ++i) {
// Length cycles 1, 2, 3, 0 so every payload size is exercised.
frame._length = i % sizeof(dummy_payload);
if (use_capture_tims_ms) {
frame.capture_time_ms_ = i;
} else {
frame._timeStamp = i;
}
if (!file_writer_->WriteFrame(frame, codec_type))
return false;
}
return true;
}
// Reads the 32-byte IVF header from |file| and checks every field:
// signature, version, header size, fourcc, resolution, clock rate
// (1000 for capture times, 90000 for RTP), scale, frame count and the
// reserved word.
void VerifyIvfHeader(rtc::File* file,
const uint8_t fourcc[4],
int width,
int height,
uint32_t num_frames,
bool use_capture_tims_ms) {
ASSERT_TRUE(file->IsOpen());
uint8_t data[kHeaderSize];
ASSERT_EQ(static_cast<size_t>(kHeaderSize), file->Read(data, kHeaderSize));
uint8_t dkif[4] = {'D', 'K', 'I', 'F'};
EXPECT_EQ(0, memcmp(dkif, data, 4));
EXPECT_EQ(0u, ByteReader<uint16_t>::ReadLittleEndian(&data[4]));
EXPECT_EQ(32u, ByteReader<uint16_t>::ReadLittleEndian(&data[6]));
EXPECT_EQ(0, memcmp(fourcc, &data[8], 4));
EXPECT_EQ(width, ByteReader<uint16_t>::ReadLittleEndian(&data[12]));
EXPECT_EQ(height, ByteReader<uint16_t>::ReadLittleEndian(&data[14]));
EXPECT_EQ(use_capture_tims_ms ? 1000u : 90000u,
ByteReader<uint32_t>::ReadLittleEndian(&data[16]));
EXPECT_EQ(1u, ByteReader<uint32_t>::ReadLittleEndian(&data[20]));
EXPECT_EQ(num_frames, ByteReader<uint32_t>::ReadLittleEndian(&data[24]));
EXPECT_EQ(0u, ByteReader<uint32_t>::ReadLittleEndian(&data[28]));
}
void VerifyDummyTestFrames(rtc::File* file, uint32_t num_frames) {
const int kMaxFrameSize = 4;
for (uint32_t i = 1; i <= num_frames; ++i) {
uint8_t frame_header[kFrameHeaderSize];
ASSERT_EQ(static_cast<unsigned int>(kFrameHeaderSize),
file->Read(frame_header, kFrameHeaderSize));
uint32_t frame_length =
ByteReader<uint32_t>::ReadLittleEndian(&frame_header[0]);
EXPECT_EQ(i % 4, frame_length);
uint64_t timestamp =
ByteReader<uint64_t>::ReadLittleEndian(&frame_header[4]);
EXPECT_EQ(i, timestamp);
uint8_t data[kMaxFrameSize] = {};
ASSERT_EQ(frame_length,
static_cast<uint32_t>(file->Read(data, frame_length)));
EXPECT_EQ(0, memcmp(data, dummy_payload, frame_length));
}
}
void RunBasicFileStructureTest(VideoCodecType codec_type,
const uint8_t fourcc[4],
bool use_capture_tims_ms) {
file_writer_ = IvfFileWriter::Wrap(rtc::File::Open(file_name_), 0);
ASSERT_TRUE(file_writer_.get());
const int kWidth = 320;
const int kHeight = 240;
const int kNumFrames = 257;
ASSERT_TRUE(WriteDummyTestFrames(codec_type, kWidth, kHeight, kNumFrames,
use_capture_tims_ms));
EXPECT_TRUE(file_writer_->Close());
rtc::File out_file = rtc::File::Open(file_name_);
VerifyIvfHeader(&out_file, fourcc, kWidth, kHeight, kNumFrames,
use_capture_tims_ms);
VerifyDummyTestFrames(&out_file, kNumFrames);
out_file.Close();
}
std::string file_name_;
std::unique_ptr<IvfFileWriter> file_writer_;
};
// Write-then-verify round trips for each supported codec, with both RTP
// ("NtpTimestamp" variants) and capture-time in ms ("MsTimestamp" variants).
TEST_F(IvfFileWriterTest, WritesBasicVP8FileNtpTimestamp) {
  const uint8_t fourcc[4] = {'V', 'P', '8', '0'};
  RunBasicFileStructureTest(kVideoCodecVP8, fourcc, false);
}
TEST_F(IvfFileWriterTest, WritesBasicVP8FileMsTimestamp) {
  const uint8_t fourcc[4] = {'V', 'P', '8', '0'};
  RunBasicFileStructureTest(kVideoCodecVP8, fourcc, true);
}
TEST_F(IvfFileWriterTest, WritesBasicVP9FileNtpTimestamp) {
  const uint8_t fourcc[4] = {'V', 'P', '9', '0'};
  RunBasicFileStructureTest(kVideoCodecVP9, fourcc, false);
}
TEST_F(IvfFileWriterTest, WritesBasicVP9FileMsTimestamp) {
  const uint8_t fourcc[4] = {'V', 'P', '9', '0'};
  RunBasicFileStructureTest(kVideoCodecVP9, fourcc, true);
}
TEST_F(IvfFileWriterTest, WritesBasicH264FileNtpTimestamp) {
  const uint8_t fourcc[4] = {'H', '2', '6', '4'};
  RunBasicFileStructureTest(kVideoCodecH264, fourcc, false);
}
TEST_F(IvfFileWriterTest, WritesBasicH264FileMsTimestamp) {
  const uint8_t fourcc[4] = {'H', '2', '6', '4'};
  RunBasicFileStructureTest(kVideoCodecH264, fourcc, true);
}
// When a byte limit is passed to Wrap(), writing past the limit must fail and
// the finalized file must contain only the frames that fit.
TEST_F(IvfFileWriterTest, ClosesWhenReachesLimit) {
  const uint8_t fourcc[4] = {'V', 'P', '8', '0'};
  const int kWidth = 320;
  const int kHeight = 240;
  const int kNumFramesToWrite = 2;
  const int kNumFramesToFit = 1;
  // Limit sized for the file header plus exactly one max-size frame.
  file_writer_ = IvfFileWriter::Wrap(
      rtc::File::Open(file_name_),
      kHeaderSize +
          kNumFramesToFit * (kFrameHeaderSize + sizeof(dummy_payload)));
  ASSERT_TRUE(file_writer_.get());
  // The second frame exceeds the limit, so the write loop reports failure.
  ASSERT_FALSE(WriteDummyTestFrames(kVideoCodecVP8, kWidth, kHeight,
                                    kNumFramesToWrite, true));
  // NOTE(review): Close() returning false here presumably means the writer
  // already closed itself when the limit was hit — confirm against
  // IvfFileWriter::Close().
  ASSERT_FALSE(file_writer_->Close());
  rtc::File out_file = rtc::File::Open(file_name_);
  VerifyIvfHeader(&out_file, fourcc, kWidth, kHeight, kNumFramesToFit, true);
  VerifyDummyTestFrames(&out_file, kNumFramesToFit);
  out_file.Close();
}
} // namespace webrtc

View File

@ -0,0 +1,34 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_CODING_UTILITY_MOCK_MOCK_FRAME_DROPPER_H_
#define WEBRTC_MODULES_VIDEO_CODING_UTILITY_MOCK_MOCK_FRAME_DROPPER_H_
#include <string>
#include "webrtc/modules/video_coding/utility/frame_dropper.h"
#include "webrtc/test/gmock.h"
#include "webrtc/typedefs.h"
namespace webrtc {
// GMock stub of FrameDropper, allowing tests to observe and script
// frame-dropper interactions without real leaky-bucket behavior.
class MockFrameDropper : public FrameDropper {
 public:
  MOCK_METHOD0(Reset, void());
  MOCK_METHOD1(Enable, void(bool enable));
  MOCK_METHOD0(DropFrame, bool());
  MOCK_METHOD2(Fill, void(size_t frameSizeBytes, bool deltaFrame));
  MOCK_METHOD1(Leak, void(uint32_t inputFrameRate));
  MOCK_METHOD2(SetRates, void(float bitRate, float incoming_frame_rate));
  MOCK_CONST_METHOD1(ActualFrameRate, float(uint32_t inputFrameRate));
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_CODING_UTILITY_MOCK_MOCK_FRAME_DROPPER_H_

View File

@ -0,0 +1,46 @@
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/video_coding/utility/moving_average.h"
#include <algorithm>
namespace webrtc {
// |s| is the window length. One extra slot is allocated so the prefix sum of
// the sample just outside the window is always available (see GetAverage()).
MovingAverage::MovingAverage(size_t s) : sum_history_(s + 1, 0) {}
// Adds one sample in O(1): bumps the running sum and stores it as a prefix
// sum in the ring buffer. Slot (count_ % size) holds the sum of the first
// count_ samples, so the oldest slot is overwritten as the window slides.
void MovingAverage::AddSample(int sample) {
  count_++;
  sum_ += sample;
  sum_history_[count_ % sum_history_.size()] = sum_;
}
// Average over the whole window (or over all samples, if fewer than a full
// window were added). Empty when no samples exist.
rtc::Optional<int> MovingAverage::GetAverage() const {
  return GetAverage(size());
}
// Average of the last |num_samples| samples; empty when |num_samples| is zero
// or exceeds the number of stored samples.
rtc::Optional<int> MovingAverage::GetAverage(size_t num_samples) const {
  if (num_samples > size() || num_samples == 0)
    return rtc::Optional<int>();
  // Difference of two prefix sums gives the sum of the last |num_samples|
  // samples in O(1). num_samples <= count_ here, so no unsigned underflow.
  int sum = sum_ - sum_history_[(count_ - num_samples) % sum_history_.size()];
  return rtc::Optional<int>(sum / static_cast<int>(num_samples));
}
// Forgets all samples: zeroes the counters and wipes the prefix-sum buffer,
// keeping its capacity.
void MovingAverage::Reset() {
  count_ = 0;
  sum_ = 0;
  sum_history_.assign(sum_history_.size(), 0);
}
// Number of valid samples currently in the window: the total sample count,
// capped by the window length (buffer capacity minus the spare slot).
size_t MovingAverage::size() const {
  const size_t max_window = sum_history_.size() - 1;
  return (count_ < max_window) ? count_ : max_window;
}
} // namespace webrtc

View File

@ -0,0 +1,35 @@
/*
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_CODING_UTILITY_MOVING_AVERAGE_H_
#define WEBRTC_MODULES_VIDEO_CODING_UTILITY_MOVING_AVERAGE_H_
#include <vector>
#include "webrtc/api/optional.h"
namespace webrtc {
// Computes the running average over a sliding window of the most recent
// samples. AddSample() and GetAverage() are O(1); memory is O(s).
class MovingAverage {
 public:
  // |s| is the maximum number of samples the average is computed over.
  explicit MovingAverage(size_t s);
  // Adds one sample; the oldest sample falls out once the window is full.
  void AddSample(int sample);
  // Average over all currently stored samples; empty when there are none.
  rtc::Optional<int> GetAverage() const;
  // Average over the last |num_samples| samples; empty when |num_samples| is
  // zero or larger than the number of stored samples.
  rtc::Optional<int> GetAverage(size_t num_samples) const;
  // Drops all samples.
  void Reset();
  // Number of samples currently stored (at most the window length).
  size_t size() const;

 private:
  size_t count_ = 0;  // Total number of samples ever added.
  int sum_ = 0;       // Running sum of all added samples.
  // Ring buffer of prefix sums; one slot larger than the window.
  std::vector<int> sum_history_;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_CODING_UTILITY_MOVING_AVERAGE_H_

View File

@ -0,0 +1,62 @@
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/video_coding/utility/moving_average.h"
#include "webrtc/test/gtest.h"
// With no samples added, size() is 0 and GetAverage(0) is empty.
TEST(MovingAverageTest, EmptyAverage) {
  webrtc::MovingAverage moving_average(1);
  EXPECT_EQ(0u, moving_average.size());
  EXPECT_FALSE(moving_average.GetAverage(0));
}

// Test single value.
TEST(MovingAverageTest, OneElement) {
  webrtc::MovingAverage moving_average(1);
  moving_average.AddSample(3);
  EXPECT_EQ(1u, moving_average.size());
  EXPECT_EQ(3, *moving_average.GetAverage());
  EXPECT_EQ(3, *moving_average.GetAverage(1));
  // Asking for more samples than were stored yields an empty optional.
  EXPECT_FALSE(moving_average.GetAverage(2));
}

// Averages over sub-windows of different lengths (integer division).
TEST(MovingAverageTest, GetAverage) {
  webrtc::MovingAverage moving_average(1024);
  moving_average.AddSample(1);
  moving_average.AddSample(1);
  moving_average.AddSample(3);
  moving_average.AddSample(3);
  EXPECT_EQ(*moving_average.GetAverage(4), 2);
  EXPECT_EQ(*moving_average.GetAverage(2), 3);
  EXPECT_FALSE(moving_average.GetAverage(0));
}

// Reset() must drop all stored samples.
TEST(MovingAverageTest, Reset) {
  webrtc::MovingAverage moving_average(5);
  moving_average.AddSample(1);
  EXPECT_EQ(1, *moving_average.GetAverage(1));
  moving_average.Reset();
  EXPECT_FALSE(moving_average.GetAverage(1));
  EXPECT_FALSE(moving_average.GetAverage(6));
}

// Exercises ring-buffer wrap-around: after 2000 samples in a 10-sample
// window, the average covers samples 1991..2000 (mean 1995.5, truncated).
TEST(MovingAverageTest, ManySamples) {
  webrtc::MovingAverage moving_average(10);
  for (int i = 1; i < 11; i++) {
    moving_average.AddSample(i);
  }
  EXPECT_EQ(*moving_average.GetAverage(), 5);
  moving_average.Reset();
  for (int i = 1; i < 2001; i++) {
    moving_average.AddSample(i);
  }
  EXPECT_EQ(*moving_average.GetAverage(), 1995);
}

View File

@ -0,0 +1,205 @@
/*
* Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/video_coding/utility/quality_scaler.h"
#include <math.h>
#include <algorithm>
#include <memory>
#include "webrtc/rtc_base/checks.h"
#include "webrtc/rtc_base/logging.h"
#include "webrtc/rtc_base/task_queue.h"
// TODO(kthelgason): Some versions of Android have issues with log2.
// See https://code.google.com/p/android/issues/detail?id=212634 for details
#if defined(WEBRTC_ANDROID)
#define log2(x) (log(x) / log(2))
#endif
namespace webrtc {
namespace {
// Initial QP-check sampling period in ms, used until the first downscale
// (a short period permits fast rampup).
static const int kMeasureMs = 2000;
// After the first downscale, the sampling period is stretched by this factor
// (see QualityScaler::GetSamplingPeriodMs()).
static const float kSamplePeriodScaleFactor = 2.5;
// Scale down when at least this percentage of recent frames was dropped.
static const int kFramedropPercentThreshold = 60;
// QP scaling threshold defaults:
static const int kLowH264QpThreshold = 24;
static const int kHighH264QpThreshold = 37;
// QP is obtained from VP8-bitstream for HW, so the QP corresponds to the
// bitstream range of [0, 127] and not the user-level range of [0,63].
static const int kLowVp8QpThreshold = 29;
static const int kHighVp8QpThreshold = 95;
// QP is obtained from VP9-bitstream for HW, so the QP corresponds to the
// bitstream range of [0, 255] and not the user-level range of [0,63].
// Current VP9 settings are mapped from VP8 thresholds above.
static const int kLowVp9QpThreshold = 96;
static const int kHighVp9QpThreshold = 185;
// No scaling decision is made before this many frames were observed
// (2 seconds at 30 fps).
static const int kMinFramesNeededToScale = 2 * 30;
// Maps a codec type to its default QP thresholds; QP ranges are
// codec-specific (see the constants above).
static VideoEncoder::QpThresholds CodecTypeToDefaultThresholds(
    VideoCodecType codec_type) {
  switch (codec_type) {
    case kVideoCodecH264:
      return VideoEncoder::QpThresholds(kLowH264QpThreshold,
                                        kHighH264QpThreshold);
    case kVideoCodecVP8:
      return VideoEncoder::QpThresholds(kLowVp8QpThreshold,
                                        kHighVp8QpThreshold);
    case kVideoCodecVP9:
      return VideoEncoder::QpThresholds(kLowVp9QpThreshold,
                                        kHighVp9QpThreshold);
    default:
      RTC_NOTREACHED() << "Invalid codec type for QualityScaler.";
      // Matches the previous behavior of returning the (-1, -1) sentinel in
      // non-debug builds.
      return VideoEncoder::QpThresholds(-1, -1);
  }
}
} // namespace
// Repeating task that calls QualityScaler::CheckQP() on the task queue it was
// created on. The constructor hands ownership of |this| to the queue via
// PostDelayedTask; Run() returning false makes the queue release the task so
// it can repost itself. Only after Stop() does Run() return true, letting the
// queue delete the task.
class QualityScaler::CheckQPTask : public rtc::QueuedTask {
 public:
  explicit CheckQPTask(QualityScaler* scaler) : scaler_(scaler) {
    LOG(LS_INFO) << "Created CheckQPTask. Scheduling on queue...";
    rtc::TaskQueue::Current()->PostDelayedTask(
        std::unique_ptr<rtc::QueuedTask>(this), scaler_->GetSamplingPeriodMs());
  }
  // Marks the task for deletion on its next Run(); must be called on the same
  // task queue the task runs on.
  void Stop() {
    RTC_DCHECK_CALLED_SEQUENTIALLY(&task_checker_);
    LOG(LS_INFO) << "Stopping QP Check task.";
    stop_ = true;
  }

 private:
  bool Run() override {
    RTC_DCHECK_CALLED_SEQUENTIALLY(&task_checker_);
    if (stop_)
      return true;  // TaskQueue will free this task.
    scaler_->CheckQP();
    // Repost for the next check; the delay grows after the first downscale.
    rtc::TaskQueue::Current()->PostDelayedTask(
        std::unique_ptr<rtc::QueuedTask>(this), scaler_->GetSamplingPeriodMs());
    return false;  // Retain the task in order to reuse it.
  }

  QualityScaler* const scaler_;
  bool stop_ = false;
  rtc::SequencedTaskChecker task_checker_;
};
// Uses the default QP thresholds for |codec_type|.
QualityScaler::QualityScaler(AdaptationObserverInterface* observer,
                             VideoCodecType codec_type)
    : QualityScaler(observer, CodecTypeToDefaultThresholds(codec_type)) {}

// Uses caller-supplied |thresholds| with the default sampling period.
QualityScaler::QualityScaler(AdaptationObserverInterface* observer,
                             VideoEncoder::QpThresholds thresholds)
    : QualityScaler(observer, thresholds, kMeasureMs) {}
// Protected ctor, should not be called directly (tests use it to shorten the
// sampling period). Starts the periodic QP check on the current task queue.
QualityScaler::QualityScaler(AdaptationObserverInterface* observer,
                             VideoEncoder::QpThresholds thresholds,
                             int64_t sampling_period)
    : check_qp_task_(nullptr),
      observer_(observer),
      sampling_period_ms_(sampling_period),
      fast_rampup_(true),
      // Arbitrarily choose size based on 30 fps for 5 seconds.
      average_qp_(5 * 30),
      framedrop_percent_(5 * 30),
      thresholds_(thresholds) {
  RTC_DCHECK_CALLED_SEQUENTIALLY(&task_checker_);
  RTC_DCHECK(observer_ != nullptr);
  // The task schedules itself on the current queue and manages its own
  // lifetime; we only keep a raw pointer so we can Stop() it.
  check_qp_task_ = new CheckQPTask(this);
  LOG(LS_INFO) << "QP thresholds: low: " << thresholds_.low
               << ", high: " << thresholds_.high;
}
QualityScaler::~QualityScaler() {
  RTC_DCHECK_CALLED_SEQUENTIALLY(&task_checker_);
  // The task deletes itself on its next run after Stop(); do not delete here.
  check_qp_task_->Stop();
}
// Returns the delay in ms until the next QP check. After the first downscale
// (|fast_rampup_| false), checks run kSamplePeriodScaleFactor times less
// often.
int64_t QualityScaler::GetSamplingPeriodMs() const {
  RTC_DCHECK_CALLED_SEQUENTIALLY(&task_checker_);
  if (fast_rampup_)
    return sampling_period_ms_;
  // The scale factor is a float; the product truncates back to int64_t on
  // return. Using if/else instead of the previous ternary keeps the
  // fast-rampup branch in pure integer arithmetic — the ternary's common
  // type was float, which round-tripped the int64_t period through float.
  return sampling_period_ms_ * kSamplePeriodScaleFactor;
}
// Records a dropped frame as a 100% sample in the drop-rate average.
void QualityScaler::ReportDroppedFrame() {
  RTC_DCHECK_CALLED_SEQUENTIALLY(&task_checker_);
  framedrop_percent_.AddSample(100);
}

// Records a delivered frame (0% drop) together with its encoder QP.
void QualityScaler::ReportQP(int qp) {
  RTC_DCHECK_CALLED_SEQUENTIALLY(&task_checker_);
  framedrop_percent_.AddSample(0);
  average_qp_.AddSample(qp);
}
// Periodic decision point (run by CheckQPTask): requests a downscale on high
// frame drop or high average QP, an upscale on low average QP, otherwise does
// nothing.
void QualityScaler::CheckQP() {
  RTC_DCHECK_CALLED_SEQUENTIALLY(&task_checker_);
  // Should be set through InitEncode -> Should be set by now.
  RTC_DCHECK_GE(thresholds_.low, 0);
  // If we have not observed at least this many frames we can't
  // make a good scaling decision.
  if (framedrop_percent_.size() < kMinFramesNeededToScale)
    return;
  // Check if we should scale down due to high frame drop.
  const rtc::Optional<int> drop_rate = framedrop_percent_.GetAverage();
  if (drop_rate && *drop_rate >= kFramedropPercentThreshold) {
    ReportQPHigh();
    return;
  }
  // Check if we should scale up or down based on QP.
  const rtc::Optional<int> avg_qp = average_qp_.GetAverage();
  if (avg_qp) {
    LOG(LS_INFO) << "Checking average QP " << *avg_qp;
    if (*avg_qp > thresholds_.high) {
      ReportQPHigh();
      return;
    }
    if (*avg_qp <= thresholds_.low) {
      // QP has been low. We want to try a higher resolution.
      ReportQPLow();
      return;
    }
  }
}
// Signals the observer to try a higher resolution/framerate and restarts
// sampling from scratch.
void QualityScaler::ReportQPLow() {
  RTC_DCHECK_CALLED_SEQUENTIALLY(&task_checker_);
  ClearSamples();
  observer_->AdaptUp(AdaptationObserverInterface::AdaptReason::kQuality);
}
// Signals the observer to reduce resolution/framerate and restarts sampling
// from scratch.
void QualityScaler::ReportQPHigh() {
  RTC_DCHECK_CALLED_SEQUENTIALLY(&task_checker_);
  ClearSamples();
  observer_->AdaptDown(AdaptationObserverInterface::AdaptReason::kQuality);
  // Once we have scaled down, leave the fast-rampup state for good so QP is
  // sampled over longer periods before any attempt to scale back up
  // (GetSamplingPeriodMs() returns a larger delay when this is false).
  fast_rampup_ = false;
}
// Drops all accumulated drop-rate and QP samples so the next decision is
// based only on frames observed after an adaptation.
void QualityScaler::ClearSamples() {
  RTC_DCHECK_CALLED_SEQUENTIALLY(&task_checker_);
  framedrop_percent_.Reset();
  average_qp_.Reset();
}
} // namespace webrtc

View File

@ -0,0 +1,87 @@
/*
* Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_CODING_UTILITY_QUALITY_SCALER_H_
#define WEBRTC_MODULES_VIDEO_CODING_UTILITY_QUALITY_SCALER_H_
#include <utility>
#include "webrtc/api/optional.h"
#include "webrtc/api/video_codecs/video_encoder.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/video_coding/utility/moving_average.h"
#include "webrtc/rtc_base/sequenced_task_checker.h"
namespace webrtc {
// An interface for signaling requests to limit or increase the resolution or
// framerate of the captured video stream.
class AdaptationObserverInterface {
 public:
  // Indicates if the adaptation is due to overuse of the CPU resources, or if
  // the quality of the encoded frames have dropped too low.
  enum AdaptReason : size_t { kQuality = 0, kCpu = 1 };
  // Number of AdaptReason values; usable to size per-reason arrays.
  static const size_t kScaleReasonSize = 2;
  // Called to signal that we can handle larger or more frequent frames.
  virtual void AdaptUp(AdaptReason reason) = 0;
  // Called to signal that the source should reduce the resolution or framerate.
  virtual void AdaptDown(AdaptReason reason) = 0;

 protected:
  // Non-virtual-public destructor: implementations are not deleted through
  // this interface.
  virtual ~AdaptationObserverInterface() {}
};
// QualityScaler runs asynchronously and monitors QP values of encoded frames.
// It holds a reference to a ScalingObserverInterface implementation to signal
// an intent to scale up or down. Must be created, used and destroyed on a
// single rtc::TaskQueue (enforced via |task_checker_|).
class QualityScaler {
 public:
  // Construct a QualityScaler with a given |observer|.
  // This starts the quality scaler periodically checking what the average QP
  // has been recently.
  QualityScaler(AdaptationObserverInterface* observer,
                VideoCodecType codec_type);
  // If specific thresholds are desired these can be supplied as |thresholds|.
  QualityScaler(AdaptationObserverInterface* observer,
                VideoEncoder::QpThresholds thresholds);
  virtual ~QualityScaler();
  // Should be called each time the encoder drops a frame
  void ReportDroppedFrame();
  // Inform the QualityScaler of the last seen QP.
  void ReportQP(int qp);

  // The following members declared protected for testing purposes
 protected:
  QualityScaler(AdaptationObserverInterface* observer,
                VideoEncoder::QpThresholds thresholds,
                int64_t sampling_period);

 private:
  class CheckQPTask;
  void CheckQP();
  void ClearSamples();
  void ReportQPLow();
  void ReportQPHigh();
  int64_t GetSamplingPeriodMs() const;

  // Self-managing repeating task; Stop()ped (not deleted) in the destructor.
  CheckQPTask* check_qp_task_ RTC_GUARDED_BY(&task_checker_);
  AdaptationObserverInterface* const observer_ RTC_GUARDED_BY(&task_checker_);
  rtc::SequencedTaskChecker task_checker_;
  const int64_t sampling_period_ms_;
  // True until the first downscale; QP checks run more often while set.
  bool fast_rampup_ RTC_GUARDED_BY(&task_checker_);
  MovingAverage average_qp_ RTC_GUARDED_BY(&task_checker_);
  MovingAverage framedrop_percent_ RTC_GUARDED_BY(&task_checker_);
  VideoEncoder::QpThresholds thresholds_ RTC_GUARDED_BY(&task_checker_);
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_CODING_UTILITY_QUALITY_SCALER_H_

View File

@ -0,0 +1,202 @@
/*
* Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/video_coding/utility/quality_scaler.h"
#include <memory>
#include "webrtc/rtc_base/event.h"
#include "webrtc/rtc_base/task_queue.h"
#include "webrtc/test/gmock.h"
#include "webrtc/test/gtest.h"
namespace webrtc {
namespace {
static const int kFramerate = 30;
static const int kLowQp = 15;
static const int kLowQpThreshold = 18;
static const int kHighQp = 40;
static const size_t kDefaultTimeoutMs = 150;
} // namespace
// Synchronously runs |block| on task queue |q|: posts a lambda (capturing
// |this|, so only usable inside fixture methods) and blocks the calling
// thread until it has executed, RTC_CHECK-failing after 1000 ms.
#define DO_SYNC(q, block) do {    \
  rtc::Event event(false, false); \
  q->PostTask([this, &event] {    \
    block;                        \
    event.Set();                  \
  });                             \
  RTC_CHECK(event.Wait(1000));    \
  } while (0)
// Observer that counts AdaptUp()/AdaptDown() calls and signals |event| on
// each, so tests can wait for an adaptation (or time out when none should
// occur).
class MockAdaptationObserver : public AdaptationObserverInterface {
 public:
  MockAdaptationObserver() : event(false, false) {}
  virtual ~MockAdaptationObserver() {}

  void AdaptUp(AdaptReason r) override {
    adapt_up_events_++;
    event.Set();
  }
  void AdaptDown(AdaptReason r) override {
    adapt_down_events_++;
    event.Set();
  }

  rtc::Event event;           // Set whenever either callback fires.
  int adapt_up_events_ = 0;   // Number of AdaptUp() calls seen.
  int adapt_down_events_ = 0; // Number of AdaptDown() calls seen.
};
// Pass a lower sampling period to speed up the tests.
class QualityScalerUnderTest : public QualityScaler {
 public:
  explicit QualityScalerUnderTest(AdaptationObserverInterface* observer,
                                  VideoEncoder::QpThresholds thresholds)
      : QualityScaler(observer, thresholds, 5) {}
};
// Fixture that runs a QualityScalerUnderTest on a dedicated task queue; the
// scaler must be created and destroyed on that queue (hence DO_SYNC).
class QualityScalerTest : public ::testing::Test {
 protected:
  // Kinds of input fed to the scaler by TriggerScale().
  enum ScaleDirection {
    kKeepScaleAtHighQp,
    kScaleDown,
    kScaleDownAboveHighQp,
    kScaleUp
  };

  QualityScalerTest()
      : q_(new rtc::TaskQueue("QualityScalerTestQueue")),
        observer_(new MockAdaptationObserver()) {
    DO_SYNC(q_, {
      qs_ = std::unique_ptr<QualityScaler>(new QualityScalerUnderTest(
          observer_.get(),
          VideoEncoder::QpThresholds(kLowQpThreshold, kHighQp)));});
  }
  ~QualityScalerTest() {
    DO_SYNC(q_, {qs_.reset(nullptr);});
  }

  // Feeds 5 seconds' worth of frames (at kFramerate) of the given kind.
  void TriggerScale(ScaleDirection scale_direction) {
    for (int i = 0; i < kFramerate * 5; ++i) {
      switch (scale_direction) {
        case kScaleUp:
          qs_->ReportQP(kLowQp);
          break;
        case kScaleDown:
          qs_->ReportDroppedFrame();
          break;
        case kKeepScaleAtHighQp:
          qs_->ReportQP(kHighQp);
          break;
        case kScaleDownAboveHighQp:
          qs_->ReportQP(kHighQp + 1);
          break;
      }
    }
  }

  std::unique_ptr<rtc::TaskQueue> q_;
  std::unique_ptr<QualityScaler> qs_;
  std::unique_ptr<MockAdaptationObserver> observer_;
};
// 100% frame drop for 5 seconds must trigger a downscale request.
TEST_F(QualityScalerTest, DownscalesAfterContinuousFramedrop) {
  DO_SYNC(q_, { TriggerScale(kScaleDown); });
  EXPECT_TRUE(observer_->event.Wait(kDefaultTimeoutMs));
  EXPECT_EQ(1, observer_->adapt_down_events_);
}

// QP exactly at the high threshold must not trigger any adaptation.
TEST_F(QualityScalerTest, KeepsScaleAtHighQp) {
  DO_SYNC(q_, { TriggerScale(kKeepScaleAtHighQp); });
  EXPECT_FALSE(observer_->event.Wait(kDefaultTimeoutMs));
  EXPECT_EQ(0, observer_->adapt_down_events_);
  EXPECT_EQ(0, observer_->adapt_up_events_);
}

// QP just above the high threshold must trigger a downscale.
TEST_F(QualityScalerTest, DownscalesAboveHighQp) {
  DO_SYNC(q_, { TriggerScale(kScaleDownAboveHighQp); });
  EXPECT_TRUE(observer_->event.Wait(kDefaultTimeoutMs));
  EXPECT_EQ(1, observer_->adapt_down_events_);
  EXPECT_EQ(0, observer_->adapt_up_events_);
}

// A 2/3 drop rate exceeds kFramedropPercentThreshold (60%) and must trigger
// a downscale even though delivered frames stay at the high-QP threshold.
TEST_F(QualityScalerTest, DownscalesAfterTwoThirdsFramedrop) {
  DO_SYNC(q_, {
    for (int i = 0; i < kFramerate * 5; ++i) {
      qs_->ReportDroppedFrame();
      qs_->ReportDroppedFrame();
      qs_->ReportQP(kHighQp);
    }
  });
  EXPECT_TRUE(observer_->event.Wait(kDefaultTimeoutMs));
  EXPECT_EQ(1, observer_->adapt_down_events_);
  EXPECT_EQ(0, observer_->adapt_up_events_);
}
// A QP between the low and high thresholds must not trigger adaptation in
// either direction.
// NOTE(review): the previous body was a copy-paste of DownscalesAboveHighQp
// (it reported kHighQp + 1 and expected a downscale), contradicting this
// test's name; it now reports a mid-range QP and expects no adaptation.
TEST_F(QualityScalerTest, DoesNotDownscaleOnNormalQp) {
  DO_SYNC(q_, {
    for (int i = 0; i < kFramerate * 5; ++i) {
      // Midpoint of (kLowQpThreshold, kHighQp]: above low, not above high.
      qs_->ReportQP((kLowQpThreshold + kHighQp) / 2);
    }
  });
  EXPECT_FALSE(observer_->event.Wait(kDefaultTimeoutMs));
  EXPECT_EQ(0, observer_->adapt_down_events_);
  EXPECT_EQ(0, observer_->adapt_up_events_);
}
// A 50% drop rate is below the 60% threshold, and delivered frames sit at
// (not above) the high-QP threshold, so no adaptation should happen.
TEST_F(QualityScalerTest, DoesNotDownscaleAfterHalfFramedrop) {
  DO_SYNC(q_, {
    for (int i = 0; i < kFramerate * 5; ++i) {
      qs_->ReportDroppedFrame();
      qs_->ReportQP(kHighQp);
    }
  });
  EXPECT_FALSE(observer_->event.Wait(kDefaultTimeoutMs));
  EXPECT_EQ(0, observer_->adapt_down_events_);
  EXPECT_EQ(0, observer_->adapt_up_events_);
}

// QP below the low threshold must trigger an upscale request.
TEST_F(QualityScalerTest, UpscalesAfterLowQp) {
  DO_SYNC(q_, { TriggerScale(kScaleUp); });
  EXPECT_TRUE(observer_->event.Wait(kDefaultTimeoutMs));
  EXPECT_EQ(0, observer_->adapt_down_events_);
  EXPECT_EQ(1, observer_->adapt_up_events_);
}

// A downscale followed by an upscale: both requests must be observed.
TEST_F(QualityScalerTest, ScalesDownAndBackUp) {
  DO_SYNC(q_, { TriggerScale(kScaleDown); });
  EXPECT_TRUE(observer_->event.Wait(kDefaultTimeoutMs));
  EXPECT_EQ(1, observer_->adapt_down_events_);
  EXPECT_EQ(0, observer_->adapt_up_events_);
  DO_SYNC(q_, { TriggerScale(kScaleUp); });
  EXPECT_TRUE(observer_->event.Wait(kDefaultTimeoutMs));
  EXPECT_EQ(1, observer_->adapt_down_events_);
  EXPECT_EQ(1, observer_->adapt_up_events_);
}
// Decisions require kMinFramesNeededToScale (2 * 30) observed frames: the
// first 30 frames must produce no request, 30 more must produce one.
TEST_F(QualityScalerTest, DoesNotScaleUntilEnoughFramesObserved) {
  DO_SYNC(q_, {
    // Send 30 frames. This should not be enough to make a decision.
    for (int i = 0; i < kFramerate; ++i) {
      qs_->ReportQP(kLowQp);
    }
  });
  EXPECT_FALSE(observer_->event.Wait(kDefaultTimeoutMs));
  DO_SYNC(q_, {
    // Send 30 more. This should result in an adapt request as
    // enough frames have now been observed.
    for (int i = 0; i < kFramerate; ++i) {
      qs_->ReportQP(kLowQp);
    }
  });
  EXPECT_TRUE(observer_->event.Wait(kDefaultTimeoutMs));
  EXPECT_EQ(0, observer_->adapt_down_events_);
  EXPECT_EQ(1, observer_->adapt_up_events_);
}
} // namespace webrtc
#undef DO_SYNC

View File

@ -0,0 +1,376 @@
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/video_coding/codecs/vp8/simulcast_rate_allocator.h"
#include <limits>
#include <memory>
#include <utility>
#include <vector>
#include "webrtc/test/gmock.h"
#include "webrtc/test/gtest.h"
namespace webrtc {
namespace {
using ::testing::_;

// Codec-level bitrate bounds used by the non-simulcast tests (kbps).
constexpr uint32_t kMinBitrateKbps = 50;
constexpr uint32_t kTargetBitrateKbps = 100;
constexpr uint32_t kMaxBitrateKbps = 1000;
constexpr uint32_t kFramerateFps = 5;

// GMock stand-in for TemporalLayers; used e.g. to verify that querying the
// preferred bitrate does not push rates into the temporal layers.
class MockTemporalLayers : public TemporalLayers {
 public:
  MOCK_METHOD1(UpdateLayerConfig, TemporalLayers::FrameConfig(uint32_t));
  MOCK_METHOD3(OnRatesUpdated, std::vector<uint32_t>(int, int, int));
  MOCK_METHOD1(UpdateConfiguration, bool(vpx_codec_enc_cfg_t*));
  MOCK_METHOD4(PopulateCodecSpecific,
               void(bool,
                    const TemporalLayers::FrameConfig&,
                    CodecSpecificInfoVP8*,
                    uint32_t));
  MOCK_METHOD2(FrameEncoded, void(unsigned int, int));
  MOCK_CONST_METHOD0(Tl0PicIdx, uint8_t());
  MOCK_CONST_METHOD1(GetTemporalLayerId,
                     int(const TemporalLayers::FrameConfig&));
};
}  // namespace
// Fixture building a SimulcastRateAllocator from a zero-initialized
// VideoCodec that tests mutate before calling CreateAllocator().
class SimulcastRateAllocatorTest : public ::testing::TestWithParam<bool> {
 public:
  SimulcastRateAllocatorTest() {
    memset(&codec_, 0, sizeof(VideoCodec));
    codec_.minBitrate = kMinBitrateKbps;
    codec_.targetBitrate = kTargetBitrateKbps;
    codec_.maxBitrate = kMaxBitrateKbps;
    CreateAllocator();
  }
  virtual ~SimulcastRateAllocatorTest() {}

  // Checks that |actual| holds exactly the |S| expected per-stream rates.
  template <size_t S>
  void ExpectEqual(uint32_t (&expected)[S],
                   const std::vector<uint32_t>& actual) {
    EXPECT_EQ(S, actual.size());
    for (size_t i = 0; i < S; ++i)
      EXPECT_EQ(expected[i], actual[i]) << "Mismatch at index " << i;
  }

  // Checks each spatial layer's aggregate rate (|expected| is in kbps, the
  // allocation in bps) and that the layer sums add up to the total.
  template <size_t S>
  void ExpectEqual(uint32_t (&expected)[S], const BitrateAllocation& actual) {
    // EXPECT_EQ(S, actual.size());
    uint32_t sum = 0;
    for (size_t i = 0; i < S; ++i) {
      uint32_t layer_bitrate = actual.GetSpatialLayerSum(i);
      EXPECT_EQ(expected[i] * 1000U, layer_bitrate) << "Mismatch at index "
                                                    << i;
      sum += layer_bitrate;
    }
    EXPECT_EQ(sum, actual.get_sum_bps());
  }

  // (Re)creates the allocator from the current |codec_| configuration and
  // mimics the temporal-layer creation InitEncode() would perform.
  void CreateAllocator() {
    std::unique_ptr<TemporalLayersFactory> tl_factory(GetTlFactory());
    // The factory pointer stays valid: ownership moves into the allocator.
    codec_.VP8()->tl_factory = tl_factory.get();
    allocator_.reset(new SimulcastRateAllocator(codec_, std::move(tl_factory)));

    // Simulate InitEncode().
    tl_factories_.clear();
    if (codec_.numberOfSimulcastStreams == 0) {
      tl_factories_.push_back(
          std::unique_ptr<TemporalLayers>(codec_.VP8()->tl_factory->Create(
              0, codec_.VP8()->numberOfTemporalLayers, 0)));
    } else {
      for (uint32_t i = 0; i < codec_.numberOfSimulcastStreams; ++i) {
        tl_factories_.push_back(
            std::unique_ptr<TemporalLayers>(codec_.VP8()->tl_factory->Create(
                i, codec_.simulcastStream[i].numberOfTemporalLayers, 0)));
      }
    }
  }

  // Overridable so subclasses can substitute a different factory type.
  virtual std::unique_ptr<TemporalLayersFactory> GetTlFactory() {
    return std::unique_ptr<TemporalLayersFactory>(new TemporalLayersFactory());
  }

  // |target_bitrate| is in kbps; the allocator works in bps.
  BitrateAllocation GetAllocation(uint32_t target_bitrate) {
    return allocator_->GetAllocation(target_bitrate * 1000U, kDefaultFrameRate);
  }

 protected:
  static const int kDefaultFrameRate = 30;
  VideoCodec codec_;
  std::unique_ptr<SimulcastRateAllocator> allocator_;
  std::vector<std::unique_ptr<TemporalLayers>> tl_factories_;
};
// Without simulcast, requests below the codec minimum clamp up to it.
TEST_F(SimulcastRateAllocatorTest, NoSimulcastBelowMin) {
  uint32_t expected[] = {codec_.minBitrate};
  ExpectEqual(expected, GetAllocation(codec_.minBitrate - 1));
  ExpectEqual(expected, GetAllocation(1));
  ExpectEqual(expected, GetAllocation(0));
}

// Without simulcast, requests above the codec maximum clamp down to it.
TEST_F(SimulcastRateAllocatorTest, NoSimulcastAboveMax) {
  uint32_t expected[] = {codec_.maxBitrate};
  ExpectEqual(expected, GetAllocation(codec_.maxBitrate + 1));
  ExpectEqual(expected, GetAllocation(std::numeric_limits<uint32_t>::max()));
}

// maxBitrate == 0 means no configured maximum; a request at
// BitrateAllocation's own cap is expected to pass through unchanged.
TEST_F(SimulcastRateAllocatorTest, NoSimulcastNoMax) {
  const uint32_t kMax = BitrateAllocation::kMaxBitrateBps / 1000;
  codec_.maxBitrate = 0;
  CreateAllocator();
  uint32_t expected[] = {kMax};
  ExpectEqual(expected, GetAllocation(kMax));
}

// Any request within [min, max] is passed through unchanged.
TEST_F(SimulcastRateAllocatorTest, NoSimulcastWithinLimits) {
  for (uint32_t bitrate = codec_.minBitrate; bitrate <= codec_.maxBitrate;
       ++bitrate) {
    uint32_t expected[] = {bitrate};
    ExpectEqual(expected, GetAllocation(bitrate));
  }
}

TEST_F(SimulcastRateAllocatorTest, SingleSimulcastBelowMin) {
  // With simulcast, use the min bitrate from the ss spec instead of the global.
  codec_.numberOfSimulcastStreams = 1;
  const uint32_t kMin = codec_.minBitrate - 10;
  codec_.simulcastStream[0].minBitrate = kMin;
  codec_.simulcastStream[0].targetBitrate = kTargetBitrateKbps;
  CreateAllocator();
  uint32_t expected[] = {kMin};
  ExpectEqual(expected, GetAllocation(kMin - 1));
  ExpectEqual(expected, GetAllocation(1));
  ExpectEqual(expected, GetAllocation(0));
}

// The per-stream max from the simulcast spec caps the allocation.
TEST_F(SimulcastRateAllocatorTest, SingleSimulcastAboveMax) {
  codec_.numberOfSimulcastStreams = 1;
  codec_.simulcastStream[0].minBitrate = kMinBitrateKbps;
  const uint32_t kMax = codec_.simulcastStream[0].maxBitrate + 1000;
  codec_.simulcastStream[0].maxBitrate = kMax;
  CreateAllocator();
  uint32_t expected[] = {kMax};
  ExpectEqual(expected, GetAllocation(kMax));
  ExpectEqual(expected, GetAllocation(kMax + 1));
  ExpectEqual(expected, GetAllocation(std::numeric_limits<uint32_t>::max()));
}

// Requests within the single stream's [min, max] pass through unchanged.
TEST_F(SimulcastRateAllocatorTest, SingleSimulcastWithinLimits) {
  codec_.numberOfSimulcastStreams = 1;
  codec_.simulcastStream[0].minBitrate = kMinBitrateKbps;
  codec_.simulcastStream[0].targetBitrate = kTargetBitrateKbps;
  codec_.simulcastStream[0].maxBitrate = kMaxBitrateKbps;
  CreateAllocator();
  for (uint32_t bitrate = kMinBitrateKbps; bitrate <= kMaxBitrateKbps;
       ++bitrate) {
    uint32_t expected[] = {bitrate};
    ExpectEqual(expected, GetAllocation(bitrate));
  }
}
// Three configured streams: bitrate fills stream 0 up to its target, then
// stream 1, then stream 2; a higher stream gets nothing until its minimum can
// be fully met.
TEST_F(SimulcastRateAllocatorTest, OneToThreeStreams) {
  codec_.numberOfSimulcastStreams = 3;
  codec_.maxBitrate = 0;
  codec_.simulcastStream[0].minBitrate = 10;
  codec_.simulcastStream[0].targetBitrate = 100;
  codec_.simulcastStream[0].maxBitrate = 500;
  codec_.simulcastStream[1].minBitrate = 50;
  codec_.simulcastStream[1].targetBitrate = 500;
  codec_.simulcastStream[1].maxBitrate = 1000;
  codec_.simulcastStream[2].minBitrate = 2000;
  codec_.simulcastStream[2].targetBitrate = 3000;
  codec_.simulcastStream[2].maxBitrate = 4000;
  CreateAllocator();
  {
    // Single stream, min bitrate.
    const uint32_t bitrate = codec_.simulcastStream[0].minBitrate;
    uint32_t expected[] = {bitrate, 0, 0};
    ExpectEqual(expected, GetAllocation(bitrate));
  }
  {
    // Single stream at target bitrate.
    const uint32_t bitrate = codec_.simulcastStream[0].targetBitrate;
    uint32_t expected[] = {bitrate, 0, 0};
    ExpectEqual(expected, GetAllocation(bitrate));
  }
  {
    // Bitrate above target for first stream, but below min for the next one.
    const uint32_t bitrate = codec_.simulcastStream[0].targetBitrate +
                             codec_.simulcastStream[1].minBitrate - 1;
    uint32_t expected[] = {bitrate, 0, 0};
    ExpectEqual(expected, GetAllocation(bitrate));
  }
  {
    // Just enough for two streams.
    const uint32_t bitrate = codec_.simulcastStream[0].targetBitrate +
                             codec_.simulcastStream[1].minBitrate;
    uint32_t expected[] = {codec_.simulcastStream[0].targetBitrate,
                           codec_.simulcastStream[1].minBitrate, 0};
    ExpectEqual(expected, GetAllocation(bitrate));
  }
  {
    // Second stream maxed out, but not enough for third.
    const uint32_t bitrate = codec_.simulcastStream[0].targetBitrate +
                             codec_.simulcastStream[1].maxBitrate;
    uint32_t expected[] = {codec_.simulcastStream[0].targetBitrate,
                           codec_.simulcastStream[1].maxBitrate, 0};
    ExpectEqual(expected, GetAllocation(bitrate));
  }
  {
    // First two streams maxed out, but not enough for third. Nowhere to put
    // remaining bits.
    const uint32_t bitrate = codec_.simulcastStream[0].maxBitrate +
                             codec_.simulcastStream[1].maxBitrate + 499;
    uint32_t expected[] = {codec_.simulcastStream[0].targetBitrate,
                           codec_.simulcastStream[1].maxBitrate, 0};
    ExpectEqual(expected, GetAllocation(bitrate));
  }
  {
    // Just enough for all three streams.
    const uint32_t bitrate = codec_.simulcastStream[0].targetBitrate +
                             codec_.simulcastStream[1].targetBitrate +
                             codec_.simulcastStream[2].minBitrate;
    uint32_t expected[] = {codec_.simulcastStream[0].targetBitrate,
                           codec_.simulcastStream[1].targetBitrate,
                           codec_.simulcastStream[2].minBitrate};
    ExpectEqual(expected, GetAllocation(bitrate));
  }
  {
    // Third maxed out.
    const uint32_t bitrate = codec_.simulcastStream[0].targetBitrate +
                             codec_.simulcastStream[1].targetBitrate +
                             codec_.simulcastStream[2].maxBitrate;
    uint32_t expected[] = {codec_.simulcastStream[0].targetBitrate,
                           codec_.simulcastStream[1].targetBitrate,
                           codec_.simulcastStream[2].maxBitrate};
    ExpectEqual(expected, GetAllocation(bitrate));
  }
}
// Without simulcast, the preferred bitrate is the codec's max bitrate, and
// querying it must not push any rate update into the temporal layers.
TEST_F(SimulcastRateAllocatorTest, GetPreferredBitrateBps) {
  MockTemporalLayers mock_layers;
  allocator_.reset(new SimulcastRateAllocator(codec_, nullptr));
  allocator_->OnTemporalLayersCreated(0, &mock_layers);
  // GetPreferredBitrateBps() is a pure query; no OnRatesUpdated() expected.
  EXPECT_CALL(mock_layers, OnRatesUpdated(_, _, _)).Times(0);
  EXPECT_EQ(codec_.maxBitrate * 1000,
            allocator_->GetPreferredBitrateBps(codec_.maxFramerate));
}
// With simulcast, the preferred bitrate is the sum of the lower streams'
// target bitrates plus the top stream's max bitrate.
TEST_F(SimulcastRateAllocatorTest, GetPreferredBitrateSimulcast) {
  codec_.numberOfSimulcastStreams = 3;
  codec_.maxBitrate = 999999;
  codec_.simulcastStream[0].minBitrate = 10;
  codec_.simulcastStream[0].targetBitrate = 100;
  codec_.simulcastStream[0].maxBitrate = 500;
  codec_.simulcastStream[1].minBitrate = 50;
  codec_.simulcastStream[1].targetBitrate = 500;
  codec_.simulcastStream[1].maxBitrate = 1000;
  codec_.simulcastStream[2].minBitrate = 2000;
  codec_.simulcastStream[2].targetBitrate = 3000;
  codec_.simulcastStream[2].maxBitrate = 4000;
  CreateAllocator();
  const uint32_t expected_kbps = codec_.simulcastStream[0].targetBitrate +
                                 codec_.simulcastStream[1].targetBitrate +
                                 codec_.simulcastStream[2].maxBitrate;
  EXPECT_EQ(expected_kbps * 1000,
            allocator_->GetPreferredBitrateBps(codec_.maxFramerate));
}
// Fixture for conference-mode screenshare allocation tests. The bool test
// parameter selects between a one-stream simulcast configuration (true) and
// a plain non-simulcast configuration (false); both use two temporal layers.
class ScreenshareRateAllocationTest : public SimulcastRateAllocatorTest {
 public:
  // Configures codec_ for screensharing with two temporal layers, either as
  // a single simulcast stream or as a non-simulcast encoder config.
  void SetupConferenceScreenshare(bool use_simulcast) {
    codec_.mode = VideoCodecMode::kScreensharing;
    codec_.minBitrate = kMinBitrateKbps;
    codec_.maxBitrate = kMaxBitrateKbps;
    if (use_simulcast) {
      codec_.numberOfSimulcastStreams = 1;
      codec_.simulcastStream[0].minBitrate = kMinBitrateKbps;
      codec_.simulcastStream[0].targetBitrate = kTargetBitrateKbps;
      codec_.simulcastStream[0].maxBitrate = kMaxBitrateKbps;
      codec_.simulcastStream[0].numberOfTemporalLayers = 2;
    } else {
      codec_.numberOfSimulcastStreams = 0;
      codec_.targetBitrate = kTargetBitrateKbps;
      codec_.VP8()->numberOfTemporalLayers = 2;
    }
  }
  // Use the screenshare-specific temporal layers implementation.
  std::unique_ptr<TemporalLayersFactory> GetTlFactory() override {
    return std::unique_ptr<TemporalLayersFactory>(
        new ScreenshareTemporalLayersFactory());
  }
};
// Run each screenshare test both with and without simulcast.
INSTANTIATE_TEST_CASE_P(ScreenshareTest,
                        ScreenshareRateAllocationTest,
                        ::testing::Bool());
// A rate at (or below) the TL0 target must land entirely in TL0.
TEST_P(ScreenshareRateAllocationTest, BitrateBelowTl0) {
  SetupConferenceScreenshare(GetParam());
  CreateAllocator();
  const BitrateAllocation layers =
      allocator_->GetAllocation(kTargetBitrateKbps * 1000, kFramerateFps);
  EXPECT_EQ(kTargetBitrateKbps, layers.get_sum_kbps());
  EXPECT_EQ(kTargetBitrateKbps, layers.GetBitrate(0, 0) / 1000);
}
// A rate between the TL0 target and the codec max fills TL0 to its target
// and spills the remainder into TL1.
TEST_P(ScreenshareRateAllocationTest, BitrateAboveTl0) {
  SetupConferenceScreenshare(GetParam());
  CreateAllocator();
  // Pick a rate halfway between the TL0 target and the codec max.
  const uint32_t bitrate_kbps = (kTargetBitrateKbps + kMaxBitrateKbps) / 2;
  const BitrateAllocation layers =
      allocator_->GetAllocation(bitrate_kbps * 1000, kFramerateFps);
  EXPECT_EQ(bitrate_kbps, layers.get_sum_kbps());
  EXPECT_EQ(kTargetBitrateKbps, layers.GetBitrate(0, 0) / 1000);
  EXPECT_EQ(bitrate_kbps - kTargetBitrateKbps,
            layers.GetBitrate(0, 1) / 1000);
}
// A rate above the codec max is capped: TL0 gets its target, TL1 gets the
// rest up to the max, and nothing beyond that is allocated.
TEST_P(ScreenshareRateAllocationTest, BitrateAboveTl1) {
  SetupConferenceScreenshare(GetParam());
  CreateAllocator();
  // Offer twice the codec max.
  const BitrateAllocation layers =
      allocator_->GetAllocation(kMaxBitrateKbps * 2000, kFramerateFps);
  EXPECT_EQ(kMaxBitrateKbps, layers.get_sum_kbps());
  EXPECT_EQ(kTargetBitrateKbps, layers.GetBitrate(0, 0) / 1000);
  EXPECT_EQ(kMaxBitrateKbps - kTargetBitrateKbps,
            layers.GetBitrate(0, 1) / 1000);
}
} // namespace webrtc

View File

@ -0,0 +1,205 @@
/*
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/video_coding/utility/vp8_header_parser.h"

#include <string.h>

#include "webrtc/rtc_base/logging.h"
namespace webrtc {
namespace vp8 {
namespace {
// VP8 payload header sizes in bytes: the 3-byte frame tag common to all
// frames, and the 10-byte header of a key frame (frame tag + start code +
// width/height) — see RFC 6386, section 9.1.
const size_t kCommonPayloadHeaderLength = 3;
const size_t kKeyPayloadHeaderLength = 10;
} // namespace
// Reverses the byte order of a 32-bit word (endianness swap).
static uint32_t BSwap32(uint32_t x) {
  const uint32_t byte0 = x >> 24;
  const uint32_t byte1 = (x >> 8) & 0x0000ff00u;
  const uint32_t byte2 = (x << 8) & 0x00ff0000u;
  const uint32_t byte3 = x << 24;
  return byte0 | byte1 | byte2 | byte3;
}
// Refills the bit cache one byte at a time once fewer than four readable
// bytes remain. Past the end of the buffer it shifts in zero bits and sets
// the EOF flag so callers can detect over-read.
static void VP8LoadFinalBytes(VP8BitReader* const br) {
  // Only read 8bits at a time.
  if (br->buf_ < br->buf_end_) {
    br->bits_ += 8;
    br->value_ = static_cast<uint32_t>(*br->buf_++) | (br->value_ << 8);
  } else if (!br->eof_) {
    // Input exhausted: pad with zeros so any pending reads still complete.
    br->value_ <<= 8;
    br->bits_ += 8;
    br->eof_ = 1;
  }
}
// Refills the bit-reader cache with 24 bits when at least four readable
// bytes remain; otherwise falls back to byte-at-a-time loading via
// VP8LoadFinalBytes().
static void VP8LoadNewBytes(VP8BitReader* const br) {
  constexpr int kBits = 24;  // Number of bits loaded per refill.
  if (br->buf_ + sizeof(uint32_t) <= br->buf_end_) {
    // Use memcpy instead of dereferencing a reinterpreted pointer: the
    // buffer position is not guaranteed to be 4-byte aligned, and the cast
    // also violated strict aliasing. Compilers lower this to a plain load.
    uint32_t in_bits;
    memcpy(&in_bits, br->buf_, sizeof(in_bits));
    br->buf_ += kBits >> 3;  // Advance 3 bytes even though 4 were read.
    uint32_t bits;
#if defined(WEBRTC_ARCH_BIG_ENDIAN)
    bits = in_bits;
    if (kBits != 8 * sizeof(uint32_t))
      bits >>= (8 * sizeof(uint32_t) - kBits);
#else
    bits = BSwap32(in_bits);
    bits >>= 32 - kBits;
#endif
    br->value_ = bits | (br->value_ << kBits);
    br->bits_ += kBits;
  } else {
    VP8LoadFinalBytes(br);
  }
}
// Prepares |br| to decode the boolean-coded stream in [start, end).
static void VP8InitBitReader(VP8BitReader* const br,
                             const uint8_t* const start,
                             const uint8_t* const end) {
  br->buf_ = start;
  br->buf_end_ = end;
  br->value_ = 0;
  br->range_ = 255 - 1;
  br->eof_ = 0;
  br->bits_ = -8;  // To load the very first 8bits.
  VP8LoadNewBytes(br);
}
// Reads one boolean-coded bit with probability |prob|/256 of being 0
// (binary range decoding as in libwebp). Returns 0 once input is exhausted.
static int VP8GetBit(VP8BitReader* const br, int prob) {
  uint8_t range = br->range_;
  if (br->bits_ < 0) {
    // Cache empty: pull in more input before decoding.
    VP8LoadNewBytes(br);
    if (br->eof_)
      return 0;
  }
  // Split the current range in proportion to |prob| and compare the cached
  // value bits against the split point.
  const int pos = br->bits_;
  const uint8_t split = (range * prob) >> 8;
  const uint8_t value = static_cast<uint8_t>(br->value_ >> pos);
  int bit;
  if (value > split) {
    range -= split + 1;
    br->value_ -= static_cast<uint32_t>(split + 1) << pos;
    bit = 1;
  } else {
    range = split;
    bit = 0;
  }
  // Renormalize: scale a shrunken range back up using the lookup tables,
  // consuming the corresponding number of cached bits.
  if (range <= static_cast<uint8_t>(0x7e)) {
    const int shift = kVP8Log2Range[range];
    range = kVP8NewRange[range];
    br->bits_ -= shift;
  }
  br->range_ = range;
  return bit;
}
// Reads |bits| bits MSB-first, each with uniform probability (0x80).
static uint32_t VP8GetValue(VP8BitReader* const br, int bits) {
  uint32_t result = 0;
  for (int shift = bits - 1; shift >= 0; --shift) {
    result |= static_cast<uint32_t>(VP8GetBit(br, 0x80)) << shift;
  }
  return result;
}
// Reads a single uniformly distributed bit.
static uint32_t VP8Get(VP8BitReader* const br) {
  return VP8GetValue(br, 1);
}
// Reads a |bits|-bit magnitude followed by a sign bit (1 means negative).
static int32_t VP8GetSignedValue(VP8BitReader* const br, int bits) {
  const int magnitude = VP8GetValue(br, bits);
  if (VP8Get(br)) {
    return -magnitude;
  }
  return magnitude;
}
// Skips the segmentation header of the first partition. The values are read
// only to advance the bit reader past them; the discarded ternary results
// deliberately consume the conditional per-segment fields.
static void ParseSegmentHeader(VP8BitReader* br) {
  int use_segment = VP8Get(br);
  if (use_segment) {
    int update_map = VP8Get(br);
    if (VP8Get(br)) {  // Segment feature data present.
      int s;
      VP8Get(br);  // Absolute vs. delta value mode.
      for (s = 0; s < NUM_MB_SEGMENTS; ++s) {
        // Per-segment quantizer update: 7-bit magnitude + sign when present.
        VP8Get(br) ? VP8GetSignedValue(br, 7) : 0;
      }
      for (s = 0; s < NUM_MB_SEGMENTS; ++s) {
        // Per-segment loop-filter update: 6-bit magnitude + sign when present.
        VP8Get(br) ? VP8GetSignedValue(br, 6) : 0;
      }
    }
    if (update_map) {
      int s;
      for (s = 0; s < MB_FEATURE_TREE_PROBS; ++s) {
        // Segment-map tree probabilities: 8 bits each when present.
        VP8Get(br) ? VP8GetValue(br, 8) : 255;
      }
    }
  }
}
// Skips the loop-filter header (filter type, level, sharpness, and the
// optional per-reference / per-mode delta updates). Values are read only to
// advance the bit reader.
static void ParseFilterHeader(VP8BitReader* br) {
  VP8Get(br);          // Filter type.
  VP8GetValue(br, 6);  // Filter level.
  VP8GetValue(br, 3);  // Sharpness level.
  if (VP8Get(br)) {    // Loop-filter deltas enabled.
    if (VP8Get(br)) {  // Delta update present.
      for (int i = 0; i < NUM_REF_LF_DELTAS; ++i) {
        if (VP8Get(br))
          VP8GetSignedValue(br, 6);
      }
      for (int i = 0; i < NUM_MODE_LF_DELTAS; ++i) {
        if (VP8Get(br))
          VP8GetSignedValue(br, 6);
      }
    }
  }
}
// Parses the base quantizer index (QP) from a VP8 payload. The 3-byte frame
// tag is decoded first (key-frame flag and first-partition size); for key
// frames the remaining 7 header bytes are skipped. The first partition is
// then boolean-decoded just far enough to reach the base QP field.
bool GetQp(const uint8_t* buf, size_t length, int* qp) {
  if (length < kCommonPayloadHeaderLength) {
    LOG(LS_WARNING) << "Failed to get QP, invalid length.";
    return false;
  }
  VP8BitReader br;
  // Frame tag: 3 bytes, little endian.
  const uint32_t bits = buf[0] | (buf[1] << 8) | (buf[2] << 16);
  // Bit 0 of the frame tag is 0 for key frames, 1 for interframes.
  int key_frame = !(bits & 1);
  // Size of first partition in bytes.
  uint32_t partition_length = (bits >> 5);
  size_t header_length = kCommonPayloadHeaderLength;
  if (key_frame) {
    header_length = kKeyPayloadHeaderLength;
  }
  if (header_length + partition_length > length) {
    LOG(LS_WARNING) << "Failed to get QP, invalid length: " << length;
    return false;
  }
  buf += header_length;
  VP8InitBitReader(&br, buf, buf + partition_length);
  if (key_frame) {
    // Color space and pixel type.
    VP8Get(&br);
    VP8Get(&br);
  }
  // Skip the headers that precede the quantizer block.
  ParseSegmentHeader(&br);
  ParseFilterHeader(&br);
  // Number of coefficient data partitions.
  VP8GetValue(&br, 2);
  // Base QP.
  const int base_q0 = VP8GetValue(&br, 7);
  if (br.eof_ == 1) {
    LOG(LS_WARNING) << "Failed to get QP, end of file reached.";
    return false;
  }
  *qp = base_q0;
  return true;
}
} // namespace vp8
} // namespace webrtc

View File

@ -0,0 +1,68 @@
/*
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_CODING_UTILITY_VP8_HEADER_PARSER_H_
#define WEBRTC_MODULES_VIDEO_CODING_UTILITY_VP8_HEADER_PARSER_H_
#include <stdint.h>
#include <stdio.h>
namespace webrtc {
namespace vp8 {
// Bitstream limits from the VP8 specification (RFC 6386).
enum {
  MB_FEATURE_TREE_PROBS = 3,  // Segment-map tree probabilities.
  NUM_MB_SEGMENTS = 4,        // Macroblock segments.
  NUM_REF_LF_DELTAS = 4,      // Per-reference-frame loop-filter deltas.
  NUM_MODE_LF_DELTAS = 4,     // Per-coding-mode loop-filter deltas.
};
typedef struct VP8BitReader VP8BitReader;
// State of the VP8 boolean (range) decoder, modeled on libwebp's bit reader.
struct VP8BitReader {
  // Boolean decoder.
  uint32_t value_; // Current value (cached input bits).
  uint32_t range_; // Current range minus 1. In [127, 254] interval.
  int bits_; // Number of valid bits left.
  // Read buffer.
  const uint8_t* buf_; // Next byte to be read.
  const uint8_t* buf_end_; // End of read buffer.
  int eof_; // True if input is exhausted.
};
// Shift counts used by the decoder to renormalize a shrunken range value
// (indexed by the range when it has dropped to <= 0x7e).
const uint8_t kVP8Log2Range[128] = {
    7, 6, 6, 5, 5, 5, 5, 4, 4, 4, 4, 4, 4, 4, 4, 3, 3, 3, 3, 3, 3, 3,
    3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
    2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1,
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0};
// Precomputed renormalized range for each shrunken range value:
// range = ((range - 1) << kVP8Log2Range[range]) + 1
const uint8_t kVP8NewRange[128] = {
    127, 127, 191, 127, 159, 191, 223, 127, 143, 159, 175, 191, 207, 223, 239,
    127, 135, 143, 151, 159, 167, 175, 183, 191, 199, 207, 215, 223, 231, 239,
    247, 127, 131, 135, 139, 143, 147, 151, 155, 159, 163, 167, 171, 175, 179,
    183, 187, 191, 195, 199, 203, 207, 211, 215, 219, 223, 227, 231, 235, 239,
    243, 247, 251, 127, 129, 131, 133, 135, 137, 139, 141, 143, 145, 147, 149,
    151, 153, 155, 157, 159, 161, 163, 165, 167, 169, 171, 173, 175, 177, 179,
    181, 183, 185, 187, 189, 191, 193, 195, 197, 199, 201, 203, 205, 207, 209,
    211, 213, 215, 217, 219, 221, 223, 225, 227, 229, 231, 233, 235, 237, 239,
    241, 243, 245, 247, 249, 251, 253, 127};
// Parses the base QP (quantizer index) from an encoded VP8 frame held in
// |buf|/|length|. QP range: [0, 127].
// Returns true on success, false otherwise.
bool GetQp(const uint8_t* buf, size_t length, int* qp);
} // namespace vp8
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_CODING_UTILITY_VP8_HEADER_PARSER_H_

View File

@ -0,0 +1,272 @@
/*
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.h"
#include "webrtc/rtc_base/bitbuffer.h"
#include "webrtc/rtc_base/logging.h"
namespace webrtc {
// Evaluates |x| (a bit-buffer read) and returns false from the enclosing
// function on failure. Wrapped in do/while(0) so the macro expands to a
// single statement and stays safe in unbraced if/else bodies.
#define RETURN_FALSE_IF_ERROR(x) \
  do {                           \
    if (!(x)) {                  \
      return false;              \
    }                            \
  } while (0)
namespace vp9 {
namespace {
// Header field counts from the VP9 bitstream specification: references per
// inter frame, reference loop-filter deltas, and mode loop-filter deltas.
const size_t kVp9NumRefsPerFrame = 3;
const size_t kVp9MaxRefLFDeltas = 4;
const size_t kVp9MaxModeLFDeltas = 2;
// Reads the 2-bit profile, plus the reserved bit mandated for profiles > 2.
// Returns false on a read failure or if the reserved bit is set.
bool Vp9ReadProfile(rtc::BitBuffer* br, uint8_t* profile) {
  uint32_t low_bit;
  uint32_t high_bit;
  RETURN_FALSE_IF_ERROR(br->ReadBits(&low_bit, 1));
  RETURN_FALSE_IF_ERROR(br->ReadBits(&high_bit, 1));
  *profile = (high_bit << 1) + low_bit;
  if (*profile <= 2)
    return true;
  uint32_t reserved_bit;
  RETURN_FALSE_IF_ERROR(br->ReadBits(&reserved_bit, 1));
  if (reserved_bit) {
    LOG(LS_WARNING) << "Failed to get QP. Unsupported bitstream profile.";
    return false;
  }
  return true;
}
// Verifies the 24-bit VP9 sync code (0x498342).
bool Vp9ReadSyncCode(rtc::BitBuffer* br) {
  uint32_t sync_code;
  RETURN_FALSE_IF_ERROR(br->ReadBits(&sync_code, 24));
  if (sync_code == 0x498342)
    return true;
  LOG(LS_WARNING) << "Failed to get QP. Invalid sync code.";
  return false;
}
// Skips the color-config block of the uncompressed header. The field layout
// depends on |profile|; 4:4:4 subsampling is only representable in profiles
// 1 and 3, so SRGB content in profile 0/2 is rejected.
bool Vp9ReadColorConfig(rtc::BitBuffer* br, uint8_t profile) {
  if (profile == 2 || profile == 3) {
    // Bitdepth.
    RETURN_FALSE_IF_ERROR(br->ConsumeBits(1));
  }
  uint32_t color_space;
  RETURN_FALSE_IF_ERROR(br->ReadBits(&color_space, 3));
  // SRGB is 7.
  if (color_space != 7) {
    // YUV range flag.
    RETURN_FALSE_IF_ERROR(br->ConsumeBits(1));
    if (profile == 1 || profile == 3) {
      // 1 bit: subsampling x.
      // 1 bit: subsampling y.
      RETURN_FALSE_IF_ERROR(br->ConsumeBits(2));
      uint32_t reserved_bit;
      RETURN_FALSE_IF_ERROR(br->ReadBits(&reserved_bit, 1));
      if (reserved_bit) {
        LOG(LS_WARNING) << "Failed to get QP. Reserved bit set.";
        return false;
      }
    }
  } else {
    // SRGB implies 4:4:4 subsampling.
    if (profile == 1 || profile == 3) {
      uint32_t reserved_bit;
      RETURN_FALSE_IF_ERROR(br->ReadBits(&reserved_bit, 1));
      if (reserved_bit) {
        LOG(LS_WARNING) << "Failed to get QP. Reserved bit set.";
        return false;
      }
    } else {
      LOG(LS_WARNING) << "Failed to get QP. 4:4:4 color not supported in "
                         "profile 0 or 2.";
      return false;
    }
  }
  return true;
}
// Skips the explicit frame size (16-bit width and 16-bit height).
bool Vp9ReadFrameSize(rtc::BitBuffer* br) {
  // 2 bytes: frame width.
  // 2 bytes: frame height.
  return br->ConsumeBytes(4);
}
// Skips the optional render size: a flag bit, then width/height when set.
bool Vp9ReadRenderSize(rtc::BitBuffer* br) {
  uint32_t render_size_present;
  RETURN_FALSE_IF_ERROR(br->ReadBits(&render_size_present, 1));
  if (!render_size_present)
    return true;
  // 2 bytes: render width.
  // 2 bytes: render height.
  return br->ConsumeBytes(4);
}
// Skips the frame-size-from-refs block: one flag bit per reference until a
// reference supplies the size; otherwise reads an explicit frame size. The
// render size always follows.
bool Vp9ReadFrameSizeFromRefs(rtc::BitBuffer* br) {
  uint32_t found_ref = 0;
  size_t ref_index = 0;
  while (!found_ref && ref_index < kVp9NumRefsPerFrame) {
    // Size in refs.
    RETURN_FALSE_IF_ERROR(br->ReadBits(&found_ref, 1));
    ++ref_index;
  }
  if (!found_ref && !Vp9ReadFrameSize(br)) {
    return false;
  }
  return Vp9ReadRenderSize(br);
}
// Skips the interpolation filter: a "filter switchable" flag, followed by a
// 2-bit filter selection when the flag is clear.
bool Vp9ReadInterpolationFilter(rtc::BitBuffer* br) {
  uint32_t is_switchable;
  RETURN_FALSE_IF_ERROR(br->ReadBits(&is_switchable, 1));
  return is_switchable ? true : br->ConsumeBits(2);
}
// Skips the loop-filter parameters: level, sharpness, and the optional
// per-reference / per-mode delta updates.
bool Vp9ReadLoopfilter(rtc::BitBuffer* br) {
  // 6 bits: filter level.
  // 3 bits: sharpness level.
  RETURN_FALSE_IF_ERROR(br->ConsumeBits(9));
  uint32_t mode_ref_delta_enabled;
  RETURN_FALSE_IF_ERROR(br->ReadBits(&mode_ref_delta_enabled, 1));
  if (mode_ref_delta_enabled) {
    uint32_t mode_ref_delta_update;
    RETURN_FALSE_IF_ERROR(br->ReadBits(&mode_ref_delta_update, 1));
    if (mode_ref_delta_update) {
      uint32_t bit;
      // Each delta: an update flag, then a 6-bit magnitude + sign (7 bits).
      for (size_t i = 0; i < kVp9MaxRefLFDeltas; i++) {
        RETURN_FALSE_IF_ERROR(br->ReadBits(&bit, 1));
        if (bit) {
          RETURN_FALSE_IF_ERROR(br->ConsumeBits(7));
        }
      }
      for (size_t i = 0; i < kVp9MaxModeLFDeltas; i++) {
        RETURN_FALSE_IF_ERROR(br->ReadBits(&bit, 1));
        if (bit) {
          RETURN_FALSE_IF_ERROR(br->ConsumeBits(7));
        }
      }
    }
  }
  return true;
}
} // namespace
// Parses the base QP (base_q_idx) from a VP9 uncompressed frame header by
// walking the header fields in bitstream order up to the quantization
// parameters. Frames that only re-show an existing frame carry no QP and
// make this function return false.
bool GetQp(const uint8_t* buf, size_t length, int* qp) {
  rtc::BitBuffer br(buf, length);
  // Frame marker.
  uint32_t frame_marker;
  RETURN_FALSE_IF_ERROR(br.ReadBits(&frame_marker, 2));
  if (frame_marker != 0x2) {
    LOG(LS_WARNING) << "Failed to get QP. Frame marker should be 2.";
    return false;
  }
  // Profile.
  uint8_t profile;
  if (!Vp9ReadProfile(&br, &profile))
    return false;
  // Show existing frame.
  uint32_t show_existing_frame;
  RETURN_FALSE_IF_ERROR(br.ReadBits(&show_existing_frame, 1));
  if (show_existing_frame)
    return false;
  // Frame type: KEY_FRAME(0), INTER_FRAME(1).
  uint32_t frame_type;
  uint32_t show_frame;
  uint32_t error_resilient;
  RETURN_FALSE_IF_ERROR(br.ReadBits(&frame_type, 1));
  RETURN_FALSE_IF_ERROR(br.ReadBits(&show_frame, 1));
  RETURN_FALSE_IF_ERROR(br.ReadBits(&error_resilient, 1));
  if (!frame_type) {
    // Key frame: sync code, color config and explicit sizes always present.
    if (!Vp9ReadSyncCode(&br))
      return false;
    if (!Vp9ReadColorConfig(&br, profile))
      return false;
    if (!Vp9ReadFrameSize(&br))
      return false;
    if (!Vp9ReadRenderSize(&br))
      return false;
  } else {
    // Inter frame: layout depends on intra_only and error_resilient flags.
    uint32_t intra_only = 0;
    if (!show_frame)
      RETURN_FALSE_IF_ERROR(br.ReadBits(&intra_only, 1));
    if (!error_resilient)
      RETURN_FALSE_IF_ERROR(br.ConsumeBits(2));  // Reset frame context.
    if (intra_only) {
      if (!Vp9ReadSyncCode(&br))
        return false;
      if (profile > 0) {
        if (!Vp9ReadColorConfig(&br, profile))
          return false;
      }
      // Refresh frame flags.
      RETURN_FALSE_IF_ERROR(br.ConsumeBits(8));
      if (!Vp9ReadFrameSize(&br))
        return false;
      if (!Vp9ReadRenderSize(&br))
        return false;
    } else {
      // Refresh frame flags.
      RETURN_FALSE_IF_ERROR(br.ConsumeBits(8));
      for (size_t i = 0; i < kVp9NumRefsPerFrame; i++) {
        // 3 bits: Ref frame index.
        // 1 bit: Ref frame sign biases.
        RETURN_FALSE_IF_ERROR(br.ConsumeBits(4));
      }
      if (!Vp9ReadFrameSizeFromRefs(&br))
        return false;
      // Allow high precision mv.
      RETURN_FALSE_IF_ERROR(br.ConsumeBits(1));
      // Interpolation filter.
      if (!Vp9ReadInterpolationFilter(&br))
        return false;
    }
  }
  if (!error_resilient) {
    // 1 bit: Refresh frame context.
    // 1 bit: Frame parallel decoding mode.
    RETURN_FALSE_IF_ERROR(br.ConsumeBits(2));
  }
  // Frame context index.
  RETURN_FALSE_IF_ERROR(br.ConsumeBits(2));
  if (!Vp9ReadLoopfilter(&br))
    return false;
  // Base QP.
  uint8_t base_q0;
  RETURN_FALSE_IF_ERROR(br.ReadUInt8(&base_q0));
  *qp = base_q0;
  return true;
}
} // namespace vp9
} // namespace webrtc

View File

@ -0,0 +1,29 @@
/*
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_CODING_UTILITY_VP9_UNCOMPRESSED_HEADER_PARSER_H_
#define WEBRTC_MODULES_VIDEO_CODING_UTILITY_VP9_UNCOMPRESSED_HEADER_PARSER_H_
#include <stddef.h>
#include <stdint.h>
namespace webrtc {
namespace vp9 {
// Parses the base QP (base_q_idx) from the uncompressed header of an encoded
// VP9 frame held in |buf|/|length|. QP range: [0, 255].
// Returns true on success, false otherwise.
bool GetQp(const uint8_t* buf, size_t length, int* qp);
} // namespace vp9
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_CODING_UTILITY_VP9_UNCOMPRESSED_HEADER_PARSER_H_