Renamed FrameObject to EncodedFrame.

The plan is to:
 1. Move FrameObject to api/video.
 2. Rename FrameObject to EncodedFrame.
 3. Move EncodedFrame out of the video_coding namespace.

This is the second CL of the three (step 2 of the plan above).
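
For reference, the compatibility mechanism used while the rename rolls out is a
transitional type alias. A minimal standalone sketch (simplified; only the
GetBitstream member visible in the diff below is kept, everything else is
omitted or hypothetical):

  #include <cstdint>

  namespace webrtc {
  namespace video_coding {

  // The class now carries its new name.
  class EncodedFrame {
   public:
    virtual ~EncodedFrame() = default;

    // One representative member from the real interface; the rest is elided.
    virtual bool GetBitstream(uint8_t* destination) const = 0;
  };

  // Transitional alias: downstream code that still refers to FrameObject keeps
  // compiling until it has migrated to EncodedFrame (to be removed later).
  using FrameObject = EncodedFrame;

  }  // namespace video_coding
  }  // namespace webrtc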

Bug: webrtc:8909
Change-Id: I5e76a0a3b306156b8bc1de67834b4adf14bebef9
Reviewed-on: https://webrtc-review.googlesource.com/56182
Commit-Queue: Philip Eliasson <philipel@webrtc.org>
Commit-Queue: Tommi <tommi@webrtc.org>
Reviewed-by: Niels Moller <nisse@webrtc.org>
Reviewed-by: Tommi <tommi@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#22158}
Author: philipel
Date: 2018-02-22 14:35:06 +01:00
Committed by: Commit Bot
Commit: e7c891f953 (parent a5c735f5d9)
14 changed files with 49 additions and 46 deletions

@@ -16,14 +16,13 @@
namespace webrtc {
namespace video_coding {
-// TODO(philipel): Rename FrameObject to EncodedFrame.
// TODO(philipel): Remove webrtc::VCMEncodedFrame inheritance.
-class FrameObject : public webrtc::VCMEncodedFrame {
+class EncodedFrame : public webrtc::VCMEncodedFrame {
 public:
  static const uint8_t kMaxFrameReferences = 5;
-  FrameObject() = default;
-  virtual ~FrameObject() {}
+  EncodedFrame() = default;
+  virtual ~EncodedFrame() {}
  virtual bool GetBitstream(uint8_t* destination) const = 0;
@@ -59,6 +58,9 @@ class FrameObject : public webrtc::VCMEncodedFrame {
  bool inter_layer_predicted = false;
};
+// TODO(philipel): Remove this when downstream projects have been updated.
+using FrameObject = EncodedFrame;
}  // namespace video_coding
}  // namespace webrtc

@@ -60,7 +60,7 @@ FrameBuffer::~FrameBuffer() {}
FrameBuffer::ReturnReason FrameBuffer::NextFrame(
    int64_t max_wait_time_ms,
-    std::unique_ptr<FrameObject>* frame_out,
+    std::unique_ptr<EncodedFrame>* frame_out,
    bool keyframe_required) {
  TRACE_EVENT0("webrtc", "FrameBuffer::NextFrame");
  int64_t latest_return_time_ms =
@@ -106,7 +106,7 @@ FrameBuffer::ReturnReason FrameBuffer::NextFrame(
      continue;
    }
-    FrameObject* frame = frame_it->second.frame.get();
+    EncodedFrame* frame = frame_it->second.frame.get();
    if (keyframe_required && !frame->is_keyframe())
      continue;
@@ -134,7 +134,7 @@ FrameBuffer::ReturnReason FrameBuffer::NextFrame(
  rtc::CritScope lock(&crit_);
  now_ms = clock_->TimeInMilliseconds();
  if (next_frame_it_ != frames_.end()) {
-    std::unique_ptr<FrameObject> frame =
+    std::unique_ptr<EncodedFrame> frame =
        std::move(next_frame_it_->second.frame);
    if (!frame->delayed_by_retransmission()) {
@@ -208,7 +208,8 @@ FrameBuffer::ReturnReason FrameBuffer::NextFrame(
  return kTimeout;
}
-bool FrameBuffer::HasBadRenderTiming(const FrameObject& frame, int64_t now_ms) {
+bool FrameBuffer::HasBadRenderTiming(const EncodedFrame& frame,
+                                     int64_t now_ms) {
  // Assume that render timing errors are due to changes in the video stream.
  int64_t render_time_ms = frame.RenderTimeMs();
  const int64_t kMaxVideoDelayMs = 10000;
@@ -255,7 +256,7 @@ void FrameBuffer::UpdateRtt(int64_t rtt_ms) {
  jitter_estimator_->UpdateRtt(rtt_ms);
}
-bool FrameBuffer::ValidReferences(const FrameObject& frame) const {
+bool FrameBuffer::ValidReferences(const EncodedFrame& frame) const {
  if (frame.picture_id < 0)
    return false;
@@ -275,7 +276,7 @@ bool FrameBuffer::ValidReferences(const FrameObject& frame) const {
  return true;
}
-void FrameBuffer::UpdatePlayoutDelays(const FrameObject& frame) {
+void FrameBuffer::UpdatePlayoutDelays(const EncodedFrame& frame) {
  TRACE_EVENT0("webrtc", "FrameBuffer::UpdatePlayoutDelays");
  PlayoutDelay playout_delay = frame.EncodedImage().playout_delay_;
  if (playout_delay.min_ms >= 0)
@@ -285,7 +286,7 @@ void FrameBuffer::UpdatePlayoutDelays(const FrameObject& frame) {
  timing_->set_max_playout_delay(playout_delay.max_ms);
}
-int64_t FrameBuffer::InsertFrame(std::unique_ptr<FrameObject> frame) {
+int64_t FrameBuffer::InsertFrame(std::unique_ptr<EncodedFrame> frame) {
  TRACE_EVENT0("webrtc", "FrameBuffer::InsertFrame");
  RTC_DCHECK(frame);
  if (stats_callback_)
@@ -459,7 +460,7 @@ void FrameBuffer::AdvanceLastDecodedFrame(FrameMap::iterator decoded) {
  }
}
-bool FrameBuffer::UpdateFrameInfoWithIncomingFrame(const FrameObject& frame,
+bool FrameBuffer::UpdateFrameInfoWithIncomingFrame(const EncodedFrame& frame,
                                                   FrameMap::iterator info) {
  TRACE_EVENT0("webrtc", "FrameBuffer::UpdateFrameInfoWithIncomingFrame");
  FrameKey key(frame.picture_id, frame.spatial_layer);

@@ -16,7 +16,7 @@
#include <memory>
#include <utility>
-#include "modules/video_coding/frame_object.h"
+#include "api/video/encoded_frame.h"
#include "modules/video_coding/include/video_coding_defines.h"
#include "modules/video_coding/inter_frame_delay.h"
#include "rtc_base/constructormagic.h"
@@ -47,7 +47,7 @@ class FrameBuffer {
  // Insert a frame into the frame buffer. Returns the picture id
  // of the last continuous frame or -1 if there is no continuous frame.
-  int64_t InsertFrame(std::unique_ptr<FrameObject> frame);
+  int64_t InsertFrame(std::unique_ptr<EncodedFrame> frame);
  // Get the next frame for decoding. Will return at latest after
  // |max_wait_time_ms|.
@@ -57,7 +57,7 @@ class FrameBuffer {
  //    kTimeout.
  //  - If the FrameBuffer is stopped then it will return kStopped.
  ReturnReason NextFrame(int64_t max_wait_time_ms,
-                         std::unique_ptr<FrameObject>* frame_out,
+                         std::unique_ptr<EncodedFrame>* frame_out,
                         bool keyframe_required = false);
  // Tells the FrameBuffer which protection mode that is in use. Affects
@@ -120,18 +120,18 @@ class FrameBuffer {
    // If this frame is continuous or not.
    bool continuous = false;
-    // The actual FrameObject.
-    std::unique_ptr<FrameObject> frame;
+    // The actual EncodedFrame.
+    std::unique_ptr<EncodedFrame> frame;
  };
  using FrameMap = std::map<FrameKey, FrameInfo>;
  // Check that the references of |frame| are valid.
-  bool ValidReferences(const FrameObject& frame) const;
+  bool ValidReferences(const EncodedFrame& frame) const;
  // Updates the minimal and maximal playout delays
  // depending on the frame.
-  void UpdatePlayoutDelays(const FrameObject& frame)
+  void UpdatePlayoutDelays(const EncodedFrame& frame)
      RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
  // Update all directly dependent and indirectly dependent frames and mark
@@ -151,7 +151,7 @@ class FrameBuffer {
  // Update the corresponding FrameInfo of |frame| and all FrameInfos that
  // |frame| references.
  // Return false if |frame| will never be decodable, true otherwise.
-  bool UpdateFrameInfoWithIncomingFrame(const FrameObject& frame,
+  bool UpdateFrameInfoWithIncomingFrame(const EncodedFrame& frame,
                                        FrameMap::iterator info)
      RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
@@ -161,7 +161,7 @@ class FrameBuffer {
  void ClearFramesAndHistory() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
-  bool HasBadRenderTiming(const FrameObject& frame, int64_t now_ms)
+  bool HasBadRenderTiming(const EncodedFrame& frame, int64_t now_ms)
      RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_);
  FrameMap frames_ RTC_GUARDED_BY(crit_);

@@ -86,7 +86,7 @@ class VCMJitterEstimatorMock : public VCMJitterEstimator {
  MOCK_METHOD1(GetJitterEstimate, int(double rttMultiplier));
};
-class FrameObjectFake : public FrameObject {
+class FrameObjectFake : public EncodedFrame {
 public:
  bool GetBitstream(uint8_t* destination) const override { return true; }
@@ -155,7 +155,7 @@ class TestFrameBuffer2 : public ::testing::Test {
                  bool inter_layer_predicted,
                  T... refs) {
    static_assert(sizeof...(refs) <= kMaxReferences,
-                  "To many references specified for FrameObject.");
+                  "To many references specified for EncodedFrame.");
    std::array<uint16_t, sizeof...(refs)> references = {
        {rtc::checked_cast<uint16_t>(refs)...}};
@@ -174,7 +174,7 @@ class TestFrameBuffer2 : public ::testing::Test {
  void ExtractFrame(int64_t max_wait_time = 0, bool keyframe_required = false) {
    crit_.Enter();
    if (max_wait_time == 0) {
-      std::unique_ptr<FrameObject> frame;
+      std::unique_ptr<EncodedFrame> frame;
      FrameBuffer::ReturnReason res =
          buffer_.NextFrame(0, &frame, keyframe_required);
      if (res != FrameBuffer::ReturnReason::kStopped)
@@ -213,7 +213,7 @@ class TestFrameBuffer2 : public ::testing::Test {
    if (tfb->tear_down_)
      return;
-    std::unique_ptr<FrameObject> frame;
+    std::unique_ptr<EncodedFrame> frame;
    FrameBuffer::ReturnReason res =
        tfb->buffer_.NextFrame(tfb->max_wait_time_, &frame);
    if (res != FrameBuffer::ReturnReason::kStopped)
@@ -228,7 +228,7 @@ class TestFrameBuffer2 : public ::testing::Test {
  VCMTimingFake timing_;
  ::testing::NiceMock<VCMJitterEstimatorMock> jitter_estimator_;
  FrameBuffer buffer_;
-  std::vector<std::unique_ptr<FrameObject>> frames_;
+  std::vector<std::unique_ptr<EncodedFrame>> frames_;
  Random rand_;
  ::testing::NiceMock<VCMReceiveStatisticsCallbackMock> stats_callback_;

@@ -32,11 +32,11 @@ RtpFrameObject::RtpFrameObject(PacketBuffer* packet_buffer,
  VCMPacket* first_packet = packet_buffer_->GetPacket(first_seq_num);
  RTC_CHECK(first_packet);
-  // RtpFrameObject members
+  // EncodedFrame members
  frame_type_ = first_packet->frameType;
  codec_type_ = first_packet->codec;
-  // TODO(philipel): Remove when encoded image is replaced by FrameObject.
+  // TODO(philipel): Remove when encoded image is replaced by EncodedFrame.
  // VCMEncodedFrame members
  CopyCodecSpecific(&first_packet->video_header);
  _completeFrame = true;
@@ -68,7 +68,7 @@ RtpFrameObject::RtpFrameObject(PacketBuffer* packet_buffer,
  _encodedWidth = first_packet->width;
  _encodedHeight = first_packet->height;
-  // FrameObject members
+  // EncodedFrame members
  timestamp = first_packet->timestamp;
  VCMPacket* last_packet = packet_buffer_->GetPacket(last_seq_num);

@@ -21,7 +21,7 @@ namespace video_coding {
class PacketBuffer;
-class RtpFrameObject : public FrameObject {
+class RtpFrameObject : public EncodedFrame {
 public:
  RtpFrameObject(PacketBuffer* packet_buffer,
                 uint16_t first_seq_num,

@@ -30,7 +30,6 @@ class Clock;
namespace video_coding {
-class FrameObject;
class RtpFrameObject;
// A received frame is a frame which has received all its packets.

@@ -26,7 +26,7 @@
namespace webrtc {
namespace video_coding {
-class FrameObject;
+class EncodedFrame;
class RtpFrameObject;
// A complete frame is a frame which has received all its packets and all its
@@ -34,7 +34,7 @@ class RtpFrameObject;
class OnCompleteFrameCallback {
 public:
  virtual ~OnCompleteFrameCallback() {}
-  virtual void OnCompleteFrame(std::unique_ptr<FrameObject> frame) = 0;
+  virtual void OnCompleteFrame(std::unique_ptr<EncodedFrame> frame) = 0;
};
class RtpFrameReferenceFinder {

@@ -64,7 +64,7 @@ class TestRtpFrameReferenceFinder : public ::testing::Test,
  uint16_t Rand() { return rand_.Rand<uint16_t>(); }
-  void OnCompleteFrame(std::unique_ptr<FrameObject> frame) override {
+  void OnCompleteFrame(std::unique_ptr<EncodedFrame> frame) override {
    int64_t pid = frame->picture_id;
    uint16_t sidx = frame->spatial_layer;
    auto frame_it = frames_from_callback_.find(std::make_pair(pid, sidx));
@@ -260,8 +260,9 @@ class TestRtpFrameReferenceFinder : public ::testing::Test,
      return f1.first < f2.first;
    }
  };
-  std::map<std::pair<int64_t, uint8_t>, std::unique_ptr<FrameObject>, FrameComp>
-      frames_from_callback_;
+  std::
+      map<std::pair<int64_t, uint8_t>, std::unique_ptr<EncodedFrame>, FrameComp>
+          frames_from_callback_;
};
TEST_F(TestRtpFrameReferenceFinder, PaddingPackets) {

@@ -395,7 +395,7 @@ void RtpVideoStreamReceiver::OnReceivedFrame(
}
void RtpVideoStreamReceiver::OnCompleteFrame(
-    std::unique_ptr<video_coding::FrameObject> frame) {
+    std::unique_ptr<video_coding::EncodedFrame> frame) {
  {
    rtc::CritScope lock(&last_seq_num_cs_);
    video_coding::RtpFrameObject* rtp_frame =

@@ -135,7 +135,7 @@ class RtpVideoStreamReceiver : public RtpData,
  // Implements OnCompleteFrameCallback.
  void OnCompleteFrame(
-      std::unique_ptr<video_coding::FrameObject> frame) override;
+      std::unique_ptr<video_coding::EncodedFrame> frame) override;
  void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) override;

@@ -61,14 +61,14 @@ class MockOnCompleteFrameCallback
 public:
  MockOnCompleteFrameCallback() : buffer_(rtc::ByteBuffer::ORDER_NETWORK) {}
-  MOCK_METHOD1(DoOnCompleteFrame, void(video_coding::FrameObject* frame));
+  MOCK_METHOD1(DoOnCompleteFrame, void(video_coding::EncodedFrame* frame));
  MOCK_METHOD1(DoOnCompleteFrameFailNullptr,
-               void(video_coding::FrameObject* frame));
+               void(video_coding::EncodedFrame* frame));
  MOCK_METHOD1(DoOnCompleteFrameFailLength,
-               void(video_coding::FrameObject* frame));
+               void(video_coding::EncodedFrame* frame));
  MOCK_METHOD1(DoOnCompleteFrameFailBitstream,
-               void(video_coding::FrameObject* frame));
+               void(video_coding::EncodedFrame* frame));
-  void OnCompleteFrame(std::unique_ptr<video_coding::FrameObject> frame) {
+  void OnCompleteFrame(std::unique_ptr<video_coding::EncodedFrame> frame) {
    if (!frame) {
      DoOnCompleteFrameFailNullptr(nullptr);
      return;

@@ -355,7 +355,7 @@ void VideoReceiveStream::RequestKeyFrame() {
}
void VideoReceiveStream::OnCompleteFrame(
-    std::unique_ptr<video_coding::FrameObject> frame) {
+    std::unique_ptr<video_coding::EncodedFrame> frame) {
  int64_t last_continuous_pid = frame_buffer_->InsertFrame(std::move(frame));
  if (last_continuous_pid != -1)
    rtp_video_stream_receiver_.FrameContinuous(last_continuous_pid);
@@ -416,7 +416,7 @@ bool VideoReceiveStream::Decode() {
  static const int kMaxWaitForKeyFrameMs = 200;
  int wait_ms = keyframe_required_ ? kMaxWaitForKeyFrameMs : kMaxWaitForFrameMs;
-  std::unique_ptr<video_coding::FrameObject> frame;
+  std::unique_ptr<video_coding::EncodedFrame> frame;
  // TODO(philipel): Call NextFrame with |keyframe_required| argument when
  // downstream project has been fixed.
  video_coding::FrameBuffer::ReturnReason res =

@@ -101,7 +101,7 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream,
  // Implements video_coding::OnCompleteFrameCallback.
  void OnCompleteFrame(
-      std::unique_ptr<video_coding::FrameObject> frame) override;
+      std::unique_ptr<video_coding::EncodedFrame> frame) override;
  // Implements CallStatsObserver::OnRttUpdate
  void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) override;