Reland of Delete VideoFrame default constructor, and IsZeroSize method. (patchset #1 id:1 of https://codereview.webrtc.org/2574123002/ )
Reason for revert:
Fixing perf tests.

Original issue's description:
> Revert of Delete VideoFrame default constructor, and IsZeroSize method. (patchset #5 id:80001 of https://codereview.webrtc.org/2541863002/ )
>
> Reason for revert:
> Crashes perf tests, e.g.,
>
> ./out/Debug/webrtc_perf_tests --gtest_filter='FullStackTest.ScreenshareSlidesVP8_2TL_VeryLossyNet'
>
> dies with an assert related to rtc::Optional.
>
> Original issue's description:
> > Delete VideoFrame default constructor, and IsZeroSize method.
> >
> > This ensures that the video_frame_buffer method never can return a
> > null pointer.
> >
> > BUG=webrtc:6591
> >
> > Committed: https://crrev.com/bfcf561923a42005e4c7d66d8e72e5932155f997
> > Cr-Commit-Position: refs/heads/master@{#15574}
>
> TBR=magjed@webrtc.org,stefan@webrtc.org
> # Skipping CQ checks because original CL landed less than 1 days ago.
> NOPRESUBMIT=true
> NOTREECHECKS=true
> NOTRY=true
> BUG=webrtc:6591
>
> Committed: https://crrev.com/0989fbcad2ca4eb5805a77e8ebfefd3af06ade23
> Cr-Commit-Position: refs/heads/master@{#15597}

TBR=magjed@webrtc.org,stefan@webrtc.org
# Skipping CQ checks because original CL landed less than 1 days ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=webrtc:6591

Review-Url: https://codereview.webrtc.org/2574183002
Cr-Commit-Position: refs/heads/master@{#15633}
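The practical migration pattern behind this CL: call sites that used to default-construct a webrtc::VideoFrame and probe it with IsZeroSize() now either build the frame from a real I420Buffer up front, or hold it in an rtc::Optional when "no frame yet" is a legitimate state. A minimal sketch of that pattern, using only APIs that appear in the diff below (the example namespace, class name and the video_frame.h include path are illustrative, not part of the change):

// Illustrative sketch only, not part of the CL; header paths vary by revision.
#include "webrtc/base/optional.h"  // rtc::Optional
#include "webrtc/video_frame.h"    // webrtc::VideoFrame, webrtc::I420Buffer (assumed path)

namespace example {

class LastFrameHolder {
 public:
  // Previously: last_frame_ = frame; with a default-constructed, zero-size
  // frame standing in for "nothing received yet".
  void OnFrame(const webrtc::VideoFrame& frame) {
    last_frame_ = rtc::Optional<webrtc::VideoFrame>(frame);
  }

  // Previously: return last_frame_.width(); which returned 0 for a
  // zero-size frame. The optional makes the empty state explicit.
  int LastWidth() const { return last_frame_ ? last_frame_->width() : 0; }

 private:
  rtc::Optional<webrtc::VideoFrame> last_frame_;
};

// Where a frame object is required but its pixels are ignored (see the
// VCMGenericEncoder hunk), a tiny buffer is created so that
// video_frame_buffer() can never return null.
inline webrtc::VideoFrame MakePlaceholderFrame() {
  return webrtc::VideoFrame(webrtc::I420Buffer::Create(1, 1),
                            webrtc::kVideoRotation_0, /*timestamp_us=*/0);
}

}  // namespace example

The hunks below apply this substitution across the fake send stream, the codec wrappers, the frame generator and the video analyzer.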
@@ -111,12 +111,6 @@ void CheckRotate(int width, int height, webrtc::VideoRotation rotation,
 } // namespace
 
-TEST(TestVideoFrame, InitialValues) {
-  VideoFrame frame;
-  EXPECT_TRUE(frame.IsZeroSize());
-  EXPECT_EQ(kVideoRotation_0, frame.rotation());
-}
-
 TEST(TestVideoFrame, WidthHeightValues) {
   VideoFrame frame(I420Buffer::Create(10, 10, 10, 14, 90),
                    webrtc::kVideoRotation_0,
@@ -128,8 +128,6 @@ int PrintVideoFrame(const VideoFrameBuffer& frame, FILE* file) {
 }
 
 int PrintVideoFrame(const VideoFrame& frame, FILE* file) {
-  if (frame.IsZeroSize())
-    return -1;
   return PrintVideoFrame(*frame.video_frame_buffer(), file);
 }
 
@@ -23,13 +23,6 @@ namespace webrtc {
 // to optimized bitstream readers. See avcodec_decode_video2.
 const size_t EncodedImage::kBufferPaddingBytesH264 = 8;
 
-VideoFrame::VideoFrame()
-    : video_frame_buffer_(nullptr),
-      timestamp_rtp_(0),
-      ntp_time_ms_(0),
-      timestamp_us_(0),
-      rotation_(kVideoRotation_0) {}
-
 VideoFrame::VideoFrame(const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
                        webrtc::VideoRotation rotation,
                        int64_t timestamp_us)
@@ -59,10 +52,6 @@ int VideoFrame::height() const {
   return video_frame_buffer_ ? video_frame_buffer_->height() : 0;
 }
 
-bool VideoFrame::IsZeroSize() const {
-  return !video_frame_buffer_;
-}
-
 rtc::scoped_refptr<VideoFrameBuffer> VideoFrame::video_frame_buffer() const {
   return video_frame_buffer_;
 }
@@ -37,8 +37,15 @@ TEST(VideoBroadcasterTest, OnFrame) {
   FakeVideoRenderer sink2;
   broadcaster.AddOrUpdateSink(&sink1, rtc::VideoSinkWants());
   broadcaster.AddOrUpdateSink(&sink2, rtc::VideoSinkWants());
+  static int kWidth = 100;
+  static int kHeight = 50;
 
-  webrtc::VideoFrame frame;
+  rtc::scoped_refptr<webrtc::I420Buffer> buffer(
+      webrtc::I420Buffer::Create(kWidth, kHeight));
+  // Initialize, to avoid warnings on use of initialized values.
+  buffer->SetToBlack();
+
+  webrtc::VideoFrame frame(buffer, webrtc::kVideoRotation_0, 0);
 
   broadcaster.OnFrame(frame);
   EXPECT_EQ(1, sink1.num_rendered_frames());
@@ -161,27 +161,28 @@ int FakeVideoSendStream::GetNumberOfSwappedFrames() const {
 }
 
 int FakeVideoSendStream::GetLastWidth() const {
-  return last_frame_.width();
+  return last_frame_->width();
 }
 
 int FakeVideoSendStream::GetLastHeight() const {
-  return last_frame_.height();
+  return last_frame_->height();
 }
 
 int64_t FakeVideoSendStream::GetLastTimestamp() const {
-  RTC_DCHECK(last_frame_.ntp_time_ms() == 0);
-  return last_frame_.render_time_ms();
+  RTC_DCHECK(last_frame_->ntp_time_ms() == 0);
+  return last_frame_->render_time_ms();
 }
 
 void FakeVideoSendStream::OnFrame(const webrtc::VideoFrame& frame) {
   ++num_swapped_frames_;
-  if (frame.width() != last_frame_.width() ||
-      frame.height() != last_frame_.height() ||
-      frame.rotation() != last_frame_.rotation()) {
+  if (!last_frame_ ||
+      frame.width() != last_frame_->width() ||
+      frame.height() != last_frame_->height() ||
+      frame.rotation() != last_frame_->rotation()) {
     video_streams_ = encoder_config_.video_stream_factory->CreateEncoderStreams(
         frame.width(), frame.height(), encoder_config_);
   }
-  last_frame_ = frame;
+  last_frame_ = rtc::Optional<webrtc::VideoFrame>(frame);
 }
 
 void FakeVideoSendStream::SetStats(
@@ -202,8 +203,15 @@ void FakeVideoSendStream::EnableEncodedFrameRecording(
 
 void FakeVideoSendStream::ReconfigureVideoEncoder(
     webrtc::VideoEncoderConfig config) {
+  int width, height;
+  if (last_frame_) {
+    width = last_frame_->width();
+    height = last_frame_->height();
+  } else {
+    width = height = 0;
+  }
   video_streams_ = config.video_stream_factory->CreateEncoderStreams(
-      last_frame_.width(), last_frame_.height(), config);
+      width, height, config);
   if (config.encoder_specific_settings != NULL) {
     if (config_.encoder_settings.payload_name == "VP8") {
       config.encoder_specific_settings->FillVideoCodecVp8(&vpx_settings_.vp8);
@@ -165,7 +165,7 @@ class FakeVideoSendStream final
   bool resolution_scaling_enabled_;
   rtc::VideoSourceInterface<webrtc::VideoFrame>* source_;
   int num_swapped_frames_;
-  webrtc::VideoFrame last_frame_;
+  rtc::Optional<webrtc::VideoFrame> last_frame_;
   webrtc::VideoSendStream::Stats stats_;
   int num_encoder_reconfigurations_ = 0;
 };
@@ -308,10 +308,6 @@ int32_t H264EncoderImpl::Encode(const VideoFrame& input_frame,
     ReportError();
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
   }
-  if (input_frame.IsZeroSize()) {
-    ReportError();
-    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
-  }
   if (!encoded_image_callback_) {
     LOG(LS_WARNING) << "InitEncode() has been called, but a callback function "
                     << "has not been set with RegisterEncodeCompleteCallback()";
@@ -131,7 +131,6 @@ class I420Decoder : public VideoDecoder {
                               uint16_t* width,
                               uint16_t* height);
 
-  VideoFrame _decodedImage;
   int _width;
   int _height;
   bool _inited;
@@ -204,7 +204,6 @@ class VideoProcessorImpl : public VideoProcessor {
   // Keep track of the last successful frame, since we need to write that
   // when decoding fails:
   uint8_t* last_successful_frame_buffer_;
-  webrtc::VideoFrame source_frame_;
   // To keep track of if we have excluded the first key frame from packet loss:
   bool first_key_frame_has_been_excluded_;
   // To tell the decoder previous frame have been dropped due to packet loss:
@@ -300,7 +300,6 @@ int SimulcastEncoderAdapter::Encode(
     // TODO(perkj): ensure that works going forward, and figure out how this
     // affects webrtc:5683.
     if ((dst_width == src_width && dst_height == src_height) ||
-        input_image.IsZeroSize() ||
         input_image.video_frame_buffer()->native_handle()) {
       int ret = streaminfos_[stream_idx].encoder->Encode(
           input_image, codec_specific_info, &stream_frame_types);
@@ -13,6 +13,7 @@
 #include <memory>
 
 #include "webrtc/base/checks.h"
+#include "webrtc/base/optional.h"
 #include "webrtc/base/timeutils.h"
 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
 #include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
@@ -88,7 +89,7 @@ bool Vp8UnitTestEncodeCompleteCallback::EncodeComplete() {
 
 class Vp8UnitTestDecodeCompleteCallback : public webrtc::DecodedImageCallback {
  public:
-  explicit Vp8UnitTestDecodeCompleteCallback(VideoFrame* frame)
+  explicit Vp8UnitTestDecodeCompleteCallback(rtc::Optional<VideoFrame>* frame)
       : decoded_frame_(frame), decode_complete(false) {}
   int32_t Decoded(VideoFrame& frame) override;
   int32_t Decoded(VideoFrame& frame, int64_t decode_time_ms) override {
@@ -98,7 +99,7 @@ class Vp8UnitTestDecodeCompleteCallback : public webrtc::DecodedImageCallback {
   bool DecodeComplete();
 
  private:
-  VideoFrame* decoded_frame_;
+  rtc::Optional<VideoFrame>* decoded_frame_;
   bool decode_complete;
 };
 
@@ -111,7 +112,7 @@ bool Vp8UnitTestDecodeCompleteCallback::DecodeComplete() {
 }
 
 int Vp8UnitTestDecodeCompleteCallback::Decoded(VideoFrame& image) {
-  *decoded_frame_ = image;
+  *decoded_frame_ = rtc::Optional<VideoFrame>(image);
   decode_complete = true;
   return 0;
 }
@@ -184,8 +185,8 @@ class TestVp8Impl : public ::testing::Test {
     int64_t startTime = rtc::TimeMillis();
     while (rtc::TimeMillis() - startTime < kMaxWaitDecTimeMs) {
       if (decode_complete_callback_->DecodeComplete()) {
-        return CalcBufferSize(kI420, decoded_frame_.width(),
-                              decoded_frame_.height());
+        return CalcBufferSize(kI420, decoded_frame_->width(),
+                              decoded_frame_->height());
       }
     }
     return 0;
@@ -202,7 +203,7 @@ class TestVp8Impl : public ::testing::Test {
   std::unique_ptr<VideoEncoder> encoder_;
   std::unique_ptr<VideoDecoder> decoder_;
   EncodedImage encoded_frame_;
-  VideoFrame decoded_frame_;
+  rtc::Optional<VideoFrame> decoded_frame_;
   VideoCodec codec_inst_;
   TemporalLayersFactory tl_factory_;
 };
@@ -252,10 +253,11 @@ TEST_F(TestVp8Impl, MAYBE_AlignedStrideEncodeDecode) {
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
             decoder_->Decode(encoded_frame_, false, NULL));
   EXPECT_GT(WaitForDecodedFrame(), 0u);
+  ASSERT_TRUE(decoded_frame_);
   // Compute PSNR on all planes (faster than SSIM).
-  EXPECT_GT(I420PSNR(input_frame_.get(), &decoded_frame_), 36);
-  EXPECT_EQ(kTestTimestamp, decoded_frame_.timestamp());
-  EXPECT_EQ(kTestNtpTimeMs, decoded_frame_.ntp_time_ms());
+  EXPECT_GT(I420PSNR(input_frame_.get(), &*decoded_frame_), 36);
+  EXPECT_EQ(kTestTimestamp, decoded_frame_->timestamp());
+  EXPECT_EQ(kTestNtpTimeMs, decoded_frame_->ntp_time_ms());
 }
 
 #if defined(WEBRTC_ANDROID)
@@ -280,7 +282,8 @@ TEST_F(TestVp8Impl, MAYBE_DecodeWithACompleteKeyFrame) {
   encoded_frame_._frameType = kVideoFrameKey;
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
             decoder_->Decode(encoded_frame_, false, NULL));
-  EXPECT_GT(I420PSNR(input_frame_.get(), &decoded_frame_), 36);
+  ASSERT_TRUE(decoded_frame_);
+  EXPECT_GT(I420PSNR(input_frame_.get(), &*decoded_frame_), 36);
 }
 
 } // namespace webrtc
@@ -660,8 +660,6 @@ int VP8EncoderImpl::Encode(const VideoFrame& frame,
 
   if (!inited_)
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
-  if (frame.IsZeroSize())
-    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
   if (encoded_complete_callback_ == NULL)
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
 
@@ -486,9 +486,6 @@ int VP9EncoderImpl::Encode(const VideoFrame& input_image,
   if (!inited_) {
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
   }
-  if (input_image.IsZeroSize()) {
-    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
-  }
   if (encoded_complete_callback_ == NULL) {
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
   }
@@ -123,8 +123,14 @@ int32_t VCMGenericEncoder::SetPeriodicKeyFrames(bool enable) {
 int32_t VCMGenericEncoder::RequestFrame(
     const std::vector<FrameType>& frame_types) {
   RTC_DCHECK_RUNS_SERIALIZED(&race_checker_);
-  VideoFrame image;
-  return encoder_->Encode(image, NULL, &frame_types);
+  // TODO(nisse): Used only with internal source. Delete as soon as
+  // that feature is removed. The only implementation I've been able
+  // to find ignores what's in the frame.
+  return encoder_->Encode(VideoFrame(I420Buffer::Create(1, 1),
+                                     kVideoRotation_0, 0),
+                          NULL, &frame_types);
+  return 0;
 }
 
 bool VCMGenericEncoder::InternalSource() const {
@@ -380,7 +380,6 @@ int H264VideoToolboxEncoder::Encode(
   // |input_frame| size should always match codec settings.
   RTC_DCHECK_EQ(frame.width(), width_);
   RTC_DCHECK_EQ(frame.height(), height_);
-  RTC_DCHECK(!frame.IsZeroSize());
   if (!callback_ || !compression_session_) {
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
   }
@@ -188,7 +188,7 @@ class ScrollingImageFrameGenerator : public FrameGenerator {
     }
     CropSourceToScrolledImage(scroll_factor);
 
-    return &current_frame_;
+    return current_frame_ ? &*current_frame_ : nullptr;
   }
 
   void UpdateSourceFrame(size_t frame_num) {
@@ -219,14 +219,14 @@ class ScrollingImageFrameGenerator : public FrameGenerator {
 
     rtc::scoped_refptr<VideoFrameBuffer> frame_buffer(
         current_source_frame_->video_frame_buffer());
-    current_frame_ = webrtc::VideoFrame(
+    current_frame_ = rtc::Optional<webrtc::VideoFrame>(webrtc::VideoFrame(
         new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
            target_width_, target_height_,
            &frame_buffer->DataY()[offset_y], frame_buffer->StrideY(),
            &frame_buffer->DataU()[offset_u], frame_buffer->StrideU(),
            &frame_buffer->DataV()[offset_v], frame_buffer->StrideV(),
            KeepRefUntilDone(frame_buffer)),
-        kVideoRotation_0, 0);
+        kVideoRotation_0, 0));
   }
 
   Clock* const clock_;
@@ -239,7 +239,7 @@ class ScrollingImageFrameGenerator : public FrameGenerator {
 
   size_t current_frame_num_;
   VideoFrame* current_source_frame_;
-  VideoFrame current_frame_;
+  rtc::Optional<VideoFrame> current_frame_;
   YuvFileGenerator file_generator_;
 };
 
@@ -371,6 +371,7 @@ class VideoAnalyzer : public PacketReceiver,
   struct FrameComparison {
     FrameComparison()
         : dropped(false),
+          input_time_ms(0),
          send_time_ms(0),
          recv_time_ms(0),
          render_time_ms(0),
@@ -379,6 +380,7 @@ class VideoAnalyzer : public PacketReceiver,
     FrameComparison(const VideoFrame& reference,
                     const VideoFrame& render,
                     bool dropped,
+                    int64_t input_time_ms,
                     int64_t send_time_ms,
                     int64_t recv_time_ms,
                     int64_t render_time_ms,
@@ -386,14 +388,29 @@ class VideoAnalyzer : public PacketReceiver,
         : reference(reference),
          render(render),
          dropped(dropped),
+          input_time_ms(input_time_ms),
          send_time_ms(send_time_ms),
          recv_time_ms(recv_time_ms),
          render_time_ms(render_time_ms),
          encoded_frame_size(encoded_frame_size) {}
 
-    VideoFrame reference;
-    VideoFrame render;
+    FrameComparison(bool dropped,
+                    int64_t input_time_ms,
+                    int64_t send_time_ms,
+                    int64_t recv_time_ms,
+                    int64_t render_time_ms,
+                    size_t encoded_frame_size)
+        : dropped(dropped),
+          input_time_ms(input_time_ms),
+          send_time_ms(send_time_ms),
+          recv_time_ms(recv_time_ms),
+          render_time_ms(render_time_ms),
+          encoded_frame_size(encoded_frame_size) {}
+
+    rtc::Optional<VideoFrame> reference;
+    rtc::Optional<VideoFrame> render;
     bool dropped;
+    int64_t input_time_ms;
     int64_t send_time_ms;
     int64_t recv_time_ms;
     int64_t render_time_ms;
@@ -476,21 +493,18 @@ class VideoAnalyzer : public PacketReceiver,
     if (it != encoded_frame_sizes_.end())
       encoded_frame_sizes_.erase(it);
 
-    VideoFrame reference_copy;
-    VideoFrame render_copy;
-
     rtc::CritScope crit(&comparison_lock_);
     if (comparisons_.size() < kMaxComparisons) {
-      reference_copy = reference;
-      render_copy = render;
+      comparisons_.push_back(FrameComparison(reference, render, dropped,
+                                             reference.ntp_time_ms(),
+                                             send_time_ms, recv_time_ms,
+                                             render_time_ms, encoded_size));
     } else {
-      // Copy the time to ensure that delay calculations can still be made.
-      reference_copy.set_ntp_time_ms(reference.ntp_time_ms());
-      render_copy.set_ntp_time_ms(render.ntp_time_ms());
+      comparisons_.push_back(FrameComparison(dropped,
+                                             reference.ntp_time_ms(),
+                                             send_time_ms, recv_time_ms,
+                                             render_time_ms, encoded_size));
     }
-    comparisons_.push_back(FrameComparison(reference_copy, render_copy, dropped,
-                                           send_time_ms, recv_time_ms,
-                                           render_time_ms, encoded_size));
     comparison_available_event_.Set();
   }
 
@@ -527,8 +541,6 @@ class VideoAnalyzer : public PacketReceiver,
     if (AllFramesRecorded())
       return false;
 
-    VideoFrame reference;
-    VideoFrame render;
     FrameComparison comparison;
 
     if (!PopComparison(&comparison)) {
@@ -624,19 +636,17 @@ class VideoAnalyzer : public PacketReceiver,
     // Perform expensive psnr and ssim calculations while not holding lock.
     double psnr = -1.0;
     double ssim = -1.0;
-    if (!comparison.reference.IsZeroSize()) {
-      psnr = I420PSNR(&comparison.reference, &comparison.render);
-      ssim = I420SSIM(&comparison.reference, &comparison.render);
+    if (comparison.reference) {
+      psnr = I420PSNR(&*comparison.reference, &*comparison.render);
+      ssim = I420SSIM(&*comparison.reference, &*comparison.render);
     }
 
-    int64_t input_time_ms = comparison.reference.ntp_time_ms();
-
     rtc::CritScope crit(&comparison_lock_);
     if (graph_data_output_file_) {
-      samples_.push_back(
-          Sample(comparison.dropped, input_time_ms, comparison.send_time_ms,
+      samples_.push_back(Sample(
+          comparison.dropped, comparison.input_time_ms, comparison.send_time_ms,
          comparison.recv_time_ms, comparison.render_time_ms,
          comparison.encoded_frame_size, psnr, ssim));
     }
     if (psnr >= 0.0)
       psnr_.AddSample(psnr);
@@ -651,10 +661,10 @@ class VideoAnalyzer : public PacketReceiver,
     rendered_delta_.AddSample(comparison.render_time_ms - last_render_time_);
     last_render_time_ = comparison.render_time_ms;
 
-    sender_time_.AddSample(comparison.send_time_ms - input_time_ms);
+    sender_time_.AddSample(comparison.send_time_ms - comparison.input_time_ms);
     receiver_time_.AddSample(comparison.render_time_ms -
                              comparison.recv_time_ms);
-    end_to_end_.AddSample(comparison.render_time_ms - input_time_ms);
+    end_to_end_.AddSample(comparison.render_time_ms - comparison.input_time_ms);
     encoded_frame_size_.AddSample(comparison.encoded_frame_size);
   }
 
@@ -86,10 +86,10 @@ class ViEEncoder::EncodeTask : public rtc::QueuedTask {
              ViEEncoder* vie_encoder,
              int64_t time_when_posted_in_ms,
              bool log_stats)
-      : vie_encoder_(vie_encoder),
+      : frame_(frame),
+        vie_encoder_(vie_encoder),
        time_when_posted_ms_(time_when_posted_in_ms),
        log_stats_(log_stats) {
-    frame_ = frame;
     ++vie_encoder_->posted_frames_waiting_for_encode_;
   }
 
@@ -22,10 +22,6 @@ namespace webrtc {
 
 class VideoFrame {
  public:
-  // TODO(nisse): Deprecated. Using the default constructor violates the
-  // reasonable assumption that video_frame_buffer() returns a valid buffer.
-  VideoFrame();
-
   // TODO(nisse): This constructor is consistent with
   // cricket::WebRtcVideoFrame. After the class
   // cricket::WebRtcVideoFrame and its baseclass cricket::VideoFrame
@@ -106,15 +102,6 @@ class VideoFrame {
     return timestamp_us() / rtc::kNumMicrosecsPerMillisec;
   }
 
-  // Return true if and only if video_frame_buffer() is null. Which is possible
-  // only if the object was default-constructed.
-  // TODO(nisse): Deprecated. Should be deleted in the cricket::VideoFrame and
-  // webrtc::VideoFrame merge. The intention is that video_frame_buffer() never
-  // should return nullptr. To handle potentially uninitialized or non-existent
-  // frames, consider using rtc::Optional. Otherwise, IsZeroSize() can be
-  // replaced by video_frame_buffer() == nullptr.
-  bool IsZeroSize() const;
-
   // Return the underlying buffer. Never nullptr for a properly
   // initialized VideoFrame.
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer() const;