Don't force key frame when decoding with errors

BUG=2241
R=stefan@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/2036004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@4597 4adac7df-926f-26a2-2b94-8c16560cd09d
Author: mikhal@webrtc.org
Date:   2013-08-22 23:29:43 +00:00
Commit: 44af55cc44 (parent 61b262c427)

11 changed files with 122 additions and 83 deletions
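In short: instead of tracking latest_keyframe_complete_ and forcing/assuming a key frame after decode errors, the decoder now keeps a key_frame_required_ flag that starts out true and is cleared only once a complete key frame has been decoded; until then Decode() returns WEBRTC_VIDEO_CODEC_ERROR so the caller can request a key frame. Below is a minimal, self-contained sketch of that behavior. FakeVp8Decoder and Frame are illustrative names only, not WebRTC types; the real logic is the key_frame_required_ gate added to VP8DecoderImpl::Decode() in the vp8_impl.cc hunks further down.

#include <cassert>

// Illustrative sketch only; mirrors the gate added to VP8DecoderImpl::Decode().
enum FrameType { kKeyFrame, kDeltaFrame };
enum { kOk = 0, kError = -1 };

struct Frame {
  FrameType type;
  bool complete;
};

class FakeVp8Decoder {
 public:
  int Decode(const Frame& frame) {
    if (key_frame_required_) {
      // Refuse everything until one complete key frame has been decoded;
      // the caller is expected to request a key frame on error.
      if (frame.type != kKeyFrame || !frame.complete)
        return kError;
      key_frame_required_ = false;
    }
    return kOk;
  }

 private:
  bool key_frame_required_ = true;  // Always start with a complete key frame.
};

int main() {
  FakeVp8Decoder decoder;
  assert(decoder.Decode({kDeltaFrame, true}) == kError);  // No key frame yet.
  assert(decoder.Decode({kKeyFrame, false}) == kError);   // Incomplete key frame.
  assert(decoder.Decode({kKeyFrame, true}) == kOk);       // Requirement cleared.
  assert(decoder.Decode({kDeltaFrame, true}) == kOk);     // Deltas now decode.
  return 0;
}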


@@ -150,8 +150,8 @@ CodecTest::VideoEncodedBufferToEncodedImage(VideoFrame& videoBuffer,
image._buffer = videoBuffer.Buffer();
image._length = videoBuffer.Length();
image._size = videoBuffer.Size();
//image._frameType = static_cast<VideoFrameType>
// (videoBuffer.GetFrameType());
// image._frameType = static_cast<VideoFrameType>
// (videoBuffer.GetFrameType());
image._timeStamp = videoBuffer.TimeStamp();
image._encodedWidth = videoBuffer.Width();
image._encodedHeight = videoBuffer.Height();


@@ -32,6 +32,7 @@ _refEncFrame(NULL),
_refDecFrame(NULL),
_refEncFrameLength(0),
_sourceFile(NULL),
is_key_frame_(false),
_encodeCompleteCallback(NULL),
_decodeCompleteCallback(NULL)
{
@@ -48,6 +49,7 @@ _refEncFrame(NULL),
_refDecFrame(NULL),
_refEncFrameLength(0),
_sourceFile(NULL),
is_key_frame_(false),
_encodeCompleteCallback(NULL),
_decodeCompleteCallback(NULL)
{
@@ -254,23 +256,27 @@ UnitTest::Setup()
ASSERT_FALSE(SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK);
unsigned int frameLength = 0;
int i=0;
int i = 0;
_inputVideoBuffer.CreateEmptyFrame(_inst.width, _inst.height, _inst.width,
(_inst.width + 1) / 2,
(_inst.width + 1) / 2);
while (frameLength == 0)
{
EncodedImage encodedImage;
if (i > 0)
{
// Insert yet another frame
// Insert yet another frame.
ASSERT_TRUE(fread(_refFrame, 1, _lengthSourceFrame,
_sourceFile) == _lengthSourceFrame);
EXPECT_EQ(0, ConvertToI420(kI420, _refFrame, 0, 0, _width, _height,
0, kRotateNone, &_inputVideoBuffer));
_encoder->Encode(_inputVideoBuffer, NULL, NULL);
ASSERT_TRUE(WaitForEncodedFrame() > 0);
} else {
// The first frame is always a key frame.
encodedImage._frameType = kKeyFrame;
}
EncodedImage encodedImage;
VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
ASSERT_TRUE(_decoder->Decode(encodedImage, 0, NULL)
== WEBRTC_VIDEO_CODEC_OK);
@@ -332,6 +338,10 @@ UnitTest::Decode()
{
return WEBRTC_VIDEO_CODEC_OK;
}
if (is_key_frame_) {
encodedImage._frameType = kKeyFrame;
}
int ret = _decoder->Decode(encodedImage, 0, NULL);
unsigned int frameLength = WaitForDecodedFrame();
assert(ret == WEBRTC_VIDEO_CODEC_OK && (frameLength == 0 || frameLength
@@ -526,6 +536,10 @@ UnitTest::Perform()
memset(tmpBuf, 0, _refEncFrameLength);
_encodedVideoBuffer.CopyFrame(_refEncFrameLength, tmpBuf);
VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
if (i == 0) {
// First frame is a key frame.
is_key_frame_ = true;
}
ret = _decoder->Decode(encodedImage, false, NULL);
EXPECT_TRUE(ret <= 0);
if (ret == 0)
@@ -543,6 +557,8 @@ UnitTest::Perform()
ASSERT_FALSE(SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK);
frameLength = 0;
VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
// First frame is a key frame.
encodedImage._frameType = kKeyFrame;
while (frameLength == 0)
{
_decoder->Decode(encodedImage, false, NULL);
@@ -686,6 +702,10 @@ UnitTest::Perform()
encTimeStamp = _encodedVideoBuffer.TimeStamp();
EXPECT_TRUE(_inputVideoBuffer.timestamp() ==
static_cast<unsigned>(encTimeStamp));
if (frames == 0) {
// First frame is always a key frame.
is_key_frame_ = true;
}
frameLength = Decode();
if (frameLength == 0)


@@ -63,6 +63,7 @@ protected:
unsigned char* _refDecFrame;
unsigned int _refEncFrameLength;
FILE* _sourceFile;
bool is_key_frame_;
UnitTestEncodeCompleteCallback* _encodeCompleteCallback;
UnitTestDecodeCompleteCallback* _decodeCompleteCallback;


@@ -106,6 +106,43 @@ class TestVp8Impl : public ::testing::Test {
Vp8UnitTestDecodeCompleteCallback(&decoded_video_frame_));
encoder_->RegisterEncodeCompleteCallback(encode_complete_callback_.get());
decoder_->RegisterDecodeCompleteCallback(decode_complete_callback_.get());
// Using a QCIF image (aligned stride (u,v planes) > width).
// Processing only one frame.
const VideoSource source(test::ResourcePath("paris_qcif", "yuv"), kQCIF);
length_source_frame_ = source.GetFrameLength();
source_buffer_.reset(new uint8_t[length_source_frame_]);
source_file_ = fopen(source.GetFileName().c_str(), "rb");
ASSERT_TRUE(source_file_ != NULL);
// Set input frame.
ASSERT_EQ(fread(source_buffer_.get(), 1, length_source_frame_,
source_file_), length_source_frame_);
codec_inst_.width = source.GetWidth();
codec_inst_.height = source.GetHeight();
codec_inst_.maxFramerate = source.GetFrameRate();
// Setting aligned stride values.
int stride_uv = 0;
int stride_y = 0;
Calc16ByteAlignedStride(codec_inst_.width, &stride_y, &stride_uv);
EXPECT_EQ(stride_y, 176);
EXPECT_EQ(stride_uv, 96);
input_frame_.CreateEmptyFrame(codec_inst_.width, codec_inst_.height,
stride_y, stride_uv, stride_uv);
// Using ConvertToI420 to add stride to the image.
EXPECT_EQ(0, ConvertToI420(kI420, source_buffer_.get(), 0, 0,
codec_inst_.width, codec_inst_.height,
0, kRotateNone, &input_frame_));
}
void SetUpEncodeDecode() {
codec_inst_.startBitrate = 300;
codec_inst_.maxBitrate = 4000;
codec_inst_.qpMax = 56;
codec_inst_.codecSpecific.VP8.denoisingOn = true;
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_inst_, 1, 1440));
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->InitDecode(&codec_inst_, 1));
}
int WaitForEncodedFrame() const {
@@ -143,6 +180,7 @@ class TestVp8Impl : public ::testing::Test {
scoped_ptr<Vp8UnitTestDecodeCompleteCallback> decode_complete_callback_;
scoped_array<uint8_t> source_buffer_;
FILE* source_file_;
I420VideoFrame input_frame_;
scoped_ptr<VideoEncoder> encoder_;
scoped_ptr<VideoDecoder> decoder_;
VideoFrame encoded_video_frame_;
@@ -190,49 +228,38 @@ TEST_F(TestVp8Impl, EncoderParameterTest) {
}
TEST_F(TestVp8Impl, DISABLED_ON_ANDROID(AlignedStrideEncodeDecode)) {
// Using a QCIF image (aligned stride (u,v planse) > width).
// Processing only one frame.
const VideoSource source(test::ResourcePath("paris_qcif", "yuv"), kQCIF);
length_source_frame_ = source.GetFrameLength();
source_buffer_.reset(new uint8_t[length_source_frame_]);
source_file_ = fopen(source.GetFileName().c_str(), "rb");
ASSERT_TRUE(source_file_ != NULL);
codec_inst_.maxFramerate = source.GetFrameRate();
codec_inst_.startBitrate = 300;
codec_inst_.maxBitrate = 4000;
codec_inst_.qpMax = 56;
codec_inst_.width = source.GetWidth();
codec_inst_.height = source.GetHeight();
codec_inst_.codecSpecific.VP8.denoisingOn = true;
// Get input frame.
ASSERT_EQ(fread(source_buffer_.get(), 1, length_source_frame_, source_file_),
length_source_frame_);
// Setting aligned stride values.
int stride_uv = 0;
int stride_y = 0;
Calc16ByteAlignedStride(codec_inst_.width, &stride_y, &stride_uv);
EXPECT_EQ(stride_y, 176);
EXPECT_EQ(stride_uv, 96);
I420VideoFrame input_frame;
input_frame.CreateEmptyFrame(codec_inst_.width, codec_inst_.height,
stride_y, stride_uv, stride_uv);
// Using ConvertToI420 to add stride to the image.
EXPECT_EQ(0, ConvertToI420(kI420, source_buffer_.get(), 0, 0,
codec_inst_.width, codec_inst_.height,
0, kRotateNone, &input_frame));
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->InitEncode(&codec_inst_, 1, 1440));
encoder_->Encode(input_frame, NULL, NULL);
SetUpEncodeDecode();
encoder_->Encode(input_frame_, NULL, NULL);
EXPECT_GT(WaitForEncodedFrame(), 0);
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->InitDecode(&codec_inst_, 1));
EncodedImage encodedImage;
VideoFrameToEncodedImage(encoded_video_frame_, encodedImage);
// First frame should be a key frame.
encodedImage._frameType = kKeyFrame;
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Decode(encodedImage, false, NULL));
EXPECT_GT(WaitForDecodedFrame(), 0);
// Compute PSNR on all planes (faster than SSIM).
EXPECT_GT(I420PSNR(&input_frame, &decoded_video_frame_), 36);
EXPECT_GT(I420PSNR(&input_frame_, &decoded_video_frame_), 36);
}
TEST_F(TestVp8Impl, DecodeWithACompleteKeyFrame) {
SetUpEncodeDecode();
encoder_->Encode(input_frame_, NULL, NULL);
EXPECT_GT(WaitForEncodedFrame(), 0);
EncodedImage encodedImage;
VideoFrameToEncodedImage(encoded_video_frame_, encodedImage);
// Setting complete to false -> should return an error.
encodedImage._completeFrame = false;
EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERROR,
decoder_->Decode(encodedImage, false, NULL));
// Setting complete back to true.
encodedImage._completeFrame = true;
EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERROR,
decoder_->Decode(encodedImage, false, NULL));
// Now setting a key frame.
encodedImage._frameType = kKeyFrame;
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
decoder_->Decode(encodedImage, false, NULL));
EXPECT_GT(I420PSNR(&input_frame_, &decoded_video_frame_), 36);
}
} // namespace webrtc


@@ -502,8 +502,8 @@ VP8DecoderImpl::VP8DecoderImpl()
image_format_(VPX_IMG_FMT_NONE),
ref_frame_(NULL),
propagation_cnt_(-1),
latest_keyframe_complete_(false),
mfqe_enabled_(false) {
mfqe_enabled_(false),
key_frame_required_(true) {
memset(&codec_, 0, sizeof(codec_));
}
@@ -518,7 +518,6 @@ int VP8DecoderImpl::Reset() {
}
InitDecode(&codec_, 1);
propagation_cnt_ = -1;
latest_keyframe_complete_ = false;
mfqe_enabled_ = false;
return WEBRTC_VIDEO_CODEC_OK;
}
@@ -571,9 +570,12 @@ int VP8DecoderImpl::InitDecode(const VideoCodec* inst, int number_of_cores) {
}
propagation_cnt_ = -1;
latest_keyframe_complete_ = false;
inited_ = true;
// Always start with a complete key frame.
key_frame_required_ = true;
return WEBRTC_VIDEO_CODEC_OK;
}
@@ -615,6 +617,18 @@ int VP8DecoderImpl::Decode(const EncodedImage& input_image,
}
#endif
// Always start with a complete key frame.
if (key_frame_required_) {
if (input_image._frameType != kKeyFrame)
return WEBRTC_VIDEO_CODEC_ERROR;
// We have a key frame - is it complete?
if (input_image._completeFrame) {
key_frame_required_ = false;
} else {
return WEBRTC_VIDEO_CODEC_ERROR;
}
}
// Restrict error propagation using key frame requests. Disabled when
// the feedback mode is enabled (RPS).
// Reset on a key frame refresh.
@@ -708,9 +722,7 @@ int VP8DecoderImpl::Decode(const EncodedImage& input_image,
// Whenever we receive an incomplete key frame all reference buffers will
// be corrupt. If that happens we must request new key frames until we
// decode a complete key frame.
if (input_image._frameType == kKeyFrame)
latest_keyframe_complete_ = input_image._completeFrame;
if (!latest_keyframe_complete_)
if (input_image._frameType == kKeyFrame && !input_image._completeFrame)
return WEBRTC_VIDEO_CODEC_ERROR;
// Check for reference updates and last reference buffer corruption and


@@ -226,8 +226,8 @@ class VP8DecoderImpl : public VP8Decoder {
int image_format_;
vpx_ref_frame_t* ref_frame_;
int propagation_cnt_;
bool latest_keyframe_complete_;
bool mfqe_enabled_;
bool key_frame_required_;
}; // end of VP8DecoderImpl class
} // namespace webrtc