Add EncodedImageCallback::OnEncodedImage().

OnEncodedImage() replaces Encoded(), which is now deprecated. The new
OnEncodedImage() returns a Result struct that contains frame_id, which
reports the frame's RTP timestamp back to the encoder.
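
As a reference for implementers, a minimal sketch of a sink that implements
the new interface. The class name and the comment are illustrative; the
signature and the Result usage follow the changes in this CL, and the include
path is the assumed location of EncodedImageCallback.

#include "webrtc/video_encoder.h"  // Assumed header for EncodedImageCallback.

class ExampleEncodedImageSink : public webrtc::EncodedImageCallback {
 public:
  Result OnEncodedImage(
      const webrtc::EncodedImage& encoded_image,
      const webrtc::CodecSpecificInfo* codec_specific_info,
      const webrtc::RTPFragmentationHeader* fragmentation) override {
    // Deliver the frame somewhere (omitted), then report the RTP timestamp
    // the encoder should associate with this frame via Result::frame_id.
    return Result(Result::OK, encoded_image._timeStamp);
  }
};

Returning Result(Result::ERROR_SEND_FAILED, 0) instead signals a send failure
to the caller, as the Vp8SequenceCoderEncodeCallback below now does when
fwrite() fails.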

BUG=chromium:621691
R=niklas.enbom@webrtc.org, sprang@webrtc.org, stefan@webrtc.org

Review URL: https://codereview.webrtc.org/2089773002 .

Committed: https://crrev.com/4c7f4cd2ef76821edca6d773d733a924b0bedd25
Committed: https://crrev.com/ad34dbe934d47f88011045671b4aea00dbd5a795
Cr-Original-Original-Commit-Position: refs/heads/master@{#13613}
Cr-Original-Commit-Position: refs/heads/master@{#13615}
Cr-Commit-Position: refs/heads/master@{#13617}
Author: Sergey Ulanov
Date: 2016-08-02 17:46:41 -07:00
parent 51db4dd1bd
commit 525df3ffd1
43 changed files with 477 additions and 382 deletions


@ -410,7 +410,8 @@ const char* VideoCodecTypeToStr(webrtc::VideoCodecType e) {
}
// Callbacks
int32_t VideoProcessorImpl::VideoProcessorEncodeCompleteCallback::Encoded(
EncodedImageCallback::Result
VideoProcessorImpl::VideoProcessorEncodeCompleteCallback::OnEncodedImage(
const EncodedImage& encoded_image,
const webrtc::CodecSpecificInfo* codec_specific_info,
const webrtc::RTPFragmentationHeader* fragmentation) {
@ -419,7 +420,7 @@ int32_t VideoProcessorImpl::VideoProcessorEncodeCompleteCallback::Encoded(
video_processor_->FrameEncoded(codec_specific_info->codecType,
encoded_image,
fragmentation);
return 0;
return Result(Result::OK, 0);
}
int32_t VideoProcessorImpl::VideoProcessorDecodeCompleteCallback::Decoded(
VideoFrame& image) {


@ -230,7 +230,7 @@ class VideoProcessorImpl : public VideoProcessor {
public:
explicit VideoProcessorEncodeCompleteCallback(VideoProcessorImpl* vp)
: video_processor_(vp) {}
int32_t Encoded(
Result OnEncodedImage(
const webrtc::EncodedImage& encoded_image,
const webrtc::CodecSpecificInfo* codec_specific_info,
const webrtc::RTPFragmentationHeader* fragmentation) override;


@ -120,12 +120,12 @@ class AdapterEncodedImageCallback : public webrtc::EncodedImageCallback {
size_t stream_idx)
: adapter_(adapter), stream_idx_(stream_idx) {}
int32_t Encoded(
const webrtc::EncodedImage& encodedImage,
const webrtc::CodecSpecificInfo* codecSpecificInfo = NULL,
const webrtc::RTPFragmentationHeader* fragmentation = NULL) override {
return adapter_->Encoded(stream_idx_, encodedImage, codecSpecificInfo,
fragmentation);
EncodedImageCallback::Result OnEncodedImage(
const webrtc::EncodedImage& encoded_image,
const webrtc::CodecSpecificInfo* codec_specific_info,
const webrtc::RTPFragmentationHeader* fragmentation) override {
return adapter_->OnEncodedImage(stream_idx_, encoded_image,
codec_specific_info, fragmentation);
}
private:
@ -404,7 +404,7 @@ int SimulcastEncoderAdapter::SetRates(uint32_t new_bitrate_kbit,
return WEBRTC_VIDEO_CODEC_OK;
}
int32_t SimulcastEncoderAdapter::Encoded(
EncodedImageCallback::Result SimulcastEncoderAdapter::OnEncodedImage(
size_t stream_idx,
const EncodedImage& encodedImage,
const CodecSpecificInfo* codecSpecificInfo,
@ -413,7 +413,7 @@ int32_t SimulcastEncoderAdapter::Encoded(
CodecSpecificInfoVP8* vp8Info = &(stream_codec_specific.codecSpecific.VP8);
vp8Info->simulcastIdx = stream_idx;
return encoded_complete_callback_->Encoded(
return encoded_complete_callback_->OnEncodedImage(
encodedImage, &stream_codec_specific, fragmentation);
}


@ -51,10 +51,11 @@ class SimulcastEncoderAdapter : public VP8Encoder {
// Eventual handler for the contained encoders' EncodedImageCallbacks, but
// called from an internal helper that also knows the correct stream
// index.
int32_t Encoded(size_t stream_idx,
const EncodedImage& encodedImage,
const CodecSpecificInfo* codecSpecificInfo = NULL,
const RTPFragmentationHeader* fragmentation = NULL);
EncodedImageCallback::Result OnEncodedImage(
size_t stream_idx,
const EncodedImage& encoded_image,
const CodecSpecificInfo* codec_specific_info,
const RTPFragmentationHeader* fragmentation);
void OnDroppedFrame() override;


@ -242,16 +242,16 @@ class TestSimulcastEncoderAdapterFake : public ::testing::Test,
last_encoded_image_simulcast_index_(-1) {}
virtual ~TestSimulcastEncoderAdapterFake() {}
int32_t Encoded(const EncodedImage& encodedImage,
const CodecSpecificInfo* codecSpecificInfo = NULL,
const RTPFragmentationHeader* fragmentation = NULL) override {
last_encoded_image_width_ = encodedImage._encodedWidth;
last_encoded_image_height_ = encodedImage._encodedHeight;
if (codecSpecificInfo) {
Result OnEncodedImage(const EncodedImage& encoded_image,
const CodecSpecificInfo* codec_specific_info,
const RTPFragmentationHeader* fragmentation) override {
last_encoded_image_width_ = encoded_image._encodedWidth;
last_encoded_image_height_ = encoded_image._encodedHeight;
if (codec_specific_info) {
last_encoded_image_simulcast_index_ =
codecSpecificInfo->codecSpecific.VP8.simulcastIdx;
codec_specific_info->codecSpecific.VP8.simulcastIdx;
}
return 0;
return Result(Result::OK, encoded_image._timeStamp);
}
bool GetLastEncodedImageInfo(int* out_width,


@ -61,9 +61,9 @@ class Vp8TestEncodedImageCallback : public EncodedImageCallback {
delete[] encoded_frame_._buffer;
}
virtual int32_t Encoded(const EncodedImage& encoded_image,
const CodecSpecificInfo* codec_specific_info,
const RTPFragmentationHeader* fragmentation) {
virtual Result OnEncodedImage(const EncodedImage& encoded_image,
const CodecSpecificInfo* codec_specific_info,
const RTPFragmentationHeader* fragmentation) {
// Only store the base layer.
if (codec_specific_info->codecSpecific.VP8.simulcastIdx == 0) {
if (encoded_image._frameType == kVideoFrameKey) {
@ -89,7 +89,7 @@ class Vp8TestEncodedImageCallback : public EncodedImageCallback {
codec_specific_info->codecSpecific.VP8.layerSync;
temporal_layer_[codec_specific_info->codecSpecific.VP8.simulcastIdx] =
codec_specific_info->codecSpecific.VP8.temporalIdx;
return 0;
return Result(Result::OK, encoded_image._timeStamp);
}
void GetLastEncodedFrameInfo(int* picture_id,
int* temporal_layer,
@ -338,34 +338,38 @@ class TestVp8Simulcast : public ::testing::Test {
if (expected_video_streams >= 1) {
EXPECT_CALL(
encoder_callback_,
Encoded(
OnEncodedImage(
AllOf(Field(&EncodedImage::_frameType, frame_type),
Field(&EncodedImage::_encodedWidth, kDefaultWidth / 4),
Field(&EncodedImage::_encodedHeight, kDefaultHeight / 4)),
_, _))
.Times(1)
.WillRepeatedly(Return(0));
.WillRepeatedly(Return(EncodedImageCallback::Result(
EncodedImageCallback::Result::OK, 0)));
}
if (expected_video_streams >= 2) {
EXPECT_CALL(
encoder_callback_,
Encoded(
OnEncodedImage(
AllOf(Field(&EncodedImage::_frameType, frame_type),
Field(&EncodedImage::_encodedWidth, kDefaultWidth / 2),
Field(&EncodedImage::_encodedHeight, kDefaultHeight / 2)),
_, _))
.Times(1)
.WillRepeatedly(Return(0));
.WillRepeatedly(Return(EncodedImageCallback::Result(
EncodedImageCallback::Result::OK, 0)));
}
if (expected_video_streams >= 3) {
EXPECT_CALL(
encoder_callback_,
Encoded(AllOf(Field(&EncodedImage::_frameType, frame_type),
Field(&EncodedImage::_encodedWidth, kDefaultWidth),
Field(&EncodedImage::_encodedHeight, kDefaultHeight)),
_, _))
OnEncodedImage(
AllOf(Field(&EncodedImage::_frameType, frame_type),
Field(&EncodedImage::_encodedWidth, kDefaultWidth),
Field(&EncodedImage::_encodedHeight, kDefaultHeight)),
_, _))
.Times(1)
.WillRepeatedly(Return(0));
.WillRepeatedly(Return(EncodedImageCallback::Result(
EncodedImageCallback::Result::OK, 0)));
}
}
@ -590,13 +594,15 @@ class TestVp8Simulcast : public ::testing::Test {
encoder_->SetRates(kMaxBitrates[0] + kMaxBitrates[1], 30);
std::vector<FrameType> frame_types(kNumberOfSimulcastStreams,
kVideoFrameDelta);
EXPECT_CALL(encoder_callback_,
Encoded(AllOf(Field(&EncodedImage::_frameType, kVideoFrameKey),
Field(&EncodedImage::_encodedWidth, width),
Field(&EncodedImage::_encodedHeight, height)),
_, _))
EXPECT_CALL(
encoder_callback_,
OnEncodedImage(AllOf(Field(&EncodedImage::_frameType, kVideoFrameKey),
Field(&EncodedImage::_encodedWidth, width),
Field(&EncodedImage::_encodedHeight, height)),
_, _))
.Times(1)
.WillRepeatedly(Return(0));
.WillRepeatedly(Return(
EncodedImageCallback::Result(EncodedImageCallback::Result::OK, 0)));
EXPECT_EQ(0, encoder_->Encode(input_frame_, NULL, &frame_types));
// Switch back.


@ -43,9 +43,9 @@ class Vp8UnitTestEncodeCompleteCallback : public webrtc::EncodedImageCallback {
void* decoderSpecificInfo)
: encoded_frame_(frame), encode_complete_(false) {}
virtual int Encoded(const EncodedImage& encoded_frame_,
const CodecSpecificInfo* codecSpecificInfo,
const RTPFragmentationHeader*);
Result OnEncodedImage(const EncodedImage& encoded_frame_,
const CodecSpecificInfo* codec_specific_info,
const RTPFragmentationHeader* fragmentation) override;
bool EncodeComplete();
private:
@ -54,9 +54,10 @@ class Vp8UnitTestEncodeCompleteCallback : public webrtc::EncodedImageCallback {
bool encode_complete_;
};
int Vp8UnitTestEncodeCompleteCallback::Encoded(
webrtc::EncodedImageCallback::Result
Vp8UnitTestEncodeCompleteCallback::OnEncodedImage(
const EncodedImage& encoded_frame,
const CodecSpecificInfo* codecSpecificInfo,
const CodecSpecificInfo* codec_specific_info,
const RTPFragmentationHeader* fragmentation) {
if (encoded_frame_->_size < encoded_frame._length) {
delete[] encoded_frame_->_buffer;
@ -72,7 +73,7 @@ int Vp8UnitTestEncodeCompleteCallback::Encoded(
encoded_frame_->_frameType = encoded_frame._frameType;
encoded_frame_->_completeFrame = encoded_frame._completeFrame;
encode_complete_ = true;
return 0;
return Result(Result::OK, 0);
}
bool Vp8UnitTestEncodeCompleteCallback::EncodeComplete() {


@ -26,9 +26,9 @@ class Vp8SequenceCoderEncodeCallback : public webrtc::EncodedImageCallback {
explicit Vp8SequenceCoderEncodeCallback(FILE* encoded_file)
: encoded_file_(encoded_file), encoded_bytes_(0) {}
~Vp8SequenceCoderEncodeCallback();
int Encoded(const webrtc::EncodedImage& encoded_image,
const webrtc::CodecSpecificInfo* codecSpecificInfo,
const webrtc::RTPFragmentationHeader*);
Result OnEncodedImage(const webrtc::EncodedImage& encoded_image,
const webrtc::CodecSpecificInfo* codec_specific_info,
const webrtc::RTPFragmentationHeader*);
// Returns the encoded image.
webrtc::EncodedImage encoded_image() { return encoded_image_; }
size_t encoded_bytes() { return encoded_bytes_; }
@ -43,7 +43,9 @@ Vp8SequenceCoderEncodeCallback::~Vp8SequenceCoderEncodeCallback() {
delete[] encoded_image_._buffer;
encoded_image_._buffer = NULL;
}
int Vp8SequenceCoderEncodeCallback::Encoded(
webrtc::EncodedImageCallback::Result
Vp8SequenceCoderEncodeCallback::OnEncodedImage(
const webrtc::EncodedImage& encoded_image,
const webrtc::CodecSpecificInfo* codecSpecificInfo,
const webrtc::RTPFragmentationHeader* fragmentation) {
@ -58,11 +60,11 @@ int Vp8SequenceCoderEncodeCallback::Encoded(
if (encoded_file_ != NULL) {
if (fwrite(encoded_image._buffer, 1, encoded_image._length,
encoded_file_) != encoded_image._length) {
return -1;
return Result(Result::ERROR_SEND_FAILED, 0);
}
}
encoded_bytes_ += encoded_image_._length;
return 0;
return Result(Result::OK, 0);
}
// TODO(mikhal): Add support for varying the frame size.


@ -21,6 +21,7 @@
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
namespace webrtc {
VCMGenericEncoder::VCMGenericEncoder(
VideoEncoder* encoder,
VCMEncodedFrameCallback* encoded_frame_callback,
@ -143,23 +144,25 @@ VCMEncodedFrameCallback::VCMEncodedFrameCallback(
VCMEncodedFrameCallback::~VCMEncodedFrameCallback() {}
int32_t VCMEncodedFrameCallback::Encoded(
EncodedImageCallback::Result VCMEncodedFrameCallback::OnEncodedImage(
const EncodedImage& encoded_image,
const CodecSpecificInfo* codec_specific,
const RTPFragmentationHeader* fragmentation_header) {
TRACE_EVENT_INSTANT1("webrtc", "VCMEncodedFrameCallback::Encoded",
"timestamp", encoded_image._timeStamp);
int ret_val = post_encode_callback_->Encoded(encoded_image, codec_specific,
fragmentation_header);
if (ret_val < 0)
return ret_val;
Result result = post_encode_callback_->OnEncodedImage(
encoded_image, codec_specific, fragmentation_header);
if (result.error != Result::OK)
return result;
if (media_opt_) {
media_opt_->UpdateWithEncodedData(encoded_image);
if (internal_source_)
return media_opt_->DropFrame(); // Signal to encoder to drop next frame.
if (internal_source_) {
// Signal to encoder to drop next frame.
result.drop_next_frame = media_opt_->DropFrame();
}
}
return VCM_OK;
return result;
}
} // namespace webrtc
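
A hedged sketch, not part of this diff, of how an encoder with an internal
source can consume the new return value; encoded_complete_callback_ matches
the member used in the adapter above, while encoded_image, codec_specific,
frag_header and drop_next_frame_ are placeholder names:

  EncodedImageCallback::Result result =
      encoded_complete_callback_->OnEncodedImage(encoded_image,
                                                 &codec_specific, &frag_header);
  if (result.error != EncodedImageCallback::Result::OK)
    return WEBRTC_VIDEO_CODEC_ERROR;
  if (result.drop_next_frame)
    drop_next_frame_ = true;  // Skip encoding the next incoming frame.
  return WEBRTC_VIDEO_CODEC_OK;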


@ -41,9 +41,10 @@ class VCMEncodedFrameCallback : public EncodedImageCallback {
virtual ~VCMEncodedFrameCallback();
// Implements EncodedImageCallback.
int32_t Encoded(const EncodedImage& encoded_image,
const CodecSpecificInfo* codec_specific,
const RTPFragmentationHeader* fragmentation_header) override;
EncodedImageCallback::Result OnEncodedImage(
const EncodedImage& encoded_image,
const CodecSpecificInfo* codec_specific_info,
const RTPFragmentationHeader* fragmentation) override;
void SetInternalSource(bool internal_source) {
internal_source_ = internal_source;
}


@ -22,10 +22,10 @@ namespace webrtc {
class MockEncodedImageCallback : public EncodedImageCallback {
public:
MOCK_METHOD3(Encoded,
int32_t(const EncodedImage& encodedImage,
const CodecSpecificInfo* codecSpecificInfo,
const RTPFragmentationHeader* fragmentation));
MOCK_METHOD3(OnEncodedImage,
Result(const EncodedImage& encodedImage,
const CodecSpecificInfo* codecSpecificInfo,
const RTPFragmentationHeader* fragmentation));
};
class MockVideoEncoder : public VideoEncoder {


@ -45,7 +45,8 @@ namespace {
class EncodedImageCallbackWrapper : public EncodedImageCallback {
public:
EncodedImageCallbackWrapper()
: cs_(CriticalSectionWrapper::CreateCriticalSection()), callback_(NULL) {}
: cs_(CriticalSectionWrapper::CreateCriticalSection()),
callback_(nullptr) {}
virtual ~EncodedImageCallbackWrapper() {}
@ -54,14 +55,15 @@ class EncodedImageCallbackWrapper : public EncodedImageCallback {
callback_ = callback;
}
virtual int32_t Encoded(const EncodedImage& encoded_image,
const CodecSpecificInfo* codec_specific_info,
const RTPFragmentationHeader* fragmentation) {
virtual Result OnEncodedImage(const EncodedImage& encoded_image,
const CodecSpecificInfo* codec_specific_info,
const RTPFragmentationHeader* fragmentation) {
CriticalSectionScoped cs(cs_.get());
if (callback_)
return callback_->Encoded(encoded_image, codec_specific_info,
fragmentation);
return 0;
if (callback_) {
return callback_->OnEncodedImage(encoded_image, codec_specific_info,
fragmentation);
}
return Result(Result::ERROR_SEND_FAILED);
}
private:


@ -93,13 +93,13 @@ class EncodedImageCallbackImpl : public EncodedImageCallback {
virtual ~EncodedImageCallbackImpl() {}
int32_t Encoded(const EncodedImage& encoded_image,
const CodecSpecificInfo* codec_specific_info,
const RTPFragmentationHeader* fragmentation) override {
Result OnEncodedImage(const EncodedImage& encoded_image,
const CodecSpecificInfo* codec_specific_info,
const RTPFragmentationHeader* fragmentation) override {
assert(codec_specific_info);
frame_data_.push_back(
FrameData(encoded_image._length, *codec_specific_info));
return 0;
return Result(Result::OK, encoded_image._timeStamp);
}
void Reset() {