Add ability to downscale content to improve quality.
BUG=3712
R=marpan@google.com, stefan@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/18169004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@7164 4adac7df-926f-26a2-2b94-8c16560cd09d
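For orientation before the diff below: the new QualityScaler is hooked into VP8EncoderImpl at four points, and the sketch that follows mirrors that call order with stand-in types. Frame and StubQualityScaler here are hypothetical placeholders, not the actual webrtc classes; the real scaling logic lives in webrtc/modules/video_coding/utility/quality_scaler.h, which this change includes.

// Minimal sketch of the call order introduced by this change.
// Stand-in types only; the stub forwards the frame and records feedback,
// it does not actually scale anything.
#include <cstdio>

struct Frame {  // hypothetical stand-in for I420VideoFrame
  int width;
  int height;
};

class StubQualityScaler {  // hypothetical stand-in for webrtc::QualityScaler
 public:
  void Init(int max_qp) { max_qp_ = max_qp; }                  // called from InitEncode()
  void ReportFramerate(int fps) { fps_ = fps; }                // InitEncode() and SetRates()
  const Frame& GetScaledFrame(const Frame& in) { return in; }  // Encode(), before libvpx
  void ReportEncodedFrame(int qp) { last_qp_ = qp; }           // GetEncodedPartitions()
  void ReportDroppedFrame() { ++dropped_frames_; }             // GetEncodedPartitions()

 private:
  int max_qp_ = 0;
  int fps_ = 0;
  int last_qp_ = 0;
  int dropped_frames_ = 0;
};

int main() {
  StubQualityScaler quality_scaler;
  quality_scaler.Init(56);             // codec_.qpMax
  quality_scaler.ReportFramerate(30);  // codec_.maxFramerate

  const Frame input_frame{640, 480};
  const Frame& frame = quality_scaler.GetScaledFrame(input_frame);
  std::printf("encoding %dx%d\n", frame.width, frame.height);

  quality_scaler.ReportEncodedFrame(30);  // or ReportDroppedFrame() on a drop
  return 0;
}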
@@ -703,34 +703,26 @@ TEST_F(VideoProcessorIntegrationTest,
                          rc_metrics);
 }
 
-// Run with no packet loss, at low bitrate, then increase rate somewhat.
-// Key frame is thrown in every 120 frames. Can expect some frame drops after
-// key frame, even at high rate. The internal spatial resizer is on, so expect
-// spatial resize down at first key frame, and back up at second key frame.
-// Error_concealment is off in this test since there is a memory leak with
-// resizing and error concealment.
+// Run with no packet loss, at low bitrate. During this time we should've
+// resized once.
 TEST_F(VideoProcessorIntegrationTest,
        DISABLED_ON_ANDROID(ProcessNoLossSpatialResizeFrameDrop)) {
   config_.networking_config.packet_loss_probability = 0;
   // Bitrate and frame rate profile.
   RateProfile rate_profile;
-  SetRateProfilePars(&rate_profile, 0, 100, 30, 0);
-  SetRateProfilePars(&rate_profile, 1, 200, 30, 120);
-  SetRateProfilePars(&rate_profile, 2, 200, 30, 240);
-  rate_profile.frame_index_rate_update[3] = kNbrFramesLong + 1;
+  SetRateProfilePars(&rate_profile, 0, 50, 30, 0);
+  rate_profile.frame_index_rate_update[1] = kNbrFramesLong + 1;
   rate_profile.num_frames = kNbrFramesLong;
   // Codec/network settings.
   CodecConfigPars process_settings;
-  SetCodecParameters(&process_settings, 0.0f, 120, 1, false, true, true, true);
-  // Metrics for expected quality.: lower quality on average from up-sampling
-  // the down-sampled portion of the run, in case resizer is on.
+  SetCodecParameters(
+      &process_settings, 0.0f, kNbrFramesLong, 1, false, true, true, true);
+  // Metrics for expected quality.
   QualityMetrics quality_metrics;
-  SetQualityMetrics(&quality_metrics, 29.0, 20.0, 0.75, 0.60);
+  SetQualityMetrics(&quality_metrics, 25.0, 15.0, 0.70, 0.40);
   // Metrics for rate control.
-  RateControlMetrics rc_metrics[3];
-  SetRateControlMetrics(rc_metrics, 0, 45, 30, 75, 20, 70, 0);
-  SetRateControlMetrics(rc_metrics, 1, 20, 35, 30, 20, 15, 1);
-  SetRateControlMetrics(rc_metrics, 2, 0, 30, 30, 15, 25, 1);
+  RateControlMetrics rc_metrics[1];
+  SetRateControlMetrics(rc_metrics, 0, 160, 60, 120, 20, 70, 1);
   ProcessFramesAndVerify(quality_metrics,
                          rate_profile,
                          process_settings,
@@ -105,6 +105,7 @@ int VP8EncoderImpl::SetRates(uint32_t new_bitrate_kbit,
   temporal_layers_->ConfigureBitrates(new_bitrate_kbit, codec_.maxBitrate,
                                       new_framerate, config_);
   codec_.maxFramerate = new_framerate;
+  quality_scaler_.ReportFramerate(new_framerate);
 
   // update encoder context
   if (vpx_codec_enc_config_set(encoder_, config_)) {
@@ -230,8 +231,8 @@ int VP8EncoderImpl::InitEncode(const VideoCodec* inst,
                                  30 : 0;
   config_->rc_end_usage = VPX_CBR;
   config_->g_pass = VPX_RC_ONE_PASS;
-  config_->rc_resize_allowed = inst->codecSpecific.VP8.automaticResizeOn ?
-      1 : 0;
+  // Handle resizing outside of libvpx.
+  config_->rc_resize_allowed = 0;
   config_->rc_min_quantizer = 2;
   config_->rc_max_quantizer = inst->qpMax;
   config_->rc_undershoot_pct = 100;
@@ -272,6 +273,8 @@ int VP8EncoderImpl::InitEncode(const VideoCodec* inst,
   cpu_speed_ = -12;
 #endif
   rps_->Init();
+  quality_scaler_.Init(codec_.qpMax);
+  quality_scaler_.ReportFramerate(codec_.maxFramerate);
   return InitAndSetControlSettings(inst);
 }
 
@@ -296,6 +299,7 @@ int VP8EncoderImpl::InitAndSetControlSettings(const VideoCodec* inst) {
   vpx_codec_control(encoder_, VP8E_SET_MAX_INTRA_BITRATE_PCT,
                     rc_max_intra_target_);
   inited_ = true;
+
   return WEBRTC_VIDEO_CODEC_OK;
 }
 
@@ -315,15 +319,15 @@ uint32_t VP8EncoderImpl::MaxIntraTarget(uint32_t optimalBuffersize) {
   return (targetPct < minIntraTh) ? minIntraTh: targetPct;
 }
 
-int VP8EncoderImpl::Encode(const I420VideoFrame& input_image,
+int VP8EncoderImpl::Encode(const I420VideoFrame& input_frame,
                            const CodecSpecificInfo* codec_specific_info,
                            const std::vector<VideoFrameType>* frame_types) {
-  TRACE_EVENT1("webrtc", "VP8::Encode", "timestamp", input_image.timestamp());
+  TRACE_EVENT1("webrtc", "VP8::Encode", "timestamp", input_frame.timestamp());
 
   if (!inited_) {
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
   }
-  if (input_image.IsZeroSize()) {
+  if (input_frame.IsZeroSize()) {
     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
   }
   if (encoded_complete_callback_ == NULL) {
@@ -336,25 +340,31 @@ int VP8EncoderImpl::Encode(const I420VideoFrame& input_image,
     frame_type = (*frame_types)[0];
   }
 
+  const I420VideoFrame& frame =
+      config_->rc_dropframe_thresh > 0 &&
+              codec_.codecSpecific.VP8.automaticResizeOn
+          ? quality_scaler_.GetScaledFrame(input_frame)
+          : input_frame;
+
   // Check for change in frame size.
-  if (input_image.width() != codec_.width ||
-      input_image.height() != codec_.height) {
-    int ret = UpdateCodecFrameSize(input_image);
+  if (frame.width() != codec_.width ||
+      frame.height() != codec_.height) {
+    int ret = UpdateCodecFrameSize(frame);
     if (ret < 0) {
       return ret;
     }
   }
   // Image in vpx_image_t format.
-  // Input image is const. VP8's raw image is not defined as const.
-  raw_->planes[PLANE_Y] = const_cast<uint8_t*>(input_image.buffer(kYPlane));
-  raw_->planes[PLANE_U] = const_cast<uint8_t*>(input_image.buffer(kUPlane));
-  raw_->planes[PLANE_V] = const_cast<uint8_t*>(input_image.buffer(kVPlane));
+  // Input frame is const. VP8's raw frame is not defined as const.
+  raw_->planes[PLANE_Y] = const_cast<uint8_t*>(frame.buffer(kYPlane));
+  raw_->planes[PLANE_U] = const_cast<uint8_t*>(frame.buffer(kUPlane));
+  raw_->planes[PLANE_V] = const_cast<uint8_t*>(frame.buffer(kVPlane));
   // TODO(mikhal): Stride should be set in initialization.
-  raw_->stride[VPX_PLANE_Y] = input_image.stride(kYPlane);
-  raw_->stride[VPX_PLANE_U] = input_image.stride(kUPlane);
-  raw_->stride[VPX_PLANE_V] = input_image.stride(kVPlane);
+  raw_->stride[VPX_PLANE_Y] = frame.stride(kYPlane);
+  raw_->stride[VPX_PLANE_U] = frame.stride(kUPlane);
+  raw_->stride[VPX_PLANE_V] = frame.stride(kVPlane);
 
-  int flags = temporal_layers_->EncodeFlags(input_image.timestamp());
+  int flags = temporal_layers_->EncodeFlags(frame.timestamp());
 
   bool send_keyframe = (frame_type == kKeyFrame);
   if (send_keyframe) {
@@ -370,11 +380,11 @@ int VP8EncoderImpl::Encode(const I420VideoFrame& input_image,
             codec_specific_info->codecSpecific.VP8.pictureIdRPSI);
       }
       if (codec_specific_info->codecSpecific.VP8.hasReceivedSLI) {
-        sendRefresh = rps_->ReceivedSLI(input_image.timestamp());
+        sendRefresh = rps_->ReceivedSLI(frame.timestamp());
       }
     }
     flags = rps_->EncodeFlags(picture_id_, sendRefresh,
-                              input_image.timestamp());
+                              frame.timestamp());
   }
 
   // TODO(holmer): Ideally the duration should be the timestamp diff of this
@@ -390,7 +400,7 @@ int VP8EncoderImpl::Encode(const I420VideoFrame& input_image,
   }
   timestamp_ += duration;
 
-  return GetEncodedPartitions(input_image);
+  return GetEncodedPartitions(frame);
 }
 
 int VP8EncoderImpl::UpdateCodecFrameSize(const I420VideoFrame& input_image) {
@@ -480,6 +490,11 @@ int VP8EncoderImpl::GetEncodedPartitions(const I420VideoFrame& input_image) {
     encoded_image_._encodedWidth = codec_.width;
     encoded_complete_callback_->Encoded(encoded_image_, &codec_specific,
                                         &frag_info);
+    int qp;
+    vpx_codec_control(encoder_, VP8E_GET_LAST_QUANTIZER_64, &qp);
+    quality_scaler_.ReportEncodedFrame(qp);
+  } else {
+    quality_scaler_.ReportDroppedFrame();
   }
   return WEBRTC_VIDEO_CODEC_OK;
 }
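The hunk above is the feedback half of the loop: every encoded frame reports its quantizer, and every dropped frame is reported as a drop, so the scaler can tell when the encoder is consistently starved for bits. The actual decision logic lives in quality_scaler.cc and is not part of this diff; the fragment below is only an illustrative guess at the kind of rule such feedback enables, with a made-up window size and threshold.

// Illustrative only: averages recently reported QP values and suggests a
// downscale when the encoder keeps operating near its maximum quantizer.
#include <cstddef>
#include <deque>
#include <numeric>

class QpTrendWatcher {
 public:
  explicit QpTrendWatcher(int max_qp) : high_qp_threshold_(max_qp * 3 / 4) {}

  void ReportEncodedFrame(int qp) { Push(qp); }
  void ReportDroppedFrame() { Push(high_qp_threshold_ + 1); }  // treat drops as bad

  bool ShouldDownscale() const {
    if (qps_.size() < kWindow)
      return false;
    const int sum = std::accumulate(qps_.begin(), qps_.end(), 0);
    return sum / static_cast<int>(qps_.size()) > high_qp_threshold_;
  }

 private:
  static constexpr std::size_t kWindow = 30;  // roughly one second at 30 fps
  void Push(int qp) {
    qps_.push_back(qp);
    if (qps_.size() > kWindow)
      qps_.pop_front();
  }
  std::deque<int> qps_;
  int high_qp_threshold_;
};

int main() {
  QpTrendWatcher watcher(/*max_qp=*/56);
  for (int i = 0; i < 30; ++i)
    watcher.ReportEncodedFrame(50);  // encoder pinned near its max QP
  return watcher.ShouldDownscale() ? 0 : 1;
}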
@@ -14,6 +14,7 @@
 #define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_IMPL_H_
 
 #include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
+#include "webrtc/modules/video_coding/utility/quality_scaler.h"
 
 // VPX forward declaration
 typedef struct vpx_codec_ctx vpx_codec_ctx_t;
@@ -139,6 +140,7 @@ class VP8EncoderImpl : public VP8Encoder {
   vpx_codec_ctx_t* encoder_;
   vpx_codec_enc_cfg_t* config_;
   vpx_image_t* raw_;
+  QualityScaler quality_scaler_;
 };  // end of VP8Encoder class
 