Add content type information to Encoded Images and add corresponding RTP extension header.

Use it to separate the UMA e2e delay metric for screenshare from that for video.
The content type extension is set based on encoder settings and processed by decoders.

Also,
Fix full-stack-tests to calculate RTT correctly, so the new metric can be tested.

BUG=webrtc:7420

Review-Url: https://codereview.webrtc.org/2772033002
Cr-Commit-Position: refs/heads/master@{#17640}
This commit is contained in:
ilnik
2017-04-11 01:46:04 -07:00
committed by Commit bot
parent 93cda2ebde
commit 64e739aeae
48 changed files with 465 additions and 91 deletions

View File

@ -85,6 +85,7 @@ class H264VideoToolboxEncoder : public H264Encoder {
uint32_t encoder_bitrate_bps_;
int32_t width_;
int32_t height_;
VideoCodecMode mode_;
const CFStringRef profile_;
H264BitstreamParser h264_bitstream_parser_;

View File

@ -364,6 +364,7 @@ int H264VideoToolboxEncoder::InitEncode(const VideoCodec* codec_settings,
width_ = codec_settings->width;
height_ = codec_settings->height;
mode_ = codec_settings->mode;
// We can only set average bitrate on the HW encoder.
target_bitrate_bps_ = codec_settings->startBitrate;
bitrate_adjuster_.SetTargetBitrateBps(target_bitrate_bps_);
@ -722,6 +723,9 @@ void H264VideoToolboxEncoder::OnEncodedFrame(
frame._timeStamp = timestamp;
frame.rotation_ = rotation;
frame.content_type_ =
(mode_ == kScreensharing) ? VideoContentType::SCREENSHARE : VideoContentType::UNSPECIFIED;
h264_bitstream_parser_.ParseBitstream(buffer->data(), buffer->size());
h264_bitstream_parser_.GetLastSliceQp(&frame.qp_);