Implement timing frames.
Timing information is gathered in EncodedImage, starting at the encoders, then carried over the network in an RTP header extension, and finally collected at the GenericDecoder. Actual reporting and tests will follow in later CLs.

BUG=webrtc:7594
Review-Url: https://codereview.webrtc.org/2911193002
Cr-Commit-Position: refs/heads/master@{#18659}
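For context, the flow this CL sets up is: the encoder decides per frame whether it is a timing frame and stamps its timing data onto the EncodedImage, the RTP sender forwards those stamps in a header extension, and the GenericDecoder collects them on the receive side. The sketch below illustrates only the encoder-side marking step; apart from is_timing_frame (which appears in the diff below), the field names, the Timing struct layout, and the "at most one timing frame per interval" policy are illustrative assumptions, not the exact types from this CL.

// Minimal sketch of encoder-side timing-frame marking, under the
// assumptions stated above.
#include <cstdint>

struct EncodedImageSketch {
  struct Timing {
    bool is_timing_frame = false;
    int64_t encode_start_ms = 0;   // assumed field name
    int64_t encode_finish_ms = 0;  // assumed field name
  } timing_;
};

class TimingFrameMarker {
 public:
  explicit TimingFrameMarker(int64_t min_interval_ms)
      : min_interval_ms_(min_interval_ms) {}

  // Called by an encoder wrapper once a frame has been encoded.
  void OnFrameEncoded(EncodedImageSketch* image,
                      int64_t encode_start_ms,
                      int64_t encode_finish_ms) {
    // Mark at most one frame per interval so the header-extension overhead
    // and the receive-side bookkeeping stay small.
    if (encode_finish_ms - last_timing_frame_ms_ >= min_interval_ms_) {
      image->timing_.is_timing_frame = true;
      image->timing_.encode_start_ms = encode_start_ms;
      image->timing_.encode_finish_ms = encode_finish_ms;
      last_timing_frame_ms_ = encode_finish_ms;
    } else {
      image->timing_.is_timing_frame = false;
    }
  }

 private:
  const int64_t min_interval_ms_;
  int64_t last_timing_frame_ms_ = 0;
};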
@@ -720,9 +720,9 @@ void H264VideoToolboxEncoder::OnEncodedFrame(
  frame.capture_time_ms_ = render_time_ms;
  frame._timeStamp = timestamp;
  frame.rotation_ = rotation;
  frame.content_type_ =
      (mode_ == kScreensharing) ? VideoContentType::SCREENSHARE : VideoContentType::UNSPECIFIED;
  frame.timing_.is_timing_frame = false;

  h264_bitstream_parser_.ParseBitstream(buffer->data(), buffer->size());
  h264_bitstream_parser_.GetLastSliceQp(&frame.qp_);
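This encoder explicitly marks every frame as a non-timing frame, so no timing data is forwarded from here. For the frames that are marked, the stamps eventually have to fit into a small fixed-size RTP header extension, so a natural encoding is to send millisecond deltas relative to capture time rather than absolute timestamps. The sketch below shows only that idea; the field selection, 16-bit widths, and the WriteU16 helper are assumptions for illustration and do not reproduce the extension layout added by this CL.

// Hypothetical packing of timing deltas into an RTP header extension
// payload. Field set and widths are illustrative assumptions.
#include <cstddef>
#include <cstdint>

// Writes a big-endian uint16 into |buf|.
inline void WriteU16(uint8_t* buf, uint16_t value) {
  buf[0] = static_cast<uint8_t>(value >> 8);
  buf[1] = static_cast<uint8_t>(value & 0xff);
}

// Packs encode start/finish as deltas from capture time, clamped to 16 bits.
// Returns the number of bytes written (fixed at 4 in this sketch).
size_t PackVideoTiming(int64_t capture_time_ms,
                       int64_t encode_start_ms,
                       int64_t encode_finish_ms,
                       uint8_t buf[4]) {
  auto clamp16 = [](int64_t v) -> uint16_t {
    if (v < 0) return 0;
    if (v > 0xffff) return 0xffff;
    return static_cast<uint16_t>(v);
  };
  WriteU16(buf, clamp16(encode_start_ms - capture_time_ms));
  WriteU16(buf + 2, clamp16(encode_finish_ms - capture_time_ms));
  return 4;
}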