In GenericEncoder enable timing frames for encoders with internal source
Bug: webrtc:9058
Change-Id: Iab75238cef9d8683d3f78b045d24dcca71427e14
Reviewed-on: https://webrtc-review.googlesource.com/64446
Reviewed-by: Erik Språng <sprang@webrtc.org>
Commit-Queue: Ilya Nikolaevskiy <ilnik@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#22640}
Commit: e24c41ea45
Parent: dc52651911
Committed by: Commit Bot
@@ -766,6 +766,7 @@ if (rtc_include_tests) {
       "../../rtc_base:checks",
       "../../rtc_base:rtc_base",
       "../../rtc_base:rtc_base_approved",
+      "../../rtc_base:rtc_base_tests_utils",
       "../../rtc_base:rtc_numerics",
       "../../rtc_base:rtc_task_queue",
       "../../system_wrappers",

@@ -261,6 +261,136 @@ void VCMEncodedFrameCallback::OnEncodeStarted(uint32_t rtp_timestamp,
       rtp_timestamp, capture_time_ms, rtc::TimeMillis());
 }
 
+rtc::Optional<int64_t> VCMEncodedFrameCallback::ExtractEncodeStartTime(
+    size_t simulcast_svc_idx,
+    EncodedImage* encoded_image) {
+  rtc::Optional<int64_t> result;
+  size_t num_simulcast_svc_streams = timing_frames_info_.size();
+  if (simulcast_svc_idx < num_simulcast_svc_streams) {
+    auto encode_start_list =
+        &timing_frames_info_[simulcast_svc_idx].encode_start_list;
+    // Skip frames for which there was OnEncodeStarted but no OnEncodedImage
+    // call. These are dropped by encoder internally.
+    // Because some hardware encoders don't preserve capture timestamp we
+    // use RTP timestamps here.
+    while (!encode_start_list->empty() &&
+           IsNewerTimestamp(encoded_image->_timeStamp,
+                            encode_start_list->front().rtp_timestamp)) {
+      post_encode_callback_->OnDroppedFrame(DropReason::kDroppedByEncoder);
+      encode_start_list->pop_front();
+    }
+    if (encode_start_list->size() > 0 &&
+        encode_start_list->front().rtp_timestamp == encoded_image->_timeStamp) {
+      result.emplace(encode_start_list->front().encode_start_time_ms);
+      if (encoded_image->capture_time_ms_ !=
+          encode_start_list->front().capture_time_ms) {
+        // Force correct capture timestamp.
+        encoded_image->capture_time_ms_ =
+            encode_start_list->front().capture_time_ms;
+        ++incorrect_capture_time_logged_messages_;
+        if (incorrect_capture_time_logged_messages_ <=
+                kMessagesThrottlingThreshold ||
+            incorrect_capture_time_logged_messages_ % kThrottleRatio == 0) {
+          RTC_LOG(LS_WARNING)
+              << "Encoder is not preserving capture timestamps.";
+          if (incorrect_capture_time_logged_messages_ ==
+              kMessagesThrottlingThreshold) {
+            RTC_LOG(LS_WARNING) << "Too many log messages. Further incorrect "
+                                   "timestamps warnings will be throttled.";
+          }
+        }
+      }
+      encode_start_list->pop_front();
+    } else {
+      ++reordered_frames_logged_messages_;
+      if (reordered_frames_logged_messages_ <= kMessagesThrottlingThreshold ||
+          reordered_frames_logged_messages_ % kThrottleRatio == 0) {
+        RTC_LOG(LS_WARNING) << "Frame with no encode started time recordings. "
+                               "Encoder may be reordering frames "
+                               "or not preserving RTP timestamps.";
+        if (reordered_frames_logged_messages_ == kMessagesThrottlingThreshold) {
+          RTC_LOG(LS_WARNING) << "Too many log messages. Further frames "
+                                 "reordering warnings will be throttled.";
+        }
+      }
+    }
+  }
+  return result;
+}
+
+void VCMEncodedFrameCallback::FillTimingInfo(size_t simulcast_svc_idx,
+                                             EncodedImage* encoded_image) {
+  rtc::Optional<size_t> outlier_frame_size;
+  rtc::Optional<int64_t> encode_start_ms;
+  uint8_t timing_flags = TimingFrameFlags::kNotTriggered;
+  {
+    rtc::CritScope crit(&timing_params_lock_);
+
+    // Encoders with internal sources do not call OnEncodeStarted
+    // |timing_frames_info_| may be not filled here.
+    if (!internal_source_) {
+      encode_start_ms =
+          ExtractEncodeStartTime(simulcast_svc_idx, encoded_image);
+    }
+
+    size_t target_bitrate =
+        timing_frames_info_[simulcast_svc_idx].target_bitrate_bytes_per_sec;
+    if (framerate_ > 0 && target_bitrate > 0) {
+      // framerate and target bitrate were reported by encoder.
+      size_t average_frame_size = target_bitrate / framerate_;
+      outlier_frame_size.emplace(
+          average_frame_size * timing_frames_thresholds_.outlier_ratio_percent /
+          100);
+    }
+
+    // Outliers trigger timing frames, but do not affect scheduled timing
+    // frames.
+    if (outlier_frame_size && encoded_image->_length >= *outlier_frame_size) {
+      timing_flags |= TimingFrameFlags::kTriggeredBySize;
+    }
+
+    // Check if it's time to send a timing frame.
+    int64_t timing_frame_delay_ms =
+        encoded_image->capture_time_ms_ - last_timing_frame_time_ms_;
+    // Trigger threshold if it's a first frame, too long passed since the last
+    // timing frame, or we already sent timing frame on a different simulcast
+    // stream with the same capture time.
+    if (last_timing_frame_time_ms_ == -1 ||
+        timing_frame_delay_ms >= timing_frames_thresholds_.delay_ms ||
+        timing_frame_delay_ms == 0) {
+      timing_flags = TimingFrameFlags::kTriggeredByTimer;
+      last_timing_frame_time_ms_ = encoded_image->capture_time_ms_;
+    }
+  }  // rtc::CritScope crit(&timing_params_lock_);
+
+  int64_t now_ms = rtc::TimeMillis();
+  // Workaround for chromoting encoder: it passes encode start and finished
+  // timestamps in |timing_| field, but they (together with capture timestamp)
+  // are not in the WebRTC clock.
+  if (internal_source_ && encoded_image->timing_.encode_finish_ms > 0 &&
+      encoded_image->timing_.encode_start_ms > 0) {
+    int64_t clock_offset_ms = now_ms - encoded_image->timing_.encode_finish_ms;
+    // Translate capture timestamp to local WebRTC clock.
+    encoded_image->capture_time_ms_ += clock_offset_ms;
+    encoded_image->_timeStamp =
+        static_cast<uint32_t>(encoded_image->capture_time_ms_ * 90);
+    encode_start_ms.emplace(encoded_image->timing_.encode_start_ms +
+                            clock_offset_ms);
+  }
+
+  // If encode start is not available that means that encoder uses internal
+  // source. In that case capture timestamp may be from a different clock with a
+  // drift relative to rtc::TimeMillis(). We can't use it for Timing frames,
+  // because to being sent in the network capture time required to be less than
+  // all the other timestamps.
+  if (encode_start_ms) {
+    encoded_image->SetEncodeTime(*encode_start_ms, now_ms);
+    encoded_image->timing_.flags = timing_flags;
+  } else {
+    encoded_image->timing_.flags = TimingFrameFlags::kInvalid;
+  }
+}
+
 EncodedImageCallback::Result VCMEncodedFrameCallback::OnEncodedImage(
     const EncodedImage& encoded_image,
     const CodecSpecificInfo* codec_specific,
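
The core of the internal-source path added above is a clock translation: the encoder reports encode start/finish (and capture) times on its own clock, and FillTimingInfo() re-anchors them to the WebRTC clock by treating "now" as the encode finish time. Below is a minimal standalone sketch of just that arithmetic; the EncoderTimestamps/WebrtcTimestamps structs and TranslateToLocalClock() are illustrative stand-ins for EncodedImage and rtc::TimeMillis(), not WebRTC APIs.

#include <cstdint>
#include <iostream>

// Hypothetical stand-ins for the fields used by FillTimingInfo().
struct EncoderTimestamps {  // Reported on the encoder's own clock.
  int64_t capture_ms;
  int64_t encode_start_ms;
  int64_t encode_finish_ms;
};

struct WebrtcTimestamps {  // Re-anchored to the local WebRTC clock.
  int64_t capture_ms;
  uint32_t rtp_timestamp;  // 90 kHz units, as in EncodedImage::_timeStamp.
  int64_t encode_start_ms;
  int64_t encode_finish_ms;
};

// Re-anchor encoder-clock timestamps by assuming the frame finished encoding
// "now" (now_ms comes from the local clock, e.g. rtc::TimeMillis()).
WebrtcTimestamps TranslateToLocalClock(const EncoderTimestamps& in,
                                       int64_t now_ms) {
  const int64_t clock_offset_ms = now_ms - in.encode_finish_ms;
  WebrtcTimestamps out;
  out.capture_ms = in.capture_ms + clock_offset_ms;
  out.rtp_timestamp = static_cast<uint32_t>(out.capture_ms * 90);
  out.encode_start_ms = in.encode_start_ms + clock_offset_ms;
  out.encode_finish_ms = now_ms;
  return out;
}

int main() {
  // Encoder clock: frame captured at t=1, encoding ran from t=3 to t=11.
  EncoderTimestamps enc{1, 3, 11};
  // Local clock reads 1234 ms when OnEncodedImage() runs.
  WebrtcTimestamps local = TranslateToLocalClock(enc, 1234);
  // Capture lands encode_finish - capture = 10 ms in the past: 1224.
  std::cout << local.capture_ms << " " << local.encode_start_ms << "\n";
  return 0;
}
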
@@ -281,110 +411,7 @@ EncodedImageCallback::Result VCMEncodedFrameCallback::OnEncodedImage(
 
   EncodedImage image_copy(encoded_image);
 
-  rtc::Optional<size_t> outlier_frame_size;
-  rtc::Optional<int64_t> encode_start_ms;
-  size_t num_simulcast_svc_streams = 1;
-  uint8_t timing_flags = TimingFrameFlags::kNotTriggered;
-  if (!internal_source_) {
-    rtc::CritScope crit(&timing_params_lock_);
-
-    // Encoders with internal sources do not call OnEncodeStarted and
-    // OnFrameRateChanged. |timing_frames_info_| may be not filled here.
-    num_simulcast_svc_streams = timing_frames_info_.size();
-    if (simulcast_svc_idx < num_simulcast_svc_streams) {
-      auto encode_start_list =
-          &timing_frames_info_[simulcast_svc_idx].encode_start_list;
-      // Skip frames for which there was OnEncodeStarted but no OnEncodedImage
-      // call. These are dropped by encoder internally.
-      // Because some hardware encoders don't preserve capture timestamp we use
-      // RTP timestamps here.
-      while (!encode_start_list->empty() &&
-             IsNewerTimestamp(image_copy._timeStamp,
-                              encode_start_list->front().rtp_timestamp)) {
-        post_encode_callback_->OnDroppedFrame(DropReason::kDroppedByEncoder);
-        encode_start_list->pop_front();
-      }
-      if (encode_start_list->size() > 0 &&
-          encode_start_list->front().rtp_timestamp == image_copy._timeStamp) {
-        encode_start_ms.emplace(
-            encode_start_list->front().encode_start_time_ms);
-        if (image_copy.capture_time_ms_ !=
-            encode_start_list->front().capture_time_ms) {
-          // Force correct capture timestamp.
-          image_copy.capture_time_ms_ =
-              encode_start_list->front().capture_time_ms;
-          ++incorrect_capture_time_logged_messages_;
-          if (incorrect_capture_time_logged_messages_ <=
-                  kMessagesThrottlingThreshold ||
-              incorrect_capture_time_logged_messages_ % kThrottleRatio == 0) {
-            RTC_LOG(LS_WARNING)
-                << "Encoder is not preserving capture timestamps.";
-            if (incorrect_capture_time_logged_messages_ ==
-                kMessagesThrottlingThreshold) {
-              RTC_LOG(LS_WARNING) << "Too many log messages. Further incorrect "
-                                     "timestamps warnings will be throttled.";
-            }
-          }
-        }
-        encode_start_list->pop_front();
-      } else {
-        ++reordered_frames_logged_messages_;
-        if (reordered_frames_logged_messages_ <= kMessagesThrottlingThreshold ||
-            reordered_frames_logged_messages_ % kThrottleRatio == 0) {
-          RTC_LOG(LS_WARNING)
-              << "Frame with no encode started time recordings. "
-                 "Encoder may be reordering frames "
-                 "or not preserving RTP timestamps.";
-          if (reordered_frames_logged_messages_ ==
-              kMessagesThrottlingThreshold) {
-            RTC_LOG(LS_WARNING) << "Too many log messages. Further frames "
-                                   "reordering warnings will be throttled.";
-          }
-        }
-      }
-
-      size_t target_bitrate =
-          timing_frames_info_[simulcast_svc_idx].target_bitrate_bytes_per_sec;
-      if (framerate_ > 0 && target_bitrate > 0) {
-        // framerate and target bitrate were reported by encoder.
-        size_t average_frame_size = target_bitrate / framerate_;
-        outlier_frame_size.emplace(
-            average_frame_size *
-            timing_frames_thresholds_.outlier_ratio_percent / 100);
-      }
-    }
-
-    // Check if it's time to send a timing frame.
-    int64_t timing_frame_delay_ms =
-        image_copy.capture_time_ms_ - last_timing_frame_time_ms_;
-    // Trigger threshold if it's a first frame, too long passed since the last
-    // timing frame, or we already sent timing frame on a different simulcast
-    // stream with the same capture time.
-    if (last_timing_frame_time_ms_ == -1 ||
-        timing_frame_delay_ms >= timing_frames_thresholds_.delay_ms ||
-        timing_frame_delay_ms == 0) {
-      timing_flags = TimingFrameFlags::kTriggeredByTimer;
-      last_timing_frame_time_ms_ = image_copy.capture_time_ms_;
-    }
-
-    // Outliers trigger timing frames, but do not affect scheduled timing
-    // frames.
-    if (outlier_frame_size && image_copy._length >= *outlier_frame_size) {
-      timing_flags |= TimingFrameFlags::kTriggeredBySize;
-    }
-  }
-
-  // If encode start is not available that means that encoder uses internal
-  // source. In that case capture timestamp may be from a different clock with a
-  // drift relative to rtc::TimeMillis(). We can't use it for Timing frames,
-  // because to being sent in the network capture time required to be less than
-  // all the other timestamps.
-  if (encode_start_ms) {
-    image_copy.SetEncodeTime(*encode_start_ms, rtc::TimeMillis());
-    image_copy.timing_.flags = timing_flags;
-  } else {
-    image_copy.timing_.flags = TimingFrameFlags::kInvalid;
-  }
+  FillTimingInfo(simulcast_svc_idx, &image_copy);
 
   // Piggyback ALR experiment group id and simulcast id into the content type.
   uint8_t experiment_id =
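
The logic removed here (and now hosted in FillTimingInfo()) decides when a frame becomes a timing frame: either on a schedule (first frame, at least delay_ms since the last timing frame, or a simulcast sibling with the same capture time) or because the frame is a size outlier relative to the average size implied by target bitrate and framerate. A condensed standalone sketch of that decision follows; DecideTimingFlags() and the flag constants are illustrative stand-ins with assumed values, not the real TimingFrameFlags.

#include <cstddef>
#include <cstdint>
#include <iostream>

// Hypothetical flag values for illustration only.
constexpr uint8_t kNotTriggered = 0;
constexpr uint8_t kTriggeredByTimer = 1 << 0;
constexpr uint8_t kTriggeredBySize = 1 << 1;

uint8_t DecideTimingFlags(int64_t capture_time_ms,
                          int64_t last_timing_frame_time_ms,
                          int64_t delay_threshold_ms,
                          size_t frame_size_bytes,
                          size_t target_bitrate_bytes_per_sec,
                          int framerate_fps,
                          int outlier_ratio_percent) {
  uint8_t flags = kNotTriggered;

  // Size outlier: frame much larger than the average implied by bitrate/fps.
  if (framerate_fps > 0 && target_bitrate_bytes_per_sec > 0) {
    size_t average_frame_size = target_bitrate_bytes_per_sec / framerate_fps;
    size_t outlier_threshold =
        average_frame_size * outlier_ratio_percent / 100;
    if (frame_size_bytes >= outlier_threshold)
      flags |= kTriggeredBySize;
  }

  // Scheduled trigger: first frame, long enough since the last timing frame,
  // or same capture time as an already sent timing frame (delay == 0).
  int64_t timing_frame_delay_ms = capture_time_ms - last_timing_frame_time_ms;
  if (last_timing_frame_time_ms == -1 ||
      timing_frame_delay_ms >= delay_threshold_ms ||
      timing_frame_delay_ms == 0) {
    flags = kTriggeredByTimer;  // Assignment, not OR, mirroring the source.
  }
  return flags;
}

int main() {
  // 500 kB/s at 25 fps -> 20000 bytes average; 250% outlier threshold.
  std::cout << static_cast<int>(DecideTimingFlags(
                   /*capture_time_ms=*/1000, /*last_timing_frame_time_ms=*/500,
                   /*delay_threshold_ms=*/200, /*frame_size_bytes=*/60000,
                   /*target_bitrate_bytes_per_sec=*/500000, /*framerate_fps=*/25,
                   /*outlier_ratio_percent=*/250))
            << "\n";  // Prints 1: the timer trigger overwrites the size flag.
  return 0;
}
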
@@ -77,6 +77,14 @@ class VCMEncodedFrameCallback : public EncodedImageCallback {
   }
 
  private:
+  // For non-internal-source encoders, returns encode started time and fixes
+  // capture timestamp for the frame, if corrupted by the encoder.
+  rtc::Optional<int64_t> ExtractEncodeStartTime(size_t simulcast_svc_idx,
+                                                EncodedImage* encoded_image)
+      RTC_EXCLUSIVE_LOCKS_REQUIRED(timing_params_lock_);
+
+  void FillTimingInfo(size_t simulcast_svc_idx, EncodedImage* encoded_image);
+
   rtc::CriticalSection timing_params_lock_;
   bool internal_source_;
   EncodedImageCallback* const post_encode_callback_;
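
The header change above also documents a locking contract: ExtractEncodeStartTime() carries RTC_EXCLUSIVE_LOCKS_REQUIRED(timing_params_lock_), so it may only be called with the lock already held, while FillTimingInfo() acquires the lock itself before delegating. A generic sketch of that pattern using std::mutex in place of rtc::CriticalSection; the class and member names are illustrative, not the WebRTC API.

#include <cstdint>
#include <deque>
#include <mutex>

// Illustrative class mirroring the locking split: the public methods take the
// lock, the private helper assumes it is already held.
class FrameTimingTracker {
 public:
  void OnEncodeStarted(uint32_t rtp_timestamp, int64_t start_ms) {
    std::lock_guard<std::mutex> lock(timing_params_lock_);
    encode_start_list_.push_back({rtp_timestamp, start_ms});
  }

  void FillTimingInfo(uint32_t rtp_timestamp) {
    std::lock_guard<std::mutex> lock(timing_params_lock_);
    // Safe to call: the lock is held. WebRTC enforces this statically with
    // RTC_EXCLUSIVE_LOCKS_REQUIRED; here it is only a documented contract.
    ExtractEncodeStartTime(rtp_timestamp);
  }

 private:
  struct EncodeStart {
    uint32_t rtp_timestamp;
    int64_t start_ms;
  };

  // Caller must hold |timing_params_lock_|.
  void ExtractEncodeStartTime(uint32_t rtp_timestamp) {
    // Drop entries for frames the encoder never delivered.
    while (!encode_start_list_.empty() &&
           encode_start_list_.front().rtp_timestamp != rtp_timestamp) {
      encode_start_list_.pop_front();
    }
    if (!encode_start_list_.empty())
      encode_start_list_.pop_front();  // Consume the matching entry.
  }

  std::mutex timing_params_lock_;
  std::deque<EncodeStart> encode_start_list_;
};

int main() {
  FrameTimingTracker tracker;
  tracker.OnEncodeStarted(/*rtp_timestamp=*/90, /*start_ms=*/100);
  tracker.FillTimingInfo(/*rtp_timestamp=*/90);
  return 0;
}
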
@@ -13,6 +13,7 @@
 #include "modules/video_coding/encoded_frame.h"
 #include "modules/video_coding/generic_encoder.h"
 #include "modules/video_coding/include/video_coding_defines.h"
+#include "rtc_base/fakeclock.h"
 #include "test/gtest.h"
 
 namespace webrtc {

@@ -208,6 +209,46 @@ TEST(TestVCMEncodedFrameCallback, NoTimingFrameIfNoEncodeStartTime) {
   EXPECT_FALSE(sink.WasTimingFrame());
 }
 
+TEST(TestVCMEncodedFrameCallback, AdjustsCaptureTimeForInternalSourceEncoder) {
+  rtc::ScopedFakeClock clock;
+  clock.SetTimeMicros(1234567);
+  EncodedImage image;
+  CodecSpecificInfo codec_specific;
+  const int64_t kEncodeStartDelayMs = 2;
+  const int64_t kEncodeFinishDelayMs = 10;
+  int64_t timestamp = 1;
+  image._length = 500;
+  image.capture_time_ms_ = timestamp;
+  image._timeStamp = static_cast<uint32_t>(timestamp * 90);
+  codec_specific.codecType = kVideoCodecGeneric;
+  codec_specific.codecSpecific.generic.simulcast_idx = 0;
+  FakeEncodedImageCallback sink;
+  VCMEncodedFrameCallback callback(&sink, nullptr);
+  callback.SetInternalSource(true);
+  VideoCodec::TimingFrameTriggerThresholds thresholds;
+  thresholds.delay_ms = 1;  // Make all frames timing frames.
+  callback.SetTimingFramesThresholds(thresholds);
+  callback.OnTargetBitrateChanged(500, 0);
+
+  // Verify a single frame without encode timestamps isn't a timing frame.
+  callback.OnEncodedImage(image, &codec_specific, nullptr);
+  EXPECT_FALSE(sink.WasTimingFrame());
+
+  // New frame, but this time with encode timestamps set in timing_.
+  // This should be a timing frame.
+  image.capture_time_ms_ = ++timestamp;
+  image._timeStamp = static_cast<uint32_t>(timestamp * 90);
+  image.timing_.encode_start_ms = timestamp + kEncodeStartDelayMs;
+  image.timing_.encode_finish_ms = timestamp + kEncodeFinishDelayMs;
+  callback.OnEncodedImage(image, &codec_specific, nullptr);
+  EXPECT_TRUE(sink.WasTimingFrame());
+  // Frame is captured kEncodeFinishDelayMs before it's encoded, so restored
+  // capture timestamp should be kEncodeFinishDelayMs in the past.
+  EXPECT_EQ(sink.GetLastCaptureTimestamp(),
+            clock.TimeNanos() / rtc::kNumNanosecsPerMillisec -
+                kEncodeFinishDelayMs);
+}
+
 TEST(TestVCMEncodedFrameCallback, NotifiesAboutDroppedFrames) {
   EncodedImage image;
   CodecSpecificInfo codec_specific;
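
As a sanity check on the expectation at the end of the new test: the fake clock sits at 1234567 us, so OnEncodedImage() sees now_ms = 1234; the second frame reports encode_finish_ms = timestamp + kEncodeFinishDelayMs = 12, giving a clock offset of 1222 and a restored capture time of 2 + 1222 = 1224 = now_ms - kEncodeFinishDelayMs. A tiny standalone check of that arithmetic, with plain integers standing in for the WebRTC clock and EncodedImage fields:

#include <cassert>
#include <cstdint>
#include <iostream>

int main() {
  const int64_t kEncodeFinishDelayMs = 10;
  const int64_t now_ms = 1234567 / 1000;  // Fake clock, microseconds -> ms.
  const int64_t timestamp = 2;            // Second frame in the test.

  // Values the encoder reports on its own clock.
  const int64_t capture_ms = timestamp;
  const int64_t encode_finish_ms = timestamp + kEncodeFinishDelayMs;

  // Translation performed by FillTimingInfo() for internal-source encoders.
  const int64_t clock_offset_ms = now_ms - encode_finish_ms;
  const int64_t restored_capture_ms = capture_ms + clock_offset_ms;

  // Matches the EXPECT_EQ in AdjustsCaptureTimeForInternalSourceEncoder.
  assert(restored_capture_ms == now_ms - kEncodeFinishDelayMs);
  std::cout << restored_capture_ms << "\n";  // Prints 1224.
  return 0;
}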