Revert "In GenericEncoder enable timing frames for encoders with internal source"

This reverts commit e24c41ea45fef7a49a24c5d905957aabcd3ba028.

Reason for revert: Breaks downstream project.

Original change's description:
> In GenericEncoder enable timing frames for encoders with internal source
>
> Bug: webrtc:9058
> Change-Id: Iab75238cef9d8683d3f78b045d24dcca71427e14
> Reviewed-on: https://webrtc-review.googlesource.com/64446
> Reviewed-by: Erik Språng <sprang@webrtc.org>
> Commit-Queue: Ilya Nikolaevskiy <ilnik@webrtc.org>
> Cr-Commit-Position: refs/heads/master@{#22640}

TBR=ilnik@webrtc.org,sprang@webrtc.org


# Skipping CQ checks because MAC bots are out of commission right now.
No-Presubmit: True
No-Tree-Checks: True
No-Try: True

Bug: webrtc:9058
Change-Id: I1d6258066e81b37b05d0ad0ff41d792f93d17ad9
Reviewed-on: https://webrtc-review.googlesource.com/65660
Commit-Queue: Ilya Nikolaevskiy <ilnik@webrtc.org>
Reviewed-by: Ilya Nikolaevskiy <ilnik@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#22691}
Author: Ilya Nikolaevskiy
Committed by: Commit Bot on 2018-03-30 06:54:39 +00:00
Commit: 4157936823 (parent: 191bf5c653)
4 changed files with 104 additions and 181 deletions


@ -806,7 +806,6 @@ if (rtc_include_tests) {
"../../rtc_base:checks", "../../rtc_base:checks",
"../../rtc_base:rtc_base", "../../rtc_base:rtc_base",
"../../rtc_base:rtc_base_approved", "../../rtc_base:rtc_base_approved",
"../../rtc_base:rtc_base_tests_utils",
"../../rtc_base:rtc_numerics", "../../rtc_base:rtc_numerics",
"../../rtc_base:rtc_task_queue", "../../rtc_base:rtc_task_queue",
"../../system_wrappers", "../../system_wrappers",


@@ -248,136 +248,6 @@ void VCMEncodedFrameCallback::OnEncodeStarted(uint32_t rtp_timestamp,
       rtp_timestamp, capture_time_ms, rtc::TimeMillis());
 }
 
-rtc::Optional<int64_t> VCMEncodedFrameCallback::ExtractEncodeStartTime(
-    size_t simulcast_svc_idx,
-    EncodedImage* encoded_image) {
-  rtc::Optional<int64_t> result;
-  size_t num_simulcast_svc_streams = timing_frames_info_.size();
-  if (simulcast_svc_idx < num_simulcast_svc_streams) {
-    auto encode_start_list =
-        &timing_frames_info_[simulcast_svc_idx].encode_start_list;
-    // Skip frames for which there was an OnEncodeStarted but no OnEncodedImage
-    // call. These are dropped by the encoder internally.
-    // Because some hardware encoders don't preserve capture timestamps, we
-    // use RTP timestamps here.
-    while (!encode_start_list->empty() &&
-           IsNewerTimestamp(encoded_image->_timeStamp,
-                            encode_start_list->front().rtp_timestamp)) {
-      post_encode_callback_->OnDroppedFrame(DropReason::kDroppedByEncoder);
-      encode_start_list->pop_front();
-    }
-    if (encode_start_list->size() > 0 &&
-        encode_start_list->front().rtp_timestamp == encoded_image->_timeStamp) {
-      result.emplace(encode_start_list->front().encode_start_time_ms);
-      if (encoded_image->capture_time_ms_ !=
-          encode_start_list->front().capture_time_ms) {
-        // Force correct capture timestamp.
-        encoded_image->capture_time_ms_ =
-            encode_start_list->front().capture_time_ms;
-        ++incorrect_capture_time_logged_messages_;
-        if (incorrect_capture_time_logged_messages_ <=
-                kMessagesThrottlingThreshold ||
-            incorrect_capture_time_logged_messages_ % kThrottleRatio == 0) {
-          RTC_LOG(LS_WARNING)
-              << "Encoder is not preserving capture timestamps.";
-          if (incorrect_capture_time_logged_messages_ ==
-              kMessagesThrottlingThreshold) {
-            RTC_LOG(LS_WARNING) << "Too many log messages. Further incorrect "
-                                   "timestamps warnings will be throttled.";
-          }
-        }
-      }
-      encode_start_list->pop_front();
-    } else {
-      ++reordered_frames_logged_messages_;
-      if (reordered_frames_logged_messages_ <= kMessagesThrottlingThreshold ||
-          reordered_frames_logged_messages_ % kThrottleRatio == 0) {
-        RTC_LOG(LS_WARNING) << "Frame with no encode started time recordings. "
-                               "Encoder may be reordering frames "
-                               "or not preserving RTP timestamps.";
-        if (reordered_frames_logged_messages_ == kMessagesThrottlingThreshold) {
-          RTC_LOG(LS_WARNING) << "Too many log messages. Further frames "
-                                 "reordering warnings will be throttled.";
-        }
-      }
-    }
-  }
-  return result;
-}
-
-void VCMEncodedFrameCallback::FillTimingInfo(size_t simulcast_svc_idx,
-                                             EncodedImage* encoded_image) {
-  rtc::Optional<size_t> outlier_frame_size;
-  rtc::Optional<int64_t> encode_start_ms;
-  uint8_t timing_flags = TimingFrameFlags::kNotTriggered;
-  {
-    rtc::CritScope crit(&timing_params_lock_);
-    // Encoders with internal sources do not call OnEncodeStarted, so
-    // |timing_frames_info_| may not be filled here.
-    if (!internal_source_) {
-      encode_start_ms =
-          ExtractEncodeStartTime(simulcast_svc_idx, encoded_image);
-    }
-    size_t target_bitrate =
-        timing_frames_info_[simulcast_svc_idx].target_bitrate_bytes_per_sec;
-    if (framerate_ > 0 && target_bitrate > 0) {
-      // Framerate and target bitrate were reported by the encoder.
-      size_t average_frame_size = target_bitrate / framerate_;
-      outlier_frame_size.emplace(
-          average_frame_size * timing_frames_thresholds_.outlier_ratio_percent /
-          100);
-    }
-    // Outliers trigger timing frames, but do not affect scheduled timing
-    // frames.
-    if (outlier_frame_size && encoded_image->_length >= *outlier_frame_size) {
-      timing_flags |= TimingFrameFlags::kTriggeredBySize;
-    }
-    // Check if it's time to send a timing frame.
-    int64_t timing_frame_delay_ms =
-        encoded_image->capture_time_ms_ - last_timing_frame_time_ms_;
-    // Trigger a timing frame if this is the first frame, if too long has
-    // passed since the last timing frame, or if a timing frame was already
-    // sent on a different simulcast stream with the same capture time.
-    if (last_timing_frame_time_ms_ == -1 ||
-        timing_frame_delay_ms >= timing_frames_thresholds_.delay_ms ||
-        timing_frame_delay_ms == 0) {
-      timing_flags = TimingFrameFlags::kTriggeredByTimer;
-      last_timing_frame_time_ms_ = encoded_image->capture_time_ms_;
-    }
-  }  // rtc::CritScope crit(&timing_params_lock_);
-
-  int64_t now_ms = rtc::TimeMillis();
-  // Workaround for the chromoting encoder: it passes encode start and finish
-  // timestamps in the |timing_| field, but they (together with the capture
-  // timestamp) are not in the WebRTC clock.
-  if (internal_source_ && encoded_image->timing_.encode_finish_ms > 0 &&
-      encoded_image->timing_.encode_start_ms > 0) {
-    int64_t clock_offset_ms = now_ms - encoded_image->timing_.encode_finish_ms;
-    // Translate the capture timestamp to the local WebRTC clock.
-    encoded_image->capture_time_ms_ += clock_offset_ms;
-    encoded_image->_timeStamp =
-        static_cast<uint32_t>(encoded_image->capture_time_ms_ * 90);
-    encode_start_ms.emplace(encoded_image->timing_.encode_start_ms +
-                            clock_offset_ms);
-  }
-
-  // If the encode start time is not available, the encoder uses an internal
-  // source. In that case the capture timestamp may come from a different clock
-  // with a drift relative to rtc::TimeMillis(), so it cannot be used for timing
-  // frames: the capture time must be smaller than all the other timestamps the
-  // frame is sent with on the network.
-  if (encode_start_ms) {
-    encoded_image->SetEncodeTime(*encode_start_ms, now_ms);
-    encoded_image->timing_.flags = timing_flags;
-  } else {
-    encoded_image->timing_.flags = TimingFrameFlags::kInvalid;
-  }
-}
-
 EncodedImageCallback::Result VCMEncodedFrameCallback::OnEncodedImage(
     const EncodedImage& encoded_image,
     const CodecSpecificInfo* codec_specific,
@@ -398,7 +268,110 @@ EncodedImageCallback::Result VCMEncodedFrameCallback::OnEncodedImage(
   EncodedImage image_copy(encoded_image);
-  FillTimingInfo(simulcast_svc_idx, &image_copy);
+  rtc::Optional<size_t> outlier_frame_size;
+  rtc::Optional<int64_t> encode_start_ms;
+  size_t num_simulcast_svc_streams = 1;
+  uint8_t timing_flags = TimingFrameFlags::kNotTriggered;
+  if (!internal_source_) {
+    rtc::CritScope crit(&timing_params_lock_);
+    // Encoders with internal sources do not call OnEncodeStarted and
+    // OnFrameRateChanged, so |timing_frames_info_| may not be filled here.
+    num_simulcast_svc_streams = timing_frames_info_.size();
+    if (simulcast_svc_idx < num_simulcast_svc_streams) {
+      auto encode_start_list =
+          &timing_frames_info_[simulcast_svc_idx].encode_start_list;
+      // Skip frames for which there was an OnEncodeStarted but no
+      // OnEncodedImage call. These are dropped by the encoder internally.
+      // Because some hardware encoders don't preserve capture timestamps, we
+      // use RTP timestamps here.
+      while (!encode_start_list->empty() &&
+             IsNewerTimestamp(image_copy._timeStamp,
+                              encode_start_list->front().rtp_timestamp)) {
+        post_encode_callback_->OnDroppedFrame(DropReason::kDroppedByEncoder);
+        encode_start_list->pop_front();
+      }
+      if (encode_start_list->size() > 0 &&
+          encode_start_list->front().rtp_timestamp == image_copy._timeStamp) {
+        encode_start_ms.emplace(
+            encode_start_list->front().encode_start_time_ms);
+        if (image_copy.capture_time_ms_ !=
+            encode_start_list->front().capture_time_ms) {
+          // Force correct capture timestamp.
+          image_copy.capture_time_ms_ =
+              encode_start_list->front().capture_time_ms;
+          ++incorrect_capture_time_logged_messages_;
+          if (incorrect_capture_time_logged_messages_ <=
+                  kMessagesThrottlingThreshold ||
+              incorrect_capture_time_logged_messages_ % kThrottleRatio == 0) {
+            RTC_LOG(LS_WARNING)
+                << "Encoder is not preserving capture timestamps.";
+            if (incorrect_capture_time_logged_messages_ ==
+                kMessagesThrottlingThreshold) {
+              RTC_LOG(LS_WARNING) << "Too many log messages. Further incorrect "
+                                     "timestamps warnings will be throttled.";
+            }
+          }
+        }
+        encode_start_list->pop_front();
+      } else {
+        ++reordered_frames_logged_messages_;
+        if (reordered_frames_logged_messages_ <= kMessagesThrottlingThreshold ||
+            reordered_frames_logged_messages_ % kThrottleRatio == 0) {
+          RTC_LOG(LS_WARNING)
+              << "Frame with no encode started time recordings. "
+                 "Encoder may be reordering frames "
+                 "or not preserving RTP timestamps.";
+          if (reordered_frames_logged_messages_ ==
+              kMessagesThrottlingThreshold) {
+            RTC_LOG(LS_WARNING) << "Too many log messages. Further frames "
+                                   "reordering warnings will be throttled.";
+          }
+        }
+      }
+      size_t target_bitrate =
+          timing_frames_info_[simulcast_svc_idx].target_bitrate_bytes_per_sec;
+      if (framerate_ > 0 && target_bitrate > 0) {
+        // Framerate and target bitrate were reported by the encoder.
+        size_t average_frame_size = target_bitrate / framerate_;
+        outlier_frame_size.emplace(
+            average_frame_size *
+            timing_frames_thresholds_.outlier_ratio_percent / 100);
+      }
+    }
+    // Check if it's time to send a timing frame.
+    int64_t timing_frame_delay_ms =
+        image_copy.capture_time_ms_ - last_timing_frame_time_ms_;
+    // Trigger a timing frame if this is the first frame, if too long has
+    // passed since the last timing frame, or if a timing frame was already
+    // sent on a different simulcast stream with the same capture time.
+    if (last_timing_frame_time_ms_ == -1 ||
+        timing_frame_delay_ms >= timing_frames_thresholds_.delay_ms ||
+        timing_frame_delay_ms == 0) {
+      timing_flags = TimingFrameFlags::kTriggeredByTimer;
+      last_timing_frame_time_ms_ = image_copy.capture_time_ms_;
+    }
+    // Outliers trigger timing frames, but do not affect scheduled timing
+    // frames.
+    if (outlier_frame_size && image_copy._length >= *outlier_frame_size) {
+      timing_flags |= TimingFrameFlags::kTriggeredBySize;
+    }
+  }
+
+  // If the encode start time is not available, the encoder uses an internal
+  // source. In that case the capture timestamp may come from a different clock
+  // with a drift relative to rtc::TimeMillis(), so it cannot be used for timing
+  // frames: the capture time must be smaller than all the other timestamps the
+  // frame is sent with on the network.
+  if (encode_start_ms) {
+    image_copy.SetEncodeTime(*encode_start_ms, rtc::TimeMillis());
+    image_copy.timing_.flags = timing_flags;
+  } else {
+    image_copy.timing_.flags = TimingFrameFlags::kInvalid;
+  }
   // Piggyback ALR experiment group id and simulcast id into the content type.
   uint8_t experiment_id =

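For readers skimming the restored OnEncodedImage() logic above: a frame is marked as a timing frame by one of two triggers, a timer (first frame, enough delay since the last timing frame, or the same capture time as a timing frame already sent on another simulcast stream) and a size outlier (a frame much larger than the average size implied by the reported target bitrate and framerate). The sketch below restates just that decision in isolation; it is illustrative only, and the names (TimingFlags, TriggerState, DecideTimingFlags and their parameters) are hypothetical, not WebRTC API.

// Illustrative sketch only: standalone restatement of the timing-frame
// trigger logic from the hunk above. All names here are hypothetical.
#include <cstddef>
#include <cstdint>

enum TimingFlags : uint8_t {
  kNotTriggered = 0,
  kTriggeredByTimer = 1 << 0,
  kTriggeredBySize = 1 << 1,
};

struct TriggerState {
  int64_t last_timing_frame_time_ms = -1;  // -1 means "no timing frame yet".
};

// Decide whether a frame should carry timing information, mirroring the two
// triggers in the restored code: a periodic timer and a size outlier.
uint8_t DecideTimingFlags(TriggerState* state,
                          int64_t capture_time_ms,
                          size_t frame_size_bytes,
                          size_t average_frame_size_bytes,
                          int64_t delay_threshold_ms,
                          int outlier_ratio_percent) {
  uint8_t flags = kNotTriggered;
  const int64_t delay_ms = capture_time_ms - state->last_timing_frame_time_ms;
  // Timer trigger: first frame, enough time elapsed, or same capture time as
  // a timing frame already sent on another simulcast stream (delay == 0).
  if (state->last_timing_frame_time_ms == -1 ||
      delay_ms >= delay_threshold_ms || delay_ms == 0) {
    flags = kTriggeredByTimer;
    state->last_timing_frame_time_ms = capture_time_ms;
  }
  // Size trigger: unusually large frames are reported even when off-schedule.
  const size_t outlier_size =
      average_frame_size_bytes * outlier_ratio_percent / 100;
  if (average_frame_size_bytes > 0 && frame_size_bytes >= outlier_size) {
    flags |= kTriggeredBySize;
  }
  return flags;
}

Assigning the timer flag first and OR-ing in the size flag afterwards mirrors the comment in the diff: outliers trigger timing frames but do not affect the regular schedule.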

@@ -77,14 +77,6 @@ class VCMEncodedFrameCallback : public EncodedImageCallback {
   }

  private:
-  // For non-internal-source encoders, returns encode started time and fixes
-  // capture timestamp for the frame, if corrupted by the encoder.
-  rtc::Optional<int64_t> ExtractEncodeStartTime(size_t simulcast_svc_idx,
-                                                EncodedImage* encoded_image)
-      RTC_EXCLUSIVE_LOCKS_REQUIRED(timing_params_lock_);
-
-  void FillTimingInfo(size_t simulcast_svc_idx, EncodedImage* encoded_image);
-
   rtc::CriticalSection timing_params_lock_;
   bool internal_source_;
   EncodedImageCallback* const post_encode_callback_;


@ -13,7 +13,6 @@
#include "modules/video_coding/encoded_frame.h" #include "modules/video_coding/encoded_frame.h"
#include "modules/video_coding/generic_encoder.h" #include "modules/video_coding/generic_encoder.h"
#include "modules/video_coding/include/video_coding_defines.h" #include "modules/video_coding/include/video_coding_defines.h"
#include "rtc_base/fakeclock.h"
#include "test/gtest.h" #include "test/gtest.h"
namespace webrtc { namespace webrtc {
@@ -209,46 +208,6 @@ TEST(TestVCMEncodedFrameCallback, NoTimingFrameIfNoEncodeStartTime) {
   EXPECT_FALSE(sink.WasTimingFrame());
 }
 
-TEST(TestVCMEncodedFrameCallback, AdjustsCaptureTimeForInternalSourceEncoder) {
-  rtc::ScopedFakeClock clock;
-  clock.SetTimeMicros(1234567);
-  EncodedImage image;
-  CodecSpecificInfo codec_specific;
-  const int64_t kEncodeStartDelayMs = 2;
-  const int64_t kEncodeFinishDelayMs = 10;
-  int64_t timestamp = 1;
-  image._length = 500;
-  image.capture_time_ms_ = timestamp;
-  image._timeStamp = static_cast<uint32_t>(timestamp * 90);
-  codec_specific.codecType = kVideoCodecGeneric;
-  codec_specific.codecSpecific.generic.simulcast_idx = 0;
-  FakeEncodedImageCallback sink;
-  VCMEncodedFrameCallback callback(&sink, nullptr);
-  callback.SetInternalSource(true);
-  VideoCodec::TimingFrameTriggerThresholds thresholds;
-  thresholds.delay_ms = 1;  // Make all frames timing frames.
-  callback.SetTimingFramesThresholds(thresholds);
-  callback.OnTargetBitrateChanged(500, 0);
-  // Verify a single frame without encode timestamps isn't a timing frame.
-  callback.OnEncodedImage(image, &codec_specific, nullptr);
-  EXPECT_FALSE(sink.WasTimingFrame());
-  // New frame, but this time with encode timestamps set in timing_.
-  // This should be a timing frame.
-  image.capture_time_ms_ = ++timestamp;
-  image._timeStamp = static_cast<uint32_t>(timestamp * 90);
-  image.timing_.encode_start_ms = timestamp + kEncodeStartDelayMs;
-  image.timing_.encode_finish_ms = timestamp + kEncodeFinishDelayMs;
-  callback.OnEncodedImage(image, &codec_specific, nullptr);
-  EXPECT_TRUE(sink.WasTimingFrame());
-  // Frame is captured kEncodeFinishDelayMs before it's encoded, so the
-  // restored capture timestamp should be kEncodeFinishDelayMs in the past.
-  EXPECT_EQ(sink.GetLastCaptureTimestamp(),
-            clock.TimeNanos() / rtc::kNumNanosecsPerMillisec -
-                kEncodeFinishDelayMs);
-}
-
 TEST(TestVCMEncodedFrameCallback, NotifiesAboutDroppedFrames) {
   EncodedImage image;
   CodecSpecificInfo codec_specific;
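The test removed above (AdjustsCaptureTimeForInternalSourceEncoder) exercised the capture-time adjustment that the reverted change applied to internal-source encoders such as the chromoting encoder, whose |timing_| timestamps are not on the WebRTC clock. Roughly, that adjustment assumes the frame finished encoding "now", derives a clock offset from that, and shifts the capture time (and the 90 kHz RTP timestamp derived from it) by the same offset. Below is a minimal sketch under that assumption; the names (EncodeTimingMs, AdjustToLocalClock) are hypothetical, not the actual VCMEncodedFrameCallback internals.

// Illustrative sketch only: capture-time adjustment for internal-source
// encoders, as exercised by the removed test. All names are hypothetical.
#include <cstdint>

struct EncodeTimingMs {
  int64_t encode_start_ms;   // Reported on the encoder's own clock.
  int64_t encode_finish_ms;  // Reported on the encoder's own clock.
};

// Translate encoder-clock timestamps into the local clock: treat the frame as
// having finished encoding "now", derive the clock offset from that, and shift
// the capture time and the 90 kHz RTP timestamp derived from it by the offset.
void AdjustToLocalClock(const EncodeTimingMs& timing,
                        int64_t now_ms,
                        int64_t* capture_time_ms,
                        uint32_t* rtp_timestamp,
                        int64_t* encode_start_local_ms) {
  const int64_t clock_offset_ms = now_ms - timing.encode_finish_ms;
  *capture_time_ms += clock_offset_ms;
  *rtp_timestamp = static_cast<uint32_t>(*capture_time_ms * 90);
  *encode_start_local_ms = timing.encode_start_ms + clock_offset_ms;
}

After the shift, the restored capture time sits (encode_finish_ms - capture_time_ms) milliseconds before "now", which is what the removed test asserted via GetLastCaptureTimestamp().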