Revert of Collecting encode_time_ms for each frame (patchset #13 id:220001 of https://codereview.webrtc.org/1374233002/ )

Reason for revert:
Breaks EndToEndTest.AssignsTransportSequenceNumbers in video_engine_tests
on several bots:
http://build.chromium.org/p/client.webrtc/builders/Linux64%20Debug/builds/5507
http://build.chromium.org/p/client.webrtc/builders/Mac64%20Debug/builds/4815
http://build.chromium.org/p/client.webrtc/builders/Win%20SyzyASan/builds/3272
http://build.chromium.org/p/client.webrtc/builders/Linux%20Memcheck/builds/4414

It seems very unfortunate that it breaks on _exactly_ the bot configs that aren't covered by the CQ trybots.

Original issue's description:
> Collecting encode_time_ms for each frame.
>
> Also, in Sample struct, replacing double with the original type.
> It makes more sense to save the original data as truthfully as possible, and then
> convert it to double later if necessary (in the plot script).
>
> Committed: https://crrev.com/092b13384e57b33e2003d9736dfa1f491e76f938
> Cr-Commit-Position: refs/heads/master@{#10184}
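
For context, the encode-time hook that the original change added (and that this revert removes again) is the small observer interface visible near the end of this diff. The sketch below restates that interface and shows how a consumer could record per-frame encode times, mirroring the samples_encode_time_ms_ map the VideoAnalyzer kept; EncodeTimeRecorder is a hypothetical name used only for illustration, not part of the reverted code.

// Sketch only, reconstructed from the hunks in this revert. In the reverted
// change, an observer like this was registered through
// VideoSendStream::Config::encoding_time_observer, which this revert deletes.
#include <cstdint>
#include <map>

class EncodingTimeObserver {
 public:
  virtual ~EncodingTimeObserver() {}
  // Called once per encoded frame with the total time spent encoding it,
  // keyed by the frame's capture NTP timestamp.
  virtual void OnReportEncodedTime(int64_t ntp_time_ms, int encode_time_ms) = 0;
};

// Hypothetical consumer: stores the encode time per frame, the same pattern
// the VideoAnalyzer used for its samples_encode_time_ms_ map.
class EncodeTimeRecorder : public EncodingTimeObserver {
 public:
  void OnReportEncodedTime(int64_t ntp_time_ms, int encode_time_ms) override {
    samples_encode_time_ms_[ntp_time_ms] = encode_time_ms;
  }

 private:
  std::map<int64_t, int> samples_encode_time_ms_;
};

With the original change in place, VideoCaptureInput::EncoderProcess() invoked OnReportEncodedTime() after each frame was encoded; that call site is removed in the second file of this diff.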

TBR=sprang@webrtc.org,pbos@webrtc.org,mflodman@webrtc.org,asapersson@webrtc.org,ivica@webrtc.org
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true

Review URL: https://codereview.webrtc.org/1383283005

Cr-Commit-Position: refs/heads/master@{#10185}
Author: kjellander
Date: 2015-10-06 11:34:08 -07:00
Committed by: Commit bot
Parent: 092b13384e
Commit: 8104479724

7 changed files with 43 additions and 91 deletions

View File

@@ -34,16 +34,15 @@ import numpy
# Fields
DROPPED = 0
INPUT_TIME = 1 # ms (timestamp)
SEND_TIME = 2 # ms (timestamp)
RECV_TIME = 3 # ms (timestamp)
RENDER_TIME = 4 # ms (timestamp)
ENCODED_FRAME_SIZE = 5 # bytes
PSNR = 6
SSIM = 7
ENCODE_TIME = 8 # ms (time interval)
INPUT_TIME = 1 # ms
SEND_TIME = 2 # ms
RECV_TIME = 3 # ms
ENCODED_FRAME_SIZE = 4 # bytes
PSNR = 5
SSIM = 6
RENDER_TIME = 7 # ms
TOTAL_RAW_FIELDS = 9
TOTAL_RAW_FIELDS = 8
SENDER_TIME = TOTAL_RAW_FIELDS + 0
RECEIVER_TIME = TOTAL_RAW_FIELDS + 1
@@ -67,7 +66,6 @@ _fields = [
(PSNR, "psnr", "PSNR"),
(SSIM, "ssim", "SSIM"),
(RENDER_TIME, "render_time_ms", "render time"),
(ENCODE_TIME, "encode_time_ms", "encode time"),
# Auto-generated
(SENDER_TIME, "sender_time", "sender time"),
(RECEIVER_TIME, "receiver_time", "receiver time"),

View File

@@ -29,13 +29,11 @@
namespace webrtc {
namespace internal {
VideoCaptureInput::VideoCaptureInput(
ProcessThread* module_process_thread,
VideoCaptureCallback* frame_callback,
VideoRenderer* local_renderer,
SendStatisticsProxy* stats_proxy,
CpuOveruseObserver* overuse_observer,
EncodingTimeObserver* encoding_time_observer)
VideoCaptureInput::VideoCaptureInput(ProcessThread* module_process_thread,
VideoCaptureCallback* frame_callback,
VideoRenderer* local_renderer,
SendStatisticsProxy* stats_proxy,
CpuOveruseObserver* overuse_observer)
: capture_cs_(CriticalSectionWrapper::CreateCriticalSection()),
module_process_thread_(module_process_thread),
frame_callback_(frame_callback),
@@ -54,8 +52,7 @@ VideoCaptureInput::VideoCaptureInput(
overuse_detector_(new OveruseFrameDetector(Clock::GetRealTimeClock(),
CpuOveruseOptions(),
overuse_observer,
stats_proxy)),
encoding_time_observer_(encoding_time_observer) {
stats_proxy)) {
encoder_thread_->Start();
encoder_thread_->SetPriority(kHighPriority);
module_process_thread_->RegisterModule(overuse_detector_.get());
@@ -152,10 +149,6 @@ bool VideoCaptureInput::EncoderProcess() {
Clock::GetRealTimeClock()->TimeInMilliseconds() - encode_start_time);
overuse_detector_->FrameEncoded(encode_time_ms);
stats_proxy_->OnEncodedFrame(encode_time_ms);
if (encoding_time_observer_) {
encoding_time_observer_->OnReportEncodedTime(
deliver_frame.ntp_time_ms(), encode_time_ms);
}
}
}
// We're done!

View File

@@ -55,8 +55,7 @@ class VideoCaptureInput : public webrtc::VideoCaptureInput {
VideoCaptureCallback* frame_callback,
VideoRenderer* local_renderer,
SendStatisticsProxy* send_stats_proxy,
CpuOveruseObserver* overuse_observer,
EncodingTimeObserver* encoding_time_observer);
CpuOveruseObserver* overuse_observer);
~VideoCaptureInput();
void IncomingCapturedFrame(const VideoFrame& video_frame) override;
@@ -91,7 +90,6 @@ class VideoCaptureInput : public webrtc::VideoCaptureInput {
const int64_t delta_ntp_internal_ms_;
rtc::scoped_ptr<OveruseFrameDetector> overuse_detector_;
EncodingTimeObserver* const encoding_time_observer_;
};
} // namespace internal

View File

@@ -64,7 +64,7 @@ class VideoCaptureInputTest : public ::testing::Test {
Config config;
input_.reset(new internal::VideoCaptureInput(
mock_process_thread_.get(), mock_frame_callback_.get(), nullptr,
&stats_proxy_, nullptr, nullptr));
&stats_proxy_, nullptr));
}
virtual void TearDown() {

View File

@@ -42,16 +42,16 @@ class VideoAnalyzer : public PacketReceiver,
public Transport,
public VideoRenderer,
public VideoCaptureInput,
public EncodedFrameObserver,
public EncodingTimeObserver {
public EncodedFrameObserver {
public:
VideoAnalyzer(Transport* transport,
VideoAnalyzer(VideoCaptureInput* input,
Transport* transport,
const std::string& test_label,
double avg_psnr_threshold,
double avg_ssim_threshold,
int duration_frames,
FILE* graph_data_output_file)
: input_(nullptr),
: input_(input),
transport_(transport),
receiver_(nullptr),
send_stream_(nullptr),
@@ -123,12 +123,6 @@ class VideoAnalyzer : public PacketReceiver,
return receiver_->DeliverPacket(media_type, packet, length, packet_time);
}
// EncodingTimeObserver.
void OnReportEncodedTime(int64_t ntp_time_ms, int encode_time_ms) override {
rtc::CritScope crit(&comparison_lock_);
samples_encode_time_ms_[ntp_time_ms] = encode_time_ms;
}
void IncomingCapturedFrame(const VideoFrame& video_frame) override {
VideoFrame copy = video_frame;
copy.set_timestamp(copy.ntp_time_ms() * 90);
@@ -285,31 +279,31 @@
};
struct Sample {
Sample(int dropped,
int64_t input_time_ms,
int64_t send_time_ms,
int64_t recv_time_ms,
int64_t render_time_ms,
size_t encoded_frame_size,
Sample(double dropped,
double input_time_ms,
double send_time_ms,
double recv_time_ms,
double encoded_frame_size,
double psnr,
double ssim)
double ssim,
double render_time_ms)
: dropped(dropped),
input_time_ms(input_time_ms),
send_time_ms(send_time_ms),
recv_time_ms(recv_time_ms),
render_time_ms(render_time_ms),
encoded_frame_size(encoded_frame_size),
psnr(psnr),
ssim(ssim) {}
ssim(ssim),
render_time_ms(render_time_ms) {}
int dropped;
int64_t input_time_ms;
int64_t send_time_ms;
int64_t recv_time_ms;
int64_t render_time_ms;
size_t encoded_frame_size;
double dropped;
double input_time_ms;
double send_time_ms;
double recv_time_ms;
double encoded_frame_size;
double psnr;
double ssim;
double render_time_ms;
};
void AddFrameComparison(const VideoFrame& reference,
@@ -471,8 +465,8 @@ class VideoAnalyzer : public PacketReceiver,
if (graph_data_output_file_) {
samples_.push_back(
Sample(comparison.dropped, input_time_ms, comparison.send_time_ms,
comparison.recv_time_ms, comparison.render_time_ms,
comparison.encoded_frame_size, psnr, ssim));
comparison.recv_time_ms, comparison.encoded_frame_size, psnr,
ssim, comparison.render_time_ms));
}
psnr_.AddSample(psnr);
ssim_.AddSample(ssim);
@@ -518,39 +512,21 @@
"input_time_ms "
"send_time_ms "
"recv_time_ms "
"render_time_ms "
"encoded_frame_size "
"psnr "
"ssim "
"encode_time_ms\n");
int missing_encode_time_samples = 0;
"render_time_ms\n");
for (const Sample& sample : samples_) {
auto it = samples_encode_time_ms_.find(sample.input_time_ms);
int encode_time_ms;
if (it != samples_encode_time_ms_.end()) {
encode_time_ms = it->second;
} else {
++missing_encode_time_samples;
encode_time_ms = -1;
}
fprintf(out, "%d %" PRId64 " %" PRId64 " %" PRId64 " %" PRId64 " %" PRIuS
" %lf %lf %d\n",
sample.dropped, sample.input_time_ms, sample.send_time_ms,
sample.recv_time_ms, sample.render_time_ms,
fprintf(out, "%lf %lf %lf %lf %lf %lf %lf %lf\n", sample.dropped,
sample.input_time_ms, sample.send_time_ms, sample.recv_time_ms,
sample.encoded_frame_size, sample.psnr, sample.ssim,
encode_time_ms);
}
if (missing_encode_time_samples) {
fprintf(stderr,
"Warning: Missing encode_time_ms samples for %d frame(s).\n",
missing_encode_time_samples);
sample.render_time_ms);
}
}
const std::string test_label_;
FILE* const graph_data_output_file_;
std::vector<Sample> samples_ GUARDED_BY(comparison_lock_);
std::map<int64_t, int> samples_encode_time_ms_ GUARDED_BY(comparison_lock_);
test::Statistics sender_time_ GUARDED_BY(comparison_lock_);
test::Statistics receiver_time_ GUARDED_BY(comparison_lock_);
test::Statistics psnr_ GUARDED_BY(comparison_lock_);
@@ -761,7 +737,7 @@ void VideoQualityTest::RunWithAnalyzer(const Params& params) {
static_cast<uint8_t>(params.common.tl_discard_threshold), 0);
test::DirectTransport recv_transport(params.pipe);
VideoAnalyzer analyzer(
&send_transport, params.analyzer.test_label,
nullptr, &send_transport, params.analyzer.test_label,
params.analyzer.avg_psnr_threshold, params.analyzer.avg_ssim_threshold,
params.analyzer.test_durations_secs * params.common.fps,
graph_data_output_file);
@@ -775,7 +751,6 @@ void VideoQualityTest::RunWithAnalyzer(const Params& params) {
recv_transport.SetReceiver(sender_call_->Receiver());
SetupFullStack(params, &analyzer, &recv_transport);
send_config_.encoding_time_observer = &analyzer;
receive_configs_[0].renderer = &analyzer;
for (auto& config : receive_configs_)
config.pre_decode_callback = &analyzer;

View File

@@ -162,7 +162,7 @@ VideoSendStream::VideoSendStream(
input_.reset(new internal::VideoCaptureInput(
module_process_thread_, vie_encoder_, config_.local_renderer,
&stats_proxy_, this, config_.encoding_time_observer));
&stats_proxy_, this));
// 28 to match packet overhead in ModuleRtpRtcpImpl.
RTC_DCHECK_LE(config_.rtp.max_packet_size, static_cast<size_t>(0xFFFF - 28));

View File

@@ -26,13 +26,6 @@ namespace webrtc {
class LoadObserver;
class VideoEncoder;
class EncodingTimeObserver {
public:
virtual ~EncodingTimeObserver() {}
virtual void OnReportEncodedTime(int64_t ntp_time_ms, int encode_time_ms) = 0;
};
// Class to deliver captured frame to the video send stream.
class VideoCaptureInput {
public:
@@ -159,11 +152,6 @@ class VideoSendStream : public SendStream {
// below the minimum configured bitrate. If this variable is false, the
// stream may send at a rate higher than the estimated available bitrate.
bool suspend_below_min_bitrate = false;
// Called for each encoded frame. Passes the total time spent on encoding.
// TODO(ivica): Consolidate with post_encode_callback:
// https://code.google.com/p/webrtc/issues/detail?id=5042
EncodingTimeObserver* encoding_time_observer = nullptr;
};
// Gets interface used to insert captured frames. Valid as long as the