From ab6bf4f54cc2bc6171338c0a7b3b5d9d7ce095bc Mon Sep 17 00:00:00 2001
From: "asapersson@webrtc.org"
Date: Tue, 27 May 2014 07:43:15 +0000
Subject: [PATCH] Added api for getting cpu measures using a struct.

R=mflodman@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/12479004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@6249 4adac7df-926f-26a2-2b94-8c16560cd09d
---
 webrtc/video_engine/include/vie_base.h        | 32 ++++++---
 webrtc/video_engine/overuse_frame_detector.cc | 30 +++------
 webrtc/video_engine/overuse_frame_detector.h  | 32 +++++----
 .../overuse_frame_detector_unittest.cc        | 67 +++++++++++++------
 webrtc/video_engine/vie_base_impl.cc          | 33 +++++++--
 webrtc/video_engine/vie_base_impl.h           |  2 +
 webrtc/video_engine/vie_capturer.cc           | 11 +--
 webrtc/video_engine/vie_capturer.h            |  6 +-
 8 files changed, 129 insertions(+), 84 deletions(-)

diff --git a/webrtc/video_engine/include/vie_base.h b/webrtc/video_engine/include/vie_base.h
index a08cbacb4b..56916ce3b9 100644
--- a/webrtc/video_engine/include/vie_base.h
+++ b/webrtc/video_engine/include/vie_base.h
@@ -109,6 +109,24 @@ struct CpuOveruseOptions {
   }
 };
 
+struct CpuOveruseMetrics {
+  CpuOveruseMetrics()
+      : capture_jitter_ms(-1),
+        avg_encode_time_ms(-1),
+        encode_usage_percent(-1),
+        capture_queue_delay_ms_per_s(-1) {}
+
+  int capture_jitter_ms;  // The current estimated jitter in ms based on
+                          // incoming captured frames.
+  int avg_encode_time_ms;  // The average encode time in ms.
+  int encode_usage_percent;  // The average encode time divided by the average
+                             // time difference between incoming captured frames.
+  int capture_queue_delay_ms_per_s;  // The current time delay between an
+                                     // incoming captured frame until the frame
+                                     // is being processed. The delay is
+                                     // expressed in ms delay per second.
+};
+
 class WEBRTC_DLLEXPORT VideoEngine {
  public:
   // Creates a VideoEngine object, which can then be used to acquire sub-APIs.
@@ -193,16 +211,12 @@ class WEBRTC_DLLEXPORT ViEBase {
   }
 
   // Gets cpu overuse measures.
-  // capture_jitter_ms: The current estimated jitter in ms based on incoming
-  //                    captured frames.
-  // avg_encode_time_ms: The average encode time in ms.
-  // encode_usage_percent: The average encode time divided by the average time
-  //                       difference between incoming captured frames.
-  // capture_queue_delay_ms_per_s: The current time delay between an incoming
-  //                               captured frame until the frame is being
-  //                               processed. The delay is expressed in ms
-  //                               delay per second.
   // TODO(asapersson): Remove default implementation.
+  virtual int GetCpuOveruseMetrics(int channel,
+                                   CpuOveruseMetrics* metrics) {
+    return -1;
+  }
+  // TODO(asapersson): Remove this function when libjingle has been updated.
   virtual int CpuOveruseMeasures(int channel,
                                  int* capture_jitter_ms,
                                  int* avg_encode_time_ms,
diff --git a/webrtc/video_engine/overuse_frame_detector.cc b/webrtc/video_engine/overuse_frame_detector.cc
index 078c89a55b..c136130b9c 100644
--- a/webrtc/video_engine/overuse_frame_detector.cc
+++ b/webrtc/video_engine/overuse_frame_detector.cc
@@ -302,31 +302,21 @@ void OveruseFrameDetector::SetOptions(const CpuOveruseOptions& options) {
   ResetAll(num_pixels_);
 }
 
-int OveruseFrameDetector::CaptureJitterMs() const {
-  CriticalSectionScoped cs(crit_.get());
-  return static_cast<int>(capture_deltas_.StdDev() + 0.5);
-}
-
-int OveruseFrameDetector::AvgEncodeTimeMs() const {
-  CriticalSectionScoped cs(crit_.get());
-  return encode_time_->filtered_encode_time_ms();
-}
-
-int OveruseFrameDetector::EncodeUsagePercent() const {
-  CriticalSectionScoped cs(crit_.get());
-  return encode_usage_->UsageInPercent();
-}
-
-int OveruseFrameDetector::AvgCaptureQueueDelayMsPerS() const {
-  CriticalSectionScoped cs(crit_.get());
-  return capture_queue_delay_->filtered_delay_ms_per_s();
-}
-
 int OveruseFrameDetector::CaptureQueueDelayMsPerS() const {
   CriticalSectionScoped cs(crit_.get());
   return capture_queue_delay_->delay_ms();
 }
 
+void OveruseFrameDetector::GetCpuOveruseMetrics(
+    CpuOveruseMetrics* metrics) const {
+  CriticalSectionScoped cs(crit_.get());
+  metrics->capture_jitter_ms = static_cast<int>(capture_deltas_.StdDev() + 0.5);
+  metrics->avg_encode_time_ms = encode_time_->filtered_encode_time_ms();
+  metrics->encode_usage_percent = encode_usage_->UsageInPercent();
+  metrics->capture_queue_delay_ms_per_s =
+      capture_queue_delay_->filtered_delay_ms_per_s();
+}
+
 int32_t OveruseFrameDetector::TimeUntilNextProcess() {
   CriticalSectionScoped cs(crit_.get());
   return next_process_time_ - clock_->TimeInMilliseconds();
diff --git a/webrtc/video_engine/overuse_frame_detector.h b/webrtc/video_engine/overuse_frame_detector.h
index c30bb57dfd..38b927baee 100644
--- a/webrtc/video_engine/overuse_frame_detector.h
+++ b/webrtc/video_engine/overuse_frame_detector.h
@@ -70,25 +70,23 @@ class OveruseFrameDetector : public Module {
   void FrameEncoded(int encode_time_ms);
 
   // Accessors.
-  // The estimated jitter based on incoming captured frames.
-  int CaptureJitterMs() const;
-  // Running average of reported encode time (FrameEncoded()).
-  // Only used for stats.
-  int AvgEncodeTimeMs() const;
+  // Returns CpuOveruseMetrics where
+  // capture_jitter_ms: The estimated jitter based on incoming captured frames.
+  // avg_encode_time_ms: Running average of reported encode time
+  //                     (FrameEncoded()). Only used for stats.
+  // encode_usage_percent: The average encode time divided by the average time
+  //                       difference between incoming captured frames.
+  // capture_queue_delay_ms_per_s: The current time delay between an incoming
+  //                               captured frame (FrameCaptured()) until the
+  //                               frame is being processed
+  //                               (FrameProcessingStarted()). (Note: if a new
+  //                               frame is received before an old frame has
+  //                               been processed, the old frame is skipped).
+  //                               The delay is expressed in ms delay per sec.
+  //                               Only used for stats.
+  void GetCpuOveruseMetrics(CpuOveruseMetrics* metrics) const;
 
-  // The average encode time divided by the average time difference between
-  // incoming captured frames.
-  // This variable is currently only used for statistics.
-  int EncodeUsagePercent() const;
-
-  // The current time delay between an incoming captured frame (FrameCaptured())
-  // until the frame is being processed (FrameProcessingStarted()).
-  // (Note: if a new frame is received before an old frame has been processed,
-  // the old frame is skipped).
-  // The delay is returned as the delay in ms per second.
-  // This variable is currently only used for statistics.
-  int AvgCaptureQueueDelayMsPerS() const;
 
   int CaptureQueueDelayMsPerS() const;
 
   // Implements Module.
diff --git a/webrtc/video_engine/overuse_frame_detector_unittest.cc b/webrtc/video_engine/overuse_frame_detector_unittest.cc
index a760fbfbcc..2d7116f869 100644
--- a/webrtc/video_engine/overuse_frame_detector_unittest.cc
+++ b/webrtc/video_engine/overuse_frame_detector_unittest.cc
@@ -118,6 +118,24 @@ class OveruseFrameDetectorTest : public ::testing::Test {
     overuse_detector_->Process();
   }
 
+  int CaptureJitterMs() {
+    CpuOveruseMetrics metrics;
+    overuse_detector_->GetCpuOveruseMetrics(&metrics);
+    return metrics.capture_jitter_ms;
+  }
+
+  int AvgEncodeTimeMs() {
+    CpuOveruseMetrics metrics;
+    overuse_detector_->GetCpuOveruseMetrics(&metrics);
+    return metrics.avg_encode_time_ms;
+  }
+
+  int EncodeUsagePercent() {
+    CpuOveruseMetrics metrics;
+    overuse_detector_->GetCpuOveruseMetrics(&metrics);
+    return metrics.encode_usage_percent;
+  }
+
   CpuOveruseOptions options_;
   scoped_ptr<SimulatedClock> clock_;
   scoped_ptr<MockCpuOveruseObserver> observer_;
@@ -196,49 +214,58 @@ TEST_F(OveruseFrameDetectorTest, IncorrectConsecutiveCountTriggersNoOveruse) {
   TriggerOveruse(1);
 }
 
+TEST_F(OveruseFrameDetectorTest, GetCpuOveruseMetrics) {
+  CpuOveruseMetrics metrics;
+  overuse_detector_->GetCpuOveruseMetrics(&metrics);
+  EXPECT_GT(metrics.capture_jitter_ms, 0);
+  EXPECT_GT(metrics.avg_encode_time_ms, 0);
+  EXPECT_GT(metrics.encode_usage_percent, 0);
+  EXPECT_GE(metrics.capture_queue_delay_ms_per_s, 0);
+}
+
 TEST_F(OveruseFrameDetectorTest, CaptureJitter) {
-  EXPECT_EQ(InitialJitter(), overuse_detector_->CaptureJitterMs());
+  EXPECT_EQ(InitialJitter(), CaptureJitterMs());
   InsertFramesWithInterval(1000, kFrameInterval33ms, kWidth, kHeight);
-  EXPECT_NE(InitialJitter(), overuse_detector_->CaptureJitterMs());
+  EXPECT_NE(InitialJitter(), CaptureJitterMs());
 }
 
 TEST_F(OveruseFrameDetectorTest, CaptureJitterResetAfterResolutionChange) {
-  EXPECT_EQ(InitialJitter(), overuse_detector_->CaptureJitterMs());
+  EXPECT_EQ(InitialJitter(), CaptureJitterMs());
   InsertFramesWithInterval(1000, kFrameInterval33ms, kWidth, kHeight);
-  EXPECT_NE(InitialJitter(), overuse_detector_->CaptureJitterMs());
+  EXPECT_NE(InitialJitter(), CaptureJitterMs());
   // Verify reset.
   InsertFramesWithInterval(1, kFrameInterval33ms, kWidth, kHeight + 1);
-  EXPECT_EQ(InitialJitter(), overuse_detector_->CaptureJitterMs());
+  EXPECT_EQ(InitialJitter(), CaptureJitterMs());
 }
 
 TEST_F(OveruseFrameDetectorTest, CaptureJitterResetAfterFrameTimeout) {
-  EXPECT_EQ(InitialJitter(), overuse_detector_->CaptureJitterMs());
+  EXPECT_EQ(InitialJitter(), CaptureJitterMs());
   InsertFramesWithInterval(1000, kFrameInterval33ms, kWidth, kHeight);
-  EXPECT_NE(InitialJitter(), overuse_detector_->CaptureJitterMs());
+  EXPECT_NE(InitialJitter(), CaptureJitterMs());
   InsertFramesWithInterval(
       1, options_.frame_timeout_interval_ms, kWidth, kHeight);
-  EXPECT_NE(InitialJitter(), overuse_detector_->CaptureJitterMs());
+  EXPECT_NE(InitialJitter(), CaptureJitterMs());
   // Verify reset.
   InsertFramesWithInterval(
       1, options_.frame_timeout_interval_ms + 1, kWidth, kHeight);
-  EXPECT_EQ(InitialJitter(), overuse_detector_->CaptureJitterMs());
+  EXPECT_EQ(InitialJitter(), CaptureJitterMs());
 }
 
 TEST_F(OveruseFrameDetectorTest, CaptureJitterResetAfterChangingThreshold) {
-  EXPECT_EQ(InitialJitter(), overuse_detector_->CaptureJitterMs());
+  EXPECT_EQ(InitialJitter(), CaptureJitterMs());
   options_.high_capture_jitter_threshold_ms = 90.0f;
   overuse_detector_->SetOptions(options_);
-  EXPECT_EQ(InitialJitter(), overuse_detector_->CaptureJitterMs());
+  EXPECT_EQ(InitialJitter(), CaptureJitterMs());
   options_.low_capture_jitter_threshold_ms = 30.0f;
   overuse_detector_->SetOptions(options_);
-  EXPECT_EQ(InitialJitter(), overuse_detector_->CaptureJitterMs());
+  EXPECT_EQ(InitialJitter(), CaptureJitterMs());
 }
 
 TEST_F(OveruseFrameDetectorTest, MinFrameSamplesBeforeUpdatingCaptureJitter) {
   options_.min_frame_samples = 40;
   overuse_detector_->SetOptions(options_);
   InsertFramesWithInterval(40, kFrameInterval33ms, kWidth, kHeight);
-  EXPECT_EQ(InitialJitter(), overuse_detector_->CaptureJitterMs());
+  EXPECT_EQ(InitialJitter(), CaptureJitterMs());
 }
 
 TEST_F(OveruseFrameDetectorTest, NoCaptureQueueDelay) {
@@ -289,33 +316,33 @@ TEST_F(OveruseFrameDetectorTest, CaptureQueueDelayNoMatchingCapturedFrame) {
 
 TEST_F(OveruseFrameDetectorTest, EncodedFrame) {
   const int kInitialAvgEncodeTimeInMs = 5;
-  EXPECT_EQ(kInitialAvgEncodeTimeInMs, overuse_detector_->AvgEncodeTimeMs());
+  EXPECT_EQ(kInitialAvgEncodeTimeInMs, AvgEncodeTimeMs());
   for (int i = 0; i < 30; i++) {
     clock_->AdvanceTimeMilliseconds(33);
     overuse_detector_->FrameEncoded(2);
   }
-  EXPECT_EQ(2, overuse_detector_->AvgEncodeTimeMs());
+  EXPECT_EQ(2, AvgEncodeTimeMs());
 }
 
 TEST_F(OveruseFrameDetectorTest, InitialEncodeUsage) {
-  EXPECT_EQ(InitialEncodeUsage(), overuse_detector_->EncodeUsagePercent());
+  EXPECT_EQ(InitialEncodeUsage(), EncodeUsagePercent());
 }
 
 TEST_F(OveruseFrameDetectorTest, EncodedUsage) {
   const int kEncodeTimeMs = 5;
   InsertAndEncodeFramesWithInterval(
       1000, kFrameInterval33ms, kWidth, kHeight, kEncodeTimeMs);
-  EXPECT_EQ(15, overuse_detector_->EncodeUsagePercent());
+  EXPECT_EQ(15, EncodeUsagePercent());
 }
 
 TEST_F(OveruseFrameDetectorTest, EncodeUsageResetAfterChangingThreshold) {
-  EXPECT_EQ(InitialEncodeUsage(), overuse_detector_->EncodeUsagePercent());
+  EXPECT_EQ(InitialEncodeUsage(), EncodeUsagePercent());
   options_.high_encode_usage_threshold_percent = 100;
   overuse_detector_->SetOptions(options_);
-  EXPECT_EQ(InitialEncodeUsage(), overuse_detector_->EncodeUsagePercent());
+  EXPECT_EQ(InitialEncodeUsage(), EncodeUsagePercent());
   options_.low_encode_usage_threshold_percent = 20;
   overuse_detector_->SetOptions(options_);
-  EXPECT_EQ(InitialEncodeUsage(), overuse_detector_->EncodeUsagePercent());
+  EXPECT_EQ(InitialEncodeUsage(), EncodeUsagePercent());
 }
 
 TEST_F(OveruseFrameDetectorTest, TriggerOveruseWithEncodeUsage) {
diff --git a/webrtc/video_engine/vie_base_impl.cc b/webrtc/video_engine/vie_base_impl.cc
index f4b87e8fa4..29fbe7fb84 100644
--- a/webrtc/video_engine/vie_base_impl.cc
+++ b/webrtc/video_engine/vie_base_impl.cc
@@ -140,10 +140,35 @@ int ViEBaseImpl::CpuOveruseMeasures(int video_channel,
   if (provider) {
     ViECapturer* capturer = is.Capture(provider->Id());
     if (capturer) {
-      capturer->CpuOveruseMeasures(capture_jitter_ms,
-                                   avg_encode_time_ms,
-                                   encode_usage_percent,
-                                   capture_queue_delay_ms_per_s);
+      CpuOveruseMetrics metrics;
+      capturer->GetCpuOveruseMetrics(&metrics);
+      *capture_jitter_ms = metrics.capture_jitter_ms;
+      *avg_encode_time_ms = metrics.avg_encode_time_ms;
+      *encode_usage_percent = metrics.encode_usage_percent;
+      *capture_queue_delay_ms_per_s = metrics.capture_queue_delay_ms_per_s;
+      return 0;
+    }
+  }
+  return -1;
+}
+
+int ViEBaseImpl::GetCpuOveruseMetrics(int video_channel,
+                                      CpuOveruseMetrics* metrics) {
+  ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    shared_data_.SetLastError(kViEBaseInvalidChannelId);
+    return -1;
+  }
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  assert(vie_encoder);
+
+  ViEInputManagerScoped is(*(shared_data_.input_manager()));
+  ViEFrameProviderBase* provider = is.FrameProvider(vie_encoder);
+  if (provider) {
+    ViECapturer* capturer = is.Capture(provider->Id());
+    if (capturer) {
+      capturer->GetCpuOveruseMetrics(metrics);
       return 0;
     }
   }
diff --git a/webrtc/video_engine/vie_base_impl.h b/webrtc/video_engine/vie_base_impl.h
index 52c888e3ba..d6a046e608 100644
--- a/webrtc/video_engine/vie_base_impl.h
+++ b/webrtc/video_engine/vie_base_impl.h
@@ -35,6 +35,8 @@ class ViEBaseImpl
                                           CpuOveruseObserver* observer);
   virtual int SetCpuOveruseOptions(int channel,
                                    const CpuOveruseOptions& options);
+  virtual int GetCpuOveruseMetrics(int channel,
+                                   CpuOveruseMetrics* metrics);
   virtual int CpuOveruseMeasures(int channel,
                                  int* capture_jitter_ms,
                                  int* avg_encode_time_ms,
diff --git a/webrtc/video_engine/vie_capturer.cc b/webrtc/video_engine/vie_capturer.cc
index f037dc82de..867de9b14f 100644
--- a/webrtc/video_engine/vie_capturer.cc
+++ b/webrtc/video_engine/vie_capturer.cc
@@ -249,15 +249,8 @@ void ViECapturer::SetCpuOveruseOptions(const CpuOveruseOptions& options) {
   overuse_detector_->SetOptions(options);
 }
 
-void ViECapturer::CpuOveruseMeasures(int* capture_jitter_ms,
-                                     int* avg_encode_time_ms,
-                                     int* encode_usage_percent,
-                                     int* capture_queue_delay_ms_per_s) const {
-  *capture_jitter_ms = overuse_detector_->CaptureJitterMs();
-  *avg_encode_time_ms = overuse_detector_->AvgEncodeTimeMs();
-  *encode_usage_percent = overuse_detector_->EncodeUsagePercent();
-  *capture_queue_delay_ms_per_s =
-      overuse_detector_->AvgCaptureQueueDelayMsPerS();
+void ViECapturer::GetCpuOveruseMetrics(CpuOveruseMetrics* metrics) const {
+  overuse_detector_->GetCpuOveruseMetrics(metrics);
 }
 
 int32_t ViECapturer::SetCaptureDelay(int32_t delay_ms) {
diff --git a/webrtc/video_engine/vie_capturer.h b/webrtc/video_engine/vie_capturer.h
index 37f203a8fa..9ac5f8312f 100644
--- a/webrtc/video_engine/vie_capturer.h
+++ b/webrtc/video_engine/vie_capturer.h
@@ -108,11 +108,7 @@ class ViECapturer
   void RegisterCpuOveruseObserver(CpuOveruseObserver* observer);
 
   void SetCpuOveruseOptions(const CpuOveruseOptions& options);
-
-  void CpuOveruseMeasures(int* capture_jitter_ms,
-                          int* avg_encode_time_ms,
-                          int* encode_usage_percent,
-                          int* capture_queue_delay_ms_per_s) const;
+  void GetCpuOveruseMetrics(CpuOveruseMetrics* metrics) const;
 
  protected:
   ViECapturer(int capture_id,
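
Usage note (not part of the patch): GetCpuOveruseMetrics() returns all four measures in a single CpuOveruseMetrics struct instead of the four out-parameters taken by the deprecated CpuOveruseMeasures(). A minimal caller-side sketch, assuming a webrtc::VideoEngine instance has already been created and `channel` refers to an existing video channel; the function name PrintCpuOveruseMetrics and both parameter names are illustrative only:

#include <cstdio>

#include "webrtc/video_engine/include/vie_base.h"

// Sketch only: fetch the CPU overuse metrics for one channel and print them.
void PrintCpuOveruseMetrics(webrtc::VideoEngine* video_engine, int channel) {
  webrtc::ViEBase* base = webrtc::ViEBase::GetInterface(video_engine);
  if (!base)
    return;
  // Fields are initialized to -1 by the struct's default constructor.
  webrtc::CpuOveruseMetrics metrics;
  if (base->GetCpuOveruseMetrics(channel, &metrics) == 0) {
    printf("capture_jitter_ms=%d avg_encode_time_ms=%d "
           "encode_usage_percent=%d capture_queue_delay_ms_per_s=%d\n",
           metrics.capture_jitter_ms,
           metrics.avg_encode_time_ms,
           metrics.encode_usage_percent,
           metrics.capture_queue_delay_ms_per_s);
  }
  base->Release();
}

As in the implementation above, GetCpuOveruseMetrics() returns 0 on success and -1 otherwise (for example, for an invalid channel id), mirroring the function it replaces.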