Replace legacy getStats with standard getStats in the iOS example

Bug: webrtc:12688
Change-Id: I039cdae7647738d5f17b229c1137b72bf4aa7be0
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/219580
Reviewed-by: Kári Helgason <kthelgason@webrtc.org>
Commit-Queue: Kári Helgason <kthelgason@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#34063}
This commit is contained in:
Jaehyun Ko
2021-05-20 15:04:02 +09:00
committed by WebRTC LUCI CQ
parent bd346d7439
commit 193f4bf6c1
11 changed files with 21 additions and 413 deletions

View File

@ -253,8 +253,6 @@ if (is_ios || (is_mac && target_cpu != "x86")) {
"objc/AppRTCMobile/ARDAppClient.m", "objc/AppRTCMobile/ARDAppClient.m",
"objc/AppRTCMobile/ARDAppEngineClient.h", "objc/AppRTCMobile/ARDAppEngineClient.h",
"objc/AppRTCMobile/ARDAppEngineClient.m", "objc/AppRTCMobile/ARDAppEngineClient.m",
"objc/AppRTCMobile/ARDBitrateTracker.h",
"objc/AppRTCMobile/ARDBitrateTracker.m",
"objc/AppRTCMobile/ARDCaptureController.h", "objc/AppRTCMobile/ARDCaptureController.h",
"objc/AppRTCMobile/ARDCaptureController.m", "objc/AppRTCMobile/ARDCaptureController.m",
"objc/AppRTCMobile/ARDExternalSampleCapturer.h", "objc/AppRTCMobile/ARDExternalSampleCapturer.h",

View File

@ -48,7 +48,7 @@ typedef NS_ENUM(NSInteger, ARDAppClientState) {
- (void)appClient:(ARDAppClient *)client didError:(NSError *)error; - (void)appClient:(ARDAppClient *)client didError:(NSError *)error;
- (void)appClient:(ARDAppClient *)client didGetStats:(NSArray *)stats; - (void)appClient:(ARDAppClient *)client didGetStats:(RTC_OBJC_TYPE(RTCStatisticsReport) *)stats;
@optional @optional
- (void)appClient:(ARDAppClient *)client - (void)appClient:(ARDAppClient *)client

View File

@ -191,9 +191,8 @@ static int const kKbpsMultiplier = 1000;
repeats:YES repeats:YES
timerHandler:^{ timerHandler:^{
ARDAppClient *strongSelf = weakSelf; ARDAppClient *strongSelf = weakSelf;
[strongSelf.peerConnection statsForTrack:nil [strongSelf.peerConnection statisticsWithCompletionHandler:^(
statsOutputLevel:RTCStatsOutputLevelDebug RTC_OBJC_TYPE(RTCStatisticsReport) * stats) {
completionHandler:^(NSArray *stats) {
dispatch_async(dispatch_get_main_queue(), ^{ dispatch_async(dispatch_get_main_queue(), ^{
ARDAppClient *strongSelf = weakSelf; ARDAppClient *strongSelf = weakSelf;
[strongSelf.delegate appClient:strongSelf didGetStats:stats]; [strongSelf.delegate appClient:strongSelf didGetStats:stats];

View File

@ -1,30 +0,0 @@
/*
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
/** Class used to estimate bitrate based on byte count. It is expected that
 * byte count is monotonically increasing. This class tracks the times that
 * byte count is updated, and measures the bitrate based on the byte difference
 * over the interval between updates.
 */
@interface ARDBitrateTracker : NSObject
/** The most recently computed bitrate, in bits per second. */
@property(nonatomic, readonly) double bitrate;
/** The current bitrate formatted as a human-readable bps, Kbps or Mbps string. */
@property(nonatomic, readonly) NSString *bitrateString;
/** Formats a bitrate (in bits per second) as a readable bps, Kbps or Mbps string. */
+ (NSString *)bitrateStringForBitrate:(double)bitrate;
/** Updates the tracked bitrate with the new cumulative byte count; call periodically so the elapsed interval between calls can be measured. */
- (void)updateBitrateWithCurrentByteCount:(NSInteger)byteCount;
@end

View File

@ -1,45 +0,0 @@
/*
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "ARDBitrateTracker.h"
#import <QuartzCore/QuartzCore.h>
@implementation ARDBitrateTracker {
  // Timestamp and cumulative byte count from the previous update, used to
  // measure the delta over the elapsed interval.
  CFTimeInterval _prevTime;
  NSInteger _prevByteCount;
}

@synthesize bitrate = _bitrate;

+ (NSString *)bitrateStringForBitrate:(double)bitrate {
  // Pick the largest unit whose threshold the bitrate exceeds.
  if (bitrate > 1e6) {
    return [NSString stringWithFormat:@"%.2fMbps", bitrate * 1e-6];
  }
  if (bitrate > 1e3) {
    return [NSString stringWithFormat:@"%.0fKbps", bitrate * 1e-3];
  }
  return [NSString stringWithFormat:@"%.0fbps", bitrate];
}

- (NSString *)bitrateString {
  // Delegate formatting of the tracked value to the class-level helper.
  return [[self class] bitrateStringForBitrate:_bitrate];
}

- (void)updateBitrateWithCurrentByteCount:(NSInteger)byteCount {
  CFTimeInterval now = CACurrentMediaTime();
  // Only compute a rate once a prior sample exists and the cumulative count
  // actually grew; otherwise just record the new sample.
  BOOL hasPreviousSample = _prevTime != 0;
  BOOL byteCountIncreased = byteCount > _prevByteCount;
  if (hasPreviousSample && byteCountIncreased) {
    _bitrate = (byteCount - _prevByteCount) * 8 / (now - _prevTime);
  }
  _prevByteCount = byteCount;
  _prevTime = now;
}

@end

View File

@ -10,10 +10,9 @@
#import <Foundation/Foundation.h> #import <Foundation/Foundation.h>
#import "sdk/objc/api/peerconnection/RTCStatisticsReport.h"
#import "sdk/objc/base/RTCMacros.h" #import "sdk/objc/base/RTCMacros.h"
@class RTC_OBJC_TYPE(RTCLegacyStatsReport);
/** Class used to accumulate stats information into a single displayable string. /** Class used to accumulate stats information into a single displayable string.
*/ */
@interface ARDStatsBuilder : NSObject @interface ARDStatsBuilder : NSObject
@ -22,10 +21,6 @@
* class. * class.
*/ */
@property(nonatomic, readonly) NSString *statsString; @property(nonatomic, readonly) NSString *statsString;
@property(nonatomic) RTC_OBJC_TYPE(RTCStatisticsReport) * stats;
/** Parses the information in the stats report into an appropriate internal
* format used to generate the stats string.
*/
- (void)parseStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport;
@end @end

View File

@ -13,334 +13,24 @@
#import "sdk/objc/api/peerconnection/RTCLegacyStatsReport.h" #import "sdk/objc/api/peerconnection/RTCLegacyStatsReport.h"
#import "sdk/objc/base/RTCMacros.h" #import "sdk/objc/base/RTCMacros.h"
#import "ARDBitrateTracker.h"
#import "ARDUtilities.h" #import "ARDUtilities.h"
@implementation ARDStatsBuilder { @implementation ARDStatsBuilder
// Connection stats.
NSString *_connRecvBitrate;
NSString *_connRtt;
NSString *_connSendBitrate;
NSString *_localCandType;
NSString *_remoteCandType;
NSString *_transportType;
// BWE stats. @synthesize stats = _stats;
NSString *_actualEncBitrate;
NSString *_availableRecvBw;
NSString *_availableSendBw;
NSString *_targetEncBitrate;
// Video send stats.
NSString *_videoEncodeMs;
NSString *_videoInputFps;
NSString *_videoInputHeight;
NSString *_videoInputWidth;
NSString *_videoSendCodec;
NSString *_videoSendBitrate;
NSString *_videoSendFps;
NSString *_videoSendHeight;
NSString *_videoSendWidth;
// QP stats.
int _videoQPSum;
int _framesEncoded;
int _oldVideoQPSum;
int _oldFramesEncoded;
// Video receive stats.
NSString *_videoDecodeMs;
NSString *_videoDecodedFps;
NSString *_videoOutputFps;
NSString *_videoRecvBitrate;
NSString *_videoRecvFps;
NSString *_videoRecvHeight;
NSString *_videoRecvWidth;
// Audio send stats.
NSString *_audioSendBitrate;
NSString *_audioSendCodec;
// Audio receive stats.
NSString *_audioCurrentDelay;
NSString *_audioExpandRate;
NSString *_audioRecvBitrate;
NSString *_audioRecvCodec;
// Bitrate trackers.
ARDBitrateTracker *_audioRecvBitrateTracker;
ARDBitrateTracker *_audioSendBitrateTracker;
ARDBitrateTracker *_connRecvBitrateTracker;
ARDBitrateTracker *_connSendBitrateTracker;
ARDBitrateTracker *_videoRecvBitrateTracker;
ARDBitrateTracker *_videoSendBitrateTracker;
}
- (instancetype)init {
if (self = [super init]) {
_audioSendBitrateTracker = [[ARDBitrateTracker alloc] init];
_audioRecvBitrateTracker = [[ARDBitrateTracker alloc] init];
_connSendBitrateTracker = [[ARDBitrateTracker alloc] init];
_connRecvBitrateTracker = [[ARDBitrateTracker alloc] init];
_videoSendBitrateTracker = [[ARDBitrateTracker alloc] init];
_videoRecvBitrateTracker = [[ARDBitrateTracker alloc] init];
_videoQPSum = 0;
_framesEncoded = 0;
}
return self;
}
- (NSString *)statsString { - (NSString *)statsString {
NSMutableString *result = [NSMutableString string]; NSMutableString *result = [NSMutableString string];
NSString *systemStatsFormat = @"(cpu)%ld%%\n";
[result appendString:[NSString stringWithFormat:systemStatsFormat,
(long)ARDGetCpuUsagePercentage()]];
// Connection stats. [result appendFormat:@"(cpu)%ld%%\n", (long)ARDGetCpuUsagePercentage()];
NSString *connStatsFormat = @"CN %@ms | %@->%@/%@ | (s)%@ | (r)%@\n";
[result appendString:[NSString stringWithFormat:connStatsFormat,
_connRtt,
_localCandType, _remoteCandType, _transportType,
_connSendBitrate, _connRecvBitrate]];
// Video send stats. for (NSString *key in _stats.statistics) {
NSString *videoSendFormat = @"VS (input) %@x%@@%@fps | (sent) %@x%@@%@fps\n" RTC_OBJC_TYPE(RTCStatistics) *stat = _stats.statistics[key];
"VS (enc) %@/%@ | (sent) %@/%@ | %@ms | %@\n" [result appendFormat:@"%@\n", stat.description];
"AvgQP (past %d encoded frames) = %d\n "; }
int avgqp = [self calculateAvgQP];
[result appendString:[NSString stringWithFormat:videoSendFormat,
_videoInputWidth, _videoInputHeight, _videoInputFps,
_videoSendWidth, _videoSendHeight, _videoSendFps,
_actualEncBitrate, _targetEncBitrate,
_videoSendBitrate, _availableSendBw,
_videoEncodeMs,
_videoSendCodec,
_framesEncoded - _oldFramesEncoded, avgqp]];
// Video receive stats.
NSString *videoReceiveFormat =
@"VR (recv) %@x%@@%@fps | (decoded)%@ | (output)%@fps | %@/%@ | %@ms\n";
[result appendString:[NSString stringWithFormat:videoReceiveFormat,
_videoRecvWidth, _videoRecvHeight, _videoRecvFps,
_videoDecodedFps,
_videoOutputFps,
_videoRecvBitrate, _availableRecvBw,
_videoDecodeMs]];
// Audio send stats.
NSString *audioSendFormat = @"AS %@ | %@\n";
[result appendString:[NSString stringWithFormat:audioSendFormat,
_audioSendBitrate, _audioSendCodec]];
// Audio receive stats.
NSString *audioReceiveFormat = @"AR %@ | %@ | %@ms | (expandrate)%@";
[result appendString:[NSString stringWithFormat:audioReceiveFormat,
_audioRecvBitrate, _audioRecvCodec, _audioCurrentDelay,
_audioExpandRate]];
return result; return result;
} }
- (void)parseStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
NSString *reportType = statsReport.type;
if ([reportType isEqualToString:@"ssrc"] &&
[statsReport.reportId rangeOfString:@"ssrc"].location != NSNotFound) {
if ([statsReport.reportId rangeOfString:@"send"].location != NSNotFound) {
[self parseSendSsrcStatsReport:statsReport];
}
if ([statsReport.reportId rangeOfString:@"recv"].location != NSNotFound) {
[self parseRecvSsrcStatsReport:statsReport];
}
} else if ([reportType isEqualToString:@"VideoBwe"]) {
[self parseBweStatsReport:statsReport];
} else if ([reportType isEqualToString:@"googCandidatePair"]) {
[self parseConnectionStatsReport:statsReport];
}
}
#pragma mark - Private
- (int)calculateAvgQP {
int deltaFramesEncoded = _framesEncoded - _oldFramesEncoded;
int deltaQPSum = _videoQPSum - _oldVideoQPSum;
return deltaFramesEncoded != 0 ? deltaQPSum / deltaFramesEncoded : 0;
}
- (void)updateBweStatOfKey:(NSString *)key value:(NSString *)value {
if ([key isEqualToString:@"googAvailableSendBandwidth"]) {
_availableSendBw = [ARDBitrateTracker bitrateStringForBitrate:value.doubleValue];
} else if ([key isEqualToString:@"googAvailableReceiveBandwidth"]) {
_availableRecvBw = [ARDBitrateTracker bitrateStringForBitrate:value.doubleValue];
} else if ([key isEqualToString:@"googActualEncBitrate"]) {
_actualEncBitrate = [ARDBitrateTracker bitrateStringForBitrate:value.doubleValue];
} else if ([key isEqualToString:@"googTargetEncBitrate"]) {
_targetEncBitrate = [ARDBitrateTracker bitrateStringForBitrate:value.doubleValue];
}
}
- (void)parseBweStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
[statsReport.values
enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
[self updateBweStatOfKey:key value:value];
}];
}
- (void)updateConnectionStatOfKey:(NSString *)key value:(NSString *)value {
if ([key isEqualToString:@"googRtt"]) {
_connRtt = value;
} else if ([key isEqualToString:@"googLocalCandidateType"]) {
_localCandType = value;
} else if ([key isEqualToString:@"googRemoteCandidateType"]) {
_remoteCandType = value;
} else if ([key isEqualToString:@"googTransportType"]) {
_transportType = value;
} else if ([key isEqualToString:@"bytesReceived"]) {
NSInteger byteCount = value.integerValue;
[_connRecvBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
_connRecvBitrate = _connRecvBitrateTracker.bitrateString;
} else if ([key isEqualToString:@"bytesSent"]) {
NSInteger byteCount = value.integerValue;
[_connSendBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
_connSendBitrate = _connSendBitrateTracker.bitrateString;
}
}
- (void)parseConnectionStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
NSString *activeConnection = statsReport.values[@"googActiveConnection"];
if (![activeConnection isEqualToString:@"true"]) {
return;
}
[statsReport.values
enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
[self updateConnectionStatOfKey:key value:value];
}];
}
- (void)parseSendSsrcStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
NSDictionary *values = statsReport.values;
if ([values objectForKey:@"googFrameRateSent"]) {
// Video track.
[self parseVideoSendStatsReport:statsReport];
} else if ([values objectForKey:@"audioInputLevel"]) {
// Audio track.
[self parseAudioSendStatsReport:statsReport];
}
}
- (void)updateAudioSendStatOfKey:(NSString *)key value:(NSString *)value {
if ([key isEqualToString:@"googCodecName"]) {
_audioSendCodec = value;
} else if ([key isEqualToString:@"bytesSent"]) {
NSInteger byteCount = value.integerValue;
[_audioSendBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
_audioSendBitrate = _audioSendBitrateTracker.bitrateString;
}
}
- (void)parseAudioSendStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
[statsReport.values
enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
[self updateAudioSendStatOfKey:key value:value];
}];
}
- (void)updateVideoSendStatOfKey:(NSString *)key value:(NSString *)value {
if ([key isEqualToString:@"googCodecName"]) {
_videoSendCodec = value;
} else if ([key isEqualToString:@"googFrameHeightInput"]) {
_videoInputHeight = value;
} else if ([key isEqualToString:@"googFrameWidthInput"]) {
_videoInputWidth = value;
} else if ([key isEqualToString:@"googFrameRateInput"]) {
_videoInputFps = value;
} else if ([key isEqualToString:@"googFrameHeightSent"]) {
_videoSendHeight = value;
} else if ([key isEqualToString:@"googFrameWidthSent"]) {
_videoSendWidth = value;
} else if ([key isEqualToString:@"googFrameRateSent"]) {
_videoSendFps = value;
} else if ([key isEqualToString:@"googAvgEncodeMs"]) {
_videoEncodeMs = value;
} else if ([key isEqualToString:@"bytesSent"]) {
NSInteger byteCount = value.integerValue;
[_videoSendBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
_videoSendBitrate = _videoSendBitrateTracker.bitrateString;
} else if ([key isEqualToString:@"qpSum"]) {
_oldVideoQPSum = _videoQPSum;
_videoQPSum = value.integerValue;
} else if ([key isEqualToString:@"framesEncoded"]) {
_oldFramesEncoded = _framesEncoded;
_framesEncoded = value.integerValue;
}
}
- (void)parseVideoSendStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
[statsReport.values
enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
[self updateVideoSendStatOfKey:key value:value];
}];
}
- (void)parseRecvSsrcStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
NSDictionary *values = statsReport.values;
if ([values objectForKey:@"googFrameWidthReceived"]) {
// Video track.
[self parseVideoRecvStatsReport:statsReport];
} else if ([values objectForKey:@"audioOutputLevel"]) {
// Audio track.
[self parseAudioRecvStatsReport:statsReport];
}
}
- (void)updateAudioRecvStatOfKey:(NSString *)key value:(NSString *)value {
if ([key isEqualToString:@"googCodecName"]) {
_audioRecvCodec = value;
} else if ([key isEqualToString:@"bytesReceived"]) {
NSInteger byteCount = value.integerValue;
[_audioRecvBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
_audioRecvBitrate = _audioRecvBitrateTracker.bitrateString;
} else if ([key isEqualToString:@"googSpeechExpandRate"]) {
_audioExpandRate = value;
} else if ([key isEqualToString:@"googCurrentDelayMs"]) {
_audioCurrentDelay = value;
}
}
- (void)parseAudioRecvStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
[statsReport.values
enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
[self updateAudioRecvStatOfKey:key value:value];
}];
}
- (void)updateVideoRecvStatOfKey:(NSString *)key value:(NSString *)value {
if ([key isEqualToString:@"googFrameHeightReceived"]) {
_videoRecvHeight = value;
} else if ([key isEqualToString:@"googFrameWidthReceived"]) {
_videoRecvWidth = value;
} else if ([key isEqualToString:@"googFrameRateReceived"]) {
_videoRecvFps = value;
} else if ([key isEqualToString:@"googFrameRateDecoded"]) {
_videoDecodedFps = value;
} else if ([key isEqualToString:@"googFrameRateOutput"]) {
_videoOutputFps = value;
} else if ([key isEqualToString:@"googDecodeMs"]) {
_videoDecodeMs = value;
} else if ([key isEqualToString:@"bytesReceived"]) {
NSInteger byteCount = value.integerValue;
[_videoRecvBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
_videoRecvBitrate = _videoRecvBitrateTracker.bitrateString;
}
}
- (void)parseVideoRecvStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
[statsReport.values
enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
[self updateVideoRecvStatOfKey:key value:value];
}];
}
@end @end

View File

@ -10,8 +10,12 @@
#import <UIKit/UIKit.h> #import <UIKit/UIKit.h>
#import "sdk/objc/base/RTCMacros.h"
@class RTC_OBJC_TYPE(RTCStatisticsReport);
@interface ARDStatsView : UIView @interface ARDStatsView : UIView
- (void)setStats:(NSArray *)stats; - (void)setStats:(RTC_OBJC_TYPE(RTCStatisticsReport) *)stats;
@end @end

View File

@ -34,10 +34,8 @@
return self; return self;
} }
- (void)setStats:(NSArray *)stats { - (void)setStats:(RTC_OBJC_TYPE(RTCStatisticsReport) *)stats {
for (RTC_OBJC_TYPE(RTCLegacyStatsReport) * report in stats) { _statsBuilder.stats = stats;
[_statsBuilder parseStatsReport:report];
}
_statsLabel.text = _statsBuilder.statsString; _statsLabel.text = _statsBuilder.statsString;
} }

View File

@ -132,8 +132,7 @@
}); });
} }
- (void)appClient:(ARDAppClient *)client - (void)appClient:(ARDAppClient *)client didGetStats:(RTC_OBJC_TYPE(RTCStatisticsReport) *)stats {
didGetStats:(NSArray *)stats {
_videoCallView.statsView.stats = stats; _videoCallView.statsView.stats = stats;
[_videoCallView setNeedsLayout]; [_videoCallView setNeedsLayout];
} }

View File

@ -120,7 +120,7 @@
didReceiveRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack { didReceiveRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack {
} }
- (void)appClient:(ARDAppClient *)client didGetStats:(NSArray *)stats { - (void)appClient:(ARDAppClient *)client didGetStats:(RTC_OBJC_TYPE(RTCStatisticsReport) *)stats {
} }
- (void)appClient:(ARDAppClient *)client didError:(NSError *)error { - (void)appClient:(ARDAppClient *)client didError:(NSError *)error {