Add UMA histogram for actual Android buffer size
Previously, a histogram was added to track the requested buffer size; this CL adds a histogram for the buffer size that is actually used.

Bug: b/157429867
Change-Id: I04016760982a4c43b8ba8f0e095fe1171b705258
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/176227
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Reviewed-by: Henrik Andreassson <henrika@webrtc.org>
Commit-Queue: Ivo Creusen <ivoc@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#31385}
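As background for the diffs below, here is a minimal sketch of the unit conversions behind the two histograms. The helper functions and their names are illustrative only, not part of this CL: the requested size is returned by initPlayout() in bytes and is converted assuming 16-bit mono PCM (2 bytes per frame), while the actual size is returned by AudioTrack.getBufferSizeInFrames() in frames, so only the sample rate is needed.

// Illustrative helpers only (not in the CL). Both mirror the arithmetic used
// for the UMA samples in the diffs below.

// Requested playout buffer size, reported in bytes by initPlayout().
// Assumes 16-bit mono PCM, i.e. 2 bytes per frame.
int RequestedBufferSizeMs(int requested_bytes, int sample_rate_hz) {
  // Fall back to 48 kHz if an invalid sample rate is reported, as the CL does.
  sample_rate_hz = sample_rate_hz <= 0 ? 48000 : sample_rate_hz;
  return (requested_bytes * 1000) / (2 * sample_rate_hz);
}

// Actual playout buffer size, reported in frames by
// AudioTrack.getBufferSizeInFrames(); no sample-format assumption is needed.
int ActualBufferSizeMs(int buffer_size_frames, int sample_rate_hz) {
  sample_rate_hz = sample_rate_hz <= 0 ? 48000 : sample_rate_hz;
  return buffer_size_frames * 1000 / sample_rate_hz;
}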
@@ -34,7 +34,9 @@ AudioTrackJni::JavaAudioTrack::JavaAudioTrack(
       set_stream_volume_(native_reg->GetMethodId("setStreamVolume", "(I)Z")),
       get_stream_max_volume_(
           native_reg->GetMethodId("getStreamMaxVolume", "()I")),
-      get_stream_volume_(native_reg->GetMethodId("getStreamVolume", "()I")) {}
+      get_stream_volume_(native_reg->GetMethodId("getStreamVolume", "()I")),
+      get_buffer_size_in_frames_(
+          native_reg->GetMethodId("getBufferSizeInFrames", "()I")) {}

 AudioTrackJni::JavaAudioTrack::~JavaAudioTrack() {}

@@ -46,15 +48,26 @@ bool AudioTrackJni::JavaAudioTrack::InitPlayout(int sample_rate, int channels) {
       nullptr);
   if (buffer_size_factor == 0)
     buffer_size_factor = 1.0;
-  int buffer_size_bytes = audio_track_->CallIntMethod(
+  int requested_buffer_size_bytes = audio_track_->CallIntMethod(
       init_playout_, sample_rate, channels, buffer_size_factor);
-  if (buffer_size_bytes != -1) {
+  // Update UMA histograms for both the requested and actual buffer size.
+  if (requested_buffer_size_bytes >= 0) {
     // To avoid division by zero, we assume the sample rate is 48k if an invalid
     // value is found.
     sample_rate = sample_rate <= 0 ? 48000 : sample_rate;
-    const int buffer_size_ms = (buffer_size_bytes * 1000) / (2 * sample_rate);
+    // This calculation assumes that audio is mono.
+    const int requested_buffer_size_ms =
+        (requested_buffer_size_bytes * 1000) / (2 * sample_rate);
     RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeRequestedAudioBufferSizeMs",
-                         buffer_size_ms, 0, 1000, 100);
+                         requested_buffer_size_ms, 0, 1000, 100);
+    int actual_buffer_size_frames =
+        audio_track_->CallIntMethod(get_buffer_size_in_frames_);
+    if (actual_buffer_size_frames >= 0) {
+      const int actual_buffer_size_ms =
+          actual_buffer_size_frames * 1000 / sample_rate;
+      RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeAudioBufferSizeMs",
+                           actual_buffer_size_ms, 0, 1000, 100);
+    }
     return true;
   }
   return false;
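A worked example of the two formulas in the hunk above, with illustrative numbers that are not taken from the CL: at 48 kHz, a requested size of 3840 bytes gives (3840 * 1000) / (2 * 48000) = 40 ms, and an actual size of 1920 frames gives 1920 * 1000 / 48000 = 40 ms; the factor of 2 in the first formula is the assumed 2 bytes per mono 16-bit frame.

// Compile-time check of the example numbers above (hypothetical values only).
static_assert((3840 * 1000) / (2 * 48000) == 40, "requested: bytes -> ms");
static_assert(1920 * 1000 / 48000 == 40, "actual: frames -> ms");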
@@ -62,6 +62,7 @@ class AudioTrackJni {
     jmethodID set_stream_volume_;
     jmethodID get_stream_max_volume_;
     jmethodID get_stream_volume_;
+    jmethodID get_buffer_size_in_frames_;
   };

   explicit AudioTrackJni(AudioManager* audio_manager);
@@ -433,6 +433,13 @@ public class WebRtcAudioTrack {
     }
   }

+  private int getBufferSizeInFrames() {
+    if (Build.VERSION.SDK_INT >= 23) {
+      return audioTrack.getBufferSizeInFrames();
+    }
+    return -1;
+  }
+
   private void logBufferCapacityInFrames() {
     if (Build.VERSION.SDK_INT >= 24) {
       Logging.d(TAG,
@@ -423,6 +423,14 @@ class WebRtcAudioTrack {
     }
   }

+  @CalledByNative
+  private int getBufferSizeInFrames() {
+    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+      return audioTrack.getBufferSizeInFrames();
+    }
+    return -1;
+  }
+
   private void logBufferCapacityInFrames() {
     if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
       Logging.d(TAG,
@@ -20,6 +20,7 @@
 #include "sdk/android/generated_java_audio_device_module_native_jni/WebRtcAudioTrack_jni.h"
 #include "sdk/android/src/jni/jni_helpers.h"
 #include "system_wrappers/include/field_trial.h"
+#include "system_wrappers/include/metrics.h"

 namespace webrtc {

@@ -89,12 +90,33 @@ int32_t AudioTrackJni::InitPlayout() {
       nullptr);
   if (buffer_size_factor == 0)
     buffer_size_factor = 1.0;
-  if (!Java_WebRtcAudioTrack_initPlayout(
-          env_, j_audio_track_, audio_parameters_.sample_rate(),
-          static_cast<int>(audio_parameters_.channels()), buffer_size_factor)) {
+  int requested_buffer_size_bytes = Java_WebRtcAudioTrack_initPlayout(
+      env_, j_audio_track_, audio_parameters_.sample_rate(),
+      static_cast<int>(audio_parameters_.channels()), buffer_size_factor);
+  if (requested_buffer_size_bytes < 0) {
     RTC_LOG(LS_ERROR) << "InitPlayout failed";
     return -1;
   }
+  // Update UMA histograms for both the requested and actual buffer size.
+  // To avoid division by zero, we assume the sample rate is 48k if an invalid
+  // value is found.
+  const int sample_rate = audio_parameters_.sample_rate() <= 0
+                              ? 48000
+                              : audio_parameters_.sample_rate();
+  // This calculation assumes that audio is mono.
+  const int requested_buffer_size_ms =
+      (requested_buffer_size_bytes * 1000) / (2 * sample_rate);
+  RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeRequestedAudioBufferSizeMs",
+                       requested_buffer_size_ms, 0, 1000, 100);
+  int actual_buffer_size_frames =
+      Java_WebRtcAudioTrack_getBufferSizeInFrames(env_, j_audio_track_);
+  if (actual_buffer_size_frames >= 0) {
+    const int actual_buffer_size_ms =
+        actual_buffer_size_frames * 1000 / sample_rate;
+    RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeAudioBufferSizeMs",
+                         actual_buffer_size_ms, 0, 1000, 100);
+  }
+
   initialized_ = true;
   return 0;
 }
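A hedged sketch, not part of this CL, of how a unit test might confirm that the new histogram receives a sample once playout is initialized. It assumes the aggregation helpers declared in system_wrappers/include/metrics.h (metrics::Enable, metrics::Reset, metrics::NumSamples) and WebRTC's test/gtest.h wrapper; the test name and the omitted setup are hypothetical.

#include "system_wrappers/include/metrics.h"
#include "test/gtest.h"

TEST(AudioTrackJniHistogramSketch, RecordsActualBufferSizeMs) {
  webrtc::metrics::Enable();  // Turn on in-process histogram aggregation.
  webrtc::metrics::Reset();   // Start from a clean slate.

  // ... construct an AudioTrackJni and call InitPlayout() here (omitted) ...

  // After a successful InitPlayout(), one sample is expected in the new
  // histogram, alongside the pre-existing "Requested" histogram.
  EXPECT_EQ(1, webrtc::metrics::NumSamples(
                   "WebRTC.Audio.AndroidNativeAudioBufferSizeMs"));
}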