Add UMA histogram for native audio buffer size in ms

The Android native audio code asks the OS to provide an appropriate
buffer size for real-time audio playout. We should add logging for this
value so we can see what values are used in practice.

Bug: b/157429867
Change-Id: I111a74faefc0e77b5c98921804d6625cba1b84af
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/176126
Reviewed-by: Henrik Andreasson <henrika@webrtc.org>
Reviewed-by: Henrik Andreasson <henrika@chromium.org>
Commit-Queue: Ivo Creusen <ivoc@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#31368}
This commit is contained in:
Ivo Creusen
2020-05-27 13:41:25 +02:00
committed by Commit Bot
parent 63673fe2cc
commit bdb5830d69
3 changed files with 26 additions and 15 deletions

View File

@ -19,6 +19,7 @@
#include "rtc_base/logging.h"
#include "rtc_base/platform_thread.h"
#include "system_wrappers/include/field_trial.h"
#include "system_wrappers/include/metrics.h"
namespace webrtc {
@ -27,7 +28,7 @@ AudioTrackJni::JavaAudioTrack::JavaAudioTrack(
NativeRegistration* native_reg,
std::unique_ptr<GlobalRef> audio_track)
: audio_track_(std::move(audio_track)),
init_playout_(native_reg->GetMethodId("initPlayout", "(IID)Z")),
init_playout_(native_reg->GetMethodId("initPlayout", "(IID)I")),
start_playout_(native_reg->GetMethodId("startPlayout", "()Z")),
stop_playout_(native_reg->GetMethodId("stopPlayout", "()Z")),
set_stream_volume_(native_reg->GetMethodId("setStreamVolume", "(I)Z")),
@ -45,8 +46,18 @@ bool AudioTrackJni::JavaAudioTrack::InitPlayout(int sample_rate, int channels) {
nullptr);
if (buffer_size_factor == 0)
buffer_size_factor = 1.0;
return audio_track_->CallBooleanMethod(init_playout_, sample_rate, channels,
buffer_size_factor);
int buffer_size_bytes = audio_track_->CallIntMethod(
init_playout_, sample_rate, channels, buffer_size_factor);
if (buffer_size_bytes != -1) {
// To avoid division by zero, we assume the sample rate is 48k if an invalid
// value is found.
sample_rate = sample_rate <= 0 ? 48000 : sample_rate;
const int buffer_size_ms = (buffer_size_bytes * 1000) / (2 * sample_rate);
RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeRequestedAudioBufferSizeMs",
buffer_size_ms, 0, 1000, 100);
return true;
}
return false;
}
bool AudioTrackJni::JavaAudioTrack::StartPlayout() {

View File

@ -215,7 +215,7 @@ public class WebRtcAudioTrack {
}
}
private boolean initPlayout(int sampleRate, int channels, double bufferSizeFactor) {
private int initPlayout(int sampleRate, int channels, double bufferSizeFactor) {
threadChecker.checkIsOnValidThread();
Logging.d(TAG,
"initPlayout(sampleRate=" + sampleRate + ", channels=" + channels
@ -244,14 +244,14 @@ public class WebRtcAudioTrack {
// can happen that |minBufferSizeInBytes| contains an invalid value.
if (minBufferSizeInBytes < byteBuffer.capacity()) {
reportWebRtcAudioTrackInitError("AudioTrack.getMinBufferSize returns an invalid value.");
return false;
return -1;
}
// Ensure that previous audio session was stopped correctly before trying
// to create a new AudioTrack.
if (audioTrack != null) {
reportWebRtcAudioTrackInitError("Conflict with existing AudioTrack.");
return false;
return -1;
}
try {
// Create an AudioTrack object and initialize its associated audio buffer.
@ -273,7 +273,7 @@ public class WebRtcAudioTrack {
} catch (IllegalArgumentException e) {
reportWebRtcAudioTrackInitError(e.getMessage());
releaseAudioResources();
return false;
return -1;
}
// It can happen that an AudioTrack is created but it was not successfully
@ -282,11 +282,11 @@ public class WebRtcAudioTrack {
if (audioTrack == null || audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
reportWebRtcAudioTrackInitError("Initialization of audio track failed.");
releaseAudioResources();
return false;
return -1;
}
logMainParameters();
logMainParametersExtended();
return true;
return minBufferSizeInBytes;
}
private boolean startPlayout() {

View File

@ -183,7 +183,7 @@ class WebRtcAudioTrack {
}
@CalledByNative
private boolean initPlayout(int sampleRate, int channels, double bufferSizeFactor) {
private int initPlayout(int sampleRate, int channels, double bufferSizeFactor) {
threadChecker.checkIsOnValidThread();
Logging.d(TAG,
"initPlayout(sampleRate=" + sampleRate + ", channels=" + channels
@ -212,14 +212,14 @@ class WebRtcAudioTrack {
// can happen that |minBufferSizeInBytes| contains an invalid value.
if (minBufferSizeInBytes < byteBuffer.capacity()) {
reportWebRtcAudioTrackInitError("AudioTrack.getMinBufferSize returns an invalid value.");
return false;
return -1;
}
// Ensure that previous audio session was stopped correctly before trying
// to create a new AudioTrack.
if (audioTrack != null) {
reportWebRtcAudioTrackInitError("Conflict with existing AudioTrack.");
return false;
return -1;
}
try {
// Create an AudioTrack object and initialize its associated audio buffer.
@ -241,7 +241,7 @@ class WebRtcAudioTrack {
} catch (IllegalArgumentException e) {
reportWebRtcAudioTrackInitError(e.getMessage());
releaseAudioResources();
return false;
return -1;
}
// It can happen that an AudioTrack is created but it was not successfully
@ -250,11 +250,11 @@ class WebRtcAudioTrack {
if (audioTrack == null || audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
reportWebRtcAudioTrackInitError("Initialization of audio track failed.");
releaseAudioResources();
return false;
return -1;
}
logMainParameters();
logMainParametersExtended();
return true;
return minBufferSizeInBytes;
}
@CalledByNative