diff --git a/modules/audio_device/android/audio_track_jni.cc b/modules/audio_device/android/audio_track_jni.cc
index 776f0cfd70..6dd75ddd67 100644
--- a/modules/audio_device/android/audio_track_jni.cc
+++ b/modules/audio_device/android/audio_track_jni.cc
@@ -19,6 +19,7 @@
 #include "rtc_base/logging.h"
 #include "rtc_base/platform_thread.h"
 #include "system_wrappers/include/field_trial.h"
+#include "system_wrappers/include/metrics.h"
 
 namespace webrtc {
 
@@ -27,7 +28,7 @@ AudioTrackJni::JavaAudioTrack::JavaAudioTrack(
     NativeRegistration* native_reg,
     std::unique_ptr<GlobalRef> audio_track)
     : audio_track_(std::move(audio_track)),
-      init_playout_(native_reg->GetMethodId("initPlayout", "(IID)Z")),
+      init_playout_(native_reg->GetMethodId("initPlayout", "(IID)I")),
       start_playout_(native_reg->GetMethodId("startPlayout", "()Z")),
       stop_playout_(native_reg->GetMethodId("stopPlayout", "()Z")),
       set_stream_volume_(native_reg->GetMethodId("setStreamVolume", "(I)Z")),
@@ -45,8 +46,18 @@ bool AudioTrackJni::JavaAudioTrack::InitPlayout(int sample_rate, int channels) {
              nullptr);
   if (buffer_size_factor == 0)
     buffer_size_factor = 1.0;
-  return audio_track_->CallBooleanMethod(init_playout_, sample_rate, channels,
-                                         buffer_size_factor);
+  int buffer_size_bytes = audio_track_->CallIntMethod(
+      init_playout_, sample_rate, channels, buffer_size_factor);
+  if (buffer_size_bytes != -1) {
+    // To avoid division by zero, we assume the sample rate is 48k if an invalid
+    // value is found.
+    sample_rate = sample_rate <= 0 ? 48000 : sample_rate;
+    const int buffer_size_ms = (buffer_size_bytes * 1000) / (2 * sample_rate);
+    RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeRequestedAudioBufferSizeMs",
+                         buffer_size_ms, 0, 1000, 100);
+    return true;
+  }
+  return false;
 }
 
 bool AudioTrackJni::JavaAudioTrack::StartPlayout() {
diff --git a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
index 3023c99fa2..1973657450 100644
--- a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
+++ b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
@@ -215,7 +215,7 @@ public class WebRtcAudioTrack {
     }
   }
 
-  private boolean initPlayout(int sampleRate, int channels, double bufferSizeFactor) {
+  private int initPlayout(int sampleRate, int channels, double bufferSizeFactor) {
     threadChecker.checkIsOnValidThread();
     Logging.d(TAG,
         "initPlayout(sampleRate=" + sampleRate + ", channels=" + channels
@@ -244,14 +244,14 @@ public class WebRtcAudioTrack {
     // can happen that |minBufferSizeInBytes| contains an invalid value.
     if (minBufferSizeInBytes < byteBuffer.capacity()) {
       reportWebRtcAudioTrackInitError("AudioTrack.getMinBufferSize returns an invalid value.");
-      return false;
+      return -1;
     }
 
     // Ensure that prevision audio session was stopped correctly before trying
     // to create a new AudioTrack.
     if (audioTrack != null) {
       reportWebRtcAudioTrackInitError("Conflict with existing AudioTrack.");
-      return false;
+      return -1;
     }
     try {
       // Create an AudioTrack object and initialize its associated audio buffer.
@@ -273,7 +273,7 @@ public class WebRtcAudioTrack {
     } catch (IllegalArgumentException e) {
       reportWebRtcAudioTrackInitError(e.getMessage());
       releaseAudioResources();
-      return false;
+      return -1;
     }
 
     // It can happen that an AudioTrack is created but it was not successfully
@@ -282,11 +282,11 @@ public class WebRtcAudioTrack {
     if (audioTrack == null || audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
       reportWebRtcAudioTrackInitError("Initialization of audio track failed.");
       releaseAudioResources();
-      return false;
+      return -1;
     }
     logMainParameters();
     logMainParametersExtended();
-    return true;
+    return minBufferSizeInBytes;
   }
 
   private boolean startPlayout() {
diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
index edc9dd179d..07debc3aae 100644
--- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
+++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
@@ -183,7 +183,7 @@ class WebRtcAudioTrack {
   }
 
   @CalledByNative
-  private boolean initPlayout(int sampleRate, int channels, double bufferSizeFactor) {
+  private int initPlayout(int sampleRate, int channels, double bufferSizeFactor) {
     threadChecker.checkIsOnValidThread();
     Logging.d(TAG,
         "initPlayout(sampleRate=" + sampleRate + ", channels=" + channels
@@ -212,14 +212,14 @@ class WebRtcAudioTrack {
     // can happen that |minBufferSizeInBytes| contains an invalid value.
     if (minBufferSizeInBytes < byteBuffer.capacity()) {
       reportWebRtcAudioTrackInitError("AudioTrack.getMinBufferSize returns an invalid value.");
-      return false;
+      return -1;
     }
 
     // Ensure that prevision audio session was stopped correctly before trying
     // to create a new AudioTrack.
     if (audioTrack != null) {
       reportWebRtcAudioTrackInitError("Conflict with existing AudioTrack.");
-      return false;
+      return -1;
    }
     try {
       // Create an AudioTrack object and initialize its associated audio buffer.
@@ -241,7 +241,7 @@ class WebRtcAudioTrack {
     } catch (IllegalArgumentException e) {
       reportWebRtcAudioTrackInitError(e.getMessage());
       releaseAudioResources();
-      return false;
+      return -1;
     }
 
     // It can happen that an AudioTrack is created but it was not successfully
@@ -250,11 +250,11 @@ class WebRtcAudioTrack {
     if (audioTrack == null || audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
       reportWebRtcAudioTrackInitError("Initialization of audio track failed.");
       releaseAudioResources();
-      return false;
+      return -1;
     }
     logMainParameters();
     logMainParametersExtended();
-    return true;
+    return minBufferSizeInBytes;
   }
 
   @CalledByNative
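Note on the patch above: the JNI signature string changes from "(IID)Z" (two ints and a double, returning boolean) to "(IID)I" (returning int), so the Java initPlayout() can hand back the requested buffer size in bytes, with -1 signalling failure. The native side then converts that byte count to milliseconds before reporting the histogram; the division by 2 * sample_rate assumes 2 bytes per frame, i.e. 16-bit mono PCM. The following is an illustrative, standalone sketch of that conversion only; the helper name BufferSizeBytesToMs and the example values are not part of the patch.

// Illustrative sketch: mirrors the bytes-to-milliseconds conversion added in
// AudioTrackJni::JavaAudioTrack::InitPlayout() above. Assumes 16-bit mono PCM
// (2 bytes per frame) and, like the patch, falls back to 48 kHz for invalid
// sample rates to avoid division by zero.
#include <iostream>

int BufferSizeBytesToMs(int buffer_size_bytes, int sample_rate) {
  if (sample_rate <= 0)
    sample_rate = 48000;  // Fallback used by the patch for invalid rates.
  return (buffer_size_bytes * 1000) / (2 * sample_rate);
}

int main() {
  // Example: a 3840-byte buffer at 48 kHz is 1920 samples, i.e. 40 ms.
  std::cout << BufferSizeBytesToMs(3840, 48000) << " ms\n";
  return 0;
}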