Use backticks not vertical bars to denote variables in comments for /sdk

Bug: webrtc:12338
Change-Id: Ifaad29ccb63b0f2f3aeefb77dae061ebc7f87e6c
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/227024
Reviewed-by: Harald Alvestrand <hta@webrtc.org>
Commit-Queue: Artem Titov <titovartem@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#34561}
commit d7ac581045
parent f0671921a1
Author: Artem Titov
Commit: WebRTC LUCI CQ
Date: 2021-07-27 12:23:39 +02:00

87 changed files with 235 additions and 235 deletions

--- a/sdk/objc/native/src/audio/audio_device_ios.h
+++ b/sdk/objc/native/src/audio/audio_device_ios.h

@@ -164,7 +164,7 @@ class AudioDeviceIOS : public AudioDeviceGeneric,
   bool IsInterrupted();

  private:
-  // Called by the relevant AudioSessionObserver methods on |thread_|.
+  // Called by the relevant AudioSessionObserver methods on `thread_`.
   void HandleInterruptionBegin();
   void HandleInterruptionEnd();
   void HandleValidRouteChange();
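These handlers run on `thread_`, while the underlying AVAudioSession notifications can arrive on any thread, so each observer callback has to marshal over. A minimal sketch of that hand-off, assuming `thread_` exposes a PostTask-style API; `OnInterruptionBegin` is a stand-in name for the observer entry point:

    // Sketch only: hop onto `thread_` before touching any state that
    // the handlers below expect to own.
    void AudioDeviceIOS::OnInterruptionBegin() {
      thread_->PostTask([this] { HandleInterruptionBegin(); });
    }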
@@ -173,7 +173,7 @@ class AudioDeviceIOS : public AudioDeviceGeneric,
   void HandlePlayoutGlitchDetected();
   void HandleOutputVolumeChange();

-  // Uses current |playout_parameters_| and |record_parameters_| to inform the
+  // Uses current `playout_parameters_` and `record_parameters_` to inform the
   // audio device buffer (ADB) about our internal audio parameters.
   void UpdateAudioDeviceBuffer();
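Given the AudioDeviceBuffer setters in modules/audio_device, a plausible sketch of what "inform the ADB" amounts to (assumed shape, not necessarily the shipped body):

    void AudioDeviceIOS::UpdateAudioDeviceBuffer() {
      // Hand the ADB our current rates and channel counts so it can
      // size its internal buffers accordingly.
      audio_device_buffer_->SetPlayoutSampleRate(playout_parameters_.sample_rate());
      audio_device_buffer_->SetPlayoutChannels(playout_parameters_.channels());
      audio_device_buffer_->SetRecordingSampleRate(record_parameters_.sample_rate());
      audio_device_buffer_->SetRecordingChannels(record_parameters_.channels());
    }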
@@ -181,7 +181,7 @@ class AudioDeviceIOS : public AudioDeviceGeneric,
   // values may be different once the AVAudioSession has been activated.
   // This method asks for the current hardware parameters and takes actions
   // if they should differ from what we have asked for initially. It also
-  // defines |playout_parameters_| and |record_parameters_|.
+  // defines `playout_parameters_` and `record_parameters_`.
   void SetupAudioBuffersForActiveAudioSession();

   // Creates the audio unit.
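Asking for the current hardware parameters boils down to reading the now-active AVAudioSession. A hedged sketch, assuming the `AudioParameters::reset()` overload that takes frames per buffer:

    AVAudioSession* session = [AVAudioSession sharedInstance];
    // The session may have granted a different rate or I/O duration
    // than what we originally asked for.
    double sample_rate = session.sampleRate;                // e.g. 48000.0
    NSTimeInterval io_duration = session.IOBufferDuration;  // e.g. 0.02 s
    size_t frames_per_buffer =
        static_cast<size_t>(sample_rate * io_duration + 0.5);
    playout_parameters_.reset(sample_rate, playout_parameters_.channels(),
                              frames_per_buffer);
    record_parameters_.reset(sample_rate, record_parameters_.channels(),
                             frames_per_buffer);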

--- a/sdk/objc/native/src/audio/audio_device_ios.mm
+++ b/sdk/objc/native/src/audio/audio_device_ios.mm

@@ -386,7 +386,7 @@ OSStatus AudioDeviceIOS::OnDeliverRecordedData(AudioUnitRenderActionFlags* flags
   // Allocate AudioBuffers to be used as storage for the received audio.
   // The AudioBufferList structure works as a placeholder for the
   // AudioBuffer structure, which holds a pointer to the actual data buffer
-  // in |record_audio_buffer_|. Recorded audio will be rendered into this memory
+  // in `record_audio_buffer_`. Recorded audio will be rendered into this memory
   // at each input callback when calling AudioUnitRender().
   AudioBufferList audio_buffer_list;
   audio_buffer_list.mNumberBuffers = 1;
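The diff context elides the middle of this setup; the complete pattern is roughly the following (a sketch; `record_audio_buffer_` is assumed to hold 16-bit samples):

    AudioBufferList audio_buffer_list;
    audio_buffer_list.mNumberBuffers = 1;
    // One interleaved buffer, backed by our own storage instead of
    // memory owned by the audio unit.
    AudioBuffer* audio_buffer = &audio_buffer_list.mBuffers[0];
    audio_buffer->mNumberChannels = record_parameters_.channels();
    audio_buffer->mDataByteSize = record_audio_buffer_.size() * sizeof(int16_t);
    audio_buffer->mData = reinterpret_cast<int8_t*>(record_audio_buffer_.data());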
@@ -397,7 +397,7 @@ OSStatus AudioDeviceIOS::OnDeliverRecordedData(AudioUnitRenderActionFlags* flags
   audio_buffer->mData = reinterpret_cast<int8_t*>(record_audio_buffer_.data());

   // Obtain the recorded audio samples by initiating a rendering cycle.
-  // Since it happens on the input bus, the |io_data| parameter is a reference
+  // Since it happens on the input bus, the `io_data` parameter is a reference
   // to the preallocated audio buffer list that the audio unit renders into.
   // We can make the audio unit provide a buffer instead in io_data, but we
   // currently just use our own.
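The rendering cycle itself is the standard CoreAudio pull on the input element; a sketch, with `audio_unit` standing in for however the class reaches its underlying AudioUnit:

    // Pull `num_frames` of recorded audio into our preallocated list.
    // Element 1 is the input (microphone) bus on an I/O unit.
    const UInt32 input_bus = 1;
    OSStatus result = AudioUnitRender(audio_unit, flags, time_stamp,
                                      input_bus, num_frames, &audio_buffer_list);
    if (result != noErr) {
      RTC_LOG(LS_ERROR) << "AudioUnitRender failed: " << result;
    }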
@@ -467,7 +467,7 @@ OSStatus AudioDeviceIOS::OnGetPlayoutData(AudioUnitRenderActionFlags* flags,
   // Read decoded 16-bit PCM samples from WebRTC (using a size that matches
   // the native I/O audio unit) and copy the result to the audio buffer in the
-  // |io_data| destination.
+  // `io_data` destination.
   fine_audio_buffer_->GetPlayoutData(
       rtc::ArrayView<int16_t>(static_cast<int16_t*>(audio_buffer->mData), num_frames),
       kFixedPlayoutDelayEstimate);
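For the playout direction no explicit render call is needed: `io_data` arrives preallocated from the audio unit, and the elided line just above the call picks out the buffer to fill, roughly:

    // `io_data` belongs to the output bus; its first buffer is the
    // memory we must fill with 16-bit PCM.
    AudioBuffer* audio_buffer = &io_data->mBuffers[0];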