Replace rtc::Optional with absl::optional

This is a no-op change because rtc::Optional is an alias for absl::optional.
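
For context, api/optional.h at this point is little more than a forwarding header onto Abseil, roughly along these lines (a simplified sketch for illustration only; the real header may differ in detail):

#include "absl/types/optional.h"

namespace rtc {

// Simplified sketch: rtc::Optional forwards directly to absl::optional,
// and rtc::nullopt names the same constant as absl::nullopt.
template <typename T>
using Optional = absl::optional<T>;
using absl::nullopt;
using absl::nullopt_t;

}  // namespace rtc

Because of this, the textual rename below changes spelling only, not behavior.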

This CL was generated by running the following script over all top-level directories except rtc_base and api:

find $@ -type f \( -name \*.h -o -name \*.cc -o -name \*.mm \) \
-exec sed -i 's|rtc::Optional|absl::optional|g' {} \+ \
-exec sed -i 's|rtc::nullopt|absl::nullopt|g' {} \+ \
-exec sed -i 's|#include "api/optional.h"|#include "absl/types/optional.h"|' {} \+

find $@ -type f -name BUILD.gn \
-exec sed -r -i 's|"[\./api]*:optional"|"//third_party/abseil-cpp/absl/types:optional"|' {} \+;

git cl format
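
For illustration, a rewritten call site ends up reading like this (hypothetical example; ParseQp and QpOrDefault are made-up names, not code touched by this CL):

#include "absl/types/optional.h"

// Hypothetical helper: fails with absl::nullopt (previously rtc::nullopt).
absl::optional<int> ParseQp(int raw) {
  if (raw < 0)
    return absl::nullopt;
  return raw;  // Implicitly wraps the value in absl::optional<int>.
}

int QpOrDefault(int raw) {
  // Previously spelled rtc::Optional<int>; same type, new spelling.
  absl::optional<int> qp = ParseQp(raw);
  return qp.value_or(0);
}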

Bug: webrtc:9078
Change-Id: I9465c172e65ba6e6ed4e4fdc35b0b265038d6f71
Reviewed-on: https://webrtc-review.googlesource.com/84584
Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
Commit-Queue: Danil Chapovalov <danilchap@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#23697}
Author: Danil Chapovalov
Date: 2018-06-21 10:17:24 +02:00
Committed by: Commit Bot
Parent: ae810c10b4
Commit: 196100efa6
71 changed files with 189 additions and 188 deletions

View File

@@ -124,7 +124,7 @@ class MediaCodecVideoDecoder : public VideoDecoder, public rtc::MessageHandler {
int current_delay_time_ms_; // Overall delay time in the current second.
int32_t max_pending_frames_; // Maximum number of pending input frames.
H264BitstreamParser h264_bitstream_parser_;
- std::deque<rtc::Optional<uint8_t>> pending_frame_qps_;
+ std::deque<absl::optional<uint8_t>> pending_frame_qps_;
// State that is constant for the lifetime of this object once the ctor
// returns.
@@ -506,7 +506,7 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
// Save input image timestamps for later output.
frames_received_++;
current_bytes_ += inputImage._length;
- rtc::Optional<uint8_t> qp;
+ absl::optional<uint8_t> qp;
if (codecType_ == kVideoCodecVP8) {
int qp_int;
if (vp8::GetQp(inputImage._buffer, inputImage._length, &qp_int)) {
@@ -743,7 +743,7 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(JNIEnv* jni,
decoded_frame.set_timestamp(output_timestamps_ms);
decoded_frame.set_ntp_time_ms(output_ntp_timestamps_ms);
- rtc::Optional<uint8_t> qp = pending_frame_qps_.front();
+ absl::optional<uint8_t> qp = pending_frame_qps_.front();
pending_frame_qps_.pop_front();
callback_->Decoded(decoded_frame, decode_time_ms, qp);
}

View File

@@ -347,7 +347,7 @@ int32_t MediaCodecVideoEncoder::InitEncode(const VideoCodec* codec_settings,
// Check allowed H.264 profile
profile_ = H264::Profile::kProfileBaseline;
if (codec_type == kVideoCodecH264) {
- const rtc::Optional<H264::ProfileLevelId> profile_level_id =
+ const absl::optional<H264::ProfileLevelId> profile_level_id =
H264::ParseSdpProfileLevelId(codec_.params);
RTC_DCHECK(profile_level_id);
profile_ = profile_level_id->profile;

View File

@@ -38,7 +38,7 @@ bool AndroidVideoTrackSource::is_screencast() const {
return is_screencast_;
}
- rtc::Optional<bool> AndroidVideoTrackSource::needs_denoising() const {
+ absl::optional<bool> AndroidVideoTrackSource::needs_denoising() const {
return false;
}

View File

@@ -37,7 +37,7 @@ class AndroidVideoTrackSource : public rtc::AdaptedVideoTrackSource {
// Indicates that the encoder should denoise video before encoding it.
// If it is not set, the default configuration is used which is different
// depending on video codec.
- rtc::Optional<bool> needs_denoising() const override;
+ absl::optional<bool> needs_denoising() const override;
// Called by the native capture observer
void SetState(SourceState state);

View File

@@ -135,16 +135,16 @@ int AAudioPlayer::SetSpeakerVolume(uint32_t volume) {
return -1;
}
- rtc::Optional<uint32_t> AAudioPlayer::SpeakerVolume() const {
- return rtc::nullopt;
+ absl::optional<uint32_t> AAudioPlayer::SpeakerVolume() const {
+ return absl::nullopt;
}
- rtc::Optional<uint32_t> AAudioPlayer::MaxSpeakerVolume() const {
- return rtc::nullopt;
+ absl::optional<uint32_t> AAudioPlayer::MaxSpeakerVolume() const {
+ return absl::nullopt;
}
- rtc::Optional<uint32_t> AAudioPlayer::MinSpeakerVolume() const {
- return rtc::nullopt;
+ absl::optional<uint32_t> AAudioPlayer::MinSpeakerVolume() const {
+ return absl::nullopt;
}
void AAudioPlayer::OnErrorCallback(aaudio_result_t error) {

View File

@@ -14,7 +14,7 @@
#include <aaudio/AAudio.h>
#include <memory>
- #include "api/optional.h"
+ #include "absl/types/optional.h"
#include "modules/audio_device/audio_device_buffer.h"
#include "modules/audio_device/include/audio_device_defines.h"
#include "rtc_base/messagehandler.h"
@@ -73,9 +73,9 @@ class AAudioPlayer final : public AudioOutput,
// Not implemented in AAudio.
bool SpeakerVolumeIsAvailable() override;
int SetSpeakerVolume(uint32_t volume) override;
- rtc::Optional<uint32_t> SpeakerVolume() const override;
- rtc::Optional<uint32_t> MaxSpeakerVolume() const override;
- rtc::Optional<uint32_t> MinSpeakerVolume() const override;
+ absl::optional<uint32_t> SpeakerVolume() const override;
+ absl::optional<uint32_t> MaxSpeakerVolume() const override;
+ absl::optional<uint32_t> MinSpeakerVolume() const override;
protected:
// AAudioObserverInterface implementation.

View File

@@ -341,7 +341,7 @@ class AndroidAudioDeviceModule : public AudioDeviceModule {
RTC_LOG(INFO) << __FUNCTION__;
if (!initialized_)
return -1;
- rtc::Optional<uint32_t> volume = output_->SpeakerVolume();
+ absl::optional<uint32_t> volume = output_->SpeakerVolume();
if (!volume)
return -1;
*output_volume = *volume;
@@ -353,7 +353,7 @@ class AndroidAudioDeviceModule : public AudioDeviceModule {
RTC_LOG(INFO) << __FUNCTION__;
if (!initialized_)
return -1;
- rtc::Optional<uint32_t> max_volume = output_->MaxSpeakerVolume();
+ absl::optional<uint32_t> max_volume = output_->MaxSpeakerVolume();
if (!max_volume)
return -1;
*output_max_volume = *max_volume;
@@ -364,7 +364,7 @@ class AndroidAudioDeviceModule : public AudioDeviceModule {
RTC_LOG(INFO) << __FUNCTION__;
if (!initialized_)
return -1;
- rtc::Optional<uint32_t> min_volume = output_->MinSpeakerVolume();
+ absl::optional<uint32_t> min_volume = output_->MinSpeakerVolume();
if (!min_volume)
return -1;
*output_min_volume = *min_volume;

View File

@@ -13,7 +13,7 @@
#include <memory>
- #include "api/optional.h"
+ #include "absl/types/optional.h"
#include "modules/audio_device/audio_device_buffer.h"
#include "sdk/android/native_api/jni/scoped_java_ref.h"
@@ -58,9 +58,9 @@ class AudioOutput {
virtual bool Playing() const = 0;
virtual bool SpeakerVolumeIsAvailable() = 0;
virtual int SetSpeakerVolume(uint32_t volume) = 0;
- virtual rtc::Optional<uint32_t> SpeakerVolume() const = 0;
- virtual rtc::Optional<uint32_t> MaxSpeakerVolume() const = 0;
- virtual rtc::Optional<uint32_t> MinSpeakerVolume() const = 0;
+ virtual absl::optional<uint32_t> SpeakerVolume() const = 0;
+ virtual absl::optional<uint32_t> MaxSpeakerVolume() const = 0;
+ virtual absl::optional<uint32_t> MinSpeakerVolume() const = 0;
virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) = 0;
};

View File

@@ -144,17 +144,17 @@ int AudioTrackJni::SetSpeakerVolume(uint32_t volume) {
: -1;
}
- rtc::Optional<uint32_t> AudioTrackJni::MaxSpeakerVolume() const {
+ absl::optional<uint32_t> AudioTrackJni::MaxSpeakerVolume() const {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
return Java_WebRtcAudioTrack_getStreamMaxVolume(env_, j_audio_track_);
}
- rtc::Optional<uint32_t> AudioTrackJni::MinSpeakerVolume() const {
+ absl::optional<uint32_t> AudioTrackJni::MinSpeakerVolume() const {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
return 0;
}
- rtc::Optional<uint32_t> AudioTrackJni::SpeakerVolume() const {
+ absl::optional<uint32_t> AudioTrackJni::SpeakerVolume() const {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
const uint32_t volume =
Java_WebRtcAudioTrack_getStreamVolume(env_, j_audio_track_);

View File

@@ -14,7 +14,7 @@
#include <jni.h>
#include <memory>
- #include "api/optional.h"
+ #include "absl/types/optional.h"
#include "modules/audio_device/audio_device_buffer.h"
#include "modules/audio_device/include/audio_device_defines.h"
#include "rtc_base/thread_checker.h"
@@ -62,9 +62,9 @@ class AudioTrackJni : public AudioOutput {
bool SpeakerVolumeIsAvailable() override;
int SetSpeakerVolume(uint32_t volume) override;
- rtc::Optional<uint32_t> SpeakerVolume() const override;
- rtc::Optional<uint32_t> MaxSpeakerVolume() const override;
- rtc::Optional<uint32_t> MinSpeakerVolume() const override;
+ absl::optional<uint32_t> SpeakerVolume() const override;
+ absl::optional<uint32_t> MaxSpeakerVolume() const override;
+ absl::optional<uint32_t> MinSpeakerVolume() const override;
void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override;

View File

@@ -182,16 +182,16 @@ int OpenSLESPlayer::SetSpeakerVolume(uint32_t volume) {
return -1;
}
- rtc::Optional<uint32_t> OpenSLESPlayer::SpeakerVolume() const {
- return rtc::nullopt;
+ absl::optional<uint32_t> OpenSLESPlayer::SpeakerVolume() const {
+ return absl::nullopt;
}
- rtc::Optional<uint32_t> OpenSLESPlayer::MaxSpeakerVolume() const {
- return rtc::nullopt;
+ absl::optional<uint32_t> OpenSLESPlayer::MaxSpeakerVolume() const {
+ return absl::nullopt;
}
- rtc::Optional<uint32_t> OpenSLESPlayer::MinSpeakerVolume() const {
- return rtc::nullopt;
+ absl::optional<uint32_t> OpenSLESPlayer::MinSpeakerVolume() const {
+ return absl::nullopt;
}
void OpenSLESPlayer::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {

View File

@@ -16,7 +16,7 @@
#include <SLES/OpenSLES_AndroidConfiguration.h>
#include <memory>
- #include "api/optional.h"
+ #include "absl/types/optional.h"
#include "modules/audio_device/audio_device_buffer.h"
#include "modules/audio_device/fine_audio_buffer.h"
#include "modules/audio_device/include/audio_device_defines.h"
@@ -75,9 +75,9 @@ class OpenSLESPlayer : public AudioOutput {
bool SpeakerVolumeIsAvailable() override;
int SetSpeakerVolume(uint32_t volume) override;
- rtc::Optional<uint32_t> SpeakerVolume() const override;
- rtc::Optional<uint32_t> MaxSpeakerVolume() const override;
- rtc::Optional<uint32_t> MinSpeakerVolume() const override;
+ absl::optional<uint32_t> SpeakerVolume() const override;
+ absl::optional<uint32_t> MaxSpeakerVolume() const override;
+ absl::optional<uint32_t> MinSpeakerVolume() const override;
void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override;

View File

@@ -207,13 +207,13 @@ PeerConnectionInterface::TlsCertPolicy JavaToNativeTlsCertPolicy(
return PeerConnectionInterface::kTlsCertPolicySecure;
}
- rtc::Optional<rtc::AdapterType> JavaToNativeNetworkPreference(
+ absl::optional<rtc::AdapterType> JavaToNativeNetworkPreference(
JNIEnv* jni,
const JavaRef<jobject>& j_network_preference) {
std::string enum_name = GetJavaEnumName(jni, j_network_preference);
if (enum_name == "UNKNOWN")
- return rtc::nullopt;
+ return absl::nullopt;
if (enum_name == "ETHERNET")
return rtc::ADAPTER_TYPE_ETHERNET;
@@ -231,7 +231,7 @@ rtc::Optional<rtc::AdapterType> JavaToNativeNetworkPreference(
return rtc::ADAPTER_TYPE_LOOPBACK;
RTC_CHECK(false) << "Unexpected NetworkPreference enum_name " << enum_name;
- return rtc::nullopt;
+ return absl::nullopt;
}
} // namespace jni

View File

@@ -75,7 +75,7 @@ PeerConnectionInterface::TlsCertPolicy JavaToNativeTlsCertPolicy(
JNIEnv* jni,
const JavaRef<jobject>& j_ice_server_tls_cert_policy);
- rtc::Optional<rtc::AdapterType> JavaToNativeNetworkPreference(
+ absl::optional<rtc::AdapterType> JavaToNativeNetworkPreference(
JNIEnv* jni,
const JavaRef<jobject>& j_network_preference);

View File

@@ -428,7 +428,7 @@ static jlong JNI_PeerConnectionFactory_CreatePeerConnection(
if (key_type != rtc::KT_DEFAULT) {
rtc::scoped_refptr<rtc::RTCCertificate> certificate =
rtc::RTCCertificateGenerator::GenerateCertificate(
- rtc::KeyParams(key_type), rtc::nullopt);
+ rtc::KeyParams(key_type), absl::nullopt);
if (!certificate) {
RTC_LOG(LS_ERROR) << "Failed to generate certificate. KeyType: "
<< key_type;

View File

@@ -89,7 +89,7 @@ ScopedJavaLocalRef<jstring> JNI_RtpTransceiver_GetMid(
JNIEnv* jni,
const base::android::JavaParamRef<jclass>&,
jlong j_rtp_transceiver_pointer) {
- rtc::Optional<std::string> mid =
+ absl::optional<std::string> mid =
reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer)
->mid();
return NativeToJavaString(jni, mid);
@@ -133,7 +133,7 @@ ScopedJavaLocalRef<jobject> JNI_RtpTransceiver_CurrentDirection(
JNIEnv* jni,
const base::android::JavaParamRef<jclass>&,
jlong j_rtp_transceiver_pointer) {
- rtc::Optional<RtpTransceiverDirection> direction =
+ absl::optional<RtpTransceiverDirection> direction =
reinterpret_cast<RtpTransceiverInterface*>(j_rtp_transceiver_pointer)
->current_direction();
return direction ? NativeToJavaRtpTransceiverDirection(jni, *direction)

View File

@@ -27,7 +27,7 @@ std::unique_ptr<SessionDescriptionInterface> JavaToNativeSessionDescription(
jni, Java_SessionDescription_getTypeInCanonicalForm(jni, j_sdp));
std::string std_description =
JavaToStdString(jni, Java_SessionDescription_getDescription(jni, j_sdp));
- rtc::Optional<SdpType> sdp_type_maybe = SdpTypeFromString(std_type);
+ absl::optional<SdpType> sdp_type_maybe = SdpTypeFromString(std_type);
if (!sdp_type_maybe) {
RTC_LOG(LS_ERROR) << "Unexpected SDP type: " << std_type;
return nullptr;

View File

@@ -30,9 +30,9 @@ namespace {
const int64_t kNumRtpTicksPerMillisec = 90000 / rtc::kNumMillisecsPerSec;
template <typename Dst, typename Src>
- inline rtc::Optional<Dst> cast_optional(const rtc::Optional<Src>& value) {
- return value ? rtc::Optional<Dst>(rtc::dchecked_cast<Dst, Src>(*value))
- : rtc::nullopt;
+ inline absl::optional<Dst> cast_optional(const absl::optional<Src>& value) {
+ return value ? absl::optional<Dst>(rtc::dchecked_cast<Dst, Src>(*value))
+ : absl::nullopt;
}
} // namespace
@@ -106,7 +106,7 @@ int32_t VideoDecoderWrapper::Decode(
frame_extra_info.timestamp_rtp = input_image._timeStamp;
frame_extra_info.timestamp_ntp = input_image.ntp_time_ms_;
frame_extra_info.qp =
- qp_parsing_enabled_ ? ParseQP(input_image) : rtc::nullopt;
+ qp_parsing_enabled_ ? ParseQP(input_image) : absl::nullopt;
{
rtc::CritScope cs(&frame_extra_infos_lock_);
frame_extra_infos_.push_back(frame_extra_info);
@@ -183,10 +183,10 @@ void VideoDecoderWrapper::OnDecodedFrame(
JavaToNativeFrame(env, j_frame, frame_extra_info.timestamp_rtp);
frame.set_ntp_time_ms(frame_extra_info.timestamp_ntp);
- rtc::Optional<int32_t> decoding_time_ms =
+ absl::optional<int32_t> decoding_time_ms =
JavaToNativeOptionalInt(env, j_decode_time_ms);
- rtc::Optional<uint8_t> decoder_qp =
+ absl::optional<uint8_t> decoder_qp =
cast_optional<uint8_t, int32_t>(JavaToNativeOptionalInt(env, j_qp));
// If the decoder provides QP values itself, no need to parse the bitstream.
// Enable QP parsing if decoder does not provide QP values itself.
@@ -226,13 +226,13 @@ int32_t VideoDecoderWrapper::HandleReturnCode(JNIEnv* jni,
return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
}
- rtc::Optional<uint8_t> VideoDecoderWrapper::ParseQP(
+ absl::optional<uint8_t> VideoDecoderWrapper::ParseQP(
const EncodedImage& input_image) {
if (input_image.qp_ != -1) {
return input_image.qp_;
}
- rtc::Optional<uint8_t> qp;
+ absl::optional<uint8_t> qp;
switch (codec_settings_.codecType) {
case kVideoCodecVP8: {
int qp_int;

View File

@@ -66,7 +66,7 @@ class VideoDecoderWrapper : public VideoDecoder {
uint32_t timestamp_rtp;
int64_t timestamp_ntp;
- rtc::Optional<uint8_t> qp;
+ absl::optional<uint8_t> qp;
FrameExtraInfo();
FrameExtraInfo(const FrameExtraInfo&);
@@ -82,7 +82,7 @@ class VideoDecoderWrapper : public VideoDecoder {
const char* method_name)
RTC_RUN_ON(decoder_thread_checker_);
- rtc::Optional<uint8_t> ParseQP(const EncodedImage& input_image)
+ absl::optional<uint8_t> ParseQP(const EncodedImage& input_image)
RTC_RUN_ON(decoder_thread_checker_);
const ScopedJavaGlobalRef<jobject> decoder_;

View File

@@ -165,10 +165,10 @@ VideoEncoderWrapper::ScalingSettings VideoEncoderWrapper::GetScalingSettings()
if (!isOn)
return ScalingSettings::kOff;
- rtc::Optional<int> low = JavaToNativeOptionalInt(
+ absl::optional<int> low = JavaToNativeOptionalInt(
jni,
Java_VideoEncoderWrapper_getScalingSettingsLow(jni, j_scaling_settings));
- rtc::Optional<int> high = JavaToNativeOptionalInt(
+ absl::optional<int> high = JavaToNativeOptionalInt(
jni,
Java_VideoEncoderWrapper_getScalingSettingsHigh(jni, j_scaling_settings));