From 196100efa62583dca81c52e8abc06a29b086764a Mon Sep 17 00:00:00 2001
From: Danil Chapovalov
Date: Thu, 21 Jun 2018 10:17:24 +0200
Subject: [PATCH] Replace rtc::Optional with absl::optional

This is a no-op change because rtc::Optional is an alias for absl::optional.

This CL was generated by running the script below, passing the top-level
directories except rtc_base and api:

find $@ -type f \( -name \*.h -o -name \*.cc -o -name \*.mm \) \
  -exec sed -i 's|rtc::Optional|absl::optional|g' {} \+ \
  -exec sed -i 's|rtc::nullopt|absl::nullopt|g' {} \+ \
  -exec sed -i 's|#include "api/optional.h"|#include "absl/types/optional.h"|' {} \+

find $@ -type f -name BUILD.gn \
  -exec sed -r -i 's|"[\./api]*:optional"|"//third_party/abseil-cpp/absl/types:optional"|' {} \+;

git cl format

Bug: webrtc:9078
Change-Id: I9465c172e65ba6e6ed4e4fdc35b0b265038d6f71
Reviewed-on: https://webrtc-review.googlesource.com/84584
Reviewed-by: Karl Wiberg
Commit-Queue: Danil Chapovalov
Cr-Commit-Position: refs/heads/master@{#23697}
---
 common_audio/BUILD.gn | 2 +-
 common_audio/mocks/mock_smoothing_filter.h | 2 +-
 common_audio/smoothing_filter.cc | 4 ++--
 common_audio/smoothing_filter.h | 8 ++++----
 common_video/BUILD.gn | 2 +-
 common_video/bitrate_adjuster.cc | 2 +-
 common_video/h264/h264_bitstream_parser.cc | 2 +-
 common_video/h264/h264_bitstream_parser.h | 8 ++++----
 common_video/h264/pps_parser.cc | 18 +++++++++---------
 common_video/h264/pps_parser.h | 10 +++++-----
 common_video/h264/pps_parser_unittest.cc | 4 ++--
 common_video/h264/profile_level_id_unittest.cc | 4 ++--
 common_video/h264/sps_parser.cc | 8 ++++----
 common_video/h264/sps_parser.h | 6 +++---
 common_video/h264/sps_parser_unittest.cc | 2 +-
 common_video/h264/sps_vui_rewriter.cc | 4 ++--
 common_video/h264/sps_vui_rewriter.h | 11 ++++++-----
 common_video/h264/sps_vui_rewriter_unittest.cc | 2 +-
 common_video/include/bitrate_adjuster.h | 2 +-
 common_video/incoming_video_stream.cc | 2 +-
 common_video/video_render_frames.cc | 4 ++--
 common_video/video_render_frames.h | 4 ++--
 examples/peerconnection/client/conductor.cc | 2 +-
 logging/rtc_event_log/rtc_event_log_parser.h | 4 ++--
 .../rtc_event_log/rtc_event_log_unittest.cc | 4 ++--
 modules/BUILD.gn | 4 ++--
 modules/audio_device/BUILD.gn | 4 ++--
 modules/audio_device/audio_device_unittest.cc | 4 ++--
 .../audio_device/win/core_audio_input_win.cc | 6 +++---
 .../audio_device/win/core_audio_input_win.h | 6 +++---
 modules/include/module_common_types.h | 2 +-
 modules/include/module_common_types_public.h | 4 ++--
 sdk/BUILD.gn | 2 +-
 sdk/android/BUILD.gn | 12 ++++++------
 sdk/android/native_api/jni/java_types.cc | 14 +++++++-------
 sdk/android/native_api/jni/java_types.h | 15 ++++++++-------
 sdk/android/native_api/video/videosource.cc | 2 +-
 sdk/android/src/jni/androidmediadecoder.cc | 6 +++---
 sdk/android/src/jni/androidmediaencoder.cc | 2 +-
 sdk/android/src/jni/androidvideotracksource.cc | 2 +-
 sdk/android/src/jni/androidvideotracksource.h | 2 +-
 .../src/jni/audio_device/aaudio_player.cc | 12 ++++++------
 .../src/jni/audio_device/aaudio_player.h | 8 ++++----
 .../jni/audio_device/audio_device_module.cc | 6 +++---
 .../src/jni/audio_device/audio_device_module.h | 8 ++++----
 .../src/jni/audio_device/audio_track_jni.cc | 6 +++---
 .../src/jni/audio_device/audio_track_jni.h | 8 ++++----
 .../src/jni/audio_device/opensles_player.cc | 12 ++++++------
 .../src/jni/audio_device/opensles_player.h | 8 ++++----
 sdk/android/src/jni/pc/icecandidate.cc | 6 +++---
 sdk/android/src/jni/pc/icecandidate.h | 2 +-
 .../src/jni/pc/peerconnectionfactory.cc | 2 +-
 sdk/android/src/jni/pc/rtptransceiver.cc | 4 ++--
 sdk/android/src/jni/pc/sessiondescription.cc | 2 +-
 sdk/android/src/jni/videodecoderwrapper.cc | 16 ++++++++--------
 sdk/android/src/jni/videodecoderwrapper.h | 4 ++--
 sdk/android/src/jni/videoencoderwrapper.cc | 4 ++--
 .../Classes/PeerConnection/RTCConfiguration.mm | 9 ++++-----
 .../PeerConnection/RTCH264ProfileLevelId.mm | 4 ++--
 .../PeerConnection/RTCPeerConnection.mm | 6 +++---
 .../PeerConnection/RTCRtpCodecParameters.mm | 4 ++--
 .../PeerConnection/RTCRtpEncodingParameters.mm | 6 +++---
 .../Classes/PeerConnection/RTCVideoCodec.mm | 4 ++--
 .../Classes/Video/RTCDefaultShader.mm | 6 +++---
 .../Classes/Video/UIDevice+H264Profile.h | 2 +-
 .../Classes/Video/UIDevice+H264Profile.mm | 6 +++---
 .../VideoToolbox/RTCVideoEncoderH264.mm | 2 +-
 .../Native/src/objc_video_track_source.h | 2 +-
 system_wrappers/BUILD.gn | 2 +-
 system_wrappers/include/rtp_to_ntp_estimator.h | 4 ++--
 system_wrappers/source/rtp_to_ntp_estimator.cc | 4 ++--
 71 files changed, 189 insertions(+), 188 deletions(-)

diff --git a/common_audio/BUILD.gn b/common_audio/BUILD.gn
index 32cca43f3c..89ede3884a 100644
--- a/common_audio/BUILD.gn
+++ b/common_audio/BUILD.gn
@@ -64,7 +64,6 @@ rtc_static_library("common_audio") {
     ":sinc_resampler",
     "..:webrtc_common",
     "../:typedefs",
-    "../api:optional",
    "../rtc_base:checks",
     "../rtc_base:gtest_prod",
     "../rtc_base:rtc_base_approved",
@@ -72,6 +71,7 @@
     "../rtc_base/memory:aligned_malloc",
     "../system_wrappers",
     "../system_wrappers:cpu_features_api",
+    "//third_party/abseil-cpp/absl/types:optional",
   ]
 
   defines = []
diff --git a/common_audio/mocks/mock_smoothing_filter.h b/common_audio/mocks/mock_smoothing_filter.h
index dec6ea5f47..712049fa6a 100644
--- a/common_audio/mocks/mock_smoothing_filter.h
+++ b/common_audio/mocks/mock_smoothing_filter.h
@@ -19,7 +19,7 @@ namespace webrtc {
 class MockSmoothingFilter : public SmoothingFilter {
  public:
   MOCK_METHOD1(AddSample, void(float));
-  MOCK_METHOD0(GetAverage, rtc::Optional<float>());
+  MOCK_METHOD0(GetAverage, absl::optional<float>());
   MOCK_METHOD1(SetTimeConstantMs, bool(int));
 };
diff --git a/common_audio/smoothing_filter.cc b/common_audio/smoothing_filter.cc
index ecfb5c252b..d426bda250 100644
--- a/common_audio/smoothing_filter.cc
+++ b/common_audio/smoothing_filter.cc
@@ -52,10 +52,10 @@ void SmoothingFilterImpl::AddSample(float sample) {
   last_sample_ = sample;
 }
 
-rtc::Optional<float> SmoothingFilterImpl::GetAverage() {
+absl::optional<float> SmoothingFilterImpl::GetAverage() {
   if (!init_end_time_ms_) {
     // |init_end_time_ms_| undefined since we have not received any sample.
- return rtc::nullopt; + return absl::nullopt; } ExtrapolateLastSample(rtc::TimeMillis()); return state_; diff --git a/common_audio/smoothing_filter.h b/common_audio/smoothing_filter.h index b8ab4e5919..cff746953a 100644 --- a/common_audio/smoothing_filter.h +++ b/common_audio/smoothing_filter.h @@ -11,7 +11,7 @@ #ifndef COMMON_AUDIO_SMOOTHING_FILTER_H_ #define COMMON_AUDIO_SMOOTHING_FILTER_H_ -#include "api/optional.h" +#include "absl/types/optional.h" #include "rtc_base/constructormagic.h" #include "system_wrappers/include/clock.h" @@ -21,7 +21,7 @@ class SmoothingFilter { public: virtual ~SmoothingFilter() = default; virtual void AddSample(float sample) = 0; - virtual rtc::Optional GetAverage() = 0; + virtual absl::optional GetAverage() = 0; virtual bool SetTimeConstantMs(int time_constant_ms) = 0; }; @@ -44,7 +44,7 @@ class SmoothingFilterImpl final : public SmoothingFilter { ~SmoothingFilterImpl() override; void AddSample(float sample) override; - rtc::Optional GetAverage() override; + absl::optional GetAverage() override; bool SetTimeConstantMs(int time_constant_ms) override; // Methods used for unittests. @@ -58,7 +58,7 @@ class SmoothingFilterImpl final : public SmoothingFilter { const float init_factor_; const float init_const_; - rtc::Optional init_end_time_ms_; + absl::optional init_end_time_ms_; float last_sample_; float alpha_; float state_; diff --git a/common_video/BUILD.gn b/common_video/BUILD.gn index 650b6ad5fc..0f4d400366 100644 --- a/common_video/BUILD.gn +++ b/common_video/BUILD.gn @@ -57,7 +57,6 @@ rtc_static_library("common_video") { deps = [ "..:webrtc_common", "../:typedefs", - "../api:optional", "../api/video:video_bitrate_allocation", "../api/video:video_frame", "../api/video:video_frame_i420", @@ -67,6 +66,7 @@ rtc_static_library("common_video") { "../rtc_base:rtc_base", "../rtc_base:rtc_task_queue", "../rtc_base:safe_minmax", + "//third_party/abseil-cpp/absl/types:optional", "//third_party/libyuv", ] } diff --git a/common_video/bitrate_adjuster.cc b/common_video/bitrate_adjuster.cc index 3bb06944f2..163c4b1981 100644 --- a/common_video/bitrate_adjuster.cc +++ b/common_video/bitrate_adjuster.cc @@ -68,7 +68,7 @@ uint32_t BitrateAdjuster::GetAdjustedBitrateBps() const { return adjusted_bitrate_bps_; } -rtc::Optional BitrateAdjuster::GetEstimatedBitrateBps() { +absl::optional BitrateAdjuster::GetEstimatedBitrateBps() { rtc::CritScope cs(&crit_); return bitrate_tracker_.Rate(rtc::TimeMillis()); } diff --git a/common_video/h264/h264_bitstream_parser.cc b/common_video/h264/h264_bitstream_parser.cc index 031fcde924..d8f8a62378 100644 --- a/common_video/h264/h264_bitstream_parser.cc +++ b/common_video/h264/h264_bitstream_parser.cc @@ -44,7 +44,7 @@ H264BitstreamParser::Result H264BitstreamParser::ParseNonParameterSetNalu( if (!sps_ || !pps_) return kInvalidStream; - last_slice_qp_delta_ = rtc::nullopt; + last_slice_qp_delta_ = absl::nullopt; const std::vector slice_rbsp = H264::ParseRbsp(source, source_length); if (slice_rbsp.size() < H264::kNaluTypeSize) diff --git a/common_video/h264/h264_bitstream_parser.h b/common_video/h264/h264_bitstream_parser.h index 4ef6b40b44..b3fac7bb21 100644 --- a/common_video/h264/h264_bitstream_parser.h +++ b/common_video/h264/h264_bitstream_parser.h @@ -13,7 +13,7 @@ #include #include -#include "api/optional.h" +#include "absl/types/optional.h" #include "common_video/h264/pps_parser.h" #include "common_video/h264/sps_parser.h" @@ -53,11 +53,11 @@ class H264BitstreamParser { uint8_t nalu_type); // SPS/PPS state, updated when parsing new 
SPS/PPS, used to parse slices. - rtc::Optional sps_; - rtc::Optional pps_; + absl::optional sps_; + absl::optional pps_; // Last parsed slice QP. - rtc::Optional last_slice_qp_delta_; + absl::optional last_slice_qp_delta_; }; } // namespace webrtc diff --git a/common_video/h264/pps_parser.cc b/common_video/h264/pps_parser.cc index 7bc0ff74d0..5bc29f3592 100644 --- a/common_video/h264/pps_parser.cc +++ b/common_video/h264/pps_parser.cc @@ -19,7 +19,7 @@ #define RETURN_EMPTY_ON_FAIL(x) \ if (!(x)) { \ - return rtc::nullopt; \ + return absl::nullopt; \ } namespace { @@ -33,8 +33,8 @@ namespace webrtc { // You can find it on this page: // http://www.itu.int/rec/T-REC-H.264 -rtc::Optional PpsParser::ParsePps(const uint8_t* data, - size_t length) { +absl::optional PpsParser::ParsePps(const uint8_t* data, + size_t length) { // First, parse out rbsp, which is basically the source buffer minus emulation // bytes (the last byte of a 0x00 0x00 0x03 sequence). RBSP is defined in // section 7.3.1 of the H.264 standard. @@ -57,26 +57,26 @@ bool PpsParser::ParsePpsIds(const uint8_t* data, return ParsePpsIdsInternal(&bit_buffer, pps_id, sps_id); } -rtc::Optional PpsParser::ParsePpsIdFromSlice(const uint8_t* data, - size_t length) { +absl::optional PpsParser::ParsePpsIdFromSlice(const uint8_t* data, + size_t length) { std::vector unpacked_buffer = H264::ParseRbsp(data, length); rtc::BitBuffer slice_reader(unpacked_buffer.data(), unpacked_buffer.size()); uint32_t golomb_tmp; // first_mb_in_slice: ue(v) if (!slice_reader.ReadExponentialGolomb(&golomb_tmp)) - return rtc::nullopt; + return absl::nullopt; // slice_type: ue(v) if (!slice_reader.ReadExponentialGolomb(&golomb_tmp)) - return rtc::nullopt; + return absl::nullopt; // pic_parameter_set_id: ue(v) uint32_t slice_pps_id; if (!slice_reader.ReadExponentialGolomb(&slice_pps_id)) - return rtc::nullopt; + return absl::nullopt; return slice_pps_id; } -rtc::Optional PpsParser::ParseInternal( +absl::optional PpsParser::ParseInternal( rtc::BitBuffer* bit_buffer) { PpsState pps; diff --git a/common_video/h264/pps_parser.h b/common_video/h264/pps_parser.h index 571af9773a..d6c31b0688 100644 --- a/common_video/h264/pps_parser.h +++ b/common_video/h264/pps_parser.h @@ -11,7 +11,7 @@ #ifndef COMMON_VIDEO_H264_PPS_PARSER_H_ #define COMMON_VIDEO_H264_PPS_PARSER_H_ -#include "api/optional.h" +#include "absl/types/optional.h" namespace rtc { class BitBuffer; @@ -38,20 +38,20 @@ class PpsParser { }; // Unpack RBSP and parse PPS state from the supplied buffer. - static rtc::Optional ParsePps(const uint8_t* data, size_t length); + static absl::optional ParsePps(const uint8_t* data, size_t length); static bool ParsePpsIds(const uint8_t* data, size_t length, uint32_t* pps_id, uint32_t* sps_id); - static rtc::Optional ParsePpsIdFromSlice(const uint8_t* data, - size_t length); + static absl::optional ParsePpsIdFromSlice(const uint8_t* data, + size_t length); protected: // Parse the PPS state, for a bit buffer where RBSP decoding has already been // performed. 
- static rtc::Optional ParseInternal(rtc::BitBuffer* bit_buffer); + static absl::optional ParseInternal(rtc::BitBuffer* bit_buffer); static bool ParsePpsIdsInternal(rtc::BitBuffer* bit_buffer, uint32_t* pps_id, uint32_t* sps_id); diff --git a/common_video/h264/pps_parser_unittest.cc b/common_video/h264/pps_parser_unittest.cc index 14a425f6c4..9fdbf7e3bf 100644 --- a/common_video/h264/pps_parser_unittest.cc +++ b/common_video/h264/pps_parser_unittest.cc @@ -192,7 +192,7 @@ class PpsParserTest : public ::testing::Test { PpsParser::PpsState generated_pps_; rtc::Buffer buffer_; - rtc::Optional parsed_pps_; + absl::optional parsed_pps_; }; TEST_F(PpsParserTest, ZeroPps) { @@ -215,7 +215,7 @@ TEST_F(PpsParserTest, MaxPps) { } TEST_F(PpsParserTest, PpsIdFromSlice) { - rtc::Optional pps_id = PpsParser::ParsePpsIdFromSlice( + absl::optional pps_id = PpsParser::ParsePpsIdFromSlice( kH264BitstreamChunk, sizeof(kH264BitstreamChunk)); ASSERT_TRUE(pps_id); EXPECT_EQ(2u, *pps_id); diff --git a/common_video/h264/profile_level_id_unittest.cc b/common_video/h264/profile_level_id_unittest.cc index d7b7cfb351..66ad300131 100644 --- a/common_video/h264/profile_level_id_unittest.cc +++ b/common_video/h264/profile_level_id_unittest.cc @@ -125,7 +125,7 @@ TEST(H264ProfileLevelId, TestToStringInvalid) { } TEST(H264ProfileLevelId, TestParseSdpProfileLevelIdEmpty) { - const rtc::Optional profile_level_id = + const absl::optional profile_level_id = ParseSdpProfileLevelId(CodecParameterMap()); EXPECT_TRUE(profile_level_id); EXPECT_EQ(kProfileConstrainedBaseline, profile_level_id->profile); @@ -135,7 +135,7 @@ TEST(H264ProfileLevelId, TestParseSdpProfileLevelIdEmpty) { TEST(H264ProfileLevelId, TestParseSdpProfileLevelIdConstrainedHigh) { CodecParameterMap params; params["profile-level-id"] = "640c2a"; - const rtc::Optional profile_level_id = + const absl::optional profile_level_id = ParseSdpProfileLevelId(params); EXPECT_TRUE(profile_level_id); EXPECT_EQ(kProfileConstrainedHigh, profile_level_id->profile); diff --git a/common_video/h264/sps_parser.cc b/common_video/h264/sps_parser.cc index c921972ce0..b313f48788 100644 --- a/common_video/h264/sps_parser.cc +++ b/common_video/h264/sps_parser.cc @@ -18,7 +18,7 @@ #include "rtc_base/logging.h" namespace { -typedef rtc::Optional OptionalSps; +typedef absl::optional OptionalSps; #define RETURN_EMPTY_ON_FAIL(x) \ if (!(x)) { \ @@ -38,14 +38,14 @@ SpsParser::SpsState::SpsState() = default; // http://www.itu.int/rec/T-REC-H.264 // Unpack RBSP and parse SPS state from the supplied buffer. -rtc::Optional SpsParser::ParseSps(const uint8_t* data, - size_t length) { +absl::optional SpsParser::ParseSps(const uint8_t* data, + size_t length) { std::vector unpacked_buffer = H264::ParseRbsp(data, length); rtc::BitBuffer bit_buffer(unpacked_buffer.data(), unpacked_buffer.size()); return ParseSpsUpToVui(&bit_buffer); } -rtc::Optional SpsParser::ParseSpsUpToVui( +absl::optional SpsParser::ParseSpsUpToVui( rtc::BitBuffer* buffer) { // Now, we need to use a bit buffer to parse through the actual AVC SPS // format. 
See Section 7.3.2.1.1 ("Sequence parameter set data syntax") of the diff --git a/common_video/h264/sps_parser.h b/common_video/h264/sps_parser.h index 1fddc0c782..d4294b2558 100644 --- a/common_video/h264/sps_parser.h +++ b/common_video/h264/sps_parser.h @@ -11,7 +11,7 @@ #ifndef COMMON_VIDEO_H264_SPS_PARSER_H_ #define COMMON_VIDEO_H264_SPS_PARSER_H_ -#include "api/optional.h" +#include "absl/types/optional.h" namespace rtc { class BitBuffer; @@ -41,12 +41,12 @@ class SpsParser { }; // Unpack RBSP and parse SPS state from the supplied buffer. - static rtc::Optional ParseSps(const uint8_t* data, size_t length); + static absl::optional ParseSps(const uint8_t* data, size_t length); protected: // Parse the SPS state, up till the VUI part, for a bit buffer where RBSP // decoding has already been performed. - static rtc::Optional ParseSpsUpToVui(rtc::BitBuffer* buffer); + static absl::optional ParseSpsUpToVui(rtc::BitBuffer* buffer); }; } // namespace webrtc diff --git a/common_video/h264/sps_parser_unittest.cc b/common_video/h264/sps_parser_unittest.cc index 6856c1bbf2..50227ed870 100644 --- a/common_video/h264/sps_parser_unittest.cc +++ b/common_video/h264/sps_parser_unittest.cc @@ -112,7 +112,7 @@ class H264SpsParserTest : public ::testing::Test { H264SpsParserTest() {} virtual ~H264SpsParserTest() {} - rtc::Optional sps_; + absl::optional sps_; }; TEST_F(H264SpsParserTest, TestSampleSPSHdLandscape) { diff --git a/common_video/h264/sps_vui_rewriter.cc b/common_video/h264/sps_vui_rewriter.cc index c346865090..749b62e6d2 100644 --- a/common_video/h264/sps_vui_rewriter.cc +++ b/common_video/h264/sps_vui_rewriter.cc @@ -72,13 +72,13 @@ bool CopyRemainingBits(rtc::BitBuffer* source, SpsVuiRewriter::ParseResult SpsVuiRewriter::ParseAndRewriteSps( const uint8_t* buffer, size_t length, - rtc::Optional* sps, + absl::optional* sps, rtc::Buffer* destination) { // Create temporary RBSP decoded buffer of the payload (exlcuding the // leading nalu type header byte (the SpsParser uses only the payload). std::vector rbsp_buffer = H264::ParseRbsp(buffer, length); rtc::BitBuffer source_buffer(rbsp_buffer.data(), rbsp_buffer.size()); - rtc::Optional sps_state = + absl::optional sps_state = SpsParser::ParseSpsUpToVui(&source_buffer); if (!sps_state) return ParseResult::kFailure; diff --git a/common_video/h264/sps_vui_rewriter.h b/common_video/h264/sps_vui_rewriter.h index f639c0d893..233051d2d6 100644 --- a/common_video/h264/sps_vui_rewriter.h +++ b/common_video/h264/sps_vui_rewriter.h @@ -12,7 +12,7 @@ #ifndef COMMON_VIDEO_H264_SPS_VUI_REWRITER_H_ #define COMMON_VIDEO_H264_SPS_VUI_REWRITER_H_ -#include "api/optional.h" +#include "absl/types/optional.h" #include "common_video/h264/sps_parser.h" #include "rtc_base/buffer.h" @@ -43,10 +43,11 @@ class SpsVuiRewriter : private SpsParser { // SPS state. This function assumes that any previous headers // (NALU start, type, Stap-A, etc) have already been parsed and that RBSP // decoding has been performed. 
- static ParseResult ParseAndRewriteSps(const uint8_t* buffer, - size_t length, - rtc::Optional* sps, - rtc::Buffer* destination); + static ParseResult ParseAndRewriteSps( + const uint8_t* buffer, + size_t length, + absl::optional* sps, + rtc::Buffer* destination); }; } // namespace webrtc diff --git a/common_video/h264/sps_vui_rewriter_unittest.cc b/common_video/h264/sps_vui_rewriter_unittest.cc index 0de5d3372d..9464de8077 100644 --- a/common_video/h264/sps_vui_rewriter_unittest.cc +++ b/common_video/h264/sps_vui_rewriter_unittest.cc @@ -159,7 +159,7 @@ void TestSps(SpsMode mode, SpsVuiRewriter::ParseResult expected_parse_result) { index.payload_start_offset += H264::kNaluTypeSize; index.payload_size -= H264::kNaluTypeSize; - rtc::Optional sps; + absl::optional sps; rtc::Buffer out_buffer; SpsVuiRewriter::ParseResult result = SpsVuiRewriter::ParseAndRewriteSps(&buffer[index.payload_start_offset], diff --git a/common_video/include/bitrate_adjuster.h b/common_video/include/bitrate_adjuster.h index 76ce9e9901..ee312e4f9a 100644 --- a/common_video/include/bitrate_adjuster.h +++ b/common_video/include/bitrate_adjuster.h @@ -44,7 +44,7 @@ class BitrateAdjuster { uint32_t GetAdjustedBitrateBps() const; // Returns what we think the current bitrate is. - rtc::Optional GetEstimatedBitrateBps(); + absl::optional GetEstimatedBitrateBps(); // This should be called after each frame is encoded. The timestamp at which // it is called is used to estimate the output bitrate of the encoder. diff --git a/common_video/incoming_video_stream.cc b/common_video/incoming_video_stream.cc index b5a2c31a6d..efca514556 100644 --- a/common_video/incoming_video_stream.cc +++ b/common_video/incoming_video_stream.cc @@ -69,7 +69,7 @@ void IncomingVideoStream::OnFrame(const VideoFrame& video_frame) { void IncomingVideoStream::Dequeue() { TRACE_EVENT0("webrtc", "IncomingVideoStream::Dequeue"); RTC_DCHECK(incoming_render_queue_.IsCurrent()); - rtc::Optional frame_to_render = render_buffers_.FrameToRender(); + absl::optional frame_to_render = render_buffers_.FrameToRender(); if (frame_to_render) callback_->OnFrame(*frame_to_render); diff --git a/common_video/video_render_frames.cc b/common_video/video_render_frames.cc index 4fa9ef7b08..982923c0ba 100644 --- a/common_video/video_render_frames.cc +++ b/common_video/video_render_frames.cc @@ -73,8 +73,8 @@ int32_t VideoRenderFrames::AddFrame(VideoFrame&& new_frame) { return static_cast(incoming_frames_.size()); } -rtc::Optional VideoRenderFrames::FrameToRender() { - rtc::Optional render_frame; +absl::optional VideoRenderFrames::FrameToRender() { + absl::optional render_frame; // Get the newest frame that can be released for rendering. while (!incoming_frames_.empty() && TimeToNextFrameRelease() <= 0) { render_frame = std::move(incoming_frames_.front()); diff --git a/common_video/video_render_frames.h b/common_video/video_render_frames.h index af254f2c63..31a46344c9 100644 --- a/common_video/video_render_frames.h +++ b/common_video/video_render_frames.h @@ -15,7 +15,7 @@ #include -#include "api/optional.h" +#include "absl/types/optional.h" #include "api/video/video_frame.h" namespace webrtc { @@ -30,7 +30,7 @@ class VideoRenderFrames { int32_t AddFrame(VideoFrame&& new_frame); // Get a frame for rendering, or false if it's not time to render. 
- rtc::Optional FrameToRender(); + absl::optional FrameToRender(); // Returns the number of ms to next frame to render uint32_t TimeToNextFrameRelease(); diff --git a/examples/peerconnection/client/conductor.cc b/examples/peerconnection/client/conductor.cc index b7f06aad8a..89c8984692 100644 --- a/examples/peerconnection/client/conductor.cc +++ b/examples/peerconnection/client/conductor.cc @@ -270,7 +270,7 @@ void Conductor::OnMessageFromPeer(int peer_id, const std::string& message) { } return; } - rtc::Optional type_maybe = + absl::optional type_maybe = webrtc::SdpTypeFromString(type_str); if (!type_maybe) { RTC_LOG(LS_ERROR) << "Unknown SDP type: " << type_str; diff --git a/logging/rtc_event_log/rtc_event_log_parser.h b/logging/rtc_event_log/rtc_event_log_parser.h index af4b2de39f..862a48d42c 100644 --- a/logging/rtc_event_log/rtc_event_log_parser.h +++ b/logging/rtc_event_log/rtc_event_log_parser.h @@ -57,8 +57,8 @@ class ParsedRtcEventLog { struct BweProbeResultEvent { uint64_t timestamp; uint32_t id; - rtc::Optional bitrate_bps; - rtc::Optional failure_reason; + absl::optional bitrate_bps; + absl::optional failure_reason; }; struct BweDelayBasedUpdate { diff --git a/logging/rtc_event_log/rtc_event_log_unittest.cc b/logging/rtc_event_log/rtc_event_log_unittest.cc index 21f9cf3765..f40722da92 100644 --- a/logging/rtc_event_log/rtc_event_log_unittest.cc +++ b/logging/rtc_event_log/rtc_event_log_unittest.cc @@ -748,8 +748,8 @@ TEST(RtcEventLogTest, CircularBufferKeepsMostRecentEvents) { EXPECT_GT(parsed_log.GetNumberOfEvents(), 2u); RtcEventLogTestHelper::VerifyLogStartEvent(parsed_log, 0); - rtc::Optional last_timestamp; - rtc::Optional last_ssrc; + absl::optional last_timestamp; + absl::optional last_ssrc; for (size_t i = 1; i < parsed_log.GetNumberOfEvents() - 1; i++) { EXPECT_EQ(parsed_log.GetEventType(i), ParsedRtcEventLogNew::EventType::AUDIO_PLAYOUT_EVENT); diff --git a/modules/BUILD.gn b/modules/BUILD.gn index 7a50406f20..12345b8913 100644 --- a/modules/BUILD.gn +++ b/modules/BUILD.gn @@ -37,7 +37,7 @@ rtc_source_set("module_api_public") { deps = [ "..:webrtc_common", "../:typedefs", - "../api:optional", + "//third_party/abseil-cpp/absl/types:optional", ] } @@ -53,13 +53,13 @@ rtc_source_set("module_api") { "..:webrtc_common", "../:typedefs", "../api:libjingle_peerconnection_api", - "../api:optional", "../api/transport:network_control", "../api/video:video_frame", "../api/video:video_frame_i420", "../rtc_base:deprecation", "../rtc_base:rtc_base_approved", "video_coding:codec_globals_headers", + "//third_party/abseil-cpp/absl/types:optional", ] } diff --git a/modules/audio_device/BUILD.gn b/modules/audio_device/BUILD.gn index 4b58779d58..96f53b8271 100644 --- a/modules/audio_device/BUILD.gn +++ b/modules/audio_device/BUILD.gn @@ -204,9 +204,9 @@ rtc_source_set("audio_device_module_from_input_and_output") { ":audio_device_api", ":audio_device_buffer", ":windows_core_audio_utility", - "../../api:optional", "../../rtc_base:checks", "../../rtc_base:rtc_base_approved", + "//third_party/abseil-cpp/absl/types:optional", ] } } @@ -483,7 +483,6 @@ if (rtc_include_tests) { ":audio_device_impl", ":mock_audio_device", "../../api:array_view", - "../../api:optional", "../../common_audio", "../../rtc_base:checks", "../../rtc_base:rtc_base_approved", @@ -491,6 +490,7 @@ if (rtc_include_tests) { "../../test:fileutils", "../../test:test_support", "../utility:utility", + "//third_party/abseil-cpp/absl/types:optional", ] if (is_linux || is_mac || is_win) { sources += [ "audio_device_unittest.cc" ] 
diff --git a/modules/audio_device/audio_device_unittest.cc b/modules/audio_device/audio_device_unittest.cc index 40b6987089..d9ebc8f19c 100644 --- a/modules/audio_device/audio_device_unittest.cc +++ b/modules/audio_device/audio_device_unittest.cc @@ -13,8 +13,8 @@ #include #include +#include "absl/types/optional.h" #include "api/array_view.h" -#include "api/optional.h" #include "modules/audio_device/audio_device_impl.h" #include "modules/audio_device/include/audio_device.h" #include "modules/audio_device/include/mock_audio_transport.h" @@ -294,7 +294,7 @@ class LatencyAudioStream : public AudioStream { rtc::ThreadChecker read_thread_checker_; rtc::ThreadChecker write_thread_checker_; - rtc::Optional pulse_time_ RTC_GUARDED_BY(lock_); + absl::optional pulse_time_ RTC_GUARDED_BY(lock_); std::vector latencies_ RTC_GUARDED_BY(race_checker_); size_t read_count_ RTC_GUARDED_BY(read_thread_checker_) = 0; size_t write_count_ RTC_GUARDED_BY(write_thread_checker_) = 0; diff --git a/modules/audio_device/win/core_audio_input_win.cc b/modules/audio_device/win/core_audio_input_win.cc index 1322f1cafb..fe11e0264f 100644 --- a/modules/audio_device/win/core_audio_input_win.cc +++ b/modules/audio_device/win/core_audio_input_win.cc @@ -315,10 +315,10 @@ bool CoreAudioInput::OnDataCallback(uint64_t device_frequency) { return true; } -rtc::Optional CoreAudioInput::EstimateLatencyMillis( +absl::optional CoreAudioInput::EstimateLatencyMillis( uint64_t capture_time_100ns) { if (!qpc_to_100ns_) { - return rtc::nullopt; + return absl::nullopt; } // Input parameter |capture_time_100ns| contains the performance counter at // the time that the audio endpoint device recorded the device position of @@ -329,7 +329,7 @@ rtc::Optional CoreAudioInput::EstimateLatencyMillis( // - subtracting |capture_time_100ns| from now_time_100ns. 
LARGE_INTEGER perf_counter_now = {}; if (!::QueryPerformanceCounter(&perf_counter_now)) { - return rtc::nullopt; + return absl::nullopt; } uint64_t qpc_now_raw = perf_counter_now.QuadPart; uint64_t now_time_100ns = qpc_now_raw * (*qpc_to_100ns_); diff --git a/modules/audio_device/win/core_audio_input_win.h b/modules/audio_device/win/core_audio_input_win.h index 5cfbf2aa0a..0dd2e3730a 100644 --- a/modules/audio_device/win/core_audio_input_win.h +++ b/modules/audio_device/win/core_audio_input_win.h @@ -14,7 +14,7 @@ #include #include -#include "api/optional.h" +#include "absl/types/optional.h" #include "modules/audio_device/win/audio_device_module_win.h" #include "modules/audio_device/win/core_audio_base_win.h" @@ -53,11 +53,11 @@ class CoreAudioInput final : public CoreAudioBase, public AudioInput { private: bool OnDataCallback(uint64_t device_frequency); - rtc::Optional EstimateLatencyMillis(uint64_t capture_time_100ns); + absl::optional EstimateLatencyMillis(uint64_t capture_time_100ns); std::unique_ptr fine_audio_buffer_; Microsoft::WRL::ComPtr audio_capture_client_; - rtc::Optional qpc_to_100ns_; + absl::optional qpc_to_100ns_; }; } // namespace webrtc_win diff --git a/modules/include/module_common_types.h b/modules/include/module_common_types.h index a1889d4bdc..b5b023d6fb 100644 --- a/modules/include/module_common_types.h +++ b/modules/include/module_common_types.h @@ -17,7 +17,7 @@ #include #include -#include "api/optional.h" +#include "absl/types/optional.h" #include "api/rtp_headers.h" #include "api/transport/network_types.h" #include "api/video/video_rotation.h" diff --git a/modules/include/module_common_types_public.h b/modules/include/module_common_types_public.h index f1ae3de462..2afd9af1c0 100644 --- a/modules/include/module_common_types_public.h +++ b/modules/include/module_common_types_public.h @@ -13,7 +13,7 @@ #include -#include "api/optional.h" +#include "absl/types/optional.h" #include "typedefs.h" // NOLINT(build/include) namespace webrtc { @@ -78,7 +78,7 @@ class Unwrapper { } private: - rtc::Optional last_value_; + absl::optional last_value_; }; using SequenceNumberUnwrapper = Unwrapper; diff --git a/sdk/BUILD.gn b/sdk/BUILD.gn index 1e3bd624cc..37764b430e 100644 --- a/sdk/BUILD.gn +++ b/sdk/BUILD.gn @@ -304,12 +304,12 @@ if (is_ios || is_mac) { ":videoframebuffer_objc", ":videosource_objc", "../api:libjingle_peerconnection_api", - "../api:optional", "../api/video:video_frame", "../common_video", "../media:rtc_media_base", "../rtc_base:checks", "../rtc_base:rtc_base", + "//third_party/abseil-cpp/absl/types:optional", ] configs += [ diff --git a/sdk/android/BUILD.gn b/sdk/android/BUILD.gn index 031a20fcbe..498c7dc23e 100644 --- a/sdk/android/BUILD.gn +++ b/sdk/android/BUILD.gn @@ -87,11 +87,11 @@ rtc_source_set("base_jni") { ":internal_jni", ":native_api_jni", "../../api:libjingle_peerconnection_api", - "../../api:optional", "../../rtc_base:checks", "../../rtc_base:rtc_base", "../../rtc_base:rtc_base_approved", "../../system_wrappers:metrics_api", + "//third_party/abseil-cpp/absl/types:optional", ] } @@ -145,11 +145,11 @@ rtc_source_set("audio_device_module_base") { ":base_jni", ":generated_audio_device_module_base_jni", ":native_api_jni", - "../../api:optional", "../../modules/audio_device:audio_device_buffer", "../../rtc_base:checks", "../../rtc_base:rtc_base_approved", "../../system_wrappers:metrics_api", + "//third_party/abseil-cpp/absl/types:optional", ] } @@ -170,13 +170,13 @@ if (rtc_enable_android_aaudio) { ":audio_device_module_base", ":base_jni", 
"../../api:array_view", - "../../api:optional", "../../modules/audio_device:audio_device", "../../modules/audio_device:audio_device_buffer", "../../rtc_base:checks", "../../rtc_base:rtc_base", "../../rtc_base:rtc_base_approved", "../../system_wrappers", + "//third_party/abseil-cpp/absl/types:optional", ] } } @@ -196,11 +196,11 @@ rtc_source_set("opensles_audio_device_module") { ":audio_device_module_base", ":base_jni", "../../api:array_view", - "../../api:optional", "../../modules/audio_device:audio_device", "../../modules/audio_device:audio_device_buffer", "../../rtc_base:checks", "../../rtc_base:rtc_base_approved", + "//third_party/abseil-cpp/absl/types:optional", ] } @@ -217,12 +217,12 @@ rtc_source_set("java_audio_device_module") { ":audio_device_module_base", ":base_jni", ":generated_java_audio_device_module_native_jni", - "../../api:optional", "../../modules/audio_device:audio_device", "../../modules/audio_device:audio_device_buffer", "../../rtc_base:checks", "../../rtc_base:rtc_base_approved", "../../system_wrappers:metrics_api", + "//third_party/abseil-cpp/absl/types:optional", ] } @@ -1148,9 +1148,9 @@ rtc_static_library("native_api_jni") { ":generated_external_classes_jni", ":generated_native_api_jni", ":internal_jni", - "//api:optional", "//rtc_base:checks", "//rtc_base:rtc_base_approved", + "//third_party/abseil-cpp/absl/types:optional", ] } diff --git a/sdk/android/native_api/jni/java_types.cc b/sdk/android/native_api/jni/java_types.cc index fbc35aae5c..df0a22905e 100644 --- a/sdk/android/native_api/jni/java_types.cc +++ b/sdk/android/native_api/jni/java_types.cc @@ -125,18 +125,18 @@ int64_t JavaToNativeLong(JNIEnv* env, const JavaRef& j_long) { return JNI_Long::Java_Long_longValue(env, j_long); } -rtc::Optional JavaToNativeOptionalBool(JNIEnv* jni, - const JavaRef& boolean) { +absl::optional JavaToNativeOptionalBool(JNIEnv* jni, + const JavaRef& boolean) { if (IsNull(jni, boolean)) - return rtc::nullopt; + return absl::nullopt; return JNI_Boolean::Java_Boolean_booleanValue(jni, boolean); } -rtc::Optional JavaToNativeOptionalInt( +absl::optional JavaToNativeOptionalInt( JNIEnv* jni, const JavaRef& integer) { if (IsNull(jni, integer)) - return rtc::nullopt; + return absl::nullopt; return JNI_Integer::Java_Integer_intValue(jni, integer); } @@ -196,13 +196,13 @@ ScopedJavaLocalRef NativeToJavaString(JNIEnv* jni, ScopedJavaLocalRef NativeToJavaInteger( JNIEnv* jni, - const rtc::Optional& optional_int) { + const absl::optional& optional_int) { return optional_int ? NativeToJavaInteger(jni, *optional_int) : nullptr; } ScopedJavaLocalRef NativeToJavaString( JNIEnv* jni, - const rtc::Optional& str) { + const absl::optional& str) { return str ? 
NativeToJavaString(jni, *str) : nullptr; } diff --git a/sdk/android/native_api/jni/java_types.h b/sdk/android/native_api/jni/java_types.h index a84b7d9961..3b857049cc 100644 --- a/sdk/android/native_api/jni/java_types.h +++ b/sdk/android/native_api/jni/java_types.h @@ -22,7 +22,7 @@ #include #include -#include "api/optional.h" +#include "absl/types/optional.h" #include "rtc_base/checks.h" #include "rtc_base/thread_checker.h" #include "sdk/android/native_api/jni/scoped_java_ref.h" @@ -126,10 +126,11 @@ ScopedJavaLocalRef GetJavaMapEntryValue( int64_t JavaToNativeLong(JNIEnv* env, const JavaRef& j_long); -rtc::Optional JavaToNativeOptionalBool(JNIEnv* jni, - const JavaRef& boolean); -rtc::Optional JavaToNativeOptionalInt(JNIEnv* jni, - const JavaRef& integer); +absl::optional JavaToNativeOptionalBool(JNIEnv* jni, + const JavaRef& boolean); +absl::optional JavaToNativeOptionalInt( + JNIEnv* jni, + const JavaRef& integer); // Given a (UTF-16) jstring return a new UTF-8 native string. std::string JavaToNativeString(JNIEnv* jni, const JavaRef& j_string); @@ -196,10 +197,10 @@ ScopedJavaLocalRef NativeToJavaString(JNIEnv* jni, ScopedJavaLocalRef NativeToJavaInteger( JNIEnv* jni, - const rtc::Optional& optional_int); + const absl::optional& optional_int); ScopedJavaLocalRef NativeToJavaString( JNIEnv* jni, - const rtc::Optional& str); + const absl::optional& str); // Helper function for converting std::vector into a Java array. template diff --git a/sdk/android/native_api/video/videosource.cc b/sdk/android/native_api/video/videosource.cc index 9470feb59c..4c302da0ed 100644 --- a/sdk/android/native_api/video/videosource.cc +++ b/sdk/android/native_api/video/videosource.cc @@ -74,7 +74,7 @@ class JavaVideoTrackSourceImpl : public JavaVideoTrackSourceInterface { return android_video_track_source_->is_screencast(); } - rtc::Optional needs_denoising() const override { + absl::optional needs_denoising() const override { return android_video_track_source_->needs_denoising(); } diff --git a/sdk/android/src/jni/androidmediadecoder.cc b/sdk/android/src/jni/androidmediadecoder.cc index a5f47da4af..1ca9e06177 100644 --- a/sdk/android/src/jni/androidmediadecoder.cc +++ b/sdk/android/src/jni/androidmediadecoder.cc @@ -124,7 +124,7 @@ class MediaCodecVideoDecoder : public VideoDecoder, public rtc::MessageHandler { int current_delay_time_ms_; // Overall delay time in the current second. int32_t max_pending_frames_; // Maximum number of pending input frames. H264BitstreamParser h264_bitstream_parser_; - std::deque> pending_frame_qps_; + std::deque> pending_frame_qps_; // State that is constant for the lifetime of this object once the ctor // returns. @@ -506,7 +506,7 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread( // Save input image timestamps for later output. 
frames_received_++; current_bytes_ += inputImage._length; - rtc::Optional qp; + absl::optional qp; if (codecType_ == kVideoCodecVP8) { int qp_int; if (vp8::GetQp(inputImage._buffer, inputImage._length, &qp_int)) { @@ -743,7 +743,7 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(JNIEnv* jni, decoded_frame.set_timestamp(output_timestamps_ms); decoded_frame.set_ntp_time_ms(output_ntp_timestamps_ms); - rtc::Optional qp = pending_frame_qps_.front(); + absl::optional qp = pending_frame_qps_.front(); pending_frame_qps_.pop_front(); callback_->Decoded(decoded_frame, decode_time_ms, qp); } diff --git a/sdk/android/src/jni/androidmediaencoder.cc b/sdk/android/src/jni/androidmediaencoder.cc index de2743b6a5..9edbf2d8b1 100644 --- a/sdk/android/src/jni/androidmediaencoder.cc +++ b/sdk/android/src/jni/androidmediaencoder.cc @@ -347,7 +347,7 @@ int32_t MediaCodecVideoEncoder::InitEncode(const VideoCodec* codec_settings, // Check allowed H.264 profile profile_ = H264::Profile::kProfileBaseline; if (codec_type == kVideoCodecH264) { - const rtc::Optional profile_level_id = + const absl::optional profile_level_id = H264::ParseSdpProfileLevelId(codec_.params); RTC_DCHECK(profile_level_id); profile_ = profile_level_id->profile; diff --git a/sdk/android/src/jni/androidvideotracksource.cc b/sdk/android/src/jni/androidvideotracksource.cc index 1d75a4f817..41d4278ac9 100644 --- a/sdk/android/src/jni/androidvideotracksource.cc +++ b/sdk/android/src/jni/androidvideotracksource.cc @@ -38,7 +38,7 @@ bool AndroidVideoTrackSource::is_screencast() const { return is_screencast_; } -rtc::Optional AndroidVideoTrackSource::needs_denoising() const { +absl::optional AndroidVideoTrackSource::needs_denoising() const { return false; } diff --git a/sdk/android/src/jni/androidvideotracksource.h b/sdk/android/src/jni/androidvideotracksource.h index 3dbcb2ac3b..3c4d1ef663 100644 --- a/sdk/android/src/jni/androidvideotracksource.h +++ b/sdk/android/src/jni/androidvideotracksource.h @@ -37,7 +37,7 @@ class AndroidVideoTrackSource : public rtc::AdaptedVideoTrackSource { // Indicates that the encoder should denoise video before encoding it. // If it is not set, the default configuration is used which is different // depending on video codec. 
- rtc::Optional needs_denoising() const override; + absl::optional needs_denoising() const override; // Called by the native capture observer void SetState(SourceState state); diff --git a/sdk/android/src/jni/audio_device/aaudio_player.cc b/sdk/android/src/jni/audio_device/aaudio_player.cc index e6bcddd8f6..f32c2659fc 100644 --- a/sdk/android/src/jni/audio_device/aaudio_player.cc +++ b/sdk/android/src/jni/audio_device/aaudio_player.cc @@ -135,16 +135,16 @@ int AAudioPlayer::SetSpeakerVolume(uint32_t volume) { return -1; } -rtc::Optional AAudioPlayer::SpeakerVolume() const { - return rtc::nullopt; +absl::optional AAudioPlayer::SpeakerVolume() const { + return absl::nullopt; } -rtc::Optional AAudioPlayer::MaxSpeakerVolume() const { - return rtc::nullopt; +absl::optional AAudioPlayer::MaxSpeakerVolume() const { + return absl::nullopt; } -rtc::Optional AAudioPlayer::MinSpeakerVolume() const { - return rtc::nullopt; +absl::optional AAudioPlayer::MinSpeakerVolume() const { + return absl::nullopt; } void AAudioPlayer::OnErrorCallback(aaudio_result_t error) { diff --git a/sdk/android/src/jni/audio_device/aaudio_player.h b/sdk/android/src/jni/audio_device/aaudio_player.h index 92d18003f2..b43b5b3157 100644 --- a/sdk/android/src/jni/audio_device/aaudio_player.h +++ b/sdk/android/src/jni/audio_device/aaudio_player.h @@ -14,7 +14,7 @@ #include #include -#include "api/optional.h" +#include "absl/types/optional.h" #include "modules/audio_device/audio_device_buffer.h" #include "modules/audio_device/include/audio_device_defines.h" #include "rtc_base/messagehandler.h" @@ -73,9 +73,9 @@ class AAudioPlayer final : public AudioOutput, // Not implemented in AAudio. bool SpeakerVolumeIsAvailable() override; int SetSpeakerVolume(uint32_t volume) override; - rtc::Optional SpeakerVolume() const override; - rtc::Optional MaxSpeakerVolume() const override; - rtc::Optional MinSpeakerVolume() const override; + absl::optional SpeakerVolume() const override; + absl::optional MaxSpeakerVolume() const override; + absl::optional MinSpeakerVolume() const override; protected: // AAudioObserverInterface implementation. 
diff --git a/sdk/android/src/jni/audio_device/audio_device_module.cc b/sdk/android/src/jni/audio_device/audio_device_module.cc index 0b18fe4b91..196c65525a 100644 --- a/sdk/android/src/jni/audio_device/audio_device_module.cc +++ b/sdk/android/src/jni/audio_device/audio_device_module.cc @@ -341,7 +341,7 @@ class AndroidAudioDeviceModule : public AudioDeviceModule { RTC_LOG(INFO) << __FUNCTION__; if (!initialized_) return -1; - rtc::Optional volume = output_->SpeakerVolume(); + absl::optional volume = output_->SpeakerVolume(); if (!volume) return -1; *output_volume = *volume; @@ -353,7 +353,7 @@ class AndroidAudioDeviceModule : public AudioDeviceModule { RTC_LOG(INFO) << __FUNCTION__; if (!initialized_) return -1; - rtc::Optional max_volume = output_->MaxSpeakerVolume(); + absl::optional max_volume = output_->MaxSpeakerVolume(); if (!max_volume) return -1; *output_max_volume = *max_volume; @@ -364,7 +364,7 @@ class AndroidAudioDeviceModule : public AudioDeviceModule { RTC_LOG(INFO) << __FUNCTION__; if (!initialized_) return -1; - rtc::Optional min_volume = output_->MinSpeakerVolume(); + absl::optional min_volume = output_->MinSpeakerVolume(); if (!min_volume) return -1; *output_min_volume = *min_volume; diff --git a/sdk/android/src/jni/audio_device/audio_device_module.h b/sdk/android/src/jni/audio_device/audio_device_module.h index c8fdfc3d6c..cddd3e0676 100644 --- a/sdk/android/src/jni/audio_device/audio_device_module.h +++ b/sdk/android/src/jni/audio_device/audio_device_module.h @@ -13,7 +13,7 @@ #include -#include "api/optional.h" +#include "absl/types/optional.h" #include "modules/audio_device/audio_device_buffer.h" #include "sdk/android/native_api/jni/scoped_java_ref.h" @@ -58,9 +58,9 @@ class AudioOutput { virtual bool Playing() const = 0; virtual bool SpeakerVolumeIsAvailable() = 0; virtual int SetSpeakerVolume(uint32_t volume) = 0; - virtual rtc::Optional SpeakerVolume() const = 0; - virtual rtc::Optional MaxSpeakerVolume() const = 0; - virtual rtc::Optional MinSpeakerVolume() const = 0; + virtual absl::optional SpeakerVolume() const = 0; + virtual absl::optional MaxSpeakerVolume() const = 0; + virtual absl::optional MinSpeakerVolume() const = 0; virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) = 0; }; diff --git a/sdk/android/src/jni/audio_device/audio_track_jni.cc b/sdk/android/src/jni/audio_device/audio_track_jni.cc index 1b49de6c0e..03959d615e 100644 --- a/sdk/android/src/jni/audio_device/audio_track_jni.cc +++ b/sdk/android/src/jni/audio_device/audio_track_jni.cc @@ -144,17 +144,17 @@ int AudioTrackJni::SetSpeakerVolume(uint32_t volume) { : -1; } -rtc::Optional AudioTrackJni::MaxSpeakerVolume() const { +absl::optional AudioTrackJni::MaxSpeakerVolume() const { RTC_DCHECK(thread_checker_.CalledOnValidThread()); return Java_WebRtcAudioTrack_getStreamMaxVolume(env_, j_audio_track_); } -rtc::Optional AudioTrackJni::MinSpeakerVolume() const { +absl::optional AudioTrackJni::MinSpeakerVolume() const { RTC_DCHECK(thread_checker_.CalledOnValidThread()); return 0; } -rtc::Optional AudioTrackJni::SpeakerVolume() const { +absl::optional AudioTrackJni::SpeakerVolume() const { RTC_DCHECK(thread_checker_.CalledOnValidThread()); const uint32_t volume = Java_WebRtcAudioTrack_getStreamVolume(env_, j_audio_track_); diff --git a/sdk/android/src/jni/audio_device/audio_track_jni.h b/sdk/android/src/jni/audio_device/audio_track_jni.h index 1225caf515..25c6b6f9ff 100644 --- a/sdk/android/src/jni/audio_device/audio_track_jni.h +++ b/sdk/android/src/jni/audio_device/audio_track_jni.h @@ 
-14,7 +14,7 @@ #include #include -#include "api/optional.h" +#include "absl/types/optional.h" #include "modules/audio_device/audio_device_buffer.h" #include "modules/audio_device/include/audio_device_defines.h" #include "rtc_base/thread_checker.h" @@ -62,9 +62,9 @@ class AudioTrackJni : public AudioOutput { bool SpeakerVolumeIsAvailable() override; int SetSpeakerVolume(uint32_t volume) override; - rtc::Optional SpeakerVolume() const override; - rtc::Optional MaxSpeakerVolume() const override; - rtc::Optional MinSpeakerVolume() const override; + absl::optional SpeakerVolume() const override; + absl::optional MaxSpeakerVolume() const override; + absl::optional MinSpeakerVolume() const override; void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override; diff --git a/sdk/android/src/jni/audio_device/opensles_player.cc b/sdk/android/src/jni/audio_device/opensles_player.cc index f5f56bf112..3e831a9b35 100644 --- a/sdk/android/src/jni/audio_device/opensles_player.cc +++ b/sdk/android/src/jni/audio_device/opensles_player.cc @@ -182,16 +182,16 @@ int OpenSLESPlayer::SetSpeakerVolume(uint32_t volume) { return -1; } -rtc::Optional OpenSLESPlayer::SpeakerVolume() const { - return rtc::nullopt; +absl::optional OpenSLESPlayer::SpeakerVolume() const { + return absl::nullopt; } -rtc::Optional OpenSLESPlayer::MaxSpeakerVolume() const { - return rtc::nullopt; +absl::optional OpenSLESPlayer::MaxSpeakerVolume() const { + return absl::nullopt; } -rtc::Optional OpenSLESPlayer::MinSpeakerVolume() const { - return rtc::nullopt; +absl::optional OpenSLESPlayer::MinSpeakerVolume() const { + return absl::nullopt; } void OpenSLESPlayer::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) { diff --git a/sdk/android/src/jni/audio_device/opensles_player.h b/sdk/android/src/jni/audio_device/opensles_player.h index d5f4a68749..d8befe50d0 100644 --- a/sdk/android/src/jni/audio_device/opensles_player.h +++ b/sdk/android/src/jni/audio_device/opensles_player.h @@ -16,7 +16,7 @@ #include #include -#include "api/optional.h" +#include "absl/types/optional.h" #include "modules/audio_device/audio_device_buffer.h" #include "modules/audio_device/fine_audio_buffer.h" #include "modules/audio_device/include/audio_device_defines.h" @@ -75,9 +75,9 @@ class OpenSLESPlayer : public AudioOutput { bool SpeakerVolumeIsAvailable() override; int SetSpeakerVolume(uint32_t volume) override; - rtc::Optional SpeakerVolume() const override; - rtc::Optional MaxSpeakerVolume() const override; - rtc::Optional MinSpeakerVolume() const override; + absl::optional SpeakerVolume() const override; + absl::optional MaxSpeakerVolume() const override; + absl::optional MinSpeakerVolume() const override; void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override; diff --git a/sdk/android/src/jni/pc/icecandidate.cc b/sdk/android/src/jni/pc/icecandidate.cc index 84e0f6e7c6..b9d66a4679 100644 --- a/sdk/android/src/jni/pc/icecandidate.cc +++ b/sdk/android/src/jni/pc/icecandidate.cc @@ -207,13 +207,13 @@ PeerConnectionInterface::TlsCertPolicy JavaToNativeTlsCertPolicy( return PeerConnectionInterface::kTlsCertPolicySecure; } -rtc::Optional JavaToNativeNetworkPreference( +absl::optional JavaToNativeNetworkPreference( JNIEnv* jni, const JavaRef& j_network_preference) { std::string enum_name = GetJavaEnumName(jni, j_network_preference); if (enum_name == "UNKNOWN") - return rtc::nullopt; + return absl::nullopt; if (enum_name == "ETHERNET") return rtc::ADAPTER_TYPE_ETHERNET; @@ -231,7 +231,7 @@ rtc::Optional JavaToNativeNetworkPreference( return 
rtc::ADAPTER_TYPE_LOOPBACK; RTC_CHECK(false) << "Unexpected NetworkPreference enum_name " << enum_name; - return rtc::nullopt; + return absl::nullopt; } } // namespace jni diff --git a/sdk/android/src/jni/pc/icecandidate.h b/sdk/android/src/jni/pc/icecandidate.h index be4d27ca17..662b649ba6 100644 --- a/sdk/android/src/jni/pc/icecandidate.h +++ b/sdk/android/src/jni/pc/icecandidate.h @@ -75,7 +75,7 @@ PeerConnectionInterface::TlsCertPolicy JavaToNativeTlsCertPolicy( JNIEnv* jni, const JavaRef& j_ice_server_tls_cert_policy); -rtc::Optional JavaToNativeNetworkPreference( +absl::optional JavaToNativeNetworkPreference( JNIEnv* jni, const JavaRef& j_network_preference); diff --git a/sdk/android/src/jni/pc/peerconnectionfactory.cc b/sdk/android/src/jni/pc/peerconnectionfactory.cc index ab5bf5568e..5730c209df 100644 --- a/sdk/android/src/jni/pc/peerconnectionfactory.cc +++ b/sdk/android/src/jni/pc/peerconnectionfactory.cc @@ -428,7 +428,7 @@ static jlong JNI_PeerConnectionFactory_CreatePeerConnection( if (key_type != rtc::KT_DEFAULT) { rtc::scoped_refptr certificate = rtc::RTCCertificateGenerator::GenerateCertificate( - rtc::KeyParams(key_type), rtc::nullopt); + rtc::KeyParams(key_type), absl::nullopt); if (!certificate) { RTC_LOG(LS_ERROR) << "Failed to generate certificate. KeyType: " << key_type; diff --git a/sdk/android/src/jni/pc/rtptransceiver.cc b/sdk/android/src/jni/pc/rtptransceiver.cc index 0a115c291f..fa20d8031f 100644 --- a/sdk/android/src/jni/pc/rtptransceiver.cc +++ b/sdk/android/src/jni/pc/rtptransceiver.cc @@ -89,7 +89,7 @@ ScopedJavaLocalRef JNI_RtpTransceiver_GetMid( JNIEnv* jni, const base::android::JavaParamRef&, jlong j_rtp_transceiver_pointer) { - rtc::Optional mid = + absl::optional mid = reinterpret_cast(j_rtp_transceiver_pointer) ->mid(); return NativeToJavaString(jni, mid); @@ -133,7 +133,7 @@ ScopedJavaLocalRef JNI_RtpTransceiver_CurrentDirection( JNIEnv* jni, const base::android::JavaParamRef&, jlong j_rtp_transceiver_pointer) { - rtc::Optional direction = + absl::optional direction = reinterpret_cast(j_rtp_transceiver_pointer) ->current_direction(); return direction ? NativeToJavaRtpTransceiverDirection(jni, *direction) diff --git a/sdk/android/src/jni/pc/sessiondescription.cc b/sdk/android/src/jni/pc/sessiondescription.cc index 6bdbfa174d..bd3806d562 100644 --- a/sdk/android/src/jni/pc/sessiondescription.cc +++ b/sdk/android/src/jni/pc/sessiondescription.cc @@ -27,7 +27,7 @@ std::unique_ptr JavaToNativeSessionDescription( jni, Java_SessionDescription_getTypeInCanonicalForm(jni, j_sdp)); std::string std_description = JavaToStdString(jni, Java_SessionDescription_getDescription(jni, j_sdp)); - rtc::Optional sdp_type_maybe = SdpTypeFromString(std_type); + absl::optional sdp_type_maybe = SdpTypeFromString(std_type); if (!sdp_type_maybe) { RTC_LOG(LS_ERROR) << "Unexpected SDP type: " << std_type; return nullptr; diff --git a/sdk/android/src/jni/videodecoderwrapper.cc b/sdk/android/src/jni/videodecoderwrapper.cc index a7aee0423f..5fbd72fec4 100644 --- a/sdk/android/src/jni/videodecoderwrapper.cc +++ b/sdk/android/src/jni/videodecoderwrapper.cc @@ -30,9 +30,9 @@ namespace { const int64_t kNumRtpTicksPerMillisec = 90000 / rtc::kNumMillisecsPerSec; template -inline rtc::Optional cast_optional(const rtc::Optional& value) { - return value ? rtc::Optional(rtc::dchecked_cast(*value)) - : rtc::nullopt; +inline absl::optional cast_optional(const absl::optional& value) { + return value ? 
absl::optional(rtc::dchecked_cast(*value))
+               : absl::nullopt;
 }
 }  // namespace
@@ -106,7 +106,7 @@ int32_t VideoDecoderWrapper::Decode(
   frame_extra_info.timestamp_rtp = input_image._timeStamp;
   frame_extra_info.timestamp_ntp = input_image.ntp_time_ms_;
   frame_extra_info.qp =
-      qp_parsing_enabled_ ? ParseQP(input_image) : rtc::nullopt;
+      qp_parsing_enabled_ ? ParseQP(input_image) : absl::nullopt;
   {
     rtc::CritScope cs(&frame_extra_infos_lock_);
     frame_extra_infos_.push_back(frame_extra_info);
@@ -183,10 +183,10 @@ void VideoDecoderWrapper::OnDecodedFrame(
       JavaToNativeFrame(env, j_frame, frame_extra_info.timestamp_rtp);
   frame.set_ntp_time_ms(frame_extra_info.timestamp_ntp);
-  rtc::Optional decoding_time_ms =
+  absl::optional decoding_time_ms =
       JavaToNativeOptionalInt(env, j_decode_time_ms);
-  rtc::Optional decoder_qp =
+  absl::optional decoder_qp =
       cast_optional(JavaToNativeOptionalInt(env, j_qp));
   // If the decoder provides QP values itself, no need to parse the bitstream.
   // Enable QP parsing if decoder does not provide QP values itself.
@@ -226,13 +226,13 @@ int32_t VideoDecoderWrapper::HandleReturnCode(JNIEnv* jni,
   return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
 }
-rtc::Optional VideoDecoderWrapper::ParseQP(
+absl::optional VideoDecoderWrapper::ParseQP(
     const EncodedImage& input_image) {
   if (input_image.qp_ != -1) {
     return input_image.qp_;
   }
-  rtc::Optional qp;
+  absl::optional qp;
   switch (codec_settings_.codecType) {
     case kVideoCodecVP8: {
       int qp_int;
diff --git a/sdk/android/src/jni/videodecoderwrapper.h b/sdk/android/src/jni/videodecoderwrapper.h
index b56a3a739d..c719aa4525 100644
--- a/sdk/android/src/jni/videodecoderwrapper.h
+++ b/sdk/android/src/jni/videodecoderwrapper.h
@@ -66,7 +66,7 @@ class VideoDecoderWrapper : public VideoDecoder {
     uint32_t timestamp_rtp;
     int64_t timestamp_ntp;
-    rtc::Optional qp;
+    absl::optional qp;
     FrameExtraInfo();
     FrameExtraInfo(const FrameExtraInfo&);
@@ -82,7 +82,7 @@ class VideoDecoderWrapper : public VideoDecoder {
                            const char* method_name)
       RTC_RUN_ON(decoder_thread_checker_);
-  rtc::Optional ParseQP(const EncodedImage& input_image)
+  absl::optional ParseQP(const EncodedImage& input_image)
       RTC_RUN_ON(decoder_thread_checker_);
   const ScopedJavaGlobalRef decoder_;
diff --git a/sdk/android/src/jni/videoencoderwrapper.cc b/sdk/android/src/jni/videoencoderwrapper.cc
index ce48f56c56..94719ead16 100644
--- a/sdk/android/src/jni/videoencoderwrapper.cc
+++ b/sdk/android/src/jni/videoencoderwrapper.cc
@@ -165,10 +165,10 @@ VideoEncoderWrapper::ScalingSettings VideoEncoderWrapper::GetScalingSettings()
   if (!isOn)
     return ScalingSettings::kOff;
-  rtc::Optional low = JavaToNativeOptionalInt(
+  absl::optional low = JavaToNativeOptionalInt(
       jni,
       Java_VideoEncoderWrapper_getScalingSettingsLow(jni, j_scaling_settings));
-  rtc::Optional high = JavaToNativeOptionalInt(
+  absl::optional high = JavaToNativeOptionalInt(
       jni,
       Java_VideoEncoderWrapper_getScalingSettingsHigh(jni, j_scaling_settings));
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration.mm
index 4bd0450f69..a357085caf 100644
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration.mm
+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCConfiguration.mm
@@ -171,8 +171,8 @@
   // Generate non-default certificate.
   if (keyType != rtc::KT_DEFAULT) {
     rtc::scoped_refptr certificate =
-        rtc::RTCCertificateGenerator::GenerateCertificate(
-            rtc::KeyParams(keyType), rtc::Optional());
+        rtc::RTCCertificateGenerator::GenerateCertificate(rtc::KeyParams(keyType),
+                                                          absl::optional());
     if (!certificate) {
       RTCLogError(@"Failed to generate certificate.");
       return nullptr;
@@ -184,14 +184,13 @@
   nativeConfig->presume_writable_when_fully_relayed =
       _shouldPresumeWritableWhenFullyRelayed ? true : false;
   if (_iceCheckMinInterval != nil) {
-    nativeConfig->ice_check_min_interval =
-        rtc::Optional(_iceCheckMinInterval.intValue);
+    nativeConfig->ice_check_min_interval = absl::optional(_iceCheckMinInterval.intValue);
   }
   if (_iceRegatherIntervalRange != nil) {
     std::unique_ptr nativeIntervalRange(
         _iceRegatherIntervalRange.nativeIntervalRange);
     nativeConfig->ice_regather_interval_range =
-        rtc::Optional(*nativeIntervalRange);
+        absl::optional(*nativeIntervalRange);
   }
   nativeConfig->sdp_semantics = [[self class] nativeSdpSemanticsForSdpSemantics:_sdpSemantics];
   if (_turnCustomizer) {
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCH264ProfileLevelId.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCH264ProfileLevelId.mm
index 33f9ae92e1..04a5689417 100644
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCH264ProfileLevelId.mm
+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCH264ProfileLevelId.mm
@@ -31,7 +31,7 @@
   if (self = [super init]) {
     self.hexString = hexString;
-    rtc::Optional profile_level_id =
+    absl::optional profile_level_id =
        webrtc::H264::ParseProfileLevelId([hexString cStringUsingEncoding:NSUTF8StringEncoding]);
    if (profile_level_id.has_value()) {
      self.profile = static_cast(profile_level_id->profile);
@@ -46,7 +46,7 @@
    self.profile = profile;
    self.level = level;
-    rtc::Optional hex_string =
+    absl::optional hex_string =
        webrtc::H264::ProfileLevelIdToString(webrtc::H264::ProfileLevelId(
            static_cast(profile), static_cast(level)));
    self.hexString =
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection.mm
index c16942246c..bea0edebdd 100644
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection.mm
+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCPeerConnection.mm
@@ -470,13 +470,13 @@ void PeerConnectionDelegateAdapter::OnAddTrack(
               maxBitrateBps:(nullable NSNumber *)maxBitrateBps {
   webrtc::PeerConnectionInterface::BitrateParameters params;
   if (minBitrateBps != nil) {
-    params.min_bitrate_bps = rtc::Optional(minBitrateBps.intValue);
+    params.min_bitrate_bps = absl::optional(minBitrateBps.intValue);
   }
   if (currentBitrateBps != nil) {
-    params.current_bitrate_bps = rtc::Optional(currentBitrateBps.intValue);
+    params.current_bitrate_bps = absl::optional(currentBitrateBps.intValue);
   }
   if (maxBitrateBps != nil) {
-    params.max_bitrate_bps = rtc::Optional(maxBitrateBps.intValue);
+    params.max_bitrate_bps = absl::optional(maxBitrateBps.intValue);
   }
   return _peerConnection->SetBitrate(params).ok();
 }
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpCodecParameters.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCRtpCodecParameters.mm
index b6baee6054..7951cee45d 100644
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpCodecParameters.mm
+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCRtpCodecParameters.mm
@@ -93,10 +93,10 @@ const NSString * const kRTCH264CodecName = @(cricket::kH264CodecName);
       RTC_NOTREACHED();
   }
   if (_clockRate != nil) {
-    parameters.clock_rate = rtc::Optional(_clockRate.intValue);
+    parameters.clock_rate = absl::optional(_clockRate.intValue);
   }
   if (_numChannels != nil) {
-    parameters.num_channels = rtc::Optional(_numChannels.intValue);
+    parameters.num_channels = absl::optional(_numChannels.intValue);
   }
   for (NSString *paramKey in _parameters.allKeys) {
     std::string key = [NSString stdStringForString:paramKey];
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpEncodingParameters.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCRtpEncodingParameters.mm
index 8521862758..299e318bdc 100644
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCRtpEncodingParameters.mm
+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCRtpEncodingParameters.mm
@@ -44,13 +44,13 @@
   webrtc::RtpEncodingParameters parameters;
   parameters.active = _isActive;
   if (_maxBitrateBps != nil) {
-    parameters.max_bitrate_bps = rtc::Optional(_maxBitrateBps.intValue);
+    parameters.max_bitrate_bps = absl::optional(_maxBitrateBps.intValue);
   }
   if (_minBitrateBps != nil) {
-    parameters.min_bitrate_bps = rtc::Optional(_minBitrateBps.intValue);
+    parameters.min_bitrate_bps = absl::optional(_minBitrateBps.intValue);
   }
   if (_ssrc != nil) {
-    parameters.ssrc = rtc::Optional(_ssrc.unsignedLongValue);
+    parameters.ssrc = absl::optional(_ssrc.unsignedLongValue);
   }
   return parameters;
 }
diff --git a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodec.mm b/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodec.mm
index 63be2dc67c..d7521262d4 100644
--- a/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodec.mm
+++ b/sdk/objc/Framework/Classes/PeerConnection/RTCVideoCodec.mm
@@ -43,9 +43,9 @@ namespace {
 using namespace webrtc::H264;
 NSString *MaxSupportedLevelForProfile(Profile profile) {
-  const rtc::Optional profileLevelId = [UIDevice maxSupportedH264Profile];
+  const absl::optional profileLevelId = [UIDevice maxSupportedH264Profile];
   if (profileLevelId && profileLevelId->profile >= profile) {
-    const rtc::Optional profileString =
+    const absl::optional profileString =
        ProfileLevelIdToString(ProfileLevelId(profile, profileLevelId->level));
    if (profileString) {
      return [NSString stringForStdString:*profileString];
diff --git a/sdk/objc/Framework/Classes/Video/RTCDefaultShader.mm b/sdk/objc/Framework/Classes/Video/RTCDefaultShader.mm
index 3caf144262..c5fbde12c8 100644
--- a/sdk/objc/Framework/Classes/Video/RTCDefaultShader.mm
+++ b/sdk/objc/Framework/Classes/Video/RTCDefaultShader.mm
@@ -20,7 +20,7 @@
 #import "RTCShader.h"
 #import "WebRTC/RTCLogging.h"
-#include "api/optional.h"
+#include "absl/types/optional.h"
 static const int kYTextureUnit = 0;
 static const int kUTextureUnit = 1;
@@ -73,7 +73,7 @@ static const char kNV12FragmentShaderSource[] =
   GLuint _vertexBuffer;
   GLuint _vertexArray;
   // Store current rotation and only upload new vertex data when rotation changes.
-  rtc::Optional _currentRotation;
+  absl::optional _currentRotation;
   GLuint _i420Program;
   GLuint _nv12Program;
@@ -144,7 +144,7 @@ static const char kNV12FragmentShaderSource[] =
 #endif
   glBindBuffer(GL_ARRAY_BUFFER, _vertexBuffer);
   if (!_currentRotation || rotation != *_currentRotation) {
-    _currentRotation = rtc::Optional(rotation);
+    _currentRotation = absl::optional(rotation);
     RTCSetVertexData(*_currentRotation);
   }
   return YES;
diff --git a/sdk/objc/Framework/Classes/Video/UIDevice+H264Profile.h b/sdk/objc/Framework/Classes/Video/UIDevice+H264Profile.h
index 03ea780b2a..bb6f6ce520 100644
--- a/sdk/objc/Framework/Classes/Video/UIDevice+H264Profile.h
+++ b/sdk/objc/Framework/Classes/Video/UIDevice+H264Profile.h
@@ -14,6 +14,6 @@
 @interface UIDevice (H264Profile)
-+ (rtc::Optional)maxSupportedH264Profile;
++ (absl::optional)maxSupportedH264Profile;
 @end
diff --git a/sdk/objc/Framework/Classes/Video/UIDevice+H264Profile.mm b/sdk/objc/Framework/Classes/Video/UIDevice+H264Profile.mm
index ef94c14c8f..196e34e4ef 100644
--- a/sdk/objc/Framework/Classes/Video/UIDevice+H264Profile.mm
+++ b/sdk/objc/Framework/Classes/Video/UIDevice+H264Profile.mm
@@ -85,7 +85,7 @@ constexpr SupportedH264Profile kH264MaxSupportedProfiles[] = {
     {RTCDeviceTypeIPadPro10Inch, {kProfileHigh, kLevel4_2}},  // https://support.apple.com/kb/SP762
 };
-rtc::Optional FindMaxSupportedProfileForDevice(RTCDeviceType deviceType) {
+absl::optional FindMaxSupportedProfileForDevice(RTCDeviceType deviceType) {
   const auto* result = std::find_if(std::begin(kH264MaxSupportedProfiles),
                                     std::end(kH264MaxSupportedProfiles),
                                     [deviceType](const SupportedH264Profile& supportedProfile) {
@@ -94,14 +94,14 @@ rtc::Optional FindMaxSupportedProfileForDevice(RTCDeviceType dev
   if (result != std::end(kH264MaxSupportedProfiles)) {
     return result->profile;
   }
-  return rtc::nullopt;
+  return absl::nullopt;
 }
 }  // namespace
 @implementation UIDevice (H264Profile)
-+ (rtc::Optional)maxSupportedH264Profile {
++ (absl::optional)maxSupportedH264Profile {
   return FindMaxSupportedProfileForDevice([self deviceType]);
 }
diff --git a/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoEncoderH264.mm b/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoEncoderH264.mm
index 66e9b61128..27dcdeef7f 100644
--- a/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoEncoderH264.mm
+++ b/sdk/objc/Framework/Classes/VideoToolbox/RTCVideoEncoderH264.mm
@@ -172,7 +172,7 @@ void compressionOutputCallback(void *encoder,
 // returned. The user must initialize the encoder with a resolution and
 // framerate conforming to the selected H264 level regardless.
 CFStringRef ExtractProfile(webrtc::SdpVideoFormat videoFormat) {
-  const rtc::Optional profile_level_id =
+  const absl::optional profile_level_id =
       webrtc::H264::ParseSdpProfileLevelId(videoFormat.parameters);
   RTC_DCHECK(profile_level_id);
   switch (profile_level_id->profile) {
diff --git a/sdk/objc/Framework/Native/src/objc_video_track_source.h b/sdk/objc/Framework/Native/src/objc_video_track_source.h
index 1062e967e7..d237980d40 100644
--- a/sdk/objc/Framework/Native/src/objc_video_track_source.h
+++ b/sdk/objc/Framework/Native/src/objc_video_track_source.h
@@ -36,7 +36,7 @@ class ObjCVideoTrackSource : public rtc::AdaptedVideoTrackSource {
   // Indicates that the encoder should denoise video before encoding it.
   // If it is not set, the default configuration is used which is different
   // depending on video codec.
-  rtc::Optional needs_denoising() const override { return false; }
+  absl::optional needs_denoising() const override { return false; }
   SourceState state() const override { return SourceState::kLive; }
diff --git a/system_wrappers/BUILD.gn b/system_wrappers/BUILD.gn
index 9b178f92eb..5e2858f6fe 100644
--- a/system_wrappers/BUILD.gn
+++ b/system_wrappers/BUILD.gn
@@ -40,10 +40,10 @@ rtc_static_library("system_wrappers") {
     ":runtime_enabled_features_api",
     "..:webrtc_common",
     "../:typedefs",
-    "../api:optional",
     "../modules:module_api_public",
     "../rtc_base:checks",
     "../rtc_base/synchronization:rw_lock_wrapper",
+    "//third_party/abseil-cpp/absl/types:optional",
   ]
   if (is_posix || is_fuchsia) {
diff --git a/system_wrappers/include/rtp_to_ntp_estimator.h b/system_wrappers/include/rtp_to_ntp_estimator.h
index 7c0757c546..62a79a597f 100644
--- a/system_wrappers/include/rtp_to_ntp_estimator.h
+++ b/system_wrappers/include/rtp_to_ntp_estimator.h
@@ -13,7 +13,7 @@
 #include 
-#include "api/optional.h"
+#include "absl/types/optional.h"
 #include "modules/include/module_common_types_public.h"
 #include "rtc_base/numerics/moving_median_filter.h"
 #include "system_wrappers/include/ntp_time.h"
@@ -72,7 +72,7 @@ class RtpToNtpEstimator {
   bool Estimate(int64_t rtp_timestamp, int64_t* rtp_timestamp_ms) const;
   // Returns estimated rtp to ntp linear transform parameters.
-  const rtc::Optional params() const;
+  const absl::optional params() const;
   static const int kMaxInvalidSamples = 3;
diff --git a/system_wrappers/source/rtp_to_ntp_estimator.cc b/system_wrappers/source/rtp_to_ntp_estimator.cc
index 5af102a054..730c4f6691 100644
--- a/system_wrappers/source/rtp_to_ntp_estimator.cc
+++ b/system_wrappers/source/rtp_to_ntp_estimator.cc
@@ -193,9 +193,9 @@ bool RtpToNtpEstimator::Estimate(int64_t rtp_timestamp,
   return true;
 }
-const rtc::Optional RtpToNtpEstimator::params()
+const absl::optional RtpToNtpEstimator::params()
     const {
-  rtc::Optional res;
+  absl::optional res;
   if (params_calculated_) {
     res.emplace(smoothing_filter_.GetFilteredValue());
   }
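
For reference, a minimal standalone sketch of the absl::optional operations the rewritten call sites above rely on: construction from a value, absl::nullopt for the empty state, and has_value()/operator* for access. This is not part of the patch; the function and variable names below are illustrative only, loosely mirroring the QP-parsing call sites.

#include <cstdint>

#include "absl/types/optional.h"

// Illustrative helper: returns an empty optional when no QP was found,
// mirroring how the wrapped decoders report "no value".
absl::optional<uint8_t> ToQp(bool found, int raw_qp) {
  if (!found)
    return absl::nullopt;
  return absl::optional<uint8_t>(static_cast<uint8_t>(raw_qp));
}

int main() {
  absl::optional<uint8_t> qp = ToQp(true, 30);
  // Dereference only after checking has_value(), as the call sites above do.
  return qp.has_value() ? *qp : 0;
}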