Remove rule that discourages passing optional by const reference
Include an example to demonstrate:
(subjectively) increased readability
(objectively) decreased binary size

Bug: None
Change-Id: I970e668af98d98725b2d527f44169a8b7c9d2338
Reviewed-on: https://webrtc-review.googlesource.com/c/121420
Commit-Queue: Danil Chapovalov <danilchap@webrtc.org>
Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
Reviewed-by: Johannes Kron <kron@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#26545}
commit b769894025 (parent 681de2036b), committed by Commit Bot
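The readability claim above refers to call sites like the RtpFrameObject and FrameGeneratorCapturer hunks further down, where an `absl::optional` no longer has to be unwrapped into a raw pointer. As a hedged illustration only (not part of the change), the sketch below contrasts the two setter styles; `Frame`, `Metadata`, `SetMetadataPtr`, and `CopyMetadata` are hypothetical stand-ins for `EncodedImage`, `webrtc::ColorSpace`, and the real setters, and it assumes Abseil's `absl::optional` is available, as it is in the WebRTC tree.

```cpp
// Sketch only: contrasts the pointer-based setter this change removes with the
// optional-based setter it adds. All names here are hypothetical stand-ins.
#include "absl/types/optional.h"

struct Metadata {
  int primaries = 0;
};

class Frame {
 public:
  // Old style: the caller must turn whatever it stores into a raw pointer.
  void SetMetadataPtr(const Metadata* metadata) {
    metadata_ = metadata ? absl::make_optional(*metadata) : absl::nullopt;
  }
  // New style: an optional held by the caller is forwarded as-is.
  void SetMetadata(const absl::optional<Metadata>& metadata) {
    metadata_ = metadata;
  }

 private:
  absl::optional<Metadata> metadata_;
};

void CopyMetadata(const absl::optional<Metadata>& source, Frame* frame) {
  // Before: unwrap the optional by hand and fall back to nullptr.
  frame->SetMetadataPtr(source ? &source.value() : nullptr);
  // After: pass the optional straight through.
  frame->SetMetadata(source);
}
```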
@@ -62,9 +62,8 @@ class RTC_EXPORT EncodedImage {
   const webrtc::ColorSpace* ColorSpace() const {
     return color_space_ ? &*color_space_ : nullptr;
   }
-  void SetColorSpace(const webrtc::ColorSpace* color_space) {
-    color_space_ =
-        color_space ? absl::make_optional(*color_space) : absl::nullopt;
+  void SetColorSpace(const absl::optional<webrtc::ColorSpace>& color_space) {
+    color_space_ = color_space;
   }
 
   size_t size() const { return size_; }
@@ -61,7 +61,7 @@ VideoFrame::Builder& VideoFrame::Builder::set_rotation(VideoRotation rotation) {
 }
 
 VideoFrame::Builder& VideoFrame::Builder::set_color_space(
-    const ColorSpace& color_space) {
+    const absl::optional<ColorSpace>& color_space) {
   color_space_ = color_space;
   return *this;
 }
@@ -47,7 +47,7 @@ class RTC_EXPORT VideoFrame {
     Builder& set_timestamp_rtp(uint32_t timestamp_rtp);
     Builder& set_ntp_time_ms(int64_t ntp_time_ms);
     Builder& set_rotation(VideoRotation rotation);
-    Builder& set_color_space(const ColorSpace& color_space);
+    Builder& set_color_space(const absl::optional<ColorSpace>& color_space);
     Builder& set_color_space(const ColorSpace* color_space);
     Builder& set_id(uint16_t id);
     Builder& set_partial_frame_description(
@@ -140,12 +140,9 @@ class RTC_EXPORT VideoFrame {
   void set_rotation(VideoRotation rotation) { rotation_ = rotation; }
 
   // Get color space when available.
-  const ColorSpace* color_space() const {
-    return color_space_ ? &*color_space_ : nullptr;
-  }
-  void set_color_space(ColorSpace* color_space) {
-    color_space_ =
-        color_space ? absl::make_optional(*color_space) : absl::nullopt;
+  const absl::optional<ColorSpace>& color_space() const { return color_space_; }
+  void set_color_space(const absl::optional<ColorSpace>& color_space) {
+    color_space_ = color_space;
   }
 
   const PartialFrameDescription* partial_frame_description() const {
@@ -126,7 +126,7 @@ TEST(RtpPayloadParamsTest, InfoMappedToRtpVideoHeader_Vp9) {
   ColorSpace color_space(
       ColorSpace::PrimaryID::kSMPTE170M, ColorSpace::TransferID::kSMPTE170M,
       ColorSpace::MatrixID::kSMPTE170M, ColorSpace::RangeID::kFull);
-  encoded_image.SetColorSpace(&color_space);
+  encoded_image.SetColorSpace(color_space);
   header = params.GetRtpVideoHeader(encoded_image, &codec_info, kDontCare);
 
   EXPECT_EQ(kVideoRotation_90, header.rotation);
@@ -123,7 +123,7 @@ TEST_F(TestH264Impl, MAYBE_EncodedColorSpaceEqualsInputColorSpace) {
   VideoFrame input_frame_w_color_space =
       VideoFrame::Builder()
           .set_video_frame_buffer(input_frame->video_frame_buffer())
-          .set_color_space(&color_space)
+          .set_color_space(color_space)
           .build();
 
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
@@ -141,7 +141,7 @@ TEST_F(TestH264Impl, MAYBE_DecodedColorSpaceEqualsEncodedColorSpace) {
   ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
   // Add color space to encoded frame.
   ColorSpace color_space = CreateTestColorSpace(/*with_hdr_metadata=*/false);
-  encoded_frame.SetColorSpace(&color_space);
+  encoded_frame.SetColorSpace(color_space);
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
             decoder_->Decode(encoded_frame, false, nullptr, 0));
   std::unique_ptr<VideoFrame> decoded_frame;
@@ -283,11 +283,12 @@ int LibvpxVp8Decoder::Decode(const EncodedImage& input_image,
   return WEBRTC_VIDEO_CODEC_OK;
 }
 
-int LibvpxVp8Decoder::ReturnFrame(const vpx_image_t* img,
-                                  uint32_t timestamp,
-                                  int64_t ntp_time_ms,
-                                  int qp,
-                                  const ColorSpace* explicit_color_space) {
+int LibvpxVp8Decoder::ReturnFrame(
+    const vpx_image_t* img,
+    uint32_t timestamp,
+    int64_t ntp_time_ms,
+    int qp,
+    const webrtc::ColorSpace* explicit_color_space) {
   if (img == NULL) {
     // Decoder OK and NULL image => No show frame
     return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
@@ -54,7 +54,7 @@ class LibvpxVp8Decoder : public VideoDecoder {
                   uint32_t timeStamp,
                   int64_t ntp_time_ms,
                   int qp,
-                  const ColorSpace* explicit_color_space);
+                  const webrtc::ColorSpace* explicit_color_space);
   const bool use_postproc_arm_;
 
   I420BufferPool buffer_pool_;
@@ -193,7 +193,7 @@ TEST_F(TestVp8Impl, EncodedColorSpaceEqualsInputColorSpace) {
   VideoFrame input_frame_w_color_space =
       VideoFrame::Builder()
          .set_video_frame_buffer(input_frame->video_frame_buffer())
-          .set_color_space(&color_space)
+          .set_color_space(color_space)
          .build();
 
   EncodeAndWaitForFrame(input_frame_w_color_space, &encoded_frame,
@@ -229,7 +229,7 @@ TEST_F(TestVp8Impl, DecodedColorSpaceEqualsEncodedColorSpace) {
 
   // Encoded frame with explicit color space information.
   ColorSpace color_space = CreateTestColorSpace(/*with_hdr_metadata=*/false);
-  encoded_frame.SetColorSpace(&color_space);
+  encoded_frame.SetColorSpace(color_space);
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
             decoder_->Decode(encoded_frame, false, nullptr, -1));
   std::unique_ptr<VideoFrame> decoded_frame;
@@ -185,7 +185,7 @@ TEST_F(TestVp9Impl, EncodedColorSpaceEqualsInputColorSpace) {
   VideoFrame input_frame_w_hdr =
       VideoFrame::Builder()
          .set_video_frame_buffer(input_frame->video_frame_buffer())
-          .set_color_space(&color_space)
+          .set_color_space(color_space)
          .build();
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
             encoder_->Encode(input_frame_w_hdr, nullptr, nullptr));
@@ -215,7 +215,7 @@ TEST_F(TestVp9Impl, DecodedColorSpaceEqualsEncodedColorSpace) {
 
   // Encoded frame with explicit color space information.
   ColorSpace color_space = CreateTestColorSpace(/*with_hdr_metadata=*/true);
-  encoded_frame.SetColorSpace(&color_space);
+  encoded_frame.SetColorSpace(color_space);
   EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
             decoder_->Decode(encoded_frame, false, nullptr, 0));
   ASSERT_TRUE(WaitForDecodedFrame(&decoded_frame, &decoded_qp));
@@ -1480,11 +1480,12 @@ int VP9DecoderImpl::Decode(const EncodedImage& input_image,
   return WEBRTC_VIDEO_CODEC_OK;
 }
 
-int VP9DecoderImpl::ReturnFrame(const vpx_image_t* img,
-                                uint32_t timestamp,
-                                int64_t ntp_time_ms,
-                                int qp,
-                                const ColorSpace* explicit_color_space) {
+int VP9DecoderImpl::ReturnFrame(
+    const vpx_image_t* img,
+    uint32_t timestamp,
+    int64_t ntp_time_ms,
+    int qp,
+    const webrtc::ColorSpace* explicit_color_space) {
   if (img == nullptr) {
     // Decoder OK and nullptr image => No show frame.
     return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
@@ -1533,7 +1534,7 @@ int VP9DecoderImpl::ReturnFrame(const vpx_image_t* img,
           .set_ntp_time_ms(ntp_time_ms)
           .set_rotation(webrtc::kVideoRotation_0);
   if (explicit_color_space) {
-    builder.set_color_space(explicit_color_space);
+    builder.set_color_space(*explicit_color_space);
   } else {
     builder.set_color_space(
         ExtractVP9ColorSpace(img->cs, img->range, img->bit_depth));
@@ -177,7 +177,7 @@ class VP9DecoderImpl : public VP9Decoder {
                   uint32_t timestamp,
                   int64_t ntp_time_ms,
                   int qp,
-                  const ColorSpace* explicit_color_space);
+                  const webrtc::ColorSpace* explicit_color_space);
 
   // Memory pool used to share buffers between libvpx and webrtc.
   Vp9FrameBufferPool frame_buffer_pool_;
@@ -73,9 +73,7 @@ RtpFrameObject::RtpFrameObject(PacketBuffer* packet_buffer,
   // frame (I-frame or IDR frame in H.264 (AVC), or an IRAP picture in H.265
   // (HEVC)).
   rotation_ = last_packet->video_header.rotation;
-  SetColorSpace(last_packet->video_header.color_space
-                    ? &last_packet->video_header.color_space.value()
-                    : nullptr);
+  SetColorSpace(last_packet->video_header.color_space);
   _rotation_set = true;
   content_type_ = last_packet->video_header.content_type;
   if (last_packet->video_header.video_timing.flags !=
@@ -77,18 +77,6 @@ instead of | use
 
 See [the source](api/array_view.h) for more detailed docs.
 
-### `absl::optional<T>` as function argument
-
-`absl::optional<T>` is generally a good choice when you want to pass a
-possibly missing `T` to a function—provided of course that `T`
-is a type that it makes sense to pass by value.
-
-However, when you want to avoid pass-by-value, generally **do not pass
-`const absl::optional<T>&`; use `const T*` instead.** `const
-absl::optional<T>&` forces the caller to store the `T` in an
-`absl::optional<T>`; `const T*`, on the other hand, makes no
-assumptions about how the `T` is stored.
-
 ### sigslot
 
 sigslot is a lightweight library that adds a signal/slot language
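For context on the guideline removed in the hunk above, a minimal hedged sketch of the trade-off it describes (the `Report` type and `Print*` functions are hypothetical, not WebRTC APIs; assumes Abseil): a caller that already holds an `absl::optional<T>` can pass it to a `const absl::optional<T>&` parameter directly, while a caller holding a plain `T` pays for a temporary optional; a `const T*` parameter accepts either storage scheme.

```cpp
// Hypothetical example of the two parameter styles discussed in the removed
// guideline; not code from the WebRTC tree.
#include <iostream>

#include "absl/types/optional.h"

struct Report {
  int value;
};

// Optional-reference parameter: natural when the caller already stores an optional.
void PrintByOptional(const absl::optional<Report>& report) {
  std::cout << (report ? report->value : -1) << "\n";
}

// Pointer parameter: makes no assumption about how the caller stores the Report.
void PrintByPointer(const Report* report) {
  std::cout << (report ? report->value : -1) << "\n";
}

int main() {
  Report plain{42};                          // caller stores a plain Report
  absl::optional<Report> maybe = Report{7};  // caller stores an optional

  PrintByPointer(&plain);                     // no conversion needed
  PrintByPointer(maybe ? &*maybe : nullptr);  // unwrap the optional by hand

  PrintByOptional(maybe);  // no conversion needed
  PrintByOptional(plain);  // copies `plain` into a temporary optional
  return 0;
}
```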
@@ -150,7 +150,7 @@ void FrameGeneratorCapturer::InsertFrame() {
     frame->set_ntp_time_ms(clock_->CurrentNtpInMilliseconds());
     frame->set_rotation(fake_rotation_);
     if (fake_color_space_) {
-      frame->set_color_space(&fake_color_space_.value());
+      frame->set_color_space(fake_color_space_);
     }
     if (first_frame_capture_time_ == -1) {
       first_frame_capture_time_ = frame->ntp_time_ms();