Add retransmission_allowed flag to encoder output
Using this flag, an encoder may inform the RTP sender module that the packet is not eligible for retransmission. Specifically, it may not be retransmitted in response to a NACK message, nor because of early loss detection (see CL #135881). Bug: webrtc:10702 Change-Id: Ib6a9cc361cf10ea7214cf672e05940c27899a6be Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/140105 Commit-Queue: Elad Alon <eladalon@webrtc.org> Reviewed-by: Danil Chapovalov <danilchap@webrtc.org> Reviewed-by: Erik Språng <sprang@webrtc.org> Reviewed-by: Niels Moller <nisse@webrtc.org> Cr-Commit-Position: refs/heads/master@{#28169}
This commit is contained in:
@ -76,6 +76,11 @@ class RTC_EXPORT EncodedImage {
|
||||
color_space_ = color_space;
|
||||
}
|
||||
|
||||
bool RetransmissionAllowed() const { return retransmission_allowed_; }
|
||||
void SetRetransmissionAllowed(bool retransmission_allowed) {
|
||||
retransmission_allowed_ = retransmission_allowed;
|
||||
}
|
||||
|
||||
size_t size() const { return size_; }
|
||||
void set_size(size_t new_size) {
|
||||
RTC_DCHECK_LE(new_size, capacity());
|
||||
@ -158,6 +163,7 @@ class RTC_EXPORT EncodedImage {
|
||||
absl::optional<int> spatial_index_;
|
||||
std::map<int, size_t> spatial_layer_frame_size_bytes_;
|
||||
absl::optional<webrtc::ColorSpace> color_space_;
|
||||
bool retransmission_allowed_ = true;
|
||||
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
@ -42,7 +42,8 @@ Vp8FrameConfig::Vp8FrameConfig(BufferFlags last,
|
||||
layer_sync(false),
|
||||
freeze_entropy(freeze_entropy),
|
||||
first_reference(Vp8BufferReference::kNone),
|
||||
second_reference(Vp8BufferReference::kNone) {}
|
||||
second_reference(Vp8BufferReference::kNone),
|
||||
retransmission_allowed(true) {}
|
||||
|
||||
bool Vp8FrameConfig::References(Buffer buffer) const {
|
||||
switch (buffer) {
|
||||
|
@ -15,6 +15,8 @@
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Configuration of a VP8 frame - which buffers are to be referenced
|
||||
// by it, which buffers should be updated, etc.
|
||||
struct Vp8FrameConfig {
|
||||
enum BufferFlags : int {
|
||||
kNone = 0,
|
||||
@ -86,6 +88,9 @@ struct Vp8FrameConfig {
|
||||
Vp8BufferReference first_reference;
|
||||
Vp8BufferReference second_reference;
|
||||
|
||||
// Whether this frame is eligible for retransmission.
|
||||
bool retransmission_allowed;
|
||||
|
||||
private:
|
||||
Vp8FrameConfig(BufferFlags last,
|
||||
BufferFlags golden,
|
||||
|
@ -421,8 +421,12 @@ EncodedImageCallback::Result RtpVideoSender::OnEncodedImage(
|
||||
// The payload router could be active but this module isn't sending.
|
||||
return Result(Result::ERROR_SEND_FAILED);
|
||||
}
|
||||
int64_t expected_retransmission_time_ms =
|
||||
rtp_streams_[stream_index].rtp_rtcp->ExpectedRetransmissionTimeMs();
|
||||
|
||||
absl::optional<int64_t> expected_retransmission_time_ms;
|
||||
if (encoded_image.RetransmissionAllowed()) {
|
||||
expected_retransmission_time_ms =
|
||||
rtp_streams_[stream_index].rtp_rtcp->ExpectedRetransmissionTimeMs();
|
||||
}
|
||||
|
||||
bool send_result = rtp_streams_[stream_index].sender_video->SendVideo(
|
||||
encoded_image._frameType, rtp_config_.payload_type, rtp_timestamp,
|
||||
|
@ -424,15 +424,16 @@ absl::optional<uint32_t> RTPSenderVideo::FlexfecSsrc() const {
|
||||
return absl::nullopt;
|
||||
}
|
||||
|
||||
bool RTPSenderVideo::SendVideo(VideoFrameType frame_type,
|
||||
int8_t payload_type,
|
||||
uint32_t rtp_timestamp,
|
||||
int64_t capture_time_ms,
|
||||
const uint8_t* payload_data,
|
||||
size_t payload_size,
|
||||
const RTPFragmentationHeader* fragmentation,
|
||||
const RTPVideoHeader* video_header,
|
||||
int64_t expected_retransmission_time_ms) {
|
||||
bool RTPSenderVideo::SendVideo(
|
||||
VideoFrameType frame_type,
|
||||
int8_t payload_type,
|
||||
uint32_t rtp_timestamp,
|
||||
int64_t capture_time_ms,
|
||||
const uint8_t* payload_data,
|
||||
size_t payload_size,
|
||||
const RTPFragmentationHeader* fragmentation,
|
||||
const RTPVideoHeader* video_header,
|
||||
absl::optional<int64_t> expected_retransmission_time_ms) {
|
||||
TRACE_EVENT_ASYNC_STEP1("webrtc", "Video", capture_time_ms, "Send", "type",
|
||||
FrameTypeToString(frame_type));
|
||||
|
||||
@ -624,8 +625,14 @@ bool RTPSenderVideo::SendVideo(VideoFrameType frame_type,
|
||||
*packetize_video_header, frame_type, fragmentation);
|
||||
|
||||
const uint8_t temporal_id = GetTemporalId(*video_header);
|
||||
StorageType storage = GetStorageType(temporal_id, retransmission_settings,
|
||||
expected_retransmission_time_ms);
|
||||
// TODO(bugs.webrtc.org/10714): retransmission_settings_ should generally be
|
||||
// replaced by expected_retransmission_time_ms.has_value(). For now, though,
|
||||
// only VP8 with an injected frame buffer controller actually controls it.
|
||||
const StorageType storage =
|
||||
expected_retransmission_time_ms.has_value()
|
||||
? GetStorageType(temporal_id, retransmission_settings,
|
||||
expected_retransmission_time_ms.value())
|
||||
: StorageType::kDontRetransmit;
|
||||
const size_t num_packets = packetizer->NumPackets();
|
||||
|
||||
size_t unpacketized_payload_size;
|
||||
|
@ -62,6 +62,7 @@ class RTPSenderVideo {
|
||||
const WebRtcKeyValueConfig& field_trials);
|
||||
virtual ~RTPSenderVideo();
|
||||
|
||||
// expected_retransmission_time_ms.has_value() -> retransmission allowed.
|
||||
bool SendVideo(VideoFrameType frame_type,
|
||||
int8_t payload_type,
|
||||
uint32_t capture_timestamp,
|
||||
@ -70,7 +71,7 @@ class RTPSenderVideo {
|
||||
size_t payload_size,
|
||||
const RTPFragmentationHeader* fragmentation,
|
||||
const RTPVideoHeader* video_header,
|
||||
int64_t expected_retransmission_time_ms);
|
||||
absl::optional<int64_t> expected_retransmission_time_ms);
|
||||
|
||||
void RegisterPayloadType(int8_t payload_type,
|
||||
absl::string_view payload_name,
|
||||
|
@ -941,12 +941,16 @@ int LibvpxVp8Encoder::Encode(const VideoFrame& frame,
|
||||
|
||||
bool send_key_frame = key_frame_requested;
|
||||
bool drop_frame = false;
|
||||
bool retransmission_allowed = true;
|
||||
Vp8FrameConfig tl_configs[kMaxSimulcastStreams];
|
||||
for (size_t i = 0; i < encoders_.size(); ++i) {
|
||||
tl_configs[i] =
|
||||
frame_buffer_controller_->NextFrameConfig(i, frame.timestamp());
|
||||
send_key_frame |= tl_configs[i].IntraFrame();
|
||||
drop_frame |= tl_configs[i].drop_frame;
|
||||
RTC_DCHECK(i == 0 ||
|
||||
retransmission_allowed == tl_configs[i].retransmission_allowed);
|
||||
retransmission_allowed = tl_configs[i].retransmission_allowed;
|
||||
}
|
||||
|
||||
if (drop_frame && !send_key_frame) {
|
||||
@ -1065,7 +1069,7 @@ int LibvpxVp8Encoder::Encode(const VideoFrame& frame,
|
||||
if (error)
|
||||
return WEBRTC_VIDEO_CODEC_ERROR;
|
||||
// Examines frame timestamps only.
|
||||
error = GetEncodedPartitions(frame);
|
||||
error = GetEncodedPartitions(frame, retransmission_allowed);
|
||||
}
|
||||
// TODO(sprang): Shouldn't we use the frame timestamp instead?
|
||||
timestamp_ += duration;
|
||||
@ -1091,7 +1095,8 @@ void LibvpxVp8Encoder::PopulateCodecSpecific(CodecSpecificInfo* codec_specific,
|
||||
(pkt.data.frame.flags & VPX_FRAME_IS_KEY) != 0, qp, codec_specific);
|
||||
}
|
||||
|
||||
int LibvpxVp8Encoder::GetEncodedPartitions(const VideoFrame& input_image) {
|
||||
int LibvpxVp8Encoder::GetEncodedPartitions(const VideoFrame& input_image,
|
||||
bool retransmission_allowed) {
|
||||
int stream_idx = static_cast<int>(encoders_.size()) - 1;
|
||||
int result = WEBRTC_VIDEO_CODEC_OK;
|
||||
for (size_t encoder_idx = 0; encoder_idx < encoders_.size();
|
||||
@ -1139,6 +1144,8 @@ int LibvpxVp8Encoder::GetEncodedPartitions(const VideoFrame& input_image) {
|
||||
: VideoContentType::UNSPECIFIED;
|
||||
encoded_images_[encoder_idx].timing_.flags = VideoSendTiming::kInvalid;
|
||||
encoded_images_[encoder_idx].SetColorSpace(input_image.color_space());
|
||||
encoded_images_[encoder_idx].SetRetransmissionAllowed(
|
||||
retransmission_allowed);
|
||||
|
||||
if (send_stream_[stream_idx]) {
|
||||
if (encoded_images_[encoder_idx].size() > 0) {
|
||||
|
@ -82,7 +82,8 @@ class LibvpxVp8Encoder : public VideoEncoder {
|
||||
int encoder_idx,
|
||||
uint32_t timestamp);
|
||||
|
||||
int GetEncodedPartitions(const VideoFrame& input_image);
|
||||
int GetEncodedPartitions(const VideoFrame& input_image,
|
||||
bool retransmission_allowed);
|
||||
|
||||
// Set the stream state for stream |stream_idx|.
|
||||
void SetStreamState(bool send_stream, int stream_idx);
|
||||
|
Reference in New Issue
Block a user