Reland "Avoid critsect for protection- and qm setting callbacks in

VideoSender."

The original CL is uploaded as patch set 1, the fix is in ps#2, and I'll rebase in ps#3.

BUG=4534
R=pbos@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/46769004

Cr-Commit-Position: refs/heads/master@{#9000}
Author: mflodman
Date: 2015-04-14 21:28:08 +02:00
Parent: 73ba7a690f
Commit: fcf54bdabb
18 changed files with 383 additions and 458 deletions

View File

@ -88,7 +88,6 @@ ProducerFec::ProducerFec(ForwardErrorCorrection* fec)
media_packets_fec_(),
fec_packets_(),
num_frames_(0),
incomplete_frame_(false),
num_first_partition_(0),
minimum_media_packets_fec_(1),
params_(),
@ -125,9 +124,8 @@ RedPacket* ProducerFec::BuildRedPacket(const uint8_t* data_buffer,
size_t payload_length,
size_t rtp_header_length,
int red_pl_type) {
RedPacket* red_packet = new RedPacket(payload_length +
kREDForFECHeaderLength +
rtp_header_length);
RedPacket* red_packet = new RedPacket(
payload_length + kREDForFECHeaderLength + rtp_header_length);
int pl_type = data_buffer[1] & 0x7f;
red_packet->CreateHeader(data_buffer, rtp_header_length,
red_pl_type, pl_type);
@ -142,7 +140,7 @@ int ProducerFec::AddRtpPacketAndGenerateFec(const uint8_t* data_buffer,
if (media_packets_fec_.empty()) {
params_ = new_params_;
}
incomplete_frame_ = true;
bool complete_frame = false;
const bool marker_bit = (data_buffer[1] & kRtpMarkerBitMask) ? true : false;
if (media_packets_fec_.size() < ForwardErrorCorrection::kMaxMediaPackets) {
// Generic FEC can only protect up to kMaxMediaPackets packets.
@ -153,13 +151,13 @@ int ProducerFec::AddRtpPacketAndGenerateFec(const uint8_t* data_buffer,
}
if (marker_bit) {
++num_frames_;
incomplete_frame_ = false;
complete_frame = true;
}
// Produce FEC over at most |params_.max_fec_frames| frames, or as soon as:
// (1) the excess overhead (actual overhead - requested/target overhead) is
// less than |kMaxExcessOverhead|, and
// (2) at least |minimum_media_packets_fec_| media packets is reached.
if (!incomplete_frame_ &&
if (complete_frame &&
(num_frames_ == params_.max_fec_frames ||
(ExcessOverheadBelowMax() && MinimumMediaPacketsReached()))) {
assert(num_first_partition_ <=
@ -206,37 +204,43 @@ bool ProducerFec::MinimumMediaPacketsReached() {
}
bool ProducerFec::FecAvailable() const {
return (fec_packets_.size() > 0);
return !fec_packets_.empty();
}
RedPacket* ProducerFec::GetFecPacket(int red_pl_type,
int fec_pl_type,
uint16_t seq_num,
size_t rtp_header_length) {
if (fec_packets_.empty())
return NULL;
// Build FEC packet. The FEC packets in |fec_packets_| doesn't
// have RTP headers, so we're reusing the header from the last
// media packet.
ForwardErrorCorrection::Packet* packet_to_send = fec_packets_.front();
ForwardErrorCorrection::Packet* last_media_packet = media_packets_fec_.back();
RedPacket* return_packet = new RedPacket(packet_to_send->length +
kREDForFECHeaderLength +
rtp_header_length);
return_packet->CreateHeader(last_media_packet->data,
rtp_header_length,
red_pl_type,
fec_pl_type);
return_packet->SetSeqNum(seq_num);
return_packet->ClearMarkerBit();
return_packet->AssignPayload(packet_to_send->data, packet_to_send->length);
fec_packets_.pop_front();
if (fec_packets_.empty()) {
// Done with all the FEC packets. Reset for next run.
DeletePackets();
num_frames_ = 0;
size_t ProducerFec::NumAvailableFecPackets() const {
return fec_packets_.size();
}
std::vector<RedPacket*> ProducerFec::GetFecPackets(int red_pl_type,
int fec_pl_type,
uint16_t first_seq_num,
size_t rtp_header_length) {
std::vector<RedPacket*> fec_packets;
fec_packets.reserve(fec_packets_.size());
uint16_t sequence_number = first_seq_num;
while (!fec_packets_.empty()) {
// Build FEC packet. The FEC packets in |fec_packets_| doesn't
// have RTP headers, so we're reusing the header from the last
// media packet.
ForwardErrorCorrection::Packet* packet_to_send = fec_packets_.front();
ForwardErrorCorrection::Packet* last_media_packet =
media_packets_fec_.back();
RedPacket* red_packet = new RedPacket(
packet_to_send->length + kREDForFECHeaderLength + rtp_header_length);
red_packet->CreateHeader(last_media_packet->data, rtp_header_length,
red_pl_type, fec_pl_type);
red_packet->SetSeqNum(sequence_number++);
red_packet->ClearMarkerBit();
red_packet->AssignPayload(packet_to_send->data, packet_to_send->length);
fec_packets.push_back(red_packet);
fec_packets_.pop_front();
}
return return_packet;
DeletePackets();
num_frames_ = 0;
return fec_packets;
}
int ProducerFec::Overhead() const {

View File

@ -12,6 +12,7 @@
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_PRODUCER_FEC_H_
#include <list>
#include <vector>
#include "webrtc/modules/rtp_rtcp/source/forward_error_correction.h"
@ -45,6 +46,7 @@ class ProducerFec {
void SetFecParameters(const FecProtectionParams* params,
int max_fec_frames);
// The caller is expected to delete the memory when done.
RedPacket* BuildRedPacket(const uint8_t* data_buffer,
size_t payload_length,
size_t rtp_header_length,
@ -59,11 +61,14 @@ class ProducerFec {
bool MinimumMediaPacketsReached();
bool FecAvailable() const;
size_t NumAvailableFecPackets() const;
RedPacket* GetFecPacket(int red_pl_type,
int fec_pl_type,
uint16_t seq_num,
size_t rtp_header_length);
// GetFecPackets allocates memory and creates FEC packets, but the caller is
// assumed to delete the memory when done with the packets.
std::vector<RedPacket*> GetFecPackets(int red_pl_type,
int fec_pl_type,
uint16_t first_seq_num,
size_t rtp_header_length);
private:
void DeletePackets();
@ -72,7 +77,6 @@ class ProducerFec {
std::list<ForwardErrorCorrection::Packet*> media_packets_fec_;
std::list<ForwardErrorCorrection::Packet*> fec_packets_;
int num_frames_;
bool incomplete_frame_;
int num_first_partition_;
int minimum_media_packets_fec_;
FecProtectionParams params_;
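
For reference, a usage sketch (not part of the CL) of the batched interface declared above: feed a media packet to the FEC producer, reserve a contiguous block of sequence numbers, then fetch and release all FEC packets in one go. It assumes SetFecParameters() has already been called; the function name, the payload-type constants and the way packets are handed to the send path are made up for illustration, while the ProducerFec and RTPSenderInterface calls are the ones introduced by this change.

#include <stdint.h>
#include <vector>

#include "webrtc/modules/rtp_rtcp/source/producer_fec.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_sender.h"

// Hypothetical payload types; real values come from session negotiation.
const int kRedPayloadType = 96;
const int kFecPayloadType = 97;

void ProtectAndSendFec(webrtc::ProducerFec* producer,
                       webrtc::RTPSenderInterface* rtp_sender,
                       const uint8_t* media_packet,
                       size_t payload_length,
                       size_t rtp_header_length) {
  // Feed the media packet to the FEC producer.
  producer->AddRtpPacketAndGenerateFec(media_packet, payload_length,
                                       rtp_header_length);
  uint16_t num_fec_packets =
      static_cast<uint16_t>(producer->NumAvailableFecPackets());
  if (num_fec_packets == 0)
    return;
  // Reserve one sequence number per FEC packet up front.
  uint16_t first_seq_num = rtp_sender->AllocateSequenceNumber(num_fec_packets);
  std::vector<webrtc::RedPacket*> fec_packets = producer->GetFecPackets(
      kRedPayloadType, kFecPayloadType, first_seq_num, rtp_header_length);
  for (webrtc::RedPacket* packet : fec_packets) {
    // ... hand packet->data()/packet->length() to the send path ...
    delete packet;  // GetFecPackets() transfers ownership to the caller.
  }
}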

View File

@ -77,19 +77,19 @@ TEST_F(ProducerFecTest, OneFrameFec) {
}
EXPECT_TRUE(producer_->FecAvailable());
uint16_t seq_num = generator_->NextSeqNum();
RedPacket* packet = producer_->GetFecPacket(kRedPayloadType,
kFecPayloadType,
seq_num,
kRtpHeaderSize);
std::vector<RedPacket*> packets = producer_->GetFecPackets(kRedPayloadType,
kFecPayloadType,
seq_num,
kRtpHeaderSize);
EXPECT_FALSE(producer_->FecAvailable());
ASSERT_TRUE(packet != NULL);
ASSERT_EQ(1u, packets.size());
VerifyHeader(seq_num, last_timestamp,
kRedPayloadType, kFecPayloadType, packet, false);
kRedPayloadType, kFecPayloadType, packets.front(), false);
while (!rtp_packets.empty()) {
delete rtp_packets.front();
rtp_packets.pop_front();
}
delete packet;
delete packets.front();
}
TEST_F(ProducerFecTest, TwoFrameFec) {
@ -120,39 +120,36 @@ TEST_F(ProducerFecTest, TwoFrameFec) {
}
EXPECT_TRUE(producer_->FecAvailable());
uint16_t seq_num = generator_->NextSeqNum();
RedPacket* packet = producer_->GetFecPacket(kRedPayloadType,
kFecPayloadType,
seq_num,
kRtpHeaderSize);
std::vector<RedPacket*> packets = producer_->GetFecPackets(kRedPayloadType,
kFecPayloadType,
seq_num,
kRtpHeaderSize);
EXPECT_FALSE(producer_->FecAvailable());
EXPECT_TRUE(packet != NULL);
VerifyHeader(seq_num, last_timestamp,
kRedPayloadType, kFecPayloadType, packet, false);
ASSERT_EQ(1u, packets.size());
VerifyHeader(seq_num, last_timestamp, kRedPayloadType, kFecPayloadType,
packets.front(), false);
while (!rtp_packets.empty()) {
delete rtp_packets.front();
rtp_packets.pop_front();
}
delete packet;
delete packets.front();
}
TEST_F(ProducerFecTest, BuildRedPacket) {
generator_->NewFrame(1);
RtpPacket* packet = generator_->NextPacket(0, 10);
RedPacket* red_packet = producer_->BuildRedPacket(packet->data,
packet->length -
kRtpHeaderSize,
kRtpHeaderSize,
kRedPayloadType);
rtc::scoped_ptr<RedPacket> red_packet(producer_->BuildRedPacket(
packet->data, packet->length - kRtpHeaderSize, kRtpHeaderSize,
kRedPayloadType));
EXPECT_EQ(packet->length + 1, red_packet->length());
VerifyHeader(packet->header.header.sequenceNumber,
packet->header.header.timestamp,
kRedPayloadType,
packet->header.header.payloadType,
red_packet,
red_packet.get(),
true); // Marker bit set.
for (int i = 0; i < 10; ++i)
EXPECT_EQ(i, red_packet->data()[kRtpHeaderSize + 1 + i]);
delete red_packet;
delete packet;
}

View File

@ -407,7 +407,7 @@ int32_t ModuleRtpRtcpImpl::SendOutgoingData(
}
return rtp_sender_.SendOutgoingData(
frame_type, payload_type, time_stamp, capture_time_ms, payload_data,
payload_size, fragmentation, NULL, rtp_video_hdr);
payload_size, fragmentation, rtp_video_hdr);
}
bool ModuleRtpRtcpImpl::TimeToSendPacket(uint32_t ssrc,

View File

@ -323,14 +323,14 @@ int32_t RTPSender::RegisterPayload(
}
return -1;
}
int32_t ret_val = -1;
int32_t ret_val = 0;
RtpUtility::Payload* payload = NULL;
if (audio_configured_) {
// TODO(mflodman): Change to CreateAudioPayload and make static.
ret_val = audio_->RegisterAudioPayload(payload_name, payload_number,
frequency, channels, rate, payload);
} else {
ret_val = video_->RegisterVideoPayload(payload_name, payload_number, rate,
payload);
payload = video_->CreateVideoPayload(payload_name, payload_number, rate);
}
if (payload) {
payload_type_map_[payload_number] = payload;
@ -489,7 +489,6 @@ int32_t RTPSender::SendOutgoingData(FrameType frame_type,
const uint8_t* payload_data,
size_t payload_size,
const RTPFragmentationHeader* fragmentation,
VideoCodecInformation* codec_info,
const RTPVideoHeader* rtp_hdr) {
uint32_t ssrc;
{
@ -526,7 +525,7 @@ int32_t RTPSender::SendOutgoingData(FrameType frame_type,
ret_val =
video_->SendVideo(video_type, frame_type, payload_type,
capture_timestamp, capture_time_ms, payload_data,
payload_size, fragmentation, codec_info, rtp_hdr);
payload_size, fragmentation, rtp_hdr);
}
CriticalSectionScoped cs(statistics_crit_.get());
@ -745,7 +744,8 @@ int RTPSender::SelectiveRetransmissions() const {
int RTPSender::SetSelectiveRetransmissions(uint8_t settings) {
if (!video_)
return -1;
return video_->SetSelectiveRetransmissions(settings);
video_->SetSelectiveRetransmissions(settings);
return 0;
}
void RTPSender::OnReceivedNACK(const std::list<uint16_t>& nack_sequence_numbers,
@ -1086,9 +1086,11 @@ size_t RTPSender::RTPHeaderLength() const {
return rtp_header_length;
}
uint16_t RTPSender::IncrementSequenceNumber() {
uint16_t RTPSender::AllocateSequenceNumber(uint16_t packets_to_send) {
CriticalSectionScoped cs(send_critsect_.get());
return sequence_number_++;
uint16_t first_allocated_sequence_number = sequence_number_;
sequence_number_ += packets_to_send;
return first_allocated_sequence_number;
}
void RTPSender::ResetDataCounters() {
@ -1729,14 +1731,6 @@ int32_t RTPSender::RED(int8_t *payload_type) const {
return audio_->RED(*payload_type);
}
// Video
VideoCodecInformation *RTPSender::CodecInformationVideo() {
if (audio_configured_) {
return NULL;
}
return video_->CodecInformationVideo();
}
RtpVideoCodecTypes RTPSender::VideoCodecType() const {
assert(!audio_configured_ && "Sender is an audio stream!");
return video_->VideoCodecType();
@ -1762,8 +1756,8 @@ int32_t RTPSender::SetGenericFECStatus(bool enable,
if (audio_configured_) {
return -1;
}
return video_->SetGenericFECStatus(enable, payload_type_red,
payload_type_fec);
video_->SetGenericFECStatus(enable, payload_type_red, payload_type_fec);
return 0;
}
int32_t RTPSender::GenericFECStatus(bool* enable,
@ -1772,8 +1766,8 @@ int32_t RTPSender::GenericFECStatus(bool* enable,
if (audio_configured_) {
return -1;
}
return video_->GenericFECStatus(
*enable, *payload_type_red, *payload_type_fec);
video_->GenericFECStatus(*enable, *payload_type_red, *payload_type_fec);
return 0;
}
int32_t RTPSender::SetFecParameters(
@ -1782,7 +1776,8 @@ int32_t RTPSender::SetFecParameters(
if (audio_configured_) {
return -1;
}
return video_->SetFecParameters(delta_params, key_params);
video_->SetFecParameters(delta_params, key_params);
return 0;
}
void RTPSender::BuildRtxPacket(uint8_t* buffer, size_t* length,

View File

@ -24,8 +24,8 @@
#include "webrtc/modules/rtp_rtcp/source/rtp_header_extension.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_packet_history.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_rtcp_config.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/modules/rtp_rtcp/source/ssrc_database.h"
#include "webrtc/modules/rtp_rtcp/source/video_codec_information.h"
#define MAX_INIT_RTP_SEQ_NUMBER 32767 // 2^15 -1.
@ -61,7 +61,10 @@ class RTPSenderInterface {
bool inc_sequence_number = true) = 0;
virtual size_t RTPHeaderLength() const = 0;
virtual uint16_t IncrementSequenceNumber() = 0;
// Returns the next sequence number to use for a packet and allocates
// 'packets_to_send' number of sequence numbers. It's important all allocated
// sequence numbers are used in sequence to avoid perceived packet loss.
virtual uint16_t AllocateSequenceNumber(uint16_t packets_to_send) = 0;
virtual uint16_t SequenceNumber() const = 0;
virtual size_t MaxPayloadLength() const = 0;
virtual size_t MaxDataPayloadLength() const = 0;
@ -155,7 +158,6 @@ class RTPSender : public RTPSenderInterface {
const uint8_t* payload_data,
size_t payload_size,
const RTPFragmentationHeader* fragmentation,
VideoCodecInformation* codec_info = NULL,
const RTPVideoHeader* rtp_hdr = NULL);
// RTP header extension
@ -227,7 +229,7 @@ class RTPSender : public RTPSenderInterface {
const bool inc_sequence_number = true) override;
size_t RTPHeaderLength() const override;
uint16_t IncrementSequenceNumber() override;
uint16_t AllocateSequenceNumber(uint16_t packets_to_send) override;
size_t MaxPayloadLength() const override;
uint16_t PacketOverHead() const override;
@ -261,9 +263,6 @@ class RTPSender : public RTPSenderInterface {
// Get payload type for Redundant Audio Data RFC 2198.
int32_t RED(int8_t *payload_type) const;
// Video.
VideoCodecInformation *CodecInformationVideo();
RtpVideoCodecTypes VideoCodecType() const;
uint32_t MaxConfiguredBitrateVideo() const;
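
A minimal sketch of the AllocateSequenceNumber() contract documented above (the packet count and concrete numbers are illustrative only): the call reserves a contiguous block and returns its first value, and the caller must consume the whole block in order or receivers will see the gap as packet loss.

#include <stdint.h>

#include "webrtc/modules/rtp_rtcp/source/rtp_sender.h"

// If this returns 100, the caller goes on to send packets numbered 100, 101
// and 102; leaving any of them unused is reported as loss by receivers. The
// counter wraps modulo 2^16, so allocation across 65535 continues at 0.
uint16_t ReserveThreeSequenceNumbers(webrtc::RTPSenderInterface* rtp_sender) {
  return rtp_sender->AllocateSequenceNumber(3);
}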

View File

@ -1353,7 +1353,7 @@ TEST_F(RtpSenderVideoTest, SendVideoWithCVO) {
rtp_sender_video_->SendVideo(kRtpVideoGeneric, kVideoFrameKey, kPayload,
kTimestamp, 0, packet_, sizeof(packet_), NULL,
NULL, &hdr);
&hdr);
RtpHeaderExtensionMap map;
map.Register(kRtpExtensionVideoRotation, kVideoRotationExtensionId);

View File

@ -10,10 +10,12 @@
#include "webrtc/modules/rtp_rtcp/source/rtp_sender_video.h"
#include <assert.h>
#include <stdlib.h>
#include <string.h>
#include <vector>
#include "webrtc/base/checks.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
#include "webrtc/modules/rtp_rtcp/source/producer_fec.h"
@ -33,8 +35,8 @@ struct RtpPacket {
RTPSenderVideo::RTPSenderVideo(Clock* clock, RTPSenderInterface* rtpSender)
: _rtpSender(*rtpSender),
crit_(CriticalSectionWrapper::CreateCriticalSection()),
_videoType(kRtpVideoGeneric),
_videoCodecInformation(NULL),
_maxBitrate(0),
_retransmissionSettings(kRetransmitBaseLayer),
@ -43,7 +45,6 @@ RTPSenderVideo::RTPSenderVideo(Clock* clock, RTPSenderInterface* rtpSender)
_fecEnabled(false),
_payloadTypeRED(-1),
_payloadTypeFEC(-1),
_numberFirstPartition(0),
delta_fec_params_(),
key_fec_params_(),
producer_fec_(&_fec),
@ -57,9 +58,6 @@ RTPSenderVideo::RTPSenderVideo(Clock* clock, RTPSenderInterface* rtpSender)
}
RTPSenderVideo::~RTPSenderVideo() {
if (_videoCodecInformation) {
delete _videoCodecInformation;
}
}
void RTPSenderVideo::SetVideoCodecType(RtpVideoCodecTypes videoType) {
@ -70,11 +68,11 @@ RtpVideoCodecTypes RTPSenderVideo::VideoCodecType() const {
return _videoType;
}
int32_t RTPSenderVideo::RegisterVideoPayload(
// Static.
RtpUtility::Payload* RTPSenderVideo::CreateVideoPayload(
const char payloadName[RTP_PAYLOAD_NAME_SIZE],
const int8_t payloadType,
const uint32_t maxBitRate,
RtpUtility::Payload*& payload) {
const uint32_t maxBitRate) {
RtpVideoCodecTypes videoType = kRtpVideoGeneric;
if (RtpUtility::StringCompare(payloadName, "VP8", 3)) {
videoType = kRtpVideoVp8;
@ -85,103 +83,94 @@ int32_t RTPSenderVideo::RegisterVideoPayload(
} else {
videoType = kRtpVideoGeneric;
}
payload = new RtpUtility::Payload;
RtpUtility::Payload* payload = new RtpUtility::Payload();
payload->name[RTP_PAYLOAD_NAME_SIZE - 1] = 0;
strncpy(payload->name, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);
payload->typeSpecific.Video.videoCodecType = videoType;
payload->typeSpecific.Video.maxRate = maxBitRate;
payload->audio = false;
return 0;
return payload;
}
int32_t RTPSenderVideo::SendVideoPacket(uint8_t* data_buffer,
const size_t payload_length,
const size_t rtp_header_length,
const uint32_t capture_timestamp,
int64_t capture_time_ms,
StorageType storage,
bool protect) {
if (_fecEnabled) {
int ret = 0;
size_t fec_overhead_sent = 0;
size_t video_sent = 0;
void RTPSenderVideo::SendVideoPacket(uint8_t* data_buffer,
const size_t payload_length,
const size_t rtp_header_length,
uint16_t seq_num,
const uint32_t capture_timestamp,
int64_t capture_time_ms,
StorageType storage) {
if (_rtpSender.SendToNetwork(data_buffer, payload_length, rtp_header_length,
capture_time_ms, storage,
PacedSender::kNormalPriority) == 0) {
_videoBitrate.Update(payload_length + rtp_header_length);
TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"),
"Video::PacketNormal", "timestamp", capture_timestamp,
"seqnum", seq_num);
} else {
LOG(LS_WARNING) << "Failed to send video packet " << seq_num;
}
}
RedPacket* red_packet = producer_fec_.BuildRedPacket(
data_buffer, payload_length, rtp_header_length, _payloadTypeRED);
void RTPSenderVideo::SendVideoPacketAsRed(uint8_t* data_buffer,
const size_t payload_length,
const size_t rtp_header_length,
uint16_t media_seq_num,
const uint32_t capture_timestamp,
int64_t capture_time_ms,
StorageType media_packet_storage,
bool protect) {
rtc::scoped_ptr<RedPacket> red_packet;
std::vector<RedPacket*> fec_packets;
StorageType fec_storage = kDontRetransmit;
uint16_t next_fec_sequence_number = 0;
{
// Only protect while creating RED and FEC packets, not when sending.
CriticalSectionScoped cs(crit_.get());
red_packet.reset(producer_fec_.BuildRedPacket(
data_buffer, payload_length, rtp_header_length, _payloadTypeRED));
if (protect) {
producer_fec_.AddRtpPacketAndGenerateFec(data_buffer, payload_length,
rtp_header_length);
}
uint16_t num_fec_packets = producer_fec_.NumAvailableFecPackets();
if (num_fec_packets > 0) {
next_fec_sequence_number =
_rtpSender.AllocateSequenceNumber(num_fec_packets);
fec_packets = producer_fec_.GetFecPackets(
_payloadTypeRED, _payloadTypeFEC, next_fec_sequence_number,
rtp_header_length);
DCHECK_EQ(num_fec_packets, fec_packets.size());
if (_retransmissionSettings & kRetransmitFECPackets)
fec_storage = kAllowRetransmission;
}
}
if (_rtpSender.SendToNetwork(
red_packet->data(), red_packet->length() - rtp_header_length,
rtp_header_length, capture_time_ms, media_packet_storage,
PacedSender::kNormalPriority) == 0) {
_videoBitrate.Update(red_packet->length());
TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"),
"Video::PacketRed", "timestamp", capture_timestamp,
"seqnum", _rtpSender.SequenceNumber());
// Sending the media packet with RED header.
int packet_success =
_rtpSender.SendToNetwork(red_packet->data(),
red_packet->length() - rtp_header_length,
rtp_header_length,
capture_time_ms,
storage,
PacedSender::kNormalPriority);
ret |= packet_success;
if (packet_success == 0) {
video_sent += red_packet->length();
}
delete red_packet;
red_packet = NULL;
if (protect) {
ret = producer_fec_.AddRtpPacketAndGenerateFec(
data_buffer, payload_length, rtp_header_length);
if (ret != 0)
return ret;
}
while (producer_fec_.FecAvailable()) {
red_packet =
producer_fec_.GetFecPacket(_payloadTypeRED,
_payloadTypeFEC,
_rtpSender.IncrementSequenceNumber(),
rtp_header_length);
StorageType storage = kDontRetransmit;
if (_retransmissionSettings & kRetransmitFECPackets) {
storage = kAllowRetransmission;
}
"seqnum", media_seq_num);
} else {
LOG(LS_WARNING) << "Failed to send RED packet " << media_seq_num;
}
for (RedPacket* fec_packet : fec_packets) {
if (_rtpSender.SendToNetwork(
fec_packet->data(), fec_packet->length() - rtp_header_length,
rtp_header_length, capture_time_ms, fec_storage,
PacedSender::kNormalPriority) == 0) {
_fecOverheadRate.Update(fec_packet->length());
TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"),
"Video::PacketFec", "timestamp", capture_timestamp,
"seqnum", _rtpSender.SequenceNumber());
// Sending FEC packet with RED header.
int packet_success =
_rtpSender.SendToNetwork(red_packet->data(),
red_packet->length() - rtp_header_length,
rtp_header_length,
capture_time_ms,
storage,
PacedSender::kNormalPriority);
ret |= packet_success;
if (packet_success == 0) {
fec_overhead_sent += red_packet->length();
}
delete red_packet;
red_packet = NULL;
"seqnum", next_fec_sequence_number);
} else {
LOG(LS_WARNING) << "Failed to send FEC packet "
<< next_fec_sequence_number;
}
_videoBitrate.Update(video_sent);
_fecOverheadRate.Update(fec_overhead_sent);
return ret;
delete fec_packet;
++next_fec_sequence_number;
}
TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"),
"Video::PacketNormal", "timestamp", capture_timestamp,
"seqnum", _rtpSender.SequenceNumber());
int ret = _rtpSender.SendToNetwork(data_buffer,
payload_length,
rtp_header_length,
capture_time_ms,
storage,
PacedSender::kNormalPriority);
if (ret == 0) {
_videoBitrate.Update(payload_length + rtp_header_length);
}
return ret;
}
int32_t RTPSenderVideo::SendRTPIntraRequest() {
@ -204,9 +193,10 @@ int32_t RTPSenderVideo::SendRTPIntraRequest() {
data, 0, length, -1, kDontStore, PacedSender::kNormalPriority);
}
int32_t RTPSenderVideo::SetGenericFECStatus(const bool enable,
const uint8_t payloadTypeRED,
const uint8_t payloadTypeFEC) {
void RTPSenderVideo::SetGenericFECStatus(const bool enable,
const uint8_t payloadTypeRED,
const uint8_t payloadTypeFEC) {
CriticalSectionScoped cs(crit_.get());
_fecEnabled = enable;
_payloadTypeRED = payloadTypeRED;
_payloadTypeFEC = payloadTypeFEC;
@ -215,19 +205,19 @@ int32_t RTPSenderVideo::SetGenericFECStatus(const bool enable,
delta_fec_params_.max_fec_frames = key_fec_params_.max_fec_frames = 1;
delta_fec_params_.fec_mask_type = key_fec_params_.fec_mask_type =
kFecMaskRandom;
return 0;
}
int32_t RTPSenderVideo::GenericFECStatus(bool& enable,
uint8_t& payloadTypeRED,
uint8_t& payloadTypeFEC) const {
void RTPSenderVideo::GenericFECStatus(bool& enable,
uint8_t& payloadTypeRED,
uint8_t& payloadTypeFEC) const {
CriticalSectionScoped cs(crit_.get());
enable = _fecEnabled;
payloadTypeRED = _payloadTypeRED;
payloadTypeFEC = _payloadTypeFEC;
return 0;
}
size_t RTPSenderVideo::FECPacketOverhead() const {
CriticalSectionScoped cs(crit_.get());
if (_fecEnabled) {
// Overhead is FEC headers plus RED for FEC header plus anything in RTP
// header beyond the 12 bytes base header (CSRC list, extensions...)
@ -240,14 +230,13 @@ size_t RTPSenderVideo::FECPacketOverhead() const {
return 0;
}
int32_t RTPSenderVideo::SetFecParameters(
const FecProtectionParams* delta_params,
const FecProtectionParams* key_params) {
assert(delta_params);
assert(key_params);
void RTPSenderVideo::SetFecParameters(const FecProtectionParams* delta_params,
const FecProtectionParams* key_params) {
CriticalSectionScoped cs(crit_.get());
DCHECK(delta_params);
DCHECK(key_params);
delta_fec_params_ = *delta_params;
key_fec_params_ = *key_params;
return 0;
}
int32_t RTPSenderVideo::SendVideo(const RtpVideoCodecTypes videoType,
@ -258,49 +247,26 @@ int32_t RTPSenderVideo::SendVideo(const RtpVideoCodecTypes videoType,
const uint8_t* payloadData,
const size_t payloadSize,
const RTPFragmentationHeader* fragmentation,
VideoCodecInformation* codecInfo,
const RTPVideoHeader* rtpHdr) {
if (payloadSize == 0) {
return -1;
}
if (frameType == kVideoFrameKey) {
producer_fec_.SetFecParameters(&key_fec_params_, _numberFirstPartition);
} else {
producer_fec_.SetFecParameters(&delta_fec_params_, _numberFirstPartition);
rtc::scoped_ptr<RtpPacketizer> packetizer(
RtpPacketizer::Create(videoType, _rtpSender.MaxDataPayloadLength(),
&(rtpHdr->codecHeader), frameType));
StorageType storage = kDontStore;
bool fec_enabled = false;
{
CriticalSectionScoped cs(crit_.get());
FecProtectionParams* fec_params =
frameType == kVideoFrameKey ? &key_fec_params_ : &delta_fec_params_;
producer_fec_.SetFecParameters(fec_params, 0);
storage = packetizer->GetStorageType(_retransmissionSettings);
fec_enabled = _fecEnabled;
}
// Default setting for number of first partition packets:
// Will be extracted in SendVP8 for VP8 codec; other codecs use 0
_numberFirstPartition = 0;
return Send(videoType, frameType, payloadType, captureTimeStamp,
capture_time_ms, payloadData, payloadSize, fragmentation, rtpHdr)
? 0
: -1;
}
VideoCodecInformation* RTPSenderVideo::CodecInformationVideo() {
return _videoCodecInformation;
}
void RTPSenderVideo::SetMaxConfiguredBitrateVideo(const uint32_t maxBitrate) {
_maxBitrate = maxBitrate;
}
uint32_t RTPSenderVideo::MaxConfiguredBitrateVideo() const {
return _maxBitrate;
}
bool RTPSenderVideo::Send(const RtpVideoCodecTypes videoType,
const FrameType frameType,
const int8_t payloadType,
const uint32_t captureTimeStamp,
int64_t capture_time_ms,
const uint8_t* payloadData,
const size_t payloadSize,
const RTPFragmentationHeader* fragmentation,
const RTPVideoHeader* rtpHdr) {
// Register CVO rtp header extension at the first time when we receive a frame
// with pending rotation.
RTPSenderInterface::CVOMode cvo_mode = RTPSenderInterface::kCVONone;
@ -311,10 +277,6 @@ bool RTPSenderVideo::Send(const RtpVideoCodecTypes videoType,
uint16_t rtp_header_length = _rtpSender.RTPHeaderLength();
size_t payload_bytes_to_send = payloadSize;
const uint8_t* data = payloadData;
size_t max_payload_length = _rtpSender.MaxDataPayloadLength();
rtc::scoped_ptr<RtpPacketizer> packetizer(RtpPacketizer::Create(
videoType, max_payload_length, &(rtpHdr->codecHeader), frameType));
// TODO(changbin): we currently don't support to configure the codec to
// output multiple partitions for VP8. Should remove below check after the
@ -328,16 +290,14 @@ bool RTPSenderVideo::Send(const RtpVideoCodecTypes videoType,
while (!last) {
uint8_t dataBuffer[IP_PACKET_SIZE] = {0};
size_t payload_bytes_in_packet = 0;
if (!packetizer->NextPacket(
&dataBuffer[rtp_header_length], &payload_bytes_in_packet, &last)) {
return false;
if (!packetizer->NextPacket(&dataBuffer[rtp_header_length],
&payload_bytes_in_packet, &last)) {
return -1;
}
// Write RTP header.
// Set marker bit true if this is the last packet in frame.
_rtpSender.BuildRTPheader(
dataBuffer, payloadType, last, captureTimeStamp, capture_time_ms);
// According to
// http://www.etsi.org/deliver/etsi_ts/126100_126199/126114/12.07.00_60/
// ts_126114v120700p.pdf Section 7.4.5:
@ -350,7 +310,7 @@ bool RTPSenderVideo::Send(const RtpVideoCodecTypes videoType,
// value sent.
// Here we are adding it to every packet of every frame at this point.
if (!rtpHdr) {
assert(!_rtpSender.IsRtpHeaderExtensionRegistered(
DCHECK(!_rtpSender.IsRtpHeaderExtensionRegistered(
kRtpExtensionVideoRotation));
} else if (cvo_mode == RTPSenderInterface::kCVOActivated) {
// Checking whether CVO header extension is registered will require taking
@ -365,22 +325,29 @@ bool RTPSenderVideo::Send(const RtpVideoCodecTypes videoType,
_rtpSender.UpdateVideoRotation(dataBuffer, packetSize, rtp_header,
rtpHdr->rotation);
}
if (SendVideoPacket(dataBuffer,
payload_bytes_in_packet,
rtp_header_length,
captureTimeStamp,
capture_time_ms,
packetizer->GetStorageType(_retransmissionSettings),
packetizer->GetProtectionType() == kProtectedPacket)) {
LOG(LS_WARNING) << packetizer->ToString()
<< " failed to send packet number "
<< _rtpSender.SequenceNumber();
if (fec_enabled) {
SendVideoPacketAsRed(dataBuffer, payload_bytes_in_packet,
rtp_header_length, _rtpSender.SequenceNumber(),
captureTimeStamp, capture_time_ms, storage,
packetizer->GetProtectionType() == kProtectedPacket);
} else {
SendVideoPacket(dataBuffer, payload_bytes_in_packet, rtp_header_length,
_rtpSender.SequenceNumber(), captureTimeStamp,
capture_time_ms, storage);
}
}
TRACE_EVENT_ASYNC_END1(
"webrtc", "Video", capture_time_ms, "timestamp", _rtpSender.Timestamp());
return true;
return 0;
}
void RTPSenderVideo::SetMaxConfiguredBitrateVideo(const uint32_t maxBitrate) {
_maxBitrate = maxBitrate;
}
uint32_t RTPSenderVideo::MaxConfiguredBitrateVideo() const {
return _maxBitrate;
}
void RTPSenderVideo::ProcessBitrate() {
@ -397,12 +364,13 @@ uint32_t RTPSenderVideo::FecOverheadRate() const {
}
int RTPSenderVideo::SelectiveRetransmissions() const {
CriticalSectionScoped cs(crit_.get());
return _retransmissionSettings;
}
int RTPSenderVideo::SetSelectiveRetransmissions(uint8_t settings) {
void RTPSenderVideo::SetSelectiveRetransmissions(uint8_t settings) {
CriticalSectionScoped cs(crit_.get());
_retransmissionSettings = settings;
return 0;
}
} // namespace webrtc
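
The restructured send path above builds the RED and FEC packets while holding crit_ and only calls SendToNetwork() after the scope ends, so the class never calls out of itself with its own lock held. Below is a self-contained illustration of that design choice, using std::mutex in place of webrtc's CriticalSectionWrapper and made-up Packet/Sender types; it is a sketch of the locking pattern, not the CL's code.

#include <mutex>
#include <vector>

struct Packet {
  int seq_num;
};

class Sender {
 public:
  void SendFrame() {
    std::vector<Packet> packets;
    {
      std::lock_guard<std::mutex> lock(lock_);
      // Only packet construction is done under the lock.
      packets.push_back(Packet{next_seq_++});
      packets.push_back(Packet{next_seq_++});
    }
    // The lock is released before calling out, so SendToNetwork() may safely
    // call back into this object without deadlocking.
    for (const Packet& packet : packets)
      SendToNetwork(packet);
  }

 private:
  void SendToNetwork(const Packet& /*packet*/) {}
  std::mutex lock_;
  int next_seq_ = 0;
};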

View File

@ -13,6 +13,8 @@
#include <list>
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/base/thread_annotations.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/bitrate.h"
@ -37,10 +39,10 @@ class RTPSenderVideo {
size_t FECPacketOverhead() const;
int32_t RegisterVideoPayload(const char payloadName[RTP_PAYLOAD_NAME_SIZE],
const int8_t payloadType,
const uint32_t maxBitRate,
RtpUtility::Payload*& payload);
static RtpUtility::Payload* CreateVideoPayload(
const char payloadName[RTP_PAYLOAD_NAME_SIZE],
const int8_t payloadType,
const uint32_t maxBitRate);
int32_t SendVideo(const RtpVideoCodecTypes videoType,
const FrameType frameType,
@ -50,30 +52,27 @@ class RTPSenderVideo {
const uint8_t* payloadData,
const size_t payloadSize,
const RTPFragmentationHeader* fragmentation,
VideoCodecInformation* codecInfo,
const RTPVideoHeader* rtpHdr);
int32_t SendRTPIntraRequest();
void SetVideoCodecType(RtpVideoCodecTypes type);
VideoCodecInformation* CodecInformationVideo();
void SetMaxConfiguredBitrateVideo(const uint32_t maxBitrate);
uint32_t MaxConfiguredBitrateVideo() const;
// FEC
int32_t SetGenericFECStatus(const bool enable,
const uint8_t payloadTypeRED,
const uint8_t payloadTypeFEC);
void SetGenericFECStatus(const bool enable,
const uint8_t payloadTypeRED,
const uint8_t payloadTypeFEC);
int32_t GenericFECStatus(bool& enable,
uint8_t& payloadTypeRED,
uint8_t& payloadTypeFEC) const;
void GenericFECStatus(bool& enable,
uint8_t& payloadTypeRED,
uint8_t& payloadTypeFEC) const;
int32_t SetFecParameters(const FecProtectionParams* delta_params,
const FecProtectionParams* key_params);
void SetFecParameters(const FecProtectionParams* delta_params,
const FecProtectionParams* key_params);
void ProcessBitrate();
@ -81,45 +80,43 @@ class RTPSenderVideo {
uint32_t FecOverheadRate() const;
int SelectiveRetransmissions() const;
int SetSelectiveRetransmissions(uint8_t settings);
void SetSelectiveRetransmissions(uint8_t settings);
protected:
virtual int32_t SendVideoPacket(uint8_t* dataBuffer,
const size_t payloadLength,
const size_t rtpHeaderLength,
const uint32_t capture_timestamp,
int64_t capture_time_ms,
StorageType storage,
bool protect);
private:
void SendVideoPacket(uint8_t* dataBuffer,
const size_t payloadLength,
const size_t rtpHeaderLength,
uint16_t seq_num,
const uint32_t capture_timestamp,
int64_t capture_time_ms,
StorageType storage);
private:
bool Send(const RtpVideoCodecTypes videoType,
const FrameType frameType,
const int8_t payloadType,
const uint32_t captureTimeStamp,
int64_t capture_time_ms,
const uint8_t* payloadData,
const size_t payloadSize,
const RTPFragmentationHeader* fragmentation,
const RTPVideoHeader* rtpHdr);
void SendVideoPacketAsRed(uint8_t* dataBuffer,
const size_t payloadLength,
const size_t rtpHeaderLength,
uint16_t video_seq_num,
const uint32_t capture_timestamp,
int64_t capture_time_ms,
StorageType media_packet_storage,
bool protect);
private:
RTPSenderInterface& _rtpSender;
// Should never be held when calling out of this class.
const rtc::scoped_ptr<CriticalSectionWrapper> crit_;
RtpVideoCodecTypes _videoType;
VideoCodecInformation* _videoCodecInformation;
uint32_t _maxBitrate;
int32_t _retransmissionSettings;
int32_t _retransmissionSettings GUARDED_BY(crit_);
// FEC
ForwardErrorCorrection _fec;
bool _fecEnabled;
int8_t _payloadTypeRED;
int8_t _payloadTypeFEC;
unsigned int _numberFirstPartition;
FecProtectionParams delta_fec_params_;
FecProtectionParams key_fec_params_;
ProducerFec producer_fec_;
bool _fecEnabled GUARDED_BY(crit_);
int8_t _payloadTypeRED GUARDED_BY(crit_);
int8_t _payloadTypeFEC GUARDED_BY(crit_);
FecProtectionParams delta_fec_params_ GUARDED_BY(crit_);
FecProtectionParams key_fec_params_ GUARDED_BY(crit_);
ProducerFec producer_fec_ GUARDED_BY(crit_);
// Bitrate used for FEC payload, RED headers, RTP headers for FEC packets
// and any padding overhead.