Move RTP timestamp calculation from BuildRTPheader to SendOutgoingData
BUG=webrtc:5565
Review-Url: https://codereview.webrtc.org/2249223005
Cr-Commit-Position: refs/heads/master@{#13842}
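The core of the change: SendOutgoingData() now derives the on-the-wire timestamp once (rtp_timestamp = timestamp_offset_ + capture_timestamp), fills *transport_frame_id_out right away under the send lock, and hands the value down to SendAudio()/SendVideo() and BuildRtpHeader(), which merely records it as last_rtp_timestamp_. Below is a minimal standalone sketch of that flow; FakeSender and its members are stand-ins for illustration, not the real WebRTC classes.

// Minimal standalone sketch (simplified stand-in types, not the real WebRTC
// classes): the RTP timestamp is derived once from the capture timestamp and
// the per-stream offset, then threaded through every layer instead of being
// recomputed and cached inside the header builder.
#include <cstdint>
#include <iostream>

struct FakeSender {
  uint32_t timestamp_offset = 0x12345678;  // per-stream random offset
  uint32_t last_rtp_timestamp = 0;         // what used to be |timestamp_|

  // The header builder now just consumes the precomputed value.
  void BuildRtpHeader(uint32_t rtp_timestamp) {
    last_rtp_timestamp = rtp_timestamp;  // cached for padding packets etc.
    std::cout << "header timestamp: " << rtp_timestamp << "\n";
  }

  // SendOutgoingData computes the on-the-wire timestamp exactly once.
  void SendOutgoingData(uint32_t capture_timestamp, uint32_t* frame_id_out) {
    uint32_t rtp_timestamp = timestamp_offset + capture_timestamp;  // wraps mod 2^32
    if (frame_id_out)
      *frame_id_out = rtp_timestamp;  // available before the packet is built
    BuildRtpHeader(rtp_timestamp);
  }
};

int main() {
  FakeSender sender;
  uint32_t frame_id = 0;
  sender.SendOutgoingData(90000, &frame_id);  // one second of 90 kHz video
  std::cout << "frame id: " << frame_id << "\n";
}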
@@ -113,7 +113,7 @@ RTPSender::RTPSender(
       remote_ssrc_(0),
       sequence_number_forced_(false),
       ssrc_forced_(false),
-      timestamp_(0),
+      last_rtp_timestamp_(0),
       capture_time_ms_(0),
       last_timestamp_time_ms_(0),
       media_has_been_sent_(false),
@@ -436,11 +436,15 @@ bool RTPSender::SendOutgoingData(FrameType frame_type,
                                  uint32_t* transport_frame_id_out) {
   uint32_t ssrc;
   uint16_t sequence_number;
+  uint32_t rtp_timestamp;
   {
     // Drop this packet if we're not sending media packets.
     rtc::CritScope lock(&send_critsect_);
     ssrc = ssrc_;
     sequence_number = sequence_number_;
+    rtp_timestamp = timestamp_offset_ + capture_timestamp;
+    if (transport_frame_id_out)
+      *transport_frame_id_out = rtp_timestamp;
     if (!sending_media_)
       return true;
   }
@@ -453,12 +457,12 @@ bool RTPSender::SendOutgoingData(FrameType frame_type,
 
   bool result;
   if (audio_configured_) {
-    TRACE_EVENT_ASYNC_STEP1("webrtc", "Audio", capture_timestamp,
-                            "Send", "type", FrameTypeToString(frame_type));
+    TRACE_EVENT_ASYNC_STEP1("webrtc", "Audio", rtp_timestamp, "Send", "type",
+                            FrameTypeToString(frame_type));
     assert(frame_type == kAudioFrameSpeech || frame_type == kAudioFrameCN ||
            frame_type == kEmptyFrame);
 
-    result = audio_->SendAudio(frame_type, payload_type, capture_timestamp,
+    result = audio_->SendAudio(frame_type, payload_type, rtp_timestamp,
                                payload_data, payload_size, fragmentation);
   } else {
     TRACE_EVENT_ASYNC_STEP1("webrtc", "Video", capture_time_ms,
@@ -485,17 +489,10 @@ bool RTPSender::SendOutgoingData(FrameType frame_type,
     }
 
     result = video_->SendVideo(video_type, frame_type, payload_type,
-                               capture_timestamp, capture_time_ms, payload_data,
+                               rtp_timestamp, capture_time_ms, payload_data,
                                payload_size, fragmentation, rtp_header);
   }
 
-  if (transport_frame_id_out) {
-    rtc::CritScope lock(&send_critsect_);
-    // TODO(sergeyu): Move RTP timestamp calculation from BuildRTPheader() to
-    // SendOutgoingData() and pass it to SendVideo()/SendAudio() calls.
-    *transport_frame_id_out = timestamp_;
-  }
-
   rtc::CritScope cs(&statistics_crit_);
   // Note: This is currently only counting for video.
   if (frame_type == kVideoFrameKey) {
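Because the timestamp is now produced inside the same critical section that snapshots ssrc_ and sequence_number_, the deleted block above (re-locking later just to copy timestamp_ into *transport_frame_id_out) becomes unnecessary. A minimal sketch of this locked-snapshot pattern, using std::mutex and hypothetical names in place of rtc::CritScope:

// Locked-snapshot sketch (hypothetical names, std::mutex instead of
// rtc::CriticalSection): read everything needed in one critical section,
// then do the slower packetization work without holding the lock.
#include <cstdint>
#include <mutex>
#include <optional>

class Sender {
 public:
  // Returns the RTP timestamp for |capture_timestamp|, or nothing if the
  // sender is currently not sending media.
  std::optional<uint32_t> PrepareSend(uint32_t capture_timestamp) {
    std::lock_guard<std::mutex> lock(mutex_);
    if (!sending_media_)
      return std::nullopt;  // drop: not sending media packets
    return timestamp_offset_ + capture_timestamp;
  }

 private:
  std::mutex mutex_;
  bool sending_media_ = true;
  uint32_t timestamp_offset_ = 0x9e3779b9;  // arbitrary per-stream offset
};

int main() {
  Sender sender;
  auto rtp_timestamp = sender.PrepareSend(160);  // e.g. 20 ms of 8 kHz audio
  return rtp_timestamp ? 0 : 1;
}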
@@ -570,7 +567,7 @@ size_t RTPSender::SendPadData(size_t bytes,
     if (!sending_media_)
       return bytes_sent;
     if (!timestamp_provided) {
-      timestamp = timestamp_;
+      timestamp = last_rtp_timestamp_;
       capture_time_ms = capture_time_ms_;
     }
     if (rtx_ == kRtxOff) {
@@ -1082,20 +1079,20 @@ int32_t RTPSender::BuildRTPheader(uint8_t* data_buffer,
 int32_t RTPSender::BuildRtpHeader(uint8_t* data_buffer,
                                   int8_t payload_type,
                                   bool marker_bit,
-                                  uint32_t capture_timestamp,
+                                  uint32_t rtp_timestamp,
                                   int64_t capture_time_ms) {
   assert(payload_type >= 0);
   rtc::CritScope lock(&send_critsect_);
   if (!sending_media_)
     return -1;
 
-  timestamp_ = timestamp_offset_ + capture_timestamp;
+  last_rtp_timestamp_ = rtp_timestamp;
   last_timestamp_time_ms_ = clock_->TimeInMilliseconds();
   uint32_t sequence_number = sequence_number_++;
   capture_time_ms_ = capture_time_ms;
   last_packet_marker_bit_ = marker_bit;
   return CreateRtpHeader(data_buffer, payload_type, ssrc_, marker_bit,
-                         timestamp_, sequence_number, csrcs_);
+                         rtp_timestamp, sequence_number, csrcs_);
 }
 
 uint16_t RTPSender::BuildRtpHeaderExtension(uint8_t* data_buffer,
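BuildRtpHeader() no longer computes anything; it caches the rtp_timestamp it is handed as last_rtp_timestamp_ for later use (padding packets, state export). Note that the offset addition above, like the DTMF subtractions further down in the audio sender, leans on unsigned 32-bit arithmetic being well defined modulo 2^32, so timestamps behave across the wrap point. A small self-contained check:

// Unsigned 32-bit timestamps wrap modulo 2^32, and both the offset addition
// and elapsed-sample subtractions rely on that being well defined.
#include <cassert>
#include <cstdint>

int main() {
  uint32_t timestamp_offset = 0xFFFFFF00u;  // close to the wrap point
  uint32_t capture_timestamp = 0x200u;
  uint32_t rtp_timestamp = timestamp_offset + capture_timestamp;  // wraps
  assert(rtp_timestamp == 0x100u);

  // Elapsed samples across the wrap are still correct with a subtraction.
  uint32_t earlier = 0xFFFFFFF0u;
  uint32_t later = 0x10u;
  assert(static_cast<uint32_t>(later - earlier) == 0x20u);
  return 0;
}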
@@ -1515,11 +1512,6 @@ bool RTPSender::SendingMedia() const {
   return sending_media_;
 }
 
-uint32_t RTPSender::Timestamp() const {
-  rtc::CritScope lock(&send_critsect_);
-  return timestamp_;
-}
-
 void RTPSender::SetTimestampOffset(uint32_t timestamp) {
   rtc::CritScope lock(&send_critsect_);
   timestamp_offset_ = timestamp;
@@ -1693,7 +1685,7 @@ void RTPSender::SetRtpState(const RtpState& rtp_state) {
   sequence_number_ = rtp_state.sequence_number;
   sequence_number_forced_ = true;
   timestamp_offset_ = rtp_state.start_timestamp;
-  timestamp_ = rtp_state.timestamp;
+  last_rtp_timestamp_ = rtp_state.timestamp;
   capture_time_ms_ = rtp_state.capture_time_ms;
   last_timestamp_time_ms_ = rtp_state.last_timestamp_time_ms;
   media_has_been_sent_ = rtp_state.media_has_been_sent;
@@ -1705,7 +1697,7 @@ RtpState RTPSender::GetRtpState() const {
   RtpState state;
   state.sequence_number = sequence_number_;
   state.start_timestamp = timestamp_offset_;
-  state.timestamp = timestamp_;
+  state.timestamp = last_rtp_timestamp_;
   state.capture_time_ms = capture_time_ms_;
   state.last_timestamp_time_ms = last_timestamp_time_ms_;
   state.media_has_been_sent = media_has_been_sent_;
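RtpState::timestamp now round-trips last_rtp_timestamp_, so the last timestamp actually put on the wire survives a GetRtpState()/SetRtpState() hand-off and a recreated sender continues the old timeline. A simplified, hypothetical sketch of that round trip (RtpStateLite and SenderLite are stand-ins, not the real webrtc::RtpState or RTPSender):

// Simplified round-trip sketch (hypothetical stand-in types): saving the last
// on-the-wire timestamp lets a recreated sender continue the old timeline.
#include <cstdint>

struct RtpStateLite {            // stand-in for webrtc::RtpState
  uint16_t sequence_number = 0;
  uint32_t start_timestamp = 0;  // the timestamp offset
  uint32_t timestamp = 0;        // last RTP timestamp put on the wire
};

struct SenderLite {              // stand-in for the real sender
  uint16_t sequence_number_ = 1000;
  uint32_t timestamp_offset_ = 0xABCDEF01;
  uint32_t last_rtp_timestamp_ = 0xABCE0000;

  RtpStateLite GetRtpState() const {
    return {sequence_number_, timestamp_offset_, last_rtp_timestamp_};
  }
  void SetRtpState(const RtpStateLite& state) {
    sequence_number_ = state.sequence_number;
    timestamp_offset_ = state.start_timestamp;
    last_rtp_timestamp_ = state.timestamp;
  }
};

int main() {
  SenderLite old_sender;
  SenderLite new_sender;
  new_sender.SetRtpState(old_sender.GetRtpState());  // timeline continues
  return new_sender.last_rtp_timestamp_ == old_sender.last_rtp_timestamp_ ? 0 : 1;
}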
@@ -222,8 +222,6 @@ class RTPSender {
   uint16_t AllocateSequenceNumber(uint16_t packets_to_send);
   size_t MaxPayloadLength() const;
 
-  // Current timestamp.
-  uint32_t Timestamp() const;
   uint32_t SSRC() const;
 
   // Deprecated. Create RtpPacketToSend instead and use next function.
@@ -410,7 +408,7 @@ class RTPSender {
   uint16_t sequence_number_rtx_ GUARDED_BY(send_critsect_);
   bool ssrc_forced_ GUARDED_BY(send_critsect_);
   uint32_t ssrc_ GUARDED_BY(send_critsect_);
-  uint32_t timestamp_ GUARDED_BY(send_critsect_);
+  uint32_t last_rtp_timestamp_ GUARDED_BY(send_critsect_);
   int64_t capture_time_ms_ GUARDED_BY(send_critsect_);
   int64_t last_timestamp_time_ms_ GUARDED_BY(send_critsect_);
   bool media_has_been_sent_ GUARDED_BY(send_critsect_);
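In the header, the guarded member is renamed and the public Timestamp() accessor is dropped: callers now work with the timestamp they passed down instead of reading it back from the sender afterwards, where a later write could already have replaced it. A deterministic toy sketch of the difference, with hypothetical names:

// Toy sketch (hypothetical names): the value passed down stays correct for
// this frame, while reading it back through an accessor returns whatever was
// written last.
#include <cstdint>
#include <iostream>

struct SenderLite {
  uint32_t last_rtp_timestamp_ = 0;
  void BuildRtpHeader(uint32_t rtp_timestamp) { last_rtp_timestamp_ = rtp_timestamp; }
  uint32_t Timestamp() const { return last_rtp_timestamp_; }  // the removed accessor
};

int main() {
  SenderLite sender;
  uint32_t my_rtp_timestamp = 1000;
  sender.BuildRtpHeader(my_rtp_timestamp);
  sender.BuildRtpHeader(4000);  // a later frame handled in between
  std::cout << "read-back: " << sender.Timestamp()               // 4000, not ours
            << ", passed-through: " << my_rtp_timestamp << "\n";  // 1000
}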
@@ -145,11 +145,11 @@ bool RTPSenderAudio::MarkerBit(FrameType frame_type, int8_t payload_type) {
 }
 
 bool RTPSenderAudio::SendAudio(FrameType frame_type,
-                               int8_t payload_type,
-                               uint32_t capture_timestamp,
-                               const uint8_t* payload_data,
-                               size_t data_size,
-                               const RTPFragmentationHeader* fragmentation) {
+                               int8_t payload_type,
+                               uint32_t rtp_timestamp,
+                               const uint8_t* payload_data,
+                               size_t data_size,
+                               const RTPFragmentationHeader* fragmentation) {
   // TODO(pwestin) Breakup function in smaller functions.
   size_t payload_size = data_size;
   size_t max_payload_length = rtp_sender_->MaxPayloadLength();
@@ -172,7 +172,7 @@ bool RTPSenderAudio::SendAudio(FrameType frame_type,
 
       if (delaySinceLastDTMF > 100) {
         // New tone to play
-        dtmf_timestamp_ = capture_timestamp;
+        dtmf_timestamp_ = rtp_timestamp;
         if (NextDTMF(&key, &dtmf_length_ms, &dtmf_level_) >= 0) {
           dtmf_event_first_packet_sent_ = false;
           dtmf_key_ = key;
@@ -189,14 +189,13 @@ bool RTPSenderAudio::SendAudio(FrameType frame_type,
       // kEmptyFrame is used to drive the DTMF when in CN mode
       // it can be triggered more frequently than we want to send the
      // DTMF packets.
-      if (packet_size_samples >
-          (capture_timestamp - dtmf_timestamp_last_sent_)) {
+      if (packet_size_samples > (rtp_timestamp - dtmf_timestamp_last_sent_)) {
        // not time to send yet
        return true;
      }
    }
-    dtmf_timestamp_last_sent_ = capture_timestamp;
-    uint32_t dtmf_duration_samples = capture_timestamp - dtmf_timestamp_;
+    dtmf_timestamp_last_sent_ = rtp_timestamp;
+    uint32_t dtmf_duration_samples = rtp_timestamp - dtmf_timestamp_;
     bool ended = false;
     bool send = true;
 
@@ -217,7 +216,7 @@ bool RTPSenderAudio::SendAudio(FrameType frame_type,
                                   static_cast<uint16_t>(0xffff), false);
 
         // set new timestap for this segment
-        dtmf_timestamp_ = capture_timestamp;
+        dtmf_timestamp_ = rtp_timestamp;
         dtmf_duration_samples -= 0xffff;
         dtmf_length_samples_ -= 0xffff;
 
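The segmenting above exists because a telephone-event (RFC 4733) packet carries only a 16-bit duration field: once dtmf_duration_samples exceeds 0xffff, the current segment is ended and a new one starts from a fresh dtmf_timestamp_. A rough standalone illustration, assuming the usual 8 kHz telephone-event clock:

// Rough illustration, assuming an 8 kHz telephone-event clock: the RFC 4733
// duration field is 16 bits, so a long tone is peeled off in 0xffff-sample
// segments, which is what the subtraction above implements.
#include <cstdint>
#include <iostream>

int main() {
  const uint32_t kSampleRateHz = 8000;
  uint32_t tone_ms = 10000;  // a 10 second tone
  uint32_t duration_samples = tone_ms * kSampleRateHz / 1000;  // 80000 samples
  uint32_t full_segments = 0;
  while (duration_samples > 0xffff) {  // 65535-sample cap per event packet
    duration_samples -= 0xffff;
    ++full_segments;
  }
  std::cout << full_segments << " full segment(s), " << duration_samples
            << " samples left in the final one\n";  // 1 full segment, 14465 left
}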
@@ -249,9 +248,9 @@ bool RTPSenderAudio::SendAudio(FrameType frame_type,
 
   int32_t rtpHeaderLength = 0;
 
-  rtpHeaderLength = rtp_sender_->BuildRtpHeader(data_buffer, payload_type,
-                                                marker_bit, capture_timestamp,
-                                                clock_->TimeInMilliseconds());
+  rtpHeaderLength =
+      rtp_sender_->BuildRtpHeader(data_buffer, payload_type, marker_bit,
+                                  rtp_timestamp, clock_->TimeInMilliseconds());
   if (rtpHeaderLength <= 0) {
     return false;
   }
@@ -283,8 +282,8 @@ bool RTPSenderAudio::SendAudio(FrameType frame_type,
   rtp_sender_->UpdateAudioLevel(data_buffer, packetSize, rtp_header,
                                 (frame_type == kAudioFrameSpeech),
                                 audio_level_dbov);
-  TRACE_EVENT_ASYNC_END2("webrtc", "Audio", capture_timestamp, "timestamp",
-                         rtp_sender_->Timestamp(), "seqnum",
+  TRACE_EVENT_ASYNC_END2("webrtc", "Audio", rtp_timestamp, "timestamp",
+                         rtp_timestamp, "seqnum",
                          rtp_sender_->SequenceNumber());
   bool send_result = rtp_sender_->SendToNetwork(
       data_buffer, payload_size, rtpHeaderLength, rtc::TimeMillis(),
@@ -76,7 +76,7 @@ void RTPSenderVideo::SendVideoPacket(uint8_t* data_buffer,
                                      size_t payload_length,
                                      size_t rtp_header_length,
                                      uint16_t seq_num,
-                                     uint32_t capture_timestamp,
+                                     uint32_t rtp_timestamp,
                                      int64_t capture_time_ms,
                                      StorageType storage) {
   if (!rtp_sender_->SendToNetwork(data_buffer, payload_length,
@@ -89,7 +89,7 @@ void RTPSenderVideo::SendVideoPacket(uint8_t* data_buffer,
   video_bitrate_.Update(payload_length + rtp_header_length,
                         clock_->TimeInMilliseconds());
   TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"),
-                       "Video::PacketNormal", "timestamp", capture_timestamp,
+                       "Video::PacketNormal", "timestamp", rtp_timestamp,
                        "seqnum", seq_num);
 }
 
@@ -97,7 +97,7 @@ void RTPSenderVideo::SendVideoPacketAsRed(uint8_t* data_buffer,
                                           size_t payload_length,
                                           size_t rtp_header_length,
                                           uint16_t media_seq_num,
-                                          uint32_t capture_timestamp,
+                                          uint32_t rtp_timestamp,
                                           int64_t capture_time_ms,
                                           StorageType media_packet_storage,
                                           bool protect) {
@@ -133,7 +133,7 @@ void RTPSenderVideo::SendVideoPacketAsRed(uint8_t* data_buffer,
     rtc::CritScope cs(&stats_crit_);
     video_bitrate_.Update(red_packet->length(), clock_->TimeInMilliseconds());
     TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"),
-                         "Video::PacketRed", "timestamp", capture_timestamp,
+                         "Video::PacketRed", "timestamp", rtp_timestamp,
                          "seqnum", media_seq_num);
   } else {
     LOG(LS_WARNING) << "Failed to send RED packet " << media_seq_num;
@@ -146,7 +146,7 @@ void RTPSenderVideo::SendVideoPacketAsRed(uint8_t* data_buffer,
     rtc::CritScope cs(&stats_crit_);
     fec_bitrate_.Update(fec_packet->length(), clock_->TimeInMilliseconds());
     TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"),
-                         "Video::PacketFec", "timestamp", capture_timestamp,
+                         "Video::PacketFec", "timestamp", rtp_timestamp,
                          "seqnum", next_fec_sequence_number);
   } else {
     LOG(LS_WARNING) << "Failed to send FEC packet "
@@ -208,7 +208,7 @@ void RTPSenderVideo::SetFecParameters(const FecProtectionParams* delta_params,
 bool RTPSenderVideo::SendVideo(RtpVideoCodecTypes video_type,
                                FrameType frame_type,
                                int8_t payload_type,
-                               uint32_t capture_timestamp,
+                               uint32_t rtp_timestamp,
                                int64_t capture_time_ms,
                                const uint8_t* payload_data,
                                size_t payload_size,
@@ -269,7 +269,7 @@ bool RTPSenderVideo::SendVideo(RtpVideoCodecTypes video_type,
 
     // Write RTP header.
     int32_t header_length = rtp_sender_->BuildRtpHeader(
-        dataBuffer, payload_type, last, capture_timestamp, capture_time_ms);
+        dataBuffer, payload_type, last, rtp_timestamp, capture_time_ms);
     if (header_length <= 0)
       return false;
 
@@ -303,11 +303,11 @@ bool RTPSenderVideo::SendVideo(RtpVideoCodecTypes video_type,
     if (red_payload_type != 0) {
       SendVideoPacketAsRed(dataBuffer, payload_bytes_in_packet,
                            rtp_header_length, rtp_sender_->SequenceNumber(),
-                           capture_timestamp, capture_time_ms, storage,
+                           rtp_timestamp, capture_time_ms, storage,
                            packetizer->GetProtectionType() == kProtectedPacket);
     } else {
       SendVideoPacket(dataBuffer, payload_bytes_in_packet, rtp_header_length,
-                      rtp_sender_->SequenceNumber(), capture_timestamp,
+                      rtp_sender_->SequenceNumber(), rtp_timestamp,
                       capture_time_ms, storage);
     }
 
@@ -325,7 +325,7 @@ bool RTPSenderVideo::SendVideo(RtpVideoCodecTypes video_type,
   }
 
   TRACE_EVENT_ASYNC_END1("webrtc", "Video", capture_time_ms, "timestamp",
-                         rtp_sender_->Timestamp());
+                         rtp_timestamp);
   return true;
 }
 
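One unit note for the video path: rtp_timestamp is in RTP clock ticks (90 kHz for video) while capture_time_ms stays in milliseconds, which is why SendVideo() and the packet helpers keep carrying both values side by side. A quick conversion sketch (the 90 kHz figure is the standard RTP video clock rate, not something introduced here):

// Conversion sketch: video RTP timestamps tick at 90 kHz, capture times are
// in milliseconds, so both are carried side by side through the send path.
#include <cassert>
#include <cstdint>

int main() {
  const int64_t kVideoClockRateHz = 90000;
  int64_t capture_time_ms = 33;  // roughly one frame at 30 fps
  uint32_t rtp_ticks =
      static_cast<uint32_t>(capture_time_ms * kVideoClockRateHz / 1000);
  assert(rtp_ticks == 2970);
  return 0;
}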