Add accessor methods for RTP timestamp of EncodedImage.

The intention is to make the member private, but downstream callers
must be updated to use the accessor methods first.

Bug: webrtc:9378
Change-Id: I3495bd8d545b7234fbea10abfd14f082caa420b6
Reviewed-on: https://webrtc-review.googlesource.com/82160
Reviewed-by: Magnus Jedvert <magjed@webrtc.org>
Reviewed-by: Erik Språng <sprang@webrtc.org>
Reviewed-by: Sebastian Jansson <srte@webrtc.org>
Reviewed-by: Philip Eliasson <philipel@webrtc.org>
Commit-Queue: Niels Moller <nisse@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#24352}
Author:    Niels Möller
Date:      2018-08-16 10:24:12 +02:00
Committed: Commit Bot
Parent:    bcdf5f1a94
Commit:    2377588c82

52 changed files with 163 additions and 166 deletions
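
For context, the new accessors on EncodedImage presumably look something like the sketch below. This is a minimal sketch, assuming the RTP timestamp lives in the existing public _timeStamp member and that a SetTimestamp() setter is added alongside the getter; the names and layout are inferred from the call sites in this diff, not copied from the patch itself.

#include <cstdint>

// Sketch only: getter/setter pair for the RTP timestamp of an encoded frame.
class EncodedImage {
 public:
  uint32_t Timestamp() const { return _timeStamp; }
  void SetTimestamp(uint32_t timestamp) { _timeStamp = timestamp; }

  // Still public here; the stated plan is to make this private once all
  // downstream callers go through the accessors above.
  uint32_t _timeStamp = 0;
};

The jitter buffer changes below follow the same pattern on the frame classes that derive from EncodedImage: each call site moves from the old TimeStamp() spelling to the new Timestamp() accessor.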


@@ -54,7 +54,7 @@ bool HasNonEmptyState(FrameListPair pair) {
 }
 
 void FrameList::InsertFrame(VCMFrameBuffer* frame) {
-  insert(rbegin().base(), FrameListPair(frame->TimeStamp(), frame));
+  insert(rbegin().base(), FrameListPair(frame->Timestamp(), frame));
 }
 
 VCMFrameBuffer* FrameList::PopFrame(uint32_t timestamp) {
@@ -110,7 +110,7 @@ void FrameList::CleanUpOldOrEmptyFrames(VCMDecodingState* decoding_state,
     }
     free_frames->push_back(oldest_frame);
     TRACE_EVENT_INSTANT1("webrtc", "JB::OldOrEmptyFrameDropped", "timestamp",
-                         oldest_frame->TimeStamp());
+                         oldest_frame->Timestamp());
     erase(begin());
   }
 }
@@ -212,7 +212,7 @@ void Vp9SsMap::UpdateFrames(FrameList* frames) {
       continue;
     }
 
     SsMap::iterator ss_it;
-    if (Find(frame_it.second->TimeStamp(), &ss_it)) {
+    if (Find(frame_it.second->Timestamp(), &ss_it)) {
       if (gof_idx >= ss_it->second.num_frames_in_gof) {
         continue;  // Assume corresponding SS not yet received.
@@ -528,7 +528,7 @@ bool VCMJitterBuffer::NextMaybeIncompleteTimestamp(uint32_t* timestamp) {
     }
   }
 
-  *timestamp = oldest_frame->TimeStamp();
+  *timestamp = oldest_frame->Timestamp();
   return true;
 }
 
@@ -564,7 +564,7 @@ VCMEncodedFrame* VCMJitterBuffer::ExtractAndSetDecode(uint32_t timestamp) {
       // Wait for this one to get complete.
       waiting_for_completion_.frame_size = frame->Length();
       waiting_for_completion_.latest_packet_time = frame->LatestPacketTimeMs();
-      waiting_for_completion_.timestamp = frame->TimeStamp();
+      waiting_for_completion_.timestamp = frame->Timestamp();
     }
   }
 
@@ -715,8 +715,8 @@ VCMFrameBufferEnum VCMJitterBuffer::InsertPacket(const VCMPacket& packet,
       frame->InsertPacket(packet, now_ms, decode_error_mode_, frame_data);
 
   if (previous_state != kStateComplete) {
-    TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", frame->TimeStamp(), "timestamp",
-                             frame->TimeStamp());
+    TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", frame->Timestamp(), "timestamp",
+                             frame->Timestamp());
   }
 
   if (buffer_state > 0) {
@@ -831,7 +831,7 @@ bool VCMJitterBuffer::IsContinuous(const VCMFrameBuffer& frame) const {
   for (FrameList::const_iterator it = decodable_frames_.begin();
        it != decodable_frames_.end(); ++it) {
     VCMFrameBuffer* decodable_frame = it->second;
-    if (IsNewerTimestamp(decodable_frame->TimeStamp(), frame.TimeStamp())) {
+    if (IsNewerTimestamp(decodable_frame->Timestamp(), frame.Timestamp())) {
       break;
     }
     decoding_state.SetState(decodable_frame);
@@ -865,7 +865,7 @@ void VCMJitterBuffer::FindAndInsertContinuousFramesWithState(
        it != incomplete_frames_.end();) {
     VCMFrameBuffer* frame = it->second;
     if (IsNewerTimestamp(original_decoded_state.time_stamp(),
-                         frame->TimeStamp())) {
+                         frame->Timestamp())) {
       ++it;
       continue;
     }
@@ -947,11 +947,11 @@ int VCMJitterBuffer::NonContinuousOrIncompleteDuration() {
   if (incomplete_frames_.empty()) {
     return 0;
   }
-  uint32_t start_timestamp = incomplete_frames_.Front()->TimeStamp();
+  uint32_t start_timestamp = incomplete_frames_.Front()->Timestamp();
   if (!decodable_frames_.empty()) {
-    start_timestamp = decodable_frames_.Back()->TimeStamp();
+    start_timestamp = decodable_frames_.Back()->Timestamp();
   }
-  return incomplete_frames_.Back()->TimeStamp() - start_timestamp;
+  return incomplete_frames_.Back()->Timestamp() - start_timestamp;
 }
 
 uint16_t VCMJitterBuffer::EstimatedLowSequenceNumber(
@@ -1184,10 +1184,10 @@ void VCMJitterBuffer::CountFrame(const VCMFrameBuffer& frame) {
   incoming_frame_count_++;
 
   if (frame.FrameType() == kVideoFrameKey) {
-    TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", frame.TimeStamp(),
+    TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", frame.Timestamp(),
                             "KeyComplete");
   } else {
-    TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", frame.TimeStamp(),
+    TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", frame.Timestamp(),
                             "DeltaComplete");
   }
 
@@ -1263,7 +1263,7 @@ void VCMJitterBuffer::UpdateJitterEstimate(const VCMFrameBuffer& frame,
   }
   // No retransmitted frames should be a part of the jitter
   // estimate.
-  UpdateJitterEstimate(frame.LatestPacketTimeMs(), frame.TimeStamp(),
+  UpdateJitterEstimate(frame.LatestPacketTimeMs(), frame.Timestamp(),
                        frame.Length(), incomplete_frame);
 }
 
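
For downstream code that still touches the member directly, the migration the commit message describes would look roughly like this. Hypothetical caller code, not part of this diff; the include path and the SetTimestamp() name are assumptions.

#include "api/video/encoded_image.h"  // Assumed header location for EncodedImage.

// Before/after pattern for a hypothetical call site: direct member access is
// replaced by the accessors so the member can later be made private.
void TagFrame(webrtc::EncodedImage* image, uint32_t rtp_timestamp) {
  // Before: image->_timeStamp = rtp_timestamp;
  image->SetTimestamp(rtp_timestamp);
}

uint32_t FrameRtpTimestamp(const webrtc::EncodedImage& image) {
  // Before: return image._timeStamp;
  return image.Timestamp();
}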