Add performance tracing for PlatformThread and parts of the video code.

BUG=webrtc:7219

Review-Url: https://codereview.webrtc.org/2729783004
Cr-Commit-Position: refs/heads/master@{#17009}
Author: tommi
Date: 2017-03-03 07:21:18 -08:00
Committed by: Commit bot
Commit: db23ea69b6 (parent b4ec8765d8)
8 changed files with 39 additions and 3 deletions
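
The TRACE_EVENT0/TRACE_EVENT1 macros added below are dropped at runtime until an event tracer is registered, so nothing appears in a capture by default. A minimal capture sketch, assuming the rtc::tracing helpers declared in webrtc/base/event_tracer.h and the PlatformThread API of this revision (names and signatures are best-effort approximations, not verified against this exact tree):

    // Capture the trace events added in this CL to a JSON file that the
    // chrome://tracing viewer can load. Assumes rtc::tracing::SetupInternalTracer,
    // StartInternalCapture and StopInternalCapture from webrtc/base/event_tracer.h.
    #include "webrtc/base/event_tracer.h"
    #include "webrtc/base/platform_thread.h"

    namespace {
    // Run function for the traced thread; returning false ends its run loop.
    bool NoopRun(void* /*obj*/) {
      return false;
    }
    }  // namespace

    int main() {
      rtc::tracing::SetupInternalTracer();
      rtc::tracing::StartInternalCapture("webrtc_trace.json");

      // Each iteration of PlatformThread::Run now emits a "webrtc" slice.
      rtc::PlatformThread thread(&NoopRun, nullptr, "traced_thread");
      thread.Start();
      thread.Stop();

      rtc::tracing::StopInternalCapture();
      return 0;
    }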

View File

@@ -13,6 +13,7 @@
#include "webrtc/base/atomicops.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/timeutils.h"
#include "webrtc/base/trace_event.h"
#if defined(WEBRTC_LINUX)
#include <sys/prctl.h>
@@ -235,6 +236,8 @@ void PlatformThread::Run() {
#endif
do {
TRACE_EVENT1("webrtc", "PlatformThread::Run", "name", name_.c_str());
// The interface contract of Start/Stop is that for a successful call to
// Start, there should be at least one call to the run function. So we
// call the function before checking |stop_|.
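
The comment above is also why the new TRACE_EVENT1 sits inside a do/while rather than a while loop: the run function, and therefore one trace slice per iteration, must execute at least once before |stop_| is consulted. An illustrative reduction of that shape, with hypothetical member names rather than the real PlatformThread internals:

    #include <atomic>
    #include <string>

    #include "webrtc/base/trace_event.h"

    // Illustrative reduction of the run loop described above; run_function_,
    // obj_, name_ and stop_ are stand-ins for the real members.
    class RunLoopSketch {
     public:
      RunLoopSketch(bool (*run_function)(void*), void* obj, std::string name)
          : run_function_(run_function), obj_(obj), name_(std::move(name)) {}

      void Run() {
        do {
          // Scoped event: one slice per loop iteration in the "webrtc" category.
          TRACE_EVENT1("webrtc", "PlatformThread::Run", "name", name_.c_str());
          // Contract: at least one call per successful Start(), so invoke the
          // run function before looking at the stop flag.
          if (!run_function_(obj_))
            break;
        } while (!stop_.load(std::memory_order_acquire));
      }

      void RequestStop() { stop_.store(true, std::memory_order_release); }

     private:
      bool (*const run_function_)(void*);
      void* const obj_;
      const std::string name_;
      std::atomic<bool> stop_{false};
    };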

View File

@@ -13,6 +13,7 @@
#include <memory>
#include "webrtc/base/timeutils.h"
#include "webrtc/base/trace_event.h"
#include "webrtc/common_video/video_render_frames.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/event_wrapper.h"
@@ -60,6 +61,7 @@ IncomingVideoStream::~IncomingVideoStream() {
}
void IncomingVideoStream::OnFrame(const VideoFrame& video_frame) {
TRACE_EVENT0("webrtc", "IncomingVideoStream::OnFrame");
RTC_CHECK_RUNS_SERIALIZED(&decoder_race_checker_);
RTC_DCHECK(!incoming_render_queue_.IsCurrent());
incoming_render_queue_.PostTask(
@@ -67,6 +69,7 @@ void IncomingVideoStream::OnFrame(const VideoFrame& video_frame) {
}
void IncomingVideoStream::Dequeue() {
TRACE_EVENT0("webrtc", "IncomingVideoStream::Dequeue");
RTC_DCHECK(incoming_render_queue_.IsCurrent());
rtc::Optional<VideoFrame> frame_to_render = render_buffers_.FrameToRender();
if (frame_to_render)

View File

@@ -14,6 +14,7 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/trace_event.h"
#include "webrtc/media/engine/internaldecoderfactory.h"
#include "webrtc/modules/video_coding/include/video_error_codes.h"
@@ -75,6 +76,7 @@ int32_t VideoDecoderSoftwareFallbackWrapper::Decode(
const RTPFragmentationHeader* fragmentation,
const CodecSpecificInfo* codec_specific_info,
int64_t render_time_ms) {
TRACE_EVENT0("webrtc", "VideoDecoderSoftwareFallbackWrapper::Decode");
// Try initializing and decoding with the provided decoder on every keyframe
// or when there's no fallback decoder. This is the normal case.
if (!fallback_decoder_ || input_image._frameType == kVideoFrameKey) {
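
For readers unfamiliar with the wrapper this event lands in: the comment above summarizes its policy of retrying the primary decoder on every keyframe and otherwise staying on the software fallback. A minimal sketch of that policy with an invented Decoder interface (the real wrapper also distinguishes specific error codes before falling back, which is omitted here):

    #include <functional>
    #include <memory>

    // Invented stand-ins; not the real VideoDecoderSoftwareFallbackWrapper API.
    struct Decoder {
      virtual ~Decoder() = default;
      virtual bool Decode(bool keyframe) = 0;  // Returns true on success.
    };

    struct FallbackSketch {
      bool Decode(bool keyframe) {
        if (!fallback_ || keyframe) {
          // Normal case: try the primary (e.g. hardware) decoder on every
          // keyframe, or while no fallback decoder exists yet.
          if (primary_->Decode(keyframe)) {
            fallback_.reset();  // Primary works again; drop the fallback.
            return true;
          }
          fallback_ = create_fallback_();
          if (!fallback_)
            return false;  // No software decoder available; report failure.
        }
        // Fallback case: keep decoding in software until the next keyframe retry.
        return fallback_->Decode(keyframe);
      }

      std::unique_ptr<Decoder> primary_;
      std::unique_ptr<Decoder> fallback_;
      std::function<std::unique_ptr<Decoder>()> create_fallback_;
    };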

View File

@@ -14,6 +14,7 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/trace_event.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/rtp_rtcp/source/byte_io.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet/common_header.h"
@@ -414,6 +415,7 @@ std::vector<int64_t> TransportFeedback::GetReceiveDeltasUs() const {
bool TransportFeedback::Parse(const CommonHeader& packet) {
RTC_DCHECK_EQ(packet.type(), kPacketType);
RTC_DCHECK_EQ(packet.fmt(), kFeedbackMessageType);
TRACE_EVENT0("webrtc", "TransportFeedback::Parse");
if (packet.payload_size_bytes() < kMinPayloadSizeBytes) {
LOG(LS_WARNING) << "Buffer too small (" << packet.payload_size_bytes()

View File

@@ -15,6 +15,7 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/trace_event.h"
#include "webrtc/modules/video_coding/packet.h"
namespace webrtc {
@@ -71,6 +72,7 @@ std::vector<NaluInfo> VCMFrameBuffer::GetNaluInfos() const {
}
void VCMFrameBuffer::SetGofInfo(const GofInfoVP9& gof_info, size_t idx) {
TRACE_EVENT0("webrtc", "VCMFrameBuffer::SetGofInfo");
_sessionInfo.SetGofInfo(gof_info, idx);
// TODO(asapersson): Consider adding hdr->VP9.ref_picture_id for testing.
_codecSpecificInfo.codecSpecific.VP9.temporal_idx =
@@ -80,6 +82,7 @@ void VCMFrameBuffer::SetGofInfo(const GofInfoVP9& gof_info, size_t idx) {
}
bool VCMFrameBuffer::IsSessionComplete() const {
TRACE_EVENT0("webrtc", "VCMFrameBuffer::IsSessionComplete");
return _sessionInfo.complete();
}
@@ -89,6 +92,7 @@ VCMFrameBufferEnum VCMFrameBuffer::InsertPacket(
int64_t timeInMs,
VCMDecodeErrorMode decode_error_mode,
const FrameData& frame_data) {
TRACE_EVENT0("webrtc", "VCMFrameBuffer::InsertPacket");
assert(!(NULL == packet.dataPtr && packet.sizeBytes > 0));
if (packet.dataPtr != NULL) {
_payloadType = packet.payloadType;
@@ -176,30 +180,37 @@ VCMFrameBufferEnum VCMFrameBuffer::InsertPacket(
}
int64_t VCMFrameBuffer::LatestPacketTimeMs() const {
TRACE_EVENT0("webrtc", "VCMFrameBuffer::LatestPacketTimeMs");
return _latestPacketTimeMs;
}
void VCMFrameBuffer::IncrementNackCount() {
TRACE_EVENT0("webrtc", "VCMFrameBuffer::IncrementNackCount");
_nackCount++;
}
int16_t VCMFrameBuffer::GetNackCount() const {
TRACE_EVENT0("webrtc", "VCMFrameBuffer::GetNackCount");
return _nackCount;
}
bool VCMFrameBuffer::HaveFirstPacket() const {
TRACE_EVENT0("webrtc", "VCMFrameBuffer::HaveFirstPacket");
return _sessionInfo.HaveFirstPacket();
}
bool VCMFrameBuffer::HaveLastPacket() const {
TRACE_EVENT0("webrtc", "VCMFrameBuffer::HaveLastPacket");
return _sessionInfo.HaveLastPacket();
}
int VCMFrameBuffer::NumPackets() const {
TRACE_EVENT0("webrtc", "VCMFrameBuffer::NumPackets");
return _sessionInfo.NumPackets();
}
void VCMFrameBuffer::Reset() {
TRACE_EVENT0("webrtc", "VCMFrameBuffer::Reset");
_length = 0;
_timeStamp = 0;
_sessionInfo.Reset();
@@ -212,6 +223,7 @@ void VCMFrameBuffer::Reset() {
// Set state of frame
void VCMFrameBuffer::SetState(VCMFrameBufferStateEnum state) {
TRACE_EVENT0("webrtc", "VCMFrameBuffer::SetState");
if (_state == state) {
return;
}
@@ -248,6 +260,7 @@ VCMFrameBufferStateEnum VCMFrameBuffer::GetState() const {
// Get current state of frame
VCMFrameBufferStateEnum VCMFrameBuffer::GetState(uint32_t& timeStamp) const {
TRACE_EVENT0("webrtc", "VCMFrameBuffer::GetState");
timeStamp = TimeStamp();
return GetState();
}
@@ -257,6 +270,7 @@ bool VCMFrameBuffer::IsRetransmitted() const {
}
void VCMFrameBuffer::PrepareForDecode(bool continuous) {
TRACE_EVENT0("webrtc", "VCMFrameBuffer::PrepareForDecode");
size_t bytes_removed = _sessionInfo.MakeDecodable();
_length -= bytes_removed;
// Transfer frame information to EncodedFrame and create any codec

View File

@@ -16,6 +16,7 @@
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/trace_event.h"
#include "webrtc/modules/video_coding/include/video_coding_defines.h"
#include "webrtc/modules/video_coding/jitter_estimator.h"
#include "webrtc/modules/video_coding/timing.h"
@@ -55,6 +56,7 @@ FrameBuffer::~FrameBuffer() {}
FrameBuffer::ReturnReason FrameBuffer::NextFrame(
int64_t max_wait_time_ms,
std::unique_ptr<FrameObject>* frame_out) {
TRACE_EVENT0("webrtc", "FrameBuffer::NextFrame");
int64_t latest_return_time_ms =
clock_->TimeInMilliseconds() + max_wait_time_ms;
int64_t wait_ms = max_wait_time_ms;
@@ -154,22 +156,26 @@ FrameBuffer::ReturnReason FrameBuffer::NextFrame(
}
void FrameBuffer::SetProtectionMode(VCMVideoProtection mode) {
TRACE_EVENT0("webrtc", "FrameBuffer::SetProtectionMode");
rtc::CritScope lock(&crit_);
protection_mode_ = mode;
}
void FrameBuffer::Start() {
TRACE_EVENT0("webrtc", "FrameBuffer::Start");
rtc::CritScope lock(&crit_);
stopped_ = false;
}
void FrameBuffer::Stop() {
TRACE_EVENT0("webrtc", "FrameBuffer::Stop");
rtc::CritScope lock(&crit_);
stopped_ = true;
new_countinuous_frame_event_.Set();
}
int FrameBuffer::InsertFrame(std::unique_ptr<FrameObject> frame) {
TRACE_EVENT0("webrtc", "FrameBuffer::InsertFrame");
rtc::CritScope lock(&crit_);
RTC_DCHECK(frame);
@@ -252,6 +258,7 @@ int FrameBuffer::InsertFrame(std::unique_ptr<FrameObject> frame) {
}
void FrameBuffer::PropagateContinuity(FrameMap::iterator start) {
TRACE_EVENT0("webrtc", "FrameBuffer::PropagateContinuity");
RTC_DCHECK(start->second.continuous);
if (last_continuous_frame_it_ == frames_.end())
last_continuous_frame_it_ = start;
@@ -282,6 +289,7 @@ void FrameBuffer::PropagateContinuity(FrameMap::iterator start) {
}
void FrameBuffer::PropagateDecodability(const FrameInfo& info) {
TRACE_EVENT0("webrtc", "FrameBuffer::PropagateDecodability");
for (size_t d = 0; d < info.num_dependent_frames; ++d) {
auto ref_info = frames_.find(info.dependent_frames[d]);
RTC_DCHECK(ref_info != frames_.end());
@@ -291,6 +299,7 @@ void FrameBuffer::PropagateDecodability(const FrameInfo& info) {
}
void FrameBuffer::AdvanceLastDecodedFrame(FrameMap::iterator decoded) {
TRACE_EVENT0("webrtc", "FrameBuffer::AdvanceLastDecodedFrame");
if (last_decoded_frame_it_ == frames_.end()) {
last_decoded_frame_it_ = frames_.begin();
} else {
@@ -316,6 +325,7 @@ void FrameBuffer::AdvanceLastDecodedFrame(FrameMap::iterator decoded) {
bool FrameBuffer::UpdateFrameInfoWithIncomingFrame(const FrameObject& frame,
FrameMap::iterator info) {
TRACE_EVENT0("webrtc", "FrameBuffer::UpdateFrameInfoWithIncomingFrame");
FrameKey key(frame.picture_id, frame.spatial_layer);
info->second.num_missing_continuous = frame.num_references;
info->second.num_missing_decodable = frame.num_references;
@@ -388,6 +398,7 @@ bool FrameBuffer::UpdateFrameInfoWithIncomingFrame(const FrameObject& frame,
}
void FrameBuffer::UpdateJitterDelay() {
TRACE_EVENT0("webrtc", "FrameBuffer::UpdateJitterDelay");
if (!stats_callback_)
return;
@@ -408,6 +419,7 @@ void FrameBuffer::UpdateJitterDelay() {
}
void FrameBuffer::ClearFramesAndHistory() {
TRACE_EVENT0("webrtc", "FrameBuffer::UpdateJitterDelay");
frames_.clear();
last_decoded_frame_it_ = frames_.end();
last_continuous_frame_it_ = frames_.end();

View File

@@ -325,8 +325,7 @@ int32_t VideoReceiver::RequestKeyFrame() {
// Must be called from inside the receive side critical section.
int32_t VideoReceiver::Decode(const VCMEncodedFrame& frame) {
TRACE_EVENT_ASYNC_STEP1("webrtc", "Video", frame.TimeStamp(), "Decode",
"type", frame.FrameType());
TRACE_EVENT0("webrtc", "VideoReceiver::Decode");
// Change decoder if payload type has changed
_decoder = _codecDataBase.GetDecoder(frame, &_decodedFrameCallback);
if (_decoder == nullptr) {
@@ -356,7 +355,6 @@ int32_t VideoReceiver::Decode(const VCMEncodedFrame& frame) {
rtc::CritScope cs(&process_crit_);
_scheduleKeyRequest = true;
}
TRACE_EVENT_ASYNC_END0("webrtc", "Video", frame.TimeStamp());
return ret;
}
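
The edit in this file trades the per-frame async events for a scoped one: TRACE_EVENT0 opens an RAII slice that covers the whole Decode() call on one thread, whereas the removed TRACE_EVENT_ASYNC_STEP1/TRACE_EVENT_ASYNC_END0 pair is matched by an id (the frame timestamp) and can span functions or threads. A side-by-side sketch of the two shapes, illustrative only:

    #include <cstdint>

    #include "webrtc/base/trace_event.h"

    // Scoped form (what this change uses): a single slice, opened here and
    // closed automatically when the function returns.
    void DecodeScoped() {
      TRACE_EVENT0("webrtc", "VideoReceiver::Decode");
      // ... decode work measured as one slice on this thread ...
    }

    // Async form (what this change removes): separate step/end calls correlated
    // by an id, so the "Video" track can follow a frame across functions.
    void DecodeAsync(uint32_t frame_timestamp) {
      TRACE_EVENT_ASYNC_STEP1("webrtc", "Video", frame_timestamp, "Decode",
                              "type", "key_or_delta" /* illustrative value */);
      // ... decode work ...
      TRACE_EVENT_ASYNC_END0("webrtc", "Video", frame_timestamp);
    }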

View File

@@ -20,6 +20,7 @@
#include "webrtc/base/location.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/optional.h"
#include "webrtc/base/trace_event.h"
#include "webrtc/common_video/h264/profile_level_id.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_receiver.h"
@@ -473,6 +474,7 @@ bool VideoReceiveStream::DecodeThreadFunction(void* ptr) {
}
bool VideoReceiveStream::Decode() {
TRACE_EVENT0("webrtc", "VideoReceiveStream::Decode");
static const int kMaxWaitForFrameMs = 3000;
std::unique_ptr<video_coding::FrameObject> frame;
video_coding::FrameBuffer::ReturnReason res =