Add send frame rate statistics callback

BUG=2235
R=mflodman@webrtc.org, pbos@webrtc.org, stefan@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/4479005

git-svn-id: http://webrtc.googlecode.com/svn/trunk@5213 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
sprang@webrtc.org
2013-12-04 15:09:27 +00:00
parent 9e5b0342f6
commit 71f055fb41
17 changed files with 205 additions and 107 deletions

View File

@ -138,7 +138,6 @@ enum FileFormats
kFileFormatPcm32kHzFile = 9
};
enum ProcessingTypes
{
kPlaybackPerChannel = 0,
@ -148,6 +147,15 @@ enum ProcessingTypes
kRecordingPreprocessing
};
// Media frame types shared by the audio and video send paths. (Moved up in
// this header so the FrameCountObserver interface below can reference it;
// the old copy further down, including the Golden/AltRef values, is removed.)
enum FrameType
{
kFrameEmpty = 0,
kAudioFrameSpeech = 1,
kAudioFrameCN = 2,
kVideoFrameKey = 3, // independent frame
kVideoFrameDelta = 4, // depends on the previous frame
};
// Interface for encrypting and decrypting regular data and rtp/rtcp packets.
// Implement this interface if you wish to provide an encryption scheme to
// the voice or video engines.
@ -302,9 +310,9 @@ class BitrateStatisticsObserver {
class FrameCountObserver {
public:
virtual ~FrameCountObserver() {}
virtual void Notify(const unsigned int key_frames,
const unsigned int delta_frames,
const unsigned int ssrc) = 0;
virtual void FrameCountUpdated(FrameType frame_type,
uint32_t frame_count,
const unsigned int ssrc) = 0;
};
// ==================================================================
@ -322,17 +330,6 @@ struct CodecInst
int rate; // bits/sec unlike {start,min,max}Bitrate elsewhere in this file!
};
enum FrameType
{
kFrameEmpty = 0,
kAudioFrameSpeech = 1,
kAudioFrameCN = 2,
kVideoFrameKey = 3, // independent frame
kVideoFrameDelta = 4, // depends on the previous frame
kVideoFrameGolden = 5, // depends on an old known previous frame
kVideoFrameAltRef = 6
};
// RTP
enum {kRtpCsrcSize = 15}; // RFC 3550 page 13

View File

@ -331,6 +331,10 @@ class RtpRtcp : public Module {
virtual int TimeToSendPadding(int bytes) = 0;
virtual void RegisterSendFrameCountObserver(
FrameCountObserver* observer) = 0;
virtual FrameCountObserver* GetSendFrameCountObserver() const = 0;
/**************************************************************************
*
* RTCP

View File

@ -240,7 +240,10 @@ class MockRtpRtcp : public RtpRtcp {
int32_t());
MOCK_METHOD0(Process,
int32_t());
MOCK_METHOD1(RegisterSendFrameCountObserver,
void(FrameCountObserver*));
MOCK_CONST_METHOD0(GetSendFrameCountObserver,
FrameCountObserver*(void));
// Members.
unsigned int remote_ssrc_;
};

View File

@ -1645,4 +1645,13 @@ uint32_t ModuleRtpRtcpImpl::rtt_ms() const {
return rtt_ms_;
}
// Registers |observer| to be notified with updated per-frame-type send
// counts; simply forwards to the RTP sender, which owns the counters.
// Pass NULL to unregister.
void ModuleRtpRtcpImpl::RegisterSendFrameCountObserver(
FrameCountObserver* observer) {
rtp_sender_.RegisterFrameCountObserver(observer);
}
// Returns the currently registered observer (NULL if none). Used by
// ViEChannel to copy the registration onto newly created simulcast modules.
FrameCountObserver* ModuleRtpRtcpImpl::GetSendFrameCountObserver() const {
return rtp_sender_.GetFrameCountObserver();
}
} // namespace webrtc

View File

@ -363,6 +363,10 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
void OnRequestSendReport();
virtual void RegisterSendFrameCountObserver(
FrameCountObserver* observer) OVERRIDE;
virtual FrameCountObserver* GetSendFrameCountObserver() const OVERRIDE;
protected:
void RegisterChildModule(RtpRtcp* module);

View File

@ -32,8 +32,6 @@ const char* FrameTypeToString(const FrameType frame_type) {
case kAudioFrameCN: return "audio_cn";
case kVideoFrameKey: return "video_key";
case kVideoFrameDelta: return "video_delta";
case kVideoFrameGolden: return "video_golden";
case kVideoFrameAltRef: return "video_altref";
}
return "";
}
@ -61,7 +59,8 @@ RTPSender::RTPSender(const int32_t id, const bool audio, Clock *clock,
remote_ssrc_(0), sequence_number_forced_(false), ssrc_forced_(false),
timestamp_(0), capture_time_ms_(0), last_timestamp_time_ms_(0),
last_packet_marker_bit_(false), num_csrcs_(0), csrcs_(),
include_csrcs_(true), rtx_(kRtxOff), payload_type_rtx_(-1) {
include_csrcs_(true), rtx_(kRtxOff), payload_type_rtx_(-1),
frame_counts_(), frame_count_observer_(NULL) {
memset(nack_byte_count_times_, 0, sizeof(nack_byte_count_times_));
memset(nack_byte_count_, 0, sizeof(nack_byte_count_));
memset(csrcs_, 0, sizeof(csrcs_));
@ -359,14 +358,15 @@ int32_t RTPSender::SendOutgoingData(
return -1;
}
uint32_t ret_val;
if (audio_configured_) {
TRACE_EVENT_ASYNC_STEP1("webrtc", "Audio", capture_timestamp,
"Send", "type", FrameTypeToString(frame_type));
assert(frame_type == kAudioFrameSpeech || frame_type == kAudioFrameCN ||
frame_type == kFrameEmpty);
return audio_->SendAudio(frame_type, payload_type, capture_timestamp,
payload_data, payload_size, fragmentation);
ret_val = audio_->SendAudio(frame_type, payload_type, capture_timestamp,
payload_data, payload_size, fragmentation);
} else {
TRACE_EVENT_ASYNC_STEP1("webrtc", "Video", capture_time_ms,
"Send", "type", FrameTypeToString(frame_type));
@ -380,11 +380,23 @@ int32_t RTPSender::SendOutgoingData(
return SendPaddingAccordingToBitrate(payload_type, capture_timestamp,
capture_time_ms) ? 0 : -1;
}
return video_->SendVideo(video_type, frame_type, payload_type,
capture_timestamp, capture_time_ms, payload_data,
payload_size, fragmentation, codec_info,
rtp_type_hdr);
ret_val = video_->SendVideo(video_type, frame_type, payload_type,
capture_timestamp, capture_time_ms,
payload_data, payload_size,
fragmentation, codec_info,
rtp_type_hdr);
}
CriticalSectionScoped cs(statistics_crit_.get());
uint32_t frame_count = ++frame_counts_[frame_type];
if (frame_count_observer_) {
frame_count_observer_->FrameCountUpdated(frame_type,
frame_count,
ssrc_);
}
return ret_val;
}
int RTPSender::SendRedundantPayloads(int payload_type, int bytes_to_send) {
@ -1478,4 +1490,16 @@ void RTPSender::BuildRtxPacket(uint8_t* buffer, uint16_t* length,
*length += 2;
}
// Sets the callback invoked from SendOutgoingData after each sent frame,
// carrying the cumulative count for that frame's type. Guarded by
// |statistics_crit_| because the pointer is read on the send path.
// Registering a non-NULL observer while one is already set is a usage
// error (debug-only assert below); unregister with NULL first.
void RTPSender::RegisterFrameCountObserver(FrameCountObserver* observer) {
CriticalSectionScoped cs(statistics_crit_.get());
if (observer != NULL)
assert(frame_count_observer_ == NULL);
frame_count_observer_ = observer;
}
// Returns the currently registered frame-count observer, or NULL if none.
FrameCountObserver* RTPSender::GetFrameCountObserver() const {
CriticalSectionScoped cs(statistics_crit_.get());
return frame_count_observer_;
}
} // namespace webrtc

View File

@ -260,6 +260,9 @@ class RTPSender : public Bitrate, public RTPSenderInterface {
int32_t SetFecParameters(const FecProtectionParams *delta_params,
const FecProtectionParams *key_params);
virtual void RegisterFrameCountObserver(FrameCountObserver* observer);
virtual FrameCountObserver* GetFrameCountObserver() const;
protected:
int32_t CheckPayloadType(const int8_t payload_type,
RtpVideoCodecTypes *video_type);
@ -346,6 +349,8 @@ class RTPSender : public Bitrate, public RTPSenderInterface {
int rtx_;
uint32_t ssrc_rtx_;
int payload_type_rtx_;
std::map<FrameType, uint32_t> frame_counts_;
FrameCountObserver* frame_count_observer_;
};
} // namespace webrtc

View File

@ -717,6 +717,68 @@ TEST_F(RtpSenderTest, SendGenericVideo) {
EXPECT_EQ(0, memcmp(payload, payload_data, sizeof(payload)));
}
// Verifies that FrameCountObserver::FrameCountUpdated is invoked once per
// sent frame, with the cumulative per-type count and the sender's SSRC.
TEST_F(RtpSenderTest, FrameCountCallbacks) {
// Local observer that records the latest callback arguments.
class TestCallback : public FrameCountObserver {
public:
TestCallback()
: FrameCountObserver(), num_calls_(0), ssrc_(0),
key_frames_(0), delta_frames_(0) {}
virtual ~TestCallback() {}
virtual void FrameCountUpdated(FrameType frame_type,
uint32_t frame_count,
const unsigned int ssrc) {
++num_calls_;
ssrc_ = ssrc;
switch (frame_type) {
case kVideoFrameDelta:
delta_frames_ = frame_count;
break;
case kVideoFrameKey:
key_frames_ = frame_count;
break;
default:
break;
}
}
uint32_t num_calls_;
uint32_t ssrc_;
uint32_t key_frames_;
uint32_t delta_frames_;
} callback;
char payload_name[RTP_PAYLOAD_NAME_SIZE] = "GENERIC";
const uint8_t payload_type = 127;
ASSERT_EQ(0, rtp_sender_->RegisterPayload(payload_name, payload_type, 90000,
0, 1500));
uint8_t payload[] = {47, 11, 32, 93, 89};
rtp_sender_->SetStorePacketsStatus(true, 1);
uint32_t ssrc = rtp_sender_->SSRC();
rtp_sender_->RegisterFrameCountObserver(&callback);
// One key frame: exactly one callback reporting key_frames_ == 1.
ASSERT_EQ(0, rtp_sender_->SendOutgoingData(kVideoFrameKey, payload_type, 1234,
4321, payload, sizeof(payload),
NULL));
EXPECT_EQ(1U, callback.num_calls_);
EXPECT_EQ(ssrc, callback.ssrc_);
EXPECT_EQ(1U, callback.key_frames_);
EXPECT_EQ(0U, callback.delta_frames_);
// A following delta frame increments only the delta count.
ASSERT_EQ(0, rtp_sender_->SendOutgoingData(kVideoFrameDelta,
payload_type, 1234, 4321, payload,
sizeof(payload), NULL));
EXPECT_EQ(2U, callback.num_calls_);
EXPECT_EQ(ssrc, callback.ssrc_);
EXPECT_EQ(1U, callback.key_frames_);
EXPECT_EQ(1U, callback.delta_frames_);
// Unregister before |callback| goes out of scope (sender asserts on
// re-registration otherwise).
rtp_sender_->RegisterFrameCountObserver(NULL);
}
class RtpSenderAudioTest : public RtpSenderTest {
protected:
RtpSenderAudioTest() {}

View File

@ -174,33 +174,16 @@ VCMEncodedFrame::VerifyAndAllocate(const uint32_t minimumSize)
webrtc::FrameType VCMEncodedFrame::ConvertFrameType(VideoFrameType frameType)
{
switch(frameType)
{
switch(frameType) {
case kKeyFrame:
{
return kVideoFrameKey;
}
return kVideoFrameKey;
case kDeltaFrame:
{
return kVideoFrameDelta;
}
case kGoldenFrame:
{
return kVideoFrameGolden;
}
case kAltRefFrame:
{
return kVideoFrameAltRef;
}
return kVideoFrameDelta;
case kSkipFrame:
{
return kFrameEmpty;
}
return kFrameEmpty;
default:
{
return kVideoFrameDelta;
}
}
return kVideoFrameDelta;
}
}
VideoFrameType VCMEncodedFrame::ConvertFrameType(webrtc::FrameType frame_type) {
@ -209,10 +192,6 @@ VideoFrameType VCMEncodedFrame::ConvertFrameType(webrtc::FrameType frame_type) {
return kKeyFrame;
case kVideoFrameDelta:
return kDeltaFrame;
case kVideoFrameGolden:
return kGoldenFrame;
case kVideoFrameAltRef:
return kAltRefFrame;
default:
assert(false);
return kDeltaFrame;

View File

@ -147,7 +147,6 @@ VCMJitterBuffer::VCMJitterBuffer(Clock* clock,
incomplete_frames_(),
last_decoded_state_(),
first_packet_since_reset_(true),
receive_statistics_(),
incoming_frame_rate_(0),
incoming_frame_count_(0),
time_last_incoming_frame_count_(0),
@ -172,7 +171,6 @@ VCMJitterBuffer::VCMJitterBuffer(Clock* clock,
average_packets_per_frame_(0.0f),
frame_counter_(0) {
memset(frame_buffers_, 0, sizeof(frame_buffers_));
memset(receive_statistics_, 0, sizeof(receive_statistics_));
for (int i = 0; i < kStartNumberOfFrames; i++) {
frame_buffers_[i] = new VCMFrameBuffer();
@ -218,8 +216,7 @@ void VCMJitterBuffer::CopyFrom(const VCMJitterBuffer& rhs) {
assert(max_nack_list_size_ == rhs.max_nack_list_size_);
assert(max_packet_age_to_nack_ == rhs.max_packet_age_to_nack_);
assert(max_incomplete_time_ms_ == rhs.max_incomplete_time_ms_);
memcpy(receive_statistics_, rhs.receive_statistics_,
sizeof(receive_statistics_));
receive_statistics_ = rhs.receive_statistics_;
nack_seq_nums_.resize(rhs.nack_seq_nums_.size());
missing_sequence_numbers_ = rhs.missing_sequence_numbers_;
latest_received_sequence_number_ = rhs.latest_received_sequence_number_;
@ -264,7 +261,7 @@ void VCMJitterBuffer::Start() {
incoming_bit_count_ = 0;
incoming_bit_rate_ = 0;
time_last_incoming_frame_count_ = clock_->TimeInMilliseconds();
memset(receive_statistics_, 0, sizeof(receive_statistics_));
receive_statistics_.clear();
num_consecutive_old_frames_ = 0;
num_consecutive_old_packets_ = 0;
@ -336,13 +333,9 @@ void VCMJitterBuffer::Flush() {
}
// Get received key and delta frames
void VCMJitterBuffer::FrameStatistics(uint32_t* received_delta_frames,
uint32_t* received_key_frames) const {
assert(received_delta_frames);
assert(received_key_frames);
std::map<FrameType, uint32_t> VCMJitterBuffer::FrameStatistics() const {
CriticalSectionScoped cs(crit_sect_);
*received_delta_frames = receive_statistics_[1] + receive_statistics_[3];
*received_key_frames = receive_statistics_[0] + receive_statistics_[2];
return receive_statistics_;
}
int VCMJitterBuffer::num_discarded_packets() const {
@ -1206,26 +1199,7 @@ void VCMJitterBuffer::CountFrame(const VCMFrameBuffer& frame) {
// Update receive statistics. We count all layers, thus when you use layers
// adding all key and delta frames might differ from frame count.
if (frame.IsSessionComplete()) {
switch (frame.FrameType()) {
case kVideoFrameKey: {
receive_statistics_[0]++;
break;
}
case kVideoFrameDelta: {
receive_statistics_[1]++;
break;
}
case kVideoFrameGolden: {
receive_statistics_[2]++;
break;
}
case kVideoFrameAltRef: {
receive_statistics_[3]++;
break;
}
default:
assert(false);
}
++receive_statistics_[frame.FrameType()];
}
}

View File

@ -98,10 +98,9 @@ class VCMJitterBuffer {
// Empty the jitter buffer of all its data.
void Flush();
// Get the number of received key and delta frames since the jitter buffer
// Get the number of received frames, by type, since the jitter buffer
// was started.
void FrameStatistics(uint32_t* received_delta_frames,
uint32_t* received_key_frames) const;
std::map<FrameType, uint32_t> FrameStatistics() const;
// The number of packets discarded by the jitter buffer because the decoder
// won't be able to decode them.
@ -297,8 +296,8 @@ class VCMJitterBuffer {
bool first_packet_since_reset_;
// Statistics.
// Frame counter for each type (key, delta, golden, key-delta).
unsigned int receive_statistics_[4];
// Frame counts for each type (key, delta, ...)
std::map<FrameType, uint32_t> receive_statistics_;
// Latest calculated frame rates of incoming stream.
unsigned int incoming_frame_rate_;
unsigned int incoming_frame_count_;

View File

@ -1632,11 +1632,9 @@ TEST_F(TestRunningJitterBuffer, EmptyPackets) {
}
TEST_F(TestRunningJitterBuffer, StatisticsTest) {
uint32_t num_delta_frames = 0;
uint32_t num_key_frames = 0;
jitter_buffer_->FrameStatistics(&num_delta_frames, &num_key_frames);
EXPECT_EQ(0u, num_delta_frames);
EXPECT_EQ(0u, num_key_frames);
std::map<FrameType, uint32_t> frame_stats(jitter_buffer_->FrameStatistics());
EXPECT_EQ(0u, frame_stats[kVideoFrameDelta]);
EXPECT_EQ(0u, frame_stats[kVideoFrameKey]);
uint32_t framerate = 0;
uint32_t bitrate = 0;
@ -1654,9 +1652,9 @@ TEST_F(TestRunningJitterBuffer, StatisticsTest) {
// being decoded.
EXPECT_TRUE(DecodeCompleteFrame());
EXPECT_TRUE(DecodeCompleteFrame());
jitter_buffer_->FrameStatistics(&num_delta_frames, &num_key_frames);
EXPECT_EQ(3u, num_delta_frames);
EXPECT_EQ(2u, num_key_frames);
frame_stats = jitter_buffer_->FrameStatistics();
EXPECT_EQ(3u, frame_stats[kVideoFrameDelta]);
EXPECT_EQ(2u, frame_stats[kVideoFrameKey]);
// Insert 20 more frames to get estimates of bitrate and framerate over
// 1 second.

View File

@ -291,8 +291,7 @@ int32_t MediaOptimization::UpdateWithEncodedData(int encoded_length,
UpdateSentBitrate(now_ms);
UpdateSentFramerate();
if (encoded_length > 0) {
const bool delta_frame = (encoded_frame_type != kVideoFrameKey &&
encoded_frame_type != kVideoFrameGolden);
const bool delta_frame = (encoded_frame_type != kVideoFrameKey);
frame_dropper_->Fill(encoded_length, delta_frame);
if (max_payload_size_ > 0 && encoded_length > 0) {

View File

@ -238,8 +238,9 @@ void VCMReceiver::ReceiveStatistics(uint32_t* bitrate,
void VCMReceiver::ReceivedFrameCount(VCMFrameCount* frame_count) const {
assert(frame_count);
jitter_buffer_.FrameStatistics(&frame_count->numDeltaFrames,
&frame_count->numKeyFrames);
std::map<FrameType, uint32_t> counts(jitter_buffer_.FrameStatistics());
frame_count->numDeltaFrames = counts[kVideoFrameDelta];
frame_count->numKeyFrames = counts[kVideoFrameKey];
}
uint32_t VCMReceiver::DiscardedPackets() const {

View File

@ -356,6 +356,7 @@ int32_t ViEChannel::SetSendCodec(const VideoCodec& video_codec,
module_process_thread_.DeRegisterModule(rtp_rtcp);
rtp_rtcp->SetSendingStatus(false);
rtp_rtcp->SetSendingMediaStatus(false);
rtp_rtcp->RegisterSendFrameCountObserver(NULL);
simulcast_rtp_rtcp_.pop_back();
removed_rtp_rtcp_.push_front(rtp_rtcp);
}
@ -410,6 +411,8 @@ int32_t ViEChannel::SetSendCodec(const VideoCodec& video_codec,
kRtpExtensionAbsoluteSendTime);
}
rtp_rtcp->SetRtcpXrRrtrStatus(rtp_rtcp_->RtcpXrRrtrStatus());
rtp_rtcp->RegisterSendFrameCountObserver(
rtp_rtcp_->GetSendFrameCountObserver());
}
// |RegisterSimulcastRtpRtcpModules| resets all old weak pointers and old
// modules can be deleted after this step.
@ -420,6 +423,7 @@ int32_t ViEChannel::SetSendCodec(const VideoCodec& video_codec,
module_process_thread_.DeRegisterModule(rtp_rtcp);
rtp_rtcp->SetSendingStatus(false);
rtp_rtcp->SetSendingMediaStatus(false);
rtp_rtcp->RegisterSendFrameCountObserver(NULL);
simulcast_rtp_rtcp_.pop_back();
removed_rtp_rtcp_.push_front(rtp_rtcp);
}
@ -1971,4 +1975,15 @@ void ViEChannel::ResetStatistics(uint32_t ssrc) {
statistician->ResetStatistics();
}
// Registers |observer| on the default RTP/RTCP module and on every active
// simulcast module, so frame counts are reported for each send SSRC.
// Pass NULL to unregister everywhere.
// NOTE(review): the default module is registered before |rtp_rtcp_cs_| is
// taken — presumably |rtp_rtcp_| itself is safe to call unlocked; confirm.
void ViEChannel::RegisterSendFrameCountObserver(
FrameCountObserver* observer) {
rtp_rtcp_->RegisterSendFrameCountObserver(observer);
// The lock protects iteration over |simulcast_rtp_rtcp_|.
CriticalSectionScoped cs(rtp_rtcp_cs_.get());
for (std::list<RtpRtcp*>::iterator it = simulcast_rtp_rtcp_.begin();
it != simulcast_rtp_rtcp_.end();
it++) {
(*it)->RegisterSendFrameCountObserver(observer);
}
}
} // namespace webrtc

View File

@ -326,6 +326,8 @@ class ViEChannel
void RegisterPreDecodeImageCallback(
EncodedImageCallback* pre_decode_callback);
void RegisterSendFrameCountObserver(FrameCountObserver* observer);
protected:
static bool ChannelDecodeThreadFunction(void* obj);
bool ChannelDecodeProcess();

View File

@ -1178,16 +1178,39 @@ int ViERTP_RTCPImpl::DeregisterSendBitrateObserver(
}
int ViERTP_RTCPImpl::RegisterSendFrameCountObserver(
int channel, FrameCountObserver* callback) {
// TODO(sprang): Implement
return -1;
int video_channel, FrameCountObserver* callback) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(channel: %d)", __FUNCTION__, video_channel);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
vie_channel->RegisterSendFrameCountObserver(callback);
return 0;
}
int ViERTP_RTCPImpl::DeregisterSendFrameCountObserver(
int channel, FrameCountObserver* callback) {
// TODO(sprang): Implement
return -1;
int video_channel, FrameCountObserver* callback) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(channel: %d)", __FUNCTION__, video_channel);
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEChannel* vie_channel = cs.Channel(video_channel);
if (!vie_channel) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
return -1;
}
vie_channel->RegisterSendFrameCountObserver(NULL);
return 0;
}
} // namespace webrtc