Run FullStack tests without render windows.

Also disables the test on valgrind platforms, since it has no chance to keep up there.

BUG=2278
R=stefan@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/2159008

git-svn-id: http://webrtc.googlecode.com/svn/trunk@4972 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
pbos@webrtc.org
2013-10-16 11:05:37 +00:00
parent 5ed4f46af1
commit 9401524211
3 changed files with 164 additions and 52 deletions

View File

@ -0,0 +1,2 @@
# Tests that are too slow.
FullStack/*

View File

@ -82,6 +82,11 @@ FrameGeneratorCapturer::~FrameGeneratorCapturer() {
}
bool FrameGeneratorCapturer::Init() {
// This check is added because frame_generator_ might be file based and should
// not crash because a file moved.
if (frame_generator_.get() == NULL)
return false;
if (!tick_->StartTimer(true, 1000 / target_fps_))
return false;
thread_.reset(ThreadWrapper::CreateThread(FrameGeneratorCapturer::Run,

View File

@ -21,6 +21,7 @@
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/event_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/system_wrappers/interface/sleep.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/typedefs.h"
#include "webrtc/video_engine/new_include/call.h"
@ -69,16 +70,15 @@ class VideoAnalyzer : public PacketReceiver,
public:
VideoAnalyzer(VideoSendStreamInput* input,
Transport* transport,
VideoRenderer* loopback_video,
const char* test_label,
double avg_psnr_threshold,
double avg_ssim_threshold,
uint64_t duration_frames)
int duration_frames)
: input_(input),
transport_(transport),
renderer_(loopback_video),
receiver_(NULL),
test_label_(test_label),
dropped_frames_(0),
rtp_timestamp_delta_(0),
first_send_frame_(NULL),
last_render_time_(0),
@ -86,9 +86,17 @@ class VideoAnalyzer : public PacketReceiver,
avg_ssim_threshold_(avg_ssim_threshold),
frames_left_(duration_frames),
crit_(CriticalSectionWrapper::CreateCriticalSection()),
trigger_(EventWrapper::Create()) {}
comparison_lock_(CriticalSectionWrapper::CreateCriticalSection()),
comparison_thread_(ThreadWrapper::CreateThread(&FrameComparisonThread,
this)),
trigger_(EventWrapper::Create()) {
unsigned int id;
EXPECT_TRUE(comparison_thread_->Start(id));
}
~VideoAnalyzer() {
EXPECT_TRUE(comparison_thread_->Stop());
while (!frames_.empty()) {
delete frames_.back();
frames_.pop_back();
@ -99,6 +107,8 @@ class VideoAnalyzer : public PacketReceiver,
}
}
virtual void SetReceiver(PacketReceiver* receiver) { receiver_ = receiver; }
virtual bool DeliverPacket(const uint8_t* packet, size_t length) OVERRIDE {
scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
RTPHeader header;
@ -164,12 +174,15 @@ class VideoAnalyzer : public PacketReceiver,
virtual void RenderFrame(const I420VideoFrame& video_frame,
int time_to_render_ms) OVERRIDE {
int64_t render_time_ms =
Clock::GetRealTimeClock()->CurrentNtpInMilliseconds();
uint32_t send_timestamp = video_frame.timestamp() - rtp_timestamp_delta_;
{
CriticalSectionScoped cs(crit_.get());
while (frames_.front()->timestamp() < send_timestamp) {
AddFrameComparison(frames_.front(), &last_rendered_frame_, true);
AddFrameComparison(
frames_.front(), &last_rendered_frame_, true, render_time_ms);
frame_pool_.push_back(frames_.front());
frames_.pop_front();
}
@ -177,56 +190,150 @@ class VideoAnalyzer : public PacketReceiver,
I420VideoFrame* reference_frame = frames_.front();
frames_.pop_front();
assert(reference_frame != NULL);
EXPECT_EQ(reference_frame->timestamp(), send_timestamp);
assert(reference_frame->timestamp() == send_timestamp);
AddFrameComparison(reference_frame, &video_frame, false);
AddFrameComparison(reference_frame, &video_frame, false, render_time_ms);
frame_pool_.push_back(reference_frame);
}
last_rendered_frame_.CopyFrame(video_frame);
}
void Wait() { trigger_->Wait(120 * 1000); }
VideoSendStreamInput* input_;
Transport* transport_;
PacketReceiver* receiver_;
private:
struct FrameComparison {
FrameComparison(const I420VideoFrame* reference,
const I420VideoFrame* render,
bool dropped,
int64_t send_time_ms,
int64_t recv_time_ms,
int64_t render_time_ms)
: dropped(dropped),
send_time_ms(send_time_ms),
recv_time_ms(recv_time_ms),
render_time_ms(render_time_ms) {
this->reference.CopyFrame(*reference);
this->render.CopyFrame(*render);
}
FrameComparison(const FrameComparison& compare)
: dropped(compare.dropped),
send_time_ms(compare.send_time_ms),
recv_time_ms(compare.recv_time_ms),
render_time_ms(compare.render_time_ms) {
this->reference.CopyFrame(compare.reference);
this->render.CopyFrame(compare.render);
}
~FrameComparison() {}
I420VideoFrame reference;
I420VideoFrame render;
bool dropped;
int64_t send_time_ms;
int64_t recv_time_ms;
int64_t render_time_ms;
};
void AddFrameComparison(const I420VideoFrame* reference,
const I420VideoFrame* render,
bool dropped,
int64_t render_time_ms) {
int64_t send_time_ms = send_times_[reference->timestamp()];
send_times_.erase(reference->timestamp());
int64_t recv_time_ms = recv_times_[reference->timestamp()];
recv_times_.erase(reference->timestamp());
CriticalSectionScoped crit(comparison_lock_.get());
comparisons_.push_back(FrameComparison(reference,
render,
dropped,
send_time_ms,
recv_time_ms,
render_time_ms));
}
static bool FrameComparisonThread(void* obj) {
return static_cast<VideoAnalyzer*>(obj)->CompareFrames();
}
bool CompareFrames() {
assert(frames_left_ > 0);
I420VideoFrame reference;
I420VideoFrame render;
bool dropped;
int64_t send_time_ms;
int64_t recv_time_ms;
int64_t render_time_ms;
SleepMs(10);
while (true) {
{
CriticalSectionScoped crit(comparison_lock_.get());
if (comparisons_.empty())
return true;
reference.SwapFrame(&comparisons_.front().reference);
render.SwapFrame(&comparisons_.front().render);
dropped = comparisons_.front().dropped;
send_time_ms = comparisons_.front().send_time_ms;
recv_time_ms = comparisons_.front().recv_time_ms;
render_time_ms = comparisons_.front().render_time_ms;
comparisons_.pop_front();
}
PerformFrameComparison(&reference,
&render,
dropped,
send_time_ms,
recv_time_ms,
render_time_ms);
if (--frames_left_ == 0) {
PrintResult("psnr", psnr_, " dB");
PrintResult("ssim", ssim_, "");
PrintResult("sender_time", sender_time_, " ms");
printf(
"RESULT dropped_frames: %s = %d\n", test_label_, dropped_frames_);
PrintResult("receiver_time", receiver_time_, " ms");
PrintResult("total_delay_incl_network", end_to_end_, " ms");
PrintResult("time_between_rendered_frames", rendered_delta_, " ms");
EXPECT_GT(psnr_.Mean(), avg_psnr_threshold_);
EXPECT_GT(ssim_.Mean(), avg_ssim_threshold_);
trigger_->Set();
return false;
}
}
renderer_->RenderFrame(video_frame, time_to_render_ms);
last_rendered_frame_.CopyFrame(video_frame);
}
void Wait() { trigger_->Wait(WEBRTC_EVENT_INFINITE); }
VideoSendStreamInput* input_;
Transport* transport_;
VideoRenderer* renderer_;
PacketReceiver* receiver_;
private:
void AddFrameComparison(const I420VideoFrame* reference_frame,
const I420VideoFrame* render,
bool dropped) {
int64_t render_time = Clock::GetRealTimeClock()->CurrentNtpInMilliseconds();
psnr_.AddSample(I420PSNR(reference_frame, render));
ssim_.AddSample(I420SSIM(reference_frame, render));
if (dropped)
void PerformFrameComparison(const I420VideoFrame* reference,
const I420VideoFrame* render,
bool dropped,
int64_t send_time_ms,
int64_t recv_time_ms,
int64_t render_time_ms) {
psnr_.AddSample(I420PSNR(reference, render));
ssim_.AddSample(I420SSIM(reference, render));
if (dropped) {
++dropped_frames_;
return;
}
if (last_render_time_ != 0)
rendered_delta_.AddSample(render_time - last_render_time_);
last_render_time_ = render_time;
rendered_delta_.AddSample(render_time_ms - last_render_time_);
last_render_time_ = render_time_ms;
int64_t input_time = reference_frame->render_time_ms();
int64_t send_time = send_times_[reference_frame->timestamp()];
send_times_.erase(reference_frame->timestamp());
sender_time_.AddSample(send_time - input_time);
int64_t recv_time = recv_times_[reference_frame->timestamp()];
recv_times_.erase(reference_frame->timestamp());
receiver_time_.AddSample(render_time - recv_time);
end_to_end_.AddSample(render_time - input_time);
int64_t input_time_ms = reference->render_time_ms();
sender_time_.AddSample(send_time_ms - input_time_ms);
receiver_time_.AddSample(render_time_ms - recv_time_ms);
end_to_end_.AddSample(render_time_ms - input_time_ms);
}
void PrintResult(const char* result_type,
@ -248,6 +355,7 @@ class VideoAnalyzer : public PacketReceiver,
test::Statistics end_to_end_;
test::Statistics rendered_delta_;
int dropped_frames_;
std::deque<I420VideoFrame*> frames_;
std::deque<I420VideoFrame*> frame_pool_;
I420VideoFrame last_rendered_frame_;
@ -258,40 +366,34 @@ class VideoAnalyzer : public PacketReceiver,
int64_t last_render_time_;
double avg_psnr_threshold_;
double avg_ssim_threshold_;
uint32_t frames_left_;
int frames_left_;
scoped_ptr<CriticalSectionWrapper> crit_;
scoped_ptr<CriticalSectionWrapper> comparison_lock_;
scoped_ptr<ThreadWrapper> comparison_thread_;
std::deque<FrameComparison> comparisons_;
scoped_ptr<EventWrapper> trigger_;
};
TEST_P(FullStackTest, DISABLED_NoPacketLoss) {
TEST_P(FullStackTest, NoPacketLoss) {
FullStackTestParams params = GetParam();
scoped_ptr<test::VideoRenderer> local_preview(test::VideoRenderer::Create(
"Local Preview", params.clip.width, params.clip.height));
scoped_ptr<test::VideoRenderer> loopback_video(test::VideoRenderer::Create(
"Loopback Video", params.clip.width, params.clip.height));
test::DirectTransport transport;
VideoAnalyzer analyzer(
NULL,
&transport,
loopback_video.get(),
params.test_label,
params.avg_psnr_threshold,
params.avg_ssim_threshold,
static_cast<uint64_t>(FLAGS_seconds * params.clip.fps));
VideoAnalyzer analyzer(NULL,
&transport,
params.test_label,
params.avg_psnr_threshold,
params.avg_ssim_threshold,
FLAGS_seconds * params.clip.fps);
Call::Config call_config(&analyzer);
scoped_ptr<Call> call(Call::Create(call_config));
analyzer.receiver_ = call->Receiver();
analyzer.SetReceiver(call->Receiver());
transport.SetReceiver(&analyzer);
VideoSendStream::Config send_config = call->GetDefaultSendConfig();
test::GenerateRandomSsrcs(&send_config, &reserved_ssrcs_);
send_config.local_renderer = local_preview.get();
// TODO(pbos): static_cast shouldn't be required after mflodman refactors the
// VideoCodec struct.
send_config.codec.width = static_cast<uint16_t>(params.clip.width);
@ -311,6 +413,9 @@ TEST_P(FullStackTest, DISABLED_NoPacketLoss) {
params.clip.height,
params.clip.fps,
Clock::GetRealTimeClock()));
ASSERT_TRUE(file_capturer.get() != NULL)
<< "Could not create capturer for " << params.clip.name
<< ".yuv. Is this resource file present?";
VideoReceiveStream::Config receive_config = call->GetDefaultReceiveConfig();
receive_config.rtp.ssrc = send_config.rtp.ssrcs[0];