Use microsecond timestamp in cricket::VideoFrame.
BUG=webrtc:5740
Review URL: https://codereview.webrtc.org/1865283002
Cr-Commit-Position: refs/heads/master@{#12348}
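For orientation before the diff: a minimal sketch, not part of the patch, of how a caller stamps a frame after this change. The buffer setup, constructor order, and constant names are taken from the hunks below; ExampleStampFrame and the include-free form are illustrative only.

// Sketch only; the usual webrtc/cricket includes are omitted here.
void ExampleStampFrame() {  // hypothetical helper
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
      new rtc::RefCountedObject<webrtc::I420Buffer>(640, 480));
  // Preferred constructor order after this change: buffer, rotation,
  // timestamp in microseconds on the same monotonic clock as rtc::TimeMicros().
  cricket::WebRtcVideoFrame frame(buffer, webrtc::kVideoRotation_0,
                                  rtc::TimeMicros());
  int64_t us = frame.timestamp_us();  // new microsecond accessor
  int64_t ns = frame.GetTimeStamp();  // deprecated wrapper, equals us * 1000
  (void)us;
  (void)ns;
}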
@@ -31,7 +31,7 @@ class FakeVideoCapturer : public cricket::VideoCapturer {
  public:
   FakeVideoCapturer(bool is_screencast)
       : running_(false),
-        initial_unix_timestamp_(time(NULL) * rtc::kNumNanosecsPerSec),
+        initial_timestamp_(rtc::TimeNanos()),
         next_timestamp_(rtc::kNumNanosecsPerMillisec),
         is_screencast_(is_screencast),
         rotation_(webrtc::kVideoRotation_0) {
@@ -99,7 +99,7 @@ class FakeVideoCapturer : public cricket::VideoCapturer {
     frame.height = height;
     frame.fourcc = fourcc;
     frame.data_size = size;
-    frame.time_stamp = initial_unix_timestamp_ + next_timestamp_;
+    frame.time_stamp = initial_timestamp_ + next_timestamp_;
     next_timestamp_ += timestamp_interval;
 
     std::unique_ptr<char[]> data(new char[size]);
@@ -153,7 +153,7 @@ class FakeVideoCapturer : public cricket::VideoCapturer {
 
  private:
   bool running_;
-  int64_t initial_unix_timestamp_;
+  int64_t initial_timestamp_;
   int64_t next_timestamp_;
   const bool is_screencast_;
   webrtc::VideoRotation rotation_;
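The fake capturer's base timestamp moves from a wall-clock epoch value to rtc::TimeNanos(). A short sketch of the consequence, assuming (as the new videoframe.h comment later in this patch states for timestamp_us()) that rtc::TimeNanos() and rtc::TimeMicros() read the same system monotonic clock; ExampleCaptureTimestamp is an illustrative name only:

void ExampleCaptureTimestamp() {  // hypothetical helper
  // CapturedFrame::time_stamp stays in nanoseconds, but is now relative to
  // the monotonic clock, so the nanosecond-to-microsecond division done later
  // in this patch lands on the timestamp_us() timebase.
  int64_t capture_time_ns = rtc::TimeNanos() + rtc::kNumNanosecsPerMillisec;
  int64_t capture_time_us = capture_time_ns / rtc::kNumNanosecsPerMicrosec;
  (void)capture_time_us;  // directly comparable with rtc::TimeMicros()
}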
@@ -99,13 +99,13 @@ const cricket::VideoFrame& VideoBroadcaster::GetBlackFrame(
   if (black_frame_ && black_frame_->width() == frame.width() &&
       black_frame_->height() == frame.height() &&
       black_frame_->rotation() == frame.rotation()) {
-    black_frame_->SetTimeStamp(frame.GetTimeStamp());
+    black_frame_->set_timestamp_us(frame.timestamp_us());
     return *black_frame_;
   }
   black_frame_.reset(new cricket::WebRtcVideoFrame(
-      new rtc::RefCountedObject<webrtc::I420Buffer>(
-          frame.width(), frame.height()),
-      frame.GetTimeStamp(), frame.rotation()));
+      new rtc::RefCountedObject<webrtc::I420Buffer>(frame.width(),
+                                                    frame.height()),
+      frame.rotation(), frame.timestamp_us()));
   black_frame_->SetToBlack();
   return *black_frame_;
 }
@@ -135,14 +135,14 @@ TEST(VideoBroadcasterTest, SinkWantsBlackFrames) {
   broadcaster.AddOrUpdateSink(&sink2, wants2);
 
   cricket::WebRtcVideoFrame frame1;
-  frame1.InitToBlack(100, 200, 10 /*ts*/);
+  frame1.InitToBlack(100, 200, 10000 /*ts*/);
   // Make it not all-black
   frame1.GetUPlane()[0] = 0;
   broadcaster.OnFrame(frame1);
   EXPECT_TRUE(sink1.black_frame());
-  EXPECT_EQ(10, sink1.timestamp());
+  EXPECT_EQ(10000, sink1.timestamp());
   EXPECT_FALSE(sink2.black_frame());
-  EXPECT_EQ(10, sink2.timestamp());
+  EXPECT_EQ(10000, sink2.timestamp());
 
   // Switch the sink wants.
   wants1.black_frames = false;
@@ -151,12 +151,12 @@ TEST(VideoBroadcasterTest, SinkWantsBlackFrames) {
   broadcaster.AddOrUpdateSink(&sink2, wants2);
 
   cricket::WebRtcVideoFrame frame2;
-  frame2.InitToBlack(100, 200, 30 /*ts*/);
+  frame2.InitToBlack(100, 200, 30000 /*ts*/);
   // Make it not all-black
   frame2.GetUPlane()[0] = 0;
   broadcaster.OnFrame(frame2);
   EXPECT_FALSE(sink1.black_frame());
-  EXPECT_EQ(30, sink1.timestamp());
+  EXPECT_EQ(30000, sink1.timestamp());
   EXPECT_TRUE(sink2.black_frame());
-  EXPECT_EQ(30, sink2.timestamp());
+  EXPECT_EQ(30000, sink2.timestamp());
 }
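The test constants grow from 10/30 to 10000/30000 because InitToBlack() still takes nanoseconds while the frame now stores microseconds, so anything below 1000 ns truncates to zero. A worked sketch of the arithmetic (1000 is the numeric value of rtc::kNumNanosecsPerMicrosec; the helper and variable names are illustrative):

void ExampleTruncation() {  // hypothetical helper
  constexpr int64_t kNanosPerMicro = 1000;      // rtc::kNumNanosecsPerMicrosec
  int64_t old_ts_us = 10 / kNanosPerMicro;      // == 0, the old 10 ns value is lost
  int64_t new_ts_us = 10000 / kNanosPerMicro;   // == 10 us
  int64_t read_back_ns = new_ts_us * kNanosPerMicro;  // == 10000 via GetTimeStamp()
  (void)old_ts_us;
  (void)read_back_ns;
}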
@@ -57,8 +57,19 @@ class VideoFrame {
   virtual rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer()
       const = 0;
 
-  virtual int64_t GetTimeStamp() const = 0;
-  virtual void SetTimeStamp(int64_t time_stamp) = 0;
+  // System monotonic clock, same timebase as rtc::TimeMicros().
+  virtual int64_t timestamp_us() const = 0;
+  virtual void set_timestamp_us(int64_t time_us) = 0;
+
+  // Deprecated methods, for backwards compatibility.
+  // TODO(nisse): Delete when usage in Chrome and other applications
+  // have been replaced.
+  virtual int64_t GetTimeStamp() const {
+    return rtc::kNumNanosecsPerMicrosec * timestamp_us();
+  }
+  virtual void SetTimeStamp(int64_t time_ns) {
+    set_timestamp_us(time_ns / rtc::kNumNanosecsPerMicrosec);
+  }
 
   // Indicates the rotation angle in degrees.
   virtual webrtc::VideoRotation rotation() const = 0;
@@ -137,8 +148,9 @@ class VideoFrame {
                               int32_t dst_pitch_v) const;
 
   // Creates an empty frame.
-  virtual VideoFrame *CreateEmptyFrame(int w, int h,
-                                       int64_t time_stamp) const = 0;
+  virtual VideoFrame* CreateEmptyFrame(int w,
+                                       int h,
+                                       int64_t timestamp_us) const = 0;
   virtual void set_rotation(webrtc::VideoRotation rotation) = 0;
 };
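With these default implementations in the base class, nanosecond callers keep working on top of the microsecond storage. A minimal usage sketch, using cricket::WebRtcVideoFrame (changed later in this patch) as the concrete type; ExampleLegacyAccessors is an illustrative name:

void ExampleLegacyAccessors() {  // hypothetical helper
  cricket::WebRtcVideoFrame frame;  // default-constructed, timestamp is 0
  frame.SetTimeStamp(5000);         // deprecated setter, nanoseconds
  // Stored as 5 us; read back through either accessor:
  //   frame.timestamp_us() == 5
  //   frame.GetTimeStamp() == 5000 (sub-microsecond precision is dropped)
}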
@@ -1524,7 +1524,6 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::WebRtcVideoSendStream(
       pending_encoder_reconfiguration_(false),
       allocated_encoder_(nullptr, webrtc::kVideoCodecUnknown, false),
       sending_(false),
-      first_frame_timestamp_ms_(0),
       last_frame_timestamp_ms_(0) {
   parameters_.config.rtp.max_packet_size = kVideoMtu;
   parameters_.conference_mode = send_params.conference_mode;
@@ -1583,12 +1582,15 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::OnFrame(
   }
 
   int64_t frame_delta_ms = frame.GetTimeStamp() / rtc::kNumNanosecsPerMillisec;
 
   // frame->GetTimeStamp() is essentially a delta, align to webrtc time
-  if (first_frame_timestamp_ms_ == 0) {
-    first_frame_timestamp_ms_ = rtc::Time() - frame_delta_ms;
+  if (!first_frame_timestamp_ms_) {
+    first_frame_timestamp_ms_ =
+        rtc::Optional<int64_t>(rtc::Time() - frame_delta_ms);
   }
 
-  last_frame_timestamp_ms_ = first_frame_timestamp_ms_ + frame_delta_ms;
+  last_frame_timestamp_ms_ = *first_frame_timestamp_ms_ + frame_delta_ms;
 
   video_frame.set_render_time_ms(last_frame_timestamp_ms_);
   // Reconfigure codec if necessary.
   SetDimensions(video_frame.width(), video_frame.height());
@@ -1618,7 +1620,7 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::SetSource(
 
   // Reset timestamps to realign new incoming frames to a webrtc timestamp. A
   // new capturer may have a different timestamp delta than the previous one.
-  first_frame_timestamp_ms_ = 0;
+  first_frame_timestamp_ms_ = rtc::Optional<int64_t>();
 
   if (source == NULL) {
     if (stream_ != NULL) {
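first_frame_timestamp_ms_ changes from a 0 sentinel to rtc::Optional&lt;int64_t&gt;, so a genuine alignment value of 0 can no longer be mistaken for "no frame seen yet". A condensed sketch of the pattern used in the two hunks above; the free-standing variable and helper names are illustrative, the real members live in WebRtcVideoSendStream:

rtc::Optional<int64_t> g_first_frame_timestamp_ms;  // empty means "not set yet"

int64_t ExampleAlignFrame(int64_t frame_delta_ms) {  // hypothetical helper
  if (!g_first_frame_timestamp_ms) {  // true only when unset, even for value 0
    g_first_frame_timestamp_ms =
        rtc::Optional<int64_t>(rtc::Time() - frame_delta_ms);
  }
  return *g_first_frame_timestamp_ms + frame_delta_ms;  // last frame timestamp
}

void ExampleResetAlignment() {  // hypothetical helper, cf. SetSource() above
  g_first_frame_timestamp_ms = rtc::Optional<int64_t>();  // realign to new source
}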
@@ -2402,8 +2404,8 @@ void WebRtcVideoChannel2::WebRtcVideoReceiveStream::OnFrame(
   last_height_ = frame.height();
 
   const WebRtcVideoFrame render_frame(
-      frame.video_frame_buffer(),
-      frame.render_time_ms() * rtc::kNumNanosecsPerMillisec, frame.rotation());
+      frame.video_frame_buffer(), frame.rotation(),
+      frame.render_time_ms() * rtc::kNumNanosecsPerMicrosec);
   sink_->OnFrame(render_frame);
 }
@@ -395,7 +395,7 @@ class WebRtcVideoChannel2 : public VideoMediaChannel, public webrtc::Transport {
 
     // The timestamp of the first frame received
     // Used to generate the timestamps of subsequent frames
-    int64_t first_frame_timestamp_ms_ GUARDED_BY(lock_);
+    rtc::Optional<int64_t> first_frame_timestamp_ms_ GUARDED_BY(lock_);
 
     // The timestamp of the last frame received
     // Used to generate timestamp for the black frame when source is removed
@@ -512,8 +512,8 @@ TEST_F(WebRtcVideoEngine2Test,
   std::unique_ptr<char[]> data(new char[frame.data_size]);
   frame.data = data.get();
   memset(frame.data, 1, frame.data_size);
-  const int kInitialTimestamp = 123456;
-  frame.time_stamp = kInitialTimestamp;
+  int64_t initial_timestamp = rtc::TimeNanos();
+  frame.time_stamp = initial_timestamp;
 
   // Deliver initial frame.
   capturer1.SignalCapturedFrame(&frame);
@@ -531,7 +531,7 @@ TEST_F(WebRtcVideoEngine2Test,
   rtc::Thread::Current()->SleepMs(1);
   // Deliver with a timestamp (10 seconds) before the previous initial one,
   // these should not be related at all anymore and it should still work fine.
-  frame.time_stamp = kInitialTimestamp - 10000;
+  frame.time_stamp = initial_timestamp - 10 * rtc::kNumNanosecsPerSec;
   capturer2.SignalCapturedFrame(&frame);
 
   // New timestamp should be at least 1ms in the future and not old.
@@ -22,18 +22,24 @@ using webrtc::kVPlane;
 
 namespace cricket {
 
-WebRtcVideoFrame::WebRtcVideoFrame():
-    time_stamp_ns_(0),
-    rotation_(webrtc::kVideoRotation_0) {}
+WebRtcVideoFrame::WebRtcVideoFrame()
+    : timestamp_us_(0), rotation_(webrtc::kVideoRotation_0) {}
+
+WebRtcVideoFrame::WebRtcVideoFrame(
+    const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
+    webrtc::VideoRotation rotation,
+    int64_t timestamp_us)
+    : video_frame_buffer_(buffer),
+      timestamp_us_(timestamp_us),
+      rotation_(rotation) {}
 
 WebRtcVideoFrame::WebRtcVideoFrame(
     const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
     int64_t time_stamp_ns,
     webrtc::VideoRotation rotation)
-    : video_frame_buffer_(buffer),
-      time_stamp_ns_(time_stamp_ns),
-      rotation_(rotation) {
-}
+    : WebRtcVideoFrame(buffer,
+                       rotation,
+                       time_stamp_ns / rtc::kNumNanosecsPerMicrosec) {}
 
 WebRtcVideoFrame::~WebRtcVideoFrame() {}
@@ -47,7 +53,7 @@ bool WebRtcVideoFrame::Init(uint32_t format,
                             int64_t time_stamp_ns,
                             webrtc::VideoRotation rotation) {
   return Reset(format, w, h, dw, dh, sample, sample_size,
-               time_stamp_ns, rotation,
+               time_stamp_ns / rtc::kNumNanosecsPerMicrosec, rotation,
                true /*apply_rotation*/);
 }
 
@@ -55,7 +61,7 @@ bool WebRtcVideoFrame::Init(const CapturedFrame* frame, int dw, int dh,
                             bool apply_rotation) {
   return Reset(frame->fourcc, frame->width, frame->height, dw, dh,
                static_cast<uint8_t*>(frame->data), frame->data_size,
-               frame->time_stamp,
+               frame->time_stamp / rtc::kNumNanosecsPerMicrosec,
               frame->rotation, apply_rotation);
 }
@@ -126,9 +132,7 @@ WebRtcVideoFrame::video_frame_buffer() const {
 }
 
 VideoFrame* WebRtcVideoFrame::Copy() const {
-  WebRtcVideoFrame* new_frame = new WebRtcVideoFrame(
-      video_frame_buffer_, time_stamp_ns_, rotation_);
-  return new_frame;
+  return new WebRtcVideoFrame(video_frame_buffer_, rotation_, timestamp_us_);
 }
 
 size_t WebRtcVideoFrame::ConvertToRgbBuffer(uint32_t to_fourcc,
@@ -147,7 +151,7 @@ bool WebRtcVideoFrame::Reset(uint32_t format,
                              int dh,
                              uint8_t* sample,
                              size_t sample_size,
-                             int64_t time_stamp_ns,
+                             int64_t timestamp_us,
                              webrtc::VideoRotation rotation,
                              bool apply_rotation) {
   if (!Validate(format, w, h, sample, sample_size)) {
@@ -166,8 +170,7 @@ bool WebRtcVideoFrame::Reset(uint32_t format,
     new_height = dw;
   }
 
-  InitToEmptyBuffer(new_width, new_height,
-                    time_stamp_ns);
+  InitToEmptyBuffer(new_width, new_height);
   rotation_ = apply_rotation ? webrtc::kVideoRotation_0 : rotation;
 
   int horiz_crop = ((w - dw) / 2) & ~1;
@@ -192,21 +195,27 @@ bool WebRtcVideoFrame::Reset(uint32_t format,
                   << " return code : " << r;
     return false;
   }
+  timestamp_us_ = timestamp_us;
   return true;
 }
 
-VideoFrame* WebRtcVideoFrame::CreateEmptyFrame(
-    int w, int h,
-    int64_t time_stamp_ns) const {
+VideoFrame* WebRtcVideoFrame::CreateEmptyFrame(int w,
+                                               int h,
+                                               int64_t timestamp_us) const {
   WebRtcVideoFrame* frame = new WebRtcVideoFrame();
-  frame->InitToEmptyBuffer(w, h, time_stamp_ns);
+  frame->InitToEmptyBuffer(w, h, rtc::kNumNanosecsPerMicrosec * timestamp_us);
   return frame;
 }
 
+void WebRtcVideoFrame::InitToEmptyBuffer(int w, int h) {
+  video_frame_buffer_ = new rtc::RefCountedObject<webrtc::I420Buffer>(w, h);
+  rotation_ = webrtc::kVideoRotation_0;
+}
+
 void WebRtcVideoFrame::InitToEmptyBuffer(int w, int h,
                                          int64_t time_stamp_ns) {
   video_frame_buffer_ = new rtc::RefCountedObject<webrtc::I420Buffer>(w, h);
-  time_stamp_ns_ = time_stamp_ns;
+  SetTimeStamp(time_stamp_ns);
   rotation_ = webrtc::kVideoRotation_0;
 }
 
@@ -237,8 +246,8 @@ const VideoFrame* WebRtcVideoFrame::GetCopyWithRotationApplied() const {
     rotated_height = orig_width;
   }
 
-  rotated_frame_.reset(CreateEmptyFrame(rotated_width, rotated_height,
-                                        GetTimeStamp()));
+  rotated_frame_.reset(
+      CreateEmptyFrame(rotated_width, rotated_height, timestamp_us_));
 
   // TODO(guoweis): Add a function in webrtc_libyuv.cc to convert from
   // VideoRotation to libyuv::RotationMode.
@@ -27,6 +27,13 @@ struct CapturedFrame;
 class WebRtcVideoFrame : public VideoFrame {
  public:
   WebRtcVideoFrame();
+
+  // Preferred construction, with microsecond timestamp.
+  WebRtcVideoFrame(const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
+                   webrtc::VideoRotation rotation,
+                   int64_t timestamp_us);
+
+  // TODO(nisse): Deprecate/delete.
   WebRtcVideoFrame(const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
                    int64_t time_stamp_ns,
                    webrtc::VideoRotation rotation);
@@ -47,8 +54,13 @@ class WebRtcVideoFrame : public VideoFrame {
             int64_t time_stamp_ns,
             webrtc::VideoRotation rotation);
 
+  // The timestamp of the captured frame is expected to use the same
+  // timescale and epoch as rtc::Time.
+  // TODO(nisse): Consider adding a warning message, or even an RTC_DCHECK, if
+  // the time is too far off.
   bool Init(const CapturedFrame* frame, int dw, int dh, bool apply_rotation);
 
+  void InitToEmptyBuffer(int w, int h);
   void InitToEmptyBuffer(int w, int h, int64_t time_stamp_ns);
 
   bool InitToBlack(int w, int h, int64_t time_stamp_ns);
@@ -69,10 +81,9 @@ class WebRtcVideoFrame : public VideoFrame {
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer()
       const override;
 
-  int64_t GetTimeStamp() const override { return time_stamp_ns_; }
-  void SetTimeStamp(int64_t time_stamp_ns) override {
-    time_stamp_ns_ = time_stamp_ns;
-  }
+  /* System monotonic clock */
+  int64_t timestamp_us() const override { return timestamp_us_; }
+  void set_timestamp_us(int64_t time_us) { timestamp_us_ = time_us; };
 
   webrtc::VideoRotation rotation() const override { return rotation_; }
 
@@ -95,15 +106,15 @@ class WebRtcVideoFrame : public VideoFrame {
   // |dh| is destination height, like |dw|, but must be a positive number.
   // Returns whether the function succeeded or failed.
   bool Reset(uint32_t format,
-             int w,
-             int h,
-             int dw,
-             int dh,
-             uint8_t* sample,
-             size_t sample_size,
-             int64_t time_stamp_ns,
-             webrtc::VideoRotation rotation,
-             bool apply_rotation);
+             int w,
+             int h,
+             int dw,
+             int dh,
+             uint8_t* sample,
+             size_t sample_size,
+             int64_t timestamp_us,
+             webrtc::VideoRotation rotation,
+             bool apply_rotation);
 
  private:
   VideoFrame* CreateEmptyFrame(int w, int h,
@@ -111,7 +122,7 @@ class WebRtcVideoFrame : public VideoFrame {
 
   // An opaque reference counted handle that stores the pixel data.
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer_;
-  int64_t time_stamp_ns_;
+  int64_t timestamp_us_;
   webrtc::VideoRotation rotation_;
 
   // This is mutable as the calculation is expensive but once calculated, it
@@ -20,6 +20,7 @@ namespace {
 
 class WebRtcVideoTestFrame : public cricket::WebRtcVideoFrame {
  public:
+  // The ApplyRotationToFrame test needs this as a public method.
   using cricket::WebRtcVideoFrame::set_rotation;
 
   virtual VideoFrame* CreateEmptyFrame(int w,
@@ -47,7 +48,7 @@ class WebRtcVideoFrameTest : public VideoFrameTest<cricket::WebRtcVideoFrame> {
     // Build the CapturedFrame.
     cricket::CapturedFrame captured_frame;
     captured_frame.fourcc = cricket::FOURCC_I420;
-    captured_frame.time_stamp = 5678;
+    captured_frame.time_stamp = rtc::TimeNanos();
     captured_frame.rotation = frame_rotation;
     captured_frame.width = frame_width;
     captured_frame.height = frame_height;
@@ -66,7 +67,8 @@ class WebRtcVideoFrameTest : public VideoFrameTest<cricket::WebRtcVideoFrame> {
                            apply_rotation));
 
     // Verify the new frame.
-    EXPECT_EQ(5678, frame.GetTimeStamp());
+    EXPECT_EQ(captured_frame.time_stamp / rtc::kNumNanosecsPerMicrosec,
+              frame.timestamp_us());
     if (apply_rotation)
       EXPECT_EQ(webrtc::kVideoRotation_0, frame.rotation());
     else
@@ -271,13 +273,16 @@ TEST_F(WebRtcVideoFrameTest, TextureInitialValues) {
   webrtc::NativeHandleBuffer* buffer =
       new rtc::RefCountedObject<webrtc::test::FakeNativeHandleBuffer>(
          dummy_handle, 640, 480);
-  cricket::WebRtcVideoFrame frame(buffer, 200, webrtc::kVideoRotation_0);
+  // Timestamp is converted from ns to us, so last three digits are lost.
+  cricket::WebRtcVideoFrame frame(buffer, 20000, webrtc::kVideoRotation_0);
   EXPECT_EQ(dummy_handle, frame.GetNativeHandle());
   EXPECT_EQ(640, frame.width());
   EXPECT_EQ(480, frame.height());
-  EXPECT_EQ(200, frame.GetTimeStamp());
-  frame.SetTimeStamp(400);
-  EXPECT_EQ(400, frame.GetTimeStamp());
+  EXPECT_EQ(20000, frame.GetTimeStamp());
+  EXPECT_EQ(20, frame.timestamp_us());
+  frame.set_timestamp_us(40);
+  EXPECT_EQ(40000, frame.GetTimeStamp());
+  EXPECT_EQ(40, frame.timestamp_us());
 }
 
 TEST_F(WebRtcVideoFrameTest, CopyTextureFrame) {
@@ -286,12 +291,14 @@ TEST_F(WebRtcVideoFrameTest, CopyTextureFrame) {
   webrtc::NativeHandleBuffer* buffer =
       new rtc::RefCountedObject<webrtc::test::FakeNativeHandleBuffer>(
          dummy_handle, 640, 480);
-  cricket::WebRtcVideoFrame frame1(buffer, 200, webrtc::kVideoRotation_0);
+  // Timestamp is converted from ns to us, so last three digits are lost.
+  cricket::WebRtcVideoFrame frame1(buffer, 20000, webrtc::kVideoRotation_0);
   cricket::VideoFrame* frame2 = frame1.Copy();
   EXPECT_EQ(frame1.GetNativeHandle(), frame2->GetNativeHandle());
   EXPECT_EQ(frame1.width(), frame2->width());
   EXPECT_EQ(frame1.height(), frame2->height());
-  EXPECT_EQ(frame1.GetTimeStamp(), frame2->GetTimeStamp());
+  EXPECT_EQ(frame1.timestamp_us(), frame2->timestamp_us());
   delete frame2;
 }
@@ -29,7 +29,7 @@ class WebRtcVideoFrameFactoryTest
     captured_frame_.fourcc = cricket::FOURCC_I420;
     captured_frame_.pixel_width = 1;
     captured_frame_.pixel_height = 1;
-    captured_frame_.time_stamp = 5678;
+    captured_frame_.time_stamp = rtc::TimeNanos();
     captured_frame_.rotation = frame_rotation;
     captured_frame_.width = frame_width;
     captured_frame_.height = frame_height;