Add cricket::VideoFrame::transport_frame_id() and set it to RTP timestamp.

Passing transport_frame_id() to VideoSink will make it possible to identify
incoming video frames, which will allow correlating video frames on the
sender and on the receiver.

BUG=chromium:621691
R=mflodman@webrtc.org, stefan@webrtc.org

Review URL: https://codereview.webrtc.org/2088953002 .

Cr-Commit-Position: refs/heads/master@{#13596}
This commit is contained in:
Sergey Ulanov
2016-08-01 13:35:55 -07:00
parent 7fbe2ee224
commit 19ee1e6eb1
11 changed files with 70 additions and 44 deletions

View File

@ -234,12 +234,12 @@ void AndroidVideoCapturerJni::OnMemoryBufferFrame(void* video_frame,
scaled_buffer->ScaleFrom(buffer); scaled_buffer->ScaleFrom(buffer);
buffer = scaled_buffer; buffer = scaled_buffer;
} }
capturer_->OnFrame(cricket::WebRtcVideoFrame( capturer_->OnFrame(
buffer, cricket::WebRtcVideoFrame(
capturer_->apply_rotation() buffer, capturer_->apply_rotation()
? webrtc::kVideoRotation_0 ? webrtc::kVideoRotation_0
: static_cast<webrtc::VideoRotation>(rotation), : static_cast<webrtc::VideoRotation>(rotation),
translated_camera_time_us), translated_camera_time_us, 0),
width, height); width, height);
} }
@ -289,15 +289,14 @@ void AndroidVideoCapturerJni::OnTextureFrame(int width,
matrix.Rotate(static_cast<webrtc::VideoRotation>(rotation)); matrix.Rotate(static_cast<webrtc::VideoRotation>(rotation));
} }
capturer_->OnFrame( capturer_->OnFrame(cricket::WebRtcVideoFrame(
cricket::WebRtcVideoFrame(
surface_texture_helper_->CreateTextureFrame( surface_texture_helper_->CreateTextureFrame(
adapted_width, adapted_height, adapted_width, adapted_height,
NativeHandleImpl(handle.oes_texture_id, matrix)), NativeHandleImpl(handle.oes_texture_id, matrix)),
capturer_->apply_rotation() capturer_->apply_rotation()
? webrtc::kVideoRotation_0 ? webrtc::kVideoRotation_0
: static_cast<webrtc::VideoRotation>(rotation), : static_cast<webrtc::VideoRotation>(rotation),
translated_camera_time_us), translated_camera_time_us, 0),
width, height); width, height);
} }

View File

@ -146,7 +146,7 @@ void AndroidVideoTrackSource::OnByteBufferFrameCaptured(const void* frame_data,
buffer, buffer,
apply_rotation_ ? webrtc::kVideoRotation_0 apply_rotation_ ? webrtc::kVideoRotation_0
: static_cast<webrtc::VideoRotation>(rotation), : static_cast<webrtc::VideoRotation>(rotation),
translated_camera_time_us), translated_camera_time_us, 0),
width, height); width, height);
} }
@ -197,7 +197,7 @@ void AndroidVideoTrackSource::OnTextureFrameCaptured(
webrtc_jni::NativeHandleImpl(handle.oes_texture_id, matrix)), webrtc_jni::NativeHandleImpl(handle.oes_texture_id, matrix)),
apply_rotation_ ? webrtc::kVideoRotation_0 apply_rotation_ ? webrtc::kVideoRotation_0
: static_cast<webrtc::VideoRotation>(rotation), : static_cast<webrtc::VideoRotation>(rotation),
translated_camera_time_us), translated_camera_time_us, 0),
width, height); width, height);
} }

View File

@ -55,8 +55,8 @@ void VideoBroadcaster::OnFrame(const cricket::VideoFrame& frame) {
for (auto& sink_pair : sink_pairs()) { for (auto& sink_pair : sink_pairs()) {
if (sink_pair.wants.black_frames) { if (sink_pair.wants.black_frames) {
sink_pair.sink->OnFrame(cricket::WebRtcVideoFrame( sink_pair.sink->OnFrame(cricket::WebRtcVideoFrame(
GetBlackFrameBuffer(frame.width(), frame.height()), GetBlackFrameBuffer(frame.width(), frame.height()), frame.rotation(),
frame.rotation(), frame.timestamp_us())); frame.timestamp_us(), frame.transport_frame_id()));
} else { } else {
sink_pair.sink->OnFrame(frame); sink_pair.sink->OnFrame(frame);
} }

View File

@ -139,8 +139,8 @@ TEST(VideoBroadcasterTest, SinkWantsBlackFrames) {
// Makes it not all black. // Makes it not all black.
buffer->InitializeData(); buffer->InitializeData();
cricket::WebRtcVideoFrame frame1( cricket::WebRtcVideoFrame frame1(buffer, webrtc::kVideoRotation_0,
buffer, webrtc::kVideoRotation_0, 10 /* timestamp_us */); 10 /* timestamp_us */, 0 /* frame_id */);
broadcaster.OnFrame(frame1); broadcaster.OnFrame(frame1);
EXPECT_TRUE(sink1.black_frame()); EXPECT_TRUE(sink1.black_frame());
EXPECT_EQ(10000, sink1.timestamp()); EXPECT_EQ(10000, sink1.timestamp());
@ -153,8 +153,8 @@ TEST(VideoBroadcasterTest, SinkWantsBlackFrames) {
wants2.black_frames = true; wants2.black_frames = true;
broadcaster.AddOrUpdateSink(&sink2, wants2); broadcaster.AddOrUpdateSink(&sink2, wants2);
cricket::WebRtcVideoFrame frame2( cricket::WebRtcVideoFrame frame2(buffer, webrtc::kVideoRotation_0,
buffer, webrtc::kVideoRotation_0, 30 /* timestamp_us */); 30 /* timestamp_us */, 0 /* frame_id */);
broadcaster.OnFrame(frame2); broadcaster.OnFrame(frame2);
EXPECT_FALSE(sink1.black_frame()); EXPECT_FALSE(sink1.black_frame());
EXPECT_EQ(30000, sink1.timestamp()); EXPECT_EQ(30000, sink1.timestamp());

View File

@ -38,6 +38,9 @@ class VideoFrame {
virtual const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& virtual const rtc::scoped_refptr<webrtc::VideoFrameBuffer>&
video_frame_buffer() const = 0; video_frame_buffer() const = 0;
// Frame ID. Normally RTP timestamp when the frame was received using RTP.
virtual uint32_t transport_frame_id() const = 0;
// System monotonic clock, same timebase as rtc::TimeMicros(). // System monotonic clock, same timebase as rtc::TimeMicros().
virtual int64_t timestamp_us() const = 0; virtual int64_t timestamp_us() const = 0;
virtual void set_timestamp_us(int64_t time_us) = 0; virtual void set_timestamp_us(int64_t time_us) = 0;

View File

@ -44,7 +44,8 @@ VideoFrame* VideoFrameFactory::CreateAliasedFrame(
scaled_buffer->CropAndScaleFrom(cropped_input_frame->video_frame_buffer()); scaled_buffer->CropAndScaleFrom(cropped_input_frame->video_frame_buffer());
return new WebRtcVideoFrame(scaled_buffer, cropped_input_frame->rotation(), return new WebRtcVideoFrame(scaled_buffer, cropped_input_frame->rotation(),
cropped_input_frame->timestamp_us()); cropped_input_frame->timestamp_us(),
cropped_input_frame->transport_frame_id());
} }
} // namespace cricket } // namespace cricket

View File

@ -2469,9 +2469,9 @@ void WebRtcVideoChannel2::WebRtcVideoReceiveStream::OnFrame(
last_width_ = frame.width(); last_width_ = frame.width();
last_height_ = frame.height(); last_height_ = frame.height();
const WebRtcVideoFrame render_frame( WebRtcVideoFrame render_frame(
frame.video_frame_buffer(), frame.rotation(), frame.video_frame_buffer(), frame.rotation(),
frame.render_time_ms() * rtc::kNumNanosecsPerMicrosec); frame.render_time_ms() * rtc::kNumNanosecsPerMicrosec, frame.timestamp());
sink_->OnFrame(render_frame); sink_->OnFrame(render_frame);
} }

View File

@ -28,9 +28,11 @@ WebRtcVideoFrame::WebRtcVideoFrame()
WebRtcVideoFrame::WebRtcVideoFrame( WebRtcVideoFrame::WebRtcVideoFrame(
const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer, const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
webrtc::VideoRotation rotation, webrtc::VideoRotation rotation,
int64_t timestamp_us) int64_t timestamp_us,
uint32_t transport_frame_id)
: video_frame_buffer_(buffer), : video_frame_buffer_(buffer),
timestamp_us_(timestamp_us), timestamp_us_(timestamp_us),
transport_frame_id_(transport_frame_id),
rotation_(rotation) {} rotation_(rotation) {}
WebRtcVideoFrame::WebRtcVideoFrame( WebRtcVideoFrame::WebRtcVideoFrame(
@ -39,7 +41,8 @@ WebRtcVideoFrame::WebRtcVideoFrame(
webrtc::VideoRotation rotation) webrtc::VideoRotation rotation)
: WebRtcVideoFrame(buffer, : WebRtcVideoFrame(buffer,
rotation, rotation,
time_stamp_ns / rtc::kNumNanosecsPerMicrosec) {} time_stamp_ns / rtc::kNumNanosecsPerMicrosec,
0) {}
WebRtcVideoFrame::~WebRtcVideoFrame() {} WebRtcVideoFrame::~WebRtcVideoFrame() {}
@ -78,8 +81,25 @@ WebRtcVideoFrame::video_frame_buffer() const {
return video_frame_buffer_; return video_frame_buffer_;
} }
uint32_t WebRtcVideoFrame::transport_frame_id() const {
return transport_frame_id_;
}
int64_t WebRtcVideoFrame::timestamp_us() const {
return timestamp_us_;
}
void WebRtcVideoFrame::set_timestamp_us(int64_t time_us) {
timestamp_us_ = time_us;
}
webrtc::VideoRotation WebRtcVideoFrame::rotation() const {
return rotation_;
}
VideoFrame* WebRtcVideoFrame::Copy() const { VideoFrame* WebRtcVideoFrame::Copy() const {
return new WebRtcVideoFrame(video_frame_buffer_, rotation_, timestamp_us_); return new WebRtcVideoFrame(video_frame_buffer_, rotation_, timestamp_us_,
transport_frame_id_);
} }
size_t WebRtcVideoFrame::ConvertToRgbBuffer(uint32_t to_fourcc, size_t WebRtcVideoFrame::ConvertToRgbBuffer(uint32_t to_fourcc,
@ -195,8 +215,8 @@ const VideoFrame* WebRtcVideoFrame::GetCopyWithRotationApplied() const {
current_width, current_height, current_width, current_height,
static_cast<libyuv::RotationMode>(rotation())); static_cast<libyuv::RotationMode>(rotation()));
if (ret == 0) { if (ret == 0) {
rotated_frame_.reset( rotated_frame_.reset(new WebRtcVideoFrame(
new WebRtcVideoFrame(buffer, webrtc::kVideoRotation_0, timestamp_us_)); buffer, webrtc::kVideoRotation_0, timestamp_us_, transport_frame_id_));
} }
return rotated_frame_.get(); return rotated_frame_.get();

View File

@ -38,12 +38,13 @@ class WebRtcVideoFrame : public VideoFrame {
// Preferred constructor. // Preferred constructor.
WebRtcVideoFrame(const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer, WebRtcVideoFrame(const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
webrtc::VideoRotation rotation, webrtc::VideoRotation rotation,
int64_t timestamp_us); int64_t timestamp_us,
uint32_t transport_frame_id);
// TODO(nisse): Deprecated, delete as soon as all callers have switched to the // TODO(nisse): Deprecated, delete as soon as all callers have switched to the
// above constructor with microsecond timestamp. // above constructor with microsecond timestamp.
WebRtcVideoFrame(const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer, WebRtcVideoFrame(const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
int64_t time_stamp_ns, int64_t timestamp_ns,
webrtc::VideoRotation rotation); webrtc::VideoRotation rotation);
~WebRtcVideoFrame(); ~WebRtcVideoFrame();
@ -59,7 +60,7 @@ class WebRtcVideoFrame : public VideoFrame {
int dh, int dh,
uint8_t* sample, uint8_t* sample,
size_t sample_size, size_t sample_size,
int64_t time_stamp_ns, int64_t timestamp_ns,
webrtc::VideoRotation rotation); webrtc::VideoRotation rotation);
// TODO(nisse): We're moving to have all timestamps use the same // TODO(nisse): We're moving to have all timestamps use the same
@ -79,11 +80,12 @@ class WebRtcVideoFrame : public VideoFrame {
const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& video_frame_buffer() const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& video_frame_buffer()
const override; const override;
/* System monotonic clock */ uint32_t transport_frame_id() const override;
int64_t timestamp_us() const override { return timestamp_us_; }
void set_timestamp_us(int64_t time_us) override { timestamp_us_ = time_us; };
webrtc::VideoRotation rotation() const override { return rotation_; } int64_t timestamp_us() const override;
void set_timestamp_us(int64_t time_us) override;
webrtc::VideoRotation rotation() const override;
VideoFrame* Copy() const override; VideoFrame* Copy() const override;
@ -118,6 +120,7 @@ class WebRtcVideoFrame : public VideoFrame {
// An opaque reference counted handle that stores the pixel data. // An opaque reference counted handle that stores the pixel data.
rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer_; rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer_;
int64_t timestamp_us_; int64_t timestamp_us_;
uint32_t transport_frame_id_;
webrtc::VideoRotation rotation_; webrtc::VideoRotation rotation_;
// This is mutable as the calculation is expensive but once calculated, it // This is mutable as the calculation is expensive but once calculated, it

View File

@ -36,9 +36,9 @@ class VideoRendererAdapter
rtc::scoped_refptr<webrtc::VideoFrameBuffer> i420Buffer = rtc::scoped_refptr<webrtc::VideoFrameBuffer> i420Buffer =
nativeVideoFrame.video_frame_buffer()->NativeToI420Buffer(); nativeVideoFrame.video_frame_buffer()->NativeToI420Buffer();
std::unique_ptr<cricket::VideoFrame> cpuFrame( std::unique_ptr<cricket::VideoFrame> cpuFrame(
new cricket::WebRtcVideoFrame(i420Buffer, new cricket::WebRtcVideoFrame(i420Buffer, nativeVideoFrame.rotation(),
nativeVideoFrame.rotation(), nativeVideoFrame.timestamp_us(),
nativeVideoFrame.timestamp_us())); nativeVideoFrame.transport_frame_id()));
const cricket::VideoFrame *rotatedFrame = const cricket::VideoFrame *rotatedFrame =
cpuFrame->GetCopyWithRotationApplied(); cpuFrame->GetCopyWithRotationApplied();
videoFrame = [[RTCVideoFrame alloc] initWithNativeFrame:rotatedFrame]; videoFrame = [[RTCVideoFrame alloc] initWithNativeFrame:rotatedFrame];

View File

@ -718,7 +718,7 @@ void AVFoundationVideoCapturer::OnFrameMessage(CVImageBufferRef image_buffer,
} }
OnFrame(cricket::WebRtcVideoFrame(buffer, webrtc::kVideoRotation_0, OnFrame(cricket::WebRtcVideoFrame(buffer, webrtc::kVideoRotation_0,
translated_camera_time_us), translated_camera_time_us, 0),
captured_width, captured_height); captured_width, captured_height);
CVBufferRelease(image_buffer); CVBufferRelease(image_buffer);