Reland of Delete all use of cricket::VideoFrame and cricket::WebRtcVideoFrame. (patchset #1 id:1 of https://codereview.webrtc.org/2471783002/ )

Reason for revert:
Relanding after known downstream breakages have been fixed.

Original issue's description:
> Revert of Delete all use of cricket::VideoFrame and cricket::WebRtcVideoFrame. (patchset #7 id:120001 of https://codereview.webrtc.org/2383093002/ )
>
> Reason for revert:
> Breaks chrome, see https://build.chromium.org/p/chromium.webrtc.fyi/builders/Mac%20Builder/builds/19019/steps/compile/logs/stdio
>
> Analysis: Chrome uses cricket::VideoFrame without explicitly including webrtc/media/base/videoframe.h, and breaks when that file is no longer included by any other webrtc headers. Will reland after updating Chrome.
>
> Original issue's description:
> > Delete all use of cricket::VideoFrame and cricket::WebRtcVideoFrame.
> >
> > Replaced with webrtc::VideoFrame.
> >
> > TBR=mflodman@webrtc.org
> > BUG=webrtc:5682
> >
> > Committed: https://crrev.com/45c8b8940042bd2574c39920804ade8343cefdba
> > Cr-Commit-Position: refs/heads/master@{#14885}
>
> TBR=perkj@webrtc.org,pthatcher@webrtc.org,tkchin@webrtc.org,mflodman@webrtc.org,stefan@webrtc.org
> # Skipping CQ checks because original CL landed less than 1 day ago.
> NOPRESUBMIT=true
> NOTREECHECKS=true
> NOTRY=true
> BUG=webrtc:5682
>
> Committed: https://crrev.com/7341ab8e2505c9763d208e069bda269018357e7d
> Cr-Commit-Position: refs/heads/master@{#14886}

TBR=perkj@webrtc.org,pthatcher@webrtc.org,tkchin@webrtc.org,mflodman@webrtc.org,stefan@webrtc.org
# Not skipping CQ checks because original CL landed more than 1 day ago.
BUG=webrtc:5682

Review-Url: https://codereview.webrtc.org/2487633002
Cr-Commit-Position: refs/heads/master@{#15039}
nisse
2016-11-11 03:55:13 -08:00
committed by Commit bot
parent e6f98c7a37
commit acd935b540
46 changed files with 181 additions and 212 deletions
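
The change itself is mechanical: sinks, sources and frame construction that previously went through cricket::VideoFrame / cricket::WebRtcVideoFrame now use webrtc::VideoFrame directly. A minimal sketch of the resulting pattern; DemoSink and its members are illustrative names, not code from this CL:

// Illustrative sketch only; class and member names are invented.
#include <cstdint>

#include "webrtc/media/base/videosinkinterface.h"
#include "webrtc/video_frame.h"

class DemoSink : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
 public:
  void OnFrame(const webrtc::VideoFrame& frame) override {
    // webrtc::VideoFrame exposes the accessors the old cricket wrapper
    // forwarded to: buffer, rotation and the capture timestamp.
    last_width_ = frame.width();
    last_height_ = frame.height();
    last_timestamp_us_ = frame.timestamp_us();
  }

 private:
  int last_width_ = 0;
  int last_height_ = 0;
  int64_t last_timestamp_us_ = 0;
};

// Frames that were previously built as
//   cricket::WebRtcVideoFrame(buffer, rotation, timestamp_us)
// are now built as
//   webrtc::VideoFrame(buffer, rotation, timestamp_us)
// with the same (buffer, rotation, timestamp) arguments.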

View File

@ -20,7 +20,7 @@ import java.nio.ByteBuffer;
*/
public class VideoRenderer {
/**
* Java version of cricket::VideoFrame. Frames are only constructed from native code and test
* Java version of webrtc::VideoFrame. Frames are only constructed from native code and test
* code.
*/
public static class I420Frame {

View File

@ -733,7 +733,7 @@ class StatsObserverWrapper : public StatsObserver {
// Wrapper dispatching rtc::VideoSinkInterface to a Java VideoRenderer
// instance.
class JavaVideoRendererWrapper
: public rtc::VideoSinkInterface<cricket::VideoFrame> {
: public rtc::VideoSinkInterface<webrtc::VideoFrame> {
public:
JavaVideoRendererWrapper(JNIEnv* jni, jobject j_callbacks)
: j_callbacks_(jni, j_callbacks),
@ -753,7 +753,7 @@ class JavaVideoRendererWrapper
virtual ~JavaVideoRendererWrapper() {}
void OnFrame(const cricket::VideoFrame& video_frame) override {
void OnFrame(const webrtc::VideoFrame& video_frame) override {
ScopedLocalRefFrame local_ref_frame(jni());
jobject j_frame =
(video_frame.video_frame_buffer()->native_handle() != nullptr)
@ -769,13 +769,12 @@ class JavaVideoRendererWrapper
// Make a shallow copy of |frame| to be used with Java. The callee has
// ownership of the frame, and the frame should be released with
// VideoRenderer.releaseNativeFrame().
static jlong javaShallowCopy(const cricket::VideoFrame* frame) {
return jlongFromPointer(new cricket::WebRtcVideoFrame(
frame->video_frame_buffer(), frame->rotation(), frame->timestamp_us()));
static jlong javaShallowCopy(const webrtc::VideoFrame* frame) {
return jlongFromPointer(new webrtc::VideoFrame(*frame));
}
// Return a VideoRenderer.I420Frame referring to the data in |frame|.
jobject CricketToJavaI420Frame(const cricket::VideoFrame* frame) {
jobject CricketToJavaI420Frame(const webrtc::VideoFrame* frame) {
jintArray strides = jni()->NewIntArray(3);
jint* strides_array = jni()->GetIntArrayElements(strides, NULL);
strides_array[0] = frame->video_frame_buffer()->StrideY();
@ -806,7 +805,7 @@ class JavaVideoRendererWrapper
}
// Return a VideoRenderer.I420Frame referring texture object in |frame|.
jobject CricketToJavaTextureFrame(const cricket::VideoFrame* frame) {
jobject CricketToJavaTextureFrame(const webrtc::VideoFrame* frame) {
NativeHandleImpl* handle = reinterpret_cast<NativeHandleImpl*>(
frame->video_frame_buffer()->native_handle());
jfloatArray sampling_matrix = handle->sampling_matrix.ToJava(jni());
@ -951,7 +950,7 @@ JOW(void, VideoRenderer_freeWrappedVideoRenderer)(JNIEnv*, jclass, jlong j_p) {
JOW(void, VideoRenderer_releaseNativeFrame)(
JNIEnv* jni, jclass, jlong j_frame_ptr) {
delete reinterpret_cast<const cricket::VideoFrame*>(j_frame_ptr);
delete reinterpret_cast<const webrtc::VideoFrame*>(j_frame_ptr);
}
JOW(void, MediaStreamTrack_free)(JNIEnv*, jclass, jlong j_p) {
@ -2123,7 +2122,7 @@ JOW(void, VideoTrack_nativeAddRenderer)(
LOG(LS_INFO) << "VideoTrack::nativeAddRenderer";
reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer)
->AddOrUpdateSink(
reinterpret_cast<rtc::VideoSinkInterface<cricket::VideoFrame>*>(
reinterpret_cast<rtc::VideoSinkInterface<webrtc::VideoFrame>*>(
j_renderer_pointer),
rtc::VideoSinkWants());
}
@ -2133,7 +2132,7 @@ JOW(void, VideoTrack_nativeRemoveRenderer)(
jlong j_video_track_pointer, jlong j_renderer_pointer) {
reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer)
->RemoveSink(
reinterpret_cast<rtc::VideoSinkInterface<cricket::VideoFrame>*>(
reinterpret_cast<rtc::VideoSinkInterface<webrtc::VideoFrame>*>(
j_renderer_pointer));
}
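
The ownership contract documented for javaShallowCopy() above works because webrtc::VideoFrame copies are shallow: the pixels live in a ref-counted webrtc::VideoFrameBuffer, so copying a frame only adds a reference. A small sketch under that assumption; the helper names are invented:

#include "webrtc/video_frame.h"

// Equivalent in spirit to javaShallowCopy() after this CL: the heap-allocated
// copy and the original share the same underlying frame buffer.
webrtc::VideoFrame* DemoShallowCopy(const webrtc::VideoFrame& frame) {
  return new webrtc::VideoFrame(frame);
}

// Counterpart of VideoRenderer_releaseNativeFrame(): deleting the copy drops
// its reference to the shared buffer.
void DemoRelease(webrtc::VideoFrame* frame) {
  delete frame;
}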

View File

@ -95,9 +95,8 @@ void AndroidVideoTrackSource::OnByteBufferFrameCaptured(const void* frame_data,
buffer->MutableDataU(), buffer->StrideU(),
buffer->width(), buffer->height());
OnFrame(cricket::WebRtcVideoFrame(
buffer, static_cast<webrtc::VideoRotation>(rotation),
translated_camera_time_us));
OnFrame(VideoFrame(buffer, static_cast<webrtc::VideoRotation>(rotation),
translated_camera_time_us));
}
void AndroidVideoTrackSource::OnTextureFrameCaptured(
@ -147,13 +146,13 @@ void AndroidVideoTrackSource::OnTextureFrameCaptured(
matrix.Rotate(static_cast<webrtc::VideoRotation>(rotation));
}
OnFrame(cricket::WebRtcVideoFrame(
surface_texture_helper_->CreateTextureFrame(
adapted_width, adapted_height,
webrtc_jni::NativeHandleImpl(handle.oes_texture_id, matrix)),
do_rotate ? webrtc::kVideoRotation_0
: static_cast<webrtc::VideoRotation>(rotation),
translated_camera_time_us));
OnFrame(VideoFrame(
surface_texture_helper_->CreateTextureFrame(
adapted_width, adapted_height,
webrtc_jni::NativeHandleImpl(handle.oes_texture_id, matrix)),
do_rotate ? webrtc::kVideoRotation_0
: static_cast<webrtc::VideoRotation>(rotation),
translated_camera_time_us));
}
void AndroidVideoTrackSource::OnOutputFormatRequest(int width,

View File

@ -25,9 +25,9 @@
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/base/optional.h"
#include "webrtc/media/base/mediachannel.h"
#include "webrtc/media/base/videoframe.h"
#include "webrtc/media/base/videosinkinterface.h"
#include "webrtc/media/base/videosourceinterface.h"
#include "webrtc/video_frame.h"
namespace webrtc {
@ -98,7 +98,7 @@ class MediaStreamTrackInterface : public rtc::RefCountInterface,
// The same source can be used in multiple VideoTracks.
class VideoTrackSourceInterface
: public MediaSourceInterface,
public rtc::VideoSourceInterface<cricket::VideoFrame> {
public rtc::VideoSourceInterface<VideoFrame> {
public:
struct Stats {
// Original size of captured frame, before video adaptation.
@ -131,13 +131,12 @@ class VideoTrackSourceInterface
class VideoTrackInterface
: public MediaStreamTrackInterface,
public rtc::VideoSourceInterface<cricket::VideoFrame> {
public rtc::VideoSourceInterface<VideoFrame> {
public:
// Register a video sink for this track.
void AddOrUpdateSink(rtc::VideoSinkInterface<cricket::VideoFrame>* sink,
void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
const rtc::VideoSinkWants& wants) override{};
void RemoveSink(
rtc::VideoSinkInterface<cricket::VideoFrame>* sink) override{};
void RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink) override{};
virtual VideoTrackSourceInterface* GetSource() const = 0;

View File

@ -42,12 +42,10 @@ BEGIN_PROXY_MAP(VideoTrack)
PROXY_CONSTMETHOD0(bool, enabled)
PROXY_METHOD1(bool, set_enabled, bool)
PROXY_WORKER_METHOD2(void,
AddOrUpdateSink,
rtc::VideoSinkInterface<cricket::VideoFrame>*,
const rtc::VideoSinkWants&)
PROXY_WORKER_METHOD1(void,
RemoveSink,
rtc::VideoSinkInterface<cricket::VideoFrame>*)
AddOrUpdateSink,
rtc::VideoSinkInterface<VideoFrame>*,
const rtc::VideoSinkWants&)
PROXY_WORKER_METHOD1(void, RemoveSink, rtc::VideoSinkInterface<VideoFrame>*)
PROXY_CONSTMETHOD0(VideoTrackSourceInterface*, GetSource)
PROXY_METHOD1(void, RegisterObserver, ObserverInterface*)

View File

@ -232,9 +232,9 @@ class FakeVideoTrackSourceForStats
bool remote() const override { return false; }
void RegisterObserver(ObserverInterface* observer) override {}
void UnregisterObserver(ObserverInterface* observer) override {}
void AddOrUpdateSink(rtc::VideoSinkInterface<cricket::VideoFrame>* sink,
void AddOrUpdateSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
const rtc::VideoSinkWants& wants) override {}
void RemoveSink(rtc::VideoSinkInterface<cricket::VideoFrame>* sink) override {
void RemoveSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) override {
}
bool is_screencast() const override { return false; }
rtc::Optional<bool> needs_denoising() const override {

View File

@ -18,7 +18,6 @@
#include "webrtc/media/base/fakemediaengine.h"
#include "webrtc/media/base/fakevideocapturer.h"
#include "webrtc/media/base/fakevideorenderer.h"
#include "webrtc/media/engine/webrtcvideoframe.h"
using webrtc::FakeConstraints;
using webrtc::VideoCapturerTrackSource;

View File

@ -28,11 +28,9 @@ BEGIN_PROXY_MAP(VideoTrackSource)
PROXY_METHOD1(bool, GetStats, Stats*)
PROXY_WORKER_METHOD2(void,
AddOrUpdateSink,
rtc::VideoSinkInterface<cricket::VideoFrame>*,
rtc::VideoSinkInterface<VideoFrame>*,
const rtc::VideoSinkWants&)
PROXY_WORKER_METHOD1(void,
RemoveSink,
rtc::VideoSinkInterface<cricket::VideoFrame>*)
PROXY_WORKER_METHOD1(void, RemoveSink, rtc::VideoSinkInterface<VideoFrame>*)
PROXY_METHOD1(void, RegisterObserver, ObserverInterface*)
PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*)
END_PROXY()

View File

@ -34,9 +34,8 @@ std::string VideoTrack::kind() const {
// AddOrUpdateSink and RemoveSink should be called on the worker
// thread.
void VideoTrack::AddOrUpdateSink(
rtc::VideoSinkInterface<cricket::VideoFrame>* sink,
const rtc::VideoSinkWants& wants) {
void VideoTrack::AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
const rtc::VideoSinkWants& wants) {
RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
VideoSourceBase::AddOrUpdateSink(sink, wants);
rtc::VideoSinkWants modified_wants = wants;
@ -44,8 +43,7 @@ void VideoTrack::AddOrUpdateSink(
video_source_->AddOrUpdateSink(sink, modified_wants);
}
void VideoTrack::RemoveSink(
rtc::VideoSinkInterface<cricket::VideoFrame>* sink) {
void VideoTrack::RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink) {
RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
VideoSourceBase::RemoveSink(sink);
video_source_->RemoveSink(sink);

View File

@ -29,9 +29,9 @@ class VideoTrack : public MediaStreamTrack<VideoTrackInterface>,
const std::string& label,
VideoTrackSourceInterface* source);
void AddOrUpdateSink(rtc::VideoSinkInterface<cricket::VideoFrame>* sink,
void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
const rtc::VideoSinkWants& wants) override;
void RemoveSink(rtc::VideoSinkInterface<cricket::VideoFrame>* sink) override;
void RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink) override;
VideoTrackSourceInterface* GetSource() const override {
return video_source_.get();

View File

@ -17,7 +17,6 @@
#include "webrtc/base/gunit.h"
#include "webrtc/media/base/fakevideocapturer.h"
#include "webrtc/media/base/fakemediaengine.h"
#include "webrtc/media/engine/webrtcvideoframe.h"
using webrtc::FakeVideoTrackRenderer;
using webrtc::MediaSourceInterface;

View File

@ -15,7 +15,7 @@
namespace webrtc {
VideoTrackSource::VideoTrackSource(
rtc::VideoSourceInterface<cricket::VideoFrame>* source,
rtc::VideoSourceInterface<VideoFrame>* source,
bool remote)
: source_(source), state_(kInitializing), remote_(remote) {
worker_thread_checker_.DetachFromThread();
@ -33,7 +33,7 @@ void VideoTrackSource::OnSourceDestroyed() {
}
void VideoTrackSource::AddOrUpdateSink(
rtc::VideoSinkInterface<cricket::VideoFrame>* sink,
rtc::VideoSinkInterface<VideoFrame>* sink,
const rtc::VideoSinkWants& wants) {
RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
if (!source_) {
@ -42,8 +42,7 @@ void VideoTrackSource::AddOrUpdateSink(
source_->AddOrUpdateSink(sink, wants);
}
void VideoTrackSource::RemoveSink(
rtc::VideoSinkInterface<cricket::VideoFrame>* sink) {
void VideoTrackSource::RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink) {
RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
if (!source_) {
return;

View File

@ -22,8 +22,7 @@ namespace webrtc {
class VideoTrackSource : public Notifier<VideoTrackSourceInterface> {
public:
VideoTrackSource(rtc::VideoSourceInterface<cricket::VideoFrame>* source,
bool remote);
VideoTrackSource(rtc::VideoSourceInterface<VideoFrame>* source, bool remote);
void SetState(SourceState new_state);
// OnSourceDestroyed clears this instance pointer to |source_|. It is useful
// when the underlying rtc::VideoSourceInterface is destroyed before the
@ -39,13 +38,13 @@ class VideoTrackSource : public Notifier<VideoTrackSourceInterface> {
bool GetStats(Stats* stats) override { return false; }
void AddOrUpdateSink(rtc::VideoSinkInterface<cricket::VideoFrame>* sink,
void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
const rtc::VideoSinkWants& wants) override;
void RemoveSink(rtc::VideoSinkInterface<cricket::VideoFrame>* sink) override;
void RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink) override;
private:
rtc::ThreadChecker worker_thread_checker_;
rtc::VideoSourceInterface<cricket::VideoFrame>* source_;
rtc::VideoSourceInterface<VideoFrame>* source_;
cricket::VideoOptions options_;
SourceState state_;
const bool remote_;

View File

@ -19,7 +19,6 @@
#include "webrtc/base/common.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/stringutils.h"
#include "webrtc/media/engine/webrtcvideoframe.h"
using rtc::sprintfn;
@ -483,18 +482,15 @@ void GtkMainWnd::VideoRenderer::SetSize(int width, int height) {
}
void GtkMainWnd::VideoRenderer::OnFrame(
const cricket::VideoFrame& video_frame) {
const webrtc::VideoFrame& video_frame) {
gdk_threads_enter();
const cricket::WebRtcVideoFrame frame(
webrtc::I420Buffer::Rotate(video_frame.video_frame_buffer(),
video_frame.rotation()),
webrtc::kVideoRotation_0, video_frame.timestamp_us());
SetSize(frame.width(), frame.height());
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
frame.video_frame_buffer());
webrtc::I420Buffer::Rotate(video_frame.video_frame_buffer(),
video_frame.rotation()));
SetSize(buffer->width(), buffer->height());
libyuv::I420ToRGBA(buffer->DataY(), buffer->StrideY(),
buffer->DataU(), buffer->StrideU(),
buffer->DataV(), buffer->StrideV(),

View File

@ -72,14 +72,14 @@ class GtkMainWnd : public MainWindow {
void OnRedraw();
protected:
class VideoRenderer : public rtc::VideoSinkInterface<cricket::VideoFrame> {
class VideoRenderer : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
public:
VideoRenderer(GtkMainWnd* main_wnd,
webrtc::VideoTrackInterface* track_to_render);
virtual ~VideoRenderer();
// VideoSinkInterface implementation
void OnFrame(const cricket::VideoFrame& frame) override;
void OnFrame(const webrtc::VideoFrame& frame) override;
const uint8_t* image() const { return image_.get(); }

View File

@ -17,7 +17,6 @@
#include "webrtc/base/arraysize.h"
#include "webrtc/base/common.h"
#include "webrtc/base/logging.h"
#include "webrtc/media/engine/webrtcvideoframe.h"
ATOM MainWnd::wnd_class_ = 0;
const wchar_t MainWnd::kClassName[] = L"WebRTC_MainWnd";
@ -601,21 +600,18 @@ void MainWnd::VideoRenderer::SetSize(int width, int height) {
}
void MainWnd::VideoRenderer::OnFrame(
const cricket::VideoFrame& video_frame) {
const webrtc::VideoFrame& video_frame) {
{
AutoLock<VideoRenderer> lock(this);
const cricket::WebRtcVideoFrame frame(
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
webrtc::I420Buffer::Rotate(video_frame.video_frame_buffer(),
video_frame.rotation()),
webrtc::kVideoRotation_0, video_frame.timestamp_us());
video_frame.rotation()));
SetSize(frame.width(), frame.height());
SetSize(buffer->width(), buffer->height());
ASSERT(image_.get() != NULL);
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
frame.video_frame_buffer());
libyuv::I420ToARGB(buffer->DataY(), buffer->StrideY(),
buffer->DataU(), buffer->StrideU(),
buffer->DataV(), buffer->StrideV(),

View File

@ -21,7 +21,7 @@
#include "webrtc/examples/peerconnection/client/peer_connection_client.h"
#include "webrtc/media/base/mediachannel.h"
#include "webrtc/media/base/videocommon.h"
#include "webrtc/media/base/videoframe.h"
#include "webrtc/video_frame.h"
class MainWndCallback {
public:
@ -102,7 +102,7 @@ class MainWnd : public MainWindow {
HWND handle() const { return wnd_; }
class VideoRenderer : public rtc::VideoSinkInterface<cricket::VideoFrame> {
class VideoRenderer : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
public:
VideoRenderer(HWND wnd, int width, int height,
webrtc::VideoTrackInterface* track_to_render);
@ -117,7 +117,7 @@ class MainWnd : public MainWindow {
}
// VideoSinkInterface implementation
void OnFrame(const cricket::VideoFrame& frame) override;
void OnFrame(const webrtc::VideoFrame& frame) override;
const BITMAPINFO& bmi() const { return bmi_; }
const uint8_t* image() const { return image_.get(); }

View File

@ -27,7 +27,7 @@ bool AdaptedVideoTrackSource::GetStats(Stats* stats) {
return true;
}
void AdaptedVideoTrackSource::OnFrame(const cricket::VideoFrame& frame) {
void AdaptedVideoTrackSource::OnFrame(const webrtc::VideoFrame& frame) {
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
frame.video_frame_buffer());
/* Note that this is a "best effort" approach to
@ -42,7 +42,7 @@ void AdaptedVideoTrackSource::OnFrame(const cricket::VideoFrame& frame) {
frame.rotation() != webrtc::kVideoRotation_0 &&
!buffer->native_handle()) {
/* Apply pending rotation. */
broadcaster_.OnFrame(cricket::WebRtcVideoFrame(
broadcaster_.OnFrame(webrtc::VideoFrame(
webrtc::I420Buffer::Rotate(buffer, frame.rotation()),
webrtc::kVideoRotation_0, frame.timestamp_us()));
} else {
@ -51,7 +51,7 @@ void AdaptedVideoTrackSource::OnFrame(const cricket::VideoFrame& frame) {
}
void AdaptedVideoTrackSource::AddOrUpdateSink(
rtc::VideoSinkInterface<cricket::VideoFrame>* sink,
rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
const rtc::VideoSinkWants& wants) {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
@ -60,7 +60,7 @@ void AdaptedVideoTrackSource::AddOrUpdateSink(
}
void AdaptedVideoTrackSource::RemoveSink(
rtc::VideoSinkInterface<cricket::VideoFrame>* sink) {
rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
broadcaster_.RemoveSink(sink);

View File

@ -31,7 +31,7 @@ class AdaptedVideoTrackSource
// Checks the apply_rotation() flag. If the frame needs rotation, and it is a
// plain memory frame, it is rotated. Subclasses producing native frames must
// handle apply_rotation() themselves.
void OnFrame(const cricket::VideoFrame& frame);
void OnFrame(const webrtc::VideoFrame& frame);
// Reports the appropriate frame size after adaptation. Returns true
// if a frame is wanted. Returns false if there are no interested
@ -57,9 +57,9 @@ class AdaptedVideoTrackSource
private:
// Implements rtc::VideoSourceInterface.
void AddOrUpdateSink(rtc::VideoSinkInterface<cricket::VideoFrame>* sink,
void AddOrUpdateSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
const rtc::VideoSinkWants& wants) override;
void RemoveSink(rtc::VideoSinkInterface<cricket::VideoFrame>* sink) override;
void RemoveSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) override;
// Part of VideoTrackSourceInterface.
bool GetStats(Stats* stats) override;
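
The apply_rotation() handling described in the comment above follows one pattern throughout this CL: when a sink cannot accept pending rotation and the frame is a plain memory frame, the buffer is rotated with webrtc::I420Buffer::Rotate and re-wrapped as kVideoRotation_0. A hedged sketch of that pattern; the helper name is invented and the include paths follow the ones used elsewhere in this tree at the time:

// Sketch of the rotation pattern used by AdaptedVideoTrackSource,
// VideoCapturer and the example renderers in this CL.
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/common_video/include/video_frame_buffer.h"
#include "webrtc/common_video/rotation.h"
#include "webrtc/video_frame.h"

webrtc::VideoFrame MaybeApplyRotation(const webrtc::VideoFrame& frame) {
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
      frame.video_frame_buffer();
  if (frame.rotation() == webrtc::kVideoRotation_0 ||
      buffer->native_handle() != nullptr) {
    // Already upright, or a native (texture) frame that the producer has to
    // rotate itself; pass it through unchanged.
    return frame;
  }
  // Rotate the pixel data once and tag the result as unrotated, so sinks
  // that asked for rotation_applied never see a pending rotation.
  return webrtc::VideoFrame(
      webrtc::I420Buffer::Rotate(buffer, frame.rotation()),
      webrtc::kVideoRotation_0, frame.timestamp_us());
}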

View File

@ -518,8 +518,8 @@ class FakeVideoMediaChannel : public RtpHelper<VideoMediaChannel> {
const std::vector<VideoCodec>& codecs() const { return send_codecs(); }
bool rendering() const { return playout(); }
const VideoOptions& options() const { return options_; }
const std::map<uint32_t, rtc::VideoSinkInterface<VideoFrame>*>& sinks()
const {
const std::map<uint32_t, rtc::VideoSinkInterface<webrtc::VideoFrame>*>&
sinks() const {
return sinks_;
}
int max_bps() const { return max_bps_; }
@ -547,7 +547,7 @@ class FakeVideoMediaChannel : public RtpHelper<VideoMediaChannel> {
return true;
}
bool SetSink(uint32_t ssrc,
rtc::VideoSinkInterface<cricket::VideoFrame>* sink) override {
rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) override {
if (ssrc != 0 && sinks_.find(ssrc) == sinks_.end()) {
return false;
}
@ -565,7 +565,7 @@ class FakeVideoMediaChannel : public RtpHelper<VideoMediaChannel> {
uint32_t ssrc,
bool enable,
const VideoOptions* options,
rtc::VideoSourceInterface<cricket::VideoFrame>* source) override {
rtc::VideoSourceInterface<webrtc::VideoFrame>* source) override {
if (!RtpHelper<VideoMediaChannel>::MuteStream(ssrc, !enable)) {
return false;
}
@ -627,8 +627,8 @@ class FakeVideoMediaChannel : public RtpHelper<VideoMediaChannel> {
FakeVideoEngine* engine_;
std::vector<VideoCodec> recv_codecs_;
std::vector<VideoCodec> send_codecs_;
std::map<uint32_t, rtc::VideoSinkInterface<VideoFrame>*> sinks_;
std::map<uint32_t, rtc::VideoSourceInterface<VideoFrame>*> sources_;
std::map<uint32_t, rtc::VideoSinkInterface<webrtc::VideoFrame>*> sinks_;
std::map<uint32_t, rtc::VideoSourceInterface<webrtc::VideoFrame>*> sources_;
VideoOptions options_;
int max_bps_;
};

View File

@ -19,7 +19,7 @@
#include "webrtc/base/timeutils.h"
#include "webrtc/media/base/videocapturer.h"
#include "webrtc/media/base/videocommon.h"
#include "webrtc/media/base/videoframe.h"
#include "webrtc/video_frame.h"
namespace cricket {
@ -97,8 +97,9 @@ class FakeVideoCapturer : public cricket::VideoCapturer {
webrtc::I420Buffer::Create(adapted_width, adapted_height));
buffer->InitializeData();
OnFrame(WebRtcVideoFrame(buffer, rotation_,
next_timestamp_ / rtc::kNumNanosecsPerMicrosec),
OnFrame(webrtc::VideoFrame(
buffer, rotation_,
next_timestamp_ / rtc::kNumNanosecsPerMicrosec),
width, height);
}
next_timestamp_ += timestamp_interval;

View File

@ -12,13 +12,13 @@
#define WEBRTC_MEDIA_BASE_FAKEVIDEORENDERER_H_
#include "webrtc/base/logging.h"
#include "webrtc/media/base/videoframe.h"
#include "webrtc/media/base/videosinkinterface.h"
#include "webrtc/video_frame.h"
namespace cricket {
// Faked video renderer that has a callback for actions on rendering.
class FakeVideoRenderer : public rtc::VideoSinkInterface<cricket::VideoFrame> {
class FakeVideoRenderer : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
public:
FakeVideoRenderer()
: errors_(0),
@ -29,7 +29,7 @@ class FakeVideoRenderer : public rtc::VideoSinkInterface<cricket::VideoFrame> {
num_rendered_frames_(0),
black_frame_(false) {}
virtual void OnFrame(const VideoFrame& frame) {
virtual void OnFrame(const webrtc::VideoFrame& frame) {
rtc::CritScope cs(&crit_);
// TODO(zhurunz) Check with VP8 team to see if we can remove this
// tolerance on Y values. Some unit tests produce Y values close
@ -79,7 +79,7 @@ class FakeVideoRenderer : public rtc::VideoSinkInterface<cricket::VideoFrame> {
uint8_t u_max,
uint8_t v_min,
uint8_t v_max,
const cricket::VideoFrame* frame) {
const webrtc::VideoFrame* frame) {
if (!frame || !frame->video_frame_buffer()) {
return false;
}

View File

@ -31,9 +31,6 @@
#include "webrtc/media/base/codec.h"
#include "webrtc/media/base/mediaconstants.h"
#include "webrtc/media/base/streamparams.h"
// TODO(nisse): Temporarily; to be replaced with a forward declaration
// of webrtc::VideoFrame when dependency on cricket::VideoFrame is deleted.
#include "webrtc/media/base/videoframe.h"
#include "webrtc/media/base/videosinkinterface.h"
#include "webrtc/media/base/videosourceinterface.h"
// TODO(juberti): re-evaluate this include
@ -46,6 +43,7 @@ class Timing;
namespace webrtc {
class AudioSinkInterface;
class VideoFrame;
}
namespace cricket {
@ -1034,11 +1032,11 @@ class VideoMediaChannel : public MediaChannel {
uint32_t ssrc,
bool enable,
const VideoOptions* options,
rtc::VideoSourceInterface<cricket::VideoFrame>* source) = 0;
rtc::VideoSourceInterface<webrtc::VideoFrame>* source) = 0;
// Sets the sink object to be used for the specified stream.
// If SSRC is 0, the renderer is used for the 'default' stream.
virtual bool SetSink(uint32_t ssrc,
rtc::VideoSinkInterface<cricket::VideoFrame>* sink) = 0;
rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) = 0;
// Gets quality stats for the channel.
virtual bool GetStats(VideoMediaInfo* info) = 0;
};
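
Several headers in this CL, mediachannel.h above among them, stop including webrtc/media/base/videoframe.h and instead forward-declare webrtc::VideoFrame; that is enough because the type only appears as a template argument behind pointers and references. This is also why the first landing broke Chrome: code that used the type through the old transitive include now has to include webrtc/video_frame.h itself. A minimal sketch of the idea in an invented header:

// demo_video_channel.h -- illustrative only, not part of this CL.
#include <cstdint>

#include "webrtc/media/base/videosinkinterface.h"
#include "webrtc/media/base/videosourceinterface.h"

namespace webrtc {
// Forward declaration, mirroring mediachannel.h; the full definition in
// webrtc/video_frame.h is only needed by code that touches frame contents.
class VideoFrame;
}

class DemoVideoChannel {
 public:
  virtual ~DemoVideoChannel() = default;
  // Only pointers to the sink/source interfaces appear here, so this header
  // compiles without a definition of webrtc::VideoFrame.
  virtual bool SetSink(uint32_t ssrc,
                       rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) = 0;
  virtual bool SetVideoSource(
      uint32_t ssrc,
      rtc::VideoSourceInterface<webrtc::VideoFrame>* source) = 0;
};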

View File

@ -23,7 +23,7 @@
#include "webrtc/base/testutils.h"
#include "webrtc/media/base/rtpdump.h"
#include "webrtc/media/base/videocapturer.h"
#include "webrtc/media/base/videoframe.h"
#include "webrtc/video_frame.h"
namespace cricket {
@ -230,7 +230,7 @@ void VideoCapturerListener::OnStateChange(VideoCapturer* capturer,
last_capture_state_ = result;
}
void VideoCapturerListener::OnFrame(const VideoFrame& frame) {
void VideoCapturerListener::OnFrame(const webrtc::VideoFrame& frame) {
++frame_count_;
if (1 == frame_count_) {
frame_width_ = frame.width();

View File

@ -29,6 +29,10 @@ class ByteBufferWriter;
class StreamInterface;
}
namespace webrtc {
class VideoFrame;
}
namespace cricket {
// Returns size of 420 image with rounding on chroma for odd sizes.
@ -115,7 +119,7 @@ class RtpTestUtility {
// Test helper for testing VideoCapturer implementations.
class VideoCapturerListener
: public sigslot::has_slots<>,
public rtc::VideoSinkInterface<cricket::VideoFrame> {
public rtc::VideoSinkInterface<webrtc::VideoFrame> {
public:
explicit VideoCapturerListener(VideoCapturer* cap);
~VideoCapturerListener();
@ -127,7 +131,7 @@ class VideoCapturerListener
bool resolution_changed() const { return resolution_changed_; }
void OnStateChange(VideoCapturer* capturer, CaptureState state);
void OnFrame(const VideoFrame& frame) override;
void OnFrame(const webrtc::VideoFrame& frame) override;
private:
VideoCapturer* capturer_;

View File

@ -42,7 +42,7 @@ class VideoAdapterTest : public testing::Test {
protected:
class VideoCapturerListener
: public rtc::VideoSinkInterface<cricket::VideoFrame> {
: public rtc::VideoSinkInterface<webrtc::VideoFrame> {
public:
struct Stats {
int captured_frames;
@ -62,7 +62,7 @@ class VideoAdapterTest : public testing::Test {
last_adapt_was_no_op_(false) {
}
void OnFrame(const cricket::VideoFrame& frame) {
void OnFrame(const webrtc::VideoFrame& frame) {
rtc::CritScope lock(&crit_);
const int in_width = frame.width();
const int in_height = frame.height();

View File

@ -22,7 +22,7 @@ VideoBroadcaster::VideoBroadcaster() {
}
void VideoBroadcaster::AddOrUpdateSink(
VideoSinkInterface<cricket::VideoFrame>* sink,
VideoSinkInterface<webrtc::VideoFrame>* sink,
const VideoSinkWants& wants) {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
RTC_DCHECK(sink != nullptr);
@ -32,7 +32,7 @@ void VideoBroadcaster::AddOrUpdateSink(
}
void VideoBroadcaster::RemoveSink(
VideoSinkInterface<cricket::VideoFrame>* sink) {
VideoSinkInterface<webrtc::VideoFrame>* sink) {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
RTC_DCHECK(sink != nullptr);
rtc::CritScope cs(&sinks_and_wants_lock_);
@ -50,7 +50,7 @@ VideoSinkWants VideoBroadcaster::wants() const {
return current_wants_;
}
void VideoBroadcaster::OnFrame(const cricket::VideoFrame& frame) {
void VideoBroadcaster::OnFrame(const webrtc::VideoFrame& frame) {
rtc::CritScope cs(&sinks_and_wants_lock_);
for (auto& sink_pair : sink_pairs()) {
if (sink_pair.wants.rotation_applied &&
@ -63,7 +63,7 @@ void VideoBroadcaster::OnFrame(const cricket::VideoFrame& frame) {
continue;
}
if (sink_pair.wants.black_frames) {
sink_pair.sink->OnFrame(cricket::WebRtcVideoFrame(
sink_pair.sink->OnFrame(webrtc::VideoFrame(
GetBlackFrameBuffer(frame.width(), frame.height()), frame.rotation(),
frame.timestamp_us()));
} else {

View File

@ -17,10 +17,9 @@
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/thread_checker.h"
#include "webrtc/media/base/videoframe.h"
#include "webrtc/media/base/videosinkinterface.h"
#include "webrtc/media/base/videosourcebase.h"
#include "webrtc/media/engine/webrtcvideoframe.h"
#include "webrtc/video_frame.h"
namespace rtc {
@ -31,12 +30,12 @@ namespace rtc {
// Video frames can be broadcasted on any thread. I.e VideoBroadcaster::OnFrame
// can be called on any thread.
class VideoBroadcaster : public VideoSourceBase,
public VideoSinkInterface<cricket::VideoFrame> {
public VideoSinkInterface<webrtc::VideoFrame> {
public:
VideoBroadcaster();
void AddOrUpdateSink(VideoSinkInterface<cricket::VideoFrame>* sink,
void AddOrUpdateSink(VideoSinkInterface<webrtc::VideoFrame>* sink,
const VideoSinkWants& wants) override;
void RemoveSink(VideoSinkInterface<cricket::VideoFrame>* sink) override;
void RemoveSink(VideoSinkInterface<webrtc::VideoFrame>* sink) override;
// Returns true if the next frame will be delivered to at least one sink.
bool frame_wanted() const;
@ -49,7 +48,7 @@ class VideoBroadcaster : public VideoSourceBase,
// it will never receive a frame with pending rotation. Our caller
// may pass in frames without precise synchronization with changes
// to the VideoSinkWants.
void OnFrame(const cricket::VideoFrame& frame) override;
void OnFrame(const webrtc::VideoFrame& frame) override;
protected:
void UpdateWants() EXCLUSIVE_LOCKS_REQUIRED(sinks_and_wants_lock_);

View File

@ -11,12 +11,11 @@
#include "webrtc/base/gunit.h"
#include "webrtc/media/base/fakevideorenderer.h"
#include "webrtc/media/base/videobroadcaster.h"
#include "webrtc/media/engine/webrtcvideoframe.h"
#include "webrtc/video_frame.h"
using rtc::VideoBroadcaster;
using rtc::VideoSinkWants;
using cricket::FakeVideoRenderer;
using cricket::WebRtcVideoFrame;
TEST(VideoBroadcasterTest, frame_wanted) {
@ -39,7 +38,7 @@ TEST(VideoBroadcasterTest, OnFrame) {
broadcaster.AddOrUpdateSink(&sink1, rtc::VideoSinkWants());
broadcaster.AddOrUpdateSink(&sink2, rtc::VideoSinkWants());
WebRtcVideoFrame frame;
webrtc::VideoFrame frame;
broadcaster.OnFrame(frame);
EXPECT_EQ(1, sink1.num_rendered_frames());
@ -139,8 +138,8 @@ TEST(VideoBroadcasterTest, SinkWantsBlackFrames) {
// Makes it not all black.
buffer->InitializeData();
cricket::WebRtcVideoFrame frame1(buffer, webrtc::kVideoRotation_0,
10 /* timestamp_us */);
webrtc::VideoFrame frame1(buffer, webrtc::kVideoRotation_0,
10 /* timestamp_us */);
broadcaster.OnFrame(frame1);
EXPECT_TRUE(sink1.black_frame());
EXPECT_EQ(10, sink1.timestamp_us());
@ -153,8 +152,8 @@ TEST(VideoBroadcasterTest, SinkWantsBlackFrames) {
wants2.black_frames = true;
broadcaster.AddOrUpdateSink(&sink2, wants2);
cricket::WebRtcVideoFrame frame2(buffer, webrtc::kVideoRotation_0,
30 /* timestamp_us */);
webrtc::VideoFrame frame2(buffer, webrtc::kVideoRotation_0,
30 /* timestamp_us */);
broadcaster.OnFrame(frame2);
EXPECT_FALSE(sink1.black_frame());
EXPECT_EQ(30, sink1.timestamp_us());

View File

@ -18,7 +18,7 @@
#include "webrtc/base/common.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/systeminfo.h"
#include "webrtc/media/engine/webrtcvideoframe.h"
#include "webrtc/video_frame.h"
namespace cricket {
@ -132,14 +132,14 @@ bool VideoCapturer::GetInputSize(int* width, int* height) {
}
void VideoCapturer::RemoveSink(
rtc::VideoSinkInterface<cricket::VideoFrame>* sink) {
rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
broadcaster_.RemoveSink(sink);
OnSinkWantsChanged(broadcaster_.wants());
}
void VideoCapturer::AddOrUpdateSink(
rtc::VideoSinkInterface<cricket::VideoFrame>* sink,
rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
const rtc::VideoSinkWants& wants) {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
broadcaster_.AddOrUpdateSink(sink, wants);
@ -196,7 +196,7 @@ bool VideoCapturer::AdaptFrame(int width,
return true;
}
void VideoCapturer::OnFrame(const VideoFrame& frame,
void VideoCapturer::OnFrame(const webrtc::VideoFrame& frame,
int orig_width,
int orig_height) {
// For a child class which implements rotation itself, we should
@ -215,7 +215,7 @@ void VideoCapturer::OnFrame(const VideoFrame& frame,
LOG(LS_WARNING) << "Native frame requiring rotation. Discarding.";
return;
}
broadcaster_.OnFrame(WebRtcVideoFrame(
broadcaster_.OnFrame(webrtc::VideoFrame(
webrtc::I420Buffer::Rotate(buffer, frame.rotation()),
webrtc::kVideoRotation_0, frame.timestamp_us()));
} else {

View File

@ -29,6 +29,9 @@
#include "webrtc/media/base/videobroadcaster.h"
#include "webrtc/media/base/videocommon.h"
namespace webrtc {
class VideoFrame;
}
namespace cricket {
@ -69,7 +72,7 @@ enum CaptureState {
// thread safe.
//
class VideoCapturer : public sigslot::has_slots<>,
public rtc::VideoSourceInterface<cricket::VideoFrame> {
public rtc::VideoSourceInterface<webrtc::VideoFrame> {
public:
VideoCapturer();
@ -169,9 +172,9 @@ class VideoCapturer : public sigslot::has_slots<>,
bool GetInputSize(int* width, int* height);
// Implements VideoSourceInterface
void AddOrUpdateSink(rtc::VideoSinkInterface<cricket::VideoFrame>* sink,
void AddOrUpdateSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
const rtc::VideoSinkWants& wants) override;
void RemoveSink(rtc::VideoSinkInterface<cricket::VideoFrame>* sink) override;
void RemoveSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) override;
protected:
// OnSinkWantsChanged can be overridden to change the default behavior
@ -210,7 +213,9 @@ class VideoCapturer : public sigslot::has_slots<>,
// VideoFrame. OnFrame can be called directly by an implementation
// that does not use SignalFrameCaptured or OnFrameCaptured. The
// orig_width and orig_height are used only to produce stats.
void OnFrame(const VideoFrame& frame, int orig_width, int orig_height);
void OnFrame(const webrtc::VideoFrame& frame,
int orig_width,
int orig_height);
VideoAdapter* video_adapter() { return &video_adapter_; }

View File

@ -19,7 +19,7 @@ VideoSourceBase::VideoSourceBase() {
}
void VideoSourceBase::AddOrUpdateSink(
VideoSinkInterface<cricket::VideoFrame>* sink,
VideoSinkInterface<webrtc::VideoFrame>* sink,
const VideoSinkWants& wants) {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
RTC_DCHECK(sink != nullptr);
@ -32,8 +32,7 @@ void VideoSourceBase::AddOrUpdateSink(
}
}
void VideoSourceBase::RemoveSink(
VideoSinkInterface<cricket::VideoFrame>* sink) {
void VideoSourceBase::RemoveSink(VideoSinkInterface<webrtc::VideoFrame>* sink) {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
RTC_DCHECK(sink != nullptr);
RTC_DCHECK(FindSinkPair(sink));
@ -45,7 +44,7 @@ void VideoSourceBase::RemoveSink(
}
VideoSourceBase::SinkPair* VideoSourceBase::FindSinkPair(
const VideoSinkInterface<cricket::VideoFrame>* sink) {
const VideoSinkInterface<webrtc::VideoFrame>* sink) {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
auto sink_pair_it = std::find_if(
sinks_.begin(), sinks_.end(),

View File

@ -14,28 +14,27 @@
#include <vector>
#include "webrtc/base/thread_checker.h"
#include "webrtc/media/base/videoframe.h"
#include "webrtc/media/base/videosourceinterface.h"
#include "webrtc/video_frame.h"
namespace rtc {
// VideoSourceBase is not thread safe.
class VideoSourceBase : public VideoSourceInterface<cricket::VideoFrame> {
class VideoSourceBase : public VideoSourceInterface<webrtc::VideoFrame> {
public:
VideoSourceBase();
void AddOrUpdateSink(VideoSinkInterface<cricket::VideoFrame>* sink,
void AddOrUpdateSink(VideoSinkInterface<webrtc::VideoFrame>* sink,
const VideoSinkWants& wants) override;
void RemoveSink(VideoSinkInterface<cricket::VideoFrame>* sink) override;
void RemoveSink(VideoSinkInterface<webrtc::VideoFrame>* sink) override;
protected:
struct SinkPair {
SinkPair(VideoSinkInterface<cricket::VideoFrame>* sink,
VideoSinkWants wants)
SinkPair(VideoSinkInterface<webrtc::VideoFrame>* sink, VideoSinkWants wants)
: sink(sink), wants(wants) {}
VideoSinkInterface<cricket::VideoFrame>* sink;
VideoSinkInterface<webrtc::VideoFrame>* sink;
VideoSinkWants wants;
};
SinkPair* FindSinkPair(const VideoSinkInterface<cricket::VideoFrame>* sink);
SinkPair* FindSinkPair(const VideoSinkInterface<webrtc::VideoFrame>* sink);
const std::vector<SinkPair>& sink_pairs() const { return sinks_; }
ThreadChecker thread_checker_;

View File

@ -11,13 +11,13 @@
// Implementation of GtkVideoRenderer
#include "webrtc/media/devices/gtkvideorenderer.h"
#include "webrtc/video_frame.h"
#include <gdk/gdk.h>
#include <glib.h>
#include <gtk/gtk.h>
#include "libyuv/convert_argb.h"
#include "webrtc/media/engine/webrtcvideoframe.h"
namespace cricket {
@ -80,24 +80,21 @@ bool GtkVideoRenderer::SetSize(int width, int height) {
return true;
}
void GtkVideoRenderer::OnFrame(const VideoFrame& video_frame) {
const cricket::WebRtcVideoFrame frame(
void GtkVideoRenderer::OnFrame(const webrtc::VideoFrame& video_frame) {
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
webrtc::I420Buffer::Rotate(video_frame.video_frame_buffer(),
video_frame.rotation()),
webrtc::kVideoRotation_0, video_frame.timestamp_us());
video_frame.rotation()));
// Need to set size as the frame might be rotated.
if (!SetSize(frame.width(), frame.height())) {
if (!SetSize(buffer->width(), buffer->height())) {
return;
}
// convert I420 frame to ABGR format, which is accepted by GTK
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
frame.video_frame_buffer());
libyuv::I420ToARGB(buffer->DataY(), buffer->StrideY(),
buffer->DataU(), buffer->StrideU(),
buffer->DataV(), buffer->StrideV(),
image_.get(), frame.width() * 4,
image_.get(), buffer->width() * 4,
buffer->width(), buffer->height());
ScopedGdkLock lock;
@ -111,11 +108,11 @@ void GtkVideoRenderer::OnFrame(const VideoFrame& video_frame) {
draw_area_->style->fg_gc[GTK_STATE_NORMAL],
0,
0,
frame.width(),
frame.height(),
buffer->width(),
buffer->height(),
GDK_RGB_DITHER_MAX,
image_.get(),
frame.width() * 4);
buffer->width() * 4);
// Run the Gtk main loop to refresh the window.
Pump();

View File

@ -17,22 +17,22 @@
#include <memory>
#include "webrtc/base/basictypes.h"
// TODO(nisse): Temporarily; to be replaced with a forward declaration
// of webrtc::VideoFrame when dependency on cricket::VideoFrame is deleted.
#include "webrtc/media/base/videoframe.h"
#include "webrtc/media/base/videosinkinterface.h"
typedef struct _GtkWidget GtkWidget; // forward declaration, defined in gtk.h
namespace webrtc {
class VideoFrame;
}
namespace cricket {
class GtkVideoRenderer : public rtc::VideoSinkInterface<VideoFrame> {
class GtkVideoRenderer : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
public:
GtkVideoRenderer(int x, int y);
virtual ~GtkVideoRenderer();
// Implementation of VideoSinkInterface.
void OnFrame(const VideoFrame& frame) override;
void OnFrame(const webrtc::VideoFrame& frame) override;
private:
bool SetSize(int width, int height);

View File

@ -18,7 +18,6 @@
#include "webrtc/base/safe_conversions.h"
#include "webrtc/base/thread.h"
#include "webrtc/base/timeutils.h"
#include "webrtc/media/engine/webrtcvideoframe.h"
#include "webrtc/base/win32.h" // Need this to #include the impl files.
#include "webrtc/modules/video_capture/video_capture_factory.h"
@ -353,10 +352,7 @@ void WebRtcVideoCapturer::OnIncomingCapturedFrame(
<< ". Expected format " << GetCaptureFormat()->ToString();
}
OnFrame(cricket::WebRtcVideoFrame(
sample.video_frame_buffer(), sample.rotation(),
sample.render_time_ms() * rtc::kNumMicrosecsPerMillisec),
sample.width(), sample.height());
OnFrame(sample, sample.width(), sample.height());
}
void WebRtcVideoCapturer::OnCaptureDelayChanged(const int32_t id,

View File

@ -21,7 +21,6 @@
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/media/base/device.h"
#include "webrtc/media/base/videocapturer.h"
#include "webrtc/media/engine/webrtcvideoframe.h"
#include "webrtc/modules/video_capture/video_capture.h"
namespace cricket {

View File

@ -524,14 +524,14 @@ UnsignalledSsrcHandler::Action DefaultUnsignalledSsrcHandler::OnUnsignalledSsrc(
return kDeliverPacket;
}
rtc::VideoSinkInterface<VideoFrame>*
rtc::VideoSinkInterface<webrtc::VideoFrame>*
DefaultUnsignalledSsrcHandler::GetDefaultSink() const {
return default_sink_;
}
void DefaultUnsignalledSsrcHandler::SetDefaultSink(
VideoMediaChannel* channel,
rtc::VideoSinkInterface<VideoFrame>* sink) {
rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) {
default_sink_ = sink;
if (default_recv_ssrc_ != 0) {
channel->SetSink(default_recv_ssrc_, default_sink_);
@ -1052,7 +1052,7 @@ bool WebRtcVideoChannel2::SetVideoSend(
uint32_t ssrc,
bool enable,
const VideoOptions* options,
rtc::VideoSourceInterface<cricket::VideoFrame>* source) {
rtc::VideoSourceInterface<webrtc::VideoFrame>* source) {
TRACE_EVENT0("webrtc", "SetVideoSend");
RTC_DCHECK(ssrc != 0);
LOG(LS_INFO) << "SetVideoSend (ssrc= " << ssrc << ", enable = " << enable
@ -1293,8 +1293,9 @@ bool WebRtcVideoChannel2::RemoveRecvStream(uint32_t ssrc) {
return true;
}
bool WebRtcVideoChannel2::SetSink(uint32_t ssrc,
rtc::VideoSinkInterface<VideoFrame>* sink) {
bool WebRtcVideoChannel2::SetSink(
uint32_t ssrc,
rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) {
LOG(LS_INFO) << "SetSink: ssrc:" << ssrc << " "
<< (sink ? "(ptr)" : "nullptr");
if (ssrc == 0) {
@ -1584,7 +1585,7 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::~WebRtcVideoSendStream() {
}
void WebRtcVideoChannel2::WebRtcVideoSendStream::OnFrame(
const VideoFrame& frame) {
const webrtc::VideoFrame& frame) {
TRACE_EVENT0("webrtc", "WebRtcVideoSendStream::OnFrame");
webrtc::VideoFrame video_frame(frame.video_frame_buffer(),
frame.rotation(),
@ -1623,7 +1624,7 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::OnFrame(
bool WebRtcVideoChannel2::WebRtcVideoSendStream::SetVideoSend(
bool enable,
const VideoOptions* options,
rtc::VideoSourceInterface<cricket::VideoFrame>* source) {
rtc::VideoSourceInterface<webrtc::VideoFrame>* source) {
TRACE_EVENT0("webrtc", "WebRtcVideoSendStream::SetVideoSend");
RTC_DCHECK_RUN_ON(&thread_checker_);
@ -2371,7 +2372,7 @@ bool WebRtcVideoChannel2::WebRtcVideoReceiveStream::IsDefaultStream() const {
}
void WebRtcVideoChannel2::WebRtcVideoReceiveStream::SetSink(
rtc::VideoSinkInterface<cricket::VideoFrame>* sink) {
rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) {
rtc::CritScope crit(&sink_lock_);
sink_ = sink;
}

View File

@ -26,10 +26,10 @@
#include "webrtc/media/base/videosourceinterface.h"
#include "webrtc/call.h"
#include "webrtc/media/base/mediaengine.h"
#include "webrtc/media/base/videoframe.h"
#include "webrtc/media/engine/webrtcvideodecoderfactory.h"
#include "webrtc/media/engine/webrtcvideoencoderfactory.h"
#include "webrtc/transport.h"
#include "webrtc/video_frame.h"
#include "webrtc/video_receive_stream.h"
#include "webrtc/video_send_stream.h"
@ -82,14 +82,14 @@ class DefaultUnsignalledSsrcHandler : public UnsignalledSsrcHandler {
Action OnUnsignalledSsrc(WebRtcVideoChannel2* channel,
uint32_t ssrc) override;
rtc::VideoSinkInterface<VideoFrame>* GetDefaultSink() const;
rtc::VideoSinkInterface<webrtc::VideoFrame>* GetDefaultSink() const;
void SetDefaultSink(VideoMediaChannel* channel,
rtc::VideoSinkInterface<VideoFrame>* sink);
rtc::VideoSinkInterface<webrtc::VideoFrame>* sink);
virtual ~DefaultUnsignalledSsrcHandler() = default;
private:
uint32_t default_recv_ssrc_;
rtc::VideoSinkInterface<VideoFrame>* default_sink_;
rtc::VideoSinkInterface<webrtc::VideoFrame>* default_sink_;
};
// WebRtcVideoEngine2 is used for the new native WebRTC Video API (webrtc:1667).
@ -155,14 +155,14 @@ class WebRtcVideoChannel2 : public VideoMediaChannel, public webrtc::Transport {
uint32_t ssrc,
bool enable,
const VideoOptions* options,
rtc::VideoSourceInterface<cricket::VideoFrame>* source) override;
rtc::VideoSourceInterface<webrtc::VideoFrame>* source) override;
bool AddSendStream(const StreamParams& sp) override;
bool RemoveSendStream(uint32_t ssrc) override;
bool AddRecvStream(const StreamParams& sp) override;
bool AddRecvStream(const StreamParams& sp, bool default_stream);
bool RemoveRecvStream(uint32_t ssrc) override;
bool SetSink(uint32_t ssrc,
rtc::VideoSinkInterface<VideoFrame>* sink) override;
rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) override;
bool GetStats(VideoMediaInfo* info) override;
void OnPacketReceived(rtc::CopyOnWriteBuffer* packet,
@ -237,7 +237,7 @@ class WebRtcVideoChannel2 : public VideoMediaChannel, public webrtc::Transport {
// Wrapper for the sender part, this is where the source is connected and
// frames are then converted from cricket frames to webrtc frames.
class WebRtcVideoSendStream
: public rtc::VideoSinkInterface<cricket::VideoFrame>,
: public rtc::VideoSinkInterface<webrtc::VideoFrame>,
public rtc::VideoSourceInterface<webrtc::VideoFrame> {
public:
WebRtcVideoSendStream(
@ -259,18 +259,17 @@ class WebRtcVideoChannel2 : public VideoMediaChannel, public webrtc::Transport {
// Implements rtc::VideoSourceInterface<webrtc::VideoFrame>.
// WebRtcVideoSendStream acts as a source to the webrtc::VideoSendStream
// in |stream_|. The reason is that WebRtcVideoSendStream receives
// cricket::VideoFrames and forwards webrtc::VideoFrames to |source_|.
// in |stream_|.
// TODO(perkj, nisse): Refactor WebRtcVideoSendStream to directly connect
// the camera input |source_|
void AddOrUpdateSink(VideoSinkInterface<webrtc::VideoFrame>* sink,
const rtc::VideoSinkWants& wants) override;
void RemoveSink(VideoSinkInterface<webrtc::VideoFrame>* sink) override;
void OnFrame(const cricket::VideoFrame& frame) override;
void OnFrame(const webrtc::VideoFrame& frame) override;
bool SetVideoSend(bool mute,
const VideoOptions* options,
rtc::VideoSourceInterface<cricket::VideoFrame>* source);
rtc::VideoSourceInterface<webrtc::VideoFrame>* source);
void SetSend(bool send);
@ -347,7 +346,7 @@ class WebRtcVideoChannel2 : public VideoMediaChannel, public webrtc::Transport {
const std::vector<SsrcGroup> ssrc_groups_ ACCESS_ON(&thread_checker_);
webrtc::Call* const call_;
const bool enable_cpu_overuse_detection_;
rtc::VideoSourceInterface<cricket::VideoFrame>* source_
rtc::VideoSourceInterface<webrtc::VideoFrame>* source_
ACCESS_ON(&thread_checker_);
WebRtcVideoEncoderFactory* const external_encoder_factory_
ACCESS_ON(&thread_checker_);
@ -377,9 +376,7 @@ class WebRtcVideoChannel2 : public VideoMediaChannel, public webrtc::Transport {
};
// Wrapper for the receiver part, contains configs etc. that are needed to
// reconstruct the underlying VideoReceiveStream. Also serves as a wrapper
// between rtc::VideoSinkInterface<webrtc::VideoFrame> and
// rtc::VideoSinkInterface<cricket::VideoFrame>.
// reconstruct the underlying VideoReceiveStream.
class WebRtcVideoReceiveStream
: public rtc::VideoSinkInterface<webrtc::VideoFrame> {
public:
@ -406,7 +403,7 @@ class WebRtcVideoChannel2 : public VideoMediaChannel, public webrtc::Transport {
void OnFrame(const webrtc::VideoFrame& frame) override;
bool IsDefaultStream() const;
void SetSink(rtc::VideoSinkInterface<cricket::VideoFrame>* sink);
void SetSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink);
VideoReceiverInfo GetVideoReceiverInfo(bool log_stats);
@ -444,7 +441,7 @@ class WebRtcVideoChannel2 : public VideoMediaChannel, public webrtc::Transport {
std::vector<AllocatedDecoder> allocated_decoders_;
rtc::CriticalSection sink_lock_;
rtc::VideoSinkInterface<cricket::VideoFrame>* sink_ GUARDED_BY(sink_lock_);
rtc::VideoSinkInterface<webrtc::VideoFrame>* sink_ GUARDED_BY(sink_lock_);
// Expands remote RTP timestamps to int64_t to be able to estimate how long
// the stream has been running.
rtc::TimestampWrapAroundHandler timestamp_wraparound_handler_

View File

@ -1908,7 +1908,7 @@ VideoChannel::~VideoChannel() {
}
bool VideoChannel::SetSink(uint32_t ssrc,
rtc::VideoSinkInterface<VideoFrame>* sink) {
rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) {
worker_thread()->Invoke<void>(
RTC_FROM_HERE,
Bind(&VideoMediaChannel::SetSink, media_channel(), ssrc, sink));
@ -1919,7 +1919,7 @@ bool VideoChannel::SetVideoSend(
uint32_t ssrc,
bool mute,
const VideoOptions* options,
rtc::VideoSourceInterface<cricket::VideoFrame>* source) {
rtc::VideoSourceInterface<webrtc::VideoFrame>* source) {
return InvokeOnWorker(RTC_FROM_HERE,
Bind(&VideoMediaChannel::SetVideoSend, media_channel(),
ssrc, mute, options, source));

View File

@ -551,7 +551,8 @@ class VideoChannel : public BaseChannel {
return static_cast<VideoMediaChannel*>(BaseChannel::media_channel());
}
bool SetSink(uint32_t ssrc, rtc::VideoSinkInterface<VideoFrame>* sink);
bool SetSink(uint32_t ssrc,
rtc::VideoSinkInterface<webrtc::VideoFrame>* sink);
// Get statistics about the current media session.
bool GetStats(VideoMediaInfo* stats);
@ -567,7 +568,7 @@ class VideoChannel : public BaseChannel {
bool SetVideoSend(uint32_t ssrc,
bool enable,
const VideoOptions* options,
rtc::VideoSourceInterface<cricket::VideoFrame>* source);
rtc::VideoSourceInterface<webrtc::VideoFrame>* source);
webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const;
bool SetRtpSendParameters(uint32_t ssrc,
const webrtc::RtpParameters& parameters);

View File

@ -10,8 +10,8 @@
#import "WebRTC/RTCVideoFrame.h"
#include "webrtc/common_video/include/video_frame_buffer.h"
#include "webrtc/common_video/rotation.h"
#include "webrtc/media/base/videoframe.h"
NS_ASSUME_NONNULL_BEGIN

View File

@ -31,7 +31,7 @@ NS_ASSUME_NONNULL_BEGIN
* during construction. This pointer is unsafe and owned by this class.
*/
@property(nonatomic, readonly)
rtc::VideoSinkInterface<cricket::VideoFrame> *nativeVideoRenderer;
rtc::VideoSinkInterface<webrtc::VideoFrame> *nativeVideoRenderer;
/** Initialize an RTCVideoRendererAdapter with an RTCVideoRenderer. */
- (instancetype)initWithNativeRenderer:(id<RTCVideoRenderer>)videoRenderer

View File

@ -14,19 +14,17 @@
#include <memory>
#include "webrtc/media/engine/webrtcvideoframe.h"
namespace webrtc {
class VideoRendererAdapter
: public rtc::VideoSinkInterface<cricket::VideoFrame> {
: public rtc::VideoSinkInterface<webrtc::VideoFrame> {
public:
VideoRendererAdapter(RTCVideoRendererAdapter* adapter) {
adapter_ = adapter;
size_ = CGSizeZero;
}
void OnFrame(const cricket::VideoFrame& nativeVideoFrame) override {
void OnFrame(const webrtc::VideoFrame& nativeVideoFrame) override {
RTCVideoFrame* videoFrame = [[RTCVideoFrame alloc]
initWithVideoBuffer:nativeVideoFrame.video_frame_buffer()
rotation:nativeVideoFrame.rotation()
@ -64,7 +62,7 @@ class VideoRendererAdapter
return self;
}
- (rtc::VideoSinkInterface<cricket::VideoFrame> *)nativeVideoRenderer {
- (rtc::VideoSinkInterface<webrtc::VideoFrame> *)nativeVideoRenderer {
return _adapter.get();
}

View File

@ -15,7 +15,7 @@
NS_ASSUME_NONNULL_BEGIN
// RTCVideoFrame is an ObjectiveC version of cricket::VideoFrame.
// RTCVideoFrame is an ObjectiveC version of webrtc::VideoFrame.
RTC_EXPORT
@interface RTCVideoFrame : NSObject

View File

@ -20,9 +20,6 @@
namespace webrtc {
// TODO(nisse): This class duplicates cricket::VideoFrame. There's
// ongoing work to merge the classes. See
// https://bugs.chromium.org/p/webrtc/issues/detail?id=5682.
class VideoFrame {
public:
// TODO(nisse): Deprecated. Using the default constructor violates the