This relands https://codereview.webrtc.org/1655793003/ with the change that cricket::VideoCapturer::SignalVideoFrame is added back and used for frame forwarding, since it is still used by Chrome remoting.
BUG=webrtc:5426
R=pthatcher@webrtc.org
TBR=pthatcher@webrtc.org for moved channelmanager....

Review URL: https://codereview.webrtc.org/1689923002 .

Cr-Commit-Position: refs/heads/master@{#11597}
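A minimal usage sketch of the sink API this change introduces, assuming the headers and signatures in this revision: a frame consumer no longer connects to cricket::VideoCapturer::SignalVideoFrame (now a protected member kept only for Chrome remoting) but implements rtc::VideoSinkInterface<cricket::VideoFrame> and registers through the rtc::VideoSourceInterface methods. LoggingSink and the helper functions are hypothetical names used only for illustration.

#include "webrtc/base/logging.h"
#include "webrtc/media/base/videocapturer.h"
#include "webrtc/media/base/videosinkinterface.h"
#include "webrtc/media/base/videosourceinterface.h"

// Hypothetical sink that counts and logs the frames it receives.
class LoggingSink : public rtc::VideoSinkInterface<cricket::VideoFrame> {
 public:
  void OnFrame(const cricket::VideoFrame& frame) override {
    ++num_frames_;
    LOG(LS_INFO) << "Frame " << num_frames_ << ": "
                 << frame.GetWidth() << "x" << frame.GetHeight();
  }
  int num_frames() const { return num_frames_; }

 private:
  int num_frames_ = 0;
};

// |capturer| may be any cricket::VideoCapturer, which now implements
// rtc::VideoSourceInterface<cricket::VideoFrame>.
void AttachSink(cricket::VideoCapturer* capturer, LoggingSink* sink) {
  rtc::VideoSinkWants wants;
  // Ask the source to apply rotation before delivery. VideoBroadcaster
  // aggregates this across sinks, so the source applies rotation itself
  // as soon as at least one registered sink requests it.
  wants.rotation_applied = true;
  capturer->AddOrUpdateSink(sink, wants);
}

void DetachSink(cricket::VideoCapturer* capturer, LoggingSink* sink) {
  // Per rtc::VideoSourceInterface, no further OnFrame() calls are made on
  // |sink| once RemoveSink() has returned.
  capturer->RemoveSink(sink);
}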
@ -27,7 +27,8 @@ namespace webrtc {
|
||||
// for ref counted I420 frames instead of this hack.
|
||||
class AndroidVideoCapturer::FrameFactory : public cricket::VideoFrameFactory {
|
||||
public:
|
||||
FrameFactory(const rtc::scoped_refptr<AndroidVideoCapturerDelegate>& delegate)
|
||||
explicit FrameFactory(
|
||||
const rtc::scoped_refptr<AndroidVideoCapturerDelegate>& delegate)
|
||||
: delegate_(delegate) {
|
||||
// Create a CapturedFrame that only contains header information, not the
|
||||
// actual pixel data.
|
||||
@ -141,8 +142,6 @@ AndroidVideoCapturer::AndroidVideoCapturer(
|
||||
formats.push_back(format);
|
||||
}
|
||||
SetSupportedFormats(formats);
|
||||
// Do not apply frame rotation by default.
|
||||
SetApplyRotation(false);
|
||||
}
|
||||
|
||||
AndroidVideoCapturer::~AndroidVideoCapturer() {
|
||||
|
||||
@ -34,13 +34,10 @@ class RemoteVideoCapturerTest : public testing::Test,
|
||||
virtual void SetUp() {
|
||||
capturer_.SignalStateChange.connect(
|
||||
this, &RemoteVideoCapturerTest::OnStateChange);
|
||||
capturer_.SignalVideoFrame.connect(
|
||||
this, &RemoteVideoCapturerTest::OnVideoFrame);
|
||||
}
|
||||
|
||||
~RemoteVideoCapturerTest() {
|
||||
capturer_.SignalStateChange.disconnect(this);
|
||||
capturer_.SignalVideoFrame.disconnect(this);
|
||||
}
|
||||
|
||||
int captured_frame_num() const {
|
||||
@ -60,11 +57,6 @@ class RemoteVideoCapturerTest : public testing::Test,
|
||||
capture_state_ = capture_state;
|
||||
}
|
||||
|
||||
void OnVideoFrame(VideoCapturer* capturer, const VideoFrame* frame) {
|
||||
EXPECT_EQ(&capturer_, capturer);
|
||||
++captured_frame_num_;
|
||||
}
|
||||
|
||||
int captured_frame_num_;
|
||||
CaptureState capture_state_;
|
||||
};
|
||||
@ -103,13 +95,3 @@ TEST_F(RemoteVideoCapturerTest, GetBestCaptureFormat) {
|
||||
EXPECT_TRUE(capturer_.GetBestCaptureFormat(desired, &best_format));
|
||||
EXPECT_EQ(expected_format, best_format);
|
||||
}
|
||||
|
||||
TEST_F(RemoteVideoCapturerTest, InputFrame) {
|
||||
EXPECT_EQ(0, captured_frame_num());
|
||||
|
||||
cricket::WebRtcVideoFrame test_frame;
|
||||
capturer_.SignalVideoFrame(&capturer_, &test_frame);
|
||||
EXPECT_EQ(1, captured_frame_num());
|
||||
capturer_.SignalVideoFrame(&capturer_, &test_frame);
|
||||
EXPECT_EQ(2, captured_frame_num());
|
||||
}
|
||||
|
||||
@ -23,7 +23,7 @@ class VideoCapturerState {
|
||||
public:
|
||||
static const VideoFormatPod kDefaultCaptureFormat;
|
||||
|
||||
static VideoCapturerState* Create(VideoCapturer* video_capturer);
|
||||
explicit VideoCapturerState(VideoCapturer* capturer);
|
||||
~VideoCapturerState() {}
|
||||
|
||||
void AddCaptureResolution(const VideoFormat& desired_format);
|
||||
@ -32,13 +32,9 @@ class VideoCapturerState {
|
||||
|
||||
int IncCaptureStartRef();
|
||||
int DecCaptureStartRef();
|
||||
CaptureRenderAdapter* adapter() {
|
||||
RTC_DCHECK(thread_checker_.CalledOnValidThread());
|
||||
return adapter_.get();
|
||||
}
|
||||
VideoCapturer* GetVideoCapturer() {
|
||||
RTC_DCHECK(thread_checker_.CalledOnValidThread());
|
||||
return adapter()->video_capturer();
|
||||
return video_capturer_;
|
||||
}
|
||||
|
||||
int start_count() const {
|
||||
@ -53,11 +49,9 @@ class VideoCapturerState {
|
||||
};
|
||||
typedef std::vector<CaptureResolutionInfo> CaptureFormats;
|
||||
|
||||
explicit VideoCapturerState(CaptureRenderAdapter* adapter);
|
||||
|
||||
rtc::ThreadChecker thread_checker_;
|
||||
rtc::scoped_ptr<CaptureRenderAdapter> adapter_;
|
||||
|
||||
VideoCapturer* video_capturer_;
|
||||
int start_count_;
|
||||
CaptureFormats capture_formats_;
|
||||
};
|
||||
@ -66,17 +60,8 @@ const VideoFormatPod VideoCapturerState::kDefaultCaptureFormat = {
|
||||
640, 360, FPS_TO_INTERVAL(30), FOURCC_ANY
|
||||
};
|
||||
|
||||
VideoCapturerState::VideoCapturerState(CaptureRenderAdapter* adapter)
|
||||
: adapter_(adapter), start_count_(1) {}
|
||||
|
||||
// static
|
||||
VideoCapturerState* VideoCapturerState::Create(VideoCapturer* video_capturer) {
|
||||
CaptureRenderAdapter* adapter = CaptureRenderAdapter::Create(video_capturer);
|
||||
if (!adapter) {
|
||||
return NULL;
|
||||
}
|
||||
return new VideoCapturerState(adapter);
|
||||
}
|
||||
VideoCapturerState::VideoCapturerState(VideoCapturer* capturer)
|
||||
: video_capturer_(capturer), start_count_(1) {}
|
||||
|
||||
void VideoCapturerState::AddCaptureResolution(
|
||||
const VideoFormat& desired_format) {
|
||||
@ -276,11 +261,10 @@ void CaptureManager::AddVideoSink(VideoCapturer* video_capturer,
|
||||
if (!video_capturer || !sink) {
|
||||
return;
|
||||
}
|
||||
CaptureRenderAdapter* adapter = GetAdapter(video_capturer);
|
||||
if (!adapter) {
|
||||
return;
|
||||
}
|
||||
adapter->AddSink(sink);
|
||||
rtc::VideoSinkWants wants;
|
||||
// Renderers must be able to apply rotation.
|
||||
wants.rotation_applied = false;
|
||||
video_capturer->AddOrUpdateSink(sink, wants);
|
||||
}
|
||||
|
||||
void CaptureManager::RemoveVideoSink(
|
||||
@ -290,11 +274,7 @@ void CaptureManager::RemoveVideoSink(
|
||||
if (!video_capturer || !sink) {
|
||||
return;
|
||||
}
|
||||
CaptureRenderAdapter* adapter = GetAdapter(video_capturer);
|
||||
if (!adapter) {
|
||||
return;
|
||||
}
|
||||
adapter->RemoveSink(sink);
|
||||
video_capturer->RemoveSink(sink);
|
||||
}
|
||||
|
||||
bool CaptureManager::IsCapturerRegistered(VideoCapturer* video_capturer) const {
|
||||
@ -304,11 +284,7 @@ bool CaptureManager::IsCapturerRegistered(VideoCapturer* video_capturer) const {
|
||||
|
||||
bool CaptureManager::RegisterVideoCapturer(VideoCapturer* video_capturer) {
|
||||
RTC_DCHECK(thread_checker_.CalledOnValidThread());
|
||||
VideoCapturerState* capture_state =
|
||||
VideoCapturerState::Create(video_capturer);
|
||||
if (!capture_state) {
|
||||
return false;
|
||||
}
|
||||
VideoCapturerState* capture_state = new VideoCapturerState(video_capturer);
|
||||
capture_states_[video_capturer] = capture_state;
|
||||
SignalCapturerStateChange.repeat(video_capturer->SignalStateChange);
|
||||
return true;
|
||||
@ -376,14 +352,4 @@ VideoCapturerState* CaptureManager::GetCaptureState(
|
||||
return iter->second;
|
||||
}
|
||||
|
||||
CaptureRenderAdapter* CaptureManager::GetAdapter(
|
||||
VideoCapturer* video_capturer) const {
|
||||
RTC_DCHECK(thread_checker_.CalledOnValidThread());
|
||||
VideoCapturerState* capture_state = GetCaptureState(video_capturer);
|
||||
if (!capture_state) {
|
||||
return NULL;
|
||||
}
|
||||
return capture_state->adapter();
|
||||
}
|
||||
|
||||
} // namespace cricket
|
||||
|
||||
@ -29,13 +29,13 @@
|
||||
|
||||
#include "webrtc/base/sigslotrepeater.h"
|
||||
#include "webrtc/base/thread_checker.h"
|
||||
#include "webrtc/media/base/capturerenderadapter.h"
|
||||
#include "webrtc/media/base/videocommon.h"
|
||||
#include "webrtc/media/base/videocapturer.h"
|
||||
#include "webrtc/media/base/videosinkinterface.h"
|
||||
|
||||
namespace cricket {
|
||||
|
||||
class VideoCapturer;
|
||||
class VideoRenderer;
|
||||
class VideoFrame;
|
||||
class VideoCapturerState;
|
||||
|
||||
class CaptureManager : public sigslot::has_slots<> {
|
||||
@ -80,7 +80,6 @@ class CaptureManager : public sigslot::has_slots<> {
|
||||
VideoCapturer* video_capturer);
|
||||
|
||||
VideoCapturerState* GetCaptureState(VideoCapturer* video_capturer) const;
|
||||
CaptureRenderAdapter* GetAdapter(VideoCapturer* video_capturer) const;
|
||||
|
||||
rtc::ThreadChecker thread_checker_;
|
||||
CaptureStates capture_states_;
|
||||
|
||||
@ -8,76 +8,4 @@
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "webrtc/media/base/capturerenderadapter.h"
|
||||
|
||||
#include "webrtc/base/logging.h"
|
||||
#include "webrtc/media/base/videocapturer.h"
|
||||
#include "webrtc/media/base/videorenderer.h"
|
||||
|
||||
namespace cricket {
|
||||
|
||||
CaptureRenderAdapter::CaptureRenderAdapter(VideoCapturer* video_capturer)
|
||||
: video_capturer_(video_capturer) {
|
||||
}
|
||||
|
||||
CaptureRenderAdapter::~CaptureRenderAdapter() {
|
||||
// Since the signal we're connecting to is multi-threaded,
|
||||
// disconnect_all() will block until all calls are serviced, meaning any
|
||||
// outstanding calls to OnVideoFrame will be done when this is done, and no
|
||||
// more calls will be serviced by this.
|
||||
// We do this explicitly instead of just letting the has_slots<> destructor
|
||||
// take care of it because we need to do this *before* sinks_ is
|
||||
// cleared by the destructor; otherwise we could mess with it while
|
||||
// OnVideoFrame is running.
|
||||
// We *don't* take capture_crit_ here since it could deadlock with the lock
|
||||
// taken by the video frame signal.
|
||||
disconnect_all();
|
||||
}
|
||||
|
||||
CaptureRenderAdapter* CaptureRenderAdapter::Create(
|
||||
VideoCapturer* video_capturer) {
|
||||
if (!video_capturer) {
|
||||
return NULL;
|
||||
}
|
||||
CaptureRenderAdapter* return_value = new CaptureRenderAdapter(video_capturer);
|
||||
return_value->Init(); // Can't fail.
|
||||
return return_value;
|
||||
}
|
||||
|
||||
void CaptureRenderAdapter::AddSink(rtc::VideoSinkInterface<VideoFrame>* sink) {
|
||||
RTC_DCHECK(sink);
|
||||
|
||||
rtc::CritScope cs(&capture_crit_);
|
||||
// This implements set semantics, the same renderer can only be
|
||||
// added once.
|
||||
// TODO(nisse): Is this really needed?
|
||||
if (std::find(sinks_.begin(), sinks_.end(), sink) == sinks_.end())
|
||||
sinks_.push_back(sink);
|
||||
}
|
||||
|
||||
void CaptureRenderAdapter::RemoveSink(
|
||||
rtc::VideoSinkInterface<VideoFrame>* sink) {
|
||||
RTC_DCHECK(sink);
|
||||
|
||||
rtc::CritScope cs(&capture_crit_);
|
||||
sinks_.erase(std::remove(sinks_.begin(), sinks_.end(), sink), sinks_.end());
|
||||
}
|
||||
|
||||
void CaptureRenderAdapter::Init() {
|
||||
video_capturer_->SignalVideoFrame.connect(
|
||||
this,
|
||||
&CaptureRenderAdapter::OnVideoFrame);
|
||||
}
|
||||
|
||||
void CaptureRenderAdapter::OnVideoFrame(VideoCapturer* capturer,
|
||||
const VideoFrame* video_frame) {
|
||||
rtc::CritScope cs(&capture_crit_);
|
||||
if (sinks_.empty()) {
|
||||
return;
|
||||
}
|
||||
|
||||
for (auto* sink : sinks_)
|
||||
sink->OnFrame(*video_frame);
|
||||
}
|
||||
|
||||
} // namespace cricket
|
||||
// TODO(perkj): Remove this dummy file once Chrome is not depending on it.
|
||||
|
||||
@ -8,51 +8,4 @@
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
// This file contains the class CaptureRenderAdapter. The class connects a
|
||||
// VideoCapturer to any number of VideoRenders such that the former feeds the
|
||||
// latter.
|
||||
// CaptureRenderAdapter is Thread-unsafe. This means that none of its APIs may
|
||||
// be called concurrently.
|
||||
|
||||
#ifndef WEBRTC_MEDIA_BASE_CAPTURERENDERADAPTER_H_
|
||||
#define WEBRTC_MEDIA_BASE_CAPTURERENDERADAPTER_H_
|
||||
|
||||
#include <vector>
|
||||
|
||||
#include "webrtc/base/criticalsection.h"
|
||||
#include "webrtc/base/sigslot.h"
|
||||
#include "webrtc/media/base/videocapturer.h"
|
||||
#include "webrtc/media/base/videosinkinterface.h"
|
||||
|
||||
namespace cricket {
|
||||
|
||||
class VideoCapturer;
|
||||
class VideoProcessor;
|
||||
|
||||
class CaptureRenderAdapter : public sigslot::has_slots<> {
|
||||
public:
|
||||
static CaptureRenderAdapter* Create(VideoCapturer* video_capturer);
|
||||
~CaptureRenderAdapter();
|
||||
|
||||
void AddSink(rtc::VideoSinkInterface<VideoFrame>* sink);
|
||||
void RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink);
|
||||
|
||||
VideoCapturer* video_capturer() { return video_capturer_; }
|
||||
private:
|
||||
|
||||
explicit CaptureRenderAdapter(VideoCapturer* video_capturer);
|
||||
void Init();
|
||||
|
||||
// Callback for frames received from the capturer.
|
||||
void OnVideoFrame(VideoCapturer* capturer, const VideoFrame* video_frame);
|
||||
|
||||
// Just pointers since ownership is not handed over to this class.
|
||||
std::vector<rtc::VideoSinkInterface<VideoFrame>*> sinks_;
|
||||
VideoCapturer* video_capturer_;
|
||||
// Critical section synchronizing the capture thread.
|
||||
rtc::CriticalSection capture_crit_;
|
||||
};
|
||||
|
||||
} // namespace cricket
|
||||
|
||||
#endif // WEBRTC_MEDIA_BASE_CAPTURERENDERADAPTER_H_
|
||||
// TODO(perkj): Remove this dummy file once Chrome is not depending on it.
|
||||
|
||||
@ -43,6 +43,7 @@ class FakeVideoRenderer : public VideoRenderer {
|
||||
++num_rendered_frames_;
|
||||
width_ = static_cast<int>(frame->GetWidth());
|
||||
height_ = static_cast<int>(frame->GetHeight());
|
||||
rotation_ = frame->GetVideoRotation();
|
||||
SignalRenderFrame(frame);
|
||||
return true;
|
||||
}
|
||||
@ -56,6 +57,10 @@ class FakeVideoRenderer : public VideoRenderer {
|
||||
rtc::CritScope cs(&crit_);
|
||||
return height_;
|
||||
}
|
||||
int rotation() const {
|
||||
rtc::CritScope cs(&crit_);
|
||||
return rotation_;
|
||||
}
|
||||
int num_rendered_frames() const {
|
||||
rtc::CritScope cs(&crit_);
|
||||
return num_rendered_frames_;
|
||||
@ -123,6 +128,7 @@ class FakeVideoRenderer : public VideoRenderer {
|
||||
int errors_;
|
||||
int width_;
|
||||
int height_;
|
||||
webrtc::VideoRotation rotation_;
|
||||
int num_rendered_frames_;
|
||||
bool black_frame_;
|
||||
rtc::CriticalSection crit_;
|
||||
|
||||
@ -10,5 +10,73 @@
|
||||
|
||||
#include "webrtc/media/base/videobroadcaster.h"
|
||||
|
||||
// TODO(perkj): Implement.
|
||||
#include "webrtc/base/checks.h"
|
||||
|
||||
namespace rtc {
|
||||
|
||||
VideoBroadcaster::VideoBroadcaster() {
|
||||
thread_checker_.DetachFromThread();
|
||||
}
|
||||
|
||||
void VideoBroadcaster::AddOrUpdateSink(
|
||||
VideoSinkInterface<cricket::VideoFrame>* sink,
|
||||
const VideoSinkWants& wants) {
|
||||
RTC_DCHECK(thread_checker_.CalledOnValidThread());
|
||||
RTC_DCHECK(sink != nullptr);
|
||||
|
||||
SinkPair* sink_pair = FindSinkPair(sink);
|
||||
if (!sink_pair) {
|
||||
sinks_.push_back(SinkPair(sink, wants));
|
||||
} else {
|
||||
sink_pair->wants = wants;
|
||||
}
|
||||
|
||||
// Rotation must be applied by the source if one sink wants it.
|
||||
current_wants_.rotation_applied = false;
|
||||
for (auto& sink_pair : sinks_) {
|
||||
current_wants_.rotation_applied |= sink_pair.wants.rotation_applied;
|
||||
}
|
||||
}
|
||||
|
||||
void VideoBroadcaster::RemoveSink(
|
||||
VideoSinkInterface<cricket::VideoFrame>* sink) {
|
||||
RTC_DCHECK(thread_checker_.CalledOnValidThread());
|
||||
RTC_DCHECK(sink != nullptr);
|
||||
RTC_DCHECK(FindSinkPair(sink));
|
||||
|
||||
sinks_.erase(std::remove_if(sinks_.begin(), sinks_.end(),
|
||||
[sink](const SinkPair& sink_pair) {
|
||||
return sink_pair.sink == sink;
|
||||
}),
|
||||
sinks_.end());
|
||||
}
|
||||
|
||||
bool VideoBroadcaster::frame_wanted() const {
|
||||
RTC_DCHECK(thread_checker_.CalledOnValidThread());
|
||||
return !sinks_.empty();
|
||||
}
|
||||
|
||||
VideoSinkWants VideoBroadcaster::wants() const {
|
||||
RTC_DCHECK(thread_checker_.CalledOnValidThread());
|
||||
return current_wants_;
|
||||
}
|
||||
|
||||
void VideoBroadcaster::OnFrame(const cricket::VideoFrame& frame) {
|
||||
RTC_DCHECK(thread_checker_.CalledOnValidThread());
|
||||
for (auto& sink_pair : sinks_) {
|
||||
sink_pair.sink->OnFrame(frame);
|
||||
}
|
||||
}
|
||||
|
||||
VideoBroadcaster::SinkPair* VideoBroadcaster::FindSinkPair(
|
||||
const VideoSinkInterface<cricket::VideoFrame>* sink) {
|
||||
auto sink_pair_it = std::find_if(
|
||||
sinks_.begin(), sinks_.end(),
|
||||
[sink](const SinkPair& sink_pair) { return sink_pair.sink == sink; });
|
||||
if (sink_pair_it != sinks_.end()) {
|
||||
return &*sink_pair_it;
|
||||
}
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
} // namespace rtc
|
||||
|
||||
@ -11,6 +11,50 @@
|
||||
#ifndef WEBRTC_MEDIA_BASE_VIDEOBROADCASTER_H_
|
||||
#define WEBRTC_MEDIA_BASE_VIDEOBROADCASTER_H_
|
||||
|
||||
// TODO(perkj): Implement.
|
||||
#include <utility>
|
||||
#include <vector>
|
||||
|
||||
#include "webrtc/base/thread_checker.h"
|
||||
#include "webrtc/media/base/videoframe.h"
|
||||
#include "webrtc/media/base/videosinkinterface.h"
|
||||
#include "webrtc/media/base/videosourceinterface.h"
|
||||
|
||||
namespace rtc {
|
||||
|
||||
class VideoBroadcaster : public VideoSourceInterface<cricket::VideoFrame>,
|
||||
public VideoSinkInterface<cricket::VideoFrame> {
|
||||
public:
|
||||
VideoBroadcaster();
|
||||
void AddOrUpdateSink(VideoSinkInterface<cricket::VideoFrame>* sink,
|
||||
const VideoSinkWants& wants) override;
|
||||
void RemoveSink(VideoSinkInterface<cricket::VideoFrame>* sink) override;
|
||||
|
||||
// Returns true if the next frame will be delivered to at least one sink.
|
||||
bool frame_wanted() const;
|
||||
|
||||
// Returns VideoSinkWants a source is requested to fulfill. They are
|
||||
// aggregated by all VideoSinkWants from all sinks.
|
||||
VideoSinkWants wants() const;
|
||||
|
||||
void OnFrame(const cricket::VideoFrame& frame) override;
|
||||
|
||||
protected:
|
||||
struct SinkPair {
|
||||
SinkPair(VideoSinkInterface<cricket::VideoFrame>* sink,
|
||||
VideoSinkWants wants)
|
||||
: sink(sink), wants(wants) {}
|
||||
VideoSinkInterface<cricket::VideoFrame>* sink;
|
||||
VideoSinkWants wants;
|
||||
};
|
||||
SinkPair* FindSinkPair(const VideoSinkInterface<cricket::VideoFrame>* sink);
|
||||
|
||||
ThreadChecker thread_checker_;
|
||||
|
||||
VideoSinkWants current_wants_;
|
||||
|
||||
std::vector<SinkPair> sinks_;
|
||||
};
|
||||
|
||||
} // namespace rtc
|
||||
|
||||
#endif // WEBRTC_MEDIA_BASE_VIDEOBROADCASTER_H_
|
||||
|
||||
@ -102,6 +102,10 @@ void VideoCapturer::Construct() {
|
||||
square_pixel_aspect_ratio_ = false;
|
||||
capture_state_ = CS_STOPPED;
|
||||
SignalFrameCaptured.connect(this, &VideoCapturer::OnFrameCaptured);
|
||||
// TODO(perkj) SignalVideoFrame is used directly by Chrome remoting.
|
||||
// Before that is refactored, SignalVideoFrame must forward frames to the
|
||||
// |VideoBroadcaster|;
|
||||
SignalVideoFrame.connect(this, &VideoCapturer::OnFrame);
|
||||
scaled_width_ = 0;
|
||||
scaled_height_ = 0;
|
||||
muted_ = false;
|
||||
@ -226,16 +230,6 @@ bool VideoCapturer::MuteToBlackThenPause(bool muted) {
|
||||
return Pause(false);
|
||||
}
|
||||
|
||||
// Note that the last caller decides whether rotation should be applied if there
|
||||
// are multiple send streams using the same camera.
|
||||
bool VideoCapturer::SetApplyRotation(bool enable) {
|
||||
apply_rotation_ = enable;
|
||||
if (frame_factory_) {
|
||||
frame_factory_->SetApplyRotation(apply_rotation_);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
void VideoCapturer::SetSupportedFormats(
|
||||
const std::vector<VideoFormat>& formats) {
|
||||
supported_formats_ = formats;
|
||||
@ -323,6 +317,25 @@ void VideoCapturer::GetStats(VariableInfo<int>* adapt_drops_stats,
|
||||
frame_time_data_.Reset();
|
||||
}
|
||||
|
||||
void VideoCapturer::RemoveSink(
|
||||
rtc::VideoSinkInterface<cricket::VideoFrame>* sink) {
|
||||
broadcaster_.RemoveSink(sink);
|
||||
}
|
||||
|
||||
void VideoCapturer::AddOrUpdateSink(
|
||||
rtc::VideoSinkInterface<cricket::VideoFrame>* sink,
|
||||
const rtc::VideoSinkWants& wants) {
|
||||
broadcaster_.AddOrUpdateSink(sink, wants);
|
||||
OnSinkWantsChanged(broadcaster_.wants());
|
||||
}
|
||||
|
||||
void VideoCapturer::OnSinkWantsChanged(const rtc::VideoSinkWants& wants) {
|
||||
apply_rotation_ = wants.rotation_applied;
|
||||
if (frame_factory_) {
|
||||
frame_factory_->SetApplyRotation(apply_rotation_);
|
||||
}
|
||||
}
|
||||
|
||||
void VideoCapturer::OnFrameCaptured(VideoCapturer*,
|
||||
const CapturedFrame* captured_frame) {
|
||||
if (muted_) {
|
||||
@ -333,7 +346,7 @@ void VideoCapturer::OnFrameCaptured(VideoCapturer*,
|
||||
}
|
||||
}
|
||||
|
||||
if (SignalVideoFrame.is_empty()) {
|
||||
if (!broadcaster_.frame_wanted()) {
|
||||
return;
|
||||
}
|
||||
|
||||
@ -517,10 +530,13 @@ void VideoCapturer::OnFrameCaptured(VideoCapturer*,
|
||||
adapted_frame->SetToBlack();
|
||||
}
|
||||
SignalVideoFrame(this, adapted_frame.get());
|
||||
|
||||
UpdateStats(captured_frame);
|
||||
}
|
||||
|
||||
void VideoCapturer::OnFrame(VideoCapturer* capturer, const VideoFrame* frame) {
|
||||
broadcaster_.OnFrame(*frame);
|
||||
}
|
||||
|
||||
void VideoCapturer::SetCaptureState(CaptureState state) {
|
||||
if (state == capture_state_) {
|
||||
// Don't trigger a state changed callback if the state hasn't changed.
|
||||
|
||||
@ -19,6 +19,7 @@
|
||||
|
||||
#include "webrtc/base/basictypes.h"
|
||||
#include "webrtc/base/criticalsection.h"
|
||||
#include "webrtc/media/base/videosourceinterface.h"
|
||||
#include "webrtc/base/messagehandler.h"
|
||||
#include "webrtc/base/rollingaccumulator.h"
|
||||
#include "webrtc/base/scoped_ptr.h"
|
||||
@ -27,6 +28,7 @@
|
||||
#include "webrtc/base/timing.h"
|
||||
#include "webrtc/media/base/mediachannel.h"
|
||||
#include "webrtc/media/base/videoadapter.h"
|
||||
#include "webrtc/media/base/videobroadcaster.h"
|
||||
#include "webrtc/media/base/videocommon.h"
|
||||
#include "webrtc/media/base/videoframefactory.h"
|
||||
#include "webrtc/media/devices/devicemanager.h"
|
||||
@ -73,7 +75,7 @@ struct CapturedFrame {
|
||||
// time with nanosecond units.
|
||||
uint32_t data_size; // number of bytes of the frame data
|
||||
|
||||
webrtc::VideoRotation rotation; // rotation in degrees of the frame.
|
||||
webrtc::VideoRotation rotation; // rotation in degrees of the frame.
|
||||
|
||||
void* data; // pointer to the frame data. This object allocates the
|
||||
// memory or points to an existing memory.
|
||||
@ -108,9 +110,9 @@ struct CapturedFrame {
|
||||
// media engine thread). Hence, the VideoCapture subclasses dont need to be
|
||||
// thread safe.
|
||||
//
|
||||
class VideoCapturer
|
||||
: public sigslot::has_slots<>,
|
||||
public rtc::MessageHandler {
|
||||
class VideoCapturer : public sigslot::has_slots<>,
|
||||
public rtc::MessageHandler,
|
||||
public rtc::VideoSourceInterface<cricket::VideoFrame> {
|
||||
public:
|
||||
// All signals are marshalled to |thread| or the creating thread if
|
||||
// none is provided.
|
||||
@ -197,11 +199,6 @@ class VideoCapturer
|
||||
return capture_state_;
|
||||
}
|
||||
|
||||
// Tells videocapturer whether to apply the pending rotation. By default, the
|
||||
// rotation is applied and the generated frame is up right. When set to false,
|
||||
// generated frames will carry the rotation information from
|
||||
// SetCaptureRotation. Return value indicates whether this operation succeeds.
|
||||
virtual bool SetApplyRotation(bool enable);
|
||||
virtual bool GetApplyRotation() { return apply_rotation_; }
|
||||
|
||||
// Returns true if the capturer is screencasting. This can be used to
|
||||
@ -240,10 +237,6 @@ class VideoCapturer
|
||||
// Signal the captured frame to downstream.
|
||||
sigslot::signal2<VideoCapturer*, const CapturedFrame*,
|
||||
sigslot::multi_threaded_local> SignalFrameCaptured;
|
||||
// Signal the captured and possibly adapted frame to downstream consumers
|
||||
// such as the encoder.
|
||||
sigslot::signal2<VideoCapturer*, const VideoFrame*,
|
||||
sigslot::multi_threaded_local> SignalVideoFrame;
|
||||
|
||||
// If true, run video adaptation. By default, video adaptation is enabled
|
||||
// and users must call video_adapter()->OnOutputFormatRequest()
|
||||
@ -269,10 +262,30 @@ class VideoCapturer
|
||||
VariableInfo<double>* frame_time_stats,
|
||||
VideoFormat* last_captured_frame_format);
|
||||
|
||||
// Implements VideoSourceInterface
|
||||
void AddOrUpdateSink(rtc::VideoSinkInterface<cricket::VideoFrame>* sink,
|
||||
const rtc::VideoSinkWants& wants) override;
|
||||
void RemoveSink(rtc::VideoSinkInterface<cricket::VideoFrame>* sink) override;
|
||||
|
||||
protected:
|
||||
// Signal the captured and possibly adapted frame to downstream consumers
|
||||
// such as the encoder.
|
||||
// TODO(perkj): Remove once it is not used by remoting in Chrome.
|
||||
sigslot::signal2<VideoCapturer*, const VideoFrame*,
|
||||
sigslot::multi_threaded_local> SignalVideoFrame;
|
||||
|
||||
// OnSinkWantsChanged can be overridden to change the default behavior
|
||||
// when a sink changes its VideoSinkWants by calling AddOrUpdateSink.
|
||||
virtual void OnSinkWantsChanged(const rtc::VideoSinkWants& wants);
|
||||
|
||||
// Callback attached to SignalFrameCaptured where SignalVideoFrames is called.
|
||||
void OnFrameCaptured(VideoCapturer* video_capturer,
|
||||
const CapturedFrame* captured_frame);
|
||||
|
||||
// Callback attached to SignalVideoFrame.
|
||||
// TODO(perkj): Remove once SignalVideoFrame is removed.
|
||||
void OnFrame(VideoCapturer* capturer, const VideoFrame* frame);
|
||||
|
||||
void SetCaptureState(CaptureState state);
|
||||
|
||||
// Marshals SignalStateChange onto thread_.
|
||||
@ -344,6 +357,7 @@ class VideoCapturer
|
||||
bool muted_;
|
||||
int black_frame_count_down_;
|
||||
|
||||
rtc::VideoBroadcaster broadcaster_;
|
||||
bool enable_video_adapter_;
|
||||
CoordinatedVideoAdapter video_adapter_;
|
||||
|
||||
|
||||
@ -35,29 +35,13 @@ class VideoCapturerTest
|
||||
public testing::Test {
|
||||
public:
|
||||
VideoCapturerTest()
|
||||
: capture_state_(cricket::CS_STOPPED),
|
||||
num_state_changes_(0),
|
||||
video_frames_received_(0),
|
||||
expects_rotation_applied_(true) {
|
||||
capturer_.SignalVideoFrame.connect(this, &VideoCapturerTest::OnVideoFrame);
|
||||
: capture_state_(cricket::CS_STOPPED), num_state_changes_(0) {
|
||||
capturer_.SignalStateChange.connect(this,
|
||||
&VideoCapturerTest::OnStateChange);
|
||||
}
|
||||
|
||||
void set_expected_compensation(bool compensation) {
|
||||
expects_rotation_applied_ = compensation;
|
||||
capturer_.AddOrUpdateSink(&renderer_, rtc::VideoSinkWants());
|
||||
}
|
||||
|
||||
protected:
|
||||
void OnVideoFrame(cricket::VideoCapturer*, const cricket::VideoFrame* frame) {
|
||||
++video_frames_received_;
|
||||
if (expects_rotation_applied_) {
|
||||
EXPECT_EQ(webrtc::kVideoRotation_0, frame->GetVideoRotation());
|
||||
} else {
|
||||
EXPECT_EQ(capturer_.GetRotation(), frame->GetVideoRotation());
|
||||
}
|
||||
renderer_.RenderFrame(frame);
|
||||
}
|
||||
void OnStateChange(cricket::VideoCapturer*,
|
||||
cricket::CaptureState capture_state) {
|
||||
capture_state_ = capture_state;
|
||||
@ -65,14 +49,10 @@ class VideoCapturerTest
|
||||
}
|
||||
cricket::CaptureState capture_state() { return capture_state_; }
|
||||
int num_state_changes() { return num_state_changes_; }
|
||||
int video_frames_received() const {
|
||||
return video_frames_received_;
|
||||
}
|
||||
|
||||
cricket::FakeVideoCapturer capturer_;
|
||||
cricket::CaptureState capture_state_;
|
||||
int num_state_changes_;
|
||||
int video_frames_received_;
|
||||
cricket::FakeVideoRenderer renderer_;
|
||||
bool expects_rotation_applied_;
|
||||
};
|
||||
@ -149,9 +129,9 @@ TEST_F(VideoCapturerTest, CameraOffOnMute) {
|
||||
cricket::VideoFormat::FpsToInterval(30),
|
||||
cricket::FOURCC_I420)));
|
||||
EXPECT_TRUE(capturer_.IsRunning());
|
||||
EXPECT_EQ(0, video_frames_received());
|
||||
EXPECT_EQ(0, renderer_.num_rendered_frames());
|
||||
EXPECT_TRUE(capturer_.CaptureFrame());
|
||||
EXPECT_EQ(1, video_frames_received());
|
||||
EXPECT_EQ(1, renderer_.num_rendered_frames());
|
||||
EXPECT_FALSE(capturer_.IsMuted());
|
||||
|
||||
// Mute the camera and expect black output frame.
|
||||
@ -161,13 +141,13 @@ TEST_F(VideoCapturerTest, CameraOffOnMute) {
|
||||
EXPECT_TRUE(capturer_.CaptureFrame());
|
||||
EXPECT_TRUE(renderer_.black_frame());
|
||||
}
|
||||
EXPECT_EQ(32, video_frames_received());
|
||||
EXPECT_EQ(32, renderer_.num_rendered_frames());
|
||||
EXPECT_EQ_WAIT(cricket::CS_PAUSED,
|
||||
capturer_.capture_state(), kTimeout);
|
||||
|
||||
// Verify that the camera is off.
|
||||
EXPECT_FALSE(capturer_.CaptureFrame());
|
||||
EXPECT_EQ(32, video_frames_received());
|
||||
EXPECT_EQ(32, renderer_.num_rendered_frames());
|
||||
|
||||
// Unmute the camera and expect non-black output frame.
|
||||
capturer_.MuteToBlackThenPause(false);
|
||||
@ -176,7 +156,7 @@ TEST_F(VideoCapturerTest, CameraOffOnMute) {
|
||||
capturer_.capture_state(), kTimeout);
|
||||
EXPECT_TRUE(capturer_.CaptureFrame());
|
||||
EXPECT_FALSE(renderer_.black_frame());
|
||||
EXPECT_EQ(33, video_frames_received());
|
||||
EXPECT_EQ(33, renderer_.num_rendered_frames());
|
||||
}
|
||||
|
||||
TEST_F(VideoCapturerTest, ScreencastScaledOddWidth) {
|
||||
@ -203,7 +183,7 @@ TEST_F(VideoCapturerTest, ScreencastScaledOddWidth) {
|
||||
EXPECT_EQ(kHeight, renderer_.height());
|
||||
}
|
||||
|
||||
TEST_F(VideoCapturerTest, TestRotationPending) {
|
||||
TEST_F(VideoCapturerTest, TestRotationAppliedBySource) {
|
||||
int kWidth = 800;
|
||||
int kHeight = 400;
|
||||
int frame_count = 0;
|
||||
@ -214,6 +194,7 @@ TEST_F(VideoCapturerTest, TestRotationPending) {
|
||||
cricket::FOURCC_I420));
|
||||
|
||||
capturer_.ResetSupportedFormats(formats);
|
||||
|
||||
// capturer_ should compensate rotation as default.
|
||||
capturer_.UpdateAspectRatio(400, 200);
|
||||
|
||||
@ -234,6 +215,7 @@ TEST_F(VideoCapturerTest, TestRotationPending) {
|
||||
// Swapped width and height
|
||||
EXPECT_EQ(kWidth, renderer_.height());
|
||||
EXPECT_EQ(kHeight, renderer_.width());
|
||||
EXPECT_EQ(webrtc::kVideoRotation_0, renderer_.rotation());
|
||||
|
||||
capturer_.SetRotation(webrtc::kVideoRotation_270);
|
||||
EXPECT_TRUE(capturer_.CaptureFrame());
|
||||
@ -241,6 +223,7 @@ TEST_F(VideoCapturerTest, TestRotationPending) {
|
||||
// Swapped width and height
|
||||
EXPECT_EQ(kWidth, renderer_.height());
|
||||
EXPECT_EQ(kHeight, renderer_.width());
|
||||
EXPECT_EQ(webrtc::kVideoRotation_0, renderer_.rotation());
|
||||
|
||||
capturer_.SetRotation(webrtc::kVideoRotation_180);
|
||||
EXPECT_TRUE(capturer_.CaptureFrame());
|
||||
@ -248,9 +231,10 @@ TEST_F(VideoCapturerTest, TestRotationPending) {
|
||||
// Back to normal width and height
|
||||
EXPECT_EQ(kWidth, renderer_.width());
|
||||
EXPECT_EQ(kHeight, renderer_.height());
|
||||
EXPECT_EQ(webrtc::kVideoRotation_0, renderer_.rotation());
|
||||
}
|
||||
|
||||
TEST_F(VideoCapturerTest, TestRotationApplied) {
|
||||
TEST_F(VideoCapturerTest, TestRotationAppliedBySink) {
|
||||
int kWidth = 800;
|
||||
int kHeight = 400;
|
||||
|
||||
@ -260,10 +244,12 @@ TEST_F(VideoCapturerTest, TestRotationApplied) {
|
||||
cricket::FOURCC_I420));
|
||||
|
||||
capturer_.ResetSupportedFormats(formats);
|
||||
rtc::VideoSinkWants wants;
|
||||
// capturer_ should not compensate rotation.
|
||||
capturer_.SetApplyRotation(false);
|
||||
wants.rotation_applied = false;
|
||||
capturer_.AddOrUpdateSink(&renderer_, wants);
|
||||
|
||||
capturer_.UpdateAspectRatio(400, 200);
|
||||
set_expected_compensation(false);
|
||||
|
||||
EXPECT_EQ(cricket::CS_RUNNING,
|
||||
capturer_.Start(cricket::VideoFormat(
|
||||
@ -281,18 +267,64 @@ TEST_F(VideoCapturerTest, TestRotationApplied) {
|
||||
capturer_.SetRotation(webrtc::kVideoRotation_0);
|
||||
EXPECT_TRUE(capturer_.CaptureFrame());
|
||||
EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
|
||||
EXPECT_EQ(capturer_.GetRotation(), renderer_.rotation());
|
||||
|
||||
capturer_.SetRotation(webrtc::kVideoRotation_90);
|
||||
EXPECT_TRUE(capturer_.CaptureFrame());
|
||||
EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
|
||||
EXPECT_EQ(capturer_.GetRotation(), renderer_.rotation());
|
||||
|
||||
capturer_.SetRotation(webrtc::kVideoRotation_180);
|
||||
EXPECT_TRUE(capturer_.CaptureFrame());
|
||||
EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
|
||||
EXPECT_EQ(capturer_.GetRotation(), renderer_.rotation());
|
||||
|
||||
capturer_.SetRotation(webrtc::kVideoRotation_270);
|
||||
EXPECT_TRUE(capturer_.CaptureFrame());
|
||||
EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
|
||||
EXPECT_EQ(capturer_.GetRotation(), renderer_.rotation());
|
||||
}
|
||||
|
||||
TEST_F(VideoCapturerTest, TestRotationAppliedBySourceWhenDifferentWants) {
|
||||
int kWidth = 800;
|
||||
int kHeight = 400;
|
||||
|
||||
std::vector<cricket::VideoFormat> formats;
|
||||
formats.push_back(cricket::VideoFormat(kWidth, kHeight,
|
||||
cricket::VideoFormat::FpsToInterval(5),
|
||||
cricket::FOURCC_I420));
|
||||
|
||||
capturer_.ResetSupportedFormats(formats);
|
||||
rtc::VideoSinkWants wants;
|
||||
// capturer_ should not compensate rotation.
|
||||
wants.rotation_applied = false;
|
||||
capturer_.AddOrUpdateSink(&renderer_, wants);
|
||||
|
||||
capturer_.UpdateAspectRatio(400, 200);
|
||||
|
||||
EXPECT_EQ(cricket::CS_RUNNING,
|
||||
capturer_.Start(cricket::VideoFormat(
|
||||
kWidth, kHeight, cricket::VideoFormat::FpsToInterval(30),
|
||||
cricket::FOURCC_I420)));
|
||||
EXPECT_TRUE(capturer_.IsRunning());
|
||||
EXPECT_EQ(0, renderer_.num_rendered_frames());
|
||||
|
||||
int frame_count = 0;
|
||||
capturer_.SetRotation(webrtc::kVideoRotation_90);
|
||||
EXPECT_TRUE(capturer_.CaptureFrame());
|
||||
EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
|
||||
EXPECT_EQ(capturer_.GetRotation(), renderer_.rotation());
|
||||
|
||||
// Add another sink that wants frames to be rotated.
|
||||
cricket::FakeVideoRenderer renderer2;
|
||||
wants.rotation_applied = true;
|
||||
capturer_.AddOrUpdateSink(&renderer2, wants);
|
||||
|
||||
EXPECT_TRUE(capturer_.CaptureFrame());
|
||||
EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
|
||||
EXPECT_EQ(1, renderer2.num_rendered_frames());
|
||||
EXPECT_EQ(webrtc::kVideoRotation_0, renderer_.rotation());
|
||||
EXPECT_EQ(webrtc::kVideoRotation_0, renderer2.rotation());
|
||||
}
|
||||
|
||||
TEST_F(VideoCapturerTest, ScreencastScaledSuperLarge) {
|
||||
@ -715,25 +747,6 @@ TEST_F(VideoCapturerTest, TestRequest16x10_9) {
|
||||
EXPECT_EQ(360, best.height);
|
||||
}
|
||||
|
||||
// If HAVE_WEBRTC_VIDEO is not defined the video capturer will not be able to
|
||||
// provide OnVideoFrame-callbacks since they require cricket::CapturedFrame to
|
||||
// be decoded as a cricket::VideoFrame (i.e. an I420 frame). This functionality
|
||||
// only exist if HAVE_WEBRTC_VIDEO is defined below. I420 frames are also a
|
||||
// requirement for the VideoProcessors so they will not be called either.
|
||||
#if defined(HAVE_WEBRTC_VIDEO)
|
||||
TEST_F(VideoCapturerTest, VideoFrame) {
|
||||
EXPECT_EQ(cricket::CS_RUNNING, capturer_.Start(cricket::VideoFormat(
|
||||
640,
|
||||
480,
|
||||
cricket::VideoFormat::FpsToInterval(30),
|
||||
cricket::FOURCC_I420)));
|
||||
EXPECT_TRUE(capturer_.IsRunning());
|
||||
EXPECT_EQ(0, video_frames_received());
|
||||
EXPECT_TRUE(capturer_.CaptureFrame());
|
||||
EXPECT_EQ(1, video_frames_received());
|
||||
}
|
||||
#endif // HAVE_WEBRTC_VIDEO
|
||||
|
||||
bool HdFormatInList(const std::vector<cricket::VideoFormat>& formats) {
|
||||
for (std::vector<cricket::VideoFormat>::const_iterator found =
|
||||
formats.begin(); found != formats.end(); ++found) {
|
||||
|
||||
@ -86,21 +86,6 @@ class VideoEngineOverride : public T {
|
||||
virtual ~VideoEngineOverride() {
|
||||
}
|
||||
bool is_camera_on() const { return T::GetVideoCapturer()->IsRunning(); }
|
||||
void set_has_senders(bool has_senders) {
|
||||
cricket::VideoCapturer* video_capturer = T::GetVideoCapturer();
|
||||
if (has_senders) {
|
||||
video_capturer->SignalVideoFrame.connect(this,
|
||||
&VideoEngineOverride<T>::OnLocalFrame);
|
||||
} else {
|
||||
video_capturer->SignalVideoFrame.disconnect(this);
|
||||
}
|
||||
}
|
||||
void OnLocalFrame(cricket::VideoCapturer*,
|
||||
const cricket::VideoFrame*) {
|
||||
}
|
||||
void OnLocalFrameFormat(cricket::VideoCapturer*,
|
||||
const cricket::VideoFormat*) {
|
||||
}
|
||||
|
||||
void TriggerMediaFrame(uint32_t ssrc,
|
||||
cricket::VideoFrame* frame,
|
||||
|
||||
@ -17,9 +17,8 @@ template <typename VideoFrameT>
|
||||
class VideoSinkInterface {
|
||||
public:
|
||||
virtual void OnFrame(const VideoFrameT& frame) = 0;
|
||||
|
||||
protected:
|
||||
~VideoSinkInterface() {}
|
||||
virtual ~VideoSinkInterface() {}
|
||||
};
|
||||
|
||||
} // namespace rtc
|
||||
|
||||
webrtc/media/base/videosourceinterface.h (new file, 46 lines)
@ -0,0 +1,46 @@
|
||||
/*
|
||||
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MEDIA_BASE_VIDEOSOURCEINTERFACE_H_
|
||||
#define WEBRTC_MEDIA_BASE_VIDEOSOURCEINTERFACE_H_
|
||||
|
||||
#include "webrtc/media/base/videosinkinterface.h"
|
||||
#include "webrtc/base/callback.h"
|
||||
|
||||
namespace rtc {
|
||||
|
||||
// VideoSinkWants is used for notifying the source of properties a video frame
|
||||
// should have when it is delivered to a certain sink.
|
||||
struct VideoSinkWants {
|
||||
bool operator==(const VideoSinkWants& rh) const {
|
||||
return rotation_applied == rh.rotation_applied;
|
||||
}
|
||||
bool operator!=(const VideoSinkWants& rh) const { return !operator==(rh); }
|
||||
|
||||
// Tells the source whether the sink wants frames with rotation applied.
|
||||
// By default, the rotation is applied by the source.
|
||||
bool rotation_applied = true;
|
||||
};
|
||||
|
||||
template <typename VideoFrameT>
|
||||
class VideoSourceInterface {
|
||||
public:
|
||||
virtual void AddOrUpdateSink(VideoSinkInterface<VideoFrameT>* sink,
|
||||
const VideoSinkWants& wants) = 0;
|
||||
// RemoveSink must guarantee that at the time the method returns,
|
||||
// there is no current and no future calls to VideoSinkInterface::OnFrame.
|
||||
virtual void RemoveSink(VideoSinkInterface<VideoFrameT>* sink) = 0;
|
||||
|
||||
protected:
|
||||
virtual ~VideoSourceInterface() {}
|
||||
};
|
||||
|
||||
} // namespace rtc
|
||||
#endif // WEBRTC_MEDIA_BASE_VIDEOSOURCEINTERFACE_H_
|
||||
@ -14,7 +14,6 @@
|
||||
#include <config.h>
|
||||
#endif
|
||||
|
||||
#ifdef HAVE_WEBRTC_VIDEO
|
||||
#include "webrtc/base/arraysize.h"
|
||||
#include "webrtc/base/bind.h"
|
||||
#include "webrtc/base/checks.h"
|
||||
@ -203,9 +202,6 @@ bool WebRtcVideoCapturer::Init(const Device& device) {
|
||||
SetId(device.id);
|
||||
SetSupportedFormats(supported);
|
||||
|
||||
// Ensure these 2 have the same value.
|
||||
SetApplyRotation(module_->GetApplyRotation());
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
@ -243,7 +239,7 @@ bool WebRtcVideoCapturer::GetBestCaptureFormat(const VideoFormat& desired,
|
||||
}
|
||||
return true;
|
||||
}
|
||||
bool WebRtcVideoCapturer::SetApplyRotation(bool enable) {
|
||||
void WebRtcVideoCapturer::OnSinkWantsChanged(const rtc::VideoSinkWants& wants) {
|
||||
// Can't take lock here as this will cause deadlock with
|
||||
// OnIncomingCapturedFrame. In fact, the whole method, including methods it
|
||||
// calls, can't take lock.
|
||||
@ -253,13 +249,14 @@ bool WebRtcVideoCapturer::SetApplyRotation(bool enable) {
|
||||
webrtc::field_trial::FindFullName("WebRTC-CVO");
|
||||
|
||||
if (group_name == "Disabled") {
|
||||
return true;
|
||||
return;
|
||||
}
|
||||
|
||||
if (!VideoCapturer::SetApplyRotation(enable)) {
|
||||
return false;
|
||||
}
|
||||
return module_->SetApplyRotation(enable);
|
||||
VideoCapturer::OnSinkWantsChanged(wants);
|
||||
bool result = module_->SetApplyRotation(wants.rotation_applied);
|
||||
RTC_CHECK(result);
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
CaptureState WebRtcVideoCapturer::Start(const VideoFormat& capture_format) {
|
||||
@ -427,5 +424,3 @@ WebRtcCapturedFrame::WebRtcCapturedFrame(const webrtc::VideoFrame& sample,
|
||||
}
|
||||
|
||||
} // namespace cricket
|
||||
|
||||
#endif // HAVE_WEBRTC_VIDEO
|
||||
|
||||
@ -11,8 +11,6 @@
|
||||
#ifndef WEBRTC_MEDIA_ENGINE_WEBRTCVIDEOCAPTURER_H_
|
||||
#define WEBRTC_MEDIA_ENGINE_WEBRTCVIDEOCAPTURER_H_
|
||||
|
||||
#ifdef HAVE_WEBRTC_VIDEO
|
||||
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
@ -50,15 +48,15 @@ class WebRtcVideoCapturer : public VideoCapturer,
|
||||
bool Init(webrtc::VideoCaptureModule* module);
|
||||
|
||||
// Override virtual methods of the parent class VideoCapturer.
|
||||
virtual bool GetBestCaptureFormat(const VideoFormat& desired,
|
||||
VideoFormat* best_format);
|
||||
virtual CaptureState Start(const VideoFormat& capture_format);
|
||||
virtual void Stop();
|
||||
virtual bool IsRunning();
|
||||
virtual bool IsScreencast() const { return false; }
|
||||
virtual bool SetApplyRotation(bool enable);
|
||||
bool GetBestCaptureFormat(const VideoFormat& desired,
|
||||
VideoFormat* best_format) override;
|
||||
CaptureState Start(const VideoFormat& capture_format) override;
|
||||
void Stop() override;
|
||||
bool IsRunning() override;
|
||||
bool IsScreencast() const override { return false; }
|
||||
|
||||
protected:
|
||||
void OnSinkWantsChanged(const rtc::VideoSinkWants& wants) override;
|
||||
// Override virtual methods of the parent class VideoCapturer.
|
||||
virtual bool GetPreferredFourccs(std::vector<uint32_t>* fourccs);
|
||||
|
||||
@ -95,5 +93,4 @@ struct WebRtcCapturedFrame : public CapturedFrame {
|
||||
|
||||
} // namespace cricket
|
||||
|
||||
#endif // HAVE_WEBRTC_VIDEO
|
||||
#endif // WEBRTC_MEDIA_ENGINE_WEBRTCVIDEOCAPTURER_H_
|
||||
#endif // WEBRTC_MEDIA_WEBRTC_WEBRTCVIDEOCAPTURER_H_
|
||||
|
||||
@ -8,7 +8,6 @@
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifdef HAVE_WEBRTC_VIDEO
|
||||
#include "webrtc/media/engine/webrtcvideoengine2.h"
|
||||
|
||||
#include <algorithm>
|
||||
@ -960,7 +959,7 @@ bool WebRtcVideoChannel2::SetVideoSend(uint32_t ssrc, bool enable,
|
||||
|
||||
bool WebRtcVideoChannel2::ValidateSendSsrcAvailability(
|
||||
const StreamParams& sp) const {
|
||||
for (uint32_t ssrc: sp.ssrcs) {
|
||||
for (uint32_t ssrc : sp.ssrcs) {
|
||||
if (send_ssrcs_.find(ssrc) != send_ssrcs_.end()) {
|
||||
LOG(LS_ERROR) << "Send stream with SSRC '" << ssrc << "' already exists.";
|
||||
return false;
|
||||
@ -971,7 +970,7 @@ bool WebRtcVideoChannel2::ValidateSendSsrcAvailability(
|
||||
|
||||
bool WebRtcVideoChannel2::ValidateReceiveSsrcAvailability(
|
||||
const StreamParams& sp) const {
|
||||
for (uint32_t ssrc: sp.ssrcs) {
|
||||
for (uint32_t ssrc : sp.ssrcs) {
|
||||
if (receive_ssrcs_.find(ssrc) != receive_ssrcs_.end()) {
|
||||
LOG(LS_ERROR) << "Receive stream with SSRC '" << ssrc
|
||||
<< "' already exists.";
|
||||
@ -1277,11 +1276,6 @@ bool WebRtcVideoChannel2::SetCapturer(uint32_t ssrc, VideoCapturer* capturer) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if (capturer) {
|
||||
capturer->SetApplyRotation(!ContainsHeaderExtension(
|
||||
send_rtp_extensions_, kRtpVideoRotationHeaderExtension));
|
||||
}
|
||||
{
|
||||
rtc::CritScope lock(&capturer_crit_);
|
||||
capturers_[ssrc] = capturer;
|
||||
@ -1547,12 +1541,11 @@ static void CreateBlackFrame(webrtc::VideoFrame* video_frame,
|
||||
video_frame->set_rotation(rotation);
|
||||
}
|
||||
|
||||
void WebRtcVideoChannel2::WebRtcVideoSendStream::InputFrame(
|
||||
VideoCapturer* capturer,
|
||||
const VideoFrame* frame) {
|
||||
TRACE_EVENT0("webrtc", "WebRtcVideoSendStream::InputFrame");
|
||||
webrtc::VideoFrame video_frame(frame->GetVideoFrameBuffer(), 0, 0,
|
||||
frame->GetVideoRotation());
|
||||
void WebRtcVideoChannel2::WebRtcVideoSendStream::OnFrame(
|
||||
const VideoFrame& frame) {
|
||||
TRACE_EVENT0("webrtc", "WebRtcVideoSendStream::OnFrame");
|
||||
webrtc::VideoFrame video_frame(frame.GetVideoFrameBuffer(), 0, 0,
|
||||
frame.GetVideoRotation());
|
||||
rtc::CritScope cs(&lock_);
|
||||
if (stream_ == NULL) {
|
||||
// Frame input before send codecs are configured, dropping frame.
|
||||
@ -1566,12 +1559,13 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::InputFrame(
|
||||
|
||||
if (muted_) {
|
||||
// Create a black frame to transmit instead.
|
||||
CreateBlackFrame(&video_frame, static_cast<int>(frame->GetWidth()),
|
||||
static_cast<int>(frame->GetHeight()),
|
||||
frame->GetVideoRotation());
|
||||
CreateBlackFrame(&video_frame,
|
||||
static_cast<int>(frame.GetWidth()),
|
||||
static_cast<int>(frame.GetHeight()),
|
||||
video_frame.rotation());
|
||||
}
|
||||
|
||||
int64_t frame_delta_ms = frame->GetTimeStamp() / rtc::kNumNanosecsPerMillisec;
|
||||
int64_t frame_delta_ms = frame.GetTimeStamp() / rtc::kNumNanosecsPerMillisec;
|
||||
// frame->GetTimeStamp() is essentially a delta, align to webrtc time
|
||||
if (first_frame_timestamp_ms_ == 0) {
|
||||
first_frame_timestamp_ms_ = rtc::Time() - frame_delta_ms;
|
||||
@ -1580,8 +1574,8 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::InputFrame(
|
||||
last_frame_timestamp_ms_ = first_frame_timestamp_ms_ + frame_delta_ms;
|
||||
video_frame.set_render_time_ms(last_frame_timestamp_ms_);
|
||||
// Reconfigure codec if necessary.
|
||||
SetDimensions(
|
||||
video_frame.width(), video_frame.height(), capturer->IsScreencast());
|
||||
SetDimensions(video_frame.width(), video_frame.height(),
|
||||
capturer_->IsScreencast());
|
||||
last_rotation_ = video_frame.rotation();
|
||||
|
||||
stream_->Input()->IncomingCapturedFrame(video_frame);
|
||||
@ -1624,10 +1618,8 @@ bool WebRtcVideoChannel2::WebRtcVideoSendStream::SetCapturer(
|
||||
}
|
||||
|
||||
capturer_ = capturer;
|
||||
capturer_->AddOrUpdateSink(this, sink_wants_);
|
||||
}
|
||||
// Lock cannot be held while connecting the capturer to prevent lock-order
|
||||
// violations.
|
||||
capturer->SignalVideoFrame.connect(this, &WebRtcVideoSendStream::InputFrame);
|
||||
return true;
|
||||
}
|
||||
|
||||
@ -1649,7 +1641,8 @@ bool WebRtcVideoChannel2::WebRtcVideoSendStream::DisconnectCapturer() {
|
||||
capturer = capturer_;
|
||||
capturer_ = NULL;
|
||||
}
|
||||
capturer->SignalVideoFrame.disconnect(this);
|
||||
capturer->RemoveSink(this);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
@ -1786,9 +1779,10 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::SetSendParameters(
|
||||
}
|
||||
if (params.rtp_header_extensions) {
|
||||
parameters_.config.rtp.extensions = *params.rtp_header_extensions;
|
||||
sink_wants_.rotation_applied = !ContainsHeaderExtension(
|
||||
*params.rtp_header_extensions, kRtpVideoRotationHeaderExtension);
|
||||
if (capturer_) {
|
||||
capturer_->SetApplyRotation(!ContainsHeaderExtension(
|
||||
*params.rtp_header_extensions, kRtpVideoRotationHeaderExtension));
|
||||
capturer_->AddOrUpdateSink(this, sink_wants_);
|
||||
}
|
||||
recreate_stream = true;
|
||||
}
|
||||
@ -2496,5 +2490,3 @@ WebRtcVideoChannel2::MapCodecs(const std::vector<VideoCodec>& codecs) {
|
||||
}
|
||||
|
||||
} // namespace cricket
|
||||
|
||||
#endif // HAVE_WEBRTC_VIDEO
|
||||
|
||||
@ -12,6 +12,7 @@
|
||||
#define WEBRTC_MEDIA_ENGINE_WEBRTCVIDEOENGINE2_H_
|
||||
|
||||
#include <map>
|
||||
#include <set>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
@ -20,6 +21,7 @@
|
||||
#include "webrtc/base/thread_annotations.h"
|
||||
#include "webrtc/base/thread_checker.h"
|
||||
#include "webrtc/media/base/videosinkinterface.h"
|
||||
#include "webrtc/media/base/videosourceinterface.h"
|
||||
#include "webrtc/call.h"
|
||||
#include "webrtc/media/base/mediaengine.h"
|
||||
#include "webrtc/media/engine/webrtcvideochannelfactory.h"
|
||||
@ -226,7 +228,8 @@ class WebRtcVideoChannel2 : public VideoMediaChannel,
|
||||
|
||||
// Wrapper for the sender part, this is where the capturer is connected and
|
||||
// frames are then converted from cricket frames to webrtc frames.
|
||||
class WebRtcVideoSendStream : public sigslot::has_slots<> {
|
||||
class WebRtcVideoSendStream
|
||||
: public rtc::VideoSinkInterface<cricket::VideoFrame> {
|
||||
public:
|
||||
WebRtcVideoSendStream(
|
||||
webrtc::Call* call,
|
||||
@ -238,13 +241,13 @@ class WebRtcVideoChannel2 : public VideoMediaChannel,
|
||||
const rtc::Optional<VideoCodecSettings>& codec_settings,
|
||||
const std::vector<webrtc::RtpExtension>& rtp_extensions,
|
||||
const VideoSendParameters& send_params);
|
||||
~WebRtcVideoSendStream();
|
||||
virtual ~WebRtcVideoSendStream();
|
||||
|
||||
void SetOptions(const VideoOptions& options);
|
||||
// TODO(pbos): Move logic from SetOptions into this method.
|
||||
void SetSendParameters(const ChangedSendParameters& send_params);
|
||||
|
||||
void InputFrame(VideoCapturer* capturer, const VideoFrame* frame);
|
||||
void OnFrame(const cricket::VideoFrame& frame) override;
|
||||
bool SetCapturer(VideoCapturer* capturer);
|
||||
void MuteStream(bool mute);
|
||||
bool DisconnectCapturer();
|
||||
@ -340,6 +343,7 @@ class WebRtcVideoChannel2 : public VideoMediaChannel,
|
||||
const std::vector<uint32_t> ssrcs_;
|
||||
const std::vector<SsrcGroup> ssrc_groups_;
|
||||
webrtc::Call* const call_;
|
||||
rtc::VideoSinkWants sink_wants_;
|
||||
WebRtcVideoEncoderFactory* const external_encoder_factory_
|
||||
GUARDED_BY(lock_);
|
||||
|
||||
|
||||
@ -35,8 +35,6 @@
|
||||
'base/audiorenderer.h',
|
||||
'base/capturemanager.cc',
|
||||
'base/capturemanager.h',
|
||||
'base/capturerenderadapter.cc',
|
||||
'base/capturerenderadapter.h',
|
||||
'base/codec.cc',
|
||||
'base/codec.h',
|
||||
'base/constants.cc',
|
||||
@ -64,6 +62,8 @@
|
||||
'base/turnutils.h',
|
||||
'base/videoadapter.cc',
|
||||
'base/videoadapter.h',
|
||||
'base/videobroadcaster.cc',
|
||||
'base/videobroadcaster.h',
|
||||
'base/videocapturer.cc',
|
||||
'base/videocapturer.h',
|
||||
'base/videocapturerfactory.h',
|
||||
|
||||
@ -86,7 +86,6 @@ void ChannelManager::Construct(MediaEngineInterface* me,
|
||||
main_thread_ = rtc::Thread::Current();
|
||||
worker_thread_ = worker_thread;
|
||||
audio_output_volume_ = kNotSetOutputVolume;
|
||||
local_renderer_ = NULL;
|
||||
capturing_ = false;
|
||||
enable_rtx_ = false;
|
||||
|
||||
|
||||
@ -214,7 +214,6 @@ class ChannelManager : public rtc::MessageHandler,
|
||||
DataChannels data_channels_;
|
||||
|
||||
int audio_output_volume_;
|
||||
VideoRenderer* local_renderer_;
|
||||
bool enable_rtx_;
|
||||
|
||||
bool capturing_;
|
||||
|
||||