AndroidVideoCapturerJni: Fix threading issues
The primary fix in this CL is to remove the dangling |thread_| pointer in
AndroidVideoCapturerJni. That thread is not safe to use after Stop() has been
called. Even after Stop() has been called, we must still be able to return late
frames to Java in order to not leak them, so that path has been made thread
safe instead. To make sure that we always return frames, the Java frame should
be wrapped in a scoped_refptr as quickly as possible, so this CL moves the
wrapping from AndroidVideoCapturer to AndroidVideoCapturerJni. This also
removes the need for the interface function
AndroidVideoCapturerDelegate::ReturnBuffer().

Some other minor changes are:
* Remove |valid_global_refs_| and all logic related to that. Now that
  rtc::Bind() captures method objects as scoped_refptr, the destructor of
  AndroidVideoCapturerJni will not be called before all frames are returned.
* Remove global ref |j_frame_observer_|. No need for this; we don't call it and
  it is kept alive with standard Java memory management.
* Add helper function ShallowCenterCrop() for VideoFrameBuffers. This
  functionality already exists in the constructor of WrappedI420Buffer, but
  it's more convenient to have it as a separate function.

BUG=webrtc:4742,webrtc:4909
R=glaznev@webrtc.org, tommi@webrtc.org

Review URL: https://codereview.webrtc.org/1307973002 .

Cr-Commit-Position: refs/heads/master@{#9784}
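A standalone sketch of the ownership pattern described above (hypothetical names; not the WebRTC classes): the Java frame is wrapped in a ref-counted buffer as soon as it enters native code, and the wrapper's destructor returns it, so the buffer comes back exactly once no matter which thread drops the last reference, even for a late frame that arrives after Stop().

// Sketch only; names and types are illustrative stand-ins.
#include <cstdint>
#include <cstdio>
#include <functional>
#include <memory>

// Stand-in for the JNI call VideoCapturerAndroid.returnBuffer(timeStamp).
void ReturnBufferToJava(int64_t time_stamp) {
  std::printf("returnBuffer(%lld)\n", static_cast<long long>(time_stamp));
}

// Stand-in for a ref-counted VideoFrameBuffer wrapping a Java-owned byte array.
class WrappedFrame {
 public:
  WrappedFrame(const uint8_t* data, int64_t time_stamp,
               std::function<void(int64_t)> return_cb)
      : data_(data), time_stamp_(time_stamp), return_cb_(std::move(return_cb)) {}
  ~WrappedFrame() { return_cb_(time_stamp_); }  // Always return the buffer.
  const uint8_t* data() const { return data_; }

 private:
  const uint8_t* data_;
  int64_t time_stamp_;
  std::function<void(int64_t)> return_cb_;
};

int main() {
  uint8_t fake_pixels[16] = {0};
  // Wrap as early as possible, right where the frame enters native code.
  auto frame =
      std::make_shared<WrappedFrame>(fake_pixels, 1234, &ReturnBufferToJava);
  frame.reset();  // Last reference gone: the buffer is returned exactly once.
  return 0;
}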
(File names below are inferred from the hunk contents.)

talk/app/webrtc/androidvideocapturer.cc
@@ -27,69 +27,50 @@
 #include "talk/app/webrtc/androidvideocapturer.h"

 #include "talk/media/webrtc/webrtcvideoframe.h"
-#include "webrtc/base/bind.h"
-#include "webrtc/base/callback.h"
 #include "webrtc/base/common.h"
 #include "webrtc/base/json.h"
 #include "webrtc/base/timeutils.h"
-#include "webrtc/base/thread.h"
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"

 namespace webrtc {

-using cricket::WebRtcVideoFrame;
-using rtc::scoped_ptr;
-using rtc::scoped_refptr;
-
-// An implementation of cricket::VideoFrameFactory for frames that are not
-// guaranteed to outlive the created cricket::VideoFrame.
-// A frame is injected using UpdateCapturedFrame, and converted into a
-// cricket::VideoFrame with
-// CreateAliasedFrame. UpdateCapturedFrame should be called before
-// CreateAliasedFrame for every frame.
+// A hack for avoiding deep frame copies in
+// cricket::VideoCapturer.SignalFrameCaptured() using a custom FrameFactory.
+// A frame is injected using UpdateCapturedFrame(), and converted into a
+// cricket::VideoFrame with CreateAliasedFrame(). UpdateCapturedFrame() should
+// be called before CreateAliasedFrame() for every frame.
+// TODO(magjed): Add an interface cricket::VideoCapturer::OnFrameCaptured()
+// for ref counted I420 frames instead of this hack.
 class AndroidVideoCapturer::FrameFactory : public cricket::VideoFrameFactory {
  public:
-  FrameFactory(int width,
-               int height,
-               const scoped_refptr<AndroidVideoCapturerDelegate>& delegate)
+  FrameFactory(const rtc::scoped_refptr<AndroidVideoCapturerDelegate>& delegate)
       : start_time_(rtc::TimeNanos()), delegate_(delegate) {
     // Create a CapturedFrame that only contains header information, not the
     // actual pixel data.
-    captured_frame_.width = width;
-    captured_frame_.height = height;
     captured_frame_.pixel_height = 1;
     captured_frame_.pixel_width = 1;
-    captured_frame_.rotation = 0;
-    captured_frame_.data = NULL;
+    captured_frame_.data = nullptr;
     captured_frame_.data_size = cricket::CapturedFrame::kUnknownDataSize;
     captured_frame_.fourcc = static_cast<uint32>(cricket::FOURCC_ANY);
   }

-  void UpdateCapturedFrame(void* frame_data,
-                           int length,
-                           int width,
-                           int height,
-                           int rotation,
-                           int64 time_stamp_in_ns) {
-    // Make sure we don't overwrite the previous frame.
-    CHECK(captured_frame_.data == nullptr);
-    captured_frame_.fourcc = static_cast<uint32>(cricket::FOURCC_YV12);
-    captured_frame_.data = frame_data;
-    captured_frame_.width = width;
-    captured_frame_.height = height;
+  void UpdateCapturedFrame(
+      const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
+      int rotation,
+      int64 time_stamp_in_ns) {
+    buffer_ = buffer;
+    captured_frame_.width = buffer->width();
+    captured_frame_.height = buffer->height();
     captured_frame_.elapsed_time = rtc::TimeNanos() - start_time_;
     captured_frame_.time_stamp = time_stamp_in_ns;
     captured_frame_.rotation = rotation;
-    captured_frame_.data_size = length;
   }

-  void ClearCapturedFrame() const {
-    captured_frame_.data = nullptr;
+  void ClearCapturedFrame() {
+    buffer_ = nullptr;
     captured_frame_.width = 0;
     captured_frame_.height = 0;
     captured_frame_.elapsed_time = 0;
     captured_frame_.time_stamp = 0;
-    captured_frame_.data_size = 0;
   }

   const cricket::CapturedFrame* GetCapturedFrame() const {
@@ -100,64 +81,23 @@ class AndroidVideoCapturer::FrameFactory : public cricket::VideoFrameFactory {
       const cricket::CapturedFrame* captured_frame,
       int dst_width,
       int dst_height) const override {
-    // This override of CreateAliasedFrame creates a copy of the frame since
-    // |captured_frame_.data| is only guaranteed to be valid during the scope
-    // of |AndroidVideoCapturer::OnIncomingFrame_w|.
     // Check that captured_frame is actually our frame.
     CHECK(captured_frame == &captured_frame_);
-    CHECK(captured_frame->data != nullptr);
-
-    if (!apply_rotation_ || captured_frame->rotation == kVideoRotation_0) {
-      CHECK(captured_frame->fourcc == cricket::FOURCC_YV12);
-      const uint8_t* y_plane = static_cast<uint8_t*>(captured_frame_.data);
-
-      // Android guarantees that the stride is a multiple of 16.
-      // http://developer.android.com/reference/android/hardware/Camera.Parameters.html#setPreviewFormat%28int%29
-      int y_stride;
-      int uv_stride;
-      webrtc::Calc16ByteAlignedStride(captured_frame->width, &y_stride,
-                                      &uv_stride);
-      const uint8_t* v_plane = y_plane + y_stride * captured_frame->height;
-      const uint8_t* u_plane =
-          v_plane + uv_stride * webrtc::AlignInt(captured_frame->height, 2) / 2;
-
-      // Create a WrappedI420Buffer and bind the |no_longer_used| callback
-      // to the static method ReturnFrame. The |delegate_| is bound as an
-      // argument which means that the callback will hold a reference to
-      // |delegate_|.
-      rtc::scoped_refptr<WrappedI420Buffer> buffer(
-          new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
-              dst_width, dst_height, captured_frame->width,
-              captured_frame->height, y_plane, y_stride, u_plane, uv_stride,
-              v_plane, uv_stride,
-              rtc::Bind(&AndroidVideoCapturer::FrameFactory::ReturnFrame,
-                        delegate_,
-                        captured_frame->time_stamp)));
-      cricket::VideoFrame* cricket_frame = new WebRtcVideoFrame(
-          buffer, captured_frame->elapsed_time,
-          captured_frame->time_stamp, captured_frame->GetRotation());
-      // |cricket_frame| is now responsible for returning the frame. Clear
-      // |captured_frame_| so the frame isn't returned twice.
-      ClearCapturedFrame();
-      return cricket_frame;
-    }
-
-    scoped_ptr<WebRtcVideoFrame> frame(new WebRtcVideoFrame());
-    frame->Init(captured_frame, dst_width, dst_height, apply_rotation_);
-    return frame.release();
-  }
-
-  static void ReturnFrame(scoped_refptr<AndroidVideoCapturerDelegate> delegate,
-                          int64 time_stamp) {
-    delegate->ReturnBuffer(time_stamp);
+    rtc::scoped_ptr<cricket::VideoFrame> frame(new cricket::WebRtcVideoFrame(
+        ShallowCenterCrop(buffer_, dst_width, dst_height),
+        captured_frame->elapsed_time, captured_frame->time_stamp,
+        captured_frame->GetRotation()));
+    // Caller takes ownership.
+    // TODO(magjed): Change CreateAliasedFrame() to return a rtc::scoped_ptr.
+    return apply_rotation_ ? frame->GetCopyWithRotationApplied()->Copy()
+                           : frame.release();
   }

  private:
   uint64 start_time_;
-  // |captured_frame_| is mutable as a hacky way to modify it inside
-  // CreateAliasedframe().
-  mutable cricket::CapturedFrame captured_frame_;
-  scoped_refptr<AndroidVideoCapturerDelegate> delegate_;
+  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer_;
+  cricket::CapturedFrame captured_frame_;
+  rtc::scoped_refptr<AndroidVideoCapturerDelegate> delegate_;
 };

 AndroidVideoCapturer::AndroidVideoCapturer(
@@ -202,8 +142,7 @@ cricket::CaptureState AndroidVideoCapturer::Start(
   CHECK(thread_checker_.CalledOnValidThread());
   CHECK(!running_);

-  frame_factory_ = new AndroidVideoCapturer::FrameFactory(
-      capture_format.width, capture_format.height, delegate_.get());
+  frame_factory_ = new AndroidVideoCapturer::FrameFactory(delegate_.get());
   set_frame_factory(frame_factory_);

   running_ = true;
@@ -252,24 +191,14 @@ void AndroidVideoCapturer::OnCapturerStarted(bool success) {
   SignalStateChange(this, new_state);
 }

-void AndroidVideoCapturer::OnIncomingFrame(void* frame_data,
-                                           int length,
-                                           int width,
-                                           int height,
-                                           int rotation,
-                                           int64 time_stamp) {
+void AndroidVideoCapturer::OnIncomingFrame(
+    rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer,
+    int rotation,
+    int64 time_stamp) {
   CHECK(thread_checker_.CalledOnValidThread());
-  frame_factory_->UpdateCapturedFrame(frame_data, length, width, height,
-                                      rotation, time_stamp);
+  frame_factory_->UpdateCapturedFrame(buffer, rotation, time_stamp);
   SignalFrameCaptured(this, frame_factory_->GetCapturedFrame());
-  if (frame_factory_->GetCapturedFrame()->data == nullptr) {
-    // Ownership has been passed to a WrappedI420Buffer. Do nothing.
-  } else {
-    // |captured_frame_| has either been copied or dropped, return it
-    // immediately.
-    delegate_->ReturnBuffer(time_stamp);
-    frame_factory_->ClearCapturedFrame();
-  }
+  frame_factory_->ClearCapturedFrame();
 }

 void AndroidVideoCapturer::OnOutputFormatRequest(
talk/app/webrtc/androidvideocapturer.h
@@ -32,6 +32,7 @@
 #include "talk/media/base/videocapturer.h"
 #include "webrtc/base/thread_checker.h"
+#include "webrtc/common_video/interface/video_frame_buffer.h"

 namespace webrtc {

@@ -49,10 +50,6 @@ class AndroidVideoCapturerDelegate : public rtc::RefCountInterface {
   // The delegate may not call into AndroidVideoCapturer after this call.
   virtual void Stop() = 0;

-  // Notify that a frame received in OnIncomingFrame with |time_stamp| has been
-  // processed and can be returned. May be called on an arbitrary thread.
-  virtual void ReturnBuffer(int64 time_stamp) = 0;
-
   // Must returns a JSON string "{{width=xxx, height=xxx, framerate = xxx}}"
   virtual std::string GetSupportedFormats() = 0;
 };
@@ -69,10 +66,8 @@ class AndroidVideoCapturer : public cricket::VideoCapturer {
   void OnCapturerStarted(bool success);

   // Called from JNI when a new frame has been captured.
-  void OnIncomingFrame(void* video_frame,
-                       int length,
-                       int width,
-                       int height,
+  // Argument |buffer| is intentionally by value, for use with rtc::Bind.
+  void OnIncomingFrame(rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer,
                        int rotation,
                        int64 time_stamp);

talk/app/webrtc/java/jni/androidvideocapturer_jni.cc
@@ -29,6 +29,7 @@
 #include "talk/app/webrtc/java/jni/androidvideocapturer_jni.h"
 #include "talk/app/webrtc/java/jni/classreferenceholder.h"
 #include "webrtc/base/bind.h"
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"

 namespace webrtc_jni {

@@ -70,9 +71,7 @@ AndroidVideoCapturerJni::AndroidVideoCapturerJni(JNIEnv* jni,
           jni,
           FindClass(jni,
                     "org/webrtc/VideoCapturerAndroid$NativeObserver")),
-      capturer_(nullptr),
-      thread_(nullptr),
-      valid_global_refs_(true) {
+      capturer_(nullptr) {
   LOG(LS_INFO) << "AndroidVideoCapturerJni ctor";
   thread_checker_.DetachFromThread();
 }
@@ -88,30 +87,24 @@ bool AndroidVideoCapturerJni::Init(jstring device_name) {
 }

 AndroidVideoCapturerJni::~AndroidVideoCapturerJni() {
-  valid_global_refs_ = false;
-  if (thread_ != nullptr) {
-    LOG(LS_INFO) << "AndroidVideoCapturerJni dtor - flush invoker";
-    invoker_.Flush(thread_);
-  }
-  LOG(LS_INFO) << "AndroidVideoCapturerJni dtor done";
+  LOG(LS_INFO) << "AndroidVideoCapturerJni dtor";
 }

 void AndroidVideoCapturerJni::Start(int width, int height, int framerate,
                                     webrtc::AndroidVideoCapturer* capturer) {
   LOG(LS_INFO) << "AndroidVideoCapturerJni start";
-  CHECK(thread_checker_.CalledOnValidThread());
-  CHECK(capturer_ == nullptr);
-  thread_ = rtc::Thread::Current();
-  capturer_ = capturer;
-
-  j_frame_observer_ = NewGlobalRef(
-      jni(),
+  DCHECK(thread_checker_.CalledOnValidThread());
+  {
+    rtc::CritScope cs(&capturer_lock_);
+    CHECK(capturer_ == nullptr);
+    CHECK(invoker_.get() == nullptr);
+    capturer_ = capturer;
+    invoker_.reset(new rtc::GuardedAsyncInvoker());
+  }
+  jobject j_frame_observer =
       jni()->NewObject(*j_observer_class_,
-                       GetMethodID(jni(),
-                                   *j_observer_class_,
-                                   "<init>",
-                                   "(J)V"),
-                       jlongFromPointer(this)));
+                       GetMethodID(jni(), *j_observer_class_, "<init>", "(J)V"),
+                       jlongFromPointer(this));
   CHECK_EXCEPTION(jni()) << "error during NewObject";

   jmethodID m = GetMethodID(
@@ -122,33 +115,40 @@ void AndroidVideoCapturerJni::Start(int width, int height, int framerate,
                           m, width, height,
                           framerate,
                           application_context_,
-                          j_frame_observer_);
+                          j_frame_observer);
   CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.startCapture";
 }

 void AndroidVideoCapturerJni::Stop() {
   LOG(LS_INFO) << "AndroidVideoCapturerJni stop";
-  CHECK(thread_checker_.CalledOnValidThread());
-  capturer_ = nullptr;
+  DCHECK(thread_checker_.CalledOnValidThread());
+  {
+    rtc::CritScope cs(&capturer_lock_);
+    // Destroying |invoker_| will cancel all pending calls to |capturer_|.
+    invoker_ = nullptr;
+    capturer_ = nullptr;
+  }
   jmethodID m = GetMethodID(jni(), *j_video_capturer_class_,
                             "stopCapture", "()V");
   jni()->CallVoidMethod(*j_capturer_global_, m);
   CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.stopCapture";
-  DeleteGlobalRef(jni(), j_frame_observer_);
   LOG(LS_INFO) << "AndroidVideoCapturerJni stop done";
 }

-void AndroidVideoCapturerJni::ReturnBuffer(int64 time_stamp) {
-  invoker_.AsyncInvoke<void>(
-      thread_,
-      rtc::Bind(&AndroidVideoCapturerJni::ReturnBuffer_w, this, time_stamp));
-}
-
-void AndroidVideoCapturerJni::ReturnBuffer_w(int64 time_stamp) {
-  if (!valid_global_refs_) {
-    LOG(LS_ERROR) << "ReturnBuffer_w is called for invalid global refs.";
+template <typename... Args>
+void AndroidVideoCapturerJni::AsyncCapturerInvoke(
+    const char* method_name,
+    void (webrtc::AndroidVideoCapturer::*method)(Args...),
+    Args... args) {
+  rtc::CritScope cs(&capturer_lock_);
+  if (!invoker_) {
+    LOG(LS_WARNING) << method_name << "() called for closed capturer.";
     return;
   }
+  invoker_->AsyncInvoke<void>(rtc::Bind(method, capturer_, args...));
+}
+
+void AndroidVideoCapturerJni::ReturnBuffer(int64 time_stamp) {
   jmethodID m = GetMethodID(jni(), *j_video_capturer_class_,
                             "returnBuffer", "(J)V");
   jni()->CallVoidMethod(*j_capturer_global_, m, time_stamp);
@@ -166,10 +166,10 @@ std::string AndroidVideoCapturerJni::GetSupportedFormats() {
 }

 void AndroidVideoCapturerJni::OnCapturerStarted(bool success) {
   LOG(LS_INFO) << "AndroidVideoCapturerJni capture started: " << success;
-  invoker_.AsyncInvoke<void>(
-      thread_,
-      rtc::Bind(&AndroidVideoCapturerJni::OnCapturerStarted_w, this, success));
+  AsyncCapturerInvoke("OnCapturerStarted",
+                      &webrtc::AndroidVideoCapturer::OnCapturerStarted,
+                      success);
 }

 void AndroidVideoCapturerJni::OnIncomingFrame(void* video_frame,
@@ -178,57 +178,34 @@ void AndroidVideoCapturerJni::OnIncomingFrame(void* video_frame,
                                               int height,
                                               int rotation,
                                               int64 time_stamp) {
-  invoker_.AsyncInvoke<void>(
-      thread_,
-      rtc::Bind(&AndroidVideoCapturerJni::OnIncomingFrame_w, this, video_frame,
-                length, width, height, rotation, time_stamp));
+  const uint8_t* y_plane = static_cast<uint8_t*>(video_frame);
+  // Android guarantees that the stride is a multiple of 16.
+  // http://developer.android.com/reference/android/hardware/Camera.Parameters.html#setPreviewFormat%28int%29
+  int y_stride;
+  int uv_stride;
+  webrtc::Calc16ByteAlignedStride(width, &y_stride, &uv_stride);
+  const uint8_t* v_plane = y_plane + y_stride * height;
+  const uint8_t* u_plane =
+      v_plane + uv_stride * webrtc::AlignInt(height, 2) / 2;
+
+  // Wrap the Java buffer, and call ReturnBuffer() in the wrapped
+  // VideoFrameBuffer destructor.
+  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
+      new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
+          width, height, y_plane, y_stride, u_plane, uv_stride, v_plane,
+          uv_stride,
+          rtc::Bind(&AndroidVideoCapturerJni::ReturnBuffer, this, time_stamp)));
+  AsyncCapturerInvoke("OnIncomingFrame",
+                      &webrtc::AndroidVideoCapturer::OnIncomingFrame,
+                      buffer, rotation, time_stamp);
 }

 void AndroidVideoCapturerJni::OnOutputFormatRequest(int width,
                                                     int height,
                                                     int fps) {
-  invoker_.AsyncInvoke<void>(
-      thread_,
-      rtc::Bind(&AndroidVideoCapturerJni::OnOutputFormatRequest_w,
-                this, width, height, fps));
-}
-
-void AndroidVideoCapturerJni::OnCapturerStarted_w(bool success) {
-  CHECK(thread_checker_.CalledOnValidThread());
-  if (capturer_) {
-    capturer_->OnCapturerStarted(success);
-  } else {
-    LOG(LS_WARNING) << "OnCapturerStarted_w is called for closed capturer.";
-  }
-}
-
-void AndroidVideoCapturerJni::OnIncomingFrame_w(void* video_frame,
-                                                int length,
-                                                int width,
-                                                int height,
-                                                int rotation,
-                                                int64 time_stamp) {
-  CHECK(thread_checker_.CalledOnValidThread());
-  if (capturer_) {
-    capturer_->OnIncomingFrame(video_frame, length, width, height, rotation,
-                               time_stamp);
-  } else {
-    LOG(LS_INFO) <<
-        "Frame arrived after camera has been stopped: " << time_stamp <<
-        ". Valid global refs: " << valid_global_refs_;
-    ReturnBuffer_w(time_stamp);
-  }
-}
-
-void AndroidVideoCapturerJni::OnOutputFormatRequest_w(int width,
-                                                      int height,
-                                                      int fps) {
-  CHECK(thread_checker_.CalledOnValidThread());
-  if (capturer_) {
-    capturer_->OnOutputFormatRequest(width, height, fps);
-  } else {
-    LOG(LS_WARNING) << "OnOutputFormatRequest_w is called for closed capturer.";
-  }
+  AsyncCapturerInvoke("OnOutputFormatRequest",
+                      &webrtc::AndroidVideoCapturer::OnOutputFormatRequest,
+                      width, height, fps);
 }

 JNIEnv* AndroidVideoCapturerJni::jni() { return AttachCurrentThreadIfNeeded(); }
@@ -238,16 +215,14 @@ JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeOnFrameCaptured)
     jint width, jint height, jint rotation, jlong ts) {
   jboolean is_copy = true;
   jbyte* bytes = jni->GetByteArrayElements(j_frame, &is_copy);
-  if (!is_copy) {
-    reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)
-        ->OnIncomingFrame(bytes, length, width, height, rotation, ts);
-  } else {
-    // If this is a copy of the original frame, it means that the memory
-    // is not direct memory and thus VideoCapturerAndroid does not guarantee
-    // that the memory is valid when we have released |j_frame|.
-    LOG(LS_ERROR) << "NativeObserver_nativeOnFrameCaptured: frame is a copy";
-    CHECK(false) << "j_frame is a copy.";
-  }
+  // If this is a copy of the original frame, it means that the memory
+  // is not direct memory and thus VideoCapturerAndroid does not guarantee
+  // that the memory is valid when we have released |j_frame|.
+  // TODO(magjed): Move ReleaseByteArrayElements() into ReturnBuffer() and
+  // remove this check.
+  CHECK(!is_copy) << "NativeObserver_nativeOnFrameCaptured: frame is a copy";
+  reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)
+      ->OnIncomingFrame(bytes, length, width, height, rotation, ts);
   jni->ReleaseByteArrayElements(j_frame, bytes, JNI_ABORT);
 }
@@ -267,4 +242,3 @@ JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeOnOutputFormatRequest)
 }

 }  // namespace webrtc_jni
-
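The AsyncCapturerInvoke() helper above combines rtc::GuardedAsyncInvoker with a critical section. A standalone sketch of that guarded-call idea (hypothetical names, heavily simplified; not the rtc implementation): every cross-thread call to the capturer goes through one gate that Stop() can close, so a late callback can never reach a destroyed capturer.

#include <functional>
#include <mutex>
#include <vector>

class CapturerGate {
 public:
  // Queues a call if the gate is still open; drops it otherwise.
  bool Post(std::function<void()> call) {
    std::lock_guard<std::mutex> lock(mutex_);
    if (closed_)
      return false;
    // A real invoker would marshal this to the thread Start() was called on.
    pending_.push_back(std::move(call));
    return true;
  }

  // Called from Stop(): nothing queued after this point will ever run.
  void Close() {
    std::lock_guard<std::mutex> lock(mutex_);
    closed_ = true;
    pending_.clear();
  }

 private:
  std::mutex mutex_;
  bool closed_ = false;
  std::vector<std::function<void()>> pending_;
};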
talk/app/webrtc/java/jni/androidvideocapturer_jni.h
@@ -34,6 +34,7 @@
 #include "talk/app/webrtc/androidvideocapturer.h"
 #include "talk/app/webrtc/java/jni/jni_helpers.h"
 #include "webrtc/base/asyncinvoker.h"
+#include "webrtc/base/criticalsection.h"
 #include "webrtc/base/thread_checker.h"

 namespace webrtc_jni {
@@ -56,8 +57,6 @@ class AndroidVideoCapturerJni : public webrtc::AndroidVideoCapturerDelegate {
              webrtc::AndroidVideoCapturer* capturer) override;
   void Stop() override;

-  void ReturnBuffer(int64 time_stamp) override;
-
   std::string GetSupportedFormats() override;

   // Called from VideoCapturerAndroid::NativeObserver on a Java thread.
@@ -75,34 +74,31 @@ protected:

 private:
  bool Init(jstring device_name);
-  void OnCapturerStarted_w(bool success);
-  void OnCapturerStopped_w();
-  void OnIncomingFrame_w(void* video_frame,
-                         int length,
-                         int width,
-                         int height,
-                         int rotation,
-                         int64 time_stamp);
-  void OnOutputFormatRequest_w(int width, int height, int fps);
-  void ReturnBuffer_w(int64 time_stamp);
+  void ReturnBuffer(int64 time_stamp);

  JNIEnv* jni();

+  // Helper function to make safe asynchronous calls to |capturer_|. The calls
+  // are not guaranteed to be delivered.
+  template <typename... Args>
+  void AsyncCapturerInvoke(
+      const char* method_name,
+      void (webrtc::AndroidVideoCapturer::*method)(Args...),
+      Args... args);
+
  const ScopedGlobalRef<jobject> j_capturer_global_;
  const ScopedGlobalRef<jclass> j_video_capturer_class_;
  const ScopedGlobalRef<jclass> j_observer_class_;
-  volatile bool valid_global_refs_;
-  jobject j_frame_observer_;

  rtc::ThreadChecker thread_checker_;

-  rtc::Thread* thread_;  // The thread where Start is called on.
  // |capturer| is a guaranteed to be a valid pointer between a call to
  // AndroidVideoCapturerDelegate::Start
  // until AndroidVideoCapturerDelegate::Stop.
-  webrtc::AndroidVideoCapturer* capturer_;
-  rtc::AsyncInvoker invoker_;
+  rtc::CriticalSection capturer_lock_;
+  webrtc::AndroidVideoCapturer* capturer_ GUARDED_BY(capturer_lock_);
+  // |invoker_| is used to communicate with |capturer_| on the thread Start() is
+  // called on.
+  rtc::scoped_ptr<rtc::GuardedAsyncInvoker> invoker_ GUARDED_BY(capturer_lock_);

  static jobject application_context_;
VideoCapturerAndroid.java
@@ -720,17 +720,8 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
     frameObserver.OnOutputFormatRequest(width, height, fps);
   }

-  synchronized void returnBuffer(final long timeStamp) {
-    if (cameraThreadHandler == null) {
-      // The camera has been stopped.
-      videoBuffers.returnBuffer(timeStamp);
-      return;
-    }
-    cameraThreadHandler.post(new Runnable() {
-      @Override public void run() {
-        videoBuffers.returnBuffer(timeStamp);
-      }
-    });
+  void returnBuffer(long timeStamp) {
+    videoBuffers.returnBuffer(timeStamp);
   }

   private int getDeviceOrientation() {
@@ -854,7 +845,8 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
   }

   // Class used for allocating and bookkeeping video frames. All buffers are
-  // direct allocated so that they can be directly used from native code.
+  // direct allocated so that they can be directly used from native code. This class is
+  // synchronized and can be called from multiple threads.
   private static class FramePool {
     // Arbitrary queue depth. Higher number means more memory allocated & held,
     // lower number means more sensitivity to processing time in the client (and
@@ -869,12 +861,12 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
     private int frameSize = 0;
     private Camera camera;

-    int numCaptureBuffersAvailable() {
+    synchronized int numCaptureBuffersAvailable() {
       return queuedBuffers.size();
     }

     // Discards previous queued buffers and adds new callback buffers to camera.
-    void queueCameraBuffers(int frameSize, Camera camera) {
+    synchronized void queueCameraBuffers(int frameSize, Camera camera) {
       this.camera = camera;
       this.frameSize = frameSize;

@@ -888,7 +880,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
           + " buffers of size " + frameSize + ".");
     }

-    String pendingFramesTimeStamps() {
+    synchronized String pendingFramesTimeStamps() {
       List<Long> timeStampsMs = new ArrayList<Long>();
       for (Long timeStampNs : pendingBuffers.keySet()) {
         timeStampsMs.add(TimeUnit.NANOSECONDS.toMillis(timeStampNs));
@@ -896,7 +888,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
       return timeStampsMs.toString();
     }

-    void stopReturnBuffersToCamera() {
+    synchronized void stopReturnBuffersToCamera() {
       this.camera = null;
       queuedBuffers.clear();
       // Frames in |pendingBuffers| need to be kept alive until they are returned.
@@ -906,7 +898,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
           : " Pending buffers: " + pendingFramesTimeStamps() + "."));
     }

-    boolean reserveByteBuffer(byte[] data, long timeStamp) {
+    synchronized boolean reserveByteBuffer(byte[] data, long timeStamp) {
       final ByteBuffer buffer = queuedBuffers.remove(data);
       if (buffer == null) {
         // Frames might be posted to |onPreviewFrame| with the previous format while changing
@@ -930,7 +922,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
       return true;
     }

-    void returnBuffer(long timeStamp) {
+    synchronized void returnBuffer(long timeStamp) {
       final ByteBuffer returnedFrame = pendingBuffers.remove(timeStamp);
       if (returnedFrame == null) {
         throw new RuntimeException("unknown data buffer with time stamp "
webrtc/common_video/interface/video_frame_buffer.h
@@ -109,9 +109,7 @@ class NativeHandleBuffer : public VideoFrameBuffer {

 class WrappedI420Buffer : public webrtc::VideoFrameBuffer {
  public:
-  WrappedI420Buffer(int desired_width,
-                    int desired_height,
-                    int width,
+  WrappedI420Buffer(int width,
                     int height,
                     const uint8_t* y_plane,
                     int y_stride,
@@ -135,17 +133,24 @@ class WrappedI420Buffer : public webrtc::VideoFrameBuffer {
   friend class rtc::RefCountedObject<WrappedI420Buffer>;
   ~WrappedI420Buffer() override;

-  int width_;
-  int height_;
-  const uint8_t* y_plane_;
-  const uint8_t* u_plane_;
-  const uint8_t* v_plane_;
+  const int width_;
+  const int height_;
+  const uint8_t* const y_plane_;
+  const uint8_t* const u_plane_;
+  const uint8_t* const v_plane_;
   const int y_stride_;
   const int u_stride_;
   const int v_stride_;
   rtc::Callback0<void> no_longer_used_cb_;
 };

+// Helper function to crop |buffer| without making a deep copy. May only be used
+// for non-native frames.
+rtc::scoped_refptr<VideoFrameBuffer> ShallowCenterCrop(
+    const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
+    int cropped_width,
+    int cropped_height);
+
 }  // namespace webrtc

 #endif  // WEBRTC_VIDEO_FRAME_BUFFER_H_
video_frame_buffer.cc
@@ -17,6 +17,12 @@
 static const int kBufferAlignment = 64;

 namespace webrtc {
+namespace {
+
+// Used in rtc::Bind to keep a buffer alive until destructor is called.
+static void NoLongerUsedCallback(rtc::scoped_refptr<VideoFrameBuffer> dummy) {}
+
+}  // anonymous namespace

 VideoFrameBuffer::~VideoFrameBuffer() {}

@@ -135,9 +141,7 @@ void* NativeHandleBuffer::native_handle() const {
   return native_handle_;
 }

-WrappedI420Buffer::WrappedI420Buffer(int desired_width,
-                                     int desired_height,
-                                     int width,
+WrappedI420Buffer::WrappedI420Buffer(int width,
                                      int height,
                                      const uint8_t* y_plane,
                                      int y_stride,
@@ -146,31 +150,21 @@ WrappedI420Buffer::WrappedI420Buffer(int desired_width,
                                      const uint8_t* v_plane,
                                      int v_stride,
                                      const rtc::Callback0<void>& no_longer_used)
-    : width_(desired_width),
-      height_(desired_height),
+    : width_(width),
+      height_(height),
       y_plane_(y_plane),
       u_plane_(u_plane),
       v_plane_(v_plane),
       y_stride_(y_stride),
       u_stride_(u_stride),
       v_stride_(v_stride),
       no_longer_used_cb_(no_longer_used) {
-  CHECK(width >= desired_width && height >= desired_height);
-
-  // Center crop to |desired_width| x |desired_height|.
-  // Make sure offset is even so that u/v plane becomes aligned.
-  const int offset_x = ((width - desired_width) / 2) & ~1;
-  const int offset_y = ((height - desired_height) / 2) & ~1;
-  y_plane_ += y_stride_ * offset_y + offset_x;
-  u_plane_ += u_stride_ * (offset_y / 2) + (offset_x / 2);
-  v_plane_ += v_stride_ * (offset_y / 2) + (offset_x / 2);
 }

 WrappedI420Buffer::~WrappedI420Buffer() {
   no_longer_used_cb_();
 }

 int WrappedI420Buffer::width() const {
   return width_;
 }
@@ -221,4 +215,37 @@ rtc::scoped_refptr<VideoFrameBuffer> WrappedI420Buffer::NativeToI420Buffer() {
   return nullptr;
 }

+rtc::scoped_refptr<VideoFrameBuffer> ShallowCenterCrop(
+    const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
+    int cropped_width,
+    int cropped_height) {
+  CHECK(buffer->native_handle() == nullptr);
+  CHECK_LE(cropped_width, buffer->width());
+  CHECK_LE(cropped_height, buffer->height());
+  if (buffer->width() == cropped_width && buffer->height() == cropped_height)
+    return buffer;
+
+  // Center crop to |cropped_width| x |cropped_height|.
+  // Make sure offset is even so that u/v plane becomes aligned.
+  const int uv_offset_x = (buffer->width() - cropped_width) / 4;
+  const int uv_offset_y = (buffer->height() - cropped_height) / 4;
+  const int offset_x = uv_offset_x * 2;
+  const int offset_y = uv_offset_y * 2;
+
+  // Const cast to call the correct const-version of data().
+  const VideoFrameBuffer* const_buffer(buffer.get());
+  const uint8_t* y_plane = const_buffer->data(kYPlane) +
+                           buffer->stride(kYPlane) * offset_y + offset_x;
+  const uint8_t* u_plane = const_buffer->data(kUPlane) +
+                           buffer->stride(kUPlane) * uv_offset_y + uv_offset_x;
+  const uint8_t* v_plane = const_buffer->data(kVPlane) +
+                           buffer->stride(kVPlane) * uv_offset_y + uv_offset_x;
+  return new rtc::RefCountedObject<WrappedI420Buffer>(
+      cropped_width, cropped_height,
+      y_plane, buffer->stride(kYPlane),
+      u_plane, buffer->stride(kUPlane),
+      v_plane, buffer->stride(kVPlane),
+      rtc::Bind(&NoLongerUsedCallback, buffer));
+}
+
 }  // namespace webrtc
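Worked example (illustration only, assumed sizes) of the even-offset arithmetic in ShallowCenterCrop() above: the chroma offset is computed first with an integer division by 4 and then doubled for luma, so the luma offset is always even and the subsampled U/V planes stay aligned with the cropped Y plane.

#include <cstdio>

int main() {
  const int width = 1280, height = 720;                 // source buffer
  const int cropped_width = 640, cropped_height = 360;  // crop target
  const int uv_offset_x = (width - cropped_width) / 4;    // 160
  const int uv_offset_y = (height - cropped_height) / 4;  // 90
  const int offset_x = uv_offset_x * 2;                   // 320, always even
  const int offset_y = uv_offset_y * 2;                   // 180, always even
  std::printf("luma offset (%d, %d), chroma offset (%d, %d)\n",
              offset_x, offset_y, uv_offset_x, uv_offset_y);
  return 0;
}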
vp9_impl.cc
@@ -739,7 +739,6 @@ int VP9DecoderImpl::ReturnFrame(const vpx_image_t* img, uint32_t timestamp) {
   // using a WrappedI420Buffer.
   rtc::scoped_refptr<WrappedI420Buffer> img_wrapped_buffer(
       new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
-          img->d_w, img->d_h,
           img->d_w, img->d_h,
           img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y],
           img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U],