Native changes for VideoCapturerAndroid surface texture support

These are the C++ changes to the video capturer that are necessary for capturing to a surface texture.
Scaling / cropping is not handled yet, though.
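
As a rough illustration (a sketch, not code from this CL; include paths are assumed for this revision), texture-backed frames are told apart from regular memory frames by a non-null native handle on the VideoFrameBuffer:

// Sketch only; include paths may differ between WebRTC revisions.
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/common_video/interface/video_frame_buffer.h"

// A texture frame carries a NativeHandleImpl (OES texture id plus transform
// matrix) as its native handle; a memory frame exposes I420 planes instead.
bool IsTextureBacked(
    const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer) {
  return buffer->native_handle() != nullptr;
}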

BUG=
R=magjed@webrtc.org

Review URL: https://codereview.webrtc.org/1395673003 .

Cr-Commit-Position: refs/heads/master@{#10218}
perkj
2015-10-08 15:32:38 +02:00
parent 4382d800d2
commit ac30642461
4 changed files with 95 additions and 24 deletions


@@ -103,13 +103,12 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
VideoCapturerAndroidTestFixtures.startCapturerAndRender(capturer);
}
/* TODO(perkj): Enable once VideoCapture to texture support has landed in C++.
@SmallTest
public void testStartVideoCapturerUsingTextures() throws InterruptedException {
VideoCapturerAndroid capturer =
VideoCapturerAndroid.create("", null, EGL14.EGL_NO_CONTEXT);
VideoCapturerAndroidTestFixtures.startCapturerAndRender(capturer);
}*/
}
@SmallTest
// This test that the camera can be started and that the frames are forwarded
@@ -146,12 +145,11 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
VideoCapturerAndroidTestFixtures.switchCamera(capturer);
}
/* TODO(perkj): Enable once VideoCapture to texture support has landed in C++.
@SmallTest
public void testSwitchVideoCapturerUsingTextures() throws InterruptedException {
VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", null, EGL14.EGL_NO_CONTEXT);
VideoCapturerAndroidTestFixtures.switchCamera(capturer);
}*/
}
@MediumTest
// Test what happens when attempting to call e.g. switchCamera() after camera has been stopped.
@@ -181,12 +179,11 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
VideoCapturerAndroidTestFixtures.stopRestartVideoSource(capturer);
}
/* TODO(perkj): Enable once VideoCapture to texture support has landed in C++.
@SmallTest
public void testStopRestartVideoSourceUsingTextures() throws InterruptedException {
VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", null, EGL14.EGL_NO_CONTEXT);
VideoCapturerAndroidTestFixtures.stopRestartVideoSource(capturer);
}*/
}
@SmallTest
// This test that the camera can be started at different resolutions.
@@ -237,11 +234,10 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
VideoCapturerAndroidTestFixtures.returnBufferLateEndToEnd(capturer);
}
/* TODO(perkj): Enable once VideoCapture to texture support has landed in C++.
@MediumTest
public void testReturnBufferLateEndToEndUsingTextures() throws InterruptedException {
final VideoCapturerAndroid capturer =
VideoCapturerAndroid.create("", null, EGL14.EGL_NO_CONTEXT);
VideoCapturerAndroidTestFixtures.returnBufferLateEndToEnd(capturer);
}*/
}
}


@@ -81,6 +81,8 @@ class AndroidVideoCapturer::FrameFactory : public cricket::VideoFrameFactory {
int dst_height) const override {
// Check that captured_frame is actually our frame.
RTC_CHECK(captured_frame == &captured_frame_);
RTC_CHECK(buffer_->native_handle() == nullptr);
rtc::scoped_ptr<cricket::VideoFrame> frame(new cricket::WebRtcVideoFrame(
ShallowCenterCrop(buffer_, dst_width, dst_height),
captured_frame->time_stamp, captured_frame->GetRotation()));
@@ -90,6 +92,25 @@ class AndroidVideoCapturer::FrameFactory : public cricket::VideoFrameFactory {
: frame.release();
}
cricket::VideoFrame* CreateAliasedFrame(
const cricket::CapturedFrame* input_frame,
int cropped_input_width,
int cropped_input_height,
int output_width,
int output_height) const override {
if (buffer_->native_handle() != nullptr) {
// TODO(perkj): Implement CreateAliasedFrame properly for textures.
rtc::scoped_ptr<cricket::VideoFrame> frame(new cricket::WebRtcVideoFrame(
buffer_, input_frame->time_stamp, input_frame->GetRotation()));
return frame.release();
}
return VideoFrameFactory::CreateAliasedFrame(input_frame,
cropped_input_width,
cropped_input_height,
output_width,
output_height);
}
private:
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer_;
cricket::CapturedFrame captured_frame_;


@@ -28,11 +28,40 @@
#include "talk/app/webrtc/java/jni/androidvideocapturer_jni.h"
#include "talk/app/webrtc/java/jni/classreferenceholder.h"
#include "talk/app/webrtc/java/jni/native_handle_impl.h"
#include "webrtc/base/bind.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
namespace webrtc_jni {
namespace {
class CameraTextureBuffer : public webrtc::NativeHandleBuffer {
public:
CameraTextureBuffer(int width, int height,
const NativeHandleImpl& native_handle,
const rtc::Callback0<void>& no_longer_used)
: webrtc::NativeHandleBuffer(&native_handle_, width, height),
native_handle_(native_handle),
no_longer_used_cb_(no_longer_used) {}
~CameraTextureBuffer() {
no_longer_used_cb_();
}
rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override {
RTC_NOTREACHED()
<< "CameraTextureBuffer::NativeToI420Buffer not implemented.";
return nullptr;
}
private:
NativeHandleImpl native_handle_;
rtc::Callback0<void> no_longer_used_cb_;
};
} // anonymous namespace
jobject AndroidVideoCapturerJni::application_context_ = nullptr;
// static
@@ -150,12 +179,12 @@ void AndroidVideoCapturerJni::OnCapturerStarted(bool success) {
success);
}
void AndroidVideoCapturerJni::OnIncomingFrame(void* video_frame,
void AndroidVideoCapturerJni::OnMemoryBufferFrame(void* video_frame,
int length,
int width,
int height,
int rotation,
int64_t time_stamp) {
int64_t timestamp_ns) {
const uint8_t* y_plane = static_cast<uint8_t*>(video_frame);
// Android guarantees that the stride is a multiple of 16.
// http://developer.android.com/reference/android/hardware/Camera.Parameters.html#setPreviewFormat%28int%29
@@ -172,10 +201,25 @@ void AndroidVideoCapturerJni::OnIncomingFrame(void* video_frame,
new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
width, height, y_plane, y_stride, u_plane, uv_stride, v_plane,
uv_stride,
rtc::Bind(&AndroidVideoCapturerJni::ReturnBuffer, this, time_stamp)));
rtc::Bind(&AndroidVideoCapturerJni::ReturnBuffer, this,
timestamp_ns)));
AsyncCapturerInvoke("OnIncomingFrame",
&webrtc::AndroidVideoCapturer::OnIncomingFrame,
buffer, rotation, time_stamp);
buffer, rotation, timestamp_ns);
}
void AndroidVideoCapturerJni::OnTextureFrame(int width,
int height,
int64_t timestamp_ns,
const NativeHandleImpl& handle) {
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
new rtc::RefCountedObject<CameraTextureBuffer>(
width, height, handle,
rtc::Bind(&AndroidVideoCapturerJni::ReturnBuffer, this,
timestamp_ns)));
AsyncCapturerInvoke("OnIncomingFrame",
&webrtc::AndroidVideoCapturer::OnIncomingFrame,
buffer, 0, timestamp_ns);
}
void AndroidVideoCapturerJni::OnOutputFormatRequest(int width,
@@ -191,7 +235,7 @@ JNIEnv* AndroidVideoCapturerJni::jni() { return AttachCurrentThreadIfNeeded(); }
JOW(void,
VideoCapturerAndroid_00024NativeObserver_nativeOnByteBufferFrameCaptured)
(JNIEnv* jni, jclass, jlong j_capturer, jbyteArray j_frame, jint length,
jint width, jint height, jint rotation, jlong ts) {
jint width, jint height, jint rotation, jlong timestamp) {
jboolean is_copy = true;
jbyte* bytes = jni->GetByteArrayElements(j_frame, &is_copy);
// If this is a copy of the original frame, it means that the memory
@@ -202,10 +246,20 @@ JOW(void,
RTC_CHECK(!is_copy)
<< "NativeObserver_nativeOnFrameCaptured: frame is a copy";
reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)
->OnIncomingFrame(bytes, length, width, height, rotation, ts);
->OnMemoryBufferFrame(bytes, length, width, height, rotation, timestamp);
jni->ReleaseByteArrayElements(j_frame, bytes, JNI_ABORT);
}
JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeOnTextureFrameCaptured)
(JNIEnv* jni, jclass, jlong j_capturer, jint j_width, jint j_height,
jint j_oes_texture_id, jfloatArray j_transform_matrix,
jlong j_timestamp) {
reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)
->OnTextureFrame(j_width, j_height, j_timestamp,
NativeHandleImpl(jni, j_oes_texture_id,
j_transform_matrix));
}
JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeCapturerStarted)
(JNIEnv* jni, jclass, jlong j_capturer, jboolean j_success) {
LOG(LS_INFO) << "NativeObserver_nativeCapturerStarted";
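
Two notes on the JNI plumbing above, for readers less familiar with it: the name VideoCapturerAndroid_00024NativeObserver_nativeOnTextureFrameCaptured is the JNI-mangled form of a native method on the inner class VideoCapturerAndroid.NativeObserver ("_00024" encodes "$"), and the texture is handed back to Java through the callback bound into CameraTextureBuffer, which fires when the last reference to the buffer is dropped. A sketch of that lifetime, using the same variables as in OnTextureFrame above:

// Sketch: the bound callback runs exactly once, from ~CameraTextureBuffer(),
// so releasing the last reference is what returns the texture to Java.
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
    new rtc::RefCountedObject<CameraTextureBuffer>(
        width, height, handle,
        rtc::Bind(&AndroidVideoCapturerJni::ReturnBuffer, this, timestamp_ns)));
buffer = nullptr;  // last reference dropped -> ReturnBuffer(timestamp_ns) runs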


@@ -39,6 +39,8 @@
namespace webrtc_jni {
class NativeHandleImpl;
// AndroidVideoCapturerJni implements AndroidVideoCapturerDelegate.
// The purpose of the delegate is to hide the JNI specifics from the C++ only
// AndroidVideoCapturer.
@@ -56,12 +58,10 @@ class AndroidVideoCapturerJni : public webrtc::AndroidVideoCapturerDelegate {
// Called from VideoCapturerAndroid::NativeObserver on a Java thread.
void OnCapturerStarted(bool success);
void OnIncomingFrame(void* video_frame,
int length,
int width,
int height,
int rotation,
int64_t time_stamp);
void OnMemoryBufferFrame(void* video_frame, int length, int width,
int height, int rotation, int64_t timestamp_ns);
void OnTextureFrame(int width, int height, int64_t timestamp_ns,
const NativeHandleImpl& handle);
void OnOutputFormatRequest(int width, int height, int fps);
protected: