Add support for capturers to capture VideoFrames.

BUG=webrtc:7749, webrtc:7760

Review-Url: https://codereview.webrtc.org/2982213002
Cr-Commit-Position: refs/heads/master@{#19318}
Author: sakal
Date: 2017-08-11 00:26:05 -07:00
Committed by: Commit Bot
Parent: 6c27b39fbe
Commit: be910460e0
9 changed files with 157 additions and 7 deletions

View File

@@ -43,10 +43,18 @@ class AndroidVideoTrackSourceObserver implements VideoCapturer.CapturerObserver
nativeSource, width, height, oesTextureId, transformMatrix, rotation, timestamp);
}
@Override
public void onFrameCaptured(VideoFrame frame) {
nativeOnFrameCaptured(nativeSource, frame.getBuffer().getWidth(), frame.getBuffer().getHeight(),
frame.getRotation(), frame.getTimestampNs(), frame.getBuffer());
}
private native void nativeCapturerStarted(long nativeSource, boolean success);
private native void nativeCapturerStopped(long nativeSource);
private native void nativeOnByteBufferFrameCaptured(long nativeSource, byte[] data, int length,
int width, int height, int rotation, long timeStamp);
private native void nativeOnTextureFrameCaptured(long nativeSource, int width, int height,
int oesTextureId, float[] transformMatrix, int rotation, long timestamp);
private native void nativeOnFrameCaptured(long nativeSource, int width, int height, int rotation,
long timestampNs, VideoFrame.Buffer frame);
}
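
For context, a minimal capturer-side sketch of the new path above: a hypothetical capturer hands a complete VideoFrame to its CapturerObserver instead of a byte array or an OES texture. Only CapturerObserver.onFrameCaptured(VideoFrame) comes from this commit; the FrameForwarder class and the VideoFrame(Buffer, rotation, timestampNs) constructor usage are illustrative assumptions.

// Hypothetical usage sketch (not part of this commit): delivering a complete
// VideoFrame through the new CapturerObserver.onFrameCaptured() callback.
import org.webrtc.VideoCapturer;
import org.webrtc.VideoFrame;

class FrameForwarder {
  private final VideoCapturer.CapturerObserver observer;

  FrameForwarder(VideoCapturer.CapturerObserver observer) {
    this.observer = observer;
  }

  // Wraps an externally produced buffer in a VideoFrame and hands it to WebRTC.
  // Assumes a VideoFrame(Buffer, rotationDegrees, timestampNs) constructor.
  void deliverFrame(VideoFrame.Buffer buffer, int rotationDegrees, long timestampNs) {
    observer.onFrameCaptured(new VideoFrame(buffer, rotationDegrees, timestampNs));
    // The native side crops/scales into its own buffer reference, so the
    // capturer still owns this one and releases it when it is done with it.
    buffer.release();
  }
}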

View File

@@ -13,6 +13,7 @@
#include <utility>
#include "webrtc/rtc_base/logging.h"
#include "webrtc/sdk/android/src/jni/classreferenceholder.h"
namespace {
// MediaCodec wants resolution to be divisible by 2.
@@ -32,9 +33,16 @@ AndroidVideoTrackSource::AndroidVideoTrackSource(
new rtc::RefCountedObject<webrtc_jni::SurfaceTextureHelper>(
jni,
j_surface_texture_helper)),
video_buffer_factory_(jni),
is_screencast_(is_screencast) {
LOG(LS_INFO) << "AndroidVideoTrackSource ctor";
camera_thread_checker_.DetachFromThread();
jclass j_video_frame_buffer_class =
webrtc_jni::FindClass(jni, "org/webrtc/VideoFrame$Buffer");
j_crop_and_scale_id_ =
jni->GetMethodID(j_video_frame_buffer_class, "cropAndScale",
"(IIIIII)Lorg/webrtc/VideoFrame$Buffer;");
}
void AndroidVideoTrackSource::SetState(SourceState state) {
@@ -152,6 +160,46 @@ void AndroidVideoTrackSource::OnTextureFrameCaptured(
rotation, translated_camera_time_us));
}
void AndroidVideoTrackSource::OnFrameCaptured(JNIEnv* jni,
int width,
int height,
int64_t timestamp_ns,
VideoRotation rotation,
jobject j_video_frame_buffer) {
RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());
int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec;
int64_t translated_camera_time_us =
timestamp_aligner_.TranslateTimestamp(camera_time_us, rtc::TimeMicros());
int adapted_width;
int adapted_height;
int crop_width;
int crop_height;
int crop_x;
int crop_y;
if (!AdaptFrame(width, height, camera_time_us, &adapted_width,
&adapted_height, &crop_width, &crop_height, &crop_x,
&crop_y)) {
return;
}
jobject j_adapted_video_frame_buffer = jni->CallObjectMethod(
j_video_frame_buffer, j_crop_and_scale_id_, crop_x, crop_y, crop_width,
crop_height, adapted_width, adapted_height);
rtc::scoped_refptr<VideoFrameBuffer> buffer =
video_buffer_factory_.WrapBuffer(jni, j_adapted_video_frame_buffer);
// AdaptedVideoTrackSource handles applying rotation for I420 frames.
if (apply_rotation()) {
buffer = buffer->ToI420();
}
OnFrame(VideoFrame(buffer, rotation, translated_camera_time_us));
}
void AndroidVideoTrackSource::OnOutputFormatRequest(int width,
int height,
int fps) {
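
The new OnFrameCaptured() above delegates adaptation to the Java buffer itself: after AdaptFrame() picks a crop region and target size, it invokes the buffer's cropAndScale(cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight) method (the (IIIIII)Lorg/webrtc/VideoFrame$Buffer; id cached in the constructor), wraps the result, and converts to I420 only when rotation must be applied. A rough Java-side illustration of those steps, assuming the usual retain/release contract of VideoFrame.Buffer; the helper class below is not WebRTC code.

import org.webrtc.VideoFrame;

class FrameAdaptation {
  // Crops and scales a captured buffer the way the native OnFrameCaptured() does,
  // returning a buffer the caller owns and must release.
  static VideoFrame.Buffer adapt(VideoFrame.Buffer buffer, int cropX, int cropY,
      int cropWidth, int cropHeight, int adaptedWidth, int adaptedHeight,
      boolean applyRotation) {
    VideoFrame.Buffer adapted = buffer.cropAndScale(
        cropX, cropY, cropWidth, cropHeight, adaptedWidth, adaptedHeight);
    if (applyRotation) {
      // The base class only rotates I420 buffers, so convert first; toI420()
      // returns a new reference, so drop the intermediate one.
      VideoFrame.Buffer i420 = adapted.toI420();
      adapted.release();
      return i420;
    }
    return adapted;
  }
}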

View File

@@ -11,6 +11,8 @@
#ifndef WEBRTC_API_ANDROID_JNI_ANDROIDVIDEOTRACKSOURCE_H_
#define WEBRTC_API_ANDROID_JNI_ANDROIDVIDEOTRACKSOURCE_H_
#include <jni.h>
#include "webrtc/common_video/include/i420_buffer_pool.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/media/base/adaptedvideotracksource.h"
@@ -59,6 +61,13 @@ class AndroidVideoTrackSource : public rtc::AdaptedVideoTrackSource {
int64_t timestamp_ns,
const webrtc_jni::NativeHandleImpl& handle);
void OnFrameCaptured(JNIEnv* jni,
int width,
int height,
int64_t timestamp_ns,
VideoRotation rotation,
jobject j_video_frame_buffer);
void OnOutputFormatRequest(int width, int height, int fps);
rtc::scoped_refptr<webrtc_jni::SurfaceTextureHelper>
@@ -75,7 +84,10 @@ class AndroidVideoTrackSource : public rtc::AdaptedVideoTrackSource {
NV12ToI420Scaler nv12toi420_scaler_;
I420BufferPool buffer_pool_;
rtc::scoped_refptr<webrtc_jni::SurfaceTextureHelper> surface_texture_helper_;
webrtc_jni::AndroidVideoBufferFactory video_buffer_factory_;
const bool is_screencast_;
jmethodID j_crop_and_scale_id_;
};
} // namespace webrtc

View File

@@ -69,6 +69,22 @@ JOW_OBSERVER_METHOD(void, nativeOnTextureFrameCaptured)
NativeHandleImpl(jni, j_oes_texture_id, j_transform_matrix));
}
JOW_OBSERVER_METHOD(void, nativeOnFrameCaptured)
(JNIEnv* jni,
jclass,
jlong j_source,
jint j_width,
jint j_height,
jint j_rotation,
jlong j_timestamp_ns,
jobject j_video_frame_buffer) {
webrtc::AndroidVideoTrackSource* source =
AndroidVideoTrackSourceFromJavaProxy(j_source);
source->OnFrameCaptured(jni, j_width, j_height, j_timestamp_ns,
jintToVideoRotation(j_rotation),
j_video_frame_buffer);
}
JOW_OBSERVER_METHOD(void, nativeCapturerStarted)
(JNIEnv* jni, jclass, jlong j_source, jboolean j_success) {
LOG(LS_INFO) << "AndroidVideoTrackSourceObserve_nativeCapturerStarted";

View File

@@ -322,18 +322,39 @@ rtc::scoped_refptr<webrtc::I420BufferInterface> AndroidTextureBuffer::ToI420() {
return copy;
}
rtc::scoped_refptr<AndroidVideoBuffer> AndroidVideoBuffer::WrapReference(
JNIEnv* jni,
jmethodID j_release_id,
int width,
int height,
jobject j_video_frame_buffer) {
return new rtc::RefCountedObject<AndroidVideoBuffer>(
jni, j_release_id, width, height, j_video_frame_buffer);
}
AndroidVideoBuffer::AndroidVideoBuffer(JNIEnv* jni,
jmethodID j_retain_id,
jmethodID j_release_id,
int width,
int height,
jobject j_video_frame_buffer)
: AndroidVideoBuffer(jni,
j_release_id,
width,
height,
j_video_frame_buffer) {
jni->CallVoidMethod(j_video_frame_buffer, j_retain_id);
}
AndroidVideoBuffer::AndroidVideoBuffer(JNIEnv* jni,
jmethodID j_release_id,
int width,
int height,
jobject j_video_frame_buffer)
: j_release_id_(j_release_id),
width_(width),
height_(height),
j_video_frame_buffer_(jni, j_video_frame_buffer) {
jni->CallVoidMethod(j_video_frame_buffer, j_retain_id);
}
j_video_frame_buffer_(jni, j_video_frame_buffer) {}
AndroidVideoBuffer::~AndroidVideoBuffer() {
JNIEnv* jni = AttachCurrentThreadIfNeeded();
@@ -422,15 +443,24 @@ webrtc::VideoFrame AndroidVideoBufferFactory::CreateFrame(
uint32_t timestamp_ns =
jni->CallLongMethod(j_video_frame, j_get_timestamp_ns_id_);
rtc::scoped_refptr<AndroidVideoBuffer> buffer =
CreateBuffer(j_video_frame_buffer);
CreateBuffer(jni, j_video_frame_buffer);
return webrtc::VideoFrame(buffer, timestamp_rtp,
timestamp_ns / rtc::kNumNanosecsPerMillisec,
static_cast<webrtc::VideoRotation>(rotation));
}
rtc::scoped_refptr<AndroidVideoBuffer> AndroidVideoBufferFactory::CreateBuffer(
rtc::scoped_refptr<AndroidVideoBuffer> AndroidVideoBufferFactory::WrapBuffer(
JNIEnv* jni,
jobject j_video_frame_buffer) const {
int width = jni->CallIntMethod(j_video_frame_buffer, j_get_width_id_);
int height = jni->CallIntMethod(j_video_frame_buffer, j_get_height_id_);
return AndroidVideoBuffer::WrapReference(jni, j_release_id_, width, height,
j_video_frame_buffer);
}
rtc::scoped_refptr<AndroidVideoBuffer> AndroidVideoBufferFactory::CreateBuffer(
JNIEnv* jni,
jobject j_video_frame_buffer) const {
JNIEnv* jni = AttachCurrentThreadIfNeeded();
int width = jni->CallIntMethod(j_video_frame_buffer, j_get_width_id_);
int height = jni->CallIntMethod(j_video_frame_buffer, j_get_height_id_);
return new rtc::RefCountedObject<AndroidVideoBuffer>(

View File

@@ -103,12 +103,28 @@ class AndroidTextureBuffer : public AndroidVideoFrameBuffer {
class AndroidVideoBuffer : public AndroidVideoFrameBuffer {
public:
// Wraps an existing reference to a Java VideoBuffer. Retain will not be
// called but release will be called when the C++ object is destroyed.
static rtc::scoped_refptr<AndroidVideoBuffer> WrapReference(
JNIEnv* jni,
jmethodID j_release_id,
int width,
int height,
jobject j_video_frame_buffer);
AndroidVideoBuffer(JNIEnv* jni,
jmethodID j_retain_id,
jmethodID j_release_id,
int width,
int height,
jobject j_video_frame_buffer);
// Should not be called directly. Wraps a reference. Use
// AndroidVideoBuffer::WrapReference instead for clarity.
AndroidVideoBuffer(JNIEnv* jni,
jmethodID j_release_id,
int width,
int height,
jobject j_video_frame_buffer);
~AndroidVideoBuffer() override;
jobject video_frame_buffer() const;
@@ -140,7 +156,14 @@ class AndroidVideoBufferFactory {
jobject j_video_frame,
uint32_t timestamp_rtp) const;
// Wraps a buffer to AndroidVideoBuffer without incrementing the reference
// count.
rtc::scoped_refptr<AndroidVideoBuffer> WrapBuffer(
JNIEnv* jni,
jobject j_video_frame_buffer) const;
rtc::scoped_refptr<AndroidVideoBuffer> CreateBuffer(
JNIEnv* jni,
jobject j_video_frame_buffer) const;
private:
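
The comments above define the ownership rule for WrapReference()/WrapBuffer(): they take over an existing reference to the Java buffer instead of retaining a new one, and the wrapper releases that reference when the C++ object is destroyed. The implication for the Java side is sketched below with a hypothetical hand-off helper; nativeWrapBuffer is a placeholder, not a real JNI entry point.

import org.webrtc.VideoFrame;

class BufferHandOff {
  // Placeholder for a JNI entry point that ends in AndroidVideoBufferFactory::WrapBuffer().
  private static native void nativeWrapBuffer(VideoFrame.Buffer buffer);

  static void transferToNative(VideoFrame.Buffer buffer) {
    buffer.retain();           // The reference the C++ AndroidVideoBuffer will own.
    nativeWrapBuffer(buffer);  // No additional retain happens on the native side.
    // No matching buffer.release() here: the C++ wrapper calls release() on the
    // Java buffer when it is destroyed, balancing the retain() above.
  }
}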