Add support for scaling textures in AndroidVideoCapturer.

The idea is to also reuse AndroidTextureBuffer::CropAndScale when scaling in the encoder.

BUG=webrtc:4993
R=magjed@webrtc.org

Review URL: https://codereview.webrtc.org/1471333003 .

Cr-Commit-Position: refs/heads/master@{#10802}
Author: Per
Committed: 2015-11-26 13:41:44 +01:00
parent fd5dae395b
commit a3c20bb9a0
6 changed files with 112 additions and 4 deletions
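
To make the reuse mentioned in the description concrete, here is a minimal sketch of how an encoder-side scaling step could call AndroidTextureBuffer::CropAndScale on the texture path. ScaleForEncoder and everything around it are hypothetical names added for illustration only; just CropAndScale and the buffer types come from this CL.

// Hypothetical sketch, not part of this CL: pick the scaling path for a
// frame that is about to be encoded.
#include "talk/app/webrtc/java/jni/native_handle_impl.h"

namespace webrtc_jni {

rtc::scoped_refptr<webrtc::VideoFrameBuffer> ScaleForEncoder(
    const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
    int target_width,
    int target_height) {
  if (buffer->native_handle() != nullptr) {
    // Texture frame: scale it with AndroidTextureBuffer::CropAndScale.
    // Cropping is not implemented yet, so the full frame size is passed as
    // the cropped input size (matching the RTC_CHECK_EQs further below).
    return static_cast<AndroidTextureBuffer*>(buffer.get())
        ->CropAndScale(buffer->width(), buffer->height(),
                       target_width, target_height);
  }
  // I420 frame: fall back to the encoder's existing CPU scaling.
  return buffer;
}

}  // namespace webrtc_jni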


@@ -299,4 +299,20 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
VideoCapturerAndroidTestFixtures.cameraErrorEventOnBufferStarvation(capturer,
cameraEvents, getInstrumentation().getContext());
}
@MediumTest
// This test verifies that frames forwarded to a renderer are scaled if onOutputFormatRequest
// is called. It covers both the Java and C++ parts of the stack.
public void testScaleCameraOutput() throws InterruptedException {
VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", null);
VideoCapturerAndroidTestFixtures.scaleCameraOutput(capturer);
}
@MediumTest
// This test verifies that frames forwarded to a renderer are scaled if onOutputFormatRequest
// is called. It covers both the Java and C++ parts of the stack.
public void testScaleCameraOutputUsingTextures() throws InterruptedException {
VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", null, EGL10.EGL_NO_CONTEXT);
VideoCapturerAndroidTestFixtures.scaleCameraOutput(capturer);
}
}


@@ -43,16 +43,32 @@ public class VideoCapturerAndroidTestFixtures {
static class RendererCallbacks implements VideoRenderer.Callbacks {
private int framesRendered = 0;
private Object frameLock = 0;
private int width = 0;
private int height = 0;
@Override
public void renderFrame(I420Frame frame) {
synchronized (frameLock) {
++framesRendered;
width = frame.rotatedWidth();
height = frame.rotatedHeight();
frameLock.notify();
}
VideoRenderer.renderFrameDone(frame);
}
public int frameWidth() {
synchronized (frameLock) {
return width;
}
}
public int frameHeight() {
synchronized (frameLock) {
return height;
}
}
public int WaitForNextFrameToRender() throws InterruptedException {
synchronized (frameLock) {
frameLock.wait();
@@ -541,4 +557,43 @@ public class VideoCapturerAndroidTestFixtures {
capturer.dispose();
assertTrue(capturer.isReleased());
}
static public void scaleCameraOutput(VideoCapturerAndroid capturer) throws InterruptedException {
PeerConnectionFactory factory = new PeerConnectionFactory();
VideoSource source =
factory.createVideoSource(capturer, new MediaConstraints());
VideoTrack track = factory.createVideoTrack("dummy", source);
RendererCallbacks renderer = new RendererCallbacks();
track.addRenderer(new VideoRenderer(renderer));
assertTrue(renderer.WaitForNextFrameToRender() > 0);
final int startWidth = renderer.frameWidth();
final int startHeight = renderer.frameHeight();
final int frameRate = 30;
final int scaledWidth = startWidth / 2;
final int scaledHeight = startHeight / 2;
// Request the captured frames to be scaled.
capturer.onOutputFormatRequest(scaledWidth, scaledHeight, frameRate);
boolean gotExpectedResolution = false;
int numberOfInspectedFrames = 0;
do {
renderer.WaitForNextFrameToRender();
++numberOfInspectedFrames;
gotExpectedResolution = (renderer.frameWidth() == scaledWidth
&& renderer.frameHeight() == scaledHeight);
} while (!gotExpectedResolution && numberOfInspectedFrames < 30);
source.stop();
track.dispose();
source.dispose();
factory.dispose();
assertTrue(capturer.isReleased());
assertTrue(gotExpectedResolution);
}
}


@@ -26,6 +26,7 @@
*/
#include "talk/app/webrtc/androidvideocapturer.h"
#include "talk/app/webrtc/java/jni/native_handle_impl.h"
#include "talk/media/webrtc/webrtcvideoframe.h"
#include "webrtc/base/common.h"
#include "webrtc/base/json.h"
@@ -101,10 +102,12 @@ class AndroidVideoCapturer::FrameFactory : public cricket::VideoFrameFactory {
int output_width,
int output_height) const override {
if (buffer_->native_handle() != nullptr) {
-    // TODO(perkj): Implement CreateAliasedFrame properly for textures.
-    rtc::scoped_ptr<cricket::VideoFrame> frame(new cricket::WebRtcVideoFrame(
-        buffer_, input_frame->time_stamp, input_frame->rotation));
-    return frame.release();
+    rtc::scoped_refptr<webrtc::VideoFrameBuffer> scaled_buffer(
+        static_cast<webrtc_jni::AndroidTextureBuffer*>(buffer_.get())
+            ->CropAndScale(cropped_input_width, cropped_input_height,
+                           output_width, output_height));
+    return new cricket::WebRtcVideoFrame(
+        scaled_buffer, input_frame->time_stamp, input_frame->rotation);
}
return VideoFrameFactory::CreateAliasedFrame(input_frame,
cropped_input_width,


@@ -300,6 +300,8 @@ public class VideoCapturerAndroid extends VideoCapturer implements
// Requests a new output format from the video capturer. Frames captured by
// the camera will be scaled and/or dropped by the video capturer.
// It does not matter if width and height are flipped, e.g. |width| = 640, |height| = 480
// produces the same result as |width| = 480, |height| = 640.
// TODO(magjed/perkj): Document what this function does. Change name?
public void onOutputFormatRequest(final int width, final int height, final int framerate) {
cameraThreadHandler.post(new Runnable() {


@@ -28,9 +28,17 @@
#include "talk/app/webrtc/java/jni/native_handle_impl.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/bind.h"
using rtc::scoped_refptr;
using webrtc::NativeHandleBuffer;
namespace webrtc_jni {
namespace {
void ScaledFrameNotInUse(scoped_refptr<NativeHandleBuffer> original) {}
} // anonymous namespace
NativeHandleImpl::NativeHandleImpl(JNIEnv* jni,
jint j_oes_texture_id,
jfloatArray j_transform_matrix)
@@ -64,4 +72,22 @@ AndroidTextureBuffer::NativeToI420Buffer() {
return nullptr;
}
rtc::scoped_refptr<AndroidTextureBuffer> AndroidTextureBuffer::CropAndScale(
int cropped_input_width,
int cropped_input_height,
int dst_width,
int dst_height) {
// TODO(perkj) Implement cropping.
RTC_CHECK_EQ(cropped_input_width, width_);
RTC_CHECK_EQ(cropped_input_height, height_);
// Here we use Bind magic to add a reference count to |this| until the newly
// created AndroidTextureBuffer is destructed. ScaledFrameNotInUse will be
// called when that happens, and once it returns, the reference count to
// |this| is decreased by one.
return new rtc::RefCountedObject<AndroidTextureBuffer>(
dst_width, dst_height, native_handle_,
rtc::Bind(&ScaledFrameNotInUse, this));
}
} // namespace webrtc_jni
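
A note on the "Bind magic" above: the bound scoped_refptr is stored inside the returned callback, so the source buffer stays alive for as long as no_longer_used_cb_ in the newly created AndroidTextureBuffer exists. The standalone sketch below shows the same keep-alive pattern with std::shared_ptr and std::function; every name in it is hypothetical and it is not WebRTC code, just an illustration of the lifetime trick.

#include <functional>
#include <iostream>
#include <memory>
#include <utility>

// Stand-in for the original texture buffer whose lifetime must outlast the
// scaled copy. Hypothetical class, not part of WebRTC.
struct SourceBuffer {
  ~SourceBuffer() { std::cout << "SourceBuffer destroyed\n"; }
};

// The by-value shared_ptr parameter mirrors the bound scoped_refptr; holding
// it is what keeps the extra reference.
void ScaledFrameNotInUse(std::shared_ptr<SourceBuffer> /*original*/) {}

// Stand-in for the scaled buffer: it owns the callback, and the shared_ptr
// captured inside the callback keeps the source alive until the scaled
// buffer (and with it the callback) goes away.
struct ScaledBuffer {
  explicit ScaledBuffer(std::function<void()> no_longer_used)
      : no_longer_used_cb_(std::move(no_longer_used)) {}
  ~ScaledBuffer() { no_longer_used_cb_(); }
  std::function<void()> no_longer_used_cb_;
};

int main() {
  auto source = std::make_shared<SourceBuffer>();
  auto scaled = std::make_unique<ScaledBuffer>(
      [source] { ScaledFrameNotInUse(source); });  // copy keeps source alive
  source.reset();  // The caller drops its reference...
  std::cout << "caller released its reference\n";
  scaled.reset();  // ...SourceBuffer is destroyed only here, together with
                   // the stored callback that captured it.
  return 0;
}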


@@ -54,6 +54,12 @@ class AndroidTextureBuffer : public webrtc::NativeHandleBuffer {
~AndroidTextureBuffer();
rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override;
rtc::scoped_refptr<AndroidTextureBuffer> CropAndScale(
int cropped_input_width,
int cropped_input_height,
int dst_width,
int dst_height);
private:
NativeHandleImpl native_handle_;
rtc::Callback0<void> no_longer_used_cb_;