Android: Stop using VideoRenderer class

This CL updates the WebRTC code to stop using the old VideoRenderer and
VideoRenderer.I420Frame classes and instead use the new VideoSink and
VideoFrame classes.

This CL is the first step; the old classes are left in the code
for now to preserve backwards compatibility.

Bug: webrtc:9181
Change-Id: Ib0caa18cbaa2758b7859e850ddcaba003cfb06d6
Reviewed-on: https://webrtc-review.googlesource.com/71662
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#22989}
This commit is contained in:
Magnus Jedvert
2018-04-23 16:14:47 +02:00
committed by Commit Bot
parent b9ac121598
commit e987f2b765
15 changed files with 137 additions and 187 deletions

View File

@ -59,7 +59,6 @@ import org.webrtc.SurfaceViewRenderer;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoFileRenderer;
import org.webrtc.VideoFrame;
import org.webrtc.VideoRenderer;
import org.webrtc.VideoSink;
/**
@ -133,25 +132,6 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEven
// Peer connection statistics callback period in ms.
private static final int STAT_CALLBACK_PERIOD = 1000;
// Legacy proxy for VideoRenderer.Callbacks: forwards each incoming I420
// frame to a swappable |target| renderer. Methods are synchronized,
// presumably because frames arrive on a worker thread while the target is
// swapped from another thread — TODO confirm threading against callers.
private static class ProxyRenderer implements VideoRenderer.Callbacks {
// Current destination for frames; null when no renderer is attached.
private VideoRenderer.Callbacks target;
@Override
synchronized public void renderFrame(VideoRenderer.I420Frame frame) {
if (target == null) {
Logging.d(TAG, "Dropping frame in proxy because target is null.");
// Release the dropped frame back to WebRTC so it is not leaked.
VideoRenderer.renderFrameDone(frame);
return;
}
target.renderFrame(frame);
}
// Attaches (or detaches, when passed null) the renderer receiving frames.
synchronized public void setTarget(VideoRenderer.Callbacks target) {
this.target = target;
}
}
private static class ProxyVideoSink implements VideoSink {
private VideoSink target;
@ -170,7 +150,7 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEven
}
}
private final ProxyRenderer remoteProxyRenderer = new ProxyRenderer();
private final ProxyVideoSink remoteProxyRenderer = new ProxyVideoSink();
private final ProxyVideoSink localProxyVideoSink = new ProxyVideoSink();
@Nullable
private PeerConnectionClient peerConnectionClient = null;
@ -186,7 +166,7 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEven
private SurfaceViewRenderer fullscreenRenderer;
@Nullable
private VideoFileRenderer videoFileRenderer;
private final List<VideoRenderer.Callbacks> remoteRenderers = new ArrayList<>();
private final List<VideoSink> remoteSinks = new ArrayList<>();
private Toast logToast;
private boolean commandLineRun;
private boolean activityRunning;
@ -251,7 +231,7 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEven
});
fullscreenRenderer.setOnClickListener(listener);
remoteRenderers.add(remoteProxyRenderer);
remoteSinks.add(remoteProxyRenderer);
final Intent intent = getIntent();
final EglBase eglBase = EglBase.create();
@ -268,7 +248,7 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEven
try {
videoFileRenderer = new VideoFileRenderer(
saveRemoteVideoToFile, videoOutWidth, videoOutHeight, eglBase.getEglBaseContext());
remoteRenderers.add(videoFileRenderer);
remoteSinks.add(videoFileRenderer);
} catch (IOException e) {
throw new RuntimeException(
"Failed to open video file for output: " + saveRemoteVideoToFile, e);
@ -776,7 +756,7 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEven
videoCapturer = createVideoCapturer();
}
peerConnectionClient.createPeerConnection(
localProxyVideoSink, remoteRenderers, videoCapturer, signalingParameters);
localProxyVideoSink, remoteSinks, videoCapturer, signalingParameters);
if (signalingParameters.initiator) {
logAndToast("Creating OFFER...");

View File

@ -66,7 +66,6 @@ import org.webrtc.StatsReport;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoDecoderFactory;
import org.webrtc.VideoEncoderFactory;
import org.webrtc.VideoRenderer;
import org.webrtc.VideoSink;
import org.webrtc.VideoSource;
import org.webrtc.VideoTrack;
@ -147,8 +146,7 @@ public class PeerConnectionClient {
private boolean isError;
@Nullable
private VideoSink localRender;
@Nullable
private List<VideoRenderer.Callbacks> remoteRenders;
@Nullable private List<VideoSink> remoteSinks;
private SignalingParameters signalingParameters;
private int videoWidth;
private int videoHeight;
@ -356,25 +354,23 @@ public class PeerConnectionClient {
executor.execute(() -> createPeerConnectionFactoryInternal(options));
}
public void createPeerConnection(final VideoSink localRender,
final VideoRenderer.Callbacks remoteRender, final VideoCapturer videoCapturer,
final SignalingParameters signalingParameters) {
public void createPeerConnection(final VideoSink localRender, final VideoSink remoteSink,
final VideoCapturer videoCapturer, final SignalingParameters signalingParameters) {
if (peerConnectionParameters.videoCallEnabled && videoCapturer == null) {
Log.w(TAG, "Video call enabled but no video capturer provided.");
}
createPeerConnection(
localRender, Collections.singletonList(remoteRender), videoCapturer, signalingParameters);
localRender, Collections.singletonList(remoteSink), videoCapturer, signalingParameters);
}
public void createPeerConnection(final VideoSink localRender,
final List<VideoRenderer.Callbacks> remoteRenders, final VideoCapturer videoCapturer,
final SignalingParameters signalingParameters) {
public void createPeerConnection(final VideoSink localRender, final List<VideoSink> remoteSinks,
final VideoCapturer videoCapturer, final SignalingParameters signalingParameters) {
if (peerConnectionParameters == null) {
Log.e(TAG, "Creating peer connection without initializing factory.");
return;
}
this.localRender = localRender;
this.remoteRenders = remoteRenders;
this.remoteSinks = remoteSinks;
this.videoCapturer = videoCapturer;
this.signalingParameters = signalingParameters;
executor.execute(() -> {
@ -685,8 +681,8 @@ public class PeerConnectionClient {
// answer to get the remote track.
remoteVideoTrack = getRemoteVideoTrack();
remoteVideoTrack.setEnabled(renderVideo);
for (VideoRenderer.Callbacks remoteRender : remoteRenders) {
remoteVideoTrack.addRenderer(new VideoRenderer(remoteRender));
for (VideoSink remoteSink : remoteSinks) {
remoteVideoTrack.addSink(remoteSink);
}
}
peerConnection.addTrack(createAudioTrack(), mediaStreamLabels);
@ -781,7 +777,7 @@ public class PeerConnectionClient {
saveRecordedAudioToFile = null;
}
localRender = null;
remoteRenders = null;
remoteSinks = null;
Log.d(TAG, "Closing peer connection factory.");
if (factory != null) {
factory.dispose();

View File

@ -44,7 +44,6 @@ import org.webrtc.SessionDescription;
import org.webrtc.StatsReport;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoFrame;
import org.webrtc.VideoRenderer;
import org.webrtc.VideoSink;
@RunWith(AndroidJUnit4.class)
@ -87,7 +86,7 @@ public class PeerConnectionClientTest implements PeerConnectionEvents {
private final Object closeEvent = new Object();
// Mock renderer implementation.
private static class MockRenderer implements VideoRenderer.Callbacks {
private static class MockRenderer implements VideoSink {
// These are protected by 'this' since we gets called from worker threads.
private String rendererName;
private boolean renderFrameCalled = false;
@ -111,17 +110,17 @@ public class PeerConnectionClientTest implements PeerConnectionEvents {
@Override
// TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
@SuppressWarnings("NoSynchronizedMethodCheck")
public synchronized void renderFrame(VideoRenderer.I420Frame frame) {
public synchronized void onFrame(VideoFrame frame) {
if (!renderFrameCalled) {
if (rendererName != null) {
Log.d(TAG, rendererName + " render frame: " + frame.rotatedWidth() + " x "
+ frame.rotatedHeight());
Log.d(TAG,
rendererName + " render frame: " + frame.getRotatedWidth() + " x "
+ frame.getRotatedHeight());
} else {
Log.d(TAG, "Render frame: " + frame.rotatedWidth() + " x " + frame.rotatedHeight());
Log.d(TAG, "Render frame: " + frame.getRotatedWidth() + " x " + frame.getRotatedHeight());
}
}
renderFrameCalled = true;
VideoRenderer.renderFrameDone(frame);
doneRendering.countDown();
}

View File

@ -279,6 +279,7 @@ generate_jni("generated_video_jni") {
"api/org/webrtc/VideoEncoderFactory.java",
"api/org/webrtc/VideoEncoderFallback.java",
"api/org/webrtc/VideoFrame.java",
"api/org/webrtc/VideoFrameDrawer.java",
"api/org/webrtc/VideoRenderer.java",
"api/org/webrtc/VideoSink.java",
"api/org/webrtc/VideoSource.java",
@ -333,6 +334,7 @@ rtc_static_library("video_jni") {
"src/jni/videoencoderwrapper.h",
"src/jni/videoframe.cc",
"src/jni/videoframe.h",
"src/jni/videoframedrawer.cc",
"src/jni/videosink.cc",
"src/jni/videosink.h",
"src/jni/videotrack.cc",

View File

@ -97,13 +97,6 @@ public class SurfaceEglRenderer extends EglRenderer implements SurfaceHolder.Cal
super.pauseVideo();
}
// VideoRenderer.Callbacks interface.
@Override
public void renderFrame(VideoRenderer.I420Frame frame) {
updateFrameDimensionsAndReportEvents(frame);
super.renderFrame(frame);
}
// VideoSink interface.
@Override
public void onFrame(VideoFrame frame) {
@ -132,33 +125,6 @@ public class SurfaceEglRenderer extends EglRenderer implements SurfaceHolder.Cal
logD("surfaceChanged: format: " + format + " size: " + width + "x" + height);
}
// Update frame dimensions and report any changes to |rendererEvents|.
private void updateFrameDimensionsAndReportEvents(VideoRenderer.I420Frame frame) {
synchronized (layoutLock) {
if (isRenderingPaused) {
return;
}
if (!isFirstFrameRendered) {
isFirstFrameRendered = true;
logD("Reporting first rendered frame.");
if (rendererEvents != null) {
rendererEvents.onFirstFrameRendered();
}
}
if (rotatedFrameWidth != frame.rotatedWidth() || rotatedFrameHeight != frame.rotatedHeight()
|| frameRotation != frame.rotationDegree) {
logD("Reporting frame resolution changed to " + frame.width + "x" + frame.height
+ " with rotation " + frame.rotationDegree);
if (rendererEvents != null) {
rendererEvents.onFrameResolutionChanged(frame.width, frame.height, frame.rotationDegree);
}
rotatedFrameWidth = frame.rotatedWidth();
rotatedFrameHeight = frame.rotatedHeight();
frameRotation = frame.rotationDegree;
}
}
}
// Update frame dimensions and report any changes to |rendererEvents|.
private void updateFrameDimensionsAndReportEvents(VideoFrame frame) {
synchronized (layoutLock) {

View File

@ -21,9 +21,8 @@ import android.view.SurfaceView;
/**
* Display the video stream on a SurfaceView.
*/
public class SurfaceViewRenderer extends SurfaceView implements SurfaceHolder.Callback,
VideoRenderer.Callbacks, VideoSink,
RendererCommon.RendererEvents {
public class SurfaceViewRenderer extends SurfaceView
implements SurfaceHolder.Callback, VideoSink, RendererCommon.RendererEvents {
private static final String TAG = "SurfaceViewRenderer";
// Cached resource name.
@ -180,12 +179,6 @@ public class SurfaceViewRenderer extends SurfaceView implements SurfaceHolder.Ca
eglRenderer.pauseVideo();
}
// VideoRenderer.Callbacks interface.
@Override
public void renderFrame(VideoRenderer.I420Frame frame) {
eglRenderer.renderFrame(frame);
}
// VideoSink interface.
@Override
public void onFrame(VideoFrame frame) {

View File

@ -25,7 +25,7 @@ import java.util.concurrent.LinkedBlockingQueue;
* Can be used to save the video frames to file.
*/
@JNINamespace("webrtc::jni")
public class VideoFileRenderer implements VideoRenderer.Callbacks, VideoSink {
public class VideoFileRenderer implements VideoSink {
private static final String TAG = "VideoFileRenderer";
private final HandlerThread renderThread;
@ -73,13 +73,6 @@ public class VideoFileRenderer implements VideoRenderer.Callbacks, VideoSink {
});
}
@Override
public void renderFrame(final VideoRenderer.I420Frame i420Frame) {
final VideoFrame frame = i420Frame.toVideoFrame();
onFrame(frame);
frame.release();
}
@Override
public void onFrame(VideoFrame frame) {
frame.retain();

View File

@ -21,6 +21,7 @@ import java.nio.ByteBuffer;
* drawer.drawYuv depending on the type of the buffer. The frame will be rendered with rotation
* taken into account. You can supply an additional render matrix for custom transformations.
*/
@JNINamespace("webrtc::jni")
public class VideoFrameDrawer {
/**
* Draws a VideoFrame.TextureBuffer. Calls either drawer.drawOes or drawer.drawRgb
@ -97,7 +98,7 @@ public class VideoFrameDrawer {
// Input is packed already.
packedByteBuffer = planes[i];
} else {
VideoRenderer.nativeCopyPlane(
nativeCopyPlane(
planes[i], planeWidths[i], planeHeights[i], strides[i], copyBuffer, planeWidths[i]);
packedByteBuffer = copyBuffer;
}
@ -228,4 +229,8 @@ public class VideoFrameDrawer {
yuvUploader.release();
lastI420Frame = null;
}
// Helper native function to do a video frame plane copying.
static native void nativeCopyPlane(
ByteBuffer src, int width, int height, int srcStride, ByteBuffer dst, int dstStride);
}

View File

@ -195,10 +195,6 @@ public class VideoRenderer {
}
}
// Helper native function to do a video frame plane copying.
static native void nativeCopyPlane(
ByteBuffer src, int width, int height, int srcStride, ByteBuffer dst, int dstStride);
/** The real meat of VideoSinkInterface. */
public static interface Callbacks {
// |frame| might have pending rotation and implementation of Callbacks

View File

@ -29,7 +29,7 @@ import javax.annotation.Nullable;
import org.chromium.base.test.BaseJUnit4ClassRunner;
import org.junit.runner.RunWith;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
import org.webrtc.VideoRenderer.I420Frame;
import org.webrtc.VideoFrame;
class CameraVideoCapturerTestFixtures {
static final String TAG = "CameraVideoCapturerTestFixtures";
@ -38,21 +38,20 @@ class CameraVideoCapturerTestFixtures {
static final int DEFAULT_HEIGHT = 480;
static final int DEFAULT_FPS = 15;
static private class RendererCallbacks implements VideoRenderer.Callbacks {
static private class RendererCallbacks implements VideoSink {
private final Object frameLock = new Object();
private int framesRendered = 0;
private int width = 0;
private int height = 0;
@Override
public void renderFrame(I420Frame frame) {
public void onFrame(VideoFrame frame) {
synchronized (frameLock) {
++framesRendered;
width = frame.rotatedWidth();
height = frame.rotatedHeight();
width = frame.getRotatedWidth();
height = frame.getRotatedHeight();
frameLock.notify();
}
VideoRenderer.renderFrameDone(frame);
}
public int frameWidth() {
@ -79,25 +78,26 @@ class CameraVideoCapturerTestFixtures {
}
}
static private class FakeAsyncRenderer implements VideoRenderer.Callbacks {
private final List<I420Frame> pendingFrames = new ArrayList<I420Frame>();
static private class FakeAsyncRenderer implements VideoSink {
private final List<VideoFrame> pendingFrames = new ArrayList<VideoFrame>();
@Override
public void renderFrame(I420Frame frame) {
public void onFrame(VideoFrame frame) {
synchronized (pendingFrames) {
frame.retain();
pendingFrames.add(frame);
pendingFrames.notifyAll();
}
}
// Wait until at least one frame have been received, before returning them.
public List<I420Frame> waitForPendingFrames() throws InterruptedException {
public List<VideoFrame> waitForPendingFrames() throws InterruptedException {
Logging.d(TAG, "Waiting for pending frames");
synchronized (pendingFrames) {
while (pendingFrames.isEmpty()) {
pendingFrames.wait();
}
return new ArrayList<I420Frame>(pendingFrames);
return new ArrayList<VideoFrame>(pendingFrames);
}
}
}
@ -387,13 +387,13 @@ class CameraVideoCapturerTestFixtures {
}
private VideoTrackWithRenderer createVideoTrackWithRenderer(
CameraVideoCapturer capturer, VideoRenderer.Callbacks rendererCallbacks) {
CameraVideoCapturer capturer, VideoSink rendererCallbacks) {
VideoTrackWithRenderer videoTrackWithRenderer = new VideoTrackWithRenderer();
videoTrackWithRenderer.source = peerConnectionFactory.createVideoSource(capturer);
capturer.startCapture(DEFAULT_WIDTH, DEFAULT_HEIGHT, DEFAULT_FPS);
videoTrackWithRenderer.track =
peerConnectionFactory.createVideoTrack("dummy", videoTrackWithRenderer.source);
videoTrackWithRenderer.track.addRenderer(new VideoRenderer(rendererCallbacks));
videoTrackWithRenderer.track.addSink(rendererCallbacks);
return videoTrackWithRenderer;
}
@ -727,13 +727,13 @@ class CameraVideoCapturerTestFixtures {
disposeVideoTrackWithRenderer(videoTrackWithRenderer);
// Return the frame(s), on a different thread out of spite.
final List<I420Frame> pendingFrames =
final List<VideoFrame> pendingFrames =
videoTrackWithRenderer.fakeAsyncRenderer.waitForPendingFrames();
final Thread returnThread = new Thread(new Runnable() {
@Override
public void run() {
for (I420Frame frame : pendingFrames) {
VideoRenderer.renderFrameDone(frame);
for (VideoFrame frame : pendingFrames) {
frame.release();
}
}
});

View File

@ -248,9 +248,12 @@ public class EglRendererTest {
/** Tells eglRenderer to render test frame with given index. */
private void feedFrame(int i) {
eglRenderer.renderFrame(new VideoRenderer.I420Frame(TEST_FRAME_WIDTH, TEST_FRAME_HEIGHT, 0,
new int[] {TEST_FRAME_WIDTH, TEST_FRAME_WIDTH / 2, TEST_FRAME_WIDTH / 2}, TEST_FRAMES[i],
0));
final VideoFrame.I420Buffer buffer = JavaI420Buffer.wrap(TEST_FRAME_WIDTH, TEST_FRAME_HEIGHT,
TEST_FRAMES[i][0], TEST_FRAME_WIDTH, TEST_FRAMES[i][1], TEST_FRAME_WIDTH / 2,
TEST_FRAMES[i][2], TEST_FRAME_WIDTH / 2, null /* releaseCallback */);
final VideoFrame frame = new VideoFrame(buffer, 0 /* rotation */, 0 /* timestamp */);
eglRenderer.onFrame(frame);
frame.release();
}
@Test

View File

@ -59,8 +59,8 @@ public class PeerConnectionTest {
}
private static class ObserverExpectations
implements PeerConnection.Observer, VideoRenderer.Callbacks, DataChannel.Observer,
StatsObserver, RTCStatsCollectorCallback, RtpReceiver.Observer {
implements PeerConnection.Observer, VideoSink, DataChannel.Observer, StatsObserver,
RTCStatsCollectorCallback, RtpReceiver.Observer {
private final String name;
private int expectedIceCandidates = 0;
private int expectedErrors = 0;
@ -75,7 +75,7 @@ public class PeerConnectionTest {
private Queue<String> expectedAddStreamLabels = new ArrayDeque<>();
private Queue<String> expectedRemoveStreamLabels = new ArrayDeque<>();
private final List<IceCandidate> gotIceCandidates = new ArrayList<>();
private Map<MediaStream, WeakReference<VideoRenderer>> renderers = new IdentityHashMap<>();
private Map<MediaStream, WeakReference<VideoSink>> videoSinks = new IdentityHashMap<>();
private DataChannel dataChannel;
private Queue<DataChannel.Buffer> expectedBuffers = new ArrayDeque<>();
private Queue<DataChannel.State> expectedStateChanges = new ArrayDeque<>();
@ -140,13 +140,12 @@ public class PeerConnectionTest {
@Override
// TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
@SuppressWarnings("NoSynchronizedMethodCheck")
public synchronized void renderFrame(VideoRenderer.I420Frame frame) {
public synchronized void onFrame(VideoFrame frame) {
assertTrue(expectedWidth > 0);
assertTrue(expectedHeight > 0);
assertEquals(expectedWidth, frame.rotatedWidth());
assertEquals(expectedHeight, frame.rotatedHeight());
assertEquals(expectedWidth, frame.getRotatedWidth());
assertEquals(expectedHeight, frame.getRotatedHeight());
--expectedFramesDelivered;
VideoRenderer.renderFrameDone(frame);
}
// TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
@ -231,9 +230,8 @@ public class PeerConnectionTest {
}
for (VideoTrack track : stream.videoTracks) {
assertEquals("video", track.kind());
VideoRenderer renderer = createVideoRenderer(this);
track.addRenderer(renderer);
assertNull(renderers.put(stream, new WeakReference<VideoRenderer>(renderer)));
track.addSink(this);
assertNull(videoSinks.put(stream, new WeakReference<VideoSink>(this)));
}
gotRemoteStreams.add(stream);
}
@ -249,11 +247,11 @@ public class PeerConnectionTest {
@SuppressWarnings("NoSynchronizedMethodCheck")
public synchronized void onRemoveStream(MediaStream stream) {
assertEquals(expectedRemoveStreamLabels.remove(), stream.getId());
WeakReference<VideoRenderer> renderer = renderers.remove(stream);
assertNotNull(renderer);
assertNotNull(renderer.get());
WeakReference<VideoSink> videoSink = videoSinks.remove(stream);
assertNotNull(videoSink);
assertNotNull(videoSink.get());
assertEquals(1, stream.videoTracks.size());
stream.videoTracks.get(0).removeRenderer(renderer.get());
stream.videoTracks.get(0).removeSink(videoSink.get());
gotRemoteStreams.remove(stream);
}
@ -510,7 +508,7 @@ public class PeerConnectionTest {
// Sets the expected resolution for an ObserverExpectations once a frame
// has been captured.
private static class ExpectedResolutionSetter implements VideoRenderer.Callbacks {
private static class ExpectedResolutionSetter implements VideoSink {
private ObserverExpectations observer;
public ExpectedResolutionSetter(ObserverExpectations observer) {
@ -520,12 +518,13 @@ public class PeerConnectionTest {
@Override
// TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
@SuppressWarnings("NoSynchronizedMethodCheck")
public synchronized void renderFrame(VideoRenderer.I420Frame frame) {
public synchronized void onFrame(VideoFrame frame) {
// Because different camera devices (fake & physical) produce different
// resolutions, we only sanity-check the set sizes,
assertTrue(frame.rotatedWidth() > 0);
assertTrue(frame.rotatedHeight() > 0);
observer.setExpectedResolution(frame.rotatedWidth(), frame.rotatedHeight());
assertTrue(frame.getRotatedWidth() > 0);
assertTrue(frame.getRotatedHeight() > 0);
observer.setExpectedResolution(frame.getRotatedWidth(), frame.getRotatedHeight());
frame.retain();
}
}
@ -584,21 +583,16 @@ public class PeerConnectionTest {
static int videoWindowsMapped = -1;
private static VideoRenderer createVideoRenderer(VideoRenderer.Callbacks videoCallbacks) {
return new VideoRenderer(videoCallbacks);
}
// Return a weak reference to test that ownership is correctly held by
// PeerConnection, not by test code.
private static WeakReference<MediaStream> addTracksToPC(PeerConnectionFactory factory,
PeerConnection pc, VideoSource videoSource, String streamLabel, String videoTrackId,
String audioTrackId, VideoRenderer.Callbacks videoCallbacks) {
String audioTrackId, VideoSink videoSink) {
MediaStream lMS = factory.createLocalMediaStream(streamLabel);
VideoTrack videoTrack = factory.createVideoTrack(videoTrackId, videoSource);
assertNotNull(videoTrack);
VideoRenderer videoRenderer = createVideoRenderer(videoCallbacks);
assertNotNull(videoRenderer);
videoTrack.addRenderer(videoRenderer);
assertNotNull(videoSink);
videoTrack.addSink(videoSink);
lMS.addTrack(videoTrack);
// Just for fun, let's remove and re-add the track.
lMS.removeTrack(videoTrack);

View File

@ -48,14 +48,17 @@ public class SurfaceViewRendererOnMeasureTest {
/**
* Returns a dummy YUV frame.
*/
static VideoRenderer.I420Frame createFrame(int width, int height, int rotationDegree) {
static VideoFrame createFrame(int width, int height, int rotationDegree) {
final int[] yuvStrides = new int[] {width, (width + 1) / 2, (width + 1) / 2};
final int[] yuvHeights = new int[] {height, (height + 1) / 2, (height + 1) / 2};
final ByteBuffer[] yuvPlanes = new ByteBuffer[3];
for (int i = 0; i < 3; ++i) {
yuvPlanes[i] = ByteBuffer.allocateDirect(yuvStrides[i] * yuvHeights[i]);
}
return new VideoRenderer.I420Frame(width, height, rotationDegree, yuvStrides, yuvPlanes, 0);
final VideoFrame.I420Buffer buffer =
JavaI420Buffer.wrap(width, height, yuvPlanes[0], yuvStrides[0], yuvPlanes[1], yuvStrides[1],
yuvPlanes[2], yuvStrides[2], null /* releaseCallback */);
return new VideoFrame(buffer, rotationDegree, 0 /* timestamp */);
}
/**
@ -167,13 +170,13 @@ public class SurfaceViewRendererOnMeasureTest {
final int rotatedHeight = 720;
final int unrotatedWidth = (rotationDegree % 180 == 0 ? rotatedWidth : rotatedHeight);
final int unrotatedHeight = (rotationDegree % 180 == 0 ? rotatedHeight : rotatedWidth);
final VideoRenderer.I420Frame frame =
createFrame(unrotatedWidth, unrotatedHeight, rotationDegree);
assertEquals(rotatedWidth, frame.rotatedWidth());
assertEquals(rotatedHeight, frame.rotatedHeight());
final VideoFrame frame = createFrame(unrotatedWidth, unrotatedHeight, rotationDegree);
assertEquals(rotatedWidth, frame.getRotatedWidth());
assertEquals(rotatedHeight, frame.getRotatedHeight());
final String frameDimensions =
unrotatedWidth + "x" + unrotatedHeight + " with rotation " + rotationDegree;
surfaceViewRenderer.renderFrame(frame);
surfaceViewRenderer.onFrame(frame);
frame.release();
rendererEvents.waitForFrameSize(unrotatedWidth, unrotatedHeight, rotationDegree);
// Test forcing to zero size.

View File

@ -131,37 +131,5 @@ static jlong JNI_VideoRenderer_CreateVideoRenderer(
return jlongFromPointer(renderer.release());
}
// Copies one video-frame plane between two direct ByteBuffers, honoring
// the source and destination strides. Backs the Java
// VideoRenderer.nativeCopyPlane helper.
static void JNI_VideoRenderer_CopyPlane(
JNIEnv* jni,
const JavaParamRef<jclass>&,
const JavaParamRef<jobject>& j_src_buffer,
jint width,
jint height,
jint src_stride,
const JavaParamRef<jobject>& j_dst_buffer,
jint dst_stride) {
// Validate that each stride can hold a full row and that both buffers
// have capacity for |height| rows before touching any memory.
size_t src_size = jni->GetDirectBufferCapacity(j_src_buffer.obj());
size_t dst_size = jni->GetDirectBufferCapacity(j_dst_buffer.obj());
RTC_CHECK(src_stride >= width) << "Wrong source stride " << src_stride;
RTC_CHECK(dst_stride >= width) << "Wrong destination stride " << dst_stride;
RTC_CHECK(src_size >= src_stride * height)
<< "Insufficient source buffer capacity " << src_size;
RTC_CHECK(dst_size >= dst_stride * height)
<< "Insufficient destination buffer capacity " << dst_size;
uint8_t* src = reinterpret_cast<uint8_t*>(
jni->GetDirectBufferAddress(j_src_buffer.obj()));
uint8_t* dst = reinterpret_cast<uint8_t*>(
jni->GetDirectBufferAddress(j_dst_buffer.obj()));
if (src_stride == dst_stride) {
// Identical strides: the plane is contiguous, copy it in one memcpy.
memcpy(dst, src, src_stride * height);
} else {
// Different strides: copy row by row, advancing each side by its stride.
for (int i = 0; i < height; i++) {
memcpy(dst, src, width);
src += src_stride;
dst += dst_stride;
}
}
}
} // namespace jni
} // namespace webrtc

View File

@ -0,0 +1,52 @@
/*
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <jni.h>
#include "sdk/android/generated_video_jni/jni/VideoFrameDrawer_jni.h"
#include "sdk/android/native_api/jni/scoped_java_ref.h"
namespace webrtc {
namespace jni {
// Copies one video-frame plane between two direct ByteBuffers, honoring
// the source and destination strides. Backs the Java
// VideoFrameDrawer.nativeCopyPlane helper (replacement for the removed
// VideoRenderer.nativeCopyPlane).
static void JNI_VideoFrameDrawer_CopyPlane(
JNIEnv* jni,
const JavaParamRef<jclass>&,
const JavaParamRef<jobject>& j_src_buffer,
jint width,
jint height,
jint src_stride,
const JavaParamRef<jobject>& j_dst_buffer,
jint dst_stride) {
// Validate that each stride can hold a full row and that both buffers
// have capacity for |height| rows before touching any memory.
size_t src_size = jni->GetDirectBufferCapacity(j_src_buffer.obj());
size_t dst_size = jni->GetDirectBufferCapacity(j_dst_buffer.obj());
RTC_CHECK(src_stride >= width) << "Wrong source stride " << src_stride;
RTC_CHECK(dst_stride >= width) << "Wrong destination stride " << dst_stride;
RTC_CHECK(src_size >= src_stride * height)
<< "Insufficient source buffer capacity " << src_size;
RTC_CHECK(dst_size >= dst_stride * height)
<< "Insufficient destination buffer capacity " << dst_size;
uint8_t* src = reinterpret_cast<uint8_t*>(
jni->GetDirectBufferAddress(j_src_buffer.obj()));
uint8_t* dst = reinterpret_cast<uint8_t*>(
jni->GetDirectBufferAddress(j_dst_buffer.obj()));
if (src_stride == dst_stride) {
// Identical strides: the plane is contiguous, copy it in one memcpy.
memcpy(dst, src, src_stride * height);
} else {
// Different strides: copy row by row, advancing each side by its stride.
for (int i = 0; i < height; i++) {
memcpy(dst, src, width);
src += src_stride;
dst += dst_stride;
}
}
}
} // namespace jni
} // namespace webrtc