Android: Stop using VideoRenderer class

This CL updates the WebRTC code to stop using the old VideoRenderer and
VideoRenderer.I420Frame classes and instead use the new VideoSink and
VideoFrame classes.

This CL is the first step and the old classes are still left in the code
for now to keep backwards compatibility.

Bug: webrtc:9181
Change-Id: Ib0caa18cbaa2758b7859e850ddcaba003cfb06d6
Reviewed-on: https://webrtc-review.googlesource.com/71662
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#22989}
This commit is contained in:
Magnus Jedvert
2018-04-23 16:14:47 +02:00
committed by Commit Bot
parent b9ac121598
commit e987f2b765
15 changed files with 137 additions and 187 deletions

View File

@@ -59,7 +59,6 @@ import org.webrtc.SurfaceViewRenderer;
import org.webrtc.VideoCapturer; import org.webrtc.VideoCapturer;
import org.webrtc.VideoFileRenderer; import org.webrtc.VideoFileRenderer;
import org.webrtc.VideoFrame; import org.webrtc.VideoFrame;
import org.webrtc.VideoRenderer;
import org.webrtc.VideoSink; import org.webrtc.VideoSink;
/** /**
@@ -133,25 +132,6 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEven
// Peer connection statistics callback period in ms. // Peer connection statistics callback period in ms.
private static final int STAT_CALLBACK_PERIOD = 1000; private static final int STAT_CALLBACK_PERIOD = 1000;
private static class ProxyRenderer implements VideoRenderer.Callbacks {
private VideoRenderer.Callbacks target;
@Override
synchronized public void renderFrame(VideoRenderer.I420Frame frame) {
if (target == null) {
Logging.d(TAG, "Dropping frame in proxy because target is null.");
VideoRenderer.renderFrameDone(frame);
return;
}
target.renderFrame(frame);
}
synchronized public void setTarget(VideoRenderer.Callbacks target) {
this.target = target;
}
}
private static class ProxyVideoSink implements VideoSink { private static class ProxyVideoSink implements VideoSink {
private VideoSink target; private VideoSink target;
@@ -170,7 +150,7 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEven
} }
} }
private final ProxyRenderer remoteProxyRenderer = new ProxyRenderer(); private final ProxyVideoSink remoteProxyRenderer = new ProxyVideoSink();
private final ProxyVideoSink localProxyVideoSink = new ProxyVideoSink(); private final ProxyVideoSink localProxyVideoSink = new ProxyVideoSink();
@Nullable @Nullable
private PeerConnectionClient peerConnectionClient = null; private PeerConnectionClient peerConnectionClient = null;
@@ -186,7 +166,7 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEven
private SurfaceViewRenderer fullscreenRenderer; private SurfaceViewRenderer fullscreenRenderer;
@Nullable @Nullable
private VideoFileRenderer videoFileRenderer; private VideoFileRenderer videoFileRenderer;
private final List<VideoRenderer.Callbacks> remoteRenderers = new ArrayList<>(); private final List<VideoSink> remoteSinks = new ArrayList<>();
private Toast logToast; private Toast logToast;
private boolean commandLineRun; private boolean commandLineRun;
private boolean activityRunning; private boolean activityRunning;
@@ -251,7 +231,7 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEven
}); });
fullscreenRenderer.setOnClickListener(listener); fullscreenRenderer.setOnClickListener(listener);
remoteRenderers.add(remoteProxyRenderer); remoteSinks.add(remoteProxyRenderer);
final Intent intent = getIntent(); final Intent intent = getIntent();
final EglBase eglBase = EglBase.create(); final EglBase eglBase = EglBase.create();
@@ -268,7 +248,7 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEven
try { try {
videoFileRenderer = new VideoFileRenderer( videoFileRenderer = new VideoFileRenderer(
saveRemoteVideoToFile, videoOutWidth, videoOutHeight, eglBase.getEglBaseContext()); saveRemoteVideoToFile, videoOutWidth, videoOutHeight, eglBase.getEglBaseContext());
remoteRenderers.add(videoFileRenderer); remoteSinks.add(videoFileRenderer);
} catch (IOException e) { } catch (IOException e) {
throw new RuntimeException( throw new RuntimeException(
"Failed to open video file for output: " + saveRemoteVideoToFile, e); "Failed to open video file for output: " + saveRemoteVideoToFile, e);
@@ -776,7 +756,7 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEven
videoCapturer = createVideoCapturer(); videoCapturer = createVideoCapturer();
} }
peerConnectionClient.createPeerConnection( peerConnectionClient.createPeerConnection(
localProxyVideoSink, remoteRenderers, videoCapturer, signalingParameters); localProxyVideoSink, remoteSinks, videoCapturer, signalingParameters);
if (signalingParameters.initiator) { if (signalingParameters.initiator) {
logAndToast("Creating OFFER..."); logAndToast("Creating OFFER...");

View File

@@ -66,7 +66,6 @@ import org.webrtc.StatsReport;
import org.webrtc.VideoCapturer; import org.webrtc.VideoCapturer;
import org.webrtc.VideoDecoderFactory; import org.webrtc.VideoDecoderFactory;
import org.webrtc.VideoEncoderFactory; import org.webrtc.VideoEncoderFactory;
import org.webrtc.VideoRenderer;
import org.webrtc.VideoSink; import org.webrtc.VideoSink;
import org.webrtc.VideoSource; import org.webrtc.VideoSource;
import org.webrtc.VideoTrack; import org.webrtc.VideoTrack;
@@ -147,8 +146,7 @@ public class PeerConnectionClient {
private boolean isError; private boolean isError;
@Nullable @Nullable
private VideoSink localRender; private VideoSink localRender;
@Nullable @Nullable private List<VideoSink> remoteSinks;
private List<VideoRenderer.Callbacks> remoteRenders;
private SignalingParameters signalingParameters; private SignalingParameters signalingParameters;
private int videoWidth; private int videoWidth;
private int videoHeight; private int videoHeight;
@@ -356,25 +354,23 @@ public class PeerConnectionClient {
executor.execute(() -> createPeerConnectionFactoryInternal(options)); executor.execute(() -> createPeerConnectionFactoryInternal(options));
} }
public void createPeerConnection(final VideoSink localRender, public void createPeerConnection(final VideoSink localRender, final VideoSink remoteSink,
final VideoRenderer.Callbacks remoteRender, final VideoCapturer videoCapturer, final VideoCapturer videoCapturer, final SignalingParameters signalingParameters) {
final SignalingParameters signalingParameters) {
if (peerConnectionParameters.videoCallEnabled && videoCapturer == null) { if (peerConnectionParameters.videoCallEnabled && videoCapturer == null) {
Log.w(TAG, "Video call enabled but no video capturer provided."); Log.w(TAG, "Video call enabled but no video capturer provided.");
} }
createPeerConnection( createPeerConnection(
localRender, Collections.singletonList(remoteRender), videoCapturer, signalingParameters); localRender, Collections.singletonList(remoteSink), videoCapturer, signalingParameters);
} }
public void createPeerConnection(final VideoSink localRender, public void createPeerConnection(final VideoSink localRender, final List<VideoSink> remoteSinks,
final List<VideoRenderer.Callbacks> remoteRenders, final VideoCapturer videoCapturer, final VideoCapturer videoCapturer, final SignalingParameters signalingParameters) {
final SignalingParameters signalingParameters) {
if (peerConnectionParameters == null) { if (peerConnectionParameters == null) {
Log.e(TAG, "Creating peer connection without initializing factory."); Log.e(TAG, "Creating peer connection without initializing factory.");
return; return;
} }
this.localRender = localRender; this.localRender = localRender;
this.remoteRenders = remoteRenders; this.remoteSinks = remoteSinks;
this.videoCapturer = videoCapturer; this.videoCapturer = videoCapturer;
this.signalingParameters = signalingParameters; this.signalingParameters = signalingParameters;
executor.execute(() -> { executor.execute(() -> {
@@ -685,8 +681,8 @@ public class PeerConnectionClient {
// answer to get the remote track. // answer to get the remote track.
remoteVideoTrack = getRemoteVideoTrack(); remoteVideoTrack = getRemoteVideoTrack();
remoteVideoTrack.setEnabled(renderVideo); remoteVideoTrack.setEnabled(renderVideo);
for (VideoRenderer.Callbacks remoteRender : remoteRenders) { for (VideoSink remoteSink : remoteSinks) {
remoteVideoTrack.addRenderer(new VideoRenderer(remoteRender)); remoteVideoTrack.addSink(remoteSink);
} }
} }
peerConnection.addTrack(createAudioTrack(), mediaStreamLabels); peerConnection.addTrack(createAudioTrack(), mediaStreamLabels);
@@ -781,7 +777,7 @@ public class PeerConnectionClient {
saveRecordedAudioToFile = null; saveRecordedAudioToFile = null;
} }
localRender = null; localRender = null;
remoteRenders = null; remoteSinks = null;
Log.d(TAG, "Closing peer connection factory."); Log.d(TAG, "Closing peer connection factory.");
if (factory != null) { if (factory != null) {
factory.dispose(); factory.dispose();

View File

@@ -44,7 +44,6 @@ import org.webrtc.SessionDescription;
import org.webrtc.StatsReport; import org.webrtc.StatsReport;
import org.webrtc.VideoCapturer; import org.webrtc.VideoCapturer;
import org.webrtc.VideoFrame; import org.webrtc.VideoFrame;
import org.webrtc.VideoRenderer;
import org.webrtc.VideoSink; import org.webrtc.VideoSink;
@RunWith(AndroidJUnit4.class) @RunWith(AndroidJUnit4.class)
@@ -87,7 +86,7 @@ public class PeerConnectionClientTest implements PeerConnectionEvents {
private final Object closeEvent = new Object(); private final Object closeEvent = new Object();
// Mock renderer implementation. // Mock renderer implementation.
private static class MockRenderer implements VideoRenderer.Callbacks { private static class MockRenderer implements VideoSink {
// These are protected by 'this' since we gets called from worker threads. // These are protected by 'this' since we gets called from worker threads.
private String rendererName; private String rendererName;
private boolean renderFrameCalled = false; private boolean renderFrameCalled = false;
@@ -111,17 +110,17 @@ public class PeerConnectionClientTest implements PeerConnectionEvents {
@Override @Override
// TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression. // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
@SuppressWarnings("NoSynchronizedMethodCheck") @SuppressWarnings("NoSynchronizedMethodCheck")
public synchronized void renderFrame(VideoRenderer.I420Frame frame) { public synchronized void onFrame(VideoFrame frame) {
if (!renderFrameCalled) { if (!renderFrameCalled) {
if (rendererName != null) { if (rendererName != null) {
Log.d(TAG, rendererName + " render frame: " + frame.rotatedWidth() + " x " Log.d(TAG,
+ frame.rotatedHeight()); rendererName + " render frame: " + frame.getRotatedWidth() + " x "
+ frame.getRotatedHeight());
} else { } else {
Log.d(TAG, "Render frame: " + frame.rotatedWidth() + " x " + frame.rotatedHeight()); Log.d(TAG, "Render frame: " + frame.getRotatedWidth() + " x " + frame.getRotatedHeight());
} }
} }
renderFrameCalled = true; renderFrameCalled = true;
VideoRenderer.renderFrameDone(frame);
doneRendering.countDown(); doneRendering.countDown();
} }

View File

@@ -279,6 +279,7 @@ generate_jni("generated_video_jni") {
"api/org/webrtc/VideoEncoderFactory.java", "api/org/webrtc/VideoEncoderFactory.java",
"api/org/webrtc/VideoEncoderFallback.java", "api/org/webrtc/VideoEncoderFallback.java",
"api/org/webrtc/VideoFrame.java", "api/org/webrtc/VideoFrame.java",
"api/org/webrtc/VideoFrameDrawer.java",
"api/org/webrtc/VideoRenderer.java", "api/org/webrtc/VideoRenderer.java",
"api/org/webrtc/VideoSink.java", "api/org/webrtc/VideoSink.java",
"api/org/webrtc/VideoSource.java", "api/org/webrtc/VideoSource.java",
@@ -333,6 +334,7 @@ rtc_static_library("video_jni") {
"src/jni/videoencoderwrapper.h", "src/jni/videoencoderwrapper.h",
"src/jni/videoframe.cc", "src/jni/videoframe.cc",
"src/jni/videoframe.h", "src/jni/videoframe.h",
"src/jni/videoframedrawer.cc",
"src/jni/videosink.cc", "src/jni/videosink.cc",
"src/jni/videosink.h", "src/jni/videosink.h",
"src/jni/videotrack.cc", "src/jni/videotrack.cc",

View File

@@ -97,13 +97,6 @@ public class SurfaceEglRenderer extends EglRenderer implements SurfaceHolder.Cal
super.pauseVideo(); super.pauseVideo();
} }
// VideoRenderer.Callbacks interface.
@Override
public void renderFrame(VideoRenderer.I420Frame frame) {
updateFrameDimensionsAndReportEvents(frame);
super.renderFrame(frame);
}
// VideoSink interface. // VideoSink interface.
@Override @Override
public void onFrame(VideoFrame frame) { public void onFrame(VideoFrame frame) {
@@ -132,33 +125,6 @@ public class SurfaceEglRenderer extends EglRenderer implements SurfaceHolder.Cal
logD("surfaceChanged: format: " + format + " size: " + width + "x" + height); logD("surfaceChanged: format: " + format + " size: " + width + "x" + height);
} }
// Update frame dimensions and report any changes to |rendererEvents|.
private void updateFrameDimensionsAndReportEvents(VideoRenderer.I420Frame frame) {
synchronized (layoutLock) {
if (isRenderingPaused) {
return;
}
if (!isFirstFrameRendered) {
isFirstFrameRendered = true;
logD("Reporting first rendered frame.");
if (rendererEvents != null) {
rendererEvents.onFirstFrameRendered();
}
}
if (rotatedFrameWidth != frame.rotatedWidth() || rotatedFrameHeight != frame.rotatedHeight()
|| frameRotation != frame.rotationDegree) {
logD("Reporting frame resolution changed to " + frame.width + "x" + frame.height
+ " with rotation " + frame.rotationDegree);
if (rendererEvents != null) {
rendererEvents.onFrameResolutionChanged(frame.width, frame.height, frame.rotationDegree);
}
rotatedFrameWidth = frame.rotatedWidth();
rotatedFrameHeight = frame.rotatedHeight();
frameRotation = frame.rotationDegree;
}
}
}
// Update frame dimensions and report any changes to |rendererEvents|. // Update frame dimensions and report any changes to |rendererEvents|.
private void updateFrameDimensionsAndReportEvents(VideoFrame frame) { private void updateFrameDimensionsAndReportEvents(VideoFrame frame) {
synchronized (layoutLock) { synchronized (layoutLock) {

View File

@@ -21,9 +21,8 @@ import android.view.SurfaceView;
/** /**
* Display the video stream on a SurfaceView. * Display the video stream on a SurfaceView.
*/ */
public class SurfaceViewRenderer extends SurfaceView implements SurfaceHolder.Callback, public class SurfaceViewRenderer extends SurfaceView
VideoRenderer.Callbacks, VideoSink, implements SurfaceHolder.Callback, VideoSink, RendererCommon.RendererEvents {
RendererCommon.RendererEvents {
private static final String TAG = "SurfaceViewRenderer"; private static final String TAG = "SurfaceViewRenderer";
// Cached resource name. // Cached resource name.
@@ -180,12 +179,6 @@ public class SurfaceViewRenderer extends SurfaceView implements SurfaceHolder.Ca
eglRenderer.pauseVideo(); eglRenderer.pauseVideo();
} }
// VideoRenderer.Callbacks interface.
@Override
public void renderFrame(VideoRenderer.I420Frame frame) {
eglRenderer.renderFrame(frame);
}
// VideoSink interface. // VideoSink interface.
@Override @Override
public void onFrame(VideoFrame frame) { public void onFrame(VideoFrame frame) {

View File

@@ -25,7 +25,7 @@ import java.util.concurrent.LinkedBlockingQueue;
* Can be used to save the video frames to file. * Can be used to save the video frames to file.
*/ */
@JNINamespace("webrtc::jni") @JNINamespace("webrtc::jni")
public class VideoFileRenderer implements VideoRenderer.Callbacks, VideoSink { public class VideoFileRenderer implements VideoSink {
private static final String TAG = "VideoFileRenderer"; private static final String TAG = "VideoFileRenderer";
private final HandlerThread renderThread; private final HandlerThread renderThread;
@@ -73,13 +73,6 @@ public class VideoFileRenderer implements VideoRenderer.Callbacks, VideoSink {
}); });
} }
@Override
public void renderFrame(final VideoRenderer.I420Frame i420Frame) {
final VideoFrame frame = i420Frame.toVideoFrame();
onFrame(frame);
frame.release();
}
@Override @Override
public void onFrame(VideoFrame frame) { public void onFrame(VideoFrame frame) {
frame.retain(); frame.retain();

View File

@@ -21,6 +21,7 @@ import java.nio.ByteBuffer;
* drawer.drawYuv depending on the type of the buffer. The frame will be rendered with rotation * drawer.drawYuv depending on the type of the buffer. The frame will be rendered with rotation
* taken into account. You can supply an additional render matrix for custom transformations. * taken into account. You can supply an additional render matrix for custom transformations.
*/ */
@JNINamespace("webrtc::jni")
public class VideoFrameDrawer { public class VideoFrameDrawer {
/** /**
* Draws a VideoFrame.TextureBuffer. Calls either drawer.drawOes or drawer.drawRgb * Draws a VideoFrame.TextureBuffer. Calls either drawer.drawOes or drawer.drawRgb
@@ -97,7 +98,7 @@ public class VideoFrameDrawer {
// Input is packed already. // Input is packed already.
packedByteBuffer = planes[i]; packedByteBuffer = planes[i];
} else { } else {
VideoRenderer.nativeCopyPlane( nativeCopyPlane(
planes[i], planeWidths[i], planeHeights[i], strides[i], copyBuffer, planeWidths[i]); planes[i], planeWidths[i], planeHeights[i], strides[i], copyBuffer, planeWidths[i]);
packedByteBuffer = copyBuffer; packedByteBuffer = copyBuffer;
} }
@@ -228,4 +229,8 @@ public class VideoFrameDrawer {
yuvUploader.release(); yuvUploader.release();
lastI420Frame = null; lastI420Frame = null;
} }
// Helper native function to do a video frame plane copying.
static native void nativeCopyPlane(
ByteBuffer src, int width, int height, int srcStride, ByteBuffer dst, int dstStride);
} }

View File

@@ -195,10 +195,6 @@ public class VideoRenderer {
} }
} }
// Helper native function to do a video frame plane copying.
static native void nativeCopyPlane(
ByteBuffer src, int width, int height, int srcStride, ByteBuffer dst, int dstStride);
/** The real meat of VideoSinkInterface. */ /** The real meat of VideoSinkInterface. */
public static interface Callbacks { public static interface Callbacks {
// |frame| might have pending rotation and implementation of Callbacks // |frame| might have pending rotation and implementation of Callbacks

View File

@@ -29,7 +29,7 @@ import javax.annotation.Nullable;
import org.chromium.base.test.BaseJUnit4ClassRunner; import org.chromium.base.test.BaseJUnit4ClassRunner;
import org.junit.runner.RunWith; import org.junit.runner.RunWith;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat; import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
import org.webrtc.VideoRenderer.I420Frame; import org.webrtc.VideoFrame;
class CameraVideoCapturerTestFixtures { class CameraVideoCapturerTestFixtures {
static final String TAG = "CameraVideoCapturerTestFixtures"; static final String TAG = "CameraVideoCapturerTestFixtures";
@@ -38,21 +38,20 @@ class CameraVideoCapturerTestFixtures {
static final int DEFAULT_HEIGHT = 480; static final int DEFAULT_HEIGHT = 480;
static final int DEFAULT_FPS = 15; static final int DEFAULT_FPS = 15;
static private class RendererCallbacks implements VideoRenderer.Callbacks { static private class RendererCallbacks implements VideoSink {
private final Object frameLock = new Object(); private final Object frameLock = new Object();
private int framesRendered = 0; private int framesRendered = 0;
private int width = 0; private int width = 0;
private int height = 0; private int height = 0;
@Override @Override
public void renderFrame(I420Frame frame) { public void onFrame(VideoFrame frame) {
synchronized (frameLock) { synchronized (frameLock) {
++framesRendered; ++framesRendered;
width = frame.rotatedWidth(); width = frame.getRotatedWidth();
height = frame.rotatedHeight(); height = frame.getRotatedHeight();
frameLock.notify(); frameLock.notify();
} }
VideoRenderer.renderFrameDone(frame);
} }
public int frameWidth() { public int frameWidth() {
@@ -79,25 +78,26 @@ class CameraVideoCapturerTestFixtures {
} }
} }
static private class FakeAsyncRenderer implements VideoRenderer.Callbacks { static private class FakeAsyncRenderer implements VideoSink {
private final List<I420Frame> pendingFrames = new ArrayList<I420Frame>(); private final List<VideoFrame> pendingFrames = new ArrayList<VideoFrame>();
@Override @Override
public void renderFrame(I420Frame frame) { public void onFrame(VideoFrame frame) {
synchronized (pendingFrames) { synchronized (pendingFrames) {
frame.retain();
pendingFrames.add(frame); pendingFrames.add(frame);
pendingFrames.notifyAll(); pendingFrames.notifyAll();
} }
} }
// Wait until at least one frame have been received, before returning them. // Wait until at least one frame have been received, before returning them.
public List<I420Frame> waitForPendingFrames() throws InterruptedException { public List<VideoFrame> waitForPendingFrames() throws InterruptedException {
Logging.d(TAG, "Waiting for pending frames"); Logging.d(TAG, "Waiting for pending frames");
synchronized (pendingFrames) { synchronized (pendingFrames) {
while (pendingFrames.isEmpty()) { while (pendingFrames.isEmpty()) {
pendingFrames.wait(); pendingFrames.wait();
} }
return new ArrayList<I420Frame>(pendingFrames); return new ArrayList<VideoFrame>(pendingFrames);
} }
} }
} }
@@ -387,13 +387,13 @@ class CameraVideoCapturerTestFixtures {
} }
private VideoTrackWithRenderer createVideoTrackWithRenderer( private VideoTrackWithRenderer createVideoTrackWithRenderer(
CameraVideoCapturer capturer, VideoRenderer.Callbacks rendererCallbacks) { CameraVideoCapturer capturer, VideoSink rendererCallbacks) {
VideoTrackWithRenderer videoTrackWithRenderer = new VideoTrackWithRenderer(); VideoTrackWithRenderer videoTrackWithRenderer = new VideoTrackWithRenderer();
videoTrackWithRenderer.source = peerConnectionFactory.createVideoSource(capturer); videoTrackWithRenderer.source = peerConnectionFactory.createVideoSource(capturer);
capturer.startCapture(DEFAULT_WIDTH, DEFAULT_HEIGHT, DEFAULT_FPS); capturer.startCapture(DEFAULT_WIDTH, DEFAULT_HEIGHT, DEFAULT_FPS);
videoTrackWithRenderer.track = videoTrackWithRenderer.track =
peerConnectionFactory.createVideoTrack("dummy", videoTrackWithRenderer.source); peerConnectionFactory.createVideoTrack("dummy", videoTrackWithRenderer.source);
videoTrackWithRenderer.track.addRenderer(new VideoRenderer(rendererCallbacks)); videoTrackWithRenderer.track.addSink(rendererCallbacks);
return videoTrackWithRenderer; return videoTrackWithRenderer;
} }
@@ -727,13 +727,13 @@ class CameraVideoCapturerTestFixtures {
disposeVideoTrackWithRenderer(videoTrackWithRenderer); disposeVideoTrackWithRenderer(videoTrackWithRenderer);
// Return the frame(s), on a different thread out of spite. // Return the frame(s), on a different thread out of spite.
final List<I420Frame> pendingFrames = final List<VideoFrame> pendingFrames =
videoTrackWithRenderer.fakeAsyncRenderer.waitForPendingFrames(); videoTrackWithRenderer.fakeAsyncRenderer.waitForPendingFrames();
final Thread returnThread = new Thread(new Runnable() { final Thread returnThread = new Thread(new Runnable() {
@Override @Override
public void run() { public void run() {
for (I420Frame frame : pendingFrames) { for (VideoFrame frame : pendingFrames) {
VideoRenderer.renderFrameDone(frame); frame.release();
} }
} }
}); });

View File

@@ -248,9 +248,12 @@ public class EglRendererTest {
/** Tells eglRenderer to render test frame with given index. */ /** Tells eglRenderer to render test frame with given index. */
private void feedFrame(int i) { private void feedFrame(int i) {
eglRenderer.renderFrame(new VideoRenderer.I420Frame(TEST_FRAME_WIDTH, TEST_FRAME_HEIGHT, 0, final VideoFrame.I420Buffer buffer = JavaI420Buffer.wrap(TEST_FRAME_WIDTH, TEST_FRAME_HEIGHT,
new int[] {TEST_FRAME_WIDTH, TEST_FRAME_WIDTH / 2, TEST_FRAME_WIDTH / 2}, TEST_FRAMES[i], TEST_FRAMES[i][0], TEST_FRAME_WIDTH, TEST_FRAMES[i][1], TEST_FRAME_WIDTH / 2,
0)); TEST_FRAMES[i][2], TEST_FRAME_WIDTH / 2, null /* releaseCallback */);
final VideoFrame frame = new VideoFrame(buffer, 0 /* rotation */, 0 /* timestamp */);
eglRenderer.onFrame(frame);
frame.release();
} }
@Test @Test

View File

@@ -59,8 +59,8 @@ public class PeerConnectionTest {
} }
private static class ObserverExpectations private static class ObserverExpectations
implements PeerConnection.Observer, VideoRenderer.Callbacks, DataChannel.Observer, implements PeerConnection.Observer, VideoSink, DataChannel.Observer, StatsObserver,
StatsObserver, RTCStatsCollectorCallback, RtpReceiver.Observer { RTCStatsCollectorCallback, RtpReceiver.Observer {
private final String name; private final String name;
private int expectedIceCandidates = 0; private int expectedIceCandidates = 0;
private int expectedErrors = 0; private int expectedErrors = 0;
@@ -75,7 +75,7 @@ public class PeerConnectionTest {
private Queue<String> expectedAddStreamLabels = new ArrayDeque<>(); private Queue<String> expectedAddStreamLabels = new ArrayDeque<>();
private Queue<String> expectedRemoveStreamLabels = new ArrayDeque<>(); private Queue<String> expectedRemoveStreamLabels = new ArrayDeque<>();
private final List<IceCandidate> gotIceCandidates = new ArrayList<>(); private final List<IceCandidate> gotIceCandidates = new ArrayList<>();
private Map<MediaStream, WeakReference<VideoRenderer>> renderers = new IdentityHashMap<>(); private Map<MediaStream, WeakReference<VideoSink>> videoSinks = new IdentityHashMap<>();
private DataChannel dataChannel; private DataChannel dataChannel;
private Queue<DataChannel.Buffer> expectedBuffers = new ArrayDeque<>(); private Queue<DataChannel.Buffer> expectedBuffers = new ArrayDeque<>();
private Queue<DataChannel.State> expectedStateChanges = new ArrayDeque<>(); private Queue<DataChannel.State> expectedStateChanges = new ArrayDeque<>();
@@ -140,13 +140,12 @@ public class PeerConnectionTest {
@Override @Override
// TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression. // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
@SuppressWarnings("NoSynchronizedMethodCheck") @SuppressWarnings("NoSynchronizedMethodCheck")
public synchronized void renderFrame(VideoRenderer.I420Frame frame) { public synchronized void onFrame(VideoFrame frame) {
assertTrue(expectedWidth > 0); assertTrue(expectedWidth > 0);
assertTrue(expectedHeight > 0); assertTrue(expectedHeight > 0);
assertEquals(expectedWidth, frame.rotatedWidth()); assertEquals(expectedWidth, frame.getRotatedWidth());
assertEquals(expectedHeight, frame.rotatedHeight()); assertEquals(expectedHeight, frame.getRotatedHeight());
--expectedFramesDelivered; --expectedFramesDelivered;
VideoRenderer.renderFrameDone(frame);
} }
// TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression. // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
@@ -231,9 +230,8 @@ public class PeerConnectionTest {
} }
for (VideoTrack track : stream.videoTracks) { for (VideoTrack track : stream.videoTracks) {
assertEquals("video", track.kind()); assertEquals("video", track.kind());
VideoRenderer renderer = createVideoRenderer(this); track.addSink(this);
track.addRenderer(renderer); assertNull(videoSinks.put(stream, new WeakReference<VideoSink>(this)));
assertNull(renderers.put(stream, new WeakReference<VideoRenderer>(renderer)));
} }
gotRemoteStreams.add(stream); gotRemoteStreams.add(stream);
} }
@@ -249,11 +247,11 @@ public class PeerConnectionTest {
@SuppressWarnings("NoSynchronizedMethodCheck") @SuppressWarnings("NoSynchronizedMethodCheck")
public synchronized void onRemoveStream(MediaStream stream) { public synchronized void onRemoveStream(MediaStream stream) {
assertEquals(expectedRemoveStreamLabels.remove(), stream.getId()); assertEquals(expectedRemoveStreamLabels.remove(), stream.getId());
WeakReference<VideoRenderer> renderer = renderers.remove(stream); WeakReference<VideoSink> videoSink = videoSinks.remove(stream);
assertNotNull(renderer); assertNotNull(videoSink);
assertNotNull(renderer.get()); assertNotNull(videoSink.get());
assertEquals(1, stream.videoTracks.size()); assertEquals(1, stream.videoTracks.size());
stream.videoTracks.get(0).removeRenderer(renderer.get()); stream.videoTracks.get(0).removeSink(videoSink.get());
gotRemoteStreams.remove(stream); gotRemoteStreams.remove(stream);
} }
@@ -510,7 +508,7 @@ public class PeerConnectionTest {
// Sets the expected resolution for an ObserverExpectations once a frame // Sets the expected resolution for an ObserverExpectations once a frame
// has been captured. // has been captured.
private static class ExpectedResolutionSetter implements VideoRenderer.Callbacks { private static class ExpectedResolutionSetter implements VideoSink {
private ObserverExpectations observer; private ObserverExpectations observer;
public ExpectedResolutionSetter(ObserverExpectations observer) { public ExpectedResolutionSetter(ObserverExpectations observer) {
@@ -520,12 +518,13 @@ public class PeerConnectionTest {
@Override @Override
// TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression. // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
@SuppressWarnings("NoSynchronizedMethodCheck") @SuppressWarnings("NoSynchronizedMethodCheck")
public synchronized void renderFrame(VideoRenderer.I420Frame frame) { public synchronized void onFrame(VideoFrame frame) {
// Because different camera devices (fake & physical) produce different // Because different camera devices (fake & physical) produce different
// resolutions, we only sanity-check the set sizes, // resolutions, we only sanity-check the set sizes,
assertTrue(frame.rotatedWidth() > 0); assertTrue(frame.getRotatedWidth() > 0);
assertTrue(frame.rotatedHeight() > 0); assertTrue(frame.getRotatedHeight() > 0);
observer.setExpectedResolution(frame.rotatedWidth(), frame.rotatedHeight()); observer.setExpectedResolution(frame.getRotatedWidth(), frame.getRotatedHeight());
frame.retain();
} }
} }
@@ -584,21 +583,16 @@ public class PeerConnectionTest {
static int videoWindowsMapped = -1; static int videoWindowsMapped = -1;
private static VideoRenderer createVideoRenderer(VideoRenderer.Callbacks videoCallbacks) {
return new VideoRenderer(videoCallbacks);
}
// Return a weak reference to test that ownership is correctly held by // Return a weak reference to test that ownership is correctly held by
// PeerConnection, not by test code. // PeerConnection, not by test code.
private static WeakReference<MediaStream> addTracksToPC(PeerConnectionFactory factory, private static WeakReference<MediaStream> addTracksToPC(PeerConnectionFactory factory,
PeerConnection pc, VideoSource videoSource, String streamLabel, String videoTrackId, PeerConnection pc, VideoSource videoSource, String streamLabel, String videoTrackId,
String audioTrackId, VideoRenderer.Callbacks videoCallbacks) { String audioTrackId, VideoSink videoSink) {
MediaStream lMS = factory.createLocalMediaStream(streamLabel); MediaStream lMS = factory.createLocalMediaStream(streamLabel);
VideoTrack videoTrack = factory.createVideoTrack(videoTrackId, videoSource); VideoTrack videoTrack = factory.createVideoTrack(videoTrackId, videoSource);
assertNotNull(videoTrack); assertNotNull(videoTrack);
VideoRenderer videoRenderer = createVideoRenderer(videoCallbacks); assertNotNull(videoSink);
assertNotNull(videoRenderer); videoTrack.addSink(videoSink);
videoTrack.addRenderer(videoRenderer);
lMS.addTrack(videoTrack); lMS.addTrack(videoTrack);
// Just for fun, let's remove and re-add the track. // Just for fun, let's remove and re-add the track.
lMS.removeTrack(videoTrack); lMS.removeTrack(videoTrack);

View File

@@ -48,14 +48,17 @@ public class SurfaceViewRendererOnMeasureTest {
/** /**
* Returns a dummy YUV frame. * Returns a dummy YUV frame.
*/ */
static VideoRenderer.I420Frame createFrame(int width, int height, int rotationDegree) { static VideoFrame createFrame(int width, int height, int rotationDegree) {
final int[] yuvStrides = new int[] {width, (width + 1) / 2, (width + 1) / 2}; final int[] yuvStrides = new int[] {width, (width + 1) / 2, (width + 1) / 2};
final int[] yuvHeights = new int[] {height, (height + 1) / 2, (height + 1) / 2}; final int[] yuvHeights = new int[] {height, (height + 1) / 2, (height + 1) / 2};
final ByteBuffer[] yuvPlanes = new ByteBuffer[3]; final ByteBuffer[] yuvPlanes = new ByteBuffer[3];
for (int i = 0; i < 3; ++i) { for (int i = 0; i < 3; ++i) {
yuvPlanes[i] = ByteBuffer.allocateDirect(yuvStrides[i] * yuvHeights[i]); yuvPlanes[i] = ByteBuffer.allocateDirect(yuvStrides[i] * yuvHeights[i]);
} }
return new VideoRenderer.I420Frame(width, height, rotationDegree, yuvStrides, yuvPlanes, 0); final VideoFrame.I420Buffer buffer =
JavaI420Buffer.wrap(width, height, yuvPlanes[0], yuvStrides[0], yuvPlanes[1], yuvStrides[1],
yuvPlanes[2], yuvStrides[2], null /* releaseCallback */);
return new VideoFrame(buffer, rotationDegree, 0 /* timestamp */);
} }
/** /**
@ -167,13 +170,13 @@ public class SurfaceViewRendererOnMeasureTest {
final int rotatedHeight = 720; final int rotatedHeight = 720;
final int unrotatedWidth = (rotationDegree % 180 == 0 ? rotatedWidth : rotatedHeight); final int unrotatedWidth = (rotationDegree % 180 == 0 ? rotatedWidth : rotatedHeight);
final int unrotatedHeight = (rotationDegree % 180 == 0 ? rotatedHeight : rotatedWidth); final int unrotatedHeight = (rotationDegree % 180 == 0 ? rotatedHeight : rotatedWidth);
final VideoRenderer.I420Frame frame = final VideoFrame frame = createFrame(unrotatedWidth, unrotatedHeight, rotationDegree);
createFrame(unrotatedWidth, unrotatedHeight, rotationDegree); assertEquals(rotatedWidth, frame.getRotatedWidth());
assertEquals(rotatedWidth, frame.rotatedWidth()); assertEquals(rotatedHeight, frame.getRotatedHeight());
assertEquals(rotatedHeight, frame.rotatedHeight());
final String frameDimensions = final String frameDimensions =
unrotatedWidth + "x" + unrotatedHeight + " with rotation " + rotationDegree; unrotatedWidth + "x" + unrotatedHeight + " with rotation " + rotationDegree;
surfaceViewRenderer.renderFrame(frame); surfaceViewRenderer.onFrame(frame);
frame.release();
rendererEvents.waitForFrameSize(unrotatedWidth, unrotatedHeight, rotationDegree); rendererEvents.waitForFrameSize(unrotatedWidth, unrotatedHeight, rotationDegree);
// Test forcing to zero size. // Test forcing to zero size.

View File

@ -131,37 +131,5 @@ static jlong JNI_VideoRenderer_CreateVideoRenderer(
return jlongFromPointer(renderer.release()); return jlongFromPointer(renderer.release());
} }
// Copies one image plane between two direct ByteBuffers supplied from Java.
// |width|/|height| describe the plane's payload; |src_stride|/|dst_stride|
// give each buffer's row pitch in bytes (must be >= width). Buffer capacities
// are validated against stride * height before any bytes are touched.
// NOTE(review): GetDirectBufferCapacity returns -1 for a non-direct buffer;
// storing that into size_t wraps to a huge value, so the capacity checks
// would not catch a heap ByteBuffer — callers must pass direct buffers.
static void JNI_VideoRenderer_CopyPlane(
    JNIEnv* jni,
    const JavaParamRef<jclass>&,
    const JavaParamRef<jobject>& j_src_buffer,
    jint width,
    jint height,
    jint src_stride,
    const JavaParamRef<jobject>& j_dst_buffer,
    jint dst_stride) {
  size_t src_size = jni->GetDirectBufferCapacity(j_src_buffer.obj());
  size_t dst_size = jni->GetDirectBufferCapacity(j_dst_buffer.obj());
  RTC_CHECK(src_stride >= width) << "Wrong source stride " << src_stride;
  RTC_CHECK(dst_stride >= width) << "Wrong destination stride " << dst_stride;
  RTC_CHECK(src_size >= src_stride * height)
      << "Insufficient source buffer capacity " << src_size;
  RTC_CHECK(dst_size >= dst_stride * height)
      << "Insufficient destination buffer capacity " << dst_size;
  uint8_t* src = reinterpret_cast<uint8_t*>(
      jni->GetDirectBufferAddress(j_src_buffer.obj()));
  uint8_t* dst = reinterpret_cast<uint8_t*>(
      jni->GetDirectBufferAddress(j_dst_buffer.obj()));
  // Equal strides mean the plane is one contiguous run; otherwise copy only
  // the |width| payload bytes of each row and advance by each side's stride.
  if (src_stride == dst_stride) {
    memcpy(dst, src, src_stride * height);
  } else {
    for (int i = 0; i < height; i++) {
      memcpy(dst, src, width);
      src += src_stride;
      dst += dst_stride;
    }
  }
}
} // namespace jni } // namespace jni
} // namespace webrtc } // namespace webrtc

View File

@ -0,0 +1,52 @@
/*
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <jni.h>
#include "sdk/android/generated_video_jni/jni/VideoFrameDrawer_jni.h"
#include "sdk/android/native_api/jni/scoped_java_ref.h"
namespace webrtc {
namespace jni {
// Copies one image plane between two direct ByteBuffers supplied from Java.
// |width|/|height| describe the plane's payload; |src_stride|/|dst_stride|
// give each buffer's row pitch in bytes (must be >= width).
//
// Fixes over the previous version: GetDirectBufferCapacity() returns a jlong
// that is -1 for non-direct buffers; the old code stored it into size_t,
// where -1 wraps to a huge value and silently defeats the capacity checks
// (the subsequent null GetDirectBufferAddress() would then crash without a
// diagnostic). The capacities are now validated as signed values, and the
// stride * height comparison is done in 64-bit so a jint overflow cannot
// bypass the bounds check.
static void JNI_VideoFrameDrawer_CopyPlane(
    JNIEnv* jni,
    const JavaParamRef<jclass>&,
    const JavaParamRef<jobject>& j_src_buffer,
    jint width,
    jint height,
    jint src_stride,
    const JavaParamRef<jobject>& j_dst_buffer,
    jint dst_stride) {
  const jlong src_size = jni->GetDirectBufferCapacity(j_src_buffer.obj());
  const jlong dst_size = jni->GetDirectBufferCapacity(j_dst_buffer.obj());
  RTC_CHECK(src_size >= 0) << "Source buffer is not a direct buffer.";
  RTC_CHECK(dst_size >= 0) << "Destination buffer is not a direct buffer.";
  RTC_CHECK(src_stride >= width) << "Wrong source stride " << src_stride;
  RTC_CHECK(dst_stride >= width) << "Wrong destination stride " << dst_stride;
  RTC_CHECK(src_size >= static_cast<jlong>(src_stride) * height)
      << "Insufficient source buffer capacity " << src_size;
  RTC_CHECK(dst_size >= static_cast<jlong>(dst_stride) * height)
      << "Insufficient destination buffer capacity " << dst_size;
  uint8_t* src = reinterpret_cast<uint8_t*>(
      jni->GetDirectBufferAddress(j_src_buffer.obj()));
  uint8_t* dst = reinterpret_cast<uint8_t*>(
      jni->GetDirectBufferAddress(j_dst_buffer.obj()));
  if (src_stride == dst_stride) {
    // Equal strides: the whole plane is one contiguous run, copy in one call.
    memcpy(dst, src, static_cast<size_t>(src_stride) * height);
  } else {
    // Different strides: copy only the |width| payload bytes of each row and
    // advance each pointer by its own stride.
    for (int i = 0; i < height; i++) {
      memcpy(dst, src, width);
      src += src_stride;
      dst += dst_stride;
    }
  }
}
} // namespace jni
} // namespace webrtc