Support for video file input instead of camera, and for saving output video to a file

When video output to file is enabled, the remote video that is being recorded is
not shown on screen.

You can use this command line for file input and output:
monkeyrunner ./webrtc/examples/androidapp/start_loopback_stubbed_camera_saved_video_out.py --devname 02157df28cd47001 --videoin /storage/emulated/0/reference_video_1280x720_30fps.y4m --videoout /storage/emulated/0/output.y4m --videoout_width 1280 --videoout_height 720 --videooutsave /tmp/out.y4m
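As a rough illustration of what the command line above exercises, the following is a minimal Java sketch of wiring a file-based capturer and a file-based recorder into an AppRTC-style call. The org.webrtc.FileVideoCapturer and org.webrtc.VideoFileRenderer names and constructor signatures below are assumptions inferred from this CL's flags (--videoin, --videoout, --videoout_width/--videoout_height), not a verified API, and error handling is omitted.

// Minimal sketch (assumed API): feed video from a Y4M file and record the
// received video to another Y4M file instead of using a camera and the screen.
import java.io.IOException;

import org.webrtc.EglBase;
import org.webrtc.FileVideoCapturer;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoFileRenderer;
import org.webrtc.VideoRenderer;
import org.webrtc.VideoTrack;

public class FileVideoLoopbackSketch {
  public static void demo(VideoTrack remoteVideoTrack) throws IOException {
    EglBase eglBase = EglBase.create();

    // Input: read frames from a Y4M file instead of opening a camera
    // (corresponds to --videoin). The capturer would be handed to
    // PeerConnectionClient.createPeerConnection(...) in place of a
    // camera capturer, matching the new signature in the diff below.
    VideoCapturer capturer =
        new FileVideoCapturer("/storage/emulated/0/reference_video_1280x720_30fps.y4m");

    // Output: write received frames to a Y4M file (corresponds to --videoout).
    // Y4M needs fixed dimensions, hence the explicit width/height taken from
    // --videoout_width/--videoout_height.
    VideoFileRenderer fileRenderer = new VideoFileRenderer(
        "/storage/emulated/0/output.y4m", 1280, 720, eglBase.getEglBaseContext());

    // The file renderer is attached in place of the on-screen remote view,
    // which is why the recorded remote video is not shown on screen.
    remoteVideoTrack.addRenderer(new VideoRenderer(fileRenderer));

    // ... run the call; when it ends, release the renderer to flush the file
    // and dispose the capturer:
    // fileRenderer.release();
    // capturer.dispose();
  }
}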

BUG=webrtc:6545

Review-Url: https://codereview.webrtc.org/2273573003
Cr-Commit-Position: refs/heads/master@{#14660}
Author:    mandermo
Date:      2016-10-18 04:52:00 -07:00
Committed: Commit bot
Parent:    9e83c97e9f
Commit:    44666997ca

10 changed files with 933 additions and 186 deletions

PeerConnectionClientTest.java

@@ -21,7 +21,9 @@ import android.test.InstrumentationTestCase;
import android.test.suitebuilder.annotation.SmallTest;
import android.util.Log;
import org.webrtc.Camera1Enumerator;
import org.webrtc.Camera2Enumerator;
import org.webrtc.CameraEnumerator;
import org.webrtc.EglBase;
import org.webrtc.IceCandidate;
import org.webrtc.MediaCodecVideoEncoder;
@@ -29,6 +31,7 @@ import org.webrtc.PeerConnection;
import org.webrtc.PeerConnectionFactory;
import org.webrtc.SessionDescription;
import org.webrtc.StatsReport;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoRenderer;
import java.util.LinkedList;
@@ -235,7 +238,7 @@ public class PeerConnectionClientTest
PeerConnectionClient createPeerConnectionClient(MockRenderer localRenderer,
MockRenderer remoteRenderer, PeerConnectionParameters peerConnectionParameters,
EglBase.Context eglContext) {
VideoCapturer videoCapturer, EglBase.Context eglContext) {
List<PeerConnection.IceServer> iceServers = new LinkedList<PeerConnection.IceServer>();
SignalingParameters signalingParameters =
new SignalingParameters(iceServers, true, // iceServers, initiator.
@@ -249,7 +252,8 @@ public class PeerConnectionClientTest
client.setPeerConnectionFactoryOptions(options);
client.createPeerConnectionFactory(
getInstrumentation().getTargetContext(), peerConnectionParameters, this);
client.createPeerConnection(eglContext, localRenderer, remoteRenderer, signalingParameters);
client.createPeerConnection(
eglContext, localRenderer, remoteRenderer, videoCapturer, signalingParameters);
client.createOffer();
return client;
}
@@ -260,14 +264,12 @@ public class PeerConnectionClientTest
true, /* loopback */
false, /* tracing */
// Video codec parameters.
true, /* useCamera2 */
0, /* videoWidth */
0, /* videoHeight */
0, /* videoFps */
0, /* videoStartBitrate */
"", /* videoCodec */
true, /* videoCodecHwAcceleration */
false, /* captureToToTexture */
// Audio codec parameters.
0, /* audioStartBitrate */
"OPUS", /* audioCodec */
@@ -275,27 +277,36 @@ public class PeerConnectionClientTest
false, /* aecDump */
false /* useOpenSLES */, false /* disableBuiltInAEC */, false /* disableBuiltInAGC */,
false /* disableBuiltInNS */, false /* enableLevelControl */);
return peerConnectionParameters;
}
private PeerConnectionParameters createParametersForVideoCall(
String videoCodec, boolean captureToTexture) {
private VideoCapturer createCameraCapturer(boolean captureToTexture) {
final boolean useCamera2 =
captureToTexture && Camera2Enumerator.isSupported(getInstrumentation().getTargetContext());
CameraEnumerator enumerator;
if (useCamera2) {
enumerator = new Camera2Enumerator(getInstrumentation().getTargetContext());
} else {
enumerator = new Camera1Enumerator(captureToTexture);
}
String deviceName = enumerator.getDeviceNames()[0];
return enumerator.createCapturer(deviceName, null);
}
private PeerConnectionParameters createParametersForVideoCall(String videoCodec) {
PeerConnectionParameters peerConnectionParameters =
new PeerConnectionParameters(true, /* videoCallEnabled */
true, /* loopback */
false, /* tracing */
// Video codec parameters.
useCamera2, /* useCamera2 */
0, /* videoWidth */
0, /* videoHeight */
0, /* videoFps */
0, /* videoStartBitrate */
videoCodec, /* videoCodec */
true, /* videoCodecHwAcceleration */
captureToTexture, /* captureToToTexture */
// Audio codec parameters.
0, /* audioStartBitrate */
"OPUS", /* audioCodec */
@@ -303,6 +314,7 @@ public class PeerConnectionClientTest
false, /* aecDump */
false /* useOpenSLES */, false /* disableBuiltInAEC */, false /* disableBuiltInAGC */,
false /* disableBuiltInNS */, false /* enableLevelControl */);
return peerConnectionParameters;
}
@@ -327,7 +339,8 @@ public class PeerConnectionClientTest
Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally");
MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
pcClient = createPeerConnectionClient(localRenderer, new MockRenderer(0, null),
createParametersForVideoCall(VIDEO_CODEC_VP8, false), null);
createParametersForVideoCall(VIDEO_CODEC_VP8),
createCameraCapturer(false /* captureToTexture */), null);
// Wait for local SDP and ice candidates set events.
assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
@@ -343,8 +356,8 @@ public class PeerConnectionClientTest
Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally Done.");
}
private void doLoopbackTest(PeerConnectionParameters parameters, boolean decodeToTexure)
throws InterruptedException {
private void doLoopbackTest(PeerConnectionParameters parameters, VideoCapturer videoCapturer,
boolean decodeToTexture) throws InterruptedException {
loopback = true;
MockRenderer localRenderer = null;
MockRenderer remoteRenderer = null;
@@ -355,8 +368,8 @@ public class PeerConnectionClientTest
} else {
Log.d(TAG, "testLoopback for audio.");
}
pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, parameters,
decodeToTexure ? eglBase.getEglBaseContext() : null);
pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, parameters, videoCapturer,
decodeToTexture ? eglBase.getEglBaseContext() : null);
// Wait for local SDP, rename it to answer and set as remote SDP.
assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
@@ -386,22 +399,25 @@ public class PeerConnectionClientTest
@SmallTest
public void testLoopbackAudio() throws InterruptedException {
doLoopbackTest(createParametersForAudioCall(), false);
doLoopbackTest(createParametersForAudioCall(), null, false /* decodeToTexture */);
}
@SmallTest
public void testLoopbackVp8() throws InterruptedException {
doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, false), false);
doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8),
createCameraCapturer(false /* captureToTexture */), false /* decodeToTexture */);
}
@SmallTest
public void testLoopbackVp9() throws InterruptedException {
doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9, false), false);
doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9),
createCameraCapturer(false /* captureToTexture */), false /* decodeToTexture */);
}
@SmallTest
public void testLoopbackH264() throws InterruptedException {
doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264, false), false);
doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264),
createCameraCapturer(false /* captureToTexture */), false /* decodeToTexture */);
}
@SmallTest
@@ -410,7 +426,8 @@ public class PeerConnectionClientTest
Log.i(TAG, "Decode to textures is not supported, requires SDK version 19.");
return;
}
doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, false), true);
doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8),
createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */);
}
@SmallTest
@@ -419,7 +436,8 @@ public class PeerConnectionClientTest
Log.i(TAG, "Decode to textures is not supported, requires SDK version 19.");
return;
}
doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9, false), true);
doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9),
createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */);
}
@SmallTest
@@ -428,7 +446,8 @@ public class PeerConnectionClientTest
Log.i(TAG, "Decode to textures is not supported, requires SDK version 19.");
return;
}
doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264, false), true);
doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264),
createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */);
}
@SmallTest
@@ -443,7 +462,8 @@ public class PeerConnectionClientTest
Log.i(TAG, "VP8 encode to textures is not supported.");
return;
}
doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, true), true);
doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8),
createCameraCapturer(true /* captureToTexture */), true /* decodeToTexture */);
}
// Test that a call can be setup even if the EGL context used during initialization is
@@ -457,11 +477,11 @@ public class PeerConnectionClientTest
}
loopback = true;
PeerConnectionParameters parameters = createParametersForVideoCall(VIDEO_CODEC_VP8, true);
PeerConnectionParameters parameters = createParametersForVideoCall(VIDEO_CODEC_VP8);
MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);
pcClient = createPeerConnectionClient(
localRenderer, remoteRenderer, parameters, eglBase.getEglBaseContext());
pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, parameters,
createCameraCapturer(true /* captureToTexture */), eglBase.getEglBaseContext());
// Wait for local SDP, rename it to answer and set as remote SDP.
assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
@@ -501,7 +521,8 @@ public class PeerConnectionClientTest
Log.i(TAG, "H264 encode to textures is not supported.");
return;
}
doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264, true), true);
doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264),
createCameraCapturer(true /* captureToTexture */), true /* decodeToTexture */);
}
// Checks if default front camera can be switched to back camera and then
@@ -514,8 +535,9 @@ public class PeerConnectionClientTest
MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);
pcClient = createPeerConnectionClient(
localRenderer, remoteRenderer, createParametersForVideoCall(VIDEO_CODEC_VP8, false), null);
pcClient = createPeerConnectionClient(localRenderer, remoteRenderer,
createParametersForVideoCall(VIDEO_CODEC_VP8),
createCameraCapturer(false /* captureToTexture */), null);
// Wait for local SDP, rename it to answer and set as remote SDP.
assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
@@ -562,8 +584,9 @@ public class PeerConnectionClientTest
MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);
pcClient = createPeerConnectionClient(
localRenderer, remoteRenderer, createParametersForVideoCall(VIDEO_CODEC_VP8, false), null);
pcClient = createPeerConnectionClient(localRenderer, remoteRenderer,
createParametersForVideoCall(VIDEO_CODEC_VP8),
createCameraCapturer(false /* captureToTexture */), null);
// Wait for local SDP, rename it to answer and set as remote SDP.
assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
@@ -611,8 +634,9 @@ public class PeerConnectionClientTest
MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);
pcClient = createPeerConnectionClient(
localRenderer, remoteRenderer, createParametersForVideoCall(VIDEO_CODEC_VP8, false), null);
pcClient = createPeerConnectionClient(localRenderer, remoteRenderer,
createParametersForVideoCall(VIDEO_CODEC_VP8),
createCameraCapturer(false /* captureToTexture */), null);
// Wait for local SDP, rename it to answer and set as remote SDP.
assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));