Revert of Support for video file instead of camera and output video out to file (patchset #17 id:320001 of https://codereview.webrtc.org/2273573003/ )
Reason for revert: Breaks internal project.

Original issue's description:
> Support for video file instead of camera and output video out to file
>
> When video out to file is enabled, the remote video that is recorded is
> not shown on screen.
>
> You can use this command line for file input and output:
> monkeyrunner ./webrtc/examples/androidapp/start_loopback_stubbed_camera_saved_video_out.py --devname 02157df28cd47001 --videoin /storage/emulated/0/reference_video_1280x720_30fps.y4m --videoout /storage/emulated/0/output.y4m --videoout_width 1280 --videoout_height 720 --videooutsave /tmp/out.y4m
>
> BUG=webrtc:6545
>
> Committed: https://crrev.com/44666997ca912705f8f96c9bd211e719525a3ccc
> Cr-Commit-Position: refs/heads/master@{#14660}

TBR=magjed@webrtc.org,sakal@webrtc.org,jansson@chromium.org,mandermo@google.com,mandermo@webrtc.org
# Skipping CQ checks because original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=webrtc:6545

Review-Url: https://codereview.webrtc.org/2425763003
Cr-Commit-Position: refs/heads/master@{#14664}
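For context: the reverted change let the AppRTC demo read camera frames from a .y4m
file and record the remote stream back to a .y4m file. The sketch below shows how the
two removed classes fit together; it is reconstructed from the code in this diff
(error handling trimmed, and eglContext stands in for the app's shared
EglBase.Context):

    // Emulated camera: FileVideoCapturer implements VideoCapturer, so it is passed to
    // PeerConnectionClient wherever a camera capturer would normally go.
    VideoCapturer capturer =
        new FileVideoCapturer("/storage/emulated/0/reference_video_1280x720_30fps.y4m");

    // Recording sink: VideoFileRenderer implements VideoRenderer.Callbacks, so it is
    // added to the list of remote renderers next to the on-screen SurfaceViewRenderer.
    VideoFileRenderer fileRenderer =
        new VideoFileRenderer("/storage/emulated/0/output.y4m", 1280, 720, eglContext);
    remoteRenderers.add(fileRenderer);
    // ...
    fileRenderer.release(); // closes the output file on the render thread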
BUILD.gn (Java sources list)
@@ -262,7 +262,6 @@ if (is_android && !build_with_chromium) {
      "android/java/src/org/webrtc/EglBase10.java",
      "android/java/src/org/webrtc/EglBase14.java",
      "android/java/src/org/webrtc/EglRenderer.java",
      "android/java/src/org/webrtc/FileVideoCapturer.java",
      "android/java/src/org/webrtc/GlRectDrawer.java",
      "android/java/src/org/webrtc/GlShader.java",
      "android/java/src/org/webrtc/GlTextureFrameBuffer.java",
@@ -291,7 +290,6 @@ if (is_android && !build_with_chromium) {
      "android/java/src/org/webrtc/SurfaceViewRenderer.java",
      "android/java/src/org/webrtc/VideoCapturer.java",
      "android/java/src/org/webrtc/VideoCapturerAndroid.java",
      "android/java/src/org/webrtc/VideoFileRenderer.java",
      "android/java/src/org/webrtc/VideoRenderer.java",
      "android/java/src/org/webrtc/VideoRendererGui.java",
      "android/java/src/org/webrtc/VideoSource.java",
FileVideoCapturer.java (deleted by this revert)
@@ -1,211 +0,0 @@
/*
 * Copyright 2016 The WebRTC Project Authors. All rights reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import android.content.Context;
import android.os.SystemClock;

import java.util.concurrent.TimeUnit;
import java.util.Timer;
import java.util.TimerTask;
import java.io.RandomAccessFile;
import java.io.IOException;

public class FileVideoCapturer implements VideoCapturer {
  private interface VideoReader {
    int getFrameWidth();
    int getFrameHeight();
    byte[] getNextFrame();
    void close();
  }

  /**
   * Read video data from file for the .y4m container.
   */
  private static class VideoReaderY4M implements VideoReader {
    private final static String TAG = "VideoReaderY4M";
    private final int frameWidth;
    private final int frameHeight;
    private final int frameSize;

    // First char after header
    private final long videoStart;

    private static final String Y4M_FRAME_DELIMETER = "FRAME";

    private final RandomAccessFile mediaFileStream;

    public int getFrameWidth() {
      return frameWidth;
    }

    public int getFrameHeight() {
      return frameHeight;
    }

    public VideoReaderY4M(String file) throws IOException {
      mediaFileStream = new RandomAccessFile(file, "r");
      StringBuilder builder = new StringBuilder();
      for (;;) {
        int c = mediaFileStream.read();
        if (c == -1) {
          // End of file reached.
          throw new RuntimeException("Found end of file before end of header for file: " + file);
        }
        if (c == '\n') {
          // End of header found.
          break;
        }
        builder.append((char) c);
      }
      videoStart = mediaFileStream.getFilePointer();
      String header = builder.toString();
      String[] headerTokens = header.split("[ ]");
      Logging.d(TAG, "header: " + header + ", headerTokens" + headerTokens);
      int w = 0;
      int h = 0;
      String colorSpace = "";
      for (String tok : headerTokens) {
        char c = tok.charAt(0);
        switch (c) {
          case 'W':
            w = Integer.parseInt(tok.substring(1));
            break;
          case 'H':
            h = Integer.parseInt(tok.substring(1));
            break;
          case 'C':
            colorSpace = tok.substring(1);
            break;
        }
      }
      Logging.d(TAG, "Color space: " + colorSpace);
      if (!colorSpace.equals("420")) {
        throw new IllegalArgumentException("Does not support any other color space than I420");
      }
      if ((w % 2) == 1 || (h % 2) == 1) {
        throw new IllegalArgumentException("Does not support odd width or height");
      }
      frameWidth = w;
      frameHeight = h;
      frameSize = w * h * 3 / 2;
      Logging.d(TAG, "frame dim: (" + w + ", " + h + ") frameSize: " + frameSize);
    }

    public byte[] getNextFrame() {
      byte[] frame = new byte[frameSize];
      try {
        byte[] frameDelim = new byte[Y4M_FRAME_DELIMETER.length() + 1];
        if (mediaFileStream.read(frameDelim) < frameDelim.length) {
          // We reach end of file, loop
          mediaFileStream.seek(videoStart);
          if (mediaFileStream.read(frameDelim) < frameDelim.length) {
            throw new RuntimeException("Error looping video");
          }
        }
        String frameDelimStr = new String(frameDelim);
        if (!frameDelimStr.equals(Y4M_FRAME_DELIMETER + "\n")) {
          throw new RuntimeException(
              "Frames should be delimited by FRAME plus newline, found delimter was: '"
              + frameDelimStr + "'");
        }
        mediaFileStream.readFully(frame);
        byte[] nv21Frame = new byte[frameSize];
        nativeI420ToNV21(frame, frameWidth, frameHeight, nv21Frame);
        return nv21Frame;
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
    }

    public void close() {
      try {
        mediaFileStream.close();
      } catch (IOException e) {
        Logging.e(TAG, "Problem closing file", e);
      }
    }
  }

  private final static String TAG = "FileVideoCapturer";
  private final VideoReader videoReader;
  private CapturerObserver capturerObserver;
  private final Timer timer = new Timer();

  private final TimerTask tickTask = new TimerTask() {
    @Override
    public void run() {
      tick();
    }
  };

  private int getFrameWidth() {
    return videoReader.getFrameWidth();
  }

  private int getFrameHeight() {
    return videoReader.getFrameHeight();
  }

  public FileVideoCapturer(String inputFile) throws IOException {
    try {
      videoReader = new VideoReaderY4M(inputFile);
    } catch (IOException e) {
      Logging.d(TAG, "Could not open video file: " + inputFile);
      throw e;
    }
  }

  private byte[] getNextFrame() {
    return videoReader.getNextFrame();
  }

  public void tick() {
    final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());

    byte[] frameData = getNextFrame();
    capturerObserver.onByteBufferFrameCaptured(
        frameData, getFrameWidth(), getFrameHeight(), 0, captureTimeNs);
  }

  @Override
  public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
      CapturerObserver capturerObserver) {
    this.capturerObserver = capturerObserver;
  }

  @Override
  public void startCapture(int width, int height, int framerate) {
    timer.schedule(tickTask, 0, 1000 / framerate);
  }

  @Override
  public void stopCapture() throws InterruptedException {
    timer.cancel();
  }

  @Override
  public void changeCaptureFormat(int width, int height, int framerate) {
    // Empty on purpose
  }

  @Override
  public void dispose() {
    videoReader.close();
  }

  @Override
  public boolean isScreencast() {
    return false;
  }

  public static native void nativeI420ToNV21(byte[] src, int width, int height, byte[] dst);
}
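(A note on the container format: VideoReaderY4M above assumes the minimal Y4M layout
that VideoFileRenderer below also writes -- one header line ended by '\n' carrying
W<width>, H<height> and C420 tokens, then frames each introduced by "FRAME\n" and
holding width * height * 3 / 2 bytes of raw I420 data. For example, a 1280x720 file
begins:

    YUV4MPEG2 C420 W1280 H720 Ip F30:1 A1:1
    FRAME
    <1382400 bytes of I420 pixel data>
    FRAME
    ...)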
SurfaceTextureHelper.java
@@ -76,7 +76,7 @@ class SurfaceTextureHelper {
  }

  // State for YUV conversion, instantiated on demand.
  static class YuvConverter {
  static private class YuvConverter {
    private final EglBase eglBase;
    private final GlShader shader;
    private boolean released = false;
VideoFileRenderer.java (deleted by this revert)
@@ -1,135 +0,0 @@
/*
 * Copyright 2016 The WebRTC Project Authors. All rights reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */
package org.webrtc;

import android.os.Handler;
import android.os.HandlerThread;

import java.nio.ByteBuffer;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.concurrent.CountDownLatch;

/**
 * Can be used to save the video frames to file.
 */
public class VideoFileRenderer implements VideoRenderer.Callbacks {
  private static final String TAG = "VideoFileRenderer";

  private final SurfaceTextureHelper.YuvConverter yuvConverter;
  private final HandlerThread renderThread;
  private final Object handlerLock = new Object();
  private final Handler renderThreadHandler;
  private final FileOutputStream videoOutFile;
  private final int outputFileWidth;
  private final int outputFileHeight;
  private final int outputFrameSize;
  private final ByteBuffer outputFrameBuffer;

  public VideoFileRenderer(String outputFile, int outputFileWidth, int outputFileHeight,
      EglBase.Context sharedContext) throws IOException {
    if ((outputFileWidth % 2) == 1 || (outputFileHeight % 2) == 1) {
      throw new IllegalArgumentException("Does not support uneven width or height");
    }
    yuvConverter = new SurfaceTextureHelper.YuvConverter(sharedContext);

    this.outputFileWidth = outputFileWidth;
    this.outputFileHeight = outputFileHeight;

    outputFrameSize = outputFileWidth * outputFileHeight * 3 / 2;
    outputFrameBuffer = ByteBuffer.allocateDirect(outputFrameSize);

    videoOutFile = new FileOutputStream(outputFile);
    videoOutFile.write(
        ("YUV4MPEG2 C420 W" + outputFileWidth + " H" + outputFileHeight + " Ip F30:1 A1:1\n")
            .getBytes());

    renderThread = new HandlerThread(TAG);
    renderThread.start();
    renderThreadHandler = new Handler(renderThread.getLooper());
  }

  @Override
  public void renderFrame(final VideoRenderer.I420Frame frame) {
    renderThreadHandler.post(new Runnable() {
      @Override
      public void run() {
        renderFrameOnRenderThread(frame);
      }
    });
  }

  private void renderFrameOnRenderThread(VideoRenderer.I420Frame frame) {
    final float frameAspectRatio = (float) frame.rotatedWidth() / (float) frame.rotatedHeight();

    final float[] rotatedSamplingMatrix =
        RendererCommon.rotateTextureMatrix(frame.samplingMatrix, frame.rotationDegree);
    final float[] layoutMatrix = RendererCommon.getLayoutMatrix(
        false, frameAspectRatio, (float) outputFileWidth / outputFileHeight);
    final float[] texMatrix = RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);

    try {
      videoOutFile.write("FRAME\n".getBytes());
      if (!frame.yuvFrame) {
        yuvConverter.convert(outputFrameBuffer, outputFileWidth, outputFileHeight, outputFileWidth,
            frame.textureId, texMatrix);

        int stride = outputFileWidth;
        byte[] data = outputFrameBuffer.array();
        int offset = outputFrameBuffer.arrayOffset();

        // Write Y
        videoOutFile.write(data, offset, outputFileWidth * outputFileHeight);

        // Write U
        for (int r = outputFileHeight; r < outputFileHeight * 3 / 2; ++r) {
          videoOutFile.write(data, offset + r * stride, stride / 2);
        }

        // Write V
        for (int r = outputFileHeight; r < outputFileHeight * 3 / 2; ++r) {
          videoOutFile.write(data, offset + r * stride + stride / 2, stride / 2);
        }
      } else {
        nativeI420Scale(frame.yuvPlanes[0], frame.yuvStrides[0], frame.yuvPlanes[1],
            frame.yuvStrides[1], frame.yuvPlanes[2], frame.yuvStrides[2], frame.width, frame.height,
            outputFrameBuffer, outputFileWidth, outputFileHeight);
        videoOutFile.write(
            outputFrameBuffer.array(), outputFrameBuffer.arrayOffset(), outputFrameSize);
      }
    } catch (IOException e) {
      Logging.e(TAG, "Failed to write to file for video out");
      throw new RuntimeException(e);
    } finally {
      VideoRenderer.renderFrameDone(frame);
    }
  }

  public void release() {
    final CountDownLatch cleanupBarrier = new CountDownLatch(1);
    renderThreadHandler.post(new Runnable() {
      @Override
      public void run() {
        try {
          videoOutFile.close();
        } catch (IOException e) {
          Logging.d(TAG, "Error closing output video file");
        }
        cleanupBarrier.countDown();
      }
    });
    ThreadUtils.awaitUninterruptibly(cleanupBarrier);
    renderThread.quit();
  }

  public static native void nativeI420Scale(ByteBuffer srcY, int strideY, ByteBuffer srcU,
      int strideU, ByteBuffer srcV, int strideV, int width, int height, ByteBuffer dst,
      int dstWidth, int dstHeight);
}
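(A note on the texture path in renderFrameOnRenderThread: the write loops assume that
SurfaceTextureHelper.YuvConverter packs its output as a single stride-wide buffer in
which the first outputFileHeight rows hold the Y plane, and each of the following
outputFileHeight / 2 rows holds one U row in its left half and one V row in its right
half. That is why Y is written as one block of outputFileWidth * outputFileHeight
bytes, while U and V come out as stride / 2 bytes from offsets r * stride and
r * stride + stride / 2 of the same bottom rows. This layout is inferred from the
loops themselves; the converter is not shown in this diff.)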
peerconnection_jni.cc
@@ -43,8 +43,6 @@
#include <memory>
#include <utility>

#include "third_party/libyuv/include/libyuv/convert_from.h"
#include "third_party/libyuv/include/libyuv/scale.h"
#include "webrtc/api/androidvideotracksource.h"
#include "webrtc/api/android/jni/androidmediadecoder_jni.h"
#include "webrtc/api/android/jni/androidmediaencoder_jni.h"
@@ -2005,77 +2003,6 @@ JOW(void, VideoRenderer_nativeCopyPlane)(
  }
}

JOW(void, FileVideoCapturer_nativeI420ToNV21)(
    JNIEnv *jni, jclass, jbyteArray j_src_buffer, jint width, jint height,
    jbyteArray j_dst_buffer) {
  size_t src_size = jni->GetArrayLength(j_src_buffer);
  size_t dst_size = jni->GetArrayLength(j_dst_buffer);
  int src_stride = width;
  int dst_stride = width;
  RTC_CHECK_GE(src_size, src_stride * height * 3 / 2);
  RTC_CHECK_GE(dst_size, dst_stride * height * 3 / 2);
  uint8_t* src =
      reinterpret_cast<uint8_t*>(jni->GetByteArrayElements(j_src_buffer, 0));
  uint8_t* dst =
      reinterpret_cast<uint8_t*>(jni->GetByteArrayElements(j_dst_buffer, 0));
  uint8_t* src_y = src;
  size_t src_stride_y = src_stride;
  uint8_t* src_u = src + src_stride * height;
  size_t src_stride_u = src_stride / 2;
  uint8_t* src_v = src + src_stride * height * 5 / 4;
  size_t src_stride_v = src_stride / 2;

  uint8_t* dst_y = dst;
  size_t dst_stride_y = dst_stride;
  size_t dst_stride_uv = dst_stride;
  uint8_t* dst_uv = dst + dst_stride * height;

  libyuv::I420ToNV21(src_y, src_stride_y, src_u, src_stride_u, src_v,
                     src_stride_v, dst_y, dst_stride_y, dst_uv, dst_stride_uv,
                     width, height);
}

JOW(void, VideoFileRenderer_nativeI420Scale)(
    JNIEnv *jni, jclass,
    jobject j_src_buffer_y, jint j_src_stride_y,
    jobject j_src_buffer_u, jint j_src_stride_u,
    jobject j_src_buffer_v, jint j_src_stride_v,
    jint width, jint height,
    jbyteArray j_dst_buffer, jint dstWidth, jint dstHeight) {
  size_t src_size_y = jni->GetDirectBufferCapacity(j_src_buffer_y);
  size_t src_size_u = jni->GetDirectBufferCapacity(j_src_buffer_u);
  size_t src_size_v = jni->GetDirectBufferCapacity(j_src_buffer_v);
  size_t dst_size = jni->GetDirectBufferCapacity(j_dst_buffer);
  int dst_stride = dstWidth;
  RTC_CHECK_GE(src_size_y, j_src_stride_y * height);
  RTC_CHECK_GE(src_size_u, j_src_stride_u * height / 4);
  RTC_CHECK_GE(src_size_v, j_src_stride_v * height / 4);
  RTC_CHECK_GE(dst_size, dst_stride * height * 3 / 2);
  uint8_t* src_y =
      reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_buffer_y));
  uint8_t* src_u =
      reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_buffer_u));
  uint8_t* src_v =
      reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_buffer_v));
  uint8_t* dst =
      reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_buffer));

  uint8_t* dst_y = dst;
  size_t dst_stride_y = dst_stride;
  uint8_t* dst_u = dst + dst_stride * dstHeight;
  size_t dst_stride_u = dst_stride / 2;
  uint8_t* dst_v = dst + dst_stride * dstHeight * 5 / 4;
  size_t dst_stride_v = dst_stride / 2;

  int ret = libyuv::I420Scale(
      src_y, j_src_stride_y, src_u, j_src_stride_u, src_v, j_src_stride_v,
      width, height, dst_y, dst_stride_y, dst_u, dst_stride_u, dst_v,
      dst_stride_v, dstWidth, dstHeight, libyuv::kFilterBilinear);
  if (ret) {
    LOG(LS_ERROR) << "Error scaling I420 frame: " << ret;
  }
}

JOW(jstring, MediaStreamTrack_nativeId)(JNIEnv* jni, jclass, jlong j_p) {
  return JavaStringFromStdString(
      jni, reinterpret_cast<MediaStreamTrackInterface*>(j_p)->id());
CallActivity.java
@@ -29,26 +29,14 @@ import android.view.Window;
import android.view.WindowManager.LayoutParams;
import android.widget.Toast;

import java.io.IOException;
import java.lang.RuntimeException;
import java.util.ArrayList;
import java.util.List;

import org.webrtc.Camera1Enumerator;
import org.webrtc.Camera2Enumerator;
import org.webrtc.CameraEnumerator;
import org.webrtc.EglBase;
import org.webrtc.FileVideoCapturer;
import org.webrtc.VideoFileRenderer;
import org.webrtc.IceCandidate;
import org.webrtc.Logging;
import org.webrtc.PeerConnectionFactory;
import org.webrtc.RendererCommon.ScalingType;
import org.webrtc.SessionDescription;
import org.webrtc.StatsReport;
import org.webrtc.SurfaceViewRenderer;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoRenderer;

/**
 * Activity for peer connection call setup, call waiting
@@ -84,15 +72,6 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEven
  public static final String EXTRA_TRACING = "org.appspot.apprtc.TRACING";
  public static final String EXTRA_CMDLINE = "org.appspot.apprtc.CMDLINE";
  public static final String EXTRA_RUNTIME = "org.appspot.apprtc.RUNTIME";
  public static final String EXTRA_VIDEO_FILE_AS_CAMERA = "org.appspot.apprtc.VIDEO_FILE_AS_CAMERA";
  public static final String EXTRA_SAVE_REMOTE_VIDEO_TO_FILE =
      "org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE";
  public static final String EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH =
      "org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE_WIDTH";
  public static final String EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT =
      "org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT";
  public static final String EXTRA_USE_VALUES_FROM_INTENT =
      "org.appspot.apprtc.USE_VALUES_FROM_INTENT";
  private static final String TAG = "CallRTCClient";

  // List of mandatory application permissions.
@@ -122,10 +101,7 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEven
  private AppRTCAudioManager audioManager = null;
  private EglBase rootEglBase;
  private SurfaceViewRenderer localRender;
  private SurfaceViewRenderer remoteRenderScreen;
  private VideoFileRenderer videoFileRenderer;
  private final List<VideoRenderer.Callbacks> remoteRenderers =
      new ArrayList<VideoRenderer.Callbacks>();
  private SurfaceViewRenderer remoteRender;
  private PercentFrameLayout localRenderLayout;
  private PercentFrameLayout remoteRenderLayout;
  private ScalingType scalingType;
@@ -167,7 +143,7 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEven

    // Create UI controls.
    localRender = (SurfaceViewRenderer) findViewById(R.id.local_video_view);
    remoteRenderScreen = (SurfaceViewRenderer) findViewById(R.id.remote_video_view);
    remoteRender = (SurfaceViewRenderer) findViewById(R.id.remote_video_view);
    localRenderLayout = (PercentFrameLayout) findViewById(R.id.local_video_layout);
    remoteRenderLayout = (PercentFrameLayout) findViewById(R.id.remote_video_layout);
    callFragment = new CallFragment();
@@ -182,31 +158,12 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEven
    };

    localRender.setOnClickListener(listener);
    remoteRenderScreen.setOnClickListener(listener);
    remoteRenderers.add(remoteRenderScreen);

    final Intent intent = getIntent();
    remoteRender.setOnClickListener(listener);

    // Create video renderers.
    rootEglBase = EglBase.create();
    localRender.init(rootEglBase.getEglBaseContext(), null);
    String saveRemoteVideoToFile = intent.getStringExtra(EXTRA_SAVE_REMOTE_VIDEO_TO_FILE);

    // When saveRemoteVideoToFile is set we save the video from the remote to a file.
    if (saveRemoteVideoToFile != null) {
      int videoOutWidth = intent.getIntExtra(EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH, 0);
      int videoOutHeight = intent.getIntExtra(EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT, 0);
      try {
        videoFileRenderer = new VideoFileRenderer(
            saveRemoteVideoToFile, videoOutWidth, videoOutHeight, rootEglBase.getEglBaseContext());
        remoteRenderers.add(videoFileRenderer);
      } catch (IOException e) {
        throw new RuntimeException(
            "Failed to open video file for output: " + saveRemoteVideoToFile, e);
      }
    }
    remoteRenderScreen.init(rootEglBase.getEglBaseContext(), null);

    remoteRender.init(rootEglBase.getEglBaseContext(), null);
    localRender.setZOrderMediaOverlay(true);
    updateVideoView();

@@ -220,6 +177,8 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEven
      }
    }

    // Get Intent parameters.
    final Intent intent = getIntent();
    Uri roomUri = intent.getData();
    if (roomUri == null) {
      logAndToast(getString(R.string.missing_url));
@@ -228,10 +187,7 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEven
      finish();
      return;
    }

    // Get Intent parameters.
    String roomId = intent.getStringExtra(EXTRA_ROOMID);
    Log.d(TAG, "Room ID: " + roomId);
    if (roomId == null || roomId.length() == 0) {
      logAndToast(getString(R.string.missing_url));
      Log.e(TAG, "Incorrect room ID in intent!");
@@ -243,12 +199,16 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEven
    boolean loopback = intent.getBooleanExtra(EXTRA_LOOPBACK, false);
    boolean tracing = intent.getBooleanExtra(EXTRA_TRACING, false);

    boolean useCamera2 =
        Camera2Enumerator.isSupported(this) && intent.getBooleanExtra(EXTRA_CAMERA2, true);

    peerConnectionParameters =
        new PeerConnectionParameters(intent.getBooleanExtra(EXTRA_VIDEO_CALL, true), loopback,
            tracing, intent.getIntExtra(EXTRA_VIDEO_WIDTH, 0),
            tracing, useCamera2, intent.getIntExtra(EXTRA_VIDEO_WIDTH, 0),
            intent.getIntExtra(EXTRA_VIDEO_HEIGHT, 0), intent.getIntExtra(EXTRA_VIDEO_FPS, 0),
            intent.getIntExtra(EXTRA_VIDEO_BITRATE, 0), intent.getStringExtra(EXTRA_VIDEOCODEC),
            intent.getBooleanExtra(EXTRA_HWCODEC_ENABLED, true),
            intent.getBooleanExtra(EXTRA_CAPTURETOTEXTURE_ENABLED, false),
            intent.getIntExtra(EXTRA_AUDIO_BITRATE, 0), intent.getStringExtra(EXTRA_AUDIOCODEC),
            intent.getBooleanExtra(EXTRA_NOAUDIOPROCESSING_ENABLED, false),
            intent.getBooleanExtra(EXTRA_AECDUMP_ENABLED, false),
@@ -260,8 +220,6 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEven
    commandLineRun = intent.getBooleanExtra(EXTRA_CMDLINE, false);
    runTimeMs = intent.getIntExtra(EXTRA_RUNTIME, 0);

    Log.d(TAG, "VIDEO_FILE: '" + intent.getStringExtra(EXTRA_VIDEO_FILE_AS_CAMERA) + "'");

    // Create connection client. Use DirectRTCClient if room name is an IP otherwise use the
    // standard WebSocketRTCClient.
    if (loopback || !DirectRTCClient.IP_PATTERN.matcher(roomId).matches()) {
@@ -307,46 +265,6 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEven
        CallActivity.this, peerConnectionParameters, CallActivity.this);
  }

  private boolean useCamera2() {
    return Camera2Enumerator.isSupported(this) && getIntent().getBooleanExtra(EXTRA_CAMERA2, true);
  }

  private boolean captureToTexture() {
    return getIntent().getBooleanExtra(EXTRA_CAPTURETOTEXTURE_ENABLED, false);
  }

  private VideoCapturer createCameraCapturer(CameraEnumerator enumerator) {
    final String[] deviceNames = enumerator.getDeviceNames();

    // First, try to find front facing camera
    Logging.d(TAG, "Looking for front facing cameras.");
    for (String deviceName : deviceNames) {
      if (enumerator.isFrontFacing(deviceName)) {
        Logging.d(TAG, "Creating front facing camera capturer.");
        VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);

        if (videoCapturer != null) {
          return videoCapturer;
        }
      }
    }

    // Front facing camera not found, try something else
    Logging.d(TAG, "Looking for other cameras.");
    for (String deviceName : deviceNames) {
      if (!enumerator.isFrontFacing(deviceName)) {
        Logging.d(TAG, "Creating other camera capturer.");
        VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);

        if (videoCapturer != null) {
          return videoCapturer;
        }
      }
    }

    return null;
  }

  // Activity interfaces
  @Override
  public void onPause() {
@@ -435,8 +353,8 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEven

  private void updateVideoView() {
    remoteRenderLayout.setPosition(REMOTE_X, REMOTE_Y, REMOTE_WIDTH, REMOTE_HEIGHT);
    remoteRenderScreen.setScalingType(scalingType);
    remoteRenderScreen.setMirror(false);
    remoteRender.setScalingType(scalingType);
    remoteRender.setMirror(false);

    if (iceConnected) {
      localRenderLayout.setPosition(
@@ -450,7 +368,7 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEven
    localRender.setMirror(true);

    localRender.requestLayout();
    remoteRenderScreen.requestLayout();
    remoteRender.requestLayout();
  }

  private void startCall() {
@@ -514,13 +432,9 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEven
      localRender.release();
      localRender = null;
    }
    if (videoFileRenderer != null) {
      videoFileRenderer.release();
      videoFileRenderer = null;
    }
    if (remoteRenderScreen != null) {
      remoteRenderScreen.release();
      remoteRenderScreen = null;
    if (remoteRender != null) {
      remoteRender.release();
      remoteRender = null;
    }
    if (audioManager != null) {
      audioManager.close();
@@ -578,35 +492,6 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEven
    });
  }

  private VideoCapturer createVideoCapturer() {
    VideoCapturer videoCapturer = null;
    String videoFileAsCamera = getIntent().getStringExtra(EXTRA_VIDEO_FILE_AS_CAMERA);
    if (videoFileAsCamera != null) {
      try {
        videoCapturer = new FileVideoCapturer(videoFileAsCamera);
      } catch (IOException e) {
        reportError("Failed to open video file for emulated camera");
        return null;
      }
    } else if (useCamera2()) {
      if (!captureToTexture()) {
        reportError(getString(R.string.camera2_texture_only_error));
        return null;
      }

      Logging.d(TAG, "Creating capturer using camera2 API.");
      videoCapturer = createCameraCapturer(new Camera2Enumerator(this));
    } else {
      Logging.d(TAG, "Creating capturer using camera1 API.");
      videoCapturer = createCameraCapturer(new Camera1Enumerator(captureToTexture()));
    }
    if (videoCapturer == null) {
      reportError("Failed to open camera");
      return null;
    }
    return videoCapturer;
  }

  // -----Implementation of AppRTCClient.AppRTCSignalingEvents ---------------
  // All callbacks are invoked from websocket signaling looper thread and
  // are routed to UI thread.
@@ -615,12 +500,8 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEven

        signalingParameters = params;
        logAndToast("Creating peer connection, delay=" + delta + "ms");
        VideoCapturer videoCapturer = null;
        if (peerConnectionParameters.videoCallEnabled) {
          videoCapturer = createVideoCapturer();
        }
        peerConnectionClient.createPeerConnection(rootEglBase.getEglBaseContext(), localRender,
            remoteRenderers, videoCapturer, signalingParameters);
        peerConnectionClient.createPeerConnection(
            rootEglBase.getEglBaseContext(), localRender, remoteRender, signalingParameters);

        if (signalingParameters.initiator) {
          logAndToast("Creating OFFER...");
ConnectActivity.java
@@ -144,10 +144,8 @@ public class ConnectActivity extends Activity {
    if ("android.intent.action.VIEW".equals(intent.getAction()) && !commandLineRun) {
      boolean loopback = intent.getBooleanExtra(CallActivity.EXTRA_LOOPBACK, false);
      int runTimeMs = intent.getIntExtra(CallActivity.EXTRA_RUNTIME, 0);
      boolean useValuesFromIntent =
          intent.getBooleanExtra(CallActivity.EXTRA_USE_VALUES_FROM_INTENT, false);
      String room = sharedPref.getString(keyprefRoom, "");
      connectToRoom(room, true, loopback, useValuesFromIntent, runTimeMs);
      connectToRoom(room, true, loopback, runTimeMs);
    }
  }

@@ -192,7 +190,7 @@ public class ConnectActivity extends Activity {
      startActivity(intent);
      return true;
    } else if (item.getItemId() == R.id.action_loopback) {
      connectToRoom(null, false, true, false, 0);
      connectToRoom(null, false, true, 0);
      return true;
    } else {
      return super.onOptionsItemSelected(item);
@@ -245,42 +243,8 @@ public class ConnectActivity extends Activity {
    }
  }

  /**
   * Get a value from the shared preference or from the intent, if it does not
   * exist the default is used.
   */
  private String sharedPrefGetString(
      int attributeId, String intentName, int defaultId, boolean useFromIntent) {
    String defaultValue = getString(defaultId);
    if (useFromIntent) {
      String value = getIntent().getStringExtra(intentName);
      if (value != null) {
        return value;
      }
      return defaultValue;
    } else {
      String attributeName = getString(attributeId);
      return sharedPref.getString(attributeName, defaultValue);
    }
  }

  /**
   * Get a value from the shared preference or from the intent, if it does not
   * exist the default is used.
   */
  private boolean sharedPrefGetBoolean(
      int attributeId, String intentName, int defaultId, boolean useFromIntent) {
    boolean defaultValue = Boolean.valueOf(getString(defaultId));
    if (useFromIntent) {
      return getIntent().getBooleanExtra(intentName, defaultValue);
    } else {
      String attributeName = getString(attributeId);
      return sharedPref.getBoolean(attributeName, defaultValue);
    }
  }

  private void connectToRoom(String roomId, boolean commandLineRun, boolean loopback,
      boolean useValuesFromIntent, int runTimeMs) {
  private void connectToRoom(
      String roomId, boolean commandLineRun, boolean loopback, int runTimeMs) {
    this.commandLineRun = commandLineRun;

    // roomId is random for loopback.
@@ -292,69 +256,58 @@ public class ConnectActivity extends Activity {
        keyprefRoomServerUrl, getString(R.string.pref_room_server_url_default));

    // Video call enabled flag.
    boolean videoCallEnabled = sharedPrefGetBoolean(R.string.pref_videocall_key,
        CallActivity.EXTRA_VIDEO_CALL, R.string.pref_videocall_default, useValuesFromIntent);
    boolean videoCallEnabled = sharedPref.getBoolean(
        keyprefVideoCallEnabled, Boolean.valueOf(getString(R.string.pref_videocall_default)));

    // Use Camera2 option.
    boolean useCamera2 = sharedPrefGetBoolean(R.string.pref_camera2_key, CallActivity.EXTRA_CAMERA2,
        R.string.pref_camera2_default, useValuesFromIntent);
    boolean useCamera2 = sharedPref.getBoolean(
        keyprefCamera2, Boolean.valueOf(getString(R.string.pref_camera2_default)));

    // Get default codecs.
    String videoCodec = sharedPrefGetString(R.string.pref_videocodec_key,
        CallActivity.EXTRA_VIDEOCODEC, R.string.pref_videocodec_default, useValuesFromIntent);
    String audioCodec = sharedPrefGetString(R.string.pref_audiocodec_key,
        CallActivity.EXTRA_AUDIOCODEC, R.string.pref_audiocodec_default, useValuesFromIntent);
    String videoCodec =
        sharedPref.getString(keyprefVideoCodec, getString(R.string.pref_videocodec_default));
    String audioCodec =
        sharedPref.getString(keyprefAudioCodec, getString(R.string.pref_audiocodec_default));

    // Check HW codec flag.
    boolean hwCodec = sharedPrefGetBoolean(R.string.pref_hwcodec_key,
        CallActivity.EXTRA_HWCODEC_ENABLED, R.string.pref_hwcodec_default, useValuesFromIntent);
    boolean hwCodec = sharedPref.getBoolean(
        keyprefHwCodecAcceleration, Boolean.valueOf(getString(R.string.pref_hwcodec_default)));

    // Check Capture to texture.
    boolean captureToTexture = sharedPrefGetBoolean(R.string.pref_capturetotexture_key,
        CallActivity.EXTRA_CAPTURETOTEXTURE_ENABLED, R.string.pref_capturetotexture_default,
        useValuesFromIntent);
    boolean captureToTexture = sharedPref.getBoolean(keyprefCaptureToTexture,
        Boolean.valueOf(getString(R.string.pref_capturetotexture_default)));

    // Check Disable Audio Processing flag.
    boolean noAudioProcessing = sharedPrefGetBoolean(R.string.pref_noaudioprocessing_key,
        CallActivity.EXTRA_NOAUDIOPROCESSING_ENABLED, R.string.pref_noaudioprocessing_default,
        useValuesFromIntent);
    boolean noAudioProcessing = sharedPref.getBoolean(keyprefNoAudioProcessingPipeline,
        Boolean.valueOf(getString(R.string.pref_noaudioprocessing_default)));

    // Check Disable Audio Processing flag.
    boolean aecDump = sharedPrefGetBoolean(R.string.pref_aecdump_key,
        CallActivity.EXTRA_AECDUMP_ENABLED, R.string.pref_aecdump_default, useValuesFromIntent);
    boolean aecDump = sharedPref.getBoolean(
        keyprefAecDump, Boolean.valueOf(getString(R.string.pref_aecdump_default)));

    // Check OpenSL ES enabled flag.
    boolean useOpenSLES = sharedPrefGetBoolean(R.string.pref_opensles_key,
        CallActivity.EXTRA_OPENSLES_ENABLED, R.string.pref_opensles_default, useValuesFromIntent);
    boolean useOpenSLES = sharedPref.getBoolean(
        keyprefOpenSLES, Boolean.valueOf(getString(R.string.pref_opensles_default)));

    // Check Disable built-in AEC flag.
    boolean disableBuiltInAEC = sharedPrefGetBoolean(R.string.pref_disable_built_in_aec_key,
        CallActivity.EXTRA_DISABLE_BUILT_IN_AEC, R.string.pref_disable_built_in_aec_default,
        useValuesFromIntent);
    boolean disableBuiltInAEC = sharedPref.getBoolean(keyprefDisableBuiltInAec,
        Boolean.valueOf(getString(R.string.pref_disable_built_in_aec_default)));

    // Check Disable built-in AGC flag.
    boolean disableBuiltInAGC = sharedPrefGetBoolean(R.string.pref_disable_built_in_agc_key,
        CallActivity.EXTRA_DISABLE_BUILT_IN_AGC, R.string.pref_disable_built_in_agc_default,
        useValuesFromIntent);
    boolean disableBuiltInAGC = sharedPref.getBoolean(keyprefDisableBuiltInAgc,
        Boolean.valueOf(getString(R.string.pref_disable_built_in_agc_default)));

    // Check Disable built-in NS flag.
    boolean disableBuiltInNS = sharedPrefGetBoolean(R.string.pref_disable_built_in_ns_key,
        CallActivity.EXTRA_DISABLE_BUILT_IN_NS, R.string.pref_disable_built_in_ns_default,
        useValuesFromIntent);
    boolean disableBuiltInNS = sharedPref.getBoolean(keyprefDisableBuiltInNs,
        Boolean.valueOf(getString(R.string.pref_disable_built_in_ns_default)));

    // Check Enable level control.
    boolean enableLevelControl = sharedPrefGetBoolean(R.string.pref_enable_level_control_key,
        CallActivity.EXTRA_ENABLE_LEVEL_CONTROL, R.string.pref_enable_level_control_key,
        useValuesFromIntent);
    boolean enableLevelControl = sharedPref.getBoolean(keyprefEnableLevelControl,
        Boolean.valueOf(getString(R.string.pref_enable_level_control_key)));

    // Get video resolution from settings.
    int videoWidth = 0;
    int videoHeight = 0;
    if (useValuesFromIntent) {
      videoWidth = getIntent().getIntExtra(CallActivity.EXTRA_VIDEO_WIDTH, 0);
      videoHeight = getIntent().getIntExtra(CallActivity.EXTRA_VIDEO_HEIGHT, 0);
    }
    if (videoWidth == 0 && videoHeight == 0) {
      String resolution =
          sharedPref.getString(keyprefResolution, getString(R.string.pref_resolution_default));
      String[] dimensions = resolution.split("[ x]+");
@@ -368,37 +321,25 @@ public class ConnectActivity extends Activity {
          Log.e(TAG, "Wrong video resolution setting: " + resolution);
        }
      }
    }

    // Get camera fps from settings.
    int cameraFps = 0;
    if (useValuesFromIntent) {
      cameraFps = getIntent().getIntExtra(CallActivity.EXTRA_VIDEO_FPS, 0);
    }
    if (cameraFps == 0) {
      String fps = sharedPref.getString(keyprefFps, getString(R.string.pref_fps_default));
      String[] fpsValues = fps.split("[ x]+");
      if (fpsValues.length == 2) {
        try {
          cameraFps = Integer.parseInt(fpsValues[0]);
        } catch (NumberFormatException e) {
          cameraFps = 0;
          Log.e(TAG, "Wrong camera fps setting: " + fps);
        }
      }
    }

    // Check capture quality slider flag.
    boolean captureQualitySlider = sharedPrefGetBoolean(R.string.pref_capturequalityslider_key,
        CallActivity.EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED,
        R.string.pref_capturequalityslider_default, useValuesFromIntent);
    boolean captureQualitySlider = sharedPref.getBoolean(keyprefCaptureQualitySlider,
        Boolean.valueOf(getString(R.string.pref_capturequalityslider_default)));

    // Get video and audio start bitrate.
    int videoStartBitrate = 0;
    if (useValuesFromIntent) {
      videoStartBitrate = getIntent().getIntExtra(CallActivity.EXTRA_VIDEO_BITRATE, 0);
    }
    if (videoStartBitrate == 0) {
      String bitrateTypeDefault = getString(R.string.pref_maxvideobitrate_default);
      String bitrateType = sharedPref.getString(keyprefVideoBitrateType, bitrateTypeDefault);
      if (!bitrateType.equals(bitrateTypeDefault)) {
@@ -406,28 +347,21 @@ public class ConnectActivity extends Activity {
            keyprefVideoBitrateValue, getString(R.string.pref_maxvideobitratevalue_default));
        videoStartBitrate = Integer.parseInt(bitrateValue);
      }
    }

    int audioStartBitrate = 0;
    if (useValuesFromIntent) {
      audioStartBitrate = getIntent().getIntExtra(CallActivity.EXTRA_AUDIO_BITRATE, 0);
    }
    if (audioStartBitrate == 0) {
      String bitrateTypeDefault = getString(R.string.pref_startaudiobitrate_default);
      String bitrateType = sharedPref.getString(keyprefAudioBitrateType, bitrateTypeDefault);
    bitrateTypeDefault = getString(R.string.pref_startaudiobitrate_default);
    bitrateType = sharedPref.getString(keyprefAudioBitrateType, bitrateTypeDefault);
      if (!bitrateType.equals(bitrateTypeDefault)) {
        String bitrateValue = sharedPref.getString(
            keyprefAudioBitrateValue, getString(R.string.pref_startaudiobitratevalue_default));
        audioStartBitrate = Integer.parseInt(bitrateValue);
      }
    }

    // Check statistics display option.
    boolean displayHud = sharedPrefGetBoolean(R.string.pref_displayhud_key,
        CallActivity.EXTRA_DISPLAY_HUD, R.string.pref_displayhud_default, useValuesFromIntent);
    boolean displayHud = sharedPref.getBoolean(
        keyprefDisplayHud, Boolean.valueOf(getString(R.string.pref_displayhud_default)));

    boolean tracing = sharedPrefGetBoolean(R.string.pref_tracing_key, CallActivity.EXTRA_TRACING,
        R.string.pref_tracing_default, useValuesFromIntent);
    boolean tracing = sharedPref.getBoolean(
        keyprefTracing, Boolean.valueOf(getString(R.string.pref_tracing_default)));

    // Start AppRTCMobile activity.
    Log.d(TAG, "Connecting to room " + roomId + " at URL " + roomUrl);
@@ -461,32 +395,6 @@ public class ConnectActivity extends Activity {
      intent.putExtra(CallActivity.EXTRA_CMDLINE, commandLineRun);
      intent.putExtra(CallActivity.EXTRA_RUNTIME, runTimeMs);

      if (useValuesFromIntent) {
        if (getIntent().hasExtra(CallActivity.EXTRA_VIDEO_FILE_AS_CAMERA)) {
          String videoFileAsCamera =
              getIntent().getStringExtra(CallActivity.EXTRA_VIDEO_FILE_AS_CAMERA);
          intent.putExtra(CallActivity.EXTRA_VIDEO_FILE_AS_CAMERA, videoFileAsCamera);
        }

        if (getIntent().hasExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE)) {
          String saveRemoteVideoToFile =
              getIntent().getStringExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE);
          intent.putExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE, saveRemoteVideoToFile);
        }

        if (getIntent().hasExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH)) {
          int videoOutWidth =
              getIntent().getIntExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH, 0);
          intent.putExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH, videoOutWidth);
        }

        if (getIntent().hasExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT)) {
          int videoOutHeight =
              getIntent().getIntExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT, 0);
          intent.putExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT, videoOutHeight);
        }
      }

      startActivityForResult(intent, CONNECTION_REQUEST);
    }
  }
@@ -516,7 +424,7 @@ public class ConnectActivity extends Activity {
    @Override
    public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) {
      String roomId = ((TextView) view).getText().toString();
      connectToRoom(roomId, false, false, false, 0);
      connectToRoom(roomId, false, false, 0);
    }
  };

@@ -534,7 +442,7 @@ public class ConnectActivity extends Activity {
  private final OnClickListener connectListener = new OnClickListener() {
    @Override
    public void onClick(View view) {
      connectToRoom(roomEditText.getText().toString(), false, false, false, 0);
      connectToRoom(roomEditText.getText().toString(), false, false, 0);
    }
  };
}
PeerConnectionClient.java
@@ -41,9 +41,6 @@ import org.webrtc.StatsObserver;
import org.webrtc.StatsReport;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoRenderer;
import org.webrtc.VideoCapturerAndroid;
import org.webrtc.CameraVideoCapturer;
import org.webrtc.FileVideoCapturer;
import org.webrtc.VideoSource;
import org.webrtc.VideoTrack;
import org.webrtc.voiceengine.WebRtcAudioManager;
@@ -51,10 +48,8 @@ import org.webrtc.voiceengine.WebRtcAudioUtils;

import java.io.File;
import java.io.IOException;
import java.util.Collections;
import java.util.EnumSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.Executors;
@@ -112,7 +107,7 @@ public class PeerConnectionClient {
  private boolean isError;
  private Timer statsTimer;
  private VideoRenderer.Callbacks localRender;
  private List<VideoRenderer.Callbacks> remoteRenders;
  private VideoRenderer.Callbacks remoteRender;
  private SignalingParameters signalingParameters;
  private MediaConstraints pcConstraints;
  private int videoWidth;
@@ -131,7 +126,7 @@ public class PeerConnectionClient {
  private SessionDescription localSdp; // either offer or answer SDP
  private MediaStream mediaStream;
  private int numberOfCameras;
  private VideoCapturer videoCapturer;
  private CameraVideoCapturer videoCapturer;
  // enableVideo is set to true if video should be rendered and sent.
  private boolean renderVideo;
  private VideoTrack localVideoTrack;
@@ -148,12 +143,14 @@ public class PeerConnectionClient {
    public final boolean videoCallEnabled;
    public final boolean loopback;
    public final boolean tracing;
    public final boolean useCamera2;
    public final int videoWidth;
    public final int videoHeight;
    public final int videoFps;
    public final int videoMaxBitrate;
    public final String videoCodec;
    public final boolean videoCodecHwAcceleration;
    public final boolean captureToTexture;
    public final int audioStartBitrate;
    public final String audioCodec;
    public final boolean noAudioProcessing;
@@ -165,11 +162,13 @@ public class PeerConnectionClient {
    public final boolean enableLevelControl;

    public PeerConnectionParameters(boolean videoCallEnabled, boolean loopback, boolean tracing,
        int videoWidth, int videoHeight, int videoFps, int videoMaxBitrate, String videoCodec,
        boolean videoCodecHwAcceleration, int audioStartBitrate, String audioCodec,
        boolean noAudioProcessing, boolean aecDump, boolean useOpenSLES, boolean disableBuiltInAEC,
        boolean disableBuiltInAGC, boolean disableBuiltInNS, boolean enableLevelControl) {
        boolean useCamera2, int videoWidth, int videoHeight, int videoFps, int videoMaxBitrate,
        String videoCodec, boolean videoCodecHwAcceleration, boolean captureToTexture,
        int audioStartBitrate, String audioCodec, boolean noAudioProcessing, boolean aecDump,
        boolean useOpenSLES, boolean disableBuiltInAEC, boolean disableBuiltInAGC,
        boolean disableBuiltInNS, boolean enableLevelControl) {
      this.videoCallEnabled = videoCallEnabled;
      this.useCamera2 = useCamera2;
      this.loopback = loopback;
      this.tracing = tracing;
      this.videoWidth = videoWidth;
@@ -178,6 +177,7 @@ public class PeerConnectionClient {
      this.videoMaxBitrate = videoMaxBitrate;
      this.videoCodec = videoCodec;
      this.videoCodecHwAcceleration = videoCodecHwAcceleration;
      this.captureToTexture = captureToTexture;
      this.audioStartBitrate = audioStartBitrate;
      this.audioCodec = audioCodec;
      this.noAudioProcessing = noAudioProcessing;
@@ -286,20 +286,13 @@ public class PeerConnectionClient {

  public void createPeerConnection(final EglBase.Context renderEGLContext,
      final VideoRenderer.Callbacks localRender, final VideoRenderer.Callbacks remoteRender,
      final VideoCapturer videoCapturer, final SignalingParameters signalingParameters) {
    createPeerConnection(renderEGLContext, localRender, Collections.singletonList(remoteRender),
        videoCapturer, signalingParameters);
  }
  public void createPeerConnection(final EglBase.Context renderEGLContext,
      final VideoRenderer.Callbacks localRender, final List<VideoRenderer.Callbacks> remoteRenders,
      final VideoCapturer videoCapturer, final SignalingParameters signalingParameters) {
      final SignalingParameters signalingParameters) {
    if (peerConnectionParameters == null) {
      Log.e(TAG, "Creating peer connection without initializing factory.");
      return;
    }
    this.localRender = localRender;
    this.remoteRenders = remoteRenders;
    this.videoCapturer = videoCapturer;
    this.remoteRender = remoteRender;
    this.signalingParameters = signalingParameters;
    executor.execute(new Runnable() {
      @Override
@@ -475,6 +468,36 @@ public class PeerConnectionClient {
    }
  }

  private void createCapturer(CameraEnumerator enumerator) {
    final String[] deviceNames = enumerator.getDeviceNames();

    // First, try to find front facing camera
    Logging.d(TAG, "Looking for front facing cameras.");
    for (String deviceName : deviceNames) {
      if (enumerator.isFrontFacing(deviceName)) {
        Logging.d(TAG, "Creating front facing camera capturer.");
        videoCapturer = enumerator.createCapturer(deviceName, null);

        if (videoCapturer != null) {
          return;
        }
      }
    }

    // Front facing camera not found, try something else
    Logging.d(TAG, "Looking for other cameras.");
    for (String deviceName : deviceNames) {
      if (!enumerator.isFrontFacing(deviceName)) {
        Logging.d(TAG, "Creating other camera capturer.");
        videoCapturer = enumerator.createCapturer(deviceName, null);

        if (videoCapturer != null) {
          return;
        }
      }
    }
  }

  private void createPeerConnectionInternal(EglBase.Context renderEGLContext) {
    if (factory == null || isError) {
      Log.e(TAG, "Peerconnection factory is not created");
@@ -511,6 +534,23 @@ public class PeerConnectionClient {

    mediaStream = factory.createLocalMediaStream("ARDAMS");
    if (videoCallEnabled) {
      if (peerConnectionParameters.useCamera2) {
        if (!peerConnectionParameters.captureToTexture) {
          reportError(context.getString(R.string.camera2_texture_only_error));
          return;
        }

        Logging.d(TAG, "Creating capturer using camera2 API.");
        createCapturer(new Camera2Enumerator(context));
      } else {
        Logging.d(TAG, "Creating capturer using camera1 API.");
        createCapturer(new Camera1Enumerator(peerConnectionParameters.captureToTexture));
      }

      if (videoCapturer == null) {
        reportError("Failed to open camera");
        return;
      }
      mediaStream.addTrack(createVideoTrack(videoCapturer));
    }

@@ -963,18 +1003,13 @@ public class PeerConnectionClient {
  }

  private void switchCameraInternal() {
    if (videoCapturer instanceof CameraVideoCapturer) {
      if (!videoCallEnabled || numberOfCameras < 2 || isError || videoCapturer == null) {
        Log.e(TAG, "Failed to switch camera. Video: " + videoCallEnabled + ". Error : " + isError
            + ". Number of cameras: " + numberOfCameras);
        return; // No video is sent or only one camera is available or error happened.
      }
      Log.d(TAG, "Switch camera");
      CameraVideoCapturer cameraVideoCapturer = (CameraVideoCapturer) videoCapturer;
      cameraVideoCapturer.switchCamera(null);
    } else {
      Log.d(TAG, "Will not switch camera, video caputurer is not a camera");
    }
    videoCapturer.switchCamera(null);
  }

  public void switchCamera() {
@@ -1074,11 +1109,9 @@ public class PeerConnectionClient {
        if (stream.videoTracks.size() == 1) {
          remoteVideoTrack = stream.videoTracks.get(0);
          remoteVideoTrack.setEnabled(renderVideo);
          for (VideoRenderer.Callbacks remoteRender : remoteRenders) {
            remoteVideoTrack.addRenderer(new VideoRenderer(remoteRender));
          }
        }
      }
    });
  }
@@ -1,124 +0,0 @@
# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.

from optparse import OptionParser
import random
import string
import subprocess
import sys
import time

from com.android.monkeyrunner import MonkeyRunner, MonkeyDevice

def main():
  parser = OptionParser()

  parser.add_option('--devname', dest='devname', help='The device id')

  parser.add_option(
      '--videooutsave',
      dest='videooutsave',
      help='The path where to save the video out file on local computer')

  parser.add_option(
      '--videoout',
      dest='videoout',
      help='The path where to put the video out file')

  parser.add_option(
      '--videoout_width',
      dest='videoout_width',
      type='int',
      help='The width for the video out file')

  parser.add_option(
      '--videoout_height',
      dest='videoout_height',
      type='int',
      help='The height for the video out file')

  parser.add_option(
      '--videoin',
      dest='videoin',
      help='The path where to read input file instead of camera')

  parser.add_option(
      '--call_length',
      dest='call_length',
      type='int',
      help='The length of the call')

  (options, args) = parser.parse_args()

  print (options, args)

  devname = options.devname

  videoin = options.videoin

  videoout = options.videoout
  videoout_width = options.videoout_width
  videoout_height = options.videoout_height

  videooutsave = options.videooutsave

  call_length = options.call_length or 10

  room = ''.join(random.choice(string.ascii_letters + string.digits)
      for _ in range(8))

  # Delete output video file.
  if videoout:
    subprocess.Popen(['adb', '-s', devname, 'shell', 'rm',
        videoout])

  device = MonkeyRunner.waitForConnection(2, devname)

  extras = {
      'org.appspot.apprtc.USE_VALUES_FROM_INTENT': True,
      'org.appspot.apprtc.AUDIOCODEC': 'OPUS',
      'org.appspot.apprtc.LOOPBACK': True,
      'org.appspot.apprtc.VIDEOCODEC': 'VP8',
      'org.appspot.apprtc.CAPTURETOTEXTURE': False,
      'org.appspot.apprtc.CAMERA2': False,
      'org.appspot.apprtc.ROOMID': room}

  if videoin:
    extras.update({'org.appspot.apprtc.VIDEO_FILE_AS_CAMERA': videoin})

  if videoout:
    extras.update({
        'org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE': videoout,
        'org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE_WIDTH': videoout_width,
        'org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT': videoout_height})

  print extras

  device.startActivity(data='https://appr.tc',
      action='android.intent.action.VIEW',
      component='org.appspot.apprtc/.ConnectActivity', extras=extras)

  print 'Running a call for %d seconds' % call_length
  for _ in xrange(call_length):
    sys.stdout.write('.')
    sys.stdout.flush()
    time.sleep(1)
  print '\nEnding call.'

  # Press back to end the call. Will end on both sides.
  device.press('KEYCODE_BACK', MonkeyDevice.DOWN_AND_UP)

  if videooutsave:
    time.sleep(2)

    subprocess.Popen(['adb', '-s', devname, 'pull',
        videoout, videooutsave])

if __name__ == '__main__':
  main()
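On the device side, each key in the script's extras dictionary arrives as an ordinary Android intent extra. A minimal sketch of how a receiving activity can read them; this is an assumed illustration reusing the key names from the script above, not the actual ConnectActivity code:

// Illustrative sketch, not part of the diff: reading the extras the
// monkeyrunner script sets. Key names come from the script above; the
// activity itself is a placeholder, not the real ConnectActivity.
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;

public class ExtrasReadingActivity extends Activity {
  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    Intent intent = getIntent();
    boolean loopback = intent.getBooleanExtra("org.appspot.apprtc.LOOPBACK", false);
    String videoFileAsCamera = intent.getStringExtra("org.appspot.apprtc.VIDEO_FILE_AS_CAMERA");
    String saveRemoteVideoToFile =
        intent.getStringExtra("org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE");
    int saveWidth = intent.getIntExtra("org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE_WIDTH", 0);
    int saveHeight = intent.getIntExtra("org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT", 0);
  }
}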
@@ -21,9 +21,7 @@ import android.test.InstrumentationTestCase;
import android.test.suitebuilder.annotation.SmallTest;
import android.util.Log;

import org.webrtc.Camera1Enumerator;
import org.webrtc.Camera2Enumerator;
import org.webrtc.CameraEnumerator;
import org.webrtc.EglBase;
import org.webrtc.IceCandidate;
import org.webrtc.MediaCodecVideoEncoder;
@@ -31,7 +29,6 @@ import org.webrtc.PeerConnection;
import org.webrtc.PeerConnectionFactory;
import org.webrtc.SessionDescription;
import org.webrtc.StatsReport;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoRenderer;

import java.util.LinkedList;
@@ -238,7 +235,7 @@ public class PeerConnectionClientTest

  PeerConnectionClient createPeerConnectionClient(MockRenderer localRenderer,
      MockRenderer remoteRenderer, PeerConnectionParameters peerConnectionParameters,
      VideoCapturer videoCapturer, EglBase.Context eglContext) {
      EglBase.Context eglContext) {
    List<PeerConnection.IceServer> iceServers = new LinkedList<PeerConnection.IceServer>();
    SignalingParameters signalingParameters =
        new SignalingParameters(iceServers, true, // iceServers, initiator.
@@ -252,8 +249,7 @@ public class PeerConnectionClientTest
    client.setPeerConnectionFactoryOptions(options);
    client.createPeerConnectionFactory(
        getInstrumentation().getTargetContext(), peerConnectionParameters, this);
    client.createPeerConnection(
        eglContext, localRenderer, remoteRenderer, videoCapturer, signalingParameters);
    client.createPeerConnection(eglContext, localRenderer, remoteRenderer, signalingParameters);
    client.createOffer();
    return client;
  }
@@ -264,12 +260,14 @@ public class PeerConnectionClientTest
        true, /* loopback */
        false, /* tracing */
        // Video codec parameters.
        true, /* useCamera2 */
        0, /* videoWidth */
        0, /* videoHeight */
        0, /* videoFps */
        0, /* videoStartBitrate */
        "", /* videoCodec */
        true, /* videoCodecHwAcceleration */
        false, /* captureToTexture */
        // Audio codec parameters.
        0, /* audioStartBitrate */
        "OPUS", /* audioCodec */
@@ -277,36 +275,27 @@ public class PeerConnectionClientTest
        false, /* aecDump */
        false /* useOpenSLES */, false /* disableBuiltInAEC */, false /* disableBuiltInAGC */,
        false /* disableBuiltInNS */, false /* enableLevelControl */);

    return peerConnectionParameters;
  }

  private VideoCapturer createCameraCapturer(boolean captureToTexture) {
  private PeerConnectionParameters createParametersForVideoCall(
      String videoCodec, boolean captureToTexture) {
    final boolean useCamera2 =
        captureToTexture && Camera2Enumerator.isSupported(getInstrumentation().getTargetContext());

    CameraEnumerator enumerator;
    if (useCamera2) {
      enumerator = new Camera2Enumerator(getInstrumentation().getTargetContext());
    } else {
      enumerator = new Camera1Enumerator(captureToTexture);
    }
    String deviceName = enumerator.getDeviceNames()[0];
    return enumerator.createCapturer(deviceName, null);
  }

  private PeerConnectionParameters createParametersForVideoCall(String videoCodec) {
    PeerConnectionParameters peerConnectionParameters =
        new PeerConnectionParameters(true, /* videoCallEnabled */
        true, /* loopback */
        false, /* tracing */
        // Video codec parameters.
        useCamera2, /* useCamera2 */
        0, /* videoWidth */
        0, /* videoHeight */
        0, /* videoFps */
        0, /* videoStartBitrate */
        videoCodec, /* videoCodec */
        true, /* videoCodecHwAcceleration */
        captureToTexture, /* captureToTexture */
        // Audio codec parameters.
        0, /* audioStartBitrate */
        "OPUS", /* audioCodec */
@@ -339,8 +327,7 @@ public class PeerConnectionClientTest
    Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally");
    MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
    pcClient = createPeerConnectionClient(localRenderer, new MockRenderer(0, null),
        createParametersForVideoCall(VIDEO_CODEC_VP8),
        createCameraCapturer(false /* captureToTexture */), null);
        createParametersForVideoCall(VIDEO_CODEC_VP8, false), null);

    // Wait for local SDP and ice candidates set events.
    assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
@@ -356,8 +343,8 @@ public class PeerConnectionClientTest
    Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally Done.");
  }

  private void doLoopbackTest(PeerConnectionParameters parameters, VideoCapturer videoCapturer,
      boolean decodeToTexture) throws InterruptedException {
  private void doLoopbackTest(PeerConnectionParameters parameters, boolean decodeToTexture)
      throws InterruptedException {
    loopback = true;
    MockRenderer localRenderer = null;
    MockRenderer remoteRenderer = null;
@@ -368,8 +355,8 @@ public class PeerConnectionClientTest
    } else {
      Log.d(TAG, "testLoopback for audio.");
    }
    pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, parameters, videoCapturer,
        decodeToTexture ? eglBase.getEglBaseContext() : null);
    pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, parameters,
        decodeToTexture ? eglBase.getEglBaseContext() : null);

    // Wait for local SDP, rename it to answer and set as remote SDP.
    assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
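The comment in this hunk describes the loopback trick the whole test file relies on: the local offer is relabeled as an answer and fed straight back as the remote description, so one client talks to itself. A hedged sketch under assumed fixture shapes; the helper and the `pcClient`/`localSdp` plumbing are stand-ins for what the test captures in onLocalDescription(), not the actual test code:

// Illustrative sketch, not part of the diff: loopback by relabeling the
// local offer as an answer. The offer's SDP body is reused verbatim.
import org.webrtc.SessionDescription;

class LoopbackSdp {
  static SessionDescription asAnswer(SessionDescription localOffer) {
    return new SessionDescription(SessionDescription.Type.ANSWER, localOffer.description);
  }
}

// Assumed usage: pcClient.setRemoteDescription(LoopbackSdp.asAnswer(localSdp));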
@@ -399,25 +386,22 @@ public class PeerConnectionClientTest

  @SmallTest
  public void testLoopbackAudio() throws InterruptedException {
    doLoopbackTest(createParametersForAudioCall(), null, false /* decodeToTexture */);
    doLoopbackTest(createParametersForAudioCall(), false);
  }

  @SmallTest
  public void testLoopbackVp8() throws InterruptedException {
    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8),
        createCameraCapturer(false /* captureToTexture */), false /* decodeToTexture */);
    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, false), false);
  }

  @SmallTest
  public void testLoopbackVp9() throws InterruptedException {
    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9),
        createCameraCapturer(false /* captureToTexture */), false /* decodeToTexture */);
    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9, false), false);
  }

  @SmallTest
  public void testLoopbackH264() throws InterruptedException {
    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264),
        createCameraCapturer(false /* captureToTexture */), false /* decodeToTexture */);
    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264, false), false);
  }

  @SmallTest
@@ -426,8 +410,7 @@ public class PeerConnectionClientTest
      Log.i(TAG, "Decode to textures is not supported, requires SDK version 19.");
      return;
    }
    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8),
        createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */);
    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, false), true);
  }

  @SmallTest
@@ -436,8 +419,7 @@ public class PeerConnectionClientTest
      Log.i(TAG, "Decode to textures is not supported, requires SDK version 19.");
      return;
    }
    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9),
        createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */);
    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9, false), true);
  }

  @SmallTest
@@ -446,8 +428,7 @@ public class PeerConnectionClientTest
      Log.i(TAG, "Decode to textures is not supported, requires SDK version 19.");
      return;
    }
    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264),
        createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */);
    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264, false), true);
  }

  @SmallTest
@@ -462,8 +443,7 @@ public class PeerConnectionClientTest
      Log.i(TAG, "VP8 encode to textures is not supported.");
      return;
    }
    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8),
        createCameraCapturer(true /* captureToTexture */), true /* decodeToTexture */);
    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, true), true);
  }

  // Test that a call can be setup even if the EGL context used during initialization is
@@ -477,11 +457,11 @@ public class PeerConnectionClientTest
    }

    loopback = true;
    PeerConnectionParameters parameters = createParametersForVideoCall(VIDEO_CODEC_VP8);
    PeerConnectionParameters parameters = createParametersForVideoCall(VIDEO_CODEC_VP8, true);
    MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
    MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);
    pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, parameters,
        createCameraCapturer(true /* captureToTexture */), eglBase.getEglBaseContext());
    pcClient = createPeerConnectionClient(
        localRenderer, remoteRenderer, parameters, eglBase.getEglBaseContext());

    // Wait for local SDP, rename it to answer and set as remote SDP.
    assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
@@ -521,8 +501,7 @@ public class PeerConnectionClientTest
      Log.i(TAG, "H264 encode to textures is not supported.");
      return;
    }
    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264),
        createCameraCapturer(true /* captureToTexture */), true /* decodeToTexture */);
    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264, true), true);
  }

  // Checks if default front camera can be switched to back camera and then
@@ -535,9 +514,8 @@ public class PeerConnectionClientTest
    MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
    MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);

    pcClient = createPeerConnectionClient(localRenderer, remoteRenderer,
        createParametersForVideoCall(VIDEO_CODEC_VP8),
        createCameraCapturer(false /* captureToTexture */), null);
    pcClient = createPeerConnectionClient(
        localRenderer, remoteRenderer, createParametersForVideoCall(VIDEO_CODEC_VP8, false), null);

    // Wait for local SDP, rename it to answer and set as remote SDP.
    assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
@@ -584,9 +562,8 @@ public class PeerConnectionClientTest
    MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
    MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);

    pcClient = createPeerConnectionClient(localRenderer, remoteRenderer,
        createParametersForVideoCall(VIDEO_CODEC_VP8),
        createCameraCapturer(false /* captureToTexture */), null);
    pcClient = createPeerConnectionClient(
        localRenderer, remoteRenderer, createParametersForVideoCall(VIDEO_CODEC_VP8, false), null);

    // Wait for local SDP, rename it to answer and set as remote SDP.
    assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
@@ -634,9 +611,8 @@ public class PeerConnectionClientTest
    MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
    MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);

    pcClient = createPeerConnectionClient(localRenderer, remoteRenderer,
        createParametersForVideoCall(VIDEO_CODEC_VP8),
        createCameraCapturer(false /* captureToTexture */), null);
    pcClient = createPeerConnectionClient(
        localRenderer, remoteRenderer, createParametersForVideoCall(VIDEO_CODEC_VP8, false), null);

    // Wait for local SDP, rename it to answer and set as remote SDP.
    assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));