Revert of Support for video file instead of camera and output video out to file (patchset #17 id:320001 of https://codereview.webrtc.org/2273573003/ )
Reason for revert: Breaks internal project.

Original issue's description:
> Support for video file instead of camera and output video out to file
>
> When video output to file is enabled, the remote video that is being recorded is
> not shown on screen.
>
> You can use this command line for file input and output:
> monkeyrunner ./webrtc/examples/androidapp/start_loopback_stubbed_camera_saved_video_out.py --devname 02157df28cd47001 --videoin /storage/emulated/0/reference_video_1280x720_30fps.y4m --videoout /storage/emulated/0/output.y4m --videoout_width 1280 --videoout_height 720 --videooutsave /tmp/out.y4m
>
> BUG=webrtc:6545
>
> Committed: https://crrev.com/44666997ca912705f8f96c9bd211e719525a3ccc
> Cr-Commit-Position: refs/heads/master@{#14660}

TBR=magjed@webrtc.org,sakal@webrtc.org,jansson@chromium.org,mandermo@google.com,mandermo@webrtc.org
# Skipping CQ checks because original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=webrtc:6545

Review-Url: https://codereview.webrtc.org/2425763003
Cr-Commit-Position: refs/heads/master@{#14664}
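For context on what is being backed out: the reverted CL let the AppRTC demo substitute a .y4m file for the camera and save the received remote video to a .y4m file. Below is a minimal sketch of how the two deleted classes fit together, assuming an already-created EglBase and the file paths from the monkeyrunner example above; the class and method names of the sketch itself are illustrative, and the real wiring is in the CallActivity hunks further down.

import org.webrtc.EglBase;
import org.webrtc.FileVideoCapturer;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoFileRenderer;

import java.io.IOException;

/** Sketch only; uses the two classes that this revert deletes. */
class FileLoopbackSketch {
  VideoCapturer capturer; // feeds y4m frames instead of a camera device
  VideoFileRenderer fileRenderer; // records the remote track to a y4m file

  void setUp(EglBase rootEglBase) throws IOException {
    // Same paths as the monkeyrunner example in the commit message.
    capturer = new FileVideoCapturer("/storage/emulated/0/reference_video_1280x720_30fps.y4m");
    fileRenderer = new VideoFileRenderer(
        "/storage/emulated/0/output.y4m", 1280, 720, rootEglBase.getEglBaseContext());
    // VideoFileRenderer implements VideoRenderer.Callbacks, so it is added to
    // the remote renderer list next to the on-screen SurfaceViewRenderer.
  }

  void tearDown() throws InterruptedException {
    capturer.stopCapture();
    capturer.dispose();
    fileRenderer.release(); // flushes pending frames and closes the file
  }
}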
BUILD.gn:
@@ -262,7 +262,6 @@ if (is_android && !build_with_chromium) {
     "android/java/src/org/webrtc/EglBase10.java",
     "android/java/src/org/webrtc/EglBase14.java",
     "android/java/src/org/webrtc/EglRenderer.java",
-    "android/java/src/org/webrtc/FileVideoCapturer.java",
     "android/java/src/org/webrtc/GlRectDrawer.java",
     "android/java/src/org/webrtc/GlShader.java",
     "android/java/src/org/webrtc/GlTextureFrameBuffer.java",
@@ -291,7 +290,6 @@ if (is_android && !build_with_chromium) {
     "android/java/src/org/webrtc/SurfaceViewRenderer.java",
     "android/java/src/org/webrtc/VideoCapturer.java",
     "android/java/src/org/webrtc/VideoCapturerAndroid.java",
-    "android/java/src/org/webrtc/VideoFileRenderer.java",
     "android/java/src/org/webrtc/VideoRenderer.java",
     "android/java/src/org/webrtc/VideoRendererGui.java",
     "android/java/src/org/webrtc/VideoSource.java",
android/java/src/org/webrtc/FileVideoCapturer.java (deleted):
@@ -1,211 +0,0 @@
-/*
- * Copyright 2016 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc;
-
-import android.content.Context;
-import android.os.SystemClock;
-
-import java.util.concurrent.TimeUnit;
-import java.util.Timer;
-import java.util.TimerTask;
-import java.io.RandomAccessFile;
-import java.io.IOException;
-
-public class FileVideoCapturer implements VideoCapturer {
-  private interface VideoReader {
-    int getFrameWidth();
-    int getFrameHeight();
-    byte[] getNextFrame();
-    void close();
-  }
-
-  /**
-   * Read video data from file for the .y4m container.
-   */
-  private static class VideoReaderY4M implements VideoReader {
-    private final static String TAG = "VideoReaderY4M";
-    private final int frameWidth;
-    private final int frameHeight;
-    private final int frameSize;
-
-    // First char after header
-    private final long videoStart;
-
-    private static final String Y4M_FRAME_DELIMETER = "FRAME";
-
-    private final RandomAccessFile mediaFileStream;
-
-    public int getFrameWidth() {
-      return frameWidth;
-    }
-
-    public int getFrameHeight() {
-      return frameHeight;
-    }
-
-    public VideoReaderY4M(String file) throws IOException {
-      mediaFileStream = new RandomAccessFile(file, "r");
-      StringBuilder builder = new StringBuilder();
-      for (;;) {
-        int c = mediaFileStream.read();
-        if (c == -1) {
-          // End of file reached.
-          throw new RuntimeException("Found end of file before end of header for file: " + file);
-        }
-        if (c == '\n') {
-          // End of header found.
-          break;
-        }
-        builder.append((char) c);
-      }
-      videoStart = mediaFileStream.getFilePointer();
-      String header = builder.toString();
-      String[] headerTokens = header.split("[ ]");
-      Logging.d(TAG, "header: " + header + ", headerTokens" + headerTokens);
-      int w = 0;
-      int h = 0;
-      String colorSpace = "";
-      for (String tok : headerTokens) {
-        char c = tok.charAt(0);
-        switch (c) {
-          case 'W':
-            w = Integer.parseInt(tok.substring(1));
-            break;
-          case 'H':
-            h = Integer.parseInt(tok.substring(1));
-            break;
-          case 'C':
-            colorSpace = tok.substring(1);
-            break;
-        }
-      }
-      Logging.d(TAG, "Color space: " + colorSpace);
-      if (!colorSpace.equals("420")) {
-        throw new IllegalArgumentException("Does not support any other color space than I420");
-      }
-      if ((w % 2) == 1 || (h % 2) == 1) {
-        throw new IllegalArgumentException("Does not support odd width or height");
-      }
-      frameWidth = w;
-      frameHeight = h;
-      frameSize = w * h * 3 / 2;
-      Logging.d(TAG, "frame dim: (" + w + ", " + h + ") frameSize: " + frameSize);
-    }
-
-    public byte[] getNextFrame() {
-      byte[] frame = new byte[frameSize];
-      try {
-        byte[] frameDelim = new byte[Y4M_FRAME_DELIMETER.length() + 1];
-        if (mediaFileStream.read(frameDelim) < frameDelim.length) {
-          // We reach end of file, loop
-          mediaFileStream.seek(videoStart);
-          if (mediaFileStream.read(frameDelim) < frameDelim.length) {
-            throw new RuntimeException("Error looping video");
-          }
-        }
-        String frameDelimStr = new String(frameDelim);
-        if (!frameDelimStr.equals(Y4M_FRAME_DELIMETER + "\n")) {
-          throw new RuntimeException(
-              "Frames should be delimited by FRAME plus newline, found delimter was: '"
-              + frameDelimStr + "'");
-        }
-        mediaFileStream.readFully(frame);
-        byte[] nv21Frame = new byte[frameSize];
-        nativeI420ToNV21(frame, frameWidth, frameHeight, nv21Frame);
-        return nv21Frame;
-      } catch (IOException e) {
-        throw new RuntimeException(e);
-      }
-    }
-
-    public void close() {
-      try {
-        mediaFileStream.close();
-      } catch (IOException e) {
-        Logging.e(TAG, "Problem closing file", e);
-      }
-    }
-  }
-
-  private final static String TAG = "FileVideoCapturer";
-  private final VideoReader videoReader;
-  private CapturerObserver capturerObserver;
-  private final Timer timer = new Timer();
-
-  private final TimerTask tickTask = new TimerTask() {
-    @Override
-    public void run() {
-      tick();
-    }
-  };
-
-  private int getFrameWidth() {
-    return videoReader.getFrameWidth();
-  }
-
-  private int getFrameHeight() {
-    return videoReader.getFrameHeight();
-  }
-
-  public FileVideoCapturer(String inputFile) throws IOException {
-    try {
-      videoReader = new VideoReaderY4M(inputFile);
-    } catch (IOException e) {
-      Logging.d(TAG, "Could not open video file: " + inputFile);
-      throw e;
-    }
-  }
-
-  private byte[] getNextFrame() {
-    return videoReader.getNextFrame();
-  }
-
-  public void tick() {
-    final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
-
-    byte[] frameData = getNextFrame();
-    capturerObserver.onByteBufferFrameCaptured(
-        frameData, getFrameWidth(), getFrameHeight(), 0, captureTimeNs);
-  }
-
-  @Override
-  public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
-      CapturerObserver capturerObserver) {
-    this.capturerObserver = capturerObserver;
-  }
-
-  @Override
-  public void startCapture(int width, int height, int framerate) {
-    timer.schedule(tickTask, 0, 1000 / framerate);
-  }
-
-  @Override
-  public void stopCapture() throws InterruptedException {
-    timer.cancel();
-  }
-
-  @Override
-  public void changeCaptureFormat(int width, int height, int framerate) {
-    // Empty on purpose
-  }
-
-  @Override
-  public void dispose() {
-    videoReader.close();
-  }
-
-  @Override
-  public boolean isScreencast() {
-    return false;
-  }
-
-  public static native void nativeI420ToNV21(byte[] src, int width, int height, byte[] dst);
-}
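The deleted capturer handles only the simplest form of the y4m container: one text header line, then raw I420 frames each preceded by the literal FRAME delimiter. As a worked example (using the exact header string that VideoFileRenderer below writes), a 1280x720 stream looks like:

  YUV4MPEG2 C420 W1280 H720 Ip F30:1 A1:1\n
  FRAME\n
  1280 * 720 * 3 / 2 = 1382400 bytes of I420 data (Y plane, then U, then V)
  FRAME\n
  ...

The parser inspects only the W, H, and C header tokens, rejects anything other than C420 or even dimensions, loops back to the first frame on end of file, and converts each frame to NV21 through the nativeI420ToNV21 helper before handing it to the observer.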
SurfaceTextureHelper.java:
@@ -76,7 +76,7 @@ class SurfaceTextureHelper {
   }
 
   // State for YUV conversion, instantiated on demand.
-  static class YuvConverter {
+  static private class YuvConverter {
     private final EglBase eglBase;
     private final GlShader shader;
     private boolean released = false;
android/java/src/org/webrtc/VideoFileRenderer.java (deleted):
@@ -1,135 +0,0 @@
-/*
- * Copyright 2016 The WebRTC Project Authors. All rights reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-package org.webrtc;
-
-import android.os.Handler;
-import android.os.HandlerThread;
-
-import java.nio.ByteBuffer;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.util.concurrent.CountDownLatch;
-
-/**
- * Can be used to save the video frames to file.
- */
-public class VideoFileRenderer implements VideoRenderer.Callbacks {
-  private static final String TAG = "VideoFileRenderer";
-
-  private final SurfaceTextureHelper.YuvConverter yuvConverter;
-  private final HandlerThread renderThread;
-  private final Object handlerLock = new Object();
-  private final Handler renderThreadHandler;
-  private final FileOutputStream videoOutFile;
-  private final int outputFileWidth;
-  private final int outputFileHeight;
-  private final int outputFrameSize;
-  private final ByteBuffer outputFrameBuffer;
-
-  public VideoFileRenderer(String outputFile, int outputFileWidth, int outputFileHeight,
-      EglBase.Context sharedContext) throws IOException {
-    if ((outputFileWidth % 2) == 1 || (outputFileHeight % 2) == 1) {
-      throw new IllegalArgumentException("Does not support uneven width or height");
-    }
-    yuvConverter = new SurfaceTextureHelper.YuvConverter(sharedContext);
-
-    this.outputFileWidth = outputFileWidth;
-    this.outputFileHeight = outputFileHeight;
-
-    outputFrameSize = outputFileWidth * outputFileHeight * 3 / 2;
-    outputFrameBuffer = ByteBuffer.allocateDirect(outputFrameSize);
-
-    videoOutFile = new FileOutputStream(outputFile);
-    videoOutFile.write(
-        ("YUV4MPEG2 C420 W" + outputFileWidth + " H" + outputFileHeight + " Ip F30:1 A1:1\n")
-            .getBytes());
-
-    renderThread = new HandlerThread(TAG);
-    renderThread.start();
-    renderThreadHandler = new Handler(renderThread.getLooper());
-  }
-
-  @Override
-  public void renderFrame(final VideoRenderer.I420Frame frame) {
-    renderThreadHandler.post(new Runnable() {
-      @Override
-      public void run() {
-        renderFrameOnRenderThread(frame);
-      }
-    });
-  }
-
-  private void renderFrameOnRenderThread(VideoRenderer.I420Frame frame) {
-    final float frameAspectRatio = (float) frame.rotatedWidth() / (float) frame.rotatedHeight();
-
-    final float[] rotatedSamplingMatrix =
-        RendererCommon.rotateTextureMatrix(frame.samplingMatrix, frame.rotationDegree);
-    final float[] layoutMatrix = RendererCommon.getLayoutMatrix(
-        false, frameAspectRatio, (float) outputFileWidth / outputFileHeight);
-    final float[] texMatrix = RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
-
-    try {
-      videoOutFile.write("FRAME\n".getBytes());
-      if (!frame.yuvFrame) {
-        yuvConverter.convert(outputFrameBuffer, outputFileWidth, outputFileHeight, outputFileWidth,
-            frame.textureId, texMatrix);
-
-        int stride = outputFileWidth;
-        byte[] data = outputFrameBuffer.array();
-        int offset = outputFrameBuffer.arrayOffset();
-
-        // Write Y
-        videoOutFile.write(data, offset, outputFileWidth * outputFileHeight);
-
-        // Write U
-        for (int r = outputFileHeight; r < outputFileHeight * 3 / 2; ++r) {
-          videoOutFile.write(data, offset + r * stride, stride / 2);
-        }
-
-        // Write V
-        for (int r = outputFileHeight; r < outputFileHeight * 3 / 2; ++r) {
-          videoOutFile.write(data, offset + r * stride + stride / 2, stride / 2);
-        }
-      } else {
-        nativeI420Scale(frame.yuvPlanes[0], frame.yuvStrides[0], frame.yuvPlanes[1],
-            frame.yuvStrides[1], frame.yuvPlanes[2], frame.yuvStrides[2], frame.width, frame.height,
-            outputFrameBuffer, outputFileWidth, outputFileHeight);
-        videoOutFile.write(
-            outputFrameBuffer.array(), outputFrameBuffer.arrayOffset(), outputFrameSize);
-      }
-    } catch (IOException e) {
-      Logging.e(TAG, "Failed to write to file for video out");
-      throw new RuntimeException(e);
-    } finally {
-      VideoRenderer.renderFrameDone(frame);
-    }
-  }
-
-  public void release() {
-    final CountDownLatch cleanupBarrier = new CountDownLatch(1);
-    renderThreadHandler.post(new Runnable() {
-      @Override
-      public void run() {
-        try {
-          videoOutFile.close();
-        } catch (IOException e) {
-          Logging.d(TAG, "Error closing output video file");
-        }
-        cleanupBarrier.countDown();
-      }
-    });
-    ThreadUtils.awaitUninterruptibly(cleanupBarrier);
-    renderThread.quit();
-  }
-
-  public static native void nativeI420Scale(ByteBuffer srcY, int strideY, ByteBuffer srcU,
-      int strideU, ByteBuffer srcV, int strideV, int width, int height, ByteBuffer dst,
-      int dstWidth, int dstHeight);
-}
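A note on the texture path in the deleted renderer: the buffer filled by SurfaceTextureHelper.YuvConverter places the chroma samples in the rows below the Y plane, with each of those rows carrying the half-width U samples in its left half and the V samples in its right half. That is why both write loops walk rows outputFileHeight through outputFileHeight * 3 / 2 - 1 and emit stride / 2 bytes per row, U from offset r * stride and V from r * stride + stride / 2, which lays the data out as planar I420 in the file (for 1280x720: 921600 Y bytes, then 230400 U, then 230400 V).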
peerconnection_jni.cc:
@@ -43,8 +43,6 @@
 #include <memory>
 #include <utility>
 
-#include "third_party/libyuv/include/libyuv/convert_from.h"
-#include "third_party/libyuv/include/libyuv/scale.h"
 #include "webrtc/api/androidvideotracksource.h"
 #include "webrtc/api/android/jni/androidmediadecoder_jni.h"
 #include "webrtc/api/android/jni/androidmediaencoder_jni.h"
@@ -2005,77 +2003,6 @@ JOW(void, VideoRenderer_nativeCopyPlane)(
   }
 }
 
-JOW(void, FileVideoCapturer_nativeI420ToNV21)(
-    JNIEnv *jni, jclass, jbyteArray j_src_buffer, jint width, jint height,
-    jbyteArray j_dst_buffer) {
-  size_t src_size = jni->GetArrayLength(j_src_buffer);
-  size_t dst_size = jni->GetArrayLength(j_dst_buffer);
-  int src_stride = width;
-  int dst_stride = width;
-  RTC_CHECK_GE(src_size, src_stride * height * 3 / 2);
-  RTC_CHECK_GE(dst_size, dst_stride * height * 3 / 2);
-  uint8_t* src =
-      reinterpret_cast<uint8_t*>(jni->GetByteArrayElements(j_src_buffer, 0));
-  uint8_t* dst =
-      reinterpret_cast<uint8_t*>(jni->GetByteArrayElements(j_dst_buffer, 0));
-  uint8_t* src_y = src;
-  size_t src_stride_y = src_stride;
-  uint8_t* src_u = src + src_stride * height;
-  size_t src_stride_u = src_stride / 2;
-  uint8_t* src_v = src + src_stride * height * 5 / 4;
-  size_t src_stride_v = src_stride / 2;
-
-  uint8_t* dst_y = dst;
-  size_t dst_stride_y = dst_stride;
-  size_t dst_stride_uv = dst_stride;
-  uint8_t* dst_uv = dst + dst_stride * height;
-
-  libyuv::I420ToNV21(src_y, src_stride_y, src_u, src_stride_u, src_v,
-                     src_stride_v, dst_y, dst_stride_y, dst_uv, dst_stride_uv,
-                     width, height);
-}
-
-JOW(void, VideoFileRenderer_nativeI420Scale)(
-    JNIEnv *jni, jclass,
-    jobject j_src_buffer_y, jint j_src_stride_y,
-    jobject j_src_buffer_u, jint j_src_stride_u,
-    jobject j_src_buffer_v, jint j_src_stride_v,
-    jint width, jint height,
-    jbyteArray j_dst_buffer, jint dstWidth, jint dstHeight) {
-  size_t src_size_y = jni->GetDirectBufferCapacity(j_src_buffer_y);
-  size_t src_size_u = jni->GetDirectBufferCapacity(j_src_buffer_u);
-  size_t src_size_v = jni->GetDirectBufferCapacity(j_src_buffer_v);
-  size_t dst_size = jni->GetDirectBufferCapacity(j_dst_buffer);
-  int dst_stride = dstWidth;
-  RTC_CHECK_GE(src_size_y, j_src_stride_y * height);
-  RTC_CHECK_GE(src_size_u, j_src_stride_u * height / 4);
-  RTC_CHECK_GE(src_size_v, j_src_stride_v * height / 4);
-  RTC_CHECK_GE(dst_size, dst_stride * height * 3 / 2);
-  uint8_t* src_y =
-      reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_buffer_y));
-  uint8_t* src_u =
-      reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_buffer_u));
-  uint8_t* src_v =
-      reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_buffer_v));
-  uint8_t* dst =
-      reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_buffer));
-
-  uint8_t* dst_y = dst;
-  size_t dst_stride_y = dst_stride;
-  uint8_t* dst_u = dst + dst_stride * dstHeight;
-  size_t dst_stride_u = dst_stride / 2;
-  uint8_t* dst_v = dst + dst_stride * dstHeight * 5 / 4;
-  size_t dst_stride_v = dst_stride / 2;
-
-  int ret = libyuv::I420Scale(
-      src_y, j_src_stride_y, src_u, j_src_stride_u, src_v, j_src_stride_v,
-      width, height, dst_y, dst_stride_y, dst_u, dst_stride_u, dst_v,
-      dst_stride_v, dstWidth, dstHeight, libyuv::kFilterBilinear);
-  if (ret) {
-    LOG(LS_ERROR) << "Error scaling I420 frame: " << ret;
-  }
-}
-
 JOW(jstring, MediaStreamTrack_nativeId)(JNIEnv* jni, jclass, jlong j_p) {
   return JavaStringFromStdString(
       jni, reinterpret_cast<MediaStreamTrackInterface*>(j_p)->id());
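For reference, the two deleted JNI helpers differ in chroma layout: nativeI420ToNV21 produces a full-resolution Y plane followed by a single interleaved VU plane, which is why its chroma block starts at dst + dst_stride * height and keeps the full stride (dst_stride_uv == dst_stride), while nativeI420Scale stays planar, placing U at dst + dst_stride * dstHeight and V at dst + dst_stride * dstHeight * 5 / 4 with half the stride.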
CallActivity.java:
@@ -29,26 +29,14 @@ import android.view.Window;
 import android.view.WindowManager.LayoutParams;
 import android.widget.Toast;
 
-import java.io.IOException;
-import java.lang.RuntimeException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.webrtc.Camera1Enumerator;
 import org.webrtc.Camera2Enumerator;
-import org.webrtc.CameraEnumerator;
 import org.webrtc.EglBase;
-import org.webrtc.FileVideoCapturer;
-import org.webrtc.VideoFileRenderer;
 import org.webrtc.IceCandidate;
-import org.webrtc.Logging;
 import org.webrtc.PeerConnectionFactory;
 import org.webrtc.RendererCommon.ScalingType;
 import org.webrtc.SessionDescription;
 import org.webrtc.StatsReport;
 import org.webrtc.SurfaceViewRenderer;
-import org.webrtc.VideoCapturer;
-import org.webrtc.VideoRenderer;
 
 /**
  * Activity for peer connection call setup, call waiting
@@ -84,15 +72,6 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEvents {
   public static final String EXTRA_TRACING = "org.appspot.apprtc.TRACING";
   public static final String EXTRA_CMDLINE = "org.appspot.apprtc.CMDLINE";
   public static final String EXTRA_RUNTIME = "org.appspot.apprtc.RUNTIME";
-  public static final String EXTRA_VIDEO_FILE_AS_CAMERA = "org.appspot.apprtc.VIDEO_FILE_AS_CAMERA";
-  public static final String EXTRA_SAVE_REMOTE_VIDEO_TO_FILE =
-      "org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE";
-  public static final String EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH =
-      "org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE_WIDTH";
-  public static final String EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT =
-      "org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT";
-  public static final String EXTRA_USE_VALUES_FROM_INTENT =
-      "org.appspot.apprtc.USE_VALUES_FROM_INTENT";
   private static final String TAG = "CallRTCClient";
 
   // List of mandatory application permissions.
@@ -122,10 +101,7 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEvents {
   private AppRTCAudioManager audioManager = null;
   private EglBase rootEglBase;
   private SurfaceViewRenderer localRender;
-  private SurfaceViewRenderer remoteRenderScreen;
-  private VideoFileRenderer videoFileRenderer;
-  private final List<VideoRenderer.Callbacks> remoteRenderers =
-      new ArrayList<VideoRenderer.Callbacks>();
+  private SurfaceViewRenderer remoteRender;
   private PercentFrameLayout localRenderLayout;
   private PercentFrameLayout remoteRenderLayout;
   private ScalingType scalingType;
@@ -167,7 +143,7 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEvents {
 
     // Create UI controls.
     localRender = (SurfaceViewRenderer) findViewById(R.id.local_video_view);
-    remoteRenderScreen = (SurfaceViewRenderer) findViewById(R.id.remote_video_view);
+    remoteRender = (SurfaceViewRenderer) findViewById(R.id.remote_video_view);
     localRenderLayout = (PercentFrameLayout) findViewById(R.id.local_video_layout);
    remoteRenderLayout = (PercentFrameLayout) findViewById(R.id.remote_video_layout);
     callFragment = new CallFragment();
@@ -182,31 +158,12 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEvents {
     };
 
     localRender.setOnClickListener(listener);
-    remoteRenderScreen.setOnClickListener(listener);
-    remoteRenderers.add(remoteRenderScreen);
-
-    final Intent intent = getIntent();
+    remoteRender.setOnClickListener(listener);
 
     // Create video renderers.
     rootEglBase = EglBase.create();
     localRender.init(rootEglBase.getEglBaseContext(), null);
-    String saveRemoteVideoToFile = intent.getStringExtra(EXTRA_SAVE_REMOTE_VIDEO_TO_FILE);
-
-    // When saveRemoteVideoToFile is set we save the video from the remote to a file.
-    if (saveRemoteVideoToFile != null) {
-      int videoOutWidth = intent.getIntExtra(EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH, 0);
-      int videoOutHeight = intent.getIntExtra(EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT, 0);
-      try {
-        videoFileRenderer = new VideoFileRenderer(
-            saveRemoteVideoToFile, videoOutWidth, videoOutHeight, rootEglBase.getEglBaseContext());
-        remoteRenderers.add(videoFileRenderer);
-      } catch (IOException e) {
-        throw new RuntimeException(
-            "Failed to open video file for output: " + saveRemoteVideoToFile, e);
-      }
-    }
-    remoteRenderScreen.init(rootEglBase.getEglBaseContext(), null);
-
+    remoteRender.init(rootEglBase.getEglBaseContext(), null);
 
     localRender.setZOrderMediaOverlay(true);
     updateVideoView();
@@ -220,6 +177,8 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEvents {
       }
     }
 
+    // Get Intent parameters.
+    final Intent intent = getIntent();
     Uri roomUri = intent.getData();
     if (roomUri == null) {
       logAndToast(getString(R.string.missing_url));
@@ -228,10 +187,7 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEvents {
       finish();
       return;
     }
-
-    // Get Intent parameters.
     String roomId = intent.getStringExtra(EXTRA_ROOMID);
-    Log.d(TAG, "Room ID: " + roomId);
     if (roomId == null || roomId.length() == 0) {
       logAndToast(getString(R.string.missing_url));
       Log.e(TAG, "Incorrect room ID in intent!");
@@ -243,12 +199,16 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEvents {
     boolean loopback = intent.getBooleanExtra(EXTRA_LOOPBACK, false);
     boolean tracing = intent.getBooleanExtra(EXTRA_TRACING, false);
 
+    boolean useCamera2 =
+        Camera2Enumerator.isSupported(this) && intent.getBooleanExtra(EXTRA_CAMERA2, true);
+
     peerConnectionParameters =
         new PeerConnectionParameters(intent.getBooleanExtra(EXTRA_VIDEO_CALL, true), loopback,
-            tracing, intent.getIntExtra(EXTRA_VIDEO_WIDTH, 0),
+            tracing, useCamera2, intent.getIntExtra(EXTRA_VIDEO_WIDTH, 0),
             intent.getIntExtra(EXTRA_VIDEO_HEIGHT, 0), intent.getIntExtra(EXTRA_VIDEO_FPS, 0),
             intent.getIntExtra(EXTRA_VIDEO_BITRATE, 0), intent.getStringExtra(EXTRA_VIDEOCODEC),
             intent.getBooleanExtra(EXTRA_HWCODEC_ENABLED, true),
+            intent.getBooleanExtra(EXTRA_CAPTURETOTEXTURE_ENABLED, false),
             intent.getIntExtra(EXTRA_AUDIO_BITRATE, 0), intent.getStringExtra(EXTRA_AUDIOCODEC),
             intent.getBooleanExtra(EXTRA_NOAUDIOPROCESSING_ENABLED, false),
             intent.getBooleanExtra(EXTRA_AECDUMP_ENABLED, false),
@@ -260,8 +220,6 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEvents {
     commandLineRun = intent.getBooleanExtra(EXTRA_CMDLINE, false);
     runTimeMs = intent.getIntExtra(EXTRA_RUNTIME, 0);
 
-    Log.d(TAG, "VIDEO_FILE: '" + intent.getStringExtra(EXTRA_VIDEO_FILE_AS_CAMERA) + "'");
-
     // Create connection client. Use DirectRTCClient if room name is an IP otherwise use the
     // standard WebSocketRTCClient.
     if (loopback || !DirectRTCClient.IP_PATTERN.matcher(roomId).matches()) {
@@ -307,46 +265,6 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEvents {
         CallActivity.this, peerConnectionParameters, CallActivity.this);
   }
 
-  private boolean useCamera2() {
-    return Camera2Enumerator.isSupported(this) && getIntent().getBooleanExtra(EXTRA_CAMERA2, true);
-  }
-
-  private boolean captureToTexture() {
-    return getIntent().getBooleanExtra(EXTRA_CAPTURETOTEXTURE_ENABLED, false);
-  }
-
-  private VideoCapturer createCameraCapturer(CameraEnumerator enumerator) {
-    final String[] deviceNames = enumerator.getDeviceNames();
-
-    // First, try to find front facing camera
-    Logging.d(TAG, "Looking for front facing cameras.");
-    for (String deviceName : deviceNames) {
-      if (enumerator.isFrontFacing(deviceName)) {
-        Logging.d(TAG, "Creating front facing camera capturer.");
-        VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);
-
-        if (videoCapturer != null) {
-          return videoCapturer;
-        }
-      }
-    }
-
-    // Front facing camera not found, try something else
-    Logging.d(TAG, "Looking for other cameras.");
-    for (String deviceName : deviceNames) {
-      if (!enumerator.isFrontFacing(deviceName)) {
-        Logging.d(TAG, "Creating other camera capturer.");
-        VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);
-
-        if (videoCapturer != null) {
-          return videoCapturer;
-        }
-      }
-    }
-
-    return null;
-  }
-
   // Activity interfaces
   @Override
   public void onPause() {
@@ -435,8 +353,8 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEvents {
 
   private void updateVideoView() {
     remoteRenderLayout.setPosition(REMOTE_X, REMOTE_Y, REMOTE_WIDTH, REMOTE_HEIGHT);
-    remoteRenderScreen.setScalingType(scalingType);
-    remoteRenderScreen.setMirror(false);
+    remoteRender.setScalingType(scalingType);
+    remoteRender.setMirror(false);
 
     if (iceConnected) {
       localRenderLayout.setPosition(
@@ -450,7 +368,7 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEvents {
     localRender.setMirror(true);
 
     localRender.requestLayout();
-    remoteRenderScreen.requestLayout();
+    remoteRender.requestLayout();
   }
 
   private void startCall() {
@@ -514,13 +432,9 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEvents {
       localRender.release();
       localRender = null;
     }
-    if (videoFileRenderer != null) {
-      videoFileRenderer.release();
-      videoFileRenderer = null;
-    }
-    if (remoteRenderScreen != null) {
-      remoteRenderScreen.release();
-      remoteRenderScreen = null;
+    if (remoteRender != null) {
+      remoteRender.release();
+      remoteRender = null;
     }
     if (audioManager != null) {
       audioManager.close();
@@ -578,35 +492,6 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEvents {
     });
   }
 
-  private VideoCapturer createVideoCapturer() {
-    VideoCapturer videoCapturer = null;
-    String videoFileAsCamera = getIntent().getStringExtra(EXTRA_VIDEO_FILE_AS_CAMERA);
-    if (videoFileAsCamera != null) {
-      try {
-        videoCapturer = new FileVideoCapturer(videoFileAsCamera);
-      } catch (IOException e) {
-        reportError("Failed to open video file for emulated camera");
-        return null;
-      }
-    } else if (useCamera2()) {
-      if (!captureToTexture()) {
-        reportError(getString(R.string.camera2_texture_only_error));
-        return null;
-      }
-
-      Logging.d(TAG, "Creating capturer using camera2 API.");
-      videoCapturer = createCameraCapturer(new Camera2Enumerator(this));
-    } else {
-      Logging.d(TAG, "Creating capturer using camera1 API.");
-      videoCapturer = createCameraCapturer(new Camera1Enumerator(captureToTexture()));
-    }
-    if (videoCapturer == null) {
-      reportError("Failed to open camera");
-      return null;
-    }
-    return videoCapturer;
-  }
-
   // -----Implementation of AppRTCClient.AppRTCSignalingEvents ---------------
   // All callbacks are invoked from websocket signaling looper thread and
   // are routed to UI thread.
@@ -615,12 +500,8 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEvents {
 
     signalingParameters = params;
     logAndToast("Creating peer connection, delay=" + delta + "ms");
-    VideoCapturer videoCapturer = null;
-    if (peerConnectionParameters.videoCallEnabled) {
-      videoCapturer = createVideoCapturer();
-    }
-    peerConnectionClient.createPeerConnection(rootEglBase.getEglBaseContext(), localRender,
-        remoteRenderers, videoCapturer, signalingParameters);
+    peerConnectionClient.createPeerConnection(
+        rootEglBase.getEglBaseContext(), localRender, remoteRender, signalingParameters);
 
     if (signalingParameters.initiator) {
       logAndToast("Creating OFFER...");
ConnectActivity.java:
@@ -144,10 +144,8 @@ public class ConnectActivity extends Activity {
     if ("android.intent.action.VIEW".equals(intent.getAction()) && !commandLineRun) {
       boolean loopback = intent.getBooleanExtra(CallActivity.EXTRA_LOOPBACK, false);
       int runTimeMs = intent.getIntExtra(CallActivity.EXTRA_RUNTIME, 0);
-      boolean useValuesFromIntent =
-          intent.getBooleanExtra(CallActivity.EXTRA_USE_VALUES_FROM_INTENT, false);
       String room = sharedPref.getString(keyprefRoom, "");
-      connectToRoom(room, true, loopback, useValuesFromIntent, runTimeMs);
+      connectToRoom(room, true, loopback, runTimeMs);
     }
   }
 
@@ -192,7 +190,7 @@ public class ConnectActivity extends Activity {
       startActivity(intent);
       return true;
     } else if (item.getItemId() == R.id.action_loopback) {
-      connectToRoom(null, false, true, false, 0);
+      connectToRoom(null, false, true, 0);
       return true;
     } else {
       return super.onOptionsItemSelected(item);
@@ -245,42 +243,8 @@ public class ConnectActivity extends Activity {
     }
   }
 
-  /**
-   * Get a value from the shared preference or from the intent, if it does not
-   * exist the default is used.
-   */
-  private String sharedPrefGetString(
-      int attributeId, String intentName, int defaultId, boolean useFromIntent) {
-    String defaultValue = getString(defaultId);
-    if (useFromIntent) {
-      String value = getIntent().getStringExtra(intentName);
-      if (value != null) {
-        return value;
-      }
-      return defaultValue;
-    } else {
-      String attributeName = getString(attributeId);
-      return sharedPref.getString(attributeName, defaultValue);
-    }
-  }
-
-  /**
-   * Get a value from the shared preference or from the intent, if it does not
-   * exist the default is used.
-   */
-  private boolean sharedPrefGetBoolean(
-      int attributeId, String intentName, int defaultId, boolean useFromIntent) {
-    boolean defaultValue = Boolean.valueOf(getString(defaultId));
-    if (useFromIntent) {
-      return getIntent().getBooleanExtra(intentName, defaultValue);
-    } else {
-      String attributeName = getString(attributeId);
-      return sharedPref.getBoolean(attributeName, defaultValue);
-    }
-  }
-
-  private void connectToRoom(String roomId, boolean commandLineRun, boolean loopback,
-      boolean useValuesFromIntent, int runTimeMs) {
+  private void connectToRoom(
+      String roomId, boolean commandLineRun, boolean loopback, int runTimeMs) {
     this.commandLineRun = commandLineRun;
 
     // roomId is random for loopback.
@@ -292,69 +256,58 @@ public class ConnectActivity extends Activity {
         keyprefRoomServerUrl, getString(R.string.pref_room_server_url_default));
 
     // Video call enabled flag.
-    boolean videoCallEnabled = sharedPrefGetBoolean(R.string.pref_videocall_key,
-        CallActivity.EXTRA_VIDEO_CALL, R.string.pref_videocall_default, useValuesFromIntent);
+    boolean videoCallEnabled = sharedPref.getBoolean(
+        keyprefVideoCallEnabled, Boolean.valueOf(getString(R.string.pref_videocall_default)));
 
     // Use Camera2 option.
-    boolean useCamera2 = sharedPrefGetBoolean(R.string.pref_camera2_key, CallActivity.EXTRA_CAMERA2,
-        R.string.pref_camera2_default, useValuesFromIntent);
+    boolean useCamera2 = sharedPref.getBoolean(
+        keyprefCamera2, Boolean.valueOf(getString(R.string.pref_camera2_default)));
 
     // Get default codecs.
-    String videoCodec = sharedPrefGetString(R.string.pref_videocodec_key,
-        CallActivity.EXTRA_VIDEOCODEC, R.string.pref_videocodec_default, useValuesFromIntent);
-    String audioCodec = sharedPrefGetString(R.string.pref_audiocodec_key,
-        CallActivity.EXTRA_AUDIOCODEC, R.string.pref_audiocodec_default, useValuesFromIntent);
+    String videoCodec =
+        sharedPref.getString(keyprefVideoCodec, getString(R.string.pref_videocodec_default));
+    String audioCodec =
+        sharedPref.getString(keyprefAudioCodec, getString(R.string.pref_audiocodec_default));
 
     // Check HW codec flag.
-    boolean hwCodec = sharedPrefGetBoolean(R.string.pref_hwcodec_key,
-        CallActivity.EXTRA_HWCODEC_ENABLED, R.string.pref_hwcodec_default, useValuesFromIntent);
+    boolean hwCodec = sharedPref.getBoolean(
+        keyprefHwCodecAcceleration, Boolean.valueOf(getString(R.string.pref_hwcodec_default)));
 
     // Check Capture to texture.
-    boolean captureToTexture = sharedPrefGetBoolean(R.string.pref_capturetotexture_key,
-        CallActivity.EXTRA_CAPTURETOTEXTURE_ENABLED, R.string.pref_capturetotexture_default,
-        useValuesFromIntent);
+    boolean captureToTexture = sharedPref.getBoolean(keyprefCaptureToTexture,
+        Boolean.valueOf(getString(R.string.pref_capturetotexture_default)));
 
     // Check Disable Audio Processing flag.
-    boolean noAudioProcessing = sharedPrefGetBoolean(R.string.pref_noaudioprocessing_key,
-        CallActivity.EXTRA_NOAUDIOPROCESSING_ENABLED, R.string.pref_noaudioprocessing_default,
-        useValuesFromIntent);
+    boolean noAudioProcessing = sharedPref.getBoolean(keyprefNoAudioProcessingPipeline,
+        Boolean.valueOf(getString(R.string.pref_noaudioprocessing_default)));
 
     // Check Disable Audio Processing flag.
-    boolean aecDump = sharedPrefGetBoolean(R.string.pref_aecdump_key,
-        CallActivity.EXTRA_AECDUMP_ENABLED, R.string.pref_aecdump_default, useValuesFromIntent);
+    boolean aecDump = sharedPref.getBoolean(
+        keyprefAecDump, Boolean.valueOf(getString(R.string.pref_aecdump_default)));
 
     // Check OpenSL ES enabled flag.
-    boolean useOpenSLES = sharedPrefGetBoolean(R.string.pref_opensles_key,
-        CallActivity.EXTRA_OPENSLES_ENABLED, R.string.pref_opensles_default, useValuesFromIntent);
+    boolean useOpenSLES = sharedPref.getBoolean(
+        keyprefOpenSLES, Boolean.valueOf(getString(R.string.pref_opensles_default)));
 
     // Check Disable built-in AEC flag.
-    boolean disableBuiltInAEC = sharedPrefGetBoolean(R.string.pref_disable_built_in_aec_key,
-        CallActivity.EXTRA_DISABLE_BUILT_IN_AEC, R.string.pref_disable_built_in_aec_default,
-        useValuesFromIntent);
+    boolean disableBuiltInAEC = sharedPref.getBoolean(keyprefDisableBuiltInAec,
+        Boolean.valueOf(getString(R.string.pref_disable_built_in_aec_default)));
 
     // Check Disable built-in AGC flag.
-    boolean disableBuiltInAGC = sharedPrefGetBoolean(R.string.pref_disable_built_in_agc_key,
-        CallActivity.EXTRA_DISABLE_BUILT_IN_AGC, R.string.pref_disable_built_in_agc_default,
-        useValuesFromIntent);
+    boolean disableBuiltInAGC = sharedPref.getBoolean(keyprefDisableBuiltInAgc,
+        Boolean.valueOf(getString(R.string.pref_disable_built_in_agc_default)));
 
     // Check Disable built-in NS flag.
-    boolean disableBuiltInNS = sharedPrefGetBoolean(R.string.pref_disable_built_in_ns_key,
-        CallActivity.EXTRA_DISABLE_BUILT_IN_NS, R.string.pref_disable_built_in_ns_default,
-        useValuesFromIntent);
+    boolean disableBuiltInNS = sharedPref.getBoolean(keyprefDisableBuiltInNs,
+        Boolean.valueOf(getString(R.string.pref_disable_built_in_ns_default)));
 
     // Check Enable level control.
-    boolean enableLevelControl = sharedPrefGetBoolean(R.string.pref_enable_level_control_key,
-        CallActivity.EXTRA_ENABLE_LEVEL_CONTROL, R.string.pref_enable_level_control_key,
-        useValuesFromIntent);
+    boolean enableLevelControl = sharedPref.getBoolean(keyprefEnableLevelControl,
+        Boolean.valueOf(getString(R.string.pref_enable_level_control_key)));
 
     // Get video resolution from settings.
     int videoWidth = 0;
     int videoHeight = 0;
-    if (useValuesFromIntent) {
-      videoWidth = getIntent().getIntExtra(CallActivity.EXTRA_VIDEO_WIDTH, 0);
-      videoHeight = getIntent().getIntExtra(CallActivity.EXTRA_VIDEO_HEIGHT, 0);
-    }
-    if (videoWidth == 0 && videoHeight == 0) {
     String resolution =
         sharedPref.getString(keyprefResolution, getString(R.string.pref_resolution_default));
     String[] dimensions = resolution.split("[ x]+");
@@ -368,37 +321,25 @@ public class ConnectActivity extends Activity {
           Log.e(TAG, "Wrong video resolution setting: " + resolution);
         }
       }
-    }
 
     // Get camera fps from settings.
     int cameraFps = 0;
-    if (useValuesFromIntent) {
-      cameraFps = getIntent().getIntExtra(CallActivity.EXTRA_VIDEO_FPS, 0);
-    }
-    if (cameraFps == 0) {
     String fps = sharedPref.getString(keyprefFps, getString(R.string.pref_fps_default));
     String[] fpsValues = fps.split("[ x]+");
     if (fpsValues.length == 2) {
       try {
         cameraFps = Integer.parseInt(fpsValues[0]);
       } catch (NumberFormatException e) {
-        cameraFps = 0;
         Log.e(TAG, "Wrong camera fps setting: " + fps);
       }
     }
-    }
 
     // Check capture quality slider flag.
-    boolean captureQualitySlider = sharedPrefGetBoolean(R.string.pref_capturequalityslider_key,
-        CallActivity.EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED,
-        R.string.pref_capturequalityslider_default, useValuesFromIntent);
+    boolean captureQualitySlider = sharedPref.getBoolean(keyprefCaptureQualitySlider,
+        Boolean.valueOf(getString(R.string.pref_capturequalityslider_default)));
 
     // Get video and audio start bitrate.
     int videoStartBitrate = 0;
-    if (useValuesFromIntent) {
-      videoStartBitrate = getIntent().getIntExtra(CallActivity.EXTRA_VIDEO_BITRATE, 0);
-    }
-    if (videoStartBitrate == 0) {
     String bitrateTypeDefault = getString(R.string.pref_maxvideobitrate_default);
     String bitrateType = sharedPref.getString(keyprefVideoBitrateType, bitrateTypeDefault);
     if (!bitrateType.equals(bitrateTypeDefault)) {
@@ -406,28 +347,21 @@ public class ConnectActivity extends Activity {
           keyprefVideoBitrateValue, getString(R.string.pref_maxvideobitratevalue_default));
       videoStartBitrate = Integer.parseInt(bitrateValue);
     }
-    }
 
     int audioStartBitrate = 0;
-    if (useValuesFromIntent) {
-      audioStartBitrate = getIntent().getIntExtra(CallActivity.EXTRA_AUDIO_BITRATE, 0);
-    }
-    if (audioStartBitrate == 0) {
-      String bitrateTypeDefault = getString(R.string.pref_startaudiobitrate_default);
-      String bitrateType = sharedPref.getString(keyprefAudioBitrateType, bitrateTypeDefault);
+    bitrateTypeDefault = getString(R.string.pref_startaudiobitrate_default);
+    bitrateType = sharedPref.getString(keyprefAudioBitrateType, bitrateTypeDefault);
     if (!bitrateType.equals(bitrateTypeDefault)) {
       String bitrateValue = sharedPref.getString(
           keyprefAudioBitrateValue, getString(R.string.pref_startaudiobitratevalue_default));
       audioStartBitrate = Integer.parseInt(bitrateValue);
     }
-    }
 
     // Check statistics display option.
-    boolean displayHud = sharedPrefGetBoolean(R.string.pref_displayhud_key,
-        CallActivity.EXTRA_DISPLAY_HUD, R.string.pref_displayhud_default, useValuesFromIntent);
+    boolean displayHud = sharedPref.getBoolean(
+        keyprefDisplayHud, Boolean.valueOf(getString(R.string.pref_displayhud_default)));
 
-    boolean tracing = sharedPrefGetBoolean(R.string.pref_tracing_key, CallActivity.EXTRA_TRACING,
-        R.string.pref_tracing_default, useValuesFromIntent);
+    boolean tracing = sharedPref.getBoolean(
+        keyprefTracing, Boolean.valueOf(getString(R.string.pref_tracing_default)));
 
     // Start AppRTCMobile activity.
     Log.d(TAG, "Connecting to room " + roomId + " at URL " + roomUrl);
@@ -461,32 +395,6 @@ public class ConnectActivity extends Activity {
       intent.putExtra(CallActivity.EXTRA_CMDLINE, commandLineRun);
       intent.putExtra(CallActivity.EXTRA_RUNTIME, runTimeMs);
 
-      if (useValuesFromIntent) {
-        if (getIntent().hasExtra(CallActivity.EXTRA_VIDEO_FILE_AS_CAMERA)) {
-          String videoFileAsCamera =
-              getIntent().getStringExtra(CallActivity.EXTRA_VIDEO_FILE_AS_CAMERA);
-          intent.putExtra(CallActivity.EXTRA_VIDEO_FILE_AS_CAMERA, videoFileAsCamera);
-        }
-
-        if (getIntent().hasExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE)) {
-          String saveRemoteVideoToFile =
-              getIntent().getStringExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE);
-          intent.putExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE, saveRemoteVideoToFile);
-        }
-
-        if (getIntent().hasExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH)) {
-          int videoOutWidth =
-              getIntent().getIntExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH, 0);
-          intent.putExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH, videoOutWidth);
-        }
-
-        if (getIntent().hasExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT)) {
-          int videoOutHeight =
-              getIntent().getIntExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT, 0);
-          intent.putExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT, videoOutHeight);
-        }
-      }
-
       startActivityForResult(intent, CONNECTION_REQUEST);
     }
   }
@@ -516,7 +424,7 @@ public class ConnectActivity extends Activity {
     @Override
     public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) {
       String roomId = ((TextView) view).getText().toString();
-      connectToRoom(roomId, false, false, false, 0);
+      connectToRoom(roomId, false, false, 0);
    }
  };
 
@@ -534,7 +442,7 @@ public class ConnectActivity extends Activity {
   private final OnClickListener connectListener = new OnClickListener() {
     @Override
     public void onClick(View view) {
-      connectToRoom(roomEditText.getText().toString(), false, false, false, 0);
+      connectToRoom(roomEditText.getText().toString(), false, false, 0);
     }
   };
 }
PeerConnectionClient.java:
@@ -41,9 +41,6 @@ import org.webrtc.StatsObserver;
 import org.webrtc.StatsReport;
 import org.webrtc.VideoCapturer;
 import org.webrtc.VideoRenderer;
-import org.webrtc.VideoCapturerAndroid;
-import org.webrtc.CameraVideoCapturer;
-import org.webrtc.FileVideoCapturer;
 import org.webrtc.VideoSource;
 import org.webrtc.VideoTrack;
 import org.webrtc.voiceengine.WebRtcAudioManager;
@@ -51,10 +48,8 @@ import org.webrtc.voiceengine.WebRtcAudioUtils;
 
 import java.io.File;
 import java.io.IOException;
-import java.util.Collections;
 import java.util.EnumSet;
 import java.util.LinkedList;
-import java.util.List;
 import java.util.Timer;
 import java.util.TimerTask;
 import java.util.concurrent.Executors;
@@ -112,7 +107,7 @@ public class PeerConnectionClient {
   private boolean isError;
   private Timer statsTimer;
   private VideoRenderer.Callbacks localRender;
-  private List<VideoRenderer.Callbacks> remoteRenders;
+  private VideoRenderer.Callbacks remoteRender;
   private SignalingParameters signalingParameters;
   private MediaConstraints pcConstraints;
   private int videoWidth;
@@ -131,7 +126,7 @@ public class PeerConnectionClient {
   private SessionDescription localSdp; // either offer or answer SDP
   private MediaStream mediaStream;
   private int numberOfCameras;
-  private VideoCapturer videoCapturer;
+  private CameraVideoCapturer videoCapturer;
   // enableVideo is set to true if video should be rendered and sent.
   private boolean renderVideo;
   private VideoTrack localVideoTrack;
@@ -148,12 +143,14 @@ public class PeerConnectionClient {
     public final boolean videoCallEnabled;
     public final boolean loopback;
     public final boolean tracing;
+    public final boolean useCamera2;
     public final int videoWidth;
     public final int videoHeight;
     public final int videoFps;
     public final int videoMaxBitrate;
     public final String videoCodec;
     public final boolean videoCodecHwAcceleration;
+    public final boolean captureToTexture;
     public final int audioStartBitrate;
     public final String audioCodec;
     public final boolean noAudioProcessing;
@@ -165,11 +162,13 @@ public class PeerConnectionClient {
     public final boolean enableLevelControl;
 
     public PeerConnectionParameters(boolean videoCallEnabled, boolean loopback, boolean tracing,
-        int videoWidth, int videoHeight, int videoFps, int videoMaxBitrate, String videoCodec,
-        boolean videoCodecHwAcceleration, int audioStartBitrate, String audioCodec,
-        boolean noAudioProcessing, boolean aecDump, boolean useOpenSLES, boolean disableBuiltInAEC,
-        boolean disableBuiltInAGC, boolean disableBuiltInNS, boolean enableLevelControl) {
+        boolean useCamera2, int videoWidth, int videoHeight, int videoFps, int videoMaxBitrate,
+        String videoCodec, boolean videoCodecHwAcceleration, boolean captureToTexture,
+        int audioStartBitrate, String audioCodec, boolean noAudioProcessing, boolean aecDump,
+        boolean useOpenSLES, boolean disableBuiltInAEC, boolean disableBuiltInAGC,
+        boolean disableBuiltInNS, boolean enableLevelControl) {
       this.videoCallEnabled = videoCallEnabled;
+      this.useCamera2 = useCamera2;
       this.loopback = loopback;
       this.tracing = tracing;
       this.videoWidth = videoWidth;
@@ -178,6 +177,7 @@ public class PeerConnectionClient {
       this.videoMaxBitrate = videoMaxBitrate;
       this.videoCodec = videoCodec;
       this.videoCodecHwAcceleration = videoCodecHwAcceleration;
+      this.captureToTexture = captureToTexture;
       this.audioStartBitrate = audioStartBitrate;
       this.audioCodec = audioCodec;
       this.noAudioProcessing = noAudioProcessing;
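With useCamera2 and captureToTexture restored, PeerConnectionParameters takes twenty positional arguments again. An illustrative construction (the values are made up; the inline labels follow the test code later in this diff):

    PeerConnectionParameters parameters = new PeerConnectionParameters(
        true, /* videoCallEnabled */
        false, /* loopback */
        false, /* tracing */
        true, /* useCamera2 */
        1280, /* videoWidth */
        720, /* videoHeight */
        30, /* videoFps */
        1700, /* videoMaxBitrate */
        "VP8", /* videoCodec */
        true, /* videoCodecHwAcceleration */
        true, /* captureToTexture */
        32, /* audioStartBitrate */
        "OPUS", /* audioCodec */
        false, /* noAudioProcessing */
        false, /* aecDump */
        false, /* useOpenSLES */
        false, /* disableBuiltInAEC */
        false, /* disableBuiltInAGC */
        false, /* disableBuiltInNS */
        false /* enableLevelControl */);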
@@ -286,20 +286,13 @@ public class PeerConnectionClient {
 
   public void createPeerConnection(final EglBase.Context renderEGLContext,
       final VideoRenderer.Callbacks localRender, final VideoRenderer.Callbacks remoteRender,
-      final VideoCapturer videoCapturer, final SignalingParameters signalingParameters) {
-    createPeerConnection(renderEGLContext, localRender, Collections.singletonList(remoteRender),
-        videoCapturer, signalingParameters);
-  }
-  public void createPeerConnection(final EglBase.Context renderEGLContext,
-      final VideoRenderer.Callbacks localRender, final List<VideoRenderer.Callbacks> remoteRenders,
-      final VideoCapturer videoCapturer, final SignalingParameters signalingParameters) {
+      final SignalingParameters signalingParameters) {
     if (peerConnectionParameters == null) {
       Log.e(TAG, "Creating peer connection without initializing factory.");
       return;
     }
     this.localRender = localRender;
-    this.remoteRenders = remoteRenders;
-    this.videoCapturer = videoCapturer;
+    this.remoteRender = remoteRender;
     this.signalingParameters = signalingParameters;
     executor.execute(new Runnable() {
       @Override
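After the revert the client owns capturer creation, so callers pass one remote renderer and no capturer. The call shape, as it appears in the test change later in this diff (client and eglBase stand in for a PeerConnectionClient instance and its EglBase):

    client.createPeerConnection(
        eglBase.getEglBaseContext(), localRender, remoteRender, signalingParameters);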
@@ -475,6 +468,36 @@ public class PeerConnectionClient {
     }
   }
 
+  private void createCapturer(CameraEnumerator enumerator) {
+    final String[] deviceNames = enumerator.getDeviceNames();
+
+    // First, try to find front facing camera
+    Logging.d(TAG, "Looking for front facing cameras.");
+    for (String deviceName : deviceNames) {
+      if (enumerator.isFrontFacing(deviceName)) {
+        Logging.d(TAG, "Creating front facing camera capturer.");
+        videoCapturer = enumerator.createCapturer(deviceName, null);
+
+        if (videoCapturer != null) {
+          return;
+        }
+      }
+    }
+
+    // Front facing camera not found, try something else
+    Logging.d(TAG, "Looking for other cameras.");
+    for (String deviceName : deviceNames) {
+      if (!enumerator.isFrontFacing(deviceName)) {
+        Logging.d(TAG, "Creating other camera capturer.");
+        videoCapturer = enumerator.createCapturer(deviceName, null);
+
+        if (videoCapturer != null) {
+          return;
+        }
+      }
+    }
+  }
+
   private void createPeerConnectionInternal(EglBase.Context renderEGLContext) {
     if (factory == null || isError) {
       Log.e(TAG, "Peerconnection factory is not created");
@@ -511,6 +534,23 @@ public class PeerConnectionClient {
 
     mediaStream = factory.createLocalMediaStream("ARDAMS");
     if (videoCallEnabled) {
+      if (peerConnectionParameters.useCamera2) {
+        if (!peerConnectionParameters.captureToTexture) {
+          reportError(context.getString(R.string.camera2_texture_only_error));
+          return;
+        }
+
+        Logging.d(TAG, "Creating capturer using camera2 API.");
+        createCapturer(new Camera2Enumerator(context));
+      } else {
+        Logging.d(TAG, "Creating capturer using camera1 API.");
+        createCapturer(new Camera1Enumerator(peerConnectionParameters.captureToTexture));
+      }
+
+      if (videoCapturer == null) {
+        reportError("Failed to open camera");
+        return;
+      }
       mediaStream.addTrack(createVideoTrack(videoCapturer));
     }
 
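The production path above keys off peerConnectionParameters.useCamera2; a condensed sketch of the same enumerator choice guarded instead by Camera2Enumerator.isSupported(), the runtime check used in the test changes below (context stands in for an Android Context):

    // Sketch: prefer the camera2 API where the platform supports it; fall back to camera1.
    CameraEnumerator enumerator;
    if (Camera2Enumerator.isSupported(context)) {
      enumerator = new Camera2Enumerator(context);
    } else {
      enumerator = new Camera1Enumerator(true /* captureToTexture */);
    }
    createCapturer(enumerator);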
@@ -963,18 +1003,13 @@ public class PeerConnectionClient {
   }
 
   private void switchCameraInternal() {
-    if (videoCapturer instanceof CameraVideoCapturer) {
-      if (!videoCallEnabled || numberOfCameras < 2 || isError || videoCapturer == null) {
-        Log.e(TAG, "Failed to switch camera. Video: " + videoCallEnabled + ". Error : " + isError
-            + ". Number of cameras: " + numberOfCameras);
-        return; // No video is sent or only one camera is available or error happened.
-      }
-      Log.d(TAG, "Switch camera");
-      CameraVideoCapturer cameraVideoCapturer = (CameraVideoCapturer) videoCapturer;
-      cameraVideoCapturer.switchCamera(null);
-    } else {
-      Log.d(TAG, "Will not switch camera, video caputurer is not a camera");
-    }
+    if (!videoCallEnabled || numberOfCameras < 2 || isError || videoCapturer == null) {
+      Log.e(TAG, "Failed to switch camera. Video: " + videoCallEnabled + ". Error : " + isError
+          + ". Number of cameras: " + numberOfCameras);
+      return; // No video is sent or only one camera is available or error happened.
+    }
+    Log.d(TAG, "Switch camera");
+    videoCapturer.switchCamera(null);
   }
 
   public void switchCamera() {
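switchCamera(null) requests the switch without observing the result. A caller that needs the outcome can pass a CameraVideoCapturer.CameraSwitchHandler instead; a sketch, assuming the handler interface of this era of the SDK:

    videoCapturer.switchCamera(new CameraVideoCapturer.CameraSwitchHandler() {
      @Override
      public void onCameraSwitchDone(boolean isFrontCamera) {
        Log.d(TAG, "Camera switch done. Front camera: " + isFrontCamera);
      }

      @Override
      public void onCameraSwitchError(String errorDescription) {
        Log.e(TAG, "Camera switch error: " + errorDescription);
      }
    });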
@@ -1074,11 +1109,9 @@ public class PeerConnectionClient {
           if (stream.videoTracks.size() == 1) {
             remoteVideoTrack = stream.videoTracks.get(0);
             remoteVideoTrack.setEnabled(renderVideo);
-            for (VideoRenderer.Callbacks remoteRender : remoteRenders) {
-              remoteVideoTrack.addRenderer(new VideoRenderer(remoteRender));
-            }
+            remoteVideoTrack.addRenderer(new VideoRenderer(remoteRender));
           }
         }
       });
     }
 
@@ -1,124 +0,0 @@
-# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-
-from optparse import OptionParser
-import random
-import string
-import subprocess
-import sys
-import time
-
-from com.android.monkeyrunner import MonkeyRunner, MonkeyDevice
-
-def main():
-  parser = OptionParser()
-
-  parser.add_option('--devname', dest='devname', help='The device id')
-
-  parser.add_option(
-      '--videooutsave',
-      dest='videooutsave',
-      help='The path where to save the video out file on local computer')
-
-  parser.add_option(
-      '--videoout',
-      dest='videoout',
-      help='The path where to put the video out file')
-
-  parser.add_option(
-      '--videoout_width',
-      dest='videoout_width',
-      type='int',
-      help='The width for the video out file')
-
-  parser.add_option(
-      '--videoout_height',
-      dest='videoout_height',
-      type='int',
-      help='The height for the video out file')
-
-  parser.add_option(
-      '--videoin',
-      dest='videoin',
-      help='The path where to read input file instead of camera')
-
-  parser.add_option(
-      '--call_length',
-      dest='call_length',
-      type='int',
-      help='The length of the call')
-
-  (options, args) = parser.parse_args()
-
-  print (options, args)
-
-  devname = options.devname
-
-  videoin = options.videoin
-
-  videoout = options.videoout
-  videoout_width = options.videoout_width
-  videoout_height = options.videoout_height
-
-  videooutsave = options.videooutsave
-
-  call_length = options.call_length or 10
-
-  room = ''.join(random.choice(string.ascii_letters + string.digits)
-                 for _ in range(8))
-
-  # Delete output video file.
-  if videoout:
-    subprocess.Popen(['adb', '-s', devname, 'shell', 'rm',
-                      videoout])
-
-  device = MonkeyRunner.waitForConnection(2, devname)
-
-  extras = {
-      'org.appspot.apprtc.USE_VALUES_FROM_INTENT': True,
-      'org.appspot.apprtc.AUDIOCODEC': 'OPUS',
-      'org.appspot.apprtc.LOOPBACK': True,
-      'org.appspot.apprtc.VIDEOCODEC': 'VP8',
-      'org.appspot.apprtc.CAPTURETOTEXTURE': False,
-      'org.appspot.apprtc.CAMERA2': False,
-      'org.appspot.apprtc.ROOMID': room}
-
-  if videoin:
-    extras.update({'org.appspot.apprtc.VIDEO_FILE_AS_CAMERA': videoin})
-
-  if videoout:
-    extras.update({
-        'org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE': videoout,
-        'org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE_WIDTH': videoout_width,
-        'org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT': videoout_height})
-
-  print extras
-
-  device.startActivity(data='https://appr.tc',
-                       action='android.intent.action.VIEW',
-                       component='org.appspot.apprtc/.ConnectActivity', extras=extras)
-
-  print 'Running a call for %d seconds' % call_length
-  for _ in xrange(call_length):
-    sys.stdout.write('.')
-    sys.stdout.flush()
-    time.sleep(1)
-  print '\nEnding call.'
-
-  # Press back to end the call. Will end on both sides.
-  device.press('KEYCODE_BACK', MonkeyDevice.DOWN_AND_UP)
-
-  if videooutsave:
-    time.sleep(2)
-
-    subprocess.Popen(['adb', '-s', devname, 'pull',
-                      videoout, videooutsave])
-
-if __name__ == '__main__':
-  main()
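The deleted start_loopback_stubbed_camera_saved_video_out.py drives the call entirely through intent extras; each 'org.appspot.apprtc.*' key pairs with an EXTRA_* constant on the Java side. A sketch of how the file-as-camera extra was consumed (the guard and variable names are illustrative, not the exact CallActivity code):

    // Illustrative only: EXTRA_VIDEO_FILE_AS_CAMERA corresponds to the
    // 'org.appspot.apprtc.VIDEO_FILE_AS_CAMERA' key set by the deleted script.
    String videoFileAsCamera =
        getIntent().getStringExtra(CallActivity.EXTRA_VIDEO_FILE_AS_CAMERA);
    if (videoFileAsCamera != null) {
      // The reverted feature substituted a FileVideoCapturer for the hardware camera.
    }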
@@ -21,9 +21,7 @@ import android.test.InstrumentationTestCase;
 import android.test.suitebuilder.annotation.SmallTest;
 import android.util.Log;
 
-import org.webrtc.Camera1Enumerator;
 import org.webrtc.Camera2Enumerator;
-import org.webrtc.CameraEnumerator;
 import org.webrtc.EglBase;
 import org.webrtc.IceCandidate;
 import org.webrtc.MediaCodecVideoEncoder;
@@ -31,7 +29,6 @@ import org.webrtc.PeerConnection;
 import org.webrtc.PeerConnectionFactory;
 import org.webrtc.SessionDescription;
 import org.webrtc.StatsReport;
-import org.webrtc.VideoCapturer;
 import org.webrtc.VideoRenderer;
 
 import java.util.LinkedList;
@@ -238,7 +235,7 @@ public class PeerConnectionClientTest
 
   PeerConnectionClient createPeerConnectionClient(MockRenderer localRenderer,
       MockRenderer remoteRenderer, PeerConnectionParameters peerConnectionParameters,
-      VideoCapturer videoCapturer, EglBase.Context eglContext) {
+      EglBase.Context eglContext) {
     List<PeerConnection.IceServer> iceServers = new LinkedList<PeerConnection.IceServer>();
     SignalingParameters signalingParameters =
         new SignalingParameters(iceServers, true, // iceServers, initiator.
@@ -252,8 +249,7 @@ public class PeerConnectionClientTest
     client.setPeerConnectionFactoryOptions(options);
     client.createPeerConnectionFactory(
         getInstrumentation().getTargetContext(), peerConnectionParameters, this);
-    client.createPeerConnection(
-        eglContext, localRenderer, remoteRenderer, videoCapturer, signalingParameters);
+    client.createPeerConnection(eglContext, localRenderer, remoteRenderer, signalingParameters);
     client.createOffer();
     return client;
   }
@@ -264,12 +260,14 @@ public class PeerConnectionClientTest
         true, /* loopback */
         false, /* tracing */
         // Video codec parameters.
+        true, /* useCamera2 */
         0, /* videoWidth */
         0, /* videoHeight */
         0, /* videoFps */
         0, /* videoStartBitrate */
         "", /* videoCodec */
         true, /* videoCodecHwAcceleration */
+        false, /* captureToToTexture */
         // Audio codec parameters.
         0, /* audioStartBitrate */
         "OPUS", /* audioCodec */
@@ -277,36 +275,27 @@ public class PeerConnectionClientTest
         false, /* aecDump */
         false /* useOpenSLES */, false /* disableBuiltInAEC */, false /* disableBuiltInAGC */,
         false /* disableBuiltInNS */, false /* enableLevelControl */);
 
     return peerConnectionParameters;
   }
 
-  private VideoCapturer createCameraCapturer(boolean captureToTexture) {
+  private PeerConnectionParameters createParametersForVideoCall(
+      String videoCodec, boolean captureToTexture) {
     final boolean useCamera2 =
         captureToTexture && Camera2Enumerator.isSupported(getInstrumentation().getTargetContext());
 
-    CameraEnumerator enumerator;
-    if (useCamera2) {
-      enumerator = new Camera2Enumerator(getInstrumentation().getTargetContext());
-    } else {
-      enumerator = new Camera1Enumerator(captureToTexture);
-    }
-    String deviceName = enumerator.getDeviceNames()[0];
-    return enumerator.createCapturer(deviceName, null);
-  }
-
-  private PeerConnectionParameters createParametersForVideoCall(String videoCodec) {
     PeerConnectionParameters peerConnectionParameters =
         new PeerConnectionParameters(true, /* videoCallEnabled */
             true, /* loopback */
             false, /* tracing */
             // Video codec parameters.
+            useCamera2, /* useCamera2 */
             0, /* videoWidth */
             0, /* videoHeight */
             0, /* videoFps */
             0, /* videoStartBitrate */
             videoCodec, /* videoCodec */
             true, /* videoCodecHwAcceleration */
+            captureToTexture, /* captureToToTexture */
             // Audio codec parameters.
             0, /* audioStartBitrate */
             "OPUS", /* audioCodec */
@@ -339,8 +327,7 @@ public class PeerConnectionClientTest
     Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally");
     MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
     pcClient = createPeerConnectionClient(localRenderer, new MockRenderer(0, null),
-        createParametersForVideoCall(VIDEO_CODEC_VP8),
-        createCameraCapturer(false /* captureToTexture */), null);
+        createParametersForVideoCall(VIDEO_CODEC_VP8, false), null);
 
     // Wait for local SDP and ice candidates set events.
     assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
@@ -356,8 +343,8 @@ public class PeerConnectionClientTest
     Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally Done.");
   }
 
-  private void doLoopbackTest(PeerConnectionParameters parameters, VideoCapturer videoCapturer,
-      boolean decodeToTexture) throws InterruptedException {
+  private void doLoopbackTest(PeerConnectionParameters parameters, boolean decodeToTexure)
+      throws InterruptedException {
     loopback = true;
     MockRenderer localRenderer = null;
     MockRenderer remoteRenderer = null;
@@ -368,8 +355,8 @@ public class PeerConnectionClientTest
     } else {
       Log.d(TAG, "testLoopback for audio.");
     }
-    pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, parameters, videoCapturer,
-        decodeToTexture ? eglBase.getEglBaseContext() : null);
+    pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, parameters,
+        decodeToTexure ? eglBase.getEglBaseContext() : null);
 
     // Wait for local SDP, rename it to answer and set as remote SDP.
     assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
@@ -399,25 +386,22 @@ public class PeerConnectionClientTest
 
   @SmallTest
   public void testLoopbackAudio() throws InterruptedException {
-    doLoopbackTest(createParametersForAudioCall(), null, false /* decodeToTexture */);
+    doLoopbackTest(createParametersForAudioCall(), false);
   }
 
   @SmallTest
   public void testLoopbackVp8() throws InterruptedException {
-    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8),
-        createCameraCapturer(false /* captureToTexture */), false /* decodeToTexture */);
+    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, false), false);
   }
 
   @SmallTest
   public void testLoopbackVp9() throws InterruptedException {
-    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9),
-        createCameraCapturer(false /* captureToTexture */), false /* decodeToTexture */);
+    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9, false), false);
   }
 
   @SmallTest
   public void testLoopbackH264() throws InterruptedException {
-    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264),
-        createCameraCapturer(false /* captureToTexture */), false /* decodeToTexture */);
+    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264, false), false);
   }
 
   @SmallTest
@@ -426,8 +410,7 @@ public class PeerConnectionClientTest
       Log.i(TAG, "Decode to textures is not supported, requires SDK version 19.");
       return;
     }
-    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8),
-        createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */);
+    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, false), true);
   }
 
   @SmallTest
@@ -436,8 +419,7 @@ public class PeerConnectionClientTest
       Log.i(TAG, "Decode to textures is not supported, requires SDK version 19.");
      return;
     }
-    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9),
-        createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */);
+    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9, false), true);
   }
 
   @SmallTest
@@ -446,8 +428,7 @@ public class PeerConnectionClientTest
       Log.i(TAG, "Decode to textures is not supported, requires SDK version 19.");
       return;
     }
-    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264),
-        createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */);
+    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264, false), true);
   }
 
   @SmallTest
@@ -462,8 +443,7 @@ public class PeerConnectionClientTest
       Log.i(TAG, "VP8 encode to textures is not supported.");
       return;
     }
-    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8),
-        createCameraCapturer(true /* captureToTexture */), true /* decodeToTexture */);
+    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, true), true);
   }
 
   // Test that a call can be setup even if the EGL context used during initialization is
@@ -477,11 +457,11 @@ public class PeerConnectionClientTest
     }
 
     loopback = true;
-    PeerConnectionParameters parameters = createParametersForVideoCall(VIDEO_CODEC_VP8);
+    PeerConnectionParameters parameters = createParametersForVideoCall(VIDEO_CODEC_VP8, true);
     MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
     MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);
-    pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, parameters,
-        createCameraCapturer(true /* captureToTexture */), eglBase.getEglBaseContext());
+    pcClient = createPeerConnectionClient(
+        localRenderer, remoteRenderer, parameters, eglBase.getEglBaseContext());
 
     // Wait for local SDP, rename it to answer and set as remote SDP.
     assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
@@ -521,8 +501,7 @@ public class PeerConnectionClientTest
       Log.i(TAG, "H264 encode to textures is not supported.");
       return;
     }
-    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264),
-        createCameraCapturer(true /* captureToTexture */), true /* decodeToTexture */);
+    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264, true), true);
   }
 
   // Checks if default front camera can be switched to back camera and then
@@ -535,9 +514,8 @@ public class PeerConnectionClientTest
     MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
     MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);
 
-    pcClient = createPeerConnectionClient(localRenderer, remoteRenderer,
-        createParametersForVideoCall(VIDEO_CODEC_VP8),
-        createCameraCapturer(false /* captureToTexture */), null);
+    pcClient = createPeerConnectionClient(
+        localRenderer, remoteRenderer, createParametersForVideoCall(VIDEO_CODEC_VP8, false), null);
 
     // Wait for local SDP, rename it to answer and set as remote SDP.
     assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
@@ -584,9 +562,8 @@ public class PeerConnectionClientTest
     MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
     MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);
 
-    pcClient = createPeerConnectionClient(localRenderer, remoteRenderer,
-        createParametersForVideoCall(VIDEO_CODEC_VP8),
-        createCameraCapturer(false /* captureToTexture */), null);
+    pcClient = createPeerConnectionClient(
+        localRenderer, remoteRenderer, createParametersForVideoCall(VIDEO_CODEC_VP8, false), null);
 
     // Wait for local SDP, rename it to answer and set as remote SDP.
     assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
@@ -634,9 +611,8 @@ public class PeerConnectionClientTest
     MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
     MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);
 
-    pcClient = createPeerConnectionClient(localRenderer, remoteRenderer,
-        createParametersForVideoCall(VIDEO_CODEC_VP8),
-        createCameraCapturer(false /* captureToTexture */), null);
+    pcClient = createPeerConnectionClient(
+        localRenderer, remoteRenderer, createParametersForVideoCall(VIDEO_CODEC_VP8, false), null);
 
     // Wait for local SDP, rename it to answer and set as remote SDP.
     assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));