Support for using a video file instead of the camera, and for saving the output video to a file
When video out to file is enabled, the remote video that is being recorded is not shown on screen. You can use this command line for file input and output:

monkeyrunner ./webrtc/examples/androidapp/start_loopback_stubbed_camera_saved_video_out.py --devname 02157df28cd47001 --videoin /storage/emulated/0/reference_video_1280x720_30fps.y4m --videoout /storage/emulated/0/output.y4m --videoout_width 1280 --videoout_height 720 --videooutsave /tmp/out.y4m

BUG=webrtc:6545
Review-Url: https://codereview.webrtc.org/2273573003
Cr-Commit-Position: refs/heads/master@{#14660}
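For orientation, a minimal sketch (hypothetical caller, e.g. an instrumentation test; not part of this CL) of driving the new file-based capture and recording through the intent extras introduced below:

import android.content.Context;
import android.content.Intent;
import android.net.Uri;

// Assumes an AppRTC room URL in roomUrl; the extras and the ACTION_VIEW
// handling in ConnectActivity are the ones added by this change.
void startFileLoopbackCall(Context context, String roomUrl) {
  Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(roomUrl));
  intent.setClassName(context, "org.appspot.apprtc.ConnectActivity");
  intent.putExtra(CallActivity.EXTRA_LOOPBACK, true);
  intent.putExtra(CallActivity.EXTRA_USE_VALUES_FROM_INTENT, true);
  intent.putExtra(CallActivity.EXTRA_VIDEO_FILE_AS_CAMERA,
      "/storage/emulated/0/reference_video_1280x720_30fps.y4m");
  intent.putExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE,
      "/storage/emulated/0/output.y4m");
  intent.putExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH, 1280);
  intent.putExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT, 720);
  context.startActivity(intent);
}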
@@ -262,6 +262,7 @@ if (is_android && !build_with_chromium) {
     "android/java/src/org/webrtc/EglBase10.java",
     "android/java/src/org/webrtc/EglBase14.java",
     "android/java/src/org/webrtc/EglRenderer.java",
+    "android/java/src/org/webrtc/FileVideoCapturer.java",
     "android/java/src/org/webrtc/GlRectDrawer.java",
     "android/java/src/org/webrtc/GlShader.java",
     "android/java/src/org/webrtc/GlTextureFrameBuffer.java",
@@ -290,6 +291,7 @@ if (is_android && !build_with_chromium) {
     "android/java/src/org/webrtc/SurfaceViewRenderer.java",
     "android/java/src/org/webrtc/VideoCapturer.java",
     "android/java/src/org/webrtc/VideoCapturerAndroid.java",
+    "android/java/src/org/webrtc/VideoFileRenderer.java",
     "android/java/src/org/webrtc/VideoRenderer.java",
     "android/java/src/org/webrtc/VideoRendererGui.java",
     "android/java/src/org/webrtc/VideoSource.java",
webrtc/api/android/java/src/org/webrtc/FileVideoCapturer.java (new file, 211 lines)
@@ -0,0 +1,211 @@
/*
 * Copyright 2016 The WebRTC Project Authors. All rights reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import android.content.Context;
import android.os.SystemClock;

import java.util.concurrent.TimeUnit;
import java.util.Timer;
import java.util.TimerTask;
import java.io.RandomAccessFile;
import java.io.IOException;

public class FileVideoCapturer implements VideoCapturer {
  private interface VideoReader {
    int getFrameWidth();
    int getFrameHeight();
    byte[] getNextFrame();
    void close();
  }

  /**
   * Read video data from file for the .y4m container.
   */
  private static class VideoReaderY4M implements VideoReader {
    private final static String TAG = "VideoReaderY4M";
    private final int frameWidth;
    private final int frameHeight;
    private final int frameSize;

    // First char after header
    private final long videoStart;

    private static final String Y4M_FRAME_DELIMETER = "FRAME";

    private final RandomAccessFile mediaFileStream;

    public int getFrameWidth() {
      return frameWidth;
    }

    public int getFrameHeight() {
      return frameHeight;
    }

    public VideoReaderY4M(String file) throws IOException {
      mediaFileStream = new RandomAccessFile(file, "r");
      StringBuilder builder = new StringBuilder();
      for (;;) {
        int c = mediaFileStream.read();
        if (c == -1) {
          // End of file reached.
          throw new RuntimeException("Found end of file before end of header for file: " + file);
        }
        if (c == '\n') {
          // End of header found.
          break;
        }
        builder.append((char) c);
      }
      videoStart = mediaFileStream.getFilePointer();
      String header = builder.toString();
      String[] headerTokens = header.split("[ ]");
      Logging.d(TAG, "header: " + header + ", headerTokens" + headerTokens);
      int w = 0;
      int h = 0;
      String colorSpace = "";
      for (String tok : headerTokens) {
        char c = tok.charAt(0);
        switch (c) {
          case 'W':
            w = Integer.parseInt(tok.substring(1));
            break;
          case 'H':
            h = Integer.parseInt(tok.substring(1));
            break;
          case 'C':
            colorSpace = tok.substring(1);
            break;
        }
      }
      Logging.d(TAG, "Color space: " + colorSpace);
      if (!colorSpace.equals("420")) {
        throw new IllegalArgumentException("Does not support any other color space than I420");
      }
      if ((w % 2) == 1 || (h % 2) == 1) {
        throw new IllegalArgumentException("Does not support odd width or height");
      }
      frameWidth = w;
      frameHeight = h;
      frameSize = w * h * 3 / 2;
      Logging.d(TAG, "frame dim: (" + w + ", " + h + ") frameSize: " + frameSize);
    }

    public byte[] getNextFrame() {
      byte[] frame = new byte[frameSize];
      try {
        byte[] frameDelim = new byte[Y4M_FRAME_DELIMETER.length() + 1];
        if (mediaFileStream.read(frameDelim) < frameDelim.length) {
          // We reach end of file, loop
          mediaFileStream.seek(videoStart);
          if (mediaFileStream.read(frameDelim) < frameDelim.length) {
            throw new RuntimeException("Error looping video");
          }
        }
        String frameDelimStr = new String(frameDelim);
        if (!frameDelimStr.equals(Y4M_FRAME_DELIMETER + "\n")) {
          throw new RuntimeException(
              "Frames should be delimited by FRAME plus newline, found delimter was: '"
              + frameDelimStr + "'");
        }
        mediaFileStream.readFully(frame);
        byte[] nv21Frame = new byte[frameSize];
        nativeI420ToNV21(frame, frameWidth, frameHeight, nv21Frame);
        return nv21Frame;
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
    }

    public void close() {
      try {
        mediaFileStream.close();
      } catch (IOException e) {
        Logging.e(TAG, "Problem closing file", e);
      }
    }
  }

  private final static String TAG = "FileVideoCapturer";
  private final VideoReader videoReader;
  private CapturerObserver capturerObserver;
  private final Timer timer = new Timer();

  private final TimerTask tickTask = new TimerTask() {
    @Override
    public void run() {
      tick();
    }
  };

  private int getFrameWidth() {
    return videoReader.getFrameWidth();
  }

  private int getFrameHeight() {
    return videoReader.getFrameHeight();
  }

  public FileVideoCapturer(String inputFile) throws IOException {
    try {
      videoReader = new VideoReaderY4M(inputFile);
    } catch (IOException e) {
      Logging.d(TAG, "Could not open video file: " + inputFile);
      throw e;
    }
  }

  private byte[] getNextFrame() {
    return videoReader.getNextFrame();
  }

  public void tick() {
    final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());

    byte[] frameData = getNextFrame();
    capturerObserver.onByteBufferFrameCaptured(
        frameData, getFrameWidth(), getFrameHeight(), 0, captureTimeNs);
  }

  @Override
  public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
      CapturerObserver capturerObserver) {
    this.capturerObserver = capturerObserver;
  }

  @Override
  public void startCapture(int width, int height, int framerate) {
    timer.schedule(tickTask, 0, 1000 / framerate);
  }

  @Override
  public void stopCapture() throws InterruptedException {
    timer.cancel();
  }

  @Override
  public void changeCaptureFormat(int width, int height, int framerate) {
    // Empty on purpose
  }

  @Override
  public void dispose() {
    videoReader.close();
  }

  @Override
  public boolean isScreencast() {
    return false;
  }

  public static native void nativeI420ToNV21(byte[] src, int width, int height, byte[] dst);
}
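VideoReaderY4M above accepts only C420 content and loops the clip by seeking back to the first FRAME marker. A small hypothetical helper (not part of this CL) that produces a .y4m file the parser accepts:

import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Arrays;

public class TinyY4mWriter {
  // Writes one second of solid mid-gray 64x64 I420 video in the exact layout
  // VideoReaderY4M expects: a single header line, then "FRAME\n" + raw planes.
  public static void main(String[] args) throws IOException {
    final int w = 64, h = 64, fps = 30;
    final byte[] frame = new byte[w * h * 3 / 2];
    Arrays.fill(frame, (byte) 0x80); // Y = mid gray, U/V = neutral chroma
    try (FileOutputStream out = new FileOutputStream("tiny_64x64.y4m")) {
      out.write(("YUV4MPEG2 C420 W" + w + " H" + h + " Ip F" + fps + ":1 A1:1\n").getBytes());
      for (int i = 0; i < fps; i++) {
        out.write("FRAME\n".getBytes());
        out.write(frame);
      }
    }
  }
}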
@@ -76,7 +76,7 @@ class SurfaceTextureHelper {
   }

   // State for YUV conversion, instantiated on demand.
-  static private class YuvConverter {
+  static class YuvConverter {
     private final EglBase eglBase;
     private final GlShader shader;
     private boolean released = false;
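The widened visibility (private to package-private) appears to exist so that the new VideoFileRenderer, which lives in the same org.webrtc package, can reuse the converter for texture frames:

// From VideoFileRenderer below; compiles only because YuvConverter is now
// visible outside SurfaceTextureHelper within the org.webrtc package.
yuvConverter = new SurfaceTextureHelper.YuvConverter(sharedContext);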
webrtc/api/android/java/src/org/webrtc/VideoFileRenderer.java (new file, 135 lines)
@@ -0,0 +1,135 @@
/*
 * Copyright 2016 The WebRTC Project Authors. All rights reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */
package org.webrtc;

import android.os.Handler;
import android.os.HandlerThread;

import java.nio.ByteBuffer;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.concurrent.CountDownLatch;

/**
 * Can be used to save the video frames to file.
 */
public class VideoFileRenderer implements VideoRenderer.Callbacks {
  private static final String TAG = "VideoFileRenderer";

  private final SurfaceTextureHelper.YuvConverter yuvConverter;
  private final HandlerThread renderThread;
  private final Object handlerLock = new Object();
  private final Handler renderThreadHandler;
  private final FileOutputStream videoOutFile;
  private final int outputFileWidth;
  private final int outputFileHeight;
  private final int outputFrameSize;
  private final ByteBuffer outputFrameBuffer;

  public VideoFileRenderer(String outputFile, int outputFileWidth, int outputFileHeight,
      EglBase.Context sharedContext) throws IOException {
    if ((outputFileWidth % 2) == 1 || (outputFileHeight % 2) == 1) {
      throw new IllegalArgumentException("Does not support uneven width or height");
    }
    yuvConverter = new SurfaceTextureHelper.YuvConverter(sharedContext);

    this.outputFileWidth = outputFileWidth;
    this.outputFileHeight = outputFileHeight;

    outputFrameSize = outputFileWidth * outputFileHeight * 3 / 2;
    outputFrameBuffer = ByteBuffer.allocateDirect(outputFrameSize);

    videoOutFile = new FileOutputStream(outputFile);
    videoOutFile.write(
        ("YUV4MPEG2 C420 W" + outputFileWidth + " H" + outputFileHeight + " Ip F30:1 A1:1\n")
            .getBytes());

    renderThread = new HandlerThread(TAG);
    renderThread.start();
    renderThreadHandler = new Handler(renderThread.getLooper());
  }

  @Override
  public void renderFrame(final VideoRenderer.I420Frame frame) {
    renderThreadHandler.post(new Runnable() {
      @Override
      public void run() {
        renderFrameOnRenderThread(frame);
      }
    });
  }

  private void renderFrameOnRenderThread(VideoRenderer.I420Frame frame) {
    final float frameAspectRatio = (float) frame.rotatedWidth() / (float) frame.rotatedHeight();

    final float[] rotatedSamplingMatrix =
        RendererCommon.rotateTextureMatrix(frame.samplingMatrix, frame.rotationDegree);
    final float[] layoutMatrix = RendererCommon.getLayoutMatrix(
        false, frameAspectRatio, (float) outputFileWidth / outputFileHeight);
    final float[] texMatrix = RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);

    try {
      videoOutFile.write("FRAME\n".getBytes());
      if (!frame.yuvFrame) {
        yuvConverter.convert(outputFrameBuffer, outputFileWidth, outputFileHeight, outputFileWidth,
            frame.textureId, texMatrix);

        int stride = outputFileWidth;
        byte[] data = outputFrameBuffer.array();
        int offset = outputFrameBuffer.arrayOffset();

        // Write Y
        videoOutFile.write(data, offset, outputFileWidth * outputFileHeight);

        // Write U
        for (int r = outputFileHeight; r < outputFileHeight * 3 / 2; ++r) {
          videoOutFile.write(data, offset + r * stride, stride / 2);
        }

        // Write V
        for (int r = outputFileHeight; r < outputFileHeight * 3 / 2; ++r) {
          videoOutFile.write(data, offset + r * stride + stride / 2, stride / 2);
        }
      } else {
        nativeI420Scale(frame.yuvPlanes[0], frame.yuvStrides[0], frame.yuvPlanes[1],
            frame.yuvStrides[1], frame.yuvPlanes[2], frame.yuvStrides[2], frame.width, frame.height,
            outputFrameBuffer, outputFileWidth, outputFileHeight);
        videoOutFile.write(
            outputFrameBuffer.array(), outputFrameBuffer.arrayOffset(), outputFrameSize);
      }
    } catch (IOException e) {
      Logging.e(TAG, "Failed to write to file for video out");
      throw new RuntimeException(e);
    } finally {
      VideoRenderer.renderFrameDone(frame);
    }
  }

  public void release() {
    final CountDownLatch cleanupBarrier = new CountDownLatch(1);
    renderThreadHandler.post(new Runnable() {
      @Override
      public void run() {
        try {
          videoOutFile.close();
        } catch (IOException e) {
          Logging.d(TAG, "Error closing output video file");
        }
        cleanupBarrier.countDown();
      }
    });
    ThreadUtils.awaitUninterruptibly(cleanupBarrier);
    renderThread.quit();
  }

  public static native void nativeI420Scale(ByteBuffer srcY, int strideY, ByteBuffer srcU,
      int strideU, ByteBuffer srcV, int strideV, int width, int height, ByteBuffer dst,
      int dstWidth, int dstHeight);
}
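A minimal usage sketch (hypothetical wrapper and output path; assumes the Java API of this era, where a VideoRenderer.Callbacks implementation is wrapped in a VideoRenderer and attached with VideoTrack.addRenderer):

import java.io.IOException;
import org.webrtc.EglBase;
import org.webrtc.VideoFileRenderer;
import org.webrtc.VideoRenderer;
import org.webrtc.VideoTrack;

class RemoteVideoRecorder {
  // Starts writing the remote track to a .y4m file; the caller must call
  // release() on the returned renderer when the call ends so the writer
  // thread is stopped and the file is closed.
  static VideoFileRenderer start(VideoTrack remoteTrack, EglBase.Context eglContext)
      throws IOException {
    VideoFileRenderer fileRenderer =
        new VideoFileRenderer("/sdcard/output.y4m", 1280, 720, eglContext);
    remoteTrack.addRenderer(new VideoRenderer(fileRenderer));
    return fileRenderer;
  }
}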
@@ -43,6 +43,8 @@
 #include <memory>
 #include <utility>

+#include "third_party/libyuv/include/libyuv/convert_from.h"
+#include "third_party/libyuv/include/libyuv/scale.h"
 #include "webrtc/api/androidvideotracksource.h"
 #include "webrtc/api/android/jni/androidmediadecoder_jni.h"
 #include "webrtc/api/android/jni/androidmediaencoder_jni.h"
@@ -2003,6 +2005,77 @@ JOW(void, VideoRenderer_nativeCopyPlane)(
   }
 }

+JOW(void, FileVideoCapturer_nativeI420ToNV21)(
+    JNIEnv *jni, jclass, jbyteArray j_src_buffer, jint width, jint height,
+    jbyteArray j_dst_buffer) {
+  size_t src_size = jni->GetArrayLength(j_src_buffer);
+  size_t dst_size = jni->GetArrayLength(j_dst_buffer);
+  int src_stride = width;
+  int dst_stride = width;
+  RTC_CHECK_GE(src_size, src_stride * height * 3 / 2);
+  RTC_CHECK_GE(dst_size, dst_stride * height * 3 / 2);
+  uint8_t* src =
+      reinterpret_cast<uint8_t*>(jni->GetByteArrayElements(j_src_buffer, 0));
+  uint8_t* dst =
+      reinterpret_cast<uint8_t*>(jni->GetByteArrayElements(j_dst_buffer, 0));
+  uint8_t* src_y = src;
+  size_t src_stride_y = src_stride;
+  uint8_t* src_u = src + src_stride * height;
+  size_t src_stride_u = src_stride / 2;
+  uint8_t* src_v = src + src_stride * height * 5 / 4;
+  size_t src_stride_v = src_stride / 2;
+
+  uint8_t* dst_y = dst;
+  size_t dst_stride_y = dst_stride;
+  size_t dst_stride_uv = dst_stride;
+  uint8_t* dst_uv = dst + dst_stride * height;
+
+  libyuv::I420ToNV21(src_y, src_stride_y, src_u, src_stride_u, src_v,
+                     src_stride_v, dst_y, dst_stride_y, dst_uv, dst_stride_uv,
+                     width, height);
+}
+
+JOW(void, VideoFileRenderer_nativeI420Scale)(
+    JNIEnv *jni, jclass,
+    jobject j_src_buffer_y, jint j_src_stride_y,
+    jobject j_src_buffer_u, jint j_src_stride_u,
+    jobject j_src_buffer_v, jint j_src_stride_v,
+    jint width, jint height,
+    jbyteArray j_dst_buffer, jint dstWidth, jint dstHeight) {
+  size_t src_size_y = jni->GetDirectBufferCapacity(j_src_buffer_y);
+  size_t src_size_u = jni->GetDirectBufferCapacity(j_src_buffer_u);
+  size_t src_size_v = jni->GetDirectBufferCapacity(j_src_buffer_v);
+  size_t dst_size = jni->GetDirectBufferCapacity(j_dst_buffer);
+  int dst_stride = dstWidth;
+  RTC_CHECK_GE(src_size_y, j_src_stride_y * height);
+  RTC_CHECK_GE(src_size_u, j_src_stride_u * height / 4);
+  RTC_CHECK_GE(src_size_v, j_src_stride_v * height / 4);
+  RTC_CHECK_GE(dst_size, dst_stride * height * 3 / 2);
+  uint8_t* src_y =
+      reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_buffer_y));
+  uint8_t* src_u =
+      reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_buffer_u));
+  uint8_t* src_v =
+      reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_buffer_v));
+  uint8_t* dst =
+      reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_buffer));
+
+  uint8_t* dst_y = dst;
+  size_t dst_stride_y = dst_stride;
+  uint8_t* dst_u = dst + dst_stride * dstHeight;
+  size_t dst_stride_u = dst_stride / 2;
+  uint8_t* dst_v = dst + dst_stride * dstHeight * 5 / 4;
+  size_t dst_stride_v = dst_stride / 2;
+
+  int ret = libyuv::I420Scale(
+      src_y, j_src_stride_y, src_u, j_src_stride_u, src_v, j_src_stride_v,
+      width, height, dst_y, dst_stride_y, dst_u, dst_stride_u, dst_v,
+      dst_stride_v, dstWidth, dstHeight, libyuv::kFilterBilinear);
+  if (ret) {
+    LOG(LS_ERROR) << "Error scaling I420 frame: " << ret;
+  }
+}
+
 JOW(jstring, MediaStreamTrack_nativeId)(JNIEnv* jni, jclass, jlong j_p) {
   return JavaStringFromStdString(
       jni, reinterpret_cast<MediaStreamTrackInterface*>(j_p)->id());
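For orientation, the plane arithmetic in FileVideoCapturer_nativeI420ToNV21 above corresponds to the following pure-Java equivalent for the stride == width case (illustration only; the CL itself delegates to libyuv):

// I420 source: Y[w*h], then U[w*h/4], then V[w*h/4] (planar).
// NV21 destination: Y[w*h], then interleaved VU pairs (semi-planar).
static void i420ToNv21(byte[] src, int width, int height, byte[] dst) {
  final int ySize = width * height;
  final int quarter = ySize / 4;
  System.arraycopy(src, 0, dst, 0, ySize); // Y plane is identical
  for (int i = 0; i < quarter; i++) {
    dst[ySize + 2 * i] = src[ySize + quarter + i]; // V comes first in NV21
    dst[ySize + 2 * i + 1] = src[ySize + i];       // then U
  }
}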
@@ -29,14 +29,26 @@ import android.view.Window;
 import android.view.WindowManager.LayoutParams;
 import android.widget.Toast;

+import java.io.IOException;
+import java.lang.RuntimeException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.webrtc.Camera1Enumerator;
 import org.webrtc.Camera2Enumerator;
+import org.webrtc.CameraEnumerator;
 import org.webrtc.EglBase;
+import org.webrtc.FileVideoCapturer;
+import org.webrtc.VideoFileRenderer;
 import org.webrtc.IceCandidate;
+import org.webrtc.Logging;
 import org.webrtc.PeerConnectionFactory;
 import org.webrtc.RendererCommon.ScalingType;
 import org.webrtc.SessionDescription;
 import org.webrtc.StatsReport;
 import org.webrtc.SurfaceViewRenderer;
+import org.webrtc.VideoCapturer;
+import org.webrtc.VideoRenderer;

 /**
  * Activity for peer connection call setup, call waiting
@@ -72,6 +84,15 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEvents
   public static final String EXTRA_TRACING = "org.appspot.apprtc.TRACING";
   public static final String EXTRA_CMDLINE = "org.appspot.apprtc.CMDLINE";
   public static final String EXTRA_RUNTIME = "org.appspot.apprtc.RUNTIME";
+  public static final String EXTRA_VIDEO_FILE_AS_CAMERA = "org.appspot.apprtc.VIDEO_FILE_AS_CAMERA";
+  public static final String EXTRA_SAVE_REMOTE_VIDEO_TO_FILE =
+      "org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE";
+  public static final String EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH =
+      "org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE_WIDTH";
+  public static final String EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT =
+      "org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT";
+  public static final String EXTRA_USE_VALUES_FROM_INTENT =
+      "org.appspot.apprtc.USE_VALUES_FROM_INTENT";
   private static final String TAG = "CallRTCClient";

   // List of mandatory application permissions.
@@ -101,7 +122,10 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEvents
   private AppRTCAudioManager audioManager = null;
   private EglBase rootEglBase;
   private SurfaceViewRenderer localRender;
-  private SurfaceViewRenderer remoteRender;
+  private SurfaceViewRenderer remoteRenderScreen;
+  private VideoFileRenderer videoFileRenderer;
+  private final List<VideoRenderer.Callbacks> remoteRenderers =
+      new ArrayList<VideoRenderer.Callbacks>();
   private PercentFrameLayout localRenderLayout;
   private PercentFrameLayout remoteRenderLayout;
   private ScalingType scalingType;
@@ -143,7 +167,7 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEvents

     // Create UI controls.
     localRender = (SurfaceViewRenderer) findViewById(R.id.local_video_view);
-    remoteRender = (SurfaceViewRenderer) findViewById(R.id.remote_video_view);
+    remoteRenderScreen = (SurfaceViewRenderer) findViewById(R.id.remote_video_view);
     localRenderLayout = (PercentFrameLayout) findViewById(R.id.local_video_layout);
     remoteRenderLayout = (PercentFrameLayout) findViewById(R.id.remote_video_layout);
     callFragment = new CallFragment();
@@ -158,12 +182,31 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEvents
     };

     localRender.setOnClickListener(listener);
-    remoteRender.setOnClickListener(listener);
+    remoteRenderScreen.setOnClickListener(listener);
+    remoteRenderers.add(remoteRenderScreen);
+
+    final Intent intent = getIntent();
+
     // Create video renderers.
     rootEglBase = EglBase.create();
     localRender.init(rootEglBase.getEglBaseContext(), null);
-    remoteRender.init(rootEglBase.getEglBaseContext(), null);
+    String saveRemoteVideoToFile = intent.getStringExtra(EXTRA_SAVE_REMOTE_VIDEO_TO_FILE);
+
+    // When saveRemoteVideoToFile is set we save the video from the remote to a file.
+    if (saveRemoteVideoToFile != null) {
+      int videoOutWidth = intent.getIntExtra(EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH, 0);
+      int videoOutHeight = intent.getIntExtra(EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT, 0);
+      try {
+        videoFileRenderer = new VideoFileRenderer(
+            saveRemoteVideoToFile, videoOutWidth, videoOutHeight, rootEglBase.getEglBaseContext());
+        remoteRenderers.add(videoFileRenderer);
+      } catch (IOException e) {
+        throw new RuntimeException(
+            "Failed to open video file for output: " + saveRemoteVideoToFile, e);
+      }
+    }
+    remoteRenderScreen.init(rootEglBase.getEglBaseContext(), null);
+
     localRender.setZOrderMediaOverlay(true);
     updateVideoView();

@@ -177,8 +220,6 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEvents
       }
     }

-    // Get Intent parameters.
-    final Intent intent = getIntent();
     Uri roomUri = intent.getData();
     if (roomUri == null) {
       logAndToast(getString(R.string.missing_url));
@@ -187,7 +228,10 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEvents
       finish();
       return;
     }
+
+    // Get Intent parameters.
     String roomId = intent.getStringExtra(EXTRA_ROOMID);
+    Log.d(TAG, "Room ID: " + roomId);
     if (roomId == null || roomId.length() == 0) {
       logAndToast(getString(R.string.missing_url));
       Log.e(TAG, "Incorrect room ID in intent!");
@@ -199,16 +243,12 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEvents
     boolean loopback = intent.getBooleanExtra(EXTRA_LOOPBACK, false);
     boolean tracing = intent.getBooleanExtra(EXTRA_TRACING, false);
-
-    boolean useCamera2 =
-        Camera2Enumerator.isSupported(this) && intent.getBooleanExtra(EXTRA_CAMERA2, true);
-
     peerConnectionParameters =
         new PeerConnectionParameters(intent.getBooleanExtra(EXTRA_VIDEO_CALL, true), loopback,
-            tracing, useCamera2, intent.getIntExtra(EXTRA_VIDEO_WIDTH, 0),
+            tracing, intent.getIntExtra(EXTRA_VIDEO_WIDTH, 0),
             intent.getIntExtra(EXTRA_VIDEO_HEIGHT, 0), intent.getIntExtra(EXTRA_VIDEO_FPS, 0),
             intent.getIntExtra(EXTRA_VIDEO_BITRATE, 0), intent.getStringExtra(EXTRA_VIDEOCODEC),
             intent.getBooleanExtra(EXTRA_HWCODEC_ENABLED, true),
-            intent.getBooleanExtra(EXTRA_CAPTURETOTEXTURE_ENABLED, false),
             intent.getIntExtra(EXTRA_AUDIO_BITRATE, 0), intent.getStringExtra(EXTRA_AUDIOCODEC),
             intent.getBooleanExtra(EXTRA_NOAUDIOPROCESSING_ENABLED, false),
             intent.getBooleanExtra(EXTRA_AECDUMP_ENABLED, false),
@@ -220,6 +260,8 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEvents
     commandLineRun = intent.getBooleanExtra(EXTRA_CMDLINE, false);
     runTimeMs = intent.getIntExtra(EXTRA_RUNTIME, 0);

+    Log.d(TAG, "VIDEO_FILE: '" + intent.getStringExtra(EXTRA_VIDEO_FILE_AS_CAMERA) + "'");
+
     // Create connection client. Use DirectRTCClient if room name is an IP otherwise use the
     // standard WebSocketRTCClient.
     if (loopback || !DirectRTCClient.IP_PATTERN.matcher(roomId).matches()) {
@@ -265,6 +307,46 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEvents
           CallActivity.this, peerConnectionParameters, CallActivity.this);
   }

+  private boolean useCamera2() {
+    return Camera2Enumerator.isSupported(this) && getIntent().getBooleanExtra(EXTRA_CAMERA2, true);
+  }
+
+  private boolean captureToTexture() {
+    return getIntent().getBooleanExtra(EXTRA_CAPTURETOTEXTURE_ENABLED, false);
+  }
+
+  private VideoCapturer createCameraCapturer(CameraEnumerator enumerator) {
+    final String[] deviceNames = enumerator.getDeviceNames();
+
+    // First, try to find front facing camera
+    Logging.d(TAG, "Looking for front facing cameras.");
+    for (String deviceName : deviceNames) {
+      if (enumerator.isFrontFacing(deviceName)) {
+        Logging.d(TAG, "Creating front facing camera capturer.");
+        VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);
+
+        if (videoCapturer != null) {
+          return videoCapturer;
+        }
+      }
+    }
+
+    // Front facing camera not found, try something else
+    Logging.d(TAG, "Looking for other cameras.");
+    for (String deviceName : deviceNames) {
+      if (!enumerator.isFrontFacing(deviceName)) {
+        Logging.d(TAG, "Creating other camera capturer.");
+        VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);
+
+        if (videoCapturer != null) {
+          return videoCapturer;
+        }
+      }
+    }
+
+    return null;
+  }
+
   // Activity interfaces
   @Override
   public void onPause() {
@@ -353,8 +435,8 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEvents

   private void updateVideoView() {
     remoteRenderLayout.setPosition(REMOTE_X, REMOTE_Y, REMOTE_WIDTH, REMOTE_HEIGHT);
-    remoteRender.setScalingType(scalingType);
-    remoteRender.setMirror(false);
+    remoteRenderScreen.setScalingType(scalingType);
+    remoteRenderScreen.setMirror(false);

     if (iceConnected) {
       localRenderLayout.setPosition(
@@ -368,7 +450,7 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEvents
     localRender.setMirror(true);

     localRender.requestLayout();
-    remoteRender.requestLayout();
+    remoteRenderScreen.requestLayout();
   }

   private void startCall() {
@@ -432,9 +514,13 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEvents
       localRender.release();
       localRender = null;
     }
-    if (remoteRender != null) {
-      remoteRender.release();
-      remoteRender = null;
+    if (videoFileRenderer != null) {
+      videoFileRenderer.release();
+      videoFileRenderer = null;
+    }
+    if (remoteRenderScreen != null) {
+      remoteRenderScreen.release();
+      remoteRenderScreen = null;
     }
     if (audioManager != null) {
       audioManager.close();
@@ -492,6 +578,35 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEvents
     });
   }

+  private VideoCapturer createVideoCapturer() {
+    VideoCapturer videoCapturer = null;
+    String videoFileAsCamera = getIntent().getStringExtra(EXTRA_VIDEO_FILE_AS_CAMERA);
+    if (videoFileAsCamera != null) {
+      try {
+        videoCapturer = new FileVideoCapturer(videoFileAsCamera);
+      } catch (IOException e) {
+        reportError("Failed to open video file for emulated camera");
+        return null;
+      }
+    } else if (useCamera2()) {
+      if (!captureToTexture()) {
+        reportError(getString(R.string.camera2_texture_only_error));
+        return null;
+      }
+
+      Logging.d(TAG, "Creating capturer using camera2 API.");
+      videoCapturer = createCameraCapturer(new Camera2Enumerator(this));
+    } else {
+      Logging.d(TAG, "Creating capturer using camera1 API.");
+      videoCapturer = createCameraCapturer(new Camera1Enumerator(captureToTexture()));
+    }
+    if (videoCapturer == null) {
+      reportError("Failed to open camera");
+      return null;
+    }
+    return videoCapturer;
+  }
+
   // -----Implementation of AppRTCClient.AppRTCSignalingEvents ---------------
   // All callbacks are invoked from websocket signaling looper thread and
   // are routed to UI thread.
@@ -500,8 +615,12 @@ public class CallActivity extends Activity implements AppRTCClient.SignalingEvents

     signalingParameters = params;
     logAndToast("Creating peer connection, delay=" + delta + "ms");
-    peerConnectionClient.createPeerConnection(
-        rootEglBase.getEglBaseContext(), localRender, remoteRender, signalingParameters);
+    VideoCapturer videoCapturer = null;
+    if (peerConnectionParameters.videoCallEnabled) {
+      videoCapturer = createVideoCapturer();
+    }
+    peerConnectionClient.createPeerConnection(rootEglBase.getEglBaseContext(), localRender,
+        remoteRenderers, videoCapturer, signalingParameters);
+
     if (signalingParameters.initiator) {
       logAndToast("Creating OFFER...");
@@ -144,8 +144,10 @@ public class ConnectActivity extends Activity {
     if ("android.intent.action.VIEW".equals(intent.getAction()) && !commandLineRun) {
       boolean loopback = intent.getBooleanExtra(CallActivity.EXTRA_LOOPBACK, false);
       int runTimeMs = intent.getIntExtra(CallActivity.EXTRA_RUNTIME, 0);
+      boolean useValuesFromIntent =
+          intent.getBooleanExtra(CallActivity.EXTRA_USE_VALUES_FROM_INTENT, false);
       String room = sharedPref.getString(keyprefRoom, "");
-      connectToRoom(room, true, loopback, runTimeMs);
+      connectToRoom(room, true, loopback, useValuesFromIntent, runTimeMs);
     }
   }

@@ -190,7 +192,7 @@ public class ConnectActivity extends Activity {
       startActivity(intent);
       return true;
     } else if (item.getItemId() == R.id.action_loopback) {
-      connectToRoom(null, false, true, 0);
+      connectToRoom(null, false, true, false, 0);
       return true;
     } else {
       return super.onOptionsItemSelected(item);
@@ -243,8 +245,42 @@ public class ConnectActivity extends Activity {
     }
   }

-  private void connectToRoom(
-      String roomId, boolean commandLineRun, boolean loopback, int runTimeMs) {
+  /**
+   * Get a value from the shared preference or from the intent, if it does not
+   * exist the default is used.
+   */
+  private String sharedPrefGetString(
+      int attributeId, String intentName, int defaultId, boolean useFromIntent) {
+    String defaultValue = getString(defaultId);
+    if (useFromIntent) {
+      String value = getIntent().getStringExtra(intentName);
+      if (value != null) {
+        return value;
+      }
+      return defaultValue;
+    } else {
+      String attributeName = getString(attributeId);
+      return sharedPref.getString(attributeName, defaultValue);
+    }
+  }
+
+  /**
+   * Get a value from the shared preference or from the intent, if it does not
+   * exist the default is used.
+   */
+  private boolean sharedPrefGetBoolean(
+      int attributeId, String intentName, int defaultId, boolean useFromIntent) {
+    boolean defaultValue = Boolean.valueOf(getString(defaultId));
+    if (useFromIntent) {
+      return getIntent().getBooleanExtra(intentName, defaultValue);
+    } else {
+      String attributeName = getString(attributeId);
+      return sharedPref.getBoolean(attributeName, defaultValue);
+    }
+  }
+
+  private void connectToRoom(String roomId, boolean commandLineRun, boolean loopback,
+      boolean useValuesFromIntent, int runTimeMs) {
     this.commandLineRun = commandLineRun;

     // roomId is random for loopback.
@@ -256,58 +292,69 @@ public class ConnectActivity extends Activity {
         keyprefRoomServerUrl, getString(R.string.pref_room_server_url_default));

     // Video call enabled flag.
-    boolean videoCallEnabled = sharedPref.getBoolean(
-        keyprefVideoCallEnabled, Boolean.valueOf(getString(R.string.pref_videocall_default)));
+    boolean videoCallEnabled = sharedPrefGetBoolean(R.string.pref_videocall_key,
+        CallActivity.EXTRA_VIDEO_CALL, R.string.pref_videocall_default, useValuesFromIntent);

     // Use Camera2 option.
-    boolean useCamera2 = sharedPref.getBoolean(
-        keyprefCamera2, Boolean.valueOf(getString(R.string.pref_camera2_default)));
+    boolean useCamera2 = sharedPrefGetBoolean(R.string.pref_camera2_key, CallActivity.EXTRA_CAMERA2,
+        R.string.pref_camera2_default, useValuesFromIntent);

     // Get default codecs.
-    String videoCodec =
-        sharedPref.getString(keyprefVideoCodec, getString(R.string.pref_videocodec_default));
-    String audioCodec =
-        sharedPref.getString(keyprefAudioCodec, getString(R.string.pref_audiocodec_default));
+    String videoCodec = sharedPrefGetString(R.string.pref_videocodec_key,
+        CallActivity.EXTRA_VIDEOCODEC, R.string.pref_videocodec_default, useValuesFromIntent);
+    String audioCodec = sharedPrefGetString(R.string.pref_audiocodec_key,
+        CallActivity.EXTRA_AUDIOCODEC, R.string.pref_audiocodec_default, useValuesFromIntent);

     // Check HW codec flag.
-    boolean hwCodec = sharedPref.getBoolean(
-        keyprefHwCodecAcceleration, Boolean.valueOf(getString(R.string.pref_hwcodec_default)));
+    boolean hwCodec = sharedPrefGetBoolean(R.string.pref_hwcodec_key,
+        CallActivity.EXTRA_HWCODEC_ENABLED, R.string.pref_hwcodec_default, useValuesFromIntent);

     // Check Capture to texture.
-    boolean captureToTexture = sharedPref.getBoolean(keyprefCaptureToTexture,
-        Boolean.valueOf(getString(R.string.pref_capturetotexture_default)));
+    boolean captureToTexture = sharedPrefGetBoolean(R.string.pref_capturetotexture_key,
+        CallActivity.EXTRA_CAPTURETOTEXTURE_ENABLED, R.string.pref_capturetotexture_default,
+        useValuesFromIntent);

     // Check Disable Audio Processing flag.
-    boolean noAudioProcessing = sharedPref.getBoolean(keyprefNoAudioProcessingPipeline,
-        Boolean.valueOf(getString(R.string.pref_noaudioprocessing_default)));
+    boolean noAudioProcessing = sharedPrefGetBoolean(R.string.pref_noaudioprocessing_key,
+        CallActivity.EXTRA_NOAUDIOPROCESSING_ENABLED, R.string.pref_noaudioprocessing_default,
+        useValuesFromIntent);

     // Check Disable Audio Processing flag.
-    boolean aecDump = sharedPref.getBoolean(
-        keyprefAecDump, Boolean.valueOf(getString(R.string.pref_aecdump_default)));
+    boolean aecDump = sharedPrefGetBoolean(R.string.pref_aecdump_key,
+        CallActivity.EXTRA_AECDUMP_ENABLED, R.string.pref_aecdump_default, useValuesFromIntent);

     // Check OpenSL ES enabled flag.
-    boolean useOpenSLES = sharedPref.getBoolean(
-        keyprefOpenSLES, Boolean.valueOf(getString(R.string.pref_opensles_default)));
+    boolean useOpenSLES = sharedPrefGetBoolean(R.string.pref_opensles_key,
+        CallActivity.EXTRA_OPENSLES_ENABLED, R.string.pref_opensles_default, useValuesFromIntent);

     // Check Disable built-in AEC flag.
-    boolean disableBuiltInAEC = sharedPref.getBoolean(keyprefDisableBuiltInAec,
-        Boolean.valueOf(getString(R.string.pref_disable_built_in_aec_default)));
+    boolean disableBuiltInAEC = sharedPrefGetBoolean(R.string.pref_disable_built_in_aec_key,
+        CallActivity.EXTRA_DISABLE_BUILT_IN_AEC, R.string.pref_disable_built_in_aec_default,
+        useValuesFromIntent);

     // Check Disable built-in AGC flag.
-    boolean disableBuiltInAGC = sharedPref.getBoolean(keyprefDisableBuiltInAgc,
-        Boolean.valueOf(getString(R.string.pref_disable_built_in_agc_default)));
+    boolean disableBuiltInAGC = sharedPrefGetBoolean(R.string.pref_disable_built_in_agc_key,
+        CallActivity.EXTRA_DISABLE_BUILT_IN_AGC, R.string.pref_disable_built_in_agc_default,
+        useValuesFromIntent);

     // Check Disable built-in NS flag.
-    boolean disableBuiltInNS = sharedPref.getBoolean(keyprefDisableBuiltInNs,
-        Boolean.valueOf(getString(R.string.pref_disable_built_in_ns_default)));
+    boolean disableBuiltInNS = sharedPrefGetBoolean(R.string.pref_disable_built_in_ns_key,
+        CallActivity.EXTRA_DISABLE_BUILT_IN_NS, R.string.pref_disable_built_in_ns_default,
+        useValuesFromIntent);

     // Check Enable level control.
-    boolean enableLevelControl = sharedPref.getBoolean(keyprefEnableLevelControl,
-        Boolean.valueOf(getString(R.string.pref_enable_level_control_key)));
+    boolean enableLevelControl = sharedPrefGetBoolean(R.string.pref_enable_level_control_key,
+        CallActivity.EXTRA_ENABLE_LEVEL_CONTROL, R.string.pref_enable_level_control_key,
+        useValuesFromIntent);

     // Get video resolution from settings.
     int videoWidth = 0;
     int videoHeight = 0;
+    if (useValuesFromIntent) {
+      videoWidth = getIntent().getIntExtra(CallActivity.EXTRA_VIDEO_WIDTH, 0);
+      videoHeight = getIntent().getIntExtra(CallActivity.EXTRA_VIDEO_HEIGHT, 0);
+    }
+    if (videoWidth == 0 && videoHeight == 0) {
       String resolution =
           sharedPref.getString(keyprefResolution, getString(R.string.pref_resolution_default));
       String[] dimensions = resolution.split("[ x]+");
@@ -321,25 +368,37 @@ public class ConnectActivity extends Activity {
         Log.e(TAG, "Wrong video resolution setting: " + resolution);
       }
     }
+    }

     // Get camera fps from settings.
     int cameraFps = 0;
+    if (useValuesFromIntent) {
+      cameraFps = getIntent().getIntExtra(CallActivity.EXTRA_VIDEO_FPS, 0);
+    }
+    if (cameraFps == 0) {
      String fps = sharedPref.getString(keyprefFps, getString(R.string.pref_fps_default));
      String[] fpsValues = fps.split("[ x]+");
      if (fpsValues.length == 2) {
        try {
          cameraFps = Integer.parseInt(fpsValues[0]);
        } catch (NumberFormatException e) {
+          cameraFps = 0;
          Log.e(TAG, "Wrong camera fps setting: " + fps);
        }
      }
+    }

     // Check capture quality slider flag.
-    boolean captureQualitySlider = sharedPref.getBoolean(keyprefCaptureQualitySlider,
-        Boolean.valueOf(getString(R.string.pref_capturequalityslider_default)));
+    boolean captureQualitySlider = sharedPrefGetBoolean(R.string.pref_capturequalityslider_key,
+        CallActivity.EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED,
+        R.string.pref_capturequalityslider_default, useValuesFromIntent);

     // Get video and audio start bitrate.
     int videoStartBitrate = 0;
+    if (useValuesFromIntent) {
+      videoStartBitrate = getIntent().getIntExtra(CallActivity.EXTRA_VIDEO_BITRATE, 0);
+    }
+    if (videoStartBitrate == 0) {
       String bitrateTypeDefault = getString(R.string.pref_maxvideobitrate_default);
       String bitrateType = sharedPref.getString(keyprefVideoBitrateType, bitrateTypeDefault);
       if (!bitrateType.equals(bitrateTypeDefault)) {
@@ -347,21 +406,28 @@ public class ConnectActivity extends Activity {
           keyprefVideoBitrateValue, getString(R.string.pref_maxvideobitratevalue_default));
       videoStartBitrate = Integer.parseInt(bitrateValue);
     }
+    }

     int audioStartBitrate = 0;
-    bitrateTypeDefault = getString(R.string.pref_startaudiobitrate_default);
-    bitrateType = sharedPref.getString(keyprefAudioBitrateType, bitrateTypeDefault);
+    if (useValuesFromIntent) {
+      audioStartBitrate = getIntent().getIntExtra(CallActivity.EXTRA_AUDIO_BITRATE, 0);
+    }
+    if (audioStartBitrate == 0) {
+      String bitrateTypeDefault = getString(R.string.pref_startaudiobitrate_default);
+      String bitrateType = sharedPref.getString(keyprefAudioBitrateType, bitrateTypeDefault);
       if (!bitrateType.equals(bitrateTypeDefault)) {
         String bitrateValue = sharedPref.getString(
             keyprefAudioBitrateValue, getString(R.string.pref_startaudiobitratevalue_default));
         audioStartBitrate = Integer.parseInt(bitrateValue);
       }
+    }

     // Check statistics display option.
-    boolean displayHud = sharedPref.getBoolean(
-        keyprefDisplayHud, Boolean.valueOf(getString(R.string.pref_displayhud_default)));
+    boolean displayHud = sharedPrefGetBoolean(R.string.pref_displayhud_key,
+        CallActivity.EXTRA_DISPLAY_HUD, R.string.pref_displayhud_default, useValuesFromIntent);

-    boolean tracing = sharedPref.getBoolean(
-        keyprefTracing, Boolean.valueOf(getString(R.string.pref_tracing_default)));
+    boolean tracing = sharedPrefGetBoolean(R.string.pref_tracing_key, CallActivity.EXTRA_TRACING,
+        R.string.pref_tracing_default, useValuesFromIntent);

     // Start AppRTCMobile activity.
     Log.d(TAG, "Connecting to room " + roomId + " at URL " + roomUrl);
@@ -395,6 +461,32 @@ public class ConnectActivity extends Activity {
       intent.putExtra(CallActivity.EXTRA_CMDLINE, commandLineRun);
       intent.putExtra(CallActivity.EXTRA_RUNTIME, runTimeMs);

+      if (useValuesFromIntent) {
+        if (getIntent().hasExtra(CallActivity.EXTRA_VIDEO_FILE_AS_CAMERA)) {
+          String videoFileAsCamera =
+              getIntent().getStringExtra(CallActivity.EXTRA_VIDEO_FILE_AS_CAMERA);
+          intent.putExtra(CallActivity.EXTRA_VIDEO_FILE_AS_CAMERA, videoFileAsCamera);
+        }
+
+        if (getIntent().hasExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE)) {
+          String saveRemoteVideoToFile =
+              getIntent().getStringExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE);
+          intent.putExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE, saveRemoteVideoToFile);
+        }
+
+        if (getIntent().hasExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH)) {
+          int videoOutWidth =
+              getIntent().getIntExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH, 0);
+          intent.putExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH, videoOutWidth);
+        }
+
+        if (getIntent().hasExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT)) {
+          int videoOutHeight =
+              getIntent().getIntExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT, 0);
+          intent.putExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT, videoOutHeight);
+        }
+      }
+
       startActivityForResult(intent, CONNECTION_REQUEST);
     }
   }
@@ -424,7 +516,7 @@ public class ConnectActivity extends Activity {
     @Override
     public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) {
       String roomId = ((TextView) view).getText().toString();
-      connectToRoom(roomId, false, false, 0);
+      connectToRoom(roomId, false, false, false, 0);
     }
   };

@@ -442,7 +534,7 @@ public class ConnectActivity extends Activity {
   private final OnClickListener connectListener = new OnClickListener() {
     @Override
     public void onClick(View view) {
-      connectToRoom(roomEditText.getText().toString(), false, false, 0);
+      connectToRoom(roomEditText.getText().toString(), false, false, false, 0);
     }
   };
 }
@@ -41,6 +41,9 @@ import org.webrtc.StatsObserver;
 import org.webrtc.StatsReport;
 import org.webrtc.VideoCapturer;
 import org.webrtc.VideoRenderer;
+import org.webrtc.VideoCapturerAndroid;
+import org.webrtc.CameraVideoCapturer;
+import org.webrtc.FileVideoCapturer;
 import org.webrtc.VideoSource;
 import org.webrtc.VideoTrack;
 import org.webrtc.voiceengine.WebRtcAudioManager;
@ -48,8 +51,10 @@ import org.webrtc.voiceengine.WebRtcAudioUtils;
|
|||||||
|
|
||||||
import java.io.File;
|
import java.io.File;
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
|
import java.util.Collections;
|
||||||
import java.util.EnumSet;
|
import java.util.EnumSet;
|
||||||
import java.util.LinkedList;
|
import java.util.LinkedList;
|
||||||
|
import java.util.List;
|
||||||
import java.util.Timer;
|
import java.util.Timer;
|
||||||
import java.util.TimerTask;
|
import java.util.TimerTask;
|
||||||
import java.util.concurrent.Executors;
|
import java.util.concurrent.Executors;
|
||||||
@ -107,7 +112,7 @@ public class PeerConnectionClient {
|
|||||||
private boolean isError;
|
private boolean isError;
|
||||||
private Timer statsTimer;
|
private Timer statsTimer;
|
||||||
private VideoRenderer.Callbacks localRender;
|
private VideoRenderer.Callbacks localRender;
|
||||||
private VideoRenderer.Callbacks remoteRender;
|
private List<VideoRenderer.Callbacks> remoteRenders;
|
||||||
private SignalingParameters signalingParameters;
|
private SignalingParameters signalingParameters;
|
||||||
private MediaConstraints pcConstraints;
|
private MediaConstraints pcConstraints;
|
||||||
private int videoWidth;
|
private int videoWidth;
|
||||||
@ -126,7 +131,7 @@ public class PeerConnectionClient {
|
|||||||
private SessionDescription localSdp; // either offer or answer SDP
|
private SessionDescription localSdp; // either offer or answer SDP
|
||||||
private MediaStream mediaStream;
|
private MediaStream mediaStream;
|
||||||
private int numberOfCameras;
|
private int numberOfCameras;
|
||||||
private CameraVideoCapturer videoCapturer;
|
private VideoCapturer videoCapturer;
|
||||||
// enableVideo is set to true if video should be rendered and sent.
|
// enableVideo is set to true if video should be rendered and sent.
|
||||||
private boolean renderVideo;
|
private boolean renderVideo;
|
||||||
private VideoTrack localVideoTrack;
|
private VideoTrack localVideoTrack;
|
||||||
@ -143,14 +148,12 @@ public class PeerConnectionClient {
|
|||||||
public final boolean videoCallEnabled;
|
public final boolean videoCallEnabled;
|
||||||
public final boolean loopback;
|
public final boolean loopback;
|
||||||
public final boolean tracing;
|
public final boolean tracing;
|
||||||
public final boolean useCamera2;
|
|
||||||
public final int videoWidth;
|
public final int videoWidth;
|
||||||
public final int videoHeight;
|
public final int videoHeight;
|
||||||
public final int videoFps;
|
public final int videoFps;
|
||||||
public final int videoMaxBitrate;
|
public final int videoMaxBitrate;
|
||||||
public final String videoCodec;
|
public final String videoCodec;
|
||||||
public final boolean videoCodecHwAcceleration;
|
public final boolean videoCodecHwAcceleration;
|
||||||
public final boolean captureToTexture;
|
|
||||||
public final int audioStartBitrate;
|
public final int audioStartBitrate;
|
||||||
public final String audioCodec;
|
public final String audioCodec;
|
||||||
public final boolean noAudioProcessing;
|
public final boolean noAudioProcessing;
|
||||||
@ -162,13 +165,11 @@ public class PeerConnectionClient {
|
|||||||
public final boolean enableLevelControl;
|
public final boolean enableLevelControl;
|
||||||
|
|
||||||
public PeerConnectionParameters(boolean videoCallEnabled, boolean loopback, boolean tracing,
|
public PeerConnectionParameters(boolean videoCallEnabled, boolean loopback, boolean tracing,
|
||||||
boolean useCamera2, int videoWidth, int videoHeight, int videoFps, int videoMaxBitrate,
|
int videoWidth, int videoHeight, int videoFps, int videoMaxBitrate, String videoCodec,
|
||||||
String videoCodec, boolean videoCodecHwAcceleration, boolean captureToTexture,
|
boolean videoCodecHwAcceleration, int audioStartBitrate, String audioCodec,
|
||||||
int audioStartBitrate, String audioCodec, boolean noAudioProcessing, boolean aecDump,
|
boolean noAudioProcessing, boolean aecDump, boolean useOpenSLES, boolean disableBuiltInAEC,
|
||||||
boolean useOpenSLES, boolean disableBuiltInAEC, boolean disableBuiltInAGC,
|
boolean disableBuiltInAGC, boolean disableBuiltInNS, boolean enableLevelControl) {
|
||||||
boolean disableBuiltInNS, boolean enableLevelControl) {
|
|
||||||
this.videoCallEnabled = videoCallEnabled;
|
this.videoCallEnabled = videoCallEnabled;
|
||||||
this.useCamera2 = useCamera2;
|
|
||||||
this.loopback = loopback;
|
this.loopback = loopback;
|
||||||
this.tracing = tracing;
|
this.tracing = tracing;
|
||||||
this.videoWidth = videoWidth;
|
this.videoWidth = videoWidth;
|
||||||
@ -177,7 +178,6 @@ public class PeerConnectionClient {
|
|||||||
this.videoMaxBitrate = videoMaxBitrate;
|
this.videoMaxBitrate = videoMaxBitrate;
|
||||||
this.videoCodec = videoCodec;
|
this.videoCodec = videoCodec;
|
||||||
this.videoCodecHwAcceleration = videoCodecHwAcceleration;
|
this.videoCodecHwAcceleration = videoCodecHwAcceleration;
|
||||||
this.captureToTexture = captureToTexture;
|
|
||||||
this.audioStartBitrate = audioStartBitrate;
|
this.audioStartBitrate = audioStartBitrate;
|
||||||
this.audioCodec = audioCodec;
|
this.audioCodec = audioCodec;
|
||||||
this.noAudioProcessing = noAudioProcessing;
|
this.noAudioProcessing = noAudioProcessing;
|
||||||
@ -286,13 +286,20 @@ public class PeerConnectionClient {
|
|||||||
|
|
||||||
public void createPeerConnection(final EglBase.Context renderEGLContext,
|
public void createPeerConnection(final EglBase.Context renderEGLContext,
|
||||||
final VideoRenderer.Callbacks localRender, final VideoRenderer.Callbacks remoteRender,
|
final VideoRenderer.Callbacks localRender, final VideoRenderer.Callbacks remoteRender,
|
||||||
final SignalingParameters signalingParameters) {
|
final VideoCapturer videoCapturer, final SignalingParameters signalingParameters) {
|
||||||
|
createPeerConnection(renderEGLContext, localRender, Collections.singletonList(remoteRender),
|
||||||
|
videoCapturer, signalingParameters);
|
||||||
|
}
|
||||||
|
public void createPeerConnection(final EglBase.Context renderEGLContext,
|
||||||
|
final VideoRenderer.Callbacks localRender, final List<VideoRenderer.Callbacks> remoteRenders,
|
||||||
|
final VideoCapturer videoCapturer, final SignalingParameters signalingParameters) {
|
||||||
if (peerConnectionParameters == null) {
|
if (peerConnectionParameters == null) {
|
||||||
Log.e(TAG, "Creating peer connection without initializing factory.");
|
Log.e(TAG, "Creating peer connection without initializing factory.");
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
this.localRender = localRender;
|
this.localRender = localRender;
|
||||||
this.remoteRender = remoteRender;
|
this.remoteRenders = remoteRenders;
|
||||||
|
this.videoCapturer = videoCapturer;
|
||||||
this.signalingParameters = signalingParameters;
|
this.signalingParameters = signalingParameters;
|
||||||
executor.execute(new Runnable() {
|
executor.execute(new Runnable() {
|
||||||
@Override
|
@Override
|
||||||
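The overload above is the core API change: the caller now injects the VideoCapturer, and the remote track can fan out to a list of renderer callbacks instead of exactly one. A hedged call-site sketch, assuming a VideoFileRenderer that implements VideoRenderer.Callbacks (its constructor signature below is an assumption, not shown in these hunks):

// Sketch of a caller using the list-based overload, e.g. inside CallActivity's
// connect path (assumed). remoteScreenRenderer is the normal on-screen callback;
// VideoFileRenderer (assumed constructor) would additionally write every remote
// frame to a .y4m file.
List<VideoRenderer.Callbacks> remoteRenderers = new ArrayList<VideoRenderer.Callbacks>();
remoteRenderers.add(remoteScreenRenderer);
if (saveRemoteVideoToFile != null) {
  remoteRenderers.add(
      new VideoFileRenderer(saveRemoteVideoToFile, outWidth, outHeight)); // assumed signature
}
peerConnectionClient.createPeerConnection(rootEglBase.getEglBaseContext(), localRenderer,
    remoteRenderers, videoCapturer, signalingParameters);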
@@ -468,36 +475,6 @@ public class PeerConnectionClient {
     }
   }

-  private void createCapturer(CameraEnumerator enumerator) {
-    final String[] deviceNames = enumerator.getDeviceNames();
-
-    // First, try to find front facing camera
-    Logging.d(TAG, "Looking for front facing cameras.");
-    for (String deviceName : deviceNames) {
-      if (enumerator.isFrontFacing(deviceName)) {
-        Logging.d(TAG, "Creating front facing camera capturer.");
-        videoCapturer = enumerator.createCapturer(deviceName, null);
-
-        if (videoCapturer != null) {
-          return;
-        }
-      }
-    }
-
-    // Front facing camera not found, try something else
-    Logging.d(TAG, "Looking for other cameras.");
-    for (String deviceName : deviceNames) {
-      if (!enumerator.isFrontFacing(deviceName)) {
-        Logging.d(TAG, "Creating other camera capturer.");
-        videoCapturer = enumerator.createCapturer(deviceName, null);
-
-        if (videoCapturer != null) {
-          return;
-        }
-      }
-    }
-  }
-
   private void createPeerConnectionInternal(EglBase.Context renderEGLContext) {
     if (factory == null || isError) {
       Log.e(TAG, "Peerconnection factory is not created");
@@ -534,23 +511,6 @@ public class PeerConnectionClient {

     mediaStream = factory.createLocalMediaStream("ARDAMS");
     if (videoCallEnabled) {
-      if (peerConnectionParameters.useCamera2) {
-        if (!peerConnectionParameters.captureToTexture) {
-          reportError(context.getString(R.string.camera2_texture_only_error));
-          return;
-        }
-
-        Logging.d(TAG, "Creating capturer using camera2 API.");
-        createCapturer(new Camera2Enumerator(context));
-      } else {
-        Logging.d(TAG, "Creating capturer using camera1 API.");
-        createCapturer(new Camera1Enumerator(peerConnectionParameters.captureToTexture));
-      }
-
-      if (videoCapturer == null) {
-        reportError("Failed to open camera");
-        return;
-      }
       mediaStream.addTrack(createVideoTrack(videoCapturer));
     }

@@ -1003,13 +963,18 @@ public class PeerConnectionClient {
   }

   private void switchCameraInternal() {
+    if (videoCapturer instanceof CameraVideoCapturer) {
       if (!videoCallEnabled || numberOfCameras < 2 || isError || videoCapturer == null) {
         Log.e(TAG, "Failed to switch camera. Video: " + videoCallEnabled + ". Error : " + isError
             + ". Number of cameras: " + numberOfCameras);
         return; // No video is sent or only one camera is available or error happened.
       }
       Log.d(TAG, "Switch camera");
-      videoCapturer.switchCamera(null);
+      CameraVideoCapturer cameraVideoCapturer = (CameraVideoCapturer) videoCapturer;
+      cameraVideoCapturer.switchCamera(null);
+    } else {
+      Log.d(TAG, "Will not switch camera, video caputurer is not a camera");
+    }
   }

   public void switchCamera() {
@@ -1109,9 +1074,11 @@ public class PeerConnectionClient {
         if (stream.videoTracks.size() == 1) {
           remoteVideoTrack = stream.videoTracks.get(0);
           remoteVideoTrack.setEnabled(renderVideo);
+          for (VideoRenderer.Callbacks remoteRender : remoteRenders) {
             remoteVideoTrack.addRenderer(new VideoRenderer(remoteRender));
+          }
         }
       }
     });
   }
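With capturer creation removed from PeerConnectionClient, the caller decides between a real camera and the new file-backed source. A minimal sketch, assuming FileVideoCapturer takes the input .y4m path in its constructor and may throw IOException (the class is added earlier in this change; this exact signature is an assumption):

// Sketch: choose a file-backed capturer when a reference video is supplied,
// otherwise enumerate a real camera, then hand the result to the new overload.
VideoCapturer videoCapturer = null;
if (videoFileAsCamera != null) {
  try {
    videoCapturer = new FileVideoCapturer(videoFileAsCamera); // assumed ctor
  } catch (IOException e) {
    reportError("Failed to open video file for emulated camera");
  }
} else {
  CameraEnumerator enumerator = useCamera2
      ? new Camera2Enumerator(context)
      : new Camera1Enumerator(captureToTexture);
  videoCapturer = enumerator.createCapturer(enumerator.getDeviceNames()[0], null);
}
peerConnectionClient.createPeerConnection(
    eglContext, localRender, remoteRenders, videoCapturer, signalingParameters);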
@@ -0,0 +1,124 @@
+# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+from optparse import OptionParser
+import random
+import string
+import subprocess
+import sys
+import time
+
+from com.android.monkeyrunner import MonkeyRunner, MonkeyDevice
+
+def main():
+  parser = OptionParser()
+
+  parser.add_option('--devname', dest='devname', help='The device id')
+
+  parser.add_option(
+      '--videooutsave',
+      dest='videooutsave',
+      help='The path where to save the video out file on local computer')
+
+  parser.add_option(
+      '--videoout',
+      dest='videoout',
+      help='The path where to put the video out file')
+
+  parser.add_option(
+      '--videoout_width',
+      dest='videoout_width',
+      type='int',
+      help='The width for the video out file')
+
+  parser.add_option(
+      '--videoout_height',
+      dest='videoout_height',
+      type='int',
+      help='The height for the video out file')
+
+  parser.add_option(
+      '--videoin',
+      dest='videoin',
+      help='The path where to read input file instead of camera')
+
+  parser.add_option(
+      '--call_length',
+      dest='call_length',
+      type='int',
+      help='The length of the call')
+
+  (options, args) = parser.parse_args()
+
+  print (options, args)
+
+  devname = options.devname
+
+  videoin = options.videoin
+
+  videoout = options.videoout
+  videoout_width = options.videoout_width
+  videoout_height = options.videoout_height
+
+  videooutsave = options.videooutsave
+
+  call_length = options.call_length or 10
+
+  room = ''.join(random.choice(string.ascii_letters + string.digits)
+                 for _ in range(8))
+
+  # Delete output video file.
+  if videoout:
+    subprocess.Popen(['adb', '-s', devname, 'shell', 'rm',
+                      videoout])
+
+  device = MonkeyRunner.waitForConnection(2, devname)
+
+  extras = {
+      'org.appspot.apprtc.USE_VALUES_FROM_INTENT': True,
+      'org.appspot.apprtc.AUDIOCODEC': 'OPUS',
+      'org.appspot.apprtc.LOOPBACK': True,
+      'org.appspot.apprtc.VIDEOCODEC': 'VP8',
+      'org.appspot.apprtc.CAPTURETOTEXTURE': False,
+      'org.appspot.apprtc.CAMERA2': False,
+      'org.appspot.apprtc.ROOMID': room}
+
+  if videoin:
+    extras.update({'org.appspot.apprtc.VIDEO_FILE_AS_CAMERA': videoin})
+
+  if videoout:
+    extras.update({
+        'org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE': videoout,
+        'org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE_WIDTH': videoout_width,
+        'org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT': videoout_height})
+
+  print extras
+
+  device.startActivity(data='https://appr.tc',
+      action='android.intent.action.VIEW',
+      component='org.appspot.apprtc/.ConnectActivity', extras=extras)
+
+  print 'Running a call for %d seconds' % call_length
+  for _ in xrange(call_length):
+    sys.stdout.write('.')
+    sys.stdout.flush()
+    time.sleep(1)
+  print '\nEnding call.'
+
+  # Press back to end the call. Will end on both sides.
+  device.press('KEYCODE_BACK', MonkeyDevice.DOWN_AND_UP)
+
+  if videooutsave:
+    time.sleep(2)
+
+    subprocess.Popen(['adb', '-s', devname, 'pull',
+                      videoout, videooutsave])
+
+if __name__ == '__main__':
+  main()
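Both the stubbed camera input and the saved remote video are Y4M files, whose first line is a plain-text header such as YUV4MPEG2 W1280 H720 F30:1. A small standalone checker for a pulled output file (class name and usage are illustrative only, not part of this change):

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;

// Reads only the first (ASCII) line of a .y4m file and prints its width,
// height and frame-rate tokens, e.g. W1280 H720 F30:1.
public class Y4mHeaderCheck {
  public static void main(String[] args) throws IOException {
    BufferedReader reader = new BufferedReader(new FileReader(args[0]));
    String header = reader.readLine();
    reader.close();
    if (header == null || !header.startsWith("YUV4MPEG2")) {
      throw new IOException("Not a Y4M stream: " + header);
    }
    for (String token : header.split(" ")) {
      if (token.startsWith("W") || token.startsWith("H") || token.startsWith("F")) {
        System.out.println(token);
      }
    }
  }
}

Usage after pulling the file with adb: java Y4mHeaderCheck output.y4m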
@@ -21,7 +21,9 @@ import android.test.InstrumentationTestCase;
 import android.test.suitebuilder.annotation.SmallTest;
 import android.util.Log;

+import org.webrtc.Camera1Enumerator;
 import org.webrtc.Camera2Enumerator;
+import org.webrtc.CameraEnumerator;
 import org.webrtc.EglBase;
 import org.webrtc.IceCandidate;
 import org.webrtc.MediaCodecVideoEncoder;
@@ -29,6 +31,7 @@ import org.webrtc.PeerConnection;
 import org.webrtc.PeerConnectionFactory;
 import org.webrtc.SessionDescription;
 import org.webrtc.StatsReport;
+import org.webrtc.VideoCapturer;
 import org.webrtc.VideoRenderer;

 import java.util.LinkedList;
@@ -235,7 +238,7 @@ public class PeerConnectionClientTest

   PeerConnectionClient createPeerConnectionClient(MockRenderer localRenderer,
       MockRenderer remoteRenderer, PeerConnectionParameters peerConnectionParameters,
-      EglBase.Context eglContext) {
+      VideoCapturer videoCapturer, EglBase.Context eglContext) {
     List<PeerConnection.IceServer> iceServers = new LinkedList<PeerConnection.IceServer>();
     SignalingParameters signalingParameters =
         new SignalingParameters(iceServers, true, // iceServers, initiator.
@@ -249,7 +252,8 @@ public class PeerConnectionClientTest
     client.setPeerConnectionFactoryOptions(options);
     client.createPeerConnectionFactory(
         getInstrumentation().getTargetContext(), peerConnectionParameters, this);
-    client.createPeerConnection(eglContext, localRenderer, remoteRenderer, signalingParameters);
+    client.createPeerConnection(
+        eglContext, localRenderer, remoteRenderer, videoCapturer, signalingParameters);
     client.createOffer();
     return client;
   }
@@ -260,14 +264,12 @@ public class PeerConnectionClientTest
         true, /* loopback */
         false, /* tracing */
         // Video codec parameters.
-        true, /* useCamera2 */
         0, /* videoWidth */
         0, /* videoHeight */
         0, /* videoFps */
         0, /* videoStartBitrate */
         "", /* videoCodec */
         true, /* videoCodecHwAcceleration */
-        false, /* captureToToTexture */
         // Audio codec parameters.
         0, /* audioStartBitrate */
         "OPUS", /* audioCodec */
@@ -275,27 +277,36 @@ public class PeerConnectionClientTest
         false, /* aecDump */
         false /* useOpenSLES */, false /* disableBuiltInAEC */, false /* disableBuiltInAGC */,
         false /* disableBuiltInNS */, false /* enableLevelControl */);

     return peerConnectionParameters;
   }

-  private PeerConnectionParameters createParametersForVideoCall(
-      String videoCodec, boolean captureToTexture) {
+  private VideoCapturer createCameraCapturer(boolean captureToTexture) {
     final boolean useCamera2 =
         captureToTexture && Camera2Enumerator.isSupported(getInstrumentation().getTargetContext());
+    CameraEnumerator enumerator;
+    if (useCamera2) {
+      enumerator = new Camera2Enumerator(getInstrumentation().getTargetContext());
+    } else {
+      enumerator = new Camera1Enumerator(captureToTexture);
+    }
+    String deviceName = enumerator.getDeviceNames()[0];
+    return enumerator.createCapturer(deviceName, null);
+  }
+
+  private PeerConnectionParameters createParametersForVideoCall(String videoCodec) {
     PeerConnectionParameters peerConnectionParameters =
         new PeerConnectionParameters(true, /* videoCallEnabled */
             true, /* loopback */
             false, /* tracing */
             // Video codec parameters.
-            useCamera2, /* useCamera2 */
             0, /* videoWidth */
             0, /* videoHeight */
             0, /* videoFps */
             0, /* videoStartBitrate */
             videoCodec, /* videoCodec */
             true, /* videoCodecHwAcceleration */
-            captureToTexture, /* captureToToTexture */
             // Audio codec parameters.
             0, /* audioStartBitrate */
             "OPUS", /* audioCodec */
@@ -303,6 +314,7 @@ public class PeerConnectionClientTest
         false, /* aecDump */
         false /* useOpenSLES */, false /* disableBuiltInAEC */, false /* disableBuiltInAGC */,
         false /* disableBuiltInNS */, false /* enableLevelControl */);

     return peerConnectionParameters;
   }

@@ -327,7 +339,8 @@ public class PeerConnectionClientTest
     Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally");
     MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
     pcClient = createPeerConnectionClient(localRenderer, new MockRenderer(0, null),
-        createParametersForVideoCall(VIDEO_CODEC_VP8, false), null);
+        createParametersForVideoCall(VIDEO_CODEC_VP8),
+        createCameraCapturer(false /* captureToTexture */), null);

     // Wait for local SDP and ice candidates set events.
     assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
@@ -343,8 +356,8 @@ public class PeerConnectionClientTest
     Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally Done.");
   }

-  private void doLoopbackTest(PeerConnectionParameters parameters, boolean decodeToTexure)
-      throws InterruptedException {
+  private void doLoopbackTest(PeerConnectionParameters parameters, VideoCapturer videoCapturer,
+      boolean decodeToTexture) throws InterruptedException {
     loopback = true;
     MockRenderer localRenderer = null;
     MockRenderer remoteRenderer = null;
@@ -355,8 +368,8 @@ public class PeerConnectionClientTest
     } else {
       Log.d(TAG, "testLoopback for audio.");
     }
-    pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, parameters,
-        decodeToTexure ? eglBase.getEglBaseContext() : null);
+    pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, parameters, videoCapturer,
+        decodeToTexture ? eglBase.getEglBaseContext() : null);

     // Wait for local SDP, rename it to answer and set as remote SDP.
     assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
@@ -386,22 +399,25 @@ public class PeerConnectionClientTest

   @SmallTest
   public void testLoopbackAudio() throws InterruptedException {
-    doLoopbackTest(createParametersForAudioCall(), false);
+    doLoopbackTest(createParametersForAudioCall(), null, false /* decodeToTexture */);
   }

   @SmallTest
   public void testLoopbackVp8() throws InterruptedException {
-    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, false), false);
+    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8),
+        createCameraCapturer(false /* captureToTexture */), false /* decodeToTexture */);
   }

   @SmallTest
   public void testLoopbackVp9() throws InterruptedException {
-    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9, false), false);
+    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9),
+        createCameraCapturer(false /* captureToTexture */), false /* decodeToTexture */);
   }

   @SmallTest
   public void testLoopbackH264() throws InterruptedException {
-    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264, false), false);
+    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264),
+        createCameraCapturer(false /* captureToTexture */), false /* decodeToTexture */);
   }

   @SmallTest
@@ -410,7 +426,8 @@ public class PeerConnectionClientTest
       Log.i(TAG, "Decode to textures is not supported, requires SDK version 19.");
       return;
     }
-    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, false), true);
+    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8),
+        createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */);
   }

   @SmallTest
@@ -419,7 +436,8 @@ public class PeerConnectionClientTest
       Log.i(TAG, "Decode to textures is not supported, requires SDK version 19.");
       return;
     }
-    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9, false), true);
+    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9),
+        createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */);
   }

   @SmallTest
@@ -428,7 +446,8 @@ public class PeerConnectionClientTest
       Log.i(TAG, "Decode to textures is not supported, requires SDK version 19.");
       return;
     }
-    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264, false), true);
+    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264),
+        createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */);
   }

   @SmallTest
@@ -443,7 +462,8 @@ public class PeerConnectionClientTest
       Log.i(TAG, "VP8 encode to textures is not supported.");
       return;
     }
-    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8, true), true);
+    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8),
+        createCameraCapturer(true /* captureToTexture */), true /* decodeToTexture */);
   }

   // Test that a call can be setup even if the EGL context used during initialization is
@@ -457,11 +477,11 @@ public class PeerConnectionClientTest
     }

     loopback = true;
-    PeerConnectionParameters parameters = createParametersForVideoCall(VIDEO_CODEC_VP8, true);
+    PeerConnectionParameters parameters = createParametersForVideoCall(VIDEO_CODEC_VP8);
     MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
     MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);
-    pcClient = createPeerConnectionClient(
-        localRenderer, remoteRenderer, parameters, eglBase.getEglBaseContext());
+    pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, parameters,
+        createCameraCapturer(true /* captureToTexture */), eglBase.getEglBaseContext());

     // Wait for local SDP, rename it to answer and set as remote SDP.
     assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
@@ -501,7 +521,8 @@ public class PeerConnectionClientTest
       Log.i(TAG, "H264 encode to textures is not supported.");
       return;
     }
-    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264, true), true);
+    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264),
+        createCameraCapturer(true /* captureToTexture */), true /* decodeToTexture */);
   }

   // Checks if default front camera can be switched to back camera and then
@@ -514,8 +535,9 @@ public class PeerConnectionClientTest
     MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
     MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);

-    pcClient = createPeerConnectionClient(
-        localRenderer, remoteRenderer, createParametersForVideoCall(VIDEO_CODEC_VP8, false), null);
+    pcClient = createPeerConnectionClient(localRenderer, remoteRenderer,
+        createParametersForVideoCall(VIDEO_CODEC_VP8),
+        createCameraCapturer(false /* captureToTexture */), null);

     // Wait for local SDP, rename it to answer and set as remote SDP.
     assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
@@ -562,8 +584,9 @@ public class PeerConnectionClientTest
     MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
     MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);

-    pcClient = createPeerConnectionClient(
-        localRenderer, remoteRenderer, createParametersForVideoCall(VIDEO_CODEC_VP8, false), null);
+    pcClient = createPeerConnectionClient(localRenderer, remoteRenderer,
+        createParametersForVideoCall(VIDEO_CODEC_VP8),
+        createCameraCapturer(false /* captureToTexture */), null);

     // Wait for local SDP, rename it to answer and set as remote SDP.
     assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
@@ -611,8 +634,9 @@ public class PeerConnectionClientTest
     MockRenderer localRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
     MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);

-    pcClient = createPeerConnectionClient(
-        localRenderer, remoteRenderer, createParametersForVideoCall(VIDEO_CODEC_VP8, false), null);
+    pcClient = createPeerConnectionClient(localRenderer, remoteRenderer,
+        createParametersForVideoCall(VIDEO_CODEC_VP8),
+        createCameraCapturer(false /* captureToTexture */), null);

     // Wait for local SDP, rename it to answer and set as remote SDP.
     assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
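Because the capturer is now a plain parameter of doLoopbackTest, a loopback test no longer needs camera hardware at all. A hypothetical extra test in this style (not part of this commit; FileVideoCapturer's constructor and the on-device path are assumptions):

// Hypothetical addition, not in this change: VP8 loopback driven by a
// file-backed capturer, so the test can run on devices without a camera.
@SmallTest
public void testLoopbackVp8FromFile() throws InterruptedException, IOException {
  doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8),
      new FileVideoCapturer("/sdcard/reference.y4m"), // assumed ctor
      false /* decodeToTexture */);
}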