Moving src/webrtc into src/.
In order to eliminate the WebRTC Subtree mirror in Chromium, WebRTC is moving the content of the src/webrtc directory up to the src/ directory. NOPRESUBMIT=true NOTREECHECKS=true NOTRY=true TBR=tommi@webrtc.org Bug: chromium:611808 Change-Id: Iac59c5b51b950f174119565bac87955a7994bc38 Reviewed-on: https://webrtc-review.googlesource.com/1560 Commit-Queue: Mirko Bonadei <mbonadei@webrtc.org> Reviewed-by: Henrik Kjellander <kjellander@webrtc.org> Cr-Commit-Position: refs/heads/master@{#19845}
This commit is contained in:
committed by
Commit Bot
parent
6674846b4a
commit
bb547203bf
@ -0,0 +1,595 @@
|
||||
/*
|
||||
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.appspot.apprtc;
|
||||
|
||||
import org.appspot.apprtc.util.AppRTCUtils;
|
||||
|
||||
import android.content.BroadcastReceiver;
|
||||
import android.content.Context;
|
||||
import android.content.Intent;
|
||||
import android.content.IntentFilter;
|
||||
import android.content.SharedPreferences;
|
||||
import android.content.pm.PackageManager;
|
||||
import android.media.AudioDeviceInfo;
|
||||
import android.media.AudioManager;
|
||||
import android.os.Build;
|
||||
import android.preference.PreferenceManager;
|
||||
import android.util.Log;
|
||||
|
||||
import org.webrtc.ThreadUtils;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
/**
|
||||
* AppRTCAudioManager manages all audio related parts of the AppRTC demo.
|
||||
*/
|
||||
public class AppRTCAudioManager {
|
||||
private static final String TAG = "AppRTCAudioManager";
|
||||
private static final String SPEAKERPHONE_AUTO = "auto";
|
||||
private static final String SPEAKERPHONE_TRUE = "true";
|
||||
private static final String SPEAKERPHONE_FALSE = "false";
|
||||
|
||||
/**
|
||||
* AudioDevice is the names of possible audio devices that we currently
|
||||
* support.
|
||||
*/
|
||||
public enum AudioDevice { SPEAKER_PHONE, WIRED_HEADSET, EARPIECE, BLUETOOTH, NONE }
|
||||
|
||||
/** AudioManager state. */
|
||||
public enum AudioManagerState {
|
||||
UNINITIALIZED,
|
||||
PREINITIALIZED,
|
||||
RUNNING,
|
||||
}
|
||||
|
||||
/** Selected audio device change event. */
|
||||
public static interface AudioManagerEvents {
|
||||
// Callback fired once audio device is changed or list of available audio devices changed.
|
||||
void onAudioDeviceChanged(
|
||||
AudioDevice selectedAudioDevice, Set<AudioDevice> availableAudioDevices);
|
||||
}
|
||||
|
||||
private final Context apprtcContext;
|
||||
private AudioManager audioManager;
|
||||
|
||||
private AudioManagerEvents audioManagerEvents;
|
||||
private AudioManagerState amState;
|
||||
private int savedAudioMode = AudioManager.MODE_INVALID;
|
||||
private boolean savedIsSpeakerPhoneOn = false;
|
||||
private boolean savedIsMicrophoneMute = false;
|
||||
private boolean hasWiredHeadset = false;
|
||||
|
||||
// Default audio device; speaker phone for video calls or earpiece for audio
|
||||
// only calls.
|
||||
private AudioDevice defaultAudioDevice;
|
||||
|
||||
// Contains the currently selected audio device.
|
||||
// This device is changed automatically using a certain scheme where e.g.
|
||||
// a wired headset "wins" over speaker phone. It is also possible for a
|
||||
// user to explicitly select a device (and overrid any predefined scheme).
|
||||
// See |userSelectedAudioDevice| for details.
|
||||
private AudioDevice selectedAudioDevice;
|
||||
|
||||
// Contains the user-selected audio device which overrides the predefined
|
||||
// selection scheme.
|
||||
// TODO(henrika): always set to AudioDevice.NONE today. Add support for
|
||||
// explicit selection based on choice by userSelectedAudioDevice.
|
||||
private AudioDevice userSelectedAudioDevice;
|
||||
|
||||
// Contains speakerphone setting: auto, true or false
|
||||
private final String useSpeakerphone;
|
||||
|
||||
// Proximity sensor object. It measures the proximity of an object in cm
|
||||
// relative to the view screen of a device and can therefore be used to
|
||||
// assist device switching (close to ear <=> use headset earpiece if
|
||||
// available, far from ear <=> use speaker phone).
|
||||
private AppRTCProximitySensor proximitySensor = null;
|
||||
|
||||
// Handles all tasks related to Bluetooth headset devices.
|
||||
private final AppRTCBluetoothManager bluetoothManager;
|
||||
|
||||
// Contains a list of available audio devices. A Set collection is used to
|
||||
// avoid duplicate elements.
|
||||
private Set<AudioDevice> audioDevices = new HashSet<AudioDevice>();
|
||||
|
||||
// Broadcast receiver for wired headset intent broadcasts.
|
||||
private BroadcastReceiver wiredHeadsetReceiver;
|
||||
|
||||
// Callback method for changes in audio focus.
|
||||
private AudioManager.OnAudioFocusChangeListener audioFocusChangeListener;
|
||||
|
||||
/**
|
||||
* This method is called when the proximity sensor reports a state change,
|
||||
* e.g. from "NEAR to FAR" or from "FAR to NEAR".
|
||||
*/
|
||||
private void onProximitySensorChangedState() {
|
||||
if (!useSpeakerphone.equals(SPEAKERPHONE_AUTO)) {
|
||||
return;
|
||||
}
|
||||
|
||||
// The proximity sensor should only be activated when there are exactly two
|
||||
// available audio devices.
|
||||
if (audioDevices.size() == 2 && audioDevices.contains(AppRTCAudioManager.AudioDevice.EARPIECE)
|
||||
&& audioDevices.contains(AppRTCAudioManager.AudioDevice.SPEAKER_PHONE)) {
|
||||
if (proximitySensor.sensorReportsNearState()) {
|
||||
// Sensor reports that a "handset is being held up to a person's ear",
|
||||
// or "something is covering the light sensor".
|
||||
setAudioDeviceInternal(AppRTCAudioManager.AudioDevice.EARPIECE);
|
||||
} else {
|
||||
// Sensor reports that a "handset is removed from a person's ear", or
|
||||
// "the light sensor is no longer covered".
|
||||
setAudioDeviceInternal(AppRTCAudioManager.AudioDevice.SPEAKER_PHONE);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* Receiver which handles changes in wired headset availability. */
|
||||
private class WiredHeadsetReceiver extends BroadcastReceiver {
|
||||
private static final int STATE_UNPLUGGED = 0;
|
||||
private static final int STATE_PLUGGED = 1;
|
||||
private static final int HAS_NO_MIC = 0;
|
||||
private static final int HAS_MIC = 1;
|
||||
|
||||
@Override
|
||||
public void onReceive(Context context, Intent intent) {
|
||||
int state = intent.getIntExtra("state", STATE_UNPLUGGED);
|
||||
int microphone = intent.getIntExtra("microphone", HAS_NO_MIC);
|
||||
String name = intent.getStringExtra("name");
|
||||
Log.d(TAG, "WiredHeadsetReceiver.onReceive" + AppRTCUtils.getThreadInfo() + ": "
|
||||
+ "a=" + intent.getAction() + ", s="
|
||||
+ (state == STATE_UNPLUGGED ? "unplugged" : "plugged") + ", m="
|
||||
+ (microphone == HAS_MIC ? "mic" : "no mic") + ", n=" + name + ", sb="
|
||||
+ isInitialStickyBroadcast());
|
||||
hasWiredHeadset = (state == STATE_PLUGGED);
|
||||
updateAudioDeviceState();
|
||||
}
|
||||
};
|
||||
|
||||
/** Construction. */
|
||||
static AppRTCAudioManager create(Context context) {
|
||||
return new AppRTCAudioManager(context);
|
||||
}
|
||||
|
||||
private AppRTCAudioManager(Context context) {
|
||||
Log.d(TAG, "ctor");
|
||||
ThreadUtils.checkIsOnMainThread();
|
||||
apprtcContext = context;
|
||||
audioManager = ((AudioManager) context.getSystemService(Context.AUDIO_SERVICE));
|
||||
bluetoothManager = AppRTCBluetoothManager.create(context, this);
|
||||
wiredHeadsetReceiver = new WiredHeadsetReceiver();
|
||||
amState = AudioManagerState.UNINITIALIZED;
|
||||
|
||||
SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context);
|
||||
useSpeakerphone = sharedPreferences.getString(context.getString(R.string.pref_speakerphone_key),
|
||||
context.getString(R.string.pref_speakerphone_default));
|
||||
Log.d(TAG, "useSpeakerphone: " + useSpeakerphone);
|
||||
if (useSpeakerphone.equals(SPEAKERPHONE_FALSE)) {
|
||||
defaultAudioDevice = AudioDevice.EARPIECE;
|
||||
} else {
|
||||
defaultAudioDevice = AudioDevice.SPEAKER_PHONE;
|
||||
}
|
||||
|
||||
// Create and initialize the proximity sensor.
|
||||
// Tablet devices (e.g. Nexus 7) does not support proximity sensors.
|
||||
// Note that, the sensor will not be active until start() has been called.
|
||||
proximitySensor = AppRTCProximitySensor.create(context, new Runnable() {
|
||||
// This method will be called each time a state change is detected.
|
||||
// Example: user holds his hand over the device (closer than ~5 cm),
|
||||
// or removes his hand from the device.
|
||||
public void run() {
|
||||
onProximitySensorChangedState();
|
||||
}
|
||||
});
|
||||
|
||||
Log.d(TAG, "defaultAudioDevice: " + defaultAudioDevice);
|
||||
AppRTCUtils.logDeviceInfo(TAG);
|
||||
}
|
||||
|
||||
public void start(AudioManagerEvents audioManagerEvents) {
|
||||
Log.d(TAG, "start");
|
||||
ThreadUtils.checkIsOnMainThread();
|
||||
if (amState == AudioManagerState.RUNNING) {
|
||||
Log.e(TAG, "AudioManager is already active");
|
||||
return;
|
||||
}
|
||||
// TODO(henrika): perhaps call new method called preInitAudio() here if UNINITIALIZED.
|
||||
|
||||
Log.d(TAG, "AudioManager starts...");
|
||||
this.audioManagerEvents = audioManagerEvents;
|
||||
amState = AudioManagerState.RUNNING;
|
||||
|
||||
// Store current audio state so we can restore it when stop() is called.
|
||||
savedAudioMode = audioManager.getMode();
|
||||
savedIsSpeakerPhoneOn = audioManager.isSpeakerphoneOn();
|
||||
savedIsMicrophoneMute = audioManager.isMicrophoneMute();
|
||||
hasWiredHeadset = hasWiredHeadset();
|
||||
|
||||
// Create an AudioManager.OnAudioFocusChangeListener instance.
|
||||
audioFocusChangeListener = new AudioManager.OnAudioFocusChangeListener() {
|
||||
// Called on the listener to notify if the audio focus for this listener has been changed.
|
||||
// The |focusChange| value indicates whether the focus was gained, whether the focus was lost,
|
||||
// and whether that loss is transient, or whether the new focus holder will hold it for an
|
||||
// unknown amount of time.
|
||||
// TODO(henrika): possibly extend support of handling audio-focus changes. Only contains
|
||||
// logging for now.
|
||||
@Override
|
||||
public void onAudioFocusChange(int focusChange) {
|
||||
String typeOfChange = "AUDIOFOCUS_NOT_DEFINED";
|
||||
switch (focusChange) {
|
||||
case AudioManager.AUDIOFOCUS_GAIN:
|
||||
typeOfChange = "AUDIOFOCUS_GAIN";
|
||||
break;
|
||||
case AudioManager.AUDIOFOCUS_GAIN_TRANSIENT:
|
||||
typeOfChange = "AUDIOFOCUS_GAIN_TRANSIENT";
|
||||
break;
|
||||
case AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE:
|
||||
typeOfChange = "AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE";
|
||||
break;
|
||||
case AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK:
|
||||
typeOfChange = "AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK";
|
||||
break;
|
||||
case AudioManager.AUDIOFOCUS_LOSS:
|
||||
typeOfChange = "AUDIOFOCUS_LOSS";
|
||||
break;
|
||||
case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT:
|
||||
typeOfChange = "AUDIOFOCUS_LOSS_TRANSIENT";
|
||||
break;
|
||||
case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK:
|
||||
typeOfChange = "AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK";
|
||||
break;
|
||||
default:
|
||||
typeOfChange = "AUDIOFOCUS_INVALID";
|
||||
break;
|
||||
}
|
||||
Log.d(TAG, "onAudioFocusChange: " + typeOfChange);
|
||||
}
|
||||
};
|
||||
|
||||
// Request audio playout focus (without ducking) and install listener for changes in focus.
|
||||
int result = audioManager.requestAudioFocus(audioFocusChangeListener,
|
||||
AudioManager.STREAM_VOICE_CALL, AudioManager.AUDIOFOCUS_GAIN_TRANSIENT);
|
||||
if (result == AudioManager.AUDIOFOCUS_REQUEST_GRANTED) {
|
||||
Log.d(TAG, "Audio focus request granted for VOICE_CALL streams");
|
||||
} else {
|
||||
Log.e(TAG, "Audio focus request failed");
|
||||
}
|
||||
|
||||
// Start by setting MODE_IN_COMMUNICATION as default audio mode. It is
|
||||
// required to be in this mode when playout and/or recording starts for
|
||||
// best possible VoIP performance.
|
||||
audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);
|
||||
|
||||
// Always disable microphone mute during a WebRTC call.
|
||||
setMicrophoneMute(false);
|
||||
|
||||
// Set initial device states.
|
||||
userSelectedAudioDevice = AudioDevice.NONE;
|
||||
selectedAudioDevice = AudioDevice.NONE;
|
||||
audioDevices.clear();
|
||||
|
||||
// Initialize and start Bluetooth if a BT device is available or initiate
|
||||
// detection of new (enabled) BT devices.
|
||||
bluetoothManager.start();
|
||||
|
||||
// Do initial selection of audio device. This setting can later be changed
|
||||
// either by adding/removing a BT or wired headset or by covering/uncovering
|
||||
// the proximity sensor.
|
||||
updateAudioDeviceState();
|
||||
|
||||
// Register receiver for broadcast intents related to adding/removing a
|
||||
// wired headset.
|
||||
registerReceiver(wiredHeadsetReceiver, new IntentFilter(Intent.ACTION_HEADSET_PLUG));
|
||||
Log.d(TAG, "AudioManager started");
|
||||
}
|
||||
|
||||
public void stop() {
|
||||
Log.d(TAG, "stop");
|
||||
ThreadUtils.checkIsOnMainThread();
|
||||
if (amState != AudioManagerState.RUNNING) {
|
||||
Log.e(TAG, "Trying to stop AudioManager in incorrect state: " + amState);
|
||||
return;
|
||||
}
|
||||
amState = AudioManagerState.UNINITIALIZED;
|
||||
|
||||
unregisterReceiver(wiredHeadsetReceiver);
|
||||
|
||||
bluetoothManager.stop();
|
||||
|
||||
// Restore previously stored audio states.
|
||||
setSpeakerphoneOn(savedIsSpeakerPhoneOn);
|
||||
setMicrophoneMute(savedIsMicrophoneMute);
|
||||
audioManager.setMode(savedAudioMode);
|
||||
|
||||
// Abandon audio focus. Gives the previous focus owner, if any, focus.
|
||||
audioManager.abandonAudioFocus(audioFocusChangeListener);
|
||||
audioFocusChangeListener = null;
|
||||
Log.d(TAG, "Abandoned audio focus for VOICE_CALL streams");
|
||||
|
||||
if (proximitySensor != null) {
|
||||
proximitySensor.stop();
|
||||
proximitySensor = null;
|
||||
}
|
||||
|
||||
audioManagerEvents = null;
|
||||
Log.d(TAG, "AudioManager stopped");
|
||||
}
|
||||
|
||||
/** Changes selection of the currently active audio device. */
|
||||
private void setAudioDeviceInternal(AudioDevice device) {
|
||||
Log.d(TAG, "setAudioDeviceInternal(device=" + device + ")");
|
||||
AppRTCUtils.assertIsTrue(audioDevices.contains(device));
|
||||
|
||||
switch (device) {
|
||||
case SPEAKER_PHONE:
|
||||
setSpeakerphoneOn(true);
|
||||
break;
|
||||
case EARPIECE:
|
||||
setSpeakerphoneOn(false);
|
||||
break;
|
||||
case WIRED_HEADSET:
|
||||
setSpeakerphoneOn(false);
|
||||
break;
|
||||
case BLUETOOTH:
|
||||
setSpeakerphoneOn(false);
|
||||
break;
|
||||
default:
|
||||
Log.e(TAG, "Invalid audio device selection");
|
||||
break;
|
||||
}
|
||||
selectedAudioDevice = device;
|
||||
}
|
||||
|
||||
/**
|
||||
* Changes default audio device.
|
||||
* TODO(henrika): add usage of this method in the AppRTCMobile client.
|
||||
*/
|
||||
public void setDefaultAudioDevice(AudioDevice defaultDevice) {
|
||||
ThreadUtils.checkIsOnMainThread();
|
||||
switch (defaultDevice) {
|
||||
case SPEAKER_PHONE:
|
||||
defaultAudioDevice = defaultDevice;
|
||||
break;
|
||||
case EARPIECE:
|
||||
if (hasEarpiece()) {
|
||||
defaultAudioDevice = defaultDevice;
|
||||
} else {
|
||||
defaultAudioDevice = AudioDevice.SPEAKER_PHONE;
|
||||
}
|
||||
break;
|
||||
default:
|
||||
Log.e(TAG, "Invalid default audio device selection");
|
||||
break;
|
||||
}
|
||||
Log.d(TAG, "setDefaultAudioDevice(device=" + defaultAudioDevice + ")");
|
||||
updateAudioDeviceState();
|
||||
}
|
||||
|
||||
/** Changes selection of the currently active audio device. */
|
||||
public void selectAudioDevice(AudioDevice device) {
|
||||
ThreadUtils.checkIsOnMainThread();
|
||||
if (!audioDevices.contains(device)) {
|
||||
Log.e(TAG, "Can not select " + device + " from available " + audioDevices);
|
||||
}
|
||||
userSelectedAudioDevice = device;
|
||||
updateAudioDeviceState();
|
||||
}
|
||||
|
||||
/** Returns current set of available/selectable audio devices. */
|
||||
public Set<AudioDevice> getAudioDevices() {
|
||||
ThreadUtils.checkIsOnMainThread();
|
||||
return Collections.unmodifiableSet(new HashSet<AudioDevice>(audioDevices));
|
||||
}
|
||||
|
||||
/** Returns the currently selected audio device. */
|
||||
public AudioDevice getSelectedAudioDevice() {
|
||||
ThreadUtils.checkIsOnMainThread();
|
||||
return selectedAudioDevice;
|
||||
}
|
||||
|
||||
/** Helper method for receiver registration. */
|
||||
private void registerReceiver(BroadcastReceiver receiver, IntentFilter filter) {
|
||||
apprtcContext.registerReceiver(receiver, filter);
|
||||
}
|
||||
|
||||
/** Helper method for unregistration of an existing receiver. */
|
||||
private void unregisterReceiver(BroadcastReceiver receiver) {
|
||||
apprtcContext.unregisterReceiver(receiver);
|
||||
}
|
||||
|
||||
/** Sets the speaker phone mode. */
|
||||
private void setSpeakerphoneOn(boolean on) {
|
||||
boolean wasOn = audioManager.isSpeakerphoneOn();
|
||||
if (wasOn == on) {
|
||||
return;
|
||||
}
|
||||
audioManager.setSpeakerphoneOn(on);
|
||||
}
|
||||
|
||||
/** Sets the microphone mute state. */
|
||||
private void setMicrophoneMute(boolean on) {
|
||||
boolean wasMuted = audioManager.isMicrophoneMute();
|
||||
if (wasMuted == on) {
|
||||
return;
|
||||
}
|
||||
audioManager.setMicrophoneMute(on);
|
||||
}
|
||||
|
||||
/** Gets the current earpiece state. */
|
||||
private boolean hasEarpiece() {
|
||||
return apprtcContext.getPackageManager().hasSystemFeature(PackageManager.FEATURE_TELEPHONY);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks whether a wired headset is connected or not.
|
||||
* This is not a valid indication that audio playback is actually over
|
||||
* the wired headset as audio routing depends on other conditions. We
|
||||
* only use it as an early indicator (during initialization) of an attached
|
||||
* wired headset.
|
||||
*/
|
||||
@Deprecated
|
||||
private boolean hasWiredHeadset() {
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
|
||||
return audioManager.isWiredHeadsetOn();
|
||||
} else {
|
||||
final AudioDeviceInfo[] devices = audioManager.getDevices(AudioManager.GET_DEVICES_ALL);
|
||||
for (AudioDeviceInfo device : devices) {
|
||||
final int type = device.getType();
|
||||
if (type == AudioDeviceInfo.TYPE_WIRED_HEADSET) {
|
||||
Log.d(TAG, "hasWiredHeadset: found wired headset");
|
||||
return true;
|
||||
} else if (type == AudioDeviceInfo.TYPE_USB_DEVICE) {
|
||||
Log.d(TAG, "hasWiredHeadset: found USB audio device");
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates list of possible audio devices and make new device selection.
|
||||
* TODO(henrika): add unit test to verify all state transitions.
|
||||
*/
|
||||
public void updateAudioDeviceState() {
|
||||
ThreadUtils.checkIsOnMainThread();
|
||||
Log.d(TAG, "--- updateAudioDeviceState: "
|
||||
+ "wired headset=" + hasWiredHeadset + ", "
|
||||
+ "BT state=" + bluetoothManager.getState());
|
||||
Log.d(TAG, "Device status: "
|
||||
+ "available=" + audioDevices + ", "
|
||||
+ "selected=" + selectedAudioDevice + ", "
|
||||
+ "user selected=" + userSelectedAudioDevice);
|
||||
|
||||
// Check if any Bluetooth headset is connected. The internal BT state will
|
||||
// change accordingly.
|
||||
// TODO(henrika): perhaps wrap required state into BT manager.
|
||||
if (bluetoothManager.getState() == AppRTCBluetoothManager.State.HEADSET_AVAILABLE
|
||||
|| bluetoothManager.getState() == AppRTCBluetoothManager.State.HEADSET_UNAVAILABLE
|
||||
|| bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_DISCONNECTING) {
|
||||
bluetoothManager.updateDevice();
|
||||
}
|
||||
|
||||
// Update the set of available audio devices.
|
||||
Set<AudioDevice> newAudioDevices = new HashSet<>();
|
||||
|
||||
if (bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_CONNECTED
|
||||
|| bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_CONNECTING
|
||||
|| bluetoothManager.getState() == AppRTCBluetoothManager.State.HEADSET_AVAILABLE) {
|
||||
newAudioDevices.add(AudioDevice.BLUETOOTH);
|
||||
}
|
||||
|
||||
if (hasWiredHeadset) {
|
||||
// If a wired headset is connected, then it is the only possible option.
|
||||
newAudioDevices.add(AudioDevice.WIRED_HEADSET);
|
||||
} else {
|
||||
// No wired headset, hence the audio-device list can contain speaker
|
||||
// phone (on a tablet), or speaker phone and earpiece (on mobile phone).
|
||||
newAudioDevices.add(AudioDevice.SPEAKER_PHONE);
|
||||
if (hasEarpiece()) {
|
||||
newAudioDevices.add(AudioDevice.EARPIECE);
|
||||
}
|
||||
}
|
||||
// Store state which is set to true if the device list has changed.
|
||||
boolean audioDeviceSetUpdated = !audioDevices.equals(newAudioDevices);
|
||||
// Update the existing audio device set.
|
||||
audioDevices = newAudioDevices;
|
||||
// Correct user selected audio devices if needed.
|
||||
if (bluetoothManager.getState() == AppRTCBluetoothManager.State.HEADSET_UNAVAILABLE
|
||||
&& userSelectedAudioDevice == AudioDevice.BLUETOOTH) {
|
||||
// If BT is not available, it can't be the user selection.
|
||||
userSelectedAudioDevice = AudioDevice.NONE;
|
||||
}
|
||||
if (hasWiredHeadset && userSelectedAudioDevice == AudioDevice.SPEAKER_PHONE) {
|
||||
// If user selected speaker phone, but then plugged wired headset then make
|
||||
// wired headset as user selected device.
|
||||
userSelectedAudioDevice = AudioDevice.WIRED_HEADSET;
|
||||
}
|
||||
if (!hasWiredHeadset && userSelectedAudioDevice == AudioDevice.WIRED_HEADSET) {
|
||||
// If user selected wired headset, but then unplugged wired headset then make
|
||||
// speaker phone as user selected device.
|
||||
userSelectedAudioDevice = AudioDevice.SPEAKER_PHONE;
|
||||
}
|
||||
|
||||
// Need to start Bluetooth if it is available and user either selected it explicitly or
|
||||
// user did not select any output device.
|
||||
boolean needBluetoothAudioStart =
|
||||
bluetoothManager.getState() == AppRTCBluetoothManager.State.HEADSET_AVAILABLE
|
||||
&& (userSelectedAudioDevice == AudioDevice.NONE
|
||||
|| userSelectedAudioDevice == AudioDevice.BLUETOOTH);
|
||||
|
||||
// Need to stop Bluetooth audio if user selected different device and
|
||||
// Bluetooth SCO connection is established or in the process.
|
||||
boolean needBluetoothAudioStop =
|
||||
(bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_CONNECTED
|
||||
|| bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_CONNECTING)
|
||||
&& (userSelectedAudioDevice != AudioDevice.NONE
|
||||
&& userSelectedAudioDevice != AudioDevice.BLUETOOTH);
|
||||
|
||||
if (bluetoothManager.getState() == AppRTCBluetoothManager.State.HEADSET_AVAILABLE
|
||||
|| bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_CONNECTING
|
||||
|| bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_CONNECTED) {
|
||||
Log.d(TAG, "Need BT audio: start=" + needBluetoothAudioStart + ", "
|
||||
+ "stop=" + needBluetoothAudioStop + ", "
|
||||
+ "BT state=" + bluetoothManager.getState());
|
||||
}
|
||||
|
||||
// Start or stop Bluetooth SCO connection given states set earlier.
|
||||
if (needBluetoothAudioStop) {
|
||||
bluetoothManager.stopScoAudio();
|
||||
bluetoothManager.updateDevice();
|
||||
}
|
||||
|
||||
if (needBluetoothAudioStart && !needBluetoothAudioStop) {
|
||||
// Attempt to start Bluetooth SCO audio (takes a few second to start).
|
||||
if (!bluetoothManager.startScoAudio()) {
|
||||
// Remove BLUETOOTH from list of available devices since SCO failed.
|
||||
audioDevices.remove(AudioDevice.BLUETOOTH);
|
||||
audioDeviceSetUpdated = true;
|
||||
}
|
||||
}
|
||||
|
||||
// Update selected audio device.
|
||||
AudioDevice newAudioDevice = selectedAudioDevice;
|
||||
|
||||
if (bluetoothManager.getState() == AppRTCBluetoothManager.State.SCO_CONNECTED) {
|
||||
// If a Bluetooth is connected, then it should be used as output audio
|
||||
// device. Note that it is not sufficient that a headset is available;
|
||||
// an active SCO channel must also be up and running.
|
||||
newAudioDevice = AudioDevice.BLUETOOTH;
|
||||
} else if (hasWiredHeadset) {
|
||||
// If a wired headset is connected, but Bluetooth is not, then wired headset is used as
|
||||
// audio device.
|
||||
newAudioDevice = AudioDevice.WIRED_HEADSET;
|
||||
} else {
|
||||
// No wired headset and no Bluetooth, hence the audio-device list can contain speaker
|
||||
// phone (on a tablet), or speaker phone and earpiece (on mobile phone).
|
||||
// |defaultAudioDevice| contains either AudioDevice.SPEAKER_PHONE or AudioDevice.EARPIECE
|
||||
// depending on the user's selection.
|
||||
newAudioDevice = defaultAudioDevice;
|
||||
}
|
||||
// Switch to new device but only if there has been any changes.
|
||||
if (newAudioDevice != selectedAudioDevice || audioDeviceSetUpdated) {
|
||||
// Do the required device switch.
|
||||
setAudioDeviceInternal(newAudioDevice);
|
||||
Log.d(TAG, "New device status: "
|
||||
+ "available=" + audioDevices + ", "
|
||||
+ "selected=" + newAudioDevice);
|
||||
if (audioManagerEvents != null) {
|
||||
// Notify a listening client that audio device has been changed.
|
||||
audioManagerEvents.onAudioDeviceChanged(selectedAudioDevice, audioDevices);
|
||||
}
|
||||
}
|
||||
Log.d(TAG, "--- updateAudioDeviceState done");
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,526 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.appspot.apprtc;
|
||||
|
||||
import android.annotation.SuppressLint;
|
||||
import android.bluetooth.BluetoothAdapter;
|
||||
import android.bluetooth.BluetoothDevice;
|
||||
import android.bluetooth.BluetoothHeadset;
|
||||
import android.bluetooth.BluetoothProfile;
|
||||
import android.content.BroadcastReceiver;
|
||||
import android.content.Context;
|
||||
import android.content.Intent;
|
||||
import android.content.IntentFilter;
|
||||
import android.content.pm.PackageManager;
|
||||
import android.media.AudioManager;
|
||||
import android.os.Handler;
|
||||
import android.os.Looper;
|
||||
import android.os.Process;
|
||||
import android.util.Log;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import org.appspot.apprtc.util.AppRTCUtils;
|
||||
import org.webrtc.ThreadUtils;
|
||||
|
||||
/**
 * AppRTCBluetoothManager manages functions related to Bluetooth devices in the
 * AppRTC demo.
 */
|
||||
public class AppRTCBluetoothManager {
|
||||
// Tag used for Android logcat output from this class.
private static final String TAG = "AppRTCBluetoothManager";

// Timeout interval for starting or stopping audio to a Bluetooth SCO device.
// Used with |bluetoothTimeoutRunnable| since the platform does not guarantee
// a callback after startScoAudio()/stopScoAudio().
private static final int BLUETOOTH_SCO_TIMEOUT_MS = 4000;
// Maximum number of SCO connection attempts.
private static final int MAX_SCO_CONNECTION_ATTEMPTS = 2;
|
||||
|
||||
// Bluetooth connection state. Tracks both the availability of the headset
// profile proxy and the lifecycle of the SCO audio link.
public enum State {
  // Bluetooth is not available; no adapter or Bluetooth is off.
  UNINITIALIZED,
  // Bluetooth error happened when trying to start Bluetooth.
  ERROR,
  // Bluetooth proxy object for the Headset profile exists, but no connected headset devices,
  // SCO is not started or disconnected.
  HEADSET_UNAVAILABLE,
  // Bluetooth proxy object for the Headset profile connected, connected Bluetooth headset
  // present, but SCO is not started or disconnected.
  HEADSET_AVAILABLE,
  // Bluetooth audio SCO connection with remote device is closing.
  SCO_DISCONNECTING,
  // Bluetooth audio SCO connection with remote device is initiated.
  SCO_CONNECTING,
  // Bluetooth audio SCO connection with remote device is established.
  SCO_CONNECTED
}
|
||||
|
||||
private final Context apprtcContext;
// Owning audio manager; presumably notified via its updateAudioDeviceState()
// when BT state changes — confirm against the rest of this class.
private final AppRTCAudioManager apprtcAudioManager;
private final AudioManager audioManager;
// Handler used to schedule |bluetoothTimeoutRunnable|.
private final Handler handler;

// Number of SCO connection attempts so far; bounded by
// MAX_SCO_CONNECTION_ATTEMPTS. Package-private — presumably for tests; verify.
int scoConnectionAttempts;
// Current state of the Bluetooth state machine; see State.
private State bluetoothState;
private final BluetoothProfile.ServiceListener bluetoothServiceListener;
private BluetoothAdapter bluetoothAdapter;
// Proxy for the Headset profile; set in onServiceConnected, cleared in
// onServiceDisconnected.
private BluetoothHeadset bluetoothHeadset;
private BluetoothDevice bluetoothDevice;
// Receives headset-connection and SCO-audio state broadcasts.
private final BroadcastReceiver bluetoothHeadsetReceiver;
|
||||
|
||||
// Runs when the Bluetooth timeout expires. We use that timeout after calling
// startScoAudio() or stopScoAudio() because we're not guaranteed to get a
// callback after those calls.
private final Runnable bluetoothTimeoutRunnable = new Runnable() {
  @Override
  public void run() {
    // Delegates to the outer class's timeout handler (defined elsewhere in
    // this class, not visible in this chunk).
    bluetoothTimeout();
  }
};
|
||||
|
||||
/**
 * Implementation of an interface that notifies BluetoothProfile IPC clients when they have been
 * connected to or disconnected from the service.
 */
private class BluetoothServiceListener implements BluetoothProfile.ServiceListener {
  @Override
  // Called to notify the client when the proxy object has been connected to the service.
  // Once we have the profile proxy object, we can use it to monitor the state of the
  // connection and perform other operations that are relevant to the headset profile.
  public void onServiceConnected(int profile, BluetoothProfile proxy) {
    // Ignore callbacks for other profiles, or when the manager is not started.
    if (profile != BluetoothProfile.HEADSET || bluetoothState == State.UNINITIALIZED) {
      return;
    }
    Log.d(TAG, "BluetoothServiceListener.onServiceConnected: BT state=" + bluetoothState);
    // Android only supports one connected Bluetooth Headset at a time.
    bluetoothHeadset = (BluetoothHeadset) proxy;
    // Trigger re-evaluation of the selected audio device.
    updateAudioDeviceState();
    Log.d(TAG, "onServiceConnected done: BT state=" + bluetoothState);
  }

  @Override
  /** Notifies the client when the proxy object has been disconnected from the service. */
  public void onServiceDisconnected(int profile) {
    if (profile != BluetoothProfile.HEADSET || bluetoothState == State.UNINITIALIZED) {
      return;
    }
    Log.d(TAG, "BluetoothServiceListener.onServiceDisconnected: BT state=" + bluetoothState);
    // Tear down any SCO audio link and forget the headset/device before
    // reporting the headset as unavailable.
    stopScoAudio();
    bluetoothHeadset = null;
    bluetoothDevice = null;
    bluetoothState = State.HEADSET_UNAVAILABLE;
    updateAudioDeviceState();
    Log.d(TAG, "onServiceDisconnected done: BT state=" + bluetoothState);
  }
}
|
||||
|
||||
// Intent broadcast receiver which handles changes in Bluetooth device availability.
// Detects headset changes and Bluetooth SCO state changes.
private class BluetoothHeadsetBroadcastReceiver extends BroadcastReceiver {
  @Override
  public void onReceive(Context context, Intent intent) {
    // Drop everything once stop() has been called.
    if (bluetoothState == State.UNINITIALIZED) {
      return;
    }
    final String action = intent.getAction();
    // Change in connection state of the Headset profile. Note that the
    // change does not tell us anything about whether we're streaming
    // audio to BT over SCO. Typically received when user turns on a BT
    // headset while audio is active using another audio device.
    if (action.equals(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED)) {
      final int state =
          intent.getIntExtra(BluetoothHeadset.EXTRA_STATE, BluetoothHeadset.STATE_DISCONNECTED);
      Log.d(TAG, "BluetoothHeadsetBroadcastReceiver.onReceive: "
              + "a=ACTION_CONNECTION_STATE_CHANGED, "
              + "s=" + stateToString(state) + ", "
              + "sb=" + isInitialStickyBroadcast() + ", "
              + "BT state: " + bluetoothState);
      if (state == BluetoothHeadset.STATE_CONNECTED) {
        // A headset appeared: reset retry budget and re-evaluate audio routing.
        scoConnectionAttempts = 0;
        updateAudioDeviceState();
      } else if (state == BluetoothHeadset.STATE_CONNECTING) {
        // No action needed.
      } else if (state == BluetoothHeadset.STATE_DISCONNECTING) {
        // No action needed.
      } else if (state == BluetoothHeadset.STATE_DISCONNECTED) {
        // Bluetooth is probably powered off during the call.
        stopScoAudio();
        updateAudioDeviceState();
      }
      // Change in the audio (SCO) connection state of the Headset profile.
      // Typically received after call to startScoAudio() has finalized.
    } else if (action.equals(BluetoothHeadset.ACTION_AUDIO_STATE_CHANGED)) {
      final int state = intent.getIntExtra(
          BluetoothHeadset.EXTRA_STATE, BluetoothHeadset.STATE_AUDIO_DISCONNECTED);
      Log.d(TAG, "BluetoothHeadsetBroadcastReceiver.onReceive: "
              + "a=ACTION_AUDIO_STATE_CHANGED, "
              + "s=" + stateToString(state) + ", "
              + "sb=" + isInitialStickyBroadcast() + ", "
              + "BT state: " + bluetoothState);
      if (state == BluetoothHeadset.STATE_AUDIO_CONNECTED) {
        // SCO is up before the timeout fired; cancel the pending timeout task.
        cancelTimer();
        if (bluetoothState == State.SCO_CONNECTING) {
          Log.d(TAG, "+++ Bluetooth audio SCO is now connected");
          bluetoothState = State.SCO_CONNECTED;
          scoConnectionAttempts = 0;
          updateAudioDeviceState();
        } else {
          Log.w(TAG, "Unexpected state BluetoothHeadset.STATE_AUDIO_CONNECTED");
        }
      } else if (state == BluetoothHeadset.STATE_AUDIO_CONNECTING) {
        Log.d(TAG, "+++ Bluetooth audio SCO is now connecting...");
      } else if (state == BluetoothHeadset.STATE_AUDIO_DISCONNECTED) {
        Log.d(TAG, "+++ Bluetooth audio SCO is now disconnected");
        // A sticky DISCONNECTED broadcast is delivered on registration and does not
        // reflect a real state change; ignore it so we don't reroute audio spuriously.
        if (isInitialStickyBroadcast()) {
          Log.d(TAG, "Ignore STATE_AUDIO_DISCONNECTED initial sticky broadcast.");
          return;
        }
        updateAudioDeviceState();
      }
    }
    Log.d(TAG, "onReceive done: BT state=" + bluetoothState);
  }
};
|
||||
|
||||
/** Construction. */
|
||||
static AppRTCBluetoothManager create(Context context, AppRTCAudioManager audioManager) {
|
||||
Log.d(TAG, "create" + AppRTCUtils.getThreadInfo());
|
||||
return new AppRTCBluetoothManager(context, audioManager);
|
||||
}
|
||||
|
||||
/**
 * Protected constructor (use create()); must run on the main thread.
 * Stores the application/audio-manager references, initializes the state machine to
 * UNINITIALIZED and creates (but does not yet register) the profile listener and
 * broadcast receiver. start() performs the actual registration.
 */
protected AppRTCBluetoothManager(Context context, AppRTCAudioManager audioManager) {
  Log.d(TAG, "ctor");
  ThreadUtils.checkIsOnMainThread();
  apprtcContext = context;
  apprtcAudioManager = audioManager;
  // Overridable accessor so tests can inject a mock AudioManager.
  this.audioManager = getAudioManager(context);
  bluetoothState = State.UNINITIALIZED;
  bluetoothServiceListener = new BluetoothServiceListener();
  bluetoothHeadsetReceiver = new BluetoothHeadsetBroadcastReceiver();
  // Handler bound to the main looper: timers and callbacks all run on the main thread.
  handler = new Handler(Looper.getMainLooper());
}
|
||||
|
||||
/** Returns the internal state. */
|
||||
public State getState() {
|
||||
ThreadUtils.checkIsOnMainThread();
|
||||
return bluetoothState;
|
||||
}
|
||||
|
||||
/**
 * Activates components required to detect Bluetooth devices and to enable
 * BT SCO (audio is routed via BT SCO) for the headset profile. The end
 * state will be HEADSET_UNAVAILABLE but a state machine has started which
 * will start a state change sequence where the final outcome depends on
 * if/when the BT headset is enabled.
 * Example of state change sequence when start() is called while BT device
 * is connected and enabled:
 * UNINITIALIZED --> HEADSET_UNAVAILABLE --> HEADSET_AVAILABLE -->
 * SCO_CONNECTING --> SCO_CONNECTED <==> audio is now routed via BT SCO.
 * Note that the AppRTCAudioManager is also involved in driving this state
 * change.
 */
public void start() {
  ThreadUtils.checkIsOnMainThread();
  Log.d(TAG, "start");
  // Bail out silently if the app lacks the BLUETOOTH permission; BT simply stays off.
  if (!hasPermission(apprtcContext, android.Manifest.permission.BLUETOOTH)) {
    Log.w(TAG, "Process (pid=" + Process.myPid() + ") lacks BLUETOOTH permission");
    return;
  }
  // start() is only valid from the UNINITIALIZED state (i.e. not already started).
  if (bluetoothState != State.UNINITIALIZED) {
    Log.w(TAG, "Invalid BT state");
    return;
  }
  bluetoothHeadset = null;
  bluetoothDevice = null;
  scoConnectionAttempts = 0;
  // Get a handle to the default local Bluetooth adapter.
  bluetoothAdapter = BluetoothAdapter.getDefaultAdapter();
  if (bluetoothAdapter == null) {
    Log.w(TAG, "Device does not support Bluetooth");
    return;
  }
  // Ensure that the device supports use of BT SCO audio for off call use cases.
  if (!audioManager.isBluetoothScoAvailableOffCall()) {
    Log.e(TAG, "Bluetooth SCO audio is not available off call");
    return;
  }
  logBluetoothAdapterInfo(bluetoothAdapter);
  // Establish a connection to the HEADSET profile (includes both Bluetooth Headset and
  // Hands-Free) proxy object and install a listener.
  if (!getBluetoothProfileProxy(
          apprtcContext, bluetoothServiceListener, BluetoothProfile.HEADSET)) {
    Log.e(TAG, "BluetoothAdapter.getProfileProxy(HEADSET) failed");
    return;
  }
  // Register receivers for BluetoothHeadset change notifications.
  IntentFilter bluetoothHeadsetFilter = new IntentFilter();
  // Register receiver for change in connection state of the Headset profile.
  bluetoothHeadsetFilter.addAction(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED);
  // Register receiver for change in audio connection state of the Headset profile.
  bluetoothHeadsetFilter.addAction(BluetoothHeadset.ACTION_AUDIO_STATE_CHANGED);
  registerReceiver(bluetoothHeadsetReceiver, bluetoothHeadsetFilter);
  Log.d(TAG, "HEADSET profile state: "
          + stateToString(bluetoothAdapter.getProfileConnectionState(BluetoothProfile.HEADSET)));
  Log.d(TAG, "Bluetooth proxy for headset profile has started");
  // From here on the ServiceListener / BroadcastReceiver callbacks drive the state machine.
  bluetoothState = State.HEADSET_UNAVAILABLE;
  Log.d(TAG, "start done: BT state=" + bluetoothState);
}
|
||||
|
||||
/**
 * Stops and closes all components related to Bluetooth audio.
 * Safe to call regardless of current state; returns the manager to UNINITIALIZED.
 * Teardown order matters: SCO is stopped first, then the receiver is unregistered
 * and the profile proxy closed.
 */
public void stop() {
  ThreadUtils.checkIsOnMainThread();
  Log.d(TAG, "stop: BT state=" + bluetoothState);
  // Nothing to do if start() never got past adapter acquisition.
  if (bluetoothAdapter == null) {
    return;
  }
  // Stop BT SCO connection with remote device if needed.
  stopScoAudio();
  // Close down remaining BT resources.
  if (bluetoothState == State.UNINITIALIZED) {
    return;
  }
  unregisterReceiver(bluetoothHeadsetReceiver);
  cancelTimer();
  if (bluetoothHeadset != null) {
    bluetoothAdapter.closeProfileProxy(BluetoothProfile.HEADSET, bluetoothHeadset);
    bluetoothHeadset = null;
  }
  bluetoothAdapter = null;
  bluetoothDevice = null;
  bluetoothState = State.UNINITIALIZED;
  Log.d(TAG, "stop done: BT state=" + bluetoothState);
}
|
||||
|
||||
/**
 * Starts Bluetooth SCO connection with remote device.
 * Note that the phone application always has the priority on the usage of the SCO connection
 * for telephony. If this method is called while the phone is in call it will be ignored.
 * Similarly, if a call is received or sent while an application is using the SCO connection,
 * the connection will be lost for the application and NOT returned automatically when the call
 * ends. Also note that: up to and including API version JELLY_BEAN_MR1, this method initiates a
 * virtual voice call to the Bluetooth headset. After API version JELLY_BEAN_MR2 only a raw SCO
 * audio connection is established.
 * TODO(henrika): should we add support for virtual voice call to BT headset also for JBMR2 and
 * higher. It might be required to initiates a virtual voice call since many devices do not
 * accept SCO audio without a "call".
 *
 * @return true if a SCO connection attempt was started (completion is signaled
 *     asynchronously via ACTION_AUDIO_STATE_CHANGED), false if it was rejected.
 */
public boolean startScoAudio() {
  ThreadUtils.checkIsOnMainThread();
  Log.d(TAG, "startSco: BT state=" + bluetoothState + ", "
          + "attempts: " + scoConnectionAttempts + ", "
          + "SCO is on: " + isScoOn());
  // Give up after MAX_SCO_CONNECTION_ATTEMPTS failed attempts; the counter is reset
  // whenever a headset (re)connects or SCO comes up successfully.
  if (scoConnectionAttempts >= MAX_SCO_CONNECTION_ATTEMPTS) {
    Log.e(TAG, "BT SCO connection fails - no more attempts");
    return false;
  }
  // SCO can only be started once a headset has been detected.
  if (bluetoothState != State.HEADSET_AVAILABLE) {
    Log.e(TAG, "BT SCO connection fails - no headset available");
    return false;
  }
  // Start BT SCO channel and wait for ACTION_AUDIO_STATE_CHANGED.
  Log.d(TAG, "Starting Bluetooth SCO and waits for ACTION_AUDIO_STATE_CHANGED...");
  // The SCO connection establishment can take several seconds, hence we cannot rely on the
  // connection to be available when the method returns but instead register to receive the
  // intent ACTION_SCO_AUDIO_STATE_UPDATED and wait for the state to be SCO_AUDIO_STATE_CONNECTED.
  bluetoothState = State.SCO_CONNECTING;
  audioManager.startBluetoothSco();
  audioManager.setBluetoothScoOn(true);
  scoConnectionAttempts++;
  // Arm the timeout that recovers if ACTION_AUDIO_STATE_CHANGED never arrives.
  startTimer();
  Log.d(TAG, "startScoAudio done: BT state=" + bluetoothState + ", "
          + "SCO is on: " + isScoOn());
  return true;
}
|
||||
|
||||
/**
 * Stops Bluetooth SCO connection with remote device.
 * No-op unless a SCO connection is being established or is active.
 */
public void stopScoAudio() {
  ThreadUtils.checkIsOnMainThread();
  Log.d(TAG, "stopScoAudio: BT state=" + bluetoothState + ", "
          + "SCO is on: " + isScoOn());
  if (bluetoothState != State.SCO_CONNECTING && bluetoothState != State.SCO_CONNECTED) {
    return;
  }
  // Stop any pending connection-timeout task before tearing the channel down.
  cancelTimer();
  audioManager.stopBluetoothSco();
  audioManager.setBluetoothScoOn(false);
  bluetoothState = State.SCO_DISCONNECTING;
  Log.d(TAG, "stopScoAudio done: BT state=" + bluetoothState + ", "
          + "SCO is on: " + isScoOn());
}
|
||||
|
||||
/**
|
||||
* Use the BluetoothHeadset proxy object (controls the Bluetooth Headset
|
||||
* Service via IPC) to update the list of connected devices for the HEADSET
|
||||
* profile. The internal state will change to HEADSET_UNAVAILABLE or to
|
||||
* HEADSET_AVAILABLE and |bluetoothDevice| will be mapped to the connected
|
||||
* device if available.
|
||||
*/
|
||||
public void updateDevice() {
|
||||
if (bluetoothState == State.UNINITIALIZED || bluetoothHeadset == null) {
|
||||
return;
|
||||
}
|
||||
Log.d(TAG, "updateDevice");
|
||||
// Get connected devices for the headset profile. Returns the set of
|
||||
// devices which are in state STATE_CONNECTED. The BluetoothDevice class
|
||||
// is just a thin wrapper for a Bluetooth hardware address.
|
||||
List<BluetoothDevice> devices = bluetoothHeadset.getConnectedDevices();
|
||||
if (devices.isEmpty()) {
|
||||
bluetoothDevice = null;
|
||||
bluetoothState = State.HEADSET_UNAVAILABLE;
|
||||
Log.d(TAG, "No connected bluetooth headset");
|
||||
} else {
|
||||
// Always use first device in list. Android only supports one device.
|
||||
bluetoothDevice = devices.get(0);
|
||||
bluetoothState = State.HEADSET_AVAILABLE;
|
||||
Log.d(TAG, "Connected bluetooth headset: "
|
||||
+ "name=" + bluetoothDevice.getName() + ", "
|
||||
+ "state=" + stateToString(bluetoothHeadset.getConnectionState(bluetoothDevice))
|
||||
+ ", SCO audio=" + bluetoothHeadset.isAudioConnected(bluetoothDevice));
|
||||
}
|
||||
Log.d(TAG, "updateDevice done: BT state=" + bluetoothState);
|
||||
}
|
||||
|
||||
/**
|
||||
* Stubs for test mocks.
|
||||
*/
|
||||
protected AudioManager getAudioManager(Context context) {
|
||||
return (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
|
||||
}
|
||||
|
||||
/** Stub for test mocks: registers |receiver| for |filter| on the application context. */
protected void registerReceiver(BroadcastReceiver receiver, IntentFilter filter) {
  apprtcContext.registerReceiver(receiver, filter);
}
|
||||
|
||||
/** Stub for test mocks: unregisters |receiver| from the application context. */
protected void unregisterReceiver(BroadcastReceiver receiver) {
  apprtcContext.unregisterReceiver(receiver);
}
|
||||
|
||||
/**
 * Stub for test mocks: connects to the proxy object for |profile| and installs |listener|.
 * Returns true if the proxy connection attempt was started successfully.
 */
protected boolean getBluetoothProfileProxy(
    Context context, BluetoothProfile.ServiceListener listener, int profile) {
  return bluetoothAdapter.getProfileProxy(context, listener, profile);
}
|
||||
|
||||
protected boolean hasPermission(Context context, String permission) {
|
||||
return apprtcContext.checkPermission(permission, Process.myPid(), Process.myUid())
|
||||
== PackageManager.PERMISSION_GRANTED;
|
||||
}
|
||||
|
||||
/** Logs the state of the local Bluetooth adapter. */
|
||||
@SuppressLint("HardwareIds")
|
||||
protected void logBluetoothAdapterInfo(BluetoothAdapter localAdapter) {
|
||||
Log.d(TAG, "BluetoothAdapter: "
|
||||
+ "enabled=" + localAdapter.isEnabled() + ", "
|
||||
+ "state=" + stateToString(localAdapter.getState()) + ", "
|
||||
+ "name=" + localAdapter.getName() + ", "
|
||||
+ "address=" + localAdapter.getAddress());
|
||||
// Log the set of BluetoothDevice objects that are bonded (paired) to the local adapter.
|
||||
Set<BluetoothDevice> pairedDevices = localAdapter.getBondedDevices();
|
||||
if (!pairedDevices.isEmpty()) {
|
||||
Log.d(TAG, "paired devices:");
|
||||
for (BluetoothDevice device : pairedDevices) {
|
||||
Log.d(TAG, " name=" + device.getName() + ", address=" + device.getAddress());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Ensures that the audio manager updates its list of available audio devices.
 * Forwards to the owning AppRTCAudioManager; must be called on the main thread.
 */
private void updateAudioDeviceState() {
  ThreadUtils.checkIsOnMainThread();
  Log.d(TAG, "updateAudioDeviceState");
  apprtcAudioManager.updateAudioDeviceState();
}
|
||||
|
||||
/**
 * Starts timer which times out after BLUETOOTH_SCO_TIMEOUT_MS milliseconds.
 * On expiry, bluetoothTimeout() runs on the main looper; cancelTimer() disarms it.
 */
private void startTimer() {
  ThreadUtils.checkIsOnMainThread();
  Log.d(TAG, "startTimer");
  handler.postDelayed(bluetoothTimeoutRunnable, BLUETOOTH_SCO_TIMEOUT_MS);
}
|
||||
|
||||
/** Cancels any outstanding timer tasks posted by startTimer(). */
private void cancelTimer() {
  ThreadUtils.checkIsOnMainThread();
  Log.d(TAG, "cancelTimer");
  handler.removeCallbacks(bluetoothTimeoutRunnable);
}
|
||||
|
||||
/**
 * Called when start of the BT SCO channel takes too long time. Usually
 * happens when the BT device has been turned on during an ongoing call.
 * Re-checks the actual headset audio state before giving up, since the
 * ACTION_AUDIO_STATE_CHANGED broadcast may simply have been missed.
 */
private void bluetoothTimeout() {
  ThreadUtils.checkIsOnMainThread();
  if (bluetoothState == State.UNINITIALIZED || bluetoothHeadset == null) {
    return;
  }
  Log.d(TAG, "bluetoothTimeout: BT state=" + bluetoothState + ", "
          + "attempts: " + scoConnectionAttempts + ", "
          + "SCO is on: " + isScoOn());
  // Only relevant while a SCO connection attempt is in flight.
  if (bluetoothState != State.SCO_CONNECTING) {
    return;
  }
  // Bluetooth SCO should be connecting; check the latest result.
  boolean scoConnected = false;
  List<BluetoothDevice> devices = bluetoothHeadset.getConnectedDevices();
  if (devices.size() > 0) {
    bluetoothDevice = devices.get(0);
    if (bluetoothHeadset.isAudioConnected(bluetoothDevice)) {
      Log.d(TAG, "SCO connected with " + bluetoothDevice.getName());
      scoConnected = true;
    } else {
      Log.d(TAG, "SCO is not connected with " + bluetoothDevice.getName());
    }
  }
  if (scoConnected) {
    // We thought BT had timed out, but it's actually on; updating state.
    bluetoothState = State.SCO_CONNECTED;
    scoConnectionAttempts = 0;
  } else {
    // Give up and "cancel" our request by calling stopBluetoothSco().
    Log.w(TAG, "BT failed to connect after timeout");
    stopScoAudio();
  }
  updateAudioDeviceState();
  Log.d(TAG, "bluetoothTimeout done: BT state=" + bluetoothState);
}
|
||||
|
||||
/** Checks whether audio is currently routed over Bluetooth SCO. */
private boolean isScoOn() {
  return audioManager.isBluetoothScoOn();
}
|
||||
|
||||
/** Converts BluetoothAdapter states into local string representations. */
|
||||
private String stateToString(int state) {
|
||||
switch (state) {
|
||||
case BluetoothAdapter.STATE_DISCONNECTED:
|
||||
return "DISCONNECTED";
|
||||
case BluetoothAdapter.STATE_CONNECTED:
|
||||
return "CONNECTED";
|
||||
case BluetoothAdapter.STATE_CONNECTING:
|
||||
return "CONNECTING";
|
||||
case BluetoothAdapter.STATE_DISCONNECTING:
|
||||
return "DISCONNECTING";
|
||||
case BluetoothAdapter.STATE_OFF:
|
||||
return "OFF";
|
||||
case BluetoothAdapter.STATE_ON:
|
||||
return "ON";
|
||||
case BluetoothAdapter.STATE_TURNING_OFF:
|
||||
// Indicates the local Bluetooth adapter is turning off. Local clients should immediately
|
||||
// attempt graceful disconnection of any remote links.
|
||||
return "TURNING_OFF";
|
||||
case BluetoothAdapter.STATE_TURNING_ON:
|
||||
// Indicates the local Bluetooth adapter is turning on. However local clients should wait
|
||||
// for STATE_ON before attempting to use the adapter.
|
||||
return "TURNING_ON";
|
||||
default:
|
||||
return "INVALID";
|
||||
}
|
||||
}
|
||||
}
|
||||
137
examples/androidapp/src/org/appspot/apprtc/AppRTCClient.java
Normal file
137
examples/androidapp/src/org/appspot/apprtc/AppRTCClient.java
Normal file
@ -0,0 +1,137 @@
|
||||
/*
|
||||
* Copyright 2013 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.appspot.apprtc;
|
||||
|
||||
import org.webrtc.IceCandidate;
|
||||
import org.webrtc.PeerConnection;
|
||||
import org.webrtc.SessionDescription;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
 * AppRTCClient is the interface representing an AppRTC client.
 * Implementations handle the signaling channel to an AppRTC room; events are
 * delivered back through the SignalingEvents callback interface.
 */
public interface AppRTCClient {
  /**
   * Struct holding the connection parameters of an AppRTC room.
   */
  class RoomConnectionParameters {
    public final String roomUrl;
    public final String roomId;
    public final boolean loopback;
    // Extra URL query parameters appended to the room request; may be null.
    public final String urlParameters;
    public RoomConnectionParameters(
        String roomUrl, String roomId, boolean loopback, String urlParameters) {
      this.roomUrl = roomUrl;
      this.roomId = roomId;
      this.loopback = loopback;
      this.urlParameters = urlParameters;
    }
    /** Convenience constructor with no extra URL parameters. */
    public RoomConnectionParameters(String roomUrl, String roomId, boolean loopback) {
      this(roomUrl, roomId, loopback, null /* urlParameters */);
    }
  }

  /**
   * Asynchronously connect to an AppRTC room URL using supplied connection
   * parameters. Once connection is established onConnectedToRoom()
   * callback with room parameters is invoked.
   *
   * @param connectionParameters room URL, id and options to connect with.
   */
  void connectToRoom(RoomConnectionParameters connectionParameters);

  /**
   * Send offer SDP to the other participant.
   */
  void sendOfferSdp(final SessionDescription sdp);

  /**
   * Send answer SDP to the other participant.
   */
  void sendAnswerSdp(final SessionDescription sdp);

  /**
   * Send Ice candidate to the other participant.
   */
  void sendLocalIceCandidate(final IceCandidate candidate);

  /**
   * Send removed ICE candidates to the other participant.
   */
  void sendLocalIceCandidateRemovals(final IceCandidate[] candidates);

  /**
   * Disconnect from room.
   */
  void disconnectFromRoom();

  /**
   * Struct holding the signaling parameters of an AppRTC room.
   */
  class SignalingParameters {
    public final List<PeerConnection.IceServer> iceServers;
    // True if this client creates the offer; false if it answers.
    public final boolean initiator;
    public final String clientId;
    public final String wssUrl;
    public final String wssPostUrl;
    // Remote offer, present when this client is the answerer; may be null.
    public final SessionDescription offerSdp;
    public final List<IceCandidate> iceCandidates;

    public SignalingParameters(List<PeerConnection.IceServer> iceServers, boolean initiator,
        String clientId, String wssUrl, String wssPostUrl, SessionDescription offerSdp,
        List<IceCandidate> iceCandidates) {
      this.iceServers = iceServers;
      this.initiator = initiator;
      this.clientId = clientId;
      this.wssUrl = wssUrl;
      this.wssPostUrl = wssPostUrl;
      this.offerSdp = offerSdp;
      this.iceCandidates = iceCandidates;
    }
  }

  /**
   * Callback interface for messages delivered on signaling channel.
   *
   * <p>Methods are guaranteed to be invoked on the UI thread of |activity|.
   */
  interface SignalingEvents {
    /**
     * Callback fired once the room's signaling parameters
     * SignalingParameters are extracted.
     */
    void onConnectedToRoom(final SignalingParameters params);

    /**
     * Callback fired once remote SDP is received.
     */
    void onRemoteDescription(final SessionDescription sdp);

    /**
     * Callback fired once remote Ice candidate is received.
     */
    void onRemoteIceCandidate(final IceCandidate candidate);

    /**
     * Callback fired once remote Ice candidate removals are received.
     */
    void onRemoteIceCandidatesRemoved(final IceCandidate[] candidates);

    /**
     * Callback fired once channel is closed.
     */
    void onChannelClose();

    /**
     * Callback fired once channel error happened.
     */
    void onChannelError(final String description);
  }
}
|
||||
@ -0,0 +1,163 @@
|
||||
/*
|
||||
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.appspot.apprtc;
|
||||
|
||||
import android.content.Context;
|
||||
import android.hardware.Sensor;
|
||||
import android.hardware.SensorEvent;
|
||||
import android.hardware.SensorEventListener;
|
||||
import android.hardware.SensorManager;
|
||||
import android.os.Build;
|
||||
import android.util.Log;
|
||||
import org.appspot.apprtc.util.AppRTCUtils;
|
||||
import org.webrtc.ThreadUtils;
|
||||
|
||||
/**
 * AppRTCProximitySensor manages functions related to the proximity sensor in
 * the AppRTC demo.
 * On most device, the proximity sensor is implemented as a boolean-sensor.
 * It returns just two values "NEAR" or "FAR". Thresholding is done on the LUX
 * value i.e. the LUX value of the light sensor is compared with a threshold.
 * A LUX-value more than the threshold means the proximity sensor returns "FAR".
 * Anything less than the threshold value and the sensor returns "NEAR".
 */
public class AppRTCProximitySensor implements SensorEventListener {
  private static final String TAG = "AppRTCProximitySensor";

  // This class should be created, started and stopped on one thread
  // (e.g. the main thread). We use |nonThreadSafe| to ensure that this is
  // the case. Only active when |DEBUG| is set to true.
  private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();

  // Client callback invoked on every reported NEAR/FAR transition.
  private final Runnable onSensorStateListener;
  private final SensorManager sensorManager;
  // Lazily resolved in initDefaultSensor(); stays null on devices without the sensor.
  private Sensor proximitySensor = null;
  // Latest reported state; true means "NEAR".
  private boolean lastStateReportIsNear = false;

  /** Construction */
  static AppRTCProximitySensor create(Context context, Runnable sensorStateListener) {
    return new AppRTCProximitySensor(context, sensorStateListener);
  }

  private AppRTCProximitySensor(Context context, Runnable sensorStateListener) {
    Log.d(TAG, "AppRTCProximitySensor" + AppRTCUtils.getThreadInfo());
    onSensorStateListener = sensorStateListener;
    sensorManager = ((SensorManager) context.getSystemService(Context.SENSOR_SERVICE));
  }

  /**
   * Activate the proximity sensor. Also do initialization if called for the
   * first time.
   *
   * @return true if the sensor was found and listening started, false if the
   *     device has no proximity sensor.
   */
  public boolean start() {
    threadChecker.checkIsOnValidThread();
    Log.d(TAG, "start" + AppRTCUtils.getThreadInfo());
    if (!initDefaultSensor()) {
      // Proximity sensor is not supported on this device.
      return false;
    }
    sensorManager.registerListener(this, proximitySensor, SensorManager.SENSOR_DELAY_NORMAL);
    return true;
  }

  /** Deactivate the proximity sensor. No-op if start() never found a sensor. */
  public void stop() {
    threadChecker.checkIsOnValidThread();
    Log.d(TAG, "stop" + AppRTCUtils.getThreadInfo());
    if (proximitySensor == null) {
      return;
    }
    sensorManager.unregisterListener(this, proximitySensor);
  }

  /** Getter for last reported state. Set to true if "near" is reported. */
  public boolean sensorReportsNearState() {
    threadChecker.checkIsOnValidThread();
    return lastStateReportIsNear;
  }

  @Override
  public final void onAccuracyChanged(Sensor sensor, int accuracy) {
    threadChecker.checkIsOnValidThread();
    AppRTCUtils.assertIsTrue(sensor.getType() == Sensor.TYPE_PROXIMITY);
    if (accuracy == SensorManager.SENSOR_STATUS_UNRELIABLE) {
      Log.e(TAG, "The values returned by this sensor cannot be trusted");
    }
  }

  @Override
  public final void onSensorChanged(SensorEvent event) {
    threadChecker.checkIsOnValidThread();
    AppRTCUtils.assertIsTrue(event.sensor.getType() == Sensor.TYPE_PROXIMITY);
    // As a best practice; do as little as possible within this method and
    // avoid blocking.
    float distanceInCentimeters = event.values[0];
    // Threshold against the sensor's maximum range: anything below it counts as NEAR.
    if (distanceInCentimeters < proximitySensor.getMaximumRange()) {
      Log.d(TAG, "Proximity sensor => NEAR state");
      lastStateReportIsNear = true;
    } else {
      Log.d(TAG, "Proximity sensor => FAR state");
      lastStateReportIsNear = false;
    }

    // Report about new state to listening client. Client can then call
    // sensorReportsNearState() to query the current state (NEAR or FAR).
    if (onSensorStateListener != null) {
      onSensorStateListener.run();
    }

    Log.d(TAG, "onSensorChanged" + AppRTCUtils.getThreadInfo() + ": "
            + "accuracy=" + event.accuracy + ", timestamp=" + event.timestamp + ", distance="
            + event.values[0]);
  }

  /**
   * Get default proximity sensor if it exists. Tablet devices (e.g. Nexus 7)
   * does not support this type of sensor and false will be returned in such
   * cases.
   */
  private boolean initDefaultSensor() {
    if (proximitySensor != null) {
      return true;
    }
    proximitySensor = sensorManager.getDefaultSensor(Sensor.TYPE_PROXIMITY);
    if (proximitySensor == null) {
      return false;
    }
    logProximitySensorInfo();
    return true;
  }

  /** Helper method for logging information about the proximity sensor. */
  private void logProximitySensorInfo() {
    if (proximitySensor == null) {
      return;
    }
    StringBuilder info = new StringBuilder("Proximity sensor: ");
    info.append("name=").append(proximitySensor.getName());
    info.append(", vendor: ").append(proximitySensor.getVendor());
    info.append(", power: ").append(proximitySensor.getPower());
    info.append(", resolution: ").append(proximitySensor.getResolution());
    info.append(", max range: ").append(proximitySensor.getMaximumRange());
    info.append(", min delay: ").append(proximitySensor.getMinDelay());
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT_WATCH) {
      // Added in API level 20.
      info.append(", type: ").append(proximitySensor.getStringType());
    }
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
      // Added in API level 21.
      info.append(", max delay: ").append(proximitySensor.getMaxDelay());
      info.append(", reporting mode: ").append(proximitySensor.getReportingMode());
      info.append(", isWakeUpSensor: ").append(proximitySensor.isWakeUpSensor());
    }
    Log.d(TAG, info.toString());
  }
}
|
||||
957
examples/androidapp/src/org/appspot/apprtc/CallActivity.java
Normal file
957
examples/androidapp/src/org/appspot/apprtc/CallActivity.java
Normal file
@ -0,0 +1,957 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.appspot.apprtc;
|
||||
|
||||
import android.annotation.TargetApi;
|
||||
import android.app.Activity;
|
||||
import android.app.AlertDialog;
|
||||
import android.app.FragmentTransaction;
|
||||
import android.content.Context;
|
||||
import android.content.DialogInterface;
|
||||
import android.content.Intent;
|
||||
import android.content.pm.PackageManager;
|
||||
import android.media.projection.MediaProjection;
|
||||
import android.media.projection.MediaProjectionManager;
|
||||
import android.net.Uri;
|
||||
import android.os.Build;
|
||||
import android.os.Bundle;
|
||||
import android.os.Handler;
|
||||
import android.util.DisplayMetrics;
|
||||
import android.util.Log;
|
||||
import android.view.View;
|
||||
import android.view.Window;
|
||||
import android.view.WindowManager;
|
||||
import android.view.WindowManager.LayoutParams;
|
||||
import android.widget.Toast;
|
||||
import java.io.IOException;
|
||||
import java.lang.RuntimeException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import org.appspot.apprtc.AppRTCAudioManager.AudioDevice;
|
||||
import org.appspot.apprtc.AppRTCAudioManager.AudioManagerEvents;
|
||||
import org.appspot.apprtc.AppRTCClient.RoomConnectionParameters;
|
||||
import org.appspot.apprtc.AppRTCClient.SignalingParameters;
|
||||
import org.appspot.apprtc.PeerConnectionClient.DataChannelParameters;
|
||||
import org.appspot.apprtc.PeerConnectionClient.PeerConnectionParameters;
|
||||
import org.webrtc.Camera1Enumerator;
|
||||
import org.webrtc.Camera2Enumerator;
|
||||
import org.webrtc.CameraEnumerator;
|
||||
import org.webrtc.FileVideoCapturer;
|
||||
import org.webrtc.IceCandidate;
|
||||
import org.webrtc.Logging;
|
||||
import org.webrtc.PeerConnectionFactory;
|
||||
import org.webrtc.RendererCommon.ScalingType;
|
||||
import org.webrtc.ScreenCapturerAndroid;
|
||||
import org.webrtc.SessionDescription;
|
||||
import org.webrtc.StatsReport;
|
||||
import org.webrtc.SurfaceViewRenderer;
|
||||
import org.webrtc.VideoCapturer;
|
||||
import org.webrtc.VideoFileRenderer;
|
||||
import org.webrtc.VideoFrame;
|
||||
import org.webrtc.VideoRenderer;
|
||||
import org.webrtc.VideoSink;
|
||||
|
||||
/**
|
||||
* Activity for peer connection call setup, call waiting
|
||||
* and call view.
|
||||
*/
|
||||
public class CallActivity extends Activity implements AppRTCClient.SignalingEvents,
|
||||
PeerConnectionClient.PeerConnectionEvents,
|
||||
CallFragment.OnCallEvents {
|
||||
private static final String TAG = "CallRTCClient";
|
||||
|
||||
// Fix for devices running old Android versions not finding the libraries.
// https://bugs.chromium.org/p/webrtc/issues/detail?id=6751
//
// Eagerly pre-loads the native dependencies of the WebRTC library in
// dependency order before any class in this Activity touches native code.
// Failure is deliberately non-fatal: on devices/builds where these libraries
// are linked differently the UnsatisfiedLinkError is only logged, and loading
// is left to the normal System.loadLibrary calls made later by the SDK.
static {
  try {
    System.loadLibrary("c++_shared");
    System.loadLibrary("boringssl.cr");
    System.loadLibrary("protobuf_lite.cr");
  } catch (UnsatisfiedLinkError e) {
    // Best effort: keep going and let the later, real load attempt surface errors.
    Logging.w(TAG, "Failed to load native dependencies: ", e);
  }
}
|
||||
|
||||
// Intent extra keys used to configure a call. The launching Activity
// (e.g. ConnectActivity) puts these extras on the Intent; onCreate() reads them.
public static final String EXTRA_ROOMID = "org.appspot.apprtc.ROOMID";
public static final String EXTRA_URLPARAMETERS = "org.appspot.apprtc.URLPARAMETERS";
public static final String EXTRA_LOOPBACK = "org.appspot.apprtc.LOOPBACK";
public static final String EXTRA_VIDEO_CALL = "org.appspot.apprtc.VIDEO_CALL";
public static final String EXTRA_SCREENCAPTURE = "org.appspot.apprtc.SCREENCAPTURE";
public static final String EXTRA_CAMERA2 = "org.appspot.apprtc.CAMERA2";
// Capture format hints; 0 means "unspecified" (see onCreate for the
// screen-resolution fallback used for screencapture).
public static final String EXTRA_VIDEO_WIDTH = "org.appspot.apprtc.VIDEO_WIDTH";
public static final String EXTRA_VIDEO_HEIGHT = "org.appspot.apprtc.VIDEO_HEIGHT";
public static final String EXTRA_VIDEO_FPS = "org.appspot.apprtc.VIDEO_FPS";
// NOTE(review): "appsopt" below looks like a typo for "appspot", but the string
// is a runtime contract shared with senders of the Intent, so it is kept as-is.
public static final String EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED =
    "org.appsopt.apprtc.VIDEO_CAPTUREQUALITYSLIDER";
public static final String EXTRA_VIDEO_BITRATE = "org.appspot.apprtc.VIDEO_BITRATE";
public static final String EXTRA_VIDEOCODEC = "org.appspot.apprtc.VIDEOCODEC";
public static final String EXTRA_HWCODEC_ENABLED = "org.appspot.apprtc.HWCODEC";
public static final String EXTRA_CAPTURETOTEXTURE_ENABLED = "org.appspot.apprtc.CAPTURETOTEXTURE";
public static final String EXTRA_FLEXFEC_ENABLED = "org.appspot.apprtc.FLEXFEC";
// Audio configuration extras.
public static final String EXTRA_AUDIO_BITRATE = "org.appspot.apprtc.AUDIO_BITRATE";
public static final String EXTRA_AUDIOCODEC = "org.appspot.apprtc.AUDIOCODEC";
public static final String EXTRA_NOAUDIOPROCESSING_ENABLED =
    "org.appspot.apprtc.NOAUDIOPROCESSING";
public static final String EXTRA_AECDUMP_ENABLED = "org.appspot.apprtc.AECDUMP";
public static final String EXTRA_OPENSLES_ENABLED = "org.appspot.apprtc.OPENSLES";
public static final String EXTRA_DISABLE_BUILT_IN_AEC = "org.appspot.apprtc.DISABLE_BUILT_IN_AEC";
public static final String EXTRA_DISABLE_BUILT_IN_AGC = "org.appspot.apprtc.DISABLE_BUILT_IN_AGC";
public static final String EXTRA_DISABLE_BUILT_IN_NS = "org.appspot.apprtc.DISABLE_BUILT_IN_NS";
public static final String EXTRA_ENABLE_LEVEL_CONTROL = "org.appspot.apprtc.ENABLE_LEVEL_CONTROL";
public static final String EXTRA_DISABLE_WEBRTC_AGC_AND_HPF =
    "org.appspot.apprtc.DISABLE_WEBRTC_GAIN_CONTROL";
// Misc run-mode extras.
public static final String EXTRA_DISPLAY_HUD = "org.appspot.apprtc.DISPLAY_HUD";
public static final String EXTRA_TRACING = "org.appspot.apprtc.TRACING";
public static final String EXTRA_CMDLINE = "org.appspot.apprtc.CMDLINE";
public static final String EXTRA_RUNTIME = "org.appspot.apprtc.RUNTIME";
public static final String EXTRA_VIDEO_FILE_AS_CAMERA = "org.appspot.apprtc.VIDEO_FILE_AS_CAMERA";
public static final String EXTRA_SAVE_REMOTE_VIDEO_TO_FILE =
    "org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE";
public static final String EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH =
    "org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE_WIDTH";
public static final String EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT =
    "org.appspot.apprtc.SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT";
public static final String EXTRA_USE_VALUES_FROM_INTENT =
    "org.appspot.apprtc.USE_VALUES_FROM_INTENT";
// Data channel configuration extras.
public static final String EXTRA_DATA_CHANNEL_ENABLED = "org.appspot.apprtc.DATA_CHANNEL_ENABLED";
public static final String EXTRA_ORDERED = "org.appspot.apprtc.ORDERED";
public static final String EXTRA_MAX_RETRANSMITS_MS = "org.appspot.apprtc.MAX_RETRANSMITS_MS";
public static final String EXTRA_MAX_RETRANSMITS = "org.appspot.apprtc.MAX_RETRANSMITS";
public static final String EXTRA_PROTOCOL = "org.appspot.apprtc.PROTOCOL";
public static final String EXTRA_NEGOTIATED = "org.appspot.apprtc.NEGOTIATED";
public static final String EXTRA_ID = "org.appspot.apprtc.ID";

// Request code used for the MediaProjection permission round trip
// (startScreenCapture -> onActivityResult).
private static final int CAPTURE_PERMISSION_REQUEST_CODE = 1;

// List of mandatory application permissions.
private static final String[] MANDATORY_PERMISSIONS = {"android.permission.MODIFY_AUDIO_SETTINGS",
    "android.permission.RECORD_AUDIO", "android.permission.INTERNET"};

// Peer connection statistics callback period in ms.
private static final int STAT_CALLBACK_PERIOD = 1000;
|
||||
|
||||
private class ProxyRenderer<T extends VideoRenderer.Callbacks & VideoSink>
|
||||
implements VideoRenderer.Callbacks, VideoSink {
|
||||
private T target;
|
||||
|
||||
@Override
|
||||
synchronized public void renderFrame(VideoRenderer.I420Frame frame) {
|
||||
if (target == null) {
|
||||
Logging.d(TAG, "Dropping frame in proxy because target is null.");
|
||||
VideoRenderer.renderFrameDone(frame);
|
||||
return;
|
||||
}
|
||||
|
||||
target.renderFrame(frame);
|
||||
}
|
||||
|
||||
@Override
|
||||
synchronized public void onFrame(VideoFrame frame) {
|
||||
if (target == null) {
|
||||
Logging.d(TAG, "Dropping frame in proxy because target is null.");
|
||||
return;
|
||||
}
|
||||
|
||||
target.onFrame(frame);
|
||||
}
|
||||
|
||||
synchronized public void setTarget(T target) {
|
||||
this.target = target;
|
||||
}
|
||||
}
|
||||
|
||||
// Proxies that sit between WebRTC and the on-screen renderers so the
// local/remote feeds can be swapped without touching the peer connection.
private final ProxyRenderer remoteProxyRenderer = new ProxyRenderer();
private final ProxyRenderer localProxyRenderer = new ProxyRenderer();
// Core call machinery; created in onCreate(), released in disconnect().
private PeerConnectionClient peerConnectionClient = null;
private AppRTCClient appRtcClient;
private SignalingParameters signalingParameters;
private AppRTCAudioManager audioManager = null;
// Small picture-in-picture view and the main fullscreen view.
private SurfaceViewRenderer pipRenderer;
private SurfaceViewRenderer fullscreenRenderer;
// Optional file sink for the remote stream (EXTRA_SAVE_REMOTE_VIDEO_TO_FILE).
private VideoFileRenderer videoFileRenderer;
private final List<VideoRenderer.Callbacks> remoteRenderers =
    new ArrayList<VideoRenderer.Callbacks>();
private Toast logToast;
// Command-line run mode: auto-disconnect after runTimeMs (see onCreate).
private boolean commandLineRun;
private int runTimeMs;
private boolean activityRunning;
private RoomConnectionParameters roomConnectionParameters;
private PeerConnectionParameters peerConnectionParameters;
private boolean iceConnected;
private boolean isError;
private boolean callControlFragmentVisible = true;
private long callStartedTimeMs = 0;
private boolean micEnabled = true;
private boolean screencaptureEnabled = false;
// Static so the MediaProjection permission result survives the Activity
// restart that can happen between onActivityResult and capturer creation.
private static Intent mediaProjectionPermissionResultData;
private static int mediaProjectionPermissionResultCode;
// True if local view is in the fullscreen renderer.
private boolean isSwappedFeeds;

// Controls
private CallFragment callFragment;
private HudFragment hudFragment;
private CpuMonitor cpuMonitor;
|
||||
|
||||
/**
 * Builds the whole call UI and pipeline from the launching Intent: window
 * flags, renderers, fragments, PeerConnectionClient, signaling client and
 * connection parameters. Finishes early (RESULT_CANCELED) if a mandatory
 * permission, the room URL or the room ID is missing. Ends by either asking
 * for screen-capture permission or starting the call directly.
 */
@Override
public void onCreate(Bundle savedInstanceState) {
  super.onCreate(savedInstanceState);
  Thread.setDefaultUncaughtExceptionHandler(new UnhandledExceptionHandler(this));

  // Set window styles for fullscreen-window size. Needs to be done before
  // adding content.
  requestWindowFeature(Window.FEATURE_NO_TITLE);
  getWindow().addFlags(LayoutParams.FLAG_FULLSCREEN | LayoutParams.FLAG_KEEP_SCREEN_ON
      | LayoutParams.FLAG_DISMISS_KEYGUARD | LayoutParams.FLAG_SHOW_WHEN_LOCKED
      | LayoutParams.FLAG_TURN_SCREEN_ON);
  getWindow().getDecorView().setSystemUiVisibility(getSystemUiVisibility());
  setContentView(R.layout.activity_call);

  iceConnected = false;
  signalingParameters = null;

  // Create UI controls.
  pipRenderer = (SurfaceViewRenderer) findViewById(R.id.pip_video_view);
  fullscreenRenderer = (SurfaceViewRenderer) findViewById(R.id.fullscreen_video_view);
  callFragment = new CallFragment();
  hudFragment = new HudFragment();

  // Show/hide call control fragment on view click.
  View.OnClickListener listener = new View.OnClickListener() {
    @Override
    public void onClick(View view) {
      toggleCallControlFragmentVisibility();
    }
  };

  // Swap feeds on pip view click.
  pipRenderer.setOnClickListener(new View.OnClickListener() {
    @Override
    public void onClick(View view) {
      setSwappedFeeds(!isSwappedFeeds);
    }
  });

  fullscreenRenderer.setOnClickListener(listener);
  remoteRenderers.add(remoteProxyRenderer);

  final Intent intent = getIntent();

  // Create peer connection client.
  peerConnectionClient = new PeerConnectionClient();

  // Create video renderers. Renderer init needs the client's render context,
  // so the client must exist first.
  pipRenderer.init(peerConnectionClient.getRenderContext(), null);
  pipRenderer.setScalingType(ScalingType.SCALE_ASPECT_FIT);
  String saveRemoteVideoToFile = intent.getStringExtra(EXTRA_SAVE_REMOTE_VIDEO_TO_FILE);

  // When saveRemoteVideoToFile is set we save the video from the remote to a file.
  if (saveRemoteVideoToFile != null) {
    int videoOutWidth = intent.getIntExtra(EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH, 0);
    int videoOutHeight = intent.getIntExtra(EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT, 0);
    try {
      videoFileRenderer = new VideoFileRenderer(saveRemoteVideoToFile, videoOutWidth,
          videoOutHeight, peerConnectionClient.getRenderContext());
      remoteRenderers.add(videoFileRenderer);
    } catch (IOException e) {
      throw new RuntimeException(
          "Failed to open video file for output: " + saveRemoteVideoToFile, e);
    }
  }
  fullscreenRenderer.init(peerConnectionClient.getRenderContext(), null);
  fullscreenRenderer.setScalingType(ScalingType.SCALE_ASPECT_FILL);

  pipRenderer.setZOrderMediaOverlay(true);
  pipRenderer.setEnableHardwareScaler(true /* enabled */);
  fullscreenRenderer.setEnableHardwareScaler(true /* enabled */);
  // Start with local feed in fullscreen and swap it to the pip when the call is connected.
  setSwappedFeeds(true /* isSwappedFeeds */);

  // Check for mandatory permissions.
  for (String permission : MANDATORY_PERMISSIONS) {
    if (checkCallingOrSelfPermission(permission) != PackageManager.PERMISSION_GRANTED) {
      logAndToast("Permission " + permission + " is not granted");
      setResult(RESULT_CANCELED);
      finish();
      return;
    }
  }

  Uri roomUri = intent.getData();
  if (roomUri == null) {
    logAndToast(getString(R.string.missing_url));
    Log.e(TAG, "Didn't get any URL in intent!");
    setResult(RESULT_CANCELED);
    finish();
    return;
  }

  // Get Intent parameters.
  String roomId = intent.getStringExtra(EXTRA_ROOMID);
  Log.d(TAG, "Room ID: " + roomId);
  if (roomId == null || roomId.length() == 0) {
    logAndToast(getString(R.string.missing_url));
    Log.e(TAG, "Incorrect room ID in intent!");
    setResult(RESULT_CANCELED);
    finish();
    return;
  }

  boolean loopback = intent.getBooleanExtra(EXTRA_LOOPBACK, false);
  boolean tracing = intent.getBooleanExtra(EXTRA_TRACING, false);

  int videoWidth = intent.getIntExtra(EXTRA_VIDEO_WIDTH, 0);
  int videoHeight = intent.getIntExtra(EXTRA_VIDEO_HEIGHT, 0);

  screencaptureEnabled = intent.getBooleanExtra(EXTRA_SCREENCAPTURE, false);
  // If capturing format is not specified for screencapture, use screen resolution.
  if (screencaptureEnabled && videoWidth == 0 && videoHeight == 0) {
    DisplayMetrics displayMetrics = getDisplayMetrics();
    videoWidth = displayMetrics.widthPixels;
    videoHeight = displayMetrics.heightPixels;
  }
  // Data channel is optional; null parameters means "no data channel".
  DataChannelParameters dataChannelParameters = null;
  if (intent.getBooleanExtra(EXTRA_DATA_CHANNEL_ENABLED, false)) {
    dataChannelParameters = new DataChannelParameters(intent.getBooleanExtra(EXTRA_ORDERED, true),
        intent.getIntExtra(EXTRA_MAX_RETRANSMITS_MS, -1),
        intent.getIntExtra(EXTRA_MAX_RETRANSMITS, -1), intent.getStringExtra(EXTRA_PROTOCOL),
        intent.getBooleanExtra(EXTRA_NEGOTIATED, false), intent.getIntExtra(EXTRA_ID, -1));
  }
  peerConnectionParameters =
      new PeerConnectionParameters(intent.getBooleanExtra(EXTRA_VIDEO_CALL, true), loopback,
          tracing, videoWidth, videoHeight, intent.getIntExtra(EXTRA_VIDEO_FPS, 0),
          intent.getIntExtra(EXTRA_VIDEO_BITRATE, 0), intent.getStringExtra(EXTRA_VIDEOCODEC),
          intent.getBooleanExtra(EXTRA_HWCODEC_ENABLED, true),
          intent.getBooleanExtra(EXTRA_FLEXFEC_ENABLED, false),
          intent.getIntExtra(EXTRA_AUDIO_BITRATE, 0), intent.getStringExtra(EXTRA_AUDIOCODEC),
          intent.getBooleanExtra(EXTRA_NOAUDIOPROCESSING_ENABLED, false),
          intent.getBooleanExtra(EXTRA_AECDUMP_ENABLED, false),
          intent.getBooleanExtra(EXTRA_OPENSLES_ENABLED, false),
          intent.getBooleanExtra(EXTRA_DISABLE_BUILT_IN_AEC, false),
          intent.getBooleanExtra(EXTRA_DISABLE_BUILT_IN_AGC, false),
          intent.getBooleanExtra(EXTRA_DISABLE_BUILT_IN_NS, false),
          intent.getBooleanExtra(EXTRA_ENABLE_LEVEL_CONTROL, false),
          intent.getBooleanExtra(EXTRA_DISABLE_WEBRTC_AGC_AND_HPF, false), dataChannelParameters);
  commandLineRun = intent.getBooleanExtra(EXTRA_CMDLINE, false);
  runTimeMs = intent.getIntExtra(EXTRA_RUNTIME, 0);

  Log.d(TAG, "VIDEO_FILE: '" + intent.getStringExtra(EXTRA_VIDEO_FILE_AS_CAMERA) + "'");

  // Create connection client. Use DirectRTCClient if room name is an IP otherwise use the
  // standard WebSocketRTCClient.
  if (loopback || !DirectRTCClient.IP_PATTERN.matcher(roomId).matches()) {
    appRtcClient = new WebSocketRTCClient(this);
  } else {
    Log.i(TAG, "Using DirectRTCClient because room name looks like an IP.");
    appRtcClient = new DirectRTCClient(this);
  }
  // Create connection parameters.
  String urlParameters = intent.getStringExtra(EXTRA_URLPARAMETERS);
  roomConnectionParameters =
      new RoomConnectionParameters(roomUri.toString(), roomId, loopback, urlParameters);

  // Create CPU monitor
  cpuMonitor = new CpuMonitor(this);
  hudFragment.setCpuMonitor(cpuMonitor);

  // Send intent arguments to fragments.
  callFragment.setArguments(intent.getExtras());
  hudFragment.setArguments(intent.getExtras());
  // Activate call and HUD fragments and start the call.
  FragmentTransaction ft = getFragmentManager().beginTransaction();
  ft.add(R.id.call_fragment_container, callFragment);
  ft.add(R.id.hud_fragment_container, hudFragment);
  ft.commit();

  // For command line execution run connection for <runTimeMs> and exit.
  if (commandLineRun && runTimeMs > 0) {
    (new Handler()).postDelayed(new Runnable() {
      @Override
      public void run() {
        disconnect();
      }
    }, runTimeMs);
  }

  // Loopback calls should not be filtered by network adapter type.
  if (loopback) {
    PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
    options.networkIgnoreMask = 0;
    peerConnectionClient.setPeerConnectionFactoryOptions(options);
  }
  peerConnectionClient.createPeerConnectionFactory(
      getApplicationContext(), peerConnectionParameters, CallActivity.this);

  // Screen capture needs a MediaProjection permission round trip first;
  // startCall() is then invoked from onActivityResult().
  if (screencaptureEnabled) {
    startScreenCapture();
  } else {
    startCall();
  }
}
|
||||
|
||||
@TargetApi(17)
|
||||
private DisplayMetrics getDisplayMetrics() {
|
||||
DisplayMetrics displayMetrics = new DisplayMetrics();
|
||||
WindowManager windowManager =
|
||||
(WindowManager) getApplication().getSystemService(Context.WINDOW_SERVICE);
|
||||
windowManager.getDefaultDisplay().getRealMetrics(displayMetrics);
|
||||
return displayMetrics;
|
||||
}
|
||||
|
||||
@TargetApi(19)
|
||||
private static int getSystemUiVisibility() {
|
||||
int flags = View.SYSTEM_UI_FLAG_HIDE_NAVIGATION | View.SYSTEM_UI_FLAG_FULLSCREEN;
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
|
||||
flags |= View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY;
|
||||
}
|
||||
return flags;
|
||||
}
|
||||
|
||||
@TargetApi(21)
|
||||
private void startScreenCapture() {
|
||||
MediaProjectionManager mediaProjectionManager =
|
||||
(MediaProjectionManager) getApplication().getSystemService(
|
||||
Context.MEDIA_PROJECTION_SERVICE);
|
||||
startActivityForResult(
|
||||
mediaProjectionManager.createScreenCaptureIntent(), CAPTURE_PERMISSION_REQUEST_CODE);
|
||||
}
|
||||
|
||||
/**
 * Receives the MediaProjection permission result requested by
 * startScreenCapture() and then starts the call. The result is stored in
 * static fields so createScreenCapturer() can read it later; the call is
 * started regardless of whether permission was granted — createScreenCapturer()
 * reports the error in the denied case.
 */
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
  if (requestCode != CAPTURE_PERMISSION_REQUEST_CODE)
    return;
  mediaProjectionPermissionResultCode = resultCode;
  mediaProjectionPermissionResultData = data;
  startCall();
}
|
||||
|
||||
private boolean useCamera2() {
|
||||
return Camera2Enumerator.isSupported(this) && getIntent().getBooleanExtra(EXTRA_CAMERA2, true);
|
||||
}
|
||||
|
||||
private boolean captureToTexture() {
|
||||
return getIntent().getBooleanExtra(EXTRA_CAPTURETOTEXTURE_ENABLED, false);
|
||||
}
|
||||
|
||||
private VideoCapturer createCameraCapturer(CameraEnumerator enumerator) {
|
||||
final String[] deviceNames = enumerator.getDeviceNames();
|
||||
|
||||
// First, try to find front facing camera
|
||||
Logging.d(TAG, "Looking for front facing cameras.");
|
||||
for (String deviceName : deviceNames) {
|
||||
if (enumerator.isFrontFacing(deviceName)) {
|
||||
Logging.d(TAG, "Creating front facing camera capturer.");
|
||||
VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);
|
||||
|
||||
if (videoCapturer != null) {
|
||||
return videoCapturer;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Front facing camera not found, try something else
|
||||
Logging.d(TAG, "Looking for other cameras.");
|
||||
for (String deviceName : deviceNames) {
|
||||
if (!enumerator.isFrontFacing(deviceName)) {
|
||||
Logging.d(TAG, "Creating other camera capturer.");
|
||||
VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);
|
||||
|
||||
if (videoCapturer != null) {
|
||||
return videoCapturer;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
 * Creates a screen capturer from the MediaProjection permission result stored
 * by onActivityResult(). Returns null (after reporting an error) if the user
 * denied the capture permission; the callback reports a further error if the
 * user later revokes the projection.
 */
@TargetApi(21)
private VideoCapturer createScreenCapturer() {
  if (mediaProjectionPermissionResultCode != Activity.RESULT_OK) {
    reportError("User didn't give permission to capture the screen.");
    return null;
  }
  return new ScreenCapturerAndroid(
      mediaProjectionPermissionResultData, new MediaProjection.Callback() {
        @Override
        public void onStop() {
          // Fired when the system or user tears down the projection mid-call.
          reportError("User revoked permission to capture the screen.");
        }
      });
}
|
||||
|
||||
// Activity interfaces
|
||||
@Override
|
||||
public void onStop() {
|
||||
super.onStop();
|
||||
activityRunning = false;
|
||||
// Don't stop the video when using screencapture to allow user to show other apps to the remote
|
||||
// end.
|
||||
if (peerConnectionClient != null && !screencaptureEnabled) {
|
||||
peerConnectionClient.stopVideoSource();
|
||||
}
|
||||
cpuMonitor.pause();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onStart() {
|
||||
super.onStart();
|
||||
activityRunning = true;
|
||||
// Video is not paused for screencapture. See onPause.
|
||||
if (peerConnectionClient != null && !screencaptureEnabled) {
|
||||
peerConnectionClient.startVideoSource();
|
||||
}
|
||||
cpuMonitor.resume();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void onDestroy() {
|
||||
Thread.setDefaultUncaughtExceptionHandler(null);
|
||||
disconnect();
|
||||
if (logToast != null) {
|
||||
logToast.cancel();
|
||||
}
|
||||
activityRunning = false;
|
||||
super.onDestroy();
|
||||
}
|
||||
|
||||
// CallFragment.OnCallEvents interface implementation.
|
||||
@Override
|
||||
public void onCallHangUp() {
|
||||
disconnect();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onCameraSwitch() {
|
||||
if (peerConnectionClient != null) {
|
||||
peerConnectionClient.switchCamera();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onVideoScalingSwitch(ScalingType scalingType) {
|
||||
fullscreenRenderer.setScalingType(scalingType);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onCaptureFormatChange(int width, int height, int framerate) {
|
||||
if (peerConnectionClient != null) {
|
||||
peerConnectionClient.changeCaptureFormat(width, height, framerate);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean onToggleMic() {
|
||||
if (peerConnectionClient != null) {
|
||||
micEnabled = !micEnabled;
|
||||
peerConnectionClient.setAudioEnabled(micEnabled);
|
||||
}
|
||||
return micEnabled;
|
||||
}
|
||||
|
||||
// Helper functions.
|
||||
private void toggleCallControlFragmentVisibility() {
|
||||
if (!iceConnected || !callFragment.isAdded()) {
|
||||
return;
|
||||
}
|
||||
// Show/hide call control fragment
|
||||
callControlFragmentVisible = !callControlFragmentVisible;
|
||||
FragmentTransaction ft = getFragmentManager().beginTransaction();
|
||||
if (callControlFragmentVisible) {
|
||||
ft.show(callFragment);
|
||||
ft.show(hudFragment);
|
||||
} else {
|
||||
ft.hide(callFragment);
|
||||
ft.hide(hudFragment);
|
||||
}
|
||||
ft.setTransition(FragmentTransaction.TRANSIT_FRAGMENT_FADE);
|
||||
ft.commit();
|
||||
}
|
||||
|
||||
/**
 * Starts the signaling connection to the room and brings up the audio
 * manager. Must run after onCreate has built appRtcClient and
 * roomConnectionParameters; logs and bails if the client is missing.
 */
private void startCall() {
  if (appRtcClient == null) {
    Log.e(TAG, "AppRTC client is not allocated for a call.");
    return;
  }
  // Used by the various "delay=...ms" log lines to measure setup latency.
  callStartedTimeMs = System.currentTimeMillis();

  // Start room connection.
  logAndToast(getString(R.string.connecting_to, roomConnectionParameters.roomUrl));
  appRtcClient.connectToRoom(roomConnectionParameters);

  // Create an audio manager that will take care of audio routing,
  // audio modes, audio device enumeration etc.
  audioManager = AppRTCAudioManager.create(getApplicationContext());
  // Store existing audio settings and change audio mode to
  // MODE_IN_COMMUNICATION for best possible VoIP performance.
  Log.d(TAG, "Starting the audio manager...");
  audioManager.start(new AudioManagerEvents() {
    // This method will be called each time the number of available audio
    // devices has changed.
    @Override
    public void onAudioDeviceChanged(
        AudioDevice audioDevice, Set<AudioDevice> availableAudioDevices) {
      onAudioManagerDevicesChanged(audioDevice, availableAudioDevices);
    }
  });
}
|
||||
|
||||
// Should be called from UI thread
|
||||
private void callConnected() {
|
||||
final long delta = System.currentTimeMillis() - callStartedTimeMs;
|
||||
Log.i(TAG, "Call connected: delay=" + delta + "ms");
|
||||
if (peerConnectionClient == null || isError) {
|
||||
Log.w(TAG, "Call is connected in closed or error state");
|
||||
return;
|
||||
}
|
||||
// Enable statistics callback.
|
||||
peerConnectionClient.enableStatsEvents(true, STAT_CALLBACK_PERIOD);
|
||||
setSwappedFeeds(false /* isSwappedFeeds */);
|
||||
}
|
||||
|
||||
// This method is called when the audio manager reports audio device change,
// e.g. from wired headset to speakerphone.
// Currently log-only; device selection is left to AppRTCAudioManager's
// own routing logic.
private void onAudioManagerDevicesChanged(
    final AudioDevice device, final Set<AudioDevice> availableDevices) {
  Log.d(TAG, "onAudioManagerDevicesChanged: " + availableDevices + ", "
          + "selected: " + device);
  // TODO(henrika): add callback handler.
}
|
||||
|
||||
// Disconnect from remote resources, dispose of local resources, and exit.
// Release order matters: proxy targets are cleared first so no frame is
// delivered to a renderer mid-release, signaling is closed before media,
// and renderers are released before the peer connection that feeds them.
// Idempotent: every field is null-checked and nulled after release.
private void disconnect() {
  activityRunning = false;
  // Stop frame delivery before tearing renderers down.
  remoteProxyRenderer.setTarget(null);
  localProxyRenderer.setTarget(null);
  if (appRtcClient != null) {
    appRtcClient.disconnectFromRoom();
    appRtcClient = null;
  }
  if (pipRenderer != null) {
    pipRenderer.release();
    pipRenderer = null;
  }
  if (videoFileRenderer != null) {
    videoFileRenderer.release();
    videoFileRenderer = null;
  }
  if (fullscreenRenderer != null) {
    fullscreenRenderer.release();
    fullscreenRenderer = null;
  }
  if (peerConnectionClient != null) {
    peerConnectionClient.close();
    peerConnectionClient = null;
  }
  if (audioManager != null) {
    // Restores the audio settings saved by audioManager.start().
    audioManager.stop();
    audioManager = null;
  }
  // Report success only if the call actually connected without errors.
  if (iceConnected && !isError) {
    setResult(RESULT_OK);
  } else {
    setResult(RESULT_CANCELED);
  }
  finish();
}
|
||||
|
||||
private void disconnectWithErrorMessage(final String errorMessage) {
|
||||
if (commandLineRun || !activityRunning) {
|
||||
Log.e(TAG, "Critical error: " + errorMessage);
|
||||
disconnect();
|
||||
} else {
|
||||
new AlertDialog.Builder(this)
|
||||
.setTitle(getText(R.string.channel_error_title))
|
||||
.setMessage(errorMessage)
|
||||
.setCancelable(false)
|
||||
.setNeutralButton(R.string.ok,
|
||||
new DialogInterface.OnClickListener() {
|
||||
@Override
|
||||
public void onClick(DialogInterface dialog, int id) {
|
||||
dialog.cancel();
|
||||
disconnect();
|
||||
}
|
||||
})
|
||||
.create()
|
||||
.show();
|
||||
}
|
||||
}
|
||||
|
||||
// Log |msg| and Toast about it.
|
||||
private void logAndToast(String msg) {
|
||||
Log.d(TAG, msg);
|
||||
if (logToast != null) {
|
||||
logToast.cancel();
|
||||
}
|
||||
logToast = Toast.makeText(this, msg, Toast.LENGTH_SHORT);
|
||||
logToast.show();
|
||||
}
|
||||
|
||||
private void reportError(final String description) {
|
||||
runOnUiThread(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
if (!isError) {
|
||||
isError = true;
|
||||
disconnectWithErrorMessage(description);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private VideoCapturer createVideoCapturer() {
|
||||
VideoCapturer videoCapturer = null;
|
||||
String videoFileAsCamera = getIntent().getStringExtra(EXTRA_VIDEO_FILE_AS_CAMERA);
|
||||
if (videoFileAsCamera != null) {
|
||||
try {
|
||||
videoCapturer = new FileVideoCapturer(videoFileAsCamera);
|
||||
} catch (IOException e) {
|
||||
reportError("Failed to open video file for emulated camera");
|
||||
return null;
|
||||
}
|
||||
} else if (screencaptureEnabled) {
|
||||
return createScreenCapturer();
|
||||
} else if (useCamera2()) {
|
||||
if (!captureToTexture()) {
|
||||
reportError(getString(R.string.camera2_texture_only_error));
|
||||
return null;
|
||||
}
|
||||
|
||||
Logging.d(TAG, "Creating capturer using camera2 API.");
|
||||
videoCapturer = createCameraCapturer(new Camera2Enumerator(this));
|
||||
} else {
|
||||
Logging.d(TAG, "Creating capturer using camera1 API.");
|
||||
videoCapturer = createCameraCapturer(new Camera1Enumerator(captureToTexture()));
|
||||
}
|
||||
if (videoCapturer == null) {
|
||||
reportError("Failed to open camera");
|
||||
return null;
|
||||
}
|
||||
return videoCapturer;
|
||||
}
|
||||
|
||||
private void setSwappedFeeds(boolean isSwappedFeeds) {
|
||||
Logging.d(TAG, "setSwappedFeeds: " + isSwappedFeeds);
|
||||
this.isSwappedFeeds = isSwappedFeeds;
|
||||
localProxyRenderer.setTarget(isSwappedFeeds ? fullscreenRenderer : pipRenderer);
|
||||
remoteProxyRenderer.setTarget(isSwappedFeeds ? pipRenderer : fullscreenRenderer);
|
||||
fullscreenRenderer.setMirror(isSwappedFeeds);
|
||||
pipRenderer.setMirror(!isSwappedFeeds);
|
||||
}
|
||||
|
||||
// -----Implementation of AppRTCClient.AppRTCSignalingEvents ---------------
// All callbacks are invoked from websocket signaling looper thread and
// are routed to UI thread.
// Creates the peer connection once the room handshake is done, then performs
// the role-dependent half of SDP negotiation: the initiator sends an OFFER;
// the callee applies any offer/candidates already buffered by the room and
// answers.
private void onConnectedToRoomInternal(final SignalingParameters params) {
  final long delta = System.currentTimeMillis() - callStartedTimeMs;

  signalingParameters = params;
  logAndToast("Creating peer connection, delay=" + delta + "ms");
  // Audio-only calls run with no capturer at all.
  VideoCapturer videoCapturer = null;
  if (peerConnectionParameters.videoCallEnabled) {
    videoCapturer = createVideoCapturer();
  }
  peerConnectionClient.createPeerConnection(
      localProxyRenderer, remoteRenderers, videoCapturer, signalingParameters);

  if (signalingParameters.initiator) {
    logAndToast("Creating OFFER...");
    // Create offer. Offer SDP will be sent to answering client in
    // PeerConnectionEvents.onLocalDescription event.
    peerConnectionClient.createOffer();
  } else {
    if (params.offerSdp != null) {
      // Remote description must be applied before creating the answer.
      peerConnectionClient.setRemoteDescription(params.offerSdp);
      logAndToast("Creating ANSWER...");
      // Create answer. Answer SDP will be sent to offering client in
      // PeerConnectionEvents.onLocalDescription event.
      peerConnectionClient.createAnswer();
    }
    if (params.iceCandidates != null) {
      // Add remote ICE candidates from room.
      for (IceCandidate iceCandidate : params.iceCandidates) {
        peerConnectionClient.addRemoteIceCandidate(iceCandidate);
      }
    }
  }
}
|
||||
|
||||
@Override
|
||||
public void onConnectedToRoom(final SignalingParameters params) {
|
||||
runOnUiThread(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
onConnectedToRoomInternal(params);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onRemoteDescription(final SessionDescription sdp) {
|
||||
final long delta = System.currentTimeMillis() - callStartedTimeMs;
|
||||
runOnUiThread(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
if (peerConnectionClient == null) {
|
||||
Log.e(TAG, "Received remote SDP for non-initilized peer connection.");
|
||||
return;
|
||||
}
|
||||
logAndToast("Received remote " + sdp.type + ", delay=" + delta + "ms");
|
||||
peerConnectionClient.setRemoteDescription(sdp);
|
||||
if (!signalingParameters.initiator) {
|
||||
logAndToast("Creating ANSWER...");
|
||||
// Create answer. Answer SDP will be sent to offering client in
|
||||
// PeerConnectionEvents.onLocalDescription event.
|
||||
peerConnectionClient.createAnswer();
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onRemoteIceCandidate(final IceCandidate candidate) {
|
||||
runOnUiThread(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
if (peerConnectionClient == null) {
|
||||
Log.e(TAG, "Received ICE candidate for a non-initialized peer connection.");
|
||||
return;
|
||||
}
|
||||
peerConnectionClient.addRemoteIceCandidate(candidate);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onRemoteIceCandidatesRemoved(final IceCandidate[] candidates) {
|
||||
runOnUiThread(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
if (peerConnectionClient == null) {
|
||||
Log.e(TAG, "Received ICE candidate removals for a non-initialized peer connection.");
|
||||
return;
|
||||
}
|
||||
peerConnectionClient.removeRemoteIceCandidates(candidates);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Callback from the signaling client: the remote side closed the signaling
// channel (hung up). Tears the call down on the UI thread.
@Override
public void onChannelClose() {
  runOnUiThread(new Runnable() {
    @Override
    public void run() {
      logAndToast("Remote end hung up; dropping PeerConnection");
      disconnect();
    }
  });
}
|
||||
|
||||
// Callback from the signaling client: an unrecoverable channel error.
// Called directly from the signaling thread — presumably reportError()
// does its own thread switch; confirm against its implementation.
@Override
public void onChannelError(final String description) {
  reportError(description);
}
|
||||
|
||||
// -----Implementation of PeerConnectionClient.PeerConnectionEvents.---------
// Send local peer connection SDP and ICE candidates to remote party.
// All callbacks are invoked from peer connection client looper thread and
// are routed to UI thread.
@Override
public void onLocalDescription(final SessionDescription sdp) {
  // Measured from call start so the toast shows how long SDP creation took.
  final long delta = System.currentTimeMillis() - callStartedTimeMs;
  runOnUiThread(new Runnable() {
    @Override
    public void run() {
      if (appRtcClient != null) {
        logAndToast("Sending " + sdp.type + ", delay=" + delta + "ms");
        // The initiator sends an offer; the callee sends an answer.
        if (signalingParameters.initiator) {
          appRtcClient.sendOfferSdp(sdp);
        } else {
          appRtcClient.sendAnswerSdp(sdp);
        }
      }
      // NOTE(review): unlike the other callbacks, peerConnectionClient is
      // not null-checked here — presumably it outlives appRtcClient; verify.
      if (peerConnectionParameters.videoMaxBitrate > 0) {
        Log.d(TAG, "Set video maximum bitrate: " + peerConnectionParameters.videoMaxBitrate);
        peerConnectionClient.setVideoMaxBitrate(peerConnectionParameters.videoMaxBitrate);
      }
    }
  });
}
|
||||
|
||||
// Callback from the peer connection: a local ICE candidate was gathered.
// Forwarded to the signaling server on the UI thread; silently dropped if
// the signaling client has already been torn down.
@Override
public void onIceCandidate(final IceCandidate candidate) {
  runOnUiThread(new Runnable() {
    @Override
    public void run() {
      if (appRtcClient != null) {
        appRtcClient.sendLocalIceCandidate(candidate);
      }
    }
  });
}
|
||||
|
||||
@Override
|
||||
public void onIceCandidatesRemoved(final IceCandidate[] candidates) {
|
||||
runOnUiThread(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
if (appRtcClient != null) {
|
||||
appRtcClient.sendLocalIceCandidateRemovals(candidates);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Callback from the peer connection: ICE reached the connected state.
// Records the state and switches the UI into the connected-call layout.
@Override
public void onIceConnected() {
  // Measured from call start for the informational toast below.
  final long delta = System.currentTimeMillis() - callStartedTimeMs;
  runOnUiThread(new Runnable() {
    @Override
    public void run() {
      logAndToast("ICE connected, delay=" + delta + "ms");
      iceConnected = true;
      callConnected();
    }
  });
}
|
||||
|
||||
@Override
|
||||
public void onIceDisconnected() {
|
||||
runOnUiThread(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
logAndToast("ICE disconnected");
|
||||
iceConnected = false;
|
||||
disconnect();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Callback from the peer connection once it has fully closed. Intentionally
// a no-op — presumably disconnect() has already performed all cleanup by
// this point; TODO confirm against PeerConnectionClient.
@Override
public void onPeerConnectionClosed() {}
|
||||
|
||||
// Callback from the peer connection: a new batch of stats reports is ready.
// Rendered in the HUD overlay, but only while the call is connected and
// error-free.
@Override
public void onPeerConnectionStatsReady(final StatsReport[] reports) {
  runOnUiThread(new Runnable() {
    @Override
    public void run() {
      if (!isError && iceConnected) {
        hudFragment.updateEncoderStatistics(reports);
      }
    }
  });
}
|
||||
|
||||
// Callback from the peer connection: an unrecoverable error occurred.
// Called from the peer connection looper thread — presumably reportError()
// does its own thread switch; confirm against its implementation.
@Override
public void onPeerConnectionError(final String description) {
  reportError(description);
}
|
||||
}
|
||||
139
examples/androidapp/src/org/appspot/apprtc/CallFragment.java
Normal file
139
examples/androidapp/src/org/appspot/apprtc/CallFragment.java
Normal file
@ -0,0 +1,139 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.appspot.apprtc;
|
||||
|
||||
import android.app.Activity;
|
||||
import android.app.Fragment;
|
||||
import android.os.Bundle;
|
||||
import android.view.LayoutInflater;
|
||||
import android.view.View;
|
||||
import android.view.ViewGroup;
|
||||
import android.widget.ImageButton;
|
||||
import android.widget.SeekBar;
|
||||
import android.widget.TextView;
|
||||
|
||||
import org.webrtc.RendererCommon.ScalingType;
|
||||
|
||||
/**
|
||||
* Fragment for call control.
|
||||
*/
|
||||
/**
 * Fragment for call control. Hosts the in-call buttons (hang up, camera
 * switch, video scaling, mute) and the optional capture-quality slider, and
 * forwards every user action to the hosting activity through
 * {@link OnCallEvents}.
 */
public class CallFragment extends Fragment {
  // Root view inflated from R.layout.fragment_call.
  private View controlView;
  // Shows the room id of the current call.
  private TextView contactView;
  private ImageButton disconnectButton;
  private ImageButton cameraSwitchButton;
  private ImageButton videoScalingButton;
  private ImageButton toggleMuteButton;
  // Label + slider for the capture-quality control; hidden in onStart()
  // unless enabled via the fragment arguments.
  private TextView captureFormatText;
  private SeekBar captureFormatSlider;
  // The hosting activity, cast in onAttach().
  private OnCallEvents callEvents;
  // Current remote-video scaling mode, toggled by videoScalingButton.
  private ScalingType scalingType;
  private boolean videoCallEnabled = true;

  /**
   * Call control interface for container activity.
   */
  public interface OnCallEvents {
    void onCallHangUp();
    void onCameraSwitch();
    void onVideoScalingSwitch(ScalingType scalingType);
    void onCaptureFormatChange(int width, int height, int framerate);
    boolean onToggleMic();
  }

  @Override
  public View onCreateView(
      LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
    controlView = inflater.inflate(R.layout.fragment_call, container, false);

    // Create UI controls.
    contactView = (TextView) controlView.findViewById(R.id.contact_name_call);
    disconnectButton = (ImageButton) controlView.findViewById(R.id.button_call_disconnect);
    cameraSwitchButton = (ImageButton) controlView.findViewById(R.id.button_call_switch_camera);
    videoScalingButton = (ImageButton) controlView.findViewById(R.id.button_call_scaling_mode);
    toggleMuteButton = (ImageButton) controlView.findViewById(R.id.button_call_toggle_mic);
    captureFormatText = (TextView) controlView.findViewById(R.id.capture_format_text_call);
    captureFormatSlider = (SeekBar) controlView.findViewById(R.id.capture_format_slider_call);

    // Add buttons click events.
    disconnectButton.setOnClickListener(new View.OnClickListener() {
      @Override
      public void onClick(View view) {
        callEvents.onCallHangUp();
      }
    });

    cameraSwitchButton.setOnClickListener(new View.OnClickListener() {
      @Override
      public void onClick(View view) {
        callEvents.onCameraSwitch();
      }
    });

    videoScalingButton.setOnClickListener(new View.OnClickListener() {
      @Override
      public void onClick(View view) {
        // Toggle between fill and fit scaling; the icon advertises the mode
        // that the next click switches to.
        if (scalingType == ScalingType.SCALE_ASPECT_FILL) {
          videoScalingButton.setBackgroundResource(R.drawable.ic_action_full_screen);
          scalingType = ScalingType.SCALE_ASPECT_FIT;
        } else {
          videoScalingButton.setBackgroundResource(R.drawable.ic_action_return_from_full_screen);
          scalingType = ScalingType.SCALE_ASPECT_FILL;
        }
        callEvents.onVideoScalingSwitch(scalingType);
      }
    });
    scalingType = ScalingType.SCALE_ASPECT_FILL;

    toggleMuteButton.setOnClickListener(new View.OnClickListener() {
      @Override
      public void onClick(View view) {
        // Dim the button while the mic is muted.
        boolean enabled = callEvents.onToggleMic();
        toggleMuteButton.setAlpha(enabled ? 1.0f : 0.3f);
      }
    });

    return controlView;
  }

  @Override
  public void onStart() {
    super.onStart();

    // Read the call configuration from the fragment arguments, falling back
    // to defaults when the fragment was created without any.
    boolean captureSliderEnabled = false;
    Bundle args = getArguments();
    if (args != null) {
      String contactName = args.getString(CallActivity.EXTRA_ROOMID);
      contactView.setText(contactName);
      videoCallEnabled = args.getBoolean(CallActivity.EXTRA_VIDEO_CALL, true);
      captureSliderEnabled = videoCallEnabled
          && args.getBoolean(CallActivity.EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED, false);
    }
    if (!videoCallEnabled) {
      // Camera switching is meaningless in an audio-only call.
      cameraSwitchButton.setVisibility(View.INVISIBLE);
    }
    if (captureSliderEnabled) {
      captureFormatSlider.setOnSeekBarChangeListener(
          new CaptureQualityController(captureFormatText, callEvents));
    } else {
      captureFormatText.setVisibility(View.GONE);
      captureFormatSlider.setVisibility(View.GONE);
    }
  }

  // TODO(sakal): Replace with onAttach(Context) once we only support API level 23+.
  @SuppressWarnings("deprecation")
  @Override
  public void onAttach(Activity activity) {
    super.onAttach(activity);
    // The hosting activity must implement OnCallEvents; a ClassCastException
    // here means it does not.
    callEvents = (OnCallEvents) activity;
  }
}
|
||||
@ -0,0 +1,110 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.appspot.apprtc;
|
||||
|
||||
import android.widget.SeekBar;
|
||||
import android.widget.TextView;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.Comparator;
|
||||
import java.util.List;
|
||||
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
|
||||
|
||||
/**
|
||||
* Control capture format based on a seekbar listener.
|
||||
*/
|
||||
public class CaptureQualityController implements SeekBar.OnSeekBarChangeListener {
|
||||
private final List<CaptureFormat> formats =
|
||||
Arrays.asList(new CaptureFormat(1280, 720, 0, 30000), new CaptureFormat(960, 540, 0, 30000),
|
||||
new CaptureFormat(640, 480, 0, 30000), new CaptureFormat(480, 360, 0, 30000),
|
||||
new CaptureFormat(320, 240, 0, 30000), new CaptureFormat(256, 144, 0, 30000));
|
||||
// Prioritize framerate below this threshold and resolution above the threshold.
|
||||
private static final int FRAMERATE_THRESHOLD = 15;
|
||||
private TextView captureFormatText;
|
||||
private CallFragment.OnCallEvents callEvents;
|
||||
private int width = 0;
|
||||
private int height = 0;
|
||||
private int framerate = 0;
|
||||
private double targetBandwidth = 0;
|
||||
|
||||
public CaptureQualityController(
|
||||
TextView captureFormatText, CallFragment.OnCallEvents callEvents) {
|
||||
this.captureFormatText = captureFormatText;
|
||||
this.callEvents = callEvents;
|
||||
}
|
||||
|
||||
private final Comparator<CaptureFormat> compareFormats = new Comparator<CaptureFormat>() {
|
||||
@Override
|
||||
public int compare(CaptureFormat first, CaptureFormat second) {
|
||||
int firstFps = calculateFramerate(targetBandwidth, first);
|
||||
int secondFps = calculateFramerate(targetBandwidth, second);
|
||||
|
||||
if (firstFps >= FRAMERATE_THRESHOLD && secondFps >= FRAMERATE_THRESHOLD
|
||||
|| firstFps == secondFps) {
|
||||
// Compare resolution.
|
||||
return first.width * first.height - second.width * second.height;
|
||||
} else {
|
||||
// Compare fps.
|
||||
return firstFps - secondFps;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
@Override
|
||||
public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
|
||||
if (progress == 0) {
|
||||
width = 0;
|
||||
height = 0;
|
||||
framerate = 0;
|
||||
captureFormatText.setText(R.string.muted);
|
||||
return;
|
||||
}
|
||||
|
||||
// Extract max bandwidth (in millipixels / second).
|
||||
long maxCaptureBandwidth = java.lang.Long.MIN_VALUE;
|
||||
for (CaptureFormat format : formats) {
|
||||
maxCaptureBandwidth =
|
||||
Math.max(maxCaptureBandwidth, (long) format.width * format.height * format.framerate.max);
|
||||
}
|
||||
|
||||
// Fraction between 0 and 1.
|
||||
double bandwidthFraction = (double) progress / 100.0;
|
||||
// Make a log-scale transformation, still between 0 and 1.
|
||||
final double kExpConstant = 3.0;
|
||||
bandwidthFraction =
|
||||
(Math.exp(kExpConstant * bandwidthFraction) - 1) / (Math.exp(kExpConstant) - 1);
|
||||
targetBandwidth = bandwidthFraction * maxCaptureBandwidth;
|
||||
|
||||
// Choose the best format given a target bandwidth.
|
||||
final CaptureFormat bestFormat = Collections.max(formats, compareFormats);
|
||||
width = bestFormat.width;
|
||||
height = bestFormat.height;
|
||||
framerate = calculateFramerate(targetBandwidth, bestFormat);
|
||||
captureFormatText.setText(
|
||||
String.format(captureFormatText.getContext().getString(R.string.format_description), width,
|
||||
height, framerate));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onStartTrackingTouch(SeekBar seekBar) {}
|
||||
|
||||
@Override
|
||||
public void onStopTrackingTouch(SeekBar seekBar) {
|
||||
callEvents.onCaptureFormatChange(width, height, framerate);
|
||||
}
|
||||
|
||||
// Return the highest frame rate possible based on bandwidth and format.
|
||||
private int calculateFramerate(double bandwidth, CaptureFormat format) {
|
||||
return (int) Math.round(
|
||||
Math.min(format.framerate.max, (int) Math.round(bandwidth / (format.width * format.height)))
|
||||
/ 1000.0);
|
||||
}
|
||||
}
|
||||
626
examples/androidapp/src/org/appspot/apprtc/ConnectActivity.java
Normal file
626
examples/androidapp/src/org/appspot/apprtc/ConnectActivity.java
Normal file
@ -0,0 +1,626 @@
|
||||
/*
|
||||
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.appspot.apprtc;
|
||||
|
||||
import android.app.Activity;
|
||||
import android.app.AlertDialog;
|
||||
import android.content.DialogInterface;
|
||||
import android.content.Intent;
|
||||
import android.content.SharedPreferences;
|
||||
import android.net.Uri;
|
||||
import android.os.Bundle;
|
||||
import android.preference.PreferenceManager;
|
||||
import android.util.Log;
|
||||
import android.view.ContextMenu;
|
||||
import android.view.KeyEvent;
|
||||
import android.view.Menu;
|
||||
import android.view.MenuItem;
|
||||
import android.view.View;
|
||||
import android.view.View.OnClickListener;
|
||||
import android.view.inputmethod.EditorInfo;
|
||||
import android.webkit.URLUtil;
|
||||
import android.widget.AdapterView;
|
||||
import android.widget.ArrayAdapter;
|
||||
import android.widget.EditText;
|
||||
import android.widget.ImageButton;
|
||||
import android.widget.ListView;
|
||||
import android.widget.TextView;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Random;
|
||||
import org.json.JSONArray;
|
||||
import org.json.JSONException;
|
||||
|
||||
/**
|
||||
* Handles the initial setup where the user selects which room to join.
|
||||
*/
|
||||
public class ConnectActivity extends Activity {
|
||||
private static final String TAG = "ConnectActivity";
private static final int CONNECTION_REQUEST = 1;
// Position of the "remove favorite" entry in the list's context menu.
private static final int REMOVE_FAVORITE_INDEX = 0;
// True while a call launched from a VIEW intent is in flight; static so a
// recreated activity instance does not start a second run.
private static boolean commandLineRun = false;

// UI widgets, bound in onCreate().
private ImageButton connectButton;
private ImageButton addFavoriteButton;
private EditText roomEditText;
private ListView roomListView;
// Preference storage plus the resource-derived keys used to read each
// setting; all key strings are resolved once in onCreate().
private SharedPreferences sharedPref;
private String keyprefVideoCallEnabled;
private String keyprefScreencapture;
private String keyprefCamera2;
private String keyprefResolution;
private String keyprefFps;
private String keyprefCaptureQualitySlider;
private String keyprefVideoBitrateType;
private String keyprefVideoBitrateValue;
private String keyprefVideoCodec;
private String keyprefAudioBitrateType;
private String keyprefAudioBitrateValue;
private String keyprefAudioCodec;
private String keyprefHwCodecAcceleration;
private String keyprefCaptureToTexture;
private String keyprefFlexfec;
private String keyprefNoAudioProcessingPipeline;
private String keyprefAecDump;
private String keyprefOpenSLES;
private String keyprefDisableBuiltInAec;
private String keyprefDisableBuiltInAgc;
private String keyprefDisableBuiltInNs;
private String keyprefEnableLevelControl;
private String keyprefDisableWebRtcAGCAndHPF;
private String keyprefDisplayHud;
private String keyprefTracing;
private String keyprefRoomServerUrl;
private String keyprefRoom;
private String keyprefRoomList;
// Favorite rooms, persisted as a JSON array in shared preferences
// (written in onPause(), reloaded in onResume()).
private ArrayList<String> roomList;
private ArrayAdapter<String> adapter;
private String keyprefEnableDataChannel;
private String keyprefOrdered;
private String keyprefMaxRetransmitTimeMs;
private String keyprefMaxRetransmits;
private String keyprefDataProtocol;
private String keyprefNegotiated;
private String keyprefDataId;
|
||||
|
||||
// Resolves all preference keys, wires up the room-selection UI, and — when
// launched via an implicit VIEW intent — connects straight to the room
// named in the stored preferences.
@Override
public void onCreate(Bundle savedInstanceState) {
  super.onCreate(savedInstanceState);

  // Get setting keys.
  PreferenceManager.setDefaultValues(this, R.xml.preferences, false);
  sharedPref = PreferenceManager.getDefaultSharedPreferences(this);
  keyprefVideoCallEnabled = getString(R.string.pref_videocall_key);
  keyprefScreencapture = getString(R.string.pref_screencapture_key);
  keyprefCamera2 = getString(R.string.pref_camera2_key);
  keyprefResolution = getString(R.string.pref_resolution_key);
  keyprefFps = getString(R.string.pref_fps_key);
  keyprefCaptureQualitySlider = getString(R.string.pref_capturequalityslider_key);
  keyprefVideoBitrateType = getString(R.string.pref_maxvideobitrate_key);
  keyprefVideoBitrateValue = getString(R.string.pref_maxvideobitratevalue_key);
  keyprefVideoCodec = getString(R.string.pref_videocodec_key);
  keyprefHwCodecAcceleration = getString(R.string.pref_hwcodec_key);
  keyprefCaptureToTexture = getString(R.string.pref_capturetotexture_key);
  keyprefFlexfec = getString(R.string.pref_flexfec_key);
  keyprefAudioBitrateType = getString(R.string.pref_startaudiobitrate_key);
  keyprefAudioBitrateValue = getString(R.string.pref_startaudiobitratevalue_key);
  keyprefAudioCodec = getString(R.string.pref_audiocodec_key);
  keyprefNoAudioProcessingPipeline = getString(R.string.pref_noaudioprocessing_key);
  keyprefAecDump = getString(R.string.pref_aecdump_key);
  keyprefOpenSLES = getString(R.string.pref_opensles_key);
  keyprefDisableBuiltInAec = getString(R.string.pref_disable_built_in_aec_key);
  keyprefDisableBuiltInAgc = getString(R.string.pref_disable_built_in_agc_key);
  keyprefDisableBuiltInNs = getString(R.string.pref_disable_built_in_ns_key);
  keyprefEnableLevelControl = getString(R.string.pref_enable_level_control_key);
  keyprefDisableWebRtcAGCAndHPF = getString(R.string.pref_disable_webrtc_agc_and_hpf_key);
  keyprefDisplayHud = getString(R.string.pref_displayhud_key);
  keyprefTracing = getString(R.string.pref_tracing_key);
  keyprefRoomServerUrl = getString(R.string.pref_room_server_url_key);
  keyprefRoom = getString(R.string.pref_room_key);
  keyprefRoomList = getString(R.string.pref_room_list_key);
  keyprefEnableDataChannel = getString(R.string.pref_enable_datachannel_key);
  keyprefOrdered = getString(R.string.pref_ordered_key);
  keyprefMaxRetransmitTimeMs = getString(R.string.pref_max_retransmit_time_ms_key);
  keyprefMaxRetransmits = getString(R.string.pref_max_retransmits_key);
  keyprefDataProtocol = getString(R.string.pref_data_protocol_key);
  keyprefNegotiated = getString(R.string.pref_negotiated_key);
  keyprefDataId = getString(R.string.pref_data_id_key);

  setContentView(R.layout.activity_connect);

  // Keyboard "done" behaves the same as tapping the add-favorite button.
  roomEditText = (EditText) findViewById(R.id.room_edittext);
  roomEditText.setOnEditorActionListener(new TextView.OnEditorActionListener() {
    @Override
    public boolean onEditorAction(TextView textView, int i, KeyEvent keyEvent) {
      if (i == EditorInfo.IME_ACTION_DONE) {
        addFavoriteButton.performClick();
        return true;
      }
      return false;
    }
  });
  roomEditText.requestFocus();

  roomListView = (ListView) findViewById(R.id.room_listview);
  roomListView.setEmptyView(findViewById(android.R.id.empty));
  roomListView.setOnItemClickListener(roomListClickListener);
  registerForContextMenu(roomListView);
  connectButton = (ImageButton) findViewById(R.id.connect_button);
  connectButton.setOnClickListener(connectListener);
  addFavoriteButton = (ImageButton) findViewById(R.id.add_favorite_button);
  addFavoriteButton.setOnClickListener(addFavoriteListener);

  // If an implicit VIEW intent is launching the app, go directly to that URL.
  final Intent intent = getIntent();
  if ("android.intent.action.VIEW".equals(intent.getAction()) && !commandLineRun) {
    boolean loopback = intent.getBooleanExtra(CallActivity.EXTRA_LOOPBACK, false);
    int runTimeMs = intent.getIntExtra(CallActivity.EXTRA_RUNTIME, 0);
    boolean useValuesFromIntent =
        intent.getBooleanExtra(CallActivity.EXTRA_USE_VALUES_FROM_INTENT, false);
    String room = sharedPref.getString(keyprefRoom, "");
    connectToRoom(room, true, loopback, useValuesFromIntent, runTimeMs);
  }
}
|
||||
|
||||
// Inflates the action-bar menu (settings / loopback entries).
@Override
public boolean onCreateOptionsMenu(Menu menu) {
  getMenuInflater().inflate(R.menu.connect_menu, menu);
  return true;
}
|
||||
|
||||
@Override
|
||||
public void onCreateContextMenu(ContextMenu menu, View v, ContextMenu.ContextMenuInfo menuInfo) {
|
||||
if (v.getId() == R.id.room_listview) {
|
||||
AdapterView.AdapterContextMenuInfo info = (AdapterView.AdapterContextMenuInfo) menuInfo;
|
||||
menu.setHeaderTitle(roomList.get(info.position));
|
||||
String[] menuItems = getResources().getStringArray(R.array.roomListContextMenu);
|
||||
for (int i = 0; i < menuItems.length; i++) {
|
||||
menu.add(Menu.NONE, i, i, menuItems[i]);
|
||||
}
|
||||
} else {
|
||||
super.onCreateContextMenu(menu, v, menuInfo);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean onContextItemSelected(MenuItem item) {
|
||||
if (item.getItemId() == REMOVE_FAVORITE_INDEX) {
|
||||
AdapterView.AdapterContextMenuInfo info =
|
||||
(AdapterView.AdapterContextMenuInfo) item.getMenuInfo();
|
||||
roomList.remove(info.position);
|
||||
adapter.notifyDataSetChanged();
|
||||
return true;
|
||||
}
|
||||
|
||||
return super.onContextItemSelected(item);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean onOptionsItemSelected(MenuItem item) {
|
||||
// Handle presses on the action bar items.
|
||||
if (item.getItemId() == R.id.action_settings) {
|
||||
Intent intent = new Intent(this, SettingsActivity.class);
|
||||
startActivity(intent);
|
||||
return true;
|
||||
} else if (item.getItemId() == R.id.action_loopback) {
|
||||
connectToRoom(null, false, true, false, 0);
|
||||
return true;
|
||||
} else {
|
||||
return super.onOptionsItemSelected(item);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onPause() {
|
||||
super.onPause();
|
||||
String room = roomEditText.getText().toString();
|
||||
String roomListJson = new JSONArray(roomList).toString();
|
||||
SharedPreferences.Editor editor = sharedPref.edit();
|
||||
editor.putString(keyprefRoom, room);
|
||||
editor.putString(keyprefRoomList, roomListJson);
|
||||
editor.commit();
|
||||
}
|
||||
|
||||
// Restores the last room name and reloads the favorite list from its JSON
// representation in shared preferences; a corrupt list is logged and
// treated as empty rather than crashing.
@Override
public void onResume() {
  super.onResume();
  String room = sharedPref.getString(keyprefRoom, "");
  roomEditText.setText(room);
  roomList = new ArrayList<String>();
  String roomListJson = sharedPref.getString(keyprefRoomList, null);
  if (roomListJson != null) {
    try {
      JSONArray jsonArray = new JSONArray(roomListJson);
      for (int i = 0; i < jsonArray.length(); i++) {
        roomList.add(jsonArray.get(i).toString());
      }
    } catch (JSONException e) {
      Log.e(TAG, "Failed to load room list: " + e.toString());
    }
  }
  adapter = new ArrayAdapter<String>(this, android.R.layout.simple_list_item_1, roomList);
  roomListView.setAdapter(adapter);
  if (adapter.getCount() > 0) {
    // Preselect the first favorite for quick keyboard/remote navigation.
    roomListView.requestFocus();
    roomListView.setItemChecked(0, true);
  }
}
|
||||
|
||||
// Receives the CallActivity result for a command-line (VIEW-intent) run:
// propagates the call's result code to our caller and finishes. Normal
// interactive calls are ignored here.
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
  if (requestCode == CONNECTION_REQUEST && commandLineRun) {
    Log.d(TAG, "Return: " + resultCode);
    setResult(resultCode);
    commandLineRun = false;
    finish();
  }
}
|
||||
|
||||
/**
|
||||
* Get a value from the shared preference or from the intent, if it does not
|
||||
* exist the default is used.
|
||||
*/
|
||||
private String sharedPrefGetString(
|
||||
int attributeId, String intentName, int defaultId, boolean useFromIntent) {
|
||||
String defaultValue = getString(defaultId);
|
||||
if (useFromIntent) {
|
||||
String value = getIntent().getStringExtra(intentName);
|
||||
if (value != null) {
|
||||
return value;
|
||||
}
|
||||
return defaultValue;
|
||||
} else {
|
||||
String attributeName = getString(attributeId);
|
||||
return sharedPref.getString(attributeName, defaultValue);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a value from the shared preference or from the intent, if it does not
|
||||
* exist the default is used.
|
||||
*/
|
||||
private boolean sharedPrefGetBoolean(
|
||||
int attributeId, String intentName, int defaultId, boolean useFromIntent) {
|
||||
boolean defaultValue = Boolean.valueOf(getString(defaultId));
|
||||
if (useFromIntent) {
|
||||
return getIntent().getBooleanExtra(intentName, defaultValue);
|
||||
} else {
|
||||
String attributeName = getString(attributeId);
|
||||
return sharedPref.getBoolean(attributeName, defaultValue);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Get a value from the shared preference or from the intent, if it does not
 * exist the default is used.
 *
 * <p>Preferences store the number as a string; a malformed stored value is
 * logged and replaced by the default rather than crashing.
 */
private int sharedPrefGetInteger(
    int attributeId, String intentName, int defaultId, boolean useFromIntent) {
  String defaultString = getString(defaultId);
  int defaultValue = Integer.parseInt(defaultString);
  if (useFromIntent) {
    return getIntent().getIntExtra(intentName, defaultValue);
  } else {
    String attributeName = getString(attributeId);
    String value = sharedPref.getString(attributeName, defaultString);
    try {
      return Integer.parseInt(value);
    } catch (NumberFormatException e) {
      Log.e(TAG, "Wrong setting for: " + attributeName + ":" + value);
      return defaultValue;
    }
  }
}
|
||||
|
||||
private void connectToRoom(String roomId, boolean commandLineRun, boolean loopback,
|
||||
boolean useValuesFromIntent, int runTimeMs) {
|
||||
this.commandLineRun = commandLineRun;
|
||||
|
||||
// roomId is random for loopback.
|
||||
if (loopback) {
|
||||
roomId = Integer.toString((new Random()).nextInt(100000000));
|
||||
}
|
||||
|
||||
String roomUrl = sharedPref.getString(
|
||||
keyprefRoomServerUrl, getString(R.string.pref_room_server_url_default));
|
||||
|
||||
// Video call enabled flag.
|
||||
boolean videoCallEnabled = sharedPrefGetBoolean(R.string.pref_videocall_key,
|
||||
CallActivity.EXTRA_VIDEO_CALL, R.string.pref_videocall_default, useValuesFromIntent);
|
||||
|
||||
// Use screencapture option.
|
||||
boolean useScreencapture = sharedPrefGetBoolean(R.string.pref_screencapture_key,
|
||||
CallActivity.EXTRA_SCREENCAPTURE, R.string.pref_screencapture_default, useValuesFromIntent);
|
||||
|
||||
// Use Camera2 option.
|
||||
boolean useCamera2 = sharedPrefGetBoolean(R.string.pref_camera2_key, CallActivity.EXTRA_CAMERA2,
|
||||
R.string.pref_camera2_default, useValuesFromIntent);
|
||||
|
||||
// Get default codecs.
|
||||
String videoCodec = sharedPrefGetString(R.string.pref_videocodec_key,
|
||||
CallActivity.EXTRA_VIDEOCODEC, R.string.pref_videocodec_default, useValuesFromIntent);
|
||||
String audioCodec = sharedPrefGetString(R.string.pref_audiocodec_key,
|
||||
CallActivity.EXTRA_AUDIOCODEC, R.string.pref_audiocodec_default, useValuesFromIntent);
|
||||
|
||||
// Check HW codec flag.
|
||||
boolean hwCodec = sharedPrefGetBoolean(R.string.pref_hwcodec_key,
|
||||
CallActivity.EXTRA_HWCODEC_ENABLED, R.string.pref_hwcodec_default, useValuesFromIntent);
|
||||
|
||||
// Check Capture to texture.
|
||||
boolean captureToTexture = sharedPrefGetBoolean(R.string.pref_capturetotexture_key,
|
||||
CallActivity.EXTRA_CAPTURETOTEXTURE_ENABLED, R.string.pref_capturetotexture_default,
|
||||
useValuesFromIntent);
|
||||
|
||||
// Check FlexFEC.
|
||||
boolean flexfecEnabled = sharedPrefGetBoolean(R.string.pref_flexfec_key,
|
||||
CallActivity.EXTRA_FLEXFEC_ENABLED, R.string.pref_flexfec_default, useValuesFromIntent);
|
||||
|
||||
// Check Disable Audio Processing flag.
|
||||
boolean noAudioProcessing = sharedPrefGetBoolean(R.string.pref_noaudioprocessing_key,
|
||||
CallActivity.EXTRA_NOAUDIOPROCESSING_ENABLED, R.string.pref_noaudioprocessing_default,
|
||||
useValuesFromIntent);
|
||||
|
||||
// Check Disable Audio Processing flag.
|
||||
boolean aecDump = sharedPrefGetBoolean(R.string.pref_aecdump_key,
|
||||
CallActivity.EXTRA_AECDUMP_ENABLED, R.string.pref_aecdump_default, useValuesFromIntent);
|
||||
|
||||
// Check OpenSL ES enabled flag.
|
||||
boolean useOpenSLES = sharedPrefGetBoolean(R.string.pref_opensles_key,
|
||||
CallActivity.EXTRA_OPENSLES_ENABLED, R.string.pref_opensles_default, useValuesFromIntent);
|
||||
|
||||
// Check Disable built-in AEC flag.
|
||||
boolean disableBuiltInAEC = sharedPrefGetBoolean(R.string.pref_disable_built_in_aec_key,
|
||||
CallActivity.EXTRA_DISABLE_BUILT_IN_AEC, R.string.pref_disable_built_in_aec_default,
|
||||
useValuesFromIntent);
|
||||
|
||||
// Check Disable built-in AGC flag.
|
||||
boolean disableBuiltInAGC = sharedPrefGetBoolean(R.string.pref_disable_built_in_agc_key,
|
||||
CallActivity.EXTRA_DISABLE_BUILT_IN_AGC, R.string.pref_disable_built_in_agc_default,
|
||||
useValuesFromIntent);
|
||||
|
||||
// Check Disable built-in NS flag.
|
||||
boolean disableBuiltInNS = sharedPrefGetBoolean(R.string.pref_disable_built_in_ns_key,
|
||||
CallActivity.EXTRA_DISABLE_BUILT_IN_NS, R.string.pref_disable_built_in_ns_default,
|
||||
useValuesFromIntent);
|
||||
|
||||
// Check Enable level control.
|
||||
boolean enableLevelControl = sharedPrefGetBoolean(R.string.pref_enable_level_control_key,
|
||||
CallActivity.EXTRA_ENABLE_LEVEL_CONTROL, R.string.pref_enable_level_control_key,
|
||||
useValuesFromIntent);
|
||||
|
||||
// Check Disable gain control
|
||||
boolean disableWebRtcAGCAndHPF = sharedPrefGetBoolean(
|
||||
R.string.pref_disable_webrtc_agc_and_hpf_key, CallActivity.EXTRA_DISABLE_WEBRTC_AGC_AND_HPF,
|
||||
R.string.pref_disable_webrtc_agc_and_hpf_key, useValuesFromIntent);
|
||||
|
||||
// Get video resolution from settings.
|
||||
int videoWidth = 0;
|
||||
int videoHeight = 0;
|
||||
if (useValuesFromIntent) {
|
||||
videoWidth = getIntent().getIntExtra(CallActivity.EXTRA_VIDEO_WIDTH, 0);
|
||||
videoHeight = getIntent().getIntExtra(CallActivity.EXTRA_VIDEO_HEIGHT, 0);
|
||||
}
|
||||
if (videoWidth == 0 && videoHeight == 0) {
|
||||
String resolution =
|
||||
sharedPref.getString(keyprefResolution, getString(R.string.pref_resolution_default));
|
||||
String[] dimensions = resolution.split("[ x]+");
|
||||
if (dimensions.length == 2) {
|
||||
try {
|
||||
videoWidth = Integer.parseInt(dimensions[0]);
|
||||
videoHeight = Integer.parseInt(dimensions[1]);
|
||||
} catch (NumberFormatException e) {
|
||||
videoWidth = 0;
|
||||
videoHeight = 0;
|
||||
Log.e(TAG, "Wrong video resolution setting: " + resolution);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Get camera fps from settings.
|
||||
int cameraFps = 0;
|
||||
if (useValuesFromIntent) {
|
||||
cameraFps = getIntent().getIntExtra(CallActivity.EXTRA_VIDEO_FPS, 0);
|
||||
}
|
||||
if (cameraFps == 0) {
|
||||
String fps = sharedPref.getString(keyprefFps, getString(R.string.pref_fps_default));
|
||||
String[] fpsValues = fps.split("[ x]+");
|
||||
if (fpsValues.length == 2) {
|
||||
try {
|
||||
cameraFps = Integer.parseInt(fpsValues[0]);
|
||||
} catch (NumberFormatException e) {
|
||||
cameraFps = 0;
|
||||
Log.e(TAG, "Wrong camera fps setting: " + fps);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check capture quality slider flag.
|
||||
boolean captureQualitySlider = sharedPrefGetBoolean(R.string.pref_capturequalityslider_key,
|
||||
CallActivity.EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED,
|
||||
R.string.pref_capturequalityslider_default, useValuesFromIntent);
|
||||
|
||||
// Get video and audio start bitrate.
|
||||
int videoStartBitrate = 0;
|
||||
if (useValuesFromIntent) {
|
||||
videoStartBitrate = getIntent().getIntExtra(CallActivity.EXTRA_VIDEO_BITRATE, 0);
|
||||
}
|
||||
if (videoStartBitrate == 0) {
|
||||
String bitrateTypeDefault = getString(R.string.pref_maxvideobitrate_default);
|
||||
String bitrateType = sharedPref.getString(keyprefVideoBitrateType, bitrateTypeDefault);
|
||||
if (!bitrateType.equals(bitrateTypeDefault)) {
|
||||
String bitrateValue = sharedPref.getString(
|
||||
keyprefVideoBitrateValue, getString(R.string.pref_maxvideobitratevalue_default));
|
||||
videoStartBitrate = Integer.parseInt(bitrateValue);
|
||||
}
|
||||
}
|
||||
|
||||
int audioStartBitrate = 0;
|
||||
if (useValuesFromIntent) {
|
||||
audioStartBitrate = getIntent().getIntExtra(CallActivity.EXTRA_AUDIO_BITRATE, 0);
|
||||
}
|
||||
if (audioStartBitrate == 0) {
|
||||
String bitrateTypeDefault = getString(R.string.pref_startaudiobitrate_default);
|
||||
String bitrateType = sharedPref.getString(keyprefAudioBitrateType, bitrateTypeDefault);
|
||||
if (!bitrateType.equals(bitrateTypeDefault)) {
|
||||
String bitrateValue = sharedPref.getString(
|
||||
keyprefAudioBitrateValue, getString(R.string.pref_startaudiobitratevalue_default));
|
||||
audioStartBitrate = Integer.parseInt(bitrateValue);
|
||||
}
|
||||
}
|
||||
|
||||
// Check statistics display option.
|
||||
boolean displayHud = sharedPrefGetBoolean(R.string.pref_displayhud_key,
|
||||
CallActivity.EXTRA_DISPLAY_HUD, R.string.pref_displayhud_default, useValuesFromIntent);
|
||||
|
||||
boolean tracing = sharedPrefGetBoolean(R.string.pref_tracing_key, CallActivity.EXTRA_TRACING,
|
||||
R.string.pref_tracing_default, useValuesFromIntent);
|
||||
|
||||
// Get datachannel options
|
||||
boolean dataChannelEnabled = sharedPrefGetBoolean(R.string.pref_enable_datachannel_key,
|
||||
CallActivity.EXTRA_DATA_CHANNEL_ENABLED, R.string.pref_enable_datachannel_default,
|
||||
useValuesFromIntent);
|
||||
boolean ordered = sharedPrefGetBoolean(R.string.pref_ordered_key, CallActivity.EXTRA_ORDERED,
|
||||
R.string.pref_ordered_default, useValuesFromIntent);
|
||||
boolean negotiated = sharedPrefGetBoolean(R.string.pref_negotiated_key,
|
||||
CallActivity.EXTRA_NEGOTIATED, R.string.pref_negotiated_default, useValuesFromIntent);
|
||||
int maxRetrMs = sharedPrefGetInteger(R.string.pref_max_retransmit_time_ms_key,
|
||||
CallActivity.EXTRA_MAX_RETRANSMITS_MS, R.string.pref_max_retransmit_time_ms_default,
|
||||
useValuesFromIntent);
|
||||
int maxRetr =
|
||||
sharedPrefGetInteger(R.string.pref_max_retransmits_key, CallActivity.EXTRA_MAX_RETRANSMITS,
|
||||
R.string.pref_max_retransmits_default, useValuesFromIntent);
|
||||
int id = sharedPrefGetInteger(R.string.pref_data_id_key, CallActivity.EXTRA_ID,
|
||||
R.string.pref_data_id_default, useValuesFromIntent);
|
||||
String protocol = sharedPrefGetString(R.string.pref_data_protocol_key,
|
||||
CallActivity.EXTRA_PROTOCOL, R.string.pref_data_protocol_default, useValuesFromIntent);
|
||||
|
||||
// Start AppRTCMobile activity.
|
||||
Log.d(TAG, "Connecting to room " + roomId + " at URL " + roomUrl);
|
||||
if (validateUrl(roomUrl)) {
|
||||
Uri uri = Uri.parse(roomUrl);
|
||||
Intent intent = new Intent(this, CallActivity.class);
|
||||
intent.setData(uri);
|
||||
intent.putExtra(CallActivity.EXTRA_ROOMID, roomId);
|
||||
intent.putExtra(CallActivity.EXTRA_LOOPBACK, loopback);
|
||||
intent.putExtra(CallActivity.EXTRA_VIDEO_CALL, videoCallEnabled);
|
||||
intent.putExtra(CallActivity.EXTRA_SCREENCAPTURE, useScreencapture);
|
||||
intent.putExtra(CallActivity.EXTRA_CAMERA2, useCamera2);
|
||||
intent.putExtra(CallActivity.EXTRA_VIDEO_WIDTH, videoWidth);
|
||||
intent.putExtra(CallActivity.EXTRA_VIDEO_HEIGHT, videoHeight);
|
||||
intent.putExtra(CallActivity.EXTRA_VIDEO_FPS, cameraFps);
|
||||
intent.putExtra(CallActivity.EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED, captureQualitySlider);
|
||||
intent.putExtra(CallActivity.EXTRA_VIDEO_BITRATE, videoStartBitrate);
|
||||
intent.putExtra(CallActivity.EXTRA_VIDEOCODEC, videoCodec);
|
||||
intent.putExtra(CallActivity.EXTRA_HWCODEC_ENABLED, hwCodec);
|
||||
intent.putExtra(CallActivity.EXTRA_CAPTURETOTEXTURE_ENABLED, captureToTexture);
|
||||
intent.putExtra(CallActivity.EXTRA_FLEXFEC_ENABLED, flexfecEnabled);
|
||||
intent.putExtra(CallActivity.EXTRA_NOAUDIOPROCESSING_ENABLED, noAudioProcessing);
|
||||
intent.putExtra(CallActivity.EXTRA_AECDUMP_ENABLED, aecDump);
|
||||
intent.putExtra(CallActivity.EXTRA_OPENSLES_ENABLED, useOpenSLES);
|
||||
intent.putExtra(CallActivity.EXTRA_DISABLE_BUILT_IN_AEC, disableBuiltInAEC);
|
||||
intent.putExtra(CallActivity.EXTRA_DISABLE_BUILT_IN_AGC, disableBuiltInAGC);
|
||||
intent.putExtra(CallActivity.EXTRA_DISABLE_BUILT_IN_NS, disableBuiltInNS);
|
||||
intent.putExtra(CallActivity.EXTRA_ENABLE_LEVEL_CONTROL, enableLevelControl);
|
||||
intent.putExtra(CallActivity.EXTRA_DISABLE_WEBRTC_AGC_AND_HPF, disableWebRtcAGCAndHPF);
|
||||
intent.putExtra(CallActivity.EXTRA_AUDIO_BITRATE, audioStartBitrate);
|
||||
intent.putExtra(CallActivity.EXTRA_AUDIOCODEC, audioCodec);
|
||||
intent.putExtra(CallActivity.EXTRA_DISPLAY_HUD, displayHud);
|
||||
intent.putExtra(CallActivity.EXTRA_TRACING, tracing);
|
||||
intent.putExtra(CallActivity.EXTRA_CMDLINE, commandLineRun);
|
||||
intent.putExtra(CallActivity.EXTRA_RUNTIME, runTimeMs);
|
||||
|
||||
intent.putExtra(CallActivity.EXTRA_DATA_CHANNEL_ENABLED, dataChannelEnabled);
|
||||
|
||||
if (dataChannelEnabled) {
|
||||
intent.putExtra(CallActivity.EXTRA_ORDERED, ordered);
|
||||
intent.putExtra(CallActivity.EXTRA_MAX_RETRANSMITS_MS, maxRetrMs);
|
||||
intent.putExtra(CallActivity.EXTRA_MAX_RETRANSMITS, maxRetr);
|
||||
intent.putExtra(CallActivity.EXTRA_PROTOCOL, protocol);
|
||||
intent.putExtra(CallActivity.EXTRA_NEGOTIATED, negotiated);
|
||||
intent.putExtra(CallActivity.EXTRA_ID, id);
|
||||
}
|
||||
|
||||
if (useValuesFromIntent) {
|
||||
if (getIntent().hasExtra(CallActivity.EXTRA_VIDEO_FILE_AS_CAMERA)) {
|
||||
String videoFileAsCamera =
|
||||
getIntent().getStringExtra(CallActivity.EXTRA_VIDEO_FILE_AS_CAMERA);
|
||||
intent.putExtra(CallActivity.EXTRA_VIDEO_FILE_AS_CAMERA, videoFileAsCamera);
|
||||
}
|
||||
|
||||
if (getIntent().hasExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE)) {
|
||||
String saveRemoteVideoToFile =
|
||||
getIntent().getStringExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE);
|
||||
intent.putExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE, saveRemoteVideoToFile);
|
||||
}
|
||||
|
||||
if (getIntent().hasExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH)) {
|
||||
int videoOutWidth =
|
||||
getIntent().getIntExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH, 0);
|
||||
intent.putExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH, videoOutWidth);
|
||||
}
|
||||
|
||||
if (getIntent().hasExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT)) {
|
||||
int videoOutHeight =
|
||||
getIntent().getIntExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT, 0);
|
||||
intent.putExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT, videoOutHeight);
|
||||
}
|
||||
}
|
||||
|
||||
startActivityForResult(intent, CONNECTION_REQUEST);
|
||||
}
|
||||
}
|
||||
|
||||
private boolean validateUrl(String url) {
|
||||
if (URLUtil.isHttpsUrl(url) || URLUtil.isHttpUrl(url)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
new AlertDialog.Builder(this)
|
||||
.setTitle(getText(R.string.invalid_url_title))
|
||||
.setMessage(getString(R.string.invalid_url_text, url))
|
||||
.setCancelable(false)
|
||||
.setNeutralButton(R.string.ok,
|
||||
new DialogInterface.OnClickListener() {
|
||||
public void onClick(DialogInterface dialog, int id) {
|
||||
dialog.cancel();
|
||||
}
|
||||
})
|
||||
.create()
|
||||
.show();
|
||||
return false;
|
||||
}
|
||||
|
||||
private final AdapterView.OnItemClickListener roomListClickListener =
|
||||
new AdapterView.OnItemClickListener() {
|
||||
@Override
|
||||
public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) {
|
||||
String roomId = ((TextView) view).getText().toString();
|
||||
connectToRoom(roomId, false, false, false, 0);
|
||||
}
|
||||
};
|
||||
|
||||
private final OnClickListener addFavoriteListener = new OnClickListener() {
|
||||
@Override
|
||||
public void onClick(View view) {
|
||||
String newRoom = roomEditText.getText().toString();
|
||||
if (newRoom.length() > 0 && !roomList.contains(newRoom)) {
|
||||
adapter.add(newRoom);
|
||||
adapter.notifyDataSetChanged();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
private final OnClickListener connectListener = new OnClickListener() {
|
||||
@Override
|
||||
public void onClick(View view) {
|
||||
connectToRoom(roomEditText.getText().toString(), false, false, false, 0);
|
||||
}
|
||||
};
|
||||
}
|
||||
511
examples/androidapp/src/org/appspot/apprtc/CpuMonitor.java
Normal file
511
examples/androidapp/src/org/appspot/apprtc/CpuMonitor.java
Normal file
@ -0,0 +1,511 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.appspot.apprtc;
|
||||
|
||||
import android.content.Context;
|
||||
import android.content.Intent;
|
||||
import android.content.IntentFilter;
|
||||
import android.os.BatteryManager;
|
||||
import android.os.SystemClock;
|
||||
import android.util.Log;
|
||||
import java.io.BufferedReader;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.FileReader;
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Scanner;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.concurrent.Future;
|
||||
import java.util.concurrent.ScheduledExecutorService;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
/**
|
||||
* Simple CPU monitor. The caller creates a CpuMonitor object which can then
|
||||
* be used via sampleCpuUtilization() to collect the percentage use of the
|
||||
* cumulative CPU capacity for all CPUs running at their nominal frequency. 3
|
||||
* values are generated: (1) getCpuCurrent() returns the use since the last
|
||||
* sampleCpuUtilization(), (2) getCpuAvg3() returns the use since 3 prior
|
||||
* calls, and (3) getCpuAvgAll() returns the use over all SAMPLE_SAVE_NUMBER
|
||||
* calls.
|
||||
*
|
||||
* <p>CPUs in Android are often "offline", and while this of course means 0 Hz
|
||||
* as current frequency, in this state we cannot even get their nominal
|
||||
* frequency. We therefore tread carefully, and allow any CPU to be missing.
|
||||
* Missing CPUs are assumed to have the same nominal frequency as any close
|
||||
* lower-numbered CPU, but as soon as it is online, we'll get their proper
|
||||
* frequency and remember it. (Since CPU 0 in practice always seem to be
|
||||
* online, this unidirectional frequency inheritance should be no problem in
|
||||
* practice.)
|
||||
*
|
||||
* <p>Caveats:
|
||||
* o No provision made for zany "turbo" mode, common in the x86 world.
|
||||
* o No provision made for ARM big.LITTLE; if CPU n can switch behind our
|
||||
* back, we might get incorrect estimates.
|
||||
* o This is not thread-safe. To call asynchronously, create different
|
||||
* CpuMonitor objects.
|
||||
*
|
||||
* <p>If we can gather enough info to generate a sensible result,
|
||||
* sampleCpuUtilization returns true. It is designed to never throw an
|
||||
* exception.
|
||||
*
|
||||
* <p>sampleCpuUtilization should not be called too often in its present form,
|
||||
* since then deltas would be small and the percent values would fluctuate and
|
||||
* be unreadable. If it is desirable to call it more often than say once per
|
||||
* second, one would need to increase SAMPLE_SAVE_NUMBER and probably use
|
||||
* Queue<Integer> to avoid copying overhead.
|
||||
*
|
||||
* <p>Known problems:
|
||||
* 1. Nexus 7 devices running Kitkat have a kernel which often outputs an
|
||||
* incorrect 'idle' field in /proc/stat. The value is close to twice the
|
||||
* correct value, and then returns back to the correct reading. Both when
|
||||
* jumping up and back down we might create faulty CPU load readings.
|
||||
*/
|
||||
|
||||
class CpuMonitor {
  private static final String TAG = "CpuMonitor";
  // Number of samples retained by each MovingAverage below.
  private static final int MOVING_AVERAGE_SAMPLES = 5;

  // How often the background task samples CPU utilization.
  private static final int CPU_STAT_SAMPLE_PERIOD_MS = 2000;
  // Minimum interval between log lines summarizing the statistics.
  private static final int CPU_STAT_LOG_PERIOD_MS = 6000;

  private final Context appContext;
  // User CPU usage at current frequency.
  private final MovingAverage userCpuUsage;
  // System CPU usage at current frequency.
  private final MovingAverage systemCpuUsage;
  // Total CPU usage relative to maximum frequency.
  private final MovingAverage totalCpuUsage;
  // CPU frequency in percentage from maximum.
  private final MovingAverage frequencyScale;

  // Single-threaded executor running the periodic sampling task; null while paused.
  private ScheduledExecutorService executor;
  // Timestamp (elapsedRealtime) of the last logged summary, for log throttling.
  private long lastStatLogTimeMs;
  // Per-core nominal max frequency; 0 means "not yet determined".
  private long[] cpuFreqMax;
  // Core count parsed from /sys/devices/system/cpu/present; 0 disables sampling.
  private int cpusPresent;
  // Number of cores whose current frequency could be read in the last sample.
  private int actualCpusPresent;
  // True once init() has run (lazily, from the first sample).
  private boolean initialized;
  private boolean cpuOveruse;
  // Per-core sysfs paths; maxPath[i] is nulled once that core's max freq is known.
  private String[] maxPath;
  private String[] curPath;
  // Per-core current/max frequency ratio from the last sample.
  private double[] curFreqScales;
  // /proc/stat snapshot from the previous sample, used to compute deltas.
  private ProcStat lastProcStat;

  /** Immutable snapshot of the cumulative user/system/idle times from /proc/stat. */
  private static class ProcStat {
    final long userTime;
    final long systemTime;
    final long idleTime;

    ProcStat(long userTime, long systemTime, long idleTime) {
      this.userTime = userTime;
      this.systemTime = systemTime;
      this.idleTime = idleTime;
    }
  }

  /**
   * Fixed-size circular buffer exposing both the most recent value and the
   * running average over the whole buffer.
   */
  private static class MovingAverage {
    private final int size;
    private double sum;
    private double currentValue;
    private double[] circBuffer;
    private int circBufferIndex;

    public MovingAverage(int size) {
      if (size <= 0) {
        throw new AssertionError("Size value in MovingAverage ctor should be positive.");
      }
      this.size = size;
      circBuffer = new double[size];
    }

    /** Clears all samples and the current value. */
    public void reset() {
      Arrays.fill(circBuffer, 0);
      circBufferIndex = 0;
      sum = 0;
      currentValue = 0;
    }

    /** Replaces the oldest buffered sample with {@code value} and updates the sum. */
    public void addValue(double value) {
      sum -= circBuffer[circBufferIndex];
      circBuffer[circBufferIndex++] = value;
      currentValue = value;
      sum += value;
      if (circBufferIndex >= size) {
        circBufferIndex = 0;
      }
    }

    /** Returns the most recently added value. */
    public double getCurrent() {
      return currentValue;
    }

    /** Returns the buffer average; never-written slots count as 0. */
    public double getAverage() {
      return sum / (double) size;
    }
  }

  /** Creates the monitor and immediately starts the periodic sampling task. */
  public CpuMonitor(Context context) {
    Log.d(TAG, "CpuMonitor ctor.");
    appContext = context.getApplicationContext();
    userCpuUsage = new MovingAverage(MOVING_AVERAGE_SAMPLES);
    systemCpuUsage = new MovingAverage(MOVING_AVERAGE_SAMPLES);
    totalCpuUsage = new MovingAverage(MOVING_AVERAGE_SAMPLES);
    frequencyScale = new MovingAverage(MOVING_AVERAGE_SAMPLES);
    lastStatLogTimeMs = SystemClock.elapsedRealtime();

    scheduleCpuUtilizationTask();
  }

  /** Stops the sampling task. Safe to call when already paused. */
  public void pause() {
    if (executor != null) {
      Log.d(TAG, "pause");
      executor.shutdownNow();
      executor = null;
    }
  }

  /** Clears collected statistics and (re)starts the sampling task. */
  public void resume() {
    Log.d(TAG, "resume");
    resetStat();
    scheduleCpuUtilizationTask();
  }

  /** Clears collected statistics without stopping sampling. No-op while paused. */
  public synchronized void reset() {
    if (executor != null) {
      Log.d(TAG, "reset");
      resetStat();
      cpuOveruse = false;
    }
  }

  /** Returns the latest combined user+system CPU usage as an integer percentage. */
  public synchronized int getCpuUsageCurrent() {
    return doubleToPercent(userCpuUsage.getCurrent() + systemCpuUsage.getCurrent());
  }

  /** Returns the averaged combined user+system CPU usage as an integer percentage. */
  public synchronized int getCpuUsageAverage() {
    return doubleToPercent(userCpuUsage.getAverage() + systemCpuUsage.getAverage());
  }

  /** Returns the averaged current/max frequency ratio as an integer percentage. */
  public synchronized int getFrequencyScaleAverage() {
    return doubleToPercent(frequencyScale.getAverage());
  }

  // Replaces any running executor with a fresh one that samples every
  // CPU_STAT_SAMPLE_PERIOD_MS milliseconds, starting immediately.
  private void scheduleCpuUtilizationTask() {
    if (executor != null) {
      executor.shutdownNow();
      executor = null;
    }

    executor = Executors.newSingleThreadScheduledExecutor();
    @SuppressWarnings("unused") // Prevent downstream linter warnings.
    Future<?> possiblyIgnoredError = executor.scheduleAtFixedRate(new Runnable() {
      @Override
      public void run() {
        cpuUtilizationTask();
      }
    }, 0, CPU_STAT_SAMPLE_PERIOD_MS, TimeUnit.MILLISECONDS);
  }

  // Periodic task body: takes one sample and, at most once per
  // CPU_STAT_LOG_PERIOD_MS, logs a human-readable summary.
  private void cpuUtilizationTask() {
    boolean cpuMonitorAvailable = sampleCpuUtilization();
    if (cpuMonitorAvailable
        && SystemClock.elapsedRealtime() - lastStatLogTimeMs >= CPU_STAT_LOG_PERIOD_MS) {
      lastStatLogTimeMs = SystemClock.elapsedRealtime();
      String statString = getStatString();
      Log.d(TAG, statString);
    }
  }

  // Lazily called from the first sample. Discovers the core count from
  // /sys/devices/system/cpu/present and builds the per-core frequency path
  // tables. On a parsing problem cpusPresent stays 0, which makes
  // sampleCpuUtilization() bail out on every call.
  private void init() {
    try {
      FileReader fin = new FileReader("/sys/devices/system/cpu/present");
      try {
        BufferedReader reader = new BufferedReader(fin);
        // The file contains a range like "0-7"; the core count is the upper
        // bound plus one.
        Scanner scanner = new Scanner(reader).useDelimiter("[-\n]");
        scanner.nextInt(); // Skip leading number 0.
        cpusPresent = 1 + scanner.nextInt();
        scanner.close();
      } catch (Exception e) {
        Log.e(TAG, "Cannot do CPU stats due to /sys/devices/system/cpu/present parsing problem");
      } finally {
        fin.close();
      }
    } catch (FileNotFoundException e) {
      Log.e(TAG, "Cannot do CPU stats since /sys/devices/system/cpu/present is missing");
    } catch (IOException e) {
      Log.e(TAG, "Error closing file");
    }

    cpuFreqMax = new long[cpusPresent];
    maxPath = new String[cpusPresent];
    curPath = new String[cpusPresent];
    curFreqScales = new double[cpusPresent];
    for (int i = 0; i < cpusPresent; i++) {
      cpuFreqMax[i] = 0; // Frequency "not yet determined".
      curFreqScales[i] = 0;
      maxPath[i] = "/sys/devices/system/cpu/cpu" + i + "/cpufreq/cpuinfo_max_freq";
      curPath[i] = "/sys/devices/system/cpu/cpu" + i + "/cpufreq/scaling_cur_freq";
    }

    lastProcStat = new ProcStat(0, 0, 0);
    resetStat();

    initialized = true;
  }

  // Clears all moving averages and restarts the log-throttling timer.
  private synchronized void resetStat() {
    userCpuUsage.reset();
    systemCpuUsage.reset();
    totalCpuUsage.reset();
    frequencyScale.reset();
    lastStatLogTimeMs = SystemClock.elapsedRealtime();
  }

  // Returns the battery charge level as a percentage in [0, 100]; 0 if the
  // scale reported by the system is not positive.
  private int getBatteryLevel() {
    // Use sticky broadcast with null receiver to read battery level once only.
    Intent intent = appContext.registerReceiver(
        null /* receiver */, new IntentFilter(Intent.ACTION_BATTERY_CHANGED));

    int batteryLevel = 0;
    int batteryScale = intent.getIntExtra(BatteryManager.EXTRA_SCALE, 100);
    if (batteryScale > 0) {
      batteryLevel =
          (int) (100f * intent.getIntExtra(BatteryManager.EXTRA_LEVEL, 0) / batteryScale);
    }
    return batteryLevel;
  }

  /**
   * Re-measure CPU use. Call this method at an interval of around 1/s.
   * On success the user/system/total usage and frequency-scale moving
   * averages are updated and true is returned. On failure (no readable
   * frequency for any core, unreadable /proc/stat, or zero elapsed time
   * since the last sample) false is returned and the averages are left
   * untouched.
   */
  private synchronized boolean sampleCpuUtilization() {
    long lastSeenMaxFreq = 0;
    long cpuFreqCurSum = 0;
    long cpuFreqMaxSum = 0;

    if (!initialized) {
      init();
    }
    if (cpusPresent == 0) {
      return false;
    }

    actualCpusPresent = 0;
    for (int i = 0; i < cpusPresent; i++) {
      /*
       * For each CPU, attempt to first read its max frequency, then its
       * current frequency. Once the max frequency for a CPU is found,
       * save it in cpuFreqMax[].
       */

      curFreqScales[i] = 0;
      if (cpuFreqMax[i] == 0) {
        // We have never found this CPU's max frequency. Attempt to read it.
        long cpufreqMax = readFreqFromFile(maxPath[i]);
        if (cpufreqMax > 0) {
          Log.d(TAG, "Core " + i + ". Max frequency: " + cpufreqMax);
          lastSeenMaxFreq = cpufreqMax;
          cpuFreqMax[i] = cpufreqMax;
          maxPath[i] = null; // Kill path to free its memory.
        }
      } else {
        lastSeenMaxFreq = cpuFreqMax[i]; // A valid, previously read value.
      }

      long cpuFreqCur = readFreqFromFile(curPath[i]);
      if (cpuFreqCur == 0 && lastSeenMaxFreq == 0) {
        // No current frequency information for this CPU core - ignore it.
        continue;
      }
      if (cpuFreqCur > 0) {
        actualCpusPresent++;
      }
      cpuFreqCurSum += cpuFreqCur;

      /* Here, lastSeenMaxFreq might come from
       * 1. cpuFreqMax[i], or
       * 2. a previous iteration, or
       * 3. a newly read value, or
       * 4. hypothetically from the pre-loop dummy.
       */
      cpuFreqMaxSum += lastSeenMaxFreq;
      if (lastSeenMaxFreq > 0) {
        curFreqScales[i] = (double) cpuFreqCur / lastSeenMaxFreq;
      }
    }

    if (cpuFreqCurSum == 0 || cpuFreqMaxSum == 0) {
      Log.e(TAG, "Could not read max or current frequency for any CPU");
      return false;
    }

    /*
     * Since the cycle counts are for the period between the last invocation
     * and this present one, we average the percentage CPU frequencies between
     * now and the beginning of the measurement period. This is significantly
     * incorrect only if the frequencies have peaked or dropped in between the
     * invocations.
     */
    double currentFrequencyScale = cpuFreqCurSum / (double) cpuFreqMaxSum;
    if (frequencyScale.getCurrent() > 0) {
      currentFrequencyScale = (frequencyScale.getCurrent() + currentFrequencyScale) * 0.5;
    }

    ProcStat procStat = readProcStat();
    if (procStat == null) {
      return false;
    }

    long diffUserTime = procStat.userTime - lastProcStat.userTime;
    long diffSystemTime = procStat.systemTime - lastProcStat.systemTime;
    long diffIdleTime = procStat.idleTime - lastProcStat.idleTime;
    long allTime = diffUserTime + diffSystemTime + diffIdleTime;

    if (currentFrequencyScale == 0 || allTime == 0) {
      return false;
    }

    // Update statistics.
    frequencyScale.addValue(currentFrequencyScale);

    double currentUserCpuUsage = diffUserTime / (double) allTime;
    userCpuUsage.addValue(currentUserCpuUsage);

    double currentSystemCpuUsage = diffSystemTime / (double) allTime;
    systemCpuUsage.addValue(currentSystemCpuUsage);

    double currentTotalCpuUsage =
        (currentUserCpuUsage + currentSystemCpuUsage) * currentFrequencyScale;
    totalCpuUsage.addValue(currentTotalCpuUsage);

    // Save new measurements for next round's deltas.
    lastProcStat = procStat;

    return true;
  }

  // Converts a fraction (e.g. 0.25) to a rounded integer percentage (25).
  private int doubleToPercent(double d) {
    return (int) (d * 100 + 0.5);
  }

  /** Builds a one-line human-readable summary of the collected statistics. */
  private synchronized String getStatString() {
    StringBuilder stat = new StringBuilder();
    stat.append("CPU User: ")
        .append(doubleToPercent(userCpuUsage.getCurrent()))
        .append("/")
        .append(doubleToPercent(userCpuUsage.getAverage()))
        .append(". System: ")
        .append(doubleToPercent(systemCpuUsage.getCurrent()))
        .append("/")
        .append(doubleToPercent(systemCpuUsage.getAverage()))
        .append(". Freq: ")
        .append(doubleToPercent(frequencyScale.getCurrent()))
        .append("/")
        .append(doubleToPercent(frequencyScale.getAverage()))
        .append(". Total usage: ")
        .append(doubleToPercent(totalCpuUsage.getCurrent()))
        .append("/")
        .append(doubleToPercent(totalCpuUsage.getAverage()))
        .append(". Cores: ")
        .append(actualCpusPresent);
    stat.append("( ");
    for (int i = 0; i < cpusPresent; i++) {
      stat.append(doubleToPercent(curFreqScales[i])).append(" ");
    }
    stat.append("). Battery: ").append(getBatteryLevel());
    if (cpuOveruse) {
      stat.append(". Overuse.");
    }
    return stat.toString();
  }

  /**
   * Read a single integer value from the named file. Return the read value
   * or if an error occurs return 0.
   */
  private long readFreqFromFile(String fileName) {
    long number = 0;
    try {
      BufferedReader reader = new BufferedReader(new FileReader(fileName));
      try {
        String line = reader.readLine();
        number = parseLong(line);
      } finally {
        reader.close();
      }
    } catch (FileNotFoundException e) {
      // CPU core is off, so file with its scaling frequency .../cpufreq/scaling_cur_freq
      // is not present. This is not an error.
    } catch (IOException e) {
      // CPU core is off, so file with its scaling frequency .../cpufreq/scaling_cur_freq
      // is empty. This is not an error.
    }
    return number;
  }

  // Parses a decimal long; returns 0 (and logs the error) on malformed input.
  private static long parseLong(String value) {
    long number = 0;
    try {
      number = Long.parseLong(value);
    } catch (NumberFormatException e) {
      Log.e(TAG, "parseLong error.", e);
    }
    return number;
  }

  /*
   * Read the current utilization of all CPUs using the cumulative first line
   * of /proc/stat. Returns null if the file cannot be opened, read, or parsed.
   */
  private ProcStat readProcStat() {
    long userTime = 0;
    long systemTime = 0;
    long idleTime = 0;
    try {
      BufferedReader reader = new BufferedReader(new FileReader("/proc/stat"));
      try {
        // line should contain something like this:
        // cpu 5093818 271838 3512830 165934119 101374 447076 272086 0 0 0
        //     user    nice   system  idle      iowait irq    softirq
        String line = reader.readLine();
        String[] lines = line.split("\\s+");
        int length = lines.length;
        if (length >= 5) {
          userTime = parseLong(lines[1]); // user
          userTime += parseLong(lines[2]); // nice
          systemTime = parseLong(lines[3]); // system
          idleTime = parseLong(lines[4]); // idle
        }
        if (length >= 8) {
          userTime += parseLong(lines[5]); // iowait
          systemTime += parseLong(lines[6]); // irq
          systemTime += parseLong(lines[7]); // softirq
        }
      } catch (Exception e) {
        Log.e(TAG, "Problems parsing /proc/stat", e);
        return null;
      } finally {
        reader.close();
      }
    } catch (FileNotFoundException e) {
      Log.e(TAG, "Cannot open /proc/stat for reading", e);
      return null;
    } catch (IOException e) {
      Log.e(TAG, "Problems reading /proc/stat", e);
      return null;
    }
    return new ProcStat(userTime, systemTime, idleTime);
  }
}
|
||||
347
examples/androidapp/src/org/appspot/apprtc/DirectRTCClient.java
Normal file
347
examples/androidapp/src/org/appspot/apprtc/DirectRTCClient.java
Normal file
@ -0,0 +1,347 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.appspot.apprtc;
|
||||
|
||||
import android.util.Log;
|
||||
|
||||
import org.json.JSONArray;
|
||||
import org.json.JSONException;
|
||||
import org.json.JSONObject;
|
||||
import org.webrtc.IceCandidate;
|
||||
import org.webrtc.PeerConnection;
|
||||
import org.webrtc.SessionDescription;
|
||||
|
||||
import java.util.LinkedList;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
/**
|
||||
* Implementation of AppRTCClient that uses direct TCP connection as the signaling channel.
|
||||
* This eliminates the need for an external server. This class does not support loopback
|
||||
* connections.
|
||||
*/
|
||||
public class DirectRTCClient implements AppRTCClient, TCPChannelClient.TCPChannelEvents {
  private static final String TAG = "DirectRTCClient";
  // Port used when the room id does not include an explicit ":port" suffix.
  private static final int DEFAULT_PORT = 8888;

  // Regex pattern used for checking if room id looks like an IP.
  // Group 1 captures the host part (IPv4, bracketed/unbracketed IPv6, or "localhost");
  // the last group captures the digits of the optional ":port" suffix.
  static final Pattern IP_PATTERN = Pattern.compile("("
      // IPv4
      + "((\\d+\\.){3}\\d+)|"
      // IPv6
      + "\\[((([0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4})?::"
      + "(([0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4})?)\\]|"
      + "\\[(([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4})\\]|"
      // IPv6 without []
      + "((([0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4})?::(([0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4})?)|"
      + "(([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4})|"
      // Literals
      + "localhost"
      + ")"
      // Optional port number
      + "(:(\\d+))?");

  // Single-threaded executor: all signaling work (and every roomState mutation)
  // is serialized onto this thread.
  private final ExecutorService executor;
  // Callbacks back to the application; invocation thread is the executor thread.
  private final SignalingEvents events;
  // Non-null only between connectToRoomInternal() and disconnectFromRoomInternal().
  private TCPChannelClient tcpClient;
  private RoomConnectionParameters connectionParameters;

  private enum ConnectionState { NEW, CONNECTED, CLOSED, ERROR }

  // All alterations of the room state should be done from inside the looper thread.
  private ConnectionState roomState;

  public DirectRTCClient(SignalingEvents events) {
    this.events = events;

    executor = Executors.newSingleThreadExecutor();
    roomState = ConnectionState.NEW;
  }

  /**
   * Connects to the room, roomId in connectionsParameters is required. roomId must be a valid
   * IP address matching IP_PATTERN.
   */
  @Override
  public void connectToRoom(RoomConnectionParameters connectionParameters) {
    this.connectionParameters = connectionParameters;

    if (connectionParameters.loopback) {
      // NOTE: reportError does not return; the connect attempt below still runs.
      reportError("Loopback connections aren't supported by DirectRTCClient.");
    }

    executor.execute(new Runnable() {
      @Override
      public void run() {
        connectToRoomInternal();
      }
    });
  }

  @Override
  public void disconnectFromRoom() {
    executor.execute(new Runnable() {
      @Override
      public void run() {
        disconnectFromRoomInternal();
      }
    });
  }

  /**
   * Connects to the room.
   *
   * Runs on the looper thread.
   */
  private void connectToRoomInternal() {
    this.roomState = ConnectionState.NEW;

    String endpoint = connectionParameters.roomId;

    Matcher matcher = IP_PATTERN.matcher(endpoint);
    if (!matcher.matches()) {
      reportError("roomId must match IP_PATTERN for DirectRTCClient.");
      return;
    }

    // Group 1 is the host; the final group is the optional port (null if absent).
    String ip = matcher.group(1);
    String portStr = matcher.group(matcher.groupCount());
    int port;

    if (portStr != null) {
      try {
        port = Integer.parseInt(portStr);
      } catch (NumberFormatException e) {
        reportError("Invalid port number: " + portStr);
        return;
      }
    } else {
      port = DEFAULT_PORT;
    }

    // TCPChannelClient decides client vs. server role; we learn it in onTCPConnected().
    tcpClient = new TCPChannelClient(executor, this, ip, port);
  }

  /**
   * Disconnects from the room.
   *
   * Runs on the looper thread.
   */
  private void disconnectFromRoomInternal() {
    roomState = ConnectionState.CLOSED;

    if (tcpClient != null) {
      tcpClient.disconnect();
      tcpClient = null;
    }
    // Shuts down the executor from one of its own tasks; queued tasks still drain,
    // but no new work can be submitted after this.
    executor.shutdown();
  }

  @Override
  public void sendOfferSdp(final SessionDescription sdp) {
    executor.execute(new Runnable() {
      @Override
      public void run() {
        if (roomState != ConnectionState.CONNECTED) {
          reportError("Sending offer SDP in non connected state.");
          return;
        }
        JSONObject json = new JSONObject();
        jsonPut(json, "sdp", sdp.description);
        jsonPut(json, "type", "offer");
        sendMessage(json.toString());
      }
    });
  }

  // NOTE(review): unlike sendOfferSdp, there is no CONNECTED-state guard here —
  // presumably answers are only produced after an offer arrived; confirm upstream.
  @Override
  public void sendAnswerSdp(final SessionDescription sdp) {
    executor.execute(new Runnable() {
      @Override
      public void run() {
        JSONObject json = new JSONObject();
        jsonPut(json, "sdp", sdp.description);
        jsonPut(json, "type", "answer");
        sendMessage(json.toString());
      }
    });
  }

  @Override
  public void sendLocalIceCandidate(final IceCandidate candidate) {
    executor.execute(new Runnable() {
      @Override
      public void run() {
        JSONObject json = new JSONObject();
        jsonPut(json, "type", "candidate");
        jsonPut(json, "label", candidate.sdpMLineIndex);
        jsonPut(json, "id", candidate.sdpMid);
        jsonPut(json, "candidate", candidate.sdp);

        if (roomState != ConnectionState.CONNECTED) {
          reportError("Sending ICE candidate in non connected state.");
          return;
        }
        sendMessage(json.toString());
      }
    });
  }

  /** Send removed Ice candidates to the other participant. */
  @Override
  public void sendLocalIceCandidateRemovals(final IceCandidate[] candidates) {
    executor.execute(new Runnable() {
      @Override
      public void run() {
        JSONObject json = new JSONObject();
        jsonPut(json, "type", "remove-candidates");
        JSONArray jsonArray = new JSONArray();
        for (final IceCandidate candidate : candidates) {
          jsonArray.put(toJsonCandidate(candidate));
        }
        jsonPut(json, "candidates", jsonArray);

        if (roomState != ConnectionState.CONNECTED) {
          reportError("Sending ICE candidate removals in non connected state.");
          return;
        }
        sendMessage(json.toString());
      }
    });
  }

  // -------------------------------------------------------------------
  // TCPChannelClient event handlers

  /**
   * If the client is the server side, this will trigger onConnectedToRoom.
   */
  @Override
  public void onTCPConnected(boolean isServer) {
    if (isServer) {
      roomState = ConnectionState.CONNECTED;

      SignalingParameters parameters = new SignalingParameters(
          // Ice servers are not needed for direct connections.
          new LinkedList<PeerConnection.IceServer>(),
          isServer, // Server side acts as the initiator on direct connections.
          null, // clientId
          null, // wssUrl
          null, // wwsPostUrl
          null, // offerSdp
          null // iceCandidates
          );
      events.onConnectedToRoom(parameters);
    }
    // Client side waits for the remote "offer" message (see onTCPMessage) before
    // it is considered connected.
  }

  // Dispatches an incoming signaling message by its "type" field:
  // "candidate", "remove-candidates", "answer", or "offer".
  @Override
  public void onTCPMessage(String msg) {
    try {
      JSONObject json = new JSONObject(msg);
      String type = json.optString("type");
      if (type.equals("candidate")) {
        events.onRemoteIceCandidate(toJavaCandidate(json));
      } else if (type.equals("remove-candidates")) {
        JSONArray candidateArray = json.getJSONArray("candidates");
        IceCandidate[] candidates = new IceCandidate[candidateArray.length()];
        for (int i = 0; i < candidateArray.length(); ++i) {
          candidates[i] = toJavaCandidate(candidateArray.getJSONObject(i));
        }
        events.onRemoteIceCandidatesRemoved(candidates);
      } else if (type.equals("answer")) {
        SessionDescription sdp = new SessionDescription(
            SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));
        events.onRemoteDescription(sdp);
      } else if (type.equals("offer")) {
        SessionDescription sdp = new SessionDescription(
            SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));

        SignalingParameters parameters = new SignalingParameters(
            // Ice servers are not needed for direct connections.
            new LinkedList<PeerConnection.IceServer>(),
            false, // This code will only be run on the client side. So, we are not the initiator.
            null, // clientId
            null, // wssUrl
            null, // wssPostUrl
            sdp, // offerSdp
            null // iceCandidates
            );
        // Receiving the offer is what transitions the client side to CONNECTED.
        roomState = ConnectionState.CONNECTED;
        events.onConnectedToRoom(parameters);
      } else {
        reportError("Unexpected TCP message: " + msg);
      }
    } catch (JSONException e) {
      reportError("TCP message JSON parsing error: " + e.toString());
    }
  }

  @Override
  public void onTCPError(String description) {
    reportError("TCP connection error: " + description);
  }

  @Override
  public void onTCPClose() {
    events.onChannelClose();
  }

  // --------------------------------------------------------------------
  // Helper functions.

  // Logs the error, then (on the executor thread) moves the room to ERROR and
  // notifies the application exactly once — later errors are suppressed.
  private void reportError(final String errorMessage) {
    Log.e(TAG, errorMessage);
    executor.execute(new Runnable() {
      @Override
      public void run() {
        if (roomState != ConnectionState.ERROR) {
          roomState = ConnectionState.ERROR;
          events.onChannelError(errorMessage);
        }
      }
    });
  }

  // Sends a raw message over the TCP channel from the executor thread.
  private void sendMessage(final String message) {
    executor.execute(new Runnable() {
      @Override
      public void run() {
        tcpClient.send(message);
      }
    });
  }

  // Put a |key|->|value| mapping in |json|.
  // Wraps the checked JSONException into a RuntimeException since keys here are constants.
  private static void jsonPut(JSONObject json, String key, Object value) {
    try {
      json.put(key, value);
    } catch (JSONException e) {
      throw new RuntimeException(e);
    }
  }

  // Converts a Java candidate to a JSONObject.
  private static JSONObject toJsonCandidate(final IceCandidate candidate) {
    JSONObject json = new JSONObject();
    jsonPut(json, "label", candidate.sdpMLineIndex);
    jsonPut(json, "id", candidate.sdpMid);
    jsonPut(json, "candidate", candidate.sdp);
    return json;
  }

  // Converts a JSON candidate to a Java object.
  private static IceCandidate toJavaCandidate(JSONObject json) throws JSONException {
    return new IceCandidate(
        json.getString("id"), json.getInt("label"), json.getString("candidate"));
  }
}
|
||||
205
examples/androidapp/src/org/appspot/apprtc/HudFragment.java
Normal file
205
examples/androidapp/src/org/appspot/apprtc/HudFragment.java
Normal file
@ -0,0 +1,205 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.appspot.apprtc;
|
||||
|
||||
import android.app.Fragment;
|
||||
import android.os.Bundle;
|
||||
import android.util.TypedValue;
|
||||
import android.view.LayoutInflater;
|
||||
import android.view.View;
|
||||
import android.view.ViewGroup;
|
||||
import android.widget.ImageButton;
|
||||
import android.widget.TextView;
|
||||
|
||||
import org.webrtc.StatsReport;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* Fragment for HUD statistics display.
|
||||
*/
|
||||
public class HudFragment extends Fragment {
  private View controlView;
  // Compact encoder summary (fps / target / actual bitrate / CPU).
  private TextView encoderStatView;
  // Detailed per-category HUD views, toggled by toggleDebugButton.
  private TextView hudViewBwe;
  private TextView hudViewConnection;
  private TextView hudViewVideoSend;
  private TextView hudViewVideoRecv;
  private ImageButton toggleDebugButton;
  private boolean videoCallEnabled;
  private boolean displayHud;
  // volatile: set on the UI thread in onStart/onStop, read by updateEncoderStatistics
  // which may be invoked from a stats callback.
  private volatile boolean isRunning;
  private CpuMonitor cpuMonitor;

  // Inflates the HUD layout and wires the debug-toggle button, which flips the
  // detailed HUD views between VISIBLE and INVISIBLE when the HUD is enabled.
  @Override
  public View onCreateView(
      LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
    controlView = inflater.inflate(R.layout.fragment_hud, container, false);

    // Create UI controls.
    encoderStatView = (TextView) controlView.findViewById(R.id.encoder_stat_call);
    hudViewBwe = (TextView) controlView.findViewById(R.id.hud_stat_bwe);
    hudViewConnection = (TextView) controlView.findViewById(R.id.hud_stat_connection);
    hudViewVideoSend = (TextView) controlView.findViewById(R.id.hud_stat_video_send);
    hudViewVideoRecv = (TextView) controlView.findViewById(R.id.hud_stat_video_recv);
    toggleDebugButton = (ImageButton) controlView.findViewById(R.id.button_toggle_debug);

    toggleDebugButton.setOnClickListener(new View.OnClickListener() {
      @Override
      public void onClick(View view) {
        if (displayHud) {
          int visibility =
              (hudViewBwe.getVisibility() == View.VISIBLE) ? View.INVISIBLE : View.VISIBLE;
          hudViewsSetProperties(visibility);
        }
      }
    });

    return controlView;
  }

  // Reads the call configuration from the fragment arguments and shows or hides
  // the HUD controls accordingly. Detailed views start INVISIBLE until toggled.
  @Override
  public void onStart() {
    super.onStart();

    Bundle args = getArguments();
    if (args != null) {
      videoCallEnabled = args.getBoolean(CallActivity.EXTRA_VIDEO_CALL, true);
      displayHud = args.getBoolean(CallActivity.EXTRA_DISPLAY_HUD, false);
    }
    int visibility = displayHud ? View.VISIBLE : View.INVISIBLE;
    encoderStatView.setVisibility(visibility);
    toggleDebugButton.setVisibility(visibility);
    hudViewsSetProperties(View.INVISIBLE);
    isRunning = true;
  }

  @Override
  public void onStop() {
    // Stop consuming stats updates before the views go away.
    isRunning = false;
    super.onStop();
  }

  public void setCpuMonitor(CpuMonitor cpuMonitor) {
    this.cpuMonitor = cpuMonitor;
  }

  // Applies a common visibility and text size to all four detailed HUD views.
  private void hudViewsSetProperties(int visibility) {
    hudViewBwe.setVisibility(visibility);
    hudViewConnection.setVisibility(visibility);
    hudViewVideoSend.setVisibility(visibility);
    hudViewVideoRecv.setVisibility(visibility);
    hudViewBwe.setTextSize(TypedValue.COMPLEX_UNIT_PT, 5);
    hudViewConnection.setTextSize(TypedValue.COMPLEX_UNIT_PT, 5);
    hudViewVideoSend.setTextSize(TypedValue.COMPLEX_UNIT_PT, 5);
    hudViewVideoRecv.setTextSize(TypedValue.COMPLEX_UNIT_PT, 5);
  }

  // Flattens a StatsReport's name/value pairs into a map for keyed lookup.
  private Map<String, String> getReportMap(StatsReport report) {
    Map<String, String> reportMap = new HashMap<String, String>();
    for (StatsReport.Value value : report.values) {
      reportMap.put(value.name, value.value);
    }
    return reportMap;
  }

  // Renders a batch of legacy ("goog*") stats reports into the HUD text views.
  // No-op when the fragment is stopped or the HUD is disabled.
  // NOTE(review): appears to rely on being called on the UI thread (setText);
  // confirm against the caller.
  public void updateEncoderStatistics(final StatsReport[] reports) {
    if (!isRunning || !displayHud) {
      return;
    }
    StringBuilder encoderStat = new StringBuilder(128);
    StringBuilder bweStat = new StringBuilder();
    StringBuilder connectionStat = new StringBuilder();
    StringBuilder videoSendStat = new StringBuilder();
    StringBuilder videoRecvStat = new StringBuilder();
    String fps = null;
    String targetBitrate = null;
    String actualBitrate = null;

    for (StatsReport report : reports) {
      if (report.type.equals("ssrc") && report.id.contains("ssrc") && report.id.contains("send")) {
        // Send video statistics.
        Map<String, String> reportMap = getReportMap(report);
        String trackId = reportMap.get("googTrackId");
        if (trackId != null && trackId.contains(PeerConnectionClient.VIDEO_TRACK_ID)) {
          fps = reportMap.get("googFrameRateSent");
          videoSendStat.append(report.id).append("\n");
          for (StatsReport.Value value : report.values) {
            // Strip the "goog" vendor prefix for display.
            String name = value.name.replace("goog", "");
            videoSendStat.append(name).append("=").append(value.value).append("\n");
          }
        }
      } else if (report.type.equals("ssrc") && report.id.contains("ssrc")
          && report.id.contains("recv")) {
        // Receive video statistics.
        Map<String, String> reportMap = getReportMap(report);
        // Check if this stat is for video track.
        String frameWidth = reportMap.get("googFrameWidthReceived");
        if (frameWidth != null) {
          videoRecvStat.append(report.id).append("\n");
          for (StatsReport.Value value : report.values) {
            String name = value.name.replace("goog", "");
            videoRecvStat.append(name).append("=").append(value.value).append("\n");
          }
        }
      } else if (report.id.equals("bweforvideo")) {
        // BWE statistics.
        Map<String, String> reportMap = getReportMap(report);
        targetBitrate = reportMap.get("googTargetEncBitrate");
        actualBitrate = reportMap.get("googActualEncBitrate");

        bweStat.append(report.id).append("\n");
        for (StatsReport.Value value : report.values) {
          String name = value.name.replace("goog", "").replace("Available", "");
          bweStat.append(name).append("=").append(value.value).append("\n");
        }
      } else if (report.type.equals("googCandidatePair")) {
        // Connection statistics.
        Map<String, String> reportMap = getReportMap(report);
        String activeConnection = reportMap.get("googActiveConnection");
        if (activeConnection != null && activeConnection.equals("true")) {
          connectionStat.append(report.id).append("\n");
          for (StatsReport.Value value : report.values) {
            String name = value.name.replace("goog", "");
            connectionStat.append(name).append("=").append(value.value).append("\n");
          }
        }
      }
    }
    hudViewBwe.setText(bweStat.toString());
    hudViewConnection.setText(connectionStat.toString());
    hudViewVideoSend.setText(videoSendStat.toString());
    hudViewVideoRecv.setText(videoRecvStat.toString());

    if (videoCallEnabled) {
      if (fps != null) {
        encoderStat.append("Fps:  ").append(fps).append("\n");
      }
      if (targetBitrate != null) {
        encoderStat.append("Target BR: ").append(targetBitrate).append("\n");
      }
      if (actualBitrate != null) {
        encoderStat.append("Actual BR: ").append(actualBitrate).append("\n");
      }
    }

    if (cpuMonitor != null) {
      encoderStat.append("CPU%: ")
          .append(cpuMonitor.getCpuUsageCurrent())
          .append("/")
          .append(cpuMonitor.getCpuUsageAverage())
          .append(". Freq: ")
          .append(cpuMonitor.getFrequencyScaleAverage());
    }
    encoderStatView.setText(encoderStat.toString());
  }
}
|
||||
1368
examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java
Normal file
1368
examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java
Normal file
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,226 @@
|
||||
/*
|
||||
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.appspot.apprtc;
|
||||
|
||||
import org.appspot.apprtc.AppRTCClient.SignalingParameters;
|
||||
import org.appspot.apprtc.util.AsyncHttpURLConnection;
|
||||
import org.appspot.apprtc.util.AsyncHttpURLConnection.AsyncHttpEvents;
|
||||
|
||||
import android.util.Log;
|
||||
|
||||
import org.json.JSONArray;
|
||||
import org.json.JSONException;
|
||||
import org.json.JSONObject;
|
||||
import org.webrtc.IceCandidate;
|
||||
import org.webrtc.PeerConnection;
|
||||
import org.webrtc.SessionDescription;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.net.HttpURLConnection;
|
||||
import java.net.URL;
|
||||
import java.util.LinkedList;
|
||||
import java.util.Scanner;
|
||||
|
||||
/**
|
||||
* AsyncTask that converts an AppRTC room URL into the set of signaling
|
||||
* parameters to use with that room.
|
||||
*/
|
||||
public class RoomParametersFetcher {
  private static final String TAG = "RoomRTCClient";
  // Connect and read timeout for the TURN-server HTTP request.
  private static final int TURN_HTTP_TIMEOUT_MS = 5000;
  private final RoomParametersFetcherEvents events;
  private final String roomUrl;
  private final String roomMessage;
  private AsyncHttpURLConnection httpConnection;

  /**
   * Room parameters fetcher callbacks.
   */
  public interface RoomParametersFetcherEvents {
    /**
     * Callback fired once the room's signaling parameters
     * SignalingParameters are extracted.
     */
    void onSignalingParametersReady(final SignalingParameters params);

    /**
     * Callback for room parameters extraction error.
     */
    void onSignalingParametersError(final String description);
  }

  public RoomParametersFetcher(
      String roomUrl, String roomMessage, final RoomParametersFetcherEvents events) {
    this.roomUrl = roomUrl;
    this.roomMessage = roomMessage;
    this.events = events;
  }

  // Starts the asynchronous POST to the room server; the result arrives via the
  // AsyncHttpEvents callbacks below and ultimately through |events|.
  public void makeRequest() {
    Log.d(TAG, "Connecting to room: " + roomUrl);
    httpConnection =
        new AsyncHttpURLConnection("POST", roomUrl, roomMessage, new AsyncHttpEvents() {
          @Override
          public void onHttpError(String errorMessage) {
            Log.e(TAG, "Room connection error: " + errorMessage);
            events.onSignalingParametersError(errorMessage);
          }

          @Override
          public void onHttpComplete(String response) {
            roomHttpResponseParse(response);
          }
        });
    httpConnection.send();
  }

  // Parses the room server's JSON response into SignalingParameters.
  // The payload is double-wrapped: the outer object carries "result"/"params",
  // and "params" is itself a JSON-encoded string that is parsed again.
  // NOTE(review): runs on the AsyncHttpURLConnection callback thread and performs
  // blocking TURN requests — confirm this is never the UI thread.
  private void roomHttpResponseParse(String response) {
    Log.d(TAG, "Room response: " + response);
    try {
      LinkedList<IceCandidate> iceCandidates = null;
      SessionDescription offerSdp = null;
      JSONObject roomJson = new JSONObject(response);

      String result = roomJson.getString("result");
      if (!result.equals("SUCCESS")) {
        events.onSignalingParametersError("Room response error: " + result);
        return;
      }
      response = roomJson.getString("params");
      roomJson = new JSONObject(response);
      String roomId = roomJson.getString("room_id");
      String clientId = roomJson.getString("client_id");
      String wssUrl = roomJson.getString("wss_url");
      String wssPostUrl = roomJson.getString("wss_post_url");
      boolean initiator = (roomJson.getBoolean("is_initiator"));
      if (!initiator) {
        // Non-initiators may already have the remote offer and candidates queued
        // in "messages"; collect them so the call can proceed immediately.
        iceCandidates = new LinkedList<IceCandidate>();
        String messagesString = roomJson.getString("messages");
        JSONArray messages = new JSONArray(messagesString);
        for (int i = 0; i < messages.length(); ++i) {
          String messageString = messages.getString(i);
          JSONObject message = new JSONObject(messageString);
          String messageType = message.getString("type");
          Log.d(TAG, "GAE->C #" + i + " : " + messageString);
          if (messageType.equals("offer")) {
            offerSdp = new SessionDescription(
                SessionDescription.Type.fromCanonicalForm(messageType), message.getString("sdp"));
          } else if (messageType.equals("candidate")) {
            IceCandidate candidate = new IceCandidate(
                message.getString("id"), message.getInt("label"), message.getString("candidate"));
            iceCandidates.add(candidate);
          } else {
            Log.e(TAG, "Unknown message: " + messageString);
          }
        }
      }
      Log.d(TAG, "RoomId: " + roomId + ". ClientId: " + clientId);
      Log.d(TAG, "Initiator: " + initiator);
      Log.d(TAG, "WSS url: " + wssUrl);
      Log.d(TAG, "WSS POST url: " + wssPostUrl);

      LinkedList<PeerConnection.IceServer> iceServers =
          iceServersFromPCConfigJSON(roomJson.getString("pc_config"));
      boolean isTurnPresent = false;
      for (PeerConnection.IceServer server : iceServers) {
        Log.d(TAG, "IceServer: " + server);
        if (server.uri.startsWith("turn:")) {
          isTurnPresent = true;
          break;
        }
      }
      // Request TURN servers.
      // Only fetched when pc_config had none and the room advertises a TURN endpoint.
      if (!isTurnPresent && !roomJson.optString("ice_server_url").isEmpty()) {
        LinkedList<PeerConnection.IceServer> turnServers =
            requestTurnServers(roomJson.getString("ice_server_url"));
        for (PeerConnection.IceServer turnServer : turnServers) {
          Log.d(TAG, "TurnServer: " + turnServer);
          iceServers.add(turnServer);
        }
      }

      SignalingParameters params = new SignalingParameters(
          iceServers, initiator, clientId, wssUrl, wssPostUrl, offerSdp, iceCandidates);
      events.onSignalingParametersReady(params);
    } catch (JSONException e) {
      events.onSignalingParametersError("Room JSON parsing error: " + e.toString());
    } catch (IOException e) {
      events.onSignalingParametersError("Room IO error: " + e.toString());
    }
  }

  // Requests & returns a TURN ICE Server based on a request URL. Must be run
  // off the main thread!
  private LinkedList<PeerConnection.IceServer> requestTurnServers(String url)
      throws IOException, JSONException {
    LinkedList<PeerConnection.IceServer> turnServers = new LinkedList<PeerConnection.IceServer>();
    Log.d(TAG, "Request TURN from: " + url);
    HttpURLConnection connection = (HttpURLConnection) new URL(url).openConnection();
    connection.setDoOutput(true);
    // The TURN endpoint validates the referer header.
    connection.setRequestProperty("REFERER", "https://appr.tc");
    connection.setConnectTimeout(TURN_HTTP_TIMEOUT_MS);
    connection.setReadTimeout(TURN_HTTP_TIMEOUT_MS);
    int responseCode = connection.getResponseCode();
    if (responseCode != 200) {
      throw new IOException("Non-200 response when requesting TURN server from " + url + " : "
          + connection.getHeaderField(null));
    }
    InputStream responseStream = connection.getInputStream();
    String response = drainStream(responseStream);
    connection.disconnect();
    Log.d(TAG, "TURN response: " + response);
    JSONObject responseJSON = new JSONObject(response);
    JSONArray iceServers = responseJSON.getJSONArray("iceServers");
    for (int i = 0; i < iceServers.length(); ++i) {
      JSONObject server = iceServers.getJSONObject(i);
      JSONArray turnUrls = server.getJSONArray("urls");
      // Username/credential are optional per server entry.
      String username = server.has("username") ? server.getString("username") : "";
      String credential = server.has("credential") ? server.getString("credential") : "";
      for (int j = 0; j < turnUrls.length(); j++) {
        String turnUrl = turnUrls.getString(j);
        PeerConnection.IceServer turnServer =
            PeerConnection.IceServer.builder(turnUrl)
                .setUsername(username)
                .setPassword(credential)
                .createIceServer();
        turnServers.add(turnServer);
      }
    }
    return turnServers;
  }

  // Return the list of ICE servers described by a WebRTCPeerConnection
  // configuration string.
  private LinkedList<PeerConnection.IceServer> iceServersFromPCConfigJSON(String pcConfig)
      throws JSONException {
    JSONObject json = new JSONObject(pcConfig);
    JSONArray servers = json.getJSONArray("iceServers");
    LinkedList<PeerConnection.IceServer> ret = new LinkedList<PeerConnection.IceServer>();
    for (int i = 0; i < servers.length(); ++i) {
      JSONObject server = servers.getJSONObject(i);
      // NOTE: "urls" is read as a single string here, unlike the TURN response
      // above where it is a JSON array.
      String url = server.getString("urls");
      String credential = server.has("credential") ? server.getString("credential") : "";
      PeerConnection.IceServer turnServer =
          PeerConnection.IceServer.builder(url)
              .setPassword(credential)
              .createIceServer();
      ret.add(turnServer);
    }
    return ret;
  }

  // Return the contents of an InputStream as a String.
  // "\\A" makes the Scanner consume the entire stream as one token.
  private static String drainStream(InputStream in) {
    Scanner s = new Scanner(in).useDelimiter("\\A");
    return s.hasNext() ? s.next() : "";
  }
}
|
||||
319
examples/androidapp/src/org/appspot/apprtc/SettingsActivity.java
Normal file
319
examples/androidapp/src/org/appspot/apprtc/SettingsActivity.java
Normal file
@ -0,0 +1,319 @@
|
||||
/*
|
||||
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.appspot.apprtc;
|
||||
|
||||
import android.app.Activity;
|
||||
import android.content.SharedPreferences;
|
||||
import android.content.SharedPreferences.OnSharedPreferenceChangeListener;
|
||||
import android.os.Bundle;
|
||||
import android.preference.ListPreference;
|
||||
import android.preference.Preference;
|
||||
import org.webrtc.Camera2Enumerator;
|
||||
import org.webrtc.voiceengine.WebRtcAudioUtils;
|
||||
|
||||
/**
|
||||
* Settings activity for AppRTC.
|
||||
*/
|
||||
public class SettingsActivity extends Activity implements OnSharedPreferenceChangeListener {
|
||||
private SettingsFragment settingsFragment;
|
||||
private String keyprefVideoCall;
|
||||
private String keyprefScreencapture;
|
||||
private String keyprefCamera2;
|
||||
private String keyprefResolution;
|
||||
private String keyprefFps;
|
||||
private String keyprefCaptureQualitySlider;
|
||||
private String keyprefMaxVideoBitrateType;
|
||||
private String keyprefMaxVideoBitrateValue;
|
||||
private String keyPrefVideoCodec;
|
||||
private String keyprefHwCodec;
|
||||
private String keyprefCaptureToTexture;
|
||||
private String keyprefFlexfec;
|
||||
|
||||
private String keyprefStartAudioBitrateType;
|
||||
private String keyprefStartAudioBitrateValue;
|
||||
private String keyPrefAudioCodec;
|
||||
private String keyprefNoAudioProcessing;
|
||||
private String keyprefAecDump;
|
||||
private String keyprefOpenSLES;
|
||||
private String keyprefDisableBuiltInAEC;
|
||||
private String keyprefDisableBuiltInAGC;
|
||||
private String keyprefDisableBuiltInNS;
|
||||
private String keyprefEnableLevelControl;
|
||||
private String keyprefDisableWebRtcAGCAndHPF;
|
||||
private String keyprefSpeakerphone;
|
||||
|
||||
private String keyPrefRoomServerUrl;
|
||||
private String keyPrefDisplayHud;
|
||||
private String keyPrefTracing;
|
||||
|
||||
private String keyprefEnableDataChannel;
|
||||
private String keyprefOrdered;
|
||||
private String keyprefMaxRetransmitTimeMs;
|
||||
private String keyprefMaxRetransmits;
|
||||
private String keyprefDataProtocol;
|
||||
private String keyprefNegotiated;
|
||||
private String keyprefDataId;
|
||||
|
||||
@Override
|
||||
protected void onCreate(Bundle savedInstanceState) {
|
||||
super.onCreate(savedInstanceState);
|
||||
keyprefVideoCall = getString(R.string.pref_videocall_key);
|
||||
keyprefScreencapture = getString(R.string.pref_screencapture_key);
|
||||
keyprefCamera2 = getString(R.string.pref_camera2_key);
|
||||
keyprefResolution = getString(R.string.pref_resolution_key);
|
||||
keyprefFps = getString(R.string.pref_fps_key);
|
||||
keyprefCaptureQualitySlider = getString(R.string.pref_capturequalityslider_key);
|
||||
keyprefMaxVideoBitrateType = getString(R.string.pref_maxvideobitrate_key);
|
||||
keyprefMaxVideoBitrateValue = getString(R.string.pref_maxvideobitratevalue_key);
|
||||
keyPrefVideoCodec = getString(R.string.pref_videocodec_key);
|
||||
keyprefHwCodec = getString(R.string.pref_hwcodec_key);
|
||||
keyprefCaptureToTexture = getString(R.string.pref_capturetotexture_key);
|
||||
keyprefFlexfec = getString(R.string.pref_flexfec_key);
|
||||
|
||||
keyprefStartAudioBitrateType = getString(R.string.pref_startaudiobitrate_key);
|
||||
keyprefStartAudioBitrateValue = getString(R.string.pref_startaudiobitratevalue_key);
|
||||
keyPrefAudioCodec = getString(R.string.pref_audiocodec_key);
|
||||
keyprefNoAudioProcessing = getString(R.string.pref_noaudioprocessing_key);
|
||||
keyprefAecDump = getString(R.string.pref_aecdump_key);
|
||||
keyprefOpenSLES = getString(R.string.pref_opensles_key);
|
||||
keyprefDisableBuiltInAEC = getString(R.string.pref_disable_built_in_aec_key);
|
||||
keyprefDisableBuiltInAGC = getString(R.string.pref_disable_built_in_agc_key);
|
||||
keyprefDisableBuiltInNS = getString(R.string.pref_disable_built_in_ns_key);
|
||||
keyprefEnableLevelControl = getString(R.string.pref_enable_level_control_key);
|
||||
keyprefDisableWebRtcAGCAndHPF = getString(R.string.pref_disable_webrtc_agc_and_hpf_key);
|
||||
keyprefSpeakerphone = getString(R.string.pref_speakerphone_key);
|
||||
|
||||
keyprefEnableDataChannel = getString(R.string.pref_enable_datachannel_key);
|
||||
keyprefOrdered = getString(R.string.pref_ordered_key);
|
||||
keyprefMaxRetransmitTimeMs = getString(R.string.pref_max_retransmit_time_ms_key);
|
||||
keyprefMaxRetransmits = getString(R.string.pref_max_retransmits_key);
|
||||
keyprefDataProtocol = getString(R.string.pref_data_protocol_key);
|
||||
keyprefNegotiated = getString(R.string.pref_negotiated_key);
|
||||
keyprefDataId = getString(R.string.pref_data_id_key);
|
||||
|
||||
keyPrefRoomServerUrl = getString(R.string.pref_room_server_url_key);
|
||||
keyPrefDisplayHud = getString(R.string.pref_displayhud_key);
|
||||
keyPrefTracing = getString(R.string.pref_tracing_key);
|
||||
|
||||
// Display the fragment as the main content.
|
||||
settingsFragment = new SettingsFragment();
|
||||
getFragmentManager()
|
||||
.beginTransaction()
|
||||
.replace(android.R.id.content, settingsFragment)
|
||||
.commit();
|
||||
}
|
||||
|
||||
  @Override
  protected void onResume() {
    super.onResume();
    // Set summary to be the user-description for the selected value
    SharedPreferences sharedPreferences =
        settingsFragment.getPreferenceScreen().getSharedPreferences();
    // Listen for changes so summaries stay in sync while this screen is visible;
    // the listener is removed again in onPause().
    sharedPreferences.registerOnSharedPreferenceChangeListener(this);
    // Video settings.
    updateSummaryB(sharedPreferences, keyprefVideoCall);
    updateSummaryB(sharedPreferences, keyprefScreencapture);
    updateSummaryB(sharedPreferences, keyprefCamera2);
    updateSummary(sharedPreferences, keyprefResolution);
    updateSummary(sharedPreferences, keyprefFps);
    updateSummaryB(sharedPreferences, keyprefCaptureQualitySlider);
    updateSummary(sharedPreferences, keyprefMaxVideoBitrateType);
    updateSummaryBitrate(sharedPreferences, keyprefMaxVideoBitrateValue);
    setVideoBitrateEnable(sharedPreferences);
    updateSummary(sharedPreferences, keyPrefVideoCodec);
    updateSummaryB(sharedPreferences, keyprefHwCodec);
    updateSummaryB(sharedPreferences, keyprefCaptureToTexture);
    updateSummaryB(sharedPreferences, keyprefFlexfec);

    // Audio settings.
    updateSummary(sharedPreferences, keyprefStartAudioBitrateType);
    updateSummaryBitrate(sharedPreferences, keyprefStartAudioBitrateValue);
    setAudioBitrateEnable(sharedPreferences);
    updateSummary(sharedPreferences, keyPrefAudioCodec);
    updateSummaryB(sharedPreferences, keyprefNoAudioProcessing);
    updateSummaryB(sharedPreferences, keyprefAecDump);
    updateSummaryB(sharedPreferences, keyprefOpenSLES);
    updateSummaryB(sharedPreferences, keyprefDisableBuiltInAEC);
    updateSummaryB(sharedPreferences, keyprefDisableBuiltInAGC);
    updateSummaryB(sharedPreferences, keyprefDisableBuiltInNS);
    updateSummaryB(sharedPreferences, keyprefEnableLevelControl);
    updateSummaryB(sharedPreferences, keyprefDisableWebRtcAGCAndHPF);
    updateSummaryList(sharedPreferences, keyprefSpeakerphone);

    // Data channel settings.
    updateSummaryB(sharedPreferences, keyprefEnableDataChannel);
    updateSummaryB(sharedPreferences, keyprefOrdered);
    updateSummary(sharedPreferences, keyprefMaxRetransmitTimeMs);
    updateSummary(sharedPreferences, keyprefMaxRetransmits);
    updateSummary(sharedPreferences, keyprefDataProtocol);
    updateSummaryB(sharedPreferences, keyprefNegotiated);
    updateSummary(sharedPreferences, keyprefDataId);
    setDataChannelEnable(sharedPreferences);

    // Miscellaneous settings.
    updateSummary(sharedPreferences, keyPrefRoomServerUrl);
    updateSummaryB(sharedPreferences, keyPrefDisplayHud);
    updateSummaryB(sharedPreferences, keyPrefTracing);

    // Grey out the Camera2 option on devices that cannot use the Camera2 API.
    if (!Camera2Enumerator.isSupported(this)) {
      Preference camera2Preference = settingsFragment.findPreference(keyprefCamera2);

      camera2Preference.setSummary(getString(R.string.pref_camera2_not_supported));
      camera2Preference.setEnabled(false);
    }

    // Disable forcing WebRTC based AEC so it won't affect our value.
    // Otherwise, if it was enabled, isAcousticEchoCancelerSupported would always return false.
    WebRtcAudioUtils.setWebRtcBasedAcousticEchoCanceler(false);
    if (!WebRtcAudioUtils.isAcousticEchoCancelerSupported()) {
      Preference disableBuiltInAECPreference =
          settingsFragment.findPreference(keyprefDisableBuiltInAEC);

      disableBuiltInAECPreference.setSummary(getString(R.string.pref_built_in_aec_not_available));
      disableBuiltInAECPreference.setEnabled(false);
    }

    // Same pattern as AEC above: clear the forced-software flag before probing
    // hardware AGC support, then grey out the option when unsupported.
    WebRtcAudioUtils.setWebRtcBasedAutomaticGainControl(false);
    if (!WebRtcAudioUtils.isAutomaticGainControlSupported()) {
      Preference disableBuiltInAGCPreference =
          settingsFragment.findPreference(keyprefDisableBuiltInAGC);

      disableBuiltInAGCPreference.setSummary(getString(R.string.pref_built_in_agc_not_available));
      disableBuiltInAGCPreference.setEnabled(false);
    }

    // Same pattern for the hardware noise suppressor.
    WebRtcAudioUtils.setWebRtcBasedNoiseSuppressor(false);
    if (!WebRtcAudioUtils.isNoiseSuppressorSupported()) {
      Preference disableBuiltInNSPreference =
          settingsFragment.findPreference(keyprefDisableBuiltInNS);

      disableBuiltInNSPreference.setSummary(getString(R.string.pref_built_in_ns_not_available));
      disableBuiltInNSPreference.setEnabled(false);
    }
  }
|
||||
|
||||
@Override
|
||||
protected void onPause() {
|
||||
super.onPause();
|
||||
SharedPreferences sharedPreferences =
|
||||
settingsFragment.getPreferenceScreen().getSharedPreferences();
|
||||
sharedPreferences.unregisterOnSharedPreferenceChangeListener(this);
|
||||
}
|
||||
|
||||
  @Override
  public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) {
    // Dispatch on the changed key: plain string prefs, bitrate prefs (kbps suffix),
    // boolean prefs, and list prefs each use a dedicated summary updater.
    // clang-format off
    if (key.equals(keyprefResolution)
        || key.equals(keyprefFps)
        || key.equals(keyprefMaxVideoBitrateType)
        || key.equals(keyPrefVideoCodec)
        || key.equals(keyprefStartAudioBitrateType)
        || key.equals(keyPrefAudioCodec)
        || key.equals(keyPrefRoomServerUrl)
        || key.equals(keyprefMaxRetransmitTimeMs)
        || key.equals(keyprefMaxRetransmits)
        || key.equals(keyprefDataProtocol)
        || key.equals(keyprefDataId)) {
      updateSummary(sharedPreferences, key);
    } else if (key.equals(keyprefMaxVideoBitrateValue)
        || key.equals(keyprefStartAudioBitrateValue)) {
      updateSummaryBitrate(sharedPreferences, key);
    } else if (key.equals(keyprefVideoCall)
        || key.equals(keyprefScreencapture)
        || key.equals(keyprefCamera2)
        || key.equals(keyPrefTracing)
        || key.equals(keyprefCaptureQualitySlider)
        || key.equals(keyprefHwCodec)
        || key.equals(keyprefCaptureToTexture)
        || key.equals(keyprefFlexfec)
        || key.equals(keyprefNoAudioProcessing)
        || key.equals(keyprefAecDump)
        || key.equals(keyprefOpenSLES)
        || key.equals(keyprefDisableBuiltInAEC)
        || key.equals(keyprefDisableBuiltInAGC)
        || key.equals(keyprefDisableBuiltInNS)
        || key.equals(keyprefEnableLevelControl)
        || key.equals(keyprefDisableWebRtcAGCAndHPF)
        || key.equals(keyPrefDisplayHud)
        || key.equals(keyprefEnableDataChannel)
        || key.equals(keyprefOrdered)
        || key.equals(keyprefNegotiated)) {
      updateSummaryB(sharedPreferences, key);
    } else if (key.equals(keyprefSpeakerphone)) {
      updateSummaryList(sharedPreferences, key);
    }
    // clang-format on
    // Some keys additionally toggle the enabled state of dependent preferences.
    if (key.equals(keyprefMaxVideoBitrateType)) {
      setVideoBitrateEnable(sharedPreferences);
    }
    if (key.equals(keyprefStartAudioBitrateType)) {
      setAudioBitrateEnable(sharedPreferences);
    }
    if (key.equals(keyprefEnableDataChannel)) {
      setDataChannelEnable(sharedPreferences);
    }
  }
|
||||
|
||||
private void updateSummary(SharedPreferences sharedPreferences, String key) {
|
||||
Preference updatedPref = settingsFragment.findPreference(key);
|
||||
// Set summary to be the user-description for the selected value
|
||||
updatedPref.setSummary(sharedPreferences.getString(key, ""));
|
||||
}
|
||||
|
||||
private void updateSummaryBitrate(SharedPreferences sharedPreferences, String key) {
|
||||
Preference updatedPref = settingsFragment.findPreference(key);
|
||||
updatedPref.setSummary(sharedPreferences.getString(key, "") + " kbps");
|
||||
}
|
||||
|
||||
private void updateSummaryB(SharedPreferences sharedPreferences, String key) {
|
||||
Preference updatedPref = settingsFragment.findPreference(key);
|
||||
updatedPref.setSummary(sharedPreferences.getBoolean(key, true)
|
||||
? getString(R.string.pref_value_enabled)
|
||||
: getString(R.string.pref_value_disabled));
|
||||
}
|
||||
|
||||
private void updateSummaryList(SharedPreferences sharedPreferences, String key) {
|
||||
ListPreference updatedPref = (ListPreference) settingsFragment.findPreference(key);
|
||||
updatedPref.setSummary(updatedPref.getEntry());
|
||||
}
|
||||
|
||||
private void setVideoBitrateEnable(SharedPreferences sharedPreferences) {
|
||||
Preference bitratePreferenceValue =
|
||||
settingsFragment.findPreference(keyprefMaxVideoBitrateValue);
|
||||
String bitrateTypeDefault = getString(R.string.pref_maxvideobitrate_default);
|
||||
String bitrateType =
|
||||
sharedPreferences.getString(keyprefMaxVideoBitrateType, bitrateTypeDefault);
|
||||
if (bitrateType.equals(bitrateTypeDefault)) {
|
||||
bitratePreferenceValue.setEnabled(false);
|
||||
} else {
|
||||
bitratePreferenceValue.setEnabled(true);
|
||||
}
|
||||
}
|
||||
|
||||
private void setAudioBitrateEnable(SharedPreferences sharedPreferences) {
|
||||
Preference bitratePreferenceValue =
|
||||
settingsFragment.findPreference(keyprefStartAudioBitrateValue);
|
||||
String bitrateTypeDefault = getString(R.string.pref_startaudiobitrate_default);
|
||||
String bitrateType =
|
||||
sharedPreferences.getString(keyprefStartAudioBitrateType, bitrateTypeDefault);
|
||||
if (bitrateType.equals(bitrateTypeDefault)) {
|
||||
bitratePreferenceValue.setEnabled(false);
|
||||
} else {
|
||||
bitratePreferenceValue.setEnabled(true);
|
||||
}
|
||||
}
|
||||
|
||||
private void setDataChannelEnable(SharedPreferences sharedPreferences) {
|
||||
boolean enabled = sharedPreferences.getBoolean(keyprefEnableDataChannel, true);
|
||||
settingsFragment.findPreference(keyprefOrdered).setEnabled(enabled);
|
||||
settingsFragment.findPreference(keyprefMaxRetransmitTimeMs).setEnabled(enabled);
|
||||
settingsFragment.findPreference(keyprefMaxRetransmits).setEnabled(enabled);
|
||||
settingsFragment.findPreference(keyprefDataProtocol).setEnabled(enabled);
|
||||
settingsFragment.findPreference(keyprefNegotiated).setEnabled(enabled);
|
||||
settingsFragment.findPreference(keyprefDataId).setEnabled(enabled);
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,26 @@
|
||||
/*
|
||||
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.appspot.apprtc;
|
||||
|
||||
import android.os.Bundle;
|
||||
import android.preference.PreferenceFragment;
|
||||
|
||||
/**
|
||||
* Settings fragment for AppRTC.
|
||||
*/
|
||||
public class SettingsFragment extends PreferenceFragment {
  @Override
  public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // Load the preferences from an XML resource so the framework builds the
    // whole settings UI from R.xml.preferences.
    addPreferencesFromResource(R.xml.preferences);
  }
}
|
||||
354
examples/androidapp/src/org/appspot/apprtc/TCPChannelClient.java
Normal file
354
examples/androidapp/src/org/appspot/apprtc/TCPChannelClient.java
Normal file
@ -0,0 +1,354 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.appspot.apprtc;
|
||||
|
||||
import android.util.Log;
|
||||
|
||||
import org.webrtc.ThreadUtils;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStreamReader;
|
||||
import java.io.PrintWriter;
|
||||
import java.net.InetAddress;
|
||||
import java.net.ServerSocket;
|
||||
import java.net.Socket;
|
||||
import java.net.UnknownHostException;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
|
||||
/**
|
||||
* Replacement for WebSocketChannelClient for direct communication between two IP addresses. Handles
|
||||
* the signaling between the two clients using a TCP connection.
|
||||
*
|
||||
* <p>All public methods should be called from a looper executor thread
|
||||
* passed in a constructor, otherwise exception will be thrown.
|
||||
* All events are dispatched on the same thread.
|
||||
*/
|
||||
public class TCPChannelClient {
|
||||
private static final String TAG = "TCPChannelClient";
|
||||
|
||||
private final ExecutorService executor;
|
||||
private final ThreadUtils.ThreadChecker executorThreadCheck;
|
||||
private final TCPChannelEvents eventListener;
|
||||
private TCPSocket socket;
|
||||
|
||||
/**
|
||||
* Callback interface for messages delivered on TCP Connection. All callbacks are invoked from the
|
||||
* looper executor thread.
|
||||
*/
|
||||
public interface TCPChannelEvents {
|
||||
void onTCPConnected(boolean server);
|
||||
void onTCPMessage(String message);
|
||||
void onTCPError(String description);
|
||||
void onTCPClose();
|
||||
}
|
||||
|
||||
/**
|
||||
* Initializes the TCPChannelClient. If IP is a local IP address, starts a listening server on
|
||||
* that IP. If not, instead connects to the IP.
|
||||
*
|
||||
* @param eventListener Listener that will receive events from the client.
|
||||
* @param ip IP address to listen on or connect to.
|
||||
* @param port Port to listen on or connect to.
|
||||
*/
|
||||
public TCPChannelClient(
|
||||
ExecutorService executor, TCPChannelEvents eventListener, String ip, int port) {
|
||||
this.executor = executor;
|
||||
executorThreadCheck = new ThreadUtils.ThreadChecker();
|
||||
executorThreadCheck.detachThread();
|
||||
this.eventListener = eventListener;
|
||||
|
||||
InetAddress address;
|
||||
try {
|
||||
address = InetAddress.getByName(ip);
|
||||
} catch (UnknownHostException e) {
|
||||
reportError("Invalid IP address.");
|
||||
return;
|
||||
}
|
||||
|
||||
if (address.isAnyLocalAddress()) {
|
||||
socket = new TCPSocketServer(address, port);
|
||||
} else {
|
||||
socket = new TCPSocketClient(address, port);
|
||||
}
|
||||
|
||||
socket.start();
|
||||
}
|
||||
|
||||
/**
|
||||
* Disconnects the client if not already disconnected. This will fire the onTCPClose event.
|
||||
*/
|
||||
public void disconnect() {
|
||||
executorThreadCheck.checkIsOnValidThread();
|
||||
|
||||
socket.disconnect();
|
||||
}
|
||||
|
||||
/**
|
||||
* Sends a message on the socket.
|
||||
*
|
||||
* @param message Message to be sent.
|
||||
*/
|
||||
public void send(String message) {
|
||||
executorThreadCheck.checkIsOnValidThread();
|
||||
|
||||
socket.send(message);
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper method for firing onTCPError events. Calls onTCPError on the executor thread.
|
||||
*/
|
||||
private void reportError(final String message) {
|
||||
Log.e(TAG, "TCP Error: " + message);
|
||||
executor.execute(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
eventListener.onTCPError(message);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Base class for server and client sockets. Contains a listening thread that will call
|
||||
* eventListener.onTCPMessage on new messages.
|
||||
*/
|
||||
private abstract class TCPSocket extends Thread {
|
||||
// Lock for editing out and rawSocket
|
||||
protected final Object rawSocketLock;
|
||||
private PrintWriter out;
|
||||
private Socket rawSocket;
|
||||
|
||||
/**
|
||||
* Connect to the peer, potentially a slow operation.
|
||||
*
|
||||
* @return Socket connection, null if connection failed.
|
||||
*/
|
||||
public abstract Socket connect();
|
||||
/** Returns true if sockets is a server rawSocket. */
|
||||
public abstract boolean isServer();
|
||||
|
||||
TCPSocket() {
|
||||
rawSocketLock = new Object();
|
||||
}
|
||||
|
||||
/**
|
||||
* The listening thread.
|
||||
*/
|
||||
@Override
|
||||
public void run() {
|
||||
Log.d(TAG, "Listening thread started...");
|
||||
|
||||
// Receive connection to temporary variable first, so we don't block.
|
||||
Socket tempSocket = connect();
|
||||
BufferedReader in;
|
||||
|
||||
Log.d(TAG, "TCP connection established.");
|
||||
|
||||
synchronized (rawSocketLock) {
|
||||
if (rawSocket != null) {
|
||||
Log.e(TAG, "Socket already existed and will be replaced.");
|
||||
}
|
||||
|
||||
rawSocket = tempSocket;
|
||||
|
||||
// Connecting failed, error has already been reported, just exit.
|
||||
if (rawSocket == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
out = new PrintWriter(rawSocket.getOutputStream(), true);
|
||||
in = new BufferedReader(new InputStreamReader(rawSocket.getInputStream()));
|
||||
} catch (IOException e) {
|
||||
reportError("Failed to open IO on rawSocket: " + e.getMessage());
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
Log.v(TAG, "Execute onTCPConnected");
|
||||
executor.execute(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
Log.v(TAG, "Run onTCPConnected");
|
||||
eventListener.onTCPConnected(isServer());
|
||||
}
|
||||
});
|
||||
|
||||
while (true) {
|
||||
final String message;
|
||||
try {
|
||||
message = in.readLine();
|
||||
} catch (IOException e) {
|
||||
synchronized (rawSocketLock) {
|
||||
// If socket was closed, this is expected.
|
||||
if (rawSocket == null) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
reportError("Failed to read from rawSocket: " + e.getMessage());
|
||||
break;
|
||||
}
|
||||
|
||||
// No data received, rawSocket probably closed.
|
||||
if (message == null) {
|
||||
break;
|
||||
}
|
||||
|
||||
executor.execute(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
Log.v(TAG, "Receive: " + message);
|
||||
eventListener.onTCPMessage(message);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
Log.d(TAG, "Receiving thread exiting...");
|
||||
|
||||
// Close the rawSocket if it is still open.
|
||||
disconnect();
|
||||
}
|
||||
|
||||
/**
|
||||
* Closes the rawSocket if it is still open. Also fires the onTCPClose event.
|
||||
*/
|
||||
public void disconnect() {
|
||||
try {
|
||||
synchronized (rawSocketLock) {
|
||||
if (rawSocket != null) {
|
||||
rawSocket.close();
|
||||
rawSocket = null;
|
||||
out = null;
|
||||
|
||||
executor.execute(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
eventListener.onTCPClose();
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
} catch (IOException e) {
|
||||
reportError("Failed to close rawSocket: " + e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Sends a message on the socket. Should only be called on the executor thread.
|
||||
*/
|
||||
public void send(String message) {
|
||||
Log.v(TAG, "Send: " + message);
|
||||
|
||||
synchronized (rawSocketLock) {
|
||||
if (out == null) {
|
||||
reportError("Sending data on closed socket.");
|
||||
return;
|
||||
}
|
||||
|
||||
out.write(message + "\n");
|
||||
out.flush();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private class TCPSocketServer extends TCPSocket {
|
||||
// Server socket is also guarded by rawSocketLock.
|
||||
private ServerSocket serverSocket;
|
||||
|
||||
final private InetAddress address;
|
||||
final private int port;
|
||||
|
||||
public TCPSocketServer(InetAddress address, int port) {
|
||||
this.address = address;
|
||||
this.port = port;
|
||||
}
|
||||
|
||||
/** Opens a listening socket and waits for a connection. */
|
||||
@Override
|
||||
public Socket connect() {
|
||||
Log.d(TAG, "Listening on [" + address.getHostAddress() + "]:" + Integer.toString(port));
|
||||
|
||||
final ServerSocket tempSocket;
|
||||
try {
|
||||
tempSocket = new ServerSocket(port, 0, address);
|
||||
} catch (IOException e) {
|
||||
reportError("Failed to create server socket: " + e.getMessage());
|
||||
return null;
|
||||
}
|
||||
|
||||
synchronized (rawSocketLock) {
|
||||
if (serverSocket != null) {
|
||||
Log.e(TAG, "Server rawSocket was already listening and new will be opened.");
|
||||
}
|
||||
|
||||
serverSocket = tempSocket;
|
||||
}
|
||||
|
||||
try {
|
||||
return tempSocket.accept();
|
||||
} catch (IOException e) {
|
||||
reportError("Failed to receive connection: " + e.getMessage());
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/** Closes the listening socket and calls super. */
|
||||
@Override
|
||||
public void disconnect() {
|
||||
try {
|
||||
synchronized (rawSocketLock) {
|
||||
if (serverSocket != null) {
|
||||
serverSocket.close();
|
||||
serverSocket = null;
|
||||
}
|
||||
}
|
||||
} catch (IOException e) {
|
||||
reportError("Failed to close server socket: " + e.getMessage());
|
||||
}
|
||||
|
||||
super.disconnect();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isServer() {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
private class TCPSocketClient extends TCPSocket {
|
||||
final private InetAddress address;
|
||||
final private int port;
|
||||
|
||||
public TCPSocketClient(InetAddress address, int port) {
|
||||
this.address = address;
|
||||
this.port = port;
|
||||
}
|
||||
|
||||
/** Connects to the peer. */
|
||||
@Override
|
||||
public Socket connect() {
|
||||
Log.d(TAG, "Connecting to [" + address.getHostAddress() + "]:" + Integer.toString(port));
|
||||
|
||||
try {
|
||||
return new Socket(address, port);
|
||||
} catch (IOException e) {
|
||||
reportError("Failed to connect: " + e.getMessage());
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isServer() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,84 @@
|
||||
/*
|
||||
* Copyright 2013 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.appspot.apprtc;
|
||||
|
||||
import android.app.Activity;
|
||||
import android.app.AlertDialog;
|
||||
import android.content.DialogInterface;
|
||||
import android.util.Log;
|
||||
import android.util.TypedValue;
|
||||
import android.widget.ScrollView;
|
||||
import android.widget.TextView;
|
||||
|
||||
import java.io.PrintWriter;
|
||||
import java.io.StringWriter;
|
||||
|
||||
/**
|
||||
* Singleton helper: install a default unhandled exception handler which shows
|
||||
* an informative dialog and kills the app. Useful for apps whose
|
||||
* error-handling consists of throwing RuntimeExceptions.
|
||||
* NOTE: almost always more useful to
|
||||
* Thread.setDefaultUncaughtExceptionHandler() rather than
|
||||
* Thread.setUncaughtExceptionHandler(), to apply to background threads as well.
|
||||
*/
|
||||
public class UnhandledExceptionHandler implements Thread.UncaughtExceptionHandler {
|
||||
private static final String TAG = "AppRTCMobileActivity";
|
||||
private final Activity activity;
|
||||
|
||||
public UnhandledExceptionHandler(final Activity activity) {
|
||||
this.activity = activity;
|
||||
}
|
||||
|
||||
public void uncaughtException(Thread unusedThread, final Throwable e) {
|
||||
activity.runOnUiThread(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
String title = "Fatal error: " + getTopLevelCauseMessage(e);
|
||||
String msg = getRecursiveStackTrace(e);
|
||||
TextView errorView = new TextView(activity);
|
||||
errorView.setText(msg);
|
||||
errorView.setTextSize(TypedValue.COMPLEX_UNIT_SP, 8);
|
||||
ScrollView scrollingContainer = new ScrollView(activity);
|
||||
scrollingContainer.addView(errorView);
|
||||
Log.e(TAG, title + "\n\n" + msg);
|
||||
DialogInterface.OnClickListener listener = new DialogInterface.OnClickListener() {
|
||||
@Override
|
||||
public void onClick(DialogInterface dialog, int which) {
|
||||
dialog.dismiss();
|
||||
System.exit(1);
|
||||
}
|
||||
};
|
||||
AlertDialog.Builder builder = new AlertDialog.Builder(activity);
|
||||
builder.setTitle(title)
|
||||
.setView(scrollingContainer)
|
||||
.setPositiveButton("Exit", listener)
|
||||
.show();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Returns the Message attached to the original Cause of |t|.
|
||||
private static String getTopLevelCauseMessage(Throwable t) {
|
||||
Throwable topLevelCause = t;
|
||||
while (topLevelCause.getCause() != null) {
|
||||
topLevelCause = topLevelCause.getCause();
|
||||
}
|
||||
return topLevelCause.getMessage();
|
||||
}
|
||||
|
||||
// Returns a human-readable String of the stacktrace in |t|, recursively
|
||||
// through all Causes that led to |t|.
|
||||
private static String getRecursiveStackTrace(Throwable t) {
|
||||
StringWriter writer = new StringWriter();
|
||||
t.printStackTrace(new PrintWriter(writer));
|
||||
return writer.toString();
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,296 @@
|
||||
/*
|
||||
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.appspot.apprtc;
|
||||
|
||||
import org.appspot.apprtc.util.AsyncHttpURLConnection;
|
||||
import org.appspot.apprtc.util.AsyncHttpURLConnection.AsyncHttpEvents;
|
||||
|
||||
import android.os.Handler;
|
||||
import android.util.Log;
|
||||
|
||||
import de.tavendo.autobahn.WebSocket.WebSocketConnectionObserver;
|
||||
import de.tavendo.autobahn.WebSocketConnection;
|
||||
import de.tavendo.autobahn.WebSocketException;
|
||||
|
||||
import org.json.JSONException;
|
||||
import org.json.JSONObject;
|
||||
|
||||
import java.net.URI;
|
||||
import java.net.URISyntaxException;
|
||||
import java.util.LinkedList;
|
||||
|
||||
/**
|
||||
* WebSocket client implementation.
|
||||
*
|
||||
* <p>All public methods should be called from a looper executor thread
|
||||
* passed in a constructor, otherwise exception will be thrown.
|
||||
* All events are dispatched on the same thread.
|
||||
*/
|
||||
|
||||
public class WebSocketChannelClient {
|
||||
  private static final String TAG = "WSChannelRTCClient";
  // Milliseconds to wait for the close event in disconnect() (Object.wait timeout).
  private static final int CLOSE_TIMEOUT = 1000;
  private final WebSocketChannelEvents events;
  // All state changes and callbacks run on this handler's looper thread.
  private final Handler handler;
  private WebSocketConnection ws;
  private WebSocketObserver wsObserver;
  private String wsServerUrl;
  private String postServerUrl;
  private String roomID;
  private String clientID;
  private WebSocketConnectionState state;
  // Guards closeEvent: set by the close callback, awaited in disconnect().
  private final Object closeEventLock = new Object();
  private boolean closeEvent;
  // WebSocket send queue. Messages are added to the queue when WebSocket
  // client is not registered and are consumed in register() call.
  private final LinkedList<String> wsSendQueue;

  /**
   * Possible WebSocket connection states.
   */
  public enum WebSocketConnectionState { NEW, CONNECTED, REGISTERED, CLOSED, ERROR }

  /**
   * Callback interface for messages delivered on WebSocket.
   * All events are dispatched from a looper executor thread.
   */
  public interface WebSocketChannelEvents {
    void onWebSocketMessage(final String message);
    void onWebSocketClose();
    void onWebSocketError(final String description);
  }
|
||||
|
||||
public WebSocketChannelClient(Handler handler, WebSocketChannelEvents events) {
|
||||
this.handler = handler;
|
||||
this.events = events;
|
||||
roomID = null;
|
||||
clientID = null;
|
||||
wsSendQueue = new LinkedList<String>();
|
||||
state = WebSocketConnectionState.NEW;
|
||||
}
|
||||
|
||||
  /** Returns the current WebSocket connection state. */
  public WebSocketConnectionState getState() {
    return state;
  }
|
||||
|
||||
  /**
   * Opens the WebSocket connection to {@code wsUrl}; {@code postUrl} is stored
   * for later POST/DELETE signaling. Only valid in the NEW state; connection
   * errors are reported through the events listener via reportError().
   */
  public void connect(final String wsUrl, final String postUrl) {
    checkIfCalledOnValidThread();
    if (state != WebSocketConnectionState.NEW) {
      Log.e(TAG, "WebSocket is already connected.");
      return;
    }
    wsServerUrl = wsUrl;
    postServerUrl = postUrl;
    closeEvent = false;

    Log.d(TAG, "Connecting WebSocket to: " + wsUrl + ". Post URL: " + postUrl);
    ws = new WebSocketConnection();
    wsObserver = new WebSocketObserver();
    try {
      ws.connect(new URI(wsServerUrl), wsObserver);
    } catch (URISyntaxException e) {
      reportError("URI error: " + e.getMessage());
    } catch (WebSocketException e) {
      reportError("WebSocket connection error: " + e.getMessage());
    }
  }
|
||||
|
||||
  /**
   * Registers this client for {@code roomID}/{@code clientID} with the
   * signaling server. Requires the CONNECTED state; on success transitions to
   * REGISTERED and flushes any messages queued while unregistered.
   */
  public void register(final String roomID, final String clientID) {
    checkIfCalledOnValidThread();
    this.roomID = roomID;
    this.clientID = clientID;
    if (state != WebSocketConnectionState.CONNECTED) {
      Log.w(TAG, "WebSocket register() in state " + state);
      return;
    }
    Log.d(TAG, "Registering WebSocket for room " + roomID + ". ClientID: " + clientID);
    JSONObject json = new JSONObject();
    try {
      json.put("cmd", "register");
      json.put("roomid", roomID);
      json.put("clientid", clientID);
      Log.d(TAG, "C->WSS: " + json.toString());
      ws.sendTextMessage(json.toString());
      state = WebSocketConnectionState.REGISTERED;
      // Send any previously accumulated messages.
      for (String sendMessage : wsSendQueue) {
        send(sendMessage);
      }
      wsSendQueue.clear();
    } catch (JSONException e) {
      reportError("WebSocket register JSON error: " + e.getMessage());
    }
  }
|
||||
|
||||
  /**
   * Sends {@code message} over the WebSocket. Before registration the message
   * is queued (flushed by register()); in ERROR/CLOSED it is dropped with a
   * log; when REGISTERED it is wrapped in a {"cmd":"send","msg":...} envelope.
   */
  public void send(String message) {
    checkIfCalledOnValidThread();
    switch (state) {
      case NEW:
      case CONNECTED:
        // Store outgoing messages and send them after websocket client
        // is registered.
        Log.d(TAG, "WS ACC: " + message);
        wsSendQueue.add(message);
        return;
      case ERROR:
      case CLOSED:
        Log.e(TAG, "WebSocket send() in error or closed state : " + message);
        return;
      case REGISTERED:
        JSONObject json = new JSONObject();
        try {
          json.put("cmd", "send");
          json.put("msg", message);
          message = json.toString();
          Log.d(TAG, "C->WSS: " + message);
          ws.sendTextMessage(message);
        } catch (JSONException e) {
          reportError("WebSocket send JSON error: " + e.getMessage());
        }
        break;
    }
  }
|
||||
|
||||
  /**
   * Sends {@code message} to the room server over HTTP POST. This call can be
   * used to send WebSocket messages before the WebSocket connection is opened.
   */
  public void post(String message) {
    checkIfCalledOnValidThread();
    sendWSSMessage("POST", message);
  }
|
||||
|
||||
  /**
   * Disconnects from the signaling server: sends "bye" and an HTTP DELETE when
   * registered, then closes the WebSocket.
   *
   * @param waitForComplete if true, blocks (up to CLOSE_TIMEOUT ms) for the
   *     close event so the websocket library cannot post to a dead looper.
   */
  public void disconnect(boolean waitForComplete) {
    checkIfCalledOnValidThread();
    Log.d(TAG, "Disconnect WebSocket. State: " + state);
    if (state == WebSocketConnectionState.REGISTERED) {
      // Send "bye" to WebSocket server.
      send("{\"type\": \"bye\"}");
      state = WebSocketConnectionState.CONNECTED;
      // Send http DELETE to http WebSocket server.
      sendWSSMessage("DELETE", "");
    }
    // Close WebSocket in CONNECTED or ERROR states only.
    if (state == WebSocketConnectionState.CONNECTED || state == WebSocketConnectionState.ERROR) {
      ws.disconnect();
      state = WebSocketConnectionState.CLOSED;

      // Wait for websocket close event to prevent websocket library from
      // sending any pending messages to deleted looper thread.
      if (waitForComplete) {
        synchronized (closeEventLock) {
          while (!closeEvent) {
            try {
              closeEventLock.wait(CLOSE_TIMEOUT);
              // NOTE(review): the unconditional break means we wait at most once
              // (a bounded-timeout wait), even on a spurious wakeup — presumably
              // intentional to cap shutdown latency; confirm before changing.
              break;
            } catch (InterruptedException e) {
              Log.e(TAG, "Wait error: " + e.toString());
            }
          }
        }
      }
    }
    Log.d(TAG, "Disconnecting WebSocket done.");
  }
|
||||
|
||||
private void reportError(final String errorMessage) {
|
||||
Log.e(TAG, errorMessage);
|
||||
handler.post(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
if (state != WebSocketConnectionState.ERROR) {
|
||||
state = WebSocketConnectionState.ERROR;
|
||||
events.onWebSocketError(errorMessage);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Asynchronously send POST/DELETE to WebSocket server.
|
||||
private void sendWSSMessage(final String method, final String message) {
|
||||
String postUrl = postServerUrl + "/" + roomID + "/" + clientID;
|
||||
Log.d(TAG, "WS " + method + " : " + postUrl + " : " + message);
|
||||
AsyncHttpURLConnection httpConnection =
|
||||
new AsyncHttpURLConnection(method, postUrl, message, new AsyncHttpEvents() {
|
||||
@Override
|
||||
public void onHttpError(String errorMessage) {
|
||||
reportError("WS " + method + " error: " + errorMessage);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onHttpComplete(String response) {}
|
||||
});
|
||||
httpConnection.send();
|
||||
}
|
||||
|
||||
// Helper method for debugging purposes. Ensures that WebSocket method is
|
||||
// called on a looper thread.
|
||||
private void checkIfCalledOnValidThread() {
|
||||
if (Thread.currentThread() != handler.getLooper().getThread()) {
|
||||
throw new IllegalStateException("WebSocket method is not called on valid thread");
|
||||
}
|
||||
}
|
||||
|
||||
  // Bridges callbacks from the WebSocket library (delivered on the library's
  // own thread) onto the channel's looper thread via |handler|.
  private class WebSocketObserver implements WebSocketConnectionObserver {
    @Override
    public void onOpen() {
      Log.d(TAG, "WebSocket connection opened to: " + wsServerUrl);
      handler.post(new Runnable() {
        @Override
        public void run() {
          state = WebSocketConnectionState.CONNECTED;
          // Check if we have pending register request.
          if (roomID != null && clientID != null) {
            register(roomID, clientID);
          }
        }
      });
    }

    @Override
    public void onClose(WebSocketCloseNotification code, String reason) {
      Log.d(TAG, "WebSocket connection closed. Code: " + code + ". Reason: " + reason + ". State: "
          + state);
      // Wake up a disconnect(true) call that may be blocked waiting for the
      // close event. Signaled before posting so the waiter is released even
      // if the looper is being torn down.
      synchronized (closeEventLock) {
        closeEvent = true;
        closeEventLock.notify();
      }
      handler.post(new Runnable() {
        @Override
        public void run() {
          if (state != WebSocketConnectionState.CLOSED) {
            state = WebSocketConnectionState.CLOSED;
            events.onWebSocketClose();
          }
        }
      });
    }

    @Override
    public void onTextMessage(String payload) {
      Log.d(TAG, "WSS->C: " + payload);
      final String message = payload;
      handler.post(new Runnable() {
        @Override
        public void run() {
          // Drop messages once the channel has errored out or been closed.
          if (state == WebSocketConnectionState.CONNECTED
              || state == WebSocketConnectionState.REGISTERED) {
            events.onWebSocketMessage(message);
          }
        }
      });
    }

    // Raw/binary payloads are ignored; the AppRTC signaling protocol is
    // text-only JSON.
    @Override
    public void onRawTextMessage(byte[] payload) {}

    @Override
    public void onBinaryMessage(byte[] payload) {}
  }
|
||||
}
|
||||
@ -0,0 +1,428 @@
|
||||
/*
|
||||
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.appspot.apprtc;
|
||||
|
||||
import org.appspot.apprtc.RoomParametersFetcher.RoomParametersFetcherEvents;
|
||||
import org.appspot.apprtc.WebSocketChannelClient.WebSocketChannelEvents;
|
||||
import org.appspot.apprtc.WebSocketChannelClient.WebSocketConnectionState;
|
||||
import org.appspot.apprtc.util.AsyncHttpURLConnection;
|
||||
import org.appspot.apprtc.util.AsyncHttpURLConnection.AsyncHttpEvents;
|
||||
|
||||
import android.os.Handler;
|
||||
import android.os.HandlerThread;
|
||||
import android.util.Log;
|
||||
|
||||
import org.json.JSONArray;
|
||||
import org.json.JSONException;
|
||||
import org.json.JSONObject;
|
||||
import org.webrtc.IceCandidate;
|
||||
import org.webrtc.SessionDescription;
|
||||
|
||||
/**
 * Negotiates signaling for chatting with https://appr.tc "rooms".
 * Uses the client<->server specifics of the apprtc AppEngine webapp.
 *
 * <p>To use: create an instance of this object (registering a message handler) and
 * call connectToRoom(). Once room connection is established
 * onConnectedToRoom() callback with room parameters is invoked.
 * Messages to other party (with local Ice candidates and answer SDP) can
 * be sent after WebSocket connection is established.
 */
public class WebSocketRTCClient implements AppRTCClient, WebSocketChannelEvents {
  private static final String TAG = "WSRTCClient";
  // Room server REST path segments.
  private static final String ROOM_JOIN = "join";
  private static final String ROOM_MESSAGE = "message";
  private static final String ROOM_LEAVE = "leave";

  // Lifecycle of the room connection; only mutated on the |handler| thread.
  private enum ConnectionState { NEW, CONNECTED, CLOSED, ERROR }

  // Kind of POST sent to the room server: signaling message vs. leave request.
  private enum MessageType { MESSAGE, LEAVE }

  // Handler bound to a dedicated signaling thread; the *Internal methods and
  // all state mutations run on it.
  private final Handler handler;
  private boolean initiator;
  private SignalingEvents events;
  private WebSocketChannelClient wsClient;
  private ConnectionState roomState;
  private RoomConnectionParameters connectionParameters;
  private String messageUrl;
  private String leaveUrl;

  public WebSocketRTCClient(SignalingEvents events) {
    this.events = events;
    roomState = ConnectionState.NEW;
    final HandlerThread handlerThread = new HandlerThread(TAG);
    handlerThread.start();
    handler = new Handler(handlerThread.getLooper());
  }

  // --------------------------------------------------------------------
  // AppRTCClient interface implementation.
  // Asynchronously connect to an AppRTC room URL using supplied connection
  // parameters, retrieves room parameters and connect to WebSocket server.
  @Override
  public void connectToRoom(RoomConnectionParameters connectionParameters) {
    this.connectionParameters = connectionParameters;
    handler.post(new Runnable() {
      @Override
      public void run() {
        connectToRoomInternal();
      }
    });
  }

  @Override
  public void disconnectFromRoom() {
    handler.post(new Runnable() {
      @Override
      public void run() {
        disconnectFromRoomInternal();
        // No further signaling after disconnect; tear down the thread.
        handler.getLooper().quit();
      }
    });
  }

  // Connects to room - function runs on a local looper thread.
  private void connectToRoomInternal() {
    String connectionUrl = getConnectionUrl(connectionParameters);
    Log.d(TAG, "Connect to room: " + connectionUrl);
    roomState = ConnectionState.NEW;
    wsClient = new WebSocketChannelClient(handler, this);

    RoomParametersFetcherEvents callbacks = new RoomParametersFetcherEvents() {
      @Override
      public void onSignalingParametersReady(final SignalingParameters params) {
        // The fetcher delivers on its own thread; hop back to the looper.
        WebSocketRTCClient.this.handler.post(new Runnable() {
          @Override
          public void run() {
            WebSocketRTCClient.this.signalingParametersReady(params);
          }
        });
      }

      @Override
      public void onSignalingParametersError(String description) {
        WebSocketRTCClient.this.reportError(description);
      }
    };

    new RoomParametersFetcher(connectionUrl, null, callbacks).makeRequest();
  }

  // Disconnect from room and send bye messages - runs on a local looper thread.
  private void disconnectFromRoomInternal() {
    Log.d(TAG, "Disconnect. Room state: " + roomState);
    if (roomState == ConnectionState.CONNECTED) {
      Log.d(TAG, "Closing room.");
      sendPostMessage(MessageType.LEAVE, leaveUrl, null);
    }
    roomState = ConnectionState.CLOSED;
    if (wsClient != null) {
      // Blocking disconnect: waits for the WebSocket close event because the
      // looper thread is about to quit (see disconnectFromRoom()).
      wsClient.disconnect(true);
    }
  }

  // Helper functions to get connection, post message and leave message URLs
  private String getConnectionUrl(RoomConnectionParameters connectionParameters) {
    return connectionParameters.roomUrl + "/" + ROOM_JOIN + "/" + connectionParameters.roomId
        + getQueryString(connectionParameters);
  }

  private String getMessageUrl(
      RoomConnectionParameters connectionParameters, SignalingParameters signalingParameters) {
    return connectionParameters.roomUrl + "/" + ROOM_MESSAGE + "/" + connectionParameters.roomId
        + "/" + signalingParameters.clientId + getQueryString(connectionParameters);
  }

  private String getLeaveUrl(
      RoomConnectionParameters connectionParameters, SignalingParameters signalingParameters) {
    return connectionParameters.roomUrl + "/" + ROOM_LEAVE + "/" + connectionParameters.roomId + "/"
        + signalingParameters.clientId + getQueryString(connectionParameters);
  }

  private String getQueryString(RoomConnectionParameters connectionParameters) {
    if (connectionParameters.urlParameters != null) {
      return "?" + connectionParameters.urlParameters;
    } else {
      return "";
    }
  }

  // Callback issued when room parameters are extracted. Runs on local
  // looper thread.
  private void signalingParametersReady(final SignalingParameters signalingParameters) {
    Log.d(TAG, "Room connection completed.");
    // A loopback room must be otherwise empty: this single client plays both
    // roles, so it must be the initiator and there must be no existing offer.
    if (connectionParameters.loopback
        && (!signalingParameters.initiator || signalingParameters.offerSdp != null)) {
      reportError("Loopback room is busy.");
      return;
    }
    if (!connectionParameters.loopback && !signalingParameters.initiator
        && signalingParameters.offerSdp == null) {
      Log.w(TAG, "No offer SDP in room response.");
    }
    initiator = signalingParameters.initiator;
    messageUrl = getMessageUrl(connectionParameters, signalingParameters);
    leaveUrl = getLeaveUrl(connectionParameters, signalingParameters);
    Log.d(TAG, "Message URL: " + messageUrl);
    Log.d(TAG, "Leave URL: " + leaveUrl);
    roomState = ConnectionState.CONNECTED;

    // Fire connection and signaling parameters events.
    events.onConnectedToRoom(signalingParameters);

    // Connect and register WebSocket client.
    wsClient.connect(signalingParameters.wssUrl, signalingParameters.wssPostUrl);
    wsClient.register(connectionParameters.roomId, signalingParameters.clientId);
  }

  // Send local offer SDP to the other participant.
  @Override
  public void sendOfferSdp(final SessionDescription sdp) {
    handler.post(new Runnable() {
      @Override
      public void run() {
        if (roomState != ConnectionState.CONNECTED) {
          reportError("Sending offer SDP in non connected state.");
          return;
        }
        JSONObject json = new JSONObject();
        jsonPut(json, "sdp", sdp.description);
        jsonPut(json, "type", "offer");
        // Offers go to the room (GAE) server, not the WebSocket.
        sendPostMessage(MessageType.MESSAGE, messageUrl, json.toString());
        if (connectionParameters.loopback) {
          // In loopback mode rename this offer to answer and route it back.
          SessionDescription sdpAnswer = new SessionDescription(
              SessionDescription.Type.fromCanonicalForm("answer"), sdp.description);
          events.onRemoteDescription(sdpAnswer);
        }
      }
    });
  }

  // Send local answer SDP to the other participant.
  @Override
  public void sendAnswerSdp(final SessionDescription sdp) {
    handler.post(new Runnable() {
      @Override
      public void run() {
        if (connectionParameters.loopback) {
          // Loopback answers are synthesized locally in sendOfferSdp().
          Log.e(TAG, "Sending answer in loopback mode.");
          return;
        }
        JSONObject json = new JSONObject();
        jsonPut(json, "sdp", sdp.description);
        jsonPut(json, "type", "answer");
        // Answers always travel over the WebSocket channel.
        wsClient.send(json.toString());
      }
    });
  }

  // Send Ice candidate to the other participant.
  @Override
  public void sendLocalIceCandidate(final IceCandidate candidate) {
    handler.post(new Runnable() {
      @Override
      public void run() {
        JSONObject json = new JSONObject();
        jsonPut(json, "type", "candidate");
        jsonPut(json, "label", candidate.sdpMLineIndex);
        jsonPut(json, "id", candidate.sdpMid);
        jsonPut(json, "candidate", candidate.sdp);
        if (initiator) {
          // Call initiator sends ice candidates to GAE server.
          if (roomState != ConnectionState.CONNECTED) {
            reportError("Sending ICE candidate in non connected state.");
            return;
          }
          sendPostMessage(MessageType.MESSAGE, messageUrl, json.toString());
          if (connectionParameters.loopback) {
            events.onRemoteIceCandidate(candidate);
          }
        } else {
          // Call receiver sends ice candidates to websocket server.
          wsClient.send(json.toString());
        }
      }
    });
  }

  // Send removed Ice candidates to the other participant.
  @Override
  public void sendLocalIceCandidateRemovals(final IceCandidate[] candidates) {
    handler.post(new Runnable() {
      @Override
      public void run() {
        JSONObject json = new JSONObject();
        jsonPut(json, "type", "remove-candidates");
        JSONArray jsonArray = new JSONArray();
        for (final IceCandidate candidate : candidates) {
          jsonArray.put(toJsonCandidate(candidate));
        }
        jsonPut(json, "candidates", jsonArray);
        if (initiator) {
          // Call initiator sends ice candidates to GAE server.
          if (roomState != ConnectionState.CONNECTED) {
            reportError("Sending ICE candidate removals in non connected state.");
            return;
          }
          sendPostMessage(MessageType.MESSAGE, messageUrl, json.toString());
          if (connectionParameters.loopback) {
            events.onRemoteIceCandidatesRemoved(candidates);
          }
        } else {
          // Call receiver sends ice candidates to websocket server.
          wsClient.send(json.toString());
        }
      }
    });
  }

  // --------------------------------------------------------------------
  // WebSocketChannelEvents interface implementation.
  // All events are called by WebSocketChannelClient on a local looper thread
  // (passed to WebSocket client constructor).
  @Override
  public void onWebSocketMessage(final String msg) {
    if (wsClient.getState() != WebSocketConnectionState.REGISTERED) {
      Log.e(TAG, "Got WebSocket message in non registered state.");
      return;
    }
    try {
      JSONObject json = new JSONObject(msg);
      String msgText = json.getString("msg");
      String errorText = json.optString("error");
      if (msgText.length() > 0) {
        // A non-empty "msg" field wraps an inner signaling JSON payload.
        json = new JSONObject(msgText);
        String type = json.optString("type");
        if (type.equals("candidate")) {
          events.onRemoteIceCandidate(toJavaCandidate(json));
        } else if (type.equals("remove-candidates")) {
          JSONArray candidateArray = json.getJSONArray("candidates");
          IceCandidate[] candidates = new IceCandidate[candidateArray.length()];
          for (int i = 0; i < candidateArray.length(); ++i) {
            candidates[i] = toJavaCandidate(candidateArray.getJSONObject(i));
          }
          events.onRemoteIceCandidatesRemoved(candidates);
        } else if (type.equals("answer")) {
          // Only the call initiator expects an answer.
          if (initiator) {
            SessionDescription sdp = new SessionDescription(
                SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));
            events.onRemoteDescription(sdp);
          } else {
            reportError("Received answer for call initiator: " + msg);
          }
        } else if (type.equals("offer")) {
          // Only the call receiver expects an offer.
          if (!initiator) {
            SessionDescription sdp = new SessionDescription(
                SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));
            events.onRemoteDescription(sdp);
          } else {
            reportError("Received offer for call receiver: " + msg);
          }
        } else if (type.equals("bye")) {
          events.onChannelClose();
        } else {
          reportError("Unexpected WebSocket message: " + msg);
        }
      } else {
        // Empty "msg": either a server-reported error or noise.
        if (errorText != null && errorText.length() > 0) {
          reportError("WebSocket error message: " + errorText);
        } else {
          reportError("Unexpected WebSocket message: " + msg);
        }
      }
    } catch (JSONException e) {
      reportError("WebSocket message JSON parsing error: " + e.toString());
    }
  }

  @Override
  public void onWebSocketClose() {
    events.onChannelClose();
  }

  @Override
  public void onWebSocketError(String description) {
    reportError("WebSocket error: " + description);
  }

  // --------------------------------------------------------------------
  // Helper functions.
  // Logs the error and, on the looper thread, transitions to ERROR state.
  // Only the first error reaches the listener.
  private void reportError(final String errorMessage) {
    Log.e(TAG, errorMessage);
    handler.post(new Runnable() {
      @Override
      public void run() {
        if (roomState != ConnectionState.ERROR) {
          roomState = ConnectionState.ERROR;
          events.onChannelError(errorMessage);
        }
      }
    });
  }

  // Put a |key|->|value| mapping in |json|.
  private static void jsonPut(JSONObject json, String key, Object value) {
    try {
      json.put(key, value);
    } catch (JSONException e) {
      // JSONException here means a programming error (e.g. null key).
      throw new RuntimeException(e);
    }
  }

  // Send SDP or ICE candidate to a room server.
  private void sendPostMessage(
      final MessageType messageType, final String url, final String message) {
    String logInfo = url;
    if (message != null) {
      logInfo += ". Message: " + message;
    }
    Log.d(TAG, "C->GAE: " + logInfo);
    AsyncHttpURLConnection httpConnection =
        new AsyncHttpURLConnection("POST", url, message, new AsyncHttpEvents() {
          @Override
          public void onHttpError(String errorMessage) {
            reportError("GAE POST error: " + errorMessage);
          }

          @Override
          public void onHttpComplete(String response) {
            // Only regular messages carry a JSON "result" field; leave
            // requests are fire-and-forget.
            if (messageType == MessageType.MESSAGE) {
              try {
                JSONObject roomJson = new JSONObject(response);
                String result = roomJson.getString("result");
                if (!result.equals("SUCCESS")) {
                  reportError("GAE POST error: " + result);
                }
              } catch (JSONException e) {
                reportError("GAE POST JSON error: " + e.toString());
              }
            }
          }
        });
    httpConnection.send();
  }

  // Converts a Java candidate to a JSONObject.
  private JSONObject toJsonCandidate(final IceCandidate candidate) {
    JSONObject json = new JSONObject();
    jsonPut(json, "label", candidate.sdpMLineIndex);
    jsonPut(json, "id", candidate.sdpMid);
    jsonPut(json, "candidate", candidate.sdp);
    return json;
  }

  // Converts a JSON candidate to a Java object.
  IceCandidate toJavaCandidate(JSONObject json) throws JSONException {
    return new IceCandidate(
        json.getString("id"), json.getInt("label"), json.getString("candidate"));
  }
}
|
||||
@ -0,0 +1,47 @@
|
||||
/*
|
||||
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.appspot.apprtc.util;
|
||||
|
||||
import android.os.Build;
|
||||
import android.util.Log;
|
||||
|
||||
/**
|
||||
* AppRTCUtils provides helper functions for managing thread safety.
|
||||
*/
|
||||
public final class AppRTCUtils {
|
||||
private AppRTCUtils() {}
|
||||
|
||||
/** Helper method which throws an exception when an assertion has failed. */
|
||||
public static void assertIsTrue(boolean condition) {
|
||||
if (!condition) {
|
||||
throw new AssertionError("Expected condition to be true");
|
||||
}
|
||||
}
|
||||
|
||||
/** Helper method for building a string of thread information.*/
|
||||
public static String getThreadInfo() {
|
||||
return "@[name=" + Thread.currentThread().getName() + ", id=" + Thread.currentThread().getId()
|
||||
+ "]";
|
||||
}
|
||||
|
||||
/** Information about the current build, taken from system properties. */
|
||||
public static void logDeviceInfo(String tag) {
|
||||
Log.d(tag, "Android SDK: " + Build.VERSION.SDK_INT + ", "
|
||||
+ "Release: " + Build.VERSION.RELEASE + ", "
|
||||
+ "Brand: " + Build.BRAND + ", "
|
||||
+ "Device: " + Build.DEVICE + ", "
|
||||
+ "Id: " + Build.ID + ", "
|
||||
+ "Hardware: " + Build.HARDWARE + ", "
|
||||
+ "Manufacturer: " + Build.MANUFACTURER + ", "
|
||||
+ "Model: " + Build.MODEL + ", "
|
||||
+ "Product: " + Build.PRODUCT);
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,119 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.appspot.apprtc.util;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
import java.net.HttpURLConnection;
|
||||
import java.net.SocketTimeoutException;
|
||||
import java.net.URL;
|
||||
import java.util.Scanner;
|
||||
|
||||
/**
 * Asynchronous http requests implementation.
 *
 * <p>Each call to {@link #send()} spawns a short-lived background thread that
 * performs the request and reports the outcome through {@link AsyncHttpEvents}
 * (on that background thread, not the caller's thread).
 */
public class AsyncHttpURLConnection {
  // Applies to both connect and read timeouts.
  private static final int HTTP_TIMEOUT_MS = 8000;
  private static final String HTTP_ORIGIN = "https://appr.tc";
  private final String method;
  private final String url;
  private final String message;
  private final AsyncHttpEvents events;
  private String contentType;

  /**
   * Http requests callbacks.
   */
  public interface AsyncHttpEvents {
    void onHttpError(String errorMessage);
    void onHttpComplete(String response);
  }

  /**
   * @param method HTTP method, e.g. "POST", "DELETE".
   * @param url absolute request URL.
   * @param message request body; may be null for body-less requests.
   * @param events receives exactly one of onHttpComplete / onHttpError.
   */
  public AsyncHttpURLConnection(String method, String url, String message, AsyncHttpEvents events) {
    this.method = method;
    this.url = url;
    this.message = message;
    this.events = events;
  }

  /** Overrides the default "text/plain; charset=utf-8" Content-Type header. */
  public void setContentType(String contentType) {
    this.contentType = contentType;
  }

  /** Starts the request on a freshly spawned background thread. */
  public void send() {
    Runnable runHttp = new Runnable() {
      public void run() {
        sendHttpMessage();
      }
    };
    new Thread(runHttp).start();
  }

  // Performs the blocking HTTP exchange; runs on the background thread.
  private void sendHttpMessage() {
    try {
      HttpURLConnection connection = (HttpURLConnection) new URL(url).openConnection();
      byte[] postData = new byte[0];
      if (message != null) {
        postData = message.getBytes("UTF-8");
      }
      connection.setRequestMethod(method);
      connection.setUseCaches(false);
      connection.setDoInput(true);
      connection.setConnectTimeout(HTTP_TIMEOUT_MS);
      connection.setReadTimeout(HTTP_TIMEOUT_MS);
      // TODO(glaznev) - query request origin from pref_room_server_url_key preferences.
      connection.addRequestProperty("origin", HTTP_ORIGIN);
      boolean doOutput = false;
      if (method.equals("POST")) {
        doOutput = true;
        connection.setDoOutput(true);
        connection.setFixedLengthStreamingMode(postData.length);
      }
      if (contentType == null) {
        connection.setRequestProperty("Content-Type", "text/plain; charset=utf-8");
      } else {
        connection.setRequestProperty("Content-Type", contentType);
      }

      // Send POST request.
      if (doOutput && postData.length > 0) {
        OutputStream outStream = connection.getOutputStream();
        outStream.write(postData);
        outStream.close();
      }

      // Get response.
      int responseCode = connection.getResponseCode();
      if (responseCode != 200) {
        events.onHttpError("Non-200 response to " + method + " to URL: " + url + " : "
            + connection.getHeaderField(null));
        connection.disconnect();
        return;
      }
      InputStream responseStream = connection.getInputStream();
      String response = drainStream(responseStream);
      responseStream.close();
      connection.disconnect();
      events.onHttpComplete(response);
    } catch (SocketTimeoutException e) {
      events.onHttpError("HTTP " + method + " to " + url + " timeout");
    } catch (IOException e) {
      events.onHttpError("HTTP " + method + " to " + url + " error: " + e.getMessage());
    }
  }

  // Return the contents of an InputStream as a String.
  private static String drainStream(InputStream in) {
    // Decode explicitly as UTF-8 (matching the encoding used for the request
    // body above) instead of the platform default charset, so non-ASCII
    // payloads are read consistently on every device. Close the Scanner to
    // release its buffer; closing it also closes |in|, which is harmless
    // since the caller's subsequent close() is idempotent.
    Scanner s = new Scanner(in, "UTF-8").useDelimiter("\\A");
    try {
      return s.hasNext() ? s.next() : "";
    } finally {
      s.close();
    }
  }
}
|
||||
Reference in New Issue
Block a user