Format all Java in WebRTC.
BUG=webrtc:6419
TBR=henrika@webrtc.org
Review-Url: https://codereview.webrtc.org/2377003002
Cr-Commit-Position: refs/heads/master@{#14432}
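The change is mechanical reformatting only: wrapped argument lists, string constants, and builder chains are re-flowed to fit the line-length limit, so each old/new pair in the diff below differs only in whitespace and wrapping. A representative before/after pair, taken from AppRTCAudioManager.create() in the diff itself:

    Before:
        static AppRTCAudioManager create(Context context,
            Runnable deviceStateChangeListener) {

    After:
        static AppRTCAudioManager create(Context context, Runnable deviceStateChangeListener) {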
@@ -84,10 +84,8 @@ public class AppRTCAudioManager {

// The proximity sensor should only be activated when there are exactly two
// available audio devices.
if (audioDevices.size() == 2
&& audioDevices.contains(AppRTCAudioManager.AudioDevice.EARPIECE)
&& audioDevices.contains(
AppRTCAudioManager.AudioDevice.SPEAKER_PHONE)) {
if (audioDevices.size() == 2 && audioDevices.contains(AppRTCAudioManager.AudioDevice.EARPIECE)
&& audioDevices.contains(AppRTCAudioManager.AudioDevice.SPEAKER_PHONE)) {
if (proximitySensor.sensorReportsNearState()) {
// Sensor reports that a "handset is being held up to a person's ear",
// or "something is covering the light sensor".
@@ -101,17 +99,14 @@ public class AppRTCAudioManager {
}

/** Construction */
static AppRTCAudioManager create(Context context,
Runnable deviceStateChangeListener) {
static AppRTCAudioManager create(Context context, Runnable deviceStateChangeListener) {
return new AppRTCAudioManager(context, deviceStateChangeListener);
}

private AppRTCAudioManager(Context context,
Runnable deviceStateChangeListener) {
private AppRTCAudioManager(Context context, Runnable deviceStateChangeListener) {
apprtcContext = context;
onStateChangeListener = deviceStateChangeListener;
audioManager = ((AudioManager) context.getSystemService(
Context.AUDIO_SERVICE));
audioManager = ((AudioManager) context.getSystemService(Context.AUDIO_SERVICE));

SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context);
useSpeakerphone = sharedPreferences.getString(context.getString(R.string.pref_speakerphone_key),
@@ -149,8 +144,8 @@ public class AppRTCAudioManager {
savedIsMicrophoneMute = audioManager.isMicrophoneMute();

// Request audio focus before making any device switch.
audioManager.requestAudioFocus(null, AudioManager.STREAM_VOICE_CALL,
AudioManager.AUDIOFOCUS_GAIN_TRANSIENT);
audioManager.requestAudioFocus(
null, AudioManager.STREAM_VOICE_CALL, AudioManager.AUDIOFOCUS_GAIN_TRANSIENT);

// Start by setting MODE_IN_COMMUNICATION as default audio mode. It is
// required to be in this mode when playout and/or recording starts for
@@ -250,13 +245,11 @@ public class AppRTCAudioManager {
int state = intent.getIntExtra("state", STATE_UNPLUGGED);
int microphone = intent.getIntExtra("microphone", HAS_NO_MIC);
String name = intent.getStringExtra("name");
Log.d(TAG, "BroadcastReceiver.onReceive" + AppRTCUtils.getThreadInfo()
+ ": "
+ "a=" + intent.getAction()
+ ", s=" + (state == STATE_UNPLUGGED ? "unplugged" : "plugged")
+ ", m=" + (microphone == HAS_MIC ? "mic" : "no mic")
+ ", n=" + name
+ ", sb=" + isInitialStickyBroadcast());
Log.d(TAG, "BroadcastReceiver.onReceive" + AppRTCUtils.getThreadInfo() + ": "
+ "a=" + intent.getAction() + ", s="
+ (state == STATE_UNPLUGGED ? "unplugged" : "plugged") + ", m="
+ (microphone == HAS_MIC ? "mic" : "no mic") + ", n=" + name + ", sb="
+ isInitialStickyBroadcast());

boolean hasWiredHeadset = (state == STATE_PLUGGED);
switch (state) {
@@ -304,8 +297,7 @@ public class AppRTCAudioManager {

/** Gets the current earpiece state. */
private boolean hasEarpiece() {
return apprtcContext.getPackageManager().hasSystemFeature(
PackageManager.FEATURE_TELEPHONY);
return apprtcContext.getPackageManager().hasSystemFeature(PackageManager.FEATURE_TELEPHONY);
}

/**
@@ -331,7 +323,7 @@ public class AppRTCAudioManager {
// No wired headset, hence the audio-device list can contain speaker
// phone (on a tablet), or speaker phone and earpiece (on mobile phone).
audioDevices.add(AudioDevice.SPEAKER_PHONE);
if (hasEarpiece()) {
if (hasEarpiece()) {
audioDevices.add(AudioDevice.EARPIECE);
}
}
@@ -347,8 +339,8 @@ public class AppRTCAudioManager {

/** Called each time a new audio device has been added or removed. */
private void onAudioManagerChangedState() {
Log.d(TAG, "onAudioManagerChangedState: devices=" + audioDevices
+ ", selected=" + selectedAudioDevice);
Log.d(TAG, "onAudioManagerChangedState: devices=" + audioDevices + ", selected="
+ selectedAudioDevice);

// Enable the proximity sensor if there are two available audio devices
// in the list. Given the current implementation, we know that the choice

@@ -20,7 +20,6 @@ import java.util.List;
* AppRTCClient is the interface representing an AppRTC client.
*/
public interface AppRTCClient {

/**
* Struct holding the connection parameters of an AppRTC room.
*/
@@ -28,8 +27,7 @@ public interface AppRTCClient {
public final String roomUrl;
public final String roomId;
public final boolean loopback;
public RoomConnectionParameters(
String roomUrl, String roomId, boolean loopback) {
public RoomConnectionParameters(String roomUrl, String roomId, boolean loopback) {
this.roomUrl = roomUrl;
this.roomId = roomId;
this.loopback = loopback;
@@ -80,11 +78,9 @@ public interface AppRTCClient {
public final SessionDescription offerSdp;
public final List<IceCandidate> iceCandidates;

public SignalingParameters(
List<PeerConnection.IceServer> iceServers,
boolean initiator, String clientId,
String wssUrl, String wssPostUrl,
SessionDescription offerSdp, List<IceCandidate> iceCandidates) {
public SignalingParameters(List<PeerConnection.IceServer> iceServers, boolean initiator,
String clientId, String wssUrl, String wssPostUrl, SessionDescription offerSdp,
List<IceCandidate> iceCandidates) {
this.iceServers = iceServers;
this.initiator = initiator;
this.clientId = clientId;

@@ -45,16 +45,14 @@ public class AppRTCProximitySensor implements SensorEventListener {
private boolean lastStateReportIsNear = false;

/** Construction */
static AppRTCProximitySensor create(Context context,
Runnable sensorStateListener) {
static AppRTCProximitySensor create(Context context, Runnable sensorStateListener) {
return new AppRTCProximitySensor(context, sensorStateListener);
}

private AppRTCProximitySensor(Context context, Runnable sensorStateListener) {
Log.d(TAG, "AppRTCProximitySensor" + AppRTCUtils.getThreadInfo());
onSensorStateListener = sensorStateListener;
sensorManager = ((SensorManager) context.getSystemService(
Context.SENSOR_SERVICE));
sensorManager = ((SensorManager) context.getSystemService(Context.SENSOR_SERVICE));
}

/**
@@ -68,8 +66,7 @@ public class AppRTCProximitySensor implements SensorEventListener {
// Proximity sensor is not supported on this device.
return false;
}
sensorManager.registerListener(
this, proximitySensor, SensorManager.SENSOR_DELAY_NORMAL);
sensorManager.registerListener(this, proximitySensor, SensorManager.SENSOR_DELAY_NORMAL);
return true;
}

@@ -120,8 +117,8 @@ public class AppRTCProximitySensor implements SensorEventListener {
}

Log.d(TAG, "onSensorChanged" + AppRTCUtils.getThreadInfo() + ": "
+ "accuracy=" + event.accuracy
+ ", timestamp=" + event.timestamp + ", distance=" + event.values[0]);
+ "accuracy=" + event.accuracy + ", timestamp=" + event.timestamp + ", distance="
+ event.values[0]);
}

/**
@@ -168,5 +165,4 @@ public class AppRTCProximitySensor implements SensorEventListener {
}
Log.d(TAG, info.toString());
}

}

@ -42,68 +42,41 @@ import org.webrtc.SurfaceViewRenderer;
|
||||
* Activity for peer connection call setup, call waiting
|
||||
* and call view.
|
||||
*/
|
||||
public class CallActivity extends Activity
|
||||
implements AppRTCClient.SignalingEvents,
|
||||
PeerConnectionClient.PeerConnectionEvents,
|
||||
CallFragment.OnCallEvents {
|
||||
|
||||
public static final String EXTRA_ROOMID =
|
||||
"org.appspot.apprtc.ROOMID";
|
||||
public static final String EXTRA_LOOPBACK =
|
||||
"org.appspot.apprtc.LOOPBACK";
|
||||
public static final String EXTRA_VIDEO_CALL =
|
||||
"org.appspot.apprtc.VIDEO_CALL";
|
||||
public static final String EXTRA_CAMERA2 =
|
||||
"org.appspot.apprtc.CAMERA2";
|
||||
public static final String EXTRA_VIDEO_WIDTH =
|
||||
"org.appspot.apprtc.VIDEO_WIDTH";
|
||||
public static final String EXTRA_VIDEO_HEIGHT =
|
||||
"org.appspot.apprtc.VIDEO_HEIGHT";
|
||||
public static final String EXTRA_VIDEO_FPS =
|
||||
"org.appspot.apprtc.VIDEO_FPS";
|
||||
public class CallActivity extends Activity implements AppRTCClient.SignalingEvents,
|
||||
PeerConnectionClient.PeerConnectionEvents,
|
||||
CallFragment.OnCallEvents {
|
||||
public static final String EXTRA_ROOMID = "org.appspot.apprtc.ROOMID";
|
||||
public static final String EXTRA_LOOPBACK = "org.appspot.apprtc.LOOPBACK";
|
||||
public static final String EXTRA_VIDEO_CALL = "org.appspot.apprtc.VIDEO_CALL";
|
||||
public static final String EXTRA_CAMERA2 = "org.appspot.apprtc.CAMERA2";
|
||||
public static final String EXTRA_VIDEO_WIDTH = "org.appspot.apprtc.VIDEO_WIDTH";
|
||||
public static final String EXTRA_VIDEO_HEIGHT = "org.appspot.apprtc.VIDEO_HEIGHT";
|
||||
public static final String EXTRA_VIDEO_FPS = "org.appspot.apprtc.VIDEO_FPS";
|
||||
public static final String EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED =
|
||||
"org.appsopt.apprtc.VIDEO_CAPTUREQUALITYSLIDER";
|
||||
public static final String EXTRA_VIDEO_BITRATE =
|
||||
"org.appspot.apprtc.VIDEO_BITRATE";
|
||||
public static final String EXTRA_VIDEOCODEC =
|
||||
"org.appspot.apprtc.VIDEOCODEC";
|
||||
public static final String EXTRA_HWCODEC_ENABLED =
|
||||
"org.appspot.apprtc.HWCODEC";
|
||||
public static final String EXTRA_CAPTURETOTEXTURE_ENABLED =
|
||||
"org.appspot.apprtc.CAPTURETOTEXTURE";
|
||||
public static final String EXTRA_AUDIO_BITRATE =
|
||||
"org.appspot.apprtc.AUDIO_BITRATE";
|
||||
public static final String EXTRA_AUDIOCODEC =
|
||||
"org.appspot.apprtc.AUDIOCODEC";
|
||||
public static final String EXTRA_VIDEO_BITRATE = "org.appspot.apprtc.VIDEO_BITRATE";
|
||||
public static final String EXTRA_VIDEOCODEC = "org.appspot.apprtc.VIDEOCODEC";
|
||||
public static final String EXTRA_HWCODEC_ENABLED = "org.appspot.apprtc.HWCODEC";
|
||||
public static final String EXTRA_CAPTURETOTEXTURE_ENABLED = "org.appspot.apprtc.CAPTURETOTEXTURE";
|
||||
public static final String EXTRA_AUDIO_BITRATE = "org.appspot.apprtc.AUDIO_BITRATE";
|
||||
public static final String EXTRA_AUDIOCODEC = "org.appspot.apprtc.AUDIOCODEC";
|
||||
public static final String EXTRA_NOAUDIOPROCESSING_ENABLED =
|
||||
"org.appspot.apprtc.NOAUDIOPROCESSING";
|
||||
public static final String EXTRA_AECDUMP_ENABLED =
|
||||
"org.appspot.apprtc.AECDUMP";
|
||||
public static final String EXTRA_OPENSLES_ENABLED =
|
||||
"org.appspot.apprtc.OPENSLES";
|
||||
public static final String EXTRA_DISABLE_BUILT_IN_AEC =
|
||||
"org.appspot.apprtc.DISABLE_BUILT_IN_AEC";
|
||||
public static final String EXTRA_DISABLE_BUILT_IN_AGC =
|
||||
"org.appspot.apprtc.DISABLE_BUILT_IN_AGC";
|
||||
public static final String EXTRA_DISABLE_BUILT_IN_NS =
|
||||
"org.appspot.apprtc.DISABLE_BUILT_IN_NS";
|
||||
public static final String EXTRA_ENABLE_LEVEL_CONTROL =
|
||||
"org.appspot.apprtc.ENABLE_LEVEL_CONTROL";
|
||||
public static final String EXTRA_DISPLAY_HUD =
|
||||
"org.appspot.apprtc.DISPLAY_HUD";
|
||||
public static final String EXTRA_AECDUMP_ENABLED = "org.appspot.apprtc.AECDUMP";
|
||||
public static final String EXTRA_OPENSLES_ENABLED = "org.appspot.apprtc.OPENSLES";
|
||||
public static final String EXTRA_DISABLE_BUILT_IN_AEC = "org.appspot.apprtc.DISABLE_BUILT_IN_AEC";
|
||||
public static final String EXTRA_DISABLE_BUILT_IN_AGC = "org.appspot.apprtc.DISABLE_BUILT_IN_AGC";
|
||||
public static final String EXTRA_DISABLE_BUILT_IN_NS = "org.appspot.apprtc.DISABLE_BUILT_IN_NS";
|
||||
public static final String EXTRA_ENABLE_LEVEL_CONTROL = "org.appspot.apprtc.ENABLE_LEVEL_CONTROL";
|
||||
public static final String EXTRA_DISPLAY_HUD = "org.appspot.apprtc.DISPLAY_HUD";
|
||||
public static final String EXTRA_TRACING = "org.appspot.apprtc.TRACING";
|
||||
public static final String EXTRA_CMDLINE =
|
||||
"org.appspot.apprtc.CMDLINE";
|
||||
public static final String EXTRA_RUNTIME =
|
||||
"org.appspot.apprtc.RUNTIME";
|
||||
public static final String EXTRA_CMDLINE = "org.appspot.apprtc.CMDLINE";
|
||||
public static final String EXTRA_RUNTIME = "org.appspot.apprtc.RUNTIME";
|
||||
private static final String TAG = "CallRTCClient";
|
||||
|
||||
// List of mandatory application permissions.
|
||||
private static final String[] MANDATORY_PERMISSIONS = {
|
||||
"android.permission.MODIFY_AUDIO_SETTINGS",
|
||||
"android.permission.RECORD_AUDIO",
|
||||
"android.permission.INTERNET"
|
||||
};
|
||||
private static final String[] MANDATORY_PERMISSIONS = {"android.permission.MODIFY_AUDIO_SETTINGS",
|
||||
"android.permission.RECORD_AUDIO", "android.permission.INTERNET"};
|
||||
|
||||
// Peer connection statistics callback period in ms.
|
||||
private static final int STAT_CALLBACK_PERIOD = 1000;
|
||||
@ -152,22 +125,16 @@ public class CallActivity extends Activity
|
||||
@Override
|
||||
public void onCreate(Bundle savedInstanceState) {
|
||||
super.onCreate(savedInstanceState);
|
||||
Thread.setDefaultUncaughtExceptionHandler(
|
||||
new UnhandledExceptionHandler(this));
|
||||
Thread.setDefaultUncaughtExceptionHandler(new UnhandledExceptionHandler(this));
|
||||
|
||||
// Set window styles for fullscreen-window size. Needs to be done before
|
||||
// adding content.
|
||||
requestWindowFeature(Window.FEATURE_NO_TITLE);
|
||||
getWindow().addFlags(
|
||||
LayoutParams.FLAG_FULLSCREEN
|
||||
| LayoutParams.FLAG_KEEP_SCREEN_ON
|
||||
| LayoutParams.FLAG_DISMISS_KEYGUARD
|
||||
| LayoutParams.FLAG_SHOW_WHEN_LOCKED
|
||||
getWindow().addFlags(LayoutParams.FLAG_FULLSCREEN | LayoutParams.FLAG_KEEP_SCREEN_ON
|
||||
| LayoutParams.FLAG_DISMISS_KEYGUARD | LayoutParams.FLAG_SHOW_WHEN_LOCKED
|
||||
| LayoutParams.FLAG_TURN_SCREEN_ON);
|
||||
getWindow().getDecorView().setSystemUiVisibility(
|
||||
View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
|
||||
| View.SYSTEM_UI_FLAG_FULLSCREEN
|
||||
| View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY);
|
||||
getWindow().getDecorView().setSystemUiVisibility(View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
|
||||
| View.SYSTEM_UI_FLAG_FULLSCREEN | View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY);
|
||||
setContentView(R.layout.activity_call);
|
||||
|
||||
iceConnected = false;
|
||||
@ -232,30 +199,24 @@ public class CallActivity extends Activity
|
||||
boolean loopback = intent.getBooleanExtra(EXTRA_LOOPBACK, false);
|
||||
boolean tracing = intent.getBooleanExtra(EXTRA_TRACING, false);
|
||||
|
||||
boolean useCamera2 = Camera2Enumerator.isSupported(this)
|
||||
&& intent.getBooleanExtra(EXTRA_CAMERA2, true);
|
||||
boolean useCamera2 =
|
||||
Camera2Enumerator.isSupported(this) && intent.getBooleanExtra(EXTRA_CAMERA2, true);
|
||||
|
||||
peerConnectionParameters = new PeerConnectionParameters(
|
||||
intent.getBooleanExtra(EXTRA_VIDEO_CALL, true),
|
||||
loopback,
|
||||
tracing,
|
||||
useCamera2,
|
||||
intent.getIntExtra(EXTRA_VIDEO_WIDTH, 0),
|
||||
intent.getIntExtra(EXTRA_VIDEO_HEIGHT, 0),
|
||||
intent.getIntExtra(EXTRA_VIDEO_FPS, 0),
|
||||
intent.getIntExtra(EXTRA_VIDEO_BITRATE, 0),
|
||||
intent.getStringExtra(EXTRA_VIDEOCODEC),
|
||||
intent.getBooleanExtra(EXTRA_HWCODEC_ENABLED, true),
|
||||
intent.getBooleanExtra(EXTRA_CAPTURETOTEXTURE_ENABLED, false),
|
||||
intent.getIntExtra(EXTRA_AUDIO_BITRATE, 0),
|
||||
intent.getStringExtra(EXTRA_AUDIOCODEC),
|
||||
intent.getBooleanExtra(EXTRA_NOAUDIOPROCESSING_ENABLED, false),
|
||||
intent.getBooleanExtra(EXTRA_AECDUMP_ENABLED, false),
|
||||
intent.getBooleanExtra(EXTRA_OPENSLES_ENABLED, false),
|
||||
intent.getBooleanExtra(EXTRA_DISABLE_BUILT_IN_AEC, false),
|
||||
intent.getBooleanExtra(EXTRA_DISABLE_BUILT_IN_AGC, false),
|
||||
intent.getBooleanExtra(EXTRA_DISABLE_BUILT_IN_NS, false),
|
||||
intent.getBooleanExtra(EXTRA_ENABLE_LEVEL_CONTROL, false));
|
||||
peerConnectionParameters =
|
||||
new PeerConnectionParameters(intent.getBooleanExtra(EXTRA_VIDEO_CALL, true), loopback,
|
||||
tracing, useCamera2, intent.getIntExtra(EXTRA_VIDEO_WIDTH, 0),
|
||||
intent.getIntExtra(EXTRA_VIDEO_HEIGHT, 0), intent.getIntExtra(EXTRA_VIDEO_FPS, 0),
|
||||
intent.getIntExtra(EXTRA_VIDEO_BITRATE, 0), intent.getStringExtra(EXTRA_VIDEOCODEC),
|
||||
intent.getBooleanExtra(EXTRA_HWCODEC_ENABLED, true),
|
||||
intent.getBooleanExtra(EXTRA_CAPTURETOTEXTURE_ENABLED, false),
|
||||
intent.getIntExtra(EXTRA_AUDIO_BITRATE, 0), intent.getStringExtra(EXTRA_AUDIOCODEC),
|
||||
intent.getBooleanExtra(EXTRA_NOAUDIOPROCESSING_ENABLED, false),
|
||||
intent.getBooleanExtra(EXTRA_AECDUMP_ENABLED, false),
|
||||
intent.getBooleanExtra(EXTRA_OPENSLES_ENABLED, false),
|
||||
intent.getBooleanExtra(EXTRA_DISABLE_BUILT_IN_AEC, false),
|
||||
intent.getBooleanExtra(EXTRA_DISABLE_BUILT_IN_AGC, false),
|
||||
intent.getBooleanExtra(EXTRA_DISABLE_BUILT_IN_NS, false),
|
||||
intent.getBooleanExtra(EXTRA_ENABLE_LEVEL_CONTROL, false));
|
||||
commandLineRun = intent.getBooleanExtra(EXTRA_CMDLINE, false);
|
||||
runTimeMs = intent.getIntExtra(EXTRA_RUNTIME, 0);
|
||||
|
||||
@ -268,8 +229,7 @@ public class CallActivity extends Activity
|
||||
appRtcClient = new DirectRTCClient(this);
|
||||
}
|
||||
// Create connection parameters.
|
||||
roomConnectionParameters = new RoomConnectionParameters(
|
||||
roomUri.toString(), roomId, loopback);
|
||||
roomConnectionParameters = new RoomConnectionParameters(roomUri.toString(), roomId, loopback);
|
||||
|
||||
// Create CPU monitor
|
||||
cpuMonitor = new CpuMonitor(this);
|
||||
@ -419,21 +379,19 @@ public class CallActivity extends Activity
|
||||
callStartedTimeMs = System.currentTimeMillis();
|
||||
|
||||
// Start room connection.
|
||||
logAndToast(getString(R.string.connecting_to,
|
||||
roomConnectionParameters.roomUrl));
|
||||
logAndToast(getString(R.string.connecting_to, roomConnectionParameters.roomUrl));
|
||||
appRtcClient.connectToRoom(roomConnectionParameters);
|
||||
|
||||
// Create and audio manager that will take care of audio routing,
|
||||
// audio modes, audio device enumeration etc.
|
||||
audioManager = AppRTCAudioManager.create(this, new Runnable() {
|
||||
// This method will be called each time the audio state (number and
|
||||
// type of devices) has been changed.
|
||||
@Override
|
||||
public void run() {
|
||||
onAudioManagerChangedState();
|
||||
}
|
||||
// This method will be called each time the audio state (number and
|
||||
// type of devices) has been changed.
|
||||
@Override
|
||||
public void run() {
|
||||
onAudioManagerChangedState();
|
||||
}
|
||||
);
|
||||
});
|
||||
// Store existing audio settings and change audio mode to
|
||||
// MODE_IN_COMMUNICATION for best possible VoIP performance.
|
||||
Log.d(TAG, "Initializing the audio manager...");
|
||||
@ -499,13 +457,16 @@ public class CallActivity extends Activity
|
||||
.setTitle(getText(R.string.channel_error_title))
|
||||
.setMessage(errorMessage)
|
||||
.setCancelable(false)
|
||||
.setNeutralButton(R.string.ok, new DialogInterface.OnClickListener() {
|
||||
@Override
|
||||
public void onClick(DialogInterface dialog, int id) {
|
||||
dialog.cancel();
|
||||
disconnect();
|
||||
}
|
||||
}).create().show();
|
||||
.setNeutralButton(R.string.ok,
|
||||
new DialogInterface.OnClickListener() {
|
||||
@Override
|
||||
public void onClick(DialogInterface dialog, int id) {
|
||||
dialog.cancel();
|
||||
disconnect();
|
||||
}
|
||||
})
|
||||
.create()
|
||||
.show();
|
||||
}
|
||||
}
|
||||
|
||||
@ -539,8 +500,8 @@ public class CallActivity extends Activity
|
||||
|
||||
signalingParameters = params;
|
||||
logAndToast("Creating peer connection, delay=" + delta + "ms");
|
||||
peerConnectionClient.createPeerConnection(rootEglBase.getEglBaseContext(),
|
||||
localRender, remoteRender, signalingParameters);
|
||||
peerConnectionClient.createPeerConnection(
|
||||
rootEglBase.getEglBaseContext(), localRender, remoteRender, signalingParameters);
|
||||
|
||||
if (signalingParameters.initiator) {
|
||||
logAndToast("Creating OFFER...");
|
||||
@ -716,8 +677,7 @@ public class CallActivity extends Activity
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onPeerConnectionClosed() {
|
||||
}
|
||||
public void onPeerConnectionClosed() {}
|
||||
|
||||
@Override
|
||||
public void onPeerConnectionStatsReady(final StatsReport[] reports) {
|
||||
|
||||
@ -50,26 +50,18 @@ public class CallFragment extends Fragment {
|
||||
}
|
||||
|
||||
@Override
|
||||
public View onCreateView(LayoutInflater inflater, ViewGroup container,
|
||||
Bundle savedInstanceState) {
|
||||
controlView =
|
||||
inflater.inflate(R.layout.fragment_call, container, false);
|
||||
public View onCreateView(
|
||||
LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
|
||||
controlView = inflater.inflate(R.layout.fragment_call, container, false);
|
||||
|
||||
// Create UI controls.
|
||||
contactView =
|
||||
(TextView) controlView.findViewById(R.id.contact_name_call);
|
||||
disconnectButton =
|
||||
(ImageButton) controlView.findViewById(R.id.button_call_disconnect);
|
||||
cameraSwitchButton =
|
||||
(ImageButton) controlView.findViewById(R.id.button_call_switch_camera);
|
||||
videoScalingButton =
|
||||
(ImageButton) controlView.findViewById(R.id.button_call_scaling_mode);
|
||||
toggleMuteButton =
|
||||
(ImageButton) controlView.findViewById(R.id.button_call_toggle_mic);
|
||||
captureFormatText =
|
||||
(TextView) controlView.findViewById(R.id.capture_format_text_call);
|
||||
captureFormatSlider =
|
||||
(SeekBar) controlView.findViewById(R.id.capture_format_slider_call);
|
||||
contactView = (TextView) controlView.findViewById(R.id.contact_name_call);
|
||||
disconnectButton = (ImageButton) controlView.findViewById(R.id.button_call_disconnect);
|
||||
cameraSwitchButton = (ImageButton) controlView.findViewById(R.id.button_call_switch_camera);
|
||||
videoScalingButton = (ImageButton) controlView.findViewById(R.id.button_call_scaling_mode);
|
||||
toggleMuteButton = (ImageButton) controlView.findViewById(R.id.button_call_toggle_mic);
|
||||
captureFormatText = (TextView) controlView.findViewById(R.id.capture_format_text_call);
|
||||
captureFormatSlider = (SeekBar) controlView.findViewById(R.id.capture_format_slider_call);
|
||||
|
||||
// Add buttons click events.
|
||||
disconnectButton.setOnClickListener(new View.OnClickListener() {
|
||||
@ -90,12 +82,10 @@ public class CallFragment extends Fragment {
|
||||
@Override
|
||||
public void onClick(View view) {
|
||||
if (scalingType == ScalingType.SCALE_ASPECT_FILL) {
|
||||
videoScalingButton.setBackgroundResource(
|
||||
R.drawable.ic_action_full_screen);
|
||||
videoScalingButton.setBackgroundResource(R.drawable.ic_action_full_screen);
|
||||
scalingType = ScalingType.SCALE_ASPECT_FIT;
|
||||
} else {
|
||||
videoScalingButton.setBackgroundResource(
|
||||
R.drawable.ic_action_return_from_full_screen);
|
||||
videoScalingButton.setBackgroundResource(R.drawable.ic_action_return_from_full_screen);
|
||||
scalingType = ScalingType.SCALE_ASPECT_FILL;
|
||||
}
|
||||
callEvents.onVideoScalingSwitch(scalingType);
|
||||
@ -144,5 +134,4 @@ public class CallFragment extends Fragment {
|
||||
super.onAttach(activity);
|
||||
callEvents = (OnCallEvents) activity;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@ -24,13 +24,10 @@ import java.util.List;
|
||||
* Control capture format based on a seekbar listener.
|
||||
*/
|
||||
public class CaptureQualityController implements SeekBar.OnSeekBarChangeListener {
|
||||
private final List<CaptureFormat> formats = Arrays.asList(
|
||||
new CaptureFormat(1280, 720, 0, 30000),
|
||||
new CaptureFormat(960, 540, 0, 30000),
|
||||
new CaptureFormat(640, 480, 0, 30000),
|
||||
new CaptureFormat(480, 360, 0, 30000),
|
||||
new CaptureFormat(320, 240, 0, 30000),
|
||||
new CaptureFormat(256, 144, 0, 30000));
|
||||
private final List<CaptureFormat> formats =
|
||||
Arrays.asList(new CaptureFormat(1280, 720, 0, 30000), new CaptureFormat(960, 540, 0, 30000),
|
||||
new CaptureFormat(640, 480, 0, 30000), new CaptureFormat(480, 360, 0, 30000),
|
||||
new CaptureFormat(320, 240, 0, 30000), new CaptureFormat(256, 144, 0, 30000));
|
||||
// Prioritize framerate below this threshold and resolution above the threshold.
|
||||
private static final int FRAMERATE_THRESHOLD = 15;
|
||||
private TextView captureFormatText;
|
||||
@ -52,14 +49,14 @@ public class CaptureQualityController implements SeekBar.OnSeekBarChangeListener
|
||||
int firstFps = calculateFramerate(targetBandwidth, first);
|
||||
int secondFps = calculateFramerate(targetBandwidth, second);
|
||||
|
||||
if (firstFps >= FRAMERATE_THRESHOLD && secondFps >= FRAMERATE_THRESHOLD
|
||||
|| firstFps == secondFps) {
|
||||
if (firstFps >= FRAMERATE_THRESHOLD && secondFps >= FRAMERATE_THRESHOLD
|
||||
|| firstFps == secondFps) {
|
||||
// Compare resolution.
|
||||
return first.width * first.height - second.width * second.height;
|
||||
} else {
|
||||
} else {
|
||||
// Compare fps.
|
||||
return firstFps - secondFps;
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
@ -76,8 +73,8 @@ public class CaptureQualityController implements SeekBar.OnSeekBarChangeListener
|
||||
// Extract max bandwidth (in millipixels / second).
|
||||
long maxCaptureBandwidth = java.lang.Long.MIN_VALUE;
|
||||
for (CaptureFormat format : formats) {
|
||||
maxCaptureBandwidth = Math.max(maxCaptureBandwidth,
|
||||
(long) format.width * format.height * format.framerate.max);
|
||||
maxCaptureBandwidth =
|
||||
Math.max(maxCaptureBandwidth, (long) format.width * format.height * format.framerate.max);
|
||||
}
|
||||
|
||||
// Fraction between 0 and 1.
|
||||
@ -97,8 +94,7 @@ public class CaptureQualityController implements SeekBar.OnSeekBarChangeListener
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onStartTrackingTouch(SeekBar seekBar) {
|
||||
}
|
||||
public void onStartTrackingTouch(SeekBar seekBar) {}
|
||||
|
||||
@Override
|
||||
public void onStopTrackingTouch(SeekBar seekBar) {
|
||||
@ -107,8 +103,8 @@ public class CaptureQualityController implements SeekBar.OnSeekBarChangeListener
|
||||
|
||||
// Return the highest frame rate possible based on bandwidth and format.
|
||||
private int calculateFramerate(double bandwidth, CaptureFormat format) {
|
||||
return (int) Math.round(Math.min(format.framerate.max,
|
||||
(int) Math.round(bandwidth / (format.width * format.height))) / 1000.0);
|
||||
return (int) Math.round(
|
||||
Math.min(format.framerate.max, (int) Math.round(bandwidth / (format.width * format.height)))
|
||||
/ 1000.0);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -118,17 +118,15 @@ public class ConnectActivity extends Activity {
|
||||
setContentView(R.layout.activity_connect);
|
||||
|
||||
roomEditText = (EditText) findViewById(R.id.room_edittext);
|
||||
roomEditText.setOnEditorActionListener(
|
||||
new TextView.OnEditorActionListener() {
|
||||
@Override
|
||||
public boolean onEditorAction(
|
||||
TextView textView, int i, KeyEvent keyEvent) {
|
||||
if (i == EditorInfo.IME_ACTION_DONE) {
|
||||
addFavoriteButton.performClick();
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
roomEditText.setOnEditorActionListener(new TextView.OnEditorActionListener() {
|
||||
@Override
|
||||
public boolean onEditorAction(TextView textView, int i, KeyEvent keyEvent) {
|
||||
if (i == EditorInfo.IME_ACTION_DONE) {
|
||||
addFavoriteButton.performClick();
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
});
|
||||
roomEditText.requestFocus();
|
||||
|
||||
@ -143,12 +141,9 @@ public class ConnectActivity extends Activity {
|
||||
|
||||
// If an implicit VIEW intent is launching the app, go directly to that URL.
|
||||
final Intent intent = getIntent();
|
||||
if ("android.intent.action.VIEW".equals(intent.getAction())
|
||||
&& !commandLineRun) {
|
||||
boolean loopback = intent.getBooleanExtra(
|
||||
CallActivity.EXTRA_LOOPBACK, false);
|
||||
int runTimeMs = intent.getIntExtra(
|
||||
CallActivity.EXTRA_RUNTIME, 0);
|
||||
if ("android.intent.action.VIEW".equals(intent.getAction()) && !commandLineRun) {
|
||||
boolean loopback = intent.getBooleanExtra(CallActivity.EXTRA_LOOPBACK, false);
|
||||
int runTimeMs = intent.getIntExtra(CallActivity.EXTRA_RUNTIME, 0);
|
||||
String room = sharedPref.getString(keyprefRoom, "");
|
||||
connectToRoom(room, true, loopback, runTimeMs);
|
||||
}
|
||||
@ -230,8 +225,7 @@ public class ConnectActivity extends Activity {
|
||||
Log.e(TAG, "Failed to load room list: " + e.toString());
|
||||
}
|
||||
}
|
||||
adapter = new ArrayAdapter<String>(
|
||||
this, android.R.layout.simple_list_item_1, roomList);
|
||||
adapter = new ArrayAdapter<String>(this, android.R.layout.simple_list_item_1, roomList);
|
||||
roomListView.setAdapter(adapter);
|
||||
if (adapter.getCount() > 0) {
|
||||
roomListView.requestFocus();
|
||||
@ -240,8 +234,7 @@ public class ConnectActivity extends Activity {
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void onActivityResult(
|
||||
int requestCode, int resultCode, Intent data) {
|
||||
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
|
||||
if (requestCode == CONNECTION_REQUEST && commandLineRun) {
|
||||
Log.d(TAG, "Return: " + resultCode);
|
||||
setResult(resultCode);
|
||||
@ -260,71 +253,63 @@ public class ConnectActivity extends Activity {
|
||||
}
|
||||
|
||||
String roomUrl = sharedPref.getString(
|
||||
keyprefRoomServerUrl,
|
||||
getString(R.string.pref_room_server_url_default));
|
||||
keyprefRoomServerUrl, getString(R.string.pref_room_server_url_default));
|
||||
|
||||
// Video call enabled flag.
|
||||
boolean videoCallEnabled = sharedPref.getBoolean(keyprefVideoCallEnabled,
|
||||
Boolean.valueOf(getString(R.string.pref_videocall_default)));
|
||||
boolean videoCallEnabled = sharedPref.getBoolean(
|
||||
keyprefVideoCallEnabled, Boolean.valueOf(getString(R.string.pref_videocall_default)));
|
||||
|
||||
// Use Camera2 option.
|
||||
boolean useCamera2 = sharedPref.getBoolean(keyprefCamera2,
|
||||
Boolean.valueOf(getString(R.string.pref_camera2_default)));
|
||||
boolean useCamera2 = sharedPref.getBoolean(
|
||||
keyprefCamera2, Boolean.valueOf(getString(R.string.pref_camera2_default)));
|
||||
|
||||
// Get default codecs.
|
||||
String videoCodec = sharedPref.getString(keyprefVideoCodec,
|
||||
getString(R.string.pref_videocodec_default));
|
||||
String audioCodec = sharedPref.getString(keyprefAudioCodec,
|
||||
getString(R.string.pref_audiocodec_default));
|
||||
String videoCodec =
|
||||
sharedPref.getString(keyprefVideoCodec, getString(R.string.pref_videocodec_default));
|
||||
String audioCodec =
|
||||
sharedPref.getString(keyprefAudioCodec, getString(R.string.pref_audiocodec_default));
|
||||
|
||||
// Check HW codec flag.
|
||||
boolean hwCodec = sharedPref.getBoolean(keyprefHwCodecAcceleration,
|
||||
Boolean.valueOf(getString(R.string.pref_hwcodec_default)));
|
||||
boolean hwCodec = sharedPref.getBoolean(
|
||||
keyprefHwCodecAcceleration, Boolean.valueOf(getString(R.string.pref_hwcodec_default)));
|
||||
|
||||
// Check Capture to texture.
|
||||
boolean captureToTexture = sharedPref.getBoolean(keyprefCaptureToTexture,
|
||||
Boolean.valueOf(getString(R.string.pref_capturetotexture_default)));
|
||||
|
||||
// Check Disable Audio Processing flag.
|
||||
boolean noAudioProcessing = sharedPref.getBoolean(
|
||||
keyprefNoAudioProcessingPipeline,
|
||||
boolean noAudioProcessing = sharedPref.getBoolean(keyprefNoAudioProcessingPipeline,
|
||||
Boolean.valueOf(getString(R.string.pref_noaudioprocessing_default)));
|
||||
|
||||
// Check Disable Audio Processing flag.
|
||||
boolean aecDump = sharedPref.getBoolean(
|
||||
keyprefAecDump,
|
||||
Boolean.valueOf(getString(R.string.pref_aecdump_default)));
|
||||
keyprefAecDump, Boolean.valueOf(getString(R.string.pref_aecdump_default)));
|
||||
|
||||
// Check OpenSL ES enabled flag.
|
||||
boolean useOpenSLES = sharedPref.getBoolean(
|
||||
keyprefOpenSLES,
|
||||
Boolean.valueOf(getString(R.string.pref_opensles_default)));
|
||||
keyprefOpenSLES, Boolean.valueOf(getString(R.string.pref_opensles_default)));
|
||||
|
||||
// Check Disable built-in AEC flag.
|
||||
boolean disableBuiltInAEC = sharedPref.getBoolean(
|
||||
keyprefDisableBuiltInAec,
|
||||
boolean disableBuiltInAEC = sharedPref.getBoolean(keyprefDisableBuiltInAec,
|
||||
Boolean.valueOf(getString(R.string.pref_disable_built_in_aec_default)));
|
||||
|
||||
// Check Disable built-in AGC flag.
|
||||
boolean disableBuiltInAGC = sharedPref.getBoolean(
|
||||
keyprefDisableBuiltInAgc,
|
||||
boolean disableBuiltInAGC = sharedPref.getBoolean(keyprefDisableBuiltInAgc,
|
||||
Boolean.valueOf(getString(R.string.pref_disable_built_in_agc_default)));
|
||||
|
||||
// Check Disable built-in NS flag.
|
||||
boolean disableBuiltInNS = sharedPref.getBoolean(
|
||||
keyprefDisableBuiltInNs,
|
||||
boolean disableBuiltInNS = sharedPref.getBoolean(keyprefDisableBuiltInNs,
|
||||
Boolean.valueOf(getString(R.string.pref_disable_built_in_ns_default)));
|
||||
|
||||
// Check Enable level control.
|
||||
boolean enableLevelControl = sharedPref.getBoolean(
|
||||
keyprefEnableLevelControl,
|
||||
boolean enableLevelControl = sharedPref.getBoolean(keyprefEnableLevelControl,
|
||||
Boolean.valueOf(getString(R.string.pref_enable_level_control_key)));
|
||||
|
||||
// Get video resolution from settings.
|
||||
int videoWidth = 0;
|
||||
int videoHeight = 0;
|
||||
String resolution = sharedPref.getString(keyprefResolution,
|
||||
getString(R.string.pref_resolution_default));
|
||||
String resolution =
|
||||
sharedPref.getString(keyprefResolution, getString(R.string.pref_resolution_default));
|
||||
String[] dimensions = resolution.split("[ x]+");
|
||||
if (dimensions.length == 2) {
|
||||
try {
|
||||
@ -339,8 +324,7 @@ public class ConnectActivity extends Activity {
|
||||
|
||||
// Get camera fps from settings.
|
||||
int cameraFps = 0;
|
||||
String fps = sharedPref.getString(keyprefFps,
|
||||
getString(R.string.pref_fps_default));
|
||||
String fps = sharedPref.getString(keyprefFps, getString(R.string.pref_fps_default));
|
||||
String[] fpsValues = fps.split("[ x]+");
|
||||
if (fpsValues.length == 2) {
|
||||
try {
|
||||
@ -356,31 +340,28 @@ public class ConnectActivity extends Activity {
|
||||
|
||||
// Get video and audio start bitrate.
|
||||
int videoStartBitrate = 0;
|
||||
String bitrateTypeDefault = getString(
|
||||
R.string.pref_maxvideobitrate_default);
|
||||
String bitrateType = sharedPref.getString(
|
||||
keyprefVideoBitrateType, bitrateTypeDefault);
|
||||
String bitrateTypeDefault = getString(R.string.pref_maxvideobitrate_default);
|
||||
String bitrateType = sharedPref.getString(keyprefVideoBitrateType, bitrateTypeDefault);
|
||||
if (!bitrateType.equals(bitrateTypeDefault)) {
|
||||
String bitrateValue = sharedPref.getString(keyprefVideoBitrateValue,
|
||||
getString(R.string.pref_maxvideobitratevalue_default));
|
||||
String bitrateValue = sharedPref.getString(
|
||||
keyprefVideoBitrateValue, getString(R.string.pref_maxvideobitratevalue_default));
|
||||
videoStartBitrate = Integer.parseInt(bitrateValue);
|
||||
}
|
||||
int audioStartBitrate = 0;
|
||||
bitrateTypeDefault = getString(R.string.pref_startaudiobitrate_default);
|
||||
bitrateType = sharedPref.getString(
|
||||
keyprefAudioBitrateType, bitrateTypeDefault);
|
||||
bitrateType = sharedPref.getString(keyprefAudioBitrateType, bitrateTypeDefault);
|
||||
if (!bitrateType.equals(bitrateTypeDefault)) {
|
||||
String bitrateValue = sharedPref.getString(keyprefAudioBitrateValue,
|
||||
getString(R.string.pref_startaudiobitratevalue_default));
|
||||
String bitrateValue = sharedPref.getString(
|
||||
keyprefAudioBitrateValue, getString(R.string.pref_startaudiobitratevalue_default));
|
||||
audioStartBitrate = Integer.parseInt(bitrateValue);
|
||||
}
|
||||
|
||||
// Check statistics display option.
|
||||
boolean displayHud = sharedPref.getBoolean(keyprefDisplayHud,
|
||||
Boolean.valueOf(getString(R.string.pref_displayhud_default)));
|
||||
boolean displayHud = sharedPref.getBoolean(
|
||||
keyprefDisplayHud, Boolean.valueOf(getString(R.string.pref_displayhud_default)));
|
||||
|
||||
boolean tracing = sharedPref.getBoolean(
|
||||
keyprefTracing, Boolean.valueOf(getString(R.string.pref_tracing_default)));
|
||||
keyprefTracing, Boolean.valueOf(getString(R.string.pref_tracing_default)));
|
||||
|
||||
// Start AppRTCMobile activity.
|
||||
Log.d(TAG, "Connecting to room " + roomId + " at URL " + roomUrl);
|
||||
@ -395,14 +376,12 @@ public class ConnectActivity extends Activity {
|
||||
intent.putExtra(CallActivity.EXTRA_VIDEO_WIDTH, videoWidth);
|
||||
intent.putExtra(CallActivity.EXTRA_VIDEO_HEIGHT, videoHeight);
|
||||
intent.putExtra(CallActivity.EXTRA_VIDEO_FPS, cameraFps);
|
||||
intent.putExtra(CallActivity.EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED,
|
||||
captureQualitySlider);
|
||||
intent.putExtra(CallActivity.EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED, captureQualitySlider);
|
||||
intent.putExtra(CallActivity.EXTRA_VIDEO_BITRATE, videoStartBitrate);
|
||||
intent.putExtra(CallActivity.EXTRA_VIDEOCODEC, videoCodec);
|
||||
intent.putExtra(CallActivity.EXTRA_HWCODEC_ENABLED, hwCodec);
|
||||
intent.putExtra(CallActivity.EXTRA_CAPTURETOTEXTURE_ENABLED, captureToTexture);
|
||||
intent.putExtra(CallActivity.EXTRA_NOAUDIOPROCESSING_ENABLED,
|
||||
noAudioProcessing);
|
||||
intent.putExtra(CallActivity.EXTRA_NOAUDIOPROCESSING_ENABLED, noAudioProcessing);
|
||||
intent.putExtra(CallActivity.EXTRA_AECDUMP_ENABLED, aecDump);
|
||||
intent.putExtra(CallActivity.EXTRA_OPENSLES_ENABLED, useOpenSLES);
|
||||
intent.putExtra(CallActivity.EXTRA_DISABLE_BUILT_IN_AEC, disableBuiltInAEC);
|
||||
@ -429,22 +408,25 @@ public class ConnectActivity extends Activity {
|
||||
.setTitle(getText(R.string.invalid_url_title))
|
||||
.setMessage(getString(R.string.invalid_url_text, url))
|
||||
.setCancelable(false)
|
||||
.setNeutralButton(R.string.ok, new DialogInterface.OnClickListener() {
|
||||
public void onClick(DialogInterface dialog, int id) {
|
||||
dialog.cancel();
|
||||
}
|
||||
}).create().show();
|
||||
.setNeutralButton(R.string.ok,
|
||||
new DialogInterface.OnClickListener() {
|
||||
public void onClick(DialogInterface dialog, int id) {
|
||||
dialog.cancel();
|
||||
}
|
||||
})
|
||||
.create()
|
||||
.show();
|
||||
return false;
|
||||
}
|
||||
|
||||
private final AdapterView.OnItemClickListener
|
||||
roomListClickListener = new AdapterView.OnItemClickListener() {
|
||||
@Override
|
||||
public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) {
|
||||
String roomId = ((TextView) view).getText().toString();
|
||||
connectToRoom(roomId, false, false, 0);
|
||||
}
|
||||
};
|
||||
private final AdapterView.OnItemClickListener roomListClickListener =
|
||||
new AdapterView.OnItemClickListener() {
|
||||
@Override
|
||||
public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) {
|
||||
String roomId = ((TextView) view).getText().toString();
|
||||
connectToRoom(roomId, false, false, 0);
|
||||
}
|
||||
};
|
||||
|
||||
private final OnClickListener addFavoriteListener = new OnClickListener() {
|
||||
@Override
|
||||
|
||||
@ -228,7 +228,7 @@ class CpuMonitor {
|
||||
try {
|
||||
BufferedReader reader = new BufferedReader(fin);
|
||||
Scanner scanner = new Scanner(reader).useDelimiter("[-\n]");
|
||||
scanner.nextInt(); // Skip leading number 0.
|
||||
scanner.nextInt(); // Skip leading number 0.
|
||||
cpusPresent = 1 + scanner.nextInt();
|
||||
scanner.close();
|
||||
} catch (Exception e) {
|
||||
@ -247,7 +247,7 @@ class CpuMonitor {
|
||||
curPath = new String[cpusPresent];
|
||||
curFreqScales = new double[cpusPresent];
|
||||
for (int i = 0; i < cpusPresent; i++) {
|
||||
cpuFreqMax[i] = 0; // Frequency "not yet determined".
|
||||
cpuFreqMax[i] = 0; // Frequency "not yet determined".
|
||||
curFreqScales[i] = 0;
|
||||
maxPath[i] = "/sys/devices/system/cpu/cpu" + i + "/cpufreq/cpuinfo_max_freq";
|
||||
curPath[i] = "/sys/devices/system/cpu/cpu" + i + "/cpufreq/scaling_cur_freq";
|
||||
@ -275,8 +275,8 @@ class CpuMonitor {
|
||||
int batteryLevel = 0;
|
||||
int batteryScale = intent.getIntExtra(BatteryManager.EXTRA_SCALE, 100);
|
||||
if (batteryScale > 0) {
|
||||
batteryLevel = (int) (
|
||||
100f * intent.getIntExtra(BatteryManager.EXTRA_LEVEL, 0) / batteryScale);
|
||||
batteryLevel =
|
||||
(int) (100f * intent.getIntExtra(BatteryManager.EXTRA_LEVEL, 0) / batteryScale);
|
||||
}
|
||||
return batteryLevel;
|
||||
}
|
||||
@ -317,10 +317,10 @@ class CpuMonitor {
|
||||
Log.d(TAG, "Core " + i + ". Max frequency: " + cpufreqMax);
|
||||
lastSeenMaxFreq = cpufreqMax;
|
||||
cpuFreqMax[i] = cpufreqMax;
|
||||
maxPath[i] = null; // Kill path to free its memory.
|
||||
maxPath[i] = null; // Kill path to free its memory.
|
||||
}
|
||||
} else {
|
||||
lastSeenMaxFreq = cpuFreqMax[i]; // A valid, previously read value.
|
||||
lastSeenMaxFreq = cpuFreqMax[i]; // A valid, previously read value.
|
||||
}
|
||||
|
||||
long cpuFreqCur = readFreqFromFile(curPath[i]);
|
||||
@ -402,16 +402,20 @@ class CpuMonitor {
|
||||
private synchronized String getStatString() {
|
||||
StringBuilder stat = new StringBuilder();
|
||||
stat.append("CPU User: ")
|
||||
.append(doubleToPercent(userCpuUsage.getCurrent())).append("/")
|
||||
.append(doubleToPercent(userCpuUsage.getCurrent()))
|
||||
.append("/")
|
||||
.append(doubleToPercent(userCpuUsage.getAverage()))
|
||||
.append(". System: ")
|
||||
.append(doubleToPercent(systemCpuUsage.getCurrent())).append("/")
|
||||
.append(doubleToPercent(systemCpuUsage.getCurrent()))
|
||||
.append("/")
|
||||
.append(doubleToPercent(systemCpuUsage.getAverage()))
|
||||
.append(". Freq: ")
|
||||
.append(doubleToPercent(frequencyScale.getCurrent())).append("/")
|
||||
.append(doubleToPercent(frequencyScale.getCurrent()))
|
||||
.append("/")
|
||||
.append(doubleToPercent(frequencyScale.getAverage()))
|
||||
.append(". Total usage: ")
|
||||
.append(doubleToPercent(totalCpuUsage.getCurrent())).append("/")
|
||||
.append(doubleToPercent(totalCpuUsage.getCurrent()))
|
||||
.append("/")
|
||||
.append(doubleToPercent(totalCpuUsage.getAverage()))
|
||||
.append(". Cores: ")
|
||||
.append(actualCpusPresent);
|
||||
@ -478,13 +482,13 @@ class CpuMonitor {
|
||||
String lines[] = line.split("\\s+");
|
||||
int length = lines.length;
|
||||
if (length >= 5) {
|
||||
userTime = parseLong(lines[1]); // user
|
||||
userTime += parseLong(lines[2]); // nice
|
||||
userTime = parseLong(lines[1]); // user
|
||||
userTime += parseLong(lines[2]); // nice
|
||||
systemTime = parseLong(lines[3]); // system
|
||||
idleTime = parseLong(lines[4]); // idle
|
||||
}
|
||||
if (length >= 8) {
|
||||
userTime += parseLong(lines[5]); // iowait
|
||||
userTime += parseLong(lines[5]); // iowait
|
||||
systemTime += parseLong(lines[6]); // irq
|
||||
systemTime += parseLong(lines[7]); // softirq
|
||||
}
|
||||
|
||||
@ -35,32 +35,28 @@ public class DirectRTCClient implements AppRTCClient, TCPChannelClient.TCPChanne
|
||||
private static final int DEFAULT_PORT = 8888;
|
||||
|
||||
// Regex pattern used for checking if room id looks like an IP.
|
||||
static final Pattern IP_PATTERN = Pattern.compile(
|
||||
"("
|
||||
// IPv4
|
||||
+ "((\\d+\\.){3}\\d+)|"
|
||||
// IPv6
|
||||
+ "\\[((([0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4})?::"
|
||||
+ "(([0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4})?)\\]|"
|
||||
+ "\\[(([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4})\\]|"
|
||||
// IPv6 without []
|
||||
+ "((([0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4})?::(([0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4})?)|"
|
||||
+ "(([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4})|"
|
||||
// Literals
|
||||
+ "localhost"
|
||||
static final Pattern IP_PATTERN = Pattern.compile("("
|
||||
// IPv4
|
||||
+ "((\\d+\\.){3}\\d+)|"
|
||||
// IPv6
|
||||
+ "\\[((([0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4})?::"
|
||||
+ "(([0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4})?)\\]|"
|
||||
+ "\\[(([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4})\\]|"
|
||||
// IPv6 without []
|
||||
+ "((([0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4})?::(([0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4})?)|"
|
||||
+ "(([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4})|"
|
||||
// Literals
|
||||
+ "localhost"
|
||||
+ ")"
|
||||
// Optional port number
|
||||
+ "(:(\\d+))?"
|
||||
);
|
||||
+ "(:(\\d+))?");
|
||||
|
||||
private final ExecutorService executor;
|
||||
private final SignalingEvents events;
|
||||
private TCPChannelClient tcpClient;
|
||||
private RoomConnectionParameters connectionParameters;
|
||||
|
||||
private enum ConnectionState {
|
||||
NEW, CONNECTED, CLOSED, ERROR
|
||||
};
|
||||
private enum ConnectionState { NEW, CONNECTED, CLOSED, ERROR }
|
||||
|
||||
// All alterations of the room state should be done from inside the looper thread.
|
||||
private ConnectionState roomState;
|
||||
@ -209,7 +205,7 @@ public class DirectRTCClient implements AppRTCClient, TCPChannelClient.TCPChanne
|
||||
public void run() {
|
||||
JSONObject json = new JSONObject();
|
||||
jsonPut(json, "type", "remove-candidates");
|
||||
JSONArray jsonArray = new JSONArray();
|
||||
JSONArray jsonArray = new JSONArray();
|
||||
for (final IceCandidate candidate : candidates) {
|
||||
jsonArray.put(toJsonCandidate(candidate));
|
||||
}
|
||||
@ -244,7 +240,7 @@ public class DirectRTCClient implements AppRTCClient, TCPChannelClient.TCPChanne
|
||||
null, // wwsPostUrl
|
||||
null, // offerSdp
|
||||
null // iceCandidates
|
||||
);
|
||||
);
|
||||
events.onConnectedToRoom(parameters);
|
||||
}
|
||||
}
|
||||
@ -265,13 +261,11 @@ public class DirectRTCClient implements AppRTCClient, TCPChannelClient.TCPChanne
|
||||
events.onRemoteIceCandidatesRemoved(candidates);
|
||||
} else if (type.equals("answer")) {
|
||||
SessionDescription sdp = new SessionDescription(
|
||||
SessionDescription.Type.fromCanonicalForm(type),
|
||||
json.getString("sdp"));
|
||||
SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));
|
||||
events.onRemoteDescription(sdp);
|
||||
} else if (type.equals("offer")) {
|
||||
SessionDescription sdp = new SessionDescription(
|
||||
SessionDescription.Type.fromCanonicalForm(type),
|
||||
json.getString("sdp"));
|
||||
SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));
|
||||
|
||||
SignalingParameters parameters = new SignalingParameters(
|
||||
// Ice servers are not needed for direct connections.
|
||||
@ -282,7 +276,7 @@ public class DirectRTCClient implements AppRTCClient, TCPChannelClient.TCPChanne
|
||||
null, // wssPostUrl
|
||||
sdp, // offerSdp
|
||||
null // iceCandidates
|
||||
);
|
||||
);
|
||||
roomState = ConnectionState.CONNECTED;
|
||||
events.onConnectedToRoom(parameters);
|
||||
} else {
|
||||
@ -347,8 +341,7 @@ public class DirectRTCClient implements AppRTCClient, TCPChannelClient.TCPChanne
|
||||
|
||||
// Converts a JSON candidate to a Java object.
|
||||
private static IceCandidate toJavaCandidate(JSONObject json) throws JSONException {
|
||||
return new IceCandidate(json.getString("id"),
|
||||
json.getInt("label"),
|
||||
json.getString("candidate"));
|
||||
return new IceCandidate(
|
||||
json.getString("id"), json.getInt("label"), json.getString("candidate"));
|
||||
}
|
||||
}
|
||||
|
||||
@ -41,8 +41,8 @@ public class HudFragment extends Fragment {
|
||||
private CpuMonitor cpuMonitor;
|
||||
|
||||
@Override
|
||||
public View onCreateView(LayoutInflater inflater, ViewGroup container,
|
||||
Bundle savedInstanceState) {
|
||||
public View onCreateView(
|
||||
LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
|
||||
controlView = inflater.inflate(R.layout.fragment_hud, container, false);
|
||||
|
||||
// Create UI controls.
|
||||
@ -57,8 +57,8 @@ public class HudFragment extends Fragment {
|
||||
@Override
|
||||
public void onClick(View view) {
|
||||
if (displayHud) {
|
||||
int visibility = (hudViewBwe.getVisibility() == View.VISIBLE)
|
||||
? View.INVISIBLE : View.VISIBLE;
|
||||
int visibility =
|
||||
(hudViewBwe.getVisibility() == View.VISIBLE) ? View.INVISIBLE : View.VISIBLE;
|
||||
hudViewsSetProperties(visibility);
|
||||
}
|
||||
}
|
||||
@ -126,8 +126,7 @@ public class HudFragment extends Fragment {
|
||||
String actualBitrate = null;
|
||||
|
||||
for (StatsReport report : reports) {
|
||||
if (report.type.equals("ssrc") && report.id.contains("ssrc")
|
||||
&& report.id.contains("send")) {
|
||||
if (report.type.equals("ssrc") && report.id.contains("ssrc") && report.id.contains("send")) {
|
||||
// Send video statistics.
|
||||
Map<String, String> reportMap = getReportMap(report);
|
||||
String trackId = reportMap.get("googTrackId");
|
||||
@ -195,9 +194,11 @@ public class HudFragment extends Fragment {
|
||||
|
||||
if (cpuMonitor != null) {
|
||||
encoderStat.append("CPU%: ")
|
||||
.append(cpuMonitor.getCpuUsageCurrent()).append("/")
|
||||
.append(cpuMonitor.getCpuUsageAverage())
|
||||
.append(". Freq: ").append(cpuMonitor.getFrequencyScaleAverage());
|
||||
.append(cpuMonitor.getCpuUsageCurrent())
|
||||
.append("/")
|
||||
.append(cpuMonitor.getCpuUsageAverage())
|
||||
.append(". Freq: ")
|
||||
.append(cpuMonitor.getFrequencyScaleAverage());
|
||||
}
|
||||
encoderStatView.setText(encoderStat.toString());
|
||||
}
|
||||
|
||||
@ -74,12 +74,11 @@ public class PeerConnectionClient {
|
||||
private static final String VIDEO_CODEC_H264 = "H264";
|
||||
private static final String AUDIO_CODEC_OPUS = "opus";
|
||||
private static final String AUDIO_CODEC_ISAC = "ISAC";
|
||||
private static final String VIDEO_CODEC_PARAM_START_BITRATE =
|
||||
"x-google-start-bitrate";
|
||||
private static final String VIDEO_CODEC_PARAM_START_BITRATE = "x-google-start-bitrate";
|
||||
private static final String AUDIO_CODEC_PARAM_BITRATE = "maxaveragebitrate";
|
||||
private static final String AUDIO_ECHO_CANCELLATION_CONSTRAINT = "googEchoCancellation";
|
||||
private static final String AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT= "googAutoGainControl";
|
||||
private static final String AUDIO_HIGH_PASS_FILTER_CONSTRAINT = "googHighpassFilter";
|
||||
private static final String AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT = "googAutoGainControl";
|
||||
private static final String AUDIO_HIGH_PASS_FILTER_CONSTRAINT = "googHighpassFilter";
|
||||
private static final String AUDIO_NOISE_SUPPRESSION_CONSTRAINT = "googNoiseSuppression";
|
||||
private static final String AUDIO_LEVEL_CONTROL_CONSTRAINT = "levelControl";
|
||||
private static final String DTLS_SRTP_KEY_AGREEMENT_CONSTRAINT = "DtlsSrtpKeyAgreement";
|
||||
@ -162,14 +161,12 @@ public class PeerConnectionClient {
|
||||
public final boolean disableBuiltInNS;
|
||||
public final boolean enableLevelControl;
|
||||
|
||||
public PeerConnectionParameters(
|
||||
boolean videoCallEnabled, boolean loopback, boolean tracing, boolean useCamera2,
|
||||
int videoWidth, int videoHeight, int videoFps,
|
||||
int videoMaxBitrate, String videoCodec, boolean videoCodecHwAcceleration,
|
||||
boolean captureToTexture, int audioStartBitrate, String audioCodec,
|
||||
boolean noAudioProcessing, boolean aecDump, boolean useOpenSLES,
|
||||
boolean disableBuiltInAEC, boolean disableBuiltInAGC, boolean disableBuiltInNS,
|
||||
boolean enableLevelControl) {
|
||||
public PeerConnectionParameters(boolean videoCallEnabled, boolean loopback, boolean tracing,
|
||||
boolean useCamera2, int videoWidth, int videoHeight, int videoFps, int videoMaxBitrate,
|
||||
String videoCodec, boolean videoCodecHwAcceleration, boolean captureToTexture,
|
||||
int audioStartBitrate, String audioCodec, boolean noAudioProcessing, boolean aecDump,
|
||||
boolean useOpenSLES, boolean disableBuiltInAEC, boolean disableBuiltInAGC,
|
||||
boolean disableBuiltInNS, boolean enableLevelControl) {
|
||||
this.videoCallEnabled = videoCallEnabled;
|
||||
this.useCamera2 = useCamera2;
|
||||
this.loopback = loopback;
|
||||
@ -255,10 +252,8 @@ public class PeerConnectionClient {
|
||||
this.options = options;
|
||||
}
|
||||
|
||||
public void createPeerConnectionFactory(
|
||||
final Context context,
|
||||
final PeerConnectionParameters peerConnectionParameters,
|
||||
final PeerConnectionEvents events) {
|
||||
public void createPeerConnectionFactory(final Context context,
|
||||
final PeerConnectionParameters peerConnectionParameters, final PeerConnectionEvents events) {
|
||||
this.peerConnectionParameters = peerConnectionParameters;
|
||||
this.events = events;
|
||||
videoCallEnabled = peerConnectionParameters.videoCallEnabled;
|
||||
@ -289,10 +284,8 @@ public class PeerConnectionClient {
|
||||
});
|
||||
}
|
||||
|
||||
public void createPeerConnection(
|
||||
final EglBase.Context renderEGLContext,
|
||||
final VideoRenderer.Callbacks localRender,
|
||||
final VideoRenderer.Callbacks remoteRender,
|
||||
public void createPeerConnection(final EglBase.Context renderEGLContext,
|
||||
final VideoRenderer.Callbacks localRender, final VideoRenderer.Callbacks remoteRender,
|
||||
final SignalingParameters signalingParameters) {
|
||||
if (peerConnectionParameters == null) {
|
||||
Log.e(TAG, "Creating peer connection without initializing factory.");
|
||||
@ -329,14 +322,14 @@ public class PeerConnectionClient {
|
||||
}
|
||||
|
||||
private void createPeerConnectionFactoryInternal(Context context) {
|
||||
PeerConnectionFactory.initializeInternalTracer();
|
||||
if (peerConnectionParameters.tracing) {
|
||||
PeerConnectionFactory.startInternalTracingCapture(
|
||||
Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator
|
||||
+ "webrtc-trace.txt");
|
||||
}
|
||||
Log.d(TAG, "Create peer connection factory. Use video: " +
|
||||
peerConnectionParameters.videoCallEnabled);
|
||||
PeerConnectionFactory.initializeInternalTracer();
|
||||
if (peerConnectionParameters.tracing) {
|
||||
PeerConnectionFactory.startInternalTracingCapture(
|
||||
Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator
|
||||
+ "webrtc-trace.txt");
|
||||
}
|
||||
Log.d(TAG,
|
||||
"Create peer connection factory. Use video: " + peerConnectionParameters.videoCallEnabled);
|
||||
isError = false;
|
||||
|
||||
// Initialize field trials.
|
||||
@ -391,8 +384,8 @@ public class PeerConnectionClient {
|
||||
}
|
||||
|
||||
// Create peer connection factory.
|
||||
if (!PeerConnectionFactory.initializeAndroidGlobals(context, true, true,
|
||||
peerConnectionParameters.videoCodecHwAcceleration)) {
|
||||
if (!PeerConnectionFactory.initializeAndroidGlobals(
|
||||
context, true, true, peerConnectionParameters.videoCodecHwAcceleration)) {
|
||||
events.onPeerConnectionError("Failed to initializeAndroidGlobals");
|
||||
}
|
||||
if (options != null) {
|
||||
@ -448,30 +441,30 @@ public class PeerConnectionClient {
// added for audio performance measurements
if (peerConnectionParameters.noAudioProcessing) {
Log.d(TAG, "Disabling audio processing");
audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
AUDIO_ECHO_CANCELLATION_CONSTRAINT, "false"));
audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT, "false"));
audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
AUDIO_HIGH_PASS_FILTER_CONSTRAINT, "false"));
audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
AUDIO_NOISE_SUPPRESSION_CONSTRAINT , "false"));
audioConstraints.mandatory.add(
new MediaConstraints.KeyValuePair(AUDIO_ECHO_CANCELLATION_CONSTRAINT, "false"));
audioConstraints.mandatory.add(
new MediaConstraints.KeyValuePair(AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT, "false"));
audioConstraints.mandatory.add(
new MediaConstraints.KeyValuePair(AUDIO_HIGH_PASS_FILTER_CONSTRAINT, "false"));
audioConstraints.mandatory.add(
new MediaConstraints.KeyValuePair(AUDIO_NOISE_SUPPRESSION_CONSTRAINT, "false"));
}
if (peerConnectionParameters.enableLevelControl) {
Log.d(TAG, "Enabling level control.");
audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
AUDIO_LEVEL_CONTROL_CONSTRAINT, "true"));
audioConstraints.mandatory.add(
new MediaConstraints.KeyValuePair(AUDIO_LEVEL_CONTROL_CONSTRAINT, "true"));
}
// Create SDP constraints.
sdpMediaConstraints = new MediaConstraints();
sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
"OfferToReceiveAudio", "true"));
sdpMediaConstraints.mandatory.add(
new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
if (videoCallEnabled || peerConnectionParameters.loopback) {
sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
"OfferToReceiveVideo", "true"));
sdpMediaConstraints.mandatory.add(
new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));
} else {
sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
"OfferToReceiveVideo", "false"));
sdpMediaConstraints.mandatory.add(
new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "false"));
}
}
@ -531,15 +524,12 @@ public class PeerConnectionClient {
// Use ECDSA encryption.
rtcConfig.keyType = PeerConnection.KeyType.ECDSA;

peerConnection = factory.createPeerConnection(
rtcConfig, pcConstraints, pcObserver);
peerConnection = factory.createPeerConnection(rtcConfig, pcConstraints, pcObserver);
isInitiator = false;

// Set default WebRTC tracing and INFO libjingle logging.
// NOTE: this _must_ happen while |factory| is alive!
Logging.enableTracing(
"logcat:",
EnumSet.of(Logging.TraceLevel.TRACE_DEFAULT));
Logging.enableTracing("logcat:", EnumSet.of(Logging.TraceLevel.TRACE_DEFAULT));
Logging.enableLogToDebugOutput(Logging.Severity.LS_INFO);

mediaStream = factory.createLocalMediaStream("ARDAMS");
@ -572,15 +562,13 @@ public class PeerConnectionClient {

if (peerConnectionParameters.aecDump) {
try {
aecDumpFileDescriptor = ParcelFileDescriptor.open(
new File(Environment.getExternalStorageDirectory().getPath()
+ File.separator
+ "Download/audio.aecdump"),
ParcelFileDescriptor.MODE_READ_WRITE |
ParcelFileDescriptor.MODE_CREATE |
ParcelFileDescriptor.MODE_TRUNCATE);
aecDumpFileDescriptor =
ParcelFileDescriptor.open(new File(Environment.getExternalStorageDirectory().getPath()
+ File.separator + "Download/audio.aecdump"),
ParcelFileDescriptor.MODE_READ_WRITE | ParcelFileDescriptor.MODE_CREATE
| ParcelFileDescriptor.MODE_TRUNCATE);
factory.startAecDump(aecDumpFileDescriptor.getFd(), -1);
} catch(IOException e) {
} catch (IOException e) {
Log.e(TAG, "Can not open aecdump file", e);
}
}
@ -607,7 +595,7 @@ public class PeerConnectionClient {
if (videoCapturer != null) {
try {
videoCapturer.stopCapture();
} catch(InterruptedException e) {
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
videoCapturer.dispose();
@ -773,12 +761,11 @@ public class PeerConnectionClient {
sdpDescription = preferCodec(sdpDescription, preferredVideoCodec, false);
}
if (peerConnectionParameters.audioStartBitrate > 0) {
sdpDescription = setStartBitrate(AUDIO_CODEC_OPUS, false,
sdpDescription, peerConnectionParameters.audioStartBitrate);
sdpDescription = setStartBitrate(
AUDIO_CODEC_OPUS, false, sdpDescription, peerConnectionParameters.audioStartBitrate);
}
Log.d(TAG, "Set remote SDP.");
SessionDescription sdpRemote = new SessionDescription(
sdp.type, sdpDescription);
SessionDescription sdpRemote = new SessionDescription(sdp.type, sdpDescription);
peerConnection.setRemoteDescription(sdpObserver, sdpRemote);
}
});
@ -792,7 +779,8 @@ public class PeerConnectionClient {
Log.d(TAG, "Stop video source.");
try {
videoCapturer.stopCapture();
} catch (InterruptedException e) {}
} catch (InterruptedException e) {
}
videoCapturerStopped = true;
}
}
@ -833,9 +821,7 @@ public class PeerConnectionClient {

for (RtpParameters.Encoding encoding : parameters.encodings) {
// Null value means no limit.
encoding.maxBitrateBps = maxBitrateKbps == null
? null
: maxBitrateKbps * BPS_IN_KBPS;
encoding.maxBitrateBps = maxBitrateKbps == null ? null : maxBitrateKbps * BPS_IN_KBPS;
}
if (!localVideoSender.setParameters(parameters)) {
Log.e(TAG, "RtpSender.setParameters failed.");
@ -887,8 +873,8 @@ public class PeerConnectionClient {
}
}

private static String setStartBitrate(String codec, boolean isVideoCodec,
String sdpDescription, int bitrateKbps) {
private static String setStartBitrate(
String codec, boolean isVideoCodec, String sdpDescription, int bitrateKbps) {
String[] lines = sdpDescription.split("\r\n");
int rtpmapLineIndex = -1;
boolean sdpFormatUpdated = false;
@ -909,8 +895,7 @@ public class PeerConnectionClient {
Log.w(TAG, "No rtpmap for " + codec + " codec");
return sdpDescription;
}
Log.d(TAG, "Found " + codec + " rtpmap " + codecRtpMap
+ " at " + lines[rtpmapLineIndex]);
Log.d(TAG, "Found " + codec + " rtpmap " + codecRtpMap + " at " + lines[rtpmapLineIndex]);

// Check if a=fmtp string already exist in remote SDP for this codec and
// update it with new bitrate parameter.
@ -919,13 +904,11 @@ public class PeerConnectionClient {
for (int i = 0; i < lines.length; i++) {
Matcher codecMatcher = codecPattern.matcher(lines[i]);
if (codecMatcher.matches()) {
Log.d(TAG, "Found " + codec + " " + lines[i]);
Log.d(TAG, "Found " + codec + " " + lines[i]);
if (isVideoCodec) {
lines[i] += "; " + VIDEO_CODEC_PARAM_START_BITRATE
+ "=" + bitrateKbps;
lines[i] += "; " + VIDEO_CODEC_PARAM_START_BITRATE + "=" + bitrateKbps;
} else {
lines[i] += "; " + AUDIO_CODEC_PARAM_BITRATE
+ "=" + (bitrateKbps * 1000);
lines[i] += "; " + AUDIO_CODEC_PARAM_BITRATE + "=" + (bitrateKbps * 1000);
}
Log.d(TAG, "Update remote SDP line: " + lines[i]);
sdpFormatUpdated = true;
@ -940,22 +923,20 @@ public class PeerConnectionClient {
if (!sdpFormatUpdated && i == rtpmapLineIndex) {
String bitrateSet;
if (isVideoCodec) {
bitrateSet = "a=fmtp:" + codecRtpMap + " "
+ VIDEO_CODEC_PARAM_START_BITRATE + "=" + bitrateKbps;
bitrateSet =
"a=fmtp:" + codecRtpMap + " " + VIDEO_CODEC_PARAM_START_BITRATE + "=" + bitrateKbps;
} else {
bitrateSet = "a=fmtp:" + codecRtpMap + " "
+ AUDIO_CODEC_PARAM_BITRATE + "=" + (bitrateKbps * 1000);
bitrateSet = "a=fmtp:" + codecRtpMap + " " + AUDIO_CODEC_PARAM_BITRATE + "="
+ (bitrateKbps * 1000);
}
Log.d(TAG, "Add remote SDP line: " + bitrateSet);
newSdpDescription.append(bitrateSet).append("\r\n");
}

}
return newSdpDescription.toString();
}
private static String preferCodec(
String sdpDescription, String codec, boolean isAudio) {
private static String preferCodec(String sdpDescription, String codec, boolean isAudio) {
String[] lines = sdpDescription.split("\r\n");
int mLineIndex = -1;
String codecRtpMap = null;
@ -966,8 +947,7 @@ public class PeerConnectionClient {
if (isAudio) {
mediaDescription = "m=audio ";
}
for (int i = 0; (i < lines.length)
&& (mLineIndex == -1 || codecRtpMap == null); i++) {
for (int i = 0; (i < lines.length) && (mLineIndex == -1 || codecRtpMap == null); i++) {
if (lines[i].startsWith(mediaDescription)) {
mLineIndex = i;
continue;
@ -985,8 +965,7 @@ public class PeerConnectionClient {
Log.w(TAG, "No rtpmap for " + codec);
return sdpDescription;
}
Log.d(TAG, "Found " + codec + " rtpmap " + codecRtpMap + ", prefer at "
+ lines[mLineIndex]);
Log.d(TAG, "Found " + codec + " rtpmap " + codecRtpMap + ", prefer at " + lines[mLineIndex]);
String[] origMLineParts = lines[mLineIndex].split(" ");
if (origMLineParts.length > 3) {
StringBuilder newMLine = new StringBuilder();
@ -1025,9 +1004,9 @@ public class PeerConnectionClient {

private void switchCameraInternal() {
if (!videoCallEnabled || numberOfCameras < 2 || isError || videoCapturer == null) {
Log.e(TAG, "Failed to switch camera. Video: " + videoCallEnabled + ". Error : "
+ isError + ". Number of cameras: " + numberOfCameras);
return; // No video is sent or only one camera is available or error happened.
Log.e(TAG, "Failed to switch camera. Video: " + videoCallEnabled + ". Error : " + isError
+ ". Number of cameras: " + numberOfCameras);
return; // No video is sent or only one camera is available or error happened.
}
Log.d(TAG, "Switch camera");
videoCapturer.switchCamera(null);
@ -1053,8 +1032,8 @@ public class PeerConnectionClient {

private void changeCaptureFormatInternal(int width, int height, int framerate) {
if (!videoCallEnabled || isError || videoCapturer == null) {
Log.e(TAG, "Failed to change capture format. Video: " + videoCallEnabled + ". Error : "
+ isError);
Log.e(TAG,
"Failed to change capture format. Video: " + videoCallEnabled + ". Error : " + isError);
return;
}
Log.d(TAG, "changeCaptureFormat: " + width + "x" + height + "@" + framerate);
@ -1064,7 +1043,7 @@ public class PeerConnectionClient {
// Implementation detail: observe ICE & stream changes and react accordingly.
private class PCObserver implements PeerConnection.Observer {
@Override
public void onIceCandidate(final IceCandidate candidate){
public void onIceCandidate(final IceCandidate candidate) {
executor.execute(new Runnable() {
@Override
public void run() {
@ -1084,14 +1063,12 @@ public class PeerConnectionClient {
}

@Override
public void onSignalingChange(
PeerConnection.SignalingState newState) {
public void onSignalingChange(PeerConnection.SignalingState newState) {
Log.d(TAG, "SignalingState: " + newState);
}

@Override
public void onIceConnectionChange(
final PeerConnection.IceConnectionState newState) {
public void onIceConnectionChange(final PeerConnection.IceConnectionState newState) {
executor.execute(new Runnable() {
@Override
public void run() {
@ -1108,8 +1085,7 @@ public class PeerConnectionClient {
}

@Override
public void onIceGatheringChange(
PeerConnection.IceGatheringState newState) {
public void onIceGatheringChange(PeerConnection.IceGatheringState newState) {
Log.d(TAG, "IceGatheringState: " + newState);
}

@ -1119,7 +1095,7 @@ public class PeerConnectionClient {
}

@Override
public void onAddStream(final MediaStream stream){
public void onAddStream(final MediaStream stream) {
executor.execute(new Runnable() {
@Override
public void run() {
@ -1140,7 +1116,7 @@ public class PeerConnectionClient {
}

@Override
public void onRemoveStream(final MediaStream stream){
public void onRemoveStream(final MediaStream stream) {
executor.execute(new Runnable() {
@Override
public void run() {
@ -1151,8 +1127,7 @@ public class PeerConnectionClient {

@Override
public void onDataChannel(final DataChannel dc) {
reportError("AppRTC doesn't use data channels, but got: " + dc.label()
+ " anyway!");
reportError("AppRTC doesn't use data channels, but got: " + dc.label() + " anyway!");
}

@Override
@ -1178,8 +1153,7 @@ public class PeerConnectionClient {
if (videoCallEnabled) {
sdpDescription = preferCodec(sdpDescription, preferredVideoCodec, false);
}
final SessionDescription sdp = new SessionDescription(
origSdp.type, sdpDescription);
final SessionDescription sdp = new SessionDescription(origSdp.type, sdpDescription);
localSdp = sdp;
executor.execute(new Runnable() {
@Override
@ -54,8 +54,7 @@ public class PercentFrameLayout extends ViewGroup {
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
final int width = getDefaultSize(Integer.MAX_VALUE, widthMeasureSpec);
final int height = getDefaultSize(Integer.MAX_VALUE, heightMeasureSpec);
setMeasuredDimension(
MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY),
setMeasuredDimension(MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY),
MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY));

final int childWidthMeasureSpec =

@ -58,8 +58,8 @@ public class RoomParametersFetcher {
void onSignalingParametersError(final String description);
}

public RoomParametersFetcher(String roomUrl, String roomMessage,
final RoomParametersFetcherEvents events) {
public RoomParametersFetcher(
String roomUrl, String roomMessage, final RoomParametersFetcherEvents events) {
this.roomUrl = roomUrl;
this.roomMessage = roomMessage;
this.events = events;
@ -67,9 +67,8 @@ public class RoomParametersFetcher {

public void makeRequest() {
Log.d(TAG, "Connecting to room: " + roomUrl);
httpConnection = new AsyncHttpURLConnection(
"POST", roomUrl, roomMessage,
new AsyncHttpEvents() {
httpConnection =
new AsyncHttpURLConnection("POST", roomUrl, roomMessage, new AsyncHttpEvents() {
@Override
public void onHttpError(String errorMessage) {
Log.e(TAG, "Room connection error: " + errorMessage);
@ -114,13 +113,10 @@ public class RoomParametersFetcher {
Log.d(TAG, "GAE->C #" + i + " : " + messageString);
if (messageType.equals("offer")) {
offerSdp = new SessionDescription(
SessionDescription.Type.fromCanonicalForm(messageType),
message.getString("sdp"));
SessionDescription.Type.fromCanonicalForm(messageType), message.getString("sdp"));
} else if (messageType.equals("candidate")) {
IceCandidate candidate = new IceCandidate(
message.getString("id"),
message.getInt("label"),
message.getString("candidate"));
message.getString("id"), message.getInt("label"), message.getString("candidate"));
iceCandidates.add(candidate);
} else {
Log.e(TAG, "Unknown message: " + messageString);
@ -153,13 +149,10 @@ public class RoomParametersFetcher {
}

SignalingParameters params = new SignalingParameters(
iceServers, initiator,
clientId, wssUrl, wssPostUrl,
offerSdp, iceCandidates);
iceServers, initiator, clientId, wssUrl, wssPostUrl, offerSdp, iceCandidates);
events.onSignalingParametersReady(params);
} catch (JSONException e) {
events.onSignalingParametersError(
"Room JSON parsing error: " + e.toString());
events.onSignalingParametersError("Room JSON parsing error: " + e.toString());
} catch (IOException e) {
events.onSignalingParametersError("Room IO error: " + e.toString());
}
@ -169,19 +162,17 @@ public class RoomParametersFetcher {
// off the main thread!
private LinkedList<PeerConnection.IceServer> requestTurnServers(String url)
throws IOException, JSONException {
LinkedList<PeerConnection.IceServer> turnServers =
new LinkedList<PeerConnection.IceServer>();
LinkedList<PeerConnection.IceServer> turnServers = new LinkedList<PeerConnection.IceServer>();
Log.d(TAG, "Request TURN from: " + url);
HttpURLConnection connection =
(HttpURLConnection) new URL(url).openConnection();
HttpURLConnection connection = (HttpURLConnection) new URL(url).openConnection();
connection.setDoOutput(true);
connection.setRequestProperty("REFERER", "https://appr.tc");
connection.setConnectTimeout(TURN_HTTP_TIMEOUT_MS);
connection.setReadTimeout(TURN_HTTP_TIMEOUT_MS);
int responseCode = connection.getResponseCode();
if (responseCode != 200) {
throw new IOException("Non-200 response when requesting TURN server from "
+ url + " : " + connection.getHeaderField(null));
throw new IOException("Non-200 response when requesting TURN server from " + url + " : "
+ connection.getHeaderField(null));
}
InputStream responseStream = connection.getInputStream();
String response = drainStream(responseStream);
@ -192,14 +183,11 @@ public class RoomParametersFetcher {
for (int i = 0; i < iceServers.length(); ++i) {
JSONObject server = iceServers.getJSONObject(i);
JSONArray turnUrls = server.getJSONArray("urls");
String username =
server.has("username") ? server.getString("username") : "";
String credential =
server.has("credential") ? server.getString("credential") : "";
String username = server.has("username") ? server.getString("username") : "";
String credential = server.has("credential") ? server.getString("credential") : "";
for (int j = 0; j < turnUrls.length(); j++) {
String turnUrl = turnUrls.getString(j);
turnServers.add(new PeerConnection.IceServer(turnUrl, username,
credential));
turnServers.add(new PeerConnection.IceServer(turnUrl, username, credential));
}
}
return turnServers;
@ -207,17 +195,15 @@ public class RoomParametersFetcher {

// Return the list of ICE servers described by a WebRTCPeerConnection
// configuration string.
private LinkedList<PeerConnection.IceServer> iceServersFromPCConfigJSON(
String pcConfig) throws JSONException {
private LinkedList<PeerConnection.IceServer> iceServersFromPCConfigJSON(String pcConfig)
throws JSONException {
JSONObject json = new JSONObject(pcConfig);
JSONArray servers = json.getJSONArray("iceServers");
LinkedList<PeerConnection.IceServer> ret =
new LinkedList<PeerConnection.IceServer>();
LinkedList<PeerConnection.IceServer> ret = new LinkedList<PeerConnection.IceServer>();
for (int i = 0; i < servers.length(); ++i) {
JSONObject server = servers.getJSONObject(i);
String url = server.getString("urls");
String credential =
server.has("credential") ? server.getString("credential") : "";
String credential = server.has("credential") ? server.getString("credential") : "";
ret.add(new PeerConnection.IceServer(url, "", credential));
}
return ret;
@ -228,5 +214,4 @@ public class RoomParametersFetcher {
Scanner s = new Scanner(in).useDelimiter("\\A");
return s.hasNext() ? s.next() : "";
}

}
@ -23,8 +23,7 @@ import org.webrtc.voiceengine.WebRtcAudioUtils;
/**
* Settings activity for AppRTC.
*/
public class SettingsActivity extends Activity
implements OnSharedPreferenceChangeListener{
public class SettingsActivity extends Activity implements OnSharedPreferenceChangeListener {
private SettingsFragment settingsFragment;
private String keyprefVideoCall;
private String keyprefCamera2;
@ -85,7 +84,8 @@ public class SettingsActivity extends Activity

// Display the fragment as the main content.
settingsFragment = new SettingsFragment();
getFragmentManager().beginTransaction()
getFragmentManager()
.beginTransaction()
.replace(android.R.id.content, settingsFragment)
.commit();
}
@ -127,8 +127,7 @@ public class SettingsActivity extends Activity
updateSummaryB(sharedPreferences, keyPrefTracing);

if (!Camera2Enumerator.isSupported(this)) {
Preference camera2Preference =
settingsFragment.findPreference(keyprefCamera2);
Preference camera2Preference = settingsFragment.findPreference(keyprefCamera2);

camera2Preference.setSummary(getString(R.string.pref_camera2_not_supported));
camera2Preference.setEnabled(false);
@ -173,8 +172,8 @@ public class SettingsActivity extends Activity
}

@Override
public void onSharedPreferenceChanged(SharedPreferences sharedPreferences,
String key) {
public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) {
// clang-format off
if (key.equals(keyprefResolution)
|| key.equals(keyprefFps)
|| key.equals(keyprefMaxVideoBitrateType)
@ -204,6 +203,7 @@ public class SettingsActivity extends Activity
} else if (key.equals(keyprefSpeakerphone)) {
updateSummaryList(sharedPreferences, key);
}
// clang-format on
if (key.equals(keyprefMaxVideoBitrateType)) {
setVideoBitrateEnable(sharedPreferences);
}
@ -218,8 +218,7 @@ public class SettingsActivity extends Activity
updatedPref.setSummary(sharedPreferences.getString(key, ""));
}

private void updateSummaryBitrate(
SharedPreferences sharedPreferences, String key) {
private void updateSummaryBitrate(SharedPreferences sharedPreferences, String key) {
Preference updatedPref = settingsFragment.findPreference(key);
updatedPref.setSummary(sharedPreferences.getString(key, "") + " kbps");
}
@ -227,8 +226,8 @@ public class SettingsActivity extends Activity
private void updateSummaryB(SharedPreferences sharedPreferences, String key) {
Preference updatedPref = settingsFragment.findPreference(key);
updatedPref.setSummary(sharedPreferences.getBoolean(key, true)
? getString(R.string.pref_value_enabled)
: getString(R.string.pref_value_disabled));
? getString(R.string.pref_value_enabled)
: getString(R.string.pref_value_disabled));
}

private void updateSummaryList(SharedPreferences sharedPreferences, String key) {
@ -240,8 +239,8 @@ public class SettingsActivity extends Activity
Preference bitratePreferenceValue =
settingsFragment.findPreference(keyprefMaxVideoBitrateValue);
String bitrateTypeDefault = getString(R.string.pref_maxvideobitrate_default);
String bitrateType = sharedPreferences.getString(
keyprefMaxVideoBitrateType, bitrateTypeDefault);
String bitrateType =
sharedPreferences.getString(keyprefMaxVideoBitrateType, bitrateTypeDefault);
if (bitrateType.equals(bitrateTypeDefault)) {
bitratePreferenceValue.setEnabled(false);
} else {
@ -253,8 +252,8 @@ public class SettingsActivity extends Activity
Preference bitratePreferenceValue =
settingsFragment.findPreference(keyprefStartAudioBitrateValue);
String bitrateTypeDefault = getString(R.string.pref_startaudiobitrate_default);
String bitrateType = sharedPreferences.getString(
keyprefStartAudioBitrateType, bitrateTypeDefault);
String bitrateType =
sharedPreferences.getString(keyprefStartAudioBitrateType, bitrateTypeDefault);
if (bitrateType.equals(bitrateTypeDefault)) {
bitratePreferenceValue.setEnabled(false);
} else {
@ -17,7 +17,6 @@ import android.preference.PreferenceFragment;
* Settings fragment for AppRTC.
*/
public class SettingsFragment extends PreferenceFragment {

@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);

@ -116,7 +116,6 @@ public class TCPChannelClient {
});
}


/**
* Base class for server and client sockets. Contains a listening thread that will call
* eventListener.onTCPMessage on new messages.

@ -29,8 +29,7 @@ import java.io.StringWriter;
* Thread.setDefaultUncaughtExceptionHandler() rather than
* Thread.setUncaughtExceptionHandler(), to apply to background threads as well.
*/
public class UnhandledExceptionHandler
implements Thread.UncaughtExceptionHandler {
public class UnhandledExceptionHandler implements Thread.UncaughtExceptionHandler {
private static final String TAG = "AppRTCMobileActivity";
private final Activity activity;
@ -40,31 +39,30 @@ public class UnhandledExceptionHandler

public void uncaughtException(Thread unusedThread, final Throwable e) {
activity.runOnUiThread(new Runnable() {
@Override public void run() {
String title = "Fatal error: " + getTopLevelCauseMessage(e);
String msg = getRecursiveStackTrace(e);
TextView errorView = new TextView(activity);
errorView.setText(msg);
errorView.setTextSize(TypedValue.COMPLEX_UNIT_SP, 8);
ScrollView scrollingContainer = new ScrollView(activity);
scrollingContainer.addView(errorView);
Log.e(TAG, title + "\n\n" + msg);
DialogInterface.OnClickListener listener =
new DialogInterface.OnClickListener() {
@Override public void onClick(
DialogInterface dialog, int which) {
dialog.dismiss();
System.exit(1);
}
};
AlertDialog.Builder builder =
new AlertDialog.Builder(activity);
builder
.setTitle(title)
.setView(scrollingContainer)
.setPositiveButton("Exit", listener).show();
}
});
@Override
public void run() {
String title = "Fatal error: " + getTopLevelCauseMessage(e);
String msg = getRecursiveStackTrace(e);
TextView errorView = new TextView(activity);
errorView.setText(msg);
errorView.setTextSize(TypedValue.COMPLEX_UNIT_SP, 8);
ScrollView scrollingContainer = new ScrollView(activity);
scrollingContainer.addView(errorView);
Log.e(TAG, title + "\n\n" + msg);
DialogInterface.OnClickListener listener = new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
System.exit(1);
}
};
AlertDialog.Builder builder = new AlertDialog.Builder(activity);
builder.setTitle(title)
.setView(scrollingContainer)
.setPositiveButton("Exit", listener)
.show();
}
});
}

// Returns the Message attached to the original Cause of |t|.
@ -56,9 +56,7 @@ public class WebSocketChannelClient {
/**
* Possible WebSocket connection states.
*/
public enum WebSocketConnectionState {
NEW, CONNECTED, REGISTERED, CLOSED, ERROR
};
public enum WebSocketConnectionState { NEW, CONNECTED, REGISTERED, CLOSED, ERROR }

/**
* Callback interface for messages delivered on WebSocket.
@ -179,8 +177,7 @@ public class WebSocketChannelClient {
sendWSSMessage("DELETE", "");
}
// Close WebSocket in CONNECTED or ERROR states only.
if (state == WebSocketConnectionState.CONNECTED
|| state == WebSocketConnectionState.ERROR) {
if (state == WebSocketConnectionState.CONNECTED || state == WebSocketConnectionState.ERROR) {
ws.disconnect();
state = WebSocketConnectionState.CLOSED;
@ -219,16 +216,15 @@ public class WebSocketChannelClient {
private void sendWSSMessage(final String method, final String message) {
String postUrl = postServerUrl + "/" + roomID + "/" + clientID;
Log.d(TAG, "WS " + method + " : " + postUrl + " : " + message);
AsyncHttpURLConnection httpConnection = new AsyncHttpURLConnection(
method, postUrl, message, new AsyncHttpEvents() {
AsyncHttpURLConnection httpConnection =
new AsyncHttpURLConnection(method, postUrl, message, new AsyncHttpEvents() {
@Override
public void onHttpError(String errorMessage) {
reportError("WS " + method + " error: " + errorMessage);
}

@Override
public void onHttpComplete(String response) {
}
public void onHttpComplete(String response) {}
});
httpConnection.send();
}
@ -237,8 +233,7 @@ public class WebSocketChannelClient {
// called on a looper thread.
private void checkIfCalledOnValidThread() {
if (Thread.currentThread() != handler.getLooper().getThread()) {
throw new IllegalStateException(
"WebSocket method is not called on valid thread");
throw new IllegalStateException("WebSocket method is not called on valid thread");
}
}
@ -260,8 +255,8 @@ public class WebSocketChannelClient {

@Override
public void onClose(WebSocketCloseNotification code, String reason) {
Log.d(TAG, "WebSocket connection closed. Code: " + code
+ ". Reason: " + reason + ". State: " + state);
Log.d(TAG, "WebSocket connection closed. Code: " + code + ". Reason: " + reason + ". State: "
+ state);
synchronized (closeEventLock) {
closeEvent = true;
closeEventLock.notify();
@ -293,12 +288,9 @@ public class WebSocketChannelClient {
}

@Override
public void onRawTextMessage(byte[] payload) {
}
public void onRawTextMessage(byte[] payload) {}

@Override
public void onBinaryMessage(byte[] payload) {
}
public void onBinaryMessage(byte[] payload) {}
}

}
@ -36,19 +36,16 @@ import org.webrtc.SessionDescription;
* Messages to other party (with local Ice candidates and answer SDP) can
* be sent after WebSocket connection is established.
*/
public class WebSocketRTCClient implements AppRTCClient,
WebSocketChannelEvents {
public class WebSocketRTCClient implements AppRTCClient, WebSocketChannelEvents {
private static final String TAG = "WSRTCClient";
private static final String ROOM_JOIN = "join";
private static final String ROOM_MESSAGE = "message";
private static final String ROOM_LEAVE = "leave";

private enum ConnectionState {
NEW, CONNECTED, CLOSED, ERROR
};
private enum MessageType {
MESSAGE, LEAVE
};
private enum ConnectionState { NEW, CONNECTED, CLOSED, ERROR }

private enum MessageType { MESSAGE, LEAVE }

private final Handler handler;
private boolean initiator;
private SignalingEvents events;
@ -101,8 +98,7 @@ public class WebSocketRTCClient implements AppRTCClient,

RoomParametersFetcherEvents callbacks = new RoomParametersFetcherEvents() {
@Override
public void onSignalingParametersReady(
final SignalingParameters params) {
public void onSignalingParametersReady(final SignalingParameters params) {
WebSocketRTCClient.this.handler.post(new Runnable() {
@Override
public void run() {
@ -134,37 +130,32 @@ public class WebSocketRTCClient implements AppRTCClient,
}

// Helper functions to get connection, post message and leave message URLs
private String getConnectionUrl(
RoomConnectionParameters connectionParameters) {
return connectionParameters.roomUrl + "/" + ROOM_JOIN + "/"
+ connectionParameters.roomId;
private String getConnectionUrl(RoomConnectionParameters connectionParameters) {
return connectionParameters.roomUrl + "/" + ROOM_JOIN + "/" + connectionParameters.roomId;
}

private String getMessageUrl(RoomConnectionParameters connectionParameters,
SignalingParameters signalingParameters) {
return connectionParameters.roomUrl + "/" + ROOM_MESSAGE + "/"
+ connectionParameters.roomId + "/" + signalingParameters.clientId;
private String getMessageUrl(
RoomConnectionParameters connectionParameters, SignalingParameters signalingParameters) {
return connectionParameters.roomUrl + "/" + ROOM_MESSAGE + "/" + connectionParameters.roomId
+ "/" + signalingParameters.clientId;
}

private String getLeaveUrl(RoomConnectionParameters connectionParameters,
SignalingParameters signalingParameters) {
return connectionParameters.roomUrl + "/" + ROOM_LEAVE + "/"
+ connectionParameters.roomId + "/" + signalingParameters.clientId;
private String getLeaveUrl(
RoomConnectionParameters connectionParameters, SignalingParameters signalingParameters) {
return connectionParameters.roomUrl + "/" + ROOM_LEAVE + "/" + connectionParameters.roomId + "/"
+ signalingParameters.clientId;
}
// Callback issued when room parameters are extracted. Runs on local
// looper thread.
private void signalingParametersReady(
final SignalingParameters signalingParameters) {
private void signalingParametersReady(final SignalingParameters signalingParameters) {
Log.d(TAG, "Room connection completed.");
if (connectionParameters.loopback
&& (!signalingParameters.initiator
|| signalingParameters.offerSdp != null)) {
&& (!signalingParameters.initiator || signalingParameters.offerSdp != null)) {
reportError("Loopback room is busy.");
return;
}
if (!connectionParameters.loopback
&& !signalingParameters.initiator
if (!connectionParameters.loopback && !signalingParameters.initiator
&& signalingParameters.offerSdp == null) {
Log.w(TAG, "No offer SDP in room response.");
}
@ -200,8 +191,7 @@ public class WebSocketRTCClient implements AppRTCClient,
if (connectionParameters.loopback) {
// In loopback mode rename this offer to answer and route it back.
SessionDescription sdpAnswer = new SessionDescription(
SessionDescription.Type.fromCanonicalForm("answer"),
sdp.description);
SessionDescription.Type.fromCanonicalForm("answer"), sdp.description);
events.onRemoteDescription(sdpAnswer);
}
}
@ -263,7 +253,7 @@ public class WebSocketRTCClient implements AppRTCClient,
public void run() {
JSONObject json = new JSONObject();
jsonPut(json, "type", "remove-candidates");
JSONArray jsonArray = new JSONArray();
JSONArray jsonArray = new JSONArray();
for (final IceCandidate candidate : candidates) {
jsonArray.put(toJsonCandidate(candidate));
}
@ -308,15 +298,14 @@ public class WebSocketRTCClient implements AppRTCClient,
} else if (type.equals("remove-candidates")) {
JSONArray candidateArray = json.getJSONArray("candidates");
IceCandidate[] candidates = new IceCandidate[candidateArray.length()];
for (int i =0; i < candidateArray.length(); ++i) {
for (int i = 0; i < candidateArray.length(); ++i) {
candidates[i] = toJavaCandidate(candidateArray.getJSONObject(i));
}
events.onRemoteIceCandidatesRemoved(candidates);
} else if (type.equals("answer")) {
if (initiator) {
SessionDescription sdp = new SessionDescription(
SessionDescription.Type.fromCanonicalForm(type),
json.getString("sdp"));
SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));
events.onRemoteDescription(sdp);
} else {
reportError("Received answer for call initiator: " + msg);
@ -324,8 +313,7 @@ public class WebSocketRTCClient implements AppRTCClient,
} else if (type.equals("offer")) {
if (!initiator) {
SessionDescription sdp = new SessionDescription(
SessionDescription.Type.fromCanonicalForm(type),
json.getString("sdp"));
SessionDescription.Type.fromCanonicalForm(type), json.getString("sdp"));
events.onRemoteDescription(sdp);
} else {
reportError("Received offer for call receiver: " + msg);
@ -389,28 +377,28 @@ public class WebSocketRTCClient implements AppRTCClient,
logInfo += ". Message: " + message;
}
Log.d(TAG, "C->GAE: " + logInfo);
AsyncHttpURLConnection httpConnection = new AsyncHttpURLConnection(
"POST", url, message, new AsyncHttpEvents() {
@Override
public void onHttpError(String errorMessage) {
reportError("GAE POST error: " + errorMessage);
}
AsyncHttpURLConnection httpConnection =
new AsyncHttpURLConnection("POST", url, message, new AsyncHttpEvents() {
@Override
public void onHttpError(String errorMessage) {
reportError("GAE POST error: " + errorMessage);
}
@Override
public void onHttpComplete(String response) {
if (messageType == MessageType.MESSAGE) {
try {
JSONObject roomJson = new JSONObject(response);
String result = roomJson.getString("result");
if (!result.equals("SUCCESS")) {
reportError("GAE POST error: " + result);
@Override
public void onHttpComplete(String response) {
if (messageType == MessageType.MESSAGE) {
try {
JSONObject roomJson = new JSONObject(response);
String result = roomJson.getString("result");
if (!result.equals("SUCCESS")) {
reportError("GAE POST error: " + result);
}
} catch (JSONException e) {
reportError("GAE POST JSON error: " + e.toString());
}
} catch (JSONException e) {
reportError("GAE POST JSON error: " + e.toString());
}
}
}
});
});
httpConnection.send();
}
@ -425,8 +413,7 @@ public class WebSocketRTCClient implements AppRTCClient,

// Converts a JSON candidate to a Java object.
IceCandidate toJavaCandidate(JSONObject json) throws JSONException {
return new IceCandidate(json.getString("id"),
json.getInt("label"),
json.getString("candidate"));
return new IceCandidate(
json.getString("id"), json.getInt("label"), json.getString("candidate"));
}
}

@ -17,9 +17,7 @@ import android.util.Log;
* AppRTCUtils provides helper functions for managing thread safety.
*/
public final class AppRTCUtils {

private AppRTCUtils() {
}
private AppRTCUtils() {}

/** Helper method which throws an exception when an assertion has failed. */
public static void assertIsTrue(boolean condition) {
@ -30,20 +28,20 @@ public final class AppRTCUtils {

/** Helper method for building a string of thread information.*/
public static String getThreadInfo() {
return "@[name=" + Thread.currentThread().getName()
+ ", id=" + Thread.currentThread().getId() + "]";
return "@[name=" + Thread.currentThread().getName() + ", id=" + Thread.currentThread().getId()
+ "]";
}

/** Information about the current build, taken from system properties. */
public static void logDeviceInfo(String tag) {
Log.d(tag, "Android SDK: " + Build.VERSION.SDK_INT + ", "
+ "Release: " + Build.VERSION.RELEASE + ", "
+ "Brand: " + Build.BRAND + ", "
+ "Device: " + Build.DEVICE + ", "
+ "Id: " + Build.ID + ", "
+ "Hardware: " + Build.HARDWARE + ", "
+ "Manufacturer: " + Build.MANUFACTURER + ", "
+ "Model: " + Build.MODEL + ", "
+ "Product: " + Build.PRODUCT);
+ "Release: " + Build.VERSION.RELEASE + ", "
+ "Brand: " + Build.BRAND + ", "
+ "Device: " + Build.DEVICE + ", "
+ "Id: " + Build.ID + ", "
+ "Hardware: " + Build.HARDWARE + ", "
+ "Manufacturer: " + Build.MANUFACTURER + ", "
+ "Model: " + Build.MODEL + ", "
+ "Product: " + Build.PRODUCT);
}
}
@ -38,8 +38,7 @@ public class AsyncHttpURLConnection {
void onHttpComplete(String response);
}

public AsyncHttpURLConnection(String method, String url, String message,
AsyncHttpEvents events) {
public AsyncHttpURLConnection(String method, String url, String message, AsyncHttpEvents events) {
this.method = method;
this.url = url;
this.message = message;
@ -61,8 +60,7 @@ public class AsyncHttpURLConnection {

private void sendHttpMessage() {
try {
HttpURLConnection connection =
(HttpURLConnection) new URL(url).openConnection();
HttpURLConnection connection = (HttpURLConnection) new URL(url).openConnection();
byte[] postData = new byte[0];
if (message != null) {
postData = message.getBytes("UTF-8");
@ -96,8 +94,8 @@ public class AsyncHttpURLConnection {
// Get response.
int responseCode = connection.getResponseCode();
if (responseCode != 200) {
events.onHttpError("Non-200 response to " + method + " to URL: "
+ url + " : " + connection.getHeaderField(null));
events.onHttpError("Non-200 response to " + method + " to URL: " + url + " : "
+ connection.getHeaderField(null));
connection.disconnect();
return;
}
@ -109,8 +107,7 @@ public class AsyncHttpURLConnection {
} catch (SocketTimeoutException e) {
events.onHttpError("HTTP " + method + " to " + url + " timeout");
} catch (IOException e) {
events.onHttpError("HTTP " + method + " to " + url + " error: "
+ e.getMessage());
events.onHttpError("HTTP " + method + " to " + url + " error: " + e.getMessage());
}
}