Fix style issues from lint.

BUG=
R=glaznev@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/34629004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@7984 4adac7df-926f-26a2-2b94-8c16560cd09d
jiayl@webrtc.org 2014-12-30 22:44:11 +00:00
parent 34ac956706
commit 5eb71eb4f4
7 changed files with 70 additions and 48 deletions
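
For reference, a minimal sketch (not part of this commit; the class, method, and string values are made up) of the Java wrapping and brace style these lint fixes apply: boolean and concatenation operators lead the continuation line instead of trailing the previous one, and single-statement if bodies keep their braces.

// Illustrative sketch only; names below are hypothetical, not from this commit.
import java.util.Set;

public class LintStyleExample {
  public static boolean hasEarpieceAndSpeaker(Set<String> devices) {
    // Operators start the continuation line, indented four extra spaces.
    if (devices.size() == 2
        && devices.contains("EARPIECE")
        && devices.contains("SPEAKER_PHONE")) {
      return true;
    }
    return false;
  }
}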

org/appspot/apprtc/AppRTCAudioManager.java

@@ -34,10 +34,12 @@ import android.content.IntentFilter;
 import android.content.pm.PackageManager;
 import android.media.AudioManager;
 import android.util.Log;
+import org.appspot.apprtc.util.AppRTCUtils;
+
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.Set;
-import org.appspot.apprtc.util.AppRTCUtils;
 
 /**
  * AppRTCAudioManager manages all audio related parts of the AppRTC demo.
@@ -45,7 +47,10 @@ import org.appspot.apprtc.util.AppRTCUtils;
 public class AppRTCAudioManager {
   private static final String TAG = "AppRTCAudioManager";
 
-  // Names of possible audio devices that we currently support.
+  /**
+   * AudioDevice is the names of possible audio devices that we currently
+   * support.
+   */
   // TODO(henrika): add support for BLUETOOTH as well.
   public enum AudioDevice {
     SPEAKER_PHONE,
@@ -88,9 +93,10 @@ public class AppRTCAudioManager {
   private void onProximitySensorChangedState() {
     // The proximity sensor should only be activated when there are exactly two
     // available audio devices.
-    if (audioDevices.size() == 2 &&
-        audioDevices.contains(AppRTCAudioManager.AudioDevice.EARPIECE) &&
-        audioDevices.contains(AppRTCAudioManager.AudioDevice.SPEAKER_PHONE)) {
+    if (audioDevices.size() == 2
+        && audioDevices.contains(AppRTCAudioManager.AudioDevice.EARPIECE)
+        && audioDevices.contains(
+            AppRTCAudioManager.AudioDevice.SPEAKER_PHONE)) {
       if (proximitySensor.sensorReportsNearState()) {
         // Sensor reports that a "handset is being held up to a person's ear",
         // or "something is covering the light sensor".
@@ -346,8 +352,8 @@ public class AppRTCAudioManager {
     // in the list. Given the current implementation, we know that the choice
     // will then be between EARPIECE and SPEAKER_PHONE.
     if (audioDevices.size() == 2) {
-      AppRTCUtils.assertIsTrue(audioDevices.contains(AudioDevice.EARPIECE) &&
-          audioDevices.contains(AudioDevice.SPEAKER_PHONE));
+      AppRTCUtils.assertIsTrue(audioDevices.contains(AudioDevice.EARPIECE)
+          && audioDevices.contains(AudioDevice.SPEAKER_PHONE));
       // Start the proximity sensor.
       proximitySensor.start();
     } else if (audioDevices.size() == 1) {

org/appspot/apprtc/AppRTCClient.java

@@ -33,6 +33,9 @@ import org.webrtc.SessionDescription;
 import java.util.List;
 
+/**
+ * AppRTCClient is the interface representing an AppRTC client.
+ */
 public interface AppRTCClient {
   /**
    * Asynchronously connect to an AppRTC room URL, e.g.

org/appspot/apprtc/AppRTCDemoActivity.java

@@ -27,9 +27,6 @@
 package org.appspot.apprtc;
 
-import java.util.HashMap;
-import java.util.Map;
-
 import android.app.Activity;
 import android.app.AlertDialog;
 import android.app.Fragment;
@@ -62,6 +59,9 @@ import org.webrtc.VideoRenderer;
 import org.webrtc.VideoRendererGui;
 import org.webrtc.VideoRendererGui.ScalingType;
 
+import java.util.HashMap;
+import java.util.Map;
+
 /**
  * Activity of the AppRTCDemo Android app demonstrating interoperability
  * between the Android/Java implementation of PeerConnection and the
@@ -107,9 +107,9 @@ public class AppRTCDemoActivity extends Activity
     getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
     getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
     getWindow().getDecorView().setSystemUiVisibility(
-        View.SYSTEM_UI_FLAG_HIDE_NAVIGATION |
-        View.SYSTEM_UI_FLAG_FULLSCREEN |
-        View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY);
+        View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
+        | View.SYSTEM_UI_FLAG_FULLSCREEN
+        | View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY);
     setContentView(R.layout.activity_fullscreen);
@@ -118,7 +118,7 @@ public class AppRTCDemoActivity extends Activity
     iceConnected = false;
     rootView = findViewById(android.R.id.content);
-    encoderStatView = (TextView)findViewById(R.id.encoder_stat);
+    encoderStatView = (TextView) findViewById(R.id.encoder_stat);
     menuBar = findViewById(R.id.menubar_fragment);
     roomNameView = (TextView) findViewById(R.id.room_name);
     videoView = (GLSurfaceView) findViewById(R.id.glview);
@@ -254,6 +254,9 @@ public class AppRTCDemoActivity extends Activity
     }
   }
 
+  /**
+   * MenuBar fragment for AppRTC.
+   */
   public static class MenuBarFragment extends Fragment {
     @Override
     public View onCreateView(
@@ -429,12 +432,12 @@ public class AppRTCDemoActivity extends Activity
     String targetBitrate = null;
     String actualBitrate = null;
     for (StatsReport report : reports) {
-      if (report.type.equals("ssrc") && report.id.contains("ssrc") &&
-          report.id.contains("send")) {
+      if (report.type.equals("ssrc") && report.id.contains("ssrc")
+          && report.id.contains("send")) {
         Map<String, String> reportMap = getReportMap(report);
         String trackId = reportMap.get("googTrackId");
-        if (trackId != null &&
-            trackId.contains(PeerConnectionClient.VIDEO_TRACK_ID)) {
+        if (trackId != null
+            && trackId.contains(PeerConnectionClient.VIDEO_TRACK_ID)) {
           fps = reportMap.get("googFrameRateSent");
         }
       } else if (report.id.equals("bweforvideo")) {
@@ -486,8 +489,8 @@ public class AppRTCDemoActivity extends Activity
         return;
       }
       final Runnable runnableThis = this;
-      if (hudView.getVisibility() == View.INVISIBLE &&
-          encoderStatView.getVisibility() == View.INVISIBLE) {
+      if (hudView.getVisibility() == View.INVISIBLE
+          && encoderStatView.getVisibility() == View.INVISIBLE) {
        videoView.postDelayed(runnableThis, 1000);
        return;
      }

org/appspot/apprtc/ConnectActivity.java

@@ -50,13 +50,12 @@ import android.widget.ImageButton;
 import android.widget.ListView;
 import android.widget.TextView;
 
-import java.util.ArrayList;
-import java.util.Random;
-
 import org.json.JSONArray;
 import org.json.JSONException;
 import org.webrtc.MediaCodecVideoEncoder;
+import java.util.ArrayList;
+import java.util.Random;
 
 /**
  * Handles the initial setup where the user selects which room to join.
@@ -69,8 +68,8 @@ public class ConnectActivity extends Activity {
   public static final String EXTRA_RUNTIME = "org.appspot.apprtc.RUNTIME";
   public static final String EXTRA_BITRATE = "org.appspot.apprtc.BITRATE";
   public static final String EXTRA_HWCODEC = "org.appspot.apprtc.HWCODEC";
-  private static final String TAG = "ConnectRTCClient";
-  private final int CONNECTION_REQUEST = 1;
+  private static final String TAG = "ConnectActivity";
+  private static final int CONNECTION_REQUEST = 1;
   private static boolean commandLineRun = false;
   private ImageButton addRoomButton;
@@ -141,8 +140,8 @@ public class ConnectActivity extends Activity {
     // If an implicit VIEW intent is launching the app, go directly to that URL.
     final Intent intent = getIntent();
-    if ("android.intent.action.VIEW".equals(intent.getAction()) &&
-        !commandLineRun) {
+    if ("android.intent.action.VIEW".equals(intent.getAction())
+        && !commandLineRun) {
       commandLineRun = true;
       boolean loopback = intent.getBooleanExtra(EXTRA_LOOPBACK, false);
       int runTimeMs = intent.getIntExtra(EXTRA_RUNTIME, 0);
@@ -272,8 +271,8 @@ public class ConnectActivity extends Activity {
         int maxWidth = Integer.parseInt(dimensions[0]);
         int maxHeight = Integer.parseInt(dimensions[1]);
         if (maxWidth > 0 && maxHeight > 0) {
-          parametersResolution = "minHeight=" + maxHeight + ",maxHeight=" +
-              maxHeight + ",minWidth=" + maxWidth + ",maxWidth=" + maxWidth;
+          parametersResolution = "minHeight=" + maxHeight + ",maxHeight="
+              + maxHeight + ",minWidth=" + maxWidth + ",maxWidth=" + maxWidth;
         }
       } catch (NumberFormatException e) {
         Log.e(TAG, "Wrong video resolution setting: " + resolution);
@@ -288,8 +287,8 @@ public class ConnectActivity extends Activity {
       try {
         int cameraFps = Integer.parseInt(fpsValues[0]);
         if (cameraFps > 0) {
-          parametersFps = "minFrameRate=" + cameraFps +
-              ",maxFrameRate=" + cameraFps;
+          parametersFps = "minFrameRate=" + cameraFps
+              + ",maxFrameRate=" + cameraFps;
         }
       } catch (NumberFormatException e) {
         Log.e(TAG, "Wrong camera fps setting: " + fps);
@@ -352,8 +351,9 @@ public class ConnectActivity extends Activity {
   }
 
   private boolean validateUrl(String url) {
-    if (URLUtil.isHttpsUrl(url) || URLUtil.isHttpUrl(url))
+    if (URLUtil.isHttpsUrl(url) || URLUtil.isHttpUrl(url)) {
       return true;
+    }
 
     new AlertDialog.Builder(this)
         .setTitle(getText(R.string.invalid_url_title))

org/appspot/apprtc/PeerConnectionClient.java

@@ -35,10 +35,10 @@ import org.appspot.apprtc.AppRTCClient.SignalingParameters;
 import org.webrtc.DataChannel;
 import org.webrtc.IceCandidate;
 import org.webrtc.MediaConstraints;
+import org.webrtc.MediaConstraints.KeyValuePair;
 import org.webrtc.MediaStream;
 import org.webrtc.MediaStreamTrack;
 import org.webrtc.PeerConnection;
-import org.webrtc.MediaConstraints.KeyValuePair;
 import org.webrtc.PeerConnection.IceConnectionState;
 import org.webrtc.PeerConnectionFactory;
 import org.webrtc.SdpObserver;
@@ -53,6 +53,9 @@ import java.util.LinkedList;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+/**
+ * PeerConnection client for AppRTC.
+ */
 public class PeerConnectionClient {
   private static final String TAG = "PCRTCClient";
   public static final String VIDEO_TRACK_ID = "ARDAMSv0";
@@ -312,8 +315,8 @@ public class PeerConnectionClient {
     int[] cameraOrientation = { 0, 90, 180, 270 };
     for (int index : cameraIndex) {
       for (int orientation : cameraOrientation) {
-        String name = "Camera " + index + ", Facing " + facing +
-            ", Orientation " + orientation;
+        String name = "Camera " + index + ", Facing " + facing
+            + ", Orientation " + orientation;
         VideoCapturer capturer = VideoCapturer.create(name);
         if (capturer != null) {
           Log.d(TAG, "Using camera: " + name);
@@ -372,8 +375,8 @@ public class PeerConnectionClient {
     for (int i = 0; i < lines.length; i++) {
       newSdpDescription.append(lines[i]).append("\r\n");
       if (i == lineIndex) {
-        String bitrateSet = "a=fmtp:" + vp8RtpMap +
-            " x-google-start-bitrate=" + bitrateKbps;
+        String bitrateSet = "a=fmtp:" + vp8RtpMap
+            + " x-google-start-bitrate=" + bitrateKbps;
         Log.d(TAG, "Add remote SDP line: " + bitrateSet);
         newSdpDescription.append(bitrateSet).append("\r\n");
       }
@@ -441,8 +444,9 @@ public class PeerConnectionClient {
   }
 
   public void switchCamera() {
-    if (videoConstraints == null)
+    if (videoConstraints == null) {
       return; // No video is sent.
+    }
 
     if (pc.signalingState() != PeerConnection.SignalingState.STABLE) {
       Log.e(TAG, "Switching camera during negotiation is not handled.");
@@ -531,8 +535,8 @@ public class PeerConnectionClient {
     public void onAddStream(final MediaStream stream){
       uiHandler.post(new Runnable() {
         public void run() {
-          abortUnless(stream.audioTracks.size() <= 1 &&
-              stream.videoTracks.size() <= 1,
+          abortUnless(stream.audioTracks.size() <= 1
+              && stream.videoTracks.size() <= 1,
               "Weird-looking stream: " + stream);
           if (stream.videoTracks.size() == 1) {
             stream.videoTracks.get(0).addRenderer(
@@ -553,8 +557,8 @@ public class PeerConnectionClient {
     @Override
     public void onDataChannel(final DataChannel dc) {
-      reportError("AppRTC doesn't use data channels, but got: " + dc.label() +
-          " anyway!");
+      reportError("AppRTC doesn't use data channels, but got: " + dc.label()
+          + " anyway!");
     }
 
     @Override

org/appspot/apprtc/SettingsActivity.java

@@ -33,6 +33,9 @@ import android.content.SharedPreferences.OnSharedPreferenceChangeListener;
 import android.os.Bundle;
 import android.preference.Preference;
 
+/**
+ * Settings activity for AppRTC.
+ */
 public class SettingsActivity extends Activity
     implements OnSharedPreferenceChangeListener{
   private SettingsFragment settingsFragment;
@@ -92,15 +95,15 @@ public class SettingsActivity extends Activity
   @Override
   public void onSharedPreferenceChanged(SharedPreferences sharedPreferences,
       String key) {
-    if (key.equals(keyprefResolution) ||
-        key.equals(keyprefFps) ||
-        key.equals(keyprefStartBitrateType) ||
-        key.equals(keyPrefRoomServerUrl)) {
+    if (key.equals(keyprefResolution)
+        || key.equals(keyprefFps)
+        || key.equals(keyprefStartBitrateType)
+        || key.equals(keyPrefRoomServerUrl)) {
       updateSummary(sharedPreferences, key);
     } else if (key.equals(keyprefStartBitrateValue)) {
       updateSummaryBitrate(sharedPreferences, key);
-    } else if (key.equals(keyprefCpuUsageDetection) ||
-        key.equals(keyprefHwCodec) || key.equals(keyprefSignaling)) {
+    } else if (key.equals(keyprefCpuUsageDetection)
+        || key.equals(keyprefHwCodec) || key.equals(keyprefSignaling)) {
       updateSummaryB(sharedPreferences, key);
     }
     if (key.equals(keyprefStartBitrateType)) {

org/appspot/apprtc/SettingsFragment.java

@@ -30,6 +30,9 @@ package org.appspot.apprtc;
 import android.os.Bundle;
 import android.preference.PreferenceFragment;
 
+/**
+ * Settings fragment for AppRTC.
+ */
 public class SettingsFragment extends PreferenceFragment {
   @Override