Cleaning up Android AppRTCDemo.

- Move signaling code from Activity to a separate class
and add interface for AppRTC signaling. For now
only pure GAE signaling implements this interface.
- Move peer connection, video source and peer connection
and SDP observer code from Activity to a separate class.
- Main Activity class will do only high level calls and
event handling for peer connection and signaling classes.
- Also add video renderer position update and use full
screen for local preview until the connection is established.

BUG=
R=braveyao@webrtc.org, pthatcher@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/24019004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@7469 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
glaznev@webrtc.org 2014-10-17 17:42:38 +00:00
parent 0371a37f85
commit 58202946a7
7 changed files with 1142 additions and 818 deletions

View File

@ -490,6 +490,15 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
updateTextureProperties = true;
}
// Re-position this renderer's quad. |x|, |y|, |width|, |height| are given
// in percents of the view (0..100) and mapped to GL clip space [-1, 1];
// the right/bottom edges are clamped so the quad never extends past the
// viewport. Takes effect on the next draw (updateTextureProperties flag).
// NOTE(review): |scalingType| is unused in this body — confirm intended.
public void setPosition(int x, int y, int width, int height,
    ScalingType scalingType) {
  final float left = (x - 50) / 50.0f;
  final float top = (50 - y) / 50.0f;
  final float right = (x + width - 50) / 50.0f;
  final float bottom = (50 - y - height) / 50.0f;
  texLeft = left;
  texTop = top;
  texRight = right < 1.0f ? right : 1.0f;
  texBottom = bottom > -1.0f ? bottom : -1.0f;
  updateTextureProperties = true;
}
@Override
public void setSize(final int width, final int height) {
Log.d(TAG, "ID: " + id + ". YuvImageRenderer.setSize: " +
@ -636,6 +645,23 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
return yuvImageRenderer;
}
/**
 * Moves/resizes the quad of an already-created renderer. Position and size
 * are in percents of the view (0..100), matching create(). Throws if called
 * before the GLSurfaceView has been set (singleton |instance| is null).
 */
public static void update(
VideoRenderer.Callbacks renderer,
int x, int y, int width, int height, ScalingType scalingType) {
Log.d(TAG, "VideoRendererGui.update");
if (instance == null) {
throw new RuntimeException(
"Attempt to update yuv renderer before setting GLSurfaceView");
}
// Hold the renderer-list lock so we don't race with concurrent create()
// or the GL thread iterating the same list.
synchronized (instance.yuvImageRenderers) {
for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
// Identity comparison: only renderers created by this class match.
if (yuvImageRenderer == renderer) {
yuvImageRenderer.setPosition(x, y, width, height, scalingType);
}
}
}
}
@Override
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
Log.d(TAG, "VideoRendererGui.onSurfaceCreated");

View File

@ -24,121 +24,44 @@
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.appspot.apprtc;
import android.app.Activity;
import android.os.AsyncTask;
import android.util.Log;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.webrtc.IceCandidate;
import org.webrtc.MediaConstraints;
import org.webrtc.PeerConnection;
import org.webrtc.SessionDescription;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLConnection;
import java.util.LinkedList;
import java.util.List;
import java.util.Scanner;
/**
* Negotiates signaling for chatting with apprtc.appspot.com "rooms".
* Uses the client<->server specifics of the apprtc AppEngine webapp.
*
* To use: create an instance of this object (registering a message handler) and
* call connectToRoom(). Once that's done call sendMessage() and wait for the
* registered handler to be called with received messages.
*/
public class AppRTCClient {
private static final String TAG = "AppRTCClient";
private GAEChannelClient channelClient;
private final Activity activity;
private final GAEChannelClient.MessageHandler gaeHandler;
private final IceServersObserver iceServersObserver;
// These members are only read/written under sendQueue's lock.
private LinkedList<String> sendQueue = new LinkedList<String>();
private AppRTCSignalingParameters appRTCSignalingParameters;
/**
* Callback fired once the room's signaling parameters specify the set of
* ICE servers to use.
*/
public static interface IceServersObserver {
public void onIceServers(List<PeerConnection.IceServer> iceServers);
}
public AppRTCClient(
Activity activity, GAEChannelClient.MessageHandler gaeHandler,
IceServersObserver iceServersObserver) {
this.activity = activity;
this.gaeHandler = gaeHandler;
this.iceServersObserver = iceServersObserver;
}
public interface AppRTCClient {
/**
* Asynchronously connect to an AppRTC room URL, e.g.
* https://apprtc.appspot.com/?r=NNN and register message-handling callbacks
* on its GAE Channel.
* https://apprtc.appspot.com/?r=NNN. Once connection is established
* onConnectedToRoom() callback with room parameters is invoked.
*/
public void connectToRoom(String url) {
while (url.indexOf('?') < 0) {
// Keep redirecting until we get a room number.
(new RedirectResolver()).execute(url);
return; // RedirectResolver above calls us back with the next URL.
}
(new RoomParameterGetter()).execute(url);
}
public void connectToRoom(String url);
/**
* Disconnect from the GAE Channel.
* Send local SDP (offer or answer, depending on role) to the
* other participant.
*/
public void disconnect() {
if (channelClient != null) {
channelClient.close();
channelClient = null;
}
}
public void sendLocalDescription(final SessionDescription sdp);
/**
* Queue a message for sending to the room's channel and send it if already
 * connected (otherwise queued messages are drained when the channel is
 * eventually established).
* Send Ice candidate to the other participant.
*/
public synchronized void sendMessage(String msg) {
synchronized (sendQueue) {
sendQueue.add(msg);
}
requestQueueDrainInBackground();
}
public void sendLocalIceCandidate(final IceCandidate candidate);
public boolean isInitiator() {
return appRTCSignalingParameters.initiator;
}
/**
* Disconnect from the channel.
*/
public void disconnect();
public MediaConstraints pcConstraints() {
return appRTCSignalingParameters.pcConstraints;
}
public MediaConstraints videoConstraints() {
return appRTCSignalingParameters.videoConstraints;
}
public MediaConstraints audioConstraints() {
return appRTCSignalingParameters.audioConstraints;
}
// Struct holding the signaling parameters of an AppRTC room.
private class AppRTCSignalingParameters {
/**
* Struct holding the signaling parameters of an AppRTC room.
*/
public class AppRTCSignalingParameters {
public final List<PeerConnection.IceServer> iceServers;
public final String gaeBaseHref;
public final String channelToken;
public final String postMessageUrl;
public final boolean initiator;
public final MediaConstraints pcConstraints;
public final MediaConstraints videoConstraints;
@ -146,13 +69,9 @@ public class AppRTCClient {
public AppRTCSignalingParameters(
List<PeerConnection.IceServer> iceServers,
String gaeBaseHref, String channelToken, String postMessageUrl,
boolean initiator, MediaConstraints pcConstraints,
MediaConstraints videoConstraints, MediaConstraints audioConstraints) {
this.iceServers = iceServers;
this.gaeBaseHref = gaeBaseHref;
this.channelToken = channelToken;
this.postMessageUrl = postMessageUrl;
this.initiator = initiator;
this.pcConstraints = pcConstraints;
this.videoConstraints = videoConstraints;
@ -160,289 +79,40 @@ public class AppRTCClient {
}
}
// Load the given URL and return the value of the Location header of the
// resulting 302 response. If the result is not a 302, throws.
private class RedirectResolver extends AsyncTask<String, Void, String> {
@Override
protected String doInBackground(String... urls) {
if (urls.length != 1) {
throw new RuntimeException("Must be called with a single URL");
}
try {
return followRedirect(urls[0]);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
* Signaling callbacks.
*/
public static interface AppRTCSignalingEvents {
/**
* Callback fired once the room's signaling parameters
* AppRTCSignalingParameters are extracted.
*/
public void onConnectedToRoom(final AppRTCSignalingParameters params);
@Override
protected void onPostExecute(String url) {
connectToRoom(url);
}
/**
* Callback fired once channel for signaling messages is opened and
* ready to receive messages.
*/
public void onChannelOpen();
private String followRedirect(String url) throws IOException {
HttpURLConnection connection = (HttpURLConnection)
new URL(url).openConnection();
connection.setInstanceFollowRedirects(false);
int code = connection.getResponseCode();
if (code != HttpURLConnection.HTTP_MOVED_TEMP) {
throw new IOException("Unexpected response: " + code + " for " + url +
", with contents: " + drainStream(connection.getInputStream()));
}
int n = 0;
String name, value;
while ((name = connection.getHeaderFieldKey(n)) != null) {
value = connection.getHeaderField(n);
if (name.equals("Location")) {
return value;
}
++n;
}
throw new IOException("Didn't find Location header!");
}
}
/**
* Callback fired once remote SDP is received.
*/
public void onRemoteDescription(final SessionDescription sdp);
// AsyncTask that converts an AppRTC room URL into the set of signaling
// parameters to use with that room.
private class RoomParameterGetter
extends AsyncTask<String, Void, AppRTCSignalingParameters> {
@Override
protected AppRTCSignalingParameters doInBackground(String... urls) {
if (urls.length != 1) {
throw new RuntimeException("Must be called with a single URL");
}
try {
return getParametersForRoomUrl(urls[0]);
} catch (JSONException e) {
throw new RuntimeException(e);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
* Callback fired once remote Ice candidate is received.
*/
public void onRemoteIceCandidate(final IceCandidate candidate);
@Override
protected void onPostExecute(AppRTCSignalingParameters params) {
channelClient =
new GAEChannelClient(activity, params.channelToken, gaeHandler);
synchronized (sendQueue) {
appRTCSignalingParameters = params;
}
requestQueueDrainInBackground();
iceServersObserver.onIceServers(appRTCSignalingParameters.iceServers);
}
/**
* Callback fired once channel is closed.
*/
public void onChannelClose();
// Fetches |url| and fishes the signaling parameters out of the JSON.
// Performs blocking network I/O — must be called off the main thread
// (invoked from RoomParameterGetter.doInBackground).
private AppRTCSignalingParameters getParametersForRoomUrl(String url)
throws IOException, JSONException {
// Ask the room page for its JSON representation instead of HTML.
url = url + "&t=json";
JSONObject roomJson = new JSONObject(
drainStream((new URL(url)).openConnection().getInputStream()));
// Server-side errors arrive as an "error" flag plus "error_messages".
if (roomJson.has("error")) {
JSONArray errors = roomJson.getJSONArray("error_messages");
throw new IOException(errors.toString());
}
// Base href is everything up to the query string of the room URL.
String gaeBaseHref = url.substring(0, url.indexOf('?'));
String token = roomJson.getString("token");
String postMessageUrl = "/message?r=" +
roomJson.getString("room_key") + "&u=" +
roomJson.getString("me");
boolean initiator = roomJson.getInt("initiator") == 1;
LinkedList<PeerConnection.IceServer> iceServers =
iceServersFromPCConfigJSON(roomJson.getString("pc_config"));
// If pc_config contained no TURN server, fetch one from turn_url so
// connectivity works behind symmetric NATs.
boolean isTurnPresent = false;
for (PeerConnection.IceServer server : iceServers) {
if (server.uri.startsWith("turn:")) {
isTurnPresent = true;
break;
}
}
if (!isTurnPresent) {
iceServers.add(requestTurnServer(roomJson.getString("turn_url")));
}
MediaConstraints pcConstraints = constraintsFromJSON(
roomJson.getString("pc_constraints"));
addDTLSConstraintIfMissing(pcConstraints);
Log.d(TAG, "pcConstraints: " + pcConstraints);
// media_constraints holds both audio and video sections; each may be a
// boolean or an object (see getAVConstraints), yielding null when disabled.
MediaConstraints videoConstraints = constraintsFromJSON(
getAVConstraints("video",
roomJson.getString("media_constraints")));
Log.d(TAG, "videoConstraints: " + videoConstraints);
MediaConstraints audioConstraints = constraintsFromJSON(
getAVConstraints("audio",
roomJson.getString("media_constraints")));
Log.d(TAG, "audioConstraints: " + audioConstraints);
return new AppRTCSignalingParameters(
iceServers, gaeBaseHref, token, postMessageUrl, initiator,
pcConstraints, videoConstraints, audioConstraints);
}
// Mimic Chrome: unless the web-app already specified DtlsSrtpKeyAgreement
// (in either the mandatory or the optional list, with any value), enable
// DTLS-SRTP by default via an optional constraint.
private void addDTLSConstraintIfMissing(
    MediaConstraints pcConstraints) {
  boolean alreadySpecified = false;
  for (MediaConstraints.KeyValuePair pair : pcConstraints.mandatory) {
    if (pair.getKey().equals("DtlsSrtpKeyAgreement")) {
      alreadySpecified = true;
      break;
    }
  }
  if (!alreadySpecified) {
    for (MediaConstraints.KeyValuePair pair : pcConstraints.optional) {
      if (pair.getKey().equals("DtlsSrtpKeyAgreement")) {
        alreadySpecified = true;
        break;
      }
    }
  }
  if (!alreadySpecified) {
    // DTLS isn't being suppressed (e.g. for debug=loopback calls), so
    // enable it by default.
    pcConstraints.optional.add(
        new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
  }
}
// Return the constraints specified for |type| ("audio" or "video") in
// |mediaConstraintsString|, or null if that media kind is disabled.
private String getAVConstraints(
    String type, String mediaConstraintsString) {
  try {
    JSONObject root = new JSONObject(mediaConstraintsString);
    // Per the getUserMedia() spec, the "audio"/"video" entry may be a
    // boolean or a MediaTrackConstraints object; handle all three cases.
    if (!root.has(type) || !root.optBoolean(type, true)) {
      // Case 1: key absent, or an explicit boolean false — disabled.
      return null;
    }
    if (root.optBoolean(type, false)) {
      // Case 2: an explicit boolean true — enabled with no constraints.
      return "{\"mandatory\": {}, \"optional\": []}";
    }
    // Case 3: a constraints object — pass its JSON through verbatim.
    return root.getJSONObject(type).toString();
  } catch (JSONException e) {
    throw new RuntimeException(e);
  }
}
// Parse |jsonString| into a MediaConstraints object; a null input yields
// a null result. Malformed JSON is a programming/protocol error and is
// rethrown as RuntimeException.
private MediaConstraints constraintsFromJSON(String jsonString) {
  if (jsonString == null) {
    return null;
  }
  try {
    JSONObject root = new JSONObject(jsonString);
    MediaConstraints result = new MediaConstraints();
    // "mandatory" is a flat {key: value} dictionary.
    JSONObject mandatory = root.optJSONObject("mandatory");
    if (mandatory != null) {
      JSONArray keys = mandatory.names();
      if (keys != null) {
        for (int i = 0; i < keys.length(); ++i) {
          String key = keys.getString(i);
          result.mandatory.add(new MediaConstraints.KeyValuePair(
              key, mandatory.getString(key)));
        }
      }
    }
    // "optional" is an array of single-entry {key: value} dictionaries.
    JSONArray optional = root.optJSONArray("optional");
    if (optional != null) {
      for (int i = 0; i < optional.length(); ++i) {
        JSONObject entry = optional.getJSONObject(i);
        String key = entry.names().getString(0);
        result.optional.add(new MediaConstraints.KeyValuePair(
            key, entry.getString(key)));
      }
    }
    return result;
  } catch (JSONException e) {
    throw new RuntimeException(e);
  }
}
// Requests & returns a TURN ICE Server based on a request URL. Must be run
// off the main thread! Network or JSON failures are rethrown unchecked.
private PeerConnection.IceServer requestTurnServer(String url) {
try {
URLConnection connection = (new URL(url)).openConnection();
// The TURN REST endpoint expects a browser-ish user-agent and an
// allowed origin; without these headers the request is rejected.
connection.addRequestProperty("user-agent", "Mozilla/5.0");
connection.addRequestProperty("origin", "https://apprtc.appspot.com");
String response = drainStream(connection.getInputStream());
JSONObject responseJSON = new JSONObject(response);
// Only the first URI of the returned list is used.
String uri = responseJSON.getJSONArray("uris").getString(0);
String username = responseJSON.getString("username");
String password = responseJSON.getString("password");
return new PeerConnection.IceServer(uri, username, password);
} catch (JSONException e) {
throw new RuntimeException(e);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
// Parse a WebRTCPeerConnection configuration JSON string (|pcConfig|) and
// return the list of ICE servers it describes. Malformed JSON is rethrown
// as RuntimeException.
private LinkedList<PeerConnection.IceServer> iceServersFromPCConfigJSON(
    String pcConfig) {
  try {
    JSONArray servers = new JSONObject(pcConfig).getJSONArray("iceServers");
    LinkedList<PeerConnection.IceServer> result =
        new LinkedList<PeerConnection.IceServer>();
    for (int i = 0; i < servers.length(); ++i) {
      JSONObject server = servers.getJSONObject(i);
      String url = server.getString("urls");
      // "credential" is optional; default to an empty string.
      String credential =
          server.has("credential") ? server.getString("credential") : "";
      result.add(new PeerConnection.IceServer(url, "", credential));
    }
    return result;
  } catch (JSONException e) {
    throw new RuntimeException(e);
  }
}
// Request an attempt to drain the send queue, on a background thread
// (network I/O in maybeDrainQueue must not run on the UI thread).
private void requestQueueDrainInBackground() {
(new AsyncTask<Void, Void, Void>() {
// @Override added: this overrides AsyncTask.doInBackground; without the
// annotation a signature typo would silently create an overload.
@Override
public Void doInBackground(Void... unused) {
maybeDrainQueue();
return null;
}
}).execute();
}
// Send all queued messages if connected to the room. No-op until the room
// parameters have been fetched. Runs on a background thread (see
// requestQueueDrainInBackground); blocking network I/O happens while
// holding the sendQueue lock, which serializes senders.
private void maybeDrainQueue() {
synchronized (sendQueue) {
if (appRTCSignalingParameters == null) {
return;
}
try {
// One POST per queued message, to gaeBaseHref + postMessageUrl.
for (String msg : sendQueue) {
URLConnection connection = new URL(
appRTCSignalingParameters.gaeBaseHref +
appRTCSignalingParameters.postMessageUrl).openConnection();
connection.setDoOutput(true);
connection.getOutputStream().write(msg.getBytes("UTF-8"));
// NOTE(review): parsing the raw status line and requiring exactly
// "HTTP/1.1 200 " is brittle (breaks on HTTP/1.0 or other 2xx) —
// getHeaderField(null) may also be null; confirm server behavior.
if (!connection.getHeaderField(null).startsWith("HTTP/1.1 200 ")) {
throw new IOException(
"Non-200 response to POST: " + connection.getHeaderField(null) +
" for msg: " + msg);
}
}
} catch (IOException e) {
// NOTE(review): on failure the queue is NOT cleared, so already-sent
// messages would be re-POSTed on the next drain — confirm intended.
throw new RuntimeException(e);
}
sendQueue.clear();
}
}
// Return the contents of an InputStream as a String.
private static String drainStream(InputStream in) {
Scanner s = new Scanner(in).useDelimiter("\\A");
return s.hasNext() ? s.next() : "";
/**
* Callback fired once channel error happened.
*/
public void onChannelError(int code, String description);
}
}

View File

@ -41,33 +41,18 @@ import android.util.TypedValue;
import android.view.View;
import android.view.ViewGroup.LayoutParams;
import android.view.WindowManager;
import android.webkit.JavascriptInterface;
import android.widget.EditText;
import android.widget.TextView;
import android.widget.Toast;
import org.json.JSONException;
import org.json.JSONObject;
import org.webrtc.DataChannel;
import org.appspot.apprtc.AppRTCClient.AppRTCSignalingParameters;
import org.webrtc.IceCandidate;
import org.webrtc.MediaConstraints;
import org.webrtc.MediaStream;
import org.webrtc.PeerConnection;
import org.webrtc.PeerConnectionFactory;
import org.webrtc.SdpObserver;
import org.webrtc.SessionDescription;
import org.webrtc.StatsObserver;
import org.webrtc.StatsReport;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoRenderer;
import org.webrtc.VideoRendererGui;
import org.webrtc.VideoSource;
import org.webrtc.VideoTrack;
import java.util.LinkedList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Main Activity of the AppRTCDemo Android app demonstrating interoperability
@ -75,16 +60,12 @@ import java.util.regex.Pattern;
* apprtc.appspot.com demo webapp.
*/
public class AppRTCDemoActivity extends Activity
implements AppRTCClient.IceServersObserver {
private static final String TAG = "AppRTCDemoActivity";
private PeerConnectionFactory factory;
private VideoSource videoSource;
private boolean videoSourceStopped;
private PeerConnection pc;
private final PCObserver pcObserver = new PCObserver();
private final SDPObserver sdpObserver = new SDPObserver();
private final GAEChannelClient.MessageHandler gaeHandler = new GAEHandler();
private AppRTCClient appRtcClient = new AppRTCClient(this, gaeHandler, this);
implements AppRTCClient.AppRTCSignalingEvents,
PeerConnectionClient.PeerConnectionEvents {
private static final String TAG = "AppRTCClient";
private PeerConnectionClient pc;
private AppRTCClient appRtcClient = new GAERTCClient(this, this);
private AppRTCSignalingParameters appRtcParameters;
private AppRTCGLView vsv;
private VideoRenderer.Callbacks localRender;
private VideoRenderer.Callbacks remoteRender;
@ -92,11 +73,8 @@ public class AppRTCDemoActivity extends Activity
private final LayoutParams hudLayout =
new LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
private TextView hudView;
private LinkedList<IceCandidate> queuedRemoteCandidates =
new LinkedList<IceCandidate>();
// Synchronize on quit[0] to avoid teardown-related crashes.
private final Boolean[] quit = new Boolean[] { false };
private MediaConstraints sdpMediaConstraints;
@Override
public void onCreate(Bundle savedInstanceState) {
@ -115,7 +93,7 @@ public class AppRTCDemoActivity extends Activity
VideoRendererGui.setView(vsv);
remoteRender = VideoRendererGui.create(0, 0, 100, 100,
VideoRendererGui.ScalingType.SCALE_ASPECT_FIT);
localRender = VideoRendererGui.create(70, 5, 25, 25,
localRender = VideoRendererGui.create(0, 0, 100, 100,
VideoRendererGui.ScalingType.SCALE_ASPECT_FIT);
vsv.setOnClickListener(new View.OnClickListener() {
@ -144,12 +122,6 @@ public class AppRTCDemoActivity extends Activity
AudioManager.MODE_IN_CALL : AudioManager.MODE_IN_COMMUNICATION);
audioManager.setSpeakerphoneOn(!isWiredHeadsetOn);
sdpMediaConstraints = new MediaConstraints();
sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
"OfferToReceiveAudio", "true"));
sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
"OfferToReceiveVideo", "true"));
final Intent intent = getIntent();
if ("android.intent.action.VIEW".equals(intent.getAction())) {
connectToRoom(intent.getData().toString());
@ -158,13 +130,46 @@ public class AppRTCDemoActivity extends Activity
showGetRoomUI();
}
@Override
public void onPause() {
super.onPause();
vsv.onPause();
if (pc != null) {
pc.stopVideoSource();
}
}
@Override
public void onResume() {
super.onResume();
vsv.onResume();
if (pc != null) {
pc.startVideoSource();
}
}
@Override
public void onConfigurationChanged (Configuration newConfig) {
Point displaySize = new Point();
getWindowManager().getDefaultDisplay().getSize(displaySize);
vsv.updateDisplaySize(displaySize);
super.onConfigurationChanged(newConfig);
}
@Override
protected void onDestroy() {
disconnectAndExit();
super.onDestroy();
}
private void showGetRoomUI() {
final EditText roomInput = new EditText(this);
roomInput.setText("https://apprtc.appspot.com/?r=");
roomInput.setSelection(roomInput.getText().length());
DialogInterface.OnClickListener listener =
new DialogInterface.OnClickListener() {
@Override public void onClick(DialogInterface dialog, int which) {
@Override
public void onClick(DialogInterface dialog, int which) {
abortUnless(which == DialogInterface.BUTTON_POSITIVE, "lolwat?");
dialog.dismiss();
connectToRoom(roomInput.getText().toString());
@ -237,66 +242,56 @@ public class AppRTCDemoActivity extends Activity
return activeConnectionbuilder.toString();
}
@Override
public void onPause() {
super.onPause();
vsv.onPause();
if (videoSource != null) {
videoSource.stop();
videoSourceStopped = true;
// Disconnect from remote resources, dispose of local resources, and exit.
private void disconnectAndExit() {
synchronized (quit[0]) {
if (quit[0]) {
return;
}
quit[0] = true;
if (pc != null) {
pc.close();
pc = null;
}
if (appRtcClient != null) {
appRtcClient.disconnect();
appRtcClient = null;
}
finish();
}
}
@Override
public void onResume() {
super.onResume();
vsv.onResume();
if (videoSource != null && videoSourceStopped) {
videoSource.restart();
// Poor-man's assert(): die with |msg| unless |condition| is true.
private static void abortUnless(boolean condition, String msg) {
if (!condition) {
throw new RuntimeException(msg);
}
}
@Override
public void onConfigurationChanged (Configuration newConfig) {
Point displaySize = new Point();
getWindowManager().getDefaultDisplay().getSize(displaySize);
vsv.updateDisplaySize(displaySize);
super.onConfigurationChanged(newConfig);
}
// Just for fun (and to regression-test bug 2302) make sure that DataChannels
// can be created, queried, and disposed.
private static void createDataChannelToRegressionTestBug2302(
PeerConnection pc) {
DataChannel dc = pc.createDataChannel("dcLabel", new DataChannel.Init());
abortUnless("dcLabel".equals(dc.label()), "Unexpected label corruption?");
dc.close();
dc.dispose();
// Log |msg| and Toast about it. Cancels any toast still on screen so
// messages replace each other instead of queueing up.
private void logAndToast(String msg) {
Log.d(TAG, msg);
if (logToast != null) {
logToast.cancel();
}
logToast = Toast.makeText(this, msg, Toast.LENGTH_SHORT);
logToast.show();
}
// -----Implementation of AppRTCClient.AppRTCSignalingEvents ---------------
// All events are called from UI thread.
@Override
public void onIceServers(List<PeerConnection.IceServer> iceServers) {
public void onConnectedToRoom(final AppRTCSignalingParameters params) {
appRtcParameters = params;
abortUnless(PeerConnectionFactory.initializeAndroidGlobals(
this, true, true, VideoRendererGui.getEGLContext()),
"Failed to initializeAndroidGlobals");
factory = new PeerConnectionFactory();
MediaConstraints pcConstraints = appRtcClient.pcConstraints();
pcConstraints.optional.add(
new MediaConstraints.KeyValuePair("RtpDataChannels", "true"));
pc = factory.createPeerConnection(iceServers, pcConstraints, pcObserver);
createDataChannelToRegressionTestBug2302(pc); // See method comment.
// Uncomment to get ALL WebRTC tracing and SENSITIVE libjingle logging.
// NOTE: this _must_ happen while |factory| is alive!
// Logging.enableTracing(
// "logcat:",
// EnumSet.of(Logging.TraceLevel.TRACE_ALL),
// Logging.Severity.LS_SENSITIVE);
logAndToast("Creating peer connection...");
pc = new PeerConnectionClient(
this, localRender, remoteRender, appRtcParameters, this);
{
final PeerConnection finalPC = pc;
final PeerConnectionClient finalPC = pc;
final Runnable repeatedStatsLogger = new Runnable() {
public void run() {
synchronized (quit[0]) {
@ -330,367 +325,66 @@ public class AppRTCDemoActivity extends Activity
vsv.postDelayed(repeatedStatsLogger, 1000);
}
{
logAndToast("Creating local video source...");
MediaStream lMS = factory.createLocalMediaStream("ARDAMS");
if (appRtcClient.videoConstraints() != null) {
VideoCapturer capturer = getVideoCapturer();
videoSource = factory.createVideoSource(
capturer, appRtcClient.videoConstraints());
VideoTrack videoTrack =
factory.createVideoTrack("ARDAMSv0", videoSource);
videoTrack.addRenderer(new VideoRenderer(localRender));
lMS.addTrack(videoTrack);
}
if (appRtcClient.audioConstraints() != null) {
lMS.addTrack(factory.createAudioTrack(
"ARDAMSa0",
factory.createAudioSource(appRtcClient.audioConstraints())));
}
pc.addStream(lMS, new MediaConstraints());
}
logAndToast("Waiting for ICE candidates...");
}
// Cycle through likely device names for the camera and return the first
// capturer that works, or crash if none do.
private VideoCapturer getVideoCapturer() {
String[] cameraFacing = { "front", "back" };
int[] cameraIndex = { 0, 1 };
int[] cameraOrientation = { 0, 90, 180, 270 };
for (String facing : cameraFacing) {
for (int index : cameraIndex) {
for (int orientation : cameraOrientation) {
String name = "Camera " + index + ", Facing " + facing +
", Orientation " + orientation;
VideoCapturer capturer = VideoCapturer.create(name);
if (capturer != null) {
logAndToast("Using camera: " + name);
return capturer;
}
}
}
}
throw new RuntimeException("Failed to open capturer");
logAndToast("Waiting for remote connection...");
}
@Override
protected void onDestroy() {
public void onChannelOpen() {
if (appRtcParameters.initiator) {
logAndToast("Creating OFFER...");
// Create offer. Offer SDP will be sent to answering client in
// PeerConnectionEvents.onLocalDescription event.
pc.createOffer();
}
}
@Override
public void onRemoteDescription(final SessionDescription sdp) {
logAndToast("Received remote " + sdp.type + " ...");
pc.setRemoteDescription(sdp);
if (!appRtcParameters.initiator) {
logAndToast("Creating ANSWER...");
// Create answer. Answer SDP will be sent to offering client in
// PeerConnectionEvents.onLocalDescription event.
pc.createAnswer();
}
}
@Override
public void onRemoteIceCandidate(final IceCandidate candidate) {
pc.addRemoteIceCandidate(candidate);
}
@Override
public void onChannelClose() {
logAndToast("Remote end hung up; dropping PeerConnection");
disconnectAndExit();
super.onDestroy();
}
// Poor-man's assert(): die with |msg| unless |condition| is true.
private static void abortUnless(boolean condition, String msg) {
if (!condition) {
throw new RuntimeException(msg);
}
@Override
public void onChannelError(int code, String description) {
logAndToast("Channel error: " + code + ". " + description);
disconnectAndExit();
}
// Log |msg| and Toast about it.
private void logAndToast(String msg) {
Log.d(TAG, msg);
if (logToast != null) {
logToast.cancel();
}
logToast = Toast.makeText(this, msg, Toast.LENGTH_SHORT);
logToast.show();
// -----Implementation of PeerConnectionClient.PeerConnectionEvents.---------
// Send local peer connection SDP and ICE candidates to remote party.
// All callbacks are invoked from UI thread.
@Override
public void onLocalDescription(final SessionDescription sdp) {
logAndToast("Sending " + sdp.type + " ...");
appRtcClient.sendLocalDescription(sdp);
}
// Send |json| to the underlying AppEngine Channel.
private void sendMessage(JSONObject json) {
appRtcClient.sendMessage(json.toString());
@Override
public void onIceCandidate(final IceCandidate candidate) {
appRtcClient.sendLocalIceCandidate(candidate);
}
// Put a |key|->|value| mapping in |json|.
private static void jsonPut(JSONObject json, String key, Object value) {
try {
json.put(key, value);
} catch (JSONException e) {
throw new RuntimeException(e);
}
@Override
public void onIceConnected() {
logAndToast("ICE connected");
VideoRendererGui.update(localRender, 70, 70, 28, 28,
VideoRendererGui.ScalingType.SCALE_ASPECT_FIT);
}
// Mangle SDP to prefer ISAC/16000 over any other audio codec: move its
// payload type to the front of the m=audio format list. Returns the input
// unchanged if there is no m=audio line or no ISAC/16000 rtpmap.
private static String preferISAC(String sdpDescription) {
String[] lines = sdpDescription.split("\r\n");
int mLineIndex = -1;
String isac16kRtpMap = null;
Pattern isac16kPattern =
Pattern.compile("^a=rtpmap:(\\d+) ISAC/16000[\r]?$");
// Single pass that stops early once BOTH the m=audio line index and the
// ISAC payload type have been found.
for (int i = 0;
(i < lines.length) && (mLineIndex == -1 || isac16kRtpMap == null);
++i) {
if (lines[i].startsWith("m=audio ")) {
mLineIndex = i;
continue;
}
Matcher isac16kMatcher = isac16kPattern.matcher(lines[i]);
if (isac16kMatcher.matches()) {
// group(1) is the dynamic payload type number for ISAC/16000.
isac16kRtpMap = isac16kMatcher.group(1);
continue;
}
}
if (mLineIndex == -1) {
Log.d(TAG, "No m=audio line, so can't prefer iSAC");
return sdpDescription;
}
if (isac16kRtpMap == null) {
Log.d(TAG, "No ISAC/16000 line, so can't prefer iSAC");
return sdpDescription;
}
// Rebuild the m=audio line with the ISAC payload type listed first.
String[] origMLineParts = lines[mLineIndex].split(" ");
StringBuilder newMLine = new StringBuilder();
int origPartIndex = 0;
// Format is: m=<media> <port> <proto> <fmt> ...
newMLine.append(origMLineParts[origPartIndex++]).append(" ");
newMLine.append(origMLineParts[origPartIndex++]).append(" ");
newMLine.append(origMLineParts[origPartIndex++]).append(" ");
newMLine.append(isac16kRtpMap);
// Append the remaining payload types, skipping ISAC's (already first).
for (; origPartIndex < origMLineParts.length; ++origPartIndex) {
if (!origMLineParts[origPartIndex].equals(isac16kRtpMap)) {
newMLine.append(" ").append(origMLineParts[origPartIndex]);
}
}
lines[mLineIndex] = newMLine.toString();
// Re-join with CRLF; note this appends a trailing "\r\n".
StringBuilder newSdpDescription = new StringBuilder();
for (String line : lines) {
newSdpDescription.append(line).append("\r\n");
}
return newSdpDescription.toString();
}
// Implementation detail: observe ICE & stream changes and react accordingly.
// PeerConnection callbacks arrive on a signaling thread, so every handler
// that touches UI/signaling state hops to the UI thread via runOnUiThread.
private class PCObserver implements PeerConnection.Observer {
// Serialize each local ICE candidate into the AppRTC JSON message format
// and send it to the room's channel.
@Override public void onIceCandidate(final IceCandidate candidate){
runOnUiThread(new Runnable() {
public void run() {
JSONObject json = new JSONObject();
jsonPut(json, "type", "candidate");
jsonPut(json, "label", candidate.sdpMLineIndex);
jsonPut(json, "id", candidate.sdpMid);
jsonPut(json, "candidate", candidate.sdp);
sendMessage(json);
}
});
}
// Any PeerConnection error is fatal for this demo app.
@Override public void onError(){
runOnUiThread(new Runnable() {
public void run() {
throw new RuntimeException("PeerConnection error!");
}
});
}
// State-change notifications are intentionally ignored.
@Override public void onSignalingChange(
PeerConnection.SignalingState newState) {
}
@Override public void onIceConnectionChange(
PeerConnection.IceConnectionState newState) {
}
@Override public void onIceGatheringChange(
PeerConnection.IceGatheringState newState) {
}
// Attach the remote video track (at most one expected) to the remote
// renderer when the remote stream arrives.
@Override public void onAddStream(final MediaStream stream){
runOnUiThread(new Runnable() {
public void run() {
abortUnless(stream.audioTracks.size() <= 1 &&
stream.videoTracks.size() <= 1,
"Weird-looking stream: " + stream);
if (stream.videoTracks.size() == 1) {
stream.videoTracks.get(0).addRenderer(
new VideoRenderer(remoteRender));
}
}
});
}
// Dispose the remote video track when the remote stream goes away.
@Override public void onRemoveStream(final MediaStream stream){
runOnUiThread(new Runnable() {
public void run() {
stream.videoTracks.get(0).dispose();
}
});
}
// AppRTC's protocol never opens data channels; treat one as fatal.
@Override public void onDataChannel(final DataChannel dc) {
runOnUiThread(new Runnable() {
public void run() {
throw new RuntimeException(
"AppRTC doesn't use data channels, but got: " + dc.label() +
" anyway!");
}
});
}
@Override public void onRenegotiationNeeded() {
// No need to do anything; AppRTC follows a pre-agreed-upon
// signaling/negotiation protocol.
}
}
// Implementation detail: handle offer creation/signaling and answer setting,
// as well as adding remote ICE candidates once the answer SDP is set.
private class SDPObserver implements SdpObserver {
  // The local offer or answer, captured in onCreateSuccess(); null until
  // then (also used to assert SDP is created at most once per call).
  private SessionDescription localSdp;

  // createOffer()/createAnswer() succeeded: mangle the SDP to prefer ISAC,
  // remember it, and apply it as the local description on the UI thread.
  @Override public void onCreateSuccess(final SessionDescription origSdp) {
    abortUnless(localSdp == null, "multiple SDP create?!?");
    final SessionDescription sdp = new SessionDescription(
        origSdp.type, preferISAC(origSdp.description));
    localSdp = sdp;
    runOnUiThread(new Runnable() {
      public void run() {
        pc.setLocalDescription(sdpObserver, sdp);
      }
    });
  }

  // Helper for sending local SDP (offer or answer, depending on role) to the
  // other participant. Note that it is important to send the output of
  // create{Offer,Answer} and not merely the current value of
  // getLocalDescription() because the latter may include ICE candidates that
  // we might want to filter elsewhere.
  private void sendLocalDescription() {
    logAndToast("Sending " + localSdp.type);
    JSONObject json = new JSONObject();
    jsonPut(json, "type", localSdp.type.canonicalForm());
    jsonPut(json, "sdp", localSdp.description);
    sendMessage(json);
  }

  // Fired after every successful setLocal/RemoteDescription(). The next
  // action depends on our role and which descriptions are already set, so
  // this method encodes the whole offer/answer state machine.
  @Override public void onSetSuccess() {
    runOnUiThread(new Runnable() {
      public void run() {
        if (appRtcClient.isInitiator()) {
          if (pc.getRemoteDescription() != null) {
            // We've set our local offer and received & set the remote
            // answer, so drain candidates.
            drainRemoteCandidates();
          } else {
            // We've just set our local description so time to send it.
            sendLocalDescription();
          }
        } else {
          if (pc.getLocalDescription() == null) {
            // We just set the remote offer, time to create our answer.
            logAndToast("Creating answer");
            pc.createAnswer(SDPObserver.this, sdpMediaConstraints);
          } else {
            // Answer now set as local description; send it and drain
            // candidates.
            sendLocalDescription();
            drainRemoteCandidates();
          }
        }
      }
    });
  }

  // SDP creation/application failures are fatal; crash on the UI thread.
  @Override public void onCreateFailure(final String error) {
    runOnUiThread(new Runnable() {
      public void run() {
        throw new RuntimeException("createSDP error: " + error);
      }
    });
  }

  @Override public void onSetFailure(final String error) {
    runOnUiThread(new Runnable() {
      public void run() {
        throw new RuntimeException("setSDP error: " + error);
      }
    });
  }

  // Hand any ICE candidates that arrived before negotiation finished to the
  // PeerConnection, then null the queue so future candidates are added
  // directly (see the candidate handling in GAEHandler.onMessage()).
  private void drainRemoteCandidates() {
    for (IceCandidate candidate : queuedRemoteCandidates) {
      pc.addIceCandidate(candidate);
    }
    queuedRemoteCandidates = null;
  }
}
// Implementation detail: handler for receiving GAE messages and dispatching
// them appropriately.
private class GAEHandler implements GAEChannelClient.MessageHandler {
  // Channel is open: the initiator kicks off the offer; the callee just
  // waits for the remote offer to arrive via onMessage().
  @JavascriptInterface public void onOpen() {
    if (!appRtcClient.isInitiator()) {
      return;
    }
    logAndToast("Creating offer...");
    pc.createOffer(sdpObserver, sdpMediaConstraints);
  }

  // Dispatch one incoming signaling message: "candidate" (remote ICE),
  // "offer"/"answer" (remote SDP), or "bye" (remote hangup). Anything else
  // is a protocol violation.
  @JavascriptInterface public void onMessage(String data) {
    try {
      JSONObject json = new JSONObject(data);
      String type = (String) json.get("type");
      if (type.equals("candidate")) {
        IceCandidate candidate = new IceCandidate(
            (String) json.get("id"),
            json.getInt("label"),
            (String) json.get("candidate"));
        // Queue candidates until SDP negotiation completes; afterwards the
        // queue is nulled and candidates go straight to the PeerConnection.
        if (queuedRemoteCandidates != null) {
          queuedRemoteCandidates.add(candidate);
        } else {
          pc.addIceCandidate(candidate);
        }
      } else if (type.equals("answer") || type.equals("offer")) {
        // Remote SDP is mangled to prefer ISAC before being applied.
        SessionDescription sdp = new SessionDescription(
            SessionDescription.Type.fromCanonicalForm(type),
            preferISAC((String) json.get("sdp")));
        pc.setRemoteDescription(sdpObserver, sdp);
      } else if (type.equals("bye")) {
        logAndToast("Remote end hung up; dropping PeerConnection");
        disconnectAndExit();
      } else {
        throw new RuntimeException("Unexpected message: " + data);
      }
    } catch (JSONException e) {
      throw new RuntimeException(e);
    }
  }

  @JavascriptInterface public void onClose() {
    disconnectAndExit();
  }

  // NOTE(review): channel errors tear the call down exactly like a clean
  // close; |code| and |description| are dropped, not surfaced to the user.
  @JavascriptInterface public void onError(int code, String description) {
    disconnectAndExit();
  }
}
// Disconnect from remote resources, dispose of local resources, and exit.
// Idempotent: the |quit| flag ensures the teardown runs at most once even if
// invoked concurrently from channel callbacks and UI events.
private void disconnectAndExit() {
  // Lock on the array itself, whose identity is stable. The original code
  // synchronized on quit[0] - a boxed Boolean that is reassigned below - so
  // two callers could acquire *different* monitors (Boolean.FALSE vs.
  // Boolean.TRUE) and both run the teardown.
  synchronized (quit) {
    if (quit[0]) {
      return;
    }
    quit[0] = true;
    if (pc != null) {
      pc.dispose();
      pc = null;
    }
    if (appRtcClient != null) {
      // Tell the other side we are leaving before closing the channel.
      appRtcClient.sendMessage("{\"type\": \"bye\"}");
      appRtcClient.disconnect();
      appRtcClient = null;
    }
    if (videoSource != null) {
      videoSource.dispose();
      videoSource = null;
    }
    if (factory != null) {
      // Factory is disposed last, after everything it created.
      factory.dispose();
      factory = null;
    }
    finish();
  }
}
}

View File

@ -45,7 +45,7 @@ import android.webkit.WebViewClient;
* "androidMessageHandler".
*/
public class GAEChannelClient {
private static final String TAG = "GAEChannelClient";
private static final String TAG = "GAERTCClient";
private WebView webView;
private final ProxyingMessageHandler proxyingMessageHandler;
@ -55,7 +55,7 @@ public class GAEChannelClient {
* Methods are guaranteed to be invoked on the UI thread of |activity| passed
* to GAEChannelClient's constructor.
*/
public interface MessageHandler {
public interface GAEMessageHandler {
public void onOpen();
public void onMessage(String data);
public void onClose();
@ -65,7 +65,7 @@ public class GAEChannelClient {
/** Asynchronously open an AppEngine channel. */
@SuppressLint("SetJavaScriptEnabled")
public GAEChannelClient(
Activity activity, String token, MessageHandler handler) {
Activity activity, String token, GAEMessageHandler handler) {
webView = new WebView(activity);
webView.getSettings().setJavaScriptEnabled(true);
webView.setWebChromeClient(new WebChromeClient() { // Purely for debugging.
@ -105,12 +105,12 @@ public class GAEChannelClient {
// (private, background) thread to the Activity's UI thread.
private static class ProxyingMessageHandler {
private final Activity activity;
private final MessageHandler handler;
private final GAEMessageHandler handler;
private final boolean[] disconnected = { false };
private final String token;
public
ProxyingMessageHandler(Activity activity, MessageHandler handler,
ProxyingMessageHandler(Activity activity, GAEMessageHandler handler,
String token) {
this.activity = activity;
this.handler = handler;

View File

@ -0,0 +1,487 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.appspot.apprtc;
import android.app.Activity;
import android.os.AsyncTask;
import android.util.Log;
import android.webkit.JavascriptInterface;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.webrtc.IceCandidate;
import org.webrtc.MediaConstraints;
import org.webrtc.PeerConnection;
import org.webrtc.SessionDescription;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLConnection;
import java.util.LinkedList;
import java.util.Scanner;
/**
 * Negotiates signaling for chatting with apprtc.appspot.com "rooms".
 * Uses the client<->server specifics of the apprtc AppEngine webapp.
 *
 * To use: create an instance of this object (registering a message handler)
 * and call connectToRoom(). Once the room connection is established, the
 * onConnectedToRoom() callback is invoked with the room parameters. Messages
 * to the other party (carrying local Ice candidates and SDP) can be sent
 * after the GAE channel is opened and the onChannelOpen() callback is
 * invoked.
 */
public class GAERTCClient implements AppRTCClient {
  private static final String TAG = "GAERTCClient";
  private GAEChannelClient channelClient;
  private final Activity activity;
  private AppRTCClient.AppRTCSignalingEvents events;
  private final GAEChannelClient.GAEMessageHandler gaeHandler =
      new GAEHandler();
  private AppRTCClient.AppRTCSignalingParameters appRTCSignalingParameters;
  // Parsed from the room's JSON response: written on RoomParameterGetter's
  // background thread, consumed in onPostExecute()/maybeDrainQueue().
  private String gaeBaseHref;
  private String channelToken;
  private String postMessageUrl;
  // Messages queued until the room parameters are known; guarded by its own
  // monitor. Never reassigned, so it can be final.
  private final LinkedList<String> sendQueue = new LinkedList<String>();

  public GAERTCClient(Activity activity,
      AppRTCClient.AppRTCSignalingEvents events) {
    this.activity = activity;
    this.events = events;
  }

  /**
   * Asynchronously connect to an AppRTC room URL, e.g.
   * https://apprtc.appspot.com/?r=NNN and register message-handling callbacks
   * on its GAE Channel.
   */
  @Override
  public void connectToRoom(String url) {
    if (url.indexOf('?') < 0) {
      // No room number yet: resolve one redirect; RedirectResolver calls
      // connectToRoom() again with the next URL. (The original "while" loop
      // always returned on its first iteration, so it was an "if" in
      // disguise.)
      (new RedirectResolver()).execute(url);
      return;
    }
    (new RoomParameterGetter()).execute(url);
  }

  /**
   * Disconnect from the GAE Channel, telling the other side we are leaving.
   */
  @Override
  public void disconnect() {
    if (channelClient != null) {
      sendMessage("{\"type\": \"bye\"}");
      channelClient.close();
      channelClient = null;
    }
  }

  /**
   * Send local SDP (offer or answer, depending on role) to the
   * other participant. Note that it is important to send the output of
   * create{Offer,Answer} and not merely the current value of
   * getLocalDescription() because the latter may include ICE candidates that
   * we might want to filter elsewhere.
   */
  @Override
  public void sendLocalDescription(final SessionDescription sdp) {
    JSONObject json = new JSONObject();
    jsonPut(json, "type", sdp.type.canonicalForm());
    jsonPut(json, "sdp", sdp.description);
    sendMessage(json.toString());
  }

  /**
   * Send Ice candidate to the other participant.
   */
  @Override
  public void sendLocalIceCandidate(final IceCandidate candidate) {
    JSONObject json = new JSONObject();
    jsonPut(json, "type", "candidate");
    jsonPut(json, "label", candidate.sdpMLineIndex);
    jsonPut(json, "id", candidate.sdpMid);
    jsonPut(json, "candidate", candidate.sdp);
    sendMessage(json.toString());
  }

  // Queue a message for sending to the room's channel and send it if already
  // connected (otherwise queued messages are drained when the channel is
  // eventually established). The queue's own monitor is sufficient here; the
  // original additionally declared the method synchronized, which locked
  // |this| redundantly.
  private void sendMessage(String msg) {
    synchronized (sendQueue) {
      sendQueue.add(msg);
    }
    requestQueueDrainInBackground();
  }

  // Put a |key|->|value| mapping in |json|.
  private static void jsonPut(JSONObject json, String key, Object value) {
    try {
      json.put(key, value);
    } catch (JSONException e) {
      throw new RuntimeException(e);
    }
  }

  // Load the given URL and return the value of the Location header of the
  // resulting 302 response. If the result is not a 302, throws.
  private class RedirectResolver extends AsyncTask<String, Void, String> {
    @Override
    protected String doInBackground(String... urls) {
      if (urls.length != 1) {
        throw new RuntimeException("Must be called with a single URL");
      }
      try {
        return followRedirect(urls[0]);
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
    }

    @Override
    protected void onPostExecute(String url) {
      // Back on the UI thread: retry connection with the redirected URL.
      connectToRoom(url);
    }

    // Issue a non-following GET for |url| and return its Location header.
    private String followRedirect(String url) throws IOException {
      HttpURLConnection connection = (HttpURLConnection)
          new URL(url).openConnection();
      connection.setInstanceFollowRedirects(false);
      int code = connection.getResponseCode();
      if (code != HttpURLConnection.HTTP_MOVED_TEMP) {
        throw new IOException("Unexpected response: " + code + " for " + url +
            ", with contents: " + drainStream(connection.getInputStream()));
      }
      int n = 0;
      String name, value;
      while ((name = connection.getHeaderFieldKey(n)) != null) {
        value = connection.getHeaderField(n);
        if (name.equals("Location")) {
          return value;
        }
        ++n;
      }
      throw new IOException("Didn't find Location header!");
    }
  }

  // AsyncTask that converts an AppRTC room URL into the set of signaling
  // parameters to use with that room.
  private class RoomParameterGetter
      extends AsyncTask<String, Void, AppRTCSignalingParameters> {
    @Override
    protected AppRTCSignalingParameters doInBackground(String... urls) {
      if (urls.length != 1) {
        throw new RuntimeException("Must be called with a single URL");
      }
      try {
        return getParametersForRoomUrl(urls[0]);
      } catch (JSONException e) {
        throw new RuntimeException(e);
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
    }

    @Override
    protected void onPostExecute(AppRTCSignalingParameters params) {
      // NOTE(review): |channelToken| is written on the background thread in
      // getParametersForRoomUrl(); this relies on AsyncTask's
      // doInBackground()->onPostExecute() ordering guarantee - confirm.
      channelClient =
          new GAEChannelClient(activity, channelToken, gaeHandler);
      synchronized (sendQueue) {
        appRTCSignalingParameters = params;
      }
      requestQueueDrainInBackground();
      events.onConnectedToRoom(appRTCSignalingParameters);
    }

    // Fetches |url| and fishes the signaling parameters out of the JSON.
    private AppRTCSignalingParameters getParametersForRoomUrl(String url)
        throws IOException, JSONException {
      url = url + "&t=json";
      String response =
          drainStream((new URL(url)).openConnection().getInputStream());
      Log.d(TAG, "Room response: " + response);
      JSONObject roomJson = new JSONObject(response);
      if (roomJson.has("error")) {
        JSONArray errors = roomJson.getJSONArray("error_messages");
        throw new IOException(errors.toString());
      }
      gaeBaseHref = url.substring(0, url.indexOf('?'));
      channelToken = roomJson.getString("token");
      postMessageUrl = "/message?r=" +
          roomJson.getString("room_key") + "&u=" +
          roomJson.getString("me");
      boolean initiator = roomJson.getInt("initiator") == 1;
      LinkedList<PeerConnection.IceServer> iceServers =
          iceServersFromPCConfigJSON(roomJson.getString("pc_config"));

      // The room config may omit a TURN server; if so, request one from the
      // room's turn_url so calls can traverse symmetric NATs.
      boolean isTurnPresent = false;
      for (PeerConnection.IceServer server : iceServers) {
        Log.d(TAG, "IceServer: " + server);
        if (server.uri.startsWith("turn:")) {
          isTurnPresent = true;
          break;
        }
      }
      if (!isTurnPresent) {
        PeerConnection.IceServer server =
            requestTurnServer(roomJson.getString("turn_url"));
        Log.d(TAG, "TurnServer: " + server);
        iceServers.add(server);
      }

      MediaConstraints pcConstraints = constraintsFromJSON(
          roomJson.getString("pc_constraints"));
      addDTLSConstraintIfMissing(pcConstraints);
      Log.d(TAG, "pcConstraints: " + pcConstraints);
      MediaConstraints videoConstraints = constraintsFromJSON(
          getAVConstraints("video",
              roomJson.getString("media_constraints")));
      Log.d(TAG, "videoConstraints: " + videoConstraints);
      MediaConstraints audioConstraints = constraintsFromJSON(
          getAVConstraints("audio",
              roomJson.getString("media_constraints")));
      Log.d(TAG, "audioConstraints: " + audioConstraints);

      return new AppRTCSignalingParameters(
          iceServers, initiator,
          pcConstraints, videoConstraints, audioConstraints);
    }

    // Mimic Chrome and set DtlsSrtpKeyAgreement to true if not set to false by
    // the web-app.
    private void addDTLSConstraintIfMissing(
        MediaConstraints pcConstraints) {
      for (MediaConstraints.KeyValuePair pair : pcConstraints.mandatory) {
        if (pair.getKey().equals("DtlsSrtpKeyAgreement")) {
          return;
        }
      }
      for (MediaConstraints.KeyValuePair pair : pcConstraints.optional) {
        if (pair.getKey().equals("DtlsSrtpKeyAgreement")) {
          return;
        }
      }
      // DTLS isn't being suppressed (e.g. for debug=loopback calls), so enable
      // it by default.
      pcConstraints.optional.add(
          new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
    }

    // Return the constraints specified for |type| of "audio" or "video" in
    // |mediaConstraintsString|.
    private String getAVConstraints(
        String type, String mediaConstraintsString) {
      try {
        JSONObject json = new JSONObject(mediaConstraintsString);
        // Tricksy handling of values that are allowed to be (boolean or
        // MediaTrackConstraints) by the getUserMedia() spec. There are three
        // cases below.
        if (!json.has(type) || !json.optBoolean(type, true)) {
          // Case 1: "audio"/"video" is not present, or is an explicit "false"
          // boolean.
          return null;
        }
        if (json.optBoolean(type, false)) {
          // Case 2: "audio"/"video" is an explicit "true" boolean.
          return "{\"mandatory\": {}, \"optional\": []}";
        }
        // Case 3: "audio"/"video" is an object.
        return json.getJSONObject(type).toString();
      } catch (JSONException e) {
        throw new RuntimeException(e);
      }
    }

    // Parse a getUserMedia-style constraints JSON string ({"mandatory": {...},
    // "optional": [...]}) into a MediaConstraints object; null in, null out.
    private MediaConstraints constraintsFromJSON(String jsonString) {
      if (jsonString == null) {
        return null;
      }
      try {
        MediaConstraints constraints = new MediaConstraints();
        JSONObject json = new JSONObject(jsonString);
        JSONObject mandatoryJSON = json.optJSONObject("mandatory");
        if (mandatoryJSON != null) {
          JSONArray mandatoryKeys = mandatoryJSON.names();
          if (mandatoryKeys != null) {
            for (int i = 0; i < mandatoryKeys.length(); ++i) {
              String key = mandatoryKeys.getString(i);
              String value = mandatoryJSON.getString(key);
              constraints.mandatory.add(
                  new MediaConstraints.KeyValuePair(key, value));
            }
          }
        }
        JSONArray optionalJSON = json.optJSONArray("optional");
        if (optionalJSON != null) {
          for (int i = 0; i < optionalJSON.length(); ++i) {
            JSONObject keyValueDict = optionalJSON.getJSONObject(i);
            String key = keyValueDict.names().getString(0);
            String value = keyValueDict.getString(key);
            constraints.optional.add(
                new MediaConstraints.KeyValuePair(key, value));
          }
        }
        return constraints;
      } catch (JSONException e) {
        throw new RuntimeException(e);
      }
    }

    // Requests & returns a TURN ICE Server based on a request URL. Must be run
    // off the main thread!
    private PeerConnection.IceServer requestTurnServer(String url) {
      try {
        URLConnection connection = (new URL(url)).openConnection();
        connection.addRequestProperty("user-agent", "Mozilla/5.0");
        connection.addRequestProperty("origin", "https://apprtc.appspot.com");
        String response = drainStream(connection.getInputStream());
        JSONObject responseJSON = new JSONObject(response);
        String uri = responseJSON.getJSONArray("uris").getString(0);
        String username = responseJSON.getString("username");
        String password = responseJSON.getString("password");
        return new PeerConnection.IceServer(uri, username, password);
      } catch (JSONException e) {
        throw new RuntimeException(e);
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
    }
  }

  // Return the list of ICE servers described by a WebRTCPeerConnection
  // configuration string.
  private LinkedList<PeerConnection.IceServer> iceServersFromPCConfigJSON(
      String pcConfig) {
    try {
      JSONObject json = new JSONObject(pcConfig);
      JSONArray servers = json.getJSONArray("iceServers");
      LinkedList<PeerConnection.IceServer> ret =
          new LinkedList<PeerConnection.IceServer>();
      for (int i = 0; i < servers.length(); ++i) {
        JSONObject server = servers.getJSONObject(i);
        String url = server.getString("urls");
        String credential =
            server.has("credential") ? server.getString("credential") : "";
        ret.add(new PeerConnection.IceServer(url, "", credential));
      }
      return ret;
    } catch (JSONException e) {
      throw new RuntimeException(e);
    }
  }

  // Request an attempt to drain the send queue, on a background thread.
  private void requestQueueDrainInBackground() {
    (new AsyncTask<Void, Void, Void>() {
      @Override
      public Void doInBackground(Void... unused) {
        maybeDrainQueue();
        return null;
      }
    }).execute();
  }

  // Send all queued messages if connected to the room.
  // NOTE(review): the HTTP POSTs below run while holding the |sendQueue|
  // lock, which serializes sends but blocks other producers during network
  // I/O - this preserves the original behavior.
  private void maybeDrainQueue() {
    synchronized (sendQueue) {
      if (appRTCSignalingParameters == null) {
        return;
      }
      try {
        for (String msg : sendQueue) {
          Log.d(TAG, "SEND: " + msg);
          URLConnection connection =
              new URL(gaeBaseHref + postMessageUrl).openConnection();
          connection.setDoOutput(true);
          connection.getOutputStream().write(msg.getBytes("UTF-8"));
          if (!connection.getHeaderField(null).startsWith("HTTP/1.1 200 ")) {
            throw new IOException(
                "Non-200 response to POST: " + connection.getHeaderField(null) +
                " for msg: " + msg);
          }
        }
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
      sendQueue.clear();
    }
  }

  // Return the contents of an InputStream as a String. Decodes as UTF-8
  // explicitly (the server responses are JSON) instead of the platform
  // default charset, and closes the Scanner (and thus |in|) when done.
  private static String drainStream(InputStream in) {
    Scanner s = new Scanner(in, "UTF-8").useDelimiter("\\A");
    try {
      return s.hasNext() ? s.next() : "";
    } finally {
      s.close();
    }
  }

  // Implementation detail: handler for receiving GAE messages and dispatching
  // them appropriately.
  private class GAEHandler implements GAEChannelClient.GAEMessageHandler {
    @JavascriptInterface public void onOpen() {
      events.onChannelOpen();
    }

    // Translate one raw channel message into the corresponding signaling
    // event: "candidate", "offer"/"answer", or "bye".
    @JavascriptInterface public void onMessage(String msg) {
      Log.d(TAG, "RECEIVE: " + msg);
      try {
        JSONObject json = new JSONObject(msg);
        String type = (String) json.get("type");
        if (type.equals("candidate")) {
          IceCandidate candidate = new IceCandidate(
              (String) json.get("id"),
              json.getInt("label"),
              (String) json.get("candidate"));
          events.onRemoteIceCandidate(candidate);
        } else if (type.equals("answer") || type.equals("offer")) {
          SessionDescription sdp = new SessionDescription(
              SessionDescription.Type.fromCanonicalForm(type),
              (String)json.get("sdp"));
          events.onRemoteDescription(sdp);
        } else if (type.equals("bye")) {
          events.onChannelClose();
        } else {
          throw new RuntimeException("Unexpected message: " + msg);
        }
      } catch (JSONException e) {
        throw new RuntimeException(e);
      }
    }

    @JavascriptInterface public void onClose() {
      events.onChannelClose();
    }

    @JavascriptInterface public void onError(int code, String description) {
      events.onChannelError(code, description);
    }
  }
}

View File

@ -0,0 +1,445 @@
/*
* libjingle
* Copyright 2014, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.appspot.apprtc;
import android.app.Activity;
import android.util.Log;
import org.appspot.apprtc.AppRTCClient.AppRTCSignalingParameters;
import org.webrtc.DataChannel;
import org.webrtc.IceCandidate;
import org.webrtc.MediaConstraints;
import org.webrtc.MediaStream;
import org.webrtc.MediaStreamTrack;
import org.webrtc.PeerConnection;
import org.webrtc.PeerConnection.IceConnectionState;
import org.webrtc.PeerConnectionFactory;
import org.webrtc.SdpObserver;
import org.webrtc.SessionDescription;
import org.webrtc.StatsObserver;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoRenderer;
import org.webrtc.VideoSource;
import org.webrtc.VideoTrack;
import java.util.LinkedList;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class PeerConnectionClient {
private static final String TAG = "RTCClient";
private final Activity activity;
private PeerConnectionFactory factory;
private PeerConnection pc;
private VideoSource videoSource;
private boolean videoSourceStopped;
private final PCObserver pcObserver = new PCObserver();
private final SDPObserver sdpObserver = new SDPObserver();
private final VideoRenderer.Callbacks remoteRender;
private LinkedList<IceCandidate> queuedRemoteCandidates =
new LinkedList<IceCandidate>();
private MediaConstraints sdpMediaConstraints;
private PeerConnectionEvents events;
private boolean isInitiator;
private SessionDescription localSdp = null; // either offer or answer SDP
/**
 * Creates the peer connection factory, the local media stream (camera and/or
 * microphone per |appRtcParameters| constraints) and the PeerConnection
 * itself, wiring local video into |localRender| and remote video into
 * |remoteRender|. SDP/ICE progress is reported through |events|.
 */
public PeerConnectionClient(
    Activity activity,
    VideoRenderer.Callbacks localRender,
    VideoRenderer.Callbacks remoteRender,
    AppRTCSignalingParameters appRtcParameters,
    PeerConnectionEvents events) {
  this.activity = activity;
  this.remoteRender = remoteRender;
  this.events = events;
  // Role is decided later by createOffer()/createAnswer(). The original
  // code first copied appRtcParameters.initiator and then unconditionally
  // overwrote it with false (dead store); keep only the effective value.
  isInitiator = false;
  sdpMediaConstraints = new MediaConstraints();
  sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
      "OfferToReceiveAudio", "true"));
  sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
      "OfferToReceiveVideo", "true"));
  factory = new PeerConnectionFactory();
  MediaConstraints pcConstraints = appRtcParameters.pcConstraints;
  pcConstraints.optional.add(
      new MediaConstraints.KeyValuePair("RtpDataChannels", "true"));
  pc = factory.createPeerConnection(appRtcParameters.iceServers,
      pcConstraints, pcObserver);
  // Uncomment to get ALL WebRTC tracing and SENSITIVE libjingle logging.
  // NOTE: this _must_ happen while |factory| is alive!
  // Logging.enableTracing(
  //     "logcat:",
  //     EnumSet.of(Logging.TraceLevel.TRACE_ALL),
  //     Logging.Severity.LS_SENSITIVE);
  Log.d(TAG, "Creating local video source");
  MediaStream lMS = factory.createLocalMediaStream("ARDAMS");
  if (appRtcParameters.videoConstraints != null) {
    VideoCapturer capturer = getVideoCapturer();
    videoSource = factory.createVideoSource(
        capturer, appRtcParameters.videoConstraints);
    VideoTrack videoTrack =
        factory.createVideoTrack("ARDAMSv0", videoSource);
    videoTrack.addRenderer(new VideoRenderer(localRender));
    lMS.addTrack(videoTrack);
  }
  if (appRtcParameters.audioConstraints != null) {
    lMS.addTrack(factory.createAudioTrack(
        "ARDAMSa0",
        factory.createAudioSource(appRtcParameters.audioConstraints)));
  }
  pc.addStream(lMS, new MediaConstraints());
}
// Thin delegation to PeerConnection.getStats(); the boolean result is
// whatever the underlying API returns (presumably whether the request was
// accepted - confirm against the org.webrtc.PeerConnection docs).
public boolean getStats(StatsObserver observer, MediaStreamTrack track) {
  return pc.getStats(observer, track);
}
// Begin negotiation as the offering side. Setting |isInitiator| here makes
// the SDPObserver.onSetSuccess() state machine follow the initiator branch.
public void createOffer() {
  isInitiator = true;
  pc.createOffer(sdpObserver, sdpMediaConstraints);
}
// Answer a received offer. Clearing |isInitiator| makes onSetSuccess()
// follow the answering branch of the negotiation state machine.
public void createAnswer() {
  isInitiator = false;
  pc.createAnswer(sdpObserver, sdpMediaConstraints);
}
// Hand a remote ICE candidate to the peer connection, or park it in the
// pending queue while SDP negotiation is still in progress (the queue is
// drained and set to null once negotiation completes).
public void addRemoteIceCandidate(IceCandidate candidate) {
  if (queuedRemoteCandidates == null) {
    pc.addIceCandidate(candidate);
    return;
  }
  queuedRemoteCandidates.add(candidate);
}
// Apply the remote SDP, after mangling it to prefer the ISAC audio codec.
public void setRemoteDescription(SessionDescription sdp) {
  Log.d(TAG, "Set remote SDP");
  SessionDescription mangledSdp =
      new SessionDescription(sdp.type, preferISAC(sdp.description));
  pc.setRemoteDescription(sdpObserver, mangledSdp);
}
// Pause camera capture (e.g. when the Activity goes to the background).
// No-op when there is no video source.
public void stopVideoSource() {
  if (videoSource == null) {
    return;
  }
  videoSource.stop();
  videoSourceStopped = true;
}
// Resume camera capture, but only if it was previously stopped by
// stopVideoSource(). No-op otherwise.
public void startVideoSource() {
  if (videoSource == null || !videoSourceStopped) {
    return;
  }
  videoSource.restart();
  videoSourceStopped = false;
}
// Dispose of native resources: the PeerConnection first, then the video
// source, and the factory last. Each field is nulled after disposal, so
// calling close() more than once is safe.
public void close() {
  if (pc != null) {
    pc.dispose();
    pc = null;
  }
  if (videoSource != null) {
    videoSource.dispose();
    videoSource = null;
  }
  if (factory != null) {
    factory.dispose();
    factory = null;
  }
}
/**
 * SDP/ICE ready callbacks. All callbacks are posted to the UI thread by
 * PCObserver/SDPObserver via activity.runOnUiThread().
 */
public static interface PeerConnectionEvents {
  /**
   * Callback fired once the local SDP (offer or answer) is created and set.
   */
  public void onLocalDescription(final SessionDescription sdp);

  /**
   * Callback fired once local Ice candidate is generated.
   */
  public void onIceCandidate(final IceCandidate candidate);

  /**
   * Callback fired once connection is established (IceConnectionState is
   * CONNECTED).
   */
  public void onIceConnected();
}
// Cycle through likely device names for the camera and return the first
// capturer that works, or crash if none do.
private VideoCapturer getVideoCapturer() {
  String[] facings = { "front", "back" };
  int[] indexes = { 0, 1 };
  int[] orientations = { 0, 90, 180, 270 };
  for (String facing : facings) {
    for (int index : indexes) {
      for (int orientation : orientations) {
        String name = "Camera " + index + ", Facing " + facing +
            ", Orientation " + orientation;
        VideoCapturer capturer = VideoCapturer.create(name);
        if (capturer == null) {
          continue;
        }
        Log.d(TAG, "Using camera: " + name);
        return capturer;
      }
    }
  }
  throw new RuntimeException("Failed to open capturer");
}
// Poor-man's assert(): die with |msg| unless |condition| is true.
private static void abortUnless(boolean condition, String msg) {
  if (condition) {
    return;
  }
  throw new RuntimeException(msg);
}
// Mangle SDP to prefer ISAC/16000 over any other audio codec.
private static String preferISAC(String sdpDescription) {
  String[] lines = sdpDescription.split("\r\n");
  // Single pass: find the audio m-line index and the rtpmap payload number
  // registered for ISAC/16000. The loop stops early once both are found.
  int mLineIndex = -1;
  String isac16kRtpMap = null;
  Pattern isac16kPattern =
      Pattern.compile("^a=rtpmap:(\\d+) ISAC/16000[\r]?$");
  for (int i = 0;
       (i < lines.length) && (mLineIndex == -1 || isac16kRtpMap == null);
       ++i) {
    if (lines[i].startsWith("m=audio ")) {
      mLineIndex = i;
      continue;
    }
    Matcher isac16kMatcher = isac16kPattern.matcher(lines[i]);
    if (isac16kMatcher.matches()) {
      isac16kRtpMap = isac16kMatcher.group(1);
      continue;
    }
  }
  // If either piece is missing, return the SDP unchanged.
  if (mLineIndex == -1) {
    Log.d(TAG, "No m=audio line, so can't prefer iSAC");
    return sdpDescription;
  }
  if (isac16kRtpMap == null) {
    Log.d(TAG, "No ISAC/16000 line, so can't prefer iSAC");
    return sdpDescription;
  }
  // Rebuild the m-line with the ISAC payload type moved to the front of the
  // format list (and deduplicated from its original position).
  String[] origMLineParts = lines[mLineIndex].split(" ");
  StringBuilder newMLine = new StringBuilder();
  int origPartIndex = 0;
  // Format is: m=<media> <port> <proto> <fmt> ...
  newMLine.append(origMLineParts[origPartIndex++]).append(" ");
  newMLine.append(origMLineParts[origPartIndex++]).append(" ");
  newMLine.append(origMLineParts[origPartIndex++]).append(" ");
  newMLine.append(isac16kRtpMap);
  for (; origPartIndex < origMLineParts.length; ++origPartIndex) {
    if (!origMLineParts[origPartIndex].equals(isac16kRtpMap)) {
      newMLine.append(" ").append(origMLineParts[origPartIndex]);
    }
  }
  lines[mLineIndex] = newMLine.toString();
  // Re-join with CRLF. Note this always appends a trailing "\r\n", even if
  // the input had none.
  StringBuilder newSdpDescription = new StringBuilder();
  for (String line : lines) {
    newSdpDescription.append(line).append("\r\n");
  }
  return newSdpDescription.toString();
}
// Flush every queued remote candidate into the peer connection in arrival
// order, then drop the queue so that later candidates are added directly
// (see addRemoteIceCandidate()).
private void drainRemoteCandidates() {
  while (!queuedRemoteCandidates.isEmpty()) {
    pc.addIceCandidate(queuedRemoteCandidates.removeFirst());
  }
  queuedRemoteCandidates = null;
}
// Implementation detail: observe ICE & stream changes and react accordingly.
private class PCObserver implements PeerConnection.Observer {
  // Forward locally-gathered ICE candidates to the client via |events|,
  // posted on the UI thread.
  @Override
  public void onIceCandidate(final IceCandidate candidate){
    activity.runOnUiThread(new Runnable() {
      public void run() {
        events.onIceCandidate(candidate);
      }
    });
  }

  // Fatal: surface PeerConnection errors by crashing on the UI thread.
  @Override
  public void onError(){
    activity.runOnUiThread(new Runnable() {
      public void run() {
        throw new RuntimeException("PeerConnection error!");
      }
    });
  }

  @Override
  public void onSignalingChange(
      PeerConnection.SignalingState newState) {
    Log.d(TAG, "SignalingState: " + newState);
  }

  // Report only the CONNECTED transition to |events|; every state change is
  // logged.
  @Override
  public void onIceConnectionChange(
      PeerConnection.IceConnectionState newState) {
    Log.d(TAG, "IceConnectionState: " + newState);
    if (newState == IceConnectionState.CONNECTED) {
      activity.runOnUiThread(new Runnable() {
        public void run() {
          events.onIceConnected();
        }
      });
    }
  }

  @Override
  public void onIceGatheringChange(
      PeerConnection.IceGatheringState newState) {
  }

  // Remote stream arrived: sanity-check its shape (at most one audio and
  // one video track) and attach its video track, if any, to the remote
  // renderer.
  @Override
  public void onAddStream(final MediaStream stream){
    activity.runOnUiThread(new Runnable() {
      public void run() {
        abortUnless(stream.audioTracks.size() <= 1 &&
            stream.videoTracks.size() <= 1,
            "Weird-looking stream: " + stream);
        if (stream.videoTracks.size() == 1) {
          stream.videoTracks.get(0).addRenderer(
              new VideoRenderer(remoteRender));
        }
      }
    });
  }

  // NOTE(review): assumes the removed stream carries exactly one video
  // track; videoTracks.get(0) would throw otherwise - confirm invariant.
  @Override
  public void onRemoveStream(final MediaStream stream){
    activity.runOnUiThread(new Runnable() {
      public void run() {
        stream.videoTracks.get(0).dispose();
      }
    });
  }

  // AppRTC does not negotiate data channels; treat one as a fatal protocol
  // violation.
  @Override
  public void onDataChannel(final DataChannel dc) {
    activity.runOnUiThread(new Runnable() {
      public void run() {
        throw new RuntimeException(
            "AppRTC doesn't use data channels, but got: " + dc.label() +
            " anyway!");
      }
    });
  }

  @Override
  public void onRenegotiationNeeded() {
    // No need to do anything; AppRTC follows a pre-agreed-upon
    // signaling/negotiation protocol.
  }
}
// Implementation detail: handle offer creation/signaling and answer setting,
// as well as adding remote ICE candidates once the answer SDP is set.
private class SDPObserver implements SdpObserver {
@Override
public void onCreateSuccess(final SessionDescription origSdp) {
abortUnless(localSdp == null, "multiple SDP create?!?");
final SessionDescription sdp = new SessionDescription(
origSdp.type, preferISAC(origSdp.description));
localSdp = sdp;
activity.runOnUiThread(new Runnable() {
public void run() {
Log.d(TAG, "Set local SDP from " + sdp.type);
pc.setLocalDescription(sdpObserver, sdp);
}
});
}
@Override
public void onSetSuccess() {
activity.runOnUiThread(new Runnable() {
public void run() {
if (isInitiator) {
// For offering peer connection we first create offer and set
// local SDP, then after receiving answer set remote SDP.
if (pc.getRemoteDescription() == null) {
// We've just set our local SDP so time to send it.
Log.d(TAG, "Local SDP set succesfully");
events.onLocalDescription(localSdp);
} else {
// We've just set remote description,
// so drain remote ICE candidates.
Log.d(TAG, "Remote SDP set succesfully");
drainRemoteCandidates();
}
} else {
// For answering peer connection we set remote SDP and then
// create answer and set local SDP.
if (pc.getLocalDescription() != null) {
// We've just set our local SDP so time to send it and drain
// remote ICE candidates.
Log.d(TAG, "Local SDP set succesfully");
events.onLocalDescription(localSdp);
drainRemoteCandidates();
} else {
// We've just set remote SDP - do nothing for now -
// answer will be created soon.
Log.d(TAG, "Remote SDP set succesfully");
}
}
}
});
}
@Override
public void onCreateFailure(final String error) {
activity.runOnUiThread(new Runnable() {
public void run() {
throw new RuntimeException("createSDP error: " + error);
}
});
}
@Override
public void onSetFailure(final String error) {
activity.runOnUiThread(new Runnable() {
public void run() {
throw new RuntimeException("setSDP error: " + error);
}
});
}
}
}

View File

@ -325,8 +325,10 @@
'examples/android/src/org/appspot/apprtc/AppRTCClient.java',
'examples/android/src/org/appspot/apprtc/AppRTCDemoActivity.java',
'examples/android/src/org/appspot/apprtc/AppRTCGLView.java',
'examples/android/src/org/appspot/apprtc/UnhandledExceptionHandler.java',
'examples/android/src/org/appspot/apprtc/GAEChannelClient.java',
'examples/android/src/org/appspot/apprtc/GAERTCClient.java',
'examples/android/src/org/appspot/apprtc/PeerConnectionClient.java',
'examples/android/src/org/appspot/apprtc/UnhandledExceptionHandler.java',
],
'outputs': [
'<(PRODUCT_DIR)/AppRTCDemo-debug.apk',