Adds trunk/talk folder of revision 359 from libjingle's Google Code repository
to trunk/talk.


git-svn-id: http://webrtc.googlecode.com/svn/trunk@4318 4adac7df-926f-26a2-2b94-8c16560cd09d
henrike@webrtc.org
2013-07-10 00:45:36 +00:00
parent 6aa6229953
commit 28e2075280
1067 changed files with 275209 additions and 0 deletions


@@ -0,0 +1,432 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.appspot.apprtc;
import android.app.Activity;
import android.os.AsyncTask;
import android.util.Log;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.webrtc.MediaConstraints;
import org.webrtc.PeerConnection;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLConnection;
import java.util.LinkedList;
import java.util.List;
import java.util.Scanner;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Negotiates signaling for chatting with apprtc.appspot.com "rooms".
* Uses the client<->server specifics of the apprtc AppEngine webapp.
*
* To use: create an instance of this object (registering a message handler) and
* call connectToRoom(). Once that's done call sendMessage() and wait for the
* registered handler to be called with received messages.
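*
* A minimal usage sketch (room URL, handler, and observer names below are
* illustrative only):
* <pre>
*   AppRTCClient client = new AppRTCClient(activity, gaeHandler, iceObserver);
*   client.connectToRoom("https://apprtc.appspot.com/?r=12345");
*   // ... once signaling parameters arrive, exchange messages:
*   client.sendMessage("{\"type\": \"bye\"}");
*   client.disconnect();
* </pre>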
*/
public class AppRTCClient {
private static final String TAG = "AppRTCClient";
private GAEChannelClient channelClient;
private final Activity activity;
private final GAEChannelClient.MessageHandler gaeHandler;
private final IceServersObserver iceServersObserver;
// These members are only read/written under sendQueue's lock.
private LinkedList<String> sendQueue = new LinkedList<String>();
private AppRTCSignalingParameters appRTCSignalingParameters;
/**
* Callback fired once the room's signaling parameters specify the set of
* ICE servers to use.
*/
public static interface IceServersObserver {
public void onIceServers(List<PeerConnection.IceServer> iceServers);
}
public AppRTCClient(
Activity activity, GAEChannelClient.MessageHandler gaeHandler,
IceServersObserver iceServersObserver) {
this.activity = activity;
this.gaeHandler = gaeHandler;
this.iceServersObserver = iceServersObserver;
}
/**
* Asynchronously connect to an AppRTC room URL, e.g.
* https://apprtc.appspot.com/?r=NNN and register message-handling callbacks
* on its GAE Channel.
*/
public void connectToRoom(String url) {
while (url.indexOf('?') < 0) {
// Keep redirecting until we get a room number.
(new RedirectResolver()).execute(url);
return; // RedirectResolver above calls us back with the next URL.
}
(new RoomParameterGetter()).execute(url);
}
/**
* Disconnect from the GAE Channel.
*/
public void disconnect() {
if (channelClient != null) {
channelClient.close();
channelClient = null;
}
}
/**
* Queue a message for sending to the room's channel and send it if already
* connected (otherwise queued messages are drained when the channel is
* eventually established).
*/
public synchronized void sendMessage(String msg) {
synchronized (sendQueue) {
sendQueue.add(msg);
}
requestQueueDrainInBackground();
}
public boolean isInitiator() {
return appRTCSignalingParameters.initiator;
}
public MediaConstraints pcConstraints() {
return appRTCSignalingParameters.pcConstraints;
}
public MediaConstraints videoConstraints() {
return appRTCSignalingParameters.videoConstraints;
}
// Struct holding the signaling parameters of an AppRTC room.
private class AppRTCSignalingParameters {
public final List<PeerConnection.IceServer> iceServers;
public final String gaeBaseHref;
public final String channelToken;
public final String postMessageUrl;
public final boolean initiator;
public final MediaConstraints pcConstraints;
public final MediaConstraints videoConstraints;
public AppRTCSignalingParameters(
List<PeerConnection.IceServer> iceServers,
String gaeBaseHref, String channelToken, String postMessageUrl,
boolean initiator, MediaConstraints pcConstraints,
MediaConstraints videoConstraints) {
this.iceServers = iceServers;
this.gaeBaseHref = gaeBaseHref;
this.channelToken = channelToken;
this.postMessageUrl = postMessageUrl;
this.initiator = initiator;
this.pcConstraints = pcConstraints;
this.videoConstraints = videoConstraints;
}
}
// Load the given URL and return the value of the Location header of the
// resulting 302 response. If the result is not a 302, throws.
private class RedirectResolver extends AsyncTask<String, Void, String> {
@Override
protected String doInBackground(String... urls) {
if (urls.length != 1) {
throw new RuntimeException("Must be called with a single URL");
}
try {
return followRedirect(urls[0]);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
@Override
protected void onPostExecute(String url) {
connectToRoom(url);
}
private String followRedirect(String url) throws IOException {
HttpURLConnection connection = (HttpURLConnection)
new URL(url).openConnection();
connection.setInstanceFollowRedirects(false);
int code = connection.getResponseCode();
if (code != HttpURLConnection.HTTP_MOVED_TEMP) {
throw new IOException("Unexpected response: " + code + " for " + url +
", with contents: " + drainStream(connection.getInputStream()));
}
int n = 0;
String name, value;
while ((name = connection.getHeaderFieldKey(n)) != null) {
value = connection.getHeaderField(n);
if (name.equals("Location")) {
return value;
}
++n;
}
throw new IOException("Didn't find Location header!");
}
}
// AsyncTask that converts an AppRTC room URL into the set of signaling
// parameters to use with that room.
private class RoomParameterGetter
extends AsyncTask<String, Void, AppRTCSignalingParameters> {
@Override
protected AppRTCSignalingParameters doInBackground(String... urls) {
if (urls.length != 1) {
throw new RuntimeException("Must be called with a single URL");
}
try {
return getParametersForRoomUrl(urls[0]);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
@Override
protected void onPostExecute(AppRTCSignalingParameters params) {
channelClient =
new GAEChannelClient(activity, params.channelToken, gaeHandler);
synchronized (sendQueue) {
appRTCSignalingParameters = params;
}
requestQueueDrainInBackground();
iceServersObserver.onIceServers(appRTCSignalingParameters.iceServers);
}
// Fetches |url| and fishes the signaling parameters out of the HTML via
// regular expressions.
//
// TODO(fischman): replace this hackery with a dedicated JSON-serving URL in
// apprtc so that this isn't necessary (here and in other future apps that
// want to interop with apprtc).
private AppRTCSignalingParameters getParametersForRoomUrl(String url)
throws IOException {
final Pattern fullRoomPattern = Pattern.compile(
".*\n *Sorry, this room is full\\..*");
String roomHtml =
drainStream((new URL(url)).openConnection().getInputStream());
Matcher fullRoomMatcher = fullRoomPattern.matcher(roomHtml);
if (fullRoomMatcher.find()) {
throw new IOException("Room is full!");
}
String gaeBaseHref = url.substring(0, url.indexOf('?'));
String token = getVarValue(roomHtml, "channelToken", true);
String postMessageUrl = "/message?r=" +
getVarValue(roomHtml, "roomKey", true) + "&u=" +
getVarValue(roomHtml, "me", true);
boolean initiator = getVarValue(roomHtml, "initiator", false).equals("1");
LinkedList<PeerConnection.IceServer> iceServers =
iceServersFromPCConfigJSON(getVarValue(roomHtml, "pcConfig", false));
boolean isTurnPresent = false;
for (PeerConnection.IceServer server : iceServers) {
if (server.uri.startsWith("turn:")) {
isTurnPresent = true;
break;
}
}
if (!isTurnPresent) {
iceServers.add(
requestTurnServer(getVarValue(roomHtml, "turnUrl", true)));
}
MediaConstraints pcConstraints = constraintsFromJSON(
getVarValue(roomHtml, "pcConstraints", false));
Log.d(TAG, "pcConstraints: " + pcConstraints);
MediaConstraints videoConstraints = constraintsFromJSON(
getVideoConstraints(
getVarValue(roomHtml, "mediaConstraints", false)));
Log.d(TAG, "videoConstraints: " + videoConstraints);
return new AppRTCSignalingParameters(
iceServers, gaeBaseHref, token, postMessageUrl, initiator,
pcConstraints, videoConstraints);
}
private String getVideoConstraints(String mediaConstraintsString) {
try {
JSONObject json = new JSONObject(mediaConstraintsString);
JSONObject videoJson = json.optJSONObject("video");
if (videoJson == null) {
return "";
}
return videoJson.toString();
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
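// Parse a MediaConstraints object from a JSON string of the (inferred) form
// {"mandatory": {<key>: <value>, ...}, "optional": [{<key>: <value>}, ...]};
// both sections may be absent.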
private MediaConstraints constraintsFromJSON(String jsonString) {
try {
MediaConstraints constraints = new MediaConstraints();
JSONObject json = new JSONObject(jsonString);
JSONObject mandatoryJSON = json.optJSONObject("mandatory");
if (mandatoryJSON != null) {
JSONArray mandatoryKeys = mandatoryJSON.names();
if (mandatoryKeys != null) {
for (int i = 0; i < mandatoryKeys.length(); ++i) {
String key = (String) mandatoryKeys.getString(i);
String value = mandatoryJSON.getString(key);
constraints.mandatory.add(
new MediaConstraints.KeyValuePair(key, value));
}
}
}
JSONArray optionalJSON = json.optJSONArray("optional");
if (optionalJSON != null) {
for (int i = 0; i < optionalJSON.length(); ++i) {
JSONObject keyValueDict = optionalJSON.getJSONObject(i);
String key = keyValueDict.names().getString(0);
String value = keyValueDict.getString(key);
constraints.optional.add(
new MediaConstraints.KeyValuePair(key, value));
}
}
return constraints;
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
// Scan |roomHtml| for declaration & assignment of |varName| and return its
// value, optionally stripping outside quotes if |stripQuotes| requests it.
private String getVarValue(
String roomHtml, String varName, boolean stripQuotes)
throws IOException {
final Pattern pattern = Pattern.compile(
".*\n *var " + varName + " = ([^\n]*);\n.*");
Matcher matcher = pattern.matcher(roomHtml);
if (!matcher.find()) {
throw new IOException("Missing " + varName + " in HTML: " + roomHtml);
}
String varValue = matcher.group(1);
if (matcher.find()) {
throw new IOException("Too many " + varName + " in HTML: " + roomHtml);
}
if (stripQuotes) {
varValue = varValue.substring(1, varValue.length() - 1);
}
return varValue;
}
// Requests & returns a TURN ICE Server based on a request URL. Must be run
// off the main thread!
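// The response is expected (judging from the parsing below) to be JSON of the
// form {"username": ..., "password": ..., "uris": ["turn:...", ...]}.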
private PeerConnection.IceServer requestTurnServer(String url) {
try {
URLConnection connection = (new URL(url)).openConnection();
connection.addRequestProperty("user-agent", "Mozilla/5.0");
connection.addRequestProperty("origin", "https://apprtc.appspot.com");
String response = drainStream(connection.getInputStream());
JSONObject responseJSON = new JSONObject(response);
String uri = responseJSON.getJSONArray("uris").getString(0);
String username = responseJSON.getString("username");
String password = responseJSON.getString("password");
return new PeerConnection.IceServer(uri, username, password);
} catch (JSONException e) {
throw new RuntimeException(e);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
// Return the list of ICE servers described by a WebRTCPeerConnection
// configuration string.
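// The expected (inferred) shape is
// {"iceServers": [{"url": ..., "credential": ...}, ...]}.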
private LinkedList<PeerConnection.IceServer> iceServersFromPCConfigJSON(
String pcConfig) {
try {
JSONObject json = new JSONObject(pcConfig);
JSONArray servers = json.getJSONArray("iceServers");
LinkedList<PeerConnection.IceServer> ret =
new LinkedList<PeerConnection.IceServer>();
for (int i = 0; i < servers.length(); ++i) {
JSONObject server = servers.getJSONObject(i);
String url = server.getString("url");
String credential =
server.has("credential") ? server.getString("credential") : "";
ret.add(new PeerConnection.IceServer(url, "", credential));
}
return ret;
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
// Request an attempt to drain the send queue, on a background thread.
private void requestQueueDrainInBackground() {
(new AsyncTask<Void, Void, Void>() {
public Void doInBackground(Void... unused) {
maybeDrainQueue();
return null;
}
}).execute();
}
// Send all queued messages if connected to the room.
private void maybeDrainQueue() {
synchronized (sendQueue) {
if (appRTCSignalingParameters == null) {
return;
}
try {
for (String msg : sendQueue) {
URLConnection connection = new URL(
appRTCSignalingParameters.gaeBaseHref +
appRTCSignalingParameters.postMessageUrl).openConnection();
connection.setDoOutput(true);
connection.getOutputStream().write(msg.getBytes("UTF-8"));
if (!connection.getHeaderField(null).startsWith("HTTP/1.1 200 ")) {
throw new IOException(
"Non-200 response to POST: " + connection.getHeaderField(null) +
" for msg: " + msg);
}
}
} catch (IOException e) {
throw new RuntimeException(e);
}
sendQueue.clear();
}
}
// Return the contents of an InputStream as a String.
private static String drainStream(InputStream in) {
Scanner s = new Scanner(in).useDelimiter("\\A");
return s.hasNext() ? s.next() : "";
}
}


@@ -0,0 +1,499 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.appspot.apprtc;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.Point;
import android.media.AudioManager;
import android.os.Bundle;
import android.os.PowerManager;
import android.util.Log;
import android.webkit.JavascriptInterface;
import android.widget.EditText;
import android.widget.Toast;
import org.json.JSONException;
import org.json.JSONObject;
import org.webrtc.IceCandidate;
import org.webrtc.MediaConstraints;
import org.webrtc.MediaStream;
import org.webrtc.PeerConnection;
import org.webrtc.PeerConnectionFactory;
import org.webrtc.SdpObserver;
import org.webrtc.SessionDescription;
import org.webrtc.StatsObserver;
import org.webrtc.StatsReport;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoRenderer;
import org.webrtc.VideoRenderer.I420Frame;
import org.webrtc.VideoSource;
import org.webrtc.VideoTrack;
import java.util.LinkedList;
import java.util.List;
/**
* Main Activity of the AppRTCDemo Android app demonstrating interoperability
* between the Android/Java implementation of PeerConnection and the
* apprtc.appspot.com demo webapp.
*/
public class AppRTCDemoActivity extends Activity
implements AppRTCClient.IceServersObserver {
private static final String TAG = "AppRTCDemoActivity";
private PeerConnection pc;
private final PCObserver pcObserver = new PCObserver();
private final SDPObserver sdpObserver = new SDPObserver();
private final GAEChannelClient.MessageHandler gaeHandler = new GAEHandler();
private AppRTCClient appRtcClient = new AppRTCClient(this, gaeHandler, this);
private VideoStreamsView vsv;
private Toast logToast;
private LinkedList<IceCandidate> queuedRemoteCandidates =
new LinkedList<IceCandidate>();
// Synchronize on quit[0] to avoid teardown-related crashes.
private final Boolean[] quit = new Boolean[] { false };
private MediaConstraints sdpMediaConstraints;
private PowerManager.WakeLock wakeLock;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Since the error-handling of this demo consists of throwing
// RuntimeExceptions and we assume that'll terminate the app, we install
// this default handler so it's applied to background threads as well.
Thread.setDefaultUncaughtExceptionHandler(
new Thread.UncaughtExceptionHandler() {
public void uncaughtException(Thread t, Throwable e) {
e.printStackTrace();
System.exit(-1);
}
});
PowerManager powerManager = (PowerManager) getSystemService(POWER_SERVICE);
wakeLock = powerManager.newWakeLock(
PowerManager.SCREEN_BRIGHT_WAKE_LOCK, "AppRTCDemo");
wakeLock.acquire();
Point displaySize = new Point();
getWindowManager().getDefaultDisplay().getSize(displaySize);
vsv = new VideoStreamsView(this, displaySize);
setContentView(vsv);
abortUnless(PeerConnectionFactory.initializeAndroidGlobals(this),
"Failed to initializeAndroidGlobals");
AudioManager audioManager =
((AudioManager) getSystemService(AUDIO_SERVICE));
audioManager.setMode(audioManager.isWiredHeadsetOn() ?
AudioManager.MODE_IN_CALL : AudioManager.MODE_IN_COMMUNICATION);
audioManager.setSpeakerphoneOn(!audioManager.isWiredHeadsetOn());
sdpMediaConstraints = new MediaConstraints();
sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
"OfferToReceiveAudio", "true"));
sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
"OfferToReceiveVideo", "true"));
final Intent intent = getIntent();
if ("android.intent.action.VIEW".equals(intent.getAction())) {
connectToRoom(intent.getData().toString());
return;
}
showGetRoomUI();
}
private void showGetRoomUI() {
final EditText roomInput = new EditText(this);
roomInput.setText("https://apprtc.appspot.com/?r=");
roomInput.setSelection(roomInput.getText().length());
DialogInterface.OnClickListener listener =
new DialogInterface.OnClickListener() {
@Override public void onClick(DialogInterface dialog, int which) {
abortUnless(which == DialogInterface.BUTTON_POSITIVE, "lolwat?");
dialog.dismiss();
connectToRoom(roomInput.getText().toString());
}
};
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder
.setMessage("Enter room URL").setView(roomInput)
.setPositiveButton("Go!", listener).show();
}
private void connectToRoom(String roomUrl) {
logAndToast("Connecting to room...");
appRtcClient.connectToRoom(roomUrl);
}
@Override
public void onPause() {
super.onPause();
vsv.onPause();
// TODO(fischman): IWBN to support pause/resume, but the WebRTC codebase
// isn't ready for that yet; e.g.
// https://code.google.com/p/webrtc/issues/detail?id=1407
// Instead of pausing, simply exit (the alternative leads to
// system-borking with wedged cameras; e.g. b/8224551).
disconnectAndExit();
}
@Override
public void onResume() {
// The onResume() is a lie! See TODO(fischman) in onPause() above.
super.onResume();
vsv.onResume();
}
@Override
public void onIceServers(List<PeerConnection.IceServer> iceServers) {
PeerConnectionFactory factory = new PeerConnectionFactory();
pc = factory.createPeerConnection(
iceServers, appRtcClient.pcConstraints(), pcObserver);
{
final PeerConnection finalPC = pc;
final Runnable repeatedStatsLogger = new Runnable() {
public void run() {
synchronized (quit[0]) {
if (quit[0]) {
return;
}
final Runnable runnableThis = this;
boolean success = finalPC.getStats(new StatsObserver() {
public void onComplete(StatsReport[] reports) {
for (StatsReport report : reports) {
Log.d(TAG, "Stats: " + report.toString());
}
vsv.postDelayed(runnableThis, 10000);
}
}, null);
if (!success) {
throw new RuntimeException("getStats() return false!");
}
}
}
};
vsv.postDelayed(repeatedStatsLogger, 10000);
}
{
logAndToast("Creating local video source...");
VideoCapturer capturer = getVideoCapturer();
VideoSource videoSource = factory.createVideoSource(
capturer, appRtcClient.videoConstraints());
MediaStream lMS = factory.createLocalMediaStream("ARDAMS");
VideoTrack videoTrack = factory.createVideoTrack("ARDAMSv0", videoSource);
videoTrack.addRenderer(new VideoRenderer(new VideoCallbacks(
vsv, VideoStreamsView.Endpoint.LOCAL)));
lMS.addTrack(videoTrack);
lMS.addTrack(factory.createAudioTrack("ARDAMSa0"));
pc.addStream(lMS, new MediaConstraints());
}
logAndToast("Waiting for ICE candidates...");
}
// Cycle through likely device names for the camera and return the first
// capturer that works, or crash if none do.
private VideoCapturer getVideoCapturer() {
String[] cameraFacing = { "front", "back" };
int[] cameraIndex = { 0, 1 };
int[] cameraOrientation = { 0, 90, 180, 270 };
for (String facing : cameraFacing) {
for (int index : cameraIndex) {
for (int orientation : cameraOrientation) {
String name = "Camera " + index + ", Facing " + facing +
", Orientation " + orientation;
VideoCapturer capturer = VideoCapturer.create(name);
if (capturer != null) {
logAndToast("Using camera: " + name);
return capturer;
}
}
}
}
throw new RuntimeException("Failed to open capturer");
}
@Override
public void onDestroy() {
super.onDestroy();
}
// Poor-man's assert(): die with |msg| unless |condition| is true.
private static void abortUnless(boolean condition, String msg) {
if (!condition) {
throw new RuntimeException(msg);
}
}
// Log |msg| and Toast about it.
private void logAndToast(String msg) {
Log.d(TAG, msg);
if (logToast != null) {
logToast.cancel();
}
logToast = Toast.makeText(this, msg, Toast.LENGTH_SHORT);
logToast.show();
}
// Send |json| to the underlying AppEngine Channel.
private void sendMessage(JSONObject json) {
appRtcClient.sendMessage(json.toString());
}
// Put a |key|->|value| mapping in |json|.
private static void jsonPut(JSONObject json, String key, Object value) {
try {
json.put(key, value);
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
// Implementation detail: observe ICE & stream changes and react accordingly.
private class PCObserver implements PeerConnection.Observer {
@Override public void onIceCandidate(final IceCandidate candidate){
runOnUiThread(new Runnable() {
public void run() {
JSONObject json = new JSONObject();
jsonPut(json, "type", "candidate");
jsonPut(json, "label", candidate.sdpMLineIndex);
jsonPut(json, "id", candidate.sdpMid);
jsonPut(json, "candidate", candidate.sdp);
sendMessage(json);
}
});
}
@Override public void onError(){
runOnUiThread(new Runnable() {
public void run() {
throw new RuntimeException("PeerConnection error!");
}
});
}
@Override public void onSignalingChange(
PeerConnection.SignalingState newState) {
}
@Override public void onIceConnectionChange(
PeerConnection.IceConnectionState newState) {
}
@Override public void onIceGatheringChange(
PeerConnection.IceGatheringState newState) {
}
@Override public void onAddStream(final MediaStream stream){
runOnUiThread(new Runnable() {
public void run() {
abortUnless(stream.audioTracks.size() == 1 &&
stream.videoTracks.size() == 1,
"Weird-looking stream: " + stream);
stream.videoTracks.get(0).addRenderer(new VideoRenderer(
new VideoCallbacks(vsv, VideoStreamsView.Endpoint.REMOTE)));
}
});
}
@Override public void onRemoveStream(final MediaStream stream){
runOnUiThread(new Runnable() {
public void run() {
stream.videoTracks.get(0).dispose();
}
});
}
}
// Implementation detail: handle offer creation/signaling and answer setting,
// as well as adding remote ICE candidates once the answer SDP is set.
private class SDPObserver implements SdpObserver {
@Override public void onCreateSuccess(final SessionDescription sdp) {
runOnUiThread(new Runnable() {
public void run() {
logAndToast("Sending " + sdp.type);
JSONObject json = new JSONObject();
jsonPut(json, "type", sdp.type.canonicalForm());
jsonPut(json, "sdp", sdp.description);
sendMessage(json);
pc.setLocalDescription(sdpObserver, sdp);
}
});
}
@Override public void onSetSuccess() {
runOnUiThread(new Runnable() {
public void run() {
if (appRtcClient.isInitiator()) {
if (pc.getRemoteDescription() != null) {
// We've set our local offer and received & set the remote
// answer, so drain candidates.
drainRemoteCandidates();
}
} else {
if (pc.getLocalDescription() == null) {
// We just set the remote offer, time to create our answer.
logAndToast("Creating answer");
pc.createAnswer(SDPObserver.this, sdpMediaConstraints);
} else {
// Sent our answer and set it as local description; drain
// candidates.
drainRemoteCandidates();
}
}
}
});
}
@Override public void onCreateFailure(final String error) {
runOnUiThread(new Runnable() {
public void run() {
throw new RuntimeException("createSDP error: " + error);
}
});
}
@Override public void onSetFailure(final String error) {
runOnUiThread(new Runnable() {
public void run() {
throw new RuntimeException("setSDP error: " + error);
}
});
}
private void drainRemoteCandidates() {
for (IceCandidate candidate : queuedRemoteCandidates) {
pc.addIceCandidate(candidate);
}
queuedRemoteCandidates = null;
}
}
// Implementation detail: handler for receiving GAE messages and dispatching
// them appropriately.
private class GAEHandler implements GAEChannelClient.MessageHandler {
@JavascriptInterface public void onOpen() {
if (!appRtcClient.isInitiator()) {
return;
}
logAndToast("Creating offer...");
pc.createOffer(sdpObserver, sdpMediaConstraints);
}
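// Incoming messages are JSON and mirror what PCObserver/SDPObserver send:
// {"type": "candidate", "label": ..., "id": ..., "candidate": ...},
// {"type": "offer"|"answer", "sdp": ...}, or {"type": "bye"}.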
@JavascriptInterface public void onMessage(String data) {
try {
JSONObject json = new JSONObject(data);
String type = (String) json.get("type");
if (type.equals("candidate")) {
IceCandidate candidate = new IceCandidate(
(String) json.get("id"),
json.getInt("label"),
(String) json.get("candidate"));
if (queuedRemoteCandidates != null) {
queuedRemoteCandidates.add(candidate);
} else {
pc.addIceCandidate(candidate);
}
} else if (type.equals("answer") || type.equals("offer")) {
SessionDescription sdp = new SessionDescription(
SessionDescription.Type.fromCanonicalForm(type),
(String) json.get("sdp"));
pc.setRemoteDescription(sdpObserver, sdp);
} else if (type.equals("bye")) {
logAndToast("Remote end hung up; dropping PeerConnection");
disconnectAndExit();
} else {
throw new RuntimeException("Unexpected message: " + data);
}
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
@JavascriptInterface public void onClose() {
disconnectAndExit();
}
@JavascriptInterface public void onError(int code, String description) {
disconnectAndExit();
}
}
// Disconnect from remote resources, dispose of local resources, and exit.
private void disconnectAndExit() {
synchronized (quit[0]) {
if (quit[0]) {
return;
}
quit[0] = true;
wakeLock.release();
if (pc != null) {
pc.dispose();
pc = null;
}
if (appRtcClient != null) {
appRtcClient.sendMessage("{\"type\": \"bye\"}");
appRtcClient.disconnect();
appRtcClient = null;
}
finish();
}
}
// Implementation detail: bridge the VideoRenderer.Callbacks interface to the
// VideoStreamsView implementation.
private class VideoCallbacks implements VideoRenderer.Callbacks {
private final VideoStreamsView view;
private final VideoStreamsView.Endpoint stream;
public VideoCallbacks(
VideoStreamsView view, VideoStreamsView.Endpoint stream) {
this.view = view;
this.stream = stream;
}
@Override
public void setSize(final int width, final int height) {
view.queueEvent(new Runnable() {
public void run() {
view.setSize(stream, width, height);
}
});
}
@Override
public void renderFrame(I420Frame frame) {
view.queueFrame(stream, frame);
}
}
}


@@ -0,0 +1,104 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.appspot.apprtc;
import org.webrtc.VideoRenderer.I420Frame;
import java.util.HashMap;
import java.util.LinkedList;
/**
* This class acts as an allocation pool meant to minimize GC churn caused by
* frame allocation & disposal. The main public API comprises just two methods:
* takeFrame(), which reuses a pooled frame of matching dimensions (allocating a
* new one only when necessary), and returnFrame(), which returns frame
* ownership to the pool for use by a later call to takeFrame().
*
* This class is thread-safe; calls to takeFrame() and returnFrame() are allowed
* to happen on any thread.
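*
* A typical (illustrative) sequence, mirroring VideoStreamsView.queueFrame():
* <pre>
*   I420Frame copy = framePool.takeFrame(source).copyFrom(source);
*   // ... hand |copy| to the render thread, upload it, then ...
*   framePool.returnFrame(copy);
* </pre>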
*/
class FramePool {
// Maps each summary code (see summarizeFrameDimensions()) to a list of frames
// of that description.
private final HashMap<Long, LinkedList<I420Frame>> availableFrames =
new HashMap<Long, LinkedList<I420Frame>>();
// Every dimension (e.g. width, height, stride) of a frame must be less than
// this value.
private static final long MAX_DIMENSION = 4096;
public I420Frame takeFrame(I420Frame source) {
long desc = summarizeFrameDimensions(source);
I420Frame dst = null;
synchronized (availableFrames) {
LinkedList<I420Frame> frames = availableFrames.get(desc);
if (frames == null) {
frames = new LinkedList<I420Frame>();
availableFrames.put(desc, frames);
}
if (!frames.isEmpty()) {
dst = frames.pop();
} else {
dst = new I420Frame(
source.width, source.height, source.yuvStrides, null);
}
}
return dst;
}
public void returnFrame(I420Frame frame) {
long desc = summarizeFrameDimensions(frame);
synchronized (availableFrames) {
LinkedList<I420Frame> frames = availableFrames.get(desc);
if (frames == null) {
throw new IllegalArgumentException("Unexpected frame dimensions");
}
frames.add(frame);
}
}
/** Validate that |frame| can be managed by the pool. */
public static boolean validateDimensions(I420Frame frame) {
return frame.width < MAX_DIMENSION && frame.height < MAX_DIMENSION &&
frame.yuvStrides[0] < MAX_DIMENSION &&
frame.yuvStrides[1] < MAX_DIMENSION &&
frame.yuvStrides[2] < MAX_DIMENSION;
}
// Return a code summarizing the dimensions of |frame|. Two frames that
// return the same summary are guaranteed to be able to store each others'
// contents. Used like Object.hashCode(), but we need all the bits of a long
// to do a good job, and hashCode() returns int, so we do this.
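// Since every dimension is < MAX_DIMENSION (4096 = 2^12), the five values pack
// into the low 60 bits of the returned long as base-4096 "digits".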
private static long summarizeFrameDimensions(I420Frame frame) {
long ret = frame.width;
ret = ret * MAX_DIMENSION + frame.height;
ret = ret * MAX_DIMENSION + frame.yuvStrides[0];
ret = ret * MAX_DIMENSION + frame.yuvStrides[1];
ret = ret * MAX_DIMENSION + frame.yuvStrides[2];
return ret;
}
}


@@ -0,0 +1,164 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.appspot.apprtc;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.util.Log;
import android.webkit.ConsoleMessage;
import android.webkit.JavascriptInterface;
import android.webkit.WebChromeClient;
import android.webkit.WebView;
import android.webkit.WebViewClient;
/**
* Java-land version of Google AppEngine's JavaScript Channel API:
* https://developers.google.com/appengine/docs/python/channel/javascript
*
* Requires a hosted HTML page that opens the desired channel and dispatches JS
* on{Open,Message,Close,Error}() events to a global object named
* "androidMessageHandler".
*/
public class GAEChannelClient {
private static final String TAG = "GAEChannelClient";
private WebView webView;
private final ProxyingMessageHandler proxyingMessageHandler;
/**
* Callback interface for messages delivered on the Google AppEngine channel.
*
* Methods are guaranteed to be invoked on the UI thread of |activity| passed
* to GAEChannelClient's constructor.
*/
public interface MessageHandler {
public void onOpen();
public void onMessage(String data);
public void onClose();
public void onError(int code, String description);
}
/** Asynchronously open an AppEngine channel. */
@SuppressLint("SetJavaScriptEnabled")
public GAEChannelClient(
Activity activity, String token, MessageHandler handler) {
webView = new WebView(activity);
webView.getSettings().setJavaScriptEnabled(true);
webView.setWebChromeClient(new WebChromeClient() { // Purely for debugging.
public boolean onConsoleMessage (ConsoleMessage msg) {
Log.d(TAG, "console: " + msg.message() + " at " +
msg.sourceId() + ":" + msg.lineNumber());
return false;
}
});
webView.setWebViewClient(new WebViewClient() { // Purely for debugging.
public void onReceivedError(
WebView view, int errorCode, String description,
String failingUrl) {
Log.e(TAG, "JS error: " + errorCode + " in " + failingUrl +
", desc: " + description);
}
});
proxyingMessageHandler = new ProxyingMessageHandler(activity, handler);
webView.addJavascriptInterface(
proxyingMessageHandler, "androidMessageHandler");
webView.loadUrl("file:///android_asset/channel.html?token=" + token);
}
/** Close the connection to the AppEngine channel. */
public void close() {
if (webView == null) {
return;
}
proxyingMessageHandler.disconnect();
webView.removeJavascriptInterface("androidMessageHandler");
webView.loadUrl("about:blank");
webView = null;
}
// Helper class for proxying callbacks from the Java<->JS interaction
// (private, background) thread to the Activity's UI thread.
private static class ProxyingMessageHandler {
private final Activity activity;
private final MessageHandler handler;
private final boolean[] disconnected = { false };
public ProxyingMessageHandler(Activity activity, MessageHandler handler) {
this.activity = activity;
this.handler = handler;
}
public void disconnect() {
disconnected[0] = true;
}
private boolean disconnected() {
return disconnected[0];
}
@JavascriptInterface public void onOpen() {
activity.runOnUiThread(new Runnable() {
public void run() {
if (!disconnected()) {
handler.onOpen();
}
}
});
}
@JavascriptInterface public void onMessage(final String data) {
activity.runOnUiThread(new Runnable() {
public void run() {
if (!disconnected()) {
handler.onMessage(data);
}
}
});
}
@JavascriptInterface public void onClose() {
activity.runOnUiThread(new Runnable() {
public void run() {
if (!disconnected()) {
handler.onClose();
}
}
});
}
@JavascriptInterface public void onError(
final int code, final String description) {
activity.runOnUiThread(new Runnable() {
public void run() {
if (!disconnected()) {
handler.onError(code, description);
}
}
});
}
}
}


@@ -0,0 +1,295 @@
/*
* libjingle
* Copyright 2013, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.appspot.apprtc;
import android.content.Context;
import android.graphics.Point;
import android.graphics.Rect;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.util.Log;
import org.webrtc.VideoRenderer.I420Frame;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.util.EnumMap;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
/**
* A GLSurfaceView{,.Renderer} that efficiently renders YUV frames from local &
* remote VideoTracks using the GPU for CSC. Clients will want to call the
* constructor, setSize() and queueFrame() as appropriate, but none of the
* other public methods of this class are of interest to clients (only to system
* classes).
*/
public class VideoStreamsView
extends GLSurfaceView
implements GLSurfaceView.Renderer {
/** Identify which of the two video streams is being addressed. */
public static enum Endpoint { LOCAL, REMOTE };
private final static String TAG = "VideoStreamsView";
private EnumMap<Endpoint, Rect> rects =
new EnumMap<Endpoint, Rect>(Endpoint.class);
private Point screenDimensions;
// [0] are local Y,U,V, [1] are remote Y,U,V.
private int[][] yuvTextures = { { -1, -1, -1}, {-1, -1, -1 }};
private int posLocation = -1;
private long lastFPSLogTime = System.nanoTime();
private long numFramesSinceLastLog = 0;
private FramePool framePool = new FramePool();
public VideoStreamsView(Context c, Point screenDimensions) {
super(c);
this.screenDimensions = screenDimensions;
setEGLContextClientVersion(2);
setRenderer(this);
setRenderMode(RENDERMODE_WHEN_DIRTY);
}
/** Queue |frame| to be uploaded. */
public void queueFrame(final Endpoint stream, I420Frame frame) {
// Paying for the copy of the YUV data here allows CSC and painting time
// to get spent on the render thread instead of the UI thread.
abortUnless(framePool.validateDimensions(frame), "Frame too large!");
final I420Frame frameCopy = framePool.takeFrame(frame).copyFrom(frame);
queueEvent(new Runnable() {
public void run() {
updateFrame(stream, frameCopy);
}
});
}
// Upload the planes from |frame| to the textures owned by this View.
private void updateFrame(Endpoint stream, I420Frame frame) {
int[] textures = yuvTextures[stream == Endpoint.LOCAL ? 0 : 1];
texImage2D(frame, textures);
framePool.returnFrame(frame);
requestRender();
}
/** Inform this View of the dimensions of frames coming from |stream|. */
public void setSize(Endpoint stream, int width, int height) {
// Generate 3 texture ids for Y/U/V and place them into |textures|,
// allocating enough storage for |width|x|height| pixels.
int[] textures = yuvTextures[stream == Endpoint.LOCAL ? 0 : 1];
GLES20.glGenTextures(3, textures, 0);
for (int i = 0; i < 3; ++i) {
int w = i == 0 ? width : width / 2;
int h = i == 0 ? height : height / 2;
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[i]);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, w, h, 0,
GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, null);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
}
checkNoGLES2Error();
}
@Override
protected void onMeasure(int unusedX, int unusedY) {
// Go big or go home!
setMeasuredDimension(screenDimensions.x, screenDimensions.y);
}
@Override
public void onSurfaceChanged(GL10 unused, int width, int height) {
GLES20.glViewport(0, 0, width, height);
checkNoGLES2Error();
}
@Override
public void onDrawFrame(GL10 unused) {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
drawRectangle(yuvTextures[1], remoteVertices);
drawRectangle(yuvTextures[0], localVertices);
++numFramesSinceLastLog;
long now = System.nanoTime();
if (lastFPSLogTime == -1 || now - lastFPSLogTime > 1e9) {
double fps = numFramesSinceLastLog / ((now - lastFPSLogTime) / 1e9);
Log.d(TAG, "Rendered FPS: " + fps);
lastFPSLogTime = now;
numFramesSinceLastLog = 1;
}
checkNoGLES2Error();
}
@Override
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
int program = GLES20.glCreateProgram();
addShaderTo(GLES20.GL_VERTEX_SHADER, VERTEX_SHADER_STRING, program);
addShaderTo(GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SHADER_STRING, program);
GLES20.glLinkProgram(program);
int[] result = new int[] { GLES20.GL_FALSE };
result[0] = GLES20.GL_FALSE;
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, result, 0);
abortUnless(result[0] == GLES20.GL_TRUE,
GLES20.glGetProgramInfoLog(program));
GLES20.glUseProgram(program);
GLES20.glUniform1i(GLES20.glGetUniformLocation(program, "y_tex"), 0);
GLES20.glUniform1i(GLES20.glGetUniformLocation(program, "u_tex"), 1);
GLES20.glUniform1i(GLES20.glGetUniformLocation(program, "v_tex"), 2);
// Actually set in drawRectangle(), but queried only once here.
posLocation = GLES20.glGetAttribLocation(program, "in_pos");
int tcLocation = GLES20.glGetAttribLocation(program, "in_tc");
GLES20.glEnableVertexAttribArray(tcLocation);
GLES20.glVertexAttribPointer(
tcLocation, 2, GLES20.GL_FLOAT, false, 0, textureCoords);
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
checkNoGLES2Error();
}
// Wrap a float[] in a direct FloatBuffer using native byte order.
private static FloatBuffer directNativeFloatBuffer(float[] array) {
FloatBuffer buffer = ByteBuffer.allocateDirect(array.length * 4).order(
ByteOrder.nativeOrder()).asFloatBuffer();
buffer.put(array);
buffer.flip();
return buffer;
}
// Upload the YUV planes from |frame| to |textures|.
private void texImage2D(I420Frame frame, int[] textures) {
for (int i = 0; i < 3; ++i) {
ByteBuffer plane = frame.yuvPlanes[i];
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[i]);
int w = i == 0 ? frame.width : frame.width / 2;
int h = i == 0 ? frame.height : frame.height / 2;
abortUnless(w == frame.yuvStrides[i], frame.yuvStrides[i] + "!=" + w);
GLES20.glTexImage2D(
GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, w, h, 0,
GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, plane);
}
checkNoGLES2Error();
}
// Draw |textures| using |vertices| (X,Y coordinates).
private void drawRectangle(int[] textures, FloatBuffer vertices) {
for (int i = 0; i < 3; ++i) {
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[i]);
}
GLES20.glVertexAttribPointer(
posLocation, 2, GLES20.GL_FLOAT, false, 0, vertices);
GLES20.glEnableVertexAttribArray(posLocation);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
checkNoGLES2Error();
}
// Compile & attach a |type| shader specified by |source| to |program|.
private static void addShaderTo(
int type, String source, int program) {
int[] result = new int[] { GLES20.GL_FALSE };
int shader = GLES20.glCreateShader(type);
GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader);
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, result, 0);
abortUnless(result[0] == GLES20.GL_TRUE,
GLES20.glGetShaderInfoLog(shader) + ", source: " + source);
GLES20.glAttachShader(program, shader);
GLES20.glDeleteShader(shader);
checkNoGLES2Error();
}
// Poor-man's assert(): die with |msg| unless |condition| is true.
private static void abortUnless(boolean condition, String msg) {
if (!condition) {
throw new RuntimeException(msg);
}
}
// Assert that no OpenGL ES 2.0 error has been raised.
private static void checkNoGLES2Error() {
int error = GLES20.glGetError();
abortUnless(error == GLES20.GL_NO_ERROR, "GLES20 error: " + error);
}
// Remote image should span the full screen.
private static final FloatBuffer remoteVertices = directNativeFloatBuffer(
new float[] { -1, 1, -1, -1, 1, 1, 1, -1 });
// Local image should be thumbnailish.
private static final FloatBuffer localVertices = directNativeFloatBuffer(
new float[] { 0.6f, 0.9f, 0.6f, 0.6f, 0.9f, 0.9f, 0.9f, 0.6f });
// Texture Coordinates mapping the entire texture.
private static final FloatBuffer textureCoords = directNativeFloatBuffer(
new float[] { 0, 0, 0, 1, 1, 0, 1, 1 });
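// In each buffer the four (x, y) pairs are ordered top-left, bottom-left,
// top-right, bottom-right, matching the GL_TRIANGLE_STRIP draw in
// drawRectangle().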
// Pass-through vertex shader.
private static final String VERTEX_SHADER_STRING =
"varying vec2 interp_tc;\n" +
"\n" +
"attribute vec4 in_pos;\n" +
"attribute vec2 in_tc;\n" +
"\n" +
"void main() {\n" +
" gl_Position = in_pos;\n" +
" interp_tc = in_tc;\n" +
"}\n";
// YUV to RGB pixel shader. Loads a sample from each plane and passes it
// through the CSC matrix.
private static final String FRAGMENT_SHADER_STRING =
"precision mediump float;\n" +
"varying vec2 interp_tc;\n" +
"\n" +
"uniform sampler2D y_tex;\n" +
"uniform sampler2D u_tex;\n" +
"uniform sampler2D v_tex;\n" +
"\n" +
"void main() {\n" +
" float y = texture2D(y_tex, interp_tc).r;\n" +
" float u = texture2D(u_tex, interp_tc).r - .5;\n" +
" float v = texture2D(v_tex, interp_tc).r - .5;\n" +
// CSC according to http://www.fourcc.org/fccyvrgb.php
" gl_FragColor = vec4(y + 1.403 * v, " +
" y - 0.344 * u - 0.714 * v, " +
" y + 1.77 * u, 1);\n" +
"}\n";
}