Added new create-offer and ice-servers demos to test the exact output of createOffer and .onicecandidate.

Updated a few demos to work on Firefox.

R=dutton@google.com

Review URL: https://webrtc-codereview.appspot.com/1581006

git-svn-id: http://webrtc.googlecode.com/svn/trunk@5464 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
juberti@webrtc.org 2014-01-30 23:38:44 +00:00
parent bda5fa77af
commit 5db9a3f32a
9 changed files with 358 additions and 103 deletions

View File

@ -33,6 +33,7 @@ if (navigator.mozGetUserMedia) {
// Get UserMedia (only difference is the prefix).
// Code from Adam Barth.
getUserMedia = navigator.mozGetUserMedia.bind(navigator);
navigator.getUserMedia = getUserMedia;
// Creates iceServer from the url for FF.
createIceServer = function(url, username, password) {
@ -118,6 +119,7 @@ if (navigator.mozGetUserMedia) {
// Get UserMedia (only difference is the prefix).
// Code from Adam Barth.
getUserMedia = navigator.webkitGetUserMedia.bind(navigator);
navigator.getUserMedia = getUserMedia;
// Attach a media stream to an element.
attachMediaStream = function(element, stream) {

View File

@ -27,15 +27,15 @@ function openCamera() {
if (mystream) {
mystream.stop();
}
navigator.webkitGetUserMedia(cameraConstraints(), gotStream, function() {
navigator.getUserMedia(cameraConstraints(), gotStream, function() {
log("GetUserMedia failed");
});
}
}
function gotStream(stream) {
log("GetUserMedia succeeded");
mystream = stream;
$("local-video").src = webkitURL.createObjectURL(stream);
attachMediaStream($("local-video"), stream);
}
function cameraConstraints() {
@ -73,8 +73,8 @@ function streamConstraints() {
}
function connect() {
pc1 = new webkitRTCPeerConnection(null);
pc2 = new webkitRTCPeerConnection(null);
pc1 = new RTCPeerConnection(null);
pc2 = new RTCPeerConnection(null);
pc1.addStream(mystream, streamConstraints());
log('PC1 creating offer');
pc1.onnegotiationeeded = function() {
@ -97,7 +97,7 @@ function connect() {
}
pc2.onaddstream = function(e) {
log('PC2 got stream');
$('remote-video').src = webkitURL.createObjectURL(e.stream);
attachMediaStream($('remote-video'), e.stream);
log('Remote video is ' + $('remote-video').src);
}
pc1.createOffer(function(desc) {
@ -280,7 +280,7 @@ function dumpStats(obj) {
}
return statsString;
}
// Utility to show the value of a field in a span called name+Display
function showValue(name, value) {

View File

@ -0,0 +1,85 @@
<!DOCTYPE html>
<html>
<head>
<!-- This sample demonstrates calling createOffer to get a SDP blob that
indicates the capabilities of the PeerConnection. -->
<title>Show createOffer Output Demo</title>
<script src="../../base/adapter.js"></script>
<style>
button {
font: 18px sans-serif;
padding: 8px;
}
textarea {
font-family: monospace;
margin: 2px;
width:480px;
height:640px;
}
</style>
</head>
<body>
<h1>WebRTC createOffer Test Page</h1>
<p>This page tests the createOffer method for a WebRTC implementation. It
creates a PeerConnection, and then prints out the SDP generated by
createOffer, with the number of desired audio MediaStreamTracks and the
checked createOffer constraints. Currently, only audio tracks can be added,
as there is no programmatic way to generate video tracks. (Web Audio is
used to generate the audio tracks.)</p>
<h3>Tracks</h3>
<p>Number of Audio Tracks<input id="num-audio-tracks" value="0"></input></p>
<h3>Constraints:</h3>
<input id="audio" type="checkbox">Offer To Receive Audio</input><br>
<input id="video" type="checkbox">Offer To Receive Video</input><br>
<input id="vad" type="checkbox">Voice Activity Detection</input><br>
<input id="restart" type="checkbox">Ice Restart</input><br>
<button id="start" onclick="createOffer()">Create Offer</button><br>
<br>
<textarea id="output"></textarea>
<script>
var numAudioTracks = document.getElementById('num-audio-tracks');
var audio = document.getElementById('audio');
var video = document.getElementById('video');
var vad = document.getElementById('vad');
var restart = document.getElementById('restart');
var output = document.getElementById('output');
var pc = new RTCPeerConnection(null);
var wacx = new webkitAudioContext();
// Builds an offer reflecting the current UI settings: syncs the number of
// dummy Web Audio streams on the PeerConnection with the requested track
// count, assembles the createOffer constraints from the checkboxes, and
// invokes createOffer (the result is displayed by gotDescription).
function createOffer() {
  // The <input> value is a string; coerce it to a number explicitly so the
  // comparisons below are unambiguously numeric (a non-numeric entry yields
  // NaN, which safely skips both loops).
  var numRequestedAudioTracks = Number(numAudioTracks.value);
  while (numRequestedAudioTracks < pc.getLocalStreams().length) {
    pc.removeStream(pc.getLocalStreams()[pc.getLocalStreams().length - 1]);
  }
  while (numRequestedAudioTracks > pc.getLocalStreams().length) {
    // Create some dummy audio streams using Web Audio.
    // Note that this fails if you try to do more than one track in Chrome
    // right now.
    var dst = wacx.createMediaStreamDestination();
    pc.addStream(dst.stream);
  }
  var offerConstraints = {
    "optional": [
      { "OfferToReceiveAudio": audio.checked },
      { "OfferToReceiveVideo": video.checked },
    ]
  };
  // These constraints confuse Firefox, even if declared as optional.
  if (webrtcDetectedBrowser != "Firefox") {
    offerConstraints.optional.push(
        { "VoiceActivityDetection": vad.checked });
    offerConstraints.optional.push(
        { "IceRestart": restart.checked });
  }
  pc.createOffer(gotDescription, null, offerConstraints);
}
// createOffer success callback: displays the generated SDP in the output
// textarea and applies the description locally.
function gotDescription(description) {
  output.value = description.sdp;
  pc.setLocalDescription(description);
}
</script>
</body>
</html>

View File

@ -3,7 +3,8 @@
<head>
<script type="text/javascript" src="../js/ccv.js"></script>
<script type="text/javascript" src="../js/face.js"></script>
<script src="/_ah/channel/jsapi"></script>
<!-- Load the polyfill to switch-hit between Chrome and Firefox -->
<script src="../../base/adapter.js"></script>
<style type="text/css">
* { margin:0; padding:0; } /* to remove the top and left whitespace */
html, body { width:100%; height:100%; } /* just to be sure these are full screen*/
@ -12,7 +13,7 @@ a:link { color: #ffffff; } a:visited {color: #ffffff; }
#localCanvas {
display: block;
position: absolute;
position: absolute;
width: 100%;
height: 100%;
}
@ -27,7 +28,7 @@ a:link { color: #ffffff; } a:visited {color: #ffffff; }
width: 100%;
height: 100%;
-webkit-transition-property: opacity;
-webkit-transition-duration: 2s;
-webkit-transition-duration: 2s;
opacity: 0;
}
#logo {
@ -35,8 +36,8 @@ a:link { color: #ffffff; } a:visited {color: #ffffff; }
top:4;
right:4;
position:absolute;
float:right;
#opacity: 0.8;
float:right;
#opacity: 0.8;
}
#credit {
@ -44,8 +45,8 @@ a:link { color: #ffffff; } a:visited {color: #ffffff; }
top:28;
right:4;
position:absolute;
float:right;
font-size:10px;
float:right;
font-size:10px;
}
</style>
@ -55,96 +56,92 @@ a:link { color: #ffffff; } a:visited {color: #ffffff; }
<script type="text/javascript">
var localVideo;
var localCanvas;
//var worker = new Worker('ccv.js');
initialize = function() {
localVideo = document.getElementById("localVideo");
localCanvas = document.getElementById("localCanvas");
getUserMedia();
localVideo = document.getElementById("localVideo");
localCanvas = document.getElementById("localCanvas");
try {
navigator.getUserMedia({video:true}, onGotStream, onFailedStream);
//trace("Requested access to local media");
} catch (e) {
alert("getUserMedia error " + e);
//trace_e(e, "getUserMedia error");
}
}
getUserMedia = function() {
try { navigator.webkitGetUserMedia({video:true,audio:true}, onGotStream, onFailedStream);
//trace("Requested access to local media");
} catch (e) {
alert("getUserMedia error " + e);
//trace_e(e, "getUserMedia error");
}
}
poll = function() {
poll = function() {
var w = localVideo.videoWidth;
var h = localVideo.videoHeight;
var canvas = document.createElement('canvas');
canvas.width = w;
canvas.height = h;
var ctx = canvas.getContext('2d');
ctx.drawImage(localVideo, 0, 0, w, h);
var h = localVideo.videoHeight;
var canvas = document.createElement('canvas');
canvas.width = w;
canvas.height = h;
var ctx = canvas.getContext('2d');
ctx.drawImage(localVideo, 0, 0, w, h);
var comp = ccv.detect_objects({ "canvas" : ccv.grayscale(canvas),
"cascade" : cascade,
"interval" : 5,
"min_neighbors" : 1 });
/* draw detected area */
//localCanvas.left = 400;
//localCanvas.top = localVideo.top;
/*localCanvas.right = localVideo.right;
localCanvas.bottom = localVideo.bottom;*/
localCanvas.width = localVideo.clientWidth;
localCanvas.height = localVideo.clientHeight;
var ctx2 = localCanvas.getContext('2d');
ctx2.lineWidth = 2;
ctx2.lineJoin = "round";
ctx2.clearRect (0, 0, localCanvas.width,localCanvas.height);
var x_offset = 0, y_offset = 0, x_scale = 1, y_scale = 1;
if (localVideo.clientWidth * localVideo.videoHeight > localVideo.videoWidth * localVideo.clientHeight) {
x_offset = (localVideo.clientWidth - localVideo.clientHeight * localVideo.videoWidth / localVideo.videoHeight) / 2;
} else {
y_offset = (localVideo.clientHeight - localVideo.clientWidth * localVideo.videoHeight / localVideo.videoWidth) / 2;
"cascade" : cascade,
"interval" : 5,
"min_neighbors" : 1 });
/* draw detected area */
localCanvas.width = localVideo.clientWidth;
localCanvas.height = localVideo.clientHeight;
var ctx2 = localCanvas.getContext('2d');
ctx2.lineWidth = 2;
ctx2.lineJoin = "round";
ctx2.clearRect (0, 0, localCanvas.width,localCanvas.height);
var x_offset = 0, y_offset = 0, x_scale = 1, y_scale = 1;
if (localVideo.clientWidth * localVideo.videoHeight > localVideo.videoWidth * localVideo.clientHeight) {
x_offset = (localVideo.clientWidth - localVideo.clientHeight *
localVideo.videoWidth / localVideo.videoHeight) / 2;
} else {
y_offset = (localVideo.clientHeight - localVideo.clientWidth *
localVideo.videoHeight / localVideo.videoWidth) / 2;
}
x_scale = (localVideo.clientWidth - x_offset * 2) / localVideo.videoWidth;
y_scale = (localVideo.clientHeight - y_offset * 2) / localVideo.videoHeight;
for (var i = 0; i < comp.length; i++) {
comp[i].x = comp[i].x * x_scale + x_offset;
comp[i].y = comp[i].y * y_scale + y_offset;
comp[i].width = comp[i].width * x_scale;
comp[i].height = comp[i].height * y_scale;
var opacity = 0.1;
if (comp[i].confidence > 0) {
opacity += comp[i].confidence / 10;
if (opacity > 1.0) opacity = 1.0;
}
x_scale = (localVideo.clientWidth - x_offset * 2) / localVideo.videoWidth;
y_scale = (localVideo.clientHeight - y_offset * 2) / localVideo.videoHeight;
for (var i = 0; i < comp.length; i++) {
comp[i].x = comp[i].x * x_scale + x_offset;
comp[i].y = comp[i].y * y_scale + y_offset;
comp[i].width = comp[i].width * x_scale;
comp[i].height = comp[i].height * y_scale;
var opacity = 0.1;
if (comp[i].confidence > 0) {
opacity += comp[i].confidence / 10;
if (opacity > 1.0) opacity = 1.0;
}
//ctx2.strokeStyle = "rgba(255,0,0," + opacity * 255 + ")";
ctx2.lineWidth = opacity * 10;
ctx2.strokeStyle = "rgb(255,0,0)";
ctx2.strokeRect(comp[i].x, comp[i].y, comp[i].width, comp[i].height);
}
setTimeout(poll, 1000);
//ctx2.strokeStyle = "rgba(255,0,0," + opacity * 255 + ")";
ctx2.lineWidth = opacity * 10;
ctx2.strokeStyle = "rgb(255,0,0)";
ctx2.strokeRect(comp[i].x, comp[i].y, comp[i].width, comp[i].height);
}
setTimeout(poll, 1000);
}
onGotStream = function(stream) {
var url = webkitURL.createObjectURL(stream);
localVideo.style.opacity = 1; localVideo.src = url;
localStream = stream;
localVideo.style.opacity = 1;
attachMediaStream(localVideo, stream);
localStream = stream;
//trace("User has granted access to local media. url = " + url);
setTimeout(poll, 2000);
//trace("User has granted access to local media. url = " + url);
setTimeout(poll, 2000);
}
onFailedStream = function(error) {
alert("Failed to get access to local media. Error code was " + error.code + ".");
//trace_warning("Failed to get access to local media. Error code was " + error.code);
}
alert("Failed to get access to local media. Error code was " + error.code + ".");
//trace_warning("Failed to get access to local media. Error code was " + error.code);
}
setTimeout(initialize, 1);
</script>
<video id="localVideo" autoplay="autoplay" muted="true"></video>
<canvas width="1000" height="1000" id="localCanvas"></canvas>
<canvas width="1000" height="1000" id="localCanvas"></canvas>
<a href="http://www.webrtc.org"><img id="logo" alt="WebRTC" src="../images/webrtc_black_20p.png"></a>
<a href="http://liuliu.me/eyes/javascript-face-detection-explained"><div id="credit">JS Face Detect by Liu Liu</div></a>
</body>

View File

@ -2,6 +2,8 @@
<html>
<head>
<title>getUserMedia Demo 1</title>
<!-- Load the polyfill to switch-hit between Chrome and Firefox -->
<script src="../../base/adapter.js"></script>
<style>
video {
border:5px solid black;
@ -21,11 +23,11 @@ button {
<script>
video = document.getElementById("vid");
function start() {
navigator.webkitGetUserMedia({video:true}, gotStream, function() {});
navigator.getUserMedia({video:true}, gotStream, function() {});
btn.disabled = true;
}
function gotStream(stream) {
video.src = webkitURL.createObjectURL(stream);
attachMediaStream(video, stream);
}
</script>
</body>

View File

@ -2,6 +2,8 @@
<html>
<head>
<title>getUserMedia Demo 2</title>
<!-- Load the polyfill to switch-hit between Chrome and Firefox -->
<script src="../../base/adapter.js"></script>
<style>
video {
border:5px solid black;
@ -11,7 +13,7 @@ video {
canvas {
border:5px solid black;
width:480px;
height:360px;
height:360px;
}
button {
font: 18px sans-serif;
@ -29,14 +31,14 @@ button {
video = document.getElementById("vid");
canvas = document.getElementById("cvs");
canvas.width = 480;
canvas.height = 360;
canvas.height = 360;
btn2.disabled = true;
function start() {
navigator.webkitGetUserMedia({video:true}, gotStream, function() {});
navigator.getUserMedia({video:true}, gotStream, function() {});
btn1.disabled = true;
}
function gotStream(stream) {
video.src = webkitURL.createObjectURL(stream);
attachMediaStream(video, stream);
btn2.disabled = false
}
function snap() {

View File

@ -2,6 +2,8 @@
<html>
<head>
<title>getUserMedia Demo 3</title>
<!-- Load the polyfill to switch-hit between Chrome and Firefox -->
<script src="../../base/adapter.js"></script>
<style>
video {
border:5px solid black;
@ -16,7 +18,7 @@ canvas {
button {
font: 18px sans-serif;
padding: 8px;
}
}
.grayscale {
-webkit-filter: grayscale(1);
}
@ -44,15 +46,15 @@ findex = 0;
video = document.getElementById("vid");
canvas = document.getElementById("cvs");
canvas.width = 480;
canvas.height = 360;
canvas.height = 360;
btn2.disabled = true;
btn3.disabled = true;
function start() {
navigator.webkitGetUserMedia({video:true}, gotStream, function() {});
navigator.getUserMedia({video:true}, gotStream, function() {});
btn1.disabled = true;
}
function gotStream(stream) {
video.src = webkitURL.createObjectURL(stream);
attachMediaStream(video, stream);
btn2.disabled = false;
btn3.disabled = false;
}

View File

@ -0,0 +1,148 @@
<!DOCTYPE html>
<html>
<head>
<!-- This sample demonstrates enumeration of candidates for
the specified STUN/TURN server. -->
<title>ICE Candidate Gathering Demo</title>
<script src="../../base/adapter.js"></script>
<style>
body {
font: 14px sans-serif;
}
button {
font: 18px sans-serif;
padding: 8px;
}
select {
margin: 2px;
width:960px;
height:80px;
}
textarea {
font-family: monospace;
margin: 2px;
width:960px;
height:640px;
}
</style>
</head>
<body>
<h1>WebRTC Trickle ICE Test Page</h1>
<p>This page tests the trickle ICE functionality in a WebRTC implementation. It
creates a PeerConnection with the specified ICEServers, and then starts
candidate gathering for a session with a single audio stream. As candidates
are gathered, they are displayed in the text box below, along with an
indication when candidate gathering is complete.</p>
<p>Individual STUN and TURN servers can be added using the Add Server/Remove
Server controls below; in addition, the type of candidates released to the
application can be controlled via the IceTransports constraint.</p>
<h3>ICE Servers</h3>
<select id="servers" size="4">
<option value="{&quot;url&quot;:&quot;stun:stun.l.google.com:19302&quot;}">
stun:stun.l.google.com:19302
</option>
</select>
<br>
STUN or TURN URI:
<input id="url" size="64"></input>
<br>
TURN Username:
<input id="username" size="16"></input>
TURN Password:
<input id="password" size="16"></input>
<br>
<button id="add" onclick="addServer()">Add Server</button>
<button id="remove" onclick="removeServer()">Remove Server</button>
<h3>ICE Constraints</h3>
IceTransports value:
<input type="radio" name="transports" value="all" checked> All
<input type="radio" name="transports" value="relay"> Relay
<input type="radio" name="transports" value="none"> None
<br>
<br>
<button id="gather" onclick="start()">Gather Candidates</button>
<br>
<textarea id="output"></textarea>
<script>
var servers = document.getElementById('servers');
var url = document.getElementById('url');
var username = document.getElementById('username');
var password = document.getElementById('password');
var output = document.getElementById('output');
var pc;
var begin;
// Reads the STUN/TURN URI and optional TURN credentials from the input
// fields, validates the URI scheme, and appends the resulting ICE server
// entry to the server <select>. The ICE server object itself is kept as
// stringified JSON in the option's value so start() can parse it back.
function addServer() {
  var uri = url.value;
  var scheme = uri.split(":")[0];
  if (["stun", "turn", "turns"].indexOf(scheme) == -1) {
    alert("URI is not valid");
    return;
  }
  // Store the ICE server as a stringified JSON object in the option value.
  var option = document.createElement("option");
  option.value =
      JSON.stringify(createIceServer(uri, username.value, password.value));
  option.text = uri + " ";
  if (username.value.length || password.value.length) {
    option.text += (" [" + username.value + ":" + password.value + "]");
  }
  servers.add(option);
  url.value = username.value = password.value = "";
}
// Deletes every selected entry from the server <select>. Walks the list
// from the end so that removals do not shift the indices still to visit.
function removeServer() {
  var i = servers.options.length;
  while (i-- > 0) {
    if (servers.options[i].selected) {
      servers.remove(i);
    }
  }
}
// Creates a PeerConnection configured with the listed ICE servers and the
// selected IceTransports value, then requests an audio-only offer so that
// candidate gathering begins (candidates are reported via iceCallback).
function start() {
  // Create a PeerConnection with no streams, but force a m=audio line.
  // Pass in the STUN/TURN server value from the input boxes.
  output.value = "";
  var iceServers = [];
  var idx;
  for (idx = 0; idx < servers.length; ++idx) {
    // Each option's value holds a stringified ICE server object (addServer).
    iceServers.push(JSON.parse(servers[idx].value));
  }
  // Pick the value of whichever "transports" radio button is checked.
  var iceTransports;
  var radios = document.getElementsByName("transports");
  for (idx = 0; idx < radios.length; ++idx) {
    var radio = radios[idx];
    if (radio.checked) {
      iceTransports = radio.value;
      break;
    }
  }
  var config = {"iceServers": iceServers };
  var constraints = {"mandatory": {"IceTransports":iceTransports}};
  trace("Creating new PeerConnection with config=" + JSON.stringify(config) +
      ", constraints=" + JSON.stringify(constraints));
  pc = new RTCPeerConnection(config, constraints);
  pc.onicecandidate = iceCallback;
  pc.createOffer(gotDescription, null,
      {"mandatory": {"OfferToReceiveAudio": true}});
}
// createOffer success callback: records the start time for candidate
// timing, then applies the description locally, which kicks off ICE
// gathering. `begin` must be set before setLocalDescription so iceCallback
// measures from the right instant.
function gotDescription(description) {
  begin = performance.now();
  pc.setLocalDescription(description);
}
// onicecandidate handler: prefixes each gathered candidate with the
// seconds elapsed since setLocalDescription; a null candidate signals the
// end of gathering, at which point the connection is torn down.
// NOTE(review): nothing here appends a newline between entries — this
// presumably relies on the candidate string carrying its own line ending;
// confirm against the browser's candidate format.
function iceCallback(event) {
  var stamp = ((performance.now() - begin) / 1000).toFixed(3) + ": ";
  if (!event.candidate) {
    output.value += (stamp + "Done");
    pc.close();
    pc = null;
    return;
  }
  output.value += (stamp + event.candidate.candidate);
}
</script>
</body>
</html>

View File

@ -61,6 +61,12 @@ WebRTC overview</a> first.
<td colspan="2" scope="col" style="background-color: rgb(0, 204, 255);">
<b>PeerConnection Samples</b></td>
</tr>
<tr>
<td>
<a href="html/pc1-audio.html">pc1-audio.html</a></td>
</td>
<td>Shows how to set up a simple 1:1 audio only call.</td>
</tr>
<tr>
<td>
<a href="html/pc1.html">pc1.html</a></td>
@ -77,13 +83,8 @@ WebRTC overview</a> first.
<td>
<a href="html/states.html">states.html</a></td>
<td>
Shows RTCPeerStates and RTCIceConnectionStates in a simple 1:1 audio/video call.</td>
</tr>
<tr>
<td>
<a href="html/pc1-audio.html">pc1-audio.html</a></td>
<td>
Shows how to set up a simple 1:1 audio only call.</td>
Shows RTCPeerStates and RTCIceConnectionStates in a
simple 1:1 audio/video call.</td>
</tr>
<tr>
<td>
@ -95,7 +96,8 @@ WebRTC overview</a> first.
<td>
<a href="html/constraints-and-stats.html">constraints-and-stats.html</a></td>
<td>
Shows how to pass constraints into the PeerConnection API, and query it for statistics.</td>
Shows how to pass constraints into the PeerConnection API,
and query it for statistics.</td>
</tr>
<tr>
<td>
@ -113,7 +115,22 @@ WebRTC overview</a> first.
<td>
<a href="html/webaudio-and-webrtc.html">webaudio-and-webrtc.html</a></td>
<td>
Captures and filters microphone input using WebAudio and sends it to a remote peer with an option to add an audio effect.</td>
Captures and filters microphone input using WebAudio and sends it to a
remote peer with an option to add an audio effect.</td>
</tr>
<tr>
<td>
<a href="html/create-offer.html">create-offer.html</a></td>
<td>
Shows the output of createOffer when various constraints
are supplied.</td>
</tr>
<tr>
<td>
<a href="html/ice-servers.html">ice-servers.html</a></td>
<td>
Tests gathering candidates from arbitrary STUN and TURN servers.
</td>
</tr>
</tbody>
</table>