Added a new demo, states.html, and updated the existing demos to work on Firefox.

Review URL: https://webrtc-codereview.appspot.com/1327007

git-svn-id: http://webrtc.googlecode.com/svn/trunk@3905 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
vikasmarwaha@webrtc.org 2013-04-25 23:22:03 +00:00
parent 91563e42da
commit 77ac84814d
8 changed files with 353 additions and 98 deletions

View File

@ -65,7 +65,15 @@ if (navigator.mozGetUserMedia) {
// Attach a media stream to a media element.
// Feature-detects the right property: standard srcObject, Firefox's
// mozSrcObject, or an object URL assigned to element.src as a last resort.
// NOTE: the old unconditional `element.src = webkitURL.createObjectURL(...)`
// line is removed — webkitURL is undefined on Firefox and would throw before
// the detection chain below ever ran.
attachMediaStream = function(element, stream) {
  if (typeof element.srcObject !== 'undefined') {
    element.srcObject = stream;
  } else if (typeof element.mozSrcObject !== 'undefined') {
    element.mozSrcObject = stream;
  } else if (typeof element.src !== 'undefined') {
    element.src = URL.createObjectURL(stream);
  } else {
    console.log('Error attaching stream to element.');
  }
};
reattachMediaStream = function(to, from) {

View File

@ -122,7 +122,8 @@ function hangup() {
}
// Remote-stream callback: render the incoming audio and enable the DTMF UI.
function gotRemoteStream(e){
  // Call the polyfill wrapper (adapter.js) to attach the media stream;
  // the old direct webkitURL.createObjectURL assignment throws on Firefox.
  attachMediaStream(audio2, e.stream);
  trace("Received remote stream");
  enableDtmfSender();
}

View File

@ -141,12 +141,14 @@ function hangup() {
}
// Remote-stream callback for pc1: render the stream in vid2.
function gotRemoteStream1(e) {
  // Call the polyfill wrapper (adapter.js) to attach the media stream;
  // the old direct webkitURL.createObjectURL assignment throws on Firefox.
  attachMediaStream(vid2, e.stream);
  trace("PC1: Received remote stream");
}
// Remote-stream callback for pc2: render the stream in vid3.
function gotRemoteStream2(e) {
  // Call the polyfill wrapper (adapter.js) to attach the media stream;
  // the old direct webkitURL.createObjectURL assignment throws on Firefox.
  attachMediaStream(vid3, e.stream);
  trace("PC2: Received remote stream");
}

View File

@ -88,7 +88,8 @@ function hangup() {
}
// Remote-stream callback: render the incoming audio in audio2.
function gotRemoteStream(e){
  // Call the polyfill wrapper (adapter.js) to attach the media stream;
  // the old direct webkitURL.createObjectURL assignment throws on Firefox.
  attachMediaStream(audio2, e.stream);
  trace("Received remote stream");
}

View File

@ -112,7 +112,8 @@ function hangup() {
}
// Remote-stream callback: render the incoming video in vid2.
function gotRemoteStream(e){
  // Call the polyfill wrapper (adapter.js) to attach the media stream;
  // the old direct webkitURL.createObjectURL assignment throws on Firefox.
  attachMediaStream(vid2, e.stream);
  trace("Received remote stream");
}

View File

@ -115,7 +115,8 @@ function stop() {
}
// Remote-stream callback: render the incoming video in vid2.
function gotRemoteStream(e) {
  // Call the polyfill wrapper (adapter.js) to attach the media stream;
  // the old direct webkitURL.createObjectURL assignment throws on Firefox.
  attachMediaStream(vid2, e.stream);
  trace("Received remote stream");
}

View File

@ -0,0 +1,231 @@
<!DOCTYPE html>
<html>
<head>
<title>RTCPeerState & RTCIceConnectionState Demo 1</title>
<!-- Load the polyfill to switch-hit between Chrome and Firefox -->
<script src="../../base/adapter.js"></script>
<style>
video {
border:5px solid black;
width:480px;
height:360px;
}
button {
font: 18px sans-serif;
padding: 8px;
}
label{
font: bold 15px/30px Georgia,serif;
display:inline-table;
width:200px;
text-align:left;
}
input{
font:italic bold 15px/30px Georgia,serif;
text-align:center;
}
fieldset{
border:none;
margin:0px auto;
}
</style>
</head>
<body>
<video id="vid1" autoplay></video>
<video id="vid2" autoplay></video>
<br>
<button id="btn1" onclick="start()">Start</button>
<button id="btn2" onclick="call()">Call</button>
<button id="btn3" onclick="hangup()">Hang Up</button>
<br>
<br>
<fieldset>
<label>pc1-state:</label>
<input type="text" id="pc1-state" size="50" disabled="true">
<br>
<label>pc1-ice-connection-state:</label>
<input type="text" id="pc1-ice-connection-state" size="50" disabled="true">
<br>
<label>pc2-state:</label>
<input type="text" id="pc2-state" size="50" disabled="true">
<br>
<label>pc2-ice-connection-state:</label>
<input type="text" id="pc2-ice-connection-state" size="50" disabled="true">
</fieldset>
<script>
// Initial UI wiring: only the Start button is enabled until we have media.
btn1.disabled = false;
btn2.disabled = true;
btn3.disabled = true;
// Peer connections and the local capture stream, populated by start()/call().
var pc1;
var pc2;
var localstream;
// Ask the answerer to receive both audio and video.
var sdpConstraints = {
  'mandatory': {
    'OfferToReceiveAudio': true,
    'OfferToReceiveVideo': true
  }
};
// getUserMedia success callback: show the local preview and enable Call.
function gotStream(mediaStream) {
  trace("Received local stream");
  // attachMediaStream (from adapter.js) hides per-browser differences.
  attachMediaStream(vid1, mediaStream);
  localstream = mediaStream;
  btn2.disabled = false;
}
// Start button: request camera + microphone access.
function start() {
  trace("Requesting local stream");
  btn1.disabled = true;
  // Call into getUserMedia via the polyfill (adapter.js).
  getUserMedia({audio:true, video:true},
               gotStream,
               function(error) {
                 // Don't swallow failures silently: report the error and
                 // re-enable Start so the user can retry.
                 trace("getUserMedia error: " + JSON.stringify(error));
                 btn1.disabled = false;
               });
}
// Call button: create both peer connections, wire up state/ICE callbacks
// (new event names with fallback to the legacy ones), and start signaling.
function call() {
  btn2.disabled = true;
  btn3.disabled = false;
  trace("Starting call");
  // Declared with var — the originals leaked these as implicit globals.
  var videoTracks = localstream.getVideoTracks();
  var audioTracks = localstream.getAudioTracks();
  if (videoTracks.length > 0) {
    trace('Using Video device: ' + videoTracks[0].label);
  }
  if (audioTracks.length > 0) {
    trace('Using Audio device: ' + audioTracks[0].label);
  }
  var servers = null;
  var pc_constraints = {"optional": []};
  pc1 = new RTCPeerConnection(servers, pc_constraints);
  trace("Created local peer connection object pc1");
  // Newer implementations expose signalingState; older ones used readyState.
  document.getElementById("pc1-state").value = pc1.signalingState ||
      pc1.readyState;
  if (typeof pc1.onsignalingstatechange !== 'undefined') {
    pc1.onsignalingstatechange = stateCallback1;
  } else {
    pc1.onstatechange = stateCallback1;
  }
  document.getElementById("pc1-ice-connection-state").value =
      pc1.iceConnectionState;
  if (typeof pc1.oniceconnectionstatechange !== 'undefined') {
    pc1.oniceconnectionstatechange = iceStateCallback1;
  } else {
    pc1.onicechange = iceStateCallback1;
  }
  pc1.onicecandidate = iceCallback1;
  pc2 = new RTCPeerConnection(servers, pc_constraints);
  trace("Created remote peer connection object pc2");
  document.getElementById("pc2-state").value = pc2.signalingState ||
      pc2.readyState;
  if (typeof pc2.onsignalingstatechange !== 'undefined') {
    pc2.onsignalingstatechange = stateCallback2;
  } else {
    pc2.onstatechange = stateCallback2;
  }
  document.getElementById("pc2-ice-connection-state").value =
      pc2.iceConnectionState;
  if (typeof pc2.oniceconnectionstatechange !== 'undefined') {
    pc2.oniceconnectionstatechange = iceStateCallback2;
  } else {
    pc2.onicechange = iceStateCallback2;
  }
  pc2.onicecandidate = iceCallback2;
  pc2.onaddstream = gotRemoteStream;
  pc1.addStream(localstream);
  trace("Adding Local Stream to peer connection");
  pc1.createOffer(gotDescription1);
}
// Offer created by pc1: apply it locally, hand it to pc2, request an answer.
function gotDescription1(description) {
  pc1.setLocalDescription(description);
  trace("Offer from pc1 \n" + description.sdp);
  pc2.setRemoteDescription(description);
  pc2.createAnswer(gotDescription2, null, sdpConstraints);
}
// Answer created by pc2: apply it on both ends to complete signaling.
function gotDescription2(description) {
  pc2.setLocalDescription(description);
  trace("Answer from pc2 \n" + description.sdp);
  pc1.setRemoteDescription(description);
}
// Hang Up button: close both connections and append the final states.
function hangup() {
  trace("Ending call");
  pc1.close();
  pc2.close();
  // Parenthesize the fallback: without parens, '+' binds tighter than '||',
  // so ("->" + undefined) was always truthy and readyState was never used —
  // legacy browsers showed "->undefined" instead of the real state.
  document.getElementById("pc1-state").value += "->" +
      (pc1.signalingState || pc1.readyState);
  document.getElementById("pc2-state").value += "->" +
      (pc2.signalingState || pc2.readyState);
  document.getElementById("pc1-ice-connection-state").value += "->" +
      pc1.iceConnectionState;
  document.getElementById("pc2-ice-connection-state").value += "->" +
      pc2.iceConnectionState;
  pc1 = null;
  pc2 = null;
  btn3.disabled = true;
  btn2.disabled = false;
}
// Remote stream arrived on pc2: render it in the second video element.
function gotRemoteStream(event) {
  attachMediaStream(vid2, event.stream);
  trace("Received remote stream");
}
// Signaling-state change on pc1: append the new state to its text field.
// Guard against firing after hangup() nulls the connection.
function stateCallback1() {
  if (!pc1) {
    return;
  }
  var state = pc1.signalingState || pc1.readyState;
  trace("pc1 state change callback, state:" + state);
  document.getElementById("pc1-state").value += "->" + state;
}
// Signaling-state change on pc2: append the new state to its text field.
// Guard against firing after hangup() nulls the connection.
function stateCallback2() {
  if (!pc2) {
    return;
  }
  var state = pc2.signalingState || pc2.readyState;
  trace("pc2 state change callback, state:" + state);
  document.getElementById("pc2-state").value += "->" + state;
}
// ICE connection-state change on pc1: append the new state to its field.
// Guard against firing after hangup() nulls the connection.
function iceStateCallback1() {
  if (!pc1) {
    return;
  }
  var iceState = pc1.iceConnectionState;
  trace("pc1 ICE connection state change callback, state:" + iceState);
  document.getElementById("pc1-ice-connection-state").value +=
      "->" + iceState;
}
// ICE connection-state change on pc2: append the new state to its field.
// Guard against firing after hangup() nulls the connection.
function iceStateCallback2() {
  if (!pc2) {
    return;
  }
  var iceState = pc2.iceConnectionState;
  trace("pc2 ICE connection state change callback, state:" + iceState);
  document.getElementById("pc2-ice-connection-state").value +=
      "->" + iceState;
}
// Trickle pc1's local ICE candidates over to pc2.
function iceCallback1(event) {
  if (!event.candidate) {
    // A null candidate marks the end of gathering.
    trace("end of candidates1");
    return;
  }
  pc2.addIceCandidate(new RTCIceCandidate(event.candidate));
  trace("Local ICE candidate: \n" + event.candidate.candidate);
}
// Trickle pc2's local ICE candidates over to pc1.
function iceCallback2(event) {
  if (!event.candidate) {
    // A null candidate marks the end of gathering.
    trace("end of candidates2");
    return;
  }
  pc1.addIceCandidate(new RTCIceCandidate(event.candidate));
  trace("Remote ICE candidate: \n " + event.candidate.candidate);
}
</script>
</body>
</html>

View File

@ -1,94 +1,104 @@
<html>
<head>
<title>WebRTC Samples</title>
</head>
<body>
<h1>
WebRTC Samples</h1>
<p>
Here are some sample pages that demonstrate basic <a href="http://www.webrtc.org">WebRTC</a> concepts. If you are new to WebRTC, you may want to check out this <a href="http://www.html5rocks.com/en/tutorials/webrtc/basics/">WebRTC overview</a> first.</p>
<table border="0" cellpadding="1" cellspacing="1" style="width: 100%;">
<thead>
<tr>
<td colspan="2" scope="col" style="background-color: rgb(0, 204, 255);">
<b>getUserMedia Samples</b></td>
</tr>
</thead>
<tbody>
<tr>
<td>
<a href="html/gum1.html">gum1.html</a></td>
<td>
Shows how to access the webcam and display the local video in a &lt;video/&gt; element.</td>
</tr>
<tr>
<td>
<a href="html/gum2.html">gum2.html</a></td>
<td>
Shows how to capture the current frame of video to a &lt;canvas/&gt;.</td>
</tr>
<tr>
<td>
<a href="html/gum3.html">gum3.html</a></td>
<td>
Shows how to apply CSS filters to a &lt;video/&gt; and &lt;canvas/&gt;</td>
</tr>
<tr>
<td>
<a href="html/face.html">face.html</a></td>
<td>
Shows how to perform face tracking using webcam video.</td>
</tr>
<tr><td>&nbsp; </td> <td>&nbsp; </td></tr>
<tr>
<td colspan="2" scope="col" style="background-color: rgb(0, 204, 255);">
<b>PeerConnection Samples</b></td>
</tr>
<tr>
<td>
<a href="html/pc1.html">pc1.html</a></td>
<td>
Shows how to set up a simple 1:1 audio/video call.</td>
</tr>
<tr>
<td>
<a href="html/pc1-audio.html">pc1-audio.html</a></td>
<td>
Shows how to set up a simple 1:1 audio only call.</td>
</tr>
<tr>
<td>
<a href="html/multiple.html">multiple.html</a></td>
<td>
Shows how to set up multiple PeerConnections.</td>
</tr>
<tr>
<td>
<a href="html/constraints-and-stats.html">constraints-and-stats.html</a></td>
<td>
Shows how to pass constraints into the PeerConnection API, and query it for statistics.</td>
</tr>
<tr>
<td>
<a href="html/dtmf1.html">dtmf1.html</a></td>
<td>
Shows how to send DTMF tones using PeerConnection API.</td>
</tr>
<tr>
<td>
<a href="html/dc1.html">dc1.html</a></td>
<td>
Shows how to send Data using PeerConnection API.</td>
</tr>
<tr>
<td>
<a href="html/local-audio-rendering.html">local-audio-rendering.html</a></td>
<td>
Shows usage of a local media stream connected to an HTML5 audio tag.</td>
</tr>
</tbody>
</table>
<p>
&nbsp;</p></body>
<head>
<title>WebRTC Samples</title>
</head>
<body>
<h1>WebRTC Samples</h1>
<p>
Here are some sample pages that demonstrate basic
<a href="http://www.webrtc.org">WebRTC</a> concepts. If you are new to WebRTC,
you may want to check out this
<a href="http://www.html5rocks.com/en/tutorials/webrtc/basics/">
WebRTC overview</a> first.
</p>
<table border="0" cellpadding="1" cellspacing="1" style="width: 100%;">
<thead>
<tr>
<td colspan="2" scope="col" style="background-color: rgb(0, 204, 255);">
<b>getUserMedia Samples</b></td>
</tr>
</thead>
<tbody>
<tr>
<td>
<a href="html/gum1.html">gum1.html</a></td>
<td>
Shows how to access the webcam and display the local video in a &lt;video/&gt; element.</td>
</tr>
<tr>
<td>
<a href="html/gum2.html">gum2.html</a></td>
<td>
Shows how to capture the current frame of video to a &lt;canvas/&gt;.</td>
</tr>
<tr>
<td>
<a href="html/gum3.html">gum3.html</a></td>
<td>
Shows how to apply CSS filters to a &lt;video/&gt; and &lt;canvas/&gt;</td>
</tr>
<tr>
<td>
<a href="html/face.html">face.html</a></td>
<td>
Shows how to perform face tracking using webcam video.</td>
</tr>
<tr><td>&nbsp; </td> <td>&nbsp; </td></tr>
<tr>
<td colspan="2" scope="col" style="background-color: rgb(0, 204, 255);">
<b>PeerConnection Samples</b></td>
</tr>
<tr>
<td>
<a href="html/pc1.html">pc1.html</a></td>
<td>
Shows how to set up a simple 1:1 audio/video call.</td>
</tr>
<tr>
<td>
<a href="html/states.html">states.html</a></td>
<td>
Shows RTCPeerStates and RTCIceConnectionStates in a simple 1:1 audio/video call.</td>
</tr>
<tr>
<td>
<a href="html/pc1-audio.html">pc1-audio.html</a></td>
<td>
Shows how to set up a simple 1:1 audio only call.</td>
</tr>
<tr>
<td>
<a href="html/multiple.html">multiple.html</a></td>
<td>
Shows how to set up multiple PeerConnections.</td>
</tr>
<tr>
<td>
<a href="html/constraints-and-stats.html">constraints-and-stats.html</a></td>
<td>
Shows how to pass constraints into the PeerConnection API, and query it for statistics.</td>
</tr>
<tr>
<td>
<a href="html/dtmf1.html">dtmf1.html</a></td>
<td>
Shows how to send DTMF tones using PeerConnection API.</td>
</tr>
<tr>
<td>
<a href="html/dc1.html">dc1.html</a></td>
<td>
Shows how to send Data using PeerConnection API.</td>
</tr>
<tr>
<td>
<a href="html/local-audio-rendering.html">local-audio-rendering.html</a></td>
<td>
Shows usage of a local media stream connected to an HTML5 audio tag.</td>
</tr>
</tbody>
</table>
<p>&nbsp;</p>
</body>
</html>