2013-04-17 16:24:21 +02:00
|
|
|
<!DOCTYPE html>
|
|
|
|
<html>
|
2013-10-01 19:02:32 +02:00
|
|
|
<!-- Load the polyfill to switch-hit between Chrome and Firefox -->
|
|
|
|
<script src='../../base/adapter.js'></script>
|
2013-04-17 16:24:21 +02:00
|
|
|
<head>
|
2013-10-01 19:02:32 +02:00
|
|
|
<title>Device Switch Demo</title>
|
2013-04-17 16:24:21 +02:00
|
|
|
<style>
|
2013-10-01 19:02:32 +02:00
|
|
|
h2 {
|
|
|
|
font-size: 1em;
|
|
|
|
font-family: sans-serif;
|
|
|
|
margin: 0 0 0.5em 0;
|
|
|
|
padding: 0;
|
2013-04-17 16:24:21 +02:00
|
|
|
}
|
2013-10-01 19:02:32 +02:00
|
|
|
video {
|
|
|
|
width:40%;
|
2013-04-17 16:24:21 +02:00
|
|
|
}
|
|
|
|
</style>
|
2013-10-01 19:02:32 +02:00
|
|
|
</head>
|
|
|
|
<body>
|
|
|
|
<video id="vid1" autoplay muted></video>
|
|
|
|
<video id="vid2" autoplay></video>
|
|
|
|
<div>
|
|
|
|
<h2>Select an audio and video source, then click Start.</h2>
|
|
|
|
Audio source: <select id="audiosrc"></select>
|
|
|
|
Video source: <select id="videosrc"></select><br>
|
|
|
|
<button id="btn1">Start</button>
|
|
|
|
<button id="btn2">Call</button>
|
|
|
|
<button id="btn3">Hang Up</button>
|
|
|
|
</div>
|
2013-04-17 16:24:21 +02:00
|
|
|
<script>
|
2013-10-01 19:02:32 +02:00
|
|
|
// --- UI references -------------------------------------------------
var btn1 = document.querySelector('button#btn1');
var btn2 = document.querySelector('button#btn2');
var btn3 = document.querySelector('button#btn3');
var audio_select = document.getElementById("audiosrc");
var video_select = document.getElementById("videosrc");

//audio_select.onchange = changeDevices;
//video_select.onchange = changeDevices;

// Wire up the buttons; only "Start" is enabled until a stream exists.
btn1.onclick = start;
btn2.onclick = call;
btn3.onclick = hangup;
btn1.disabled = false;
btn2.disabled = true;
btn3.disabled = true;

// Peer connections and the local capture stream, filled in later.
var pc1, pc2;
var localstream;

// Constraints telling the answering side ("remote" pc2, which sends no
// media itself) to accept both incoming audio and video.
var sdpConstraints = {
  'mandatory': {
    'OfferToReceiveAudio': true,
    'OfferToReceiveVideo': true
  }
};

// Populate the device drop-downs on page load.
refreshSources();
|
|
|
|
|
|
|
|
// Enumerate capture devices via MediaStreamTrack.getSources(), which
// only exists in Chrome M30+; otherwise tell the user to upgrade.
function refreshSources() {
  if (webrtcDetectedVersion >= 30) {
    MediaStreamTrack.getSources(gotSources);
    return;
  }
  alert('Make sure that you have Chrome M30 to test device enumeration api.');
}
|
|
|
|
|
2013-10-01 19:02:32 +02:00
|
|
|
// Callback for MediaStreamTrack.getSources(): rebuild the audio and
// video <select> lists from the enumerated sources. Each sourceInfo
// carries an id, a kind ('audio' or 'video') and a label that may be
// empty when the page has no media permission yet.
function gotSources(sourceInfos) {
  var audio_count = 0;
  var video_count = 0;
  // Disable and clear the drop-downs while they are being rebuilt.
  audio_select.disabled = true;
  video_select.disabled = true;
  audio_select.innerHTML = '';
  video_select.innerHTML = '';
  for (var i = 0; i < sourceInfos.length; i++) {
    var option = document.createElement("option");
    option.value = sourceInfos[i].id;
    option.text = sourceInfos[i].label;
    if (sourceInfos[i].kind === 'audio') {
      audio_count++;
      // Fall back to a generated name for unlabeled devices.
      if (option.text === '') {
        option.text = 'Audio ' + audio_count;
      }
      audio_select.appendChild(option);
    } else if (sourceInfos[i].kind === 'video') {
      // Bug fix: this used to be a bare `else`, so any non-audio kind
      // (including future/unknown kinds) was listed as a camera. Only
      // accept explicit 'video' sources; skip everything else.
      video_count++;
      if (option.text === '') {
        option.text = 'Video ' + video_count;
      }
      video_select.appendChild(option);
    }
  }
  audio_select.disabled = false;
  video_select.disabled = false;
}
|
|
|
|
|
2013-10-01 19:02:32 +02:00
|
|
|
// "Start" handler: request media for the selected devices and move the
// UI into the ready-to-call state.
function start() {
  changeDevices();
  // Device selection is locked once capture has been requested.
  audio_select.disabled = true;
  video_select.disabled = true;
  btn1.disabled = true;
  btn2.disabled = false;
}
|
2013-04-17 16:24:21 +02:00
|
|
|
|
2013-10-01 19:02:32 +02:00
|
|
|
// Read the currently selected audio and video source ids (null when a
// list is empty) and request a matching local media stream.
function changeDevices() {
  var audio_source = readSelection(audio_select, 'audio_source');
  var video_source = readSelection(video_select, 'video_source');
  setWebcamAndMic(audio_source, video_source);

  // Returns the selected option's value, or null for an empty list,
  // tracing the choice under the given name.
  function readSelection(select, name) {
    if (select.options.length === 0) {
      return null;
    }
    var value = select.options[select.selectedIndex].value;
    trace('selected ' + name + ' :' + value);
    return value;
  }
}
|
|
|
|
|
2013-10-01 19:02:32 +02:00
|
|
|
// Request a local stream from the given device ids (null selects the
// browser default). Success goes to gotStream().
function setWebcamAndMic(audio_source, video_source) {
  trace("Requesting local stream");
  // Call into getUserMedia via the polyfill (adapter.js).
  getUserMedia({
    audio: {optional: [{sourceId: audio_source}]},
    video: {optional: [{sourceId: video_source}]}
  }, gotStream, function(error) {
    // Bug fix: the error callback used to be an empty function, so a
    // denied permission or missing device failed silently. Surface it
    // in the log instead.
    trace('getUserMedia error: ' + (error.name || error));
  });
}
|
|
|
|
|
|
|
|
// getUserMedia success callback: remember the stream and show it in
// the local <video> element.
function gotStream(stream) {
  trace("Received local stream");
  localstream = stream;
  // Call the polyfill wrapper to attach the media stream to this element.
  attachMediaStream(vid1, stream);
}
|
|
|
|
|
|
|
|
// "Call" handler: build both in-page peer connections, wire up ICE and
// remote-stream callbacks, attach the local stream to pc1 and start
// the offer/answer exchange.
function call() {
  btn2.disabled = true;
  btn3.disabled = false;
  trace("Starting call");
  // Bug fix: `var` was missing on these two assignments, leaking
  // videoTracks/audioTracks as implicit globals.
  var videoTracks = localstream.getVideoTracks();
  var audioTracks = localstream.getAudioTracks();
  if (videoTracks.length > 0) {
    trace('Using Video device: ' + videoTracks[0].label);
  }
  if (audioTracks.length > 0) {
    trace('Using Audio device: ' + audioTracks[0].label);
  }
  // No STUN/TURN servers needed for a loopback call in one page.
  var servers = null;
  pc1 = new RTCPeerConnection(servers);
  trace("Created local peer connection object pc1");
  pc1.onicecandidate = iceCallback1;
  pc2 = new RTCPeerConnection(servers);
  trace("Created remote peer connection object pc2");
  pc2.onicecandidate = iceCallback2;
  pc2.onaddstream = gotRemoteStream;

  pc1.addStream(localstream);
  trace("Adding Local Stream to peer connection");
  pc1.createOffer(gotDescription1);
}
|
|
|
|
|
2013-10-01 19:02:32 +02:00
|
|
|
// pc1 produced its offer: install it on both ends, then ask pc2 for an
// answer.
function gotDescription1(desc) {
  trace("Offer from pc1 \n" + desc.sdp);
  pc1.setLocalDescription(desc);
  pc2.setRemoteDescription(desc);
  // Since the "remote" side has no media stream we need to pass in the
  // right constraints in order for it to accept the incoming offer of
  // audio and video.
  pc2.createAnswer(gotDescription2, null, sdpConstraints);
}
|
|
|
|
|
2013-10-01 19:02:32 +02:00
|
|
|
// pc2 produced its answer: install it on both ends, completing the
// offer/answer exchange.
function gotDescription2(desc) {
  trace("Answer from pc2 \n" + desc.sdp);
  pc2.setLocalDescription(desc);
  pc1.setRemoteDescription(desc);
}
|
|
|
|
|
2013-10-01 19:02:32 +02:00
|
|
|
// "Hang Up" handler: stop capture, tear down both peer connections and
// restore the UI to its initial state.
function hangup() {
  trace("Ending call");
  // NOTE(review): MediaStream.stop() is the legacy API of this demo's
  // era; modern code stops each track individually.
  localstream.stop();
  pc1.close();
  pc2.close();
  pc1 = pc2 = null;
  btn3.disabled = true;
  btn1.disabled = false;
  audio_select.disabled = false;
  video_select.disabled = false;
}
|
|
|
|
|
2013-10-01 19:02:32 +02:00
|
|
|
// pc2's onaddstream handler: render the received stream in the second
// <video> element.
function gotRemoteStream(e) {
  trace("Received remote stream");
  // Call the polyfill wrapper to attach the media stream to this element.
  attachMediaStream(vid2, e.stream);
}
|
|
|
|
|
2013-10-01 19:02:32 +02:00
|
|
|
// Forward pc1's ICE candidates to pc2 (in-page signaling).
function iceCallback1(event) {
  // A null candidate marks end-of-gathering; nothing to forward.
  if (!event.candidate) {
    return;
  }
  pc2.addIceCandidate(new RTCIceCandidate(event.candidate));
  trace("Local ICE candidate: \n" + event.candidate.candidate);
}
|
2013-04-17 16:24:21 +02:00
|
|
|
|
2013-10-01 19:02:32 +02:00
|
|
|
// Forward pc2's ICE candidates to pc1 (in-page signaling).
function iceCallback2(event) {
  // A null candidate marks end-of-gathering; nothing to forward.
  if (!event.candidate) {
    return;
  }
  pc1.addIceCandidate(new RTCIceCandidate(event.candidate));
  trace("Remote ICE candidate: \n " + event.candidate.candidate);
}
|
2013-04-17 16:24:21 +02:00
|
|
|
</script>
|
|
|
|
</body>
|
|
|
|
</html>
|