<!DOCTYPE html>
<html>
<head>
  <!-- Load adapter.js, the polyfill that smooths over WebRTC API
       differences between Chrome and Firefox. -->
  <script src='../../base/adapter.js'></script>
  <title>Device Switch Demo</title>
  <style>
    h2 {
      font-size: 1em;
      font-family: sans-serif;
      margin: 0 0 0.5em 0;
      padding: 0;
    }
    video {
      width: 40%;
    }
  </style>
</head>
<body>
  <video id="vid1" autoplay muted></video>
  <video id="vid2" autoplay></video>
  <div>
    <h2>Select an audio and video source, then click Start.</h2>
    Audio source: <select id="audiosrc"></select>
    Video source: <select id="videosrc"></select><br>
    <button id="btn1">Start</button>
    <button id="btn2">Call</button>
    <button id="btn3">Hang Up</button>
  </div>
<script>
var vid1 = document.getElementById('vid1');
var vid2 = document.getElementById('vid2');
var btn1 = document.querySelector('button#btn1');
var btn2 = document.querySelector('button#btn2');
var btn3 = document.querySelector('button#btn3');
var audio_select = document.getElementById('audiosrc');
var video_select = document.getElementById('videosrc');
//audio_select.onchange = changeDevices;
//video_select.onchange = changeDevices;
btn1.onclick = start;
btn2.onclick = call;
btn3.onclick = hangup;
btn1.disabled = false;
btn2.disabled = true;
btn3.disabled = true;
var pc1, pc2;
var localstream;
var sdpConstraints = {'mandatory': {
    'OfferToReceiveAudio': true,
    'OfferToReceiveVideo': true }};
refreshSources();

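// Populate the device pickers. MediaStreamTrack.getSources() is the
// Chrome (M30+) API for enumerating capture devices.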
function refreshSources() {
  if (webrtcDetectedVersion >= 30) {
    MediaStreamTrack.getSources(gotSources);
  } else {
    alert('Device enumeration requires Chrome M30 or later.');
  }
}

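// getSources() callback: fill the audio and video <select> elements.
// Device labels may be empty until the user has granted media access,
// so fall back to generic "Audio N" / "Video N" names.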
function gotSources(sourceInfos) {
  var audio_count = 0;
  var video_count = 0;
  audio_select.disabled = true;
  video_select.disabled = true;
  audio_select.innerHTML = '';
  video_select.innerHTML = '';
  for (var i = 0; i < sourceInfos.length; i++) {
    var option = document.createElement('option');
    option.value = sourceInfos[i].id;
    option.text = sourceInfos[i].label;
    if (sourceInfos[i].kind === 'audio') {
      audio_count++;
      if (option.text === '') {
        option.text = 'Audio ' + audio_count;
      }
      audio_select.appendChild(option);
    } else {
      video_count++;
      if (option.text === '') {
        option.text = 'Video ' + video_count;
      }
      video_select.appendChild(option);
    }
  }
  audio_select.disabled = false;
  video_select.disabled = false;
}

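// Open the selected devices and lock the pickers until hang-up.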
function start() {
  changeDevices();
  btn1.disabled = true;
  btn2.disabled = false;
  audio_select.disabled = true;
  video_select.disabled = true;
}

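// Read the selected source ids from the pickers and request a local
// stream that uses those devices.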
function changeDevices() {
  var audio_source = null;
  var video_source = null;
  if (audio_select.options.length > 0) {
    audio_source = audio_select.options[audio_select.selectedIndex].value;
    trace('Selected audio source: ' + audio_source);
  }
  if (video_select.options.length > 0) {
    video_source = video_select.options[video_select.selectedIndex].value;
    trace('Selected video source: ' + video_source);
  }
  setWebcamAndMic(audio_source, video_source);
}

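// Request a local stream constrained to the chosen devices. The source
// ids are passed as optional constraints, so they are treated as
// best-effort rather than mandatory.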
function setWebcamAndMic(audio_source, video_source) {
  trace("Requesting local stream");
  // Call into getUserMedia via the polyfill (adapter.js).
  getUserMedia({audio: {optional: [{sourceId: audio_source}]},
                video: {optional: [{sourceId: video_source}]}},
               gotStream,
               function(error) {
                 trace("getUserMedia error: " + error);
               });
}

function gotStream(stream) {
  trace("Received local stream");
  // Call the polyfill wrapper to attach the media stream to this element.
  attachMediaStream(vid1, stream);
  localstream = stream;
}

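// Set up a loopback call: pc1 and pc2 both live in this page, so offers,
// answers and ICE candidates are handed over directly instead of going
// through a signaling channel.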
function call() {
  btn2.disabled = true;
  btn3.disabled = false;
  trace("Starting call");
  var videoTracks = localstream.getVideoTracks();
  var audioTracks = localstream.getAudioTracks();
  if (videoTracks.length > 0) {
    trace('Using video device: ' + videoTracks[0].label);
  }
  if (audioTracks.length > 0) {
    trace('Using audio device: ' + audioTracks[0].label);
  }
  var servers = null;
  pc1 = new RTCPeerConnection(servers);
  trace("Created local peer connection object pc1");
  pc1.onicecandidate = iceCallback1;
  pc2 = new RTCPeerConnection(servers);
  trace("Created remote peer connection object pc2");
  pc2.onicecandidate = iceCallback2;
  pc2.onaddstream = gotRemoteStream;

  pc1.addStream(localstream);
  trace("Adding local stream to peer connection pc1");
  pc1.createOffer(gotDescription1);
}

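// pc1 produced an offer: apply it as pc1's local description, hand it to
// pc2 as the remote description, and ask pc2 for an answer.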
function gotDescription1(desc) {
  pc1.setLocalDescription(desc);
  trace("Offer from pc1:\n" + desc.sdp);
  pc2.setRemoteDescription(desc);
  // Since the "remote" side has no media stream, we need to pass in the
  // right constraints in order for it to accept the incoming offer of
  // audio and video.
  pc2.createAnswer(gotDescription2, null, sdpConstraints);
}

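// pc2 produced an answer: apply it on both sides to complete the
// offer/answer exchange.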
function gotDescription2(desc) {
  pc2.setLocalDescription(desc);
  trace("Answer from pc2:\n" + desc.sdp);
  pc1.setRemoteDescription(desc);
}

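// Stop local capture, tear down both peer connections and re-enable the
// device pickers.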
function hangup() {
  trace("Ending call");
  localstream.stop();
  pc1.close();
  pc2.close();
  pc1 = null;
  pc2 = null;
  btn3.disabled = true;
  btn1.disabled = false;
  audio_select.disabled = false;
  video_select.disabled = false;
}

function gotRemoteStream(e) {
  // Call the polyfill wrapper to attach the media stream to this element.
  attachMediaStream(vid2, e.stream);
  trace("Received remote stream");
}

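// Candidates from pc1 are added directly to pc2 (and vice versa below),
// standing in for the usual exchange over a signaling channel.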
function iceCallback1(event) {
  if (event.candidate) {
    pc2.addIceCandidate(new RTCIceCandidate(event.candidate));
    trace("Local ICE candidate:\n" + event.candidate.candidate);
  }
}

function iceCallback2(event) {
  if (event.candidate) {
    pc1.addIceCandidate(new RTCIceCandidate(event.candidate));
    trace("Remote ICE candidate:\n" + event.candidate.candidate);
  }
}
</script>
</body>
</html>