Adding webrtc-sample demos under trunk/samples.

Review URL: https://webrtc-codereview.appspot.com/1126005

git-svn-id: http://webrtc.googlecode.com/svn/trunk@3578 4adac7df-926f-26a2-2b94-8c16560cd09d
vikasmarwaha@webrtc.org
2013-02-27 23:22:10 +00:00
parent 132c15de30
commit 98fce15c6f
30 changed files with 3409 additions and 0 deletions

9
samples/js/demos/OWNERS Normal file

@@ -0,0 +1,9 @@
juberti@webrtc.org
braveyao@webrtc.org
wu@webrtc.org
hta@webrtc.org
vikasmarwaha@webrtc.org
phoglund@webrtc.org
kjellander@webrtc.org
henrika@webrtc.org

30
samples/js/demos/app.yaml Normal file

@@ -0,0 +1,30 @@
application: webrtc-demos
version: 1
runtime: python27
api_version: 1
threadsafe: yes

handlers:
- url: /favicon\.ico
  static_files: favicon.ico
  upload: favicon\.ico

- url: /html
  static_dir: html
  secure: always

- url: /images
  static_dir: images
  secure: always

- url: /js
  static_dir: js
  secure: always

- url: .*
  script: main.app
  secure: always

libraries:
- name: webapp2
  version: "2.5.1"

Binary file not shown.



@@ -0,0 +1,262 @@
<html>
<head>
<title>Constraints and Statistics</title>
<script>
var mystream;
var pc1;
var pc2;
$ = function(id) {
return document.getElementById(id);
}
function log(txt) {
console.log(txt);
}
function openCamera() {
if (mystream) {
mystream.stop();
}
navigator.webkitGetUserMedia(cameraConstraints(), gotStream, function() {
log("GetUserMedia failed");
});
}
function gotStream(stream) {
log("GetUserMedia succeeded");
mystream = stream;
$("local-video").src = webkitURL.createObjectURL(stream);
}
function cameraConstraints() {
var constraints = {};
constraints.audio = true;
constraints.video = { mandatory: {}, optional: [] };
if ($("minwidth").value != "0") {
constraints.video.mandatory.minWidth = $("minwidth").value;
}
if ($("maxwidth").value != "0") {
constraints.video.mandatory.maxWidth = $("maxwidth").value;
}
if ($("minheight").value != "0") {
constraints.video.mandatory.minHeight = $("minheight").value;
}
if ($("maxheight").value != "0") {
constraints.video.mandatory.maxHeight = $("maxheight").value;
}
if ($("frameRate").value != "0") {
constraints.video.mandatory.minFrameRate = $("frameRate").value;
}
log('Camera constraints are ' + JSON.stringify(constraints));
$("cameraConstraints").innerHTML = JSON.stringify(constraints, null, ' ');
return constraints;
}
function streamConstraints() {
var constraints = { mandatory: {}, optional: [] };
if ($("bandwidth").value != "0") {
constraints.optional[0] = { 'bandwidth' : $('bandwidth').value };
}
log('Constraints are ' + JSON.stringify(constraints));
$("addStreamConstraints").innerHTML = JSON.stringify(constraints, null, ' ');
return constraints;
}
function connect() {
pc1 = new webkitRTCPeerConnection(null);
pc2 = new webkitRTCPeerConnection(null);
pc1.addStream(mystream, streamConstraints());
log('PC1 creating offer');
pc1.onnegotiationneeded = function() {
log('Negotiation needed - PC1');
}
pc2.onnegotiationneeded = function() {
log('Negotiation needed - PC2');
}
pc1.onicecandidate = function(e) {
log('Candidate PC1');
if (e.candidate) {
pc2.addIceCandidate(new RTCIceCandidate(e.candidate));
}
}
pc2.onicecandidate = function(e) {
log('Candidate PC2');
if (e.candidate) {
pc1.addIceCandidate(new RTCIceCandidate(e.candidate));
}
}
pc2.onaddstream = function(e) {
log('PC2 got stream');
$('remote-video').src = webkitURL.createObjectURL(e.stream);
log('Remote video is ' + $('remote-video').src);
}
pc1.createOffer(function(desc) {
log('PC1 offering');
pc1.setLocalDescription(desc);
pc2.setRemoteDescription(desc);
pc2.createAnswer(function(desc2) {
log('PC2 answering');
pc2.setLocalDescription(desc2);
pc1.setRemoteDescription(desc2);
});
});
}
// Display statistics
var statCollector = setInterval(function() {
var display = function(str) {
$('bitrate').innerHTML = str;
}
display("No stream");
if (pc2 && pc2.remoteStreams[0]) {
if (pc2.getStats) {
display('No stats callback');
pc2.getStats(function(stats) {
log('Raw stats ' + stats);
var statsString = '';
var results = stats.result();
log('Raw results ' + results);
for (var i = 0; i < results.length; ++i) {
var res = results[i];
log(i + ': ' + JSON.stringify(res));
statsString += '<h3>Report ';
statsString += i;
statsString += '</h3>';
if (res.local) {
statsString += "<p>Local ";
statsString += dumpStats(res.local);
}
if (res.remote) {
statsString += "<p>Remote ";
statsString += dumpStats(res.remote);
}
}
$('stats').innerHTML = statsString;
display('No bitrate stats');
});
} else {
display('No stats function. Use at least Chrome 24.0.1285');
}
} else {
log('Not connected yet');
}
// Collect some stats from the video tags.
local_video = $('local-video');
if (local_video) {
$('local-video-stats').innerHTML = local_video.videoWidth +
'x' + local_video.videoHeight;
}
remote_video = $('remote-video');
if (remote_video) {
$('remote-video-stats').innerHTML = remote_video.videoWidth +
'x' + remote_video.videoHeight;
}
}, 1000);
// Dumping a stats variable as a string.
// might be named toString?
function dumpStats(obj) {
var statsString = 'Timestamp:';
statsString += obj.timestamp;
if (obj.names) {
log('Have names function');
names = obj.names();
for (var i = 0; i < names.length; ++i) {
statsString += '<br>';
statsString += names[i];
statsString += ':';
statsString += obj.stat(names[i]);
}
} else {
log('No names function');
if (obj.stat('audioOutputLevel')) {
statsString += "audioOutputLevel: ";
statsString += obj.stat('audioOutputLevel');
statsString += "<br>";
}
}
return statsString;
}
// Utility to show the value of a field in a span called name+Display
function showValue(name, value) {
$(name + 'Display').innerHTML = value;
}
</script>
</head>
<body>
<h1>Constraints and Statistics</h1>
This page demonstrates how constraints and statistics can be used in WebRTC applications.
<p>
The form to the left sets constraints on the getUserMedia call.
When you hit "Capture!", the camera is (re)opened with these constraints.
<p>
The left picture is the local preview. The right picture shows the same
stream after being passed through the PeerConnection (locally).
<p>
Underneath the pictures you will see a running display of how many kbits/sec
the video feed uses for transmission.
<hr>
<table>
<tr>
<td align="top">
<h2>getUserMedia constraints</h2>
<table>
<tr><td><td>Min<td>Max
<tr><td>Horizontal
<td><input type="range" id="minwidth" min="0" max="1280" value="300"
onchange="showValue(this.id, this.value)">
<td><input type="range" id="maxwidth" min="0" max="1280" value="640"
onchange="showValue(this.id, this.value)">
<td><span id="minwidthDisplay">300</span>-<span id="maxwidthDisplay">640</span>
<tr><td>Vertical
<td><input type="range" id="minheight" min="0" max="1280" value="200"
onchange="showValue(this.id, this.value)">
<td><input type="range" id="maxheight" min="0" max="1280" value="480"
onchange="showValue(this.id, this.value)">
<td><span id="minheightDisplay">200</span>-<span id="maxheightDisplay">480</span>
<tr><td>
FrameRate
<td colspan=2><input type="range" id="frameRate" min="0" max="60" value="30"
onchange="showValue(this.id, this.value)">
<td><span id="frameRateDisplay">30</span>
</table>
<input type="submit" name="capture" value="Capture!" onclick="openCamera()">
</td>
<td align="top">
<h2>addStream constraints</h2>
Maximum bitrate
<input type="range" id="bandwidth" min="0" max="2000" value="1000"
onchange="showValue(this.id, this.value)">
<span id="bandwidthDisplay">1000</span>
<br>
<input type="submit" name="connect" value="Connect!" onclick="connect()">
</td>
</tr>
<tr>
<td>
<video id="local-video" autoplay width=400></video>
</td>
<td>
<video id="remote-video" autoplay width=400></video>
</td>
<tr>
<td><span id="local-video-stats"></span>
<td><span id="remote-video-stats"></span>
<br>
<span id="bitrate">Bitrate unknown</span>
</td>
</tr>
<tr>
<td><pre><span id="cameraConstraints"></span></pre>
<td><pre><span id="addStreamConstraints"></span></pre>
</table>
<h2>Statistics report display</h2>
<div id="stats">Stats will appear here.</div>
</body>
</html>
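
For reference, a sketch of the constraints object cameraConstraints() above builds when the sliders sit at their defaults (values are strings because they are read straight from the range inputs; gotStream and log are the page's own functions):

// Sketch: constraints produced by the form defaults
// (min 300x200, max 640x480, min 30 fps).
var constraints = {
  audio: true,
  video: {
    mandatory: {
      minWidth: "300",
      maxWidth: "640",
      minHeight: "200",
      maxHeight: "480",
      minFrameRate: "30"
    },
    optional: []
  }
};
navigator.webkitGetUserMedia(constraints, gotStream, function() {
  log("GetUserMedia failed");
});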

176
samples/js/demos/html/dc1.html Executable file

@@ -0,0 +1,176 @@
<!DOCTYPE html>
<html>
<head>
<title>Data Channel Demo 1</title>
<style>
button {
font: 18px sans-serif;
padding: 8px;
}
textarea {
font-family: monospace;
margin: 2px;
width: 480px;
height: 640px;
}
#left { position: absolute; left: 0; top: 0; width: 50%; }
#right { position: absolute; right: 0; top: 0; width: 50%; }
</style>
</head>
<body>
<div id="left">
<br>
<h2>Send data</h2>
<textarea id="dataChannelSend" rows="5" cols="15" disabled="true">
</textarea><br>
<button id="startButton" onclick="createConnection()">Start</button>
<button id="sendButton" onclick="sendData()">Send Data</button>
<button id="closeButton" onclick="closeDataChannels()">Stop Send Data
</button>
<br>
</div>
<div id="right">
<br>
<h2>Received Data</h2>
<textarea id="dataChannelReceive" rows="5" cols="15" disabled="true">
</textarea><br>
</div>
<script>
var pc1, pc2, sendChannel, receiveChannel;
startButton.disabled = false;
sendButton.disabled = true;
closeButton.disabled = true;
function trace(text) {
// This function is used for logging.
if (text[text.length - 1] == '\n') {
text = text.substring(0, text.length - 1);
}
console.log((performance.now() / 1000).toFixed(3) + ": " + text);
}
function createConnection() {
var servers = null;
pc1 = new webkitRTCPeerConnection(servers,
{optional: [{RtpDataChannels: true}]});
trace('Created local peer connection object pc1');
try {
// Reliable Data Channels not yet supported in Chrome
// Data Channel api supported from Chrome M25.
// You need to start chrome with --enable-data-channels flag.
sendChannel = pc1.createDataChannel("sendDataChannel",
{reliable: false});
trace('Created send data channel');
} catch (e) {
alert('Failed to create data channel. ' +
'You need Chrome M25 or later with --enable-data-channels flag');
trace('Create Data channel failed with exception: ' + e.message);
}
pc1.onicecandidate = iceCallback1;
sendChannel.onopen = onSendChannelStateChange;
sendChannel.onclose = onSendChannelStateChange;
pc2 = new webkitRTCPeerConnection(servers,
{optional: [{RtpDataChannels: true}]});
trace('Created remote peer connection object pc2');
pc2.onicecandidate = iceCallback2;
pc2.ondatachannel = receiveChannelCallback;
pc1.createOffer(gotDescription1);
startButton.disabled = true;
closeButton.disabled = false;
}
function sendData() {
var data = document.getElementById("dataChannelSend").value;
sendChannel.send(data);
trace('Sent Data: ' + data);
}
function closeDataChannels() {
trace('Closing data Channels');
sendChannel.close();
trace('Closed data channel with label: ' + sendChannel.label);
receiveChannel.close();
trace('Closed data channel with label: ' + receiveChannel.label);
pc1.close();
pc2.close();
pc1 = null;
pc2 = null;
trace('Closed peer connections');
startButton.disabled = false;
sendButton.disabled = true;
closeButton.disabled = true;
document.getElementById("dataChannelSend").value = "";
document.getElementById("dataChannelReceive").value = "";
document.getElementById("dataChannelSend").disabled = true;
}
function gotDescription1(desc) {
pc1.setLocalDescription(desc);
trace('Offer from pc1 \n' + desc.sdp);
pc2.setRemoteDescription(desc);
pc2.createAnswer(gotDescription2);
}
function gotDescription2(desc) {
pc2.setLocalDescription(desc);
trace('Answer from pc2 \n' + desc.sdp);
pc1.setRemoteDescription(desc);
}
function iceCallback1(event) {
trace('local ice callback');
if (event.candidate) {
pc2.addIceCandidate(event.candidate);
trace('Local ICE candidate: \n' + event.candidate.candidate);
}
}
function iceCallback2(event) {
trace('remote ice callback');
if (event.candidate) {
pc1.addIceCandidate(event.candidate);
trace('Remote ICE candidate: \n ' + event.candidate.candidate);
}
}
function receiveChannelCallback(event) {
trace('Receive Channel Callback');
receiveChannel = event.channel;
receiveChannel.onmessage = onReceiveMessageCallback;
receiveChannel.onopen = onReceiveChannelStateChange;
receiveChannel.onclose = onReceiveChannelStateChange;
}
function onReceiveMessageCallback(event) {
trace('Received Message');
document.getElementById("dataChannelReceive").value = event.data;
}
function onSendChannelStateChange() {
var readyState = sendChannel.readyState;
trace('Send channel state is: ' + readyState);
if (readyState == "open") {
document.getElementById("dataChannelSend").disabled = false;
sendButton.disabled = false;
closeButton.disabled = false;
} else {
document.getElementById("dataChannelSend").disabled = true;
sendButton.disabled = true;
closeButton.disabled = true;
}
}
function onReceiveChannelStateChange() {
var readyState = receiveChannel.readyState;
trace('Receive channel state is: ' + readyState);
}
</script>
</body>
</html>
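
Distilled from createConnection() above, a minimal sketch of just the data-channel wiring, using the same RtpDataChannels constraint the demo uses (names are illustrative; offer/answer and ICE exchange still proceed exactly as in the page):

// Sketch: bare data-channel setup between two in-page connections.
var pcA = new webkitRTCPeerConnection(null,
    {optional: [{RtpDataChannels: true}]});
var pcB = new webkitRTCPeerConnection(null,
    {optional: [{RtpDataChannels: true}]});
var channel = pcA.createDataChannel("sketchChannel", {reliable: false});
pcB.ondatachannel = function(event) {
  // The remote side receives the channel and listens for messages.
  event.channel.onmessage = function(e) { console.log("got: " + e.data); };
};
// onopen fires once the offer/answer and ICE exchange completes.
channel.onopen = function() { channel.send("hello"); };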


@@ -0,0 +1,151 @@
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<html>
<head>
<script type="text/javascript" src="../js/ccv.js"></script>
<script type="text/javascript" src="../js/face.js"></script>
<script src="/_ah/channel/jsapi"></script>
<style type="text/css">
* { margin:0; padding:0; } /* to remove the top and left whitespace */
html, body { width:100%; height:100%; } /* just to be sure these are full screen*/
body {font-family: 'Helvetica';background-color: #000000; }
a:link { color: #ffffff; } a:visited {color: #ffffff; }
#localCanvas {
display: block;
position: absolute;
width: 100%;
height: 100%;
}
#localVideo {
display: block;
position: absolute;
top: 0;
bottom: 0;
left: 0;
right: 0;
width: 100%;
height: 100%;
-webkit-transition-property: opacity;
-webkit-transition-duration: 2s;
opacity: 0;
}
#logo {
display: block;
top: 4px;
right: 4px;
position: absolute;
float: right;
/* opacity: 0.8; */
}
#credit {
display: block;
top: 28px;
right: 4px;
position:absolute;
float:right;
font-size:10px;
}
</style>
<title>WebRTC Face Reco Demo Application</title>
</head>
<body>
<script type="text/javascript">
var localVideo;
var localCanvas;
//var worker = new Worker('ccv.js');
initialize = function() {
localVideo = document.getElementById("localVideo");
localCanvas = document.getElementById("localCanvas");
getUserMedia();
}
getUserMedia = function() {
try {
navigator.webkitGetUserMedia({video:true,audio:true}, onGotStream, onFailedStream);
//trace("Requested access to local media");
} catch (e) {
alert("getUserMedia error " + e);
//trace_e(e, "getUserMedia error");
}
}
poll = function() {
var w = localVideo.videoWidth;
var h = localVideo.videoHeight;
var canvas = document.createElement('canvas');
canvas.width = w;
canvas.height = h;
var ctx = canvas.getContext('2d');
ctx.drawImage(localVideo, 0, 0, w, h);
var comp = ccv.detect_objects({ "canvas" : ccv.grayscale(canvas),
"cascade" : cascade,
"interval" : 5,
"min_neighbors" : 1 });
/* draw detected area */
//localCanvas.left = 400;
//localCanvas.top = localVideo.top;
/*localCanvas.right = localVideo.right;
localCanvas.bottom = localVideo.bottom;*/
localCanvas.width = localVideo.clientWidth;
localCanvas.height = localVideo.clientHeight;
var ctx2 = localCanvas.getContext('2d');
ctx2.lineWidth = 2;
ctx2.lineJoin = "round";
ctx2.clearRect (0, 0, localCanvas.width,localCanvas.height);
var x_offset = 0, y_offset = 0, x_scale = 1, y_scale = 1;
if (localVideo.clientWidth * localVideo.videoHeight > localVideo.videoWidth * localVideo.clientHeight) {
x_offset = (localVideo.clientWidth - localVideo.clientHeight * localVideo.videoWidth / localVideo.videoHeight) / 2;
} else {
y_offset = (localVideo.clientHeight - localVideo.clientWidth * localVideo.videoHeight / localVideo.videoWidth) / 2;
}
x_scale = (localVideo.clientWidth - x_offset * 2) / localVideo.videoWidth;
y_scale = (localVideo.clientHeight - y_offset * 2) / localVideo.videoHeight;
for (var i = 0; i < comp.length; i++) {
comp[i].x = comp[i].x * x_scale + x_offset;
comp[i].y = comp[i].y * y_scale + y_offset;
comp[i].width = comp[i].width * x_scale;
comp[i].height = comp[i].height * y_scale;
var opacity = 0.1;
if (comp[i].confidence > 0) {
opacity += comp[i].confidence / 10;
if (opacity > 1.0) opacity = 1.0;
}
//ctx2.strokeStyle = "rgba(255,0,0," + opacity * 255 + ")";
ctx2.lineWidth = opacity * 10;
ctx2.strokeStyle = "rgb(255,0,0)";
ctx2.strokeRect(comp[i].x, comp[i].y, comp[i].width, comp[i].height);
}
setTimeout(poll, 1000);
}
onGotStream = function(stream) {
var url = webkitURL.createObjectURL(stream);
localVideo.style.opacity = 1; localVideo.src = url;
localStream = stream;
//trace("User has granted access to local media. url = " + url);
setTimeout(poll, 2000);
}
onFailedStream = function(error) {
alert("Failed to get access to local media. Error code was " + error.code + ".");
//trace_warning("Failed to get access to local media. Error code was " + error.code);
}
setTimeout(initialize, 1);
</script>
<video id="localVideo" autoplay="autoplay"></video>
<canvas width="1000" height="1000" id="localCanvas"></canvas>
<a href="http://www.webrtc.org"><img id="logo" alt="WebRTC" src="../images/webrtc_black_20p.png"></a>
<a href="http://liuliu.me/eyes/javascript-face-detection-explained"><div id="credit">JS Face Detect by Liu Liu</div></a>
</body>
</html>
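
The offset/scale arithmetic inside poll() maps detection rectangles from video pixel space onto the letterboxed element. A standalone restatement as a sketch (the helper name is ours):

// Sketch: map a point from video space to the displayed, letterboxed
// element, exactly as poll() does above.
function videoToClient(video, x, y) {
  var xOffset = 0, yOffset = 0;
  // Element is wider than the video's aspect ratio: bars on the sides.
  if (video.clientWidth * video.videoHeight >
      video.videoWidth * video.clientHeight) {
    xOffset = (video.clientWidth -
        video.clientHeight * video.videoWidth / video.videoHeight) / 2;
  } else {
    // Otherwise: bars on top and bottom.
    yOffset = (video.clientHeight -
        video.clientWidth * video.videoHeight / video.videoWidth) / 2;
  }
  var xScale = (video.clientWidth - xOffset * 2) / video.videoWidth;
  var yScale = (video.clientHeight - yOffset * 2) / video.videoHeight;
  return { x: x * xScale + xOffset, y: y * yScale + yOffset };
}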


@@ -0,0 +1,33 @@
<!DOCTYPE html>
<html>
<head>
<title>getUserMedia Demo 1</title>
<style>
video {
border:5px solid black;
width:480px;
height:360px;
}
button {
font: 18px sans-serif;
padding: 8px;
}
</style>
</head>
<body>
<video id="vid" autoplay="true"></video>
<br>
<button id="btn" onclick="start()">Start</button>
<script>
video = document.getElementById("vid");
function start() {
navigator.webkitGetUserMedia({video:true}, gotStream, function() {});
btn.disabled = true;
}
function gotStream(stream) {
video.src = webkitURL.createObjectURL(stream);
}
</script>
</body>
</html>


@@ -0,0 +1,48 @@
<!DOCTYPE html>
<html>
<head>
<title>getUserMedia Demo 2</title>
<style>
video {
border:5px solid black;
width:480px;
height:360px;
}
canvas {
border:5px solid black;
width:480px;
height:360px;
}
button {
font: 18px sans-serif;
padding: 8px;
}
</style>
</head>
<body>
<video id="vid" autoplay="true"></video>
<canvas id="cvs"></canvas>
<br>
<button id="btn1" onclick="start()">Start</button>
<button id="btn2" onclick="snap()">Snapshot</button>
<script>
video = document.getElementById("vid");
canvas = document.getElementById("cvs");
canvas.width = 480;
canvas.height = 360;
btn2.disabled = true;
function start() {
navigator.webkitGetUserMedia({video:true}, gotStream, function() {});
btn1.disabled = true;
}
function gotStream(stream) {
video.src = webkitURL.createObjectURL(stream);
btn2.disabled = false;
}
function snap() {
canvas.getContext("2d").drawImage(video, 0, 0, canvas.width, canvas.height);
}
</script>
</body>
</html>
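
As a follow-on to snap(), a sketch of exporting the captured frame: the canvas contents can be serialized with the standard toDataURL() call (the appended <img> sink here is purely illustrative):

// Sketch: turn the snapshot into a PNG data URL.
function snapToImage() {
  canvas.getContext("2d").drawImage(video, 0, 0, canvas.width, canvas.height);
  var dataUrl = canvas.toDataURL("image/png");
  var img = document.createElement("img"); // hypothetical sink element
  img.src = dataUrl;
  document.body.appendChild(img);
}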


@@ -0,0 +1,74 @@
<!DOCTYPE html>
<html>
<head>
<title>getUserMedia Demo 3</title>
<style>
video {
border:5px solid black;
width:480px;
height:360px;
}
canvas {
border:5px solid black;
width:480px;
height:360px;
}
button {
font: 18px sans-serif;
padding: 8px;
}
.grayscale {
-webkit-filter: grayscale(1);
}
.sepia {
-webkit-filter: sepia(1);
}
.invert {
-webkit-filter: invert(1);
}
.blur {
-webkit-filter: blur(3px);
}
</style>
</head>
<body>
<video id="vid" autoplay="true"></video>
<canvas id="cvs"></canvas>
<br>
<button id="btn1" onclick="start()">Start</button>
<button id="btn2" onclick="change()">Change Filter</button>
<button id="btn3" onclick="snap()">Snapshot</button>
<script>
filters = ["", "sepia", "invert", "blur", "grayscale"];
findex = 0;
video = document.getElementById("vid");
canvas = document.getElementById("cvs");
canvas.width = 480;
canvas.height = 360;
btn2.disabled = true;
btn3.disabled = true;
function start() {
navigator.webkitGetUserMedia({video:true}, gotStream, function() {});
btn1.disabled = true;
}
function gotStream(stream) {
video.src = webkitURL.createObjectURL(stream);
btn2.disabled = false;
btn3.disabled = false;
}
function change() {
video.className = '';
findex = (findex + 1) % filters.length;
if (findex != 0)
video.classList.add(filters[findex]);
}
function snap() {
canvas.className = '';
if (findex != 0)
canvas.classList.add(filters[findex]);
canvas.getContext("2d").drawImage(video, 0, 0, canvas.width, canvas.height);
}
</script>
</body>
</html>


@@ -0,0 +1,91 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>Local Audio Rendering Demo</title>
<script type="text/javascript" src="../../base/adapter.js"></script>
<script>
var audioElement;
var buttonStart;
var buttonStop;
var localStream;
$ = function(id) {
return document.getElementById(id);
};
function start() {
var constraints = {audio:true, video:false};
getUserMedia(constraints, gotStream, gotStreamFailed);
buttonStart.disabled = true;
buttonStop.disabled = false;
}
function stop() {
buttonStart.disabled = false;
buttonStop.disabled = true;
localStream.stop();
}
function gotStream(stream) {
videoTracks = stream.getVideoTracks();
audioTracks = stream.getAudioTracks();
if (audioTracks.length == 1 && videoTracks.length == 0) {
console.log('gotStream({audio:true, video:false})');
console.log('Using audio device: ' + audioTracks[0].label);
attachMediaStream(audioElement, stream);
// The audio will be muted by default from start.
// Unmute and set volume to max level so we can listen to audio in
// loopback. We restore the volume in a 'play' event to ensure that
// loading has been done (auto-mute is performed during load).
audioElement.addEventListener('play', function() {
audioElement.muted = false;
audioElement.volume = 1;
console.log('Unmuting and setting volume to max level');
}, false);
stream.onended = function() {
console.log('stream.onended');
buttonStart.disabled = false;
buttonStop.disabled = true;
};
localStream = stream;
} else {
alert('The media stream contains an invalid amount of audio tracks.');
stream.stop();
}
}
function gotStreamFailed(error) {
buttonStart.disabled = false;
buttonStop.disabled = true;
alert('Failed to get access to local media. Error code: ' + error.code);
}
function onload() {
audioElement = $('audio');
buttonStart = $('start');
buttonStop = $('stop');
buttonStart.disabled = false;
buttonStop.disabled = true;
}
</script>
</head>
<body onload="onload()">
<h2>Rendering of a local media stream using &lt;audio&gt;</h2>
<p>Demonstrates usage of a local media stream connected to an HTML5 audio tag.<br>
Press Start, select a microphone and listen to your own voice in loopback.</p>
<style>
button {
font: 14px sans-serif;
padding: 8px;
}
</style>
<audio id="audio" autoplay="autoplay" controls="controls"></audio><br><br>
<button id="start" onclick="start()">Start</button>
<button id="stop" onclick="stop()">Stop</button>
</body>
</html>


@@ -0,0 +1,187 @@
<!DOCTYPE html>
<html>
<head>
<title>PeerConnection Demo 1</title>
<!-- Load the polyfill to switch-hit between Chrome and Firefox -->
<script src="../../base/adapter.js"></script>
<style>
video {
border:5px solid black;
width:480px;
height:360px;
}
button {
font: 18px sans-serif;
padding: 8px;
}
textarea {
font-family: monospace;
margin: 2px;
width:480px;
height:640px;
}
</style>
</head>
<body>
<video id="vid1" autoplay></video>
<video id="vid2" autoplay></video>
<video id="vid3" autoplay></video>
<br>
<button id="btn1" onclick="start()">Start</button>
<button id="btn2" onclick="call()">Call</button>
<button id="btn3" onclick="hangup()">Hang Up</button>
<br>
<script>
//var vid1 = document.getElementById("vid1");
//var vid2 = document.getElementById("vid2");
btn1.disabled = false;
btn2.disabled = true;
btn3.disabled = true;
var pc1_local, pc1_remote;
var pc2_local, pc2_remote;
var localstream;
var sdpConstraints = {'mandatory': {
'OfferToReceiveAudio':true,
'OfferToReceiveVideo':true }};
function trace(text) {
// This function is used for logging.
if (text[text.length - 1] == '\n') {
text = text.substring(0, text.length - 1);
}
console.log((performance.now() / 1000).toFixed(3) + ": " + text);
}
function gotStream(stream){
trace("Received local stream");
// Call the polyfill wrapper to attach the media stream to this element.
attachMediaStream(vid1, stream);
localstream = stream;
btn2.disabled = false;
}
function start() {
trace("Requesting local stream");
btn1.disabled = true;
// Call into getUserMedia via the polyfill (adapter.js).
getUserMedia({audio:true, video:true},
gotStream, function() {});
}
function call() {
btn2.disabled = true;
btn3.disabled = false;
trace("Starting calls");
videoTracks = localstream.getVideoTracks();
audioTracks = localstream.getAudioTracks();
if (videoTracks.length > 0)
trace("Using Video device: " + videoTracks[0].label);
if (audioTracks.length > 0)
trace("Using Audio device: " + audioTracks[0].label);
// Create an RTCPeerConnection via the polyfill (adapter.js).
var servers = null;
pc1_local = new RTCPeerConnection(servers);
pc1_remote = new RTCPeerConnection(servers);
pc1_remote.onaddstream = gotRemoteStream1;
pc1_local.onicecandidate = iceCallback1Local;
pc1_remote.onicecandidate = iceCallback1Remote;
trace("PC1: created local and remote peer connection objects");
pc2_local = new RTCPeerConnection(servers);
pc2_remote = new RTCPeerConnection(servers);
pc2_remote.onaddstream = gotRemoteStream2;
pc2_local.onicecandidate = iceCallback2Local;
pc2_remote.onicecandidate = iceCallback2Remote;
trace("PC2: created local and remote peer connection objects");
pc1_local.addStream(localstream);
trace("Adding local stream to pc1_local");
pc1_local.createOffer(gotDescription1Local);
pc2_local.addStream(localstream);
trace("Adding local stream to pc2_local");
pc2_local.createOffer(gotDescription2Local);
}
function gotDescription1Local(desc) {
pc1_local.setLocalDescription(desc);
trace("Offer from pc1_local \n" + desc.sdp);
pc1_remote.setRemoteDescription(desc);
// Since the "remote" side has no media stream we need
// to pass in the right constraints in order for it to
// accept the incoming offer of audio and video.
pc1_remote.createAnswer(gotDescription1Remote, null, sdpConstraints);
}
function gotDescription1Remote(desc) {
pc1_remote.setLocalDescription(desc);
trace("Answer from pc1_remote \n" + desc.sdp);
pc1_local.setRemoteDescription(desc);
}
function gotDescription2Local(desc) {
pc2_local.setLocalDescription(desc);
trace("Offer from pc2_local \n" + desc.sdp);
pc2_remote.setRemoteDescription(desc);
// Since the "remote" side has no media stream we need
// to pass in the right constraints in order for it to
// accept the incoming offer of audio and video.
pc2_remote.createAnswer(gotDescription2Remote, null, sdpConstraints);
}
function gotDescription2Remote(desc) {
pc2_remote.setLocalDescription(desc);
trace("Answer from pc2_remote \n" + desc.sdp);
pc2_local.setRemoteDescription(desc);
}
function hangup() {
trace("Ending calls");
pc1_local.close();
pc1_remote.close();
pc2_local.close();
pc2_remote.close();
pc1_local = pc1_remote = null;
pc2_local = pc2_remote = null;
btn3.disabled = true;
btn2.disabled = false;
}
function gotRemoteStream1(e) {
vid2.src = webkitURL.createObjectURL(e.stream);
trace("PC1: Received remote stream");
}
function gotRemoteStream2(e) {
vid3.src = webkitURL.createObjectURL(e.stream);
trace("PC2: Received remote stream");
}
function iceCallback1Local(event) {
handleCandidate(event.candidate, pc1_remote, "PC1: ", "local");
}
function iceCallback1Remote(event) {
handleCandidate(event.candidate, pc1_local, "PC1: ", "remote");
}
function iceCallback2Local(event) {
handleCandidate(event.candidate, pc2_remote, "PC2: ", "local");
}
function iceCallback2Remote(event) {
handleCandidate(event.candidate, pc2_local, "PC2: ", "remote");
}
function handleCandidate(candidate, dest, prefix, type) {
if (candidate) {
dest.addIceCandidate(new RTCIceCandidate(candidate));
trace(prefix + "New " + type + " ICE candidate: " + candidate.candidate);
}
}
</script>
</body>
</html>


@@ -0,0 +1,133 @@
<!DOCTYPE html>
<html>
<head>
<title>PeerConnection Demo 1</title>
<style>
video {
border:5px solid black;
width:480px;
height:360px;
}
button {
font: 18px sans-serif;
padding: 8px;
}
textarea {
font-family: monospace;
margin: 2px;
width:480px;
height:640px;
}
</style>
</head>
<body>
<video id="vid1" autoplay></video>
<video id="vid2" autoplay></video>
<br>
<button id="btn1" onclick="start()">Start</button>
<button id="btn2" onclick="call()">Call</button>
<button id="btn3" onclick="hangup()">Hang Up</button>
<br>
<xtextarea id="ta1"></textarea>
<xtextarea id="ta2"></textarea>
<script>
//var vid1 = document.getElementById("vid1");
//var vid2 = document.getElementById("vid2");
btn1.disabled = false;
btn2.disabled = true;
btn3.disabled = true;
var pc1,pc2;
var localstream;
function trace(text) {
// This function is used for logging.
if (text[text.length - 1] == '\n') {
text = text.substring(0, text.length - 1);
}
console.log((performance.now() / 1000).toFixed(3) + ": " + text);
}
function gotStream(stream){
trace("Received local stream");
vid1.src = webkitURL.createObjectURL(stream);
localstream = stream;
btn2.disabled = false;
}
function start() {
trace("Requesting local stream");
btn1.disabled = true;
navigator.webkitGetUserMedia({audio:true, video:true},
gotStream, function() {});
}
function call() {
btn2.disabled = true;
btn3.disabled = false;
trace("Starting call");
if (localstream.videoTracks.length > 0)
trace('Using Video device: ' + localstream.videoTracks[0].label);
if (localstream.audioTracks.length > 0)
trace('Using Audio device: ' + localstream.audioTracks[0].label);
pc1 = new webkitPeerConnection00(null, iceCallback1);
trace("Created local peer connection object pc1");
pc2 = new webkitPeerConnection00(null, iceCallback2);
trace("Created remote peer connection object pc2");
pc2.onaddstream = gotRemoteStream;
pc1.addStream(localstream);
trace("Adding Local Stream to peer connection");
var offer = pc1.createOffer(null);
trace("Created offer:\n" + offer.toSdp());
pc1.setLocalDescription(pc1.SDP_OFFER, offer);
trace("SetLocalDesc1");
pc2.setRemoteDescription(pc2.SDP_OFFER, offer);
trace("SetRemoteDesc2");
//ta1.value = offer.toSdp();
var answer = pc2.createAnswer(offer.toSdp(),
{has_audio:true, has_video:true});
trace("Created answer:\n" + answer.toSdp());
pc2.setLocalDescription(pc2.SDP_ANSWER, answer);
trace("SetLocalDesc2");
pc1.setRemoteDescription(pc1.SDP_ANSWER, answer);
trace("SetRemoteDesc1");
//ta2.value = answer.toSdp();
pc1.startIce();
pc2.startIce();
trace("Started ICE for both local & remote");
}
function hangup() {
trace("Ending call");
pc1.close();
pc2.close();
pc1 = null;
pc2 = null;
btn3.disabled = true;
btn2.disabled = false;
}
function gotRemoteStream(e){
vid2.src = webkitURL.createObjectURL(e.stream);
trace("Received remote stream");
}
function iceCallback1(candidate,bMore){
if (candidate) {
pc2.processIceMessage(candidate);
trace("Local ICE candidate: " + candidate.toSdp());
}
}
function iceCallback2(candidate,bMore){
if (candidate) {
pc1.processIceMessage(candidate);
trace("Remote ICE candidate: " + candidate.toSdp());
}
}
</script>
</body>
</html>


@@ -0,0 +1,143 @@
<!DOCTYPE html>
<html>
<head>
<title>PeerConnection Demo 1</title>
<!-- Load the polyfill to switch-hit between Chrome and Firefox -->
<script src="../../base/adapter.js"></script>
<style>
video {
border:5px solid black;
width:480px;
height:360px;
}
button {
font: 18px sans-serif;
padding: 8px;
}
textarea {
font-family: monospace;
margin: 2px;
width:480px;
height:640px;
}
</style>
</head>
<body>
<video id="vid1" autoplay></video>
<video id="vid2" autoplay></video>
<br>
<button id="btn1" onclick="start()">Start</button>
<button id="btn2" onclick="call()">Call</button>
<button id="btn3" onclick="hangup()">Hang Up</button>
<br>
<xtextarea id="ta1"></textarea>
<xtextarea id="ta2"></textarea>
<script>
//var vid1 = document.getElementById("vid1");
//var vid2 = document.getElementById("vid2");
btn1.disabled = false;
btn2.disabled = true;
btn3.disabled = true;
var pc1,pc2;
var localstream;
var sdpConstraints = {'mandatory': {
'OfferToReceiveAudio':true,
'OfferToReceiveVideo':true }};
function trace(text) {
// This function is used for logging.
if (text[text.length - 1] == '\n') {
text = text.substring(0, text.length - 1);
}
console.log((performance.now() / 1000).toFixed(3) + ": " + text);
}
function gotStream(stream){
trace("Received local stream");
// Call the polyfill wrapper to attach the media stream to this element.
attachMediaStream(vid1, stream);
localstream = stream;
btn2.disabled = false;
}
function start() {
trace("Requesting local stream");
btn1.disabled = true;
// Call into getUserMedia via the polyfill (adapter.js).
getUserMedia({audio:true, video:true},
gotStream, function() {});
}
function call() {
btn2.disabled = true;
btn3.disabled = false;
trace("Starting call");
videoTracks = localstream.getVideoTracks();
audioTracks = localstream.getAudioTracks();
if (videoTracks.length > 0)
trace('Using Video device: ' + videoTracks[0].label);
if (audioTracks.length > 0)
trace('Using Audio device: ' + audioTracks[0].label);
var servers = null;
pc1 = new RTCPeerConnection(servers);
trace("Created local peer connection object pc1");
pc1.onicecandidate = iceCallback1;
pc2 = new RTCPeerConnection(servers);
trace("Created remote peer connection object pc2");
pc2.onicecandidate = iceCallback2;
pc2.onaddstream = gotRemoteStream;
pc1.addStream(localstream);
trace("Adding Local Stream to peer connection");
pc1.createOffer(gotDescription1);
}
function gotDescription1(desc){
pc1.setLocalDescription(desc);
trace("Offer from pc1 \n" + desc.sdp);
pc2.setRemoteDescription(desc);
// Since the "remote" side has no media stream we need
// to pass in the right constraints in order for it to
// accept the incoming offer of audio and video.
pc2.createAnswer(gotDescription2, null, sdpConstraints);
}
function gotDescription2(desc){
pc2.setLocalDescription(desc);
trace("Answer from pc2 \n" + desc.sdp);
pc1.setRemoteDescription(desc);
}
function hangup() {
trace("Ending call");
pc1.close();
pc2.close();
pc1 = null;
pc2 = null;
btn3.disabled = true;
btn2.disabled = false;
}
function gotRemoteStream(e){
vid2.src = webkitURL.createObjectURL(e.stream);
trace("Received remote stream");
}
function iceCallback1(event){
if (event.candidate) {
pc2.addIceCandidate(new RTCIceCandidate(event.candidate));
trace("Local ICE candidate: \n" + event.candidate.candidate);
}
}
function iceCallback2(event){
if (event.candidate) {
pc1.addIceCandidate(new RTCIceCandidate(event.candidate));
trace("Remote ICE candidate: \n " + event.candidate.candidate);
}
}
</script>
</body>
</html>
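
For reference, the signaling sequence in this page, compressed into a sketch (no new API, just the order of the calls above):

// Sketch: the offer/answer dance between the two in-page connections.
// 1. pc1.createOffer(gotDescription1)
// 2. pc1.setLocalDescription(offer); pc2.setRemoteDescription(offer)
// 3. pc2.createAnswer(gotDescription2, null, sdpConstraints)
// 4. pc2.setLocalDescription(answer); pc1.setRemoteDescription(answer)
// 5. onicecandidate on each side feeds addIceCandidate on the other.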


@@ -0,0 +1,125 @@
<!DOCTYPE html>
<html>
<head>
<title>PeerConnection PRANSWER Demo</title>
<style>
video {
border:5px solid black;
width:320px;
height:240px;
}
</style>
</head>
<body>
<video id="vid1" autoplay></video>
<video id="vid2" autoplay></video>
<br>
<button id="btn1" onclick="start()">Call</button>
<button id="btn15" onclick="accept()">Accept</button>
<button id="btn2" onclick="stop()">Hang Up</button>
<script>
//var vid1 = document.getElementById("vid1");
//var vid2 = document.getElementById("vid2");
btn1.disabled = true;
btn2.disabled = true;
var pc1,pc2;
var localstream;
function trace(txt) {
// This function is used for logging.
console.log(txt);
}
function traceCandidate(kind, cand) {
trace("Candidate(" + kind + "): " + cand.label + ": " +
cand.toSdp().replace("\n", ""));
}
function gotStream(stream){
trace("Received local stream");
vid1.src = webkitURL.createObjectURL(stream);
localstream = stream;
btn1.disabled = false;
}
navigator.webkitGetUserMedia({audio:true, video:true}, gotStream, function() {});
function start() {
btn1.disabled = true;
btn2.disabled = false;
trace("Starting Call");
if (localstream.videoTracks.length > 0)
trace('Using Video device: ' + localstream.videoTracks[0].label); // Prints audio & video device names
if (localstream.audioTracks.length > 0)
trace('Using Audio device: ' + localstream.audioTracks[0].label);
pc1 = new webkitPeerConnection00(null,iceCallback1);
trace("Created local peer connection object pc1");
pc2 = new webkitPeerConnection00(null,iceCallback2);
trace("Created remote peer connection object pc2");
pc2.onaddstream = gotRemoteStream;
pc1.addStream(localstream);
trace("Adding Local Stream to peer connection");
var offer = pc1.createOffer(null);
trace("Created offer:\n" + offer.toSdp());
pc1.setLocalDescription(pc1.SDP_OFFER, offer);
trace("SetLocalDesc1");
pc2.setRemoteDescription(pc2.SDP_OFFER, offer);
trace("SetRemoteDesc2");
var answer = pc2.createAnswer(offer.toSdp(), {has_audio:true, has_video:true});
var sdp = answer.toSdp();
sdp = sdp.replace(/a=sendrecv/g, "a=inactive");
answer = new SessionDescription(sdp);
trace("Created answer:\n" + answer.toSdp());
pc2.setLocalDescription(pc2.SDP_PRANSWER, answer);
trace("SetLocalDesc2");
pc1.setRemoteDescription(pc1.SDP_PRANSWER, answer);
trace("SetRemoteDesc1");
pc1.startIce(); // Start finding local ice candidates. Once it finds candidates it will call icecallback
pc2.startIce(); //Starts finding remote ice candidates. Once it finds candidates it will call iceCallback2
trace("Start ICE for both local & remote");
}
function accept() {
var sdp = pc1.remoteDescription.toSdp();
sdp = sdp.replace(/a=inactive/g, "a=sendrecv");
var answer = new SessionDescription(sdp);
pc2.setLocalDescription(pc2.SDP_ANSWER, answer);
pc1.setRemoteDescription(pc1.SDP_ANSWER, answer);
trace("Set final answer:" + sdp);
}
function stop() {
trace("Ending Call" + "\n\n");
pc1.close();
pc2.close();
pc1=null;
pc2=null;
btn2.disabled = true;
btn1.disabled = false;
}
function gotRemoteStream(e){
vid2.src = webkitURL.createObjectURL(e.stream);
trace("Received Remote Stream");
}
function iceCallback1(candidate,bMore){
if (candidate) {
pc2.processIceMessage(candidate);
traceCandidate("local", candidate);
}
}
function iceCallback2(candidate,bMore){
if (candidate) {
pc1.processIceMessage(candidate);
traceCandidate("remote", candidate);
}
}
</script>
</body>
</html>
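
The PRANSWER flow above boils down to a direction munge on the SDP. A minimal sketch of that step as a standalone helper (the function name is ours):

// Sketch: flip media direction in an SDP blob, as start()/accept() do.
function setDirection(sdp, from, to) {
  // start() holds media with setDirection(sdp, "a=sendrecv", "a=inactive");
  // accept() resumes it with setDirection(sdp, "a=inactive", "a=sendrecv").
  return sdp.replace(new RegExp(from, "g"), to);
}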


@@ -0,0 +1,142 @@
<!DOCTYPE html>
<html>
<head>
<title>PeerConnection Rehydration Demo</title>
<style>
video {
border:5px solid black;
width:320px;
height:240px;
}
</style>
</head>
<body>
<video id="vid1" autoplay></video>
<video id="vid2" autoplay></video>
<br>
<button id="btn1" onclick="start()">Start</button>
<button id="btn2" onclick="call()">Call</button>
<button id="btn3" onclick="rehydrate()">Rehydrate</button>
<button id="btn4" onclick="stop()">Hang Up</button>
<script>
//var vid1 = document.getElementById("vid1");
//var vid2 = document.getElementById("vid2");
btn2.disabled = true;
btn3.disabled = true;
btn4.disabled = true;
var pc1,pc2;
var localstream;
function trace(txt) {
// This function is used for logging.
console.log(txt);
}
function start() {
btn1.disabled = true;
navigator.webkitGetUserMedia({audio:true, video:true}, gotStream, function() {});
}
function gotStream(stream){
trace("Received local stream");
vid1.src = webkitURL.createObjectURL(stream);
localstream = stream;
btn2.disabled = false;
}
function call() {
btn2.disabled = true;
btn3.disabled = false;
btn4.disabled = false;
trace("Starting Call");
if (localstream.videoTracks.length > 0)
trace('Using Video device: ' + localstream.videoTracks[0].label); // Prints audio & video device names
if (localstream.audioTracks.length > 0)
trace('Using Audio device: ' + localstream.audioTracks[0].label);
pc1 = new webkitPeerConnection00(null,iceCallback1);
trace("Created local peer connection object pc1");
pc2 = new webkitPeerConnection00(null,iceCallback2);
trace("Created remote peer connection object pc2");
pc2.onaddstream = gotRemoteStream;
pc1.addStream(localstream);
trace("Adding Local Stream to peer connection");
var offer = pc1.createOffer(null);
trace("Created offer");
pc1.setLocalDescription(pc1.SDP_OFFER, offer);
trace("SetLocalDesc1");
pc2.setRemoteDescription(pc2.SDP_OFFER, offer);
trace("SetRemoteDesc2");
var answer = pc2.createAnswer(offer.toSdp(), {has_audio:true, has_video:true});
trace("CreatedAnswer");
pc2.setLocalDescription(pc2.SDP_ANSWER, answer);
trace("SetLocalDesc2");
pc1.setRemoteDescription(pc1.SDP_ANSWER, answer);
trace("SetRemoteDesc1");
pc1.startIce(); // Start finding local ice candidates. Once it finds candidates it will call icecallback
pc2.startIce(); //Starts finding remote ice candidates. Once it finds candidates it will call iceCallback2
trace("Start ICE for both local & remote");
}
function rehydrate() {
var oldLocal = pc2.localDescription;
// need to munge a=crypto
pc2 = null;
trace("Destroyed remote peer connection object pc2");
pc2 = new webkitPeerConnection00(null, iceCallback3);
trace("Created new remote peer connection object pc2");
pc2.onaddstream = gotRemoteStream;
pc2.setLocalDescription(pc2.SDP_OFFER, oldLocal);
pc1.setRemoteDescription(pc1.SDP_OFFER, oldLocal);
var answer = pc1.createAnswer(oldLocal.toSdp(), {has_audio:true, has_video:true});
pc1.setLocalDescription(pc1.SDP_ANSWER, answer);
pc2.setRemoteDescription(pc2.SDP_ANSWER, answer);
pc2.startIce();
trace("Inited new remote peer connection object pc2");
}
function stop() {
trace("Ending Call" + "\n\n");
pc1.close();
pc2.close();
pc1=null;
pc2=null;
btn2.disabled = false;
btn3.disabled = true;
btn4.disabled = true;
}
function gotRemoteStream(e){
vid2.src = webkitURL.createObjectURL(e.stream);
trace("Received Remote Stream");
}
function iceCallback1(candidate,bMore){
if (candidate) {
pc2.processIceMessage(candidate);
trace("Local ice candidate: " + candidate.toSdp());
}
}
function iceCallback2(candidate,bMore){
if (candidate) {
pc1.processIceMessage(candidate);
trace("Remote ice candidate: " + candidate.toSdp());
}
}
function iceCallback3(candidate,bMore){
if (candidate) {
var str = candidate.toSdp();
str = str.replace("generation 0", "generation 1");
var mungedCandidate = new IceCandidate(candidate.label, str);
trace("Remote ice candidate: " + mungedCandidate.toSdp());
pc1.processIceMessage(mungedCandidate);
}
}
</script>
</body>
</html>

Binary file not shown.



@@ -0,0 +1,76 @@
<html>
<head>
<title>WebRTC Samples</title>
</head>
<body>
<h1>WebRTC Samples</h1>
<p>
Here are some sample pages that demonstrate basic <a href="http://www.webrtc.org">WebRTC</a> concepts. If you are new to WebRTC, you may want to check out this <a href="http://www.html5rocks.com/en/tutorials/webrtc/basics/">WebRTC overview</a> first.</p>
<table border="0" cellpadding="1" cellspacing="1" style="width: 100%;">
<thead>
<tr>
<td colspan="2" scope="col" style="background-color: rgb(0, 204, 255);">
<b>getUserMedia Samples</b></td>
</tr>
</thead>
<tbody>
<tr>
<td>
<a href="html/gum1.html">gum1.html</a></td>
<td>
Shows how to access the webcam and display the local video in a &lt;video/&gt; element.</td>
</tr>
<tr>
<td>
<a href="html/gum2.html">gum2.html</a></td>
<td>
Shows how to capture the current frame of video to a &lt;canvas/&gt;.</td>
</tr>
<tr>
<td>
<a href="html/gum3.html">gum3.html</a></td>
<td>
Shows how to apply CSS filters to a &lt;video/&gt; and &lt;canvas/&gt;</td>
</tr>
<tr>
<td>
<a href="html/face.html">face.html</a></td>
<td>
Shows how to perform face tracking using webcam video.</td>
</tr>
<tr><td>&nbsp; </td> <td>&nbsp; </td></tr>
<tr>
<td colspan="2" scope="col" style="background-color: rgb(0, 204, 255);">
<b>PeerConnection Samples</b></td>
</tr>
<tr>
<td>
<a href="html/pc1.html">pc1.html</a></td>
<td>
Shows how to set up a simple 1:1 audio/video call.</td>
</tr>
<tr>
<td>
<a href="html/pc1-deprecated.html">pc1-deprecated.html</a></td>
<td>
Like pc1.html, but uses PeerConnection00 instead of RTCPeerConnection.</td>
</tr>
<tr>
<td>
<a href="html/multiple.html">multiple.html</a></td>
<td>
Shows how to set up multiple PeerConnections.</td>
</tr>
<tr>
<td>
<a href="html/constraints-and-stats.html">constraints-and-stats.html</a></td>
<td>
Shows how to pass constraints into the PeerConnection API, and query it for statistics.</td>
</tr>
</tbody>
</table>
<p>
&nbsp;</p></body>
</html>


@@ -0,0 +1,12 @@
indexes:
# AUTOGENERATED
# This index.yaml is automatically updated whenever the dev_appserver
# detects that a new type of query is run. If you want to manage the
# index.yaml file manually, remove the above marker line (the line
# saying "# AUTOGENERATED"). If you want to manage some indexes
# manually, move them above the marker line. The index.yaml file is
# automatically uploaded to the admin console when you next deploy
# your application using appcfg.py.

460
samples/js/demos/js/ccv.js Normal file

@@ -0,0 +1,460 @@
if (parallable === undefined) {
var parallable = function (file, funct) {
parallable.core[funct.toString()] = funct().core;
return function () {
var i;
var async, worker_num, params;
if (arguments.length > 1) {
async = arguments[arguments.length - 2];
worker_num = arguments[arguments.length - 1];
params = new Array(arguments.length - 2);
for (i = 0; i < arguments.length - 2; i++)
params[i] = arguments[i];
} else {
async = arguments[0].async;
worker_num = arguments[0].worker;
params = arguments[0];
delete params["async"];
delete params["worker"];
params = [params];
}
var scope = { "shared" : {} };
var ctrl = funct.apply(scope, params);
if (async) {
return function (complete, error) {
var executed = 0;
var outputs = new Array(worker_num);
var inputs = ctrl.pre.apply(scope, [worker_num]);
/* sanitize scope shared because for Chrome/WebKit, worker only support JSONable data */
for (i in scope.shared)
/* delete function, if any */
if (typeof scope.shared[i] == "function")
delete scope.shared[i];
/* delete DOM object, if any */
else if (scope.shared[i].tagName !== undefined)
delete scope.shared[i];
for (i = 0; i < worker_num; i++) {
var worker = new Worker(file);
worker.onmessage = (function (i) {
return function (event) {
outputs[i] = (typeof event.data == "string") ? JSON.parse(event.data) : event.data;
executed++;
if (executed == worker_num)
complete(ctrl.post.apply(scope, [outputs]));
}
})(i);
var msg = { "input" : inputs[i],
"name" : funct.toString(),
"shared" : scope.shared,
"id" : i,
"worker" : params.worker_num };
try {
worker.postMessage(msg);
} catch (e) {
worker.postMessage(JSON.stringify(msg));
}
}
}
} else {
return ctrl.post.apply(scope, [[ctrl.core.apply(scope, [ctrl.pre.apply(scope, [1])[0], 0, 1])]]);
}
}
};
parallable.core = {};
}
function get_named_arguments(params, names) {
if (params.length > 1) {
var new_params = {};
for (var i = 0; i < names.length; i++)
new_params[names[i]] = params[i];
return new_params;
} else if (params.length == 1) {
return params[0];
} else {
return {};
}
}
var ccv = {
pre : function (image) {
if (image.tagName.toLowerCase() == "img") {
var canvas = document.createElement("canvas");
document.body.appendChild(image);
canvas.width = image.offsetWidth;
canvas.style.width = image.offsetWidth.toString() + "px";
canvas.height = image.offsetHeight;
canvas.style.height = image.offsetHeight.toString() + "px";
document.body.removeChild(image);
var ctx = canvas.getContext("2d");
ctx.drawImage(image, 0, 0);
return canvas;
}
return image;
},
grayscale : function (canvas) {
var ctx = canvas.getContext("2d");
var imageData = ctx.getImageData(0, 0, canvas.width, canvas.height);
var data = imageData.data;
var pix1, pix2, pix = canvas.width * canvas.height * 4;
while (pix > 0)
data[pix -= 4] = data[pix1 = pix + 1] = data[pix2 = pix + 2] = (data[pix] * 0.3 + data[pix1] * 0.59 + data[pix2] * 0.11);
ctx.putImageData(imageData, 0, 0);
return canvas;
},
array_group : function (seq, gfunc) {
var i, j;
var node = new Array(seq.length);
for (i = 0; i < seq.length; i++)
node[i] = {"parent" : -1,
"element" : seq[i],
"rank" : 0};
for (i = 0; i < seq.length; i++) {
if (!node[i].element)
continue;
var root = i;
while (node[root].parent != -1)
root = node[root].parent;
for (j = 0; j < seq.length; j++) {
if( i != j && node[j].element && gfunc(node[i].element, node[j].element)) {
var root2 = j;
while (node[root2].parent != -1)
root2 = node[root2].parent;
if(root2 != root) {
if(node[root].rank > node[root2].rank)
node[root2].parent = root;
else {
node[root].parent = root2;
if (node[root].rank == node[root2].rank)
node[root2].rank++;
root = root2;
}
/* compress path from node2 to the root: */
var temp, node2 = j;
while (node[node2].parent != -1) {
temp = node2;
node2 = node[node2].parent;
node[temp].parent = root;
}
/* compress path from node to the root: */
node2 = i;
while (node[node2].parent != -1) {
temp = node2;
node2 = node[node2].parent;
node[temp].parent = root;
}
}
}
}
}
var idx = new Array(seq.length);
var class_idx = 0;
for(i = 0; i < seq.length; i++) {
j = -1;
var node1 = i;
if(node[node1].element) {
while (node[node1].parent != -1)
node1 = node[node1].parent;
if(node[node1].rank >= 0)
node[node1].rank = ~class_idx++;
j = ~node[node1].rank;
}
idx[i] = j;
}
return {"index" : idx, "cat" : class_idx};
},
detect_objects : parallable("ccv.js", function (canvas, cascade, interval, min_neighbors) {
if (this.shared !== undefined) {
var params = get_named_arguments(arguments, ["canvas", "cascade", "interval", "min_neighbors"]);
this.shared.canvas = params.canvas;
this.shared.interval = params.interval;
this.shared.min_neighbors = params.min_neighbors;
this.shared.cascade = params.cascade;
this.shared.scale = Math.pow(2, 1 / (params.interval + 1));
this.shared.next = params.interval + 1;
this.shared.scale_upto = Math.floor(Math.log(Math.min(params.canvas.width / params.cascade.width, params.canvas.height / params.cascade.height)) / Math.log(this.shared.scale));
var i;
for (i = 0; i < this.shared.cascade.stage_classifier.length; i++)
this.shared.cascade.stage_classifier[i].orig_feature = this.shared.cascade.stage_classifier[i].feature;
}
function pre(worker_num) {
var canvas = this.shared.canvas;
var interval = this.shared.interval;
var scale = this.shared.scale;
var next = this.shared.next;
var scale_upto = this.shared.scale_upto;
var pyr = new Array((scale_upto + next * 2) * 4);
var ret = new Array((scale_upto + next * 2) * 4);
pyr[0] = canvas;
ret[0] = { "width" : pyr[0].width,
"height" : pyr[0].height,
"data" : pyr[0].getContext("2d").getImageData(0, 0, pyr[0].width, pyr[0].height).data };
var i;
for (i = 1; i <= interval; i++) {
pyr[i * 4] = document.createElement("canvas");
pyr[i * 4].width = Math.floor(pyr[0].width / Math.pow(scale, i));
pyr[i * 4].height = Math.floor(pyr[0].height / Math.pow(scale, i));
pyr[i * 4].getContext("2d").drawImage(pyr[0], 0, 0, pyr[0].width, pyr[0].height, 0, 0, pyr[i * 4].width, pyr[i * 4].height);
ret[i * 4] = { "width" : pyr[i * 4].width,
"height" : pyr[i * 4].height,
"data" : pyr[i * 4].getContext("2d").getImageData(0, 0, pyr[i * 4].width, pyr[i * 4].height).data };
}
for (i = next; i < scale_upto + next * 2; i++) {
pyr[i * 4] = document.createElement("canvas");
pyr[i * 4].width = Math.floor(pyr[i * 4 - next * 4].width / 2);
pyr[i * 4].height = Math.floor(pyr[i * 4 - next * 4].height / 2);
pyr[i * 4].getContext("2d").drawImage(pyr[i * 4 - next * 4], 0, 0, pyr[i * 4 - next * 4].width, pyr[i * 4 - next * 4].height, 0, 0, pyr[i * 4].width, pyr[i * 4].height);
ret[i * 4] = { "width" : pyr[i * 4].width,
"height" : pyr[i * 4].height,
"data" : pyr[i * 4].getContext("2d").getImageData(0, 0, pyr[i * 4].width, pyr[i * 4].height).data };
}
for (i = next * 2; i < scale_upto + next * 2; i++) {
pyr[i * 4 + 1] = document.createElement("canvas");
pyr[i * 4 + 1].width = Math.floor(pyr[i * 4 - next * 4].width / 2);
pyr[i * 4 + 1].height = Math.floor(pyr[i * 4 - next * 4].height / 2);
pyr[i * 4 + 1].getContext("2d").drawImage(pyr[i * 4 - next * 4], 1, 0, pyr[i * 4 - next * 4].width - 1, pyr[i * 4 - next * 4].height, 0, 0, pyr[i * 4 + 1].width - 2, pyr[i * 4 + 1].height);
ret[i * 4 + 1] = { "width" : pyr[i * 4 + 1].width,
"height" : pyr[i * 4 + 1].height,
"data" : pyr[i * 4 + 1].getContext("2d").getImageData(0, 0, pyr[i * 4 + 1].width, pyr[i * 4 + 1].height).data };
pyr[i * 4 + 2] = document.createElement("canvas");
pyr[i * 4 + 2].width = Math.floor(pyr[i * 4 - next * 4].width / 2);
pyr[i * 4 + 2].height = Math.floor(pyr[i * 4 - next * 4].height / 2);
pyr[i * 4 + 2].getContext("2d").drawImage(pyr[i * 4 - next * 4], 0, 1, pyr[i * 4 - next * 4].width, pyr[i * 4 - next * 4].height - 1, 0, 0, pyr[i * 4 + 2].width, pyr[i * 4 + 2].height - 2);
ret[i * 4 + 2] = { "width" : pyr[i * 4 + 2].width,
"height" : pyr[i * 4 + 2].height,
"data" : pyr[i * 4 + 2].getContext("2d").getImageData(0, 0, pyr[i * 4 + 2].width, pyr[i * 4 + 2].height).data };
pyr[i * 4 + 3] = document.createElement("canvas");
pyr[i * 4 + 3].width = Math.floor(pyr[i * 4 - next * 4].width / 2);
pyr[i * 4 + 3].height = Math.floor(pyr[i * 4 - next * 4].height / 2);
pyr[i * 4 + 3].getContext("2d").drawImage(pyr[i * 4 - next * 4], 1, 1, pyr[i * 4 - next * 4].width - 1, pyr[i * 4 - next * 4].height - 1, 0, 0, pyr[i * 4 + 3].width - 2, pyr[i * 4 + 3].height - 2);
ret[i * 4 + 3] = { "width" : pyr[i * 4 + 3].width,
"height" : pyr[i * 4 + 3].height,
"data" : pyr[i * 4 + 3].getContext("2d").getImageData(0, 0, pyr[i * 4 + 3].width, pyr[i * 4 + 3].height).data };
}
return [ret];
};
function core(pyr, id, worker_num) {
var cascade = this.shared.cascade;
var interval = this.shared.interval;
var scale = this.shared.scale;
var next = this.shared.next;
var scale_upto = this.shared.scale_upto;
var i, j, k, x, y, q;
var scale_x = 1, scale_y = 1;
var dx = [0, 1, 0, 1];
var dy = [0, 0, 1, 1];
var seq = [];
for (i = 0; i < scale_upto; i++) {
var qw = pyr[i * 4 + next * 8].width - Math.floor(cascade.width / 4);
var qh = pyr[i * 4 + next * 8].height - Math.floor(cascade.height / 4);
var step = [pyr[i * 4].width * 4, pyr[i * 4 + next * 4].width * 4, pyr[i * 4 + next * 8].width * 4];
var paddings = [pyr[i * 4].width * 16 - qw * 16,
pyr[i * 4 + next * 4].width * 8 - qw * 8,
pyr[i * 4 + next * 8].width * 4 - qw * 4];
for (j = 0; j < cascade.stage_classifier.length; j++) {
var orig_feature = cascade.stage_classifier[j].orig_feature;
var feature = cascade.stage_classifier[j].feature = new Array(cascade.stage_classifier[j].count);
for (k = 0; k < cascade.stage_classifier[j].count; k++) {
feature[k] = {"size" : orig_feature[k].size,
"px" : new Array(orig_feature[k].size),
"pz" : new Array(orig_feature[k].size),
"nx" : new Array(orig_feature[k].size),
"nz" : new Array(orig_feature[k].size)};
for (q = 0; q < orig_feature[k].size; q++) {
feature[k].px[q] = orig_feature[k].px[q] * 4 + orig_feature[k].py[q] * step[orig_feature[k].pz[q]];
feature[k].pz[q] = orig_feature[k].pz[q];
feature[k].nx[q] = orig_feature[k].nx[q] * 4 + orig_feature[k].ny[q] * step[orig_feature[k].nz[q]];
feature[k].nz[q] = orig_feature[k].nz[q];
}
}
}
for (q = 0; q < 4; q++) {
var u8 = [pyr[i * 4].data, pyr[i * 4 + next * 4].data, pyr[i * 4 + next * 8 + q].data];
var u8o = [dx[q] * 8 + dy[q] * pyr[i * 4].width * 8, dx[q] * 4 + dy[q] * pyr[i * 4 + next * 4].width * 4, 0];
for (y = 0; y < qh; y++) {
for (x = 0; x < qw; x++) {
var sum = 0;
var flag = true;
for (j = 0; j < cascade.stage_classifier.length; j++) {
sum = 0;
var alpha = cascade.stage_classifier[j].alpha;
var feature = cascade.stage_classifier[j].feature;
for (k = 0; k < cascade.stage_classifier[j].count; k++) {
var feature_k = feature[k];
var p, pmin = u8[feature_k.pz[0]][u8o[feature_k.pz[0]] + feature_k.px[0]];
var n, nmax = u8[feature_k.nz[0]][u8o[feature_k.nz[0]] + feature_k.nx[0]];
if (pmin <= nmax) {
sum += alpha[k * 2];
} else {
var f, shortcut = true;
for (f = 0; f < feature_k.size; f++) {
if (feature_k.pz[f] >= 0) {
p = u8[feature_k.pz[f]][u8o[feature_k.pz[f]] + feature_k.px[f]];
if (p < pmin) {
if (p <= nmax) {
shortcut = false;
break;
}
pmin = p;
}
}
if (feature_k.nz[f] >= 0) {
n = u8[feature_k.nz[f]][u8o[feature_k.nz[f]] + feature_k.nx[f]];
if (n > nmax) {
if (pmin <= n) {
shortcut = false;
break;
}
nmax = n;
}
}
}
sum += (shortcut) ? alpha[k * 2 + 1] : alpha[k * 2];
}
}
if (sum < cascade.stage_classifier[j].threshold) {
flag = false;
break;
}
}
if (flag) {
seq.push({"x" : (x * 4 + dx[q] * 2) * scale_x,
"y" : (y * 4 + dy[q] * 2) * scale_y,
"width" : cascade.width * scale_x,
"height" : cascade.height * scale_y,
"neighbor" : 1,
"confidence" : sum});
}
u8o[0] += 16;
u8o[1] += 8;
u8o[2] += 4;
}
u8o[0] += paddings[0];
u8o[1] += paddings[1];
u8o[2] += paddings[2];
}
}
scale_x *= scale;
scale_y *= scale;
}
return seq;
};
function post(seq) {
var min_neighbors = this.shared.min_neighbors;
var cascade = this.shared.cascade;
var interval = this.shared.interval;
var scale = this.shared.scale;
var next = this.shared.next;
var scale_upto = this.shared.scale_upto;
var i, j;
for (i = 0; i < cascade.stage_classifier.length; i++)
cascade.stage_classifier[i].feature = cascade.stage_classifier[i].orig_feature;
seq = seq[0];
if (!(min_neighbors > 0))
return seq;
else {
var result = ccv.array_group(seq, function (r1, r2) {
var distance = Math.floor(r1.width * 0.25 + 0.5);
return r2.x <= r1.x + distance &&
r2.x >= r1.x - distance &&
r2.y <= r1.y + distance &&
r2.y >= r1.y - distance &&
r2.width <= Math.floor(r1.width * 1.5 + 0.5) &&
Math.floor(r2.width * 1.5 + 0.5) >= r1.width;
});
var ncomp = result.cat;
var idx_seq = result.index;
var comps = new Array(ncomp + 1);
for (i = 0; i < comps.length; i++)
comps[i] = {"neighbors" : 0,
"x" : 0,
"y" : 0,
"width" : 0,
"height" : 0,
"confidence" : 0};
// count number of neighbors
for(i = 0; i < seq.length; i++)
{
var r1 = seq[i];
var idx = idx_seq[i];
if (comps[idx].neighbors == 0)
comps[idx].confidence = r1.confidence;
++comps[idx].neighbors;
comps[idx].x += r1.x;
comps[idx].y += r1.y;
comps[idx].width += r1.width;
comps[idx].height += r1.height;
comps[idx].confidence = Math.max(comps[idx].confidence, r1.confidence);
}
var seq2 = [];
// calculate average bounding box
for(i = 0; i < ncomp; i++)
{
var n = comps[i].neighbors;
if (n >= min_neighbors)
seq2.push({"x" : (comps[i].x * 2 + n) / (2 * n),
"y" : (comps[i].y * 2 + n) / (2 * n),
"width" : (comps[i].width * 2 + n) / (2 * n),
"height" : (comps[i].height * 2 + n) / (2 * n),
"neighbors" : comps[i].neighbors,
"confidence" : comps[i].confidence});
}
var result_seq = [];
// filter out small face rectangles inside large face rectangles
for(i = 0; i < seq2.length; i++)
{
var r1 = seq2[i];
var flag = true;
for(j = 0; j < seq2.length; j++)
{
var r2 = seq2[j];
var distance = Math.floor(r2.width * 0.25 + 0.5);
if(i != j &&
r1.x >= r2.x - distance &&
r1.y >= r2.y - distance &&
r1.x + r1.width <= r2.x + r2.width + distance &&
r1.y + r1.height <= r2.y + r2.height + distance &&
(r2.neighbors > Math.max(3, r1.neighbors) || r1.neighbors < 3))
{
flag = false;
break;
}
}
if(flag)
result_seq.push(r1);
}
return result_seq;
}
};
return { "pre" : pre, "core" : core, "post" : post };
})
}
onmessage = function (event) {
var data = (typeof event.data == "string") ? JSON.parse(event.data) : event.data;
var scope = { "shared" : data.shared };
var result = parallable.core[data.name].apply(scope, [data.input, data.id, data.worker]);
try {
postMessage(result);
} catch (e) {
postMessage(JSON.stringify(result));
}
}
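
face.html calls ccv.detect_objects() synchronously; parallable also has a worker path. A hedged sketch of the asynchronous form, inferred from the named-argument handling above (the "async" and "worker" keys are consumed by parallable, which then returns a function taking a completion callback):

// Sketch: asynchronous detection on one worker; canvas and cascade
// are assumed to exist as in face.html.
ccv.detect_objects({ "canvas" : ccv.grayscale(canvas),
                     "cascade" : cascade,
                     "interval" : 5,
                     "min_neighbors" : 1,
                     "async" : true,
                     "worker" : 1 })(function (comp) {
  console.log("detected " + comp.length + " object(s)");
});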

File diff suppressed because one or more lines are too long

27
samples/js/demos/main.py Normal file

@@ -0,0 +1,27 @@
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import webapp2
import os
from google.appengine.ext.webapp import template
class MainHandler(webapp2.RequestHandler):
  def get(self):
    path = os.path.join(os.path.dirname(__file__), 'index.html')
    self.response.out.write(template.render(path, {}))

app = webapp2.WSGIApplication([('/', MainHandler)],
                              debug=True)