Adding webrtc-sample demos under trunk/samples.

Review URL: https://webrtc-codereview.appspot.com/1126005

git-svn-id: http://webrtc.googlecode.com/svn/trunk@3578 4adac7df-926f-26a2-2b94-8c16560cd09d
vikasmarwaha@webrtc.org 2013-02-27 23:22:10 +00:00
parent 132c15de30
commit 98fce15c6f
30 changed files with 3409 additions and 0 deletions

5
samples/js/apprtc/OWNERS Normal file

@ -0,0 +1,5 @@
juberti@webrtc.org
braveyao@webrtc.org
hta@webrtc.org
wu@webrtc.org
vikasmarwaha@webrtc.org

samples/js/apprtc/app.yaml Normal file

@ -0,0 +1,26 @@
application: apprtc
version: 6
runtime: python27
threadsafe: true
api_version: 1

handlers:
- url: /html
  static_dir: html
- url: /images
  static_dir: images
- url: /js
  static_dir: js
- url: /.*
  script: apprtc.app
  secure: always

inbound_services:
- channel_presence
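# channel_presence makes App Engine POST to /_ah/channel/connected/ and
# /_ah/channel/disconnected/ when a client's channel opens or closes;
# apprtc.py maps these paths to ConnectPage and DisconnectPage.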
libraries:
- name: jinja2
  version: latest

390
samples/js/apprtc/apprtc.py Normal file

@ -0,0 +1,390 @@
#!/usr/bin/python2.4
#
# Copyright 2011 Google Inc. All Rights Reserved.

# pylint: disable-msg=C6310

"""WebRTC Demo

This module demonstrates the WebRTC API by implementing a simple video chat
app.
"""

import cgi
import datetime
import logging
import os
import random
import re
import json
import jinja2
import webapp2
import threading
from google.appengine.api import channel
from google.appengine.ext import db

jinja_environment = jinja2.Environment(
    loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))

# Lock for syncing DB operations in concurrent request handling.
# TODO(brave): keep working on improving performance with thread syncing.
# One possible method for the near future is to reduce the message caching.
LOCK = threading.RLock()


def generate_random(length):
  word = ''
  for _ in range(length):
    word += random.choice('0123456789')
  return word


def sanitize(key):
  return re.sub(r'[^a-zA-Z0-9\-]', '-', key)


def make_client_id(room, user):
  return room.key().id_or_name() + '/' + user


def make_pc_config(stun_server, turn_server, ts_pwd):
  servers = []
  if turn_server:
    turn_config = 'turn:{}'.format(turn_server)
    servers.append({'url': turn_config, 'credential': ts_pwd})
  if stun_server:
    stun_config = 'stun:{}'.format(stun_server)
  else:
    stun_config = 'stun:stun.l.google.com:19302'
  servers.append({'url': stun_config})
  return {'iceServers': servers}
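# Example with hypothetical servers: make_pc_config('stun.example.org',
# 'turn.example.org', 'pw') returns
#   {'iceServers': [{'url': 'turn:turn.example.org', 'credential': 'pw'},
#                   {'url': 'stun:stun.example.org'}]}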
def create_channel(room, user, duration_minutes):
  client_id = make_client_id(room, user)
  return channel.create_channel(client_id, duration_minutes)


def make_loopback_answer(message):
  message = message.replace("\"offer\"", "\"answer\"")
  message = message.replace("a=ice-options:google-ice\\r\\n", "")
  return message


def maybe_add_fake_crypto(message):
  if message.find("a=crypto") == -1:
    index = len(message)
    crypto_line = "a=crypto:1 AES_CM_128_HMAC_SHA1_80 inline:BAADBAADBAADBAADBAADBAADBAADBAADBAADBAAD\\r\\n"
    # Reverse find for multiple find-and-insert operations.
    index = message.rfind("c=IN", 0, index)
    while index != -1:
      message = message[:index] + crypto_line + message[index:]
      index = message.rfind("c=IN", 0, index)
  return message


def handle_message(room, user, message):
  message_obj = json.loads(message)
  other_user = room.get_other_user(user)
  room_key = room.key().id_or_name()
  if message_obj['type'] == 'bye':
    # This would remove the other_user in the loopback test too,
    # so check its availability before forwarding the bye message.
    room.remove_user(user)
    logging.info('User ' + user + ' quit from room ' + room_key)
    logging.info('Room ' + room_key + ' has state ' + str(room))
  if other_user and room.has_user(other_user):
    if message_obj['type'] == 'offer':
      # Special case the loopback scenario.
      if other_user == user:
        message = make_loopback_answer(message)
      # Workaround for a Chrome bug:
      # insert an a=crypto line into offers from Firefox.
      # TODO(juberti): Remove this call.
      message = maybe_add_fake_crypto(message)
    on_message(room, other_user, message)


def get_saved_messages(client_id):
  return Message.gql("WHERE client_id = :id", id=client_id)


def delete_saved_messages(client_id):
  messages = get_saved_messages(client_id)
  for message in messages:
    message.delete()
    logging.info('Deleted the saved message for ' + client_id)


def send_saved_messages(client_id):
  messages = get_saved_messages(client_id)
  for message in messages:
    channel.send_message(client_id, message.msg)
    logging.info('Delivered saved message to ' + client_id)
    message.delete()


def on_message(room, user, message):
  client_id = make_client_id(room, user)
  if room.is_connected(user):
    channel.send_message(client_id, message)
    logging.info('Delivered message to user ' + user)
  else:
    new_message = Message(client_id=client_id, msg=message)
    new_message.put()
    logging.info('Saved message for user ' + user)


def make_media_constraints(hd_video):
  constraints = {'optional': [], 'mandatory': {}}
  # Demo 16:9 video with media constraints.
  if hd_video.lower() == 'true':
    # Demo HD by requesting a 1280x720 minimum size.
    constraints['mandatory']['minHeight'] = 720
    constraints['mandatory']['minWidth'] = 1280
  # Disabled for now due to weird stretching behavior on Mac.
  #else:
    # Demo WVGA by constraining the aspect ratio:
    #constraints['mandatory']['maxAspectRatio'] = 1.778
    #constraints['mandatory']['minAspectRatio'] = 1.777
  return constraints


def make_pc_constraints(compat):
  constraints = {'optional': []}
  # For interop with Firefox: enable DTLS in the PeerConnection ctor.
  if compat.lower() == 'true':
    constraints['optional'].append({'DtlsSrtpKeyAgreement': True})
  return constraints


def make_offer_constraints(compat):
  constraints = {'mandatory': {}, 'optional': []}
  # For interop with Firefox: disable the data channel in createOffer.
  if compat.lower() == 'true':
    constraints['mandatory']['MozDontOfferDataChannel'] = True
  return constraints


def append_url_arguments(request, link):
  for argument in request.arguments():
    if argument != 'r':
      link += ('&' + cgi.escape(argument, True) + '=' +
               cgi.escape(request.get(argument), True))
  return link


# This model stores messages from the sender client for times when the
# receiver client is not yet ready to receive them.
# Use TextProperty instead of StringProperty for msg because
# the session description can be more than 500 characters.
class Message(db.Model):
  client_id = db.StringProperty()
  msg = db.TextProperty()


class Room(db.Model):
  """All the data we store for a room."""
  user1 = db.StringProperty()
  user2 = db.StringProperty()
  user1_connected = db.BooleanProperty(default=False)
  user2_connected = db.BooleanProperty(default=False)

  def __str__(self):
    result = '['
    if self.user1:
      result += "%s-%r" % (self.user1, self.user1_connected)
    if self.user2:
      result += ", %s-%r" % (self.user2, self.user2_connected)
    result += ']'
    return result

  def get_occupancy(self):
    occupancy = 0
    if self.user1:
      occupancy += 1
    if self.user2:
      occupancy += 1
    return occupancy

  def get_other_user(self, user):
    if user == self.user1:
      return self.user2
    elif user == self.user2:
      return self.user1
    else:
      return None

  def has_user(self, user):
    return user and (user == self.user1 or user == self.user2)

  def add_user(self, user):
    if not self.user1:
      self.user1 = user
    elif not self.user2:
      self.user2 = user
    else:
      raise RuntimeError('room is full')
    self.put()

  def remove_user(self, user):
    delete_saved_messages(make_client_id(self, user))
    if user == self.user2:
      self.user2 = None
      self.user2_connected = False
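    # If user1 leaves while user2 is still present, promote user2 into the
    # user1 slot so the next participant joins as user2.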
    if user == self.user1:
      if self.user2:
        self.user1 = self.user2
        self.user1_connected = self.user2_connected
        self.user2 = None
        self.user2_connected = False
      else:
        self.user1 = None
        self.user1_connected = False
    if self.get_occupancy() > 0:
      self.put()
    else:
      self.delete()

  def set_connected(self, user):
    if user == self.user1:
      self.user1_connected = True
    if user == self.user2:
      self.user2_connected = True
    self.put()

  def is_connected(self, user):
    if user == self.user1:
      return self.user1_connected
    if user == self.user2:
      return self.user2_connected


class ConnectPage(webapp2.RequestHandler):
  def post(self):
    key = self.request.get('from')
    room_key, user = key.split('/')
    with LOCK:
      room = Room.get_by_key_name(room_key)
      # Check that the room has the user: a disconnect message can arrive
      # before the connect message for unknown reasons (observed with the
      # local AppEngine SDK).
      if room and room.has_user(user):
        room.set_connected(user)
        send_saved_messages(make_client_id(room, user))
        logging.info('User ' + user + ' connected to room ' + room_key)
        logging.info('Room ' + room_key + ' has state ' + str(room))
      else:
        logging.warning('Unexpected connect message to room ' + room_key)


class DisconnectPage(webapp2.RequestHandler):
  def post(self):
    key = self.request.get('from')
    room_key, user = key.split('/')
    with LOCK:
      room = Room.get_by_key_name(room_key)
      if room and room.has_user(user):
        other_user = room.get_other_user(user)
        room.remove_user(user)
        logging.info('User ' + user + ' removed from room ' + room_key)
        logging.info('Room ' + room_key + ' has state ' + str(room))
        if other_user and other_user != user:
          channel.send_message(make_client_id(room, other_user),
                               '{"type":"bye"}')
          logging.info('Sent BYE to ' + other_user)
    logging.warning('User ' + user + ' disconnected from room ' + room_key)


class MessagePage(webapp2.RequestHandler):
  def post(self):
    message = self.request.body
    room_key = self.request.get('r')
    user = self.request.get('u')
    with LOCK:
      room = Room.get_by_key_name(room_key)
      if room:
        handle_message(room, user, message)
      else:
        logging.warning('Unknown room ' + room_key)


class MainPage(webapp2.RequestHandler):
  """The main UI page; renders the 'index.html' template."""

  def get(self):
    """Renders the main page. When this page is shown, we create a new
    channel to push asynchronous updates to the client."""
    # Get the base URL without arguments.
    base_url = self.request.path_url
    room_key = sanitize(self.request.get('r'))
    debug = self.request.get('debug')
    unittest = self.request.get('unittest')
    stun_server = self.request.get('ss')
    turn_server = self.request.get('ts')
    hd_video = self.request.get('hd')
    ts_pwd = self.request.get('tp')
    # Set compat to true by default.
    compat = 'true'
    if self.request.get('compat'):
      compat = self.request.get('compat')
    if debug == 'loopback':
      # Set compat to false as DTLS does not work for loopback.
      compat = 'false'
    # token_timeout for channel creation: default 30 min, max ~2 days,
    # min 3 min.
    token_timeout = self.request.get_range('tt',
                                           min_value=3,
                                           max_value=3000,
                                           default=30)
    if unittest:
      # Always create a new room for the unit tests.
      room_key = generate_random(8)
    if not room_key:
      room_key = generate_random(8)
      redirect = '/?r=' + room_key
      redirect = append_url_arguments(self.request, redirect)
      self.redirect(redirect)
      logging.info('Redirecting visitor to base URL to ' + redirect)
      return
    user = None
    initiator = 0
    with LOCK:
      room = Room.get_by_key_name(room_key)
      if not room and debug != "full":
        # New room.
        user = generate_random(8)
        room = Room(key_name=room_key)
        room.add_user(user)
        if debug != 'loopback':
          initiator = 0
        else:
          room.add_user(user)
          initiator = 1
      elif room and room.get_occupancy() == 1 and debug != 'full':
        # 1 occupant.
        user = generate_random(8)
        room.add_user(user)
        initiator = 1
      else:
        # 2 occupants (full).
        template = jinja_environment.get_template('full.html')
        self.response.out.write(template.render({'room_key': room_key}))
        logging.info('Room ' + room_key + ' is full')
        return
    room_link = base_url + '/?r=' + room_key
    room_link = append_url_arguments(self.request, room_link)
    token = create_channel(room, user, token_timeout)
    pc_config = make_pc_config(stun_server, turn_server, ts_pwd)
    pc_constraints = make_pc_constraints(compat)
    offer_constraints = make_offer_constraints(compat)
    media_constraints = make_media_constraints(hd_video)
    template_values = {'token': token,
                       'me': user,
                       'room_key': room_key,
                       'room_link': room_link,
                       'initiator': initiator,
                       'pc_config': json.dumps(pc_config),
                       'pc_constraints': json.dumps(pc_constraints),
                       'offer_constraints': json.dumps(offer_constraints),
                       'media_constraints': json.dumps(media_constraints)}
    if unittest:
      target_page = 'test/test_' + unittest + '.html'
    else:
      target_page = 'index.html'
    template = jinja_environment.get_template(target_page)
    self.response.out.write(template.render(template_values))
    logging.info('User ' + user + ' added to room ' + room_key)
    logging.info('Room ' + room_key + ' has state ' + str(room))


app = webapp2.WSGIApplication([
    ('/', MainPage),
    ('/message', MessagePage),
    ('/_ah/channel/connected/', ConnectPage),
    ('/_ah/channel/disconnected/', DisconnectPage)
    ], debug=True)

samples/js/apprtc/full.html Normal file

@ -0,0 +1,54 @@
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<html>
<head>
<script src="/_ah/channel/jsapi"></script>
<style type="text/css">
a:link { color: #ffffff; }
a:visited {color: #ffffff; }
html, body {
background-color: #000000;
height: 100%;
font-family:Verdana, Arial, Helvetica, sans-serif;
}
body {
margin: 0;
padding: 0;
}
#container {
position: relative;
min-height: 100%;
width: 100%;
margin: 0px auto;
}
#footer {
spacing: 4px;
position: absolute;
bottom: 0;
width: 100%;
height: 28px;
background-color: #3F3F3F;
color: rgb(255, 255, 255);
font-size:13px; font-weight: bold;
line-height: 28px;
text-align: center;
}
#logo {
display: block;
top: 4px;
right: 4px;
position: absolute;
float: right;
/* opacity: 0.8; */
}
</style>
</head>
<body>
<div id="container">
<div id="footer">
Sorry, this room is full. <a href="?r={{ room_key }}">Click here</a> to try again.
</div>
</div>
<img id="logo" alt="WebRTC" src="images/webrtc_black_20p.png">
</body>
</html>


@ -0,0 +1,11 @@
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<html>
<head>
<meta content="text/html; charset=ISO-8859-1"
http-equiv="content-type">
<title>WebRTC Demo App Help</title>
</head>
<body>
TODO
</body>
</html>

Binary image file added (1.7 KiB); content not shown.

samples/js/apprtc/index.html Normal file

@ -0,0 +1,561 @@
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<html>
<head>
<title>WebRTC Reference App</title>
<link rel="canonical" href="{{ room_link }}"/>
<meta http-equiv="X-UA-Compatible" content="chrome=1"/>
<script src="/_ah/channel/jsapi"></script>
<!-- Load the polyfill to switch-hit between Chrome and Firefox -->
<script src="../base/adapter.js"></script>
<style type="text/css">
a:link { color: #ffffff; }
a:visited {color: #ffffff; }
html, body {
background-color: #000000;
height: 100%;
font-family:Verdana, Arial, Helvetica, sans-serif;
}
body {
margin: 0;
padding: 0;
}
#container {
background-color: #000000;
position: relative;
min-height: 100%;
width: 100%;
margin: 0px auto;
-webkit-perspective: 1000;
}
#card {
-webkit-transition-property: -webkit-transform;
-webkit-transition-duration: 2s;
-webkit-transform-style: preserve-3d;
}
#local {
position: absolute;
width: 100%;
-webkit-transform: scale(-1, 1);
-webkit-backface-visibility: hidden;
}
#remote {
position: absolute;
width: 100%;
-webkit-transform: rotateY(180deg);
-webkit-backface-visibility: hidden;
}
#mini {
position: absolute;
height: 30%;
width: 30%;
bottom: 32px;
right: 4px;
-webkit-transform: scale(-1, 1);
opacity: 1.0;
}
#localVideo {
opacity: 0;
-webkit-transition-property: opacity;
-webkit-transition-duration: 2s;
}
#remoteVideo {
opacity: 0;
-webkit-transition-property: opacity;
-webkit-transition-duration: 2s;
}
#miniVideo {
opacity: 0;
-webkit-transition-property: opacity;
-webkit-transition-duration: 2s;
}
#footer {
spacing: 4px;
position: absolute;
bottom: 0;
width: 100%;
height: 28px;
background-color: #3F3F3F;
color: rgb(255, 255, 255);
font-size:13px; font-weight: bold;
line-height: 28px;
text-align: center;
}
#hangup {
font-size:13px; font-weight:bold;
color:#FFFFFF;
width:128px;
height:24px;
background-color:#808080;
border-style:solid;
border-color:#FFFFFF;
margin:2px;
}
#logo {
display: block;
top: 4px;
right: 4px;
position:absolute;
float:right;
opacity: 0.5;
}
</style>
</head>
<body>
<script type="text/javascript">
var card;
var localVideo;
var miniVideo;
var remoteVideo;
var localStream;
var remoteStream;
var channel;
var channelReady = false;
var pc;
var socket;
var initiator = {{ initiator }};
var started = false;
// Set up audio and video regardless of what devices are present.
var sdpConstraints = {'mandatory': {
'OfferToReceiveAudio':true,
'OfferToReceiveVideo':true }};
var isVideoMuted = false;
var isAudioMuted = false;
function initialize() {
console.log("Initializing; room={{ room_key }}.");
card = document.getElementById("card");
localVideo = document.getElementById("localVideo");
miniVideo = document.getElementById("miniVideo");
remoteVideo = document.getElementById("remoteVideo");
resetStatus();
openChannel('{{ token }}');
doGetUserMedia();
}
function openChannel(channelToken) {
console.log("Opening channel.");
var channel = new goog.appengine.Channel(channelToken);
var handler = {
'onopen': onChannelOpened,
'onmessage': onChannelMessage,
'onerror': onChannelError,
'onclose': onChannelClosed
};
socket = channel.open(handler);
}
function resetStatus() {
if (!initiator) {
setStatus("Waiting for someone to join: <a href=\"{{ room_link }}\">{{ room_link }}</a>");
} else {
setStatus("Initializing...");
}
}
function doGetUserMedia() {
// Call into getUserMedia via the polyfill (adapter.js).
var constraints = {{ media_constraints|safe }};
try {
getUserMedia({'audio':true, 'video':constraints}, onUserMediaSuccess,
onUserMediaError);
console.log("Requested access to local media with mediaConstraints:\n" +
" \"" + JSON.stringify(constraints) + "\"");
} catch (e) {
alert("getUserMedia() failed. Is this a WebRTC capable browser?");
console.log("getUserMedia failed with exception: " + e.message);
}
}
function createPeerConnection() {
var pc_config = {{ pc_config|safe }};
var pc_constraints = {{ pc_constraints|safe }};
// Force the use of a number IP STUN server for Firefox.
if (webrtcDetectedBrowser == "firefox") {
pc_config = {"iceServers":[{"url":"stun:23.21.150.121"}]};
}
try {
// Create an RTCPeerConnection via the polyfill (adapter.js).
pc = new RTCPeerConnection(pc_config, pc_constraints);
pc.onicecandidate = onIceCandidate;
console.log("Created RTCPeerConnnection with:\n" +
" config: \"" + JSON.stringify(pc_config) + "\";\n" +
" constraints: \"" + JSON.stringify(pc_constraints) + "\".");
} catch (e) {
console.log("Failed to create PeerConnection, exception: " + e.message);
alert("Cannot create RTCPeerConnection object; WebRTC is not supported by this browser.");
return;
}
pc.onaddstream = onRemoteStreamAdded;
pc.onremovestream = onRemoteStreamRemoved;
}
function maybeStart() {
if (!started && localStream && channelReady) {
setStatus("Connecting...");
console.log("Creating PeerConnection.");
createPeerConnection();
console.log("Adding local stream.");
pc.addStream(localStream);
started = true;
// Caller initiates offer to peer.
if (initiator)
doCall();
}
}
function setStatus(state) {
footer.innerHTML = state;
}
function doCall() {
var constraints = {{ offer_constraints | safe }};
// temporary measure to remove Moz* constraints in Chrome
if (webrtcDetectedBrowser === "chrome") {
for (prop in constraints.mandatory) {
if (prop.indexOf("Moz") != -1) {
delete constraints.mandatory[prop];
}
}
}
constraints = mergeConstraints(constraints, sdpConstraints);
console.log("Sending offer to peer, with constraints: \n" +
" \"" + JSON.stringify(constraints) + "\".")
pc.createOffer(setLocalAndSendMessage, null, constraints);
}
function doAnswer() {
console.log("Sending answer to peer.");
pc.createAnswer(setLocalAndSendMessage, null, sdpConstraints);
}
function mergeConstraints(cons1, cons2) {
var merged = cons1;
for (var name in cons2.mandatory) {
merged.mandatory[name] = cons2.mandatory[name];
}
merged.optional = merged.optional.concat(cons2.optional);
return merged;
}
function setLocalAndSendMessage(sessionDescription) {
// Set Opus as the preferred codec in SDP if Opus is present.
sessionDescription.sdp = preferOpus(sessionDescription.sdp);
pc.setLocalDescription(sessionDescription);
sendMessage(sessionDescription);
}
function sendMessage(message) {
var msgString = JSON.stringify(message);
console.log('C->S: ' + msgString);
path = '/message?r={{ room_key }}&u={{ me }}';
var xhr = new XMLHttpRequest();
xhr.open('POST', path, true);
xhr.send(msgString);
}
function processSignalingMessage(message) {
var msg = JSON.parse(message);
if (msg.type === 'offer') {
// Callee creates PeerConnection
if (!initiator && !started)
maybeStart();
pc.setRemoteDescription(new RTCSessionDescription(msg));
doAnswer();
} else if (msg.type === 'answer' && started) {
pc.setRemoteDescription(new RTCSessionDescription(msg));
} else if (msg.type === 'candidate' && started) {
var candidate = new RTCIceCandidate({sdpMLineIndex:msg.label,
candidate:msg.candidate});
pc.addIceCandidate(candidate);
} else if (msg.type === 'bye' && started) {
onRemoteHangup();
}
}
function onChannelOpened() {
console.log('Channel opened.');
channelReady = true;
if (initiator) maybeStart();
}
function onChannelMessage(message) {
console.log('S->C: ' + message.data);
processSignalingMessage(message.data);
}
function onChannelError() {
console.log('Channel error.');
}
function onChannelClosed() {
console.log('Channel closed.');
}
function onUserMediaSuccess(stream) {
console.log("User has granted access to local media.");
// Call the polyfill wrapper to attach the media stream to this element.
attachMediaStream(localVideo, stream);
localVideo.style.opacity = 1;
localStream = stream;
// Caller creates PeerConnection.
if (initiator) maybeStart();
}
function onUserMediaError(error) {
console.log("Failed to get access to local media. Error code was " + error.code);
alert("Failed to get access to local media. Error code was " + error.code + ".");
}
function onIceCandidate(event) {
if (event.candidate) {
sendMessage({type: 'candidate',
label: event.candidate.sdpMLineIndex,
id: event.candidate.sdpMid,
candidate: event.candidate.candidate});
} else {
console.log("End of candidates.");
}
}
function onRemoteStreamAdded(event) {
console.log("Remote stream added.");
reattachMediaStream(miniVideo, localVideo);
attachMediaStream(remoteVideo, event.stream);
remoteStream = event.stream;
waitForRemoteVideo();
}
function onRemoteStreamRemoved(event) {
console.log("Remote stream removed.");
}
function onHangup() {
console.log("Hanging up.");
transitionToDone();
stop();
// will trigger BYE from server
socket.close();
}
function onRemoteHangup() {
console.log('Session terminated.');
transitionToWaiting();
stop();
initiator = 0;
}
function stop() {
started = false;
isAudioMuted = false;
isVideoMuted = false;
pc.close();
pc = null;
}
function waitForRemoteVideo() {
// Call the getVideoTracks method via adapter.js.
videoTracks = remoteStream.getVideoTracks();
if (videoTracks.length === 0 || remoteVideo.currentTime > 0) {
transitionToActive();
} else {
setTimeout(waitForRemoteVideo, 100);
}
}
function transitionToActive() {
remoteVideo.style.opacity = 1;
card.style.webkitTransform = "rotateY(180deg)";
setTimeout(function() { localVideo.src = ""; }, 500);
setTimeout(function() { miniVideo.style.opacity = 1; }, 1000);
setStatus("<input type=\"button\" id=\"hangup\" value=\"Hang up\" onclick=\"onHangup()\" />");
}
function transitionToWaiting() {
card.style.webkitTransform = "rotateY(0deg)";
setTimeout(function() {
localVideo.src = miniVideo.src;
miniVideo.src = "";
remoteVideo.src = "";
}, 500);
miniVideo.style.opacity = 0;
remoteVideo.style.opacity = 0;
resetStatus();
}
function transitionToDone() {
localVideo.style.opacity = 0;
remoteVideo.style.opacity = 0;
miniVideo.style.opacity = 0;
setStatus("You have left the call. <a href=\"{{ room_link }}\">Click here</a> to rejoin.");
}
function enterFullScreen() {
container.webkitRequestFullScreen();
}
function toggleVideoMute() {
// Call the getVideoTracks method via adapter.js.
videoTracks = localStream.getVideoTracks();
if (videoTracks.length === 0) {
console.log("No local video available.");
return;
}
if (isVideoMuted) {
for (i = 0; i < videoTracks.length; i++) {
videoTracks[i].enabled = true;
}
console.log("Video unmuted.");
} else {
for (i = 0; i < videoTracks.length; i++) {
videoTracks[i].enabled = false;
}
console.log("Video muted.");
}
isVideoMuted = !isVideoMuted;
}
function toggleAudioMute() {
// Call the getAudioTracks method via adapter.js.
audioTracks = localStream.getAudioTracks();
if (audioTracks.length === 0) {
console.log("No local audio available.");
return;
}
if (isAudioMuted) {
for (i = 0; i < audioTracks.length; i++) {
audioTracks[i].enabled = true;
}
console.log("Audio unmuted.");
} else {
for (i = 0; i < audioTracks.length; i++){
audioTracks[i].enabled = false;
}
console.log("Audio muted.");
}
isAudioMuted = !isAudioMuted;
}
setTimeout(initialize, 1);
// Send BYE on refreshing (or leaving) the demo page
// to ensure the room is cleaned up for the next session.
window.onbeforeunload = function() {
sendMessage({type: 'bye'});
}
// Ctrl-D: toggle audio mute; Ctrl-E: toggle video mute.
// On Mac, the Command key is used instead of Ctrl.
// Return false to screen out the original Chrome shortcuts.
document.onkeydown = function(event) {
if (navigator.appVersion.indexOf("Mac") != -1) {
if (event.metaKey && event.keyCode === 68) {
toggleAudioMute();
return false;
}
if (event.metaKey && event.keyCode === 69) {
toggleVideoMute();
return false;
}
} else {
if (event.ctrlKey && event.keyCode === 68) {
toggleAudioMute();
return false;
}
if (event.ctrlKey && event.keyCode === 69) {
toggleVideoMute();
return false;
}
}
}
// Set Opus as the default audio codec if it's present.
function preferOpus(sdp) {
var sdpLines = sdp.split('\r\n');
var mLineIndex = null;
// Search for the m line.
for (var i = 0; i < sdpLines.length; i++) {
if (sdpLines[i].search('m=audio') !== -1) {
mLineIndex = i;
break;
}
}
if (mLineIndex === null)
return sdp;
// If Opus is available, set it as the default in m line.
for (var i = 0; i < sdpLines.length; i++) {
if (sdpLines[i].search('opus/48000') !== -1) {
var opusPayload = extractSdp(sdpLines[i], /:(\d+) opus\/48000/i);
if (opusPayload)
sdpLines[mLineIndex] = setDefaultCodec(sdpLines[mLineIndex], opusPayload);
break;
}
}
// Remove CN in m line and sdp.
sdpLines = removeCN(sdpLines, mLineIndex);
sdp = sdpLines.join('\r\n');
return sdp;
}
function extractSdp(sdpLine, pattern) {
var result = sdpLine.match(pattern);
return (result && result.length == 2)? result[1]: null;
}
// Set the selected codec to the first in m line.
function setDefaultCodec(mLine, payload) {
var elements = mLine.split(' ');
var newLine = new Array();
var index = 0;
for (var i = 0; i < elements.length; i++) {
if (index === 3) // Format of media starts from the fourth.
newLine[index++] = payload; // Put target payload to the first.
if (elements[i] !== payload)
newLine[index++] = elements[i];
}
return newLine.join(' ');
}
// Strip CN from sdp before CN constraints is ready.
function removeCN(sdpLines, mLineIndex) {
var mLineElements = sdpLines[mLineIndex].split(' ');
// Scan from end for the convenience of removing an item.
for (var i = sdpLines.length-1; i >= 0; i--) {
var payload = extractSdp(sdpLines[i], /a=rtpmap:(\d+) CN\/\d+/i);
if (payload) {
var cnPos = mLineElements.indexOf(payload);
if (cnPos !== -1) {
// Remove CN payload from m line.
mLineElements.splice(cnPos, 1);
}
// Remove CN line in sdp
sdpLines.splice(i, 1);
}
}
sdpLines[mLineIndex] = mLineElements.join(' ');
return sdpLines;
}
</script>
<div id="container" ondblclick="enterFullScreen()">
<div id="card">
<div id="local">
<video width="100%" height="100%" id="localVideo" autoplay="autoplay" muted="true"/>
</div>
<div id="remote">
<video width="100%" height="100%" id="remoteVideo" autoplay="autoplay">
</video>
<div id="mini">
<video width="100%" height="100%" id="miniVideo" autoplay="autoplay" muted="true"/>
</div>
</div>
</div>
<div id="footer">
</div>
</div>
</body>
</html>
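To make the Opus-preferring SDP munging above concrete, here is a minimal worked example; the session description and payload numbers are illustrative, not taken from the app:

// Hypothetical offer: PCMU (0) is the default codec and opus (111) is present.
var sdp = ['v=0',
           'm=audio 49170 RTP/SAVPF 0 111 13',
           'a=rtpmap:0 PCMU/8000',
           'a=rtpmap:111 opus/48000/2',
           'a=rtpmap:13 CN/8000',
           ''].join('\r\n');
console.log(preferOpus(sdp));
// The audio m-line comes back as "m=audio 49170 RTP/SAVPF 111 0": opus is
// moved to the front, and the CN payload (13) is stripped from the m-line
// along with its a=rtpmap line.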

samples/js/apprtc/test/test_channel.html Normal file

@ -0,0 +1,93 @@
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<!--This is the test page for the message channel.
To run this test:
?debug=loopback&unittest=channel
-->
<html>
<head>
<link rel="canonical" href="{{ room_link }}"/>
<meta http-equiv="X-UA-Compatible" content="chrome=1"/>
<script src="/_ah/channel/jsapi"></script>
<script type="text/javascript">
var channel;
var pc;
var socket;
var expected_message_num = 8;
var receive = 0;
var test_msg =
'01234567890abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ';
var msg_larger_than_500 = "";
function trace(txt) {
// This function is used for logging.
var elem = document.getElementById("debug");
elem.innerHTML += txt + "<br>";
}
function runTest() {
trace("Initializing; room={{ room_key }}.");
var channel = new goog.appengine.Channel('{{ token }}');
var handler = {
'onopen': onChannelOpened,
'onmessage': onChannelMessage,
'onerror': onChannelError,
'onclose': onChannelClosed
};
for (i = 0; i < 9; ++i) {
msg_larger_than_500 += test_msg;
}
for (i = 0; i < 4; ++i) {
sendMessage({type: 'test', msgid: i, msg: msg_larger_than_500});
}
trace('channel.open');
socket = channel.open(handler);
for (i = 4; i < expected_message_num; ++i) {
sendMessage({type: 'test', msgid: i, msg: msg_larger_than_500});
}
}
function sendMessage(message) {
var msgString = JSON.stringify(message);
trace('C->S: ' + msgString);
path = '/message?r={{ room_key }}&u={{ me }}';
var xhr = new XMLHttpRequest();
xhr.open('POST', path, true);
xhr.send(msgString);
}
function onChannelOpened() {
trace('Channel opened.');
}
function onChannelMessage(message) {
if (message.data != JSON.stringify({type: 'test', msgid: receive,
msg: msg_larger_than_500})) {
trace('ERROR: Expect: ' + receive + ' Actual: ' + message.data);
} else {
trace('S->C: ' + message.data);
}
++receive;
if (receive == expected_message_num) {
trace('Received all the ' + expected_message_num + ' messages.');
trace('Test passed!');
} else if (receive > expected_message_num) {
trace('Received more than the expected number of messages.');
trace('Test failed!');
}
}
function onChannelError() {
trace('Channel error.');
}
function onChannelClosed() {
trace('Channel closed.');
}
</script>
</head>
<body onload="runTest()">
<pre id="debug"></pre>
</body>
</html>

samples/js/base/adapter.js Normal file

@ -0,0 +1,89 @@
var RTCPeerConnection = null;
var getUserMedia = null;
var attachMediaStream = null;
var reattachMediaStream = null;
var webrtcDetectedBrowser = null;
if (navigator.mozGetUserMedia) {
console.log("This appears to be Firefox");
webrtcDetectedBrowser = "firefox";
// The RTCPeerConnection object.
RTCPeerConnection = mozRTCPeerConnection;
// The RTCSessionDescription object.
RTCSessionDescription = mozRTCSessionDescription;
// The RTCIceCandidate object.
RTCIceCandidate = mozRTCIceCandidate;
// Get UserMedia (only difference is the prefix).
// Code from Adam Barth.
getUserMedia = navigator.mozGetUserMedia.bind(navigator);
// Attach a media stream to an element.
attachMediaStream = function(element, stream) {
console.log("Attaching media stream");
element.mozSrcObject = stream;
element.play();
};
reattachMediaStream = function(to, from) {
console.log("Reattaching media stream");
to.mozSrcObject = from.mozSrcObject;
to.play();
};
// Fake get{Video,Audio}Tracks
MediaStream.prototype.getVideoTracks = function() {
return [];
};
MediaStream.prototype.getAudioTracks = function() {
return [];
};
} else if (navigator.webkitGetUserMedia) {
console.log("This appears to be Chrome");
webrtcDetectedBrowser = "chrome";
// The RTCPeerConnection object.
RTCPeerConnection = webkitRTCPeerConnection;
// Get UserMedia (only difference is the prefix).
// Code from Adam Barth.
getUserMedia = navigator.webkitGetUserMedia.bind(navigator);
// Attach a media stream to an element.
attachMediaStream = function(element, stream) {
element.src = webkitURL.createObjectURL(stream);
};
reattachMediaStream = function(to, from) {
to.src = from.src;
};
// The representation of tracks in a stream is changed in M26.
// Unify them for earlier Chrome versions in the coexisting period.
if (!webkitMediaStream.prototype.getVideoTracks) {
webkitMediaStream.prototype.getVideoTracks = function() {
return this.videoTracks;
};
webkitMediaStream.prototype.getAudioTracks = function() {
return this.audioTracks;
};
}
// New syntax of getXXXStreams method in M26.
if (!webkitRTCPeerConnection.prototype.getLocalStreams) {
webkitRTCPeerConnection.prototype.getLocalStreams = function() {
return this.localStreams;
};
webkitRTCPeerConnection.prototype.getRemoteStreams = function() {
return this.remoteStreams;
};
}
} else {
console.log("Browser does not appear to be WebRTC-capable");
}
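A minimal usage sketch of the polyfill above (the element id "preview" is an assumption); the same code then runs unchanged on Chrome and Firefox:

getUserMedia({audio: true, video: true},
    function(stream) {
      // attachMediaStream hides the src/createObjectURL vs. mozSrcObject split.
      attachMediaStream(document.getElementById('preview'), stream);
    },
    function(error) {
      console.log('getUserMedia failed: ' + error);
    });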

9
samples/js/demos/OWNERS Normal file

@ -0,0 +1,9 @@
juberti@webrtc.org
braveyao@webrtc.org
wu@webrtc.org
hta@webrtc.org
vikasmarwaha@webrtc.org
phoglund@webrtc.org
kjellander@webrtc.org
henrika@webrtc.org

30
samples/js/demos/app.yaml Normal file

@ -0,0 +1,30 @@
application: webrtc-demos
version: 1
runtime: python27
api_version: 1
threadsafe: yes

handlers:
- url: /favicon\.ico
  static_files: favicon.ico
  upload: favicon\.ico
- url: /html
  static_dir: html
  secure: always
- url: /images
  static_dir: images
  secure: always
- url: /js
  static_dir: js
  secure: always
- url: .*
  script: main.app
  secure: always

libraries:
- name: webapp2
  version: "2.5.1"

Binary file added (1.1 KiB); content not shown.


@ -0,0 +1,262 @@
<html>
<head>
<title>Constraints and Statistics</title>
<script>
var mystream;
var pc1;
var pc2;
$ = function(id) {
return document.getElementById(id);
}
function log(txt) {
console.log(txt);
}
function openCamera() {
if (mystream) {
mystream.stop();
}
navigator.webkitGetUserMedia(cameraConstraints(), gotStream, function() {
log("GetUserMedia failed");
});
}
function gotStream(stream) {
log("GetUserMedia succeeded");
mystream = stream;
$("local-video").src = webkitURL.createObjectURL(stream);
}
function cameraConstraints() {
var constraints = {};
constraints.audio = true;
constraints.video = { mandatory: {}, optional: [] };
if ($("minwidth").value != "0") {
constraints.video.mandatory.minWidth = $("minwidth").value;
}
if ($("maxwidth").value != "0") {
constraints.video.mandatory.maxWidth = $("maxwidth").value;
}
if ($("minheight").value != "0") {
constraints.video.mandatory.minHeight = $("minheight").value;
}
if ($("maxheight").value != "0") {
constraints.video.mandatory.maxHeight = $("maxheight").value;
}
if ($("frameRate").value != "0") {
constraints.video.mandatory.minFrameRate = $("frameRate").value;
}
log('Camera constraints are ' + JSON.stringify(constraints));
$("cameraConstraints").innerHTML = JSON.stringify(constraints, null, ' ');
return constraints;
}
function streamConstraints() {
var constraints = { mandatory: {}, optional: [] };
if ($("bandwidth").value != "0") {
constraints.optional[0] = { 'bandwidth' : $('bandwidth').value };
}
log('Constraints are ' + JSON.stringify(constraints));
$("addStreamConstraints").innerHTML = JSON.stringify(constraints, null, ' ');
return constraints;
}
function connect() {
pc1 = new webkitRTCPeerConnection(null);
pc2 = new webkitRTCPeerConnection(null);
pc1.addStream(mystream, streamConstraints());
log('PC1 creating offer');
pc1.onnegotiationneeded = function() {
log('Negotiation needed - PC1');
}
pc2.onnegotiationneeded = function() {
log('Negotiation needed - PC2');
}
pc1.onicecandidate = function(e) {
log('Candidate PC1');
if (e.candidate) {
pc2.addIceCandidate(new RTCIceCandidate(e.candidate));
}
}
pc2.onicecandidate = function(e) {
log('Candidate PC2');
if (e.candidate) {
pc1.addIceCandidate(new RTCIceCandidate(e.candidate));
}
}
pc2.onaddstream = function(e) {
log('PC2 got stream');
$('remote-video').src = webkitURL.createObjectURL(e.stream);
log('Remote video is ' + $('remote-video').src);
}
pc1.createOffer(function(desc) {
log('PC1 offering');
pc1.setLocalDescription(desc);
pc2.setRemoteDescription(desc);
pc2.createAnswer(function(desc2) {
log('PC2 answering');
pc2.setLocalDescription(desc2);
pc1.setRemoteDescription(desc2);
});
});
}
// Display statistics
var statCollector = setInterval(function() {
var display = function(str) {
$('bitrate').innerHTML = str;
}
display("No stream");
if (pc2 && pc2.remoteStreams[0]) {
if (pc2.getStats) {
display('No stats callback');
pc2.getStats(function(stats) {
log('Raw stats ' + stats);
var statsString = '';
var results = stats.result();
log('Raw results ' + results);
for (var i = 0; i < results.length; ++i) {
var res = results[i];
log(i + ': ' + JSON.stringify(res));
statsString += '<h3>Report ';
statsString += i;
statsString += '</h3>';
if (res.local) {
statsString += "<p>Local ";
statsString += dumpStats(res.local);
}
if (res.remote) {
statsString += "<p>Remote ";
statsString += dumpStats(res.remote);
}
}
$('stats').innerHTML = statsString;
display('No bitrate stats');
});
} else {
display('No stats function. Use at least Chrome 24.0.1285');
}
} else {
log('Not connected yet');
}
// Collect some stats from the video tags.
local_video = $('local-video');
if (local_video) {
$('local-video-stats').innerHTML = local_video.videoWidth +
'x' + local_video.videoHeight;
}
remote_video = $('remote-video');
if (remote_video) {
$('remote-video-stats').innerHTML = remote_video.videoWidth +
'x' + remote_video.videoHeight;
}
}, 1000);
// Dumping a stats variable as a string.
// might be named toString?
function dumpStats(obj) {
var statsString = 'Timestamp:';
statsString += obj.timestamp;
if (obj.names) {
log('Have names function');
names = obj.names();
for (var i = 0; i < names.length; ++i) {
statsString += '<br>';
statsString += names[i];
statsString += ':';
statsString += obj.stat(names[i]);
}
} else {
log('No names function');
if (obj.stat('audioOutputLevel')) {
statsString += "audioOutputLevel: ";
statsString += obj.stat('audioOutputLevel');
statsString += "<br>";
}
}
return statsString;
}
// Utility to show the value of a field in a span called name+Display
function showValue(name, value) {
$(name + 'Display').innerHTML = value;
}
</script>
</head>
<body>
<h1>Constraints and Statistics</h1>
This page is meant to give some hints on how one can use constraints and statistics in WebRTC applications.
<p>
The form to the left gives constraints you can set on the getUserMedia call.
When you hit "Capture!", it will (re)open the camera with these constraints.
<p>
The left picture is the local preview. The right picture is the picture
after being passed through the PeerConnection (locally).
<p>
Underneath the picture you will see a running display of how many Kbits/sec
the video feed uses for transmission.
<hr>
<table>
<tr>
<td align="top">
<h2>getUserMedia constraints</h2>
<table>
<tr><td><td>Min<td>Max
<tr><td>Horizontal
<td><input type="range" id="minwidth" min="0" max="1280" value="300"
onchange="showValue(this.id, this.value)">
<td><input type="range" id="maxwidth" min="0" max="1280" value="640"
onchange="showValue(this.id, this.value)">
<td><span id="minwidthDisplay">300</span>-<span id="maxwidthDisplay">640</span>
<tr><td>Vertical
<td><input type="range" id="minheight" min="0" max="1280" value="200"
onchange="showValue(this.id, this.value)">
<td><input type="range" id="maxheight" min="0" max="1280" value="480"
onchange="showValue(this.id, this.value)">
<td><span id="minheightDisplay">200</span>-<span id="maxheightDisplay">480</span>
<tr><td>
FrameRate
<td colspan=2><input type="range" id="frameRate" min="0" max="60" value="30"
onchange="showValue(this.id, this.value)">
<td><span id="frameRateDisplay">30</span>
</table>
<input type="submit" name="capture" value="Capture!" onclick="openCamera()">
</td>
<td align="top">
<h2>addStream constraints</h2>
Maximum bitrate
<input type="range" id="bandwidth" min="0" max="2000" value="1000"
onchange="showValue(this.id, this.value)">
<span id="bandwidthDisplay">1000</span>
<br>
<input type="submit" name="connect" value="Connect!" onclick="connect()">
</td>
</tr>
<tr>
<td>
<video id="local-video" autoplay width=400></video>
</td>
<td>
<video id="remote-video" autoplay width=400></video>
</td>
<tr>
<td><span id="local-video-stats"></span>
<td><span id="remote-video-stats"></span>
<br>
<span id="bitrate">Bitrate unknown</span>
</td>
</tr>
<tr>
<td><pre><span id="cameraConstraints"></span></pre>
<td><pre><span id="addStreamConstraints"></span></pre>
</table>
<h2>Statistics report display</h2>
<div id="stats">Stats will appear here.</div>
</body>
</html>
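The kbit/s display mentioned above needs two consecutive samples of a byte counter. A minimal sketch, assuming the legacy stats report exposes a 'bytesSent' value on the local report (the names and placement here are assumptions, not part of the page above):

var lastBytesSent = -1;
var lastSampleTime = -1;
function updateBitrate(res) {
  // res is one entry of stats.result(); use the local half of the report.
  var bytesSent = Number(res.local.stat('bytesSent'));
  var now = Date.now();
  if (lastSampleTime > 0) {
    // 8 * bytes / milliseconds equals kilobits per second.
    var kbps = 8 * (bytesSent - lastBytesSent) / (now - lastSampleTime);
    $('bitrate').innerHTML = kbps.toFixed(0) + ' kbit/s';
  }
  lastBytesSent = bytesSent;
  lastSampleTime = now;
}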

176
samples/js/demos/html/dc1.html Executable file

@ -0,0 +1,176 @@
<!DOCTYPE html>
<html>
<head>
<title>Data Channel Demo 1</title>
<style>
button {
font: 18px sans-serif;
padding: 8px;
}
textarea {
font-family: monospace;
margin: 2px;
width: 480px;
height: 640px;
}
#left { position: absolute; left: 0; top: 0; width: 50%; }
#right { position: absolute; right: 0; top: 0; width: 50%; }
</style>
</head>
<body>
<div id="left">
<br>
<h2>Send data</h2>
<textarea id="dataChannelSend" rows="5" cols="15" disabled="true">
</textarea><br>
<button id="startButton" onclick="createConnection()">Start</button>
<button id="sendButton" onclick="sendData()">Send Data</button>
<button id="closeButton" onclick="closeDataChannels()">Stop Send Data
</button>
<br>
</div>
<div id="right">
<br>
<h2>Received Data</h2>
<textarea id="dataChannelReceive" rows="5" cols="15" disabled="true">
</textarea><br>
</div>
<script>
var pc1, pc2, sendChannel, receiveChannel;
startButton.disabled = false;
sendButton.disabled = true;
closeButton.disabled = true;
function trace(text) {
// This function is used for logging.
if (text[text.length - 1] == '\n') {
text = text.substring(0, text.length - 1);
}
console.log((performance.now() / 1000).toFixed(3) + ": " + text);
}
function createConnection() {
var servers = null;
pc1 = new webkitRTCPeerConnection(servers,
{optional: [{RtpDataChannels: true}]});
trace('Created local peer connection object pc1');
try {
// Reliable data channels are not yet supported in Chrome; the data
// channel API is available from Chrome M25, and Chrome must be started
// with the --enable-data-channels flag.
sendChannel = pc1.createDataChannel("sendDataChannel",
{reliable: false});
trace('Created send data channel');
} catch (e) {
alert('Failed to create data channel. ' +
'You need Chrome M25 or later with --enable-data-channels flag');
trace('Create Data channel failed with exception: ' + e.message);
}
pc1.onicecandidate = iceCallback1;
sendChannel.onopen = onSendChannelStateChange;
sendChannel.onclose = onSendChannelStateChange;
pc2 = new webkitRTCPeerConnection(servers,
{optional: [{RtpDataChannels: true}]});
trace('Created remote peer connection object pc2');
pc2.onicecandidate = iceCallback2;
pc2.ondatachannel = receiveChannelCallback;
pc1.createOffer(gotDescription1);
startButton.disabled = true;
closeButton.disabled = false;
}
function sendData() {
var data = document.getElementById("dataChannelSend").value;
sendChannel.send(data);
trace('Sent Data: ' + data);
}
function closeDataChannels() {
trace('Closing data channels');
sendChannel.close();
trace('Closed data channel with label: ' + sendChannel.label);
receiveChannel.close();
trace('Closed data channel with label: ' + receiveChannel.label);
pc1.close();
pc2.close();
pc1 = null;
pc2 = null;
trace('Closed peer connections');
startButton.disabled = false;
sendButton.disabled = true;
closeButton.disabled = true;
document.getElementById("dataChannelSend").value = "";
document.getElementById("dataChannelReceive").value = "";
document.getElementById("dataChannelSend").disabled = true;
}
function gotDescription1(desc) {
pc1.setLocalDescription(desc);
trace('Offer from pc1 \n' + desc.sdp);
pc2.setRemoteDescription(desc);
pc2.createAnswer(gotDescription2);
}
function gotDescription2(desc) {
pc2.setLocalDescription(desc);
trace('Answer from pc2 \n' + desc.sdp);
pc1.setRemoteDescription(desc);
}
function iceCallback1(event) {
trace('local ice callback');
if (event.candidate) {
pc2.addIceCandidate(event.candidate);
trace('Local ICE candidate: \n' + event.candidate.candidate);
}
}
function iceCallback2(event) {
trace('remote ice callback');
if (event.candidate) {
pc1.addIceCandidate(event.candidate);
trace('Remote ICE candidate: \n ' + event.candidate.candidate);
}
}
function receiveChannelCallback(event) {
trace('Receive Channel Callback');
receiveChannel = event.channel;
receiveChannel.onmessage = onReceiveMessageCallback;
receiveChannel.onopen = onReceiveChannelStateChange;
receiveChannel.onclose = onReceiveChannelStateChange;
}
function onReceiveMessageCallback(event) {
trace('Received Message');
document.getElementById("dataChannelReceive").value = event.data;
}
function onSendChannelStateChange() {
var readyState = sendChannel.readyState;
trace('Send channel state is: ' + readyState);
if (readyState == "open") {
document.getElementById("dataChannelSend").disabled = false;
sendButton.disabled = false;
closeButton.disabled = false;
} else {
document.getElementById("dataChannelSend").disabled = true;
sendButton.disabled = true;
closeButton.disabled = true;
}
}
function onReceiveChannelStateChange() {
var readyState = receiveChannel.readyState;
trace('Receive channel state is: ' + readyState);
}
</script>
</body>
</html>
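As a footnote to the reliability caveat in the comments above, one way to probe for reliable-channel support is a sketch like the following, assuming createDataChannel throws when the requested mode is unavailable (as M25-era Chrome did for reliable channels):

function supportsReliableChannels(pc) {
  // Hypothetical helper: try to open a default (reliable) channel.
  try {
    var probe = pc.createDataChannel('probe');
    probe.close();
    return true;
  } catch (e) {
    // Fall back to {reliable: false}, as the demo above does.
    return false;
  }
}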


@ -0,0 +1,151 @@
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<html>
<head>
<script type="text/javascript" src="../js/ccv.js"></script>
<script type="text/javascript" src="../js/face.js"></script>
<script src="/_ah/channel/jsapi"></script>
<style type="text/css">
* { margin:0; padding:0; } /* to remove the top and left whitespace */
html, body { width:100%; height:100%; } /* just to be sure these are full screen*/
body {font-family: 'Helvetica';background-color: #000000; }
a:link { color: #ffffff; } a:visited {color: #ffffff; }
#localCanvas {
display: block;
position: absolute;
width: 100%;
height: 100%;
}
#localVideo {
display: block;
position: absolute;
top: 0;
bottom: 0;
left: 0;
right: 0;
width: 100%;
height: 100%;
-webkit-transition-property: opacity;
-webkit-transition-duration: 2s;
opacity: 0;
}
#logo {
display: block;
top: 4px;
right: 4px;
position: absolute;
float: right;
/* opacity: 0.8; */
}
#credit {
display: block;
top: 28px;
right: 4px;
position: absolute;
float: right;
font-size: 10px;
}
</style>
<title>WebRTC Face Reco Demo Application</title>
</head>
<body>
<script type="text/javascript">
var localVideo;
var localCanvas;
//var worker = new Worker('ccv.js');
initialize = function() {
localVideo = document.getElementById("localVideo");
localCanvas = document.getElementById("localCanvas");
getUserMedia();
}
getUserMedia = function() {
try {
navigator.webkitGetUserMedia({video:true, audio:true}, onGotStream, onFailedStream);
//trace("Requested access to local media");
} catch (e) {
alert("getUserMedia error " + e);
//trace_e(e, "getUserMedia error");
}
}
poll = function() {
var w = localVideo.videoWidth;
var h = localVideo.videoHeight;
var canvas = document.createElement('canvas');
canvas.width = w;
canvas.height = h;
var ctx = canvas.getContext('2d');
ctx.drawImage(localVideo, 0, 0, w, h);
var comp = ccv.detect_objects({ "canvas" : ccv.grayscale(canvas),
"cascade" : cascade,
"interval" : 5,
"min_neighbors" : 1 });
/* draw detected area */
//localCanvas.left = 400;
//localCanvas.top = localVideo.top;
/*localCanvas.right = localVideo.right;
localCanvas.bottom = localVideo.bottom;*/
localCanvas.width = localVideo.clientWidth;
localCanvas.height = localVideo.clientHeight;
var ctx2 = localCanvas.getContext('2d');
ctx2.lineWidth = 2;
ctx2.lineJoin = "round";
ctx2.clearRect (0, 0, localCanvas.width,localCanvas.height);
var x_offset = 0, y_offset = 0, x_scale = 1, y_scale = 1;
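// The video element letterboxes the frame to preserve aspect ratio, so
// compute the offsets and scale factors that map detection coordinates
// from the captured frame onto the displayed area before drawing.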
if (localVideo.clientWidth * localVideo.videoHeight > localVideo.videoWidth * localVideo.clientHeight) {
x_offset = (localVideo.clientWidth - localVideo.clientHeight * localVideo.videoWidth / localVideo.videoHeight) / 2;
} else {
y_offset = (localVideo.clientHeight - localVideo.clientWidth * localVideo.videoHeight / localVideo.videoWidth) / 2;
}
x_scale = (localVideo.clientWidth - x_offset * 2) / localVideo.videoWidth;
y_scale = (localVideo.clientHeight - y_offset * 2) / localVideo.videoHeight;
for (var i = 0; i < comp.length; i++) {
comp[i].x = comp[i].x * x_scale + x_offset;
comp[i].y = comp[i].y * y_scale + y_offset;
comp[i].width = comp[i].width * x_scale;
comp[i].height = comp[i].height * y_scale;
var opacity = 0.1;
if (comp[i].confidence > 0) {
opacity += comp[i].confidence / 10;
if (opacity > 1.0) opacity = 1.0;
}
//ctx2.strokeStyle = "rgba(255,0,0," + opacity * 255 + ")";
ctx2.lineWidth = opacity * 10;
ctx2.strokeStyle = "rgb(255,0,0)";
ctx2.strokeRect(comp[i].x, comp[i].y, comp[i].width, comp[i].height);
}
setTimeout(poll, 1000);
}
onGotStream = function(stream) {
var url = webkitURL.createObjectURL(stream);
localVideo.style.opacity = 1; localVideo.src = url;
localStream = stream;
//trace("User has granted access to local media. url = " + url);
setTimeout(poll, 2000);
}
onFailedStream = function(error) {
alert("Failed to get access to local media. Error code was " + error.code + ".");
//trace_warning("Failed to get access to local media. Error code was " + error.code);
}
setTimeout(initialize, 1);
</script>
<video id="localVideo" autoplay="autoplay"></video>
<canvas width="1000" height="1000" id="localCanvas"></canvas>
<a href="http://www.webrtc.org"><img id="logo" alt="WebRTC" src="../images/webrtc_black_20p.png"></a>
<a href="http://liuliu.me/eyes/javascript-face-detection-explained"><div id="credit">JS Face Detect by Liu Liu</div></a>
</body>
</html>


@ -0,0 +1,33 @@
<!DOCTYPE html>
<html>
<head>
<title>getUserMedia Demo 1</title>
<style>
video {
border:5px solid black;
width:480px;
height:360px;
}
button {
font: 18px sans-serif;
padding: 8px;
}
</style>
</head>
<body>
<video id="vid" autoplay="true"></video>
<br>
<button id="btn" onclick="start()">Start</button>
<script>
video = document.getElementById("vid");
function start() {
navigator.webkitGetUserMedia({video:true}, gotStream, function() {});
btn.disabled = true;
}
function gotStream(stream) {
video.src = webkitURL.createObjectURL(stream);
}
</script>
</body>
</html>


@ -0,0 +1,48 @@
<!DOCTYPE html>
<html>
<head>
<title>getUserMedia Demo 2</title>
<style>
video {
border:5px solid black;
width:480px;
height:360px;
}
canvas {
border:5px solid black;
width:480px;
height:360px;
}
button {
font: 18px sans-serif;
padding: 8px;
}
</style>
</head>
<body>
<video id="vid" autoplay="true"></video>
<canvas id="cvs"></canvas>
<br>
<button id="btn1" onclick="start()">Start</button>
<button id="btn2" onclick="snap()">Snapshot</button>
<script>
video = document.getElementById("vid");
canvas = document.getElementById("cvs");
canvas.width = 480;
canvas.height = 360;
btn2.disabled = true;
function start() {
navigator.webkitGetUserMedia({video:true}, gotStream, function() {});
btn1.disabled = true;
}
function gotStream(stream) {
video.src = webkitURL.createObjectURL(stream);
btn2.disabled = false;
}
function snap() {
canvas.getContext("2d").drawImage(video, 0, 0, canvas.width, canvas.height);
}
</script>
</body>
</html>


@ -0,0 +1,74 @@
<!DOCTYPE html>
<html>
<head>
<title>getUserMedia Demo 3</title>
<style>
video {
border:5px solid black;
width:480px;
height:360px;
}
canvas {
border:5px solid black;
width:480px;
height:360px;
}
button {
font: 18px sans-serif;
padding: 8px;
}
.grayscale {
-webkit-filter: grayscale(1);
}
.sepia {
-webkit-filter: sepia(1);
}
.invert {
-webkit-filter: invert(1);
}
.blur {
-webkit-filter: blur(3px);
}
</style>
</head>
<body>
<video id="vid" autoplay="true"></video>
<canvas id="cvs"></canvas>
<br>
<button id="btn1" onclick="start()">Start</button>
<button id="btn2" onclick="change()">Change Filter</button>
<button id="btn3" onclick="snap()">Snapshot</button>
<script>
filters = ["", "sepia", "invert", "blur", "grayscale"];
findex = 0;
video = document.getElementById("vid");
canvas = document.getElementById("cvs");
canvas.width = 480;
canvas.height = 360;
btn2.disabled = true;
btn3.disabled = true;
function start() {
navigator.webkitGetUserMedia({video:true}, gotStream, function() {});
btn1.disabled = true;
}
function gotStream(stream) {
video.src = webkitURL.createObjectURL(stream);
btn2.disabled = false;
btn3.disabled = false;
}
function change() {
video.className = '';
findex = (findex + 1) % filters.length;
if (findex != 0)
video.classList.add(filters[findex]);
}
function snap() {
canvas.className = '';
if (findex != 0)
canvas.classList.add(filters[findex]);
canvas.getContext("2d").drawImage(video, 0, 0, canvas.width, canvas.height);
}
</script>
</body>
</html>


@ -0,0 +1,91 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>Local Audio Rendering Demo</title>
<script type="text/javascript" src="../../base/adapter.js"></script>
<script>
var audioElement;
var buttonStart;
var buttonStop;
var localStream;
$ = function(id) {
return document.getElementById(id);
};
function start() {
var constraints = {audio:true, video:false};
getUserMedia(constraints, gotStream, gotStreamFailed);
buttonStart.disabled = true;
buttonStop.disabled = false;
}
function stop() {
buttonStart.disabled = false;
buttonStop.disabled = true;
localStream.stop();
}
function gotStream(stream) {
videoTracks = stream.getVideoTracks();
audioTracks = stream.getAudioTracks();
if (audioTracks.length == 1 && videoTracks.length == 0) {
console.log('gotStream({audio:true, video:false})');
console.log('Using audio device: ' + audioTracks[0].label);
attachMediaStream(audioElement, stream);
// The audio will be muted by default from start.
// Unmute and set volume to max level so we can listen to audio in
// loopback. We restore the volume in a 'play' event to ensure that
// loading has been done (auto-mute is performed during load).
audioElement.addEventListener('play', function() {
audioElement.muted = false;
audioElement.volume = 1;
console.log('Unmuting and setting volume to max level');
}, false);
stream.onended = function() {
console.log('stream.onended');
buttonStart.disabled = false;
buttonStop.disabled = true;
};
localStream = stream;
} else {
alert('The media stream contains an invalid amount of audio tracks.');
stream.stop();
}
}
function gotStreamFailed(error) {
buttonStart.disabled = false;
buttonStop.disabled = true;
alert('Failed to get access to local media. Error code: ' + error.code);
}
function onload() {
audioElement = $('audio');
buttonStart = $('start');
buttonStop = $('stop');
buttonStart.disabled = false;
buttonStop.disabled = true;
}
</script>
</head>
<body onload="onload()">
<h2>Rendering of a local media stream using &lt;audio&gt;</h2>
<p>Demonstrates usage of a local media stream connected to an HTML5 audio tag.<br>
Press Start, select a microphone and listen to your own voice in loopback.</p>
<style>
button {
font: 14px sans-serif;
padding: 8px;
}
</style>
<audio id="audio" autoplay="autoplay" controls="controls"></audio><br><br>
<button id="start" onclick="start()">Start</button>
<button id="stop" onclick="stop()">Stop</button>
</body>
</html>


@ -0,0 +1,187 @@
<!DOCTYPE html>
<html>
<head>
<title>PeerConnection Demo 1</title>
<!-- Load the polyfill to switch-hit between Chrome and Firefox -->
<script src="../../base/adapter.js"></script>
<style>
video {
border:5px solid black;
width:480px;
height:360px;
}
button {
font: 18px sans-serif;
padding: 8px;
}
textarea {
font-family: monospace;
margin: 2px;
width:480px;
height:640px;
}
</style>
</head>
<body>
<video id="vid1" autoplay></video>
<video id="vid2" autoplay></video>
<video id="vid3" autoplay></video>
<br>
<button id="btn1" onclick="start()">Start</button>
<button id="btn2" onclick="call()">Call</button>
<button id="btn3" onclick="hangup()">Hang Up</button>
<br>
<script>
//var vid1 = document.getElementById("vid1");
//var vid2 = document.getElementById("vid2");
btn1.disabled = false;
btn2.disabled = true;
btn3.disabled = true;
var pc1_local, pc1_remote;
var pc2_local, pc2_remote;
var localstream;
var sdpConstraints = {'mandatory': {
'OfferToReceiveAudio':true,
'OfferToReceiveVideo':true }};
function trace(text) {
// This function is used for logging.
if (text[text.length - 1] == '\n') {
text = text.substring(0, text.length - 1);
}
console.log((performance.now() / 1000).toFixed(3) + ": " + text);
}
function gotStream(stream){
trace("Received local stream");
// Call the polyfill wrapper to attach the media stream to this element.
attachMediaStream(vid1, stream);
localstream = stream;
btn2.disabled = false;
}
function start() {
trace("Requesting local stream");
btn1.disabled = true;
// Call into getUserMedia via the polyfill (adapter.js).
getUserMedia({audio:true, video:true},
gotStream, function() {});
}
function call() {
btn2.disabled = true;
btn3.disabled = false;
trace("Starting calls");
videoTracks = localstream.getVideoTracks();
audioTracks = localstream.getAudioTracks();
if (videoTracks.length > 0)
trace("Using Video device: " + videoTracks[0].label);
if (audioTracks.length > 0)
trace("Using Audio device: " + audioTracks[0].label);
// Create an RTCPeerConnection via the polyfill (adapter.js).
var servers = null;
pc1_local = new RTCPeerConnection(servers);
pc1_remote = new RTCPeerConnection(servers);
pc1_remote.onaddstream = gotRemoteStream1;
pc1_local.onicecandidate = iceCallback1Local;
pc1_remote.onicecandidate = iceCallback1Remote;
trace("PC1: created local and remote peer connection objects");
pc2_local = new RTCPeerConnection(servers);
pc2_remote = new RTCPeerConnection(servers);
pc2_remote.onaddstream = gotRemoteStream2;
pc2_local.onicecandidate = iceCallback2Local;
pc2_remote.onicecandidate = iceCallback2Remote;
trace("PC2: created local and remote peer connection objects");
pc1_local.addStream(localstream);
trace("Adding local stream to pc1_local");
pc1_local.createOffer(gotDescription1Local);
pc2_local.addStream(localstream);
trace("Adding local stream to pc2_local");
pc2_local.createOffer(gotDescription2Local);
}
function gotDescription1Local(desc) {
pc1_local.setLocalDescription(desc);
trace("Offer from pc1_local \n" + desc.sdp);
pc1_remote.setRemoteDescription(desc);
// Since the "remote" side has no media stream we need
// to pass in the right constraints in order for it to
// accept the incoming offer of audio and video.
pc1_remote.createAnswer(gotDescription1Remote, null, sdpConstraints);
}
function gotDescription1Remote(desc) {
pc1_remote.setLocalDescription(desc);
trace("Answer from pc1_remote \n" + desc.sdp);
pc1_local.setRemoteDescription(desc);
}
function gotDescription2Local(desc) {
pc2_local.setLocalDescription(desc);
trace("Offer from pc2_local \n" + desc.sdp);
pc2_remote.setRemoteDescription(desc);
// Since the "remote" side has no media stream we need
// to pass in the right constraints in order for it to
// accept the incoming offer of audio and video.
pc2_remote.createAnswer(gotDescription2Remote, null, sdpConstraints);
}
function gotDescription2Remote(desc) {
pc2_remote.setLocalDescription(desc);
trace("Answer from pc2_remote \n" + desc.sdp);
pc2_local.setRemoteDescription(desc);
}
function hangup() {
trace("Ending calls");
pc1_local.close();
pc1_remote.close();
pc2_local.close();
pc2_remote.close();
pc1_local = pc1_remote = null;
pc2_local = pc2_remote = null;
btn3.disabled = true;
btn2.disabled = false;
}
function gotRemoteStream1(e) {
// Use the polyfill wrapper here too, so the demo also works in Firefox.
attachMediaStream(vid2, e.stream);
trace("PC1: Received remote stream");
}
function gotRemoteStream2(e) {
attachMediaStream(vid3, e.stream);
trace("PC2: Received remote stream");
}
function iceCallback1Local(event) {
handleCandidate(event.candidate, pc1_remote, "PC1: ", "local");
}
function iceCallback1Remote(event) {
handleCandidate(event.candidate, pc1_local, "PC1: ", "remote");
}
function iceCallback2Local(event) {
handleCandidate(event.candidate, pc2_remote, "PC2: ", "local");
}
function iceCallback2Remote(event) {
handleCandidate(event.candidate, pc2_local, "PC2: ", "remote");
}
function handleCandidate(candidate, dest, prefix, type) {
if (candidate) {
dest.addIceCandidate(new RTCIceCandidate(candidate));
trace(prefix + "New " + type + " ICE candidate: " + candidate.candidate);
}
}
</script>
</body>
</html>


@ -0,0 +1,133 @@
<!DOCTYPE html>
<html>
<head>
<title>PeerConnection Demo 1</title>
<style>
video {
border:5px solid black;
width:480px;
height:360px;
}
button {
font: 18px sans-serif;
padding: 8px;
}
textarea {
font-family: monospace;
margin: 2px;
width:480px;
height:640px;
}
</style>
</head>
<body>
<video id="vid1" autoplay></video>
<video id="vid2" autoplay></video>
<br>
<button id="btn1" onclick="start()">Start</button>
<button id="btn2" onclick="call()">Call</button>
<button id="btn3" onclick="hangup()">Hang Up</button>
<br>
<!-- <textarea id="ta1"></textarea>
<textarea id="ta2"></textarea> -->
<script>
//var vid1 = document.getElementById("vid1");
//var vid2 = document.getElementById("vid2");
btn1.disabled = false;
btn2.disabled = true;
btn3.disabled = true;
var pc1,pc2;
var localstream;
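// This demo uses the deprecated webkitPeerConnection00 API: createOffer and
// createAnswer return descriptions synchronously, descriptions are applied
// with SDP_OFFER/SDP_ANSWER role constants, and ICE is started explicitly
// with startIce(). See pc1.html for the RTCPeerConnection version.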
function trace(text) {
// This function is used for logging.
if (text[text.length - 1] == '\n') {
text = text.substring(0, text.length - 1);
}
console.log((performance.now() / 1000).toFixed(3) + ": " + text);
}
function gotStream(stream){
trace("Received local stream");
vid1.src = webkitURL.createObjectURL(stream);
localstream = stream;
btn2.disabled = false;
}
function start() {
trace("Requesting local stream");
btn1.disabled = true;
navigator.webkitGetUserMedia({audio:true, video:true},
gotStream, function() {});
}
function call() {
btn2.disabled = true;
btn3.disabled = false;
trace("Starting call");
if (localstream.videoTracks.length > 0)
trace('Using Video device: ' + localstream.videoTracks[0].label);
if (localstream.audioTracks.length > 0)
trace('Using Audio device: ' + localstream.audioTracks[0].label);
pc1 = new webkitPeerConnection00(null, iceCallback1);
trace("Created local peer connection object pc1");
pc2 = new webkitPeerConnection00(null, iceCallback2);
trace("Created remote peer connection object pc2");
pc2.onaddstream = gotRemoteStream;
pc1.addStream(localstream);
trace("Adding Local Stream to peer connection");
var offer = pc1.createOffer(null);
trace("Created offer:\n" + offer.toSdp());
pc1.setLocalDescription(pc1.SDP_OFFER, offer);
trace("SetLocalDesc1");
pc2.setRemoteDescription(pc2.SDP_OFFER, offer);
trace("SetRemoteDesc2");
//ta1.value = offer.toSdp();
var answer = pc2.createAnswer(offer.toSdp(),
{has_audio:true, has_video:true});
trace("Created answer:\n" + answer.toSdp());
pc2.setLocalDescription(pc2.SDP_ANSWER, answer);
trace("SetLocalDesc2");
pc1.setRemoteDescription(pc1.SDP_ANSWER, answer);
trace("SetRemoteDesc1");
//ta2.value = answer.toSdp();
pc1.startIce();
pc2.startIce();
trace("Started ICE for both local & remote");
}
function hangup() {
trace("Ending call");
pc1.close();
pc2.close();
pc1 = null;
pc2 = null;
btn3.disabled = true;
btn2.disabled = false;
}
function gotRemoteStream(e){
vid2.src = webkitURL.createObjectURL(e.stream);
trace("Received remote stream");
}
function iceCallback1(candidate,bMore){
if (candidate) {
pc2.processIceMessage(candidate);
trace("Local ICE candidate: " + candidate.toSdp());
}
}
function iceCallback2(candidate,bMore){
if (candidate) {
pc1.processIceMessage(candidate);
trace("Remote ICE candidate: " + candidate.toSdp());
}
}
</script>
</body>
</html>


@ -0,0 +1,143 @@
<!DOCTYPE html>
<html>
<head>
<title>PeerConnection Demo 1</title>
<!-- Load the polyfill to switch-hit between Chrome and Firefox -->
<script src="../../base/adapter.js"></script>
<style>
video {
border:5px solid black;
width:480px;
height:360px;
}
button {
font: 18px sans-serif;
padding: 8px;
}
textarea {
font-family: monospace;
margin: 2px;
width:480px;
height:640px;
}
</style>
</head>
<body>
<video id="vid1" autoplay></video>
<video id="vid2" autoplay></video>
<br>
<button id="btn1" onclick="start()">Start</button>
<button id="btn2" onclick="call()">Call</button>
<button id="btn3" onclick="hangup()">Hang Up</button>
<br>
<!-- <textarea id="ta1"></textarea>
<textarea id="ta2"></textarea> -->
<script>
//var vid1 = document.getElementById("vid1");
//var vid2 = document.getElementById("vid2");
btn1.disabled = false;
btn2.disabled = true;
btn3.disabled = true;
var pc1,pc2;
var localstream;
var sdpConstraints = {'mandatory': {
'OfferToReceiveAudio':true,
'OfferToReceiveVideo':true }};
function trace(text) {
// This function is used for logging.
if (text[text.length - 1] == '\n') {
text = text.substring(0, text.length - 1);
}
console.log((performance.now() / 1000).toFixed(3) + ": " + text);
}
function gotStream(stream){
trace("Received local stream");
// Call the polyfill wrapper to attach the media stream to this element.
attachMediaStream(vid1, stream);
localstream = stream;
btn2.disabled = false;
}
function start() {
trace("Requesting local stream");
btn1.disabled = true;
// Call into getUserMedia via the polyfill (adapter.js).
getUserMedia({audio:true, video:true},
gotStream, function() {});
}
function call() {
btn2.disabled = true;
btn3.disabled = false;
trace("Starting call");
var videoTracks = localstream.getVideoTracks();
var audioTracks = localstream.getAudioTracks();
if (videoTracks.length > 0)
trace('Using Video device: ' + videoTracks[0].label);
if (audioTracks.length > 0)
trace('Using Audio device: ' + audioTracks[0].label);
var servers = null;
pc1 = new RTCPeerConnection(servers);
trace("Created local peer connection object pc1");
pc1.onicecandidate = iceCallback1;
pc2 = new RTCPeerConnection(servers);
trace("Created remote peer connection object pc2");
pc2.onicecandidate = iceCallback2;
pc2.onaddstream = gotRemoteStream;
pc1.addStream(localstream);
trace("Adding Local Stream to peer connection");
pc1.createOffer(gotDescription1);
}
function gotDescription1(desc){
pc1.setLocalDescription(desc);
trace("Offer from pc1 \n" + desc.sdp);
pc2.setRemoteDescription(desc);
// Since the "remote" side has no media stream we need
// to pass in the right constraints in order for it to
// accept the incoming offer of audio and video.
pc2.createAnswer(gotDescription2, null, sdpConstraints);
}
function gotDescription2(desc){
pc2.setLocalDescription(desc);
trace("Answer from pc2 \n" + desc.sdp);
pc1.setRemoteDescription(desc);
}
function hangup() {
trace("Ending call");
pc1.close();
pc2.close();
pc1 = null;
pc2 = null;
btn3.disabled = true;
btn2.disabled = false;
}
function gotRemoteStream(e){
// Use the polyfill wrapper here too, so the demo also works in Firefox.
attachMediaStream(vid2, e.stream);
trace("Received remote stream");
}
function iceCallback1(event){
if (event.candidate) {
pc2.addIceCandidate(new RTCIceCandidate(event.candidate));
trace("Local ICE candidate: \n" + event.candidate.candidate);
}
}
function iceCallback2(event){
if (event.candidate) {
pc1.addIceCandidate(new RTCIceCandidate(event.candidate));
trace("Remote ICE candidate: \n " + event.candidate.candidate);
}
}
</script>
</body>
</html>
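For reference, a minimal sketch (not part of this commit) of the same in-page offer/answer handshake written against the later promise-based RTCPeerConnection API; localVideo and remoteVideo are illustrative element names, not identifiers from pc1.html.

// Sketch only: pc1.html's handshake with the modern promise-based API.
var pcLocal = new RTCPeerConnection(null);
var pcRemote = new RTCPeerConnection(null);
// Trickle ICE candidates straight to the other in-page connection.
pcLocal.onicecandidate = function(e) {
  if (e.candidate) pcRemote.addIceCandidate(e.candidate);
};
pcRemote.onicecandidate = function(e) {
  if (e.candidate) pcLocal.addIceCandidate(e.candidate);
};
pcRemote.ontrack = function(e) { remoteVideo.srcObject = e.streams[0]; };
navigator.mediaDevices.getUserMedia({ audio: true, video: true })
  .then(function(stream) {
    localVideo.srcObject = stream;
    stream.getTracks().forEach(function(t) { pcLocal.addTrack(t, stream); });
    return pcLocal.createOffer();
  })
  .then(function(offer) {
    return pcLocal.setLocalDescription(offer)
      .then(function() { return pcRemote.setRemoteDescription(offer); });
  })
  .then(function() { return pcRemote.createAnswer(); })
  .then(function(answer) {
    return pcRemote.setLocalDescription(answer)
      .then(function() { return pcLocal.setRemoteDescription(answer); });
  });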


@ -0,0 +1,125 @@
<!DOCTYPE html>
<html>
<head>
<title>PeerConnection PRANSWER Demo</title>
<style>
video {
border:5px solid black;
width:320px;
height:240px;
}
</style>
</head>
<body>
<video id="vid1" autoplay></video>
<video id="vid2" autoplay></video>
<br>
<button id="btn1" onclick="start()">Call</button>
<button id="btn15" onclick="accept()">Accept</button>
<button id="btn2" onclick="stop()">Hang Up</button>
<script>
//var vid1 = document.getElementById("vid1");
//var vid2 = document.getElementById("vid2");
btn1.disabled = true;
btn2.disabled = true;
var pc1,pc2;
var localstream;
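// PRANSWER flow: the callee first applies a provisional answer with all
// media rewritten to a=inactive; pressing Accept rewrites it to a=sendrecv
// and applies it as the final answer, promoting the provisional session to
// a live call.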
function trace(txt) {
// This function is used for logging.
console.log(txt);
}
function traceCandidate(kind, cand) {
trace("Candidate(" + kind + "): " + cand.label + ": " +
cand.toSdp().replace("\n", ""));
}
function gotStream(stream){
trace("Received local stream");
vid1.src = webkitURL.createObjectURL(stream);
localstream = stream;
btn1.disabled = false;
}
navigator.webkitGetUserMedia({audio:true, video:true}, gotStream, function() {});
function start() {
btn1.disabled = true;
btn2.disabled = false;
trace("Starting Call");
if (localstream.videoTracks.length > 0)
trace('Using Video device: ' + localstream.videoTracks[0].label);
if (localstream.audioTracks.length > 0)
trace('Using Audio device: ' + localstream.audioTracks[0].label);
pc1 = new webkitPeerConnection00(null,iceCallback1);
trace("Created local peer connection object pc1");
pc2 = new webkitPeerConnection00(null,iceCallback2);
trace("Created remote peer connection object pc2");
pc2.onaddstream = gotRemoteStream;
pc1.addStream(localstream);
trace("Adding Local Stream to peer connection");
var offer = pc1.createOffer(null);
trace("Created offer:\n" + offer.toSdp());
pc1.setLocalDescription(pc1.SDP_OFFER, offer);
trace("SetLocalDesc1");
pc2.setRemoteDescription(pc2.SDP_OFFER, offer);
trace("SetRemoteDesc2");
var answer = pc2.createAnswer(offer.toSdp(), {has_audio:true, has_video:true});
var sdp = answer.toSdp();
sdp = sdp.replace(/a=sendrecv/g, "a=inactive");
answer = new SessionDescription(sdp);
trace("Created answer:\n" + answer.toSdp());
pc2.setLocalDescription(pc2.SDP_PRANSWER, answer);
trace("SetLocalDesc2");
pc1.setRemoteDescription(pc1.SDP_PRANSWER, answer);
trace("SetRemoteDesc1");
pc1.startIce(); // Start gathering local ICE candidates; each one triggers iceCallback1.
pc2.startIce(); // Start gathering remote ICE candidates; each one triggers iceCallback2.
trace("Start ICE for both local & remote");
}
function accept() {
var sdp = pc1.remoteDescription.toSdp();
sdp = sdp.replace(/a=inactive/g, "a=sendrecv");
var answer = new SessionDescription(sdp);
pc2.setLocalDescription(pc2.SDP_ANSWER, answer);
pc1.setRemoteDescription(pc1.SDP_ANSWER, answer);
trace("Set final answer:" + sdp);
}
function stop() {
trace("Ending Call" + "\n\n");
pc1.close();
pc2.close();
pc1=null;
pc2=null;
btn2.disabled = true;
btn1.disabled = false;
}
function gotRemoteStream(e){
vid2.src = webkitURL.createObjectURL(e.stream);
trace("Received Remote Stream");
}
function iceCallback1(candidate,bMore){
if (candidate) {
pc2.processIceMessage(candidate);
traceCandidate("local", candidate);
}
}
function iceCallback2(candidate,bMore){
if (candidate) {
pc1.processIceMessage(candidate);
traceCandidate("remote", candidate);
}
}
</script>
</body>
</html>


@ -0,0 +1,142 @@
<!DOCTYPE html>
<html>
<head>
<title>PeerConnection Rehydration Demo</title>
<style>
video {
border:5px solid black;
width:320px;
height:240px;
}
</style>
</head>
<body>
<video id="vid1" autoplay></video>
<video id="vid2" autoplay></video>
<br>
<button id="btn1" onclick="start()">Start</button>
<button id="btn2" onclick="call()">Call</button>
<button id="btn3" onclick="rehydrate()">Rehydrate</button>
<button id="btn4" onclick="stop()">Hang Up</button>
<script>
//var vid1 = document.getElementById("vid1");
//var vid2 = document.getElementById("vid2");
btn2.disabled = true;
btn3.disabled = true;
btn4.disabled = true;
var pc1,pc2;
var localstream;
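// "Rehydration": pc2 is destroyed and rebuilt from its saved local
// description; the old SDP is replayed as an offer to the new object, and
// its ICE candidates are re-tagged from generation 0 to generation 1 in
// iceCallback3 before being handed to pc1.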
function trace(txt) {
// This function is used for logging.
console.log(txt);
}
function start() {
btn1.disabled = true;
navigator.webkitGetUserMedia({audio:true, video:true}, gotStream, function() {});
}
function gotStream(stream){
trace("Received local stream");
vid1.src = webkitURL.createObjectURL(stream);
localstream = stream;
btn2.disabled = false;
}
function call() {
btn2.disabled = true;
btn3.disabled = false;
btn4.disabled = false;
trace("Starting Call");
if (localstream.videoTracks.length > 0)
trace('Using Video device: ' + localstream.videoTracks[0].label);
if (localstream.audioTracks.length > 0)
trace('Using Audio device: ' + localstream.audioTracks[0].label);
pc1 = new webkitPeerConnection00(null,iceCallback1);
trace("Created local peer connection object pc1");
pc2 = new webkitPeerConnection00(null,iceCallback2);
trace("Created remote peer connection object pc2");
pc2.onaddstream = gotRemoteStream;
pc1.addStream(localstream);
trace("Adding Local Stream to peer connection");
var offer = pc1.createOffer(null);
trace("Created offer");
pc1.setLocalDescription(pc1.SDP_OFFER, offer);
trace("SetLocalDesc1");
pc2.setRemoteDescription(pc2.SDP_OFFER, offer);
trace("SetRemoteDesc2");
var answer = pc2.createAnswer(offer.toSdp(), {has_audio:true, has_video:true});
trace("CreatedAnswer");
pc2.setLocalDescription(pc2.SDP_ANSWER, answer);
trace("SetLocalDesc2");
pc1.setRemoteDescription(pc1.SDP_ANSWER, answer);
trace("SetRemoteDesc1");
pc1.startIce(); // Start gathering local ICE candidates; each one triggers iceCallback1.
pc2.startIce(); // Start gathering remote ICE candidates; each one triggers iceCallback2.
trace("Start ICE for both local & remote");
}
function rehydrate() {
var oldLocal = pc2.localDescription;
// need to munge a=crypto
pc2 = null;
trace("Destroyed remote peer connection object pc2");
pc2 = new webkitPeerConnection00(null, iceCallback3);
trace("Created new remote peer connection object pc2");
pc2.onaddstream = gotRemoteStream;
pc2.setLocalDescription(pc2.SDP_OFFER, oldLocal);
pc1.setRemoteDescription(pc1.SDP_OFFER, oldLocal);
var answer = pc1.createAnswer(oldLocal.toSdp(), {has_audio:true, has_video:true});
pc1.setLocalDescription(pc1.SDP_ANSWER, answer);
pc2.setRemoteDescription(pc2.SDP_ANSWER, answer);
pc2.startIce();
trace("Inited new remote peer connection object pc2");
}
function stop() {
trace("Ending Call" + "\n\n");
pc1.close();
pc2.close();
pc1=null;
pc2=null;
btn2.disabled = false;
btn3.disabled = true;
btn4.disabled = true;
}
function gotRemoteStream(e){
vid2.src = webkitURL.createObjectURL(e.stream);
trace("Received Remote Stream");
}
function iceCallback1(candidate,bMore){
if (candidate) {
pc2.processIceMessage(candidate);
trace("Local ice candidate: " + candidate.toSdp());
}
}
function iceCallback2(candidate,bMore){
if (candidate) {
pc1.processIceMessage(candidate);
trace("Remote ice candidate: " + candidate.toSdp());
}
}
function iceCallback3(candidate,bMore){
if (candidate) {
var str = candidate.toSdp();
str = str.replace("generation 0", "generation 1");
var mungedCandidate = new IceCandidate(candidate.label, str);
trace("Remote ice candidate: " + mungedCandidate.toSdp());
pc1.processIceMessage(mungedCandidate);
}
}
</script>
</body>
</html>

Binary image file added (1.7 KiB; not shown).

@ -0,0 +1,76 @@
<!DOCTYPE html>
<html>
<head>
<title>WebRTC Samples</title>
</head>
<body>
<h1>
WebRTC Samples</h1>
<p>
Here are some sample pages that demonstrate basic <a href="http://www.webrtc.org">WebRTC</a> concepts. If you are new to WebRTC, you may want to check out this <a href="http://www.html5rocks.com/en/tutorials/webrtc/basics/">WebRTC overview</a> first.</p>
<table border="0" cellpadding="1" cellspacing="1" style="width: 100%;">
<thead>
<tr>
<td colspan="2" scope="col" style="background-color: rgb(0, 204, 255);">
<b>getUserMedia Samples</b></td>
</tr>
</thead>
<tbody>
<tr>
<td>
<a href="html/gum1.html">gum1.html</a></td>
<td>
Shows how to access the webcam and display the local video in a &lt;video/&gt; element.</td>
</tr>
<tr>
<td>
<a href="html/gum2.html">gum2.html</a></td>
<td>
Shows how to capture the current frame of video to a &lt;canvas/&gt;.</td>
</tr>
<tr>
<td>
<a href="html/gum3.html">gum3.html</a></td>
<td>
Shows how to apply CSS filters to a &lt;video/&gt; and &lt;canvas/&gt;</td>
</tr>
<tr>
<td>
<a href="html/face.html">face.html</a></td>
<td>
Shows how to perform face tracking using webcam video.</td>
</tr>
<tr><td>&nbsp; </td> <td>&nbsp; </td></tr>
<tr>
<td colspan="2" scope="col" style="background-color: rgb(0, 204, 255);">
<b>PeerConnection Samples</b></td>
</tr>
<tr>
<td>
<a href="html/pc1.html">pc1.html</a></td>
<td>
Shows how to set up a simple 1:1 audio/video call.</td>
</tr>
<tr>
<td>
<a href="html/pc1-deprecated.html">pc1-deprecated.html</a></td>
<td>
Like pc1.html, but uses PeerConnection00 instead of RTCPeerConnection.</td>
</tr>
<tr>
<td>
<a href="html/multiple.html">multiple.html</a></td>
<td>
Shows how to set up multiple PeerConnections.</td>
</tr>
<tr>
<td>
<a href="html/constraints-and-stats.html">constraints-and-stats.html</a></td>
<td>
Shows how to pass constraints into the PeerConnection API, and query it for statistics.</td>
</tr>
</tbody>
</table>
<p>
&nbsp;</p></body>
</html>


@ -0,0 +1,12 @@
indexes:
# AUTOGENERATED
# This index.yaml is automatically updated whenever the dev_appserver
# detects that a new type of query is run. If you want to manage the
# index.yaml file manually, remove the above marker line (the line
# saying "# AUTOGENERATED"). If you want to manage some indexes
# manually, move them above the marker line. The index.yaml file is
# automatically uploaded to the admin console when you next deploy
# your application using appcfg.py.

460
samples/js/demos/js/ccv.js Normal file

@ -0,0 +1,460 @@
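/* ccv.js: JavaScript port of the ccv computer-vision library, used by the
   face.html demo for face tracking. parallable() wraps a detector so it can
   run synchronously or be fanned out across Web Workers (this file doubles
   as the worker script via the onmessage handler at the bottom), and
   ccv.detect_objects() runs a Viola-Jones style cascade classifier over an
   image pyramid. */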
if (parallable === undefined) {
var parallable = function (file, funct) {
parallable.core[funct.toString()] = funct().core;
return function () {
var i;
var async, worker_num, params;
if (arguments.length > 1) {
async = arguments[arguments.length - 2];
worker_num = arguments[arguments.length - 1];
params = new Array(arguments.length - 2);
for (i = 0; i < arguments.length - 2; i++)
params[i] = arguments[i];
} else {
async = arguments[0].async;
worker_num = arguments[0].worker;
params = arguments[0];
delete params["async"];
delete params["worker"];
params = [params];
}
var scope = { "shared" : {} };
var ctrl = funct.apply(scope, params);
if (async) {
return function (complete, error) {
var executed = 0;
var outputs = new Array(worker_num);
var inputs = ctrl.pre.apply(scope, [worker_num]);
/* sanitize scope shared because for Chrome/WebKit, worker only support JSONable data */
for (i in scope.shared)
/* delete function, if any */
if (typeof scope.shared[i] == "function")
delete scope.shared[i];
/* delete DOM object, if any */
else if (scope.shared[i].tagName !== undefined)
delete scope.shared[i];
for (i = 0; i < worker_num; i++) {
var worker = new Worker(file);
worker.onmessage = (function (i) {
return function (event) {
outputs[i] = (typeof event.data == "string") ? JSON.parse(event.data) : event.data;
executed++;
if (executed == worker_num)
complete(ctrl.post.apply(scope, [outputs]));
}
})(i);
var msg = { "input" : inputs[i],
"name" : funct.toString(),
"shared" : scope.shared,
"id" : i,
"worker" : params.worker_num };
try {
worker.postMessage(msg);
} catch (e) {
worker.postMessage(JSON.stringify(msg));
}
}
}
} else {
return ctrl.post.apply(scope, [[ctrl.core.apply(scope, [ctrl.pre.apply(scope, [1])[0], 0, 1])]]);
}
}
};
parallable.core = {};
}
function get_named_arguments(params, names) {
if (params.length > 1) {
var new_params = {};
for (var i = 0; i < names.length; i++)
new_params[names[i]] = params[i];
return new_params;
} else if (params.length == 1) {
return params[0];
} else {
return {};
}
}
var ccv = {
pre : function (image) {
if (image.tagName.toLowerCase() == "img") {
var canvas = document.createElement("canvas");
document.body.appendChild(image);
canvas.width = image.offsetWidth;
canvas.style.width = image.offsetWidth.toString() + "px";
canvas.height = image.offsetHeight;
canvas.style.height = image.offsetHeight.toString() + "px";
document.body.removeChild(image);
var ctx = canvas.getContext("2d");
ctx.drawImage(image, 0, 0);
return canvas;
}
return image;
},
grayscale : function (canvas) {
var ctx = canvas.getContext("2d");
var imageData = ctx.getImageData(0, 0, canvas.width, canvas.height);
var data = imageData.data;
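/* In-place grayscale conversion using Rec. 601 luma weights
   (0.30 R + 0.59 G + 0.11 B), written back to all three channels. */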
var pix1, pix2, pix = canvas.width * canvas.height * 4;
while (pix > 0)
data[pix -= 4] = data[pix1 = pix + 1] = data[pix2 = pix + 2] = (data[pix] * 0.3 + data[pix1] * 0.59 + data[pix2] * 0.11);
ctx.putImageData(imageData, 0, 0);
return canvas;
},
array_group : function (seq, gfunc) {
var i, j;
var node = new Array(seq.length);
for (i = 0; i < seq.length; i++)
node[i] = {"parent" : -1,
"element" : seq[i],
"rank" : 0};
for (i = 0; i < seq.length; i++) {
if (!node[i].element)
continue;
var root = i;
while (node[root].parent != -1)
root = node[root].parent;
for (j = 0; j < seq.length; j++) {
if( i != j && node[j].element && gfunc(node[i].element, node[j].element)) {
var root2 = j;
while (node[root2].parent != -1)
root2 = node[root2].parent;
if(root2 != root) {
if(node[root].rank > node[root2].rank)
node[root2].parent = root;
else {
node[root].parent = root2;
if (node[root].rank == node[root2].rank)
node[root2].rank++;
root = root2;
}
/* compress path from node2 to the root: */
var temp, node2 = j;
while (node[node2].parent != -1) {
temp = node2;
node2 = node[node2].parent;
node[temp].parent = root;
}
/* compress path from node to the root: */
node2 = i;
while (node[node2].parent != -1) {
temp = node2;
node2 = node[node2].parent;
node[temp].parent = root;
}
}
}
}
}
var idx = new Array(seq.length);
var class_idx = 0;
for(i = 0; i < seq.length; i++) {
j = -1;
var node1 = i;
if(node[node1].element) {
while (node[node1].parent != -1)
node1 = node[node1].parent;
if(node[node1].rank >= 0)
node[node1].rank = ~class_idx++;
j = ~node[node1].rank;
}
idx[i] = j;
}
return {"index" : idx, "cat" : class_idx};
},
detect_objects : parallable("ccv.js", function (canvas, cascade, interval, min_neighbors) {
if (this.shared !== undefined) {
var params = get_named_arguments(arguments, ["canvas", "cascade", "interval", "min_neighbors"]);
this.shared.canvas = params.canvas;
this.shared.interval = params.interval;
this.shared.min_neighbors = params.min_neighbors;
this.shared.cascade = params.cascade;
this.shared.scale = Math.pow(2, 1 / (params.interval + 1));
this.shared.next = params.interval + 1;
this.shared.scale_upto = Math.floor(Math.log(Math.min(params.canvas.width / params.cascade.width, params.canvas.height / params.cascade.height)) / Math.log(this.shared.scale));
var i;
for (i = 0; i < this.shared.cascade.stage_classifier.length; i++)
this.shared.cascade.stage_classifier[i].orig_feature = this.shared.cascade.stage_classifier[i].feature;
}
function pre(worker_num) {
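/* Build an image pyramid: levels 1..interval are progressive downscales of
   the source canvas, and later levels are half-size copies (plus copies
   shifted by one pixel in x and/or y) for the detector's multi-scale scan. */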
var canvas = this.shared.canvas;
var interval = this.shared.interval;
var scale = this.shared.scale;
var next = this.shared.next;
var scale_upto = this.shared.scale_upto;
var pyr = new Array((scale_upto + next * 2) * 4);
var ret = new Array((scale_upto + next * 2) * 4);
pyr[0] = canvas;
ret[0] = { "width" : pyr[0].width,
"height" : pyr[0].height,
"data" : pyr[0].getContext("2d").getImageData(0, 0, pyr[0].width, pyr[0].height).data };
var i;
for (i = 1; i <= interval; i++) {
pyr[i * 4] = document.createElement("canvas");
pyr[i * 4].width = Math.floor(pyr[0].width / Math.pow(scale, i));
pyr[i * 4].height = Math.floor(pyr[0].height / Math.pow(scale, i));
pyr[i * 4].getContext("2d").drawImage(pyr[0], 0, 0, pyr[0].width, pyr[0].height, 0, 0, pyr[i * 4].width, pyr[i * 4].height);
ret[i * 4] = { "width" : pyr[i * 4].width,
"height" : pyr[i * 4].height,
"data" : pyr[i * 4].getContext("2d").getImageData(0, 0, pyr[i * 4].width, pyr[i * 4].height).data };
}
for (i = next; i < scale_upto + next * 2; i++) {
pyr[i * 4] = document.createElement("canvas");
pyr[i * 4].width = Math.floor(pyr[i * 4 - next * 4].width / 2);
pyr[i * 4].height = Math.floor(pyr[i * 4 - next * 4].height / 2);
pyr[i * 4].getContext("2d").drawImage(pyr[i * 4 - next * 4], 0, 0, pyr[i * 4 - next * 4].width, pyr[i * 4 - next * 4].height, 0, 0, pyr[i * 4].width, pyr[i * 4].height);
ret[i * 4] = { "width" : pyr[i * 4].width,
"height" : pyr[i * 4].height,
"data" : pyr[i * 4].getContext("2d").getImageData(0, 0, pyr[i * 4].width, pyr[i * 4].height).data };
}
for (i = next * 2; i < scale_upto + next * 2; i++) {
pyr[i * 4 + 1] = document.createElement("canvas");
pyr[i * 4 + 1].width = Math.floor(pyr[i * 4 - next * 4].width / 2);
pyr[i * 4 + 1].height = Math.floor(pyr[i * 4 - next * 4].height / 2);
pyr[i * 4 + 1].getContext("2d").drawImage(pyr[i * 4 - next * 4], 1, 0, pyr[i * 4 - next * 4].width - 1, pyr[i * 4 - next * 4].height, 0, 0, pyr[i * 4 + 1].width - 2, pyr[i * 4 + 1].height);
ret[i * 4 + 1] = { "width" : pyr[i * 4 + 1].width,
"height" : pyr[i * 4 + 1].height,
"data" : pyr[i * 4 + 1].getContext("2d").getImageData(0, 0, pyr[i * 4 + 1].width, pyr[i * 4 + 1].height).data };
pyr[i * 4 + 2] = document.createElement("canvas");
pyr[i * 4 + 2].width = Math.floor(pyr[i * 4 - next * 4].width / 2);
pyr[i * 4 + 2].height = Math.floor(pyr[i * 4 - next * 4].height / 2);
pyr[i * 4 + 2].getContext("2d").drawImage(pyr[i * 4 - next * 4], 0, 1, pyr[i * 4 - next * 4].width, pyr[i * 4 - next * 4].height - 1, 0, 0, pyr[i * 4 + 2].width, pyr[i * 4 + 2].height - 2);
ret[i * 4 + 2] = { "width" : pyr[i * 4 + 2].width,
"height" : pyr[i * 4 + 2].height,
"data" : pyr[i * 4 + 2].getContext("2d").getImageData(0, 0, pyr[i * 4 + 2].width, pyr[i * 4 + 2].height).data };
pyr[i * 4 + 3] = document.createElement("canvas");
pyr[i * 4 + 3].width = Math.floor(pyr[i * 4 - next * 4].width / 2);
pyr[i * 4 + 3].height = Math.floor(pyr[i * 4 - next * 4].height / 2);
pyr[i * 4 + 3].getContext("2d").drawImage(pyr[i * 4 - next * 4], 1, 1, pyr[i * 4 - next * 4].width - 1, pyr[i * 4 - next * 4].height - 1, 0, 0, pyr[i * 4 + 3].width - 2, pyr[i * 4 + 3].height - 2);
ret[i * 4 + 3] = { "width" : pyr[i * 4 + 3].width,
"height" : pyr[i * 4 + 3].height,
"data" : pyr[i * 4 + 3].getContext("2d").getImageData(0, 0, pyr[i * 4 + 3].width, pyr[i * 4 + 3].height).data };
}
return [ret];
};
function core(pyr, id, worker_num) {
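/* Scan the cascade across every pyramid level: each feature compares its
   "positive" pixels against its "negative" pixels over three pyramid scales,
   each stage accumulates alpha weights, and a window is kept only if every
   stage clears its threshold. */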
var cascade = this.shared.cascade;
var interval = this.shared.interval;
var scale = this.shared.scale;
var next = this.shared.next;
var scale_upto = this.shared.scale_upto;
var i, j, k, x, y, q;
var scale_x = 1, scale_y = 1;
var dx = [0, 1, 0, 1];
var dy = [0, 0, 1, 1];
var seq = [];
for (i = 0; i < scale_upto; i++) {
var qw = pyr[i * 4 + next * 8].width - Math.floor(cascade.width / 4);
var qh = pyr[i * 4 + next * 8].height - Math.floor(cascade.height / 4);
var step = [pyr[i * 4].width * 4, pyr[i * 4 + next * 4].width * 4, pyr[i * 4 + next * 8].width * 4];
var paddings = [pyr[i * 4].width * 16 - qw * 16,
pyr[i * 4 + next * 4].width * 8 - qw * 8,
pyr[i * 4 + next * 8].width * 4 - qw * 4];
for (j = 0; j < cascade.stage_classifier.length; j++) {
var orig_feature = cascade.stage_classifier[j].orig_feature;
var feature = cascade.stage_classifier[j].feature = new Array(cascade.stage_classifier[j].count);
for (k = 0; k < cascade.stage_classifier[j].count; k++) {
feature[k] = {"size" : orig_feature[k].size,
"px" : new Array(orig_feature[k].size),
"pz" : new Array(orig_feature[k].size),
"nx" : new Array(orig_feature[k].size),
"nz" : new Array(orig_feature[k].size)};
for (q = 0; q < orig_feature[k].size; q++) {
feature[k].px[q] = orig_feature[k].px[q] * 4 + orig_feature[k].py[q] * step[orig_feature[k].pz[q]];
feature[k].pz[q] = orig_feature[k].pz[q];
feature[k].nx[q] = orig_feature[k].nx[q] * 4 + orig_feature[k].ny[q] * step[orig_feature[k].nz[q]];
feature[k].nz[q] = orig_feature[k].nz[q];
}
}
}
for (q = 0; q < 4; q++) {
var u8 = [pyr[i * 4].data, pyr[i * 4 + next * 4].data, pyr[i * 4 + next * 8 + q].data];
var u8o = [dx[q] * 8 + dy[q] * pyr[i * 4].width * 8, dx[q] * 4 + dy[q] * pyr[i * 4 + next * 4].width * 4, 0];
for (y = 0; y < qh; y++) {
for (x = 0; x < qw; x++) {
var sum = 0;
var flag = true;
for (j = 0; j < cascade.stage_classifier.length; j++) {
sum = 0;
var alpha = cascade.stage_classifier[j].alpha;
var feature = cascade.stage_classifier[j].feature;
for (k = 0; k < cascade.stage_classifier[j].count; k++) {
var feature_k = feature[k];
var p, pmin = u8[feature_k.pz[0]][u8o[feature_k.pz[0]] + feature_k.px[0]];
var n, nmax = u8[feature_k.nz[0]][u8o[feature_k.nz[0]] + feature_k.nx[0]];
if (pmin <= nmax) {
sum += alpha[k * 2];
} else {
var f, shortcut = true;
for (f = 0; f < feature_k.size; f++) {
if (feature_k.pz[f] >= 0) {
p = u8[feature_k.pz[f]][u8o[feature_k.pz[f]] + feature_k.px[f]];
if (p < pmin) {
if (p <= nmax) {
shortcut = false;
break;
}
pmin = p;
}
}
if (feature_k.nz[f] >= 0) {
n = u8[feature_k.nz[f]][u8o[feature_k.nz[f]] + feature_k.nx[f]];
if (n > nmax) {
if (pmin <= n) {
shortcut = false;
break;
}
nmax = n;
}
}
}
sum += (shortcut) ? alpha[k * 2 + 1] : alpha[k * 2];
}
}
if (sum < cascade.stage_classifier[j].threshold) {
flag = false;
break;
}
}
if (flag) {
seq.push({"x" : (x * 4 + dx[q] * 2) * scale_x,
"y" : (y * 4 + dy[q] * 2) * scale_y,
"width" : cascade.width * scale_x,
"height" : cascade.height * scale_y,
"neighbor" : 1,
"confidence" : sum});
}
u8o[0] += 16;
u8o[1] += 8;
u8o[2] += 4;
}
u8o[0] += paddings[0];
u8o[1] += paddings[1];
u8o[2] += paddings[2];
}
}
scale_x *= scale;
scale_y *= scale;
}
return seq;
};
function post(seq) {
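/* Merge overlapping raw detections: group nearby rectangles, average each
   group into a single box, keep only groups with at least min_neighbors
   members, then drop small boxes nested inside larger ones. */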
var min_neighbors = this.shared.min_neighbors;
var cascade = this.shared.cascade;
var interval = this.shared.interval;
var scale = this.shared.scale;
var next = this.shared.next;
var scale_upto = this.shared.scale_upto;
var i, j;
for (i = 0; i < cascade.stage_classifier.length; i++)
cascade.stage_classifier[i].feature = cascade.stage_classifier[i].orig_feature;
seq = seq[0];
if (!(min_neighbors > 0))
return seq;
else {
var result = ccv.array_group(seq, function (r1, r2) {
var distance = Math.floor(r1.width * 0.25 + 0.5);
return r2.x <= r1.x + distance &&
r2.x >= r1.x - distance &&
r2.y <= r1.y + distance &&
r2.y >= r1.y - distance &&
r2.width <= Math.floor(r1.width * 1.5 + 0.5) &&
Math.floor(r2.width * 1.5 + 0.5) >= r1.width;
});
var ncomp = result.cat;
var idx_seq = result.index;
var comps = new Array(ncomp + 1);
for (i = 0; i < comps.length; i++)
comps[i] = {"neighbors" : 0,
"x" : 0,
"y" : 0,
"width" : 0,
"height" : 0,
"confidence" : 0};
// count number of neighbors
for(i = 0; i < seq.length; i++)
{
var r1 = seq[i];
var idx = idx_seq[i];
if (comps[idx].neighbors == 0)
comps[idx].confidence = r1.confidence;
++comps[idx].neighbors;
comps[idx].x += r1.x;
comps[idx].y += r1.y;
comps[idx].width += r1.width;
comps[idx].height += r1.height;
comps[idx].confidence = Math.max(comps[idx].confidence, r1.confidence);
}
var seq2 = [];
// calculate average bounding box
for(i = 0; i < ncomp; i++)
{
var n = comps[i].neighbors;
if (n >= min_neighbors)
seq2.push({"x" : (comps[i].x * 2 + n) / (2 * n),
"y" : (comps[i].y * 2 + n) / (2 * n),
"width" : (comps[i].width * 2 + n) / (2 * n),
"height" : (comps[i].height * 2 + n) / (2 * n),
"neighbors" : comps[i].neighbors,
"confidence" : comps[i].confidence});
}
var result_seq = [];
// filter out small face rectangles inside large face rectangles
for(i = 0; i < seq2.length; i++)
{
var r1 = seq2[i];
var flag = true;
for(j = 0; j < seq2.length; j++)
{
var r2 = seq2[j];
var distance = Math.floor(r2.width * 0.25 + 0.5);
if(i != j &&
r1.x >= r2.x - distance &&
r1.y >= r2.y - distance &&
r1.x + r1.width <= r2.x + r2.width + distance &&
r1.y + r1.height <= r2.y + r2.height + distance &&
(r2.neighbors > Math.max(3, r1.neighbors) || r1.neighbors < 3))
{
flag = false;
break;
}
}
if(flag)
result_seq.push(r1);
}
return result_seq;
}
};
return { "pre" : pre, "core" : core, "post" : post };
})
}
onmessage = function (event) {
var data = (typeof event.data == "string") ? JSON.parse(event.data) : event.data;
var scope = { "shared" : data.shared };
var result = parallable.core[data.name].apply(scope, [data.input, data.id, data.worker]);
try {
postMessage(result);
} catch (e) {
postMessage(JSON.stringify(result));
}
}

File diff suppressed because one or more lines are too long

27
samples/js/demos/main.py Normal file

@ -0,0 +1,27 @@
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import webapp2
import os
from google.appengine.ext.webapp import template
class MainHandler(webapp2.RequestHandler):
def get(self):
path = os.path.join(os.path.dirname(__file__), 'index.html')
self.response.out.write(template.render(path, {}))
app = webapp2.WSGIApplication([('/', MainHandler)],
debug=True)