Removing samples directory following move to Github

git-svn-id: http://webrtc.googlecode.com/svn/trunk@5871 4adac7df-926f-26a2-2b94-8c16560cd09d
Author: dutton@google.com
Date: 2014-04-09 09:55:54 +00:00
Parent: 61c1b8ea32
Commit: 7ecc142d6b
47 changed files with 0 additions and 6196 deletions


@@ -1,9 +0,0 @@
braveyao@webrtc.org
dutton@google.com
henrika@webrtc.org
hta@webrtc.org
juberti@webrtc.org
kjellander@webrtc.org
phoglund@webrtc.org
vikasmarwaha@webrtc.org
wu@webrtc.org


@@ -1,29 +0,0 @@
application: apprtc
version: 6
runtime: python27
threadsafe: true
api_version: 1
handlers:
- url: /html
static_dir: html
- url: /images
static_dir: images
- url: /js
static_dir: js
- url: /css
static_dir: css
- url: /.*
script: apprtc.app
secure: always
inbound_services:
- channel_presence
libraries:
- name: jinja2
version: latest


@@ -1,482 +0,0 @@
#!/usr/bin/python2.4
#
# Copyright 2011 Google Inc. All Rights Reserved.
"""WebRTC Demo
This module demonstrates the WebRTC API by implementing a simple video chat app.
"""
import cgi
import logging
import os
import random
import re
import json
import jinja2
import webapp2
import threading
from google.appengine.api import channel
from google.appengine.ext import db
jinja_environment = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))
# Lock for syncing DB operations when handling concurrent requests.
# TODO(brave): keep working on improving performance with thread syncing.
# One possible method for the near future is to reduce the message caching.
LOCK = threading.RLock()
def generate_random(length):
word = ''
for _ in range(length):
word += random.choice('0123456789')
return word
def sanitize(key):
return re.sub('[^a-zA-Z0-9\-]', '-', key)
def make_client_id(room, user):
return room.key().id_or_name() + '/' + user
def get_default_stun_server(user_agent):
default_stun_server = 'stun.l.google.com:19302'
if 'Firefox' in user_agent:
default_stun_server = 'stun.services.mozilla.com'
return default_stun_server
def get_preferred_audio_receive_codec():
return 'opus/48000'
def get_preferred_audio_send_codec(user_agent):
# Empty string means no preference.
preferred_audio_send_codec = ''
# Prefer to send ISAC on Chrome for Android.
if 'Android' in user_agent and 'Chrome' in user_agent:
preferred_audio_send_codec = 'ISAC/16000'
return preferred_audio_send_codec
def make_pc_config(stun_server, turn_server, ts_pwd):
servers = []
if turn_server:
turn_config = 'turn:{}'.format(turn_server)
servers.append({'urls':turn_config, 'credential':ts_pwd})
if stun_server:
stun_config = 'stun:{}'.format(stun_server)
servers.append({'urls':stun_config})
return {'iceServers':servers}
def create_channel(room, user, duration_minutes):
client_id = make_client_id(room, user)
return channel.create_channel(client_id, duration_minutes)
def make_loopback_answer(message):
message = message.replace("\"offer\"", "\"answer\"")
message = message.replace("a=ice-options:google-ice\\r\\n", "")
return message
def handle_message(room, user, message):
message_obj = json.loads(message)
other_user = room.get_other_user(user)
room_key = room.key().id_or_name()
if message_obj['type'] == 'bye':
# This would remove the other_user in the loopback test too,
# so check its availability before forwarding the Bye message.
room.remove_user(user)
logging.info('User ' + user + ' quit from room ' + room_key)
logging.info('Room ' + room_key + ' has state ' + str(room))
if other_user and room.has_user(other_user):
if message_obj['type'] == 'offer':
# Special case the loopback scenario.
if other_user == user:
message = make_loopback_answer(message)
on_message(room, other_user, message)
else:
# For unittest
on_message(room, user, message)
def get_saved_messages(client_id):
return Message.gql("WHERE client_id = :id", id=client_id)
def delete_saved_messages(client_id):
messages = get_saved_messages(client_id)
for message in messages:
message.delete()
logging.info('Deleted the saved message for ' + client_id)
def send_saved_messages(client_id):
messages = get_saved_messages(client_id)
for message in messages:
channel.send_message(client_id, message.msg)
logging.info('Delivered saved message to ' + client_id)
message.delete()
def on_message(room, user, message):
client_id = make_client_id(room, user)
if room.is_connected(user):
channel.send_message(client_id, message)
logging.info('Delivered message to user ' + user)
else:
new_message = Message(client_id = client_id, msg = message)
new_message.put()
logging.info('Saved message for user ' + user)
def make_media_track_constraints(constraints_string):
if not constraints_string or constraints_string.lower() == 'true':
track_constraints = True
elif constraints_string.lower() == 'false':
track_constraints = False
else:
track_constraints = {'mandatory': {}, 'optional': []}
for constraint_string in constraints_string.split(','):
constraint = constraint_string.split('=')
if len(constraint) != 2:
logging.error('Ignoring malformed constraint: ' + constraint_string)
continue
if constraint[0].startswith('goog'):
track_constraints['optional'].append({constraint[0]: constraint[1]})
else:
track_constraints['mandatory'][constraint[0]] = constraint[1]
return track_constraints
def make_media_stream_constraints(audio, video):
stream_constraints = (
{'audio': make_media_track_constraints(audio),
'video': make_media_track_constraints(video)})
logging.info('Applying media constraints: ' + str(stream_constraints))
return stream_constraints
def maybe_add_constraint(constraints, param, constraint):
if (param.lower() == 'true'):
constraints['optional'].append({constraint: True})
elif (param.lower() == 'false'):
constraints['optional'].append({constraint: False})
return constraints
def make_pc_constraints(dtls, dscp, ipv6):
constraints = { 'optional': [] }
maybe_add_constraint(constraints, dtls, 'DtlsSrtpKeyAgreement')
maybe_add_constraint(constraints, dscp, 'googDscp')
maybe_add_constraint(constraints, ipv6, 'googIPv6')
return constraints
def make_offer_constraints():
constraints = { 'mandatory': {}, 'optional': [] }
return constraints
def append_url_arguments(request, link):
for argument in request.arguments():
if argument != 'r':
link += ('&' + cgi.escape(argument, True) + '=' +
cgi.escape(request.get(argument), True))
return link
# This database is to store the messages from the sender client when the
# receiver client is not ready to receive the messages.
# Use TextProperty instead of StringProperty for msg because
# the session description can be more than 500 characters.
class Message(db.Model):
client_id = db.StringProperty()
msg = db.TextProperty()
class Room(db.Model):
"""All the data we store for a room"""
user1 = db.StringProperty()
user2 = db.StringProperty()
user1_connected = db.BooleanProperty(default=False)
user2_connected = db.BooleanProperty(default=False)
def __str__(self):
result = '['
if self.user1:
result += "%s-%r" % (self.user1, self.user1_connected)
if self.user2:
result += ", %s-%r" % (self.user2, self.user2_connected)
result += ']'
return result
def get_occupancy(self):
occupancy = 0
if self.user1:
occupancy += 1
if self.user2:
occupancy += 1
return occupancy
def get_other_user(self, user):
if user == self.user1:
return self.user2
elif user == self.user2:
return self.user1
else:
return None
def has_user(self, user):
return (user and (user == self.user1 or user == self.user2))
def add_user(self, user):
if not self.user1:
self.user1 = user
elif not self.user2:
self.user2 = user
else:
raise RuntimeError('room is full')
self.put()
def remove_user(self, user):
delete_saved_messages(make_client_id(self, user))
if user == self.user2:
self.user2 = None
self.user2_connected = False
if user == self.user1:
if self.user2:
self.user1 = self.user2
self.user1_connected = self.user2_connected
self.user2 = None
self.user2_connected = False
else:
self.user1 = None
self.user1_connected = False
if self.get_occupancy() > 0:
self.put()
else:
self.delete()
def set_connected(self, user):
if user == self.user1:
self.user1_connected = True
if user == self.user2:
self.user2_connected = True
self.put()
def is_connected(self, user):
if user == self.user1:
return self.user1_connected
if user == self.user2:
return self.user2_connected
@db.transactional
def connect_user_to_room(room_key, user):
room = Room.get_by_key_name(room_key)
# Check that the room has the user, in case the disconnect message arrives
# before the connect message for an unknown reason (observed with the local
# App Engine SDK).
if room and room.has_user(user):
room.set_connected(user)
logging.info('User ' + user + ' connected to room ' + room_key)
logging.info('Room ' + room_key + ' has state ' + str(room))
else:
logging.warning('Unexpected Connect Message to room ' + room_key)
return room
class ConnectPage(webapp2.RequestHandler):
def post(self):
key = self.request.get('from')
room_key, user = key.split('/')
with LOCK:
room = connect_user_to_room(room_key, user)
if room and room.has_user(user):
send_saved_messages(make_client_id(room, user))
class DisconnectPage(webapp2.RequestHandler):
def post(self):
key = self.request.get('from')
room_key, user = key.split('/')
with LOCK:
room = Room.get_by_key_name(room_key)
if room and room.has_user(user):
other_user = room.get_other_user(user)
room.remove_user(user)
logging.info('User ' + user + ' removed from room ' + room_key)
logging.info('Room ' + room_key + ' has state ' + str(room))
if other_user and other_user != user:
channel.send_message(make_client_id(room, other_user),
'{"type":"bye"}')
logging.info('Sent BYE to ' + other_user)
logging.warning('User ' + user + ' disconnected from room ' + room_key)
class MessagePage(webapp2.RequestHandler):
def post(self):
message = self.request.body
room_key = self.request.get('r')
user = self.request.get('u')
with LOCK:
room = Room.get_by_key_name(room_key)
if room:
handle_message(room, user, message)
else:
logging.warning('Unknown room ' + room_key)
class MainPage(webapp2.RequestHandler):
"""The main UI page, renders the 'index.html' template."""
def get(self):
"""Renders the main page. When this page is shown, we create a new
channel to push asynchronous updates to the client."""
# Append strings to this list to have them thrown up in message boxes. This
# will also cause the app to fail.
error_messages = []
# Get the base url without arguments.
base_url = self.request.path_url
user_agent = self.request.headers['User-Agent']
room_key = sanitize(self.request.get('r'))
stun_server = self.request.get('ss')
if not stun_server:
stun_server = get_default_stun_server(user_agent)
turn_server = self.request.get('ts')
ts_pwd = self.request.get('tp')
# Use "audio" and "video" to set the media stream constraints. Defined here:
# http://goo.gl/V7cZg
#
# "true" and "false" are recognized and interpreted as bools, for example:
# "?audio=true&video=false" (Start an audio-only call.)
# "?audio=false" (Start a video-only call.)
# If unspecified, the stream constraint defaults to True.
#
# To specify media track constraints, pass in a comma-separated list of
# key/value pairs, separated by a "=". Examples:
# "?audio=googEchoCancellation=false,googAutoGainControl=true"
# (Disable echo cancellation and enable gain control.)
#
# "?video=minWidth=1280,minHeight=720,googNoiseReduction=true"
# (Set the minimum resolution to 1280x720 and enable noise reduction.)
#
# Keys starting with "goog" will be added to the "optional" key; all others
# will be added to the "mandatory" key.
#
# The audio keys are defined here: talk/app/webrtc/localaudiosource.cc
# The video keys are defined here: talk/app/webrtc/videosource.cc
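# For illustration (derived from make_media_track_constraints() above), the
# "?video=minWidth=1280,minHeight=720,googNoiseReduction=true" example would
# produce:
#   {'mandatory': {'minWidth': '1280', 'minHeight': '720'},
#    'optional': [{'googNoiseReduction': 'true'}]}
# Values are kept as strings; they are not coerced to numbers or booleans.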
audio = self.request.get('audio')
video = self.request.get('video')
if self.request.get('hd').lower() == 'true':
if video:
message = 'The "hd" parameter has overridden video=' + str(video)
logging.error(message)
error_messages.append(message)
video = 'minWidth=1280,minHeight=720'
if self.request.get('minre') or self.request.get('maxre'):
message = ('The "minre" and "maxre" parameters are no longer supported. '
'Use "video" instead.')
logging.error(message)
error_messages.append(message)
audio_send_codec = self.request.get('asc')
if not audio_send_codec:
audio_send_codec = get_preferred_audio_send_codec(user_agent)
audio_receive_codec = self.request.get('arc')
if not audio_receive_codec:
audio_receive_codec = get_preferred_audio_receive_codec()
# Set stereo to false by default.
stereo = 'false'
if self.request.get('stereo'):
stereo = self.request.get('stereo')
# Options for making pcConstraints
dtls = self.request.get('dtls')
dscp = self.request.get('dscp')
ipv6 = self.request.get('ipv6')
debug = self.request.get('debug')
if debug == 'loopback':
# Set dtls to false as DTLS does not work for loopback.
dtls = 'false'
# token_timeout for channel creation: default 30 min, max 1 day (1440 min), min 3 min.
token_timeout = self.request.get_range('tt',
min_value = 3,
max_value = 1440,
default = 30)
unittest = self.request.get('unittest')
if unittest:
# Always create a new room for the unit tests.
room_key = generate_random(8)
if not room_key:
room_key = generate_random(8)
redirect = '/?r=' + room_key
redirect = append_url_arguments(self.request, redirect)
self.redirect(redirect)
logging.info('Redirecting visitor to base URL to ' + redirect)
return
user = None
initiator = 0
with LOCK:
room = Room.get_by_key_name(room_key)
if not room and debug != "full":
# New room.
user = generate_random(8)
room = Room(key_name = room_key)
room.add_user(user)
if debug != 'loopback':
initiator = 0
else:
room.add_user(user)
initiator = 1
elif room and room.get_occupancy() == 1 and debug != 'full':
# 1 occupant.
user = generate_random(8)
room.add_user(user)
initiator = 1
else:
# 2 occupants (full).
template = jinja_environment.get_template('full.html')
self.response.out.write(template.render({ 'room_key': room_key }))
logging.info('Room ' + room_key + ' is full')
return
if turn_server == 'false':
turn_server = None
turn_url = ''
else:
turn_url = 'https://computeengineondemand.appspot.com/'
turn_url = turn_url + 'turn?' + 'username=' + user + '&key=4080218913'
room_link = base_url + '?r=' + room_key
room_link = append_url_arguments(self.request, room_link)
token = create_channel(room, user, token_timeout)
pc_config = make_pc_config(stun_server, turn_server, ts_pwd)
pc_constraints = make_pc_constraints(dtls, dscp, ipv6)
offer_constraints = make_offer_constraints()
media_constraints = make_media_stream_constraints(audio, video)
template_values = {'error_messages': error_messages,
'token': token,
'me': user,
'room_key': room_key,
'room_link': room_link,
'initiator': initiator,
'pc_config': json.dumps(pc_config),
'pc_constraints': json.dumps(pc_constraints),
'offer_constraints': json.dumps(offer_constraints),
'media_constraints': json.dumps(media_constraints),
'turn_url': turn_url,
'stereo': stereo,
'audio_send_codec': audio_send_codec,
'audio_receive_codec': audio_receive_codec
}
if unittest:
target_page = 'test/test_' + unittest + '.html'
else:
target_page = 'index.html'
template = jinja_environment.get_template(target_page)
self.response.out.write(template.render(template_values))
logging.info('User ' + user + ' added to room ' + room_key)
logging.info('Room ' + room_key + ' has state ' + str(room))
app = webapp2.WSGIApplication([
('/', MainPage),
('/message', MessagePage),
('/_ah/channel/connected/', ConnectPage),
('/_ah/channel/disconnected/', DisconnectPage)
], debug=True)


@@ -1,95 +0,0 @@
a:link { color: #FFFFFF; }
a:visited {color: #FFFFFF; }
html, body {
background-color: #000000;
height: 100%;
font-family: Verdana, Arial, Helvetica, sans-serif;
}
body {
margin: 0;
padding: 0;
}
footer {
position: absolute;
bottom: 0;
width: 100%;
height: 28px;
background-color: #3F3F3F;
color: #FFFFFF;
font-size: 13px; font-weight: bold;
line-height: 28px;
text-align: center;
}
#container {
background-color: #000000;
position: absolute;
height: 100%;
width: 100%;
margin: 0px auto;
-webkit-perspective: 1000;
}
#card {
-webkit-transition-duration: 2s;
-webkit-transform-style: preserve-3d;
}
#local {
position: absolute;
width: 100%;
transform: scale(-1, 1);
-webkit-transform: scale(-1, 1);
-webkit-backface-visibility: hidden;
}
#remote {
position: absolute;
width: 100%;
-webkit-transform: rotateY(180deg);
-webkit-backface-visibility: hidden;
}
#mini {
position: absolute;
height: 30%;
width: 30%;
bottom: 32px;
right: 4px;
opacity: 1.0;
transform: scale(-1, 1);
-webkit-transform: scale(-1, 1);
}
#localVideo {
width: 100%;
height: 100%;
opacity: 0;
-webkit-transition-property: opacity;
-webkit-transition-duration: 2s;
}
#remoteVideo {
width: 100%;
height: 100%;
opacity: 0;
-webkit-transition-property: opacity;
-webkit-transition-duration: 2s;
}
#miniVideo {
width: 100%;
height: 100%;
opacity: 0;
-webkit-transition-property: opacity;
-webkit-transition-duration: 2s;
}
#hangup {
font-size: 13px; font-weight: bold;
color: #FFFFFF;
width: 128px;
height: 24px;
background-color: #808080;
border-style: solid;
border-color: #FFFFFF;
margin: 2px;
}
#infoDiv {
position: absolute;
float: right;
background-color: grey;
margin: 2px;
display: none;
}


@@ -1,55 +0,0 @@
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<html>
<head>
<script src="/_ah/channel/jsapi"></script>
<style type="text/css">
a:link { color: #ffffff; }
a:visited {color: #ffffff; }
html, body {
background-color: #000000;
height: 100%;
font-family:Verdana, Arial, Helvetica, sans-serif;
}
body {
margin: 0;
padding: 0;
}
#container {
position: relative;
min-height: 100%;
width: 100%;
margin: 0px auto;
}
#footer {
spacing: 4px;
position: absolute;
bottom: 0;
width: 100%;
height: 28px;
background-color: #3F3F3F;
color: rgb(255, 255, 255);
font-size:13px; font-weight: bold;
line-height: 28px;
text-align: center;
}
#logo {
display: block;
top:4;
right:4;
position:absolute;
float:right;
#opacity: 0.8;
}
</style>
</head>
<body>
<div id="container">
<div id="footer">
Sorry, this room is full.
<a href="{{room_link}}">Click here</a> to try again.
</div>
</div>
<img id="logo" alt="WebRTC" src="images/webrtc_black_20p.png">
</body>
</html>


@@ -1,11 +0,0 @@
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<html>
<head>
<meta content="text/html; charset=ISO-8859-1"
http-equiv="content-type">
<title>WebRtc Demo App Help</title>
</head>
<body>
TODO
</body>
</html>

Binary file not shown (removed image, 1.7 KiB).


@@ -1,53 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<title>WebRTC Reference App</title>
<meta http-equiv="X-UA-Compatible" content="chrome=1"/>
<link rel="canonical" href="{{ room_link }}"/>
<link rel="stylesheet" href="css/main.css" />
<script type="text/javascript" src="/_ah/channel/jsapi"></script>
<script src="/js/main.js"></script>
<!-- Load the polyfill to switch-hit between Chrome and Firefox -->
<script src="/js/adapter.js"></script>
</head>
<body>
<script type="text/javascript">
var errorMessages = {{ error_messages }};
var channelToken = '{{ token }}';
var me = '{{ me }}';
var roomKey = '{{ room_key }}';
var roomLink = '{{ room_link }}';
var initiator = {{ initiator }};
var pcConfig = {{ pc_config | safe }};
var pcConstraints = {{ pc_constraints | safe }};
var offerConstraints = {{ offer_constraints | safe }};
var mediaConstraints = {{ media_constraints | safe }};
var turnUrl = '{{ turn_url }}';
var stereo = {{ stereo }};
var audio_send_codec = '{{ audio_send_codec }}';
var audio_receive_codec = '{{ audio_receive_codec }}';
setTimeout(initialize, 1);
</script>
<div id="container" ondblclick="enterFullScreen()">
<div id="card">
<div id="local">
<video id="localVideo" autoplay="autoplay" muted="true"/>
</div>
<div id="remote">
<video id="remoteVideo" autoplay="autoplay">
</video>
<div id="mini">
<video id="miniVideo" autoplay="autoplay" muted="true"/>
</div>
</div>
</div>
</div>
</body>
<footer id="status">
</footer>
<div id="infoDiv"></div>
</html>


@@ -1 +0,0 @@
../../base/adapter.js


@@ -1,763 +0,0 @@
var localVideo;
var miniVideo;
var remoteVideo;
var hasLocalStream;
var localStream;
var remoteStream;
var channel;
var pc;
var socket;
var xmlhttp;
var started = false;
var turnDone = false;
var channelReady = false;
var signalingReady = false;
var msgQueue = [];
// Set up audio and video regardless of what devices are present.
var sdpConstraints = {'mandatory': {
'OfferToReceiveAudio': true,
'OfferToReceiveVideo': true }};
var isVideoMuted = false;
var isAudioMuted = false;
// Types of gathered ICE Candidates.
var gatheredIceCandidateTypes = { Local: {}, Remote: {} };
var infoDivErrors = [];
function initialize() {
if (errorMessages.length > 0) {
for (i = 0; i < errorMessages.length; ++i) {
window.alert(errorMessages[i]);
}
return;
}
console.log('Initializing; room=' + roomKey + '.');
card = document.getElementById('card');
localVideo = document.getElementById('localVideo');
// Reset localVideo display to center.
localVideo.addEventListener('loadedmetadata', function(){
window.onresize();});
miniVideo = document.getElementById('miniVideo');
remoteVideo = document.getElementById('remoteVideo');
resetStatus();
// NOTE: AppRTCClient.java searches & parses this line; update there when
// changing here.
openChannel();
maybeRequestTurn();
// Caller is always ready to create peerConnection.
signalingReady = initiator;
if (mediaConstraints.audio === false &&
mediaConstraints.video === false) {
hasLocalStream = false;
maybeStart();
} else {
hasLocalStream = true;
doGetUserMedia();
}
}
function openChannel() {
console.log('Opening channel.');
var channel = new goog.appengine.Channel(channelToken);
var handler = {
'onopen': onChannelOpened,
'onmessage': onChannelMessage,
'onerror': onChannelError,
'onclose': onChannelClosed
};
socket = channel.open(handler);
}
function maybeRequestTurn() {
// Allow skipping TURN by passing ts=false to apprtc.
if (turnUrl == '') {
turnDone = true;
return;
}
for (var i = 0, len = pcConfig.iceServers.length; i < len; i++) {
if (pcConfig.iceServers[i].urls.substr(0, 5) === 'turn:') {
turnDone = true;
return;
}
}
var currentDomain = document.domain;
if (currentDomain.search('localhost') === -1 &&
currentDomain.search('apprtc') === -1) {
// Not an authorized domain. Try the default STUN server instead.
turnDone = true;
return;
}
// No TURN server. Get one from computeengineondemand.appspot.com.
xmlhttp = new XMLHttpRequest();
xmlhttp.onreadystatechange = onTurnResult;
xmlhttp.open('GET', turnUrl, true);
xmlhttp.send();
}
function onTurnResult() {
if (xmlhttp.readyState !== 4)
return;
if (xmlhttp.status === 200) {
var turnServer = JSON.parse(xmlhttp.responseText);
// Create turnUris using the polyfill (adapter.js).
var iceServers = createIceServers(turnServer.uris,
turnServer.username,
turnServer.password);
if (iceServers !== null) {
pcConfig.iceServers = pcConfig.iceServers.concat(iceServers);
}
} else {
messageError('No TURN server; unlikely that media will traverse networks. '
+ 'If this persists please report it to '
+ 'discuss-webrtc@googlegroups.com.');
}
// If TURN request failed, continue the call with default STUN.
turnDone = true;
maybeStart();
}
function resetStatus() {
if (!initiator) {
setStatus('Waiting for someone to join: \
<a href=' + roomLink + '>' + roomLink + '</a>');
} else {
setStatus('Initializing...');
}
}
function doGetUserMedia() {
// Call into getUserMedia via the polyfill (adapter.js).
try {
getUserMedia(mediaConstraints, onUserMediaSuccess,
onUserMediaError);
console.log('Requested access to local media with mediaConstraints:\n' +
' \'' + JSON.stringify(mediaConstraints) + '\'');
} catch (e) {
alert('getUserMedia() failed. Is this a WebRTC capable browser?');
messageError('getUserMedia failed with exception: ' + e.message);
}
}
function createPeerConnection() {
try {
// Create an RTCPeerConnection via the polyfill (adapter.js).
pc = new RTCPeerConnection(pcConfig, pcConstraints);
pc.onicecandidate = onIceCandidate;
console.log('Created RTCPeerConnnection with:\n' +
' config: \'' + JSON.stringify(pcConfig) + '\';\n' +
' constraints: \'' + JSON.stringify(pcConstraints) + '\'.');
} catch (e) {
messageError('Failed to create PeerConnection, exception: ' + e.message);
alert('Cannot create RTCPeerConnection object; \
WebRTC is not supported by this browser.');
return;
}
pc.onaddstream = onRemoteStreamAdded;
pc.onremovestream = onRemoteStreamRemoved;
pc.onsignalingstatechange = onSignalingStateChanged;
pc.oniceconnectionstatechange = onIceConnectionStateChanged;
}
function maybeStart() {
if (!started && signalingReady && channelReady && turnDone &&
(localStream || !hasLocalStream)) {
setStatus('Connecting...');
console.log('Creating PeerConnection.');
createPeerConnection();
if (hasLocalStream) {
console.log('Adding local stream.');
pc.addStream(localStream);
} else {
console.log('Not sending any stream.');
}
started = true;
if (initiator)
doCall();
else
calleeStart();
}
}
function setStatus(state) {
document.getElementById('status').innerHTML = state;
}
function doCall() {
var constraints = mergeConstraints(offerConstraints, sdpConstraints);
console.log('Sending offer to peer, with constraints: \n' +
' \'' + JSON.stringify(constraints) + '\'.')
pc.createOffer(setLocalAndSendMessage,
onCreateSessionDescriptionError, constraints);
}
function calleeStart() {
// Callee starts to process cached offer and other messages.
while (msgQueue.length > 0) {
processSignalingMessage(msgQueue.shift());
}
}
function doAnswer() {
console.log('Sending answer to peer.');
pc.createAnswer(setLocalAndSendMessage,
onCreateSessionDescriptionError, sdpConstraints);
}
function mergeConstraints(cons1, cons2) {
var merged = cons1;
for (var name in cons2.mandatory) {
merged.mandatory[name] = cons2.mandatory[name];
}
merged.optional.concat(cons2.optional);
return merged;
}
function setLocalAndSendMessage(sessionDescription) {
sessionDescription.sdp = maybePreferAudioReceiveCodec(sessionDescription.sdp);
pc.setLocalDescription(sessionDescription,
onSetSessionDescriptionSuccess, onSetSessionDescriptionError);
sendMessage(sessionDescription);
}
function setRemote(message) {
// Set Opus to stereo, if stereo is enabled.
if (stereo)
message.sdp = addStereo(message.sdp);
message.sdp = maybePreferAudioSendCodec(message.sdp);
pc.setRemoteDescription(new RTCSessionDescription(message),
onSetRemoteDescriptionSuccess, onSetSessionDescriptionError);
function onSetRemoteDescriptionSuccess() {
console.log("Set remote session description success.");
// By now all addstream events for the setRemoteDescription have fired.
// So we can know if the peer is sending any stream or is only receiving.
if (remoteStream) {
waitForRemoteVideo();
} else {
console.log("Not receiving any stream.");
transitionToActive();
}
}
}
function sendMessage(message) {
var msgString = JSON.stringify(message);
console.log('C->S: ' + msgString);
// NOTE: AppRTCClient.java searches & parses this line; update there when
// changing here.
path = '/message?r=' + roomKey + '&u=' + me;
var xhr = new XMLHttpRequest();
xhr.open('POST', path, true);
xhr.send(msgString);
}
function processSignalingMessage(message) {
if (!started) {
messageError('peerConnection has not been created yet!');
return;
}
if (message.type === 'offer') {
setRemote(message);
doAnswer();
} else if (message.type === 'answer') {
setRemote(message);
} else if (message.type === 'candidate') {
var candidate = new RTCIceCandidate({sdpMLineIndex: message.label,
candidate: message.candidate});
noteIceCandidate("Remote", iceCandidateType(message.candidate));
pc.addIceCandidate(candidate,
onAddIceCandidateSuccess, onAddIceCandidateError);
} else if (message.type === 'bye') {
onRemoteHangup();
}
}
function onAddIceCandidateSuccess() {
console.log('AddIceCandidate success.');
}
function onAddIceCandidateError(error) {
messageError('Failed to add Ice Candidate: ' + error.toString());
}
function onChannelOpened() {
console.log('Channel opened.');
channelReady = true;
maybeStart();
}
function onChannelMessage(message) {
console.log('S->C: ' + message.data);
var msg = JSON.parse(message.data);
// The TURN response is async, and GAE may reorder message delivery because
// of a possible datastore query on the server side, so the callee needs to
// cache messages until the peerConnection is created.
if (!initiator && !started) {
if (msg.type === 'offer') {
// Add the offer to the beginning of msgQueue, since we can't currently
// handle early candidates that arrive before the offer.
msgQueue.unshift(msg);
// Callee creates PeerConnection
signalingReady = true;
maybeStart();
} else {
msgQueue.push(msg);
}
} else {
processSignalingMessage(msg);
}
}
function onChannelError() {
messageError('Channel error.');
}
function onChannelClosed() {
console.log('Channel closed.');
}
function messageError(msg) {
console.log(msg);
infoDivErrors.push(msg);
updateInfoDiv();
}
function onUserMediaSuccess(stream) {
console.log('User has granted access to local media.');
// Call the polyfill wrapper to attach the media stream to this element.
attachMediaStream(localVideo, stream);
localVideo.style.opacity = 1;
localStream = stream;
// Caller creates PeerConnection.
maybeStart();
}
function onUserMediaError(error) {
messageError('Failed to get access to local media. Error code was ' +
error.code + '. Continuing without sending a stream.');
alert('Failed to get access to local media. Error code was ' +
error.code + '. Continuing without sending a stream.');
hasLocalStream = false;
maybeStart();
}
function onCreateSessionDescriptionError(error) {
messageError('Failed to create session description: ' + error.toString());
}
function onSetSessionDescriptionSuccess() {
console.log('Set session description success.');
}
function onSetSessionDescriptionError(error) {
messageError('Failed to set session description: ' + error.toString());
}
function iceCandidateType(candidateSDP) {
if (candidateSDP.indexOf("typ relay ") >= 0)
return "TURN";
if (candidateSDP.indexOf("typ srflx ") >= 0)
return "STUN";
if (candidateSDP.indexOf("typ host ") >= 0)
return "HOST";
return "UNKNOWN";
}
function onIceCandidate(event) {
if (event.candidate) {
sendMessage({type: 'candidate',
label: event.candidate.sdpMLineIndex,
id: event.candidate.sdpMid,
candidate: event.candidate.candidate});
noteIceCandidate("Local", iceCandidateType(event.candidate.candidate));
} else {
console.log('End of candidates.');
}
}
function onRemoteStreamAdded(event) {
console.log('Remote stream added.');
attachMediaStream(remoteVideo, event.stream);
remoteStream = event.stream;
}
function onRemoteStreamRemoved(event) {
console.log('Remote stream removed.');
}
function onSignalingStateChanged(event) {
updateInfoDiv();
}
function onIceConnectionStateChanged(event) {
updateInfoDiv();
}
function onHangup() {
console.log('Hanging up.');
transitionToDone();
localStream.stop();
stop();
// will trigger BYE from server
socket.close();
}
function onRemoteHangup() {
console.log('Session terminated.');
initiator = 0;
transitionToWaiting();
stop();
}
function stop() {
started = false;
signalingReady = false;
isAudioMuted = false;
isVideoMuted = false;
pc.close();
pc = null;
remoteStream = null;
msgQueue.length = 0;
}
function waitForRemoteVideo() {
// Call the getVideoTracks method via adapter.js.
videoTracks = remoteStream.getVideoTracks();
if (videoTracks.length === 0 || remoteVideo.currentTime > 0) {
transitionToActive();
} else {
setTimeout(waitForRemoteVideo, 100);
}
}
function transitionToActive() {
reattachMediaStream(miniVideo, localVideo);
remoteVideo.style.opacity = 1;
card.style.webkitTransform = 'rotateY(180deg)';
setTimeout(function() { localVideo.src = ''; }, 500);
setTimeout(function() { miniVideo.style.opacity = 1; }, 1000);
// Reset the window display according to the aspect ratio of the remote video.
window.onresize();
setStatus('<input type=\'button\' id=\'hangup\' value=\'Hang up\' \
onclick=\'onHangup()\' />');
}
function transitionToWaiting() {
card.style.webkitTransform = 'rotateY(0deg)';
setTimeout(function() {
localVideo.src = miniVideo.src;
miniVideo.src = '';
remoteVideo.src = '' }, 500);
miniVideo.style.opacity = 0;
remoteVideo.style.opacity = 0;
resetStatus();
}
function transitionToDone() {
localVideo.style.opacity = 0;
remoteVideo.style.opacity = 0;
miniVideo.style.opacity = 0;
setStatus('You have left the call. <a href=' + roomLink + '>\
Click here</a> to rejoin.');
}
function enterFullScreen() {
container.webkitRequestFullScreen();
}
function noteIceCandidate(location, type) {
if (gatheredIceCandidateTypes[location][type])
return;
gatheredIceCandidateTypes[location][type] = 1;
updateInfoDiv();
}
function getInfoDiv() {
return document.getElementById("infoDiv");
}
function updateInfoDiv() {
var contents = "<pre>Gathered ICE Candidates\n";
for (var endpoint in gatheredIceCandidateTypes) {
contents += endpoint + ":\n";
for (var type in gatheredIceCandidateTypes[endpoint])
contents += " " + type + "\n";
}
if (pc != null) {
contents += "Gathering: " + pc.iceGatheringState + "\n";
contents += "</pre>\n";
contents += "<pre>PC State:\n";
contents += "Signaling: " + pc.signalingState + "\n";
contents += "ICE: " + pc.iceConnectionState + "\n";
}
var div = getInfoDiv();
div.innerHTML = contents + "</pre>";
for (var msg in infoDivErrors) {
div.innerHTML += '<p style="background-color: red; color: yellow;">' +
infoDivErrors[msg] + '</p>';
}
if (infoDivErrors.length)
showInfoDiv();
}
function toggleInfoDiv() {
var div = getInfoDiv();
if (div.style.display == "block") {
div.style.display = "none";
} else {
showInfoDiv();
}
}
function showInfoDiv() {
var div = getInfoDiv();
div.style.display = "block";
}
function toggleVideoMute() {
// Call the getVideoTracks method via adapter.js.
videoTracks = localStream.getVideoTracks();
if (videoTracks.length === 0) {
console.log('No local video available.');
return;
}
if (isVideoMuted) {
for (i = 0; i < videoTracks.length; i++) {
videoTracks[i].enabled = true;
}
console.log('Video unmuted.');
} else {
for (i = 0; i < videoTracks.length; i++) {
videoTracks[i].enabled = false;
}
console.log('Video muted.');
}
isVideoMuted = !isVideoMuted;
}
function toggleAudioMute() {
// Call the getAudioTracks method via adapter.js.
audioTracks = localStream.getAudioTracks();
if (audioTracks.length === 0) {
console.log('No local audio available.');
return;
}
if (isAudioMuted) {
for (i = 0; i < audioTracks.length; i++) {
audioTracks[i].enabled = true;
}
console.log('Audio unmuted.');
} else {
for (i = 0; i < audioTracks.length; i++){
audioTracks[i].enabled = false;
}
console.log('Audio muted.');
}
isAudioMuted = !isAudioMuted;
}
// Mac: hotkey is Command.
// Non-Mac: hotkey is Control.
// <hotkey>-D: toggle audio mute.
// <hotkey>-E: toggle video mute.
// <hotkey>-I: toggle Info box.
// Return false to screen out original Chrome shortcuts.
document.onkeydown = function(event) {
var hotkey = event.ctrlKey;
if (navigator.appVersion.indexOf('Mac') != -1)
hotkey = event.metaKey;
if (!hotkey)
return;
switch (event.keyCode) {
case 68:
toggleAudioMute();
return false;
case 69:
toggleVideoMute();
return false;
case 73:
toggleInfoDiv();
return false;
default:
return;
}
}
function maybePreferAudioSendCodec(sdp) {
if (audio_send_codec == '') {
console.log('No preference on audio send codec.');
return sdp;
}
console.log('Prefer audio send codec: ' + audio_send_codec);
return preferAudioCodec(sdp, audio_send_codec);
}
function maybePreferAudioReceiveCodec(sdp) {
if (audio_receive_codec == '') {
console.log('No preference on audio receive codec.');
return sdp;
}
console.log('Prefer audio receive codec: ' + audio_receive_codec);
return preferAudioCodec(sdp, audio_receive_codec);
}
// Set |codec| as the default audio codec if it's present.
// The format of |codec| is 'NAME/RATE', e.g. 'opus/48000'.
function preferAudioCodec(sdp, codec) {
var fields = codec.split('/');
if (fields.length != 2) {
console.log('Invalid codec setting: ' + codec);
return sdp;
}
var name = fields[0];
var rate = fields[1];
var sdpLines = sdp.split('\r\n');
// Search for m line.
for (var i = 0; i < sdpLines.length; i++) {
if (sdpLines[i].search('m=audio') !== -1) {
var mLineIndex = i;
break;
}
}
if (mLineIndex === undefined)
return sdp;
// If the codec is available, set it as the default in m line.
for (var i = 0; i < sdpLines.length; i++) {
if (sdpLines[i].search(name + '/' + rate) !== -1) {
var regexp = new RegExp(':(\\d+) ' + name + '\\/' + rate, 'i');
var payload = extractSdp(sdpLines[i], regexp);
if (payload)
sdpLines[mLineIndex] = setDefaultCodec(sdpLines[mLineIndex],
payload);
break;
}
}
// Remove CN in m line and sdp.
sdpLines = removeCN(sdpLines, mLineIndex);
sdp = sdpLines.join('\r\n');
return sdp;
}
// Set Opus in stereo if stereo is enabled.
function addStereo(sdp) {
var sdpLines = sdp.split('\r\n');
// Find opus payload.
for (var i = 0; i < sdpLines.length; i++) {
if (sdpLines[i].search('opus/48000') !== -1) {
var opusPayload = extractSdp(sdpLines[i], /:(\d+) opus\/48000/i);
break;
}
}
// Find the payload in fmtp line.
for (var i = 0; i < sdpLines.length; i++) {
if (sdpLines[i].search('a=fmtp') !== -1) {
var payload = extractSdp(sdpLines[i], /a=fmtp:(\d+)/ );
if (payload === opusPayload) {
var fmtpLineIndex = i;
break;
}
}
}
// No fmtp line found.
if (fmtpLineIndex === undefined)
return sdp;
// Append stereo=1 to fmtp line.
sdpLines[fmtpLineIndex] = sdpLines[fmtpLineIndex].concat(' stereo=1');
sdp = sdpLines.join('\r\n');
return sdp;
}
function extractSdp(sdpLine, pattern) {
var result = sdpLine.match(pattern);
return (result && result.length == 2)? result[1]: null;
}
// Set the selected codec to the first in m line.
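// For illustration (values are made up): with
//   mLine   = 'm=audio 9 RTP/SAVPF 0 111 103'
//   payload = '111'
// this returns 'm=audio 9 RTP/SAVPF 111 0 103'; the target payload is moved
// to the first format position (index 3) and its original entry is dropped.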
function setDefaultCodec(mLine, payload) {
var elements = mLine.split(' ');
var newLine = new Array();
var index = 0;
for (var i = 0; i < elements.length; i++) {
if (index === 3) // Format of media starts from the fourth.
newLine[index++] = payload; // Put target payload to the first.
if (elements[i] !== payload)
newLine[index++] = elements[i];
}
return newLine.join(' ');
}
// Strip CN from the SDP until CN constraints are supported.
function removeCN(sdpLines, mLineIndex) {
var mLineElements = sdpLines[mLineIndex].split(' ');
// Scan from end for the convenience of removing an item.
for (var i = sdpLines.length-1; i >= 0; i--) {
var payload = extractSdp(sdpLines[i], /a=rtpmap:(\d+) CN\/\d+/i);
if (payload) {
var cnPos = mLineElements.indexOf(payload);
if (cnPos !== -1) {
// Remove CN payload from m line.
mLineElements.splice(cnPos, 1);
}
// Remove CN line in sdp
sdpLines.splice(i, 1);
}
}
sdpLines[mLineIndex] = mLineElements.join(' ');
return sdpLines;
}
// Send BYE on refreshing (or leaving) the demo page
// to ensure the room is cleaned up for the next session.
window.onbeforeunload = function() {
sendMessage({type: 'bye'});
}
// Center the video display in the window.
window.onresize = function(){
var aspectRatio;
if (remoteVideo.style.opacity === '1') {
aspectRatio = remoteVideo.videoWidth/remoteVideo.videoHeight;
} else if (localVideo.style.opacity === '1') {
aspectRatio = localVideo.videoWidth/localVideo.videoHeight;
} else {
return;
}
var innerHeight = this.innerHeight;
var innerWidth = this.innerWidth;
var videoWidth = innerWidth < aspectRatio * window.innerHeight ?
innerWidth : aspectRatio * window.innerHeight;
var videoHeight = innerHeight < window.innerWidth / aspectRatio ?
innerHeight : window.innerWidth / aspectRatio;
containerDiv = document.getElementById('container');
containerDiv.style.width = videoWidth + 'px';
containerDiv.style.height = videoHeight + 'px';
containerDiv.style.left = (innerWidth - videoWidth) / 2 + 'px';
containerDiv.style.top = (innerHeight - videoHeight) / 2 + 'px';
};


@@ -1,93 +0,0 @@
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<!--This is the test page for the message channel.
To run this test:
?debug=loopback&unittest=channel
-->
<html>
<head>
<link rel="canonical" href="{{ room_link }}"/>
<meta http-equiv="X-UA-Compatible" content="chrome=1"/>
<script src="/_ah/channel/jsapi"></script>
<script type="text/javascript">
var channel;
var pc;
var socket;
var expected_message_num = 8;
var receive = 0;
var test_msg =
'01234567890abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ';
var msg_larger_than_500 = "";
function trace(txt) {
// This function is used for logging.
var elem = document.getElementById("debug");
elem.innerHTML += txt + "<br>";
}
function runTest() {
trace("Initializing; room={{ room_key }}.");
var channel = new goog.appengine.Channel('{{ token }}');
var handler = {
'onopen': onChannelOpened,
'onmessage': onChannelMessage,
'onerror': onChannelError,
'onclose': onChannelClosed
};
for (i = 0; i < 9; ++i) {
msg_larger_than_500 += test_msg;
}
for (i = 0; i < 4; ++i) {
sendMessage({type: 'test', msgid: i, msg: msg_larger_than_500});
}
trace('channel.open');
socket = channel.open(handler);
for (i = 4; i < expected_message_num; ++i) {
sendMessage({type: 'test', msgid: i, msg: msg_larger_than_500});
}
}
function sendMessage(message) {
var msgString = JSON.stringify(message);
trace('C->S: ' + msgString);
path = '/message?r={{ room_key }}' + '&u={{ me }}';
var xhr = new XMLHttpRequest();
xhr.open('POST', path, true);
xhr.send(msgString);
}
function onChannelOpened() {
trace('Channel opened.');
}
function onChannelMessage(message) {
if (message.data != JSON.stringify({type: 'test', msgid: receive,
msg: msg_larger_than_500})) {
trace('ERROR: Expect: ' + receive + ' Actual: ' + message.data);
} else {
trace('S->C: ' + message.data);
}
++receive;
if (receive == expected_message_num) {
trace('Received all the ' + expected_message_num + ' messages.');
trace('Test passed!');
} else if (receive > expected_message_num) {
trace('Received more messages than expected.');
trace('Test failed!');
}
}
function onChannelError() {
trace('Channel error.');
}
function onChannelClosed() {
trace('Channel closed.');
}
</script>
</head>
<body onload="runTest()">
<pre id="debug"></pre>
</body>
</html>


@@ -1,9 +0,0 @@
This script contains a simple prober that verifies that:
- CEOD vends TURN server URIs with credentials on demand (mimicking apprtc)
- rfc5766-turn-server vends TURN candidates from the servers vended by CEOD.
To use it, simply run ./turn-prober.sh
If it prints "PASS" (and exits 0), then all is well.
If it prints a mess of logs (and exits non-zero), then something has gone
sideways and apprtc.appspot.com is probably not working well (because of
missing TURN functionality).
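
For example, a wrapper could key off the exit status alone. A minimal sketch
(the echo text is illustrative, not something the prober itself prints):

  if ./turn-prober.sh; then
    echo "TURN prober: PASS"
  else
    echo "TURN prober: FAIL (see logs above)"
    exit 1
  fi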


@@ -1,132 +0,0 @@
<html>
<head>
<script>
var CEOD_URL = ("https://computeengineondemand.appspot.com/turn?" +
"username=1234&key=5678");
var xmlhttp = null;
var turnServers = []; // Array of {turnUri, username, password}.
// The next two arrays' entries correspond 1:1 to turnServers.
var gotTurn = []; // Entries are null (not done), "PASS", or "FAIL"
var pcs = []; // Entries are RTCPeerConnection objects.
// Test is done; log & replace document body with an appropriate message.
function finish(msg) {
msg = "DONE: " + msg;
console.log(msg);
document.body.innerHTML = msg;
}
// Handle created offer SDP.
function offerHandler(i, c) {
var pc = pcs[i];
pc.setLocalDescription(c,
function() {},
function(e) {console.log("sLD error: " + e); });
pc = null;
}
// Handle SDP offer creation error.
function offerError(i, e) {
console.log("createOffer error: " + e);
checkForCompletion(i, "FAIL (offerError)");
}
// Register a terminal condition |msg| for the |index|'th server and
// terminate the test with an appropriate message if all servers are done.
function checkForCompletion(index, msg) {
gotTurn[index] = msg;
var pass = true;
for (var i = 0; i < gotTurn.length; ++i) {
if (!gotTurn[i])
return;
if (gotTurn[i] != "PASS") {
pass = false;
// Don't "break" because a later still-null gotTurn value should let
// us wait more.
}
}
if (pass) {
finish("PASS");
} else {
finish("FAIL: " + JSON.stringify(gotTurn));
}
}
// Make sure we don't wait forever for TURN to complete.
function nanny(i) {
if (!gotTurn[i]) {
checkForCompletion(i, "FAIL (TURN server failed to respond)");
}
}
// Handle incoming ICE candidate |c| from |turnServers[i]|.
function onIceCandidate(i, c) {
var pc = pcs[i];
if (!c || !c.candidate) {
checkForCompletion(
i, !gotTurn[i] ? "FAIL (no TURN candidate)" :
(gotTurn[i] == "PASS") ? "PASS" : gotTurn[i]);
return;
}
if (c.candidate.candidate.indexOf(" typ relay ") >= 0) {
gotTurn[i] = "PASS";
}
}
// Kick off the test.
function go() {
xmlhttp = new XMLHttpRequest();
xmlhttp.onreadystatechange = onTurnResult;
xmlhttp.open('GET', CEOD_URL, true);
xmlhttp.send();
}
// Handle the XMLHttpRequest's response.
function onTurnResult() {
if (xmlhttp.readyState != 4)
return;
if (xmlhttp.status != 200) {
finish("FAIL (no TURN server)");
return;
}
var turnServer = JSON.parse(xmlhttp.responseText);
for (i = 0; i < turnServer.uris.length; i++) {
if (turnServer.uris[i].indexOf(":3479?") >= 0) {
// Why does CEOD vend useless port 3479 URIs?
continue;
}
console.log("Adding to test: " +
[turnServer.uris[i], turnServer.username,
turnServer.password]);
gotTurn.push(null);
pcs.push(new webkitRTCPeerConnection({
"iceServers": [{
"url": turnServer.uris[i],
"username": turnServer.username,
"credential": turnServer.password
}]
}));
var index = pcs.length - 1;
var pc = pcs[index];
if (!pc) {
checkForCompletion(index, "FAIL (PeerConnection ctor failed)");
continue;
}
pc.onicecandidate = (
function(p) { return function(c) { onIceCandidate(p, c); } })(index);
pc.createOffer(
(function(p) { return function(o) { offerHandler(p, o); } })(index),
(function(p) { return function(e) { offerError(p, e); } })(index),
{'mandatory': { 'OfferToReceiveAudio': true } });
window.setTimeout(
(function(p) { return function() { nanny(p); } })(index), 10000);
}
}
</script>
</head>
<body onload="go()">
</body>
</html>


@@ -1,49 +0,0 @@
#!/bin/bash -e
function chrome_pids() {
ps axuwww|grep $D|grep c[h]rome|awk '{print $2}'
}
cd $(dirname $0)
export D=$(mktemp -d)
CHROME_LOG_FILE="${D}/chrome_debug.log"
touch $CHROME_LOG_FILE
XVFB="xvfb-run -a -e $CHROME_LOG_FILE -s '-screen 0 1024x768x24'"
if [ -n "$DISPLAY" ]; then
XVFB=""
fi
# "eval" below is required by $XVFB containing a quoted argument.
eval $XVFB chrome \
--enable-logging=stderr \
--no-first-run \
--disable-web-security \
--user-data-dir=$D \
--vmodule="*media/*=3,*turn*=3" \
"file://${PWD}/turn-prober.html" > $CHROME_LOG_FILE 2>&1 &
CHROME_PID=$!
while ! grep -q DONE $CHROME_LOG_FILE && chrome_pids|grep -q .; do
sleep 0.1
done
# Suppress bash's Killed message for the chrome above.
exec 3>&2
exec 2>/dev/null
while [ ! -z "$(chrome_pids)" ]; do
kill -9 $(chrome_pids)
done
exec 2>&3
exec 3>&-
DONE=$(grep DONE $CHROME_LOG_FILE)
EXIT_CODE=0
if ! grep -q "DONE: PASS" $CHROME_LOG_FILE; then
cat $CHROME_LOG_FILE
EXIT_CODE=1
fi
rm -rf $D
exit $EXIT_CODE


@@ -1,198 +0,0 @@
var RTCPeerConnection = null;
var getUserMedia = null;
var attachMediaStream = null;
var reattachMediaStream = null;
var webrtcDetectedBrowser = null;
var webrtcDetectedVersion = null;
function trace(text) {
// This function is used for logging.
if (text[text.length - 1] == '\n') {
text = text.substring(0, text.length - 1);
}
console.log((performance.now() / 1000).toFixed(3) + ": " + text);
}
function maybeFixConfiguration(pcConfig) {
if (pcConfig == null) {
return;
}
for (var i = 0; i < pcConfig.iceServers.length; i++) {
if (pcConfig.iceServers[i].hasOwnProperty('urls')){
pcConfig.iceServers[i]['url'] = pcConfig.iceServers[i]['urls'];
delete pcConfig.iceServers[i]['urls'];
}
}
}
if (navigator.mozGetUserMedia) {
console.log("This appears to be Firefox");
webrtcDetectedBrowser = "firefox";
webrtcDetectedVersion =
parseInt(navigator.userAgent.match(/Firefox\/([0-9]+)\./)[1], 10);
// The RTCPeerConnection object.
var RTCPeerConnection = function(pcConfig, pcConstraints) {
// .urls is not supported in FF yet.
maybeFixConfiguration(pcConfig);
return new mozRTCPeerConnection(pcConfig, pcConstraints);
}
// The RTCSessionDescription object.
RTCSessionDescription = mozRTCSessionDescription;
// The RTCIceCandidate object.
RTCIceCandidate = mozRTCIceCandidate;
// Get UserMedia (only difference is the prefix).
// Code from Adam Barth.
getUserMedia = navigator.mozGetUserMedia.bind(navigator);
navigator.getUserMedia = getUserMedia;
// Creates iceServer from the url for FF.
createIceServer = function(url, username, password) {
var iceServer = null;
var url_parts = url.split(':');
if (url_parts[0].indexOf('stun') === 0) {
// Create iceServer with stun url.
iceServer = { 'url': url };
} else if (url_parts[0].indexOf('turn') === 0) {
if (webrtcDetectedVersion < 27) {
// Create iceServer with turn url.
// Ignore the transport parameter in the TURN url for FF versions below 27.
var turn_url_parts = url.split("?");
// Return null for createIceServer if transport=tcp.
if (turn_url_parts.length === 1 ||
turn_url_parts[1].indexOf('transport=udp') === 0) {
iceServer = {'url': turn_url_parts[0],
'credential': password,
'username': username};
}
} else {
// FF 27 and above supports transport parameters in the TURN url,
// so pass in the full url to create the iceServer.
iceServer = {'url': url,
'credential': password,
'username': username};
}
}
return iceServer;
};
createIceServers = function(urls, username, password) {
var iceServers = [];
// Use .url for Firefox.
for (i = 0; i < urls.length; i++) {
var iceServer = createIceServer(urls[i],
username,
password);
if (iceServer !== null) {
iceServers.push(iceServer);
}
}
return iceServers;
}
// Attach a media stream to an element.
attachMediaStream = function(element, stream) {
console.log("Attaching media stream");
element.mozSrcObject = stream;
element.play();
};
reattachMediaStream = function(to, from) {
console.log("Reattaching media stream");
to.mozSrcObject = from.mozSrcObject;
to.play();
};
// Fake get{Video,Audio}Tracks
if (!MediaStream.prototype.getVideoTracks) {
MediaStream.prototype.getVideoTracks = function() {
return [];
};
}
if (!MediaStream.prototype.getAudioTracks) {
MediaStream.prototype.getAudioTracks = function() {
return [];
};
}
} else if (navigator.webkitGetUserMedia) {
console.log("This appears to be Chrome");
webrtcDetectedBrowser = "chrome";
webrtcDetectedVersion =
parseInt(navigator.userAgent.match(/Chrom(e|ium)\/([0-9]+)\./)[2], 10);
// Creates iceServer from the url for Chrome M33 and earlier.
createIceServer = function(url, username, password) {
var iceServer = null;
var url_parts = url.split(':');
if (url_parts[0].indexOf('stun') === 0) {
// Create iceServer with stun url.
iceServer = { 'url': url };
} else if (url_parts[0].indexOf('turn') === 0) {
// Chrome M28 and above use the TURN format below.
iceServer = {'url': url,
'credential': password,
'username': username};
}
return iceServer;
};
// Creates iceServers from the urls for Chrome M34 and above.
createIceServers = function(urls, username, password) {
var iceServers = [];
if (webrtcDetectedVersion >= 34) {
// .urls is supported since Chrome M34.
iceServers = {'urls': urls,
'credential': password,
'username': username };
} else {
for (i = 0; i < urls.length; i++) {
var iceServer = createIceServer(urls[i],
username,
password);
if (iceServer !== null) {
iceServers.push(iceServer);
}
}
}
return iceServers;
};
// The RTCPeerConnection object.
var RTCPeerConnection = function(pcConfig, pcConstraints) {
// .urls is supported since Chrome M34.
if (webrtcDetectedVersion < 34) {
maybeFixConfiguration(pcConfig);
}
return new webkitRTCPeerConnection(pcConfig, pcConstraints);
}
// Get UserMedia (only difference is the prefix).
// Code from Adam Barth.
getUserMedia = navigator.webkitGetUserMedia.bind(navigator);
navigator.getUserMedia = getUserMedia;
// Attach a media stream to an element.
attachMediaStream = function(element, stream) {
if (typeof element.srcObject !== 'undefined') {
element.srcObject = stream;
} else if (typeof element.mozSrcObject !== 'undefined') {
element.mozSrcObject = stream;
} else if (typeof element.src !== 'undefined') {
element.src = URL.createObjectURL(stream);
} else {
console.log('Error attaching stream to element.');
}
};
reattachMediaStream = function(to, from) {
to.src = from.src;
};
} else {
console.log("Browser does not appear to be WebRTC-capable");
}


@@ -1,5 +0,0 @@
These demos are moving to Github: https://github.com/GoogleChrome/webrtc.
Please file bugs and patches there from now on.
Thanks!


@@ -1,377 +0,0 @@
<html>
<head>
<title>Constraints and Statistics</title>
<!-- Load the polyfill to switch-hit between Chrome and Firefox -->
<script src="../../base/adapter.js"></script>
<style type="text/css">
td { vertical-align: top; }
</style>
<script>
var mystream;
var pc1;
var pc2;
var bytesPrev = 0;
var timestampPrev = 0;
$ = function(id) {
return document.getElementById(id);
}
function log(txt) {
console.log(txt);
}
function openCamera() {
if (mystream) {
mystream.stop();
}
navigator.getUserMedia(cameraConstraints(), gotStream, function() {
log("GetUserMedia failed");
});
}
function gotStream(stream) {
log("GetUserMedia succeeded");
mystream = stream;
attachMediaStream($("local-video"), stream);
}
function cameraConstraints() {
var constraints = {};
constraints.audio = true;
constraints.video = { mandatory: {}, optional: [] };
if ($("minwidth").value != "0") {
constraints.video.mandatory.minWidth = $("minwidth").value;
}
if ($("maxwidth").value != "0") {
constraints.video.mandatory.maxWidth = $("maxwidth").value;
}
if ($("minheight").value != "0") {
constraints.video.mandatory.minHeight = $("minheight").value;
}
if ($("maxheight").value != "0") {
constraints.video.mandatory.maxHeight = $("maxheight").value;
}
if ($("frameRate").value != "0") {
constraints.video.mandatory.minFrameRate = $("frameRate").value;
}
log('Camera constraints are ' + JSON.stringify(constraints));
$("cameraConstraints").innerHTML = JSON.stringify(constraints, null, ' ');
return constraints;
}
function streamConstraints() {
var constraints = { mandatory: {}, optional: [] };
if ($("bandwidth").value != "0") {
constraints.optional[0] = { 'bandwidth' : $('bandwidth').value };
}
log('Constraints are ' + JSON.stringify(constraints));
$("addStreamConstraints").innerHTML = JSON.stringify(constraints, null, ' ');
return constraints;
}
function connect() {
pc1 = new RTCPeerConnection(null);
pc2 = new RTCPeerConnection(null);
pc1.addStream(mystream, streamConstraints());
log('PC1 creating offer');
pc1.onnegotiationneeded = function() {
log('Negotiation needed - PC1');
}
pc2.onnegotiationneeded = function() {
log('Negotiation needed - PC2');
}
pc1.onicecandidate = function(e) {
log('Candidate PC1');
if (e.candidate) {
pc2.addIceCandidate(new RTCIceCandidate(e.candidate),
onAddIceCandidateSuccess, onAddIceCandidateError);
}
}
pc2.onicecandidate = function(e) {
log('Candidate PC2');
if (e.candidate) {
pc1.addIceCandidate(new RTCIceCandidate(e.candidate),
onAddIceCandidateSuccess, onAddIceCandidateError);
}
}
pc2.onaddstream = function(e) {
log('PC2 got stream');
attachMediaStream($('remote-video'), e.stream);
log('Remote video is ' + $('remote-video').src);
}
pc1.createOffer(function(desc) {
log('PC1 offering');
pc1.setLocalDescription(desc);
pc2.setRemoteDescription(desc);
pc2.createAnswer(function(desc2) {
log('PC2 answering');
pc2.setLocalDescription(desc2);
pc1.setRemoteDescription(desc2);
});
});
}
function onAddIceCandidateSuccess() {
trace("AddIceCandidate success.");
}
function onAddIceCandidateError(error) {
trace("Failed to add Ice Candidate: " + error.toString());
}
// Augmentation of stats entries with utility functions.
// The augmented entry does what the stats entry does, but adds
// utility functions.
function AugumentedStatsResponse(response) {
this.response = response;
this.addressPairMap = [];
}
AugumentedStatsResponse.prototype.collectAddressPairs = function(componentId) {
if (!this.addressPairMap[componentId]) {
this.addressPairMap[componentId] = [];
for (var i = 0; i < this.response.result().length; ++i) {
var res = this.response.result()[i];
if (res.type == 'googCandidatePair' &&
res.stat('googChannelId') == componentId) {
this.addressPairMap[componentId].push(res);
}
}
}
return this.addressPairMap[componentId];
}
AugumentedStatsResponse.prototype.result = function() {
return this.response.result();
}
// The indexed getter isn't easy to prototype.
AugumentedStatsResponse.prototype.get = function(key) {
return this.response[key];
}
// Display statistics
var statCollector = setInterval(function() {
var display = function(str) {
$('bitrate').innerHTML = str;
}
display("No stream");
if (pc2 && pc2.getRemoteStreams()[0]) {
if (pc2.getStats) {
pc2.getStats(function(rawStats) {
stats = new AugumentedStatsResponse(rawStats);
var statsString = '';
var results = stats.result();
var videoFlowInfo = 'No bitrate stats';
for (var i = 0; i < results.length; ++i) {
var res = results[i];
statsString += '<h3>Report ';
statsString += i;
statsString += '</h3>';
if (!res.local || res.local === res) {
statsString += dumpStats(res);
// The bandwidth info for video is in a type ssrc stats record
// with googFrameHeightReceived defined.
// Should check for mediatype = video, but this is not
// implemented yet.
if (res.type == 'ssrc' && res.stat('googFrameHeightReceived')) {
// This is the video flow.
videoFlowInfo = extractVideoFlowInfo(res, stats);
}
} else {
// Pre-27.0.1445 (188719) browsers.
if (res.local) {
statsString += "<p>Local ";
statsString += dumpStats(res.local);
}
if (res.remote) {
statsString += "<p>Remote ";
statsString += dumpStats(res.remote);
}
}
}
$('receiverstats').innerHTML = statsString;
display(videoFlowInfo);
});
pc1.getStats(function(stats) {
var statsString = '';
var results = stats.result();
for (var i = 0; i < results.length; ++i) {
var res = results[i];
statsString += '<h3>Report ';
statsString += i;
statsString += '</h3>';
if (!res.local || res.local === res) {
statsString += dumpStats(res);
}
}
$('senderstats').innerHTML = statsString;
});
} else {
display('No stats function. Use at least Chrome 24.0.1285');
}
} else {
log('Not connected yet');
}
// Collect some stats from the video tags.
local_video = $('local-video');
if (local_video) {
$('local-video-stats').innerHTML = local_video.videoWidth +
'x' + local_video.videoHeight;
}
remote_video = $('remote-video');
if (remote_video) {
$('remote-video-stats').innerHTML = remote_video.videoWidth +
'x' + remote_video.videoHeight;
}
}, 1000);
function extractVideoFlowInfo(res, allStats) {
var description = '';
var bytesNow = res.stat('bytesReceived');
if (timestampPrev > 0) {
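// getStats timestamps are in milliseconds, so (bytes * 8) / ms comes out
// directly in kilobits per second.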
var bitRate = Math.round((bytesNow - bytesPrev) * 8 /
(res.timestamp - timestampPrev));
description = bitRate + ' kbits/sec';
}
timestampPrev = res.timestamp;
bytesPrev = bytesNow;
if (res.stat('transportId')) {
component = allStats.get(res.stat('transportId'));
if (component) {
addresses = allStats.collectAddressPairs(component.id);
if (addresses.length > 0) {
description += ' from IP ';
description += addresses[0].stat('googRemoteAddress');
} else {
description += ' no address';
}
} else {
description += ' No component stats';
}
} else {
description += ' No component ID';
}
return description;
}
// Dumping a stats variable as a string.
// Might be better expressed as a toString() method.
function dumpStats(obj) {
var statsString = 'Timestamp:';
statsString += obj.timestamp;
if (obj.id) {
statsString += "<br>id ";
statsString += obj.id;
}
if (obj.type) {
statsString += " type ";
statsString += obj.type;
}
if (obj.names) {
names = obj.names();
for (var i = 0; i < names.length; ++i) {
statsString += '<br>';
statsString += names[i];
statsString += ':';
statsString += obj.stat(names[i]);
}
} else {
if (obj.stat('audioOutputLevel')) {
statsString += "audioOutputLevel: ";
statsString += obj.stat('audioOutputLevel');
statsString += "<br>";
}
}
return statsString;
}
// Utility to show the value of a field in a span called name+Display
function showValue(name, value) {
$(name + 'Display').innerHTML = value;
}
</script>
</head>
<body>
<h1>Constraints and Statistics</h1>
This page shows how constraints and statistics can be used in WebRTC applications.
<p>
The form to the left sets constraints on the getUserMedia call.
When you press "Capture!", the camera is (re)opened with these constraints.
<p>
The left video is the local preview. The right video shows the same stream
after it has passed through a (local) PeerConnection.
<p>
Underneath the videos is a running display of how many kbits/sec
the video feed uses for transmission.
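<p>
As a rough illustration, the default slider values (300x200 minimum, 640x480 maximum,
30 fps) produce video constraints shaped roughly like this; the exact object generated
by the form is shown live in the box further down the page:
<pre>
{ "video": { "mandatory": { "minWidth": "300",  "maxWidth": "640",
                            "minHeight": "200", "maxHeight": "480",
                            "minFrameRate": "30" } } }
</pre>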
<hr>
<table>
<tr>
<td align="top">
<h2>getUserMedia constraints</h2>
<table>
<tr><td><td>Min<td>Max
<tr><td>Horizontal
<td><input type="range" id="minwidth" min="0" max="1280" value="300"
onchange="showValue(this.id, this.value)">
<td><input type="range" id="maxwidth" min="0" max="1280" value="640"
onchange="showValue(this.id, this.value)">
<td><span id="minwidthDisplay">300</span>-<span id="maxwidthDisplay">640</span>
<tr><td>Vertical
<td><input type="range" id="minheight" min="0" max="1280" value="200"
onchange="showValue(this.id, this.value)">
<td><input type="range" id="maxheight" min="0" max="1280" value="480"
onchange="showValue(this.id, this.value)">
<td><span id="minheightDisplay">200</span>-<span id="maxheightDisplay">480</span>
<tr><td>
FrameRate
<td colspan=2><input type="range" id="frameRate" min="0" max="60" value="30"
onchange="showValue(this.id, this.value)">
<td><span id="frameRateDisplay">30</span>
</table>
<input type="submit" name="capture" value="Capture!" onclick="openCamera()">
</td>
<td align="top">
<h2>addStream constraints</h2>
Maximum bitrate
<input type="range" id="bandwidth" min="0" max="2000" value="1000"
onchange="showValue(this.id, this.value)">
<span id="bandwidthDisplay">1000</span>
<br>
<input type="submit" name="connect" value="Connect!" onclick="connect()">
</td>
</tr>
<tr>
<td>
<video id="local-video" autoplay width=400 muted="true"></video>
</td>
<td>
<video id="remote-video" autoplay width=400></video>
</td>
<tr>
<td><span id="local-video-stats"></span>
<td><span id="remote-video-stats"></span>
<br>
<span id="bitrate">Bitrate unknown</span>
</td>
</tr>
<tr>
<td><pre><span id="cameraConstraints"></span></pre>
<td><pre><span id="addStreamConstraints"></span></pre>
</table>
<h2>Statistics report display</h2>
<table>
<tr>
<th>Sender side<th>Receiver side
<tr>
<td align="top"><div id="senderstats">Stats will appear here.</div>
<td align="top"><div id="receiverstats">Stats will appear here.</div>
</table>
</body>
</html>

View File

@ -1,85 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<!-- This sample demonstrates calling createOffer to get a SDP blob that
indicates the capabilities of the PeerConnection. -->
<title>Show createOffer Output Demo</title>
<script src="../../base/adapter.js"></script>
<style>
button {
font: 18px sans-serif;
padding: 8px;
}
textarea {
font-family: monospace;
margin: 2px;
width:480px;
height:640px;
}
</style>
</head>
<body>
<h1>WebRTC createOffer Test Page</h1>
<p>This page tests the createOffer method for a WebRTC implementation. It
creates a PeerConnection, and then prints out the SDP generated by
createOffer, with the number of desired audio MediaStreamTracks and the
checked createOffer constraints. Currently, only audio tracks can be added,
as there is no programmatic way to generate video tracks. (Web Audio is
used to generate the audio tracks.)</p>
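<p>As a rough sketch, the call this page makes has the following shape (legacy
callback-style API with optional constraints, as used in the script below; the actual
values depend on the checkboxes):</p>
<pre>
var pc = new RTCPeerConnection(null);
pc.createOffer(function(offer) { console.log(offer.sdp); },  // success callback
               null,                                         // error callback
               {optional: [{OfferToReceiveAudio: true},
                           {OfferToReceiveVideo: false}]});
</pre>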
<h3>Tracks</h3>
<p>Number of Audio Tracks <input id="num-audio-tracks" value="0"></p>
<h3>Constraints:</h3>
<input id="audio" type="checkbox">Offer To Receive Audio</input><br>
<input id="video" type="checkbox">Offer To Receive Video</input><br>
<input id="vad" type="checkbox">Voice Activity Detection</input><br>
<input id="restart" type="checkbox">Ice Restart</input><br>
<button id="start" onclick="createOffer()">Create Offer</button><br>
<br>
<textarea id="output"></textarea>
<script>
var numAudioTracks = document.getElementById('num-audio-tracks');
var audio = document.getElementById('audio');
var video = document.getElementById('video');
var vad = document.getElementById('vad');
var restart = document.getElementById('restart');
var output = document.getElementById('output');
var pc = new RTCPeerConnection(null);
var wacx = new webkitAudioContext();
function createOffer() {
var numRequestedAudioTracks = numAudioTracks.value;
while (numRequestedAudioTracks < pc.getLocalStreams().length) {
pc.removeStream(pc.getLocalStreams()[pc.getLocalStreams().length - 1]);
}
while (numRequestedAudioTracks > pc.getLocalStreams().length) {
// Create some dummy audio streams using Web Audio.
// Note that this fails if you try to do more than one track in Chrome
// right now.
var dst = wacx.createMediaStreamDestination();
pc.addStream(dst.stream);
}
var offerConstraints = {
"optional": [
{ "OfferToReceiveAudio": audio.checked },
{ "OfferToReceiveVideo": video.checked },
]
};
// These constraints confuse Firefox, even if declared as optional.
if (webrtcDetectedBrowser != "Firefox") {
offerConstraints.optional.push(
{ "VoiceActivityDetection": vad.checked });
offerConstraints.optional.push(
{ "IceRestart": restart.checked });
}
pc.createOffer(gotDescription, null, offerConstraints);
}
function gotDescription(desc) {
pc.setLocalDescription(desc);
output.value = desc.sdp;
}
</script>
</body>
</html>

View File

@ -1,240 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<title>Data Channel Demo 1</title>
<style>
button {
font: 18px sans-serif;
padding: 8px;
}
textarea {
font-family: monospace;
margin: 2px;
height: 400px;
width: 300px;
}
div#send {
float: left;
margin-right: 20px;
}
div#receive {
}
div#sendreceive {
margin: 0 0 20px 0;
}
h2 {
margin: 0 0 10px 0;
}
</style>
</head>
<body>
<div id="sendreceive">
<div id="send">
<h2>Send data</h2>
<textarea id="dataChannelSendId" rows="5" cols="15" disabled="true"
placeholder="Press Start, enter some text, then press Send Data.">
</textarea>
</div>
<div id="receive">
<h2>Received Data</h2>
<textarea id="dataChannelReceiveId" rows="5" cols="15" disabled="true">
</textarea>
</div>
</div>
<form>
<p>Choose SCTP or RTP for transmitting data.</p>
<input type="radio" id="useSctp" name="transportbtn" checked/>
<label for="useSctp">Use SCTP</label>
<input type="radio" id="useRtp" name="transportbtn"/>
<label for="useRtp">Use RTP</label>
</form>
<button id="startButton">Start</button>
<button id="sendButton" disabled>Send Data</button>
<button id="closeButton" disabled>Stop</button>
<!-- Load the polyfill to switch-hit between Chrome and Firefox -->
<script src='../../base/adapter.js'></script>
<script>
var pc1, pc2, sendChannel, receiveChannel, pcConstraint, dataConstraint;
var dataChannelSend = document.getElementById("dataChannelSendId");
var dataChannelReceive = document.getElementById("dataChannelReceiveId");
var sctp_select = document.getElementById('useSctp');
var rtp_select = document.getElementById('useRtp');
var startButton = document.querySelector('button#startButton');
var sendButton = document.querySelector('button#sendButton');
var closeButton = document.querySelector('button#closeButton');
startButton.onclick = createConnection;
sendButton.onclick = sendData;
closeButton.onclick = closeDataChannels;
rtp_select.onclick = enableStartButton;
sctp_select.onclick = enableStartButton;
function enableStartButton() {
startButton.disabled = false;
}
function disableSendButton() {
sendButton.disabled = true;
}
rtp_select.onclick = sctp_select.onclick = function() {
dataChannelReceive.value = '';
dataChannelSend.value = '';
disableSendButton();
enableStartButton();
};
function createConnection() {
dataChannelSendId.placeholder = "";
var servers = null;
pcConstraint = null;
dataConstraint = null;
if (sctp_select.checked &&
((webrtcDetectedBrowser === 'chrome' && webrtcDetectedVersion >= 31) ||
webrtcDetectedBrowser === 'firefox')) {
// SCTP is supported from Chrome M31 and is supported in FF.
// No need to pass DTLS constraint as it is on by default in Chrome M31.
// For SCTP, reliable and ordered is true by default.
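// Illustrative note (not used by this sample): a non-default channel could be
// requested by passing an RTCDataChannelInit-style dictionary as dataConstraint,
// e.g. dataConstraint = {ordered: false, maxRetransmits: 0};
// before createDataChannel() is called below.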
trace('Using SCTP based Data Channels');
} else {
pcConstraint = {optional: [{RtpDataChannels: true}]};
if (!rtp_select.checked) {
// Use rtp data channels for chrome versions older than M31.
trace('Using RTP based Data Channels, ' +
'as you are on a version of Chrome older than M31.');
alert('Reverting to RTP based data channels, ' +
'as you are on a version of Chrome older than M31.');
rtp_select.checked = true;
}
}
pc1 = new RTCPeerConnection(servers, pcConstraint);
trace('Created local peer connection object pc1');
try {
// Data Channel api supported from Chrome M25.
// You might need to start chrome with --enable-data-channels flag.
sendChannel = pc1.createDataChannel("sendDataChannel", dataConstraint);
trace('Created send data channel');
} catch (e) {
alert('Failed to create data channel. ' +
'You need Chrome M25 or later with --enable-data-channels flag');
trace('Create Data channel failed with exception: ' + e.message);
}
pc1.onicecandidate = iceCallback1;
sendChannel.onopen = onSendChannelStateChange;
sendChannel.onclose = onSendChannelStateChange;
pc2 = new RTCPeerConnection(servers, pcConstraint);
trace('Created remote peer connection object pc2');
pc2.onicecandidate = iceCallback2;
pc2.ondatachannel = receiveChannelCallback;
pc1.createOffer(gotDescription1, onCreateSessionDescriptionError);
startButton.disabled = true;
closeButton.disabled = false;
}
function onCreateSessionDescriptionError(error) {
trace('Failed to create session description: ' + error.toString());
}
function sendData() {
var data = dataChannelSend.value;
sendChannel.send(data);
trace('Sent Data: ' + data);
}
function closeDataChannels() {
trace('Closing data Channels');
sendChannel.close();
trace('Closed data channel with label: ' + sendChannel.label);
receiveChannel.close();
trace('Closed data channel with label: ' + receiveChannel.label);
pc1.close();
pc2.close();
pc1 = null;
pc2 = null;
trace('Closed peer connections');
startButton.disabled = false;
sendButton.disabled = true;
closeButton.disabled = true;
dataChannelSend.value = "";
dataChannelReceive.value = "";
dataChannelSend.disabled = true;
}
function gotDescription1(desc) {
pc1.setLocalDescription(desc);
trace('Offer from pc1 \n' + desc.sdp);
pc2.setRemoteDescription(desc);
pc2.createAnswer(gotDescription2, onCreateSessionDescriptionError);
}
function gotDescription2(desc) {
pc2.setLocalDescription(desc);
trace('Answer from pc2 \n' + desc.sdp);
pc1.setRemoteDescription(desc);
}
function iceCallback1(event) {
trace('local ice callback');
if (event.candidate) {
pc2.addIceCandidate(event.candidate,
onAddIceCandidateSuccess, onAddIceCandidateError);
trace('Local ICE candidate: \n' + event.candidate.candidate);
}
}
function iceCallback2(event) {
trace('remote ice callback');
if (event.candidate) {
pc1.addIceCandidate(event.candidate,
onAddIceCandidateSuccess, onAddIceCandidateError);
trace('Remote ICE candidate: \n ' + event.candidate.candidate);
}
}
function onAddIceCandidateSuccess() {
trace('AddIceCandidate success.');
}
function onAddIceCandidateError(error) {
trace('Failed to add Ice Candidate: ' + error.toString());
}
function receiveChannelCallback(event) {
trace('Receive Channel Callback');
receiveChannel = event.channel;
receiveChannel.onmessage = onReceiveMessageCallback;
receiveChannel.onopen = onReceiveChannelStateChange;
receiveChannel.onclose = onReceiveChannelStateChange;
}
function onReceiveMessageCallback(event) {
trace('Received Message');
dataChannelReceive.value = event.data;
}
function onSendChannelStateChange() {
var readyState = sendChannel.readyState;
trace('Send channel state is: ' + readyState);
if (readyState == "open") {
dataChannelSend.disabled = false;
dataChannelSendId.focus();
sendButton.disabled = false;
closeButton.disabled = false;
} else {
dataChannelSend.disabled = true;
sendButton.disabled = true;
closeButton.disabled = true;
}
}
function onReceiveChannelStateChange() {
var readyState = receiveChannel.readyState;
trace('Receive channel state is: ' + readyState);
}
</script>
</body>
</html>

View File

@ -1,211 +0,0 @@
<!DOCTYPE html>
<html>
<!-- Load the polyfill to switch-hit between Chrome and Firefox -->
<script src='../../base/adapter.js'></script>
<head>
<title>Device Switch Demo</title>
<style>
h2 {
font-size: 1em;
font-family: sans-serif;
margin: 0 0 0.5em 0;
padding: 0;
}
video {
width:40%;
}
</style>
</head>
<body>
<video id="vid1" autoplay muted></video>
<video id="vid2" autoplay></video>
<div>
<h2>Select an audio and video source, then click Start.</h2>
Audio source: <select id="audiosrc"></select>
Video source: <select id="videosrc"></select><br>
<button id="btn1">Start</button>
<button id="btn2">Call</button>
<button id="btn3">Hang Up</button>
</div>
<script>
var btn1 = document.querySelector('button#btn1');
var btn2 = document.querySelector('button#btn2');
var btn3 = document.querySelector('button#btn3');
var audio_select = document.getElementById("audiosrc");
var video_select = document.getElementById("videosrc");
//audio_select.onchange = changeDevices;
//video_select.onchange = changeDevices;
btn1.onclick = start;
btn2.onclick = call;
btn3.onclick = hangup;
btn1.disabled = false;
btn2.disabled = true;
btn3.disabled = true;
var pc1,pc2;
var localstream;
var sdpConstraints = {'mandatory': {
'OfferToReceiveAudio':true,
'OfferToReceiveVideo':true }};
refreshSources();
function refreshSources() {
if (webrtcDetectedVersion >= 30) {
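// gotSources receives a list of SourceInfo objects, each with an id,
// a kind ('audio' or 'video') and a label (which may be empty until
// the user has granted media permissions).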
MediaStreamTrack.getSources(gotSources);
} else {
alert('You need Chrome M30 or later to test the device enumeration API.');
}
}
function gotSources(sourceInfos) {
var audio_count = 0;
var video_count = 0;
audio_select.disabled = true;
video_select.disabled = true;
audio_select.innerHTML = '';
video_select.innerHTML = '';
for (var i = 0; i < sourceInfos.length; i++) {
var option = document.createElement("option");
option.value = sourceInfos[i].id;
option.text = sourceInfos[i].label;
if (sourceInfos[i].kind === 'audio') {
audio_count++;
if (option.text === '') {
option.text = 'Audio ' + audio_count;
}
audio_select.appendChild(option);
} else {
video_count++;
if (option.text === '') {
option.text = 'Video ' + video_count;
}
video_select.appendChild(option);
}
}
audio_select.disabled = false;
video_select.disabled = false;
}
function start() {
changeDevices();
btn1.disabled = true;
btn2.disabled = false;
audio_select.disabled = true;
video_select.disabled = true;
}
function changeDevices() {
var audio_source = null;
var video_source = null;
if (audio_select.options.length > 0) {
audio_source = audio_select.options[audio_select.selectedIndex].value;
trace('selected audio_source :' + audio_source);
}
if (video_select.options.length > 0 ) {
video_source = video_select.options[video_select.selectedIndex].value;
trace('selected video_source :' + video_source);
}
setWebcamAndMic(audio_source, video_source);
}
function setWebcamAndMic(audio_source, video_source) {
trace("Requesting local stream");
// Call into getUserMedia via the polyfill (adapter.js).
getUserMedia({ audio: {optional: [{sourceId: audio_source}]},
video: {optional: [{sourceId: video_source}]}
}, gotStream, function() {});
}
function gotStream(stream) {
trace("Received local stream");
// Call the polyfill wrapper to attach the media stream to this element.
attachMediaStream(vid1, stream);
localstream = stream;
}
function call() {
btn2.disabled = true;
btn3.disabled = false;
trace("Starting call");
videoTracks = localstream.getVideoTracks();
audioTracks = localstream.getAudioTracks();
if (videoTracks.length > 0) {
trace('Using Video device: ' + videoTracks[0].label);
}
if (audioTracks.length > 0) {
trace('Using Audio device: ' + audioTracks[0].label);
}
var servers = null;
pc1 = new RTCPeerConnection(servers);
trace("Created local peer connection object pc1");
pc1.onicecandidate = iceCallback1;
pc2 = new RTCPeerConnection(servers);
trace("Created remote peer connection object pc2");
pc2.onicecandidate = iceCallback2;
pc2.onaddstream = gotRemoteStream;
pc1.addStream(localstream);
trace("Adding Local Stream to peer connection");
pc1.createOffer(gotDescription1);
}
function gotDescription1(desc) {
pc1.setLocalDescription(desc);
trace("Offer from pc1 \n" + desc.sdp);
pc2.setRemoteDescription(desc);
// Since the "remote" side has no media stream we need
// to pass in the right constraints in order for it to
// accept the incoming offer of audio and video.
pc2.createAnswer(gotDescription2, null, sdpConstraints);
}
function gotDescription2(desc) {
pc2.setLocalDescription(desc);
trace("Answer from pc2 \n" + desc.sdp);
pc1.setRemoteDescription(desc);
}
function hangup() {
trace("Ending call");
localstream.stop();
pc1.close();
pc2.close();
pc1 = null;
pc2 = null;
btn3.disabled = true;
btn1.disabled = false;
audio_select.disabled = false;
video_select.disabled = false;
}
function gotRemoteStream(e) {
// Call the polyfill wrapper to attach the media stream to this element.
attachMediaStream(vid2, e.stream);
trace("Received remote stream");
}
function iceCallback1(event) {
if (event.candidate) {
pc2.addIceCandidate(new RTCIceCandidate(event.candidate),
onAddIceCandidateSuccess, onAddIceCandidateError);
trace("Local ICE candidate: \n" + event.candidate.candidate);
}
}
function iceCallback2(event) {
if (event.candidate) {
pc1.addIceCandidate(new RTCIceCandidate(event.candidate),
onAddIceCandidateSuccess, onAddIceCandidateError);
trace("Remote ICE candidate: \n " + event.candidate.candidate);
}
}
function onAddIceCandidateSuccess() {
trace("AddIceCandidate success.");
}
function onAddIceCandidateError(error) {
trace("Failed to add Ice Candidate: " + error.toString());
}
</script>
</body>
</html>

View File

@ -1,208 +0,0 @@
<html>
<head>
<title>PeerConnection DTMF Demo 1</title>
<!-- Load the polyfill to switch-hit between Chrome and Firefox -->
<script src="../../base/adapter.js"></script>
<style>
button {
font: 18px sans-serif;
padding: 8px;
}
#left { position: absolute; left: 20px; top: 0; width: 50%; }
#right { position: absolute; right: 0; top: 0; width: 50%; }
</style>
</head>
<body onload="onload()">
<div id="left">
<audio id="audio1" autoplay="autoplay" muted="true"></audio>
<h3>Send Dtmf Tones</h3>
<div id="dialingPad"></div>
<br><br>
duration:
<input type="text" id="dtmf-tones-duration" size="10" value="500"/>
<br><br>
tone-gap:
<input type="text" id="dtmf-tones-gap" size="10" value="50"/>
<br><br>
tones:
<input type="text" id="dtmf-tones" size="10"
value="1199##9,6633221,9966332,9966332,1199##9,6633221"/>
<button id="sendTones"
onclick="sendTone(document.getElementById('dtmf-tones').value)">Send tones
</button>
<br><br>
<button id="callBtn" onclick="call()">Call</button>
<button id="hangBtn" onclick="hangup()">Hang Up</button>
<br><br>
</div>
<div id="right">
<audio id="audio2" autoplay="autoplay"></audio>
<h3>Sent Tones</h3>
<textarea id="dtmfTonesSent" rows="12" cols="40" disabled="true">
</textarea><br>
</div>
<script>
callBtn.disabled = false;
hangBtn.disabled = true;
sendTones.disabled = true;
var pc1 = null,pc2 = null;
var localstream = null;
var dtmfSender = null;
var sdpConstraints = {'mandatory': {
'OfferToReceiveAudio':true,
'OfferToReceiveVideo':false }};
function gotStream(stream){
trace("Received local stream");
// Call the polyfill wrapper to attach the media stream to this element.
localstream = stream;
audioTracks = localstream.getAudioTracks();
if (audioTracks.length > 0)
trace('Using Audio device: ' + audioTracks[0].label);
pc1.addStream(localstream);
trace("Adding Local Stream to peer connection");
pc1.createOffer(gotDescription1, onCreateSessionDescriptionError);
}
function onCreateSessionDescriptionError(error) {
trace('Failed to create session description: ' + error.toString());
}
function call() {
trace("Starting call");
var servers = null;
var pc_constraints = {"optional": []};
pc1 = new RTCPeerConnection(servers,pc_constraints);
trace("Created local peer connection object pc1");
pc1.onicecandidate = iceCallback1;
pc2 = new RTCPeerConnection(servers,pc_constraints);
trace("Created remote peer connection object pc2");
pc2.onicecandidate = iceCallback2;
pc2.onaddstream = gotRemoteStream;
trace("Requesting local stream");
// Call into getUserMedia via the polyfill (adapter.js).
getUserMedia({audio:true, video:false},
gotStream, function() {});
callBtn.disabled = true;
hangBtn.disabled = false;
sendTones.disabled = false;
}
function gotDescription1(desc){
pc1.setLocalDescription(desc);
trace("Offer from pc1 \n" + desc.sdp);
pc2.setRemoteDescription(desc);
// Since the "remote" side has no media stream we need
// to pass in the right constraints in order for it to
// accept the incoming offer of audio.
pc2.createAnswer(gotDescription2, onCreateSessionDescriptionError,
sdpConstraints);
}
function gotDescription2(desc){
// Setting PCMU as the preferred codec.
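// (Payload type 0 is the static RTP payload type for PCMU; 126 is presumably the
// telephone-event/DTMF payload type in this build, kept so DTMF still negotiates.)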
desc.sdp = desc.sdp.replace(/m=.*\r\n/, "m=audio 1 RTP/SAVPF 0 126\r\n");
// Workaround for issue 1603.
desc.sdp = desc.sdp.replace(/.*fmtp.*\r\n/g, "");
pc2.setLocalDescription(desc);
trace("Answer from pc2 \n" + desc.sdp);
pc1.setRemoteDescription(desc);
}
function hangup() {
trace("Ending call");
pc1.close();
pc2.close();
pc1 = null;
pc2 = null;
localstream = null;
dtmfSender = null;
callBtn.disabled = false;
hangBtn.disabled = true;
sendTones.disabled = true;
document.getElementById("dtmfTonesSent").value = "Dtmf de-activated\n";
}
function gotRemoteStream(e){
// Call the polyfill wrapper to attach the media stream to this element.
attachMediaStream(audio2, e.stream);
trace("Received remote stream");
enableDtmfSender();
}
function iceCallback1(event){
if (event.candidate) {
pc2.addIceCandidate(new RTCIceCandidate(event.candidate),
onAddIceCandidateSuccess, onAddIceCandidateError);
trace("Local ICE candidate: \n" + event.candidate.candidate);
}
}
function iceCallback2(event){
if (event.candidate) {
pc1.addIceCandidate(new RTCIceCandidate(event.candidate),
onAddIceCandidateSuccess, onAddIceCandidateError);
trace("Remote ICE candidate: \n " + event.candidate.candidate);
}
}
function onAddIceCandidateSuccess() {
trace("AddIceCandidate success.");
}
function onAddIceCandidateError(error) {
trace("Failed to add Ice Candidate: " + error.toString());
}
function enableDtmfSender(){
document.getElementById("dtmfTonesSent").value = "Dtmf activated\n";
if (localstream != null) {
var local_audio_track = localstream.getAudioTracks()[0];
dtmfSender = pc1.createDTMFSender(local_audio_track);
trace("Created DTMF Sender\n");
dtmfSender.ontonechange = dtmfOnToneChange;
}
else {
trace("No Local Stream to create DTMF Sender\n");
}
}
function dtmfOnToneChange(tone){
if (tone) {
trace("Sent Dtmf tone: \t" + tone.tone);
document.getElementById("dtmfTonesSent").value += tone.tone + '\t';
}
}
function sendTone(tones){
if (dtmfSender) {
duration = document.getElementById("dtmf-tones-duration").value;
gap = document.getElementById("dtmf-tones-gap").value;
dtmfSender.insertDTMF(tones, duration, gap);
}
}
function createDialingPad() {
var tones = '1234567890*#ABCD';
var dialingPad = document.getElementById('dialingPad');
for (var i = 0; i < tones.length; ++i) {
var tone = tones.charAt(i);
dialingPad.innerHTML += '<button id="' +
tone + '" onclick="sendTone(\'' + tone +
'\')" style="height:40px; width: 30px">' + tone + '</button>';
if ((i + 1) % 4 == 0) {
dialingPad.innerHTML += '<br>';
}
}
}
function onload() {
createDialingPad();
}
</script>
</body>
</html>

View File

@ -1,148 +0,0 @@
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<html>
<head>
<script type="text/javascript" src="../js/ccv.js"></script>
<script type="text/javascript" src="../js/face.js"></script>
<!-- Load the polyfill to switch-hit between Chrome and Firefox -->
<script src="../../base/adapter.js"></script>
<style type="text/css">
* { margin:0; padding:0; } /* to remove the top and left whitespace */
html, body { width:100%; height:100%; } /* just to be sure these are full screen*/
body {font-family: 'Helvetica';background-color: #000000; }
a:link { color: #ffffff; } a:visited {color: #ffffff; }
#localCanvas {
display: block;
position: absolute;
width: 100%;
height: 100%;
}
#localVideo {
display: block;
position: absolute;
top: 0;
bottom: 0;
left: 0;
right: 0;
width: 100%;
height: 100%;
-webkit-transition-property: opacity;
-webkit-transition-duration: 2s;
opacity: 0;
}
#logo {
display: block;
top:4px;
right:4px;
position:absolute;
float:right;
/* opacity: 0.8; */
}
#credit {
display: block;
top:28px;
right:4px;
position:absolute;
float:right;
font-size:10px;
}
</style>
<title>WebRTC Face Reco Demo Application</title>
</head>
<body>
<script type="text/javascript">
var localVideo;
var localCanvas;
initialize = function() {
localVideo = document.getElementById("localVideo");
localCanvas = document.getElementById("localCanvas");
try {
getUserMedia({video:true}, onGotStream, onFailedStream);
//trace("Requested access to local media");
} catch (e) {
alert("getUserMedia error " + e);
//trace_e(e, "getUserMedia error");
}
}
poll = function() {
var w = localVideo.videoWidth;
var h = localVideo.videoHeight;
var canvas = document.createElement('canvas');
canvas.width = w;
canvas.height = h;
var ctx = canvas.getContext('2d');
ctx.drawImage(localVideo, 0, 0, w, h);
var comp = ccv.detect_objects({ "canvas" : ccv.grayscale(canvas),
"cascade" : cascade,
"interval" : 5,
"min_neighbors" : 1 });
/* draw detected area */
localCanvas.width = localVideo.clientWidth;
localCanvas.height = localVideo.clientHeight;
var ctx2 = localCanvas.getContext('2d');
ctx2.lineWidth = 2;
ctx2.lineJoin = "round";
ctx2.clearRect (0, 0, localCanvas.width,localCanvas.height);
var x_offset = 0, y_offset = 0, x_scale = 1, y_scale = 1;
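// The <video> element letter-/pillar-boxes the frame to preserve its aspect ratio,
// so detection coordinates must be mapped from video pixels to element pixels,
// compensating for the bars on whichever axis has them.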
if (localVideo.clientWidth * localVideo.videoHeight > localVideo.videoWidth * localVideo.clientHeight) {
x_offset = (localVideo.clientWidth - localVideo.clientHeight *
localVideo.videoWidth / localVideo.videoHeight) / 2;
} else {
y_offset = (localVideo.clientHeight - localVideo.clientWidth *
localVideo.videoHeight / localVideo.videoWidth) / 2;
}
x_scale = (localVideo.clientWidth - x_offset * 2) / localVideo.videoWidth;
y_scale = (localVideo.clientHeight - y_offset * 2) / localVideo.videoHeight;
for (var i = 0; i < comp.length; i++) {
comp[i].x = comp[i].x * x_scale + x_offset;
comp[i].y = comp[i].y * y_scale + y_offset;
comp[i].width = comp[i].width * x_scale;
comp[i].height = comp[i].height * y_scale;
var opacity = 0.1;
if (comp[i].confidence > 0) {
opacity += comp[i].confidence / 10;
if (opacity > 1.0) opacity = 1.0;
}
//ctx2.strokeStyle = "rgba(255,0,0," + opacity * 255 + ")";
ctx2.lineWidth = opacity * 10;
ctx2.strokeStyle = "rgb(255,0,0)";
ctx2.strokeRect(comp[i].x, comp[i].y, comp[i].width, comp[i].height);
}
setTimeout(poll, 1000);
}
onGotStream = function(stream) {
localVideo.style.opacity = 1;
attachMediaStream(localVideo, stream);
localStream = stream;
//trace("User has granted access to local media. url = " + url);
setTimeout(poll, 2000);
}
onFailedStream = function(error) {
alert("Failed to get access to local media. Error code was " + error.code + ".");
//trace_warning("Failed to get access to local media. Error code was " + error.code);
}
setTimeout(initialize, 1);
</script>
<video id="localVideo" autoplay="autoplay" muted="true"></video>
<canvas width="1000" height="1000" id="localCanvas"></canvas>
<a href="http://www.webrtc.org"><img id="logo" alt="WebRTC" src="../images/webrtc_black_20p.png"></a>
<a href="http://liuliu.me/eyes/javascript-face-detection-explained"><div id="credit">JS Face Detect by Liu Liu</div></a>
</body>
</html>

View File

@ -1,35 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<title>getUserMedia Demo 1</title>
<!-- Load the polyfill to switch-hit between Chrome and Firefox -->
<script src="../../base/adapter.js"></script>
<style>
video {
border:5px solid black;
width:480px;
height:360px;
}
button {
font: 18px sans-serif;
padding: 8px;
}
</style>
</head>
<body>
<video id="vid" autoplay="true"></video>
<br>
<button id="btn" onclick="start()">Start</button>
<script>
video = document.getElementById("vid");
function start() {
getUserMedia({video:true}, gotStream, function() {});
btn.disabled = true;
}
function gotStream(stream) {
attachMediaStream(video, stream);
}
</script>
</body>
</html>

View File

@ -1,50 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<title>getUserMedia Demo 2</title>
<!-- Load the polyfill to switch-hit between Chrome and Firefox -->
<script src="../../base/adapter.js"></script>
<style>
video {
border:5px solid black;
width:480px;
height:360px;
}
canvas {
border:5px solid black;
width:480px;
height:360px;
}
button {
font: 18px sans-serif;
padding: 8px;
}
</style>
</head>
<body>
<video id="vid" autoplay="true"></video>
<canvas id="cvs"></canvas>
<br>
<button id="btn1" onclick="start()">Start</button>
<button id="btn2" onclick="snap()">Snapshot</button>
<script>
video = document.getElementById("vid");
canvas = document.getElementById("cvs");
canvas.width = 480;
canvas.height = 360;
btn2.disabled = true;
function start() {
getUserMedia({video:true}, gotStream, function() {});
btn1.disabled = true;
}
function gotStream(stream) {
attachMediaStream(video, stream);
btn2.disabled = false
}
function snap() {
canvas.getContext("2d").drawImage(video, 0, 0, canvas.width, canvas.height);
}
</script>
</body>
</html>

View File

@ -1,76 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<title>getUserMedia Demo 3</title>
<!-- Load the polyfill to switch-hit between Chrome and Firefox -->
<script src="../../base/adapter.js"></script>
<style>
video {
border:5px solid black;
width:480px;
height:360px;
}
canvas {
border:5px solid black;
width:480px;
height:360px;
}
button {
font: 18px sans-serif;
padding: 8px;
}
.grayscale {
-webkit-filter: grayscale(1);
}
.sepia {
-webkit-filter: sepia(1);
}
.invert {
-webkit-filter: invert(1);
}
.blur {
-webkit-filter: blur(3px);
}
</style>
</head>
<body>
<video id="vid" autoplay="true"></video>
<canvas id="cvs"></canvas>
<br>
<button id="btn1" onclick="start()">Start</button>
<button id="btn2" onclick="change()">Change Filter</button>
<button id="btn3" onclick="snap()">Snapshot</button>
<script>
filters = ["", "sepia", "invert", "blur", "grayscale"];
findex = 0;
video = document.getElementById("vid");
canvas = document.getElementById("cvs");
canvas.width = 480;
canvas.height = 360;
btn2.disabled = true;
btn3.disabled = true;
function start() {
getUserMedia({video:true}, gotStream, function() {});
btn1.disabled = true;
}
function gotStream(stream) {
attachMediaStream(video, stream);
btn2.disabled = false;
btn3.disabled = false;
}
function change() {
video.className = '';
findex = (findex + 1) % filters.length;
if (findex != 0)
video.classList.add(filters[findex]);
}
function snap() {
canvas.className = '';
if (findex != 0)
canvas.classList.add(filters[findex]);
canvas.getContext("2d").drawImage(video, 0, 0, canvas.width, canvas.height);
}
</script>
</body>
</html>

View File

@ -1,95 +0,0 @@
<!DOCTYPE html>
<html><head><meta http-equiv="Content-Type" content="text/html; charset=ISO-8859-1">
<title>getUserMedia Demo 4</title>
<style>
video {
border:5px solid black;
width:400px;
height:300px;
}
button {
font: 18px sans-serif;
padding: 8px;
}
</style>
</head>
<body>
<video id="vid1" name="morrallas" autoplay="true"></video>
<video id="vid2" autoplay="true"></video>
<video id="vid3" autoplay="true"></video>
<video id="vid4" autoplay="true"></video>
<br/>
<button id="btn" onclick="start1()">Start Lowres (160x120)</button>
<button id="btn" onclick="start2()">Start Midres (320x240) @15fps</button>
<button id="btn" onclick="start3()">Start NormRes Unconstrained (640x480)</button>
<button id="btn" onclick="start4()">Start HighRes (960x720) @8fps</button>
<script>
video1= document.getElementById("vid1");
var constraints1 ={
"audio": false, "video": { "mandatory": {
"minWidth": "160", "maxWidth": "160", "minHeight": "120", "maxHeight": "120",
"minFrameRate": "30"
},
"optional": []
}
}
video2= document.getElementById("vid2");
var constraints2 ={
"audio": false, "video": { "mandatory": {
"minWidth": "320", "maxWidth": "320", "minHeight": "240", "maxHeight": "240",
"maxFrameRate": "15"
},
"optional": []
}
}
video3 = document.getElementById("vid3");
video4= document.getElementById("vid4");
var constraints4 ={
"audio": false, "video": { "mandatory": {
"minWidth": "960", "maxWidth": "960", "minHeight": "720", "maxHeight": "720",
"maxFrameRate": "8"
},
"optional": []
}
}
function start1() {
navigator.webkitGetUserMedia(constraints1, gotStream1, getUserMediaError);
btn1.disabled = true;
}
function gotStream1(stream) {
video1.src = webkitURL.createObjectURL(stream);
}
function start2() {
navigator.webkitGetUserMedia(constraints2, gotStream2, getUserMediaError);
btn2.disabled = true;
}
function gotStream2(stream) {
video2.src = webkitURL.createObjectURL(stream);
}
function start3() {
navigator.webkitGetUserMedia({video:true}, gotStream3, getUserMediaError);
btn3.disabled = true;
}
function gotStream3(stream) {
video3.src = webkitURL.createObjectURL(stream);
}
function start4() {
navigator.webkitGetUserMedia(constraints4, gotStream4, getUserMediaError);
btn4.disabled = true;
}
function gotStream4(stream) {
video4.src = webkitURL.createObjectURL(stream);
}
function getUserMediaError(e) {
alert('Error during webkitGetUserMedia: '+e);
}
</script>
</body></html>

View File

@ -1,148 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<!-- This sample demonstrates enumeration of candidates for
the specified STUN/TURN server. -->
<title>ICE Candidate Gathering Demo</title>
<script src="../../base/adapter.js"></script>
<style>
body {
font: 14px sans-serif;
}
button {
font: 18px sans-serif;
padding: 8px;
}
select {
margin: 2px;
width:960px;
height:80px;
}
textarea {
font-family: monospace;
margin: 2px;
width:960px;
height:640px;
}
</style>
</head>
<body>
<h1>WebRTC Trickle ICE Test Page</h1>
<p>This page tests the trickle ICE functionality in a WebRTC implementation. It
creates a PeerConnection with the specified ICEServers, and then starts
candidate gathering for a session with a single audio stream. As candidates
are gathered, they are displayed in the text box below, along with an
indication when candidate gathering is complete.</p>
<p>Individual STUN and TURN servers can be added using the Add Server/Remove
Server controls below; in addition, the type of candidates released to the
application can be controlled via the IceTransports constraint.</p>
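<p>In outline, the script below builds a configuration and constraints object of the
following shape (illustrative values; the real ones come from the server list and the
IceTransports radio buttons below):</p>
<pre>
var config = {"iceServers": [{"url": "stun:stun.l.google.com:19302"}]};
var constraints = {"mandatory": {"IceTransports": "relay"}};
var pc = new RTCPeerConnection(config, constraints);
pc.onicecandidate = function(event) { /* candidates appear here as they are gathered */ };
</pre>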
<h3>ICE Servers</h3>
<select id="servers" size="4">
<option value="{&quot;url&quot;:&quot;stun:stun.l.google.com:19302&quot;}">
stun:stun.l.google.com:19302
</option>
</select>
<br>
STUN or TURN URI:
<input id="url" size="64"></input>
<br>
TURN Username:
<input id="username" size="16"></input>
TURN Password:
<input id="password" size="16"></input>
<br>
<button id="add" onclick="addServer()">Add Server</button>
<button id="remove" onclick="removeServer()">Remove Server</button>
<h3>ICE Constraints</h3>
IceTransports value:
<input type="radio" name="transports" value="all" checked> All
<input type="radio" name="transports" value="relay"> Relay
<input type="radio" name="transports" value="none"> None
<br>
<br>
<button id="gather" onclick="start()">Gather Candidates</button>
<br>
<textarea id="output"></textarea>
<script>
var servers = document.getElementById('servers');
var url = document.getElementById('url');
var username = document.getElementById('username');
var password = document.getElementById('password');
var output = document.getElementById('output');
var pc;
var begin;
function addServer() {
var scheme = url.value.split(":")[0];
if (scheme != "stun" && scheme != "turn" && scheme != "turns") {
alert("URI is not valid");
return;
}
// Store the ICE server as a stringified JSON object in opt.value.
var opt = document.createElement("option");
opt.value = JSON.stringify(
createIceServer(url.value, username.value, password.value));
opt.text = url.value + " ";
if (username.value.length || password.value.length) {
opt.text += (" [" + username.value + ":" + password.value + "]");
}
servers.add(opt);
url.value = username.value = password.value = "";
}
function removeServer() {
for (var i = servers.options.length - 1; i >= 0; --i) {
if (servers.options[i].selected) {
servers.remove(i);
}
}
}
function start() {
// Create a PeerConnection with no streams, but force a m=audio line.
// Pass in the STUN/TURN server value from the input boxes.
output.value = "";
var iceServers = [];
for (var i = 0; i < servers.length; ++i) {
iceServers.push(JSON.parse(servers[i].value));
}
var transports = document.getElementsByName("transports");
var iceTransports;
for (var i = 0; i < transports.length; ++i) {
if (transports[i].checked) {
iceTransports = transports[i].value;
break;
}
}
var config = {"iceServers": iceServers };
var constraints = {"mandatory": {"IceTransports":iceTransports}};
trace("Creating new PeerConnection with config=" + JSON.stringify(config) +
", constraints=" + JSON.stringify(constraints));
pc = new RTCPeerConnection(config, constraints);
pc.onicecandidate = iceCallback;
pc.createOffer(gotDescription, null,
{"mandatory": {"OfferToReceiveAudio": true}});
}
function gotDescription(desc) {
begin = performance.now();
pc.setLocalDescription(desc);
}
function iceCallback(event) {
var elapsed = ((performance.now() - begin) / 1000).toFixed(3);
if (event.candidate) {
output.value += (elapsed + ": " + event.candidate.candidate);
} else {
output.value += (elapsed + ": Done");
pc.close();
pc = null;
}
}
</script>
</body>
</html>

View File

@ -1,80 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>Local Audio Rendering Demo</title>
<script type="text/javascript" src="../../base/adapter.js"></script>
<script>
var audioElement;
var buttonStart;
var buttonStop;
var localStream;
$ = function(id) {
return document.getElementById(id);
};
function start() {
var constraints = {audio:true, video:false};
getUserMedia(constraints, gotStream, gotStreamFailed);
buttonStart.disabled = true;
buttonStop.disabled = false;
}
function stop() {
buttonStart.disabled = false;
buttonStop.disabled = true;
localStream.stop();
}
function gotStream(stream) {
videoTracks = stream.getVideoTracks();
audioTracks = stream.getAudioTracks();
if (audioTracks.length == 1 && videoTracks.length == 0) {
console.log('gotStream({audio:true, video:false})');
console.log('Using audio device: ' + audioTracks[0].label);
attachMediaStream(audioElement, stream);
stream.onended = function() {
console.log('stream.onended');
buttonStart.disabled = false;
buttonStop.disabled = true;
};
localStream = stream;
} else {
alert('The media stream contains an invalid number of audio tracks.');
stream.stop();
}
}
function gotStreamFailed(error) {
buttonStart.disabled = false;
buttonStop.disabled = true;
alert('Failed to get access to local media. Error code: ' + error.code);
}
function onload() {
audioElement = $('audio');
buttonStart = $('start');
buttonStop = $('stop');
buttonStart.disabled = false;
buttonStop.disabled = true;
}
</script>
</head>
<body onload="onload()">
<h2>Rendering of a local media stream using &lt;audio&gt;</h2>
<p>Demonstrates usage of a local media stream connected to an HTML5 audio tag.<br>
Press Start, select a microphone and listen to your own voice in loopback.</p>
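<p>The core of the sample is just two calls (a sketch using the adapter.js helpers
loaded above):</p>
<pre>
getUserMedia({audio: true, video: false}, function(stream) {
  attachMediaStream(document.getElementById('audio'), stream);  // render locally
}, function(error) { console.log(error); });
</pre>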
<style>
button {
font: 14px sans-serif;
padding: 8px;
}
</style>
<audio id="audio" autoplay="autoplay" controls="controls"></audio><br><br>
<button id="start" onclick="start()">Start</button>
<button id="stop" onclick="stop()">Stop</button>
</body>
</html>

View File

@ -1,172 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>Local Audio Rendering Demo</title>
<script type="text/javascript" src="../../base/adapter.js"></script>
<script>
var buttonStart;
var buttonStop;
var localStream;
var reporter;
var audioContext;
var soundMeter;
// Meter class that generates a number correlated to audio volume.
// The meter class itself displays nothing, but it makes the
// instantaneous and time-decaying volumes available for inspection.
// It also reports on the fraction of samples that were at or near
// the top of the measurement range.
function SoundMeter(context) {
this.context = context
this.volume = 0.0;
this.slow_volume = 0.0;
this.clip = 0.0;
this.script = context.createScriptProcessor(2048, 1, 1);
that = this;
this.script.onaudioprocess = function(event) {
var input = event.inputBuffer.getChannelData(0);
var i;
var sum = 0.0;
var clipcount = 0;
for (i = 0; i < input.length; ++i) {
sum += input[i] * input[i];
if (Math.abs(input[i]) > 0.99) {
clipcount += 1
}
}
that.volume = Math.sqrt(sum / input.length);
that.slow_volume = 0.95 * that.slow_volume + 0.05 * that.volume;
that.clip = clipcount / input.length;
}
}
SoundMeter.prototype.connectToSource = function(stream) {
console.log('SoundMeter connecting');
this.mic = this.context.createMediaStreamSource(stream);
this.mic.connect(this.script);
// Necessary to make sample run, but should not be.
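// (At the time, Chrome would not pull a ScriptProcessorNode whose output was not
// connected into the graph, so onaudioprocess would never fire without this.)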
this.script.connect(this.context.destination);
}
SoundMeter.prototype.stop = function() {
this.mic.disconnect();
this.script.disconnect();
}
// End of SoundMeter class.
$ = function(id) {
return document.getElementById(id);
};
function start() {
var constraints = {audio:true, video:false};
getUserMedia(constraints, gotStream, gotStreamFailed);
buttonStart.disabled = true;
buttonStop.disabled = false;
}
function stop() {
buttonStart.disabled = false;
buttonStop.disabled = true;
localStream.stop();
clearInterval(reporter);
soundMeter.stop();
}
function gotStream(stream) {
var videoTracks = stream.getVideoTracks();
var audioTracks = stream.getAudioTracks();
if (audioTracks.length == 1 && videoTracks.length == 0) {
console.log('gotStream({audio:true, video:false})');
console.log('Using audio device: ' + audioTracks[0].label);
stream.onended = function() {
console.log('stream.onended');
buttonStart.disabled = false;
buttonStop.disabled = true;
};
localStream = stream;
soundMeter = new SoundMeter(audioContext);
soundMeter.connectToSource(stream);
// Set up reporting of the volume every 0.2 seconds.
var meter = $('volume');
var decaying_meter = $('decaying_volume');
var meter_canvas = $('graphic_volume').getContext('2d');
var meter_slow = $('graphic_slow').getContext('2d');
var meter_clip = $('graphic_clip').getContext('2d');
reporter = setInterval(function() {
meter.textContent = soundMeter.volume.toFixed(2);
decaying_meter.textContent = soundMeter.slow_volume.toFixed(2);
paintMeter(meter_canvas, soundMeter.volume);
paintMeter(meter_slow, soundMeter.slow_volume);
paintMeter(meter_clip, soundMeter.clip);
}, 200);
} else {
alert('The media stream contains an invalid number of tracks:'
+ audioTracks.length + ' audio ' + videoTracks.length + ' video');
stream.stop();
}
}
function gotStreamFailed(error) {
buttonStart.disabled = false;
buttonStop.disabled = true;
alert('Failed to get access to local media. Error code: ' + error.code);
}
function onload() {
try {
window.AudioContext = window.AudioContext || window.webkitAudioContext;
audioContext = new AudioContext();
} catch(e) {
alert('Web Audio API not found');
}
buttonStart = $('start');
buttonStop = $('stop');
buttonStart.disabled = false;
buttonStop.disabled = true;
}
function paintMeter(context, number) {
context.clearRect(0, 0, 400, 20);
context.fillStyle = 'red';
context.fillRect(0, 0, number * 400, 20);
}
</script>
<style>
button {
font: 14px sans-serif;
padding: 8px;
}
canvas {
border:1px solid #000000;
}
</style>
</head>
<body onload="onload()">
<h2>Measuring the volume of an audio stream using WebAudio</h2>
<p>Demonstrates measuring the volume of a local media stream
using WebAudio.<br>
Press Start, select a microphone, and watch the numbers
change as you speak.</p>
The "instant" volume changes approximately every 50 ms; the "slow"
volume approximates the average volume over about a second.
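(With the 2048-sample ScriptProcessor used here, each update covers about 46 ms
(2048/44100 samples), assuming a 44.1 kHz context, and the 0.95/0.05 smoothing has a
time constant of about 20 updates, i.e. roughly one second.)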
<br>
Note that you will NOT hear your own voice; use the
<a href="local-audio-rendering.html">local audio rendering demo</a>
for that.
<p>
<button id="start" onclick="start()">Start</button>
<button id="stop" onclick="stop()">Stop</button><br><br>
Volume (instant): <span id="volume">Not set</span><br>
Volume (slow): <span id="decaying_volume">Not set</span><br>
<canvas id="graphic_volume" width="400" height="20"></canvas> Volume<br>
<canvas id="graphic_slow" width="400" height="20"></canvas> Slow<br>
<canvas id="graphic_clip" width="400" height="20"></canvas> Clipping
</body>
</html>

View File

@ -1,98 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<title>PeerConnection Demo 1</title>
<!-- Load the polyfill to switch-hit between Chrome and Firefox -->
<script src="../../base/adapter.js"></script>
<script src="../js/videopipe.js"></script>
<style>
video {
border:5px solid black;
width:480px;
height:360px;
}
button {
font: 18px sans-serif;
padding: 8px;
}
textarea {
font-family: monospace;
margin: 2px;
width:480px;
height:640px;
}
</style>
</head>
<body>
<video id="vid1" autoplay></video>
<video id="vid2" autoplay></video>
<br>
<button id="btn1" onclick="start()">Start</button>
<button id="btn2" onclick="call()">Call</button>
<button id="btn3" onclick="addrelay()">Insert relay</button>
<button id="btn4" onclick="hangup()">Hang Up</button>
<br>
<xtextarea id="ta1"></textarea>
<script>
btn1.disabled = false;
btn2.disabled = true;
btn3.disabled = true;
btn4.disabled = true;
var pipes = new Array();
var localstream;
var remotestream;
function gotStream(stream){
trace("Received local stream");
attachMediaStream(vid1, stream);
localstream = stream;
btn2.disabled = false;
}
function gotRemoteStream(stream){
remotestream = stream;
attachMediaStream(vid2, stream);
trace("Received remote stream");
trace(pipes.length + ' elements in chain');
ta1.textContent = pipes.length + ' elements in chain';
btn3.disabled = false;
}
function start() {
trace("Requesting local stream");
btn1.disabled = true;
getUserMedia({audio:false, video:true},
gotStream,
function() {
alert('getUserMedia failed');
});
}
function call() {
btn2.disabled = true;
btn3.disabled = false;
btn4.disabled = false;
trace("Starting call");
pipes.push(new VideoPipe(localstream, gotRemoteStream));
}
function addrelay() {
pipes.push(new VideoPipe(remotestream, gotRemoteStream));
btn3.disabled = true;
}
function hangup() {
trace("Ending call");
while (pipes.length > 0) {
var pipe = pipes.pop()
pipe.close();
}
btn3.disabled = true;
btn4.disabled = true;
btn2.disabled = false;
}
</script>
</body>
</html>

View File

@ -1,196 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<title>PeerConnection Demo 1</title>
<!-- Load the polyfill to switch-hit between Chrome and Firefox -->
<script src="../../base/adapter.js"></script>
<style>
video {
border:5px solid black;
width:480px;
height:360px;
}
button {
font: 18px sans-serif;
padding: 8px;
}
textarea {
font-family: monospace;
margin: 2px;
width:480px;
height:640px;
}
</style>
</head>
<body>
<video id="vid1" autoplay="true" muted="true"></video>
<video id="vid2" autoplay></video>
<video id="vid3" autoplay></video>
<br>
<button id="btn1" onclick="start()">Start</button>
<button id="btn2" onclick="call()">Call</button>
<button id="btn3" onclick="hangup()">Hang Up</button>
<br>
<script>
//var vid1 = document.getElementById("vid1");
//var vid2 = document.getElementById("vid2");
btn1.disabled = false;
btn2.disabled = true;
btn3.disabled = true;
var pc1_local, pc1_remote;
var pc2_local, pc2_remote;
var localstream;
var sdpConstraints = {'mandatory': {
'OfferToReceiveAudio':true,
'OfferToReceiveVideo':true }};
function gotStream(stream){
trace("Received local stream");
// Call the polyfill wrapper to attach the media stream to this element.
attachMediaStream(vid1, stream);
localstream = stream;
btn2.disabled = false;
}
function start() {
trace("Requesting local stream");
btn1.disabled = true;
// Call into getUserMedia via the polyfill (adapter.js).
getUserMedia({audio:true, video:true},
gotStream, function() {});
}
function call() {
btn2.disabled = true;
btn3.disabled = false;
trace("Starting calls");
videoTracks = localstream.getVideoTracks();
audioTracks = localstream.getAudioTracks();
if (videoTracks.length > 0)
trace("Using Video device: " + videoTracks[0].label);
if (audioTracks.length > 0)
trace("Using Audio device: " + audioTracks[0].label);
// Create an RTCPeerConnection via the polyfill (adapter.js).
var servers = null;
pc1_local = new RTCPeerConnection(servers);
pc1_remote = new RTCPeerConnection(servers);
pc1_remote.onaddstream = gotRemoteStream1;
pc1_local.onicecandidate = iceCallback1Local;
pc1_remote.onicecandidate = iceCallback1Remote;
trace("PC1: created local and remote peer connection objects");
pc2_local = new RTCPeerConnection(servers);
pc2_remote = new RTCPeerConnection(servers);
pc2_remote.onaddstream = gotRemoteStream2;
pc2_local.onicecandidate = iceCallback2Local;
pc2_remote.onicecandidate = iceCallback2Remote;
trace("PC2: created local and remote peer connection objects");
pc1_local.addStream(localstream);
trace("Adding local stream to pc1_local");
pc1_local.createOffer(gotDescription1Local, onCreateSessionDescriptionError);
pc2_local.addStream(localstream);
trace("Adding local stream to pc2_local");
pc2_local.createOffer(gotDescription2Local, onCreateSessionDescriptionError);
}
function onCreateSessionDescriptionError(error) {
trace('Failed to create session description: ' + error.toString());
}
function gotDescription1Local(desc) {
pc1_local.setLocalDescription(desc);
trace("Offer from pc1_local \n" + desc.sdp);
pc1_remote.setRemoteDescription(desc);
// Since the "remote" side has no media stream we need
// to pass in the right constraints in order for it to
// accept the incoming offer of audio and video.
pc1_remote.createAnswer(gotDescription1Remote,
onCreateSessionDescriptionError, sdpConstraints);
}
function gotDescription1Remote(desc) {
pc1_remote.setLocalDescription(desc);
trace("Answer from pc1_remote \n" + desc.sdp);
pc1_local.setRemoteDescription(desc);
}
function gotDescription2Local(desc) {
pc2_local.setLocalDescription(desc);
trace("Offer from pc2_local \n" + desc.sdp);
pc2_remote.setRemoteDescription(desc);
// Since the "remote" side has no media stream we need
// to pass in the right constraints in order for it to
// accept the incoming offer of audio and video.
pc2_remote.createAnswer(gotDescription2Remote,
onCreateSessionDescriptionError, sdpConstraints);
}
function gotDescription2Remote(desc) {
pc2_remote.setLocalDescription(desc);
trace("Answer from pc2_remote \n" + desc.sdp);
pc2_local.setRemoteDescription(desc);
}
function hangup() {
trace("Ending calls");
pc1_local.close();
pc1_remote.close();
pc2_local.close();
pc2_remote.close();
pc1_local = pc1_remote = null;
pc2_local = pc2_remote = null;
btn3.disabled = true;
btn2.disabled = false;
}
function gotRemoteStream1(e) {
// Call the polyfill wrapper to attach the media stream to this element.
attachMediaStream(vid2, e.stream);
trace("PC1: Received remote stream");
}
function gotRemoteStream2(e) {
// Call the polyfill wrapper to attach the media stream to this element.
attachMediaStream(vid3, e.stream);
trace("PC2: Received remote stream");
}
function iceCallback1Local(event) {
handleCandidate(event.candidate, pc1_remote, "PC1: ", "local");
}
function iceCallback1Remote(event) {
handleCandidate(event.candidate, pc1_local, "PC1: ", "remote");
}
function iceCallback2Local(event) {
handleCandidate(event.candidate, pc2_remote, "PC2: ", "local");
}
function iceCallback2Remote(event) {
handleCandidate(event.candidate, pc2_local, "PC2: ", "remote");
}
function handleCandidate(candidate, dest, prefix, type) {
if (candidate) {
dest.addIceCandidate(new RTCIceCandidate(candidate),
onAddIceCandidateSuccess, onAddIceCandidateError);
trace(prefix + "New " + type + " ICE candidate: " + candidate.candidate);
}
}
function onAddIceCandidateSuccess() {
trace("AddIceCandidate success.");
}
function onAddIceCandidateError(error) {
trace("Failed to add Ice Candidate: " + error.toString());
}
</script>
</body>
</html>

View File

@ -1,126 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<title>PeerConnection Audio Only Demo 1</title>
<!-- Load the polyfill to switch-hit between Chrome and Firefox -->
<script src="../../base/adapter.js"></script>
<style>
button {
font: 18px sans-serif;
padding: 8px;
}
</style>
</head>
<body>
<h2>Local-Audio</h2>
<audio id="audio1" autoplay="autoplay" controls="controls" muted="true"></audio>
<h2>Remote-Audio</h2>
<audio id="audio2" autoplay="autoplay" controls="controls"></audio>
<br><br>
<button id="btn1" onclick="call()">Call</button>
<button id="btn2" onclick="hangup()">Hang Up</button>
<br><br>
<script>
btn1.disabled = false;
btn2.disabled = true;
var pc1,pc2;
var localstream;
var sdpConstraints = {'mandatory': {
'OfferToReceiveAudio':true,
'OfferToReceiveVideo':false }};
function gotStream(stream){
trace("Received local stream");
// Call the polyfill wrapper to attach the media stream to this element.
localstream = stream;
audioTracks = localstream.getAudioTracks();
if (audioTracks.length > 0)
trace('Using Audio device: ' + audioTracks[0].label);
pc1.addStream(localstream);
trace("Adding Local Stream to peer connection");
pc1.createOffer(gotDescription1, onCreateSessionDescriptionError);
}
function onCreateSessionDescriptionError(error) {
trace('Failed to create session description: ' + error.toString());
}
function call() {
btn1.disabled = true;
btn2.disabled = false;
trace("Starting call");
var servers = null;
var pc_constraints = {"optional": []};
pc1 = new RTCPeerConnection(servers,pc_constraints);
trace("Created local peer connection object pc1");
pc1.onicecandidate = iceCallback1;
pc2 = new RTCPeerConnection(servers,pc_constraints);
trace("Created remote peer connection object pc2");
pc2.onicecandidate = iceCallback2;
pc2.onaddstream = gotRemoteStream;
trace("Requesting local stream");
// Call into getUserMedia via the polyfill (adapter.js).
getUserMedia({audio:true, video:false},
gotStream, function() {});
}
function gotDescription1(desc){
pc1.setLocalDescription(desc);
trace("Offer from pc1 \n" + desc.sdp);
pc2.setRemoteDescription(desc);
// Since the "remote" side has no media stream we need
// to pass in the right constraints in order for it to
// accept the incoming offer of audio.
pc2.createAnswer(gotDescription2, onCreateSessionDescriptionError,
sdpConstraints);
}
function gotDescription2(desc){
pc2.setLocalDescription(desc);
trace("Answer from pc2 \n" + desc.sdp);
pc1.setRemoteDescription(desc);
}
function hangup() {
trace("Ending call");
pc1.close();
pc2.close();
pc1 = null;
pc2 = null;
btn2.disabled = true;
btn1.disabled = false;
}
function gotRemoteStream(e){
// Call the polyfill wrapper to attach the media stream to this element.
attachMediaStream(audio2, e.stream);
trace("Received remote stream");
}
function iceCallback1(event){
if (event.candidate) {
pc2.addIceCandidate(new RTCIceCandidate(event.candidate),
onAddIceCandidateSuccess, onAddIceCandidateError);
trace("Local ICE candidate: \n" + event.candidate.candidate);
}
}
function iceCallback2(event){
if (event.candidate) {
pc1.addIceCandidate(new RTCIceCandidate(event.candidate),
onAddIceCandidateSuccess, onAddIceCandidateError);
trace("Remote ICE candidate: \n " + event.candidate.candidate);
}
}
function onAddIceCandidateSuccess() {
trace("AddIceCandidate success.");
}
function onAddIceCandidateError(error) {
trace("Failed to add Ice Candidate: " + error.toString());
}
</script>
</body>
</html>

View File

@ -1,152 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<title>PeerConnection Demo 1</title>
<!-- Load the polyfill to switch-hit between Chrome and Firefox -->
<script src="../../base/adapter.js"></script>
<style>
video {
border:5px solid black;
width:480px;
height:360px;
}
button {
font: 18px sans-serif;
padding: 8px;
}
textarea {
font-family: monospace;
margin: 2px;
width:480px;
height:640px;
}
</style>
</head>
<body>
<video id="vid1" autoplay="true" muted="true"></video>
<video id="vid2" autoplay></video>
<br>
<button id="btn1" onclick="start()">Start</button>
<button id="btn2" onclick="call()">Call</button>
<button id="btn3" onclick="hangup()">Hang Up</button>
<br>
<xtextarea id="ta1"></textarea>
<xtextarea id="ta2"></textarea>
<script>
//var vid1 = document.getElementById("vid1");
//var vid2 = document.getElementById("vid2");
btn1.disabled = false;
btn2.disabled = true;
btn3.disabled = true;
var pc1,pc2;
var localstream;
var sdpConstraints = {'mandatory': {
'OfferToReceiveAudio':true,
'OfferToReceiveVideo':true }};
function gotStream(stream){
trace("Received local stream");
// Call the polyfill wrapper to attach the media stream to this element.
attachMediaStream(vid1, stream);
localstream = stream;
btn2.disabled = false;
}
function start() {
trace("Requesting local stream");
btn1.disabled = true;
// Call into getUserMedia via the polyfill (adapter.js).
getUserMedia({audio:true, video:true},
gotStream, function() {});
}
function call() {
btn2.disabled = true;
btn3.disabled = false;
trace("Starting call");
videoTracks = localstream.getVideoTracks();
audioTracks = localstream.getAudioTracks();
if (videoTracks.length > 0)
trace('Using Video device: ' + videoTracks[0].label);
if (audioTracks.length > 0)
trace('Using Audio device: ' + audioTracks[0].label);
var servers = null;
pc1 = new RTCPeerConnection(servers);
trace("Created local peer connection object pc1");
pc1.onicecandidate = iceCallback1;
pc2 = new RTCPeerConnection(servers);
trace("Created remote peer connection object pc2");
pc2.onicecandidate = iceCallback2;
pc2.onaddstream = gotRemoteStream;
pc1.addStream(localstream);
trace("Adding Local Stream to peer connection");
pc1.createOffer(gotDescription1, onCreateSessionDescriptionError);
}
function onCreateSessionDescriptionError(error) {
trace('Failed to create session description: ' + error.toString());
}
function gotDescription1(desc){
pc1.setLocalDescription(desc);
trace("Offer from pc1 \n" + desc.sdp);
pc2.setRemoteDescription(desc);
// Since the "remote" side has no media stream we need
// to pass in the right constraints in order for it to
// accept the incoming offer of audio and video.
pc2.createAnswer(gotDescription2, onCreateSessionDescriptionError,
sdpConstraints);
}
function gotDescription2(desc){
pc2.setLocalDescription(desc);
trace("Answer from pc2 \n" + desc.sdp);
pc1.setRemoteDescription(desc);
}
function hangup() {
trace("Ending call");
pc1.close();
pc2.close();
pc1 = null;
pc2 = null;
btn3.disabled = true;
btn2.disabled = false;
}
function gotRemoteStream(e){
// Call the polyfill wrapper to attach the media stream to this element.
attachMediaStream(vid2, e.stream);
trace("Received remote stream");
}
function iceCallback1(event){
if (event.candidate) {
pc2.addIceCandidate(new RTCIceCandidate(event.candidate),
onAddIceCandidateSuccess, onAddIceCandidateError);
trace("Local ICE candidate: \n" + event.candidate.candidate);
}
}
function iceCallback2(event){
if (event.candidate) {
pc1.addIceCandidate(new RTCIceCandidate(event.candidate),
onAddIceCandidateSuccess, onAddIceCandidateError);
trace("Remote ICE candidate: \n " + event.candidate.candidate);
}
}
function onAddIceCandidateSuccess() {
trace("AddIceCandidate success.");
}
function onAddIceCandidateError(error) {
trace("Failed to add Ice Candidate: " + error.toString());
}
</script>
</body>
</html>

View File

@ -1,320 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<title>PC1 SDP Munge Demo</title>
<!-- Load the polyfill to switch-hit between Chrome and Firefox -->
<script src="../../base/adapter.js"></script>
<style>
div#left {
float: left;
margin: 0 2em 2.5em 0;
}
div#right {
float: left;
margin: 0 0 1.5em 0;
}
div#buttons button {
margin: 0 1.3em 1em 0;
width: 13em;
}
div#buttons {
max-width: 30em;
}
div#source {
clear: both;
margin: 0 0 1em 0;
}
div#select {
margin: 0 0 1em 0;
}
select {
margin: 0 1em 0 0;
}
h2 {
font-size: 1em;
font-family: sans-serif;
margin: 0 0 0.3em 0;
padding: 0;
}
textarea {
height: 20em;
width: 98%;
}
video {
background: #666;
margin: 0 0 1.5em 0;
width: 320px;
height: 240px;
}
</style>
</head>
<body>
<div id="left">
<h2>Local Preview</h2>
<video id="vid1" autoplay muted></video>
<h2>Offer SDP</h2>
<textarea id="offerSdp"></textarea><br><br>
</div>
<div id="right">
<h2>Remote Preview</h2>
<video id="vid2" autoplay></video>
<h2>Answer SDP</h2>
<textarea id="answerSdp"></textarea>
</div>
<div id="source">
<label for="audiosrc">Audio source: </label><select id="audiosrc"></select>
<label for="videosrc">Video source: </label><select id="videosrc"></select>
</div>
<div id="select">Select an audio & video source, then click GetUserMedia:</div>
<div id="buttons">
<button id="btnMedia" onclick="getMedia()">GetUserMedia</button>
<button id="btnCreatePC" onclick="createPC()">Create PeerConnection</button>
<button id="btnOffer" onclick="createOffer()">Create Offer</button>
<button id="btnSetOffer" onclick="setOffer()">Set Offer</button>
<button id="btnAnswer" onclick="createAnswer()">Create Answer</button>
<button id="btnSetAnswer" onclick="setAnswer()">Set Answer</button>
<button id="btnHangUp" onclick="hangup()">Hang Up</button>
</div>
<script>
var audio_select = document.getElementById("audiosrc");
var video_select = document.getElementById("videosrc");
btnMedia.disabled = false;
btnCreatePC.disabled = true;
btnOffer.disabled = true;
btnSetOffer.disabled = true;
btnAnswer.disabled = true;
btnSetAnswer.disabled = true;
btnHangUp.disabled = true;
//audio_select.onchange = changeDevices;
//video_select.onchange = changeDevices;
var pc1,pc2;
var localstream;
var sdpConstraints = {'mandatory': {
'OfferToReceiveAudio':true,
'OfferToReceiveVideo':true }};
refreshSources();
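// MediaStreamTrack.getSources() was the pre-standard way to enumerate capture
// devices (Chrome 30+); newer browsers expose
// navigator.mediaDevices.enumerateDevices() instead.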
function refreshSources() {
if (webrtcDetectedBrowser === 'chrome' && webrtcDetectedVersion >= 30) {
MediaStreamTrack.getSources(gotSources);
} else {
alert('Failed to enumerate devices; Chrome version 30 or higher is required.');
}
}
function gotSources(sourceInfos) {
var audio_count = 0;
var video_count = 0;
audio_select.disabled = true;
video_select.disabled = true;
audio_select.innerHTML = '';
video_select.innerHTML = '';
for (var i = 0; i < sourceInfos.length; i++) {
var option = document.createElement("option");
option.value = sourceInfos[i].id;
option.text = sourceInfos[i].label;
if (sourceInfos[i].kind === 'audio') {
audio_count++;
if (option.text === '') {
option.text = 'Audio ' + audio_count;
}
audio_select.appendChild(option);
} else {
video_count++;
if (option.text === '') {
option.text = 'Video ' + video_count;
}
video_select.appendChild(option);
}
}
audio_select.disabled = false;
video_select.disabled = false;
}
function getMedia() {
changeDevices();
audio_select.disabled = true;
video_select.disabled = true;
btnMedia.disabled = true;
btnCreatePC.disabled = false;
}
function changeDevices() {
var audio_source = null;
var video_source = null;
if (audio_select.options.length > 0) {
audio_source = audio_select.options[audio_select.selectedIndex].value;
trace('selected audio_source :' + audio_source);
}
if (video_select.options.length > 0 ) {
video_source = video_select.options[video_select.selectedIndex].value;
trace('selected video_source :' + video_source);
}
setWebcamAndMic(audio_source, video_source);
}
function setWebcamAndMic(audio_source, video_source) {
trace("Requesting local stream");
// Call into getUserMedia via the polyfill (adapter.js).
getUserMedia({ audio: {optional: [{sourceId: audio_source}]},
video: {optional: [{sourceId: video_source}]}
}, gotStream, function() {});
}
function gotStream(stream) {
trace("Received local stream");
// Call the polyfill wrapper to attach the media stream to this element.
attachMediaStream(vid1, stream);
localstream = stream;
}
function createPC() {
btnCreatePC.disabled = true;
btnOffer.disabled = false;
btnAnswer.disabled = false;
btnSetOffer.disabled = false;
btnSetAnswer.disabled = false;
btnHangUp.disabled = false;
trace("Starting call");
videoTracks = localstream.getVideoTracks();
audioTracks = localstream.getAudioTracks();
if (videoTracks.length > 0) {
trace('Using Video device: ' + videoTracks[0].label);
}
if (audioTracks.length > 0) {
trace('Using Audio device: ' + audioTracks[0].label);
}
var servers = null;
pc1 = new RTCPeerConnection(servers);
trace("Created local peer connection object pc1");
pc1.onicecandidate = iceCallback1;
pc2 = new RTCPeerConnection(servers);
trace("Created remote peer connection object pc2");
pc2.onicecandidate = iceCallback2;
pc2.onaddstream = gotRemoteStream;
pc1.addStream(localstream);
trace("Adding Local Stream to peer connection");
}
function onSetSessionDescriptionSuccess() {
trace('Set session description success.');
}
function onSetSessionDescriptionError(error) {
trace('Failed to set session description: ' + error.toString());
}
// Workaround for crbug/322756.
function maybeAddLineBreakToEnd(sdp) {
var endWithLineBreak = /\n$/;
if (!endWithLineBreak.test(sdp)) {
return sdp + '\n';
}
return sdp;
}
function createOffer(){
pc1.createOffer(gotDescription1, onCreateSessionDescriptionError);
}
function onCreateSessionDescriptionError(error) {
trace('Failed to create session description: ' + error.toString());
}
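// Apply the (possibly hand-edited) SDP from the textarea as pc1's local
// description and pc2's remote description.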
function setOffer(){
var sdp = document.getElementById("offerSdp").value;
sdp = maybeAddLineBreakToEnd(sdp);
var offer = new RTCSessionDescription({type:'offer',sdp:sdp});
pc1.setLocalDescription(offer,
onSetSessionDescriptionSuccess,
onSetSessionDescriptionError);
trace("Modified Offer from pc1 \n" + sdp);
pc2.setRemoteDescription(offer,
onSetSessionDescriptionSuccess,
onSetSessionDescriptionError);
}
function gotDescription1(desc) {
document.getElementById("offerSdp").disabled = false;
document.getElementById("offerSdp").value = desc.sdp;
}
function createAnswer(){
// Since the "remote" side has no media stream we need
// to pass in the right constraints in order for it to
// accept the incoming offer of audio and video.
pc2.createAnswer(gotDescription2, onCreateSessionDescriptionError,
sdpConstraints);
}
function setAnswer(){
var sdp = document.getElementById("answerSdp").value;
sdp = maybeAddLineBreakToEnd(sdp);
var answer = new RTCSessionDescription({type:'answer',sdp:sdp});
pc2.setLocalDescription(answer,
onSetSessionDescriptionSuccess,
onSetSessionDescriptionError);
trace("Modified Answer from pc2 \n" + sdp);
pc1.setRemoteDescription(answer,
onSetSessionDescriptionSuccess,
onSetSessionDescriptionError);
}
function gotDescription2(desc) {
document.getElementById("answerSdp").disabled = false;
document.getElementById("answerSdp").value = desc.sdp;
}
function hangup() {
trace("Ending call");
localstream.stop();
pc1.close();
pc2.close();
pc1 = null;
pc2 = null;
audio_select.disabled = false;
video_select.disabled = false;
document.getElementById("offerSdp").disabled = true;
document.getElementById("answerSdp").disabled = true;
btnMedia.disabled = false;
btnCreatePC.disabled = true;
btnOffer.disabled = true;
btnSetOffer.disabled = true;
btnAnswer.disabled = true;
btnSetAnswer.disabled = true;
btnHangUp.disabled = true;
}
function gotRemoteStream(e) {
// Call the polyfill wrapper to attach the media stream to this element.
attachMediaStream(vid2, e.stream);
trace("Received remote stream");
}
function iceCallback1(event) {
if (event.candidate) {
pc2.addIceCandidate(new RTCIceCandidate(event.candidate),
onAddIceCandidateSuccess, onAddIceCandidateError);
trace("Local ICE candidate: \n" + event.candidate.candidate);
}
}
function iceCallback2(event) {
if (event.candidate) {
pc1.addIceCandidate(new RTCIceCandidate(event.candidate),
onAddIceCandidateSuccess, onAddIceCandidateError);
trace("Remote ICE candidate: \n " + event.candidate.candidate);
}
}
function onAddIceCandidateSuccess() {
trace("AddIceCandidate success.");
}
function onAddIceCandidateError(error) {
trace("Failed to add Ice Candidate: " + error.toString());
}
</script>
</body>
</html>

View File

@ -1,154 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<title>PeerConnection PRANSWER Demo</title>
<!-- Load the polyfill to switch-hit between Chrome and Firefox -->
<script src="../../base/adapter.js"></script>
<style>
video {
border:5px solid black;
width:320px;
height:240px;
}
</style>
</head>
<body>
<video id="vid1" autoplay="true" muted="true"></video>
<video id="vid2" autoplay></video>
<br>
<button id="btn1" onclick="start()">Call</button>
<button id="btn2" onclick="accept()">Accept</button>
<button id="btn3" onclick="stop()">Hang Up</button>
<script>
//var vid1 = document.getElementById("vid1");
//var vid2 = document.getElementById("vid2");
btn1.disabled = true;
btn2.disabled = true;
btn3.disabled = true;
var pc1,pc2;
var localstream;
var sdpConstraints = {'mandatory': {
'OfferToReceiveAudio':true,
'OfferToReceiveVideo':true }};
function gotStream(stream) {
trace("Received local stream");
// Call the polyfill wrapper to attach the media stream to this element.
attachMediaStream(vid1, stream);
localstream = stream;
btn1.disabled = false;
}
getUserMedia({audio:true, video:true}, gotStream, function() {});
function start() {
btn1.disabled = true;
btn2.disabled = false;
btn3.disabled = false;
trace("Starting Call");
videoTracks = localstream.getVideoTracks();
audioTracks = localstream.getAudioTracks();
if (videoTracks.length > 0)
trace('Using Video device: ' + videoTracks[0].label);
if (audioTracks.length > 0)
trace('Using Audio device: ' + audioTracks[0].label);
var servers = null;
pc1 = new RTCPeerConnection(servers);
trace("Created local peer connection object pc1");
pc1.onicecandidate = iceCallback1;
pc2 = new RTCPeerConnection(servers);
trace("Created remote peer connection object pc2");
pc2.onicecandidate = iceCallback2;
pc2.onaddstream = gotRemoteStream;
pc1.addStream(localstream);
trace("Adding Local Stream to peer connection");
pc1.createOffer(gotDescription1, onCreateSessionDescriptionError);
}
function onCreateSessionDescriptionError(error) {
trace('Failed to create session description: ' + error.toString());
}
function gotDescription1(desc) {
pc1.setLocalDescription(desc);
trace("Offer from pc1 \n" + desc.sdp);
pc2.setRemoteDescription(desc);
// Since the "remote" side has no media stream we need
// to pass in the right constraints in order for it to
// accept the incoming offer of audio and video.
pc2.createAnswer(gotDescription2, onCreateSessionDescriptionError,
sdpConstraints);
}
function gotDescription2(desc) {
// Provisional answer, set a=inactive & set sdp type to pranswer.
desc.sdp = desc.sdp.replace(/a=recvonly/g, "a=inactive");
desc.type = "pranswer";
pc2.setLocalDescription(desc);
trace("Pranswer from pc2 \n" + desc.sdp);
pc1.setRemoteDescription(desc);
}
function gotDescription3(desc) {
// Final answer, setting a=recvonly & sdp type to answer.
desc.sdp = desc.sdp.replace(/a=inactive/g, "a=recvonly");
desc.type = "answer";
pc2.setLocalDescription(desc);
trace("Answer from pc2 \n" + desc.sdp);
pc1.setRemoteDescription(desc);
}
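// accept() asks pc2 for a fresh answer so gotDescription3 can promote the
// provisional "pranswer" to a final answer.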
function accept() {
pc2.createAnswer(gotDescription3, null, sdpConstraints);
btn2.disabled = true;
btn1.disabled = false;
}
function stop() {
trace("Ending Call" + "\n\n");
pc1.close();
pc2.close();
pc1=null;
pc2=null;
btn2.disabled = true;
btn1.disabled = false;
btn3.disabled = true;
}
function gotRemoteStream(e) {
// Call the polyfill wrapper to attach the media stream to this element.
attachMediaStream(vid2, e.stream);
trace("Received remote stream");
}
function iceCallback1(event) {
if (event.candidate) {
pc2.addIceCandidate(new RTCIceCandidate(event.candidate),
onAddIceCandidateSuccess, onAddIceCandidateError);
trace("Local ICE candidate: \n" + event.candidate.candidate);
}
}
function iceCallback2(event) {
if (event.candidate) {
pc1.addIceCandidate(new RTCIceCandidate(event.candidate),
onAddIceCandidateSuccess, onAddIceCandidateError);
trace("Remote ICE candidate: \n " + event.candidate.candidate);
}
}
function onAddIceCandidateSuccess() {
trace("AddIceCandidate success.");
}
function onAddIceCandidateError(error) {
trace("Failed to add Ice Candidate: " + error.toString());
}
</script>
</body>
</html>

View File

@ -1,246 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<title>RTCPeerState & RTCIceConnectionState Demo 1</title>
<!-- Load the polyfill to switch-hit between Chrome and Firefox -->
<script src="../../base/adapter.js"></script>
<style>
video {
border:5px solid black;
width:480px;
height:360px;
}
button {
font: 18px sans-serif;
padding: 8px;
}
label{
font: bold 15px/30px Georgia,serif;
display:inline-table;
width:200px;
text-align:left;
}
input{
font:italic bold 15px/30px Georgia,serif;
text-align:center;
}
fieldset{
border:none;
margin:0px auto;
}
</style>
</head>
<body>
<video id="vid1" autoplay></video>
<video id="vid2" autoplay></video>
<br>
<button id="btn1" onclick="start()">Start</button>
<button id="btn2" onclick="call()">Call</button>
<button id="btn3" onclick="hangup()">Hang Up</button>
<br>
<br>
<fieldset>
<label>pc1-state:</label>
<input type="text" id="pc1-state" size="50" disabled="true">
<br>
<label>pc1-ice-connection-state:</label>
<input type="text" id="pc1-ice-connection-state" size="50" disabled="true">
<br>
<label>pc2-state:</label>
<input type="text" id="pc2-state" size="50" disabled="true">
<br>
<label>pc2-ice-connection-state:</label>
<input type="text" id="pc2-ice-connection-state" size="50" disabled="true">
</fieldset>
<script>
btn1.disabled = false;
btn2.disabled = true;
btn3.disabled = true;
var pc1,pc2;
var localstream;
var sdpConstraints = {'mandatory': {
'OfferToReceiveAudio':true,
'OfferToReceiveVideo':true }};
function gotStream(stream){
trace("Received local stream");
// Call the polyfill wrapper to attach the media stream to this element.
attachMediaStream(vid1, stream);
localstream = stream;
btn2.disabled = false;
}
function start() {
trace("Requesting local stream");
btn1.disabled = true;
// Call into getUserMedia via the polyfill (adapter.js).
getUserMedia({audio:true, video:true},
gotStream, function() {});
}
function call() {
btn2.disabled = true;
btn3.disabled = false;
trace("Starting call");
videoTracks = localstream.getVideoTracks();
audioTracks = localstream.getAudioTracks();
if (videoTracks.length > 0)
trace('Using Video device: ' + videoTracks[0].label);
if (audioTracks.length > 0)
trace('Using Audio device: ' + audioTracks[0].label);
var servers = null;
var pc_constraints = {"optional": []};
pc1 = new RTCPeerConnection(servers,pc_constraints);
trace("Created local peer connection object pc1");
document.getElementById("pc1-state").value = pc1.signalingState ||
pc1.readyState;
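// Older Chrome builds only fire the legacy onstatechange / onicechange
// events, so fall back to them when the standard handlers are missing.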
if (typeof pc1.onsignalingstatechange !== 'undefined') {
pc1.onsignalingstatechange = stateCallback1;
} else {
pc1.onstatechange = stateCallback1;
}
document.getElementById("pc1-ice-connection-state").value =
pc1.iceConnectionState;
if (typeof pc1.oniceconnectionstatechange !== 'undefined') {
pc1.oniceconnectionstatechange = iceStateCallback1;
} else {
pc1.onicechange = iceStateCallback1;
}
pc1.onicecandidate = iceCallback1;
pc2 = new RTCPeerConnection(servers,pc_constraints);
trace("Created remote peer connection object pc2");
document.getElementById("pc2-state").value = pc2.signalingState ||
pc2.readyState;
if (typeof pc2.onsignalingstatechange !== 'undefined') {
pc2.onsignalingstatechange = stateCallback2;
} else {
pc2.onstatechange = stateCallback2;
}
document.getElementById("pc2-ice-connection-state").value =
pc2.iceConnectionState;
if (typeof pc2.oniceconnectionstatechange !== 'undefined') {
pc2.oniceconnectionstatechange = iceStateCallback2;
} else {
pc2.onicechange = iceStateCallback2;
}
pc2.onicecandidate = iceCallback2;
pc2.onaddstream = gotRemoteStream;
pc1.addStream(localstream);
trace("Adding Local Stream to peer connection");
pc1.createOffer(gotDescription1, onCreateSessionDescriptionError);
}
function onCreateSessionDescriptionError(error) {
trace('Failed to create session description: ' + error.toString());
}
function gotDescription1(desc) {
pc1.setLocalDescription(desc);
trace("Offer from pc1 \n" + desc.sdp);
pc2.setRemoteDescription(desc);
pc2.createAnswer(gotDescription2, onCreateSessionDescriptionError,
sdpConstraints);
}
function gotDescription2(desc) {
pc2.setLocalDescription(desc);
trace("Answer from pc2 \n" + desc.sdp);
pc1.setRemoteDescription(desc);
}
function hangup() {
trace("Ending call");
pc1.close();
pc2.close();
document.getElementById("pc1-state").value += "->" +
pc1.signalingState ||
pc1.readyState;
document.getElementById("pc2-state").value += "->" +
pc2.signalingState ||
pc2.readyState;
document.getElementById("pc1-ice-connection-state").value += "->" +
pc1.iceConnectionState;
document.getElementById("pc2-ice-connection-state").value += "->" +
pc2.iceConnectionState;
pc1 = null;
pc2 = null;
btn3.disabled = true;
btn2.disabled = false;
}
function gotRemoteStream(e){
attachMediaStream(vid2, e.stream);
trace("Received remote stream");
}
function stateCallback1() {
var state;
if (pc1) {
state = pc1.signalingState || pc1.readyState;
trace("pc1 state change callback, state:" + state);
document.getElementById("pc1-state").value += "->" + state;
}
}
function stateCallback2() {
var state;
if (pc2) {
state = pc2.signalingState || pc2.readyState;
trace("pc2 state change callback, state:" + state);
document.getElementById("pc2-state").value += "->" + state;
}
}
function iceStateCallback1() {
var iceState;
if (pc1) {
iceState = pc1.iceConnectionState;
trace("pc1 ICE connection state change callback, state:" + iceState);
document.getElementById("pc1-ice-connection-state").value += "->" +
iceState;
}
}
function iceStateCallback2() {
var iceState;
if (pc2) {
iceState = pc2.iceConnectionState;
trace("pc2 ICE connection state change callback, state:" + iceState);
document.getElementById("pc2-ice-connection-state").value += "->" +
iceState;
}
}
function iceCallback1(event){
if (event.candidate) {
pc2.addIceCandidate(new RTCIceCandidate(event.candidate),
onAddIceCandidateSuccess, onAddIceCandidateError);
trace("Local ICE candidate: \n" + event.candidate.candidate);
} else {
trace("end of candidates1");
}
}
function iceCallback2(event){
if (event.candidate) {
pc1.addIceCandidate(new RTCIceCandidate(event.candidate),
onAddIceCandidateSuccess, onAddIceCandidateError);
trace("Remote ICE candidate: \n " + event.candidate.candidate);
} else {
trace("end of candidates2");
}
}
function onAddIceCandidateSuccess() {
trace("AddIceCandidate success.");
}
function onAddIceCandidateError(error) {
trace("Failed to add Ice Candidate: " + error.toString());
}
</script>
</body>
</html>

View File

@ -1,264 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>Audio effects with WebAudio in WebRTC</title>
<script type="text/javascript" src="../../base/adapter.js"></script>
<script>
var audioElement;
var buttonStart;
var buttonStop;
var localStream;
var pc1, pc2;
var display;
var webAudio;
// WebAudio helper class which takes care of the WebAudio related parts.
function WebAudio() {
this.context = new webkitAudioContext();
this.soundBuffer = null;
}
WebAudio.prototype.start = function() {
this.filter = this.context.createBiquadFilter();
this.filter.type = this.filter.HIGHPASS;
this.filter.frequency.value = 1500;
}
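// Route the microphone through the highpass filter into a
// MediaStreamAudioDestinationNode; the destination's stream is what gets
// attached to the PeerConnection.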
WebAudio.prototype.applyFilter = function(stream) {
this.mic = this.context.createMediaStreamSource(stream);
this.mic.connect(this.filter);
this.peer = this.context.createMediaStreamDestination();
this.filter.connect(this.peer);
return this.peer.stream;
}
WebAudio.prototype.renderLocally = function(enabled) {
if (enabled) {
this.mic.connect(this.context.destination);
} else {
this.mic.disconnect(0);
this.mic.connect(this.filter);
}
}
WebAudio.prototype.stop = function() {
this.mic.disconnect(0);
this.filter.disconnect(0);
this.mic = null;
this.peer = null;
}
WebAudio.prototype.addEffect = function() {
var effect = this.context.createBufferSource();
effect.buffer = this.soundBuffer;
if (this.peer) {
effect.connect(this.peer);
effect.start(0);
}
}
WebAudio.prototype.loadCompleted = function() {
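// Legacy synchronous decode: createBuffer(ArrayBuffer, mixToMono). Newer
// code would use context.decodeAudioData() instead.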
this.soundBuffer = this.context.createBuffer(this.request.response, true);
}
WebAudio.prototype.loadSound = function(url) {
this.request = new XMLHttpRequest();
this.request.open('GET', url, true);
this.request.responseType = 'arraybuffer';
this.request.onload = this.loadCompleted.bind(this);
this.request.send();
}
// Global methods.
function trace(txt) {
display.innerHTML += txt + "<br>";
}
function logEvent(e) {
console.log(e.type + ':' + e.target + ':' + e.target.id + ':muted=' +
e.target.muted);
}
$ = function(id) {
return document.getElementById(id);
};
function start() {
webAudio.start();
var constraints = {audio:true, video:false};
getUserMedia(constraints, gotStream, gotStreamFailed);
buttonStart.disabled = true;
buttonStop.disabled = false;
}
function stop() {
webAudio.stop();
pc1.close();
pc2.close();
pc1 = null;
pc2 = null;
buttonStart.disabled = false;
buttonStop.disabled = true;
localStream.stop();
}
function gotStream(stream) {
audioTracks = stream.getAudioTracks();
if (audioTracks.length == 1) {
console.log('gotStream({audio:true, video:false})');
var filteredStream = webAudio.applyFilter(stream);
var servers = null;
pc1 = new webkitRTCPeerConnection(servers);
console.log('Created local peer connection object pc1');
pc1.onicecandidate = iceCallback1;
pc2 = new webkitRTCPeerConnection(servers);
console.log('Created remote peer connection object pc2');
pc2.onicecandidate = iceCallback2;
pc2.onaddstream = gotRemoteStream;
pc1.addStream(filteredStream);
pc1.createOffer(gotDescription1);
stream.onended = function() {
console.log('stream.onended');
buttonStart.disabled = false;
buttonStop.disabled = true;
};
localStream = stream;
} else {
alert('The media stream contains an unexpected number of audio tracks.');
stream.stop();
}
}
function gotStreamFailed(error) {
buttonStart.disabled = false;
buttonStop.disabled = true;
alert('Failed to get access to local media. Error code: ' + error.code);
}
function forceOpus(sdp) {
// Remove all other codecs (not the video codecs though).
sdp = sdp.replace(/m=audio (\d+) RTP\/SAVPF.*\r\n/g,
'm=audio $1 RTP/SAVPF 111\r\n');
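// Drop the rtpmap lines of every audio payload type other than 111 (Opus),
// leaving the VP8/red/ulpfec video entries alone.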
sdp = sdp.replace(/a=rtpmap:(?!111)\d{1,3} (?!VP8|red|ulpfec).*\r\n/g, '');
return sdp;
}
function gotDescription1(desc){
console.log('Offer from pc1 \n' + desc.sdp);
var modifiedOffer = new RTCSessionDescription({type: 'offer',
sdp: forceOpus(desc.sdp)});
pc1.setLocalDescription(modifiedOffer);
console.log('Offer from pc1 \n' + modifiedOffer.sdp);
pc2.setRemoteDescription(modifiedOffer);
pc2.createAnswer(gotDescription2);
}
function gotDescription2(desc){
pc2.setLocalDescription(desc);
console.log('Answer from pc2 \n' + desc.sdp);
pc1.setRemoteDescription(desc);
}
function gotRemoteStream(e){
attachMediaStream(audioElement, e.stream);
}
function iceCallback1(event){
if (event.candidate) {
pc2.addIceCandidate(new RTCIceCandidate(event.candidate),
onAddIceCandidateSuccess, onAddIceCandidateError);
console.log('Local ICE candidate: \n' + event.candidate.candidate);
}
}
function iceCallback2(event){
if (event.candidate) {
pc1.addIceCandidate(new RTCIceCandidate(event.candidate),
onAddIceCandidateSuccess, onAddIceCandidateError);
console.log('Remote ICE candidate: \n ' + event.candidate.candidate);
}
}
function onAddIceCandidateSuccess() {
trace("AddIceCandidate success.");
}
function onAddIceCandidateError(error) {
trace("Failed to add Ice Candidate: " + error.toString());
}
function handleKeyDown(event) {
var keyCode = event.keyCode;
webAudio.addEffect();
}
function doMix(checkbox) {
webAudio.renderLocally(checkbox.checked);
}
function onload() {
webAudio = new WebAudio();
webAudio.loadSound('../sounds/Shamisen-C4.wav');
audioElement = $('audio');
buttonStart = $('start');
buttonStop = $('stop');
display = $('display');
document.addEventListener('keydown', handleKeyDown, false);
buttonStart.disabled = false;
buttonStop.disabled = true;
}
</script>
</head>
<body onload='onload()'>
<h2>Capture microphone input and stream it out to a peer with a processing
effect applied to the audio.</h2>
<p>The audio stream is: <br><br>
o Recorded using <a href="http://www.html5audio.org/2012/09/live-audio-input-comes-to-googles-chrome-canary.html"
title="Live audio input comes to Google's Chrome Canary">live-audio
input.</a><br>
o Filtered using an HP filter with fc=1500 Hz.<br>
o Encoded using <a href="http://www.opus-codec.org/" title="Opus Codec">
Opus.</a><br>
o Transmitted (in loopback) to remote peer using
<a href="http://dev.w3.org/2011/webrtc/editor/webrtc.html#rtcpeerconnection-interface"
title="RTCPeerConnection Interface">RTCPeerConnection</a> where it is decoded.<br>
o Finally, the received remote stream is used as the source for an &lt;audio&gt;
tag and played out locally.<br>
<br>Press any key to add an effect to the transmitted audio while talking.
</p>
<p>Please note that: <br><br>
o Linux is currently not supported.<br>
o Sample rate and channel configuration must be the same for input and
output sides on Windows.<br>
o Only the Default microphone device can be used for capturing.
</p>
<p>For more information, see <a href="https://dvcs.w3.org/hg/audio/raw-file/tip/webaudio/webrtc-integration.html"
title="Example 3: Capture microphone input and stream it out to a peer with a processing effect applied to the audio">
WebRTC integration with the Web Audio API.</a>
</p>
<style>
button {
font: 14px sans-serif;
padding: 8px;
}
</style>
<audio id="audio" autoplay controls></audio><br><br>
<button id="start" onclick="start()">Start</button>
<button id="stop" onclick="stop()">Stop</button><br><br>
Add local audio to output:<input id="mix" type="checkbox" onclick="doMix(this);"><br><br>
<pre id="display"></pre>
</body>
</html>

Binary file not shown.


View File

@ -1,140 +0,0 @@
<html>
<head>
<title>WebRTC Samples</title>
</head>
<body>
<h1>WebRTC Samples</h1>
<p>
Here are some sample pages that demonstrate basic
<a href="http://www.webrtc.org">WebRTC</a> concepts. If you are new to WebRTC,
you may want to check out this
<a href="http://www.html5rocks.com/en/tutorials/webrtc/basics/">
WebRTC overview</a> first.
</p>
<table border="0" cellpadding="1" cellspacing="1" style="width: 100%;">
<thead>
<tr>
<td colspan="2" scope="col" style="background-color: rgb(0, 204, 255);">
<b>getUserMedia Samples</b></td>
</tr>
</thead>
<tbody>
<tr>
<td>
<a href="html/gum1.html">gum1.html</a></td>
<td>
Shows how to access the webcam and display the local video in a &lt;video/&gt; element.</td>
</tr>
<tr>
<td>
<a href="html/gum2.html">gum2.html</a></td>
<td>
Shows how to capture the current frame of video to a &lt;canvas/&gt;.</td>
</tr>
<tr>
<td>
<a href="html/gum3.html">gum3.html</a></td>
<td>
Shows how to apply CSS filters to a &lt;video/&gt; and &lt;canvas/&gt;</td>
</tr>
<tr>
<td>
<a href="html/face.html">face.html</a></td>
<td>
Shows how to perform face tracking using webcam video.</td>
</tr>
<tr>
<td>
<a href="html/local-audio-rendering.html">local-audio-rendering.html</a>
</td>
<td>
Shows usage of a local media stream connected to an HTML5 audio tag.</td>
</tr>
<tr>
<td>
<a href="html/local-audio-volume.html">local-audio-volume.html</a></td>
<td>
Shows how to display the volume of a local audio track.</td>
</tr>
<tr><td>&nbsp; </td> <td>&nbsp; </td></tr>
<tr>
<td colspan="2" scope="col" style="background-color: rgb(0, 204, 255);">
<b>PeerConnection Samples</b></td>
</tr>
<tr>
<td>
<a href="html/pc1-audio.html">pc1-audio.html</a></td>
<td>Shows how to set up a simple 1:1 audio only call.</td>
</tr>
<tr>
<td>
<a href="html/pc1.html">pc1.html</a></td>
<td>
Shows how to set up a simple 1:1 audio/video call.</td>
</tr>
<tr>
<td>
<a href="html/pc1_sdp_munge.html">pc1_sdp_munge.html</a></td>
<td>
Allows you to modify offer/answer sdp with pc1 demo.</td>
</tr>
<tr>
<td>
<a href="html/states.html">states.html</a></td>
<td>
Shows RTCPeerStates and RTCIceConnectionStates in a
simple 1:1 audio/video call.</td>
</tr>
<tr>
<td>
<a href="html/multiple.html">multiple.html</a></td>
<td>
Shows how to set up multiple PeerConnections.</td>
</tr>
<tr>
<td>
<a href="html/constraints-and-stats.html">constraints-and-stats.html</a></td>
<td>
Shows how to pass constraints into the PeerConnection API,
and query it for statistics.</td>
</tr>
<tr>
<td>
<a href="html/dtmf1.html">dtmf1.html</a></td>
<td>
Shows how to send DTMF tones using PeerConnection API.</td>
</tr>
<tr>
<td>
<a href="html/dc1.html">dc1.html</a></td>
<td>
Shows how to send Data using PeerConnection API.</td>
</tr>
<tr>
<td>
<a href="html/webaudio-and-webrtc.html">webaudio-and-webrtc.html</a></td>
<td>
Captures and filters microphone input using WebAudio and sends it to a
remote peer with an option to add an audio effect.</td>
</tr>
<tr>
<td>
<a href="html/create-offer.html">create-offer.html</a></td>
<td>
Shows the output of createOffer when various constraints
are supplied.</td>
</tr>
<tr>
<td>
<a href="html/ice-servers.html">ice-servers.html</a></td>
<td>
Tests gathering candidates from arbitrary STUN and TURN servers.
</td>
</tr>
</tbody>
</table>
<p>&nbsp;</p>
</body>
</html>

View File

@ -1,12 +0,0 @@
indexes:
# AUTOGENERATED
# This index.yaml is automatically updated whenever the dev_appserver
# detects that a new type of query is run. If you want to manage the
# index.yaml file manually, remove the above marker line (the line
# saying "# AUTOGENERATED"). If you want to manage some indexes
# manually, move them above the marker line. The index.yaml file is
# automatically uploaded to the admin console when you next deploy
# your application using appcfg.py.

View File

@ -1,460 +0,0 @@
if (parallable === undefined) {
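// "parallable" wraps a function so it can run either synchronously or be
// split across several Web Workers (the async branch below).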
var parallable = function (file, funct) {
parallable.core[funct.toString()] = funct().core;
return function () {
var i;
var async, worker_num, params;
if (arguments.length > 1) {
async = arguments[arguments.length - 2];
worker_num = arguments[arguments.length - 1];
params = new Array(arguments.length - 2);
for (i = 0; i < arguments.length - 2; i++)
params[i] = arguments[i];
} else {
async = arguments[0].async;
worker_num = arguments[0].worker;
params = arguments[0];
delete params["async"];
delete params["worker"];
params = [params];
}
var scope = { "shared" : {} };
var ctrl = funct.apply(scope, params);
if (async) {
return function (complete, error) {
var executed = 0;
var outputs = new Array(worker_num);
var inputs = ctrl.pre.apply(scope, [worker_num]);
/* sanitize scope shared because for Chrome/WebKit, worker only support JSONable data */
for (i in scope.shared)
/* delete function, if any */
if (typeof scope.shared[i] == "function")
delete scope.shared[i];
/* delete DOM object, if any */
else if (scope.shared[i].tagName !== undefined)
delete scope.shared[i];
for (i = 0; i < worker_num; i++) {
var worker = new Worker(file);
worker.onmessage = (function (i) {
return function (event) {
outputs[i] = (typeof event.data == "string") ? JSON.parse(event.data) : event.data;
executed++;
if (executed == worker_num)
complete(ctrl.post.apply(scope, [outputs]));
}
})(i);
var msg = { "input" : inputs[i],
"name" : funct.toString(),
"shared" : scope.shared,
"id" : i,
"worker" : params.worker_num };
try {
worker.postMessage(msg);
} catch (e) {
worker.postMessage(JSON.stringify(msg));
}
}
}
} else {
return ctrl.post.apply(scope, [[ctrl.core.apply(scope, [ctrl.pre.apply(scope, [1])[0], 0, 1])]]);
}
}
};
parallable.core = {};
}
function get_named_arguments(params, names) {
if (params.length > 1) {
var new_params = {};
for (var i = 0; i < names.length; i++)
new_params[names[i]] = params[i];
return new_params;
} else if (params.length == 1) {
return params[0];
} else {
return {};
}
}
var ccv = {
pre : function (image) {
if (image.tagName.toLowerCase() == "img") {
var canvas = document.createElement("canvas");
document.body.appendChild(image);
canvas.width = image.offsetWidth;
canvas.style.width = image.offsetWidth.toString() + "px";
canvas.height = image.offsetHeight;
canvas.style.height = image.offsetHeight.toString() + "px";
document.body.removeChild(image);
var ctx = canvas.getContext("2d");
ctx.drawImage(image, 0, 0);
return canvas;
}
return image;
},
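// In-place grayscale conversion using roughly the Rec. 601 luma weights
// (0.30 R + 0.59 G + 0.11 B).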
grayscale : function (canvas) {
var ctx = canvas.getContext("2d");
var imageData = ctx.getImageData(0, 0, canvas.width, canvas.height);
var data = imageData.data;
var pix1, pix2, pix = canvas.width * canvas.height * 4;
while (pix > 0)
data[pix -= 4] = data[pix1 = pix + 1] = data[pix2 = pix + 2] = (data[pix] * 0.3 + data[pix1] * 0.59 + data[pix2] * 0.11);
ctx.putImageData(imageData, 0, 0);
return canvas;
},
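// Union-find grouping (with rank and path compression) of elements that
// gfunc considers equivalent; returns a per-element class index and the
// number of classes.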
array_group : function (seq, gfunc) {
var i, j;
var node = new Array(seq.length);
for (i = 0; i < seq.length; i++)
node[i] = {"parent" : -1,
"element" : seq[i],
"rank" : 0};
for (i = 0; i < seq.length; i++) {
if (!node[i].element)
continue;
var root = i;
while (node[root].parent != -1)
root = node[root].parent;
for (j = 0; j < seq.length; j++) {
if( i != j && node[j].element && gfunc(node[i].element, node[j].element)) {
var root2 = j;
while (node[root2].parent != -1)
root2 = node[root2].parent;
if(root2 != root) {
if(node[root].rank > node[root2].rank)
node[root2].parent = root;
else {
node[root].parent = root2;
if (node[root].rank == node[root2].rank)
node[root2].rank++;
root = root2;
}
/* compress path from node2 to the root: */
var temp, node2 = j;
while (node[node2].parent != -1) {
temp = node2;
node2 = node[node2].parent;
node[temp].parent = root;
}
/* compress path from node to the root: */
node2 = i;
while (node[node2].parent != -1) {
temp = node2;
node2 = node[node2].parent;
node[temp].parent = root;
}
}
}
}
}
var idx = new Array(seq.length);
var class_idx = 0;
for(i = 0; i < seq.length; i++) {
j = -1;
var node1 = i;
if(node[node1].element) {
while (node[node1].parent != -1)
node1 = node[node1].parent;
if(node[node1].rank >= 0)
node[node1].rank = ~class_idx++;
j = ~node[node1].rank;
}
idx[i] = j;
}
return {"index" : idx, "cat" : class_idx};
},
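// Cascade object detection over an image pyramid: "pre" builds the scaled
// canvases, "core" slides the classifier cascade over each scale, and "post"
// merges neighboring hits into final rectangles.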
detect_objects : parallable("ccv.js", function (canvas, cascade, interval, min_neighbors) {
if (this.shared !== undefined) {
var params = get_named_arguments(arguments, ["canvas", "cascade", "interval", "min_neighbors"]);
this.shared.canvas = params.canvas;
this.shared.interval = params.interval;
this.shared.min_neighbors = params.min_neighbors;
this.shared.cascade = params.cascade;
this.shared.scale = Math.pow(2, 1 / (params.interval + 1));
this.shared.next = params.interval + 1;
this.shared.scale_upto = Math.floor(Math.log(Math.min(params.canvas.width / params.cascade.width, params.canvas.height / params.cascade.height)) / Math.log(this.shared.scale));
var i;
for (i = 0; i < this.shared.cascade.stage_classifier.length; i++)
this.shared.cascade.stage_classifier[i].orig_feature = this.shared.cascade.stage_classifier[i].feature;
}
function pre(worker_num) {
var canvas = this.shared.canvas;
var interval = this.shared.interval;
var scale = this.shared.scale;
var next = this.shared.next;
var scale_upto = this.shared.scale_upto;
var pyr = new Array((scale_upto + next * 2) * 4);
var ret = new Array((scale_upto + next * 2) * 4);
pyr[0] = canvas;
ret[0] = { "width" : pyr[0].width,
"height" : pyr[0].height,
"data" : pyr[0].getContext("2d").getImageData(0, 0, pyr[0].width, pyr[0].height).data };
var i;
for (i = 1; i <= interval; i++) {
pyr[i * 4] = document.createElement("canvas");
pyr[i * 4].width = Math.floor(pyr[0].width / Math.pow(scale, i));
pyr[i * 4].height = Math.floor(pyr[0].height / Math.pow(scale, i));
pyr[i * 4].getContext("2d").drawImage(pyr[0], 0, 0, pyr[0].width, pyr[0].height, 0, 0, pyr[i * 4].width, pyr[i * 4].height);
ret[i * 4] = { "width" : pyr[i * 4].width,
"height" : pyr[i * 4].height,
"data" : pyr[i * 4].getContext("2d").getImageData(0, 0, pyr[i * 4].width, pyr[i * 4].height).data };
}
for (i = next; i < scale_upto + next * 2; i++) {
pyr[i * 4] = document.createElement("canvas");
pyr[i * 4].width = Math.floor(pyr[i * 4 - next * 4].width / 2);
pyr[i * 4].height = Math.floor(pyr[i * 4 - next * 4].height / 2);
pyr[i * 4].getContext("2d").drawImage(pyr[i * 4 - next * 4], 0, 0, pyr[i * 4 - next * 4].width, pyr[i * 4 - next * 4].height, 0, 0, pyr[i * 4].width, pyr[i * 4].height);
ret[i * 4] = { "width" : pyr[i * 4].width,
"height" : pyr[i * 4].height,
"data" : pyr[i * 4].getContext("2d").getImageData(0, 0, pyr[i * 4].width, pyr[i * 4].height).data };
}
for (i = next * 2; i < scale_upto + next * 2; i++) {
pyr[i * 4 + 1] = document.createElement("canvas");
pyr[i * 4 + 1].width = Math.floor(pyr[i * 4 - next * 4].width / 2);
pyr[i * 4 + 1].height = Math.floor(pyr[i * 4 - next * 4].height / 2);
pyr[i * 4 + 1].getContext("2d").drawImage(pyr[i * 4 - next * 4], 1, 0, pyr[i * 4 - next * 4].width - 1, pyr[i * 4 - next * 4].height, 0, 0, pyr[i * 4 + 1].width - 2, pyr[i * 4 + 1].height);
ret[i * 4 + 1] = { "width" : pyr[i * 4 + 1].width,
"height" : pyr[i * 4 + 1].height,
"data" : pyr[i * 4 + 1].getContext("2d").getImageData(0, 0, pyr[i * 4 + 1].width, pyr[i * 4 + 1].height).data };
pyr[i * 4 + 2] = document.createElement("canvas");
pyr[i * 4 + 2].width = Math.floor(pyr[i * 4 - next * 4].width / 2);
pyr[i * 4 + 2].height = Math.floor(pyr[i * 4 - next * 4].height / 2);
pyr[i * 4 + 2].getContext("2d").drawImage(pyr[i * 4 - next * 4], 0, 1, pyr[i * 4 - next * 4].width, pyr[i * 4 - next * 4].height - 1, 0, 0, pyr[i * 4 + 2].width, pyr[i * 4 + 2].height - 2);
ret[i * 4 + 2] = { "width" : pyr[i * 4 + 2].width,
"height" : pyr[i * 4 + 2].height,
"data" : pyr[i * 4 + 2].getContext("2d").getImageData(0, 0, pyr[i * 4 + 2].width, pyr[i * 4 + 2].height).data };
pyr[i * 4 + 3] = document.createElement("canvas");
pyr[i * 4 + 3].width = Math.floor(pyr[i * 4 - next * 4].width / 2);
pyr[i * 4 + 3].height = Math.floor(pyr[i * 4 - next * 4].height / 2);
pyr[i * 4 + 3].getContext("2d").drawImage(pyr[i * 4 - next * 4], 1, 1, pyr[i * 4 - next * 4].width - 1, pyr[i * 4 - next * 4].height - 1, 0, 0, pyr[i * 4 + 3].width - 2, pyr[i * 4 + 3].height - 2);
ret[i * 4 + 3] = { "width" : pyr[i * 4 + 3].width,
"height" : pyr[i * 4 + 3].height,
"data" : pyr[i * 4 + 3].getContext("2d").getImageData(0, 0, pyr[i * 4 + 3].width, pyr[i * 4 + 3].height).data };
}
return [ret];
};
function core(pyr, id, worker_num) {
var cascade = this.shared.cascade;
var interval = this.shared.interval;
var scale = this.shared.scale;
var next = this.shared.next;
var scale_upto = this.shared.scale_upto;
var i, j, k, x, y, q;
var scale_x = 1, scale_y = 1;
var dx = [0, 1, 0, 1];
var dy = [0, 0, 1, 1];
var seq = [];
for (i = 0; i < scale_upto; i++) {
var qw = pyr[i * 4 + next * 8].width - Math.floor(cascade.width / 4);
var qh = pyr[i * 4 + next * 8].height - Math.floor(cascade.height / 4);
var step = [pyr[i * 4].width * 4, pyr[i * 4 + next * 4].width * 4, pyr[i * 4 + next * 8].width * 4];
var paddings = [pyr[i * 4].width * 16 - qw * 16,
pyr[i * 4 + next * 4].width * 8 - qw * 8,
pyr[i * 4 + next * 8].width * 4 - qw * 4];
for (j = 0; j < cascade.stage_classifier.length; j++) {
var orig_feature = cascade.stage_classifier[j].orig_feature;
var feature = cascade.stage_classifier[j].feature = new Array(cascade.stage_classifier[j].count);
for (k = 0; k < cascade.stage_classifier[j].count; k++) {
feature[k] = {"size" : orig_feature[k].size,
"px" : new Array(orig_feature[k].size),
"pz" : new Array(orig_feature[k].size),
"nx" : new Array(orig_feature[k].size),
"nz" : new Array(orig_feature[k].size)};
for (q = 0; q < orig_feature[k].size; q++) {
feature[k].px[q] = orig_feature[k].px[q] * 4 + orig_feature[k].py[q] * step[orig_feature[k].pz[q]];
feature[k].pz[q] = orig_feature[k].pz[q];
feature[k].nx[q] = orig_feature[k].nx[q] * 4 + orig_feature[k].ny[q] * step[orig_feature[k].nz[q]];
feature[k].nz[q] = orig_feature[k].nz[q];
}
}
}
for (q = 0; q < 4; q++) {
var u8 = [pyr[i * 4].data, pyr[i * 4 + next * 4].data, pyr[i * 4 + next * 8 + q].data];
var u8o = [dx[q] * 8 + dy[q] * pyr[i * 4].width * 8, dx[q] * 4 + dy[q] * pyr[i * 4 + next * 4].width * 4, 0];
for (y = 0; y < qh; y++) {
for (x = 0; x < qw; x++) {
var sum = 0;
var flag = true;
for (j = 0; j < cascade.stage_classifier.length; j++) {
sum = 0;
var alpha = cascade.stage_classifier[j].alpha;
var feature = cascade.stage_classifier[j].feature;
for (k = 0; k < cascade.stage_classifier[j].count; k++) {
var feature_k = feature[k];
var p, pmin = u8[feature_k.pz[0]][u8o[feature_k.pz[0]] + feature_k.px[0]];
var n, nmax = u8[feature_k.nz[0]][u8o[feature_k.nz[0]] + feature_k.nx[0]];
if (pmin <= nmax) {
sum += alpha[k * 2];
} else {
var f, shortcut = true;
for (f = 0; f < feature_k.size; f++) {
if (feature_k.pz[f] >= 0) {
p = u8[feature_k.pz[f]][u8o[feature_k.pz[f]] + feature_k.px[f]];
if (p < pmin) {
if (p <= nmax) {
shortcut = false;
break;
}
pmin = p;
}
}
if (feature_k.nz[f] >= 0) {
n = u8[feature_k.nz[f]][u8o[feature_k.nz[f]] + feature_k.nx[f]];
if (n > nmax) {
if (pmin <= n) {
shortcut = false;
break;
}
nmax = n;
}
}
}
sum += (shortcut) ? alpha[k * 2 + 1] : alpha[k * 2];
}
}
if (sum < cascade.stage_classifier[j].threshold) {
flag = false;
break;
}
}
if (flag) {
seq.push({"x" : (x * 4 + dx[q] * 2) * scale_x,
"y" : (y * 4 + dy[q] * 2) * scale_y,
"width" : cascade.width * scale_x,
"height" : cascade.height * scale_y,
"neighbor" : 1,
"confidence" : sum});
}
u8o[0] += 16;
u8o[1] += 8;
u8o[2] += 4;
}
u8o[0] += paddings[0];
u8o[1] += paddings[1];
u8o[2] += paddings[2];
}
}
scale_x *= scale;
scale_y *= scale;
}
return seq;
};
function post(seq) {
var min_neighbors = this.shared.min_neighbors;
var cascade = this.shared.cascade;
var interval = this.shared.interval;
var scale = this.shared.scale;
var next = this.shared.next;
var scale_upto = this.shared.scale_upto;
var i, j;
for (i = 0; i < cascade.stage_classifier.length; i++)
cascade.stage_classifier[i].feature = cascade.stage_classifier[i].orig_feature;
seq = seq[0];
if (!(min_neighbors > 0))
return seq;
else {
var result = ccv.array_group(seq, function (r1, r2) {
var distance = Math.floor(r1.width * 0.25 + 0.5);
return r2.x <= r1.x + distance &&
r2.x >= r1.x - distance &&
r2.y <= r1.y + distance &&
r2.y >= r1.y - distance &&
r2.width <= Math.floor(r1.width * 1.5 + 0.5) &&
Math.floor(r2.width * 1.5 + 0.5) >= r1.width;
});
var ncomp = result.cat;
var idx_seq = result.index;
var comps = new Array(ncomp + 1);
for (i = 0; i < comps.length; i++)
comps[i] = {"neighbors" : 0,
"x" : 0,
"y" : 0,
"width" : 0,
"height" : 0,
"confidence" : 0};
// count number of neighbors
for(i = 0; i < seq.length; i++)
{
var r1 = seq[i];
var idx = idx_seq[i];
if (comps[idx].neighbors == 0)
comps[idx].confidence = r1.confidence;
++comps[idx].neighbors;
comps[idx].x += r1.x;
comps[idx].y += r1.y;
comps[idx].width += r1.width;
comps[idx].height += r1.height;
comps[idx].confidence = Math.max(comps[idx].confidence, r1.confidence);
}
var seq2 = [];
// calculate average bounding box
for(i = 0; i < ncomp; i++)
{
var n = comps[i].neighbors;
if (n >= min_neighbors)
seq2.push({"x" : (comps[i].x * 2 + n) / (2 * n),
"y" : (comps[i].y * 2 + n) / (2 * n),
"width" : (comps[i].width * 2 + n) / (2 * n),
"height" : (comps[i].height * 2 + n) / (2 * n),
"neighbors" : comps[i].neighbors,
"confidence" : comps[i].confidence});
}
var result_seq = [];
// filter out small face rectangles inside large face rectangles
for(i = 0; i < seq2.length; i++)
{
var r1 = seq2[i];
var flag = true;
for(j = 0; j < seq2.length; j++)
{
var r2 = seq2[j];
var distance = Math.floor(r2.width * 0.25 + 0.5);
if(i != j &&
r1.x >= r2.x - distance &&
r1.y >= r2.y - distance &&
r1.x + r1.width <= r2.x + r2.width + distance &&
r1.y + r1.height <= r2.y + r2.height + distance &&
(r2.neighbors > Math.max(3, r1.neighbors) || r1.neighbors < 3))
{
flag = false;
break;
}
}
if(flag)
result_seq.push(r1);
}
return result_seq;
}
};
return { "pre" : pre, "core" : core, "post" : post };
})
}
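// Web Worker entry point: run the requested core function on this worker's
// share of the input and post the result back (stringified when structured
// clone is unavailable).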
onmessage = function (event) {
var data = (typeof event.data == "string") ? JSON.parse(event.data) : event.data;
var scope = { "shared" : data.shared };
var result = parallable.core[data.name].apply(scope, [data.input, data.id, data.worker]);
try {
postMessage(result);
} catch (e) {
postMessage(JSON.stringify(result));
}
}

View File

@ -1,14 +0,0 @@
application: webrtc-demos
version: 1
runtime: python27
api_version: 1
threadsafe: yes
handlers:
- url: .*
script: main.app
secure: always
libraries:
- name: webapp2
version: "2.5.1"

Binary file not shown.


View File

@ -1,36 +0,0 @@
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import webapp2
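# Redirect every request to the corresponding page under samples/js/demos in
# the webrtc svn trunk.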
class PageHandler(webapp2.RequestHandler):
  def get(self):
    base_url = self.request.path
    if self.request.path == '/':
      self.redirect("http://webrtc.googlecode.com/svn/trunk/"
                    + "samples/js/demos/index.html",
                    permanent=True)
    else:
      self.redirect("http://webrtc.googlecode.com/svn/trunk/"
                    + "samples/js/demos"
                    + base_url,
                    permanent=True)


app = webapp2.WSGIApplication([
    (r'/*.*', PageHandler),
], debug=True)

File diff suppressed because one or more lines are too long

View File

@ -1,68 +0,0 @@
//
// A "videopipe" abstraction on top of WebRTC.
//
// The usage of this abstraction:
// var pipe = new VideoPipe(mediastream, handlerFunction);
// handlerFunction = function(mediastream) {
// do_something
// }
// pipe.close();
//
// The VideoPipe will set up 2 PeerConnections, connect them to each
// other, and call handlerFunction when the stream is available in the
// second PeerConnection.
//
function errorHandler(context) {
return function(error) {
trace('Failure in ' + context + ': ' + error.toString());
}
}
function successHandler(context) {
return function() {
trace('Success in ' + context);
}
}
function noAction() {
}
function VideoPipe(stream, handler) {
var servers = null;
var pc1 = new RTCPeerConnection(servers);
var pc2 = new RTCPeerConnection(servers);
pc1.addStream(stream);
pc1.onicecandidate = function(event) {
if (event.candidate) {
pc2.addIceCandidate(new RTCIceCandidate(event.candidate),
noAction, errorHandler('AddIceCandidate'));
}
}
pc2.onicecandidate = function(event) {
if (event.candidate) {
pc1.addIceCandidate(new RTCIceCandidate(event.candidate),
noAction, errorHandler('AddIceCandidate'));
}
}
pc2.onaddstream = function(e) {
handler(e.stream);
}
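// Drive the offer/answer exchange directly between the two in-process
// connections; no external signaling channel is needed.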
pc1.createOffer(function(desc) {
pc1.setLocalDescription(desc);
pc2.setRemoteDescription(desc);
pc2.createAnswer(function(desc2) {
pc2.setLocalDescription(desc2);
pc1.setRemoteDescription(desc2);
}, errorHandler('pc2.createAnswer'));
}, errorHandler('pc1.createOffer'));
this.pc1 = pc1;
this.pc2 = pc2;
}
VideoPipe.prototype.close = function() {
this.pc1.close();
this.pc2.close();
}