Partial revert of "Removing samples directory following move to Github"

Reason: Unfortunately we depend on AppRTC being in this location
for the bots in our Chromium WebRTC waterfalls so I'm reverting
this until we've solved that dependency.

This restores apprtc and adapter.js, which were removed in r5871.

R=phoglund@webrtc.org
TBR=dutton@google.com
BUG=

Review URL: https://webrtc-codereview.appspot.com/11529004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@5873 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
kjellander@webrtc.org 2014-04-09 13:52:24 +00:00
parent 8883a0f47f
commit 52fd65b16a
15 changed files with 1979 additions and 0 deletions

9
samples/js/OWNERS Normal file
View File

@ -0,0 +1,9 @@
braveyao@webrtc.org
dutton@google.com
henrika@webrtc.org
hta@webrtc.org
juberti@webrtc.org
kjellander@webrtc.org
phoglund@webrtc.org
vikasmarwaha@webrtc.org
wu@webrtc.org

View File

@ -0,0 +1,29 @@
# App Engine deployment descriptor for the AppRTC demo.
application: apprtc
version: 6
runtime: python27
threadsafe: true
api_version: 1

# Static assets are served directly by App Engine; every other URL is
# routed to the WSGI app in apprtc.py. 'secure: always' forces HTTPS,
# which getUserMedia requires on modern browsers.
handlers:
- url: /html
  static_dir: html
- url: /images
  static_dir: images
- url: /js
  static_dir: js
- url: /css
  static_dir: css
- url: /.*
  script: apprtc.app
  secure: always

# Channel presence enables the connect/disconnect callbacks
# (/_ah/channel/connected/ and /_ah/channel/disconnected/).
inbound_services:
- channel_presence

# jinja2 is used for rendering index.html / full.html templates.
libraries:
- name: jinja2
  version: latest

482
samples/js/apprtc/apprtc.py Normal file
View File

@ -0,0 +1,482 @@
#!/usr/bin/python2.4
#
# Copyright 2011 Google Inc. All Rights Reserved.
"""WebRTC Demo
This module demonstrates the WebRTC API by implementing a simple video chat app.
"""
import cgi
import logging
import os
import random
import re
import json
import jinja2
import webapp2
import threading
from google.appengine.api import channel
from google.appengine.ext import db
# Jinja templates (index.html, full.html, test pages) are loaded from
# the application directory itself.
jinja_environment = jinja2.Environment(
    loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))

# Lock for syncing DB operation in concurrent requests handling.
# TODO(brave): keeping working on improving performance with thread syncing.
# One possible method for near future is to reduce the message caching.
LOCK = threading.RLock()
def generate_random(length):
    """Return a string of `length` random decimal digits."""
    digits = [random.choice('0123456789') for _ in range(length)]
    return ''.join(digits)
def sanitize(key):
    """Replace every character outside [a-zA-Z0-9-] with '-'.

    Used to turn arbitrary user-supplied room names into safe
    datastore key names.
    """
    return re.sub('[^a-zA-Z0-9\-]', '-', key)
def make_client_id(room, user):
    """Channel client id for a (room, user) pair: '<room id>/<user>'."""
    room_id = room.key().id_or_name()
    return room_id + '/' + user
def get_default_stun_server(user_agent):
    """Pick a default STUN server; Firefox gets Mozilla's server."""
    if 'Firefox' in user_agent:
        return 'stun.services.mozilla.com'
    return 'stun.l.google.com:19302'
def get_preferred_audio_receive_codec():
    """Preferred audio receive codec: Opus at 48 kHz."""
    return 'opus/48000'
def get_preferred_audio_send_codec(user_agent):
    """Preferred audio send codec for this client, or '' (no preference).

    Chrome on Android prefers to send ISAC at 16 kHz.
    """
    is_chrome_on_android = 'Android' in user_agent and 'Chrome' in user_agent
    return 'ISAC/16000' if is_chrome_on_android else ''
def make_pc_config(stun_server, turn_server, ts_pwd):
    """Build the RTCPeerConnection configuration dict.

    TURN (with its credential) is listed before STUN; either may be
    omitted by passing a falsy value.
    """
    servers = []
    if turn_server:
        servers.append({'urls': 'turn:{}'.format(turn_server),
                        'credential': ts_pwd})
    if stun_server:
        servers.append({'urls': 'stun:{}'.format(stun_server)})
    return {'iceServers': servers}
def create_channel(room, user, duration_minutes):
    """Open a GAE Channel for this room/user and return its token."""
    return channel.create_channel(make_client_id(room, user),
                                  duration_minutes)
def make_loopback_answer(message):
    """Turn an offer message into an answer for the loopback test.

    Rewrites the JSON 'type' field and strips the google-ice option
    (escaped SDP line inside the JSON payload).
    """
    answer = message.replace("\"offer\"", "\"answer\"")
    return answer.replace("a=ice-options:google-ice\\r\\n", "")
def handle_message(room, user, message):
    """Route a signaling message from `user` to the other room occupant.

    'bye' also removes the sender from the room. In the loopback case
    (other_user == user) an incoming offer is rewritten into an answer.
    """
    message_obj = json.loads(message)
    other_user = room.get_other_user(user)
    room_key = room.key().id_or_name()
    if message_obj['type'] == 'bye':
        # This would remove the other_user in loopback test too.
        # So check its availability before forwarding Bye message.
        room.remove_user(user)
        logging.info('User ' + user + ' quit from room ' + room_key)
        logging.info('Room ' + room_key + ' has state ' + str(room))
    if other_user and room.has_user(other_user):
        if message_obj['type'] == 'offer':
            # Special case the loopback scenario.
            if other_user == user:
                message = make_loopback_answer(message)
        on_message(room, other_user, message)
    else:
        # For unittest
        on_message(room, user, message)
def get_saved_messages(client_id):
    """Query every Message cached for this client id."""
    return Message.gql("WHERE client_id = :id", id=client_id)
def delete_saved_messages(client_id):
    """Drop every cached message for `client_id` from the datastore."""
    for saved in get_saved_messages(client_id):
        saved.delete()
        logging.info('Deleted the saved message for ' + client_id)
def send_saved_messages(client_id):
    """Deliver, then delete, every cached message for `client_id`."""
    for saved in get_saved_messages(client_id):
        channel.send_message(client_id, saved.msg)
        logging.info('Delivered saved message to ' + client_id)
        saved.delete()
def on_message(room, user, message):
    """Push `message` to `user` if connected; otherwise cache it.

    Cached messages are replayed by send_saved_messages() once the
    user's channel connects.
    """
    client_id = make_client_id(room, user)
    if room.is_connected(user):
        channel.send_message(client_id, message)
        logging.info('Delivered message to user ' + user)
    else:
        # Receiver is not on the channel yet: save for later delivery.
        Message(client_id=client_id, msg=message).put()
        logging.info('Saved message for user ' + user)
def make_media_track_constraints(constraints_string):
    """Parse a 'k=v,k=v' string (or 'true'/'false'/empty) into constraints.

    Empty or 'true' (any case) means unconstrained (True); 'false'
    disables the track. Otherwise keys starting with 'goog' go under
    'optional' and all others under 'mandatory'; malformed pairs are
    logged and skipped.
    """
    if not constraints_string or constraints_string.lower() == 'true':
        return True
    if constraints_string.lower() == 'false':
        return False
    track_constraints = {'mandatory': {}, 'optional': []}
    for constraint_string in constraints_string.split(','):
        pair = constraint_string.split('=')
        if len(pair) != 2:
            logging.error('Ignoring malformed constraint: ' + constraint_string)
            continue
        key, value = pair
        if key.startswith('goog'):
            track_constraints['optional'].append({key: value})
        else:
            track_constraints['mandatory'][key] = value
    return track_constraints
def make_media_stream_constraints(audio, video):
    """Build getUserMedia constraints from the 'audio'/'video' URL params."""
    stream_constraints = {
        'audio': make_media_track_constraints(audio),
        'video': make_media_track_constraints(video),
    }
    logging.info('Applying media constraints: ' + str(stream_constraints))
    return stream_constraints
def maybe_add_constraint(constraints, param, constraint):
    """Append {constraint: bool} to constraints['optional'].

    Only acts when `param` is 'true' or 'false' (case-insensitive);
    any other value leaves `constraints` untouched. Mutates and
    returns the same dict.
    """
    flag = param.lower()
    if flag == 'true':
        constraints['optional'].append({constraint: True})
    elif flag == 'false':
        constraints['optional'].append({constraint: False})
    return constraints
def make_pc_constraints(dtls, dscp, ipv6):
    """PeerConnection constraints from the dtls/dscp/ipv6 URL params."""
    constraints = {'optional': []}
    for param, name in ((dtls, 'DtlsSrtpKeyAgreement'),
                        (dscp, 'googDscp'),
                        (ipv6, 'googIPv6')):
        maybe_add_constraint(constraints, param, name)
    return constraints
def make_offer_constraints():
    """Constraints for createOffer; currently none are set."""
    return {'mandatory': {}, 'optional': []}
def append_url_arguments(request, link):
    """Copy every query argument except 'r' onto `link`.

    `link` is assumed to already contain a query string, so every
    argument is appended with '&'. Values are HTML-escaped.
    """
    for argument in request.arguments():
        if argument == 'r':
            continue
        link += ('&' + cgi.escape(argument, True) + '=' +
                 cgi.escape(request.get(argument), True))
    return link
# This database is to store the messages from the sender client when the
# receiver client is not ready to receive the messages.
# Use TextProperty instead of StringProperty for msg because
# the session description can be more than 500 characters.
class Message(db.Model):
    """A signaling message cached for a client that is not connected yet."""
    # '<room>/<user>' id of the intended recipient.
    client_id = db.StringProperty()
    # The JSON signaling payload.
    msg = db.TextProperty()
class Room(db.Model):
    """All the data we store for a room: up to two users plus their
    channel-connection state."""
    user1 = db.StringProperty()
    user2 = db.StringProperty()
    user1_connected = db.BooleanProperty(default=False)
    user2_connected = db.BooleanProperty(default=False)

    def __str__(self):
        # Debug representation like '[alice-True, bob-False]'.
        text = '['
        if self.user1:
            text += "%s-%r" % (self.user1, self.user1_connected)
        if self.user2:
            text += ", %s-%r" % (self.user2, self.user2_connected)
        return text + ']'

    def get_occupancy(self):
        """Number of occupied user slots (0-2)."""
        return (1 if self.user1 else 0) + (1 if self.user2 else 0)

    def get_other_user(self, user):
        """The peer of `user`, or None when `user` is not in the room."""
        if user == self.user1:
            return self.user2
        if user == self.user2:
            return self.user1
        return None

    def has_user(self, user):
        """Truthy iff the (non-empty) user occupies one of the slots."""
        return (user and (user == self.user1 or user == self.user2))

    def add_user(self, user):
        """Place `user` in the first free slot and persist the room.

        Raises RuntimeError when both slots are taken.
        """
        if not self.user1:
            self.user1 = user
        elif not self.user2:
            self.user2 = user
        else:
            raise RuntimeError('room is full')
        self.put()

    def remove_user(self, user):
        """Drop `user` and their cached messages; delete an empty room."""
        delete_saved_messages(make_client_id(self, user))
        if user == self.user2:
            self.user2 = None
            self.user2_connected = False
        if user == self.user1:
            if self.user2:
                # Promote the remaining occupant into slot 1.
                self.user1 = self.user2
                self.user1_connected = self.user2_connected
                self.user2 = None
                self.user2_connected = False
            else:
                self.user1 = None
                self.user1_connected = False
        if self.get_occupancy() > 0:
            self.put()
        else:
            self.delete()

    def set_connected(self, user):
        """Mark `user`'s channel as connected and persist the room."""
        if user == self.user1:
            self.user1_connected = True
        if user == self.user2:
            self.user2_connected = True
        self.put()

    def is_connected(self, user):
        """Channel-connected flag for `user` (None for a stranger)."""
        if user == self.user1:
            return self.user1_connected
        if user == self.user2:
            return self.user2_connected
@db.transactional
def connect_user_to_room(room_key, user):
    """Transactionally mark `user` as connected in `room_key`.

    Returns the Room entity (possibly None when the room is gone).
    """
    room = Room.get_by_key_name(room_key)
    # Check if room has user in case that disconnect message comes before
    # connect message with unknown reason, observed with local AppEngine SDK.
    if room and room.has_user(user):
        room.set_connected(user)
        logging.info('User ' + user + ' connected to room ' + room_key)
        logging.info('Room ' + room_key + ' has state ' + str(room))
    else:
        logging.warning('Unexpected Connect Message to room ' + room_key)
    return room
class ConnectPage(webapp2.RequestHandler):
    """GAE Channel presence callback: a client's channel connected."""
    def post(self):
        # 'from' carries the channel client id, '<room>/<user>'.
        key = self.request.get('from')
        room_key, user = key.split('/')
        with LOCK:
            room = connect_user_to_room(room_key, user)
            if room and room.has_user(user):
                # Replay any messages cached while the client was offline.
                send_saved_messages(make_client_id(room, user))
class DisconnectPage(webapp2.RequestHandler):
    """GAE Channel presence callback: a client's channel disconnected."""
    def post(self):
        # 'from' carries the channel client id, '<room>/<user>'.
        key = self.request.get('from')
        room_key, user = key.split('/')
        with LOCK:
            room = Room.get_by_key_name(room_key)
            if room and room.has_user(user):
                other_user = room.get_other_user(user)
                room.remove_user(user)
                logging.info('User ' + user + ' removed from room ' + room_key)
                logging.info('Room ' + room_key + ' has state ' + str(room))
                if other_user and other_user != user:
                    # Tell the remaining peer the call is over.
                    channel.send_message(make_client_id(room, other_user),
                                         '{"type":"bye"}')
                    logging.info('Sent BYE to ' + other_user)
        logging.warning('User ' + user + ' disconnected from room ' + room_key)
class MessagePage(webapp2.RequestHandler):
    """Receives signaling messages POSTed by the client (main.js)."""
    def post(self):
        # Body is the raw JSON message; room/user come from the query string.
        message = self.request.body
        room_key = self.request.get('r')
        user = self.request.get('u')
        with LOCK:
            room = Room.get_by_key_name(room_key)
            if room:
                handle_message(room, user, message)
            else:
                logging.warning('Unknown room ' + room_key)
class MainPage(webapp2.RequestHandler):
    """The main UI page, renders the 'index.html' template."""
    def get(self):
        """Renders the main page. When this page is shown, we create a new
        channel to push asynchronous updates to the client."""
        # Append strings to this list to have them thrown up in message boxes. This
        # will also cause the app to fail.
        error_messages = []
        # Get the base url without arguments.
        base_url = self.request.path_url
        user_agent = self.request.headers['User-Agent']
        # 'r' is the room name; sanitized so it is a safe key name.
        room_key = sanitize(self.request.get('r'))
        # 'ss'/'ts'/'tp': STUN server, TURN server, TURN password overrides.
        stun_server = self.request.get('ss')
        if not stun_server:
            stun_server = get_default_stun_server(user_agent)
        turn_server = self.request.get('ts')
        ts_pwd = self.request.get('tp')
        # Use "audio" and "video" to set the media stream constraints. Defined here:
        # http://goo.gl/V7cZg
        #
        # "true" and "false" are recognized and interpreted as bools, for example:
        #   "?audio=true&video=false" (Start an audio-only call.)
        #   "?audio=false" (Start a video-only call.)
        # If unspecified, the stream constraint defaults to True.
        #
        # To specify media track constraints, pass in a comma-separated list of
        # key/value pairs, separated by a "=". Examples:
        #   "?audio=googEchoCancellation=false,googAutoGainControl=true"
        #   (Disable echo cancellation and enable gain control.)
        #
        #   "?video=minWidth=1280,minHeight=720,googNoiseReduction=true"
        #   (Set the minimum resolution to 1280x720 and enable noise reduction.)
        #
        # Keys starting with "goog" will be added to the "optional" key; all others
        # will be added to the "mandatory" key.
        #
        # The audio keys are defined here: talk/app/webrtc/localaudiosource.cc
        # The video keys are defined here: talk/app/webrtc/videosource.cc
        audio = self.request.get('audio')
        video = self.request.get('video')
        # 'hd=true' overrides any explicit video constraints with 720p.
        if self.request.get('hd').lower() == 'true':
            if video:
                message = 'The "hd" parameter has overridden video=' + str(video)
                logging.error(message)
                error_messages.append(message)
            video = 'minWidth=1280,minHeight=720'
        if self.request.get('minre') or self.request.get('maxre'):
            message = ('The "minre" and "maxre" parameters are no longer supported. '
                       'Use "video" instead.')
            logging.error(message)
            error_messages.append(message)
        # 'asc'/'arc': explicit audio send/receive codec overrides.
        audio_send_codec = self.request.get('asc')
        if not audio_send_codec:
            audio_send_codec = get_preferred_audio_send_codec(user_agent)
        audio_receive_codec = self.request.get('arc')
        if not audio_receive_codec:
            audio_receive_codec = get_preferred_audio_receive_codec()
        # Set stereo to false by default.
        stereo = 'false'
        if self.request.get('stereo'):
            stereo = self.request.get('stereo')
        # Options for making pcConstraints
        dtls = self.request.get('dtls')
        dscp = self.request.get('dscp')
        ipv6 = self.request.get('ipv6')
        debug = self.request.get('debug')
        if debug == 'loopback':
            # Set dtls to false as DTLS does not work for loopback.
            dtls = 'false'
        # token_timeout for channel creation, default 30min, max 1 days, min 3min.
        token_timeout = self.request.get_range('tt',
                                               min_value = 3,
                                               max_value = 1440,
                                               default = 30)
        unittest = self.request.get('unittest')
        if unittest:
            # Always create a new room for the unit tests.
            room_key = generate_random(8)
        if not room_key:
            # No room requested: make one up and redirect so the URL is shareable.
            room_key = generate_random(8)
            redirect = '/?r=' + room_key
            redirect = append_url_arguments(self.request, redirect)
            self.redirect(redirect)
            logging.info('Redirecting visitor to base URL to ' + redirect)
            return
        user = None
        initiator = 0
        with LOCK:
            room = Room.get_by_key_name(room_key)
            if not room and debug != "full":
                # New room.
                user = generate_random(8)
                room = Room(key_name = room_key)
                room.add_user(user)
                if debug != 'loopback':
                    initiator = 0
                else:
                    # Loopback: the same user fills both slots and initiates.
                    room.add_user(user)
                    initiator = 1
            elif room and room.get_occupancy() == 1 and debug != 'full':
                # 1 occupant.
                user = generate_random(8)
                room.add_user(user)
                initiator = 1
            else:
                # 2 occupants (full).
                template = jinja_environment.get_template('full.html')
                self.response.out.write(template.render({ 'room_key': room_key }))
                logging.info('Room ' + room_key + ' is full')
                return
        # 'ts=false' disables TURN entirely; otherwise main.js fetches
        # credentials from the CEOD service.
        if turn_server == 'false':
            turn_server = None
            turn_url = ''
        else:
            turn_url = 'https://computeengineondemand.appspot.com/'
            turn_url = turn_url + 'turn?' + 'username=' + user + '&key=4080218913'
        room_link = base_url + '?r=' + room_key
        room_link = append_url_arguments(self.request, room_link)
        token = create_channel(room, user, token_timeout)
        pc_config = make_pc_config(stun_server, turn_server, ts_pwd)
        pc_constraints = make_pc_constraints(dtls, dscp, ipv6)
        offer_constraints = make_offer_constraints()
        media_constraints = make_media_stream_constraints(audio, video)
        # These values are injected into index.html as JS globals.
        template_values = {'error_messages': error_messages,
                           'token': token,
                           'me': user,
                           'room_key': room_key,
                           'room_link': room_link,
                           'initiator': initiator,
                           'pc_config': json.dumps(pc_config),
                           'pc_constraints': json.dumps(pc_constraints),
                           'offer_constraints': json.dumps(offer_constraints),
                           'media_constraints': json.dumps(media_constraints),
                           'turn_url': turn_url,
                           'stereo': stereo,
                           'audio_send_codec': audio_send_codec,
                           'audio_receive_codec': audio_receive_codec
                           }
        if unittest:
            target_page = 'test/test_' + unittest + '.html'
        else:
            target_page = 'index.html'
        template = jinja_environment.get_template(target_page)
        self.response.out.write(template.render(template_values))
        logging.info('User ' + user + ' added to room ' + room_key)
        logging.info('Room ' + room_key + ' has state ' + str(room))
# URL routing: the main page, the client's message POST endpoint, and the
# GAE Channel presence callbacks (enabled by inbound_services in app.yaml).
app = webapp2.WSGIApplication([
    ('/', MainPage),
    ('/message', MessagePage),
    ('/_ah/channel/connected/', ConnectPage),
    ('/_ah/channel/disconnected/', DisconnectPage)
  ], debug=True)

View File

@ -0,0 +1,95 @@
/* AppRTC main stylesheet: a full-screen black "card" that flips (3D
 * rotate) between the local and remote video, with a mini self-view
 * overlay and a status footer. */
a:link { color: #FFFFFF; }
a:visited {color: #FFFFFF; }
html, body {
  background-color: #000000;
  height: 100%;
  font-family: Verdana, Arial, Helvetica, sans-serif;
}
body {
  margin: 0;
  padding: 0;
}
/* Status bar pinned to the bottom of the page. */
footer {
  position: absolute;
  bottom: 0;
  width: 100%;
  height: 28px;
  background-color: #3F3F3F;
  color: #FFFFFF;
  font-size: 13px; font-weight: bold;
  line-height: 28px;
  text-align: center;
}
/* Perspective container for the 3D card flip below. */
#container {
  background-color: #000000;
  position: absolute;
  height: 100%;
  width: 100%;
  margin: 0px auto;
  -webkit-perspective: 1000;
}
#card {
  -webkit-transition-duration: 2s;
  -webkit-transform-style: preserve-3d;
}
/* Local view is mirrored (scale(-1, 1)) like a mirror image. */
#local {
  position: absolute;
  width: 100%;
  transform: scale(-1, 1);
  -webkit-transform: scale(-1, 1);
  -webkit-backface-visibility: hidden;
}
/* Remote view sits on the back face of the card. */
#remote {
  position: absolute;
  width: 100%;
  -webkit-transform: rotateY(180deg);
  -webkit-backface-visibility: hidden;
}
/* Mini self-view shown in the bottom-right corner during a call. */
#mini {
  position: absolute;
  height: 30%;
  width: 30%;
  bottom: 32px;
  right: 4px;
  opacity: 1.0;
  transform: scale(-1, 1);
  -webkit-transform: scale(-1, 1);
}
/* Videos start transparent and fade in once media flows (see main.js). */
#localVideo {
  width: 100%;
  height: 100%;
  opacity: 0;
  -webkit-transition-property: opacity;
  -webkit-transition-duration: 2s;
}
#remoteVideo {
  width: 100%;
  height: 100%;
  opacity: 0;
  -webkit-transition-property: opacity;
  -webkit-transition-duration: 2s;
}
#miniVideo {
  width: 100%;
  height: 100%;
  opacity: 0;
  -webkit-transition-property: opacity;
  -webkit-transition-duration: 2s;
}
#hangup {
  font-size: 13px; font-weight: bold;
  color: #FFFFFF;
  width: 128px;
  height: 24px;
  background-color: #808080;
  border-style: solid;
  border-color: #FFFFFF;
  margin: 2px;
}
/* Debug overlay toggled from main.js; hidden by default. */
#infoDiv {
  position: absolute;
  float: right;
  background-color: grey;
  margin: 2px;
  display: none;
}

View File

@ -0,0 +1,55 @@
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<!-- Rendered by apprtc.py when a room already has two occupants;
     {{room_link}} lets the visitor retry the same room. -->
<html>
<head>
<script src="/_ah/channel/jsapi"></script>
<style type="text/css">
a:link { color: #ffffff; }
a:visited {color: #ffffff; }
html, body {
  background-color: #000000;
  height: 100%;
  font-family:Verdana, Arial, Helvetica, sans-serif;
}
body {
  margin: 0;
  padding: 0;
}
#container {
  position: relative;
  min-height: 100%;
  width: 100%;
  margin: 0px auto;
}
#footer {
  spacing: 4px;
  position: absolute;
  bottom: 0;
  width: 100%;
  height: 28px;
  background-color: #3F3F3F;
  color: rgb(255, 255, 255);
  font-size:13px; font-weight: bold;
  line-height: 28px;
  text-align: center;
}
#logo {
  display: block;
  top:4;
  right:4;
  position:absolute;
  float:right;
  #opacity: 0.8;
}
</style>
</head>
<body>
<div id="container">
  <div id="footer">
  Sorry, this room is full.
  <a href="{{room_link}}">Click here</a> to try again.
  </div>
</div>
<img id="logo" alt="WebRTC" src="images/webrtc_black_20p.png">
</body>
</html>

View File

@ -0,0 +1,11 @@
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<!-- Placeholder help page for the AppRTC demo; content still TODO. -->
<html>
<head>
<meta content="text/html; charset=ISO-8859-1"
 http-equiv="content-type">
<title>WebRtc Demo App Help</title>
</head>
<body>
TODO
</body>
</html>

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.7 KiB

View File

@ -0,0 +1,53 @@
<!DOCTYPE html>
<!-- Main call page. The {{ ... }} values are injected by apprtc.py
     (MainPage.get) via Jinja and consumed by js/main.js. -->
<html>
<head>
<title>WebRTC Reference App</title>
<meta http-equiv="X-UA-Compatible" content="chrome=1"/>
<link rel="canonical" href="{{ room_link }}"/>
<link rel="stylesheet" href="css/main.css" />
<script type="text/javascript" src="/_ah/channel/jsapi"></script>
<script src="/js/main.js"></script>
<!-- Load the polyfill to switch-hit between Chrome and Firefox -->
<script src="/js/adapter.js"></script>
</head>
<body>
<script type="text/javascript">
  // Server-rendered globals read by main.js.
  var errorMessages = {{ error_messages }};
  var channelToken = '{{ token }}';
  var me = '{{ me }}';
  var roomKey = '{{ room_key }}';
  var roomLink = '{{ room_link }}';
  var initiator = {{ initiator }};
  var pcConfig = {{ pc_config | safe }};
  var pcConstraints = {{ pc_constraints | safe }};
  var offerConstraints = {{ offer_constraints | safe }};
  var mediaConstraints = {{ media_constraints | safe }};
  var turnUrl = '{{ turn_url }}';
  var stereo = {{ stereo }};
  var audio_send_codec = '{{ audio_send_codec }}';
  var audio_receive_codec = '{{ audio_receive_codec }}';
  setTimeout(initialize, 1);
</script>
<div id="container" ondblclick="enterFullScreen()">
  <div id="card">
    <div id="local">
      <video id="localVideo" autoplay="autoplay" muted="true"/>
    </div>
    <div id="remote">
      <video id="remoteVideo" autoplay="autoplay">
      </video>
      <div id="mini">
        <video id="miniVideo" autoplay="autoplay" muted="true"/>
      </div>
    </div>
  </div>
</div>
</body>
<footer id="status">
</footer>
<div id="infoDiv"></div>
</html>

View File

@ -0,0 +1 @@
../../base/adapter.js

View File

@ -0,0 +1,763 @@
// Video elements, bound in initialize().
var localVideo;
var miniVideo;
var remoteVideo;
// Local/remote media stream state.
var hasLocalStream;
var localStream;
var remoteStream;
// Signaling (GAE Channel) and peer-connection state.
var channel;
var pc;
var socket;
var xmlhttp;
// Call lifecycle flags; maybeStart() fires once all are satisfied.
var started = false;
var turnDone = false;
var channelReady = false;
var signalingReady = false;
// Messages received before the PeerConnection exists are queued here.
var msgQueue = [];
// Set up audio and video regardless of what devices are present.
var sdpConstraints = {'mandatory': {
                        'OfferToReceiveAudio': true,
                        'OfferToReceiveVideo': true }};
var isVideoMuted = false;
var isAudioMuted = false;
// Types of gathered ICE Candidates.
var gatheredIceCandidateTypes = { Local: {}, Remote: {} };
var infoDivErrors = [];
// Entry point, called from index.html once the server-rendered globals
// (errorMessages, roomKey, initiator, mediaConstraints, ...) are set.
function initialize() {
  if (errorMessages.length > 0) {
    // Fix: declare the loop index locally instead of leaking a global.
    for (var i = 0; i < errorMessages.length; ++i) {
      window.alert(errorMessages[i]);
    }
    return;
  }
  console.log('Initializing; room=' + roomKey + '.');
  // NOTE: 'card' is intentionally left global; transitionToActive() and
  // transitionToWaiting() flip it.
  card = document.getElementById('card');
  localVideo = document.getElementById('localVideo');
  // Reset localVideo display to center.
  localVideo.addEventListener('loadedmetadata', function(){
    window.onresize();});
  miniVideo = document.getElementById('miniVideo');
  remoteVideo = document.getElementById('remoteVideo');
  resetStatus();
  // NOTE: AppRTCClient.java searches & parses this line; update there when
  // changing here.
  openChannel();
  maybeRequestTurn();
  // Caller is always ready to create peerConnection.
  signalingReady = initiator;
  if (mediaConstraints.audio === false &&
      mediaConstraints.video === false) {
    hasLocalStream = false;
    maybeStart();
  } else {
    hasLocalStream = true;
    doGetUserMedia();
  }
}
// Open the GAE Channel using the token injected by the server.
function openChannel() {
  console.log('Opening channel.');
  var channel = new goog.appengine.Channel(channelToken);
  var handler = {
    'onopen': onChannelOpened,
    'onmessage': onChannelMessage,
    'onerror': onChannelError,
    'onclose': onChannelClosed
  };
  socket = channel.open(handler);
}

// Fetch TURN credentials from the CEOD service unless TURN is disabled,
// already configured, or this origin is not authorized for CEOD.
function maybeRequestTurn() {
  // Allow to skip turn by passing ts=false to apprtc.
  if (turnUrl == '') {
    turnDone = true;
    return;
  }
  // A TURN server is already present in pcConfig: nothing to fetch.
  for (var i = 0, len = pcConfig.iceServers.length; i < len; i++) {
    if (pcConfig.iceServers[i].urls.substr(0, 5) === 'turn:') {
      turnDone = true;
      return;
    }
  }
  var currentDomain = document.domain;
  if (currentDomain.search('localhost') === -1 &&
      currentDomain.search('apprtc') === -1) {
    // Not authorized domain. Try with default STUN instead.
    turnDone = true;
    return;
  }
  // No TURN server. Get one from computeengineondemand.appspot.com.
  xmlhttp = new XMLHttpRequest();
  xmlhttp.onreadystatechange = onTurnResult;
  xmlhttp.open('GET', turnUrl, true);
  xmlhttp.send();
}
// XHR callback for the TURN credential request; on success merges the
// returned servers into pcConfig, then unblocks maybeStart().
function onTurnResult() {
  if (xmlhttp.readyState !== 4)
    return;
  if (xmlhttp.status === 200) {
    var turnServer = JSON.parse(xmlhttp.responseText);
    // Create turnUris using the polyfill (adapter.js).
    var iceServers = createIceServers(turnServer.uris,
                                      turnServer.username,
                                      turnServer.password);
    if (iceServers !== null) {
      pcConfig.iceServers = pcConfig.iceServers.concat(iceServers);
    }
  } else {
    messageError('No TURN server; unlikely that media will traverse networks. '
                 + 'If this persists please report it to '
                 + 'discuss-webrtc@googlegroups.com.');
  }
  // If TURN request failed, continue the call with default STUN.
  turnDone = true;
  maybeStart();
}

// Show the invite link (callee side) or an initializing notice (caller).
function resetStatus() {
  if (!initiator) {
    setStatus('Waiting for someone to join: \
 <a href=' + roomLink + '>' + roomLink + '</a>');
  } else {
    setStatus('Initializing...');
  }
}
// Request camera/microphone access with the server-provided constraints.
function doGetUserMedia() {
  // Call into getUserMedia via the polyfill (adapter.js).
  try {
    getUserMedia(mediaConstraints, onUserMediaSuccess,
                 onUserMediaError);
    console.log('Requested access to local media with mediaConstraints:\n' +
                ' \'' + JSON.stringify(mediaConstraints) + '\'');
  } catch (e) {
    alert('getUserMedia() failed. Is this a WebRTC capable browser?');
    messageError('getUserMedia failed with exception: ' + e.message);
  }
}

// Build the RTCPeerConnection (global 'pc') and hook up its callbacks.
function createPeerConnection() {
  try {
    // Create an RTCPeerConnection via the polyfill (adapter.js).
    pc = new RTCPeerConnection(pcConfig, pcConstraints);
    pc.onicecandidate = onIceCandidate;
    console.log('Created RTCPeerConnnection with:\n' +
                '  config: \'' + JSON.stringify(pcConfig) + '\';\n' +
                '  constraints: \'' + JSON.stringify(pcConstraints) + '\'.');
  } catch (e) {
    messageError('Failed to create PeerConnection, exception: ' + e.message);
    alert('Cannot create RTCPeerConnection object; \
 WebRTC is not supported by this browser.');
    return;
  }
  pc.onaddstream = onRemoteStreamAdded;
  pc.onremovestream = onRemoteStreamRemoved;
  pc.onsignalingstatechange = onSignalingStateChanged;
  pc.oniceconnectionstatechange = onIceConnectionStateChanged;
}
// Start the call once signaling, the channel, TURN, and (if requested)
// the local stream are all ready. Idempotent via the 'started' flag.
function maybeStart() {
  if (!started && signalingReady && channelReady && turnDone &&
      (localStream || !hasLocalStream)) {
    setStatus('Connecting...');
    console.log('Creating PeerConnection.');
    createPeerConnection();
    if (hasLocalStream) {
      console.log('Adding local stream.');
      pc.addStream(localStream);
    } else {
      console.log('Not sending any stream.');
    }
    started = true;
    if (initiator)
      doCall();
    else
      calleeStart();
  }
}

// Render HTML into the footer status bar.
function setStatus(state) {
  document.getElementById('status').innerHTML = state;
}

// Caller side: create and send the SDP offer.
function doCall() {
  var constraints = mergeConstraints(offerConstraints, sdpConstraints);
  console.log('Sending offer to peer, with constraints: \n' +
              '  \'' + JSON.stringify(constraints) + '\'.')
  pc.createOffer(setLocalAndSendMessage,
                 onCreateSessionDescriptionError, constraints);
}

function calleeStart() {
  // Callee starts to process cached offer and other messages.
  while (msgQueue.length > 0) {
    processSignalingMessage(msgQueue.shift());
  }
}

// Callee side: create and send the SDP answer.
function doAnswer() {
  console.log('Sending answer to peer.');
  pc.createAnswer(setLocalAndSendMessage,
                  onCreateSessionDescriptionError, sdpConstraints);
}
// Merge cons2 into cons1 and return it (cons1 is mutated): mandatory
// keys are copied over, optional entries are appended.
function mergeConstraints(cons1, cons2) {
  var merged = cons1;
  for (var name in cons2.mandatory) {
    merged.mandatory[name] = cons2.mandatory[name];
  }
  // Bug fix: Array.prototype.concat returns a new array; the original
  // discarded the result, silently dropping cons2's optional entries.
  merged.optional = merged.optional.concat(cons2.optional);
  return merged;
}
// createOffer/createAnswer success callback: tweak the SDP, apply it
// locally, and forward it to the peer via the server.
function setLocalAndSendMessage(sessionDescription) {
  sessionDescription.sdp = maybePreferAudioReceiveCodec(sessionDescription.sdp);
  pc.setLocalDescription(sessionDescription,
       onSetSessionDescriptionSuccess, onSetSessionDescriptionError);
  sendMessage(sessionDescription);
}

// Apply a remote offer/answer, with codec/stereo munging first.
function setRemote(message) {
  // Set Opus in Stereo, if stereo enabled.
  if (stereo)
    message.sdp = addStereo(message.sdp);
  message.sdp = maybePreferAudioSendCodec(message.sdp);
  pc.setRemoteDescription(new RTCSessionDescription(message),
       onSetRemoteDescriptionSuccess, onSetSessionDescriptionError);

  function onSetRemoteDescriptionSuccess() {
    console.log("Set remote session description success.");
    // By now all addstream events for the setRemoteDescription have fired.
    // So we can know if the peer is sending any stream or is only receiving.
    if (remoteStream) {
      waitForRemoteVideo();
    } else {
      console.log("Not receiving any stream.");
      transitionToActive();
    }
  }
}

// POST a signaling message to the server for relay to the peer.
function sendMessage(message) {
  var msgString = JSON.stringify(message);
  console.log('C->S: ' + msgString);
  // NOTE: AppRTCClient.java searches & parses this line; update there when
  // changing here.
  // NOTE(review): 'path' is assigned without 'var' and leaks to global
  // scope — consider declaring it locally; confirm nothing depends on it.
  path = '/message?r=' + roomKey + '&u=' + me;
  var xhr = new XMLHttpRequest();
  xhr.open('POST', path, true);
  xhr.send(msgString);
}
// Dispatch a decoded signaling message (offer/answer/candidate/bye).
function processSignalingMessage(message) {
  if (!started) {
    messageError('peerConnection has not been created yet!');
    return;
  }
  if (message.type === 'offer') {
    setRemote(message);
    doAnswer();
  } else if (message.type === 'answer') {
    setRemote(message);
  } else if (message.type === 'candidate') {
    var candidate = new RTCIceCandidate({sdpMLineIndex: message.label,
                                         candidate: message.candidate});
    noteIceCandidate("Remote", iceCandidateType(message.candidate));
    pc.addIceCandidate(candidate,
         onAddIceCandidateSuccess, onAddIceCandidateError);
  } else if (message.type === 'bye') {
    onRemoteHangup();
  }
}

function onAddIceCandidateSuccess() {
  console.log('AddIceCandidate success.');
}

function onAddIceCandidateError(error) {
  messageError('Failed to add Ice Candidate: ' + error.toString());
}

function onChannelOpened() {
  console.log('Channel opened.');
  channelReady = true;
  maybeStart();
}

// Channel message handler; queues messages until the PeerConnection
// exists on the callee side.
function onChannelMessage(message) {
  console.log('S->C: ' + message.data);
  var msg = JSON.parse(message.data);
  // Since the turn response is async and also GAE might disorder the
  // Message delivery due to possible datastore query at server side,
  // So callee needs to cache messages before peerConnection is created.
  if (!initiator && !started) {
    if (msg.type === 'offer') {
      // Add offer to the beginning of msgQueue, since we can't handle
      // Early candidates before offer at present.
      msgQueue.unshift(msg);
      // Callee creates PeerConnection
      signalingReady = true;
      maybeStart();
    } else {
      msgQueue.push(msg);
    }
  } else {
    processSignalingMessage(msg);
  }
}

function onChannelError() {
  messageError('Channel error.');
}

function onChannelClosed() {
  console.log('Channel closed.');
}

// Log an error and surface it in the debug info overlay.
function messageError(msg) {
  console.log(msg);
  infoDivErrors.push(msg);
  updateInfoDiv();
}
// getUserMedia success: show the local preview and try to start the call.
function onUserMediaSuccess(stream) {
  console.log('User has granted access to local media.');
  // Call the polyfill wrapper to attach the media stream to this element.
  attachMediaStream(localVideo, stream);
  localVideo.style.opacity = 1;
  localStream = stream;
  // Caller creates PeerConnection.
  maybeStart();
}

// getUserMedia failure: continue receive-only.
function onUserMediaError(error) {
  messageError('Failed to get access to local media. Error code was ' +
               error.code + '. Continuing without sending a stream.');
  alert('Failed to get access to local media. Error code was ' +
        error.code + '. Continuing without sending a stream.');
  hasLocalStream = false;
  maybeStart();
}

function onCreateSessionDescriptionError(error) {
  messageError('Failed to create session description: ' + error.toString());
}

function onSetSessionDescriptionSuccess() {
  console.log('Set session description success.');
}

function onSetSessionDescriptionError(error) {
  messageError('Failed to set session description: ' + error.toString());
}
// Classify a candidate SDP line by the 'typ' token it carries:
// relay -> TURN, srflx -> STUN, host -> HOST, anything else -> UNKNOWN.
function iceCandidateType(candidateSDP) {
  var markers = {
    'typ relay ': 'TURN',
    'typ srflx ': 'STUN',
    'typ host ': 'HOST'
  };
  for (var marker in markers) {
    if (candidateSDP.indexOf(marker) >= 0) {
      return markers[marker];
    }
  }
  return 'UNKNOWN';
}
// Forward each locally gathered ICE candidate to the peer.
function onIceCandidate(event) {
  if (event.candidate) {
    sendMessage({type: 'candidate',
                 label: event.candidate.sdpMLineIndex,
                 id: event.candidate.sdpMid,
                 candidate: event.candidate.candidate});
    noteIceCandidate("Local", iceCandidateType(event.candidate.candidate));
  } else {
    console.log('End of candidates.');
  }
}

function onRemoteStreamAdded(event) {
  console.log('Remote stream added.');
  attachMediaStream(remoteVideo, event.stream);
  remoteStream = event.stream;
}

function onRemoteStreamRemoved(event) {
  console.log('Remote stream removed.');
}

function onSignalingStateChanged(event) {
  updateInfoDiv();
}

function onIceConnectionStateChanged(event) {
  updateInfoDiv();
}

// Local hangup: tear everything down and close the channel socket.
function onHangup() {
  console.log('Hanging up.');
  transitionToDone();
  localStream.stop();
  stop();
  // will trigger BYE from server
  socket.close();
}

// Peer hung up: go back to the waiting state (we become the callee).
function onRemoteHangup() {
  console.log('Session terminated.');
  initiator = 0;
  transitionToWaiting();
  stop();
}

// Reset call state and close the PeerConnection.
function stop() {
  started = false;
  signalingReady = false;
  isAudioMuted = false;
  isVideoMuted = false;
  pc.close();
  pc = null;
  remoteStream = null;
  msgQueue.length = 0;
}
// Poll until the remote video actually starts rendering (or the stream
// has no video track), then flip the card to the active view.
function waitForRemoteVideo() {
  // Call the getVideoTracks method via adapter.js.
  // Fix: declare videoTracks locally instead of leaking a global.
  var videoTracks = remoteStream.getVideoTracks();
  if (videoTracks.length === 0 || remoteVideo.currentTime > 0) {
    transitionToActive();
  } else {
    setTimeout(waitForRemoteVideo, 100);
  }
}
// Flip the UI into the in-call state: local video moves to the mini view,
// the card flips to reveal the remote video, and the status line shows a
// hang-up button.
function transitionToActive() {
  reattachMediaStream(miniVideo, localVideo);
  remoteVideo.style.opacity = 1;
  card.style.webkitTransform = 'rotateY(180deg)';
  // Clear the full-size local video only after the flip has begun.
  setTimeout(function() { localVideo.src = ''; }, 500);
  setTimeout(function() { miniVideo.style.opacity = 1; }, 1000);
  // Reset window display according to the aspect ratio of remote video.
  window.onresize();
  setStatus('<input type=\'button\' id=\'hangup\' value=\'Hang up\' \
      onclick=\'onHangup()\' />');
}
// Flip the UI back to the waiting state: undo the card flip, move the mini
// video back into the full-size local element, and restore the room status.
function transitionToWaiting() {
  card.style.webkitTransform = 'rotateY(0deg)';
  // Swap the sources back once the flip animation has started.
  setTimeout(function() {
    localVideo.src = miniVideo.src;
    miniVideo.src = '';
    remoteVideo.src = '' }, 500);
  miniVideo.style.opacity = 0;
  remoteVideo.style.opacity = 0;
  resetStatus();
}

// Hide all video elements and offer a link to rejoin the room.
function transitionToDone() {
  localVideo.style.opacity = 0;
  remoteVideo.style.opacity = 0;
  miniVideo.style.opacity = 0;
  setStatus('You have left the call. <a href=' + roomLink + '>\
      Click here</a> to rejoin.');
}
// Switch the demo container to full screen (WebKit-prefixed API).
function enterFullScreen() {
  container.webkitRequestFullScreen();
}

// Record that a candidate of |type| was seen at |location| and refresh the
// info box the first time each (location, type) pair shows up.
function noteIceCandidate(location, type) {
  var seen = gatheredIceCandidateTypes[location];
  if (!seen[type]) {
    seen[type] = 1;
    updateInfoDiv();
  }
}

// Shorthand for the debug info box element.
function getInfoDiv() {
  return document.getElementById("infoDiv");
}
// Rebuild the debug info box contents: gathered candidate types for each
// endpoint, current PeerConnection state (when a pc exists), and any queued
// error messages. The box is forced visible whenever errors are pending.
function updateInfoDiv() {
  var contents = "<pre>Gathered ICE Candidates\n";
  for (var endpoint in gatheredIceCandidateTypes) {
    contents += endpoint + ":\n";
    for (var type in gatheredIceCandidateTypes[endpoint])
      contents += "  " + type + "\n";
  }
  if (pc != null) {
    contents += "Gathering: " + pc.iceGatheringState + "\n";
    contents += "</pre>\n";
    contents += "<pre>PC State:\n";
    contents += "Signaling: " + pc.signalingState + "\n";
    contents += "ICE: " + pc.iceConnectionState + "\n";
  }
  var div = getInfoDiv();
  div.innerHTML = contents + "</pre>";
  // infoDivErrors is an array; each entry becomes a highlighted paragraph.
  for (var msg in infoDivErrors) {
    div.innerHTML += '<p style="background-color: red; color: yellow;">' +
        infoDivErrors[msg] + '</p>';
  }
  if (infoDivErrors.length)
    showInfoDiv();
}
// Hide the info box if it is currently shown; otherwise show it.
function toggleInfoDiv() {
  var div = getInfoDiv();
  var visible = (div.style.display == "block");
  if (visible) {
    div.style.display = "none";
  } else {
    showInfoDiv();
  }
}

// Force the info box to be visible.
function showInfoDiv() {
  getInfoDiv().style.display = "block";
}
// Toggle the enabled state of every local video track (mute/unmute video).
// No-op, with a log, when the local stream has no video track.
// Fixes: 'videoTracks' and the loop index 'i' were implicit globals; the
// two near-identical loops are merged.
function toggleVideoMute() {
  // Call the getVideoTracks method via adapter.js.
  var videoTracks = localStream.getVideoTracks();
  if (videoTracks.length === 0) {
    console.log('No local video available.');
    return;
  }
  // When currently muted, we are about to unmute (enable) and vice versa.
  var enable = isVideoMuted;
  for (var i = 0; i < videoTracks.length; i++) {
    videoTracks[i].enabled = enable;
  }
  console.log(enable ? 'Video unmuted.' : 'Video muted.');
  isVideoMuted = !isVideoMuted;
}
// Toggle the enabled state of every local audio track (mute/unmute audio).
// No-op, with a log, when the local stream has no audio track.
// Fixes: 'audioTracks' and the loop index 'i' were implicit globals; the
// two near-identical loops are merged.
function toggleAudioMute() {
  // Call the getAudioTracks method via adapter.js.
  var audioTracks = localStream.getAudioTracks();
  if (audioTracks.length === 0) {
    console.log('No local audio available.');
    return;
  }
  // When currently muted, we are about to unmute (enable) and vice versa.
  var enable = isAudioMuted;
  for (var i = 0; i < audioTracks.length; i++) {
    audioTracks[i].enabled = enable;
  }
  console.log(enable ? 'Audio unmuted.' : 'Audio muted.');
  isAudioMuted = !isAudioMuted;
}
// Mac: hotkey is Command.
// Non-Mac: hotkey is Control.
// <hotkey>-D: toggle audio mute.
// <hotkey>-E: toggle video mute.
// <hotkey>-I: toggle Info box.
// Return false to screen out original Chrome shortcuts.
document.onkeydown = function(event) {
  var isMac = navigator.appVersion.indexOf('Mac') != -1;
  var hotkey = isMac ? event.metaKey : event.ctrlKey;
  if (!hotkey)
    return;
  if (event.keyCode == 68) {        // 'D'
    toggleAudioMute();
    return false;
  }
  if (event.keyCode == 69) {        // 'E'
    toggleVideoMute();
    return false;
  }
  if (event.keyCode == 73) {        // 'I'
    toggleInfoDiv();
    return false;
  }
  return;
}
// Apply the configured audio send-codec preference to |sdp|, if any.
function maybePreferAudioSendCodec(sdp) {
  if (audio_send_codec != '') {
    console.log('Prefer audio send codec: ' + audio_send_codec);
    return preferAudioCodec(sdp, audio_send_codec);
  }
  console.log('No preference on audio send codec.');
  return sdp;
}

// Apply the configured audio receive-codec preference to |sdp|, if any.
function maybePreferAudioReceiveCodec(sdp) {
  if (audio_receive_codec != '') {
    console.log('Prefer audio receive codec: ' + audio_receive_codec);
    return preferAudioCodec(sdp, audio_receive_codec);
  }
  console.log('No preference on audio receive codec.');
  return sdp;
}
// Set |codec| as the default audio codec if it's present.
// The format of |codec| is 'NAME/RATE', e.g. 'opus/48000'.
// Returns the (possibly rewritten) SDP string.
function preferAudioCodec(sdp, codec) {
  var fields = codec.split('/');
  if (fields.length != 2) {
    console.log('Invalid codec setting: ' + codec);
    return sdp;
  }
  var name = fields[0];
  var rate = fields[1];
  var sdpLines = sdp.split('\r\n');

  // Search for m line.
  // Fix: initialize to null. The original declared mLineIndex inside the
  // loop, so when no m=audio line existed it stayed undefined, the
  // (mLineIndex === null) check below did not fire, and removeCN() crashed
  // on sdpLines[undefined].
  var mLineIndex = null;
  for (var i = 0; i < sdpLines.length; i++) {
    if (sdpLines[i].search('m=audio') !== -1) {
      mLineIndex = i;
      break;
    }
  }
  if (mLineIndex === null)
    return sdp;

  // If the codec is available, set it as the default in m line.
  for (var i = 0; i < sdpLines.length; i++) {
    if (sdpLines[i].search(name + '/' + rate) !== -1) {
      var regexp = new RegExp(':(\\d+) ' + name + '\\/' + rate, 'i');
      var payload = extractSdp(sdpLines[i], regexp);
      if (payload)
        sdpLines[mLineIndex] = setDefaultCodec(sdpLines[mLineIndex],
                                               payload);
      break;
    }
  }

  // Remove CN in m line and sdp.
  sdpLines = removeCN(sdpLines, mLineIndex);

  sdp = sdpLines.join('\r\n');
  return sdp;
}
// Set Opus in stereo if stereo is enabled: append ' stereo=1' to the
// a=fmtp line that matches the opus payload id. Returns the SDP unchanged
// when opus or its fmtp line is absent.
// Fix: opusPayload and fmtpLineIndex were declared inside their loops, so
// a miss left them undefined; (fmtpLineIndex === null) then failed and the
// concat below crashed on sdpLines[undefined].
function addStereo(sdp) {
  var sdpLines = sdp.split('\r\n');

  // Find opus payload.
  var opusPayload = null;
  for (var i = 0; i < sdpLines.length; i++) {
    if (sdpLines[i].search('opus/48000') !== -1) {
      opusPayload = extractSdp(sdpLines[i], /:(\d+) opus\/48000/i);
      break;
    }
  }

  // Find the payload in fmtp line.
  var fmtpLineIndex = null;
  for (var i = 0; i < sdpLines.length; i++) {
    if (sdpLines[i].search('a=fmtp') !== -1) {
      var payload = extractSdp(sdpLines[i], /a=fmtp:(\d+)/ );
      // Require a real payload id so a failed extraction (null) cannot
      // spuriously pair with a missing opus payload.
      if (payload && payload === opusPayload) {
        fmtpLineIndex = i;
        break;
      }
    }
  }
  // No fmtp line found.
  if (fmtpLineIndex === null)
    return sdp;

  // Append stereo=1 to fmtp line.
  sdpLines[fmtpLineIndex] = sdpLines[fmtpLineIndex].concat(' stereo=1');

  sdp = sdpLines.join('\r\n');
  return sdp;
}
// Return the first capture group of |pattern| in |sdpLine|, or null when
// the pattern does not match (or has no single capture group).
function extractSdp(sdpLine, pattern) {
  var match = sdpLine.match(pattern);
  if (match && match.length == 2) {
    return match[1];
  }
  return null;
}
// Return a copy of |mLine| with |payload| moved to the front of the
// payload list. In an m= line ("m=<media> <port> <proto> <fmt> ...") the
// payload formats start at the fourth field.
function setDefaultCodec(mLine, payload) {
  var fields = mLine.split(' ');
  var rebuilt = [];
  for (var i = 0; i < fields.length; i++) {
    // Insert the preferred payload right after media/port/proto.
    if (rebuilt.length === 3)
      rebuilt.push(payload);
    // Skip the payload's original position to avoid duplicating it.
    if (fields[i] !== payload)
      rebuilt.push(fields[i]);
  }
  return rebuilt.join(' ');
}
// Strip CN (comfort noise) from the SDP before CN constraints are ready:
// delete every "a=rtpmap:<id> CN/<rate>" line from |sdpLines| and drop the
// matching payload ids from the m line at |mLineIndex|.
// NOTE(review): assumes CN rtpmap lines appear after the m line; removing
// a line before mLineIndex would leave the index stale — confirm callers.
function removeCN(sdpLines, mLineIndex) {
  var mLineElements = sdpLines[mLineIndex].split(' ');
  // Scan from end for the convenience of removing an item.
  for (var i = sdpLines.length - 1; i >= 0; i--) {
    var payload = extractSdp(sdpLines[i], /a=rtpmap:(\d+) CN\/\d+/i);
    if (!payload)
      continue;
    // Remove CN payload from m line, when present.
    var cnPos = mLineElements.indexOf(payload);
    if (cnPos !== -1)
      mLineElements.splice(cnPos, 1);
    // Remove CN line in sdp.
    sdpLines.splice(i, 1);
  }
  sdpLines[mLineIndex] = mLineElements.join(' ');
  return sdpLines;
}
// Send BYE on refreshing (or leaving) the demo page so the server cleans
// the room up for the next session.
window.onbeforeunload = function() {
  var bye = {type: 'bye'};
  sendMessage(bye);
}
// Center the currently visible video in the window, preserving its aspect
// ratio. Fixes: 'containerDiv' was an implicit global (missing 'var'), and
// the size computation mixed cached locals with window.inner* reads; in
// this handler this === window, so the locals are equivalent.
window.onresize = function() {
  // Pick the aspect ratio of whichever video is currently shown.
  var aspectRatio;
  if (remoteVideo.style.opacity === '1') {
    aspectRatio = remoteVideo.videoWidth / remoteVideo.videoHeight;
  } else if (localVideo.style.opacity === '1') {
    aspectRatio = localVideo.videoWidth / localVideo.videoHeight;
  } else {
    return;
  }
  var innerHeight = this.innerHeight;
  var innerWidth = this.innerWidth;
  // Largest aspect-correct rectangle that fits in the window.
  var videoWidth = innerWidth < aspectRatio * innerHeight ?
      innerWidth : aspectRatio * innerHeight;
  var videoHeight = innerHeight < innerWidth / aspectRatio ?
      innerHeight : innerWidth / aspectRatio;
  var containerDiv = document.getElementById('container');
  containerDiv.style.width = videoWidth + 'px';
  containerDiv.style.height = videoHeight + 'px';
  containerDiv.style.left = (innerWidth - videoWidth) / 2 + 'px';
  containerDiv.style.top = (innerHeight - videoHeight) / 2 + 'px';
};

View File

@ -0,0 +1,93 @@
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<!--This is the test page for the message channel.
To run this test:
?debug=loopback&unittest=channel
-->
<html>
<head>
<link rel="canonical" href="{{ room_link }}"/>
<meta http-equiv="X-UA-Compatible" content="chrome=1"/>
<script src="/_ah/channel/jsapi"></script>
<script type="text/javascript">
  // The test POSTs |expected_message_num| large messages to the server and
  // expects the App Engine channel to echo all of them back, in order.
  var channel;
  var pc;
  var socket;
  var expected_message_num = 8;
  var receive = 0;  // How many messages have been echoed back so far.
  var test_msg =
      '01234567890abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ';
  var msg_larger_than_500 = "";

  // Append |txt| to the visible on-page log.
  function trace(txt) {
    // This function is used for logging.
    var elem = document.getElementById("debug");
    elem.innerHTML += txt + "<br>";
  }

  // Entry point (body onload). Deliberately sends the first four messages
  // BEFORE opening the channel and the rest after, to exercise the
  // server-side message caching path.
  // NOTE(review): this local 'var channel' shadows the global above;
  // appears intentional but confirm.
  function runTest() {
    trace("Initializing; room={{ room_key }}.");
    var channel = new goog.appengine.Channel('{{ token }}');
    var handler = {
      'onopen': onChannelOpened,
      'onmessage': onChannelMessage,
      'onerror': onChannelError,
      'onclose': onChannelClosed
    };
    // Build a payload larger than 500 bytes (9 copies of the 63-char seed).
    for (i = 0; i < 9; ++i) {
      msg_larger_than_500 += test_msg;
    }
    for (i = 0; i < 4; ++i) {
      sendMessage({type: 'test', msgid: i, msg: msg_larger_than_500});
    }
    trace('channel.open');
    socket = channel.open(handler);
    for (i = 4; i < expected_message_num; ++i) {
      sendMessage({type: 'test', msgid: i, msg: msg_larger_than_500});
    }
  }

  // POST |message| as JSON to this room/user's /message handler.
  function sendMessage(message) {
    var msgString = JSON.stringify(message);
    trace('C->S: ' + msgString);
    path = '/message?r={{ room_key }}' + '&u={{ me }}';
    var xhr = new XMLHttpRequest();
    xhr.open('POST', path, true);
    xhr.send(msgString);
  }

  function onChannelOpened() {
    trace('Channel opened.');
  }

  // Verify each echoed message arrives in msgid order; declare pass once
  // all expected messages are in, fail on any extras.
  function onChannelMessage(message) {
    if (message.data != JSON.stringify({type: 'test', msgid: receive,
                                        msg: msg_larger_than_500})) {
      trace('ERROR: Expect: ' + receive + ' Actual: ' + message.data);
    } else {
      trace('S->C: ' + message.data);
    }
    ++receive;
    if (receive == expected_message_num) {
      trace('Received all the ' + expected_message_num + ' messages.');
      trace('Test passed!');
    } else if (receive > expected_message_num) {
      trace('Received more than expected message');
      trace('Test failed!');
    }
  }

  function onChannelError() {
    trace('Channel error.');
  }

  function onChannelClosed() {
    trace('Channel closed.');
  }
</script>
</head>
<body onload="runTest()">
<pre id="debug"></pre>
</body>
</html>

View File

@ -0,0 +1,9 @@
This script contains a simple prober that verifies that:
- CEOD vends TURN server URIs with credentials on demand (mimicking apprtc)
- rfc5766-turn-server vends TURN candidates from the servers vended by CEOD.
To use, simply run ./turn-prober.sh
If it prints "PASS" (and exits 0) then all is well.
If it prints a mess of logs (and exits non-zero) then something has gone
sideways and apprtc.appspot.com is probably not working well (because of
missing TURN functionality).

View File

@ -0,0 +1,132 @@
<html>
<head>
<script>
// Prober page: fetches TURN server credentials from CEOD, then verifies
// that each vended server produces a relay candidate. Logs "DONE: PASS"
// or "DONE: FAIL ..." to the console when finished (turn-prober.sh scans
// the Chrome log for that marker).
var CEOD_URL = ("https://computeengineondemand.appspot.com/turn?" +
                "username=1234&key=5678");
var xmlhttp = null;
var turnServers = [];  // Array of {turnUri, username, password}.
// The next two arrays' entries correspond 1:1 to turnServers.
var gotTurn = [];  // Entries are null (not done), "PASS", or "FAIL"
var pcs = [];  // Entries are RTCPeerConnection objects.

// Test is done; log & replace document body with an appropriate message.
function finish(msg) {
  msg = "DONE: " + msg;
  console.log(msg);
  document.body.innerHTML = msg;
}

// Handle created offer SDP: apply it locally to start ICE gathering.
function offerHandler(i, c) {
  var pc = pcs[i];
  pc.setLocalDescription(c,
                         function() {},
                         function(e) {console.log("sLD error: " + e); });
  pc = null;
}

// Handle SDP offer creation error.
function offerError(i, e) {
  console.log("createOffer error: " + e);
  checkForCompletion(i, "FAIL (offerError)");
}

// Register a terminal condition |msg| for the |index|'th server and
// terminate the test with an appropriate message if all servers are done.
function checkForCompletion(index, msg) {
  gotTurn[index] = msg;
  var pass = true;
  for (var i = 0; i < gotTurn.length; ++i) {
    if (!gotTurn[i])
      return;
    if (gotTurn[i] != "PASS") {
      pass = false;
      // Don't "break" because a later still-null gotTurn value should let
      // us wait more.
    }
  }
  if (pass) {
    finish("PASS");
  } else {
    finish("FAIL: " + JSON.stringify(gotTurn));
  }
}

// Make sure we don't wait forever for TURN to complete.
function nanny(i) {
  if (!gotTurn[i]) {
    checkForCompletion(i, "FAIL (TURN server failed to respond)");
  }
}

// Handle incoming ICE candidate |c| from |turnServers[i]|. A null/empty
// candidate marks end-of-gathering and settles this server's verdict.
function onIceCandidate(i, c) {
  var pc = pcs[i];
  if (!c || !c.candidate) {
    checkForCompletion(
        i, !gotTurn[i] ? "FAIL (no TURN candidate)" :
            (gotTurn[i] == "PASS") ? "PASS" : gotTurn[i]);
    return;
  }
  if (c.candidate.candidate.indexOf(" typ relay ") >= 0) {
    gotTurn[i] = "PASS";
  }
}

// Kick off the test.
function go() {
  xmlhttp = new XMLHttpRequest();
  xmlhttp.onreadystatechange = onTurnResult;
  xmlhttp.open('GET', CEOD_URL, true);
  xmlhttp.send();
}

// Handle the XMLHttpRequest's response: create one PeerConnection per
// usable TURN URI and start an offer to trigger candidate gathering.
function onTurnResult() {
  if (xmlhttp.readyState != 4)
    return;
  if (xmlhttp.status != 200) {
    finish("FAIL (no TURN server)");
    return;
  }
  var turnServer = JSON.parse(xmlhttp.responseText);
  // NOTE(review): 'i' below is an implicit global (missing 'var').
  for (i = 0; i < turnServer.uris.length; i++) {
    if (turnServer.uris[i].indexOf(":3479?") >= 0) {
      // Why does CEOD vend useless port 3479 URIs?
      continue;
    }
    console.log("Adding to test: " +
                [turnServer.uris[i], turnServer.username,
                 turnServer.password]);
    gotTurn.push(null);
    pcs.push(new webkitRTCPeerConnection({
      "iceServers": [{
        "url": turnServer.uris[i],
        "username": turnServer.username,
        "credential": turnServer.password
      }]
    }));
    var index = pcs.length - 1;
    var pc = pcs[index];
    if (!pc) {
      checkForCompletion(index, "FAIL (PeerConnection ctor failed)");
      continue;
    }
    // Immediately-invoked wrappers bind |index| so the async callbacks
    // target the right server slot.
    pc.onicecandidate = (
        function(p) { return function(c) { onIceCandidate(p, c); } })(index);
    pc.createOffer(
        (function(p) { return function(o) { offerHandler(p, o); } })(index),
        (function(p) { return function(e) { offerError(p, e); } })(index),
        {'mandatory': { 'OfferToReceiveAudio': true } });
    // Give each server 10 seconds to produce a relay candidate.
    window.setTimeout(
        (function(p) { return function() { nanny(p); } })(index), 10000);
  }
}
</script>
</head>
<body onload="go()">
</body>
</html>

View File

@ -0,0 +1,49 @@
#!/bin/bash -e

# PIDs of the chrome processes started against our temp profile dir $D.
# (The c[h]rome pattern keeps grep itself out of the match.)
function chrome_pids() {
  ps axuwww|grep $D|grep c[h]rome|awk '{print $2}'
}

cd $(dirname $0)

# Throwaway profile directory; also holds the Chrome log we scan below.
export D=$(mktemp -d)

CHROME_LOG_FILE="${D}/chrome_debug.log"
touch $CHROME_LOG_FILE

# Run under a virtual X display unless one is already available.
XVFB="xvfb-run -a -e $CHROME_LOG_FILE -s '-screen 0 1024x768x24'"
if [ -n "$DISPLAY" ]; then
  XVFB=""
fi

# "eval" below is required by $XVFB containing a quoted argument.
eval $XVFB chrome \
  --enable-logging=stderr \
  --no-first-run \
  --disable-web-security \
  --user-data-dir=$D \
  --vmodule="*media/*=3,*turn*=3" \
  "file://${PWD}/turn-prober.html" > $CHROME_LOG_FILE 2>&1 &
CHROME_PID=$!

# Wait until turn-prober.html logs its DONE marker, or Chrome exits.
while ! grep -q DONE $CHROME_LOG_FILE && chrome_pids|grep -q .; do
  sleep 0.1
done

# Suppress bash's Killed message for the chrome above.
exec 3>&2
exec 2>/dev/null
while [ ! -z "$(chrome_pids)" ]; do
  kill -9 $(chrome_pids)
done
exec 2>&3
exec 3>&-

# Pass only if the page logged "DONE: PASS"; otherwise dump the log.
DONE=$(grep DONE $CHROME_LOG_FILE)
EXIT_CODE=0
if ! grep -q "DONE: PASS" $CHROME_LOG_FILE; then
  cat $CHROME_LOG_FILE
  EXIT_CODE=1
fi

rm -rf $D
exit $EXIT_CODE

198
samples/js/base/adapter.js Normal file
View File

@ -0,0 +1,198 @@
// Cross-browser WebRTC shims; each is assigned below according to the
// detected browser (Firefox or Chrome).
var RTCPeerConnection = null;
var getUserMedia = null;
var attachMediaStream = null;
var reattachMediaStream = null;
var webrtcDetectedBrowser = null;
var webrtcDetectedVersion = null;

// Log |text| to the console prefixed with a seconds-since-page-load
// timestamp, trimming one trailing newline if present.
function trace(text) {
  // This function is used for logging.
  if (text[text.length - 1] == '\n') {
    text = text.substring(0, text.length - 1);
  }
  console.log((performance.now() / 1000).toFixed(3) + ": " + text);
}
// Browsers that predate the 'urls' field of RTCIceServer only understand
// the legacy singular 'url'; rewrite each iceServers entry in place.
// A null/undefined |pcConfig| is left untouched.
function maybeFixConfiguration(pcConfig) {
  if (pcConfig == null) {
    return;
  }
  var servers = pcConfig.iceServers;
  for (var i = 0; i < servers.length; i++) {
    var server = servers[i];
    if (server.hasOwnProperty('urls')) {
      server['url'] = server['urls'];
      delete server['urls'];
    }
  }
}
// Detect the browser from its prefixed getUserMedia and install the
// matching WebRTC shims. Note the 'var RTCPeerConnection' declarations
// inside each branch hoist to this script's top level and intentionally
// replace the placeholder defined above.
if (navigator.mozGetUserMedia) {
  console.log("This appears to be Firefox");

  webrtcDetectedBrowser = "firefox";
  webrtcDetectedVersion =
      parseInt(navigator.userAgent.match(/Firefox\/([0-9]+)\./)[1], 10);

  // The RTCPeerConnection object.
  var RTCPeerConnection = function(pcConfig, pcConstraints) {
    // .urls is not supported in FF yet.
    maybeFixConfiguration(pcConfig);
    return new mozRTCPeerConnection(pcConfig, pcConstraints);
  }

  // The RTCSessionDescription object.
  RTCSessionDescription = mozRTCSessionDescription;

  // The RTCIceCandidate object.
  RTCIceCandidate = mozRTCIceCandidate;

  // Get UserMedia (only difference is the prefix).
  // Code from Adam Barth.
  getUserMedia = navigator.mozGetUserMedia.bind(navigator);
  navigator.getUserMedia = getUserMedia;

  // Creates iceServer from the url for FF.
  createIceServer = function(url, username, password) {
    var iceServer = null;
    var url_parts = url.split(':');
    if (url_parts[0].indexOf('stun') === 0) {
      // Create iceServer with stun url.
      iceServer = { 'url': url };
    } else if (url_parts[0].indexOf('turn') === 0) {
      if (webrtcDetectedVersion < 27) {
        // Create iceServer with turn url.
        // Ignore the transport parameter from TURN url for FF version <=27.
        var turn_url_parts = url.split("?");
        // Return null for createIceServer if transport=tcp.
        if (turn_url_parts.length === 1 ||
            turn_url_parts[1].indexOf('transport=udp') === 0) {
          iceServer = {'url': turn_url_parts[0],
                       'credential': password,
                       'username': username};
        }
      } else {
        // FF 27 and above supports transport parameters in TURN url,
        // So passing in the full url to create iceServer.
        iceServer = {'url': url,
                     'credential': password,
                     'username': username};
      }
    }
    return iceServer;
  };

  // Creates an array of iceServers; entries that came back null (e.g.
  // unsupported TURN transports) are dropped.
  createIceServers = function(urls, username, password) {
    var iceServers = [];
    // Use .url for FireFox.
    for (i = 0; i < urls.length; i++) {
      var iceServer = createIceServer(urls[i],
                                      username,
                                      password);
      if (iceServer !== null) {
        iceServers.push(iceServer);
      }
    }
    return iceServers;
  }

  // Attach a media stream to an element.
  attachMediaStream = function(element, stream) {
    console.log("Attaching media stream");
    element.mozSrcObject = stream;
    element.play();
  };

  reattachMediaStream = function(to, from) {
    console.log("Reattaching media stream");
    to.mozSrcObject = from.mozSrcObject;
    to.play();
  };

  // Fake get{Video,Audio}Tracks
  if (!MediaStream.prototype.getVideoTracks) {
    MediaStream.prototype.getVideoTracks = function() {
      return [];
    };
  }

  if (!MediaStream.prototype.getAudioTracks) {
    MediaStream.prototype.getAudioTracks = function() {
      return [];
    };
  }
} else if (navigator.webkitGetUserMedia) {
  console.log("This appears to be Chrome");

  webrtcDetectedBrowser = "chrome";
  webrtcDetectedVersion =
      parseInt(navigator.userAgent.match(/Chrom(e|ium)\/([0-9]+)\./)[2], 10);

  // Creates iceServer from the url for Chrome M33 and earlier.
  createIceServer = function(url, username, password) {
    var iceServer = null;
    var url_parts = url.split(':');
    if (url_parts[0].indexOf('stun') === 0) {
      // Create iceServer with stun url.
      iceServer = { 'url': url };
    } else if (url_parts[0].indexOf('turn') === 0) {
      // Chrome M28 & above uses below TURN format.
      iceServer = {'url': url,
                   'credential': password,
                   'username': username};
    }
    return iceServer;
  };

  // Creates iceServers from the urls for Chrome M34 and above.
  createIceServers = function(urls, username, password) {
    var iceServers = [];
    if (webrtcDetectedVersion >= 34) {
      // .urls is supported since Chrome M34.
      // Note: returns a single RTCIceServer object (not an array) here.
      iceServers = {'urls': urls,
                    'credential': password,
                    'username': username };
    } else {
      for (i = 0; i < urls.length; i++) {
        var iceServer = createIceServer(urls[i],
                                        username,
                                        password);
        if (iceServer !== null) {
          iceServers.push(iceServer);
        }
      }
    }
    return iceServers;
  };

  // The RTCPeerConnection object.
  var RTCPeerConnection = function(pcConfig, pcConstraints) {
    // .urls is supported since Chrome M34.
    if (webrtcDetectedVersion < 34) {
      maybeFixConfiguration(pcConfig);
    }
    return new webkitRTCPeerConnection(pcConfig, pcConstraints);
  }

  // Get UserMedia (only difference is the prefix).
  // Code from Adam Barth.
  getUserMedia = navigator.webkitGetUserMedia.bind(navigator);
  navigator.getUserMedia = getUserMedia;

  // Attach a media stream to an element, preferring srcObject and falling
  // back to an object URL on older Chrome.
  attachMediaStream = function(element, stream) {
    if (typeof element.srcObject !== 'undefined') {
      element.srcObject = stream;
    } else if (typeof element.mozSrcObject !== 'undefined') {
      element.mozSrcObject = stream;
    } else if (typeof element.src !== 'undefined') {
      element.src = URL.createObjectURL(stream);
    } else {
      console.log('Error attaching stream to element.');
    }
  };

  reattachMediaStream = function(to, from) {
    to.src = from.src;
  };
} else {
  console.log("Browser does not appear to be WebRTC-capable");
}