windhamdavid · 4 years ago · parent · commit d71b10c739

app/css/style.min.css: +93 -4

@@ -6766,7 +6766,7 @@ body {
   background-position: center center;
   background-repeat: no-repeat;
   background: -webkit-gradient(radial, center center, 0, center center, 460, from(#cecece), to(#222));
-  background: -webkit-radial-gradient(circle, #cecece, #222);
+  background: -webkit-radial-gradient(circle, #ffffff, #000000);
   background: -moz-radial-gradient(circle, #cecece, #222);
   background: -ms-radial-gradient(circle, #cecece, #222);
 }
@@ -6813,6 +6813,95 @@ div#player{
 	margin: 0 auto;
   line-height:14px;
 }
+input[type=range] {
+  -webkit-appearance: none;
+  margin: 18px 0;
+  width: 100%;
+}
+input[type=range]:focus {
+  outline: none;
+}
+input[type=range]::-webkit-slider-runnable-track {
+  width: 100%;
+  height: 8.4px;
+  cursor: pointer;
+  animate: 0.2s;
+  box-shadow: 0px 3px 8px #aaa, inset 0px 2px 3px #fff;
+  background: #337ab7;
+  border-radius: 1.3px;
+  border: 0.2px solid #010101;
+}
+input[type=range]::-webkit-slider-thumb {
+
+  box-shadow: 0px 3px 8px #aaa, inset 0px 2px 3px #fff;
+  border: 1px solid #333;
+  height: 36px;
+  width: 16px;
+  border-radius: 3px;
+  background: #f7f7f7;
+  cursor: pointer;
+  -webkit-appearance: none;
+  margin-top: -14px;
+}
+input[type=range]:focus::-webkit-slider-runnable-track {
+  background: #367ebd;
+}
+input[type=range]::-moz-range-track {
+  width: 100%;
+  height: 8.4px;
+  cursor: pointer;
+  animate: 0.2s;
+  box-shadow: 1px 1px 1px #000000, 0px 0px 1px #0d0d0d;
+  background: #337ab7;
+  border-radius: 1.3px;
+  border: 0.2px solid #010101;
+}
+input[type=range]::-moz-range-thumb {
+  box-shadow: 1px 1px 1px #000000, 0px 0px 1px #0d0d0d;
+  border: 1px solid #000000;
+  height: 36px;
+  width: 16px;
+  border-radius: 3px;
+  background: #ffffff;
+  cursor: pointer;
+}
+input[type=range]::-ms-track {
+  width: 100%;
+  height: 8.4px;
+  cursor: pointer;
+  animate: 0.2s;
+  background: transparent;
+  border-color: transparent;
+  border-width: 16px 0;
+  color: transparent;
+}
+input[type=range]::-ms-fill-lower {
+  background: #2a6495;
+  border: 0.2px solid #010101;
+  border-radius: 2.6px;
+  box-shadow: 1px 1px 1px #000000, 0px 0px 1px #0d0d0d;
+}
+input[type=range]::-ms-fill-upper {
+  background: #337ab7;
+  border: 0.2px solid #010101;
+  border-radius: 2.6px;
+  box-shadow: 1px 1px 1px #000000, 0px 0px 1px #0d0d0d;
+}
+input[type=range]::-ms-thumb {
+  box-shadow: 1px 1px 1px #000000, 0px 0px 1px #0d0d0d;
+  border: 1px solid #000000;
+  height: 36px;
+  width: 16px;
+  border-radius: 3px;
+  background: #ffffff;
+  cursor: pointer;
+}
+input[type=range]:focus::-ms-fill-lower {
+  background: #3071a9;
+}
+input[type=range]:focus::-ms-fill-upper {
+  background: #367ebd;
+}
 output.volume {
   position: absolute;
   background-image: -moz-linear-gradient(top, #444444, #999999);
@@ -6954,10 +7043,10 @@ li#Lobby_tab {
 
 video {
   background:#000;
-  height: 225px;
-  margin: 0 0 20px 0;
+  width: 100%;
+  margin: 0 0 5px 0;
   vertical-align: top;
-  width: calc(50% - 12px);
+  height: calc(50% - 12px);
 }
 
 

app/img/daveo-header.svg: +0 -25

@@ -4,31 +4,6 @@
 <svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
 	 viewBox="0 0 1280 150" enable-background="new 0 0 1280 150" xml:space="preserve">
 <g id="draw">
-	<path fill="none" stroke="#000000" stroke-width="3" stroke-miterlimit="10" d="M201,83c12-1,23.8,3.3,34.7,8.5
-		c26.7,12.8,50.4,31.6,69.1,54.6c-24.8-22.8-56.8-35.8-88-48.4"/>
-	<path fill="none" stroke="#000000" stroke-width="3" stroke-miterlimit="10" d="M981.9,70.8c-15.2,13.2-25.9,31.6-29.8,51.4
-		c36.5-32.6,82.6-54.2,131-61.3"/>
-	<path fill="none" stroke="#000000" stroke-width="3" stroke-miterlimit="10" d="M98.9,74.1c8,14.8,16,29.7,24,44.5
-		c3.1-41.6,9.2-82.9,18-123.6"/>
-	<path fill="none" stroke="#000000" stroke-width="3" stroke-miterlimit="10" d="M297,34c0,11.2,0,22.3,0,33.5
-		c0,1.8,0.1,3.7,1.2,5.1c2.6,3.2,7.8,0.6,10.9-2.2c26-22.8,52.1-45.5,78.1-68.3"/>
-	<path fill="none" stroke="#000000" stroke-width="3" stroke-miterlimit="10" d="M1118.1,88.1c-0.5,8.8-0.9,17.9,2.3,26.1
-		c2.7,7,7.9,12.9,13.5,17.8c7.7,6.7,17,12.1,27.1,12.9c14.6,1.1,28.4-7.9,37-19.7c8.6-11.8,13-26,17.1-40"/>
-	<path fill="none" stroke="#000000" stroke-width="3" stroke-miterlimit="10" d="M1168.4,38.2c-27.1,20-56.4,37.1-87.2,51
-		c5.7-10.9,11.4-21.7,17.1-32.6c1.7-3.3,3.5-7.2,2.1-10.6c-1-2.4-3.3-3.9-5.5-5.1c-9.3-5-20.1-6.4-30.6-7.4
-		c-24.2-2.3-48.5-2.7-72.8-1.1c-9.4,0.6-19,1.6-27.3,6s-14.9,13.1-14.2,22.4c1,13.7,16.1,21.5,29.4,25.1c29.2,7.7,60.4,7.6,89.4-0.5
-		c-26.2,21.9-57.4,37.8-90.6,46"/>
-	<path fill="none" stroke="#000000" stroke-width="3" stroke-miterlimit="10" d="M78,163.9c-20.4,0.2-40.9-1-61.2-3.4
-		c-0.2-4.8,3.6-8.7,7.4-11.6C38.7,137.4,57,130,67.5,114.8c-9.4,1.6-18.8,3.3-28.2,4.9c14.1-8.8,28.2-17.5,42.3-26.3
-		c5-3.1,10.8-8,9.2-13.7c-21.1,0.6-38.4-3.6-58.9-5.1c-12.3-0.9-24.7-1.3-37.1-1.2c-5.8,0-28-2.5-32.1,1.4
-		c12.9-12,29.6-18.9,46-25.3c29.8-11.6,59.9-22.2,90.3-31.9c-30.5-0.5-61,1.4-91.2,5.9c18.5-14.7,37.8-28.3,58-40.5
-		C40.4-21.4,15.1-10.4-8.5,0.4c5.4-4,10.8-8,16.1-12c0.8,13.9-2,28-7.9,40.5"/>
-	<path fill="none" stroke="#000000" stroke-width="3" stroke-miterlimit="10" d="M1229,26.9c-44.5,0.5-89.1,0.3-133.6-0.5
-		c27-14.3,56.3-24.3,86.5-29.5c-2.1,19.8,0.1,40,6.4,58.8c30.2-13.9,48.1-46.1,76.7-63.1c6.8-4,14.8-7.3,22.5-5.1
-		c1.7,12.2,2.7,24.5,2.9,36.8c0.2,10.1-0.2,20.5-3.3,30.1c-3.2,9.6-9.5,18.6-18.7,22.9c-4.2-24.3-0.7-49.9,9.9-72.3
-		c-17.6,33.6-29.1,70.5-33.7,108.2c-0.2,1.6-0.4,3.3-1.4,4.5c-1.1,1.5-3,2.1-4.7,2.8c-13.9,5.9-20.1,25-12.4,37.9
-		c10-2.5,15.8-12.5,20.6-21.6c8.1-15.6,16.3-31.2,24.4-46.9c-8.4,6.9-15.6,15.2-21.2,24.5c12.7,1.7,25.8,0.1,37.8-4.6
-		c-0.1-6.5-5.4-11.6-10.4-15.7c-30.9-25.3-66.9-44.6-105.1-56.3"/>
 </g>
 <g id="title">
 	<text transform="matrix(1 0 0 1 417.0188 95.1418)" font-family="'Phosphate-Inline'" font-size="72px">Daveo Radio</text>

app/index.html: +42 -36 (file diff suppressed because it is too large)


app/js/radio.min.js: +1 -22 (file diff suppressed because it is too large)


app/js/webrtc-adapter.js: +1575 -0 (file diff suppressed because it is too large)


app/js/webrtc-audio.js: +280 -0

@@ -0,0 +1,280 @@
+/*
+ *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+/* global TimelineDataSeries, TimelineGraphView */
+
+'use strict';
+
+var audio2 = document.querySelector('audio#audio2');
+var callButton = document.querySelector('button#callButton');
+var hangupButton = document.querySelector('button#hangupButton');
+var codecSelector = document.querySelector('select#codec');
+hangupButton.disabled = true;
+callButton.onclick = call;
+hangupButton.onclick = hangup;
+
+var pc1;
+var pc2;
+var localStream;
+
+var bitrateGraph;
+var bitrateSeries;
+
+var packetGraph;
+var packetSeries;
+
+var lastResult;
+
+var offerOptions = {
+  offerToReceiveAudio: 1,
+  offerToReceiveVideo: 0,
+  voiceActivityDetection: false
+};
+
+function gotStream(stream) {
+  trace('Received local stream');
+  localStream = stream;
+  var audioTracks = localStream.getAudioTracks();
+  if (audioTracks.length > 0) {
+    trace('Using Audio device: ' + audioTracks[0].label);
+  }
+  pc1.addStream(localStream);
+  trace('Adding Local Stream to peer connection');
+
+  pc1.createOffer(gotDescription1, onCreateSessionDescriptionError,
+      offerOptions);
+
+  bitrateSeries = new TimelineDataSeries();
+  bitrateGraph = new TimelineGraphView('bitrateGraph', 'bitrateCanvas');
+  bitrateGraph.updateEndDate();
+
+  packetSeries = new TimelineDataSeries();
+  packetGraph = new TimelineGraphView('packetGraph', 'packetCanvas');
+  packetGraph.updateEndDate();
+}
+
+function onCreateSessionDescriptionError(error) {
+  trace('Failed to create session description: ' + error.toString());
+}
+
+function call() {
+  callButton.disabled = true;
+  hangupButton.disabled = false;
+  codecSelector.disabled = true;
+  trace('Starting call');
+  var servers = null;
+  var pcConstraints = {
+    'optional': []
+  };
+  pc1 = new RTCPeerConnection(servers, pcConstraints);
+  trace('Created local peer connection object pc1');
+  pc1.onicecandidate = iceCallback1;
+  pc2 = new RTCPeerConnection(servers, pcConstraints);
+  trace('Created remote peer connection object pc2');
+  pc2.onicecandidate = iceCallback2;
+  pc2.onaddstream = gotRemoteStream;
+  trace('Requesting local stream');
+  navigator.mediaDevices.getUserMedia({
+    audio: true,
+    video: false
+  })
+  .then(gotStream)
+  .catch(function(e) {
+    alert('getUserMedia() error: ' + e.name);
+  });
+}
+
+function gotDescription1(desc) {
+  desc.sdp = forceChosenAudioCodec(desc.sdp);
+  trace('Offer from pc1 \n' + desc.sdp);
+  pc1.setLocalDescription(desc, function() {
+    pc2.setRemoteDescription(desc, function() {
+      // Since the 'remote' side has no media stream we need
+      // to pass in the right constraints in order for it to
+      // accept the incoming offer of audio.
+      pc2.createAnswer(gotDescription2, onCreateSessionDescriptionError);
+    }, onSetSessionDescriptionError);
+  }, onSetSessionDescriptionError);
+}
+
+function gotDescription2(desc) {
+  desc.sdp = forceChosenAudioCodec(desc.sdp);
+  pc2.setLocalDescription(desc, function() {
+    trace('Answer from pc2 \n' + desc.sdp);
+    pc1.setRemoteDescription(desc, function() {
+    }, onSetSessionDescriptionError);
+  }, onSetSessionDescriptionError);
+}
+
+function hangup() {
+  trace('Ending call');
+  localStream.getTracks().forEach(function(track) {
+    track.stop();
+  });
+  pc1.close();
+  pc2.close();
+  pc1 = null;
+  pc2 = null;
+  hangupButton.disabled = true;
+  callButton.disabled = false;
+  codecSelector.disabled = false;
+}
+
+function gotRemoteStream(e) {
+  audio2.srcObject = e.stream;
+  trace('Received remote stream');
+}
+
+function iceCallback1(event) {
+  if (event.candidate) {
+    pc2.addIceCandidate(new RTCIceCandidate(event.candidate),
+        onAddIceCandidateSuccess, onAddIceCandidateError);
+    trace('Local ICE candidate: \n' + event.candidate.candidate);
+  }
+}
+
+function iceCallback2(event) {
+  if (event.candidate) {
+    pc1.addIceCandidate(new RTCIceCandidate(event.candidate),
+        onAddIceCandidateSuccess, onAddIceCandidateError);
+    trace('Remote ICE candidate: \n ' + event.candidate.candidate);
+  }
+}
+
+function onAddIceCandidateSuccess() {
+  trace('AddIceCandidate success.');
+}
+
+function onAddIceCandidateError(error) {
+  trace('Failed to add ICE Candidate: ' + error.toString());
+}
+
+function onSetSessionDescriptionError(error) {
+  trace('Failed to set session description: ' + error.toString());
+}
+
+function forceChosenAudioCodec(sdp) {
+  return maybePreferCodec(sdp, 'audio', 'send', codecSelector.value);
+}
+
+// Copied from AppRTC's sdputils.js:
+
+// Sets |codec| as the default |type| codec if it's present.
+// The format of |codec| is 'NAME/RATE', e.g. 'opus/48000'.
+function maybePreferCodec(sdp, type, dir, codec) {
+  var str = type + ' ' + dir + ' codec';
+  if (codec === '') {
+    trace('No preference on ' + str + '.');
+    return sdp;
+  }
+
+  trace('Prefer ' + str + ': ' + codec);
+
+  var sdpLines = sdp.split('\r\n');
+
+  // Search for m line.
+  var mLineIndex = findLine(sdpLines, 'm=', type);
+  if (mLineIndex === null) {
+    return sdp;
+  }
+
+  // If the codec is available, set it as the default in m line.
+  var codecIndex = findLine(sdpLines, 'a=rtpmap', codec);
+  console.log('codecIndex', codecIndex);
+  if (codecIndex) {
+    var payload = getCodecPayloadType(sdpLines[codecIndex]);
+    if (payload) {
+      sdpLines[mLineIndex] = setDefaultCodec(sdpLines[mLineIndex], payload);
+    }
+  }
+
+  sdp = sdpLines.join('\r\n');
+  return sdp;
+}
+
+// Find the line in sdpLines that starts with |prefix|, and, if specified,
+// contains |substr| (case-insensitive search).
+function findLine(sdpLines, prefix, substr) {
+  return findLineInRange(sdpLines, 0, -1, prefix, substr);
+}
+
+// Find the line in sdpLines[startLine...endLine - 1] that starts with |prefix|
+// and, if specified, contains |substr| (case-insensitive search).
+function findLineInRange(sdpLines, startLine, endLine, prefix, substr) {
+  var realEndLine = endLine !== -1 ? endLine : sdpLines.length;
+  for (var i = startLine; i < realEndLine; ++i) {
+    if (sdpLines[i].indexOf(prefix) === 0) {
+      if (!substr ||
+          sdpLines[i].toLowerCase().indexOf(substr.toLowerCase()) !== -1) {
+        return i;
+      }
+    }
+  }
+  return null;
+}
+
+// Gets the codec payload type from an a=rtpmap:X line.
+function getCodecPayloadType(sdpLine) {
+  var pattern = new RegExp('a=rtpmap:(\\d+) \\w+\\/\\d+');
+  var result = sdpLine.match(pattern);
+  return (result && result.length === 2) ? result[1] : null;
+}
+
+// Returns a new m= line with the specified codec as the first one.
+function setDefaultCodec(mLine, payload) {
+  var elements = mLine.split(' ');
+
+  // Just copy the first three parameters; codec order starts on fourth.
+  var newLine = elements.slice(0, 3);
+
+  // Put target payload first and copy in the rest.
+  newLine.push(payload);
+  for (var i = 3; i < elements.length; i++) {
+    if (elements[i] !== payload) {
+      newLine.push(elements[i]);
+    }
+  }
+  return newLine.join(' ');
+}
+
+// query getStats every second
+window.setInterval(function() {
+  if (!window.pc1) {
+    return;
+  }
+  window.pc1.getStats(null).then(function(res) {
+    Object.keys(res).forEach(function(key) {
+      var report = res[key];
+      var bytes;
+      var packets;
+      var now = report.timestamp;
+      if ((report.type === 'outboundrtp') ||
+          (report.type === 'outbound-rtp') ||
+          (report.type === 'ssrc' && report.bytesSent)) {
+        bytes = report.bytesSent;
+        packets = report.packetsSent;
+        if (lastResult && lastResult[report.id]) {
+          // calculate bitrate
+          var bitrate = 8 * (bytes - lastResult[report.id].bytesSent) /
+              (now - lastResult[report.id].timestamp);
+
+          // append to chart
+          bitrateSeries.addPoint(now, bitrate);
+          bitrateGraph.setDataSeries([bitrateSeries]);
+          bitrateGraph.updateEndDate();
+
+          // calculate number of packets and append to chart
+          packetSeries.addPoint(now, packets -
+              lastResult[report.id].packetsSent);
+          packetGraph.setDataSeries([packetSeries]);
+          packetGraph.updateEndDate();
+        }
+      }
+    });
+    lastResult = res;
+  });
+}, 1000);

app/js/webrtc-video.js: +209 -0

@@ -0,0 +1,209 @@
+/*
+ *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+
+'use strict';
+
+var startButton = document.getElementById('startButton');
+var callButton = document.getElementById('callButton');
+var hangupButton = document.getElementById('hangupButton');
+callButton.disabled = true;
+hangupButton.disabled = true;
+startButton.onclick = start;
+callButton.onclick = call;
+hangupButton.onclick = hangup;
+
+var startTime;
+var localVideo = document.getElementById('localVideo');
+var remoteVideo = document.getElementById('remoteVideo');
+
+localVideo.addEventListener('loadedmetadata', function() {
+  trace('Local video videoWidth: ' + this.videoWidth +
+    'px,  videoHeight: ' + this.videoHeight + 'px');
+});
+
+remoteVideo.addEventListener('loadedmetadata', function() {
+  trace('Remote video videoWidth: ' + this.videoWidth +
+    'px,  videoHeight: ' + this.videoHeight + 'px');
+});
+
+remoteVideo.onresize = function() {
+  trace('Remote video size changed to ' +
+    remoteVideo.videoWidth + 'x' + remoteVideo.videoHeight);
+  // We'll use the first onresize callback as an indication that video has started
+  // playing out.
+  if (startTime) {
+    var elapsedTime = window.performance.now() - startTime;
+    trace('Setup time: ' + elapsedTime.toFixed(3) + 'ms');
+    startTime = null;
+  }
+};
+
+var localStream;
+var pc1;
+var pc2;
+var offerOptions = {
+  offerToReceiveAudio: 1,
+  offerToReceiveVideo: 1
+};
+
+function getName(pc) {
+  return (pc === pc1) ? 'pc1' : 'pc2';
+}
+
+function getOtherPc(pc) {
+  return (pc === pc1) ? pc2 : pc1;
+}
+
+function gotStream(stream) {
+  trace('Received local stream');
+  localVideo.srcObject = stream;
+  localStream = stream;
+  callButton.disabled = false;
+}
+
+function start() {
+  trace('Requesting local stream');
+  startButton.disabled = true;
+  navigator.mediaDevices.getUserMedia({
+    audio: true,
+    video: true
+  })
+  .then(gotStream)
+  .catch(function(e) {
+    alert('getUserMedia() error: ' + e.name);
+  });
+}
+
+function call() {
+  callButton.disabled = true;
+  hangupButton.disabled = false;
+  trace('Starting call');
+  startTime = window.performance.now();
+  var videoTracks = localStream.getVideoTracks();
+  var audioTracks = localStream.getAudioTracks();
+  if (videoTracks.length > 0) {
+    trace('Using video device: ' + videoTracks[0].label);
+  }
+  if (audioTracks.length > 0) {
+    trace('Using audio device: ' + audioTracks[0].label);
+  }
+  var servers = null;
+  pc1 = new RTCPeerConnection(servers);
+  trace('Created local peer connection object pc1');
+  pc1.onicecandidate = function(e) {
+    onIceCandidate(pc1, e);
+  };
+  pc2 = new RTCPeerConnection(servers);
+  trace('Created remote peer connection object pc2');
+  pc2.onicecandidate = function(e) {
+    onIceCandidate(pc2, e);
+  };
+  pc1.oniceconnectionstatechange = function(e) {
+    onIceStateChange(pc1, e);
+  };
+  pc2.oniceconnectionstatechange = function(e) {
+    onIceStateChange(pc2, e);
+  };
+  pc2.onaddstream = gotRemoteStream;
+
+  pc1.addStream(localStream);
+  trace('Added local stream to pc1');
+
+  trace('pc1 createOffer start');
+  pc1.createOffer(onCreateOfferSuccess, onCreateSessionDescriptionError,
+      offerOptions);
+}
+
+function onCreateSessionDescriptionError(error) {
+  trace('Failed to create session description: ' + error.toString());
+}
+
+function onCreateOfferSuccess(desc) {
+  trace('Offer from pc1\n' + desc.sdp);
+  trace('pc1 setLocalDescription start');
+  pc1.setLocalDescription(desc, function() {
+    onSetLocalSuccess(pc1);
+  }, onSetSessionDescriptionError);
+  trace('pc2 setRemoteDescription start');
+  pc2.setRemoteDescription(desc, function() {
+    onSetRemoteSuccess(pc2);
+  }, onSetSessionDescriptionError);
+  trace('pc2 createAnswer start');
+  // Since the 'remote' side has no media stream we need
+  // to pass in the right constraints in order for it to
+  // accept the incoming offer of audio and video.
+  pc2.createAnswer(onCreateAnswerSuccess, onCreateSessionDescriptionError);
+}
+
+function onSetLocalSuccess(pc) {
+  trace(getName(pc) + ' setLocalDescription complete');
+}
+
+function onSetRemoteSuccess(pc) {
+  trace(getName(pc) + ' setRemoteDescription complete');
+}
+
+function onSetSessionDescriptionError(error) {
+  trace('Failed to set session description: ' + error.toString());
+}
+
+function gotRemoteStream(e) {
+  remoteVideo.srcObject = e.stream;
+  trace('pc2 received remote stream');
+}
+
+function onCreateAnswerSuccess(desc) {
+  trace('Answer from pc2:\n' + desc.sdp);
+  trace('pc2 setLocalDescription start');
+  pc2.setLocalDescription(desc, function() {
+    onSetLocalSuccess(pc2);
+  }, onSetSessionDescriptionError);
+  trace('pc1 setRemoteDescription start');
+  pc1.setRemoteDescription(desc, function() {
+    onSetRemoteSuccess(pc1);
+  }, onSetSessionDescriptionError);
+}
+
+function onIceCandidate(pc, event) {
+  if (event.candidate) {
+    getOtherPc(pc).addIceCandidate(new RTCIceCandidate(event.candidate),
+        function() {
+          onAddIceCandidateSuccess(pc);
+        },
+        function(err) {
+          onAddIceCandidateError(pc, err);
+        }
+    );
+    trace(getName(pc) + ' ICE candidate: \n' + event.candidate.candidate);
+  }
+}
+
+function onAddIceCandidateSuccess(pc) {
+  trace(getName(pc) + ' addIceCandidate success');
+}
+
+function onAddIceCandidateError(pc, error) {
+  trace(getName(pc) + ' failed to add ICE Candidate: ' + error.toString());
+}
+
+function onIceStateChange(pc, event) {
+  if (pc) {
+    trace(getName(pc) + ' ICE state: ' + pc.iceConnectionState);
+    console.log('ICE state change event: ', event);
+  }
+}
+
+function hangup() {
+  trace('Ending call');
+  pc1.close();
+  pc2.close();
+  pc1 = null;
+  pc2 = null;
+  hangupButton.disabled = true;
+  callButton.disabled = false;
+}

dump.rdb (BIN)


src/css/main.css: +93 -4

@@ -5,7 +5,7 @@ body {
   background-position: center center;
   background-repeat: no-repeat;
   background: -webkit-gradient(radial, center center, 0, center center, 460, from(#cecece), to(#222));
-  background: -webkit-radial-gradient(circle, #cecece, #222);
+  background: -webkit-radial-gradient(circle, #ffffff, #000000);
   background: -moz-radial-gradient(circle, #cecece, #222);
   background: -ms-radial-gradient(circle, #cecece, #222);
 }
@@ -52,6 +52,95 @@ div#player{
 	margin: 0 auto;
   line-height:14px;
 }
+input[type=range] {
+  -webkit-appearance: none;
+  margin: 18px 0;
+  width: 100%;
+}
+input[type=range]:focus {
+  outline: none;
+}
+input[type=range]::-webkit-slider-runnable-track {
+  width: 100%;
+  height: 8.4px;
+  cursor: pointer;
+  animate: 0.2s;
+  box-shadow: 0px 3px 8px #aaa, inset 0px 2px 3px #fff;
+  background: #337ab7;
+  border-radius: 1.3px;
+  border: 0.2px solid #010101;
+}
+input[type=range]::-webkit-slider-thumb {
+
+  box-shadow: 0px 3px 8px #aaa, inset 0px 2px 3px #fff;
+  border: 1px solid #333;
+  height: 36px;
+  width: 16px;
+  border-radius: 3px;
+  background: #f7f7f7;
+  cursor: pointer;
+  -webkit-appearance: none;
+  margin-top: -14px;
+}
+input[type=range]:focus::-webkit-slider-runnable-track {
+  background: #367ebd;
+}
+input[type=range]::-moz-range-track {
+  width: 100%;
+  height: 8.4px;
+  cursor: pointer;
+  animate: 0.2s;
+  box-shadow: 1px 1px 1px #000000, 0px 0px 1px #0d0d0d;
+  background: #337ab7;
+  border-radius: 1.3px;
+  border: 0.2px solid #010101;
+}
+input[type=range]::-moz-range-thumb {
+  box-shadow: 1px 1px 1px #000000, 0px 0px 1px #0d0d0d;
+  border: 1px solid #000000;
+  height: 36px;
+  width: 16px;
+  border-radius: 3px;
+  background: #ffffff;
+  cursor: pointer;
+}
+input[type=range]::-ms-track {
+  width: 100%;
+  height: 8.4px;
+  cursor: pointer;
+  animate: 0.2s;
+  background: transparent;
+  border-color: transparent;
+  border-width: 16px 0;
+  color: transparent;
+}
+input[type=range]::-ms-fill-lower {
+  background: #2a6495;
+  border: 0.2px solid #010101;
+  border-radius: 2.6px;
+  box-shadow: 1px 1px 1px #000000, 0px 0px 1px #0d0d0d;
+}
+input[type=range]::-ms-fill-upper {
+  background: #337ab7;
+  border: 0.2px solid #010101;
+  border-radius: 2.6px;
+  box-shadow: 1px 1px 1px #000000, 0px 0px 1px #0d0d0d;
+}
+input[type=range]::-ms-thumb {
+  box-shadow: 1px 1px 1px #000000, 0px 0px 1px #0d0d0d;
+  border: 1px solid #000000;
+  height: 36px;
+  width: 16px;
+  border-radius: 3px;
+  background: #ffffff;
+  cursor: pointer;
+}
+input[type=range]:focus::-ms-fill-lower {
+  background: #3071a9;
+}
+input[type=range]:focus::-ms-fill-upper {
+  background: #367ebd;
+}
 output.volume {
   position: absolute;
   background-image: -moz-linear-gradient(top, #444444, #999999);
@@ -193,10 +282,10 @@ li#Lobby_tab {
 
 video {
   background:#000;
-  height: 225px;
-  margin: 0 0 20px 0;
+  width: 100%;
+  margin: 0 0 5px 0;
   vertical-align: top;
-  width: calc(50% - 12px);
+  height: calc(50% - 12px);
 }
 
 

src/index.html: +42 -36 (file diff suppressed because it is too large)


src/js/radio.js: +1 -22 (file diff suppressed because it is too large)


src/js/underscore-1.8.3.js: +1620 -0 (file diff suppressed because it is too large)


src/js/webrtc-adapter.js: +1575 -0 (file diff suppressed because it is too large)


src/js/webrtc-audio.js: +280 -0

@@ -0,0 +1,280 @@
+/*
+ *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+/* global TimelineDataSeries, TimelineGraphView */
+
+'use strict';
+
+var audio2 = document.querySelector('audio#audio2');
+var callButton = document.querySelector('button#callButton');
+var hangupButton = document.querySelector('button#hangupButton');
+var codecSelector = document.querySelector('select#codec');
+hangupButton.disabled = true;
+callButton.onclick = call;
+hangupButton.onclick = hangup;
+
+var pc1;
+var pc2;
+var localStream;
+
+var bitrateGraph;
+var bitrateSeries;
+
+var packetGraph;
+var packetSeries;
+
+var lastResult;
+
+var offerOptions = {
+  offerToReceiveAudio: 1,
+  offerToReceiveVideo: 0,
+  voiceActivityDetection: false
+};
+
+function gotStream(stream) {
+  trace('Received local stream');
+  localStream = stream;
+  var audioTracks = localStream.getAudioTracks();
+  if (audioTracks.length > 0) {
+    trace('Using Audio device: ' + audioTracks[0].label);
+  }
+  pc1.addStream(localStream);
+  trace('Adding Local Stream to peer connection');
+
+  pc1.createOffer(gotDescription1, onCreateSessionDescriptionError,
+      offerOptions);
+
+  bitrateSeries = new TimelineDataSeries();
+  bitrateGraph = new TimelineGraphView('bitrateGraph', 'bitrateCanvas');
+  bitrateGraph.updateEndDate();
+
+  packetSeries = new TimelineDataSeries();
+  packetGraph = new TimelineGraphView('packetGraph', 'packetCanvas');
+  packetGraph.updateEndDate();
+}
+
+function onCreateSessionDescriptionError(error) {
+  trace('Failed to create session description: ' + error.toString());
+}
+
+function call() {
+  callButton.disabled = true;
+  hangupButton.disabled = false;
+  codecSelector.disabled = true;
+  trace('Starting call');
+  var servers = null;
+  var pcConstraints = {
+    'optional': []
+  };
+  pc1 = new RTCPeerConnection(servers, pcConstraints);
+  trace('Created local peer connection object pc1');
+  pc1.onicecandidate = iceCallback1;
+  pc2 = new RTCPeerConnection(servers, pcConstraints);
+  trace('Created remote peer connection object pc2');
+  pc2.onicecandidate = iceCallback2;
+  pc2.onaddstream = gotRemoteStream;
+  trace('Requesting local stream');
+  navigator.mediaDevices.getUserMedia({
+    audio: true,
+    video: false
+  })
+  .then(gotStream)
+  .catch(function(e) {
+    alert('getUserMedia() error: ' + e.name);
+  });
+}
+
+function gotDescription1(desc) {
+  desc.sdp = forceChosenAudioCodec(desc.sdp);
+  trace('Offer from pc1 \n' + desc.sdp);
+  pc1.setLocalDescription(desc, function() {
+    pc2.setRemoteDescription(desc, function() {
+      // Since the 'remote' side has no media stream we need
+      // to pass in the right constraints in order for it to
+      // accept the incoming offer of audio.
+      pc2.createAnswer(gotDescription2, onCreateSessionDescriptionError);
+    }, onSetSessionDescriptionError);
+  }, onSetSessionDescriptionError);
+}
+
+function gotDescription2(desc) {
+  desc.sdp = forceChosenAudioCodec(desc.sdp);
+  pc2.setLocalDescription(desc, function() {
+    trace('Answer from pc2 \n' + desc.sdp);
+    pc1.setRemoteDescription(desc, function() {
+    }, onSetSessionDescriptionError);
+  }, onSetSessionDescriptionError);
+}
+
+function hangup() {
+  trace('Ending call');
+  localStream.getTracks().forEach(function(track) {
+    track.stop();
+  });
+  pc1.close();
+  pc2.close();
+  pc1 = null;
+  pc2 = null;
+  hangupButton.disabled = true;
+  callButton.disabled = false;
+  codecSelector.disabled = false;
+}
+
+function gotRemoteStream(e) {
+  audio2.srcObject = e.stream;
+  trace('Received remote stream');
+}
+
+function iceCallback1(event) {
+  if (event.candidate) {
+    pc2.addIceCandidate(new RTCIceCandidate(event.candidate),
+        onAddIceCandidateSuccess, onAddIceCandidateError);
+    trace('Local ICE candidate: \n' + event.candidate.candidate);
+  }
+}
+
+function iceCallback2(event) {
+  if (event.candidate) {
+    pc1.addIceCandidate(new RTCIceCandidate(event.candidate),
+        onAddIceCandidateSuccess, onAddIceCandidateError);
+    trace('Remote ICE candidate: \n ' + event.candidate.candidate);
+  }
+}
+
+function onAddIceCandidateSuccess() {
+  trace('AddIceCandidate success.');
+}
+
+function onAddIceCandidateError(error) {
+  trace('Failed to add ICE Candidate: ' + error.toString());
+}
+
+function onSetSessionDescriptionError(error) {
+  trace('Failed to set session description: ' + error.toString());
+}
+
+function forceChosenAudioCodec(sdp) {
+  return maybePreferCodec(sdp, 'audio', 'send', codecSelector.value);
+}
+
+// Copied from AppRTC's sdputils.js:
+
+// Sets |codec| as the default |type| codec if it's present.
+// The format of |codec| is 'NAME/RATE', e.g. 'opus/48000'.
+function maybePreferCodec(sdp, type, dir, codec) {
+  var str = type + ' ' + dir + ' codec';
+  if (codec === '') {
+    trace('No preference on ' + str + '.');
+    return sdp;
+  }
+
+  trace('Prefer ' + str + ': ' + codec);
+
+  var sdpLines = sdp.split('\r\n');
+
+  // Search for m line.
+  var mLineIndex = findLine(sdpLines, 'm=', type);
+  if (mLineIndex === null) {
+    return sdp;
+  }
+
+  // If the codec is available, set it as the default in m line.
+  var codecIndex = findLine(sdpLines, 'a=rtpmap', codec);
+  console.log('codecIndex', codecIndex);
+  if (codecIndex) {
+    var payload = getCodecPayloadType(sdpLines[codecIndex]);
+    if (payload) {
+      sdpLines[mLineIndex] = setDefaultCodec(sdpLines[mLineIndex], payload);
+    }
+  }
+
+  sdp = sdpLines.join('\r\n');
+  return sdp;
+}
+
+// Find the line in sdpLines that starts with |prefix|, and, if specified,
+// contains |substr| (case-insensitive search).
+function findLine(sdpLines, prefix, substr) {
+  return findLineInRange(sdpLines, 0, -1, prefix, substr);
+}
+
+// Find the line in sdpLines[startLine...endLine - 1] that starts with |prefix|
+// and, if specified, contains |substr| (case-insensitive search).
+function findLineInRange(sdpLines, startLine, endLine, prefix, substr) {
+  var realEndLine = endLine !== -1 ? endLine : sdpLines.length;
+  for (var i = startLine; i < realEndLine; ++i) {
+    if (sdpLines[i].indexOf(prefix) === 0) {
+      if (!substr ||
+          sdpLines[i].toLowerCase().indexOf(substr.toLowerCase()) !== -1) {
+        return i;
+      }
+    }
+  }
+  return null;
+}
+
+// Gets the codec payload type from an a=rtpmap:X line.
+function getCodecPayloadType(sdpLine) {
+  var pattern = new RegExp('a=rtpmap:(\\d+) \\w+\\/\\d+');
+  var result = sdpLine.match(pattern);
+  return (result && result.length === 2) ? result[1] : null;
+}
+
+// Returns a new m= line with the specified codec as the first one.
+function setDefaultCodec(mLine, payload) {
+  var elements = mLine.split(' ');
+
+  // Just copy the first three parameters; codec order starts on fourth.
+  var newLine = elements.slice(0, 3);
+
+  // Put target payload first and copy in the rest.
+  newLine.push(payload);
+  for (var i = 3; i < elements.length; i++) {
+    if (elements[i] !== payload) {
+      newLine.push(elements[i]);
+    }
+  }
+  return newLine.join(' ');
+}
+
+// query getStats every second
+window.setInterval(function() {
+  if (!window.pc1) {
+    return;
+  }
+  window.pc1.getStats(null).then(function(res) {
+    Object.keys(res).forEach(function(key) {
+      var report = res[key];
+      var bytes;
+      var packets;
+      var now = report.timestamp;
+      if ((report.type === 'outboundrtp') ||
+          (report.type === 'outbound-rtp') ||
+          (report.type === 'ssrc' && report.bytesSent)) {
+        bytes = report.bytesSent;
+        packets = report.packetsSent;
+        if (lastResult && lastResult[report.id]) {
+          // calculate bitrate
+          var bitrate = 8 * (bytes - lastResult[report.id].bytesSent) /
+              (now - lastResult[report.id].timestamp);
+
+          // append to chart
+          bitrateSeries.addPoint(now, bitrate);
+          bitrateGraph.setDataSeries([bitrateSeries]);
+          bitrateGraph.updateEndDate();
+
+          // calculate number of packets and append to chart
+          packetSeries.addPoint(now, packets -
+              lastResult[report.id].packetsSent);
+          packetGraph.setDataSeries([packetSeries]);
+          packetGraph.updateEndDate();
+        }
+      }
+    });
+    lastResult = res;
+  });
+}, 1000);

src/js/webrtc-video.js: +209 -0

@@ -0,0 +1,209 @@
+/*
+ *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree.
+ */
+
+'use strict';
+
+var startButton = document.getElementById('startButton');
+var callButton = document.getElementById('callButton');
+var hangupButton = document.getElementById('hangupButton');
+callButton.disabled = true;
+hangupButton.disabled = true;
+startButton.onclick = start;
+callButton.onclick = call;
+hangupButton.onclick = hangup;
+
+var startTime;
+var localVideo = document.getElementById('localVideo');
+var remoteVideo = document.getElementById('remoteVideo');
+
+localVideo.addEventListener('loadedmetadata', function() {
+  trace('Local video videoWidth: ' + this.videoWidth +
+    'px,  videoHeight: ' + this.videoHeight + 'px');
+});
+
+remoteVideo.addEventListener('loadedmetadata', function() {
+  trace('Remote video videoWidth: ' + this.videoWidth +
+    'px,  videoHeight: ' + this.videoHeight + 'px');
+});
+
+remoteVideo.onresize = function() {
+  trace('Remote video size changed to ' +
+    remoteVideo.videoWidth + 'x' + remoteVideo.videoHeight);
+  // We'll use the first onresize callback as an indication that video has started
+  // playing out.
+  if (startTime) {
+    var elapsedTime = window.performance.now() - startTime;
+    trace('Setup time: ' + elapsedTime.toFixed(3) + 'ms');
+    startTime = null;
+  }
+};
+
+var localStream;
+var pc1;
+var pc2;
+var offerOptions = {
+  offerToReceiveAudio: 1,
+  offerToReceiveVideo: 1
+};
+
+function getName(pc) {
+  return (pc === pc1) ? 'pc1' : 'pc2';
+}
+
+function getOtherPc(pc) {
+  return (pc === pc1) ? pc2 : pc1;
+}
+
+function gotStream(stream) {
+  trace('Received local stream');
+  localVideo.srcObject = stream;
+  localStream = stream;
+  callButton.disabled = false;
+}
+
+function start() {
+  trace('Requesting local stream');
+  startButton.disabled = true;
+  navigator.mediaDevices.getUserMedia({
+    audio: true,
+    video: true
+  })
+  .then(gotStream)
+  .catch(function(e) {
+    alert('getUserMedia() error: ' + e.name);
+  });
+}
+
+function call() {
+  callButton.disabled = true;
+  hangupButton.disabled = false;
+  trace('Starting call');
+  startTime = window.performance.now();
+  var videoTracks = localStream.getVideoTracks();
+  var audioTracks = localStream.getAudioTracks();
+  if (videoTracks.length > 0) {
+    trace('Using video device: ' + videoTracks[0].label);
+  }
+  if (audioTracks.length > 0) {
+    trace('Using audio device: ' + audioTracks[0].label);
+  }
+  var servers = null;
+  pc1 = new RTCPeerConnection(servers);
+  trace('Created local peer connection object pc1');
+  pc1.onicecandidate = function(e) {
+    onIceCandidate(pc1, e);
+  };
+  pc2 = new RTCPeerConnection(servers);
+  trace('Created remote peer connection object pc2');
+  pc2.onicecandidate = function(e) {
+    onIceCandidate(pc2, e);
+  };
+  pc1.oniceconnectionstatechange = function(e) {
+    onIceStateChange(pc1, e);
+  };
+  pc2.oniceconnectionstatechange = function(e) {
+    onIceStateChange(pc2, e);
+  };
+  pc2.onaddstream = gotRemoteStream;
+
+  pc1.addStream(localStream);
+  trace('Added local stream to pc1');
+
+  trace('pc1 createOffer start');
+  pc1.createOffer(onCreateOfferSuccess, onCreateSessionDescriptionError,
+      offerOptions);
+}
+
+function onCreateSessionDescriptionError(error) {
+  trace('Failed to create session description: ' + error.toString());
+}
+
+function onCreateOfferSuccess(desc) {
+  trace('Offer from pc1\n' + desc.sdp);
+  trace('pc1 setLocalDescription start');
+  pc1.setLocalDescription(desc, function() {
+    onSetLocalSuccess(pc1);
+  }, onSetSessionDescriptionError);
+  trace('pc2 setRemoteDescription start');
+  pc2.setRemoteDescription(desc, function() {
+    onSetRemoteSuccess(pc2);
+  }, onSetSessionDescriptionError);
+  trace('pc2 createAnswer start');
+  // Since the 'remote' side has no media stream we need
+  // to pass in the right constraints in order for it to
+  // accept the incoming offer of audio and video.
+  pc2.createAnswer(onCreateAnswerSuccess, onCreateSessionDescriptionError);
+}
+
+function onSetLocalSuccess(pc) {
+  trace(getName(pc) + ' setLocalDescription complete');
+}
+
+function onSetRemoteSuccess(pc) {
+  trace(getName(pc) + ' setRemoteDescription complete');
+}
+
+function onSetSessionDescriptionError(error) {
+  trace('Failed to set session description: ' + error.toString());
+}
+
+function gotRemoteStream(e) {
+  remoteVideo.srcObject = e.stream;
+  trace('pc2 received remote stream');
+}
+
+function onCreateAnswerSuccess(desc) {
+  trace('Answer from pc2:\n' + desc.sdp);
+  trace('pc2 setLocalDescription start');
+  pc2.setLocalDescription(desc, function() {
+    onSetLocalSuccess(pc2);
+  }, onSetSessionDescriptionError);
+  trace('pc1 setRemoteDescription start');
+  pc1.setRemoteDescription(desc, function() {
+    onSetRemoteSuccess(pc1);
+  }, onSetSessionDescriptionError);
+}
+
+function onIceCandidate(pc, event) {
+  if (event.candidate) {
+    getOtherPc(pc).addIceCandidate(new RTCIceCandidate(event.candidate),
+        function() {
+          onAddIceCandidateSuccess(pc);
+        },
+        function(err) {
+          onAddIceCandidateError(pc, err);
+        }
+    );
+    trace(getName(pc) + ' ICE candidate: \n' + event.candidate.candidate);
+  }
+}
+
+function onAddIceCandidateSuccess(pc) {
+  trace(getName(pc) + ' addIceCandidate success');
+}
+
+function onAddIceCandidateError(pc, error) {
+  trace(getName(pc) + ' failed to add ICE Candidate: ' + error.toString());
+}
+
+function onIceStateChange(pc, event) {
+  if (pc) {
+    trace(getName(pc) + ' ICE state: ' + pc.iceConnectionState);
+    console.log('ICE state change event: ', event);
+  }
+}
+
+function hangup() {
+  trace('Ending call');
+  pc1.close();
+  pc2.close();
+  pc1 = null;
+  pc2 = null;
+  hangupButton.disabled = true;
+  callButton.disabled = false;
+}