From ed178a16b156af9d1e1f2d248e95acf66a6af133 Mon Sep 17 00:00:00 2001 From: Muaz Khan Date: Mon, 19 Aug 2013 11:51:24 +0500 Subject: [PATCH] tinny fixes; a little bit updates and some new stuff. --- DataChannel/DataChannel.js | 42 +- DataChannel/README.md | 32 + DataChannel/auto-session-establishment.html | 4 +- DataChannel/index.html | 745 ++++++++---------- DataChannel/simple.html | 452 +++++++++++ Pluginfree-Screen-Sharing/README.md | 3 +- .../conference-ui.js | 74 +- Pluginfree-Screen-Sharing/conference.js | 241 ++++++ Pluginfree-Screen-Sharing/index.html | 311 +++++++- Pluginfree-Screen-Sharing/screen-sharing.js | 673 ---------------- RTCDataConnection/README.md | 4 + RTCMultiConnection/README.md | 76 +- .../RTCMultiConnection-Demos/all-in-one.html | 7 +- .../join-with-or-without-camera.html | 54 +- RTCMultiConnection/RTCMultiConnection-v1.1.js | 2 +- RTCMultiConnection/RTCMultiConnection-v1.2.js | 2 +- RTCMultiConnection/RTCMultiConnection-v1.3.js | 2 +- .../All-in-One.html | 704 ++++++++--------- .../chrome-to-firefox-screen-sharing.html | 63 ++ RTCMultiConnection/RTCMultiConnection-v1.4.js | 57 +- RTCMultiConnection/RTCMultiConnection-v1.5.js | 4 +- RTCMultiConnection/RTCMultiConnection-v1.6.js | 13 +- RTCMultiConnection/index.html | 704 ++++++++--------- RTCPeerConnection/RTCPeerConnection-v1.6.js | 17 +- RTCall/RTCall.js | 30 +- RecordRTC/RecordRTC.js | 4 +- SdpSerializer/SdpSerializer.js | 46 ++ demos/remote-stream-recording.html | 8 +- file-hangout/file-hangout.js | 2 +- file-sharing/index.html | 2 +- meeting/README.md | 26 +- meeting/index.html | 421 +++++----- meeting/meeting.js | 1 + meeting/simple.html | 53 ++ .../index.html | 5 - socket.io/README.md | 2 +- video-conferencing/README.md | 171 +++- video-conferencing/conference.js | 21 +- video-conferencing/index.html | 592 ++++++++------ webrtc-broadcasting/README.md | 199 ++++- webrtc-broadcasting/broadcast-ui.js | 176 ----- webrtc-broadcasting/index.html | 491 ++++++++---- 42 files changed, 3654 insertions(+), 2882 deletions(-) create mode 100644 DataChannel/simple.html rename {video-conferencing => Pluginfree-Screen-Sharing}/conference-ui.js (62%) create mode 100644 Pluginfree-Screen-Sharing/conference.js delete mode 100644 Pluginfree-Screen-Sharing/screen-sharing.js create mode 100644 RTCMultiConnection/RTCMultiConnection-v1.4-Demos/chrome-to-firefox-screen-sharing.html create mode 100644 meeting/simple.html delete mode 100644 webrtc-broadcasting/broadcast-ui.js diff --git a/DataChannel/DataChannel.js b/DataChannel/DataChannel.js index 50582499..bf303af0 100644 --- a/DataChannel/DataChannel.js +++ b/DataChannel/DataChannel.js @@ -93,19 +93,23 @@ if (self.config) return; self.config = { - onroom: function(room) { + ondatachannel: function(room) { if (!dataConnector) { self.room = room; return; } + var tempRoom = { + id: room.roomToken, + owner: room.broadcaster + }; + + if (self.ondatachannel) return self.ondatachannel(tempRoom); + if (self.joinedARoom) return; self.joinedARoom = true; - dataConnector.joinRoom({ - roomToken: room.roomToken, - joinUser: room.broadcaster - }); + self.join(tempRoom); }, onopen: function(userid, _channel) { self.onopen(userid, _channel); @@ -153,7 +157,7 @@ fileReceiver = new FileReceiver(); textReceiver = new TextReceiver(); - if (self.room) self.config.onroom(self.room); + if (self.room) self.config.ondatachannel(self.room); } this.open = function(_channel) { @@ -175,6 +179,18 @@ prepareInit(init); }; + // manually join a room + this.join = function(room) { + if (!room.id || !room.owner) { 
+ throw 'Invalid room info passed.'; + } + + dataConnector.joinRoom({ + roomToken: room.id, + joinUser: room.owner + }); + }; + this.send = function(data, _channel) { if (!data) throw 'No file, data or text message to share.'; if (data.size) @@ -515,7 +531,7 @@ onmessage: function(response) { if (response.userToken == self.userToken) return; - if (isGetNewRoom && response.roomToken && response.broadcaster) config.onroom(response); + if (isGetNewRoom && response.roomToken && response.broadcaster) config.ondatachannel(response); if (response.newParticipant) onNewParticipant(response.newParticipant); @@ -627,7 +643,7 @@ packets = 0; // uuid is used to uniquely identify sending instance - var uuid = getRandomString(); + file.uuid = getRandomString(); var reader = new window.FileReader(); reader.readAsDataURL(file); @@ -636,7 +652,7 @@ function onReadAsDataURL(event, text) { var data = { type: 'file', - uuid: uuid + uuid: file.uuid }; if (event) { @@ -649,7 +665,7 @@ remaining: packets--, length: numberOfPackets, sent: numberOfPackets - packets - }, uuid); + }, file.uuid); if (text.length > packetSize) data.message = text.slice(0, packetSize); else { @@ -657,7 +673,7 @@ data.last = true; data.name = file.name; - if (config.onFileSent) config.onFileSent(file, uuid); + if (config.onFileSent) config.onFileSent(file, file.uuid); } // WebRTC-DataChannels.send(data, privateDataChannel) @@ -709,7 +725,7 @@ // if you don't want to auto-save to disk: // channel.autoSaveToDisk=false; if (root.autoSaveToDisk) - FileSaver.SaveToDisk(virtualURL, data.name); + FileSaver.SaveToDisk(dataURL, data.name); // channel.onFileReceived = function(fileName, file) {} // file.blob || file.dataURL || file.url || file.uuid @@ -885,7 +901,7 @@ credential: 'homeo', username: 'homeo' }; - iceServers.iceServers = [TURN, STUN]; + iceServers.iceServers = [STUN, TURN]; } var optional = { diff --git a/DataChannel/README.md b/DataChannel/README.md index fd80fb55..9b7117f2 100644 --- a/DataChannel/README.md +++ b/DataChannel/README.md @@ -89,6 +89,27 @@ channel.onmessage = function(message, userid, latency) { } = +##### `ondatachannel` + +Allows you show list of all available data channels to the user; and let him choose which one to join: + +```javascript +channel.ondatachannel = function(data_channel) { + channel.join(data_channel); + + // or + channel.join({ + id: data_channel.id, + owner: data_channel.owner + }); + + // id: unique identifier for the session + // owner: unique identifier for the session initiator +}; +``` + += + ##### Use custom user-ids ```javascript @@ -160,6 +181,17 @@ channel.leave(); // closing entire session = +##### `uuid` for files + +You can get `uuid` for each file (being sent) like this: + +```javascript +channel.send(file); +var uuid = file.uuid; // "file"-Dot-uuid +``` + += + ##### To Share files ```javascript diff --git a/DataChannel/auto-session-establishment.html b/DataChannel/auto-session-establishment.html index 45269bfa..444fccb2 100644 --- a/DataChannel/auto-session-establishment.html +++ b/DataChannel/auto-session-establishment.html @@ -298,8 +298,8 @@

Share Files

+ '
' + data + '
'; } - if (!parent) chatOutput.insertBefore(div, chatOutput.firstChild); - else parent.insertBefore(div, parent.firstChild); + if (!parent) chatOutput.appendChild(div, chatOutput.firstChild); + else fileProgress.appendChild(div, fileProgress.firstChild); div.tabIndex = 0; div.focus(); diff --git a/DataChannel/index.html b/DataChannel/index.html index 2e81632f..83aae0cc 100644 --- a/DataChannel/index.html +++ b/DataChannel/index.html @@ -1,452 +1,323 @@ - + + + + DataChannel.js » A WebRTC Library for Data Sharing ® Muaz Khan + + + + + + + + + + + + + + + + +
+
+

+ DataChannel.js + » A WebRTC Library for Data Sharing +

+

+ Copyright © 2013 + Muaz Khan<@muazkh> + » + @WebRTC Experiments + » + Google+ + » + What's New? +

+
+ +
+ +
+
+
+ + +
+
+

Open New DataChannel Connection

+ +
+ +
+ + + + + +
+

Text Chat

+ +
+ +
+

Share Files

+ + +
+
+
+ + + +
+

Latest Updates

+
+
+ +
+

Feedback

+
+ +
+ Enter your email too if you want a "direct" reply! +
+ +
+

DataChannel.js Features:

+
    +
  1. Direct messages — to any user using his `user-id`
  2. +
  3. Eject/Reject any user — using his `user-id`
  4. +
  5. Leave any room (i.e. data session) or close entire session using `leave` method
  6. +
  7. File size is limitless!
  8. +
  9. Text message length is limitless!
  10. +
  11. Size of data is also limitless!
  12. +
  13. Fallback to firebase/socket.io/websockets/etc.
  14. +
  15. Users' presence detection using `onleave`
  16. +
  17. Latency detection
  18. +
  19. Concurrent transmission of multiple long strings/files (see the sketch below)
  20. +
+
+ +
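The list above boils down to a handful of callbacks and methods on a `DataChannel` instance. A minimal sketch using only members that appear elsewhere in this patch (`onopen`, `onmessage` with its latency argument, `onleave`, `leave`); it is an illustration, not code taken from this page:

```javascript
var channel = new DataChannel('features-demo');

// fired once a data port opens for a user
channel.onopen = function(userid) {
    console.log(userid, 'is connected');
};

// latency detection: the third argument reports the measured latency
channel.onmessage = function(message, userid, latency) {
    console.log(userid + ':', message, '(latency: ' + latency + 'ms)');
};

// users' presence detection
channel.onleave = function(userid) {
    console.log(userid, 'left');
};

// close the entire data session when you are done; the README in this patch
// also documents ejecting a single user by his user-id
channel.leave();
```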
+

How to use DataChannel.js?

+
+<script src="https://www.webrtc-experiment.com/DataChannel.js"> </script>
+
+<input type="text" id="chat-input" disabled 
+       style="font-size: 2em; width: 98%;"><br />
+<div id="chat-output"></div>
 
-
-    File Sharing + Text Chat using WebRTC DataChannel ® Muaz Khan
-    
-    
-    
-    
-    
-    
-    
-    
-    
-
-    
-    
-
-
-
-
-↑ - WEBRTC EXPERIMENTS - - -

File Sharing + Text Chat using WebRTC DataChannel -

- -

- Copyright © 2013 - Muaz Khan<@muazkh>. -

-
-
-
-
-

Open Data Channel

- - -

or join:

- -
- - - - - -
-

Text Chat

- -
- -
-

Share Files

- - -
-
- - - - -
-
- -

Getting started with WebRTC DataChannel

-
-<script src="https://www.webrtc-experiment.com/DataChannel.js"></script>
-<script>
-    var channel = new DataChannel();
-
-    // to create/open a new channel
-    channel.open('channel-name');
-
-    // to send text/data or file
-    channel.send(file || data || 'text');
-	
-    // if soemone already created a channel; to join it: use "connect" method
-    channel.connect('channel-name');
 </script>
-
-Remember, A-to-Z, everything is optional! You can set channel-name in constructor or in -open/connect methods. It is your choice! - -
-
- -

Features:

-
    -
  1. Send file directly — of any size
  2. -
  3. Send text-message of any length
  4. -
  5. Send data directly
  6. -
  7. Simplest syntax ever! Same like WebSockets.
  8. -
  9. Supports fallback to socket.io/websockets/etc.
  10. -
  11. Auto users' presence detection
  12. -
  13. Allows you eject any user; or close your entire data session
  14. -
-
-
- -

Additional:

-
+
 <script>
-    // to be alerted on data ports get open
-    channel.onopen = function(userid) {}
-	
-    // to be alerted on data ports get new message
-    channel.onmessage = function(message, userid) {}
-	
-    // by default; connection is [many-to-many]; you can use following directions
-    channel.direction = 'one-to-one';
-    channel.direction = 'one-to-many';
-    channel.direction = 'many-to-many';	// --- it is default
-
-    // show progress bar!
-    channel.onFileProgress = function (packets) {
-        // packets.remaining
-        // packets.sent
-        // packets.received
-        // packets.length
+    var channel = new DataChannel('Session Unique Identifier');
+
+    channel.onopen = function(userid) {
+        chatInput.disabled = false;
+        chatInput.value = 'Hi, ' + userid;
+        chatInput.focus();
     };
 
-    // on file successfully sent
-    channel.onFileSent = function (file) {
-        // file.name
-        // file.size
+    channel.onmessage = function(message, userid) {
+        chatOutput.innerHTML = userid + ': ' + message + '<hr />' 
+                             + chatOutput.innerHTML;
     };
 
-    // on file successfully received
-    channel.onFileReceived = function (fileName) {};
+    channel.onleave = function(userid) {
+        chatOutput.innerHTML = userid + ' Left.<hr />' 
+                             + chatOutput.innerHTML;
+    };
 </script>
 
-
-
- -

Errors Handling

-
-<script>
-    // error to open data ports
-    channel.onerror = function(event) {}
+            
+
+ - // data ports suddenly dropped - channel.onclose = function(event) {} -</script> - -
-
- -

Use your own socket.io for signaling

-
-<script>
-    // by default Firebase is used for signaling; you can override it
-    channel.openSignalingChannel = function(config) {
-        var socket = io.connect('http://your-site:8888');
-        socket.channel = config.channel || this.channel || 'default-channel';
-        socket.on('message', config.onmessage);
-
-        socket.send = function (data) {
-            socket.emit('message', data);
-        };
-
-        if (config.onopen) setTimeout(config.onopen, 1);
-        return socket;
-    }
-</script>
-
-
-
-
-

Feedback

- -
- -
- -
- - - - - \ No newline at end of file + + + + diff --git a/DataChannel/simple.html b/DataChannel/simple.html new file mode 100644 index 00000000..c9f3d158 --- /dev/null +++ b/DataChannel/simple.html @@ -0,0 +1,452 @@ + + + + + File Sharing + Text Chat using WebRTC DataChannel ® Muaz Khan + + + + + + + + + + + + + + + +
+↑ + WEBRTC EXPERIMENTS + + +

File Sharing + Text Chat using WebRTC DataChannel +

+ +

+ Copyright © 2013 + Muaz Khan<@muazkh>. +

+
+
+
+
+

Open Data Channel

+ + +

or join:

+ +
+ + + + + +
+

Text Chat

+ +
+ +
+

Share Files

+ + +
+
+ + + + +
+
+ +

Getting started with WebRTC DataChannel

+
+<script src="https://www.webrtc-experiment.com/DataChannel.js"></script>
+<script>
+    var channel = new DataChannel();
+
+    // to create/open a new channel
+    channel.open('channel-name');
+
+    // to send text/data or file
+    channel.send(file || data || 'text');
+	
+    // if someone already created a channel; to join it, use the "connect" method
+    channel.connect('channel-name');
+</script>
+
+Remember, A-to-Z, everything is optional! You can set the channel name in the constructor or in the +open/connect methods. It is your choice! + +
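For instance, both of the following are equivalent ways to pick the channel name (a small sketch; the name `my-channel` is arbitrary):

```javascript
// name passed to the constructor
var channel = new DataChannel('my-channel');

// ...or set later: the session initiator opens it, everyone else connects to it
var another = new DataChannel();
another.open('my-channel');       // creator
// another.connect('my-channel'); // joiners use connect() instead
```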
+
+ +

Features:

+
    +
  1. Send file directly — of any size
  2. +
  3. Send text messages of any length
  4. +
  5. Send data directly
  6. +
  7. Simplest syntax ever! Same as WebSockets.
  8. +
  9. Supports fallback to socket.io/websockets/etc.
  10. +
  11. Automatic detection of users' presence
  12. +
  13. Allows you to eject any user, or close your entire data session
  14. +
+
+
+ +

Additional:

+
+<script>
+    // to be alerted when data ports get open
+    channel.onopen = function(userid) {}
+	
+    // to be alerted when data ports receive a new message
+    channel.onmessage = function(message, userid) {}
+	
+    // by default, the connection is [many-to-many]; you can use the following directions
+    channel.direction = 'one-to-one';
+    channel.direction = 'one-to-many';
+    channel.direction = 'many-to-many';	// --- it is default
+
+    // show progress bar!
+    channel.onFileProgress = function (packets) {
+        // packets.remaining
+        // packets.sent
+        // packets.received
+        // packets.length
+    };
+
+    // on file successfully sent
+    channel.onFileSent = function (file) {
+        // file.name
+        // file.size
+    };
+
+    // on file successfully received
+    channel.onFileReceived = function (fileName) {};
+</script>
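As a concrete illustration of the "show progress bar!" comment above, the packet counters can drive a standard `<progress>` element. A sketch, not code from this page (the patch also tags each outgoing file with a `uuid`, which could be used to keep one bar per transfer):

```javascript
// a single shared progress bar for simplicity
var bar = document.createElement('progress');
document.body.appendChild(bar);

channel.onFileProgress = function(packets) {
    bar.max = packets.length;
    bar.value = packets.sent || packets.received || 0;
};
```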
+
+
+
+ +

Error Handling

+
+<script>
+    // error while opening data ports
+    channel.onerror = function(event) {}
+	
+    // data ports suddenly dropped
+    channel.onclose = function(event) {}
+</script>
+
+
+
+ +

Use your own socket.io for signaling

+
+<script>
+    // by default Firebase is used for signaling; you can override it
+    channel.openSignalingChannel = function(config) {
+        var socket = io.connect('http://your-site:8888');
+        socket.channel = config.channel || this.channel || 'default-channel';
+        socket.on('message', config.onmessage);
+
+        socket.send = function (data) {
+            socket.emit('message', data);
+        };
+
+        if (config.onopen) setTimeout(config.onopen, 1);
+        return socket;
+    }
+</script>
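The override above expects a signaling server that simply relays `message` events between clients. A hedged sketch of such a server, assuming node.js and the socket.io 0.9.x API of that era (channels are ignored for brevity):

```javascript
var io = require('socket.io').listen(8888);

io.sockets.on('connection', function(socket) {
    socket.on('message', function(data) {
        // forward to every other connected client
        socket.broadcast.emit('message', data);
    });
});
```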
+
+
+
+
+

Feedback

+ +
+ +
+ +
+
+ + + + \ No newline at end of file diff --git a/Pluginfree-Screen-Sharing/README.md b/Pluginfree-Screen-Sharing/README.md index c52e986c..d3b9a649 100644 --- a/Pluginfree-Screen-Sharing/README.md +++ b/Pluginfree-Screen-Sharing/README.md @@ -31,7 +31,8 @@ There is another experiment: [WebRTC Tab Sharing using experimental tabCapture A It is a big wish to share desktop using RTCWeb peer connection APIs but unfortunately currently it is not possible. -Current experiment is using chrome screen sharing APIs which is allows end-users just **view the screen**....nothing else! +Current experiment is using chrome screen sharing APIs (media/constraints) which allows end-users to **view the screen**....nothing else! + = #### Browser Support diff --git a/video-conferencing/conference-ui.js b/Pluginfree-Screen-Sharing/conference-ui.js similarity index 62% rename from video-conferencing/conference-ui.js rename to Pluginfree-Screen-Sharing/conference-ui.js index b3ff657c..f8ae4f19 100644 --- a/video-conferencing/conference-ui.js +++ b/Pluginfree-Screen-Sharing/conference-ui.js @@ -1,11 +1,11 @@ // 2013, @muazkh » github.com/muaz-khan // MIT License » https://webrtc-experiment.appspot.com/licence/ -// Documentation » https://github.com/muaz-khan/WebRTC-Experiment/tree/master/video-conferencing +// Documentation » https://github.com/muaz-khan/WebRTC-Experiment/tree/master/Pluginfree-Screen-Sharing var config = { - openSocket: function(config) { + openSocket: function (config) { var SIGNALING_SERVER = 'https://www.webrtc-experiment.com:8553/', - defaultChannel = location.hash.substr(1) || 'video-conferencing-hangout'; + defaultChannel = location.hash.substr(1) || 'Pluginfree-Screen-Sharing'; var channel = config.channel || defaultChannel; var sender = Math.round(Math.random() * 999999999) + 999999999; @@ -30,22 +30,14 @@ var config = { socket.on('message', config.onmessage); }, - onRemoteStream: function(media) { + onRemoteStream: function (media) { var video = media.video; - video.setAttribute('controls', true); - video.setAttribute('id', media.stream.id); - participants.insertBefore(video, participants.firstChild); - video.play(); rotateVideo(video); }, - onRemoteStreamEnded: function(stream) { - var video = document.getElementById(stream.id); - if (video) video.parentNode.removeChild(video); - }, - onRoomFound: function(room) { + onRoomFound: function (room) { var alreadyExist = document.getElementById(room.broadcaster); if (alreadyExist) return; @@ -54,26 +46,29 @@ var config = { var tr = document.createElement('tr'); tr.setAttribute('id', room.broadcaster); tr.innerHTML = '' + room.roomName + '' + - ''; + ''; roomsList.insertBefore(tr, roomsList.firstChild); - tr.onclick = function() { + tr.onclick = function () { var tr = this; - captureUserMedia(function() { - conferenceUI.joinRoom({ - roomToken: tr.querySelector('.join').id, - joinUser: tr.id - }); + conferenceUI.joinRoom({ + roomToken: tr.querySelector('.join').id, + joinUser: tr.id }); hideUnnecessaryStuff(); }; + }, + onNewParticipant: function (participants) { + var numberOfParticipants = document.getElementById('number-of-participants'); + if (!numberOfParticipants) return; + numberOfParticipants.innerHTML = participants + ' room participants'; } }; function createButtonClickHandler() { - captureUserMedia(function() { + captureUserMedia(function () { conferenceUI.createRoom({ - roomName: (document.getElementById('conference-name') || { }).value || 'Anonymous' + roomName: ((document.getElementById('conference-name') || {}).value || 
'Anonymous') + ' shared screen with you' }); }); hideUnnecessaryStuff(); @@ -85,23 +80,37 @@ function captureUserMedia(callback) { video.setAttribute('controls', true); participants.insertBefore(video, participants.firstChild); + var screen_constraints = { + mandatory: { + chromeMediaSource: 'screen' + }, + optional: [] + }; + var constraints = { + audio: false, + video: screen_constraints + }; getUserMedia({ video: video, - onsuccess: function(stream) { + constraints: constraints, + onsuccess: function (stream) { config.attachStream = stream; callback && callback(); video.setAttribute('muted', true); rotateVideo(video); }, - onerror: function() { - alert('unable to get access to your webcam'); - callback && callback(); + onerror: function () { + if (location.protocol === 'http:') { + alert('Please test this WebRTC experiment on HTTPS.'); + } else { + alert('Screen capturing is either denied or not supported. Are you enabled flag: "Enable screen capture support in getUserMedia"?'); + } } }); } -// You can use! window.onload = function() {} +/* on page load: get public rooms */ var conferenceUI = conference(config); /* UI specific */ @@ -121,14 +130,13 @@ function hideUnnecessaryStuff() { function rotateVideo(video) { video.style[navigator.mozGetUserMedia ? 'transform' : '-webkit-transform'] = 'rotate(0deg)'; - setTimeout(function() { + setTimeout(function () { video.style[navigator.mozGetUserMedia ? 'transform' : '-webkit-transform'] = 'rotate(360deg)'; }, 1000); } -(function() { +(function () { var uniqueToken = document.getElementById('unique-token'); - if (uniqueToken) - if (location.hash.length > 2) uniqueToken.parentNode.parentNode.parentNode.innerHTML = '

Share this link

'; - else uniqueToken.innerHTML = uniqueToken.parentNode.parentNode.href = '#' + (Math.random() * new Date().getTime()).toString(36).toUpperCase().replace( /\./g , '-'); -})(); + if (uniqueToken) if (location.hash.length > 2) uniqueToken.parentNode.parentNode.parentNode.innerHTML = '

Share this link

'; + else uniqueToken.innerHTML = uniqueToken.parentNode.parentNode.href = '#' + (Math.random() * new Date().getTime()).toString(36).toUpperCase().replace(/\./g, '-'); +})(); \ No newline at end of file diff --git a/Pluginfree-Screen-Sharing/conference.js b/Pluginfree-Screen-Sharing/conference.js new file mode 100644 index 00000000..03462129 --- /dev/null +++ b/Pluginfree-Screen-Sharing/conference.js @@ -0,0 +1,241 @@ +/* MIT License: https://webrtc-experiment.appspot.com/licence/ */ + +var conference = function (config) { + var self = { + userToken: uniqueToken() + }, + channels = '--', + isbroadcaster, + isGetNewRoom = true, + participants = 1, + defaultSocket = {}; + + function openDefaultSocket() { + defaultSocket = config.openSocket({ + onmessage: defaultSocketResponse, + callback: function (socket) { + defaultSocket = socket; + } + }); + } + + function defaultSocketResponse(response) { + if (response.userToken == self.userToken) return; + + if (isGetNewRoom && response.roomToken && response.broadcaster) config.onRoomFound(response); + + if (response.newParticipant) onNewParticipant(response.newParticipant); + + if (response.userToken && response.joinUser == self.userToken && response.participant && channels.indexOf(response.userToken) == -1) { + channels += response.userToken + '--'; + openSubSocket({ + isofferer: true, + channel: response.channel || response.userToken, + closeSocket: true + }); + } + } + + function openSubSocket(_config) { + if (!_config.channel) return; + var socketConfig = { + channel: _config.channel, + onmessage: socketResponse, + onopen: function () { + if (isofferer && !peer) initPeer(); + } + }; + + socketConfig.callback = function (_socket) { + socket = _socket; + this.onopen(); + }; + + var socket = config.openSocket(socketConfig), + isofferer = _config.isofferer, + gotstream, + htmlElement = document.createElement('video'), + inner = {}, + peer; + + var peerConfig = { + attachStream: config.attachStream, + onICE: function (candidate) { + socket && socket.send({ + userToken: self.userToken, + candidate: { + sdpMLineIndex: candidate.sdpMLineIndex, + candidate: JSON.stringify(candidate.candidate) + } + }); + }, + onRemoteStream: function (stream) { + htmlElement[moz ? 'mozSrcObject' : 'src'] = moz ? stream : webkitURL.createObjectURL(stream); + htmlElement.play(); + + _config.stream = stream; + onRemoteStreamStartsFlowing(); + } + }; + + function initPeer(offerSDP) { + if (!offerSDP) peerConfig.onOfferSDP = sendsdp; + else { + peerConfig.offerSDP = offerSDP; + peerConfig.onAnswerSDP = sendsdp; + } + peer = RTCPeerConnection(peerConfig); + } + + function onRemoteStreamStartsFlowing() { + if (!(htmlElement.readyState <= HTMLMediaElement.HAVE_CURRENT_DATA || htmlElement.paused || htmlElement.currentTime <= 0)) { + afterRemoteStreamStartedFlowing(); + } else setTimeout(onRemoteStreamStartsFlowing, 50); + } + + function afterRemoteStreamStartedFlowing() { + gotstream = true; + + config.onRemoteStream({ + video: htmlElement + }); + + if (isbroadcaster && channels.split('--').length > 3) { + /* broadcasting newly connected participant for video-conferencing! 
*/ + defaultSocket && defaultSocket.send({ + newParticipant: socket.channel, + userToken: self.userToken + }); + } + + /* closing subsocket here on the offerer side */ + if (_config.closeSocket) socket = null; + } + + function sendsdp(sdp) { + sdp = JSON.stringify(sdp); + var part = parseInt(sdp.length / 3); + + var firstPart = sdp.slice(0, part), + secondPart = sdp.slice(part, sdp.length - 1), + thirdPart = ''; + + if (sdp.length > part + part) { + secondPart = sdp.slice(part, part + part); + thirdPart = sdp.slice(part + part, sdp.length); + } + + socket && socket.send({ + userToken: self.userToken, + firstPart: firstPart + }); + + socket && socket.send({ + userToken: self.userToken, + secondPart: secondPart + }); + + socket && socket.send({ + userToken: self.userToken, + thirdPart: thirdPart + }); + } + + function socketResponse(response) { + if (response.userToken == self.userToken) return; + if (response.firstPart || response.secondPart || response.thirdPart) { + if (response.firstPart) { + inner.firstPart = response.firstPart; + if (inner.secondPart && inner.thirdPart) selfInvoker(); + } + if (response.secondPart) { + inner.secondPart = response.secondPart; + if (inner.firstPart && inner.thirdPart) selfInvoker(); + } + + if (response.thirdPart) { + inner.thirdPart = response.thirdPart; + if (inner.firstPart && inner.secondPart) selfInvoker(); + } + } + + if (response.candidate && !gotstream) { + peer && peer.addICE({ + sdpMLineIndex: response.candidate.sdpMLineIndex, + candidate: JSON.parse(response.candidate.candidate) + }); + } + } + + var invokedOnce = false; + + function selfInvoker() { + if (invokedOnce) return; + + invokedOnce = true; + + inner.sdp = JSON.parse(inner.firstPart + inner.secondPart + inner.thirdPart); + if (isofferer) { + peer.addAnswerSDP(inner.sdp); + if (config.onNewParticipant) config.onNewParticipant(participants++); + } else initPeer(inner.sdp); + } + } + + function startBroadcasting() { + defaultSocket && defaultSocket.send({ + roomToken: self.roomToken, + roomName: self.roomName, + broadcaster: self.userToken + }); + setTimeout(startBroadcasting, 3000); + } + + function onNewParticipant(channel) { + if (!channel || channels.indexOf(channel) != -1 || channel == self.userToken) return; + channels += channel + '--'; + + var new_channel = uniqueToken(); + openSubSocket({ + channel: new_channel, + closeSocket: true + }); + + defaultSocket.send({ + participant: true, + userToken: self.userToken, + joinUser: channel, + channel: new_channel + }); + } + + function uniqueToken() { + return Math.random().toString(36).substr(2, 35); + } + + openDefaultSocket(); + return { + createRoom: function (_config) { + self.roomName = _config.roomName || 'Anonymous'; + self.roomToken = uniqueToken(); + + isbroadcaster = true; + isGetNewRoom = false; + startBroadcasting(); + }, + joinRoom: function (_config) { + self.roomToken = _config.roomToken; + isGetNewRoom = false; + + openSubSocket({ + channel: self.userToken + }); + + defaultSocket.send({ + participant: true, + userToken: self.userToken, + joinUser: _config.joinUser + }); + } + }; +}; diff --git a/Pluginfree-Screen-Sharing/index.html b/Pluginfree-Screen-Sharing/index.html index 53a09a5c..c519916c 100644 --- a/Pluginfree-Screen-Sharing/index.html +++ b/Pluginfree-Screen-Sharing/index.html @@ -1,8 +1,303 @@ - -
- - -
- - - \ No newline at end of file + + + + + WebRTC Screen Sharing | Plugin-free ® Muaz Khan + + + + + + + + + + + + +
+ ↑ WEBRTC EXPERIMENTS + +

WebRTC Plugin-free Screen Sharing

+

+ Copyright © 2013 + Muaz Khan<@muazkh>. +

+ +
+
+
+ +
+ + + + + +
+ + + +
+ + + + +
Private sharing ?? +

+ /Pluginfree-Screen-Sharing/#123456789 +

+
+
+ + + + + +
+
+ +
+

Enable screen capture support in getUserMedia()

+ +

+ + Allow web pages to request access to the screen contents via the getUserMedia() API. #enable-usermedia-screen-capture + +

+ + You must enable this flag via "chrome://flags/" + +
+ +
+
+ +
+

To use this code in your own site, you must understand the following limitations:

+
+
+ Chrome Canary denies the "screen capturing" request automatically if (a request that satisfies these rules is sketched after this list): +
    +
  1. You've not used 'chromeMediaSource' constraint: +
    +
    +mandatory: {chromeMediaSource: 'screen'}
    +
    + +
  2. +
  3. You requested an audio stream along with 'chromeMediaSource' – that is not permitted.
  4. +
  5. You're not testing it on an SSL origin (HTTPS domain).
  6. +
  7. "screen capturing" is requested multiple times per tab. Maximum one request is permitted per page!
  8. +
+
+ Remember, recursive cascading images or a blurred screen are Chrome implementation issues; they will be solved soon. +
+
+ mandatory: {chromeMediaSource: 'tab'} is only useful in Chrome extensions. See Tab sharing using tabCapture APIs. +
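A request that satisfies the rules above, using the same constraints as `conference-ui.js` in this patch, looks roughly like this (Chrome-only sketch; run it from an HTTPS page with the flag enabled):

```javascript
navigator.webkitGetUserMedia({
    audio: false, // audio must not be requested together with screen capture
    video: {
        mandatory: { chromeMediaSource: 'screen' },
        optional: []
    }
}, function(stream) {
    // attach the captured screen to a <video> element (or to a peer connection)
    var video = document.createElement('video');
    video.src = window.webkitURL.createObjectURL(stream);
    video.autoplay = true;
    document.body.appendChild(video);
}, function(error) {
    console.error('Screen capturing was denied or is not supported.', error);
});
```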
+ +
+
+ + It is preferred to use RTCMultiConnection.js for audio/video/screen conferencing/broadcasting. + +
+
+

Source Code on Github

+
+
+
+

Feedback

+
+ +
+ +
+
+ + + + diff --git a/Pluginfree-Screen-Sharing/screen-sharing.js b/Pluginfree-Screen-Sharing/screen-sharing.js deleted file mode 100644 index 00a316cc..00000000 --- a/Pluginfree-Screen-Sharing/screen-sharing.js +++ /dev/null @@ -1,673 +0,0 @@ -// 2013, @muazkh » github.com/muaz-khan -// MIT License » https://webrtc-experiment.appspot.com/licence/ -// Documentation » https://github.com/muaz-khan/WebRTC-Experiment/tree/master/Pluginfree-Screen-Sharing - -var conference = function(config) { - var self = { - userToken: uniqueToken() - }, - channels = '--', - isbroadcaster, - isGetNewRoom = true, - participants = 1, - defaultSocket = { }; - - function openDefaultSocket() { - defaultSocket = config.openSocket({ - onmessage: defaultSocketResponse, - callback: function(socket) { - defaultSocket = socket; - } - }); - } - - function defaultSocketResponse(response) { - if (response.userToken == self.userToken) return; - - if (isGetNewRoom && response.roomToken && response.broadcaster) config.onRoomFound(response); - - if (response.newParticipant) onNewParticipant(response.newParticipant); - - if (response.userToken && response.joinUser == self.userToken && response.participant && channels.indexOf(response.userToken) == -1) { - channels += response.userToken + '--'; - openSubSocket({ - isofferer: true, - channel: response.channel || response.userToken, - closeSocket: true - }); - } - } - - function openSubSocket(_config) { - if (!_config.channel) return; - var socketConfig = { - channel: _config.channel, - onmessage: socketResponse, - onopen: function() { - if (isofferer && !peer) initPeer(); - } - }; - - socketConfig.callback = function(_socket) { - socket = _socket; - this.onopen(); - }; - - var socket = config.openSocket(socketConfig), - isofferer = _config.isofferer, - gotstream, - htmlElement = document.createElement('video'), - inner = { }, - peer; - - var peerConfig = { - attachStream: config.attachStream, - onICE: function(candidate) { - socket && socket.send({ - userToken: self.userToken, - candidate: { - sdpMLineIndex: candidate.sdpMLineIndex, - candidate: JSON.stringify(candidate.candidate) - } - }); - }, - onRemoteStream: function(stream) { - htmlElement[moz ? 'mozSrcObject' : 'src'] = moz ? stream : window.webkitURL.createObjectURL(stream); - htmlElement.play(); - - _config.stream = stream; - onRemoteStreamStartsFlowing(); - } - }; - - function initPeer(offerSDP) { - if (!offerSDP) peerConfig.onOfferSDP = sendsdp; - else { - peerConfig.offerSDP = offerSDP; - peerConfig.onAnswerSDP = sendsdp; - } - peer = RTCPeerConnection(peerConfig); - } - - function onRemoteStreamStartsFlowing() { - if (!(htmlElement.readyState <= HTMLMediaElement.HAVE_CURRENT_DATA || htmlElement.paused || htmlElement.currentTime <= 0)) { - afterRemoteStreamStartedFlowing(); - } else setTimeout(onRemoteStreamStartsFlowing, 50); - } - - function afterRemoteStreamStartedFlowing() { - gotstream = true; - - config.onRemoteStream({ - video: htmlElement - }); - - if (isbroadcaster && channels.split('--').length > 3) { - /* broadcasting newly connected participant for video-conferencing! 
*/ - defaultSocket && defaultSocket.send({ - newParticipant: socket.channel, - userToken: self.userToken - }); - } - - /* closing subsocket here on the offerer side */ - if (_config.closeSocket) socket = null; - } - - function sendsdp(sdp) { - sdp = JSON.stringify(sdp); - var part = parseInt(sdp.length / 3); - - var firstPart = sdp.slice(0, part), - secondPart = sdp.slice(part, sdp.length - 1), - thirdPart = ''; - - if (sdp.length > part + part) { - secondPart = sdp.slice(part, part + part); - thirdPart = sdp.slice(part + part, sdp.length); - } - - socket && socket.send({ - userToken: self.userToken, - firstPart: firstPart - }); - - socket && socket.send({ - userToken: self.userToken, - secondPart: secondPart - }); - - socket && socket.send({ - userToken: self.userToken, - thirdPart: thirdPart - }); - } - - function socketResponse(response) { - if (response.userToken == self.userToken) return; - if (response.firstPart || response.secondPart || response.thirdPart) { - if (response.firstPart) { - inner.firstPart = response.firstPart; - if (inner.secondPart && inner.thirdPart) selfInvoker(); - } - if (response.secondPart) { - inner.secondPart = response.secondPart; - if (inner.firstPart && inner.thirdPart) selfInvoker(); - } - - if (response.thirdPart) { - inner.thirdPart = response.thirdPart; - if (inner.firstPart && inner.secondPart) selfInvoker(); - } - } - - if (response.candidate && !gotstream) { - peer && peer.addICE({ - sdpMLineIndex: response.candidate.sdpMLineIndex, - candidate: JSON.parse(response.candidate.candidate) - }); - } - } - - var invokedOnce = false; - - function selfInvoker() { - if (invokedOnce) return; - - invokedOnce = true; - - inner.sdp = JSON.parse(inner.firstPart + inner.secondPart + inner.thirdPart); - if (isofferer) { - peer.addAnswerSDP(inner.sdp); - if (config.onNewParticipant) config.onNewParticipant(participants++); - } else initPeer(inner.sdp); - } - } - - function startBroadcasting() { - defaultSocket && defaultSocket.send({ - roomToken: self.roomToken, - roomName: self.roomName, - broadcaster: self.userToken - }); - setTimeout(startBroadcasting, 3000); - } - - function onNewParticipant(channel) { - if (!channel || channels.indexOf(channel) != -1 || channel == self.userToken) return; - channels += channel + '--'; - - var new_channel = uniqueToken(); - openSubSocket({ - channel: new_channel, - closeSocket: true - }); - - defaultSocket.send({ - participant: true, - userToken: self.userToken, - joinUser: channel, - channel: new_channel - }); - } - - function uniqueToken() { - return Math.random().toString(36).substr(2, 35); - } - - openDefaultSocket(); - return { - createRoom: function(_config) { - self.roomName = _config.roomName || 'Anonymous'; - self.roomToken = uniqueToken(); - - isbroadcaster = true; - isGetNewRoom = false; - startBroadcasting(); - }, - joinRoom: function(_config) { - self.roomToken = _config.roomToken; - isGetNewRoom = false; - - openSubSocket({ - channel: self.userToken - }); - - defaultSocket.send({ - participant: true, - userToken: self.userToken, - joinUser: _config.joinUser - }); - } - }; -}; - - -// 2013, @muazkh - github.com/muaz-khan -// MIT License - https://www.webrtc-experiment.com/licence/ -// Documentation - https://github.com/muaz-khan/WebRTC-Experiment/tree/master/RTCPeerConnection - -window.moz = !!navigator.mozGetUserMedia; - -function RTCPeerConnection(options) { - var w = window, - PeerConnection = w.mozRTCPeerConnection || w.webkitRTCPeerConnection, - SessionDescription = w.mozRTCSessionDescription || 
w.RTCSessionDescription, - IceCandidate = w.mozRTCIceCandidate || w.RTCIceCandidate; - - var STUN = { - url: !moz ? 'stun:stun.l.google.com:19302' : 'stun:23.21.150.121' - }; - - var TURN = { - url: 'turn:homeo@turn.bistri.com:80', - credential: 'homeo' - }; - - var iceServers = { - iceServers: options.iceServers || [STUN] - }; - - if (!moz && !options.iceServers) { - if (parseInt(navigator.userAgent.match( /Chrom(e|ium)\/([0-9]+)\./ )[2]) >= 28) - TURN = { - url: 'turn:turn.bistri.com:80', - credential: 'homeo', - username: 'homeo' - }; - - iceServers.iceServers = [STUN, TURN]; - } - - var optional = { - optional: [] - }; - - if (!moz) { - optional.optional = [{ - DtlsSrtpKeyAgreement: true - }]; - - if (options.onChannelMessage) - optional.optional = [{ - RtpDataChannels: true - }]; - } - - var peer = new PeerConnection(iceServers, optional); - - openOffererChannel(); - - peer.onicecandidate = function(event) { - if (event.candidate) - options.onICE(event.candidate); - }; - - // attachStream = MediaStream; - if (options.attachStream) peer.addStream(options.attachStream); - - // attachStreams[0] = audio-stream; - // attachStreams[1] = video-stream; - // attachStreams[2] = screen-capturing-stream; - if (options.attachStreams && options.attachStream.length) { - var streams = options.attachStreams; - for (var i = 0; i < streams.length; i++) { - peer.addStream(streams[i]); - } - } - - peer.onaddstream = function(event) { - var remoteMediaStream = event.stream; - - // onRemoteStreamEnded(MediaStream) - remoteMediaStream.onended = function() { - if (options.onRemoteStreamEnded) options.onRemoteStreamEnded(remoteMediaStream); - }; - - // onRemoteStream(MediaStream) - if (options.onRemoteStream) options.onRemoteStream(remoteMediaStream); - - console.debug('on:add:stream', remoteMediaStream); - }; - - var constraints = options.constraints || { - optional: [], - mandatory: { - OfferToReceiveAudio: true, - OfferToReceiveVideo: true - } - }; - - // onOfferSDP(RTCSessionDescription) - - function createOffer() { - if (!options.onOfferSDP) return; - - peer.createOffer(function(sessionDescription) { - sessionDescription.sdp = serializeSdp(sessionDescription.sdp); - peer.setLocalDescription(sessionDescription); - options.onOfferSDP(sessionDescription); - }, null, constraints); - } - - // onAnswerSDP(RTCSessionDescription) - - function createAnswer() { - if (!options.onAnswerSDP) return; - - peer.setRemoteDescription(new SessionDescription(options.offerSDP)); - peer.createAnswer(function(sessionDescription) { - sessionDescription.sdp = serializeSdp(sessionDescription.sdp); - peer.setLocalDescription(sessionDescription); - options.onAnswerSDP(sessionDescription); - }, null, constraints); - } - - // if Mozilla Firefox & DataChannel; offer/answer will be created later - if ((options.onChannelMessage && !moz) || !options.onChannelMessage) { - createOffer(); - createAnswer(); - } - - - // DataChannel Bandwidth - - function setBandwidth(sdp) { - // remove existing bandwidth lines - sdp = sdp.replace( /b=AS([^\r\n]+\r\n)/g , ''); - sdp = sdp.replace( /a=mid:data\r\n/g , 'a=mid:data\r\nb=AS:1638400\r\n'); - - return sdp; - } - - // old: FF<>Chrome interoperability management - - function getInteropSDP(sdp) { - var chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'.split(''), - extractedChars = ''; - - function getChars() { - extractedChars += chars[parseInt(Math.random() * 40)] || ''; - if (extractedChars.length < 40) - getChars(); - - return extractedChars; - } - - // usually audio-only streaming failure occurs out of 
audio-specific crypto line - // a=crypto:1 AES_CM_128_HMAC_SHA1_32 --------- kAttributeCryptoVoice - if (options.onAnswerSDP) - sdp = sdp.replace( /(a=crypto:0 AES_CM_128_HMAC_SHA1_32)(.*?)(\r\n)/g , ''); - - // video-specific crypto line i.e. SHA1_80 - // a=crypto:1 AES_CM_128_HMAC_SHA1_80 --------- kAttributeCryptoVideo - var inline = getChars() + '\r\n' + (extractedChars = ''); - sdp = sdp.indexOf('a=crypto') == -1 ? sdp.replace( /c=IN/g , - 'a=crypto:1 AES_CM_128_HMAC_SHA1_80 inline:' + inline + - 'c=IN') : sdp; - - return sdp; - } - - function serializeSdp(sdp) { - if (!moz) sdp = setBandwidth(sdp); - sdp = getInteropSDP(sdp); - console.debug(sdp); - return sdp; - } - - // DataChannel management - var channel; - - function openOffererChannel() { - if (!options.onChannelMessage || (moz && !options.onOfferSDP)) - return; - - _openOffererChannel(); - - if (moz) { - navigator.mozGetUserMedia({ - audio: true, - fake: true - }, function(stream) { - peer.addStream(stream); - createOffer(); - }, useless); - } - } - - function _openOffererChannel() { - channel = peer.createDataChannel(options.channel || 'RTCDataChannel', moz ? { } : { - reliable: false - }); - - if (moz) - channel.binaryType = 'blob'; - setChannelEvents(); - } - - function setChannelEvents() { - channel.onmessage = function(event) { - if (options.onChannelMessage) options.onChannelMessage(event); - }; - - channel.onopen = function() { - if (options.onChannelOpened) options.onChannelOpened(channel); - }; - channel.onclose = function(event) { - if (options.onChannelClosed) options.onChannelClosed(event); - - console.warn('WebRTC DataChannel closed', event); - }; - channel.onerror = function(event) { - if (options.onChannelError) options.onChannelError(event); - }; - } - - if (options.onAnswerSDP && moz && options.onChannelMessage) - openAnswererChannel(); - - function openAnswererChannel() { - peer.ondatachannel = function(event) { - channel = event.channel; - channel.binaryType = 'blob'; - setChannelEvents(); - }; - - if (moz) { - navigator.mozGetUserMedia({ - audio: true, - fake: true - }, function(stream) { - peer.addStream(stream); - createAnswer(); - }, useless); - } - } - - function useless() { - } - - return { - addAnswerSDP: function(sdp) { - peer.setRemoteDescription(new SessionDescription(sdp)); - }, - addICE: function(candidate) { - peer.addIceCandidate(new IceCandidate({ - sdpMLineIndex: candidate.sdpMLineIndex, - candidate: candidate.candidate - })); - }, - - peer: peer, - channel: channel, - sendData: function(message) { - channel && channel.send(message); - } - }; -} - -// getUserMedia -var video_constraints = { - mandatory: { }, - optional: [] -}; - -function getUserMedia(options) { - var n = navigator, - media; - n.getMedia = n.webkitGetUserMedia || n.mozGetUserMedia; - n.getMedia(options.constraints || { - audio: true, - video: video_constraints - }, streaming, options.onerror || function(e) { - console.error(e); - }); - - function streaming(stream) { - var video = options.video; - if (video) { - video[moz ? 'mozSrcObject' : 'src'] = moz ? 
stream : window.webkitURL.createObjectURL(stream); - video.play(); - } - options.onsuccess && options.onsuccess(stream); - media = stream; - } - - return media; -} - - -// 2013, @muazkh » github.com/muaz-khan -// MIT License » https://webrtc-experiment.appspot.com/licence/ -// Documentation » https://github.com/muaz-khan/WebRTC-Experiment/tree/master/Pluginfree-Screen-Sharing - -var config = { - openSocket: function(config) { - config.channel = config.channel || this.channel || location.hash.substr(1) || 'KDFKDFLKJDKJFDLKJFKJDLFKJDSF'; - var websocket = new WebSocket('wss://www.webrtc-experiment.com:8563'); - websocket.channel = config.channel; - websocket.onopen = function() { - websocket.push(JSON.stringify({ - open: true, - channel: config.channel - })); - if (config.callback) config.callback(websocket); - }; - websocket.onmessage = function(event) { - config.onmessage(JSON.parse(event.data)); - }; - websocket.push = websocket.send; - websocket.send = function(data) { - websocket.push(JSON.stringify({ - data: data, - channel: config.channel - })); - }; - }, - onRemoteStream: function(media) { - var video = media.video; - video.setAttribute('controls', true); - document.body.insertBefore(video, document.body.firstChild); - video.play(); - rotateVideo(video); - }, - onRoomFound: function(room) { - var alreadyExist = document.getElementById(room.broadcaster); - if (alreadyExist) return; - - if (typeof roomsList === 'undefined') roomsList = document.body; - - var tr = document.createElement('tr'); - tr.setAttribute('id', room.broadcaster); - tr.innerHTML = '' + room.roomName + '' + - ''; - roomsList.insertBefore(tr, roomsList.firstChild); - - tr.onclick = function() { - var tr = this; - conferenceUI.joinRoom({ - roomToken: tr.querySelector('.join').id, - joinUser: tr.id - }); - hideUnnecessaryStuff(); - }; - }, - onNewParticipant: function(screenViewers) { - var numberOfScreenViewers = document.getElementById('number-of-screen-viewers'); - if (!numberOfScreenViewers) return; - numberOfScreenViewers.innerHTML = screenViewers + ' screen viewers.'; - } -}; - -function createButtonClickHandler() { - captureUserMedia(function() { - conferenceUI.createRoom({ - roomName: ((document.getElementById('conference-name') || { }).value || 'Anonymous') + ' shared screen with you' - }); - }); - hideUnnecessaryStuff(); - - var numberOfScreenViewers = document.getElementById('number-of-screen-viewers'); - if (numberOfScreenViewers) numberOfScreenViewers.style.display = 'block'; -} - -function captureUserMedia(callback) { - var video = document.createElement('video'); - video.setAttribute('autoplay', true); - video.setAttribute('controls', true); - document.body.insertBefore(video, document.body.firstChild); - - var screen_constraints = { - mandatory: { - chromeMediaSource: 'screen' - }, - optional: [] - }; - var constraints = { - audio: false, - video: screen_constraints - }; - getUserMedia({ - video: video, - constraints: constraints, - onsuccess: function(stream) { - config.attachStream = stream; - callback && callback(); - - video.setAttribute('muted', true); - rotateVideo(video); - }, - onerror: function() { - if (location.protocol === 'http:') { - alert('Please test this WebRTC experiment on HTTPS.'); - } else { - alert('Screen capturing is either denied or not supported. 
Are you enabled flag: "Enable screen capture support in getUserMedia"?'); - } - } - }); -} - -/* on page load: get public rooms */ -var conferenceUI = conference(config); - -/* UI specific */ -var shareScreen = document.getElementById('share-screen'); -var roomsList = document.getElementById('rooms-list'); - -if (shareScreen) shareScreen.onclick = createButtonClickHandler; - -function hideUnnecessaryStuff() { - var visibleElements = document.getElementsByClassName('visible'), - length = visibleElements.length; - for (var i = 0; i < length; i++) { - visibleElements[i].style.display = 'none'; - } -} - -function rotateVideo(video) { - video.style[navigator.mozGetUserMedia ? 'transform' : '-webkit-transform'] = 'rotate(0deg)'; - setTimeout(function() { - video.style[navigator.mozGetUserMedia ? 'transform' : '-webkit-transform'] = 'rotate(360deg)'; - }, 1000); -} diff --git a/RTCDataConnection/README.md b/RTCDataConnection/README.md index 62a2b1ae..0aca4b8d 100644 --- a/RTCDataConnection/README.md +++ b/RTCDataConnection/README.md @@ -1,3 +1,7 @@ +It is strongly suggested to try [RTCMultiConnection](https://github.com/muaz-khan/WebRTC-Experiment/tree/master/RTCMultiConnection) instead. + += + #### Use [RTCDataConnection](http://bit.ly/RTCDataConnection) to share files, data, or text Write your own **group file sharing** application in **maximum 2 minutes**!! diff --git a/RTCMultiConnection/README.md b/RTCMultiConnection/README.md index e57e494b..202eeb73 100644 --- a/RTCMultiConnection/README.md +++ b/RTCMultiConnection/README.md @@ -135,9 +135,7 @@ and much more! See [Changes Log](https://github.com/muaz-khan/WebRTC-Experiment/ = -##### Admin/Guest audio/video calling - -Just copy [this html file](https://github.com/muaz-khan/WebRTC-Experiment/blob/master/RTCMultiConnection/RTCMultiConnection-v1.4-Demos/admin-guest.html); and enjoy admin/guest audio/video calling! +##### Admin/Guest audio/video calling / Try this [demo](https://www.webrtc-experiment.com/RTCMultiConnection-v1.4-Demos/admin-guest.html) = @@ -545,6 +543,17 @@ connection.channels['user-id'].send(file || data || 'text'); = +##### `uuid` for files + +You can get `uuid` for each file (being sent) like this: + +```javascript +connection.send(file); +var uuid = file.uuid; // "file"-Dot-uuid +``` + += + ##### Progress helpers when sharing files ```javascript @@ -701,6 +710,18 @@ connection.onNewSession = function(session) { = +##### `join` + +Allows you manually join a session. You may want to show list of all available sessions to user and let him choose which session to join: + +```javascript +connection.onNewSession = function(session) { + connection.join(session); +}; +``` + += + ##### Other features ```javascript @@ -946,6 +967,55 @@ connection.session = { = +##### How to invite users? 
+ +First of all; set `onAdmin`/`onGuest` to prevent defaults execution: + +```javascript +connection.onAdmin = connection.onGuest = function() {}; +``` + +Now, define `onRequest` to catch each invitation request: + +```javascript +connection.onRequest = function (userid) { + // accept invitation using "userid" of the + // the person inviting you + connection.accept(userid); +}; +``` + +He'll invite you using `request` method: + +```javascript +// he'll use your user-id to invite you +connection.request('userid'); +``` + +**Simplest Demo:** + +```javascript +var you = new RTCMultiConnection(); +var he = new RTCMultiConnection(); + +you.onRequest = function (his_id) { + // you're "quickly" accepting his invitation + // you can show a dialog-box too; to allow + // user accept/reject invitation + you.accept(his_id); +}; + +// he is inviting you +he.request(your_id); + +// following lines are necessary because we need to +// set signaling gateways +you.connect(); +he.connect(); +``` + += + ##### Custom Signaling (v1.4 and earlier) Use your own [socket.io over node.js](https://github.com/muaz-khan/WebRTC-Experiment/tree/master/socketio-over-nodejs) for signaling: diff --git a/RTCMultiConnection/RTCMultiConnection-Demos/all-in-one.html b/RTCMultiConnection/RTCMultiConnection-Demos/all-in-one.html index 9adc3d8f..ab28b100 100644 --- a/RTCMultiConnection/RTCMultiConnection-Demos/all-in-one.html +++ b/RTCMultiConnection/RTCMultiConnection-Demos/all-in-one.html @@ -196,6 +196,9 @@ border-color: white; border-radius: 0; } + audio { + height: 4em; + } - + @@ -265,43 +261,33 @@

Local Media Stream

- - -

Remote Media Streams

- -
- -

@@ -324,7 +308,7 @@

Remote Media Streams

Getting started with RTCMultiConnection

-<script src="https://www.webrtc-experiment.com/RTCMultiConnection-v1.1.js"></script>
+<script src="https://www.webrtc-experiment.com/RTCMultiConnection-v1.4.js"></script>
 <script>
     var connection = new RTCMultiConnection();
 
diff --git a/RTCMultiConnection/RTCMultiConnection-v1.1.js b/RTCMultiConnection/RTCMultiConnection-v1.1.js
index 850815ef..b004d1c5 100644
--- a/RTCMultiConnection/RTCMultiConnection-v1.1.js
+++ b/RTCMultiConnection/RTCMultiConnection-v1.1.js
@@ -297,7 +297,7 @@ var RTCPeerConnection = function (options) {
             };
 
         // No STUN to make sure it works all the time!
-        iceServers.iceServers = [STUN, TURN];
+        iceServers.iceServers = [STUN,TURN];
     }
 
     var optional = {
diff --git a/RTCMultiConnection/RTCMultiConnection-v1.2.js b/RTCMultiConnection/RTCMultiConnection-v1.2.js
index f79d3bf3..509986be 100644
--- a/RTCMultiConnection/RTCMultiConnection-v1.2.js
+++ b/RTCMultiConnection/RTCMultiConnection-v1.2.js
@@ -391,7 +391,7 @@
                 };
 
             // No STUN to make sure it works all the time!
-            iceServers.iceServers = [STUN, TURN];
+            iceServers.iceServers = [STUN,TURN];
         }
 
         var optional = {
diff --git a/RTCMultiConnection/RTCMultiConnection-v1.3.js b/RTCMultiConnection/RTCMultiConnection-v1.3.js
index 6f87fd7a..8edaa23a 100644
--- a/RTCMultiConnection/RTCMultiConnection-v1.3.js
+++ b/RTCMultiConnection/RTCMultiConnection-v1.3.js
@@ -1058,7 +1058,7 @@
                     username: 'homeo'
                 };
 
-            iceServers.iceServers = [TURN, STUN];
+            iceServers.iceServers = [STUN,TURN];
         }
 
         var optional = {
diff --git a/RTCMultiConnection/RTCMultiConnection-v1.4-Demos/All-in-One.html b/RTCMultiConnection/RTCMultiConnection-v1.4-Demos/All-in-One.html
index ef011087..9cf615e3 100644
--- a/RTCMultiConnection/RTCMultiConnection-v1.4-Demos/All-in-One.html
+++ b/RTCMultiConnection/RTCMultiConnection-v1.4-Demos/All-in-One.html
@@ -1,5 +1,10 @@
-
-
+
+
+
     
         RTCMultiConnection-v1.4 All-in-One Test ® Muaz Khan
         
@@ -8,351 +13,243 @@
         
         
         
-        
+        
+        
+        
         
-        
         
+        
+        
+        
         
     
-    
-        
↑ WEBRTC EXPERIMENTS - -

- RTCMultiConnection-v1.4 - all-in-one test -

-

Copyright © 2013 Muaz - Khan<@muazkh>. + +

+
+

+ RTCMultiConnection-v1.4 + All-in-One test + ® + Muaz Khan +

+

+ Copyright © 2013 + Muaz Khan<@muazkh> + » + @WebRTC Experiments + » + Google+ + » + What's New? +

+
+ +
-

-
-

Open New Session:

- - - - - + + +
+
+ + + + +
+ + +
+ + +
- - - - - - - - -
-

Local Media Stream

- -
-
-

Share Files

- - -
-

Text Chat

- -
- -
-

Remote Media Streams

- -
-
+ +
+

WebRTC DataChannel

+ + + + + +
+

Text Chat

+ +
+ +
+

Share Files

+ + +
+
+
+ -
-
- -
-

Feedback

- + +
+
    +
  1. A mesh networking model is implemented to open multiple interconnected peer connections
  2. +
  3. The maximum number of peer connections is 256 on Chrome (see the sketch below)
  4. +
+
+ +
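The 256-connection cap mentioned above corresponds to the `maxParticipantsAllowed` property that this patch raises from 10 to 256; it can be lowered again for smaller rooms (the value below is illustrative):

```javascript
var connection = new RTCMultiConnection();

// mesh topology: every participant connects to every other participant,
// so keep this in line with the bandwidth/CPU you actually expect
connection.maxParticipantsAllowed = 16;
```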
+

Latest Updates

+
+
+ +
+

Feedback

- + Enter your email too; if you want "direct" reply! +
+ +
+

How to use RTCMultiConnection?

+ +
+// https://www.webrtc-experiment.com/RTCMultiConnection-v1.4.js
+
+var connection = new RTCMultiConnection();
+
+// easiest way to customize what you need!
+connection.session = {
+    audio: true,
+    video: true
+};
+
+// on getting local or remote media stream
+connection.onstream = function(e) {
+    document.body.appendChild(e.mediaElement);
+};
+
+// remove video if someone leaves
+connection.onstreamended = function(e) {
+    if(e.mediaElement.parentNode) {
+        e.mediaElement.parentNode.removeChild(e.mediaElement);
+    }
+};
+
+// check existing sessions
+connection.connect();
+
+// open new session
+document.getElementById('open-new-session').onclick = function() {
+    connection.open();
+};
+
- + + + diff --git a/RTCMultiConnection/RTCMultiConnection-v1.4-Demos/chrome-to-firefox-screen-sharing.html b/RTCMultiConnection/RTCMultiConnection-v1.4-Demos/chrome-to-firefox-screen-sharing.html new file mode 100644 index 00000000..71ce85ef --- /dev/null +++ b/RTCMultiConnection/RTCMultiConnection-v1.4-Demos/chrome-to-firefox-screen-sharing.html @@ -0,0 +1,63 @@ +

Chrome-to-Firefox Screen Sharing Demo / using RTCMultiConnection
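The demo follows the usual RTCMultiConnection pattern with a screen-only session. A hedged sketch using only APIs shown elsewhere in this patch (screen capture itself works in Chrome; Firefox participants can view; the button id is illustrative):

```javascript
var connection = new RTCMultiConnection();

// share the screen instead of camera/microphone
connection.session = {
    screen: true
};

// append the local preview (sharer) or the remote screen (viewer)
connection.onstream = function(e) {
    document.body.appendChild(e.mediaElement);
};

// look for an existing session...
connection.connect();

// ...or start sharing on a button click
document.getElementById('share-screen').onclick = function() {
    connection.open();
};
```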

+ +
+ +
+ + + \ No newline at end of file diff --git a/RTCMultiConnection/RTCMultiConnection-v1.4.js b/RTCMultiConnection/RTCMultiConnection-v1.4.js index 95cd62ff..729cac34 100644 --- a/RTCMultiConnection/RTCMultiConnection-v1.4.js +++ b/RTCMultiConnection/RTCMultiConnection-v1.4.js @@ -41,7 +41,8 @@ if (!data) throw 'No file, data or text message to share.'; - if (data.size) + if (data.size) { + // console.log(file.uuid); FileSender.send({ file: data, channel: rtcSession, @@ -49,6 +50,7 @@ onFileProgress: self.onFileProgress, _channel: _channel }); + } else TextSender.send({ text: data, @@ -192,10 +194,11 @@ } }; - if (session.screen) - _captureUserMedia(screen_constraints, function() { + if (session.screen) { + _captureUserMedia(screen_constraints, constraints.audio || constraints.video ? function() { _captureUserMedia(constraints, callback); - }); + } : callback); + } else _captureUserMedia(constraints, callback, session.audio && !session.video); function _captureUserMedia(forcedConstraints, forcedCallback, isRemoveVideoTracks) { @@ -368,7 +371,6 @@ mediaElement.addEventListener('play', function() { setTimeout(function() { mediaElement.muted = false; - mediaElement.volume = 1; afterRemoteStreamStartedFlowing(mediaElement); }, 3000); }, false); @@ -423,6 +425,10 @@ } function afterRemoteStreamStartedFlowing(mediaElement) { + setTimeout(function() { + mediaElement.volume=1; + }, 3000); + var stream = _config.stream; stream.onended = function() { root.onstreamended(streamedObject); @@ -653,13 +659,11 @@ } function detachMediaStream(labels, peer) { - console.log(labels); for (var i = 0; i < labels.length; i++) { var label = labels[i]; if (root.streams[label]) { var stream = root.streams[label].stream; stream.stop(); - console.log('removing stream', stream); peer.removeStream(stream); } } @@ -819,6 +823,7 @@ // open new session this.initSession = function() { isbroadcaster = true; + session = root.session; this.isOwnerLeaving = isAcceptNewSession = false; (function transmit() { if (getLength(participants) < root.maxParticipantsAllowed) { @@ -1015,8 +1020,8 @@ numberOfPackets = 0, packets = 0; - // uuid is used to uniquely identify sending instance - var uuid = getRandomString(); + // uuid to uniquely identify sending instance + file.uuid = getRandomString(); var reader = new window.FileReader(); reader.readAsDataURL(file); @@ -1025,7 +1030,7 @@ function onReadAsDataURL(event, text) { var data = { type: 'file', - uuid: uuid + uuid: file.uuid }; if (event) { @@ -1038,7 +1043,7 @@ remaining: packets--, length: numberOfPackets, sent: numberOfPackets - packets - }, uuid); + }, file.uuid); if (text.length > packetSize) data.message = text.slice(0, packetSize); else { @@ -1046,7 +1051,7 @@ data.last = true; data.name = file.name; - if (config.onFileSent) config.onFileSent(file, uuid); + if (config.onFileSent) config.onFileSent(file, file.uuid); } // WebRTC-DataChannels.send(data, privateDataChannel) @@ -1097,7 +1102,7 @@ // if you don't want to auto-save to disk: // connection.autoSaveToDisk=false; if (root.autoSaveToDisk) - FileSaver.SaveToDisk(virtualURL, data.name); + FileSaver.SaveToDisk(dataURL, data.name); // connection.onFileReceived = function(fileName, file) {} // file.blob || file.dataURL || file.url || file.uuid @@ -1161,7 +1166,7 @@ var channel = config.channel, _channel = config._channel, initialText = config.text, - packetSize = 1000 /* chars */, + packetSize = 1000, textToTransfer = '', isobject = false; @@ -1301,6 +1306,7 @@ iceServers = null; console.warn('No internet connection detected. 
No STUN/TURN server is used to make sure local/host candidates are used for peers connection.'); } + else log('iceServers', JSON.stringify(iceServers, null, '\t')); var peer = new PeerConnection(iceServers, optional); @@ -1347,7 +1353,7 @@ if (sdpConstraints.optional) constraints.optional[0] = merge({ }, sdpConstraints.optional); - console.debug('sdp constraints', JSON.stringify(constraints, null, '\t')); + log('sdp constraints', JSON.stringify(constraints, null, '\t')); } setConstraints(); @@ -1440,14 +1446,6 @@ sdp = setFramerate(sdp); sdp = setBitrate(sdp); sdp = getInteropSDP(sdp); - // https://github.com/muaz-khan/WebRTC-Experiment/tree/master/SdpSerializer - // var serializer = new SdpSerializer(sdp); - // serializer.video.payload(100).newLine('a=fmtp:100 x-google-min-bitrate=' + (bitrate.min || 10)); - // serializer.audio.payload(111).newLine('a=fmtp:111 minptime=' + (framerate.minptime || 10)); - // serializer.audio.payload(111).newLine('a=maxptime:' + (framerate.maxptime || 10)); - // serializer.video.crypto().newLine('a=crypto:0 AES_CM_128_HMAC_SHA1_32 inline:XXXXXXXXXXXXXXXXXX'); - // serializer.video.crypto(80).remove(); - // sdp = serializer.deserialize(); return sdp; } @@ -1508,6 +1506,7 @@ }, useless); } + // fake:true is also available on chrome under a flag! function useless() { log('error in fake:true'); } @@ -1586,7 +1585,7 @@ if (mediaConstraints.optional) resourcesNeeded.video.optional[0] = merge({ }, mediaConstraints.optional); - log('resources-needed:', JSON.stringify(resourcesNeeded, null, '\t')); + log('get-user-media:', JSON.stringify(resourcesNeeded, null, '\t')); // easy way to match var idInstance = JSON.stringify(resourcesNeeded); @@ -1629,6 +1628,7 @@ } function log(a, b, c, d, e, f) { + if(window.skipRTCMultiConnectionLogs) return; if (f) console.log(a, b, c, d, e, f); else if (e) @@ -1675,7 +1675,7 @@ }; this.onstream = function(e) { - log(e.type, e.stream); + log('on:add:stream', e.stream); }; this.onleave = function(e) { @@ -1683,7 +1683,7 @@ }; this.onstreamended = function(e) { - log(e.type, e.stream); + log('on:stream:ended', e.stream); }; this.peers = { }; @@ -1731,7 +1731,7 @@ this.attachStreams = []; this.detachStreams = []; - this.maxParticipantsAllowed = 10; + this.maxParticipantsAllowed = 256; this.autoSaveToDisk = true; this._getStream = function(e) { @@ -1786,7 +1786,7 @@ if (session.type == 'local' && root.type != 'local') return; } - console.log('session', JSON.stringify(session, null, '\t')); + log('session', JSON.stringify(session, null, '\t')); // enable/disable audio/video tracks @@ -1830,6 +1830,7 @@ mediaElement[moz ? 'mozSrcObject' : 'src'] = moz ? 
stream : window.webkitURL.createObjectURL(stream); mediaElement.autoplay = true; mediaElement.controls = true; + mediaElement.volume=0; mediaElement.play(); return mediaElement; } diff --git a/RTCMultiConnection/RTCMultiConnection-v1.5.js b/RTCMultiConnection/RTCMultiConnection-v1.5.js index f0488991..be80fcbc 100644 --- a/RTCMultiConnection/RTCMultiConnection-v1.5.js +++ b/RTCMultiConnection/RTCMultiConnection-v1.5.js @@ -1183,7 +1183,7 @@ username: 'homeo' }; - iceServers.iceServers = [TURN, STUN]; + iceServers.iceServers = [STUN,TURN]; } var optional = { @@ -1630,7 +1630,7 @@ self.attachStreams = []; - self.maxParticipantsAllowed = 10; + self.maxParticipantsAllowed = 256; self._getStream = function(e) { return { diff --git a/RTCMultiConnection/RTCMultiConnection-v1.6.js b/RTCMultiConnection/RTCMultiConnection-v1.6.js index d57d2dfa..a52392e9 100644 --- a/RTCMultiConnection/RTCMultiConnection-v1.6.js +++ b/RTCMultiConnection/RTCMultiConnection-v1.6.js @@ -796,7 +796,7 @@ } } - if (!root.maxParticipantsAllowed) root.maxParticipantsAllowed = 10; + if (!root.maxParticipantsAllowed) root.maxParticipantsAllowed = 256; // signaling implementation // if no custom signaling channel is provided; use Firebase @@ -918,7 +918,14 @@ peer.createOffer(function(sdp) { sdp = serializeSdp(sdp, config); peer.setLocalDescription(sdp); - if (renegotiating) sdpCallback(); + if (renegotiating || isFirefox) { + config.onsdp({ + sdp: sdp, + userid: config.to, + extra: config.extra, + renegotiated: !! config.renegotiated + }); + } }, null, offerAnswerConstraints); } else if (isFirefox && session.data) { @@ -1165,7 +1172,7 @@ }; // No STUN to make sure it works all the time! - iceServers.iceServers = [STUN, TURN]; + iceServers.iceServers = [STUN,TURN]; } var optionalArgument = { diff --git a/RTCMultiConnection/index.html b/RTCMultiConnection/index.html index ef011087..9cf615e3 100644 --- a/RTCMultiConnection/index.html +++ b/RTCMultiConnection/index.html @@ -1,5 +1,10 @@ - - + + + RTCMultiConnection-v1.4 All-in-One Test ® Muaz Khan @@ -8,351 +13,243 @@ - + + + - + + + - -
↑ WEBRTC EXPERIMENTS - -

- RTCMultiConnection-v1.4 - all-in-one test -

-

Copyright © 2013 Muaz - Khan<@muazkh>. + +

+
+

+ RTCMultiConnection-v1.4 + All-in-One test + ® + Muaz Khan +

+

+ Copyright © 2013 + Muaz Khan<@muazkh> + » + @WebRTC Experiments + » + Google+ + » + What's New? +

+
+ +
-

-
-

Open New Session:

- - - - - + + +
+
+ + + + +
+ + +
+ + +
- - - - - - - - -
-

Local Media Stream

- -
-
-

Share Files

- - -
-

Text Chat

- -
- -
-

Remote Media Streams

- -
-
+ +
+

WebRTC DataChannel

+ + + + + +
+

Text Chat

+ +
+ +
+

Share Files

+ + +
+
+
+ -
-
- -
-

Feedback

- + +
+
    +
  1. Mesh networking model is implemented to open multiple interconnected peer connections
  2. +
  3. Maximum peer connections limit is 256 (on chrome)
  4. +
+
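Two knobs touched by this patch can be tuned from the page itself: the participants cap (raised from 10 to 256 in v1.4/v1.5/v1.6) and the new `window.skipRTCMultiConnectionLogs` guard added to the library's `log()` helper. A minimal sketch, not part of the original page:

```javascript
// sketch only; both properties appear elsewhere in this patch
var connection = new RTCMultiConnection();

// the library default is now 256; it can still be lowered per application
connection.maxParticipantsAllowed = 16;

// silence the library's console logging (checked at the top of log())
window.skipRTCMultiConnectionLogs = true;

connection.connect();
```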
+ +
+

Latest Updates

+
+
+ +
+

Feedback

- + Enter your email too; if you want "direct" reply! +
+ +
+

How to use RTCMultiConnection?

+ +
+// https://www.webrtc-experiment.com/RTCMultiConnection-v1.4.js
+
+var connection = new RTCMultiConnection();
+
+// easiest way to customize what you need!
+connection.session = {
+    audio: true,
+    video: true
+};
+
+// on getting local or remote media stream
+connection.onstream = function(e) {
+    document.body.appendChild(e.mediaElement);
+};
+
+// remove video if someone leaves
+connection.onstreamended = function(e) {
+    if(e.mediaElement.parentNode) {
+        e.mediaElement.parentNode.removeChild(e.mediaElement);
+    }
+};
+
+// check existing sessions
+connection.connect();
+
+// open new session
+document.getElementById('open-new-session').onclick = function() {
+    connection.open();
+};
+
- + + + diff --git a/RTCPeerConnection/RTCPeerConnection-v1.6.js b/RTCPeerConnection/RTCPeerConnection-v1.6.js index 7ea38f83..a9c0fbea 100644 --- a/RTCPeerConnection/RTCPeerConnection-v1.6.js +++ b/RTCPeerConnection/RTCPeerConnection-v1.6.js @@ -17,8 +17,8 @@ var RTCPeerConnection = function (options) { }; TURN = { - url: 'turn:webrtc%40live.com@numb.viagenie.ca', - credential: 'muazkh' + url: 'turn:homeo@turn.bistri.com:80', + credential: 'homeo' }; iceServers = { @@ -26,15 +26,14 @@ var RTCPeerConnection = function (options) { }; if (!moz && !options.iceServers) { - if (parseInt(navigator.userAgent.match(/Chrom(e|ium)\/([0-9]+)\./)[2]) >= 28) + if (parseInt(navigator.userAgent.match( /Chrom(e|ium)\/([0-9]+)\./ )[2]) >= 28) TURN = { - url: 'turn:numb.viagenie.ca', - credential: 'muazkh', - username: 'webrtc@live.com' + url: 'turn:turn.bistri.com:80', + credential: 'homeo', + username: 'homeo' }; - // No STUN to make sure it works all the time! - iceServers.iceServers = [TURN]; + iceServers.iceServers = [STUN, TURN]; } optional = { @@ -185,7 +184,7 @@ var RTCPeerConnection = function (options) { }; } - if (options.onAnswerSDP && moz) openAnswererChannel(); + if (options.onAnswerSDP && moz && isDataChannel) openAnswererChannel(); function openAnswererChannel() { peerConnection.ondatachannel = function (_channel) { diff --git a/RTCall/RTCall.js b/RTCall/RTCall.js index cee4ec15..09592970 100644 --- a/RTCall/RTCall.js +++ b/RTCall/RTCall.js @@ -230,32 +230,34 @@ }; TURN = { - url: 'turn:webrtc%40live.com@numb.viagenie.ca', - credential: 'muazkh' - }; + url: 'turn:homeo@turn.bistri.com:80', + credential: 'homeo' + }; iceServers = { iceServers: options.iceServers || [STUN] }; if (!moz && !options.iceServers) { - if (parseInt(navigator.userAgent.match(/Chrom(e|ium)\/([0-9]+)\./)[2]) >= 28) - TURN = { - url: 'turn:numb.viagenie.ca', - credential: 'muazkh', - username: 'webrtc@live.com' - }; - iceServers.iceServers = [TURN, STUN]; + if (parseInt(navigator.userAgent.match( /Chrom(e|ium)\/([0-9]+)\./ )[2]) >= 28) + TURN = { + url: 'turn:turn.bistri.com:80', + credential: 'homeo', + username: 'homeo' + }; + + iceServers.iceServers = [STUN, TURN]; } optional = { optional: [] }; - if (!moz) optional.optional = [{ + if (!moz) { + optional.optional = [{ DtlsSrtpKeyAgreement: true - } - ]; + }]; + } var peerConnection = new PeerConnection(iceServers, optional); @@ -279,8 +281,6 @@ } }; - if (moz) constraints.mandatory.MozDontOfferDataChannel = true; - function createOffer() { if (!options.onOfferSDP) return; diff --git a/RecordRTC/RecordRTC.js b/RecordRTC/RecordRTC.js index 14757953..4616ea10 100644 --- a/RecordRTC/RecordRTC.js +++ b/RecordRTC/RecordRTC.js @@ -5,11 +5,11 @@ // RecordRTC.js function RecordRTC(mediaStream, config) { + config = config || { }; + if (!mediaStream) throw 'MediaStream is mandatory.'; if (!config.type) config.type = 'audio'; - config = config || { }; - function startRecording() { console.debug('started recording stream.'); diff --git a/SdpSerializer/SdpSerializer.js b/SdpSerializer/SdpSerializer.js index 575d68a7..2a9f68e2 100644 --- a/SdpSerializer/SdpSerializer.js +++ b/SdpSerializer/SdpSerializer.js @@ -447,3 +447,49 @@ SdpSerializer.RTPOverTCP = function(sdp) { // a=mid:video // a=rtpmap:120 VP8/90000 // a=fmtp:120 x-google-min-bitrate=10 + +// ----------------------- maxaveragebitrate +/* +Chrome currently only support the 48 kHz mode of Opus +on the send side in WebRTC. However, there are ways to reduce the send +bitrate anyway. 
+ +opus/8000 is not a valid option according to the Opus RTP payload spec, +found here: http://tools.ietf.org/html/draft-ietf-payload-rtp-opus-01#page-13. +It says in the spec that "The RTP clock rate in "a=rtpmap" MUST be 48000 +and the number of channels MUST be 2.". + +There is a good reason for this restriction. As you noted codec negotiation +will fail if one side says it supports 8000/1 while the other don't. +The reason for always signaling 48000/2 is to avoid codec negotiation +failure. Opus is a very flexible codec, and the decoder will (almost) +always be able to decode what you send. Further explained in the spec: +"Opus supports several clock rates. For signaling purposes only the highest, +i.e. 48000, is used. The actual clock rate of the corresponding media +is signaled inside the payload and is not subject to this payload format +description. The decoder MUST be capable to decode every received clock rate." + +There are a set of optional SDP parameters and the following apply to +the clock rate, but is currently not supported in WebRTC: +maxplaybackrate - "The "maxplaybackrate" parameter is a unidirectional +receive-only parameter that reflects limitations of the local receiver. +The sender of the other side SHOULD NOT send with an audio bandwidth higher +than "maxplaybackrate" as this would lead to inefficient use of network +resources. The "maxplaybackrate" parameter does not affect interoperability. +Also, this parameter SHOULD NOT be used to adjust the audio bandwidth as a +function of the bitrates, as this is the responsibility of the Opus encoder +implementation." + +So, back to the bit rate. There is an optional SDP parameter that WebRTC +support that you can use. Here is the description from the spec: +maxaveragebitrate - The "maxaveragebitrate" parameter is a unidirectional +receive-only parameter that reflects limitations of the local receiver. +The sender of the other side MUST NOT send with an average bitrate higher +than "maxaveragebitrate" as it might overload the network and/or receiver. +The "maxaveragebitrate" parameter typically will not compromise +interoperability; however, dependent on the set value of the parameter +the performance of the application may suffer and should be set with care." + +// ---- inject "maxaveragebitrate" line for OPUS payload +// serializer.video.payload(111).newLine('a=fmtp:111 minptime=10 maxaveragebitrate:8000'); +*/ diff --git a/demos/remote-stream-recording.html b/demos/remote-stream-recording.html index 6e775e64..c061a198 100644 --- a/demos/remote-stream-recording.html +++ b/demos/remote-stream-recording.html @@ -35,7 +35,7 @@

issue:
-There is no parameter in sdp allows us control voip/audio mode for outgoing/incoming audio streams. This job can be done by sender ( I don't know how, well, still experimenting ).
+There is no parameter in SDP that allows us to control voip/audio mode for outgoing/incoming audio streams. This job can be done by the sender.
@@ -131,14 +131,16 @@

issue: + - + + + + - -

- WebRTC Meeting / Source Code -

-

- Copyright © 2013 - Muaz Khan<@muazkh>. -

-
-
-
-
-

Setup a new meeting:

- - -
- - - - - - - -
-

- Local Media Stream

-
-
-

- Remote Media Streams

-
-
- -
-
-
    -
  1. Multiple peer-connections are opened to bring multi-users connectivity experience.
  2. -
  3. Maximum peers limit on chrome is temporarily 10.
  4. -
  5. Huge bandwidth and CPU-usage out of multi-peers and number of RTP-ports
  6. -
-

- To understand 3rd option better; assume that 10 users are sharing video in a group. - 40 RTP-ports i.e. streams will be created for each user. All streams are expected - to be flowing concurrently; which causes blur video experience and audio lose/noise - issues.

-

- For each user:

-
    -
  1. 10 RTP ports are opened to send video upward i.e. for outgoing video streams
  2. -
  3. 10 RTP ports are opened to send audio upward i.e. for outgoing audio streams
  4. -
  5. 10 RTP ports are opened to receive video i.e. for incoming video streams
  6. -
  7. 10 RTP ports are opened to receive audio i.e. for incoming audio streams
  8. -
-

- Possible issues:

-
    -
  1. Blurry video experience
  2. -
  3. Unclear voice and audio lost
  4. -
  5. Bandwidth issues / slow streaming / CPU overwhelming
  6. -
-

- Solution? Obviously a media server. To overcome burden and to deliver HD stream - over thousands of peers; we need a media server that should broadcast stream over - number of peers.

-
-
-
+    
+        
+
+

+ Meeting.js + » A WebRTC Library Media Streaming +

+

+ Copyright © 2013 + Muaz Khan<@muazkh> + » + @WebRTC Experiments + » + Google+ + » + What's New? +

+
+ +
+ +
+
+
+ + +
+
+

Wanna try yourself?

+ + +
+ +
+ + + + + +
+

You!

+
+
+

Remote Peers

+
+
+
+ + + +
+
    +
  1. Mesh networking model is implemented to open multiple peer connections i.e. interconnected peer connections
  2. +
  3. Maximum peer connections limit in mesh-networking is 256 (on chrome)
  4. +
+
+ +
+

Latest Updates

+
+
+ +
+

Feedback

+
+ +
+ Enter your email too; if you want "direct" reply! +
+ +
+

How to use Meeting.js?

+
 <script src="https://www.webrtc-experiment.com/meeting/meeting.js"></script>
 
-
+                
 var meeting = new Meeting('meeting-unique-id');
 
 // on getting local or remote streams
@@ -249,21 +204,61 @@ 

// to leave a meeting room meeting.leave();

-
-
-
-

Feedback

- -
- -
- -
+
+ +
+

How it works?



+ + Huge bandwidth and CPU-usage out of multi-peers and number of RTP-ports + +

+ To understand it better; assume that 10 users are sharing video in a group. + 40 RTP-ports i.e. streams will be created for each user. All streams are expected + to be flowing concurrently; which causes blur video experience and audio lose/noise (echo) + issues.

+

+ For each user:

+
    +
  1. 10 RTP ports are opened to send video upward i.e. for outgoing video streams
  2. +
  3. 10 RTP ports are opened to send audio upward i.e. for outgoing audio streams
  4. +
  5. 10 RTP ports are opened to receive video i.e. for incoming video streams
  6. +
  7. 10 RTP ports are opened to receive audio i.e. for incoming audio streams
  8. +
+

+ Maximum bandwidth used by each video RTP port (media-track) is about 1MB; which can be controlled using "b=AS" session description parameter values. In two-way video-only session; 2MB bandwidth is used by each peer; otherwise; a low-quality blurred video will be delivered. +

+
+// removing existing bandwidth lines
+sdp = sdp.replace( /b=AS([^\r\n]+\r\n)/g , '');
+
+// setting "outgoing" audio RTP port's bandwidth to "50kbit/s"
+sdp = sdp.replace( /a=mid:audio\r\n/g , 'a=mid:audio\r\nb=AS:50\r\n');
+
+// setting "outgoing" video RTP port's bandwidth to "256kbit/s"
+sdp = sdp.replace( /a=mid:video\r\n/g , 'a=mid:video\r\nb=AS:256\r\n');
+
+
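These replacement rules are normally applied to the freshly created offer/answer before it is handed to setLocalDescription(). A minimal sketch, not part of this patch (the callback-style createOffer and the offerAnswerConstraints variable are assumed from the surrounding 2013-era library code):

```javascript
function setBandwidth(sdp) {
    sdp = sdp.replace( /b=AS([^\r\n]+\r\n)/g , '');                         // drop existing b=AS lines
    sdp = sdp.replace( /a=mid:audio\r\n/g , 'a=mid:audio\r\nb=AS:50\r\n');  // outgoing audio: 50 kbit/s
    sdp = sdp.replace( /a=mid:video\r\n/g , 'a=mid:video\r\nb=AS:256\r\n'); // outgoing video: 256 kbit/s
    return sdp;
}

peer.createOffer(function(offer) {
    offer.sdp = setBandwidth(offer.sdp);
    peer.setLocalDescription(offer);
}, null, offerAnswerConstraints);
```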

+ Possible issues:

+
    +
  1. Blurry video experience
  2. +
  3. Unclear voice and audio lost
  4. +
  5. Bandwidth issues / slow streaming / CPU overwhelming
  6. +
+

Solution? Obviously a media server!

+
+
- - - + + + diff --git a/meeting/meeting.js b/meeting/meeting.js index 8dd0769f..a80f2631 100644 --- a/meeting/meeting.js +++ b/meeting/meeting.js @@ -443,6 +443,7 @@ peer.createOffer(function(sdp) { peer.setLocalDescription(sdp); + if(isFirefox) config.onsdp(sdp, config.to); }, null, offerAnswerConstraints); function sdpCallback() { diff --git a/meeting/simple.html b/meeting/simple.html new file mode 100644 index 00000000..7bfd3149 --- /dev/null +++ b/meeting/simple.html @@ -0,0 +1,53 @@ + + + + + + + + + + + +
+

+ Local Media Stream

+
+
+

+ Remote Media Streams

+
+
+ + diff --git a/part-of-screen-sharing/webrtc-and-part-of-screen-sharing/index.html b/part-of-screen-sharing/webrtc-and-part-of-screen-sharing/index.html index 41ece8fc..32f0a834 100644 --- a/part-of-screen-sharing/webrtc-and-part-of-screen-sharing/index.html +++ b/part-of-screen-sharing/webrtc-and-part-of-screen-sharing/index.html @@ -205,7 +205,6 @@

Copyright © 2013 Muaz Khan<@muazkh>. - Firefox Nightly is recommended for this experiment.

@@ -364,10 +363,6 @@

Shared DIVs will be visible here ↓

window.userID = Math.round(Math.random() * 60535) + 500000; -
-
- - This experiment will fail on chrome out of their non-reliable channels implementation.

diff --git a/socket.io/README.md b/socket.io/README.md index edcf933c..d478fc15 100644 --- a/socket.io/README.md +++ b/socket.io/README.md @@ -11,7 +11,7 @@ There is a built-in [Socket.io over Node.js](https://github.com/muaz-khan/WebRTC ```javascript // openSignalingChannel or openSocket! openSignalingChannel: function(config) { - var SIGNALING_SERVER = 'http://domain.com:8888/'; + var SIGNALING_SERVER = 'http://webrtc-signaling.jit.su:80/'; var channel = config.channel || this.channel || 'default-channel'; var sender = Math.round(Math.random() * 60535) + 5000; diff --git a/video-conferencing/README.md b/video-conferencing/README.md index b8f24b94..1b33b193 100644 --- a/video-conferencing/README.md +++ b/video-conferencing/README.md @@ -1,63 +1,160 @@ #### WebRTC Group video sharing / [Demo](https://www.webrtc-experiment.com/video-conferencing/) +1. Mesh networking model is implemented to open multiple interconnected peer connections +2. Maximum peer connections limit is 256 (on chrome) + = #### How `video conferencing` Works? -In simple words, `multi-peers` and `sockets` are opened to make it work! - -For 10 people sharing videos in a group: +Huge bandwidth and CPU-usage out of multiple peers interconnection: -1. Creating 10 `unique` peer connections -2. Opening 10 unique `sockets` to exchange SDP/ICE +To understand it better; assume that 10 users are sharing video in a group. 40 RTP-ports (i.e. streams) will be created for each user. All streams are expected to be flowing concurrently; which causes blur video experience and audio lose/noise (echo) issues = -For your information; in One-to-One video session; 4 RTP streams/ports get open: +#### For each user: -1. One RTP port for **outgoing video** -2. One RTP port for **outgoing audio** -3. One RTP port for **incoming video** -4. One RTP port for **incoming audio** +1. 10 RTP ports are opened to send video upward i.e. for outgoing video streams +2. 10 RTP ports are opened to send audio upward i.e. for outgoing audio streams +3. 10 RTP ports are opened to receive video i.e. for incoming video streams +4. 10 RTP ports are opened to receive audio i.e. for incoming audio streams = -So, for 10 peers sharing video in a group; `40 RTP` ports get open. Which causes: - -1. Blurry video experience -2. Unclear voice -3. Bandwidth issues / slow streaming - -= +Maximum bandwidth used by each video RTP port (media-track) is about 1MB; which can be controlled using "b=AS" session description parameter values. In two-way video-only session; 2MB bandwidth is used by each peer; otherwise; a low-quality blurred video will be delivered. -The best solution is to use a **middle media server** like **asterisk** or **kamailio** to broadcast your camera stream. +```javascript +// removing existing bandwidth lines +sdp = sdp.replace( /b=AS([^\r\n]+\r\n)/g , ''); -To overcome burden and to deliver HD stream over thousands of peers; we need a media server that should **broadcast** stream coming from room owner's side. +// setting "outgoing" audio RTP port's bandwidth to "50kbit/s" +sdp = sdp.replace( /a=mid:audio\r\n/g , 'a=mid:audio\r\nb=AS:50\r\n'); -Process should be like this: +// setting "outgoing" video RTP port's bandwidth to "256kbit/s" +sdp = sdp.replace( /a=mid:video\r\n/g , 'a=mid:video\r\nb=AS:256\r\n'); +``` -1. Conferencing **initiator** opens **peer-to-server** connection -2. On successful handshake; media server starts broadcasting **remote stream** over all other thousands of peers. 
- -It means that those peers are not connected directly with **room initiator**. - -But, this is **video broadcasting**; it is not **video conferencing**. += -In video-conferencing, each peer connects directly with all others. +#### Possible issues: -To make a **media server** work with video-conferencing also to just open only-one peer connection for each user; I've following assumptions: +1. Blurry video experience +2. Unclear voice and audio lost +3. Bandwidth issues / slow streaming / CPU overwhelming -1. Room initiator opens **peer-to-server** connection -2. Server gets **remote stream** from room initiator -3. A participant opens **peer-to-server** connect -4. Server gets **remote stream** from that participant -5. Server sends **remote stream** coming from participant toward **room iniator** -6. Server also sends **remote stream** coming from room-initiator toward **participant** -7. Another participant opens **peer-to-server** connection...and process continues. +Solution? Obviously a media server! -Media server should mix all video streams; and stream it over single RTP port. += -If 10 room participants are sending video streams; server should mix them to generate a single media stream; then send that stream over single **incoming** RTP port opened between server and **room initiator**. +#### Want to use video-conferencing in your own webpage? + +```html + + + + + +
+
+ + +``` = diff --git a/video-conferencing/conference.js b/video-conferencing/conference.js index 8b8d7d8a..7d530677 100644 --- a/video-conferencing/conference.js +++ b/video-conferencing/conference.js @@ -1,16 +1,15 @@ -// 2013, @muazkh » github.com/muaz-khan -// MIT License » https://webrtc-experiment.appspot.com/licence/ -// Documentation » https://github.com/muaz-khan/WebRTC-Experiment/tree/master/video-conferencing +// Muaz Khan - https://github.com/muaz-khan +// MIT License - https://www.webrtc-experiment.com/licence/ +// Documentation - https://github.com/muaz-khan/WebRTC-Experiment/tree/master/video-conferencing var conference = function(config) { var self = { userToken: uniqueToken() }; - var channels = '--', - isbroadcaster, - isGetNewRoom = true, - sockets = [], - defaultSocket = { }; + var channels = '--', isbroadcaster; + var isGetNewRoom = true; + var sockets = []; + var defaultSocket = { }; function openDefaultSocket() { defaultSocket = config.openSocket({ @@ -229,9 +228,9 @@ var conference = function(config) { joinRoom: function(_config) { self.roomToken = _config.roomToken; isGetNewRoom = false; - - self.joinedARoom = true; - self.broadcasterid = _config.joinUser; + + self.joinedARoom = true; + self.broadcasterid = _config.joinUser; openSubSocket({ channel: self.userToken diff --git a/video-conferencing/index.html b/video-conferencing/index.html index 6e2c6e43..b0911bdd 100644 --- a/video-conferencing/index.html +++ b/video-conferencing/index.html @@ -1,256 +1,368 @@ - - - - - WebRTC video-conferencing experiment ® Muaz Khan - - - - - - - + + + + + + + + + +
+
+

+ WebRTC + » + video-conferencing + ® + Muaz Khan +

+

+ Copyright © 2013 + Muaz Khan<@muazkh> + » + @WebRTC Experiments + » + Google+ + » + What's New? +

+
+ +
+ +
+
+
+ + +
+
+ + Private ?? #123456789 + + + + +
+ + +
+ + +
+
+ + - - - - - -
-

Features

- -
    -
  1. Multi-user video sharing (many-to-many)
  2. -
  3. Cross-browser interoperability
  4. -
- -
-
- - You can use RTCMultiConnection.js for audio/video/screen sharing (i.e. conferencing/broadcasting); it supports multi-sessions establishment on the same page, group text chat and file sharing, auto user presence detection and many other features. - -
-
-
-

Feedback

+ /* UI specific */ + var videosContainer = document.getElementById('videos-container') || document.body; + var btnSetupNewRoom = document.getElementById('setup-new-room'); + var roomsList = document.getElementById('rooms-list'); + + if (btnSetupNewRoom) btnSetupNewRoom.onclick = setupNewRoomButtonClickHandler; + + function hideUnnecessaryStuff() { + var visibleElements = document.getElementsByClassName('visible'), + length = visibleElements.length; + for (var i = 0; i < length; i++) { + visibleElements[i].style.display = 'none'; + } + } + + function rotateVideo(video) { + video.style[navigator.mozGetUserMedia ? 'transform' : '-webkit-transform'] = 'rotate(0deg)'; + setTimeout(function() { + video.style[navigator.mozGetUserMedia ? 'transform' : '-webkit-transform'] = 'rotate(360deg)'; + }, 1000); + } + + (function() { + var uniqueToken = document.getElementById('unique-token'); + if (uniqueToken) + if (location.hash.length > 2) uniqueToken.parentNode.parentNode.parentNode.innerHTML = '

Share this link

'; + else uniqueToken.innerHTML = uniqueToken.parentNode.parentNode.href = '#' + (Math.random() * new Date().getTime()).toString(36).toUpperCase().replace( /\./g , '-'); + })(); + + function scaleVideos() { + var videos = document.querySelectorAll('video'), + length = videos.length, video; + + var minus = 130; + var windowHeight = 700; + var windowWidth = 600; + var windowAspectRatio = windowWidth / windowHeight; + var videoAspectRatio = 4 / 3; + var blockAspectRatio; + var tempVideoWidth = 0; + var maxVideoWidth = 0; + + for (var i = length; i > 0; i--) { + blockAspectRatio = i * videoAspectRatio / Math.ceil(length / i); + if (blockAspectRatio <= windowAspectRatio) { + tempVideoWidth = videoAspectRatio * windowHeight / Math.ceil(length / i); + } + else { + tempVideoWidth = windowWidth / i; + } + if (tempVideoWidth > maxVideoWidth) + maxVideoWidth = tempVideoWidth; + } + for (var i = 0; i < length; i++) { + video = videos[i]; + if (video) + video.width = maxVideoWidth - minus; + } + } + + window.onresize = scaleVideos; + + + +
+
    +
  1. Mesh networking model is implemented to open multiple interconnected peer connections
  2. +
  3. Maximum peer connections limit is 256 (on chrome)
  4. +
+
+ +
+

Latest Updates

+
+
+ +
+

Feedback

- +
- + Enter your email too; if you want "direct" reply! +
+ +
+

How it works?

+

+ Huge bandwidth and CPU-usage out of multiple peers interconnection: + +

+

+ To understand it better; assume that 10 users are sharing video in a group. + 40 RTP-ports (i.e. streams) will be created for each user. All streams are expected + to be flowing concurrently; which causes blur video experience and audio lose/noise (echo) + issues.

+

+ For each user:

+
    +
  1. 10 RTP ports are opened to send video upward i.e. for outgoing video streams
  2. +
  3. 10 RTP ports are opened to send audio upward i.e. for outgoing audio streams
  4. +
  5. 10 RTP ports are opened to receive video i.e. for incoming video streams
  6. +
  7. 10 RTP ports are opened to receive audio i.e. for incoming audio streams
  8. +
+

+ Maximum bandwidth used by each video RTP port (media-track) is about 1MB; which can be controlled using "b=AS" session description parameter values. In two-way video-only session; 2MB bandwidth is used by each peer; otherwise; a low-quality blurred video will be delivered. +

+
+// removing existing bandwidth lines
+sdp = sdp.replace( /b=AS([^\r\n]+\r\n)/g , '');
+
+// setting "outgoing" audio RTP port's bandwidth to "50kbit/s"
+sdp = sdp.replace( /a=mid:audio\r\n/g , 'a=mid:audio\r\nb=AS:50\r\n');
+
+// setting "outgoing" video RTP port's bandwidth to "256kbit/s"
+sdp = sdp.replace( /a=mid:video\r\n/g , 'a=mid:video\r\nb=AS:256\r\n');
+
+

+ Possible issues:

+
    +
  1. Blurry video experience
  2. +
  3. Unclear voice and audio lost
  4. +
  5. Bandwidth issues / slow streaming / CPU overwhelming
  6. +
+

Solution? Obviously a media server!

-
-
+ - - - - +

+ + + + + + \ No newline at end of file diff --git a/webrtc-broadcasting/README.md b/webrtc-broadcasting/README.md index 6c82caaa..cdcf8fdd 100644 --- a/webrtc-broadcasting/README.md +++ b/webrtc-broadcasting/README.md @@ -4,6 +4,8 @@ Participants can view your broadcasted video **anonymously**. They can also list This experiment is actually a **one-way** audio/video/screen streaming. += + You can: 1. Share your screen in one-way over many peers @@ -14,30 +16,199 @@ You can: #### How WebRTC One-Way Broadcasting Works? -It is a **one-to-many** audio/video/screen sharing experiment. However, only room initiator will be asked to allow access to camera/microphone because his media stream will be shared in one-way over all connected peers. +1. Mesh networking model is implemented to open multiple interconnected peer connections +2. Maximum peer connections limit is 256 (on chrome) + += + +It is one-way broadcasting; media stream is attached only by the broadcaster. + +It means that, if 10 people are watching your one-way broadcasted audio/video stream; on your system: -It means that, if 10 people are watching your one-way broadcasted video stream; on your system: +1. 10 RTP ports are opened to send video upward i.e. outgoing video +2. 10 RTP ports are opened to send audio upward i.e. outgoing audio -1. 10 unique peer connections are opened -2. Same **LocalMediaStream** is attached over all those **10 peers** +And on participants system: -Behind the scene: +1. 10 RTP ports are opened to receive video i.e. incoming video +2. 10 RTP ports are opened to receive audio i.e. incoming audio -1. 10 unique RTP ports are opened for **outgoing local audio stream** -2. 10 unique RTP ports are opened for **outgoing local video stream** +Maximum bandwidth used by each video RTP port (media-track) is about 1MB. You're streaming audio and video tracks. You must be careful when streaming video over more than one peers. If you're broadcasting audio/video over 10 peers; it means that 20MB bandwidth is required on your system to stream-up (broadcast/transmit) your video. Otherwise; you'll face connection lost; CPU usage issues; and obviously audio-lost/noise/echo issues. -So, total **20 RTP ports** are opened on your system to make it work! +You can handle such things using "b=AS" (application specific bandwidth) session description parameter values to deliver a little bit low quality video. -Also, **10 unique sockets** are opened to exchange SDP/ICE! +```javascript +// removing existing bandwidth lines +sdp = sdp.replace( /b=AS([^\r\n]+\r\n)/g , ''); -Remember, there is **no incoming RTP port** is opened on your system! **Because it is one-way streaming**! +// setting "outgoing" audio RTP port's bandwidth to "50kbit/s" +sdp = sdp.replace( /a=mid:audio\r\n/g , 'a=mid:audio\r\nb=AS:50\r\n'); -For users who are watching your video stream anonymously; **2 incoming RTP** ports are opened on each user's side: +// setting "outgoing" video RTP port's bandwidth to "256kbit/s" +sdp = sdp.replace( /a=mid:video\r\n/g , 'a=mid:video\r\nb=AS:256\r\n'); +``` -1. One RTP port for **incoming remote audio stream** -2. One RTP port for **incoming remote video stream** += + +Possible issues + +1. Blurry video experience +2. Unclear voice and audio lost +3. Bandwidth issues / slow streaming / CPU overwhelming + +Solution? Obviously a media server! + += -Again, because it is one-way streaming; **no outgoing RTP ports** will be opened on room participants' side. +#### Want to use video-conferencing in your own webpage? 
+ +```html + + + + + + + + +
+
+ + +``` = diff --git a/webrtc-broadcasting/broadcast-ui.js b/webrtc-broadcasting/broadcast-ui.js deleted file mode 100644 index ea7d4508..00000000 --- a/webrtc-broadcasting/broadcast-ui.js +++ /dev/null @@ -1,176 +0,0 @@ -// 2013, @muazkh » github.com/muaz-khan -// MIT License » https://webrtc-experiment.appspot.com/licence/ -// Documentation » https://github.com/muaz-khan/WebRTC-Experiment/tree/master/webrtc-broadcasting - -var config = { - openSocket: function (config) { - var SIGNALING_SERVER = 'https://www.webrtc-experiment.com:8553/', - defaultChannel = location.hash.substr(1) || 'video-oneway-broadcasting'; - - var channel = config.channel || defaultChannel; - var sender = Math.round(Math.random() * 999999999) + 999999999; - - io.connect(SIGNALING_SERVER).emit('new-channel', { - channel: channel, - sender: sender - }); - - var socket = io.connect(SIGNALING_SERVER + channel); - socket.channel = channel; - socket.on('connect', function() { - if (config.callback) config.callback(socket); - }); - - socket.send = function(message) { - socket.emit('message', { - sender: sender, - data: message - }); - }; - - socket.on('message', config.onmessage); - }, - onRemoteStream: function (htmlElement) { - htmlElement.setAttribute('controls', true); - participants.insertBefore(htmlElement, participants.firstChild); - htmlElement.play(); - rotateInCircle(htmlElement); - }, - onRoomFound: function (room) { - var hash = location.hash.replace('#', '').length; - if (!hash) { - var alreadyExist = document.getElementById(room.broadcaster); - if (alreadyExist) return; - - if (typeof roomsList === 'undefined') roomsList = document.body; - - var tr = document.createElement('tr'); - tr.setAttribute('id', room.broadcaster); - - if (room.isAudio) tr.setAttribute('accesskey', room.isAudio); - - tr.innerHTML = '' + room.roomName + '' + - ''; - roomsList.insertBefore(tr, roomsList.firstChild); - - tr.onclick = function () { - tr = this; - broadcastUI.joinRoom({ - roomToken: tr.querySelector('.join').id, - joinUser: tr.id, - isAudio: tr.getAttribute('accesskey') - }); - hideUnnecessaryStuff(); - }; - } else { - /* auto join privately shared room */ - config.attachStream = null; - broadcastUI.joinRoom({ - roomToken: room.roomToken, - joinUser: room.broadcaster, - isAudio: room.isAudio - }); - hideUnnecessaryStuff(); - } - }, - onNewParticipant: function (participants) { - var numberOfParticipants = document.getElementById('number-of-participants'); - if (!numberOfParticipants) return; - numberOfParticipants.innerHTML = participants + ' room participants'; - } -}; - -function createButtonClickHandler() { - captureUserMedia(function () { - var shared = 'video'; - if (window.option == 'Only Audio') shared = 'audio'; - if (window.option == 'Screen') shared = 'screen'; - broadcastUI.createRoom({ - roomName: (document.getElementById('conference-name') || {}).value || 'Anonymous', - isAudio: shared === 'audio' - }); - }); - hideUnnecessaryStuff(); -} - -function captureUserMedia(callback) { - var constraints = null; - window.option = broadcastingOption ? broadcastingOption.value : ''; - if (option === 'Only Audio') { - constraints = { - audio: true, - video: false - }; - } - if (option === 'Screen') { - var video_constraints = { - mandatory: { - chromeMediaSource: 'screen' - }, - optional: [] - }; - constraints = { - audio: false, - video: video_constraints - }; - } - - var htmlElement = document.createElement(option === 'Only Audio' ? 
'audio' : 'video'); - htmlElement.setAttribute('autoplay', true); - htmlElement.setAttribute('controls', true); - participants.insertBefore(htmlElement, participants.firstChild); - - var mediaConfig = { - video: htmlElement, - onsuccess: function (stream) { - config.attachStream = stream; - callback && callback(); - - htmlElement.setAttribute('muted', true); - rotateInCircle(htmlElement); - }, - onerror: function () { - if (option === 'Only Audio') alert('unable to get access to your microphone'); - else if(option === 'Screen') { - if(location.protocol === 'http:') alert('Please test this WebRTC experiment on HTTPS.'); - else alert('Screen capturing is either denied or not supported. Are you enabled flag: "Enable screen capture support in getUserMedia"?'); - } - else alert('unable to get access to your webcam'); - } - }; - if (constraints) mediaConfig.constraints = constraints; - getUserMedia(mediaConfig); -} - -/* on page load: get public rooms */ -var broadcastUI = broadcast(config); - -/* UI specific */ -var participants = document.getElementById("participants") || document.body; -var startConferencing = document.getElementById('start-conferencing'); -var roomsList = document.getElementById('rooms-list'); - -var broadcastingOption = document.getElementById('broadcasting-option'); - -if (startConferencing) startConferencing.onclick = createButtonClickHandler; - -function hideUnnecessaryStuff() { - var visibleElements = document.getElementsByClassName('visible'), - length = visibleElements.length; - for (var i = 0; i < length; i++) { - visibleElements[i].style.display = 'none'; - } -} - -function rotateInCircle(video) { - video.style[navigator.mozGetUserMedia ? 'transform' : '-webkit-transform'] = 'rotate(0deg)'; - setTimeout(function () { - video.style[navigator.mozGetUserMedia ? 'transform' : '-webkit-transform'] = 'rotate(360deg)'; - }, 1000); -} - -(function () { - var uniqueToken = document.getElementById('unique-token'); - if (uniqueToken) if (location.hash.length > 2) uniqueToken.parentNode.parentNode.parentNode.innerHTML = '

Share this link

'; - else uniqueToken.innerHTML = uniqueToken.parentNode.parentNode.href = '#' + (Math.random() * new Date().getTime()).toString(36).toUpperCase().replace(/\./g, '-'); -})(); \ No newline at end of file diff --git a/webrtc-broadcasting/index.html b/webrtc-broadcasting/index.html index 091af9c3..49204c28 100644 --- a/webrtc-broadcasting/index.html +++ b/webrtc-broadcasting/index.html @@ -1,218 +1,361 @@ - - - + + + - WebRTC One-Way Audio, Video or Screen Sharing/Broadcasting ® Muaz Khan + WebRTC Broadcasting ® Muaz Khan - + + + + - + + + + + - +
- ↑ WEBRTC EXPERIMENTS +
+

+ WebRTC + Broadcasting + ® + Muaz Khan +

+

+ Copyright © 2013 + Muaz Khan<@muazkh> + » + @WebRTC Experiments + » + Google+ + » + What's New? +

+
-

- WebRTC one-way audio, video and screen sharing -

+
-

- Copyright © 2013 Muaz Khan<@muazkh>. -

- -
+
- - - - - - - -
- - - - - - -
-
- - - - -
Private broadcast ?? -

- /webrtc-broadcasting/#123456789 -

-
-
- - - - - -
    -
  1. Share your screen in one-way over many peers
  2. -
  3. Share you camera in one-way over many peers
  4. -
  5. Share/transmit your voice in one-way over many peers
  6. -
+ + + +
+ + +
+ + +
+
+ + + +
+
    +
  1. Mesh networking model is implemented to open multiple interconnected peer connections
  2. +
  3. Maximum peer connections limit is 256 (on chrome)
  4. +
+
+ +
+

Latest Updates

+
+
+ +
+

Feedback

- +
- + Enter your email too; if you want "direct" reply! +
+ +
+

How it works?

+

+ It is one-way broadcasting; media stream is attached only by the broadcaster. +

+

+ It means that, if 10 people are watching your one-way broadcasted audio/video stream; on your system: +

+
    +
  1. 10 RTP ports are opened to send video upward i.e. outgoing video
  2. +
  3. 10 RTP ports are opened to send audio upward i.e. outgoing audio
  4. +
+

+ And on participants system: +

+
    +
  1. 10 RTP ports are opened to receive video i.e. incoming video
  2. +
  3. 10 RTP ports are opened to receive audio i.e. incoming audio
  4. +
+

+ Maximum bandwidth used by each video RTP port (media-track) is about 1MB. You're streaming audio and video tracks. You must be careful when streaming video over more than one peers. If you're broadcasting audio/video over 10 peers; it means that 20MB bandwidth is required on your system to stream-up (broadcast/transmit) your video. Otherwise; you'll face connection lost; CPU usage issues; and obviously audio-lost/noise/echo issues. +

+

+ You can handle such things using "b=AS" (application specific bandwidth) session description parameter values to deliver a little bit low quality video. +

+
+// removing existing bandwidth lines
+sdp = sdp.replace( /b=AS([^\r\n]+\r\n)/g , '');
+
+// setting "outgoing" audio RTP port's bandwidth to "50kbit/s"
+sdp = sdp.replace( /a=mid:audio\r\n/g , 'a=mid:audio\r\nb=AS:50\r\n');
+
+// setting "outgoing" video RTP port's bandwidth to "256kbit/s"
+sdp = sdp.replace( /a=mid:video\r\n/g , 'a=mid:video\r\nb=AS:256\r\n');
+
+

+ Possible issues:

+
    +
  1. Blurry video experience
  2. +
  3. Unclear voice and audio lost
  4. +
  5. Bandwidth issues / slow streaming / CPU overwhelming
  6. +
+

Solution? Obviously a media server!

- + + + - \ No newline at end of file