From fba5be47a9cd702f3a3b038c2db0a832731a25f9 Mon Sep 17 00:00:00 2001 From: Muaz Khan <=> Date: Thu, 21 Nov 2013 17:14:36 +0500 Subject: [PATCH] old updates! --- DataChannel/DataChannel.js | 17 +- .../MediaStreamRecorder (all-in-one).js | 948 ++ Pluginfree-Screen-Sharing/README.md | 34 + Pre-recorded-Media-Streaming/index.html | 1 - .../Library/RTCMultiConnection-v1.1.js | 45 +- .../Library/RTCMultiConnection-v1.2.js | 45 +- .../Library/RTCMultiConnection-v1.3.js | 14 +- .../Library/RTCMultiConnection-v1.4.js | 166 +- RTCMultiConnection/README.md | 19 +- .../RecordRTC-and-RTCMultiConnection.html | 2 +- .../features.html | 147 + RTCPeerConnection/RTCPeerConnection-v1.3.js | 2 +- RTCPeerConnection/RTCPeerConnection-v1.4.js | 26 +- RTCPeerConnection/RTCPeerConnection-v1.5.js | 49 - RTCall/RTCall.js | 2 +- RecordRTC/RecordRTC-to-PHP/index.html | 639 +- RecordRTC/RecordRTC.js | 333 +- RecordRTC/index.html | 7 +- Signaling.md | 43 + file-sharing/data-connection.js | 27 +- firebase-debug.js | 8351 +++++++++++++++++ meeting/index.html | 41 +- meeting/meeting.js | 14 +- one-to-many-audio-broadcasting/index.html | 2 +- one-to-many-audio-broadcasting/meeting.js | 50 +- one-to-many-video-broadcasting/index.html | 27 +- one-to-many-video-broadcasting/meeting.js | 50 +- .../{ => firebase}/index.html | 0 part-of-screen-sharing/iframe/index.html | 37 + part-of-screen-sharing/iframe/otherpage.html | 8 + .../realtime-chat/No-WebRTC-Chat.html | 6 +- .../RTCDataChannel/hangout-ui.js | 63 - .../realtime-chat/RTCDataChannel/hangout.js | 248 - .../realtime-chat/how-this-work.html | 4 +- .../realtime-chat/index.html | 193 - part-of-screen-sharing/screenshot-dev.js | 1 + .../README.md | 74 - .../index.html | 449 - .../webrtc-data-channel/index.html | 442 + screen-sharing/screen.js | 14 +- socket.io/PeerConnection.js | 93 +- socket.io/index.html | 158 +- .../RTCPeerConnection-v1.5.js | 105 +- text-chat/data-connection.js | 27 +- video-conferencing/README.md | 44 + 
websocket/PeerConnection.js | 113 +- websocket/index.html | 350 +- 47 files changed, 11367 insertions(+), 2163 deletions(-) create mode 100644 MediaStreamRecorder/MediaStreamRecorder (all-in-one).js create mode 100644 RTCMultiConnection/RTCMultiConnection-v1.4-Demos/features.html create mode 100644 firebase-debug.js rename part-of-screen-sharing/{ => firebase}/index.html (100%) create mode 100644 part-of-screen-sharing/iframe/index.html create mode 100644 part-of-screen-sharing/iframe/otherpage.html delete mode 100644 part-of-screen-sharing/realtime-chat/RTCDataChannel/hangout-ui.js delete mode 100644 part-of-screen-sharing/realtime-chat/RTCDataChannel/hangout.js delete mode 100644 part-of-screen-sharing/realtime-chat/index.html delete mode 100644 part-of-screen-sharing/webrtc-and-part-of-screen-sharing/README.md delete mode 100644 part-of-screen-sharing/webrtc-and-part-of-screen-sharing/index.html create mode 100644 part-of-screen-sharing/webrtc-data-channel/index.html diff --git a/DataChannel/DataChannel.js b/DataChannel/DataChannel.js index 23e2d464..fd4e27f0 100644 --- a/DataChannel/DataChannel.js +++ b/DataChannel/DataChannel.js @@ -936,6 +936,12 @@ OfferToReceiveVideo: !!moz } }; + + function onSdpError(e) { + console.error('sdp error:', e.name, e.message); + } + + function onSdpSuccess() {} function createOffer() { if (!options.onOfferSDP) return; @@ -944,20 +950,20 @@ sessionDescription.sdp = setBandwidth(sessionDescription.sdp); peerConnection.setLocalDescription(sessionDescription); options.onOfferSDP(sessionDescription); - }, null, constraints); + }, onSdpError, constraints); } function createAnswer() { if (!options.onAnswerSDP) return; options.offerSDP = new SessionDescription(options.offerSDP); - peerConnection.setRemoteDescription(options.offerSDP); + peerConnection.setRemoteDescription(options.offerSDP, onSdpSuccess, onSdpError); peerConnection.createAnswer(function(sessionDescription) { sessionDescription.sdp = setBandwidth(sessionDescription.sdp); 
peerConnection.setLocalDescription(sessionDescription); options.onAnswerSDP(sessionDescription); - }, null, constraints); + }, onSdpError, constraints); } function setBandwidth(sdp) { @@ -1032,13 +1038,12 @@ } } - function useless() { - } + function useless() {} return { addAnswerSDP: function(sdp) { sdp = new SessionDescription(sdp); - peerConnection.setRemoteDescription(sdp); + peerConnection.setRemoteDescription(sdp, onSdpSuccess, onSdpError); }, addICE: function(candidate) { peerConnection.addIceCandidate(new IceCandidate({ diff --git a/MediaStreamRecorder/MediaStreamRecorder (all-in-one).js b/MediaStreamRecorder/MediaStreamRecorder (all-in-one).js new file mode 100644 index 00000000..ce0090a2 --- /dev/null +++ b/MediaStreamRecorder/MediaStreamRecorder (all-in-one).js @@ -0,0 +1,948 @@ +// Muaz Khan - https://github.com/muaz-khan +// MIT License - https://www.webrtc-experiment.com/licence/ +// Documentation - https://github.com/muaz-khan/WebRTC-Experiment/tree/master/MediaStreamRecorder +// ========================================================== +// MediaStreamRecorder.js + +function MediaStreamRecorder(mediaStream) { + if (!mediaStream) throw 'MediaStream is mandatory.'; + + // void start(optional long timeSlice) + // timestamp to fire "ondataavailable" + this.start = function(timeSlice) { + // Media Stream Recording API has not been implemented in chrome yet; + // That's why using WebAudio API to record stereo audio in WAV format + var Recorder = IsChrome ? 
window.StereoRecorder : window.MediaRecorder; + + // video recorder (in WebM format) + if (this.mimeType === 'video/webm') Recorder = window.WhammyRecorder; + + // video recorder (in GIF format) + if (this.mimeType === 'image/gif') Recorder = window.GifRecorder; + + mediaRecorder = new Recorder(mediaStream); + mediaRecorder.ondataavailable = this.ondataavailable; + mediaRecorder.onstop = this.onstop; + + // Merge all data-types except "function" + mediaRecorder = mergeProps(mediaRecorder, this); + + mediaRecorder.start(timeSlice); + }; + + this.stop = function() { + if (mediaRecorder) mediaRecorder.stop(); + }; + + this.ondataavailable = function(blob) { + console.log('ondataavailable..', blob); + }; + + this.onstop = function() { + console.log('stopped..'); + }; + + // Reference to "MediaRecorder.js" + var mediaRecorder; +} + +// ========================== +// Cross-Browser Declarations + +// Media Stream Recording API representer +MediaRecorderWrapper = window.MediaRecorder; + +// animation-frame used in WebM recording +requestAnimationFrame = window.webkitRequestAnimationFrame || window.mozRequestAnimationFrame; +cancelAnimationFrame = window.webkitCancelAnimationFrame || window.mozCancelAnimationFrame; + +// WebAudio API representer +AudioContext = window.webkitAudioContext || window.mozAudioContext; + +URL = window.URL || window.webkitURL; +navigator.getUserMedia = navigator.webkitGetUserMedia || navigator.mozGetUserMedia; + +IsChrome = !!navigator.webkitGetUserMedia; + +// Merge all other data-types except "function" +function mergeProps(mergein, mergeto) { + for (var t in mergeto) { + if (typeof mergeto[t] !== 'function') { + mergein[t] = mergeto[t]; + } + } + return mergein; +} + + +// Muaz Khan - https://github.com/muaz-khan +// neizerth - https://github.com/neizerth +// MIT License - https://www.webrtc-experiment.com/licence/ +// Documentation - https://github.com/streamproc/MediaStreamRecorder +// 
========================================================== +// MediaRecorder.js + +function MediaRecorder(mediaStream) { + // void start(optional long timeSlice) + // timestamp to fire "ondataavailable" + this.start = function(timeSlice) { + timeSlice = timeSlice || 1000; + + mediaRecorder = new MediaRecorderWrapper(mediaStream); + mediaRecorder.ondataavailable = function(e) { + if (mediaRecorder.state == 'recording') { + var blob = new window.Blob([e.data], { + type: self.mimeType || 'audio/ogg' + }); + self.ondataavailable(blob); + mediaRecorder.stop(); + } + }; + + mediaRecorder.onstop = function() { + if (mediaRecorder.state == 'inactive') { + // bug: it is a temporary workaround; it must be fixed. + mediaRecorder = new MediaRecorder(mediaStream); + mediaRecorder.ondataavailable = self.ondataavailable; + mediaRecorder.onstop = self.onstop; + mediaRecorder.mimeType = self.mimeType; + mediaRecorder.start(timeSlice); + } + + self.onstop(); + }; + + // void start(optional long timeSlice) + mediaRecorder.start(timeSlice); + }; + + this.stop = function() { + if (mediaRecorder && mediaRecorder.state == 'recording') { + mediaRecorder.stop(); + } + }; + + this.ondataavailable = function() {}; + this.onstop = function() {}; + + // Reference to itself + var self = this; + + // Reference to "MediaRecorderWrapper" object + var mediaRecorder; +} + + +// Muaz Khan - https://github.com/muaz-khan +// neizerth - https://github.com/neizerth +// MIT License - https://www.webrtc-experiment.com/licence/ +// Documentation - https://github.com/streamproc/MediaStreamRecorder +// ========================================================== +// StereoRecorder.js + +function StereoRecorder(mediaStream) { + // void start(optional long timeSlice) + // timestamp to fire "ondataavailable" + this.start = function(timeSlice) { + timeSlice = timeSlice || 1000; + + mediaRecorder = new StereoAudioRecorder(mediaStream, this); + + (function looper() { + mediaRecorder.record(); + + 
setTimeout(function() { + mediaRecorder.stop(); + looper(); + }, timeSlice); + })(); + }; + + this.stop = function() { + if (mediaRecorder) mediaRecorder.stop(); + }; + + this.ondataavailable = function() {}; + + // Reference to "StereoAudioRecorder" object + var mediaRecorder; +} + +// source code from: http://typedarray.org/wp-content/projects/WebAudioRecorder/script.js +function StereoAudioRecorder(mediaStream, root) { + // variables + var leftchannel = []; + var rightchannel = []; + var recorder; + var recording = false; + var recordingLength = 0; + var volume; + var audioInput; + var sampleRate = 44100; + var audioContext; + var context; + + this.record = function() { + recording = true; + // reset the buffers for the new recording + leftchannel.length = rightchannel.length = 0; + recordingLength = 0; + }; + + this.stop = function() { + // we stop recording + recording = false; + + // we flat the left and right channels down + var leftBuffer = mergeBuffers(leftchannel, recordingLength); + var rightBuffer = mergeBuffers(rightchannel, recordingLength); + // we interleave both channels together + var interleaved = interleave(leftBuffer, rightBuffer); + + // we create our wav file + var buffer = new ArrayBuffer(44 + interleaved.length * 2); + var view = new DataView(buffer); + + // RIFF chunk descriptor + writeUTFBytes(view, 0, 'RIFF'); + view.setUint32(4, 44 + interleaved.length * 2, true); + writeUTFBytes(view, 8, 'WAVE'); + // FMT sub-chunk + writeUTFBytes(view, 12, 'fmt '); + view.setUint32(16, 16, true); + view.setUint16(20, 1, true); + // stereo (2 channels) + view.setUint16(22, 2, true); + view.setUint32(24, sampleRate, true); + view.setUint32(28, sampleRate * 4, true); + view.setUint16(32, 4, true); + view.setUint16(34, 16, true); + // data sub-chunk + writeUTFBytes(view, 36, 'data'); + view.setUint32(40, interleaved.length * 2, true); + + // write the PCM samples + var lng = interleaved.length; + var index = 44; + var volume = 1; + for (var i = 0; i < 
lng; i++) { + view.setInt16(index, interleaved[i] * (0x7FFF * volume), true); + index += 2; + } + + // our final binary blob + var blob = new Blob([view], { type: 'audio/wav' }); + + root.ondataavailable(blob); + }; + + function interleave(leftChannel, rightChannel) { + var length = leftChannel.length + rightChannel.length; + var result = new Float32Array(length); + + var inputIndex = 0; + + for (var index = 0; index < length;) { + result[index++] = leftChannel[inputIndex]; + result[index++] = rightChannel[inputIndex]; + inputIndex++; + } + return result; + } + + function mergeBuffers(channelBuffer, recordingLength) { + var result = new Float32Array(recordingLength); + var offset = 0; + var lng = channelBuffer.length; + for (var i = 0; i < lng; i++) { + var buffer = channelBuffer[i]; + result.set(buffer, offset); + offset += buffer.length; + } + return result; + } + + function writeUTFBytes(view, offset, string) { + var lng = string.length; + for (var i = 0; i < lng; i++) { + view.setUint8(offset + i, string.charCodeAt(i)); + } + } + + // creates the audio context + audioContext = window.AudioContext || window.webkitAudioContext; + context = new audioContext(); + + // creates a gain node + volume = context.createGain(); + + // creates an audio node from the microphone incoming stream + audioInput = context.createMediaStreamSource(mediaStream); + + // connect the stream to the gain node + audioInput.connect(volume); + + /* From the spec: This value controls how frequently the audioprocess event is + dispatched and how many sample-frames need to be processed each call. + Lower values for buffer size will result in a lower (better) latency. 
+ Higher values will be necessary to avoid audio breakup and glitches */ + var bufferSize = 2048; + recorder = context.createJavaScriptNode(bufferSize, 2, 2); + + recorder.onaudioprocess = function(e) { + if (!recording) return; + var left = e.inputBuffer.getChannelData(0); + var right = e.inputBuffer.getChannelData(1); + // we clone the samples + leftchannel.push(new Float32Array(left)); + rightchannel.push(new Float32Array(right)); + recordingLength += bufferSize; + }; // we connect the recorder + volume.connect(recorder); + recorder.connect(context.destination); +} + + +// Muaz Khan - https://github.com/muaz-khan +// neizerth - https://github.com/neizerth +// MIT License - https://www.webrtc-experiment.com/licence/ +// Documentation - https://github.com/streamproc/MediaStreamRecorder +// ========================================================== +// WhammyRecorder.js + +function WhammyRecorder(mediaStream) { + // void start(optional long timeSlice) + // timestamp to fire "ondataavailable" + this.start = function(timeSlice) { + timeSlice = timeSlice || 1000; + + var imageWidth = this.videoWidth || 320; + var imageHeight = this.videoHeight || 240; + + canvas.width = video.width = imageWidth; + canvas.height = video.height = imageHeight; + + startTime = Date.now(); + + function drawVideoFrame(time) { + lastAnimationFrame = requestAnimationFrame(drawVideoFrame); + + if (typeof lastFrameTime === undefined) { + lastFrameTime = time; + } + + // ~10 fps + if (time - lastFrameTime < 90) return; + + context.drawImage(video, 0, 0, imageWidth, imageHeight); + + // whammy.add(canvas, time - lastFrameTime); + whammy.add(canvas); + + // console.log('Recording...' 
+ Math.round((Date.now() - startTime) / 1000) + 's'); + // console.log("fps: ", 1000 / (time - lastFrameTime)); + + lastFrameTime = time; + } + + lastAnimationFrame = requestAnimationFrame(drawVideoFrame); + + (function getWebMBlob() { + setTimeout(function() { + endTime = Date.now(); + console.log('frames captured: ' + whammy.frames.length + ' => ' + + ((endTime - startTime) / 1000) + 's video'); + + var WebM_Blob = whammy.compile(); + self.ondataavailable(WebM_Blob); + + whammy.frames = []; + getWebMBlob(); + }, timeSlice); + })(); + }; + + this.stop = function() { + if (lastAnimationFrame) + cancelAnimationFrame(lastAnimationFrame); + }; + + this.ondataavailable = function() {}; + this.onstop = function() {}; + + // Reference to itself + var self = this; + + var canvas = document.createElement('canvas'); + var context = canvas.getContext('2d'); + + var video = document.createElement('video'); + video.muted = true; + video.autoplay = true; + video.src = URL.createObjectURL(mediaStream); + video.play(); + + var lastAnimationFrame = null; + var startTime, endTime, lastFrameTime; + var whammy = new Whammy.Video(10, 0.6); +} + + +// Muaz Khan - https://github.com/muaz-khan +// neizerth - https://github.com/neizerth +// MIT License - https://www.webrtc-experiment.com/licence/ +// Documentation - https://github.com/streamproc/MediaStreamRecorder +// ========================================================== +// GifRecorder.js + +function GifRecorder(mediaStream) { + // void start(optional long timeSlice) + // timestamp to fire "ondataavailable" + this.start = function(timeSlice) { + timeSlice = timeSlice || 1000; + + var imageWidth = this.videoWidth || 320; + var imageHeight = this.videoHeight || 240; + + canvas.width = video.width = imageWidth; + canvas.height = video.height = imageHeight; + + // external library to record as GIF images + gifEncoder = new GIFEncoder(); + + // void setRepeat(int iter) + // Sets the number of times the set of GIF frames should be 
played. + // Default is 1; 0 means play indefinitely. + gifEncoder.setRepeat(0); + + // void setFrameRate(Number fps) + // Sets frame rate in frames per second. + // Equivalent to setDelay(1000/fps). + // Using "setDelay" instead of "setFrameRate" + gifEncoder.setDelay(this.frameRate || 200); + + // void setQuality(int quality) + // Sets quality of color quantization (conversion of images to the + // maximum 256 colors allowed by the GIF specification). + // Lower values (minimum = 1) produce better colors, + // but slow processing significantly. 10 is the default, + // and produces good color mapping at reasonable speeds. + // Values greater than 20 do not yield significant improvements in speed. + gifEncoder.setQuality(this.quality || 10); + + // Boolean start() + // This writes the GIF Header and returns false if it fails. + gifEncoder.start(); + + startTime = Date.now(); + + function drawVideoFrame(time) { + lastAnimationFrame = requestAnimationFrame(drawVideoFrame); + + if (typeof lastFrameTime === undefined) { + lastFrameTime = time; + } + + // ~10 fps + if (time - lastFrameTime < 90) return; + + context.drawImage(video, 0, 0, imageWidth, imageHeight); + + gifEncoder.addFrame(context); + + // console.log('Recording...' 
+ Math.round((Date.now() - startTime) / 1000) + 's'); + // console.log("fps: ", 1000 / (time - lastFrameTime)); + + lastFrameTime = time; + } + + lastAnimationFrame = requestAnimationFrame(drawVideoFrame); + + (function getWebMBlob() { + setTimeout(function() { + endTime = Date.now(); + + var gifBlob = new Blob([new Uint8Array(gifEncoder.stream().bin)], { + type: 'image/gif' + }); + self.ondataavailable(gifBlob); + + // bug: find a way to clear old recorded blobs + gifEncoder.stream().bin = []; + + getWebMBlob(); + }, timeSlice); + })(); + }; + + this.stop = function() { + if (lastAnimationFrame) cancelAnimationFrame(lastAnimationFrame); + }; + + this.ondataavailable = function() {}; + this.onstop = function() {}; + + // Reference to itself + var self = this; + + var canvas = document.createElement('canvas'); + var context = canvas.getContext('2d'); + + var video = document.createElement('video'); + video.muted = true; + video.autoplay = true; + video.src = URL.createObjectURL(mediaStream); + video.play(); + + var lastAnimationFrame = null; + var startTime, endTime, lastFrameTime; + + var gifEncoder; +} + + +// Muaz Khan - https://github.com/muaz-khan +// neizerth - https://github.com/neizerth +// MIT License - https://www.webrtc-experiment.com/licence/ +// Documentation - https://github.com/streamproc/MediaStreamRecorder +// ========================================================== +// whammy.js + +// ========================================================== + +// Note: +// ========================================================== +// whammy.js is an "external library" +// and has its own copyrights. Taken from "Whammy" project. 
+ +var Whammy = (function() { + // in this case, frames has a very specific meaning, which will be + // detailed once i finish writing the code + + function toWebM(frames) { + var info = checkFrames(frames); + var counter = 0; + var EBML = [ + { + "id": 0x1a45dfa3, // EBML + "data": [ + { + "data": 1, + "id": 0x4286 // EBMLVersion + }, + { + "data": 1, + "id": 0x42f7 // EBMLReadVersion + }, + { + "data": 4, + "id": 0x42f2 // EBMLMaxIDLength + }, + { + "data": 8, + "id": 0x42f3 // EBMLMaxSizeLength + }, + { + "data": "webm", + "id": 0x4282 // DocType + }, + { + "data": 2, + "id": 0x4287 // DocTypeVersion + }, + { + "data": 2, + "id": 0x4285 // DocTypeReadVersion + } + ] + }, + { + "id": 0x18538067, // Segment + "data": [ + { + "id": 0x1549a966, // Info + "data": [ + { + "data": 1e6, //do things in millisecs (num of nanosecs for duration scale) + "id": 0x2ad7b1 // TimecodeScale + }, + { + "data": "whammy", + "id": 0x4d80 // MuxingApp + }, + { + "data": "whammy", + "id": 0x5741 // WritingApp + }, + { + "data": doubleToString(info.duration), + "id": 0x4489 // Duration + } + ] + }, + { + "id": 0x1654ae6b, // Tracks + "data": [ + { + "id": 0xae, // TrackEntry + "data": [ + { + "data": 1, + "id": 0xd7 // TrackNumber + }, + { + "data": 1, + "id": 0x63c5 // TrackUID + }, + { + "data": 0, + "id": 0x9c // FlagLacing + }, + { + "data": "und", + "id": 0x22b59c // Language + }, + { + "data": "V_VP8", + "id": 0x86 // CodecID + }, + { + "data": "VP8", + "id": 0x258688 // CodecName + }, + { + "data": 1, + "id": 0x83 // TrackType + }, + { + "id": 0xe0, // Video + "data": [ + { + "data": info.width, + "id": 0xb0 // PixelWidth + }, + { + "data": info.height, + "id": 0xba // PixelHeight + } + ] + } + ] + } + ] + }, + { + "id": 0x1f43b675, // Cluster + "data": [ + { + "data": 0, + "id": 0xe7 // Timecode + } + ].concat(frames.map(function(webp) { + var block = makeSimpleBlock({ + discardable: 0, + frame: webp.data.slice(4), + invisible: 0, + keyframe: 1, + lacing: 0, + trackNum: 1, + 
timecode: Math.round(counter) + }); + counter += webp.duration; + return { + data: block, + id: 0xa3 + }; + })) + } + ] + } + ]; + return generateEBML(EBML); + } + + // sums the lengths of all the frames and gets the duration, woo + + function checkFrames(frames) { + var width = frames[0].width, + height = frames[0].height, + duration = frames[0].duration; + for (var i = 1; i < frames.length; i++) { + if (frames[i].width != width) throw "Frame " + (i + 1) + " has a different width"; + if (frames[i].height != height) throw "Frame " + (i + 1) + " has a different height"; + if (frames[i].duration < 0) throw "Frame " + (i + 1) + " has a weird duration"; + duration += frames[i].duration; + } + return { + duration: duration, + width: width, + height: height + }; + } + + + function numToBuffer(num) { + var parts = []; + while (num > 0) { + parts.push(num & 0xff); + num = num >> 8; + } + return new Uint8Array(parts.reverse()); + } + + function strToBuffer(str) { + // return new Blob([str]); + + var arr = new Uint8Array(str.length); + for (var i = 0; i < str.length; i++) { + arr[i] = str.charCodeAt(i); + } + return arr; + + // this is slower + + /* + return new Uint8Array(str.split('').map(function(e){ + return e.charCodeAt(0) + })) + */ + } + + + // sorry this is ugly, and sort of hard to understand exactly why this was done + // at all really, but the reason is that there's some code below that i dont really + // feel like understanding, and this is easier than using my brain. + + function bitsToBuffer(bits) { + var data = []; + var pad = (bits.length % 8) ? 
(new Array(1 + 8 - (bits.length % 8))).join('0') : ''; + bits = pad + bits; + for (var i = 0; i < bits.length; i += 8) { + data.push(parseInt(bits.substr(i, 8), 2)); + } + return new Uint8Array(data); + } + + function generateEBML(json) { + var ebml = []; + for (var i = 0; i < json.length; i++) { + var data = json[i].data; + + // console.log(data); + + if (typeof data == 'object') data = generateEBML(data); + if (typeof data == 'number') data = bitsToBuffer(data.toString(2)); + if (typeof data == 'string') data = strToBuffer(data); + + // console.log(data) + + var len = data.size || data.byteLength; + var zeroes = Math.ceil(Math.ceil(Math.log(len) / Math.log(2)) / 8); + var size_str = len.toString(2); + var padded = (new Array((zeroes * 7 + 7 + 1) - size_str.length)).join('0') + size_str; + var size = (new Array(zeroes)).join('0') + '1' + padded; + + // i actually dont quite understand what went on up there, so I'm not really + // going to fix this, i'm probably just going to write some hacky thing which + // converts that string into a buffer-esque thing + + ebml.push(numToBuffer(json[i].id)); + ebml.push(bitsToBuffer(size)); + ebml.push(data); + } + return new Blob(ebml, { + type: "video/webm" + }); + } + + // OKAY, so the following two functions are the string-based old stuff, the reason they're + // still sort of in here, is that they're actually faster than the new blob stuff because + // getAsFile isn't widely implemented, or at least, it doesn't work in chrome, which is the + // only browser which supports get as webp + + // Converting between a string of 0010101001's and binary back and forth is probably inefficient + // TODO: get rid of this function + + function toBinStr_old(bits) { + var data = ''; + var pad = (bits.length % 8) ? 
(new Array(1 + 8 - (bits.length % 8))).join('0') : ''; + bits = pad + bits; + for (var i = 0; i < bits.length; i += 8) { + data += String.fromCharCode(parseInt(bits.substr(i, 8), 2)); + } + return data; + } + + function generateEBML_old(json) { + var ebml = ''; + for (var i = 0; i < json.length; i++) { + var data = json[i].data; + if (typeof data == 'object') data = generateEBML_old(data); + if (typeof data == 'number') data = toBinStr_old(data.toString(2)); + + var len = data.length; + var zeroes = Math.ceil(Math.ceil(Math.log(len) / Math.log(2)) / 8); + var size_str = len.toString(2); + var padded = (new Array((zeroes * 7 + 7 + 1) - size_str.length)).join('0') + size_str; + var size = (new Array(zeroes)).join('0') + '1' + padded; + + ebml += toBinStr_old(json[i].id.toString(2)) + toBinStr_old(size) + data; + + } + return ebml; + } + + // woot, a function that's actually written for this project! + // this parses some json markup and makes it into that binary magic + // which can then get shoved into the matroska comtainer (peaceably) + + function makeSimpleBlock(data) { + var flags = 0; + if (data.keyframe) flags |= 128; + if (data.invisible) flags |= 8; + if (data.lacing) flags |= (data.lacing << 1); + if (data.discardable) flags |= 1; + if (data.trackNum > 127) { + throw "TrackNumber > 127 not supported"; + } + var out = [data.trackNum | 0x80, data.timecode >> 8, data.timecode & 0xff, flags].map(function(e) { + return String.fromCharCode(e); + }).join('') + data.frame; + + return out; + } + + // here's something else taken verbatim from weppy, awesome rite? 
+ + function parseWebP(riff) { + var VP8 = riff.RIFF[0].WEBP[0]; + + var frame_start = VP8.indexOf('\x9d\x01\x2a'); // A VP8 keyframe starts with the 0x9d012a header + for (var i = 0, c = []; i < 4; i++) c[i] = VP8.charCodeAt(frame_start + 3 + i); + + var width, horizontal_scale, height, vertical_scale, tmp; + + //the code below is literally copied verbatim from the bitstream spec + tmp = (c[1] << 8) | c[0]; + width = tmp & 0x3FFF; + horizontal_scale = tmp >> 14; + tmp = (c[3] << 8) | c[2]; + height = tmp & 0x3FFF; + vertical_scale = tmp >> 14; + return { + width: width, + height: height, + data: VP8, + riff: riff + }; + } + + // i think i'm going off on a riff by pretending this is some known + // idiom which i'm making a casual and brilliant pun about, but since + // i can't find anything on google which conforms to this idiomatic + // usage, I'm assuming this is just a consequence of some psychotic + // break which makes me make up puns. well, enough riff-raff (aha a + // rescue of sorts), this function was ripped wholesale from weppy + + function parseRIFF(string) { + var offset = 0; + var chunks = { }; + + while (offset < string.length) { + var id = string.substr(offset, 4); + var len = parseInt(string.substr(offset + 4, 4).split('').map(function(i) { + var unpadded = i.charCodeAt(0).toString(2); + return (new Array(8 - unpadded.length + 1)).join('0') + unpadded; + }).join(''), 2); + var data = string.substr(offset + 4 + 4, len); + offset += 4 + 4 + len; + chunks[id] = chunks[id] || []; + + if (id == 'RIFF' || id == 'LIST') { + chunks[id].push(parseRIFF(data)); + } else { + chunks[id].push(data); + } + } + return chunks; + } + + // here's a little utility function that acts as a utility for other functions + // basically, the only purpose is for encoding "Duration", which is encoded as + // a double (considerably more difficult to encode than an integer) + + function doubleToString(num) { + return [].slice.call( + new Uint8Array( + ( + new Float64Array([num]) 
// create a float64 array + // extract the array buffer + ).buffer), 0) // convert the Uint8Array into a regular array + .map(function(e) { // since it's a regular array, we can now use map + return String.fromCharCode(e); // encode all the bytes individually + }) + .reverse() // correct the byte endianness (assume it's little endian for now) + .join(''); // join the bytes in holy matrimony as a string + } + + function WhammyVideo(speed, quality) { // a more abstract-ish API + this.frames = []; + this.duration = 1000 / speed; + this.quality = quality || 0.8; + } + + WhammyVideo.prototype.add = function(frame, duration) { + if (typeof duration != 'undefined' && this.duration) throw "you can't pass a duration if the fps is set"; + if ('canvas' in frame) { //CanvasRenderingContext2D + frame = frame.canvas; + } + if ('toDataURL' in frame) { + frame = frame.toDataURL('image/webp', this.quality); + } else if (typeof frame != "string") { + throw "frame must be a a HTMLCanvasElement, a CanvasRenderingContext2D or a DataURI formatted string"; + } + if (!( /^data:image\/webp;base64,/ig ).test(frame)) { + throw "Input must be formatted properly as a base64 encoded DataURI of type image/webp"; + } + this.frames.push({ + image: frame, + duration: duration || this.duration + }); + }; + WhammyVideo.prototype.compile = function() { + return new toWebM(this.frames.map(function(frame) { + var webp = parseWebP(parseRIFF(atob(frame.image.slice(23)))); + webp.duration = frame.duration; + return webp; + })); + }; + return { + Video: WhammyVideo, + fromImageArray: function(images, fps) { + return toWebM(images.map(function(image) { + var webp = parseWebP(parseRIFF(atob(image.slice(23)))); + webp.duration = 1000 / fps; + return webp; + })); + }, + toWebM: toWebM + // expose methods of madness + }; +})(); + + +// Muaz Khan - https://github.com/muaz-khan +// neizerth - https://github.com/neizerth +// MIT License - https://www.webrtc-experiment.com/licence/ +// Documentation - 
https://github.com/streamproc/MediaStreamRecorder +// ========================================================== +// gif-encoder.js + +// ========================================================== + +// Note: +// ========================================================== +// All libraries listed in this file are "external libraries" +// and has their own copyrights. Taken from "jsGif" project. + +function encode64(n){for(var o="",f=0,l=n.length,u="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=",s,i,r,c,h,e,t;f>2,h=(s&3)<<4|i>>4,e=(i&15)<<2|r>>6,t=r&63,isNaN(i)?e=t=64:isNaN(r)&&(t=64),o=o+u.charAt(c)+u.charAt(h)+u.charAt(e)+u.charAt(t);return o}LZWEncoder=function(){var c={},it=-1,st,ht,rt,l,w,et,ut=12,ct=5003,t,ft=ut,o,ot=1<=254&&k(t)},at=function(n){tt(a),s=f+2,h=!0,e(f,n)},tt=function(n){for(var t=0;t=0){rt=g-c,c==0&&(rt=1);do if((c-=rt)<0&&(c+=g),u[c]==w){l=y[c];continue n}while(u[c]>=0)}e(l,i),l=nt,s0&&(n.writeByte(r),n.writeBytes(g,0,r),r=0)},b=function(n){return(1<0?i|=r<=8;)nt(i&255,u),i>>=8,n-=8;if((s>o||h)&&(h?(o=b(t=v),h=!1):(++t,o=t==ft?ot:b(t))),r==p){while(n>0)nt(i&255,u),i>>=8,n-=8;k(u)}};return lt.apply(this,arguments),c},NeuQuant=function(){var c={},t=256,tt=499,nt=491,rt=487,it=503,g=3*it,b=t-1,r=4,pt=100,ft=16,y=1<>a,dt=y<>3,l=6,ti=1<>1,i=o+1;i>1,i=o+1;i<256;i++)f[i]=b},vt=function(){var t,u,k,b,p,c,n,s,o,y,ut,a,f,ft;for(i>l,n<=1&&(n=0),t=0;t=ft&&(f-=i),t++,y==0&&(y=1),t%y==0)for(s-=s/et,c-=c/kt,n=c>>l,n<=1&&(n=0),u=0;u=0;)c=h?c=t:(c++,e<0&&(e=-e),o=s[0]-i,o<0&&(o=-o),e+=o,e=0&&(s=n[l],e=r-s[1],e>=h?l=-1:(l--,e<0&&(e=-e),o=s[0]-i,o<0&&(o=-o),e+=o,e>=r,n[i][1]>>=r,n[i][2]>>=r,n[i][3]=i},lt=function(i,r,f,e,o){var a,y,l,c,h,p,s;for(l=r-i,l<-1&&(l=-1),c=r+i,c>t&&(c=t),a=r+1,y=r-1,p=1;al;){if(h=v[p++],al){s=n[y--];try{s[0]-=h*(s[0]-f)/u,s[1]-=h*(s[1]-e)/u,s[2]-=h*(s[2]-o)/u}catch(w){}}}},at=function(t,i,r,u,f){var o=n[i];o[0]-=t*(o[0]-r)/e,o[1]-=t*(o[1]-u)/e,o[2]-=t*(o[2]-f)/e},yt=function(i,u,f){var 
h,c,e,b,d,l,k,v,w,y;for(v=2147483647,w=v,l=-1,k=l,h=0;h>ft-r),b>a,s[h]-=d,o[h]+=d<=0&&(y=n)},dt=t.setRepeat=function(n){n>=0&&(k=n)},bt=t.setTransparent=function(n){v=n},kt=t.addFrame=function(t,i){if(t==null||!f||n==null){throw new Error("Please call start method before calling addFrame");return!1}var r=!0;try{i?a=t:(a=t.getImageData(0,0,t.canvas.width,t.canvas.height).data,ft||et(t.canvas.width,t.canvas.height)),ct(),ht(),e&&(vt(),tt(),k>=0&<()),st(),ot(),e||tt(),at(),e=!1}catch(u){r=!1}return r},ui=t.finish=function(){if(!f)return!1;var t=!0;f=!1;try{n.writeByte(59)}catch(i){t=!1}return t},nt=function(){g=0,a=null,i=null,l=null,r=null,b=!1,e=!0},fi=t.setFrameRate=function(n){n!=15&&(d=Math.round(100/n))},ri=t.setQuality=function(n){n<1&&(n=1),it=n},et=t.setSize=function et(n,t){(!f||e)&&(o=n,s=t,o<1&&(o=320),s<1&&(s=240),ft=!0)},ti=t.start=function(){nt();var t=!0;b=!1,n=new h;try{n.writeUTFBytes("GIF89a")}catch(i){t=!1}return f=t},ii=t.cont=function(){nt();var t=!0;return b=!1,n=new h,f=t},ht=function(){var e=i.length,o=e/3,f,n,t,u;for(l=[],f=new NeuQuant(i,e,it),r=f.process(),n=0,t=0;t>16,v=(n&65280)>>8,a=n&255,s=0,h=16777216,l=r.length;for(t=0;t=0&&(t=y&7),t<<=2,n.writeByte(0|t|0|i),u(d),n.writeByte(g),n.writeByte(0)},ot=function(){n.writeByte(44),u(0),u(0),u(o),u(s),e?n.writeByte(0):n.writeByte(128|p)},vt=function(){u(o),u(s),n.writeByte(240|p),n.writeByte(0),n.writeByte(0)},lt=function(){n.writeByte(33),n.writeByte(255),n.writeByte(11),n.writeUTFBytes("NETSCAPE2.0"),n.writeByte(3),n.writeByte(1),u(k),n.writeByte(0)},tt=function(){var i,t;for(n.writeBytes(r),i=768-r.length,t=0;t>8&255)},at=function(){var t=new LZWEncoder(o,s,l,rt);t.encode(n)},wt=t.stream=function(){return n},pt=t.setProperties=function(n,t){f=n,e=t};return t} diff --git a/Pluginfree-Screen-Sharing/README.md b/Pluginfree-Screen-Sharing/README.md index d3b9a649..914a81bb 100644 --- a/Pluginfree-Screen-Sharing/README.md +++ b/Pluginfree-Screen-Sharing/README.md @@ -35,6 +35,40 @@ Current 
experiment is using chrome screen sharing APIs (media/constraints) which = +Test it on HTTPS. Because, screen capturing (currently) only works on SSL domains. + +Chrome denies request automatically in the following cases: + +1. Screen capturing is not enabled via command line switch. +mandatory: {chromeMediaSource: 'screen'} must be there + +2. Audio stream was requested (it's not supported yet). + +```javascript +navigator.webkitGetUserMedia({ + audio: false /* MUST be false because audio capturer not works with screen capturer */ +}); +``` + +3. Request from a page that was not loaded from a secure origin. + +Here is their C++ code that denies screen capturing: + +```c +if (!screen_capture_enabled || + request.audio_type != content::MEDIA_NO_SERVICE || + !request.security_origin.SchemeIsSecure()) { + callback.Run(content::MediaStreamDevices()); + return; + } +``` + +Personally I don’t know why they deny non-SSL requests. Maybe they’re using iframes in sandbox mode or something else that runs only on HTTPS. + +Browsers who don't understand {chromeMediaSource: 'screen'} constraint will simply get video like chrome stable or Firefox. + += + #### Browser Support [WebRTC plugin free screen sharing](https://www.webrtc-experiment.com/Pluginfree-Screen-Sharing/) experiment works fine on following web-browsers: diff --git a/Pre-recorded-Media-Streaming/index.html b/Pre-recorded-Media-Streaming/index.html index 92eda322..e0f6ca00 100644 --- a/Pre-recorded-Media-Streaming/index.html +++ b/Pre-recorded-Media-Streaming/index.html @@ -13,7 +13,6 @@ - +
+ + + + + + + + + +
+ + + \ No newline at end of file diff --git a/RTCPeerConnection/RTCPeerConnection-v1.3.js b/RTCPeerConnection/RTCPeerConnection-v1.3.js index 20a10034..925645ec 100644 --- a/RTCPeerConnection/RTCPeerConnection-v1.3.js +++ b/RTCPeerConnection/RTCPeerConnection-v1.3.js @@ -100,7 +100,7 @@ var RTCPeerConnection = function (options) { }; } - if (options.onAnswerSDP && moz) openAnswererChannel(); + if (options.onAnswerSDP && moz && options.onChannelMessage) openAnswererChannel(); function openAnswererChannel() { peerConnection.ondatachannel = function (_channel) { diff --git a/RTCPeerConnection/RTCPeerConnection-v1.4.js b/RTCPeerConnection/RTCPeerConnection-v1.4.js index a7128116..1463fe0a 100644 --- a/RTCPeerConnection/RTCPeerConnection-v1.4.js +++ b/RTCPeerConnection/RTCPeerConnection-v1.4.js @@ -93,35 +93,12 @@ var RTCPeerConnection = function (options) { return extractedChars; } - function getInteropSDP(sdp) { - // for audio-only streaming: multiple-crypto lines are not allowed - if (options.onAnswerSDP) - sdp = sdp.replace(/(a=crypto:0 AES_CM_128_HMAC_SHA1_32)(.*?)(\r\n)/g, ''); - - - var inline = getChars() + '\r\n' + (extractedChars = ''); - sdp = sdp.indexOf('a=crypto') == -1 ? sdp.replace(/c=IN/g, - 'a=crypto:1 AES_CM_128_HMAC_SHA1_80 inline:' + inline + - 'c=IN') : sdp; - - if (options.offerSDP) { - info('\n--------offer sdp provided by offerer\n'); - info(options.offerSDP.sdp); - } - - info(options.onOfferSDP ? 
'\n--------offer\n' : '\n--------answer\n'); - info('sdp: ' + sdp); - - return sdp; - } - if (moz && !options.onChannelMessage) constraints.mandatory.MozDontOfferDataChannel = true; function createOffer() { if (!options.onOfferSDP) return; peerConnection.createOffer(function (sessionDescription) { - sessionDescription.sdp = getInteropSDP(sessionDescription.sdp); peerConnection.setLocalDescription(sessionDescription); options.onOfferSDP(sessionDescription); }, null, constraints); @@ -134,7 +111,6 @@ var RTCPeerConnection = function (options) { peerConnection.setRemoteDescription(options.offerSDP); peerConnection.createAnswer(function (sessionDescription) { - sessionDescription.sdp = getInteropSDP(sessionDescription.sdp); peerConnection.setLocalDescription(sessionDescription); options.onAnswerSDP(sessionDescription); @@ -195,7 +171,7 @@ var RTCPeerConnection = function (options) { }; } - if (options.onAnswerSDP && moz) openAnswererChannel(); + if (options.onAnswerSDP && moz && options.onChannelMessage) openAnswererChannel(); function openAnswererChannel() { peerConnection.ondatachannel = function (_channel) { diff --git a/RTCPeerConnection/RTCPeerConnection-v1.5.js b/RTCPeerConnection/RTCPeerConnection-v1.5.js index 0c06f79f..83b3604f 100644 --- a/RTCPeerConnection/RTCPeerConnection-v1.5.js +++ b/RTCPeerConnection/RTCPeerConnection-v1.5.js @@ -99,7 +99,6 @@ function RTCPeerConnection(options) { if (!options.onOfferSDP) return; peer.createOffer(function(sessionDescription) { - sessionDescription.sdp = serializeSdp(sessionDescription.sdp); peer.setLocalDescription(sessionDescription); options.onOfferSDP(sessionDescription); }, onSdpError, constraints); @@ -113,7 +112,6 @@ function RTCPeerConnection(options) { //options.offerSDP.sdp = addStereo(options.offerSDP.sdp); peer.setRemoteDescription(new SessionDescription(options.offerSDP), onSdpSuccess, onSdpError); peer.createAnswer(function(sessionDescription) { - sessionDescription.sdp = 
serializeSdp(sessionDescription.sdp); peer.setLocalDescription(sessionDescription); options.onAnswerSDP(sessionDescription); }, onSdpError, constraints); @@ -125,53 +123,6 @@ function RTCPeerConnection(options) { createAnswer(); } - - // DataChannel Bandwidth - - function setBandwidth(sdp) { - // remove existing bandwidth lines - sdp = sdp.replace( /b=AS([^\r\n]+\r\n)/g , ''); - sdp = sdp.replace( /a=mid:data\r\n/g , 'a=mid:data\r\nb=AS:1638400\r\n'); - - return sdp; - } - - // old: FF<>Chrome interoperability management - - function getInteropSDP(sdp) { - var chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'.split(''), - extractedChars = ''; - - function getChars() { - extractedChars += chars[parseInt(Math.random() * 40)] || ''; - if (extractedChars.length < 40) - getChars(); - - return extractedChars; - } - - // usually audio-only streaming failure occurs out of audio-specific crypto line - // a=crypto:1 AES_CM_128_HMAC_SHA1_32 --------- kAttributeCryptoVoice - if (options.onAnswerSDP) - sdp = sdp.replace( /(a=crypto:0 AES_CM_128_HMAC_SHA1_32)(.*?)(\r\n)/g , ''); - - // video-specific crypto line i.e. SHA1_80 - // a=crypto:1 AES_CM_128_HMAC_SHA1_80 --------- kAttributeCryptoVideo - var inline = getChars() + '\r\n' + (extractedChars = ''); - sdp = sdp.indexOf('a=crypto') == -1 ? 
sdp.replace( /c=IN/g , - 'a=crypto:1 AES_CM_128_HMAC_SHA1_80 inline:' + inline + - 'c=IN') : sdp; - - return sdp; - } - - function serializeSdp(sdp) { - if (!moz) sdp = setBandwidth(sdp); - sdp = getInteropSDP(sdp); - console.debug(sdp); - return sdp; - } - // DataChannel management var channel; diff --git a/RTCall/RTCall.js b/RTCall/RTCall.js index c6efa624..09c1f864 100644 --- a/RTCall/RTCall.js +++ b/RTCall/RTCall.js @@ -242,7 +242,7 @@ username: 'homeo' }; - iceServers.iceServers = [TURN]; + iceServers.iceServers = [STUN, TURN]; } optional = { diff --git a/RecordRTC/RecordRTC-to-PHP/index.html b/RecordRTC/RecordRTC-to-PHP/index.html index ec04c7cd..4d576736 100644 --- a/RecordRTC/RecordRTC-to-PHP/index.html +++ b/RecordRTC/RecordRTC-to-PHP/index.html @@ -6,299 +6,265 @@ - RecordRTC-to-PHP ® Muaz Khan + RecordRTC to PHP ® Muaz Khan - + + + + + - -

- RecordRTC-to-PHP ® Muaz Khan -

-

- Copyright © 2013 - Muaz Khan<@muazkh> - » - @WebRTC Experiments - » - Google+ - » - What's New? -

- -
-
-
-
- - - - - -
- -
-
-

- You can record audio in wav/ogg file format; and video in either webm format or - as animated GIF image.

-

- How to save recorded wav/webm file to PHP server?

+ } + + var record = document.getElementById('record'); + var stop = document.getElementById('stop'); + var deleteFiles = document.getElementById('delete'); + + var audio = document.querySelector('audio'); + + var recordVideo = document.getElementById('record-video'); + var preview = document.getElementById('preview'); + + var container = document.getElementById('container'); + + var recordAudio, recordVideo; + record.onclick = function() { + record.disabled = true; + var video_constraints = { + mandatory: { }, + optional: [] + }; + navigator.getUserMedia({ + audio: true, + video: video_constraints + }, function(stream) { + preview.src = window.URL.createObjectURL(stream); + preview.play(); + + // var legalBufferValues = [256, 512, 1024, 2048, 4096, 8192, 16384]; + // sample-rates in at least the range 22050 to 96000. + recordAudio = RecordRTC(stream, { + //bufferSize: 16384, + //sampleRate: 45000 + }); + + recordVideo = RecordRTC(stream, { + type: 'video' + }); + + recordAudio.startRecording(); + recordVideo.startRecording(); + + stop.disabled = false; + }); + }; + + var fileName; + stop.onclick = function() { + record.disabled = false; + stop.disabled = true; + + fileName = Math.round(Math.random() * 99999999) + 99999999; + + recordAudio.stopRecording(); + PostBlob(recordAudio.getBlob(), 'audio', fileName + '.wav'); + + recordVideo.stopRecording(); + PostBlob(recordVideo.getBlob(), 'video', fileName + '.webm'); + + preview.src = ''; + deleteFiles.disabled = false; + }; + + deleteFiles.onclick = function() { + deleteAudioVideoFiles(); + }; + + function deleteAudioVideoFiles() { + deleteFiles.disabled = true; + if (!fileName) return; + var formData = new FormData(); + formData.append('delete-file', fileName); + xhr('delete.php', formData, null, function(response) { + console.log(response); + }); + fileName = null; + container.innerHTML = ''; + } + + function xhr(url, data, progress, callback) { + var request = new XMLHttpRequest(); + request.onreadystatechange = 
function() { + if (request.readyState == 4 && request.status == 200) { + callback(request.responseText); + } + }; + + request.onprogress = function(e) { + if(!progress) return; + if (e.lengthComputable) { + progress.value = (e.loaded / e.total) * 100; + progress.textContent = progress.value; // Fallback for unsupported browsers. + } + + if(progress.value == 100){ + progress.value = 0; + } + }; + request.open('POST', url); + request.send(data); + } + + window.onbeforeunload = function() { + if (!!fileName) { + deleteAudioVideoFiles(); + return 'It seems that you\'ve not deleted audio/video files from the server.'; + } + }; + + +
+
    +
  1. Both files are recorded and uploaded individually (wav/webm)
  2. +
  3. You can merge/mux them in single format like avi or mkv — using tools like ffmpeg/avconv
  4. +
+
+ +
+

Feedback

+
+ +
+ Enter your email too; if you want "direct" reply! +
+ +
+

+ How to save recorded wav/webm file to PHP server?

+
    +
  1. Write a PHP file to write recrded blob on disk
  2. +
  3. Write Javascript to POST recorded blobs to server using XHR2/FormdData
  4. +
+
+ +
+

PHP Code

-// PHP code
 <?php
 foreach(array('video', 'audio') as $type) {
     if (isset($_FILES["${type}-blob"])) {
@@ -314,8 +280,11 @@ 

} } ?> - -// Javascript code +

+
+
+

Javascript Code

+
 var fileType = 'video'; // or "audio"
 var fileName = 'ABCDEF.webm';  // or "wav"
 
@@ -337,30 +306,120 @@ 

request.open('POST', url); request.send(data); } +

+
+

+ How to use RecordRTC?

+
+<script src="https://www.webrtc-experiment.com/RecordRTC.js"></script>
+
+
+
+

+ How to record audio using RecordRTC?

+
+var recordRTC = RecordRTC(mediaStream);
+recordRTC.startRecording();
+recordRTC.stopRecording(function(audioURL) {
+   window.open(audioURL);
+});
+
+ +
+ + +
+

+ How to record video using RecordRTC?

+
+var options = {
+   type: 'video',
+   video: {
+      width: 320,
+      height: 240
+   },
+   canvas: {
+      width: 320,
+      height: 240
+   }
+};
+var recordRTC = RecordRTC(mediaStream, options);
+recordRTC.startRecording();
+recordRTC.stopRecording(function(videoURL) {
+   window.open(videoURL);
+});
 
-

- RecordRTC is MIT licensed on Github! Documentation -

-
-
-
-

Feedback

- -
- -
- Enter your email too; if you want "direct" reply! -
+ +
+ +
+

+ How to record animated GIF using RecordRTC?

+
+var options = {
+   type: 'gif',
+   video: {
+      width: 320,
+      height: 240
+   },
+   canvas: {
+      width: 320,
+      height: 240
+   },
+   frameRate: 200,
+   quality: 10
+};
+var recordRTC = RecordRTC(mediaStream, options);
+recordRTC.startRecording();
+recordRTC.stopRecording(function(gifURL) {
+   window.open(gifURL);
+});
+
+
+ +
+

+ Possible + issues/failures: +

+

+ The biggest issue is that RecordRTC is unable to record both audio and video streams in single file.

+ Do you know "RecordRTC" fails recording audio because following conditions fails: +

    +
  1. Sample rate and channel configuration must be the same for input and output sides + on Windows i.e. audio input/output devices must match
  2. +
  3. Only the Default microphone device can be used for capturing.
  4. +
  5. The requesting scheme is must be one of the following: http, https, chrome, extension's, + or file (only works with --allow-file-access-from-files)
  6. +
  7. The browser must be able to create/initialize the metadata database for the API + under the profile directory
  8. +
+

+
+
+

+ RecordRTC is MIT licensed on Github! Documentation +

+
+ +
+

Latest Updates

+
+
+ - - - + + + diff --git a/RecordRTC/RecordRTC.js b/RecordRTC/RecordRTC.js index 708a2038..3e3234ec 100644 --- a/RecordRTC/RecordRTC.js +++ b/RecordRTC/RecordRTC.js @@ -7,7 +7,7 @@ /* need to fix: 1. chrome tabCapture and audio/video recording - 2. ffmpeg/avconv to merge webm/wav + 2. ffmpeg/avconv to merge webm/wav (sync time of both wav and webm) 3. longer video issues 4. longer audio issues */ @@ -148,7 +148,11 @@ function MediaStreamRecorder(mediaStream) { // https://wiki.mozilla.org/Gecko:MediaRecorder mediaRecorder = new MediaRecorder(mediaStream); mediaRecorder.ondataavailable = function(e) { - self.recordedBlob = new Blob([self.recordedBlob, e.data], { type: 'audio/ogg' }); + // pull #118 + if(self.recordedBlob) + self.recordedBlob = new Blob([self.recordedBlob, e.data], { type: 'audio/ogg' }); + else + self.recordedBlob = new Blob([e.data], { type: 'audio/ogg' }); }; mediaRecorder.start(0); @@ -188,11 +192,14 @@ function StereoRecorder(mediaStream) { // ______________________ // StereoAudioRecorder.js +// In Chrome, when the javascript node is out of scope, the onaudioprocess callback stops firing. +// This leads to audio being significantly shorter than the generated video. 
+var __stereoAudioRecorderJavacriptNode; + function StereoAudioRecorder(mediaStream, root) { // variables var leftchannel = []; var rightchannel = []; - var recorder; var recording = false; var recordingLength = 0; var volume; @@ -337,9 +344,9 @@ function StereoAudioRecorder(mediaStream, root) { console.log('sample-rate', sampleRate); console.log('buffer-size', bufferSize); - recorder = context.createJavaScriptNode(bufferSize, 2, 2); + __stereoAudioRecorderJavacriptNode = context.createJavaScriptNode(bufferSize, 2, 2); - recorder.onaudioprocess = function(e) { + __stereoAudioRecorderJavacriptNode.onaudioprocess = function(e) { if (!recording) return; var left = e.inputBuffer.getChannelData(0); var right = e.inputBuffer.getChannelData(1); @@ -350,8 +357,8 @@ function StereoAudioRecorder(mediaStream, root) { }; // we connect the recorder - volume.connect(recorder); - recorder.connect(context.destination); + volume.connect(__stereoAudioRecorderJavacriptNode); + __stereoAudioRecorderJavacriptNode.connect(context.destination); } // _________________ @@ -536,13 +543,17 @@ function GifRecorder(mediaStream) { // whammy.js -var Whammy = (function() { - // in this case, frames has a very specific meaning, which will be + +var Whammy = (function(){ + // in this case, frames has a very specific meaning, which will be // detailed once i finish writing the code - function toWebM(frames) { + function toWebM(frames, outputAsArray){ var info = checkFrames(frames); - var counter = 0; + + //max duration by cluster in milliseconds + var CLUSTER_MAX_DURATION = 30000; + var EBML = [ { "id": 0x1a45dfa3, // EBML @@ -652,46 +663,70 @@ var Whammy = (function() { } ] }, - { - "id": 0x1f43b675, // Cluster - "data": [ - { - "data": 0, - "id": 0xe7 // Timecode - } - ].concat(frames.map(function(webp) { - var block = makeSimpleBlock({ - discardable: 0, - frame: webp.data.slice(4), - invisible: 0, - keyframe: 1, - lacing: 0, - trackNum: 1, - timecode: Math.round(counter) - }); - counter += 
webp.duration; - return { - data: block, - id: 0xa3 - }; - })) - } + + //cluster insertion point ] } ]; - return generateEBML(EBML); + + + //Generate clusters (max duration) + var frameNumber = 0; + var clusterTimecode = 0; + while(frameNumber < frames.length){ + + var clusterFrames = []; + var clusterDuration = 0; + do { + clusterFrames.push(frames[frameNumber]); + clusterDuration += frames[frameNumber].duration; + frameNumber++; + }while(frameNumber < frames.length && clusterDuration < CLUSTER_MAX_DURATION); + + var clusterCounter = 0; + var cluster = { + "id": 0x1f43b675, // Cluster + "data": [ + { + "data": clusterTimecode, + "id": 0xe7 // Timecode + } + ].concat(clusterFrames.map(function(webp){ + var block = makeSimpleBlock({ + discardable: 0, + frame: webp.data.slice(4), + invisible: 0, + keyframe: 1, + lacing: 0, + trackNum: 1, + timecode: Math.round(clusterCounter) + }); + clusterCounter += webp.duration; + return { + data: block, + id: 0xa3 + }; + })) + } + + //Add cluster to segment + EBML[1].data.push(cluster); + clusterTimecode += clusterDuration; + } + + return generateEBML(EBML, outputAsArray) } // sums the lengths of all the frames and gets the duration, woo - function checkFrames(frames) { + function checkFrames(frames){ var width = frames[0].width, height = frames[0].height, duration = frames[0].duration; - for (var i = 1; i < frames.length; i++) { - if (frames[i].width != width) throw "Frame " + (i + 1) + " has a different width"; - if (frames[i].height != height) throw "Frame " + (i + 1) + " has a different height"; - if (frames[i].duration < 0) throw "Frame " + (i + 1) + " has a weird duration"; + for(var i = 1; i < frames.length; i++){ + if(frames[i].width != width) throw "Frame " + (i + 1) + " has a different width"; + if(frames[i].height != height) throw "Frame " + (i + 1) + " has a different height"; + if(frames[i].duration < 0 || frames[i].duration > 0x7fff) throw "Frame " + (i + 1) + " has a weird duration (must be between 0 and 32767)"; 
duration += frames[i].duration; } return { @@ -702,107 +737,125 @@ var Whammy = (function() { } - function numToBuffer(num) { + function numToBuffer(num){ var parts = []; - while (num > 0) { - parts.push(num & 0xff); - num = num >> 8; + while(num > 0){ + parts.push(num & 0xff) + num = num >> 8 } return new Uint8Array(parts.reverse()); } - function strToBuffer(str) { + function strToBuffer(str){ // return new Blob([str]); var arr = new Uint8Array(str.length); - for (var i = 0; i < str.length; i++) { - arr[i] = str.charCodeAt(i); + for(var i = 0; i < str.length; i++){ + arr[i] = str.charCodeAt(i) } return arr; - // this is slower - - /* - return new Uint8Array(str.split('').map(function(e){ - return e.charCodeAt(0) - })) - */ + // return new Uint8Array(str.split('').map(function(e){ + // return e.charCodeAt(0) + // })) } - // sorry this is ugly, and sort of hard to understand exactly why this was done + //sorry this is ugly, and sort of hard to understand exactly why this was done // at all really, but the reason is that there's some code below that i dont really // feel like understanding, and this is easier than using my brain. - function bitsToBuffer(bits) { + function bitsToBuffer(bits){ var data = []; var pad = (bits.length % 8) ? 
(new Array(1 + 8 - (bits.length % 8))).join('0') : ''; bits = pad + bits; - for (var i = 0; i < bits.length; i += 8) { - data.push(parseInt(bits.substr(i, 8), 2)); + for(var i = 0; i < bits.length; i+= 8){ + data.push(parseInt(bits.substr(i,8),2)) } return new Uint8Array(data); } - function generateEBML(json) { + function generateEBML(json, outputAsArray){ var ebml = []; - for (var i = 0; i < json.length; i++) { + for(var i = 0; i < json.length; i++){ var data = json[i].data; + if(typeof data == 'object') data = generateEBML(data, outputAsArray); + if(typeof data == 'number') data = bitsToBuffer(data.toString(2)); + if(typeof data == 'string') data = strToBuffer(data); - // console.log(data); - - if (typeof data == 'object') data = generateEBML(data); - if (typeof data == 'number') data = bitsToBuffer(data.toString(2)); - if (typeof data == 'string') data = strToBuffer(data); - - // console.log(data) + if(data.length){ + var z = z; + } - var len = data.size || data.byteLength; - var zeroes = Math.ceil(Math.ceil(Math.log(len) / Math.log(2)) / 8); + var len = data.size || data.byteLength || data.length; + var zeroes = Math.ceil(Math.ceil(Math.log(len)/Math.log(2))/8); var size_str = len.toString(2); var padded = (new Array((zeroes * 7 + 7 + 1) - size_str.length)).join('0') + size_str; var size = (new Array(zeroes)).join('0') + '1' + padded; - // i actually dont quite understand what went on up there, so I'm not really - // going to fix this, i'm probably just going to write some hacky thing which - // converts that string into a buffer-esque thing + //i actually dont quite understand what went on up there, so I'm not really + //going to fix this, i'm probably just going to write some hacky thing which + //converts that string into a buffer-esque thing ebml.push(numToBuffer(json[i].id)); ebml.push(bitsToBuffer(size)); - ebml.push(data); + ebml.push(data) + + + } + + //output as blob or byteArray + if(outputAsArray){ + //convert ebml to an array + var buffer = 
toFlatArray(ebml) + return new Uint8Array(buffer); + }else{ + return new Blob(ebml, {type: "video/webm"}); } - return new Blob(ebml, { - type: "video/webm" - }); } - // OKAY, so the following two functions are the string-based old stuff, the reason they're - // still sort of in here, is that they're actually faster than the new blob stuff because - // getAsFile isn't widely implemented, or at least, it doesn't work in chrome, which is the - // only browser which supports get as webp + function toFlatArray(arr, outBuffer){ + if(outBuffer == null){ + outBuffer = []; + } + for(var i = 0; i < arr.length; i++){ + if(typeof arr[i] == 'object'){ + //an array + toFlatArray(arr[i], outBuffer) + }else{ + //a simple element + outBuffer.push(arr[i]); + } + } + return outBuffer; + } - // Converting between a string of 0010101001's and binary back and forth is probably inefficient - // TODO: get rid of this function + //OKAY, so the following two functions are the string-based old stuff, the reason they're + //still sort of in here, is that they're actually faster than the new blob stuff because + //getAsFile isn't widely implemented, or at least, it doesn't work in chrome, which is the + // only browser which supports get as webp - function toBinStr_old(bits) { + //Converting between a string of 0010101001's and binary back and forth is probably inefficient + //TODO: get rid of this function + function toBinStr_old(bits){ var data = ''; var pad = (bits.length % 8) ? 
(new Array(1 + 8 - (bits.length % 8))).join('0') : ''; bits = pad + bits; - for (var i = 0; i < bits.length; i += 8) { - data += String.fromCharCode(parseInt(bits.substr(i, 8), 2)); + for(var i = 0; i < bits.length; i+= 8){ + data += String.fromCharCode(parseInt(bits.substr(i,8),2)) } return data; } - function generateEBML_old(json) { + function generateEBML_old(json){ var ebml = ''; - for (var i = 0; i < json.length; i++) { + for(var i = 0; i < json.length; i++){ var data = json[i].data; - if (typeof data == 'object') data = generateEBML_old(data); - if (typeof data == 'number') data = toBinStr_old(data.toString(2)); + if(typeof data == 'object') data = generateEBML_old(data); + if(typeof data == 'number') data = toBinStr_old(data.toString(2)); var len = data.length; - var zeroes = Math.ceil(Math.ceil(Math.log(len) / Math.log(2)) / 8); + var zeroes = Math.ceil(Math.ceil(Math.log(len)/Math.log(2))/8); var size_str = len.toString(2); var padded = (new Array((zeroes * 7 + 7 + 1) - size_str.length)).join('0') + size_str; var size = (new Array(zeroes)).join('0') + '1' + padded; @@ -813,11 +866,11 @@ var Whammy = (function() { return ebml; } - // woot, a function that's actually written for this project! - // this parses some json markup and makes it into that binary magic - // which can then get shoved into the matroska comtainer (peaceably) + //woot, a function that's actually written for this project! 
+ //this parses some json markup and makes it into that binary magic + //which can then get shoved into the matroska comtainer (peaceably) - function makeSimpleBlock(data) { + function makeSimpleBlock(data){ var flags = 0; if (data.keyframe) flags |= 128; if (data.invisible) flags |= 8; @@ -826,8 +879,8 @@ var Whammy = (function() { if (data.trackNum > 127) { throw "TrackNumber > 127 not supported"; } - var out = [data.trackNum | 0x80, data.timecode >> 8, data.timecode & 0xff, flags].map(function(e) { - return String.fromCharCode(e); + var out = [data.trackNum | 0x80, data.timecode >> 8, data.timecode & 0xff, flags].map(function(e){ + return String.fromCharCode(e) }).join('') + data.frame; return out; @@ -835,11 +888,11 @@ var Whammy = (function() { // here's something else taken verbatim from weppy, awesome rite? - function parseWebP(riff) { + function parseWebP(riff){ var VP8 = riff.RIFF[0].WEBP[0]; - var frame_start = VP8.indexOf('\x9d\x01\x2a'); // A VP8 keyframe starts with the 0x9d012a header - for (var i = 0, c = []; i < 4; i++) c[i] = VP8.charCodeAt(frame_start + 3 + i); + var frame_start = VP8.indexOf('\x9d\x01\x2a'); //A VP8 keyframe starts with the 0x9d012a header + for(var i = 0, c = []; i < 4; i++) c[i] = VP8.charCodeAt(frame_start + 3 + i); var width, horizontal_scale, height, vertical_scale, tmp; @@ -855,7 +908,7 @@ var Whammy = (function() { height: height, data: VP8, riff: riff - }; + } } // i think i'm going off on a riff by pretending this is some known @@ -865,16 +918,16 @@ var Whammy = (function() { // break which makes me make up puns. 
well, enough riff-raff (aha a // rescue of sorts), this function was ripped wholesale from weppy - function parseRIFF(string) { + function parseRIFF(string){ var offset = 0; - var chunks = { }; + var chunks = {}; while (offset < string.length) { var id = string.substr(offset, 4); - var len = parseInt(string.substr(offset + 4, 4).split('').map(function(i) { + var len = parseInt(string.substr(offset + 4, 4).split('').map(function(i){ var unpadded = i.charCodeAt(0).toString(2); - return (new Array(8 - unpadded.length + 1)).join('0') + unpadded; - }).join(''), 2); + return (new Array(8 - unpadded.length + 1)).join('0') + unpadded + }).join(''),2); var data = string.substr(offset + 4 + 4, len); offset += 4 + 4 + len; chunks[id] = chunks[id] || []; @@ -891,65 +944,67 @@ var Whammy = (function() { // here's a little utility function that acts as a utility for other functions // basically, the only purpose is for encoding "Duration", which is encoded as // a double (considerably more difficult to encode than an integer) - - function doubleToString(num) { + function doubleToString(num){ return [].slice.call( - new Uint8Array( - ( - new Float64Array([num]) // create a float64 array - // extract the array buffer - ).buffer), 0) // convert the Uint8Array into a regular array - .map(function(e) { // since it's a regular array, we can now use map - return String.fromCharCode(e); // encode all the bytes individually + new Uint8Array( + ( + new Float64Array([num]) //create a float64 array + ).buffer) //extract the array buffer + , 0) // convert the Uint8Array into a regular array + .map(function(e){ //since it's a regular array, we can now use map + return String.fromCharCode(e) // encode all the bytes individually }) - .reverse() // correct the byte endianness (assume it's little endian for now) - .join(''); // join the bytes in holy matrimony as a string + .reverse() //correct the byte endianness (assume it's little endian for now) + .join('') // join the bytes in holy matrimony 
as a string } - function WhammyVideo(speed, quality) { // a more abstract-ish API + function WhammyVideo(speed, quality){ // a more abstract-ish API this.frames = []; this.duration = 1000 / speed; this.quality = quality || 0.8; } - WhammyVideo.prototype.add = function(frame, duration) { - if (typeof duration != 'undefined' && this.duration) throw "you can't pass a duration if the fps is set"; - if ('canvas' in frame) { //CanvasRenderingContext2D + WhammyVideo.prototype.add = function(frame, duration){ + if(typeof duration != 'undefined' && this.duration) throw "you can't pass a duration if the fps is set"; + if(typeof duration == 'undefined' && !this.duration) throw "if you don't have the fps set, you ned to have durations here." + if('canvas' in frame){ //CanvasRenderingContext2D frame = frame.canvas; } - if ('toDataURL' in frame) { - frame = frame.toDataURL('image/webp', this.quality); - } else if (typeof frame != "string") { - throw "frame must be a a HTMLCanvasElement, a CanvasRenderingContext2D or a DataURI formatted string"; + if('toDataURL' in frame){ + frame = frame.toDataURL('image/webp', this.quality) + }else if(typeof frame != "string"){ + throw "frame must be a a HTMLCanvasElement, a CanvasRenderingContext2D or a DataURI formatted string" } - if (!( /^data:image\/webp;base64,/ig ).test(frame)) { + if (!(/^data:image\/webp;base64,/ig).test(frame)) { throw "Input must be formatted properly as a base64 encoded DataURI of type image/webp"; } this.frames.push({ image: frame, duration: duration || this.duration - }); - }; - WhammyVideo.prototype.compile = function() { - return new toWebM(this.frames.map(function(frame) { + }) + } + + WhammyVideo.prototype.compile = function(outputAsArray){ + return new toWebM(this.frames.map(function(frame){ var webp = parseWebP(parseRIFF(atob(frame.image.slice(23)))); webp.duration = frame.duration; return webp; - })); - }; + }), outputAsArray) + } + return { Video: WhammyVideo, - fromImageArray: function(images, fps) { - 
return toWebM(images.map(function(image) { - var webp = parseWebP(parseRIFF(atob(image.slice(23)))); + fromImageArray: function(images, fps, outputAsArray){ + return toWebM(images.map(function(image){ + var webp = parseWebP(parseRIFF(atob(image.slice(23)))) webp.duration = 1000 / fps; return webp; - })); + }), outputAsArray) }, toWebM: toWebM - // expose methods of madness - }; -})(); + // expose methods of madness + } +})() // gifEncoder diff --git a/RecordRTC/index.html b/RecordRTC/index.html index 4ce30804..2805c0fd 100644 --- a/RecordRTC/index.html +++ b/RecordRTC/index.html @@ -21,7 +21,7 @@ width: 10em; } - video { vertical-align: top; } + video { vertical-align: top;max-width: 100%; } input { border: 1px solid #d9d9d9; @@ -412,6 +412,11 @@

RecordRTC is MIT licensed on Github! Documentation

+ +
+

Latest Updates

+
+

diff --git a/Signaling.md b/Signaling.md index 5720a387..f5dc11dd 100644 --- a/Signaling.md +++ b/Signaling.md @@ -57,6 +57,49 @@ connection.openSignalingChannel = function(callback) { Want to use XHR, WebSockets, SIP, XMPP, etc. for signaling? Read [this post](https://github.com/muaz-khan/WebRTC-Experiment/issues/56#issuecomment-20090650). += + +#### Want to use [Firebase](https://www.firebase.com/) for signaling? + +```javascript +var config = { + openSocket: function (config) { + var channel = config.channel || location.href.replace(/\/|:|#|%|\.|\[|\]/g, ''); + var socket = new Firebase('https://chat.firebaseIO.com/' + channel); + socket.channel = channel; + socket.on('child_added', function (data) { + config.onmessage(data.val()); + }); + socket.send = function (data) { + this.push(data); + } + config.onopen && setTimeout(config.onopen, 1); + socket.onDisconnect().remove(); + return socket; + } +} +``` + += + +#### Want to use [PubNub](http://www.pubnub.com/) for signaling? + +```javascript +var config = { + openSocket: function (config) { + var channel = config.channel || location.href.replace(/\/|:|#|%|\.|\[|\]/g, ''); + var socket = io.connect('https://pubsub.pubnub.com/' + channel, { + publish_key: 'demo', + subscribe_key: 'demo', + channel: config.channel || channel, + ssl: true + }); + if (config.onopen) socket.on('connect', config.onopen); + socket.on('message', config.onmessage); + return socket; + } +} +``` = diff --git a/file-sharing/data-connection.js b/file-sharing/data-connection.js index 392eb8bf..b7adce81 100644 --- a/file-sharing/data-connection.js +++ b/file-sharing/data-connection.js @@ -1,7 +1,6 @@ -// 2013, @muazkh - github.com/muaz-khan -// MIT License - https://webrtc-experiment.appspot.com/licence/ -// Documentation (file sharing) - https://github.com/muaz-khan/WebRTC-Experiment/tree/master/file-sharing -// Documentation (text chat) - https://github.com/muaz-khan/WebRTC-Experiment/tree/master/text-chat +// 2013, Muaz Khan - 
www.muazkhan.com +// MIT License - www.webrtc-experiment.com/licence/ +// Documentation - https://github.com/muaz-khan/WebRTC-Experiment/tree/master/file-sharing (function() { @@ -576,7 +575,7 @@ peer.createOffer(function(sdp) { sdp = serializeSdp(sdp, config); peer.setLocalDescription(sdp); - }, null, offerAnswerConstraints); + }, onSdpError, offerAnswerConstraints); } else if (isFirefox) { navigator.mozGetUserMedia({ @@ -590,7 +589,7 @@ sdp: sdp, userid: config.to }); - }, null, offerAnswerConstraints); + }, onSdpError, offerAnswerConstraints); }, mediaError); } @@ -600,7 +599,7 @@ return this; }, setRemoteDescription: function(sdp) { - this.peer.setRemoteDescription(new RTCSessionDescription(sdp)); + this.peer.setRemoteDescription(new RTCSessionDescription(sdp), onSdpSuccess, onSdpError); }, addIceCandidate: function(candidate) { this.peer.addIceCandidate(new RTCIceCandidate({ @@ -609,6 +608,12 @@ })); } }; + + function onSdpSuccess() {} + + function onSdpError(e) { + console.error('sdp error:', e.name, e.message); + } // var answer = Answer.createAnswer(config); // answer.setRemoteDescription(sdp); @@ -633,14 +638,14 @@ }, function(stream) { peer.addStream(stream); - peer.setRemoteDescription(new RTCSessionDescription(config.sdp)); + peer.setRemoteDescription(new RTCSessionDescription(config.sdp), onSdpSuccess, onSdpError); peer.createAnswer(function(sdp) { peer.setLocalDescription(sdp); config.onsdp({ sdp: sdp, userid: config.to }); - }, null, offerAnswerConstraints); + }, onSdpError, offerAnswerConstraints); }, mediaError); } @@ -665,7 +670,7 @@ }; if (isChrome) { - peer.setRemoteDescription(new RTCSessionDescription(config.sdp)); + peer.setRemoteDescription(new RTCSessionDescription(config.sdp), onSdpSuccess, onSdpError); peer.createAnswer(function(sdp) { sdp = serializeSdp(sdp, config); peer.setLocalDescription(sdp); @@ -674,7 +679,7 @@ sdp: sdp, userid: config.to }); - }, null, offerAnswerConstraints); + }, onSdpError, offerAnswerConstraints); } this.peer 
= peer; diff --git a/firebase-debug.js b/firebase-debug.js new file mode 100644 index 00000000..eb062809 --- /dev/null +++ b/firebase-debug.js @@ -0,0 +1,8351 @@ +var COMPILED = false; +var goog = goog || {}; +goog.global = this; +goog.DEBUG = true; +goog.LOCALE = "en"; +goog.provide = function(name) { + if(!COMPILED) { + if(goog.isProvided_(name)) { + throw Error('Namespace "' + name + '" already declared.'); + } + delete goog.implicitNamespaces_[name]; + var namespace = name; + while(namespace = namespace.substring(0, namespace.lastIndexOf("."))) { + if(goog.getObjectByName(namespace)) { + break + } + goog.implicitNamespaces_[namespace] = true + } + } + goog.exportPath_(name) +}; +goog.setTestOnly = function(opt_message) { + if(COMPILED && !goog.DEBUG) { + opt_message = opt_message || ""; + throw Error("Importing test-only code into non-debug environment" + opt_message ? ": " + opt_message : "."); + } +}; +if(!COMPILED) { + goog.isProvided_ = function(name) { + return!goog.implicitNamespaces_[name] && !!goog.getObjectByName(name) + }; + goog.implicitNamespaces_ = {} +} +goog.exportPath_ = function(name, opt_object, opt_objectToExportTo) { + var parts = name.split("."); + var cur = opt_objectToExportTo || goog.global; + if(!(parts[0] in cur) && cur.execScript) { + cur.execScript("var " + parts[0]) + } + for(var part;parts.length && (part = parts.shift());) { + if(!parts.length && goog.isDef(opt_object)) { + cur[part] = opt_object + }else { + if(cur[part]) { + cur = cur[part] + }else { + cur = cur[part] = {} + } + } + } +}; +goog.getObjectByName = function(name, opt_obj) { + var parts = name.split("."); + var cur = opt_obj || goog.global; + for(var part;part = parts.shift();) { + if(goog.isDefAndNotNull(cur[part])) { + cur = cur[part] + }else { + return null + } + } + return cur +}; +goog.globalize = function(obj, opt_global) { + var global = opt_global || goog.global; + for(var x in obj) { + global[x] = obj[x] + } +}; +goog.addDependency = function(relPath, 
provides, requires) { + if(!COMPILED) { + var provide, require; + var path = relPath.replace(/\\/g, "/"); + var deps = goog.dependencies_; + for(var i = 0;provide = provides[i];i++) { + deps.nameToPath[provide] = path; + if(!(path in deps.pathToNames)) { + deps.pathToNames[path] = {} + } + deps.pathToNames[path][provide] = true + } + for(var j = 0;require = requires[j];j++) { + if(!(path in deps.requires)) { + deps.requires[path] = {} + } + deps.requires[path][require] = true + } + } +}; +goog.ENABLE_DEBUG_LOADER = true; +goog.require = function(name) { + if(!COMPILED) { + if(goog.isProvided_(name)) { + return + } + if(goog.ENABLE_DEBUG_LOADER) { + var path = goog.getPathFromDeps_(name); + if(path) { + goog.included_[path] = true; + goog.writeScripts_(); + return + } + } + var errorMessage = "goog.require could not find: " + name; + if(goog.global.console) { + goog.global.console["error"](errorMessage) + } + throw Error(errorMessage); + } +}; +goog.basePath = ""; +goog.global.CLOSURE_BASE_PATH; +goog.global.CLOSURE_NO_DEPS; +goog.global.CLOSURE_IMPORT_SCRIPT; +goog.nullFunction = function() { +}; +goog.identityFunction = function(opt_returnValue, var_args) { + return opt_returnValue +}; +goog.abstractMethod = function() { + throw Error("unimplemented abstract method"); +}; +goog.addSingletonGetter = function(ctor) { + ctor.getInstance = function() { + if(ctor.instance_) { + return ctor.instance_ + } + if(goog.DEBUG) { + goog.instantiatedSingletons_[goog.instantiatedSingletons_.length] = ctor + } + return ctor.instance_ = new ctor + } +}; +goog.instantiatedSingletons_ = []; +if(!COMPILED && goog.ENABLE_DEBUG_LOADER) { + goog.included_ = {}; + goog.dependencies_ = {pathToNames:{}, nameToPath:{}, requires:{}, visited:{}, written:{}}; + goog.inHtmlDocument_ = function() { + var doc = goog.global.document; + return typeof doc != "undefined" && "write" in doc + }; + goog.findBasePath_ = function() { + if(goog.global.CLOSURE_BASE_PATH) { + goog.basePath = 
goog.global.CLOSURE_BASE_PATH; + return + }else { + if(!goog.inHtmlDocument_()) { + return + } + } + var doc = goog.global.document; + var scripts = doc.getElementsByTagName("script"); + for(var i = scripts.length - 1;i >= 0;--i) { + var src = scripts[i].src; + var qmark = src.lastIndexOf("?"); + var l = qmark == -1 ? src.length : qmark; + if(src.substr(l - 7, 7) == "base.js") { + goog.basePath = src.substr(0, l - 7); + return + } + } + }; + goog.importScript_ = function(src) { + var importScript = goog.global.CLOSURE_IMPORT_SCRIPT || goog.writeScriptTag_; + if(!goog.dependencies_.written[src] && importScript(src)) { + goog.dependencies_.written[src] = true + } + }; + goog.writeScriptTag_ = function(src) { + if(goog.inHtmlDocument_()) { + var doc = goog.global.document; + doc.write(' - @@ -42,7 +40,7 @@

Meeting.js - » A WebRTC Library Media Streaming + » A WebRTC Library for Media Streaming

Copyright © 2013 @@ -119,9 +117,37 @@

Remote Peers

How to use Meeting.js?

-

How it works?



- +

How it works?

+

Huge bandwidth and CPU-usage out of multi-peers and number of RTP-ports +

To understand it better; assume that 10 users are sharing video in a group. 40 RTP-ports i.e. streams will be created for each user. All streams are expected diff --git a/meeting/meeting.js b/meeting/meeting.js index a80f2631..64cafc01 100644 --- a/meeting/meeting.js +++ b/meeting/meeting.js @@ -418,6 +418,12 @@ function getToken() { return Math.round(Math.random() * 9999999999) + 9999999999; } + + function onSdpSuccess() {} + + function onSdpError(e) { + console.error('sdp error:', e.name, e.message); + } // var offer = Offer.createOffer(config); // offer.setRemoteDescription(sdp); @@ -444,7 +450,7 @@ peer.createOffer(function(sdp) { peer.setLocalDescription(sdp); if(isFirefox) config.onsdp(sdp, config.to); - }, null, offerAnswerConstraints); + }, onSdpError, offerAnswerConstraints); function sdpCallback() { config.onsdp(peer.localDescription, config.to); @@ -455,7 +461,7 @@ return this; }, setRemoteDescription: function(sdp) { - this.peer.setRemoteDescription(new RTCSessionDescription(sdp)); + this.peer.setRemoteDescription(new RTCSessionDescription(sdp), onSdpSuccess, onSdpError); }, addIceCandidate: function(candidate) { this.peer.addIceCandidate(new RTCIceCandidate({ @@ -482,11 +488,11 @@ config.onicecandidate(event.candidate, config.to); }; - peer.setRemoteDescription(new RTCSessionDescription(config.sdp)); + peer.setRemoteDescription(new RTCSessionDescription(config.sdp), onSdpSuccess, onSdpError); peer.createAnswer(function(sdp) { peer.setLocalDescription(sdp); config.onsdp(sdp, config.to); - }, null, offerAnswerConstraints); + }, onSdpError, offerAnswerConstraints); this.peer = peer; diff --git a/one-to-many-audio-broadcasting/index.html b/one-to-many-audio-broadcasting/index.html index c862c160..e8441396 100644 --- a/one-to-many-audio-broadcasting/index.html +++ b/one-to-many-audio-broadcasting/index.html @@ -8,7 +8,7 @@ - + - + @@ -94,8 +94,8 @@

- How this work?/ - Realtime Chat using RTCDataChannel! + How this work?/ + Realtime Chat using RTCDataChannel!
diff --git a/part-of-screen-sharing/realtime-chat/RTCDataChannel/hangout-ui.js b/part-of-screen-sharing/realtime-chat/RTCDataChannel/hangout-ui.js deleted file mode 100644 index 8fd5f509..00000000 --- a/part-of-screen-sharing/realtime-chat/RTCDataChannel/hangout-ui.js +++ /dev/null @@ -1,63 +0,0 @@ -var hangoutUI = hangout({ - openSocket: function (config) { - if (!window.Firebase) return; - var channel = config.channel || location.hash.replace('#', '') || 'realtime-text-chat'; - console.log('using channel: ' + channel); - var socket = new Firebase('https://chat.firebaseIO.com/' + channel); - socket.channel = channel; - socket.on("child_added", function (data) { - config.onmessage && config.onmessage(data.val()); - }); - socket.send = function (data) { - this.push(data); - } - config.onopen && setTimeout(config.onopen, 1); - socket.onDisconnect().remove(); - return socket; - }, - onRoomFound: function (room) { - hangoutUI.joinRoom({ - roomToken: room.roomToken, - joinUser: room.broadcaster, - userName: Math.random().toString(36).substr(2, 35) - }); - hideUnnecessaryStuff(); - }, - onChannelOpened: function () { - hideUnnecessaryStuff(); - startSendingImage(); - }, - onChannelMessage: function (data) { - onMessage(data); - } -}); - -var startConferencing = document.getElementById('start-conferencing'); -if (startConferencing) { - if (location.hash) startConferencing.innerHTML = 'Start Realtime Private Chat'; - startConferencing.onclick = function () { - hangoutUI.createRoom({ - userName: Math.random().toString(36).substr(2, 35), - roomName: (document.getElementById('conference-name') || {}).value || 'Anonymous' - }); - hideUnnecessaryStuff(); - }; -} - -function hideUnnecessaryStuff() { - startConferencing.style.display = 'none'; - var hideMeLater = document.getElementById('hide-me-later'); - if (hideMeLater) hideMeLater.style.display = 'none'; -} - -(function () { - var uniqueToken = document.getElementById('unique-token'); - if (uniqueToken) { - if 
(location.hash.length > 2) uniqueToken.parentNode.parentNode.parentNode.innerHTML = 'You can share this private room with your friends.'; - else uniqueToken.innerHTML = uniqueToken.parentNode.parentNode.href = (function () { - return "#private-" + ("" + 1e10).replace(/[018]/g, function (a) { - return (a ^ Math.random() * 16 >> a / 4).toString(16); - }); - })(); - } -})(); \ No newline at end of file diff --git a/part-of-screen-sharing/realtime-chat/RTCDataChannel/hangout.js b/part-of-screen-sharing/realtime-chat/RTCDataChannel/hangout.js deleted file mode 100644 index cc3359ac..00000000 --- a/part-of-screen-sharing/realtime-chat/RTCDataChannel/hangout.js +++ /dev/null @@ -1,248 +0,0 @@ -var hangout = function (config) { - var self = { - userToken: uniqueToken() - }, - channels = '--', - isbroadcaster, - isGetNewRoom = true, - defaultSocket = {}, RTCDataChannels = []; - - function openDefaultSocket() { - defaultSocket = config.openSocket({ - onmessage: onDefaultSocketResponse - }); - } - - function onDefaultSocketResponse(response) { - if (response.userToken == self.userToken) return; - - if (isGetNewRoom && response.roomToken && response.broadcaster) config.onRoomFound(response); - - if (response.newParticipant) onNewParticipant(response.newParticipant); - - if (response.userToken && response.joinUser == self.userToken && response.participant && channels.indexOf(response.userToken) == -1) { - channels += response.userToken + '--'; - openSubSocket({ - isofferer: true, - channel: response.channel || response.userToken, - closeSocket: true - }); - } - } - - function getPort() { - return Math.random() * 1000 << 10; - } - - function openSubSocket(_config) { - if (!_config.channel) return; - var socketConfig = { - channel: _config.channel, - onmessage: socketResponse, - onopen: function () { - if (isofferer && !peer) initPeer(); - } - }; - - var socket = config.openSocket(socketConfig), - isofferer = _config.isofferer, - gotstream, - inner = {}, - dataPorts = [getPort(), 
getPort()], - peer; - - var peerConfig = { - onICE: function (candidate) { - socket.send({ - userToken: self.userToken, - candidate: { - sdpMLineIndex: candidate.sdpMLineIndex, - candidate: JSON.stringify(candidate.candidate) - } - }); - }, - onChannelOpened: onChannelOpened, - onChannelMessage: function (event) { - config.onChannelMessage(JSON.parse(event.data)); - } - }; - - function initPeer(offerSDP) { - if (!offerSDP) { - peerConfig.onOfferSDP = sendsdp; - } else { - peerConfig.offerSDP = offerSDP; - peerConfig.onAnswerSDP = sendsdp; - peerConfig.dataPorts = dataPorts; - } - peer = RTCPeerConnection(peerConfig); - } - - function onChannelOpened(channel) { - RTCDataChannels[RTCDataChannels.length] = channel; - channel.send(JSON.stringify({ - message: 'Hi, I\'m ' + self.userName + '!', - sender: self.userName - })); - - if (config.onChannelOpened) config.onChannelOpened(channel); - - if (isbroadcaster && channels.split('--').length > 3) { - /* broadcasting newly connected participant for video-conferencing! 
*/ - defaultSocket.send({ - newParticipant: socket.channel, - userToken: self.userToken - }); - } - - /* closing subsocket here on the offerer side */ - if (_config.closeSocket) socket = null; - - gotstream = true; - } - - function sendsdp(sdp) { - sdp = JSON.stringify(sdp); - var part = parseInt(sdp.length / 3); - - var firstPart = sdp.slice(0, part), - secondPart = sdp.slice(part, sdp.length - 1), - thirdPart = ''; - - if (sdp.length > part + part) { - secondPart = sdp.slice(part, part + part); - thirdPart = sdp.slice(part + part, sdp.length); - } - - socket.send({ - userToken: self.userToken, - firstPart: firstPart, - - /* sending RTCDataChannel ports alongwith sdp */ - dataPorts: dataPorts - }); - - socket.send({ - userToken: self.userToken, - secondPart: secondPart - }); - - socket.send({ - userToken: self.userToken, - thirdPart: thirdPart - }); - } - - function socketResponse(response) { - if (response.userToken == self.userToken) return; - - if (response.firstPart || response.secondPart || response.thirdPart) { - if (response.dataPorts) inner.dataPorts = response.dataPorts; - if (response.firstPart) { - inner.firstPart = response.firstPart; - if (inner.secondPart && inner.thirdPart) selfInvoker(); - } - if (response.secondPart) { - inner.secondPart = response.secondPart; - if (inner.firstPart && inner.thirdPart) selfInvoker(); - } - - if (response.thirdPart) { - inner.thirdPart = response.thirdPart; - if (inner.firstPart && inner.secondPart) selfInvoker(); - } - } - - if (response.candidate && !gotstream) { - peer && peer.addICE({ - sdpMLineIndex: response.candidate.sdpMLineIndex, - candidate: JSON.parse(response.candidate.candidate) - }); - } - } - - var invokedOnce = false; - - function selfInvoker() { - if (invokedOnce) return; - - invokedOnce = true; - - inner.sdp = JSON.parse(inner.firstPart + inner.secondPart + inner.thirdPart); - - /* using random data ports to support wide connection on firefox! 
*/ - if (isofferer) peer.addAnswerSDP(inner.sdp, inner.dataPorts); - else initPeer(inner.sdp); - } - } - - function startBroadcasting() { - defaultSocket.send({ - roomToken: self.roomToken, - roomName: self.roomName, - broadcaster: self.userToken - }); - setTimeout(startBroadcasting, 3000); - } - - function onNewParticipant(channel) { - if (!channel || channels.indexOf(channel) != -1 || channel == self.userToken) return; - channels += channel + '--'; - - var new_channel = uniqueToken(); - openSubSocket({ - channel: new_channel, - closeSocket: true - }); - - defaultSocket.send({ - participant: true, - userToken: self.userToken, - joinUser: channel, - channel: new_channel - }); - } - - function uniqueToken() { - var s4 = function () { - return Math.floor(Math.random() * 0x10000).toString(16); - }; - return s4() + s4() + "-" + s4() + "-" + s4() + "-" + s4() + "-" + s4() + s4() + s4(); - } - - openDefaultSocket(); - return { - createRoom: function (_config) { - self.roomName = _config.roomName || 'Anonymous'; - self.roomToken = uniqueToken(); - self.userName = _config.userName || 'Anonymous'; - - isbroadcaster = true; - isGetNewRoom = false; - startBroadcasting(); - }, - joinRoom: function (_config) { - self.roomToken = _config.roomToken; - self.userName = _config.userName || 'Anonymous'; - isGetNewRoom = false; - - openSubSocket({ - channel: self.userToken - }); - - defaultSocket.send({ - participant: true, - userToken: self.userToken, - joinUser: _config.joinUser - }); - }, - send: function (data) { - var length = RTCDataChannels.length, - data = JSON.stringify(data); - if (!length) return; - for (var i = 0; i < length; i++) { - RTCDataChannels[i].send(data); - } - } - }; -}; \ No newline at end of file diff --git a/part-of-screen-sharing/realtime-chat/how-this-work.html b/part-of-screen-sharing/realtime-chat/how-this-work.html index 5ad17721..68296880 100644 --- a/part-of-screen-sharing/realtime-chat/how-this-work.html +++ 
b/part-of-screen-sharing/realtime-chat/how-this-work.html @@ -22,7 +22,7 @@

- How realtime chat works? + How realtime chat works? Source code on Github

@@ -37,6 +37,6 @@

Sharing part of the screen in realtime!

Browser Support:

Works fine on chrome canary and firefox nightly/aurora/stable. - No-WebRTC chatworks fine on any browser supports Canvas2D! + No-WebRTC chatworks fine on any browser supports Canvas2D! \ No newline at end of file diff --git a/part-of-screen-sharing/realtime-chat/index.html b/part-of-screen-sharing/realtime-chat/index.html deleted file mode 100644 index 34e3113b..00000000 --- a/part-of-screen-sharing/realtime-chat/index.html +++ /dev/null @@ -1,193 +0,0 @@ - - - - - Realtime text chat using RTCDataChannel and html2canvas! ® Muaz Khan - - - - - - - - -

- Realtime text chat using RTCDataChannel and html2canvas! - Source code on Github -

- -
-
-
- - / - - - - - - - How this work?/ - NoWebRTC Chat -
-
- -
-
- Preview image -
- - -
-
- Private chat ?? #123456789 - - - - - - -

intro:

- -
    -
  1. Sharing part of the screen using RTCDataChannel APIs!
  2. -
  3. Everything is synchronized in realtime.
  4. -
  5. It is a realtime text chat with a realtime preview!
  6. -
  7. You can see what your fellow is typing...in realtime!
  8. -
  9. Firefox - nightly/ - aurora/ - stableis preferred/recommended.
  10. -
  11. Works fine on Chrome - canarytoo.
  12. -
- - - \ No newline at end of file diff --git a/part-of-screen-sharing/screenshot-dev.js b/part-of-screen-sharing/screenshot-dev.js index d90746a5..f791fe3a 100644 --- a/part-of-screen-sharing/screenshot-dev.js +++ b/part-of-screen-sharing/screenshot-dev.js @@ -1945,6 +1945,7 @@ _html2canvas.Parse = function (images, options) { if (isElementVisible(el)) { stack = renderElement(el, stack, pseudoElement) || stack; if (!ignoreElementsRegExp.test(el.nodeName)) { + if(el.tagName == 'IFRAME') el = el.contentDocument; _html2canvas.Util.Children(el).forEach(function(node) { if (node.nodeType === 1) { parseElement(node, stack, pseudoElement); diff --git a/part-of-screen-sharing/webrtc-and-part-of-screen-sharing/README.md b/part-of-screen-sharing/webrtc-and-part-of-screen-sharing/README.md deleted file mode 100644 index 94cf9b93..00000000 --- a/part-of-screen-sharing/webrtc-and-part-of-screen-sharing/README.md +++ /dev/null @@ -1,74 +0,0 @@ -#### WebRTC [Part of Screen Sharing Demos](https://www.webrtc-experiment.com/#part-of-screen-sharing) - -1. [Using RTCDataChannel](https://www.webrtc-experiment.com/part-of-screen-sharing/webrtc-data-channel/) -2. [Using Firebase](https://www.webrtc-experiment.com/part-of-screen-sharing/) -3. [A realtime chat using RTCDataChannel](https://googledrive.com/host/0B6GWd_dUUTT8RzVSRVU2MlIxcm8/realtime-chat/) -4. [A realtime chat using Firebase](https://googledrive.com/host/0B6GWd_dUUTT8RzVSRVU2MlIxcm8/realtime-chat/No-WebRTC-Chat.html) - -= - -#### How to use in your own site? 
- -```html - -``` - -```javascript -var divToShare = document.querySelector('div'); -html2canvas(divToShare, { - onrendered: function (canvas) { - var screenshot = canvas.toDataURL(); - // image.src = screenshot; - // context.drawImage(screenshot, x, y, width, height); - // firebase.push(screenshot); - // pubnub.send(screenshot); - // socketio.send(screenshot); - // signaler.send(screenshot); - // window.open(screenshot); - } -}); - -/* - -----Note: - Put above code in a function; use "requestAnimationFrame" to loop the function - and post/transmit DataURL in realtime! - - -----what above code will do? - Above code will take screenshot of the DIV or other HTML element and return you - and image. You can preview image to render in IMG element or draw to Canvas2D. - */ -``` - -= - -#### How.....why.....what.....? - -1. Used `html2canvas` library to take screenshot of the entire webpage or part of webpage. -2. Sharing those screenshots using `RTCDataChannel APIs` or `Firebase`. -3. `Firefox` is preferred because Firefox uses `reliable` 16 `SCTP` streams by default; so it is realtime and superfast on Firefox! - -**To share your custom part of screen**: - -1. Open `index.html` file -2. Find `renderMe` object that is getting an element by id: `render-me` - -....and that's all you need to do! - -= - -#### Browser Support - -These WebRTC **Part of Screen Sharing** experiments works fine on following web-browsers: - -| Browser | Support | -| ------------- |:-------------:| -| Firefox | [Stable](http://www.mozilla.org/en-US/firefox/new/) | -| Firefox | [Aurora](http://www.mozilla.org/en-US/firefox/aurora/) | -| Firefox | [Nightly](http://nightly.mozilla.org/) | -| Google Chrome | [Canary](https://www.google.com/intl/en/chrome/browser/canary.html) | - -= - -#### License - -These WebRTC **Part of Screen Sharing** experiments are released under [MIT licence](https://www.webrtc-experiment.com/licence/) . 
Copyright (c) 2013 [Muaz Khan](https://plus.google.com/100325991024054712503). diff --git a/part-of-screen-sharing/webrtc-and-part-of-screen-sharing/index.html b/part-of-screen-sharing/webrtc-and-part-of-screen-sharing/index.html deleted file mode 100644 index 32f0a834..00000000 --- a/part-of-screen-sharing/webrtc-and-part-of-screen-sharing/index.html +++ /dev/null @@ -1,449 +0,0 @@ - - - - - Part of screen sharing using WebRTC Data Channel ® Muaz Khan - - - - - - - - - - - - - - - -
- ↑ WEBRTC EXPERIMENTS - -

-
- - -
- - Part of screen sharing using WebRTC Data Channel - - -

- -

- Copyright © 2013 Muaz Khan<@muazkh>. -

-
-
-
- - - - - - - - - - -
-
- -
-
-
- -

Shared DIVs will be visible here ↓

-
- - - -
-
- -

intro:

- -
    -
  1. Sharing part of the screen or region of screen (i.e. a DIV, SECTION, ARTICLE or ASIDE)... not the entire screen!
  2. -
  3. Everything inside that DIV is synchronized in realtime.
  4. -
  5. Works fine on all modern web browsers supporting WebRTC Data Channels.
  6. -
-

how?

- -
    -
  1. Share screenshots of the entire DIV, ASIDE or BODY element in realtime!
  2. -
  3. Using WebRTC Data Channel to transmit screenshots in realtime!
  4. -
  5. It is better to "pause sharing" to scroll down and see others' shared regions.
  6. -
  7. It supports multi-user connectivity too! Hmm! i.e. you can share region of screen with many friends!
  8. -
-

Try Part - of Screen Sharing using Firebase -

- -
-<script src="https://www.webrtc-experiment.com/screenshot.js"></script>
-<script>
-var divToShare = document.querySelector('div');
-html2canvas(divToShare, {
-    onrendered: function (canvas) {
-        var screenshot = canvas.toDataURL();
-        // image.src = screenshot;
-        // context.drawImage(screenshot, x, y, width, height);
-        // firebase.push(screenshot);
-        // pubnub.send(screenshot);
-        // socketio.send(screenshot);
-        // signaler.send(screenshot);
-        // window.open(screenshot);
-    }
-});
-
-/*
- 

Note:

- Put above code in a function; use "requestAnimationFrame" to loop the function - and post/transmit DataURL in realtime! - -

What above code will do?

- Above code will take screenshot of the DIV or other HTML element and return you - and image. You can preview image to render in IMG element or draw to Canvas2D. - */ -</script> -
- -
-
- -

Part of - screen sharing is open-sourced on Github!

- -
-
-
-

Feedback

-
- -
- -
-
- - - - diff --git a/part-of-screen-sharing/webrtc-data-channel/index.html b/part-of-screen-sharing/webrtc-data-channel/index.html new file mode 100644 index 00000000..0ff9fafd --- /dev/null +++ b/part-of-screen-sharing/webrtc-data-channel/index.html @@ -0,0 +1,442 @@ + + + + + Part of screen sharing using WebRTC Data Channel ® Muaz Khan + + + + + + + + + + + + + + + + + +
+
+

+ Part of Screen Sharing + using + DataChannel.js +

+

+ Copyright © 2013 + Muaz Khan<@muazkh> + » + @WebRTC Experiments + » + Google+ + » + What's New? +

+
+ +
+ +
+
+
+ + +
+
+

Open New DataChannel Connection

+ +
+ +
+
+ +
+
+ + + +
+ + + +
+
+ + +
+ + + +

Shared DIVs will be visible here ↓

+
+
+ + + + + + + + + + +
+

How to use this experiment?

+
    +
  1. Setup data connection.
  2. +
  3. Click a DIV to share.
  4. +
  5. Change DIV's content; all DIVs are edit-able.
  6. +
+
+ +
+
    +
  1. Sharing part of the screen or region of screen (i.e. a DIV, SECTION, ARTICLE or ASIDE)... not the entire screen!
  2. +
  3. Everything inside that DIV is synchronized in realtime.
  4. +
  5. Works fine on all modern web browsers supporting WebRTC Data Channels.
  6. +
+
+ +
+

Feedback

+
+ +
+ Enter your email too; if you want "direct" reply! +
+ +
+

How?

+
    +
  1. Share screenshots of the entire DIV, ASIDE or BODY element in realtime!
  2. +
  3. Using WebRTC Data Channel to transmit screenshots in realtime!
  4. +
  5. It is better to "pause sharing" to scroll down and see others' shared regions.
  6. +
  7. It supports multi-user connectivity too! Hmm! i.e. you can share region of screen with many friends!
  8. +
+
+ +
+

Try Part + of Screen Sharing using Firebase +

+
+ +
+
+<script src="https://www.webrtc-experiment.com/screenshot.js"></script>
+<script>
+var divToShare = document.querySelector('div');
+html2canvas(divToShare, {
+    onrendered: function (canvas) {
+        var screenshot = canvas.toDataURL();
+        // image.src = screenshot;
+        // context.drawImage(screenshot, x, y, width, height);
+        // firebase.push(screenshot);
+        // pubnub.send(screenshot);
+        // socketio.send(screenshot);
+        // signaler.send(screenshot);
+        // window.open(screenshot);
+    }
+});
+
+/*
+ 

Note:

+ Put the above code in a function; use "requestAnimationFrame" to loop the function + and post/transmit the DataURL in realtime! + +

What does the above code do?

+ The above code will take a screenshot of the DIV or other HTML element and return you + an image. You can preview the image to render it in an IMG element or draw it to Canvas2D. + */ +</script> +
+
+ +
+

Part of + screen sharing is open-sourced on Github!

+
+ +
+

Latest Updates

+
+
+
+ + + + + + diff --git a/screen-sharing/screen.js b/screen-sharing/screen.js index 22e7655a..07666628 100644 --- a/screen-sharing/screen.js +++ b/screen-sharing/screen.js @@ -377,6 +377,12 @@ function getToken() { return Math.round(Math.random() * 9999999999) + 9999999999; } + + function onSdpSuccess() {} + + function onSdpError(e) { + console.error('sdp error:', e.name, e.message); + } // var offer = Offer.createOffer(config); // offer.setRemoteDescription(sdp); @@ -398,14 +404,14 @@ peer.createOffer(function(sdp) { peer.setLocalDescription(sdp); if (config.onsdp) config.onsdp(sdp, config.to); - }, null, offerAnswerConstraints); + }, onSdpError, offerAnswerConstraints); this.peer = peer; return this; }, setRemoteDescription: function(sdp) { - this.peer.setRemoteDescription(new RTCSessionDescription(sdp)); + this.peer.setRemoteDescription(new RTCSessionDescription(sdp), onSdpSuccess, onSdpError); }, addIceCandidate: function(candidate) { this.peer.addIceCandidate(new RTCIceCandidate({ @@ -432,11 +438,11 @@ if (event.candidate) config.onicecandidate(event.candidate, config.to); }; - peer.setRemoteDescription(new RTCSessionDescription(config.sdp)); + peer.setRemoteDescription(new RTCSessionDescription(config.sdp), onSdpSuccess, onSdpError); peer.createAnswer(function(sdp) { peer.setLocalDescription(sdp); if (config.onsdp) config.onsdp(sdp, config.to); - }, null, offerAnswerConstraints); + }, onSdpError, offerAnswerConstraints); this.peer = peer; diff --git a/socket.io/PeerConnection.js b/socket.io/PeerConnection.js index 89d965f1..7a74c8e9 100644 --- a/socket.io/PeerConnection.js +++ b/socket.io/PeerConnection.js @@ -11,16 +11,19 @@ if (!socketURL) throw 'Socket-URL is mandatory.'; if (!socketEvent) socketEvent = 'message'; - var root = this; - captureUserMedia(function() { - new Signaler(root, socketURL, socketEvent); - }, root); + new Signaler(this, socketURL, socketEvent); + + this.addStream = function(stream) { + this.MediaStream = stream; + }; }; function 
Signaler(root, socketURL, socketEvent) { var self = this; root.startBroadcasting = function() { + if(!root.MediaStream) throw 'Offerer must have media stream.'; + (function transmit() { socket.send({ userid: root.userid, @@ -108,8 +111,7 @@ var streamObject = { mediaElement: mediaElement, stream: stream, - userid: root.participant, - type: 'remote' + participantid: root.participant }; function afterRemoteStreamStartedFlowing() { @@ -149,9 +151,8 @@ root.close(); }; - var socket = io.connect(socketURL); - socket.on(socketEvent, function(message) { - if (message.userid == root.userid) return; + function onmessage(message) { + if (message.userid == root.userid) return; root.participant = message.userid; // for pretty logging @@ -189,11 +190,17 @@ if (message.userLeft && message.to == root.userid) { closePeerConnections(); } - }); - - socket.send = function(data) { - socket.emit(socketEvent, data); - }; + } + + var socket = socketURL; + if(typeof socketURL == 'string') { + var socket = io.connect(socketURL); + socket.send = function(data) { + socket.emit(socketEvent, data); + }; + } + + socket.on(socketEvent, onmessage); } var RTCPeerConnection = window.mozRTCPeerConnection || window.webkitRTCPeerConnection; @@ -320,45 +327,6 @@ } }; - function captureUserMedia(callback, root) { - var constraints = { - audio: true, - video: true - }; - - navigator.getUserMedia(constraints, onstream, onerror); - - function onstream(stream) { - callback(); - - stream.onended = function() { - if (root.onStreamEnded) root.onStreamEnded(streamObject); - }; - - root.MediaStream = stream; - - var mediaElement = document.createElement('video'); - mediaElement.id = 'self'; - mediaElement[isFirefox ? 'mozSrcObject' : 'src'] = isFirefox ? 
stream : window.webkitURL.createObjectURL(stream); - mediaElement.autoplay = true; - mediaElement.controls = true; - mediaElement.muted = true; - mediaElement.play(); - - var streamObject = { - mediaElement: mediaElement, - stream: stream, - userid: 'self', - type: 'local' - }; - root.onStreamAdded(streamObject); - } - - function onerror(e) { - console.error(e); - } - } - function merge(mergein, mergeto) { for (var t in mergeto) { mergein[t] = mergeto[t]; @@ -366,4 +334,21 @@ return mergein; } -})(); + window.URL = window.webkitURL || window.URL; + navigator.getMedia = navigator.webkitGetUserMedia || navigator.mozGetUserMedia; + navigator.getUserMedia = function(hints, onsuccess, onfailure) { + if(!hints) hints = {audio:true,video:true}; + if(!onsuccess) throw 'Second argument is mandatory. navigator.getUserMedia(hints,onsuccess,onfailure)'; + + navigator.getMedia(hints, _onsuccess, _onfailure); + + function _onsuccess(stream) { + onsuccess(stream); + } + + function _onfailure(e) { + if(onfailure) onfailure(e); + else throw Error('getUserMedia failed: ' + JSON.stringify(e, null, '\t')); + } + }; +})(); \ No newline at end of file diff --git a/socket.io/index.html b/socket.io/index.html index 34982198..0254fdc3 100644 --- a/socket.io/index.html +++ b/socket.io/index.html @@ -31,7 +31,7 @@ border-radius: 1px; font-size: 2em; margin: .2em; - width: 40%; + width: 30%; } .setup { @@ -95,8 +95,11 @@

+ + Private ?? #123456789 + - +
@@ -111,7 +114,29 @@

// MIT License - https://www.webrtc-experiment.com/licence/ // Documentation - https://github.com/muaz-khan/WebRTC-Experiment/tree/master/socket.io - var peer = new PeerConnection('http://socketio-signaling.jit.su:80'); + var channel = location.href.replace(/\/|:|#|%|\.|\[|\]/g, ''); + var sender = Math.round(Math.random() * 999999999) + 999999999; + + var SIGNALING_SERVER = 'https://www.webrtc-experiment.com:2013/'; + io.connect(SIGNALING_SERVER).emit('new-channel', { + channel: channel, + sender: sender + }); + + var socket = io.connect(SIGNALING_SERVER + channel); + socket.on('connect', function () { + // setup peer connection & pass socket object over the constructor! + }); + + socket.send = function (message) { + socket.emit('message', { + sender: sender, + data: message + }); + }; + + // var peer = new PeerConnection('http://socketio-signaling.jit.su:80'); + var peer = new PeerConnection(socket); peer.onUserFound = function(userid) { if (document.getElementById(userid)) return; var tr = document.createElement('tr'); @@ -126,8 +151,12 @@

button.id = userid; button.style.float = 'right'; button.onclick = function() { - this.disabled = true; - peer.sendParticipationRequest(this.id); + button = this; + getUserMedia(function(stream) { + peer.addStream(stream); + peer.sendParticipationRequest(button.id); + }); + button.disabled = true; }; td2.appendChild(button); @@ -137,12 +166,8 @@

}; peer.onStreamAdded = function(e) { - if (e.type == 'local') document.querySelector('#start-broadcasting').disabled = false; var video = e.mediaElement; - video.setAttribute('width', 600); - video.setAttribute('controls', true); - videosContainer.insertBefore(video, videosContainer.firstChild); video.play(); @@ -164,7 +189,10 @@

document.querySelector('#start-broadcasting').onclick = function() { this.disabled = true; - peer.startBroadcasting(); + getUserMedia(function(stream) { + peer.addStream(stream); + peer.startBroadcasting(); + }); }; document.querySelector('#your-name').onchange = function() { @@ -215,6 +243,41 @@

} window.onresize = scaleVideos; + + // you need to capture getUserMedia yourself! + function getUserMedia(callback) { + var hints = {audio:true,video:{ + optional: [], + mandatory: { + minWidth: 1280, + minHeight: 720, + maxWidth: 1920, + maxHeight: 1080, + minAspectRatio: 1.77 + } + }}; + navigator.getUserMedia(hints,function(stream) { + var video = document.createElement('video'); + video.src = URL.createObjectURL(stream); + video.controls = true; + video.muted = true; + + peer.onStreamAdded({ + mediaElement: video, + userid: 'self', + stream: stream + }); + + callback(stream); + }); + } + + (function() { + var uniqueToken = document.getElementById('unique-token'); + if (uniqueToken) + if (location.hash.length > 2) uniqueToken.parentNode.parentNode.parentNode.innerHTML = '

Share this link

'; + else uniqueToken.innerHTML = uniqueToken.parentNode.parentNode.href = '#' + (Math.random() * new Date().getTime()).toString(36).toUpperCase().replace( /\./g , '-'); + })(); @@ -282,6 +345,81 @@

Simplest Demo

answerer.sendParticipationRequest('offerer');
+ +
+

getUserMedia is "in-your-own-hands"!

+

+ It is up to you to decide when to capture the stream; how to capture it; and the quality of the stream. +

+
+function getUserMedia(callback) {
+    var hints = {
+        audio: true,
+        video: {
+            optional: [],
+			
+            // capture super-hd stream!
+            mandatory: {
+                minWidth: 1280,
+                minHeight: 720,
+                maxWidth: 1920,
+                maxHeight: 1080,
+                minAspectRatio: 1.77
+            }
+        }
+    };
+	
+    navigator.getUserMedia(hints, function (stream) {
+        //    you can use "peer.addStream" to attach stream
+        //    peer.addStream(stream);
+        // or peer.MediaStream = stream;
+	
+        callback(stream);
+	
+        // preview local video
+        var video = document.createElement('video');
+        video.src = URL.createObjectURL(stream);
+        video.controls = true;
+        video.muted = true;
+
+        peer.onStreamAdded({
+            mediaElement: video,
+            userid: 'self',
+            stream: stream
+        });
+    });
+}
+
+
+ +
+

Want to use Socket.io over Node.js?

+
+var channel = location.href.replace(/\/|:|#|%|\.|\[|\]/g, '');
+var sender = Math.round(Math.random() * 999999999) + 999999999;
+
+var SIGNALING_SERVER = 'http://webrtc-signaling.jit.su:80/';
+io.connect(SIGNALING_SERVER).emit('new-channel', {
+    channel: channel,
+    sender: sender
+});
+
+var socket = io.connect(SIGNALING_SERVER + channel);
+socket.send = function (message) {
+    socket.emit('message', {
+        sender: sender,
+        data: message
+    });
+};
+
+// pass "socket" object over the constructor instead of URL
+var peer = new PeerConnection(socket);
+
+ +

+ Check other signaling examples. +

+