From fba5be47a9cd702f3a3b038c2db0a832731a25f9 Mon Sep 17 00:00:00 2001
From: Muaz Khan <=>
Date: Thu, 21 Nov 2013 17:14:36 +0500
Subject: [PATCH] old updates!
---
DataChannel/DataChannel.js | 17 +-
.../MediaStreamRecorder (all-in-one).js | 948 ++
Pluginfree-Screen-Sharing/README.md | 34 +
Pre-recorded-Media-Streaming/index.html | 1 -
.../Library/RTCMultiConnection-v1.1.js | 45 +-
.../Library/RTCMultiConnection-v1.2.js | 45 +-
.../Library/RTCMultiConnection-v1.3.js | 14 +-
.../Library/RTCMultiConnection-v1.4.js | 166 +-
RTCMultiConnection/README.md | 19 +-
.../RecordRTC-and-RTCMultiConnection.html | 2 +-
.../features.html | 147 +
RTCPeerConnection/RTCPeerConnection-v1.3.js | 2 +-
RTCPeerConnection/RTCPeerConnection-v1.4.js | 26 +-
RTCPeerConnection/RTCPeerConnection-v1.5.js | 49 -
RTCall/RTCall.js | 2 +-
RecordRTC/RecordRTC-to-PHP/index.html | 639 +-
RecordRTC/RecordRTC.js | 333 +-
RecordRTC/index.html | 7 +-
Signaling.md | 43 +
file-sharing/data-connection.js | 27 +-
firebase-debug.js | 8351 +++++++++++++++++
meeting/index.html | 41 +-
meeting/meeting.js | 14 +-
one-to-many-audio-broadcasting/index.html | 2 +-
one-to-many-audio-broadcasting/meeting.js | 50 +-
one-to-many-video-broadcasting/index.html | 27 +-
one-to-many-video-broadcasting/meeting.js | 50 +-
.../{ => firebase}/index.html | 0
part-of-screen-sharing/iframe/index.html | 37 +
part-of-screen-sharing/iframe/otherpage.html | 8 +
.../realtime-chat/No-WebRTC-Chat.html | 6 +-
.../RTCDataChannel/hangout-ui.js | 63 -
.../realtime-chat/RTCDataChannel/hangout.js | 248 -
.../realtime-chat/how-this-work.html | 4 +-
.../realtime-chat/index.html | 193 -
part-of-screen-sharing/screenshot-dev.js | 1 +
.../README.md | 74 -
.../index.html | 449 -
.../webrtc-data-channel/index.html | 442 +
screen-sharing/screen.js | 14 +-
socket.io/PeerConnection.js | 93 +-
socket.io/index.html | 158 +-
.../RTCPeerConnection-v1.5.js | 105 +-
text-chat/data-connection.js | 27 +-
video-conferencing/README.md | 44 +
websocket/PeerConnection.js | 113 +-
websocket/index.html | 350 +-
47 files changed, 11367 insertions(+), 2163 deletions(-)
create mode 100644 MediaStreamRecorder/MediaStreamRecorder (all-in-one).js
create mode 100644 RTCMultiConnection/RTCMultiConnection-v1.4-Demos/features.html
create mode 100644 firebase-debug.js
rename part-of-screen-sharing/{ => firebase}/index.html (100%)
create mode 100644 part-of-screen-sharing/iframe/index.html
create mode 100644 part-of-screen-sharing/iframe/otherpage.html
delete mode 100644 part-of-screen-sharing/realtime-chat/RTCDataChannel/hangout-ui.js
delete mode 100644 part-of-screen-sharing/realtime-chat/RTCDataChannel/hangout.js
delete mode 100644 part-of-screen-sharing/realtime-chat/index.html
delete mode 100644 part-of-screen-sharing/webrtc-and-part-of-screen-sharing/README.md
delete mode 100644 part-of-screen-sharing/webrtc-and-part-of-screen-sharing/index.html
create mode 100644 part-of-screen-sharing/webrtc-data-channel/index.html
diff --git a/DataChannel/DataChannel.js b/DataChannel/DataChannel.js
index 23e2d464..fd4e27f0 100644
--- a/DataChannel/DataChannel.js
+++ b/DataChannel/DataChannel.js
@@ -936,6 +936,12 @@
OfferToReceiveVideo: !!moz
}
};
+
+ function onSdpError(e) {
+ console.error('sdp error:', e.name, e.message);
+ }
+
+ function onSdpSuccess() {}
function createOffer() {
if (!options.onOfferSDP) return;
@@ -944,20 +950,20 @@
sessionDescription.sdp = setBandwidth(sessionDescription.sdp);
peerConnection.setLocalDescription(sessionDescription);
options.onOfferSDP(sessionDescription);
- }, null, constraints);
+ }, onSdpError, constraints);
}
function createAnswer() {
if (!options.onAnswerSDP) return;
options.offerSDP = new SessionDescription(options.offerSDP);
- peerConnection.setRemoteDescription(options.offerSDP);
+ peerConnection.setRemoteDescription(options.offerSDP, onSdpSuccess, onSdpError);
peerConnection.createAnswer(function(sessionDescription) {
sessionDescription.sdp = setBandwidth(sessionDescription.sdp);
peerConnection.setLocalDescription(sessionDescription);
options.onAnswerSDP(sessionDescription);
- }, null, constraints);
+ }, onSdpError, constraints);
}
function setBandwidth(sdp) {
@@ -1032,13 +1038,12 @@
}
}
- function useless() {
- }
+ function useless() {}
return {
addAnswerSDP: function(sdp) {
sdp = new SessionDescription(sdp);
- peerConnection.setRemoteDescription(sdp);
+ peerConnection.setRemoteDescription(sdp, onSdpSuccess, onSdpError);
},
addICE: function(candidate) {
peerConnection.addIceCandidate(new IceCandidate({
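
This hunk swaps the null error callbacks for a real onSdpError handler and gives
setRemoteDescription an explicit success callback, which the legacy callback-based
API targeted by this 2013-era library expects. A minimal sketch of the same pattern
in isolation; the remoteOffer argument and the sendAnswerToOfferer callback stand in
for whatever signaling the application uses and are assumptions, not part of
DataChannel.js:

    var PeerConnection = window.mozRTCPeerConnection || window.webkitRTCPeerConnection;
    var SessionDescription = window.mozRTCSessionDescription || window.RTCSessionDescription;

    function onSdpSuccess() {}
    function onSdpError(e) {
        // surfacing the failure instead of passing null makes interop problems visible
        console.error('sdp error:', e.name, e.message);
    }

    // answerer side: apply the remote offer, then create and apply the local answer
    function answerOffer(remoteOffer, sendAnswerToOfferer) {
        var peer = new PeerConnection({ iceServers: [{ url: 'stun:stun.l.google.com:19302' }] });
        peer.setRemoteDescription(new SessionDescription(remoteOffer), onSdpSuccess, onSdpError);
        peer.createAnswer(function(answer) {
            peer.setLocalDescription(answer);
            sendAnswerToOfferer(answer);
        }, onSdpError, { mandatory: { OfferToReceiveAudio: true, OfferToReceiveVideo: true } });
    }
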
diff --git a/MediaStreamRecorder/MediaStreamRecorder (all-in-one).js b/MediaStreamRecorder/MediaStreamRecorder (all-in-one).js
new file mode 100644
index 00000000..ce0090a2
--- /dev/null
+++ b/MediaStreamRecorder/MediaStreamRecorder (all-in-one).js
@@ -0,0 +1,948 @@
+// Muaz Khan - https://github.com/muaz-khan
+// MIT License - https://www.webrtc-experiment.com/licence/
+// Documentation - https://github.com/muaz-khan/WebRTC-Experiment/tree/master/MediaStreamRecorder
+// ==========================================================
+// MediaStreamRecorder.js
+
+function MediaStreamRecorder(mediaStream) {
+ if (!mediaStream) throw 'MediaStream is mandatory.';
+
+ // void start(optional long timeSlice)
+ // timestamp to fire "ondataavailable"
+ this.start = function(timeSlice) {
+ // Media Stream Recording API has not been implemented in chrome yet;
+ // That's why using WebAudio API to record stereo audio in WAV format
+ var Recorder = IsChrome ? window.StereoRecorder : window.MediaRecorder;
+
+ // video recorder (in WebM format)
+ if (this.mimeType === 'video/webm') Recorder = window.WhammyRecorder;
+
+ // video recorder (in GIF format)
+ if (this.mimeType === 'image/gif') Recorder = window.GifRecorder;
+
+ mediaRecorder = new Recorder(mediaStream);
+ mediaRecorder.ondataavailable = this.ondataavailable;
+ mediaRecorder.onstop = this.onstop;
+
+ // Merge all data-types except "function"
+ mediaRecorder = mergeProps(mediaRecorder, this);
+
+ mediaRecorder.start(timeSlice);
+ };
+
+ this.stop = function() {
+ if (mediaRecorder) mediaRecorder.stop();
+ };
+
+ this.ondataavailable = function(blob) {
+ console.log('ondataavailable..', blob);
+ };
+
+ this.onstop = function() {
+ console.log('stopped..');
+ };
+
+ // Reference to "MediaRecorder.js"
+ var mediaRecorder;
+}
+
+// ==========================
+// Cross-Browser Declarations
+
+// Media Stream Recording API representer
+MediaRecorderWrapper = window.MediaRecorder;
+
+// animation-frame used in WebM recording
+requestAnimationFrame = window.requestAnimationFrame || window.webkitRequestAnimationFrame || window.mozRequestAnimationFrame;
+cancelAnimationFrame = window.cancelAnimationFrame || window.webkitCancelAnimationFrame || window.mozCancelAnimationFrame;
+
+// WebAudio API representer
+AudioContext = window.AudioContext || window.webkitAudioContext || window.mozAudioContext;
+
+URL = window.URL || window.webkitURL;
+navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
+
+IsChrome = !!navigator.webkitGetUserMedia;
+
+// Merge all other data-types except "function"
+function mergeProps(mergein, mergeto) {
+ for (var t in mergeto) {
+ if (typeof mergeto[t] !== 'function') {
+ mergein[t] = mergeto[t];
+ }
+ }
+ return mergein;
+}
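
The comments above document MediaStreamRecorder only in IDL-style fragments; a
minimal usage sketch, assuming the prefixed getUserMedia wired up in the
cross-browser block above (the blob handling is illustrative, not part of the
library):

    navigator.getUserMedia({ audio: true }, function(stream) {
        var recorder = new MediaStreamRecorder(stream);

        recorder.ondataavailable = function(blob) {
            // one blob roughly every timeSlice milliseconds:
            // WAV on Chrome (StereoRecorder), Ogg on Firefox (native MediaRecorder)
            console.log('chunk:', blob.size, 'bytes', URL.createObjectURL(blob));
        };

        recorder.start(5000);                                  // fire ondataavailable every ~5s
        setTimeout(function() { recorder.stop(); }, 30000);    // stop after 30s
    }, function(error) {
        console.error('getUserMedia error:', error);
    });
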
+
+
+// Muaz Khan - https://github.com/muaz-khan
+// neizerth - https://github.com/neizerth
+// MIT License - https://www.webrtc-experiment.com/licence/
+// Documentation - https://github.com/streamproc/MediaStreamRecorder
+// ==========================================================
+// MediaRecorder.js
+
+function MediaRecorder(mediaStream) {
+ // void start(optional long timeSlice)
+ // timestamp to fire "ondataavailable"
+ this.start = function(timeSlice) {
+ timeSlice = timeSlice || 1000;
+
+ mediaRecorder = new MediaRecorderWrapper(mediaStream);
+ mediaRecorder.ondataavailable = function(e) {
+ if (mediaRecorder.state == 'recording') {
+ var blob = new window.Blob([e.data], {
+ type: self.mimeType || 'audio/ogg'
+ });
+ self.ondataavailable(blob);
+ mediaRecorder.stop();
+ }
+ };
+
+ mediaRecorder.onstop = function() {
+ if (mediaRecorder.state == 'inactive') {
+ // bug: it is a temporary workaround; it must be fixed.
+ mediaRecorder = new MediaRecorder(mediaStream);
+ mediaRecorder.ondataavailable = self.ondataavailable;
+ mediaRecorder.onstop = self.onstop;
+ mediaRecorder.mimeType = self.mimeType;
+ mediaRecorder.start(timeSlice);
+ }
+
+ self.onstop();
+ };
+
+ // void start(optional long timeSlice)
+ mediaRecorder.start(timeSlice);
+ };
+
+ this.stop = function() {
+ if (mediaRecorder && mediaRecorder.state == 'recording') {
+ mediaRecorder.stop();
+ }
+ };
+
+ this.ondataavailable = function() {};
+ this.onstop = function() {};
+
+ // Reference to itself
+ var self = this;
+
+ // Reference to "MediaRecorderWrapper" object
+ var mediaRecorder;
+}
+
+
+// Muaz Khan - https://github.com/muaz-khan
+// neizerth - https://github.com/neizerth
+// MIT License - https://www.webrtc-experiment.com/licence/
+// Documentation - https://github.com/streamproc/MediaStreamRecorder
+// ==========================================================
+// StereoRecorder.js
+
+function StereoRecorder(mediaStream) {
+ // void start(optional long timeSlice)
+ // timestamp to fire "ondataavailable"
+ this.start = function(timeSlice) {
+ timeSlice = timeSlice || 1000;
+
+ mediaRecorder = new StereoAudioRecorder(mediaStream, this);
+
+ (function looper() {
+ mediaRecorder.record();
+
+ setTimeout(function() {
+ mediaRecorder.stop();
+ looper();
+ }, timeSlice);
+ })();
+ };
+
+ this.stop = function() {
+ if (mediaRecorder) mediaRecorder.stop();
+ };
+
+ this.ondataavailable = function() {};
+
+ // Reference to "StereoAudioRecorder" object
+ var mediaRecorder;
+}
+
+// source code from: http://typedarray.org/wp-content/projects/WebAudioRecorder/script.js
+function StereoAudioRecorder(mediaStream, root) {
+ // variables
+ var leftchannel = [];
+ var rightchannel = [];
+ var recorder;
+ var recording = false;
+ var recordingLength = 0;
+ var volume;
+ var audioInput;
+ var sampleRate = 44100;
+ var audioContext;
+ var context;
+
+ this.record = function() {
+ recording = true;
+ // reset the buffers for the new recording
+ leftchannel.length = rightchannel.length = 0;
+ recordingLength = 0;
+ };
+
+ this.stop = function() {
+ // we stop recording
+ recording = false;
+
+ // we flat the left and right channels down
+ var leftBuffer = mergeBuffers(leftchannel, recordingLength);
+ var rightBuffer = mergeBuffers(rightchannel, recordingLength);
+ // we interleave both channels together
+ var interleaved = interleave(leftBuffer, rightBuffer);
+
+ // we create our wav file
+ var buffer = new ArrayBuffer(44 + interleaved.length * 2);
+ var view = new DataView(buffer);
+
+ // RIFF chunk descriptor
+ writeUTFBytes(view, 0, 'RIFF');
+ view.setUint32(4, 44 + interleaved.length * 2, true);
+ writeUTFBytes(view, 8, 'WAVE');
+ // FMT sub-chunk
+ writeUTFBytes(view, 12, 'fmt ');
+ view.setUint32(16, 16, true);
+ view.setUint16(20, 1, true);
+ // stereo (2 channels)
+ view.setUint16(22, 2, true);
+ view.setUint32(24, sampleRate, true);
+ view.setUint32(28, sampleRate * 4, true);
+ view.setUint16(32, 4, true);
+ view.setUint16(34, 16, true);
+ // data sub-chunk
+ writeUTFBytes(view, 36, 'data');
+ view.setUint32(40, interleaved.length * 2, true);
+
+ // write the PCM samples
+ var lng = interleaved.length;
+ var index = 44;
+ var volume = 1;
+ for (var i = 0; i < lng; i++) {
+ view.setInt16(index, interleaved[i] * (0x7FFF * volume), true);
+ index += 2;
+ }
+
+ // our final binary blob
+ var blob = new Blob([view], { type: 'audio/wav' });
+
+ root.ondataavailable(blob);
+ };
+
+ function interleave(leftChannel, rightChannel) {
+ var length = leftChannel.length + rightChannel.length;
+ var result = new Float32Array(length);
+
+ var inputIndex = 0;
+
+ for (var index = 0; index < length;) {
+ result[index++] = leftChannel[inputIndex];
+ result[index++] = rightChannel[inputIndex];
+ inputIndex++;
+ }
+ return result;
+ }
+
+ function mergeBuffers(channelBuffer, recordingLength) {
+ var result = new Float32Array(recordingLength);
+ var offset = 0;
+ var lng = channelBuffer.length;
+ for (var i = 0; i < lng; i++) {
+ var buffer = channelBuffer[i];
+ result.set(buffer, offset);
+ offset += buffer.length;
+ }
+ return result;
+ }
+
+ function writeUTFBytes(view, offset, string) {
+ var lng = string.length;
+ for (var i = 0; i < lng; i++) {
+ view.setUint8(offset + i, string.charCodeAt(i));
+ }
+ }
+
+ // creates the audio context
+ audioContext = window.AudioContext || window.webkitAudioContext;
+ context = new audioContext();
+
+ // creates a gain node
+ volume = context.createGain();
+
+ // creates an audio node from the microphone incoming stream
+ audioInput = context.createMediaStreamSource(mediaStream);
+
+ // connect the stream to the gain node
+ audioInput.connect(volume);
+
+ /* From the spec: This value controls how frequently the audioprocess event is
+ dispatched and how many sample-frames need to be processed each call.
+ Lower values for buffer size will result in a lower (better) latency.
+ Higher values will be necessary to avoid audio breakup and glitches */
+ var bufferSize = 2048;
+    // prefer the standardized createScriptProcessor; fall back to the older createJavaScriptNode
+    recorder = context.createScriptProcessor ? context.createScriptProcessor(bufferSize, 2, 2) : context.createJavaScriptNode(bufferSize, 2, 2);
+
+ recorder.onaudioprocess = function(e) {
+ if (!recording) return;
+ var left = e.inputBuffer.getChannelData(0);
+ var right = e.inputBuffer.getChannelData(1);
+ // we clone the samples
+ leftchannel.push(new Float32Array(left));
+ rightchannel.push(new Float32Array(right));
+ recordingLength += bufferSize;
+ }; // we connect the recorder
+ volume.connect(recorder);
+ recorder.connect(context.destination);
+}
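
The 44-byte header written in stop() above is the canonical PCM WAV layout; the
derived fields behind the magic numbers are worth spelling out. A small sketch of
that arithmetic, using the same constants the recorder hard-codes:

    // stereo, 16-bit PCM at 44.1 kHz, as hard-coded in StereoAudioRecorder
    var numChannels   = 2;
    var bitsPerSample = 16;
    var sampleRate    = 44100;

    var blockAlign = numChannels * (bitsPerSample / 8);   // 4       -> setUint16(32, 4)
    var byteRate   = sampleRate * blockAlign;             // 176400  -> setUint32(28, sampleRate * 4)
    console.log('blockAlign:', blockAlign, 'byteRate:', byteRate);

    // The data sub-chunk size is interleaved.length * 2 (two bytes per 16-bit sample),
    // which is what setUint32(40, ...) writes. Strictly, the RIFF size at offset 4
    // should be 36 + data size; the code writes 44 + data size, which over-reports
    // by 8 bytes but is accepted by common players.
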
+
+
+// Muaz Khan - https://github.com/muaz-khan
+// neizerth - https://github.com/neizerth
+// MIT License - https://www.webrtc-experiment.com/licence/
+// Documentation - https://github.com/streamproc/MediaStreamRecorder
+// ==========================================================
+// WhammyRecorder.js
+
+function WhammyRecorder(mediaStream) {
+ // void start(optional long timeSlice)
+ // timestamp to fire "ondataavailable"
+ this.start = function(timeSlice) {
+ timeSlice = timeSlice || 1000;
+
+ var imageWidth = this.videoWidth || 320;
+ var imageHeight = this.videoHeight || 240;
+
+ canvas.width = video.width = imageWidth;
+ canvas.height = video.height = imageHeight;
+
+ startTime = Date.now();
+
+ function drawVideoFrame(time) {
+ lastAnimationFrame = requestAnimationFrame(drawVideoFrame);
+
+            if (lastFrameTime === undefined) {
+ lastFrameTime = time;
+ }
+
+ // ~10 fps
+ if (time - lastFrameTime < 90) return;
+
+ context.drawImage(video, 0, 0, imageWidth, imageHeight);
+
+ // whammy.add(canvas, time - lastFrameTime);
+ whammy.add(canvas);
+
+ // console.log('Recording...' + Math.round((Date.now() - startTime) / 1000) + 's');
+ // console.log("fps: ", 1000 / (time - lastFrameTime));
+
+ lastFrameTime = time;
+ }
+
+ lastAnimationFrame = requestAnimationFrame(drawVideoFrame);
+
+ (function getWebMBlob() {
+ setTimeout(function() {
+ endTime = Date.now();
+ console.log('frames captured: ' + whammy.frames.length + ' => ' +
+ ((endTime - startTime) / 1000) + 's video');
+
+ var WebM_Blob = whammy.compile();
+ self.ondataavailable(WebM_Blob);
+
+ whammy.frames = [];
+ getWebMBlob();
+ }, timeSlice);
+ })();
+ };
+
+ this.stop = function() {
+ if (lastAnimationFrame)
+ cancelAnimationFrame(lastAnimationFrame);
+ };
+
+ this.ondataavailable = function() {};
+ this.onstop = function() {};
+
+ // Reference to itself
+ var self = this;
+
+ var canvas = document.createElement('canvas');
+ var context = canvas.getContext('2d');
+
+ var video = document.createElement('video');
+ video.muted = true;
+ video.autoplay = true;
+ video.src = URL.createObjectURL(mediaStream);
+ video.play();
+
+ var lastAnimationFrame = null;
+ var startTime, endTime, lastFrameTime;
+ var whammy = new Whammy.Video(10, 0.6);
+}
+
+
+// Muaz Khan - https://github.com/muaz-khan
+// neizerth - https://github.com/neizerth
+// MIT License - https://www.webrtc-experiment.com/licence/
+// Documentation - https://github.com/streamproc/MediaStreamRecorder
+// ==========================================================
+// GifRecorder.js
+
+function GifRecorder(mediaStream) {
+ // void start(optional long timeSlice)
+ // timestamp to fire "ondataavailable"
+ this.start = function(timeSlice) {
+ timeSlice = timeSlice || 1000;
+
+ var imageWidth = this.videoWidth || 320;
+ var imageHeight = this.videoHeight || 240;
+
+ canvas.width = video.width = imageWidth;
+ canvas.height = video.height = imageHeight;
+
+ // external library to record as GIF images
+ gifEncoder = new GIFEncoder();
+
+ // void setRepeat(int iter)
+ // Sets the number of times the set of GIF frames should be played.
+ // Default is 1; 0 means play indefinitely.
+ gifEncoder.setRepeat(0);
+
+ // void setFrameRate(Number fps)
+ // Sets frame rate in frames per second.
+ // Equivalent to setDelay(1000/fps).
+ // Using "setDelay" instead of "setFrameRate"
+ gifEncoder.setDelay(this.frameRate || 200);
+
+ // void setQuality(int quality)
+ // Sets quality of color quantization (conversion of images to the
+ // maximum 256 colors allowed by the GIF specification).
+ // Lower values (minimum = 1) produce better colors,
+ // but slow processing significantly. 10 is the default,
+ // and produces good color mapping at reasonable speeds.
+ // Values greater than 20 do not yield significant improvements in speed.
+ gifEncoder.setQuality(this.quality || 10);
+
+ // Boolean start()
+ // This writes the GIF Header and returns false if it fails.
+ gifEncoder.start();
+
+ startTime = Date.now();
+
+ function drawVideoFrame(time) {
+ lastAnimationFrame = requestAnimationFrame(drawVideoFrame);
+
+            if (lastFrameTime === undefined) {
+ lastFrameTime = time;
+ }
+
+ // ~10 fps
+ if (time - lastFrameTime < 90) return;
+
+ context.drawImage(video, 0, 0, imageWidth, imageHeight);
+
+ gifEncoder.addFrame(context);
+
+ // console.log('Recording...' + Math.round((Date.now() - startTime) / 1000) + 's');
+ // console.log("fps: ", 1000 / (time - lastFrameTime));
+
+ lastFrameTime = time;
+ }
+
+ lastAnimationFrame = requestAnimationFrame(drawVideoFrame);
+
+        (function getGifBlob() {
+            setTimeout(function() {
+                endTime = Date.now();
+
+                var gifBlob = new Blob([new Uint8Array(gifEncoder.stream().bin)], {
+                    type: 'image/gif'
+                });
+                self.ondataavailable(gifBlob);
+
+                // bug: find a way to clear old recorded blobs
+                gifEncoder.stream().bin = [];
+
+                getGifBlob();
+ }, timeSlice);
+ })();
+ };
+
+ this.stop = function() {
+ if (lastAnimationFrame) cancelAnimationFrame(lastAnimationFrame);
+ };
+
+ this.ondataavailable = function() {};
+ this.onstop = function() {};
+
+ // Reference to itself
+ var self = this;
+
+ var canvas = document.createElement('canvas');
+ var context = canvas.getContext('2d');
+
+ var video = document.createElement('video');
+ video.muted = true;
+ video.autoplay = true;
+ video.src = URL.createObjectURL(mediaStream);
+ video.play();
+
+ var lastAnimationFrame = null;
+ var startTime, endTime, lastFrameTime;
+
+ var gifEncoder;
+}
+
+
+// Muaz Khan - https://github.com/muaz-khan
+// neizerth - https://github.com/neizerth
+// MIT License - https://www.webrtc-experiment.com/licence/
+// Documentation - https://github.com/streamproc/MediaStreamRecorder
+// ==========================================================
+// whammy.js
+
+// ==========================================================
+
+// Note:
+// ==========================================================
+// whammy.js is an "external library"
+// and has its own copyrights. Taken from "Whammy" project.
+
+var Whammy = (function() {
+ // in this case, frames has a very specific meaning, which will be
+ // detailed once i finish writing the code
+
+ function toWebM(frames) {
+ var info = checkFrames(frames);
+ var counter = 0;
+ var EBML = [
+ {
+ "id": 0x1a45dfa3, // EBML
+ "data": [
+ {
+ "data": 1,
+ "id": 0x4286 // EBMLVersion
+ },
+ {
+ "data": 1,
+ "id": 0x42f7 // EBMLReadVersion
+ },
+ {
+ "data": 4,
+ "id": 0x42f2 // EBMLMaxIDLength
+ },
+ {
+ "data": 8,
+ "id": 0x42f3 // EBMLMaxSizeLength
+ },
+ {
+ "data": "webm",
+ "id": 0x4282 // DocType
+ },
+ {
+ "data": 2,
+ "id": 0x4287 // DocTypeVersion
+ },
+ {
+ "data": 2,
+ "id": 0x4285 // DocTypeReadVersion
+ }
+ ]
+ },
+ {
+ "id": 0x18538067, // Segment
+ "data": [
+ {
+ "id": 0x1549a966, // Info
+ "data": [
+ {
+ "data": 1e6, //do things in millisecs (num of nanosecs for duration scale)
+ "id": 0x2ad7b1 // TimecodeScale
+ },
+ {
+ "data": "whammy",
+ "id": 0x4d80 // MuxingApp
+ },
+ {
+ "data": "whammy",
+ "id": 0x5741 // WritingApp
+ },
+ {
+ "data": doubleToString(info.duration),
+ "id": 0x4489 // Duration
+ }
+ ]
+ },
+ {
+ "id": 0x1654ae6b, // Tracks
+ "data": [
+ {
+ "id": 0xae, // TrackEntry
+ "data": [
+ {
+ "data": 1,
+ "id": 0xd7 // TrackNumber
+ },
+ {
+ "data": 1,
+ "id": 0x63c5 // TrackUID
+ },
+ {
+ "data": 0,
+ "id": 0x9c // FlagLacing
+ },
+ {
+ "data": "und",
+ "id": 0x22b59c // Language
+ },
+ {
+ "data": "V_VP8",
+ "id": 0x86 // CodecID
+ },
+ {
+ "data": "VP8",
+ "id": 0x258688 // CodecName
+ },
+ {
+ "data": 1,
+ "id": 0x83 // TrackType
+ },
+ {
+ "id": 0xe0, // Video
+ "data": [
+ {
+ "data": info.width,
+ "id": 0xb0 // PixelWidth
+ },
+ {
+ "data": info.height,
+ "id": 0xba // PixelHeight
+ }
+ ]
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "id": 0x1f43b675, // Cluster
+ "data": [
+ {
+ "data": 0,
+ "id": 0xe7 // Timecode
+ }
+ ].concat(frames.map(function(webp) {
+ var block = makeSimpleBlock({
+ discardable: 0,
+ frame: webp.data.slice(4),
+ invisible: 0,
+ keyframe: 1,
+ lacing: 0,
+ trackNum: 1,
+ timecode: Math.round(counter)
+ });
+ counter += webp.duration;
+ return {
+ data: block,
+ id: 0xa3
+ };
+ }))
+ }
+ ]
+ }
+ ];
+ return generateEBML(EBML);
+ }
+
+ // sums the lengths of all the frames and gets the duration, woo
+
+ function checkFrames(frames) {
+ var width = frames[0].width,
+ height = frames[0].height,
+ duration = frames[0].duration;
+ for (var i = 1; i < frames.length; i++) {
+ if (frames[i].width != width) throw "Frame " + (i + 1) + " has a different width";
+ if (frames[i].height != height) throw "Frame " + (i + 1) + " has a different height";
+ if (frames[i].duration < 0) throw "Frame " + (i + 1) + " has a weird duration";
+ duration += frames[i].duration;
+ }
+ return {
+ duration: duration,
+ width: width,
+ height: height
+ };
+ }
+
+
+ function numToBuffer(num) {
+ var parts = [];
+ while (num > 0) {
+ parts.push(num & 0xff);
+ num = num >> 8;
+ }
+ return new Uint8Array(parts.reverse());
+ }
+
+ function strToBuffer(str) {
+ // return new Blob([str]);
+
+ var arr = new Uint8Array(str.length);
+ for (var i = 0; i < str.length; i++) {
+ arr[i] = str.charCodeAt(i);
+ }
+ return arr;
+
+ // this is slower
+
+ /*
+ return new Uint8Array(str.split('').map(function(e){
+ return e.charCodeAt(0)
+ }))
+ */
+ }
+
+
+ // sorry this is ugly, and sort of hard to understand exactly why this was done
+ // at all really, but the reason is that there's some code below that i dont really
+ // feel like understanding, and this is easier than using my brain.
+
+ function bitsToBuffer(bits) {
+ var data = [];
+ var pad = (bits.length % 8) ? (new Array(1 + 8 - (bits.length % 8))).join('0') : '';
+ bits = pad + bits;
+ for (var i = 0; i < bits.length; i += 8) {
+ data.push(parseInt(bits.substr(i, 8), 2));
+ }
+ return new Uint8Array(data);
+ }
+
+ function generateEBML(json) {
+ var ebml = [];
+ for (var i = 0; i < json.length; i++) {
+ var data = json[i].data;
+
+ // console.log(data);
+
+ if (typeof data == 'object') data = generateEBML(data);
+ if (typeof data == 'number') data = bitsToBuffer(data.toString(2));
+ if (typeof data == 'string') data = strToBuffer(data);
+
+ // console.log(data)
+
+ var len = data.size || data.byteLength;
+ var zeroes = Math.ceil(Math.ceil(Math.log(len) / Math.log(2)) / 8);
+ var size_str = len.toString(2);
+ var padded = (new Array((zeroes * 7 + 7 + 1) - size_str.length)).join('0') + size_str;
+ var size = (new Array(zeroes)).join('0') + '1' + padded;
+
+ // i actually dont quite understand what went on up there, so I'm not really
+ // going to fix this, i'm probably just going to write some hacky thing which
+ // converts that string into a buffer-esque thing
+
+ ebml.push(numToBuffer(json[i].id));
+ ebml.push(bitsToBuffer(size));
+ ebml.push(data);
+ }
+ return new Blob(ebml, {
+ type: "video/webm"
+ });
+ }
+
+ // OKAY, so the following two functions are the string-based old stuff, the reason they're
+ // still sort of in here, is that they're actually faster than the new blob stuff because
+ // getAsFile isn't widely implemented, or at least, it doesn't work in chrome, which is the
+ // only browser which supports get as webp
+
+ // Converting between a string of 0010101001's and binary back and forth is probably inefficient
+ // TODO: get rid of this function
+
+ function toBinStr_old(bits) {
+ var data = '';
+ var pad = (bits.length % 8) ? (new Array(1 + 8 - (bits.length % 8))).join('0') : '';
+ bits = pad + bits;
+ for (var i = 0; i < bits.length; i += 8) {
+ data += String.fromCharCode(parseInt(bits.substr(i, 8), 2));
+ }
+ return data;
+ }
+
+ function generateEBML_old(json) {
+ var ebml = '';
+ for (var i = 0; i < json.length; i++) {
+ var data = json[i].data;
+ if (typeof data == 'object') data = generateEBML_old(data);
+ if (typeof data == 'number') data = toBinStr_old(data.toString(2));
+
+ var len = data.length;
+ var zeroes = Math.ceil(Math.ceil(Math.log(len) / Math.log(2)) / 8);
+ var size_str = len.toString(2);
+ var padded = (new Array((zeroes * 7 + 7 + 1) - size_str.length)).join('0') + size_str;
+ var size = (new Array(zeroes)).join('0') + '1' + padded;
+
+ ebml += toBinStr_old(json[i].id.toString(2)) + toBinStr_old(size) + data;
+
+ }
+ return ebml;
+ }
+
+ // woot, a function that's actually written for this project!
+ // this parses some json markup and makes it into that binary magic
+    // which can then get shoved into the matroska container (peaceably)
+
+ function makeSimpleBlock(data) {
+ var flags = 0;
+ if (data.keyframe) flags |= 128;
+ if (data.invisible) flags |= 8;
+ if (data.lacing) flags |= (data.lacing << 1);
+ if (data.discardable) flags |= 1;
+ if (data.trackNum > 127) {
+ throw "TrackNumber > 127 not supported";
+ }
+ var out = [data.trackNum | 0x80, data.timecode >> 8, data.timecode & 0xff, flags].map(function(e) {
+ return String.fromCharCode(e);
+ }).join('') + data.frame;
+
+ return out;
+ }
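
A quick worked example of the four header bytes this produces for the values the
recorder actually uses (track 1, cluster-relative timecode 0, keyframe set, no
lacing, not invisible or discardable):

    var header = [1 | 0x80, 0 >> 8, 0 & 0xff, 128].map(function(b) {
        return '0x' + b.toString(16);
    });
    console.log(header); // ["0x81", "0x0", "0x0", "0x80"]
    // so every SimpleBlock here starts with 0x81 0x00 0x00 0x80, followed by raw VP8 data
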
+
+ // here's something else taken verbatim from weppy, awesome rite?
+
+ function parseWebP(riff) {
+ var VP8 = riff.RIFF[0].WEBP[0];
+
+ var frame_start = VP8.indexOf('\x9d\x01\x2a'); // A VP8 keyframe starts with the 0x9d012a header
+ for (var i = 0, c = []; i < 4; i++) c[i] = VP8.charCodeAt(frame_start + 3 + i);
+
+ var width, horizontal_scale, height, vertical_scale, tmp;
+
+ //the code below is literally copied verbatim from the bitstream spec
+ tmp = (c[1] << 8) | c[0];
+ width = tmp & 0x3FFF;
+ horizontal_scale = tmp >> 14;
+ tmp = (c[3] << 8) | c[2];
+ height = tmp & 0x3FFF;
+ vertical_scale = tmp >> 14;
+ return {
+ width: width,
+ height: height,
+ data: VP8,
+ riff: riff
+ };
+ }
+
+ // i think i'm going off on a riff by pretending this is some known
+ // idiom which i'm making a casual and brilliant pun about, but since
+ // i can't find anything on google which conforms to this idiomatic
+ // usage, I'm assuming this is just a consequence of some psychotic
+ // break which makes me make up puns. well, enough riff-raff (aha a
+ // rescue of sorts), this function was ripped wholesale from weppy
+
+ function parseRIFF(string) {
+ var offset = 0;
+ var chunks = { };
+
+ while (offset < string.length) {
+ var id = string.substr(offset, 4);
+ var len = parseInt(string.substr(offset + 4, 4).split('').map(function(i) {
+ var unpadded = i.charCodeAt(0).toString(2);
+ return (new Array(8 - unpadded.length + 1)).join('0') + unpadded;
+ }).join(''), 2);
+ var data = string.substr(offset + 4 + 4, len);
+ offset += 4 + 4 + len;
+ chunks[id] = chunks[id] || [];
+
+ if (id == 'RIFF' || id == 'LIST') {
+ chunks[id].push(parseRIFF(data));
+ } else {
+ chunks[id].push(data);
+ }
+ }
+ return chunks;
+ }
+
+ // here's a little utility function that acts as a utility for other functions
+ // basically, the only purpose is for encoding "Duration", which is encoded as
+ // a double (considerably more difficult to encode than an integer)
+
+ function doubleToString(num) {
+ return [].slice.call(
+ new Uint8Array(
+ (
+ new Float64Array([num]) // create a float64 array
+ // extract the array buffer
+ ).buffer), 0) // convert the Uint8Array into a regular array
+ .map(function(e) { // since it's a regular array, we can now use map
+ return String.fromCharCode(e); // encode all the bytes individually
+ })
+ .reverse() // correct the byte endianness (assume it's little endian for now)
+ .join(''); // join the bytes in holy matrimony as a string
+ }
+
+ function WhammyVideo(speed, quality) { // a more abstract-ish API
+ this.frames = [];
+ this.duration = 1000 / speed;
+ this.quality = quality || 0.8;
+ }
+
+ WhammyVideo.prototype.add = function(frame, duration) {
+ if (typeof duration != 'undefined' && this.duration) throw "you can't pass a duration if the fps is set";
+ if ('canvas' in frame) { //CanvasRenderingContext2D
+ frame = frame.canvas;
+ }
+ if ('toDataURL' in frame) {
+ frame = frame.toDataURL('image/webp', this.quality);
+ } else if (typeof frame != "string") {
+            throw "frame must be an HTMLCanvasElement, a CanvasRenderingContext2D or a DataURI formatted string";
+ }
+ if (!( /^data:image\/webp;base64,/ig ).test(frame)) {
+ throw "Input must be formatted properly as a base64 encoded DataURI of type image/webp";
+ }
+ this.frames.push({
+ image: frame,
+ duration: duration || this.duration
+ });
+ };
+ WhammyVideo.prototype.compile = function() {
+ return new toWebM(this.frames.map(function(frame) {
+ var webp = parseWebP(parseRIFF(atob(frame.image.slice(23))));
+ webp.duration = frame.duration;
+ return webp;
+ }));
+ };
+ return {
+ Video: WhammyVideo,
+ fromImageArray: function(images, fps) {
+ return toWebM(images.map(function(image) {
+ var webp = parseWebP(parseRIFF(atob(image.slice(23))));
+ webp.duration = 1000 / fps;
+ return webp;
+ }));
+ },
+ toWebM: toWebM
+ // expose methods of madness
+ };
+})();
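
End to end, the Whammy code above is driven two ways in this file: frame by frame
through Whammy.Video (as WhammyRecorder does) or in one shot through fromImageArray.
A minimal sketch of the one-shot path, assuming a canvas that is being painted
elsewhere and a video element to show the result; toDataURL('image/webp') only works
in Chrome, which is why this canvas-based WebM path is effectively Chrome-only:

    var frames = [];
    var canvas = document.querySelector('canvas');

    var timer = setInterval(function() {
        frames.push(canvas.toDataURL('image/webp', 0.8));
        if (frames.length === 10) {
            clearInterval(timer);
            var webmBlob = Whammy.fromImageArray(frames, 10);   // 10 fps
            document.querySelector('video').src = URL.createObjectURL(webmBlob);
        }
    }, 100);
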
+
+
+// Muaz Khan - https://github.com/muaz-khan
+// neizerth - https://github.com/neizerth
+// MIT License - https://www.webrtc-experiment.com/licence/
+// Documentation - https://github.com/streamproc/MediaStreamRecorder
+// ==========================================================
+// gif-encoder.js
+
+// ==========================================================
+
+// Note:
+// ==========================================================
+// All libraries listed in this file are "external libraries"
+// and has their own copyrights. Taken from "jsGif" project.
+
[The minified "jsGif" encoder source (encode64, GIFEncoder, NeuQuant, etc.) that
closed this file is garbled in this copy of the patch and is not reproduced here.
The hunks between this file and RTCPeerConnection-v1.3.js are likewise truncated;
the only legible fragment removes an old page header ("Copyright © 2013 - Muaz Khan
- @WebRTC Experiments - Google+ - What's New? ... You can record audio in wav/ogg
file format; and video in either webm format or as animated GIF image.") and adds:
"The biggest issue is that RecordRTC is unable to record both audio and video
streams in single file. RecordRTC is MIT licensed on Github! Documentation".]
diff --git a/RTCPeerConnection/RTCPeerConnection-v1.3.js b/RTCPeerConnection/RTCPeerConnection-v1.3.js
index 20a10034..925645ec 100644
--- a/RTCPeerConnection/RTCPeerConnection-v1.3.js
+++ b/RTCPeerConnection/RTCPeerConnection-v1.3.js
@@ -100,7 +100,7 @@ var RTCPeerConnection = function (options) {
};
}
- if (options.onAnswerSDP && moz) openAnswererChannel();
+ if (options.onAnswerSDP && moz && options.onChannelMessage) openAnswererChannel();
function openAnswererChannel() {
peerConnection.ondatachannel = function (_channel) {
diff --git a/RTCPeerConnection/RTCPeerConnection-v1.4.js b/RTCPeerConnection/RTCPeerConnection-v1.4.js
index a7128116..1463fe0a 100644
--- a/RTCPeerConnection/RTCPeerConnection-v1.4.js
+++ b/RTCPeerConnection/RTCPeerConnection-v1.4.js
@@ -93,35 +93,12 @@ var RTCPeerConnection = function (options) {
return extractedChars;
}
- function getInteropSDP(sdp) {
- // for audio-only streaming: multiple-crypto lines are not allowed
- if (options.onAnswerSDP)
- sdp = sdp.replace(/(a=crypto:0 AES_CM_128_HMAC_SHA1_32)(.*?)(\r\n)/g, '');
-
-
- var inline = getChars() + '\r\n' + (extractedChars = '');
- sdp = sdp.indexOf('a=crypto') == -1 ? sdp.replace(/c=IN/g,
- 'a=crypto:1 AES_CM_128_HMAC_SHA1_80 inline:' + inline +
- 'c=IN') : sdp;
-
- if (options.offerSDP) {
- info('\n--------offer sdp provided by offerer\n');
- info(options.offerSDP.sdp);
- }
-
- info(options.onOfferSDP ? '\n--------offer\n' : '\n--------answer\n');
- info('sdp: ' + sdp);
-
- return sdp;
- }
-
if (moz && !options.onChannelMessage) constraints.mandatory.MozDontOfferDataChannel = true;
function createOffer() {
if (!options.onOfferSDP) return;
peerConnection.createOffer(function (sessionDescription) {
- sessionDescription.sdp = getInteropSDP(sessionDescription.sdp);
peerConnection.setLocalDescription(sessionDescription);
options.onOfferSDP(sessionDescription);
}, null, constraints);
@@ -134,7 +111,6 @@ var RTCPeerConnection = function (options) {
peerConnection.setRemoteDescription(options.offerSDP);
peerConnection.createAnswer(function (sessionDescription) {
- sessionDescription.sdp = getInteropSDP(sessionDescription.sdp);
peerConnection.setLocalDescription(sessionDescription);
options.onAnswerSDP(sessionDescription);
@@ -195,7 +171,7 @@ var RTCPeerConnection = function (options) {
};
}
- if (options.onAnswerSDP && moz) openAnswererChannel();
+ if (options.onAnswerSDP && moz && options.onChannelMessage) openAnswererChannel();
function openAnswererChannel() {
peerConnection.ondatachannel = function (_channel) {
diff --git a/RTCPeerConnection/RTCPeerConnection-v1.5.js b/RTCPeerConnection/RTCPeerConnection-v1.5.js
index 0c06f79f..83b3604f 100644
--- a/RTCPeerConnection/RTCPeerConnection-v1.5.js
+++ b/RTCPeerConnection/RTCPeerConnection-v1.5.js
@@ -99,7 +99,6 @@ function RTCPeerConnection(options) {
if (!options.onOfferSDP) return;
peer.createOffer(function(sessionDescription) {
- sessionDescription.sdp = serializeSdp(sessionDescription.sdp);
peer.setLocalDescription(sessionDescription);
options.onOfferSDP(sessionDescription);
}, onSdpError, constraints);
@@ -113,7 +112,6 @@ function RTCPeerConnection(options) {
//options.offerSDP.sdp = addStereo(options.offerSDP.sdp);
peer.setRemoteDescription(new SessionDescription(options.offerSDP), onSdpSuccess, onSdpError);
peer.createAnswer(function(sessionDescription) {
- sessionDescription.sdp = serializeSdp(sessionDescription.sdp);
peer.setLocalDescription(sessionDescription);
options.onAnswerSDP(sessionDescription);
}, onSdpError, constraints);
@@ -125,53 +123,6 @@ function RTCPeerConnection(options) {
createAnswer();
}
-
- // DataChannel Bandwidth
-
- function setBandwidth(sdp) {
- // remove existing bandwidth lines
- sdp = sdp.replace( /b=AS([^\r\n]+\r\n)/g , '');
- sdp = sdp.replace( /a=mid:data\r\n/g , 'a=mid:data\r\nb=AS:1638400\r\n');
-
- return sdp;
- }
-
- // old: FF<>Chrome interoperability management
-
- function getInteropSDP(sdp) {
- var chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'.split(''),
- extractedChars = '';
-
- function getChars() {
- extractedChars += chars[parseInt(Math.random() * 40)] || '';
- if (extractedChars.length < 40)
- getChars();
-
- return extractedChars;
- }
-
- // usually audio-only streaming failure occurs out of audio-specific crypto line
- // a=crypto:1 AES_CM_128_HMAC_SHA1_32 --------- kAttributeCryptoVoice
- if (options.onAnswerSDP)
- sdp = sdp.replace( /(a=crypto:0 AES_CM_128_HMAC_SHA1_32)(.*?)(\r\n)/g , '');
-
- // video-specific crypto line i.e. SHA1_80
- // a=crypto:1 AES_CM_128_HMAC_SHA1_80 --------- kAttributeCryptoVideo
- var inline = getChars() + '\r\n' + (extractedChars = '');
- sdp = sdp.indexOf('a=crypto') == -1 ? sdp.replace( /c=IN/g ,
- 'a=crypto:1 AES_CM_128_HMAC_SHA1_80 inline:' + inline +
- 'c=IN') : sdp;
-
- return sdp;
- }
-
- function serializeSdp(sdp) {
- if (!moz) sdp = setBandwidth(sdp);
- sdp = getInteropSDP(sdp);
- console.debug(sdp);
- return sdp;
- }
-
// DataChannel management
var channel;
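
The helpers deleted above did two things before v1.5 dropped them: setBandwidth()
injected a large b=AS cap after the data m-line, and getInteropSDP() munged crypto
lines for the old Firefox/Chrome interop path. For reference, a sketch of what the
bandwidth munging did to an SDP fragment (sample lines, illustrative only; the two
regexes are copied verbatim from the removed code):

    var before = 'm=application 1 RTP/SAVPF\r\n' +
                 'a=mid:data\r\n' +
                 'b=AS:30\r\n';

    var after = before
        .replace(/b=AS([^\r\n]+\r\n)/g, '')                            // drop any existing cap
        .replace(/a=mid:data\r\n/g, 'a=mid:data\r\nb=AS:1638400\r\n'); // b=AS is in kilobits/s

    console.log(after);
    // "m=application 1 RTP/SAVPF\r\na=mid:data\r\nb=AS:1638400\r\n"
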
diff --git a/RTCall/RTCall.js b/RTCall/RTCall.js
index c6efa624..09c1f864 100644
--- a/RTCall/RTCall.js
+++ b/RTCall/RTCall.js
@@ -242,7 +242,7 @@
username: 'homeo'
};
- iceServers.iceServers = [TURN];
+ iceServers.iceServers = [STUN, TURN];
}
optional = {
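
The change above restores the STUN entry next to TURN instead of replacing it, so
candidates can still be gathered when the TURN server is unreachable. For context,
a typical config of that era looked roughly like this sketch; the exact TURN URL and
credentials are assumptions (only the 'homeo' username is visible in the hunk):

    var STUN = { url: 'stun:stun.l.google.com:19302' };

    var TURN = {
        url: 'turn:homeo@turn.bistri.com:80',   // assumed public test server of the time
        credential: 'homeo',
        username: 'homeo'
    };

    var iceServers = { iceServers: [STUN, TURN] };

    var peer = new webkitRTCPeerConnection(iceServers, {
        optional: [{ DtlsSrtpKeyAgreement: true }]
    });
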
diff --git a/RecordRTC/RecordRTC-to-PHP/index.html b/RecordRTC/RecordRTC-to-PHP/index.html
index ec04c7cd..4d576736 100644
--- a/RecordRTC/RecordRTC-to-PHP/index.html
+++ b/RecordRTC/RecordRTC-to-PHP/index.html
@@ -6,299 +6,265 @@
-
- RecordRTC-to-PHP ® Muaz Khan
-
-
-
-
-
-
-
-
-
-
-
-
- How to save recorded wav/webm file to PHP server?
+ }
+
+ var record = document.getElementById('record');
+ var stop = document.getElementById('stop');
+ var deleteFiles = document.getElementById('delete');
+
+ var audio = document.querySelector('audio');
+
+ var recordVideo = document.getElementById('record-video');
+ var preview = document.getElementById('preview');
+
+ var container = document.getElementById('container');
+
+ var recordAudio, recordVideo;
+ record.onclick = function() {
+ record.disabled = true;
+ var video_constraints = {
+ mandatory: { },
+ optional: []
+ };
+ navigator.getUserMedia({
+ audio: true,
+ video: video_constraints
+ }, function(stream) {
+ preview.src = window.URL.createObjectURL(stream);
+ preview.play();
+
+ // var legalBufferValues = [256, 512, 1024, 2048, 4096, 8192, 16384];
+ // sample-rates in at least the range 22050 to 96000.
+ recordAudio = RecordRTC(stream, {
+ //bufferSize: 16384,
+ //sampleRate: 45000
+ });
+
+ recordVideo = RecordRTC(stream, {
+ type: 'video'
+ });
+
+ recordAudio.startRecording();
+ recordVideo.startRecording();
+
+ stop.disabled = false;
+ });
+ };
+
+ var fileName;
+ stop.onclick = function() {
+ record.disabled = false;
+ stop.disabled = true;
+
+ fileName = Math.round(Math.random() * 99999999) + 99999999;
+
+ recordAudio.stopRecording();
+ PostBlob(recordAudio.getBlob(), 'audio', fileName + '.wav');
+
+ recordVideo.stopRecording();
+ PostBlob(recordVideo.getBlob(), 'video', fileName + '.webm');
+
+ preview.src = '';
+ deleteFiles.disabled = false;
+ };
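
PostBlob() is called here but its definition falls outside this hunk; a minimal
sketch of what such a helper might look like, reusing the xhr() helper defined
below. The "<type>-blob" field name matches what the PHP snippet further down reads
from $_FILES; the "<type>-filename" field and the save.php endpoint are assumptions:

    function PostBlob(blob, fileType, fileName) {
        var formData = new FormData();
        formData.append(fileType + '-filename', fileName);
        formData.append(fileType + '-blob', blob);

        xhr('save.php', formData, null, function(responseText) {
            console.log(fileType + ' uploaded:', responseText);
        });
    }
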
+
+ deleteFiles.onclick = function() {
+ deleteAudioVideoFiles();
+ };
+
+ function deleteAudioVideoFiles() {
+ deleteFiles.disabled = true;
+ if (!fileName) return;
+ var formData = new FormData();
+ formData.append('delete-file', fileName);
+ xhr('delete.php', formData, null, function(response) {
+ console.log(response);
+ });
+ fileName = null;
+ container.innerHTML = '';
+ }
+
+ function xhr(url, data, progress, callback) {
+ var request = new XMLHttpRequest();
+ request.onreadystatechange = function() {
+ if (request.readyState == 4 && request.status == 200) {
+ callback(request.responseText);
+ }
+ };
+
+ request.onprogress = function(e) {
+ if(!progress) return;
+ if (e.lengthComputable) {
+ progress.value = (e.loaded / e.total) * 100;
+ progress.textContent = progress.value; // Fallback for unsupported browsers.
+ }
+
+ if(progress.value == 100){
+ progress.value = 0;
+ }
+ };
+ request.open('POST', url);
+ request.send(data);
+ }
+
+ window.onbeforeunload = function() {
+ if (!!fileName) {
+ deleteAudioVideoFiles();
+ return 'It seems that you\'ve not deleted audio/video files from the server.';
+ }
+ };
+
+
+
+
+ Feedback
+
+ How to save recorded wav/webm file to PHP server?
+
+
+ PHP Code
-// PHP code
<?php
foreach(array('video', 'audio') as $type) {
if (isset($_FILES["${type}-blob"])) {
@@ -314,8 +280,11 @@
+
}
}
?>
-
-// Javascript code
+
Javascript Code
+
var fileType = 'video'; // or "audio"
var fileName = 'ABCDEF.webm'; // or "wav"
@@ -337,30 +306,120 @@
request.open('POST', url);
request.send(data);
}
+
+ How to use RecordRTC?
+
+<script src="https://www.webrtc-experiment.com/RecordRTC.js"></script>
+
+
+ How to record audio using RecordRTC?
+
+var recordRTC = RecordRTC(mediaStream);
+recordRTC.startRecording();
+recordRTC.stopRecording(function(audioURL) {
+ window.open(audioURL);
+});
+
+
+
+ How to record video using RecordRTC?
+
+var options = {
+ type: 'video',
+ video: {
+ width: 320,
+ height: 240
+ },
+ canvas: {
+ width: 320,
+ height: 240
+ }
+};
+var recordRTC = RecordRTC(mediaStream, options);
+recordRTC.startRecording();
+recordRTC.stopRecording(function(videoURL) {
+ window.open(videoURL);
+});
-
- RecordRTC is MIT licensed on Github! Documentation
-
-
-
- Feedback
-
-
+ How to record animated GIF using RecordRTC?
+
+var options = {
+ type: 'gif',
+ video: {
+ width: 320,
+ height: 240
+ },
+ canvas: {
+ width: 320,
+ height: 240
+ },
+ frameRate: 200,
+ quality: 10
+};
+var recordRTC = RecordRTC(mediaStream, options);
+recordRTC.startRecording();
+recordRTC.stopRecording(function(gifURL) {
+ window.open(gifURL);
+});
+
+
+ Possible
+ issues/failures:
+
+
+            Did you know "RecordRTC" fails to record audio when any of the following conditions fails:
+
+
+
RecordRTC is MIT licensed on Github! Documentation