diff --git a/core/rfb.js b/core/rfb.js
index f35d503f1..c5f3cd7cb 100644
--- a/core/rfb.js
+++ b/core/rfb.js
@@ -13,6 +13,7 @@ import { encodeUTF8, decodeUTF8 } from './util/strings.js';
 import { dragThreshold } from './util/browser.js';
 import { clientToElement } from './util/element.js';
 import { setCapture } from './util/events.js';
+import AudioBuffer from './util/audio.js';
 import EventTargetMixin from './util/eventtarget.js';
 import Display from "./display.js";
 import Inflator from "./inflator.js";
@@ -499,6 +500,15 @@ export default class RFB extends EventTargetMixin {
         }
     }
 
+    // Ask the server to start streaming audio in the given format
+    // (sampleFormat is one of RFB.sampleFormats).
+    enableAudio(sampleFormat, channels, frequency) {
+        RFB.messages.SetQEMUExtendedAudioFormat(this._sock, sampleFormat, channels, frequency);
+        RFB.messages.ToggleQEMUExtendedAudio(this._sock, true);
+    }
+
+    disableAudio() {
+        RFB.messages.ToggleQEMUExtendedAudio(this._sock, false);
+    }
+
     // ===== PRIVATE METHODS =====
 
     _connect() {
@@ -2039,6 +2049,27 @@ export default class RFB extends EventTargetMixin {
         return true;
     }
 
+    _handleQEMUExtAudioMsg() {
+        if (this._sock.rQwait("QEMU extended audio message", 3, 1)) { return false; }
+
+        this._sock.rQshift8(); // for now there is only a single submessage type 1
+        const operation = this._sock.rQshift16();
+
+        if (operation === 1) { // stream is starting
+            this._audioBuffer = new AudioBuffer('audio/webm; codecs="opus"'); // TODO: This is obviously not the right value to use here
+        } else if (operation === 0) { // stream is stopping
+            // Guard: the server could send a stop without a prior start
+            if (this._audioBuffer) { this._audioBuffer.close(); }
+        } else { // stream data
+            // Wait for the full payload before consuming it; goback values
+            // rewind past the bytes already shifted (1 msg-type + 3 header,
+            // then + 4 length) so the message is re-parsed when complete.
+            if (this._sock.rQwait("QEMU audio data length", 4, 4)) { return false; }
+            const length = this._sock.rQshift32();
+            if (this._sock.rQwait("QEMU audio data", length, 8)) { return false; }
+            const data = this._sock.rQshiftBytes(length);
+            this._audioBuffer.queueAudio(data);
+        }
+
+        return true;
+    }
+
     _handleXvpMsg() {
         if (this._sock.rQwait("XVP version and message", 3, 1)) { return false; }
         this._sock.rQskipBytes(1);  // Padding
@@ -2113,6 +2144,9 @@ export default class RFB extends EventTargetMixin {
             case 250:  // XVP
                 return this._handleXvpMsg();
 
+            case 255:  // QEMU extended audio message
+                return this._handleQEMUExtAudioMsg();
+
             default:
                 this._fail("Unexpected server message (type " + msgType + ")");
                 Log.Debug("sock.rQslice(0, 30): " + this._sock.rQslice(0, 30));
@@ -2549,6 +2583,16 @@ export default class RFB extends EventTargetMixin {
     }
 }
 
+// Audio sample formats
+RFB.sampleFormats = {
+    U8: 0,
+    S8: 1,
+    U16: 2,
+    S16: 3,
+    U32: 4,
+    S32: 5
+};
+
 // Class Methods
 RFB.messages = {
     keyEvent(sock, keysym, down) {
@@ -2570,6 +2614,48 @@ RFB.messages = {
         sock.flush();
     },
 
+    ToggleQEMUExtendedAudio(sock, enabled) {
+        const buff = sock._sQ;
+        const offset = sock._sQlen;
+
+        buff[offset] = 255; // msg-type
+        buff[offset + 1] = 1; // sub msg-type
+
+        buff[offset + 2] = 0; // operation (U16, big endian): 0 = enable, 1 = disable
+        if (enabled) {
+            buff[offset + 3] = 0;
+        } else {
+            buff[offset + 3] = 1;
+        }
+
+        sock._sQlen += 4;
+        sock.flush();
+    },
+
+    SetQEMUExtendedAudioFormat(sock, sampleFormat, channels, frequency) {
+        const buff = sock._sQ;
+        const offset = sock._sQlen;
+
+        buff[offset] = 255; // msg type
+        buff[offset + 1] = 1; // sub msg-type
+
+        buff[offset + 2] = 0; // operation (U16, big endian): 2 = set sample format
+        buff[offset + 3] = 2;
+
+        buff[offset + 4] = sampleFormat;
+        buff[offset + 5] = channels;
+
+        const freq = toUnsigned32bit(frequency);
+
+        buff[offset + 6] = freq >> 24;
+        buff[offset + 7] = freq >> 16;
+        buff[offset + 8] = freq >> 8;
+        buff[offset + 9] = freq;
+
+        sock._sQlen += 10;
+        sock.flush();
+    },
+
     QEMUExtendedKeyEvent(sock, keysym, down, keycode) {
         function getRFBkeycode(xtScanCode) {
             const upperByte = (keycode >> 8);
diff --git a/core/util/audio.js b/core/util/audio.js
new file mode 100644
index 000000000..85a8a7a5a
--- /dev/null
+++ b/core/util/audio.js
@@ -0,0 +1,48 @@
+/*
+ * noVNC: HTML5 VNC client
+ * Copyright (C) 2020 The noVNC Authors
+ * Licensed under MPL 2.0 (see LICENSE.txt)
+ *
+ * See README.md for usage and integration instructions.
+ */
+
+export default class AudioBuffer {
+    constructor(codec) {
+        this._codec = codec;
+        // instantiate a media source and audio buffer/queue
+        this._mediaSource = new MediaSource();
+        this._audioBuffer = null;
+        this._audioQ = [];
+
+        // create a hidden audio element
+        this._audio = document.createElement('audio');
+        this._audio.src = window.URL.createObjectURL(this._mediaSource);
+
+        // when data is queued, start playing; bind so the handler keeps
+        // this instance as `this` rather than the MediaSource event target
+        this._mediaSource.addEventListener('sourceopen', this._onSourceOpen.bind(this), false);
+    }
+
+    _onSourceOpen(e) {
+        this._audio.play();
+        this._audioBuffer = this._mediaSource.addSourceBuffer(this._codec);
+        this._audioBuffer.addEventListener('update', this._onUpdateBuffer.bind(this));
+    }
+
+    // Drain one queued chunk whenever the SourceBuffer finishes an append
+    _onUpdateBuffer() {
+        if (this._audioQ.length > 0 && !this._audioBuffer.updating) {
+            this._audioBuffer.appendBuffer(this._audioQ.shift());
+        }
+    }
+
+    queueAudio(data) {
+        if (this._audioBuffer !== null) {
+            if (this._audioBuffer.updating || this._audioQ.length > 0) {
+                this._audioQ.push(data);
+            } else {
+                this._audioBuffer.appendBuffer(data);
+            }
+        }
+    }
+
+    close() {} // intentionally left empty as no cleanup seems necessary
+}