Pushing changes

This commit is contained in:
2017-03-23 23:52:08 -05:00
parent 6075860b82
commit ac667ec74f
1465 changed files with 345149 additions and 3 deletions

114
node_modules/discordie/lib/voice/AudioDecoder.js generated vendored Normal file
View File

@ -0,0 +1,114 @@
"use strict";
const fork = require("child_process").fork;
const DecoderWorker = require("./threading/DecoderWorker");
const Utils = require("../core/Utils");

const defaultOptions = {
  multiThreadedVoice: false,
};

/**
 * Decodes incoming voice packets for a voice websocket connection.
 *
 * Decoding is delegated to a worker: either an in-process `DecoderWorker`
 * or, when `multiThreadedVoice` is enabled, a forked child process — both
 * expose the same `send` / `on("message")` interface.
 */
class AudioDecoder {
  /**
   * @param {Object} voicews - Voice websocket this decoder is bound to.
   * @param {Object} [options] - Worker options (e.g. `multiThreadedVoice`).
   */
  constructor(voicews, options) {
    this.options = {};
    if (!options) options = defaultOptions;
    this.voicews = voicews;
    // Consumer-assigned callback invoked with each decoded packet.
    this.onPacketDecoded = null;
    // Consumer-assigned callback invoked with each raw (undecoded) packet.
    this.onPacket = null;
    this.initialize(options);
  }
  // True when the underlying voice websocket can stream audio.
  get canStream() {
    return this.voicews && this.voicews.canStream;
  }
  // Tells the worker to drop decoding state held for the given user.
  destroyUser(userId) {
    if (this.disposed) return;
    this.worker.send({
      op: "destroyUser",
      userId: userId
    });
  }
  // Maps an RTP synchronization source id (ssrc) to a user id in the worker.
  assignUser(ssrc, userId) {
    if (this.disposed) return;
    this.worker.send({
      op: "assignUser",
      ssrc: ssrc,
      userId: userId
    });
  }
  /**
   * Queues a received voice packet for decoding.
   * Raw packets are forwarded to `onPacket` (as a shallow copy) regardless;
   * packets are only sent to the worker while `onPacketDecoded` is set.
   * @throws {Error} when a decoded-packet consumer exists but the worker
   *         has been disposed.
   */
  enqueue(packet) {
    if (typeof this.onPacket === "function") {
      const packetCopy = Object.assign({}, packet);
      this.onPacket(packetCopy);
    }
    if (!this.onPacketDecoded)
      return;
    if (this.disposed) {
      // a consumer is attached but there is no worker to decode with
      if (this.onPacketDecoded !== null)
        throw new Error("AudioDecoder is not initialized");
      return;
    }
    this.worker.send({
      op: "enqueue",
      packet: packet
    });
  }
  // True when no worker is attached (never initialized or killed).
  get disposed() {
    return this.worker == null;
  }
  /**
   * (Re)creates the worker and sends it the merged options.
   * If any option value differs from the current ones the old worker is
   * killed first. Always resets `onPacketDecoded` to null.
   * @param {Object} [options]
   */
  initialize(options) {
    if (!options) options = defaultOptions;
    // detect whether any option value changed since last initialize
    const hasChanges = Object.keys(options).reduce((r, k) => {
      return r || (this.options[k] != options[k])
    }, false);
    if (hasChanges) this.kill();
    const _defaultOptions = Object.assign({}, defaultOptions);
    this.options = Object.assign(_defaultOptions, options);
    if (this.disposed) {
      if (options.multiThreadedVoice) {
        // separate process; communicates over child-process IPC
        this.worker = fork(__dirname + "/threading/DecoderWorker");
      } else {
        // in-process worker exposing the same message interface
        this.worker = new DecoderWorker();
      }
      this.worker.on("message", (msg) => {
        switch (msg.op) {
          case "packet":
            if (!msg.packet) break;
            const packet = msg.packet;
            // chunks arrive as plain {data: [...]} objects when sent over
            // IPC; rebuild a Buffer from the raw byte array
            if (packet.chunk.data && packet.chunk.data.length > 0)
              packet.chunk = Utils.createBuffer(packet.chunk.data);
            if (!(packet.chunk instanceof Buffer))
              return;
            if (typeof this.onPacketDecoded === "function")
              this.onPacketDecoded(packet);
            break;
        }
      });
    }
    this.worker.send({
      op: "initialize",
      options
    });
    this.onPacketDecoded = null;
  }
  // Shuts down the worker; the decoder becomes disposed.
  kill() {
    if (this.disposed) return;
    this.worker.kill();
    this.worker = null;
  }
}

module.exports = AudioDecoder;

283
node_modules/discordie/lib/voice/AudioEncoder.js generated vendored Normal file
View File

@ -0,0 +1,283 @@
"use strict";
const fork = require("child_process").fork;
const EncoderWorker = require("./threading/EncoderWorker");
const AudioEncoderStream = require("./streams/AudioEncoderStream");
const Constants = require("../Constants");
// Feature-detect a usable `Buffer.from`; older runtimes fall back to the
// deprecated `new Buffer(...)` constructor (see AudioEncoder's opuspacket
// handling below).
var useBufferFrom = false;
try {
  Buffer.from([]);
  useBufferFrom = true;
} catch (e) { } //eslint-disable-line no-empty

const defaultOptions = {
  multiThreadedVoice: false,
};
/**
 * @class
 * @classdesc
 * Primary audio encoder class (low level).
 * All other encoders and streams abstract this class.
 */
class AudioEncoder {
  /**
   * @param {Object} voicews - Voice websocket this encoder is bound to.
   * @param {Object} [options] - See `initialize` for supported keys.
   */
  constructor(voicews, options) {
    this.options = {};
    this.voicews = voicews;
    /**
     * Override this function to be called when encoder needs data.
     *
     * > **Note:** This function **WILL NOT** be called if an
     * > `AudioEncoderStream` instance is piping data into the encoder on the
     * > same voice connection to avoid buffer interleaving.
     * @instance
     * @memberOf AudioEncoder
     * @name onNeedBuffer
     * @returns {Function|null}
     * @example
     * var bitDepth = 16;
     *
     * var options = {
     *   frameDuration: 60,
     *   sampleRate: 48000,
     *   channels: 2
     * };
     * var encoder = voiceConnection.getEncoder(options);
     *
     * var readSize =
     *   options.sampleRate / 1000 *
     *   options.frameDuration *
     *   bitDepth / 8 *
     *   channels;
     *
     * // [sampleRate] 48000 / 1000 *
     * // [frameDuration] 60 *
     * // [bitDepth] 16 / 8 *
     * // [channels] 2
     * // = 11520 bytes
     *
     * encoder.onNeedBuffer = function() {
     *   var chunk = reader.read(readSize);
     *
     *   if (!chunk) return;
     *   // return will stop onNeedBuffer calls until you call .enqueue again
     *
     *   var sampleCount = options.sampleRate / 1000 * options.frameDuration;
     *
     *   // [sampleRate] 48000 / 1000 *
     *   // [frameDuration] 60
     *   // = 2880 samples
     *
     *   encoder.enqueue(chunk, sampleCount);
     * };
     * encoder.onNeedBuffer();
     */
    this.onNeedBuffer = null;
    // writable-stream facade over this encoder
    this._stream = new AudioEncoderStream(this);
    this.initialize(options);
  }
  // True when the underlying voice websocket can stream audio.
  get canStream() {
    return this.voicews && this.voicews.canStream;
  }
  /**
   * Checks worker state: returns true if not initialized.
   * @returns {boolean}
   */
  get disposed() {
    return this.worker == null;
  }
  /**
   * Initializes worker object.
   * @param {Object} options
   * @example
   * var options = {};
   *
   * // frame size in milliseconds, forced range from 20 to 60
   * // (frame sizes below 20ms are unsupported)
   * options.frameDuration = 60; // < default
   * // input sample rate,
   * // anything other than 48000 will be resampled by the library
   * // to match Discord output sample rate
   * options.sampleRate = 48000; // < default
   * options.sampleRate = 44100; // will be resampled to 48000
   * // number of channels, only mono (1) or stereo (2)
   * options.channels = 1; // < default
   * // read as 32-bit floating point audio
   * options.float = false;
   *
   * // downmix to mono audio
   * options.downmix = "average";
   * // encode multichannel audio
   * options.downmix = false; // < default
   *
   * // use emscripten-compiled opus codec
   * options.engine = "internal"; // < default
   * // try to use native `node-opus` module,
   * // must be installed in discordie/node_modules
   * options.engine = "native";
   *
   * // proxy mode, passes packets straight to the muxer
   * // packets should be opus encoded already
   * // in this mode it only works as scheduler for packets
   * options.proxy = false; // < default
   * options.proxy = true;
   *
   * // OPUS_AUTO -1000
   * // OPUS_BITRATE_MAX -1
   * options.bitrate = -1000; // < default
   * // encode 64kbps audio
   * options.bitrate = 64000;
   * // encode 128kbps audio - max bitrate for Discord servers, do not exceed
   * options.bitrate = 128000;
   *
   * encoder.initialize(options);
   */
  initialize(options) {
    if (!options) options = defaultOptions;
    // recreate the worker if any option value changed
    const hasChanges = Object.keys(options).reduce((r, k) => {
      return r || (this.options[k] != options[k])
    }, false);
    if (hasChanges) this.kill();
    const _defaultOptions = Object.assign({}, defaultOptions);
    this.options = Object.assign(_defaultOptions, options);
    if (this.disposed) {
      if (options.multiThreadedVoice) {
        // separate process; communicates over child-process IPC
        this.worker = fork(__dirname + "/threading/EncoderWorker");
      } else {
        // in-process worker exposing the same message interface
        this.worker = new EncoderWorker();
      }
      this.worker.on("message", (msg) => {
        switch (msg.op) {
          case "needbuffer":
            // while a stream is piping data, suppress onNeedBuffer so two
            // producers don't interleave their buffers
            var streamMode = false;
            if (this._stream && this._stream._needBuffer()) {
              streamMode = true;
            }
            if (!streamMode && typeof this.onNeedBuffer === "function")
              this.onNeedBuffer();
            break;
          case "opuspacket":
            if (this.voicews.connected && !this.voicews.audioTransportSocket)
              throw new Error("No transport");
            if (!this.voicews.connected) break;
            if (!msg.packet) break;
            let packetData = msg.packet.data;
            if (msg.packet instanceof Buffer)
              packetData = msg.packet;
            if (Array.isArray(packetData)) { // received from separate process
              packetData =
                useBufferFrom ? Buffer.from(packetData) : new Buffer(packetData);
            }
            this.voicews.audioTransportSocket
              .send(packetData, msg.sampleCount);
            break;
        }
      });
    }
    this.worker.send({
      op: "initialize",
      options
    });
    // let a piping stream know the (re)initialized worker wants data
    if (this._stream) this._stream._needBuffer();
    this.onNeedBuffer = null;
  }
  /**
   * Sets volume.
   * Does not apply in proxy mode.
   * @param {Number} volume - Number range from 0 to 100
   */
  setVolume(volume) {
    if (this.disposed) return;
    this.worker.send({
      op: "set",
      key: "volume",
      value: volume
    });
  }
  /**
   * Sets bitrate.
   * Does not apply in proxy mode.
   * @param {Number} bitrate - Number range from 8000 to 512000
   */
  setBitrate(bitrate) {
    if (this.disposed) return;
    this.worker.send({
      op: "setBitrate",
      value: bitrate
    });
  }
  /**
   * Enqueues audio (PCM or Opus packet (proxy mode), depending on options)
   * to the queue buffer.
   * @param {Buffer} chunk
   * @param {Number} sampleCount - Number of samples per channel
   */
  enqueue(chunk, sampleCount) {
    if (this.disposed) return;
    this.worker.send({
      op: "enqueue",
      frame: {
        chunk: chunk,
        sampleCount: sampleCount
      }
    });
  }
  /**
   * Enqueues an array of audio chunks (PCM or Opus packet (proxy mode),
   * depending on options) to the queue buffer.
   *
   * All chunks should have the same `sampleCount`.
   * @param {Array<Buffer>} chunks
   * @param {Number} sampleCount - Number of samples per channel
   */
  enqueueMultiple(chunks, sampleCount) {
    if (this.disposed) return;
    this.worker.send({
      op: "enqueueMultiple",
      frames: chunks.map(chunk => { return {chunk, sampleCount}; })
    });
  }
  /**
   * Clears audio queue.
   */
  clearQueue() {
    if (this.disposed) return;
    this.worker.send({ op: "clearQueue" });
  }
  /**
   * Shuts down the worker.
   */
  kill() {
    if (this.disposed) return;
    // detach any piping streams before tearing the worker down
    if (this._stream) this._stream.unpipeAll();
    this.worker.kill();
    this.worker = null;
  }
}

module.exports = AudioEncoder;

66
node_modules/discordie/lib/voice/AudioResampler.js generated vendored Normal file
View File

@ -0,0 +1,66 @@
"use strict";
// Blends the two neighbouring samples proportionally to the fractional
// distance of `pos` past `base` (linear interpolation).
function interpLinear(pos, base, a, b) {
  var frac = pos - base;
  return a * (1 - frac) + b * frac;
}

// Picks whichever neighbouring sample is closest to `pos`.
function interpNearest(pos, base, a, b) {
  return (pos - base) < 0.5 ? a : b;
}

/**
 * Naive audio resampler for interleaved Float32Array sample buffers.
 * Supports "linear" (default) and "nearest" interpolation.
 */
class AudioResampler {
  /**
   * @param {Number} channels - Number of interleaved channels (> 0).
   * @param {Number} sourceRate - Input sample rate.
   * @param {Number} targetRate - Output sample rate.
   * @param {String} [interpolation] - "linear" (default) or "nearest".
   * @throws {TypeError} on invalid channel count.
   * @throws {Error} on unknown interpolation type.
   */
  constructor(channels, sourceRate, targetRate, interpolation) {
    if (channels <= 0) throw new TypeError("Invalid channel count");
    this.channels = channels;
    this.sourceRate = sourceRate;
    this.targetRate = targetRate;
    this.interpolation = interpolation || "linear";
    const interpolators = {
      linear: interpLinear,
      nearest: interpNearest,
    };
    const chosen = interpolators[this.interpolation];
    if (!chosen) throw new Error("Unknown interpolation type");
    this.interp = chosen;
  }
  /**
   * Resamples `buffer` (interleaved Float32Array) from sourceRate to
   * targetRate and returns a new Float32Array.
   */
  process(buffer) {
    const step = this.sourceRate / this.targetRate;
    const output = new Float32Array(buffer.length / step);
    const total = buffer.length;
    const interp = this.interp;
    const channels = this.channels;
    if (channels == 1) {
      // mono: walk the source at `step` increments
      let write = 0;
      for (let pos = 0; pos < total; pos += step) {
        const lo = Math.floor(pos);
        let hi = lo + 1;
        while (hi >= buffer.length) hi--;
        output[write++] = interp(pos, lo, buffer[lo], buffer[hi]);
      }
    } else {
      // interleaved multichannel: resample each channel independently
      const perChannel = total / channels;
      for (let ch = 0; ch < channels; ch++) {
        let write = 0;
        for (let pos = 0; pos < perChannel; pos += step) {
          const base = Math.floor(pos);
          let lo = base * channels + ch;
          let hi = lo + channels;
          while (lo >= buffer.length) lo -= channels;
          while (hi >= buffer.length) hi -= channels;
          output[write++ * channels + ch] =
            interp(pos, base, buffer[lo], buffer[hi]);
        }
      }
    }
    return output;
  }
  destroy() {}
}
module.exports = AudioResampler;

View File

@ -0,0 +1,122 @@
"use strict";
const fs = require("fs");
const path = require("path");
const spawn = require("child_process").spawn;
const EventEmitter = require("events").EventEmitter;
const Utils = require("../../core/Utils");

// Binary search path: current directory first, then every entry of PATH.
const ENVPATH = ["."].concat((process.env.PATH || "").split(path.delimiter));

/**
 * Base class for encoders backed by an external process or stream.
 * Subclasses must implement `pipe(dest)` and `unpipe(dest)`.
 */
class ExternalEncoderBase extends EventEmitter {
  /**
   * @param {IVoiceConnection} voiceConnection - Connection this encoder
   *        is bound to.
   * @param {Object} options - `destroyOnUnpipe` (default true) controls
   *        whether `destroy()` is called automatically on unpipe.
   */
  constructor(voiceConnection, options) {
    super();
    this._voiceConnection = voiceConnection;
    this._encoderStream = null;
    this._destroyOnUnpipe = options.destroyOnUnpipe !== false;
    this._handle = null;
    this._disposed = false;
  }
  /**
   * Spawns `executable` (also tried with an ".exe" suffix) from the first
   * ENVPATH directory that contains it; returns null when not found.
   */
  _createProcess(executable, args, options) {
    var binaries = [executable, executable + ".exe"];
    for (var name of binaries) {
      for (var dir of ENVPATH) {
        var binary = dir + path.sep + name;
        if (!Utils.fileExists(binary)) continue;
        return spawn(name, args, options);
      }
    }
    return null;
  }
  // Stores the process handle and destroys this instance when it exits.
  _setHandle(handle, onExit) {
    this._handle = handle;
    this._handle.on("exit", code => {
      if (typeof onExit === "function") onExit(code);
      this.destroy();
    });
  }
  /**
   * Gets the voice connection this instance is bound to.
   * @returns {IVoiceConnection}
   * @readonly
   */
  get voiceConnection() { return this._voiceConnection; }
  /**
   * Connects pipe into AudioEncoderStream of the bound voice connection.
   *
   * This function handles automatic unpiping and kills process.
   * The stream will become disposed and no longer playable after
   * calling `unpipe()` or `stop()`.
   *
   * Use `pipe()` method if you want to control the process manually
   * and `destroy()` it later.
   * @returns {AudioEncoderStream}
   */
  play(encoderOptions) {
    if (this._disposed) throw new Error("Unable to play disposed stream");
    // already playing, ignore request
    if (this._encoderStream) return this._encoderStream;
    encoderOptions = encoderOptions || {proxy: true};
    const encoder = this._voiceConnection.getEncoderStream(encoderOptions);
    if (!encoder) return null;
    this.pipe(encoder);
    encoder.once("unpipe", () => {
      if (!this._destroyOnUnpipe) return;
      this.emit("unpipe");
      this.destroy();
      this._encoderStream = null;
    });
    this._encoderStream = encoder;
    return encoder;
  }
  /**
   * Unpipes internal stream from the bound voice connection.
   */
  stop() {
    this.unpipe(this._encoderStream);
  }
  /**
   * Pipes stream into destination.
   *
   * > **Note:** In case of manual piping you have to invoke
   * > `IVoiceConnection.getEncoderStream` with the correct settings yourself.
   * > For proxy (externally encoding) streams only `{proxy: true}` is required.
   * @param {WritableStream} dest
   */
  pipe(dest) { throw new Error("Not implemented"); }
  /**
   * Unpipes stream from destination.
   * @param {WritableStream} [dest]
   */
  unpipe(dest) { throw new Error("Not implemented"); }
  /**
   * Destroys all handles, releases resources and disposes this instance.
   */
  destroy() {
    this._disposed = true;
    if (!this._handle) return;
    this._handle.kill();
    this._handle = null;
  }
}

module.exports = ExternalEncoderBase;

View File

@ -0,0 +1,25 @@
"use strict";
const FFmpegEncoder = require("./FFmpegEncoder");
const OggOpusPlayer = require("./OggOpusPlayer");
const WebmOpusPlayer = require("./WebmOpusPlayer");
module.exports = {
create(voiceConnection, options) {
options = options || {};
const type = options.type || "ffmpeg";
if (type === "ffmpeg" || type === "avconv") {
return new FFmpegEncoder(voiceConnection, options);
}
if (type.toLowerCase() === "OggOpusPlayer".toLowerCase()) {
return new OggOpusPlayer(voiceConnection, options);
}
if (type.toLowerCase() === "WebmOpusPlayer".toLowerCase()) {
return new WebmOpusPlayer(voiceConnection, options);
}
throw new Error(`Invalid type '${options.type}'`);
}
};

View File

@ -0,0 +1,246 @@
"use strict";
const ExternalEncoderBase = require("./ExternalEncoderBase");
const EBMLDecoder = require("./demuxers/EBMLDecoder");
const WebmOpusDemuxer = require("./demuxers/WebmOpusDemuxer");
// Forwards FFmpeg's stderr output to this process's stdout, tagged so the
// origin of the log lines is obvious.
function stderrDataListener(data) {
  const line = "[FFmpeg] " + data;
  process.stdout.write(line);
}
/**
* @class
* @extends ExternalEncoderBase
* @classdesc
* Simple FFmpeg wrapper that binds to a voice connection,
* encodes audio into opus
* (by default encodes using external process, not inside node.js).
*
* Requires `ffmpeg` or `avconv` installed and in PATH or current directory.
*
* > Please note that FFmpeg **must be compiled with `libopus` support**
* > and whatever other audio codecs you intend to use.
*
* ```js
* var info = client.VoiceConnections[0];
* if (!info) return console.log("Voice not connected");
*
* var encoder = info.voiceConnection.createExternalEncoder({
* type: "ffmpeg",
*
* // any source FFmpeg can read (http, rtmp, etc.) (FFmpeg option '-i');
* // (with "-" source pipe data into `encoder.stdin`)
* source: "test.mp3",
*
* // "opus" or "pcm", in "opus" mode AudioEncoder.setVolume won't work
* // - "opus" - encode audio using FFmpeg only and let node just stream opus
* // - "pcm" - request pcm data from FFmpeg and encode inside node.js
* format: "opus", // < default
*
* // "pcm" mode option
* frameDuration: 60, // < default
*
* // optional array of additional arguments (applied to input stream)
* inputArgs: [],
*
* // optional array of additional arguments (applied to output stream)
* // (this volume parameter is passed into FFmpeg and applied for both
* // "pcm" and "opus" formats, but can't be changed dynamically)
* outputArgs: ["-af", "volume=0.05"],
*
* // optional, 'true' redirects FFmpeg's stderr into console
* // and starts with "-loglevel warning"
* debug: false
* });
* if (!encoder) return console.log("Voice connection is disposed");
*
* encoder.once("end", () => console.log("stream end"));
*
* var encoderStream = encoder.play();
* encoderStream.resetTimestamp();
* encoderStream.removeAllListeners("timestamp");
* encoderStream.on("timestamp", time => console.log("Time " + time));
* ```
*
* > **Note:** Since so many users prefer the `ytdl-core` package over
* > `youtube-dl` and use it incorrectly, this is probably the best way to
* > work with it (filter formats from `getInfo` and pass url to FFmpeg):
*
* ```js
* function playRemote(remote, info) {
* function onMediaInfo(err, mediaInfo) {
* if (err) return console.log("ytdl error:", err);
* // sort by bitrate, high to low; prefer webm over anything else
* var formats = mediaInfo.formats.filter(f => f.container === "webm")
* .sort((a, b) => b.audioBitrate - a.audioBitrate);
*
* // get first audio-only format or fallback to non-dash video
* var bestaudio = formats.find(f => f.audioBitrate > 0 && !f.bitrate) ||
* formats.find(f => f.audioBitrate > 0);
* if (!bestaudio) return console.log("[playRemote] No valid formats");
*
* if (!info) info = client.VoiceConnections[0];
* if (!info) return console.log("[playRemote] Voice not connected");
* // note that in this case FFmpeg must also be compiled with HTTPS support
* var encoder = info.voiceConnection.createExternalEncoder({
* type: "ffmpeg", source: bestaudio.url
* });
* encoder.play();
* }
* try {
* ytdl.getInfo(remote, onMediaInfo);
* } catch (e) { console.log("ytdl threw:", e); }
* }
* ```
*
* #### Events:
*
* - ** Event: `end` **
*
* Emitted when stream done playing.
*
* - ** Event: `unpipe` **
*
* Emitted when stream gets unpiped from `AudioEncoderStream`.
* Proxies `AudioEncoderStream` unpipe event, fires only when
* using `play()` method.
* If you create file or http streams make sure descriptors get destroyed on
* unpiping.
*/
class FFmpegEncoder extends ExternalEncoderBase {
  /**
   * @param {IVoiceConnection} voiceConnection
   * @param {Object} options - See class docs; notable keys: `source`,
   *        `format` ("opus"/"pcm"), `frameDuration`, `inputArgs`,
   *        `outputArgs`, `debug`.
   * @throws {Error} when neither ffmpeg nor avconv can be spawned.
   */
  constructor(voiceConnection, options) {
    super(voiceConnection, options);
    this._format = options.format || "opus";
    this._frameDuration = options.frameDuration || 60;
    this._debug = options.debug || false;
    const args = this._getArgs(options);
    // prefer ffmpeg, fall back to avconv
    const handle =
      this._createProcess("ffmpeg", args) ||
      this._createProcess("avconv", args);
    if (!handle) {
      throw new Error(
        "Unable to spawn 'ffmpeg' or 'avconv', neither of them seems " +
        "to be in PATH or current folder"
      );
    }
    const stdin = handle.stdin;
    stdin.on("error", err => {
      // EOF/EPIPE are expected when the process exits mid-write
      if (err.code === "EOF" || err.code == "EPIPE") return;
      if (stdin.listenerCount("error") !== 1) return;
      // throw only if there are no other stdin error handlers
      console.error("FFmpegEncoder stdin: " + err);
      throw err;
    });
    if (this._format === "pcm") {
      // pcm mode: raw samples come straight out of ffmpeg's stdout
      this._stream = handle.stdout;
    } else {
      // opus mode: ffmpeg outputs webm; demux opus packets out of it
      this._ebmld = new EBMLDecoder();
      this._stream = new WebmOpusDemuxer();
      handle.stdout.pipe(this._ebmld).pipe(this._stream);
    }
    this._stream.on("end", () => {
      if (!this._stream._readableState.pipesCount) return;
      // don't emit 'end' after 'unpipe'
      this.emit("end");
    });
    if (this._debug) handle.stderr.on("data", stderrDataListener);
    this._setHandle(handle, this._onExit.bind(this));
  }
  // Handles external process exit; logs unexpected codes in debug mode.
  _onExit(code) {
    // nodejs bug since v5.11.0 (backported to 4.4.5+)
    // workaround for ffmpeg stdout not closing/ending if piped stream is slow
    if (this._handle && this._handle.stdout) {
      this._handle.stdout._readableState.awaitDrain = 0;
    }
    if (!code || code === 255 || code === 123) return;
    // 0 -> normal exit
    // 255 -> exit on interrupt
    // 123 -> hard exit
    if (!this._debug) return;
    console.error(
      this.constructor.name +
      ": external encoder exited with code " + code
    );
  }
  // Builds the ffmpeg/avconv argument list from constructor options.
  _getArgs(options) {
    if (!options.source) {
      console.warn(
        "Warning: options.source is not defined, using stdin as input"
      );
    }
    const source = options.source || "-";
    const inputArgs = options.inputArgs || [];
    const outputArgs = options.outputArgs || [];
    const args = [
      options.realtime ? "-re" : null,
      "-analyzeduration", "0",
      "-loglevel", this._debug ? "warning" : "0",
      "-i", source,
      "-vn"
    ];
    const pcmArgs = [
      "-f", "s16le", "-ar", 48000, "-ac", 2, "-"
    ];
    const opusArgs = [
      "-c:a", "libopus", "-frame_duration", "60", "-f", "webm", "-"
    ];
    const formatArgs = this._format === "pcm" ? pcmArgs : opusArgs;
    // the final filter drops null placeholders (e.g. an unused "-re")
    return inputArgs
      .concat(args)
      .concat(outputArgs)
      .concat(formatArgs)
      .filter(v => v);
  }
  /**
   * Gets stdin.
   *
   * @returns {WritableStream|null}
   * @readonly
   */
  get stdin() { return (this._handle && this._handle.stdin) || null; }
  // Pipes into the voice connection's encoder stream using options that
  // match the selected output format (pcm is re-encoded, opus is proxied).
  play() {
    const pcmOptions = {
      proxy: false,
      sampleRate: 48000,
      channels: 2,
      frameDuration: this._frameDuration
    };
    const options = this._format === "pcm" ? pcmOptions : {proxy: true};
    return super.play(options);
  }
  // Pipes demuxed/pcm output into `dest`.
  pipe(dest) {
    this._stream.pipe(dest);
  }
  // Disconnects `dest` (or all destinations when omitted).
  unpipe(dest) {
    this._stream.unpipe(dest);
  }
  // Terminates the external process (escalating to SIGKILL) and disposes.
  destroy() {
    if (this._handle && this._handle.exitCode === null) {
      const handle = this._handle;
      // send an extra SIGTERM to interrupt transcoding
      handle.kill();
      // kill with SIGKILL if it still hasn't exited
      setTimeout(() => handle.kill("SIGKILL"), 5000);
    }
    super.destroy(); // send SIGTERM and dispose instance
  }
}

module.exports = FFmpegEncoder;

View File

@ -0,0 +1,83 @@
"use strict";
const ExternalEncoderBase = require("./ExternalEncoderBase");
const OggOpusDemuxer = require("./demuxers/OggOpusDemuxer");
/**
* @class
* @extends ExternalEncoderBase
* @classdesc
* Simple wrapper for ogg opus streams. Streams audio on the fly without
* decoding.
*
* ```js
* var info = client.VoiceConnections[0];
* if (!info) return console.log("Voice not connected");
*
* var source = fs.createReadStream("test.opus");
* var encoder = info.voiceConnection.createExternalEncoder({
* type: "OggOpusPlayer",
* source: source
* });
* if (!encoder) return console.log("Voice connection is disposed");
*
* encoder.once("end", () => console.log("stream end"));
* encoder.once("error", err => console.log("Ogg Error", err));
*
* var encoderStream = encoder.play();
* encoderStream.once("unpipe", () => source.destroy()); // close descriptor
*
* encoderStream.resetTimestamp();
* encoderStream.removeAllListeners("timestamp");
* encoderStream.on("timestamp", time => console.log("Time " + time));
* ```
*
* #### Events:
*
* - ** Event: `end` **
*
* Emitted when stream done playing.
*
* - ** Event: `unpipe` **
*
* Emitted when stream gets unpiped from `AudioEncoderStream`.
* Proxies `AudioEncoderStream` unpipe event, fires only when
* using `play()` method.
* If you create file or http streams make sure descriptors get destroyed on
* unpiping.
*
* - ** Event: `error` **
*
* Emitted if an error occurred while demuxing. The stream will unpipe
* itself on this event.
*/
class OggOpusPlayer extends ExternalEncoderBase {
  /**
   * Wraps a readable Ogg Opus stream (`options.source`) and streams its
   * opus packets into the bound voice connection without decoding.
   * @param {IVoiceConnection} voiceConnection
   * @param {Object} options - Requires a pipeable `source` stream.
   * @throws {TypeError} when `options.source` is not a readable stream.
   */
  constructor(voiceConnection, options) {
    super(voiceConnection, options);
    const source = options.source;
    if (!source || typeof source.pipe !== "function") {
      throw new TypeError(`Invalid source '${options.source}'`);
    }
    const demuxer = new OggOpusDemuxer();
    this._stream = demuxer;
    demuxer.on("error", error => {
      this.emit("error", error);
      this.unpipe();
    });
    demuxer.on("end", () => {
      // suppress 'end' when the stream is no longer piped anywhere
      if (!demuxer._readableState.pipesCount) return;
      this.emit("end");
    });
    source.pipe(demuxer);
  }
  /** Pipes demuxed opus packets into `dest`. */
  pipe(dest) {
    this._stream.pipe(dest);
  }
  /** Disconnects `dest` (or every destination when omitted). */
  unpipe(dest) {
    this._stream.unpipe(dest);
  }
}
module.exports = OggOpusPlayer;

View File

@ -0,0 +1,86 @@
"use strict";
const ExternalEncoderBase = require("./ExternalEncoderBase");
const EBMLDecoder = require("./demuxers/EBMLDecoder");
const WebmOpusDemuxer = require("./demuxers/WebmOpusDemuxer");
/**
* @class
* @extends ExternalEncoderBase
* @classdesc
* Simple wrapper for webm opus streams. Streams audio on the fly without
* decoding.
*
* ```js
* var info = client.VoiceConnections[0];
* if (!info) return console.log("Voice not connected");
*
* var source = fs.createReadStream("test.webm");
* var encoder = info.voiceConnection.createExternalEncoder({
* type: "WebmOpusPlayer",
* source: source
* });
* if (!encoder) return console.log("Voice connection is disposed");
*
* encoder.once("end", () => console.log("stream end"));
* encoder.once("error", err => console.log("WebM Error", err));
*
* var encoderStream = encoder.play();
* encoderStream.once("unpipe", () => source.destroy()); // close descriptor
*
* encoderStream.resetTimestamp();
* encoderStream.removeAllListeners("timestamp");
* encoderStream.on("timestamp", time => console.log("Time " + time));
* ```
*
* #### Events:
*
* - ** Event: `end` **
*
* Emitted when stream done playing.
*
* - ** Event: `unpipe` **
*
* Emitted when stream gets unpiped from `AudioEncoderStream`.
* Proxies `AudioEncoderStream` unpipe event, fires only when
* using `play()` method.
* If you create file or http streams make sure descriptors get destroyed on
* unpiping.
*
* - ** Event: `error` **
*
* Emitted if an error occurred while demuxing. The stream will unpipe
* itself on this event.
*/
class WebmOpusPlayer extends ExternalEncoderBase {
  /**
   * Wraps a readable WebM Opus stream (`options.source`) and streams its
   * opus packets into the bound voice connection without decoding.
   * @param {IVoiceConnection} voiceConnection
   * @param {Object} options - Requires a pipeable `source` stream.
   * @throws {TypeError} when `options.source` is not a readable stream.
   */
  constructor(voiceConnection, options) {
    super(voiceConnection, options);
    const source = options.source;
    if (!source || typeof source.pipe !== "function") {
      throw new TypeError(`Invalid source '${options.source}'`);
    }
    // EBML container parser feeding the opus demuxer
    const ebmlDecoder = new EBMLDecoder();
    const demuxer = new WebmOpusDemuxer();
    this._ebmld = ebmlDecoder;
    this._stream = demuxer;
    source.pipe(ebmlDecoder).pipe(demuxer);
    demuxer.on("error", error => {
      this.emit("error", error);
      this.unpipe();
    });
    demuxer.on("end", () => {
      // suppress 'end' when the stream is no longer piped anywhere
      if (!demuxer._readableState.pipesCount) return;
      this.emit("end");
    });
  }
  /** Pipes demuxed opus packets into `dest`. */
  pipe(dest) {
    this._stream.pipe(dest);
  }
  /** Disconnects `dest` (or every destination when omitted). */
  unpipe(dest) {
    this._stream.unpipe(dest);
  }
}
module.exports = WebmOpusPlayer;

View File

@ -0,0 +1,46 @@
"use strict";
/**
 * Minimal sequential reader over a Buffer.
 * Every read method returns `null` when the requested amount of data is
 * not available; the read position only advances on successful reads.
 */
class BufferStream {
  constructor(buffer) {
    this._buffer = buffer;
    this._offset = 0;
  }
  /** True once the read position has reached the end of the buffer. */
  get ended() { return this._offset >= this._buffer.length; }
  /** Number of unread bytes remaining. */
  get available() { return this._buffer.length - this._offset; }
  /** Advances past `bytes` bytes (no-op when not enough data remains). */
  skip(bytes) { this.read(bytes); }
  /**
   * Reads `bytes` bytes and returns them as a Buffer slice, or `null`
   * when `bytes` is not positive or not enough data remains.
   */
  read(bytes) {
    if (bytes <= 0 || this.ended) return null;
    const start = this._offset;
    const end = start + bytes;
    if (end > this._buffer.length) return null;
    this._offset = end;
    return this._buffer.slice(start, end);
  }
  /** Reads a single byte as a number, or `null`. */
  readByte() {
    const byte = this.read(1);
    return byte ? byte[0] : null;
  }
  /** Reads `size` bytes as a utf8 string, or `null`. */
  readString(size) {
    const data = this.read(size);
    return data ? data.toString("utf8", 0) : null;
  }
  /** Reads a little-endian signed 32-bit integer, or `null`. */
  readInt32LE() {
    const data = this.read(4);
    if (!data) return null;
    return data.readInt32LE(0);
  }
  /** Reads a little-endian unsigned 32-bit integer, or `null`. */
  readUInt32LE() {
    const data = this.read(4);
    if (!data) return null;
    return data.readUInt32LE(0);
  }
  /** Reads a little-endian signed 16-bit integer, or `null`. */
  readInt16LE() {
    const data = this.read(2);
    if (!data) return null;
    return data.readInt16LE(0);
  }
  /** Reads a little-endian unsigned 16-bit integer, or `null`. */
  readUInt16LE() {
    const data = this.read(2);
    if (!data) return null;
    return data.readUInt16LE(0);
  }
  /** Reads an unsigned 8-bit integer, or `null`. */
  readUInt8() {
    const data = this.read(1);
    if (!data) return null;
    return data.readUInt8(0);
  }
}
module.exports = BufferStream;

View File

@ -0,0 +1,15 @@
"use strict";
const ebml = require('ebml');

// Monkey-patch ebml.Decoder's _transform to swallow parse exceptions
// (e.g. "Unrepresentable length") instead of crashing the pipeline:
// on error the readable side is ended cleanly.
ebml.Decoder.prototype.__transform = ebml.Decoder.prototype._transform;
ebml.Decoder.prototype._transform = function(chunk, encoding, done) {
  // catch "Unrepresentable length" errors
  try {
    this.__transform.apply(this, arguments);
  } catch (e) {
    // end the stream gracefully instead of propagating the error
    this.push(null);
    done();
  }
};

module.exports = ebml.Decoder;

View File

@ -0,0 +1,101 @@
"use strict";
// Ogg page header size in bytes up to (and excluding) the segment table;
// matches the fields read in readPage: 4+1+1+8+4+4+4 = 26.
const OGG_PAGE_HEADER_SIZE = 26;
const BufferStream = require("./BufferStream");
const Transform = require("stream").Transform;

/**
 * Transform stream that extracts raw opus packets from an Ogg Opus
 * container. "OpusHead" and "OpusTags" segments are stored and emitted
 * as events; every other segment is pushed downstream as an opus packet.
 */
class OggOpusDemuxer extends Transform {
  constructor() {
    super({
      writableObjectMode: true,
      readableObjectMode: true
    });
  }
  /**
   * Reads one page's segment table and segment payloads from `reader`.
   * Returns an array of segment Buffers, or `null` when the current
   * chunk does not yet contain the full page.
   */
  readSegments(reader) {
    var tableSize = reader.readByte();
    if (reader.available < tableSize) return null;
    var segmentSizes = [];
    for (var i = 0; i < tableSize; ) {
      var read = reader.readByte(); i++;
      var size = read;
      // Ogg lacing: a value of 255 means the segment continues
      // into the next lacing byte
      while (read === 255) {
        read = reader.readByte(); i++;
        size += read;
      }
      segmentSizes.push(size);
    }
    var dataSize = segmentSizes.reduce((c, n) => c + n, 0);
    if (reader.available < dataSize) return null;
    return segmentSizes.map(size => reader.read(size));
  }
  /**
   * Parses one Ogg page from `reader`.
   * Returns `true` on success, `false` when more data is needed
   * (caller retries with the next chunk) and an `Error` on malformed
   * input.
   */
  readPage(reader, done) {
    if (reader.available < OGG_PAGE_HEADER_SIZE) return false;
    var magic = reader.readString(4);
    if (magic !== "OggS")
      return new Error("OGG magic does not match");
    var version = reader.readByte();
    var headerType = reader.readByte();
    var isContinuation = headerType & (1 << 0);
    if (isContinuation)
      return new Error("OGG page continuation handling not implemented");
    var isBeginning = headerType & (1 << 1);
    var isEnd = headerType & (1 << 2);
    reader.skip(8); // granule position
    reader.skip(4); // stream serial number
    var pageSeq = reader.readInt32LE();
    reader.skip(4); // checksum
    var segments = this.readSegments(reader);
    if (segments == null) return false;
    if (segments.indexOf(null) >= 0) return false;
    var packets = [];
    for (var segment of segments) {
      var header = segment.toString("utf8", 0, 8);
      if (header === "OpusHead") {
        this._opusHeader = segment;
        this.emit("OpusHead", this._opusHeader);
      } else if (header === "OpusTags") {
        this._opusTags = segment;
        this.emit("OpusTags", this._opusTags);
      } else packets.push(segment);
    }
    // don't push audio packets until an OpusHead header has been seen
    if (!this._opusHeader) return true;
    packets.forEach(packet => this.push(packet));
    return true;
  }
  // Consumes incoming chunks, buffering incomplete pages across calls.
  _transform(chunk, encoding, done) {
    // prepend bytes left over from a previously incomplete page
    if (this._leftover) {
      chunk = Buffer.concat([this._leftover, chunk]);
      this._leftover = null;
    }
    var reader = new BufferStream(chunk);
    while (!reader.ended) {
      // save current position if page reading fails
      var offset = reader._offset;
      var ok = this.readPage(reader, done);
      if (ok instanceof Error) return done(ok, null);
      if (!ok) {
        // save remaining buffer
        this._leftover = chunk.slice(offset, chunk.length);
        break;
      }
    }
    done();
  }
}

module.exports = OggOpusDemuxer;

View File

@ -0,0 +1,105 @@
"use strict";
const BufferStream = require("./BufferStream");
const Transform = require("stream").Transform;

// Matroska/WebM TrackType values
const TRACKTYPE_VIDEO = 1;
const TRACKTYPE_AUDIO = 2;
const TRACKTYPE_COMPLEX = 3;

/**
 * Transform stream that extracts raw opus packets out of `[type, info]`
 * EBML chunks (as produced upstream by the ebml decoder). Validates the
 * "OpusHead" codec private data and emits "format" with channel count
 * and sample rate, then pushes SimpleBlock payloads of the first audio
 * track downstream.
 */
class WebmOpusDemuxer extends Transform {
  constructor() {
    super({
      writableObjectMode: true,
      readableObjectMode: true,
    });
    // becomes true once the first audio block has been pushed
    this.demuxingAudio = false;
    // TrackNumber -> parsed track entry
    this.tracks = new Map();
  }
  /**
   * Accumulates TrackEntry elements into `this.tracks` and, once the
   * Tracks element ends, selects the first audio track.
   * Calls `done` with an error when no audio track exists.
   */
  _parseTracks(type, info, done) {
    if (info.name == "TrackEntry") {
      if (type == "start") {
        this.parsingTrack = {};
      }
      if (type == "end") {
        if (this.parsingTrack.hasOwnProperty("TrackNumber")) {
          const id = this.parsingTrack.TrackNumber;
          this.tracks.set(id, this.parsingTrack);
        }
        delete this.parsingTrack;
      }
    }
    if (this.parsingTrack && info.name == "TrackNumber")
      this.parsingTrack.TrackNumber = info.data[0];
    if (this.parsingTrack && info.name == "CodecID")
      this.parsingTrack.CodecID = info.data.toString();
    if (this.parsingTrack && info.name == "TrackType")
      this.parsingTrack.TrackType = info.data[0];
    if (type == "end" && info.name == "Tracks") {
      // note: iteration keeps the LAST audio track found
      for (var track of this.tracks.values()) {
        if (track.TrackType != TRACKTYPE_AUDIO) continue;
        this.firstAudioTrack = track;
      }
      if (!this.firstAudioTrack) {
        return done(new Error("No audio track"), null);
      }
    }
  }
  /**
   * Parses the CodecPrivate ("OpusHead") structure and emits "format"
   * with the declared channel count and input sample rate.
   * Calls `done` with an error on a non-opus codec.
   */
  _parseCodecPrivate(type, info, done) {
    const bin = info.data;
    if (type != "tag" || info.name != "CodecPrivate") return;
    const reader = new BufferStream(bin);
    var head = reader.readString(8);
    if (head != "OpusHead") {
      return done(new Error("Invalid codec " + head), null);
    }
    // fields follow the OpusHead layout embedded in the stream
    this.codecdata = {
      version: reader.readUInt8(),
      channelCount: reader.readUInt8(),
      preSkip: reader.readUInt16LE(),
      inputSampleRate: reader.readUInt32LE(),
      outputGain: reader.readUInt16LE(),
      mappingFamily: reader.readUInt8()
    };
    this.channels = this.codecdata.channelCount;
    this.sampleRate = this.codecdata.inputSampleRate;
    this.emit("format", {
      channels: this.channels,
      sampleRate: this.sampleRate
    });
  }
  // Routes each EBML chunk through the track/codec parsers and pushes
  // SimpleBlock payloads belonging to the selected audio track.
  _transform(chunk, encoding, done) {
    const type = chunk[0];
    const info = chunk[1];
    const bin = info.data;
    this._parseTracks(type, info, done);
    this._parseCodecPrivate(type, info, done);
    if (type == "tag" && info.name == "SimpleBlock") {
      // low nibble of the first byte holds the track number
      // (valid for 1-byte track VINTs only — TODO confirm for >15 tracks)
      const tracknumber = bin.readUInt8(0) & 0xF;
      if (tracknumber == this.firstAudioTrack.TrackNumber) {
        const timestamp = bin.readUInt16BE(1);
        const flags = bin.readUInt8(3);
        const data = bin.slice(4);
        this.push(data);
        if (!this.demuxingAudio) {
          this.emit("demux");
          this.demuxingAudio = true;
        }
      }
    }
    done();
  }
}

module.exports = WebmOpusDemuxer;

View File

@ -0,0 +1,207 @@
"use strict";
const Constants = require("../../Constants");
const Utils = require("../../core/Utils");
const Writable = require("stream").Writable;
const OpusUtils = require("./OpusUtils");
/**
* @class
* @classdesc
* This is a regular `Writable` stream that abstracts `AudioEncoder`.
*
* Only one stream can be piped at a time, previous stream will automatically
* unpipe if you connect a new one.
*
* Stream instance is bound to a voice connection and persists until the
* connection is disposed.
*
* You also can `cork()` and `uncork()` it to pause or unpause like any other
* writable stream.
*
* ```js
* var lame = require("lame");
* var mp3decoder = new lame.Decoder();
* var file = fs.createReadStream("test.mp3");
* file.pipe(mp3decoder);
*
* mp3decoder.on('format', pcmfmt => {
* var options = {
* sampleRate: pcmfmt.sampleRate,
* channels: pcmfmt.channels,
* };
*
* var info = client.VoiceConnections[0];
* if (!info) return console.log("Voice not connected");
*
* var encoderStream = info.voiceConnection.getEncoderStream(options);
* if (!encoderStream) return console.log("Connection is disposed");
*
* encoderStream.resetTimestamp();
* // you can register timestamp listener only once on VOICE_CONNECTED
* // instead of unregistering all listeners every time
* encoderStream.removeAllListeners("timestamp");
* encoderStream.on("timestamp", time => console.log("Time " + time));
*
* mp3decoder.pipe(encoderStream);
* mp3decoder.once('end', () => console.log("stream end"));
*
* // must be registered after `pipe()`
* encoderStream.once("unpipe", () => file.destroy()); // close descriptor
* });
* ```
*
* #### Events:
*
* - ** Event: `timestamp` **
*
* Emits a `Number` value in seconds as argument when the `timestamp` is
* incremented.
*/
class AudioEncoderStream extends Writable {
  constructor(encoder) {
    super({ highWaterMark: 0 });
    // disable internal buffer for edge cases like proxy mode changes
    // Currently piped source stream, if any (only one at a time).
    this.source = null;
    this.on("pipe", src => {
      this.unpipeAll();
      this.source = src;
    });
    this.on("unpipe", src => this.unpipeAll());
    // Held _write() completion callback; released by _needBuffer() when
    // the encoder requests more data (this is the backpressure mechanism).
    this._done = null;
    this._encoder = encoder;
    // Remainder of a PCM chunk smaller than one frame; prepended to the
    // next written chunk.
    this._buffer = null;
    /**
     * Current timestamp in seconds.
     *
     * Increments when a chunk is processed.
     * @type {Number}
     */
    this.timestamp = 0;
    Utils.privatify(this);
  }
  // Releases the pending write callback, letting the piped stream push the
  // next chunk. Returns true if a callback was pending.
  _needBuffer() {
    if (typeof this._done === "function") {
      var done = this._done;
      this._done = null;
      // callback can invoke _write and overwrite this._done
      done();
      return true;
    }
    return false;
  }
  _write(chunk, encoding, done) {
    // blackhole the stream if encoder is destroyed
    if (this._encoder.disposed) return done();
    // Proxy mode expects pre-encoded opus packets; otherwise raw PCM.
    if (this._encoder.options.proxy) {
      return this._writeOpus(chunk, done);
    }
    this._writePCM(chunk, done);
  }
  _writeOpus(chunk, done) {
    // input stream must produce single opus packet as chunk
    const numSamples =
      OpusUtils.packet_get_nb_samples(chunk, Constants.DISCORD_SAMPLE_RATE);
    if (numSamples < 0) return done(new Error("Invalid opus packet"));
    // reinitialize scheduler if frame duration does not match
    const frameDuration = numSamples / Constants.DISCORD_SAMPLE_RATE * 1000;
    const options = this._encoder.options;
    if (options.frameDuration != frameDuration) {
      options.frameDuration = frameDuration;
      this._encoder.initialize(options);
    }
    // Hold `done` until the encoder signals it needs more data.
    this._done = done;
    this._addTimestamp(numSamples / Constants.DISCORD_SAMPLE_RATE);
    this._encoder.enqueue(chunk, numSamples);
  }
  _writePCM(chunk, done) {
    // Prepend any leftover bytes saved from the previous write.
    if (this._buffer) {
      chunk = Buffer.concat([this._buffer, chunk]);
      this._buffer = null;
    }
    var options = this._encoder.options;
    if (!options.sampleRate)
      return done(new Error("Invalid sampleRate: " + options.sampleRate));
    if (!options.frameDuration)
      return done(new Error("Invalid frameDuration: " + options.frameDuration));
    // Bytes per frame = samples/frame * bytes-per-sample * channels.
    var bitDepth = options.float ? 32 : 16;
    var numSamples =
      options.sampleRate / 1000 *
      options.frameDuration;
    var readSize =
      numSamples *
      bitDepth / 8 *
      (options.channels || 1);
    if (chunk.length < readSize) {
      // not enough data, save the chunk and request more
      if (chunk.length > 0)
        this._buffer = chunk;
      return done();
    }
    // Slice the chunk into whole frames for the encoder.
    var framesAvailable = Math.floor(chunk.length / readSize);
    var frames = [];
    for (var i = 0; i < framesAvailable; i++) {
      var offset = i * readSize;
      frames.push(chunk.slice(offset, offset + readSize));
    }
    // Hold `done` until the encoder signals it needs more data.
    this._done = done;
    this._addTimestamp((framesAvailable * numSamples) / options.sampleRate);
    this._encoder.enqueueMultiple(frames, numSamples);
    // Keep the trailing partial frame for the next write.
    var excessBytes = chunk.length - framesAvailable * readSize;
    if (excessBytes > 0) this._buffer = chunk.slice(-excessBytes);
  }
  // Advances the public timestamp (in seconds) and notifies listeners.
  _addTimestamp(value) {
    this.timestamp += value;
    this.emit("timestamp", this.timestamp);
  }
  end(chunk, encoding, done) {
    // not endable, just write the chunk
    // NOTE(review): when called with no chunk (e.g. end(cb)) the callback
    // is never invoked and 'finish' never fires — confirm this matches the
    // intended "stream persists with the connection" contract.
    if (typeof chunk === 'function') {
      done = chunk;
      chunk = null;
      encoding = null;
    } else if (typeof encoding === 'function') {
      done = encoding;
      encoding = null;
    }
    if (chunk !== null && chunk !== undefined) {
      this.write(chunk, encoding, done);
    }
  }
  /**
   * Resets current timestamp.
   */
  resetTimestamp() {
    this.timestamp = 0;
  }
  /**
   * Unpipes the connected stream if piped.
   */
  unpipeAll() {
    if (!this.source) return;
    this.source.unpipe(this);
    this.source = null;
    // Release any pending write callback so the old source isn't stuck.
    this._needBuffer();
  }
}
module.exports = AudioEncoderStream;

58
node_modules/discordie/lib/voice/streams/OpusUtils.js generated vendored Normal file
View File

@ -0,0 +1,58 @@
"use strict";
// Mirrors the libopus error codes used by these helpers.
const Constants = {
  OPUS_BAD_ARG: -1,
  OPUS_INVALID_PACKET: -4
};

/**
 * Returns the number of samples per frame for an Opus packet
 * (port of libopus `opus_packet_get_samples_per_frame`).
 * @param {Buffer|Uint8Array} packet - packet starting with the TOC byte
 * @param {Number} sampleRate - sampling rate in Hz (defaults to 48000)
 * @returns {Number} samples per frame at the given rate
 */
function opus_packet_get_samples_per_frame(packet, sampleRate) {
  const rate = sampleRate || 48000;
  const toc = packet[0];
  if (toc & 0x80) {
    // CELT-only modes: 2.5/5/10/20 ms, size code in bits 3-4.
    return (rate << ((toc >> 3) & 0x3)) / 400;
  }
  if ((toc & 0x60) === 0x60) {
    // Hybrid modes: 20 ms when bit 3 is set, otherwise 10 ms.
    return (toc & 0x08) ? rate / 50 : rate / 100;
  }
  // SILK-only modes: 10/20/40/60 ms.
  const sizeCode = (toc >> 3) & 0x3;
  return sizeCode === 3 ? rate * 60 / 1000 : (rate << sizeCode) / 100;
}
/**
 * Returns the number of frames in an Opus packet
 * (port of libopus `opus_packet_get_nb_frames`).
 * @param {Buffer|Uint8Array} packet - packet starting with the TOC byte
 * @returns {Number} frame count, or a negative Constants error code
 */
function opus_packet_get_nb_frames(packet) {
  if (packet.length < 1) return Constants.OPUS_BAD_ARG;
  // The frame-count code lives in the two lowest bits of the TOC byte.
  const code = packet[0] & 0x3;
  switch (code) {
    case 0:
      return 1;      // one frame
    case 1:
    case 2:
      return 2;      // two frames (equal or differing sizes)
    default:
      // Arbitrary frame count: stored in the byte after the TOC.
      if (packet.length < 2) return Constants.OPUS_INVALID_PACKET;
      return packet[1] & 0x3F;
  }
}
function opus_packet_get_nb_samples(packet, sampleRate)
{
sampleRate = sampleRate || 48000;
var count = opus_packet_get_nb_frames(packet);
if (count < 0) return count;
var samples = count * opus_packet_get_samples_per_frame(packet, sampleRate);
/* Can't have more than 120 ms */
if (samples * 25 > sampleRate * 3)
return Constants.OPUS_INVALID_PACKET;
return samples;
}
module.exports = {
Constants,
packet_get_samples_per_frame: opus_packet_get_samples_per_frame,
packet_get_nb_frames: opus_packet_get_nb_frames,
packet_get_nb_samples: opus_packet_get_nb_samples
};

View File

@ -0,0 +1,158 @@
"use strict";
const events = require("events");
const nopus = () => require("../../../deps/nopus");
const Utils = require("../../core/Utils");
const Constants = require("../../Constants");
// True when running as a forked child process (IPC transport available);
// flips to false when the worker is instantiated in-process.
var IPC = true;

class DecoderWorker extends events.EventEmitter {
  constructor() {
    super();
    // Constructed directly (not forked), so messages are routed over the
    // EventEmitter instead of process IPC.
    IPC = false;
  }
  kill() {
    // Mirrors ChildProcess#kill for the in-process case; frees decoders.
    this.destroyStates();
  }
}
/**
 * Destroys every per-SSRC decoder state and resets the SSRC→user map.
 * Safe to call before initialization (`this.states` may be undefined).
 */
function destroyStates() {
  const states = this.states || [];
  for (const state of states) {
    state.decoder.destroy();
  }
  this.states = [];
  this.userMap = {};
}
/**
 * (Re)initializes the decoder worker: drops all decoder states and
 * normalizes the incoming options (defaults: float=false, channels=1,
 * channel count clamped to at least 1).
 * Note: mutates the passed options object and keeps a reference to it.
 */
function initialize(_options) {
  this.destroyStates();
  _options.float = _options.float || false;
  const channels = _options.channels || 1;
  _options.channels = channels < 1 ? 1 : channels;
  this.options = _options;
}
/**
 * Creates a fresh Opus decoder at Discord's sample rate using the channel
 * count from the current options.
 */
function createDecoder() {
  return new (nopus().OpusDecoder)(
    Constants.DISCORD_SAMPLE_RATE,
    this.options.channels
  );
}
/**
 * Destroys and removes decoder states that have no user assigned yet
 * (packets can arrive for an SSRC before its user mapping is known).
 */
function destroyUnknown() {
  for (const state of this.states.filter(s => !s.userId)) {
    state.decoder.destroy();
    const pos = this.states.indexOf(state);
    if (pos >= 0) this.states.splice(pos, 1);
  }
}
/**
 * Removes the decoder state (and its SSRC mapping) for the given user.
 * Also garbage-collects any states that never got a user assigned.
 */
function destroyUser(userId) {
  destroyUnknown.call(this);
  const state = this.states.find(s => s.userId == userId);
  if (!state) return;
  if (state.ssrc) delete this.userMap[state.ssrc];
  state.decoder.destroy();
  this.states.splice(this.states.indexOf(state), 1);
}
/**
 * Maps an RTP SSRC to a user id. If a state already exists for the SSRC
 * under a different user, its decoder is recreated for the new owner.
 */
function assignUser(ssrc, userId) {
  this.userMap[ssrc] = userId;
  const state = this.states.find(s => s.ssrc == ssrc);
  const needsRebind = state != null && state.userId != userId;
  if (!needsRebind) return;
  state.decoder.destroy();
  state.decoder = this.createDecoder();
  state.userId = userId;
}
/**
 * Returns the decoder for an SSRC, lazily creating its state (seeded with
 * any already-known user mapping) on first sight.
 */
function getOrCreateDecoder(ssrc) {
  const existing = this.states.find(s => s.ssrc == ssrc);
  if (existing) return existing.decoder;
  const created = {
    ssrc: ssrc,
    decoder: this.createDecoder(),
    userId: this.userMap[ssrc] || null
  };
  this.states.push(created);
  return created.decoder;
}
/**
 * Decodes one RTP Opus packet into PCM and forwards it via sendPacket.
 * `packet.chunk` may be a Buffer or a serialized `{data: [...]}` object
 * (the shape a Buffer takes after IPC transfer).
 */
function decode(packet) {
  // Validate the payload BEFORE touching it or allocating decoder state:
  // the original read `packet.chunk.data` first, which threw a TypeError
  // for null/missing chunks instead of returning silently, and created a
  // decoder state even for unusable packets.
  if (!packet || !packet.chunk) return;
  const frameData =
    packet.chunk instanceof Buffer ? packet.chunk : packet.chunk.data;
  if (!frameData) return;
  const decoder = getOrCreateDecoder.call(this, packet.ssrc);
  // Pick the integer or float decode path per the configured output format.
  const decode = (
    this.options.float ?
    decoder.decode_float :
    decoder.decode
  ).bind(decoder);
  try {
    const dataBuffer = new Uint8Array(Utils.createArrayBuffer(frameData));
    const decoded = new Uint8Array(decode(dataBuffer).buffer);
    packet.chunk = Utils.createBuffer(decoded);
    this.sendPacket(packet);
  } catch (e) { console.error((e instanceof Error ? e : new Error(e)).stack); }
}
/**
 * Routes an IPC message (from the parent process or an in-process
 * AudioDecoder) to the matching worker operation. Empty messages and
 * unknown ops are ignored.
 */
function onIPCMessage(msg) {
  if (!msg) return;
  const op = msg.op;
  if (op === "initialize") {
    this.initialize(msg.options);
  } else if (op === "enqueue") {
    this.decode(msg.packet);
  } else if (op === "assignUser") {
    this.assignUser(msg.ssrc, msg.userId);
  } else if (op === "destroyUser") {
    this.destroyUser(msg.userId);
  }
}
/**
 * Delivers a message to the consumer: over process IPC when running as a
 * forked child, or via the local EventEmitter when in-process.
 */
function sendIPC(data) {
  if (IPC) {
    process.send(data);
  } else {
    this.emit("message", data);
  }
}
/**
 * Wraps a decoded packet in a "packet" message and ships it out.
 * (sampleCount is accepted for signature parity but not transmitted.)
 */
function sendPacket(packet, sampleCount) {
  const message = { op: "packet", packet: packet };
  this.sendIPC(message);
}
// When forked, the worker functions run with DecoderWorker.prototype as
// `this`; the same prototype also backs the in-process variant.
process.on("message", onIPCMessage.bind(DecoderWorker.prototype));
// Expose the free functions as methods; `send` mirrors ChildProcess#send
// so AudioDecoder can talk to either transport uniformly.
Object.assign(DecoderWorker.prototype, {
  send: onIPCMessage,
  initialize: initialize,
  decode: decode,
  sendIPC: sendIPC,
  sendPacket: sendPacket,
  createDecoder: createDecoder,
  destroyStates: destroyStates,
  assignUser: assignUser,
  destroyUser: destroyUser,
});
module.exports = DecoderWorker;

View File

@ -0,0 +1,552 @@
"use strict";
const events = require("events");
const nopus = () => require("../../../deps/nopus");
const Utils = require("../../core/Utils");
const Constants = require("../../Constants");
const AudioResampler = require("../AudioResampler");
const DiscordieProfiler = require("../../core/DiscordieProfiler");
// True when running as a forked child process (IPC transport available);
// flips to false when the worker is instantiated in-process.
var IPC = true;

class EncoderWorker extends events.EventEmitter {
  constructor() {
    super();
    // Constructed directly (not forked), so messages are routed over the
    // EventEmitter instead of process IPC.
    IPC = false;
  }
  kill() {
    // Mirrors ChildProcess#kill for the in-process case: release native
    // encoder/resampler resources and drop any queued audio.
    if (this.encoder) {
      this.encoder.destroy();
      this.encoder = null;
    }
    if (this.resampler) {
      this.resampler.destroy();
      this.resampler = null;
    }
    this.audioQueue.length = 0;
    this.audioQueue = null;
  }
}
/**
 * Abstract interface for Opus encoding backends. Subclasses override the
 * no-op methods; the constructor only records the encoder configuration.
 */
class BaseEncodingEngine {
  constructor(sampleRate, channels, application, frameDuration) {
    Object.assign(this, { sampleRate, channels, application, frameDuration });
  }
  /** Whether this backend's dependency can be loaded. */
  static isAvailable() {}
  /** Sets the target bitrate in bits/s. */
  setBitrate(bitrate) {}
  /** Releases backend resources. */
  destroy() {}
  /** Encodes an interleaved int16 PCM view; returns opus packets. */
  encode(buffer) {}
  /** Encodes an interleaved float32 PCM view; returns opus packets. */
  encode_float(buffer) {}
}
/**
 * Encoding backend built on the optional native `node-opus` module.
 */
class NativeEncodingEngine extends BaseEncodingEngine {
  constructor(sampleRate, channels, application, frameDuration) {
    super(sampleRate, channels, application, frameDuration);
    this.encoder = new (require("node-opus").OpusEncoder)(
      sampleRate,
      channels,
      application
    );
  }
  static isAvailable() {
    // Probe for the optional dependency without crashing when it's absent.
    try {
      require("node-opus");
      return true;
    } catch (e) {
      return false;
    }
    // Unreachable (both branches above return); kept as-is.
    return false;
  }
  destroy() {}
  setBitrate(bitrate) {
    return this.encoder.setBitrate(bitrate);
  }
  encode(view) {
    let byteBuffer = new Uint8Array(view.buffer);
    // Expected frame size in BYTES: samples/frame * 2 bytes * channels.
    const expectedFrameSize =
      this.sampleRate * (this.frameDuration / 1000) * 2 * this.channels;
    // node-opus doesn't like buffers not matching supported frame sizes
    if (byteBuffer.length < expectedFrameSize) {
      // Zero-pad short (e.g. final) frames up to a full frame.
      const cut = byteBuffer;
      (byteBuffer = new Uint8Array(expectedFrameSize)).set(cut);
    } else {
      byteBuffer = byteBuffer.subarray(0, expectedFrameSize);
    }
    return [this.encoder.encode(byteBuffer)];
  }
  encode_float(view) {
    // NOTE(review): `this.float` is never assigned on this class, so this
    // condition effectively tests `view instanceof Float32Array` — confirm
    // the intent before relying on it.
    if (this.float || view instanceof Float32Array) {
      // node-opus doesn't expose encode_float
      var int16View = new Int16Array(view.length);
      for (var i = 0; i < int16View.length; i++)
        int16View[i] = view[i] * 32767;
      view = int16View;
    }
    return this.encode(view);
  }
}
// One-time warm-up flag: the first InternalEncodingEngine instance runs a
// few dummy encodes so V8 JIT-compiles the hot nopus paths up front.
var coldBoot = true;

/**
 * Encoding backend using the bundled pure-JS `nopus` module.
 */
class InternalEncodingEngine extends BaseEncodingEngine {
  constructor(sampleRate, channels, application, frameDuration) {
    super(sampleRate, channels, application, frameDuration);
    this.encoder = new (nopus().OpusEncoder)(
      sampleRate,
      channels,
      application,
      frameDuration
    );
    // make V8 compile the code
    if (coldBoot) {
      const samples = this.sampleRate / 1000 * this.frameDuration;
      for (var i = 0; i < 5; i++) {
        // Warm up both paths: int16 (2 bytes/sample), float32 (4 bytes).
        this.encode(Utils.allocBuffer(samples * this.channels * 2));
        this.encode_float(Utils.allocBuffer(samples * this.channels * 4));
      }
      coldBoot = false;
    }
  }
  static isAvailable() { return true; }
  setBitrate(bitrate) {
    return this.encoder.set_bitrate(bitrate);
  }
  destroy() {
    return this.encoder.destroy.apply(this.encoder, arguments);
  }
  encode(buffer) {
    // nopus returns raw arrays; convert each packet to a Buffer.
    return this.encoder.encode.apply(this.encoder, arguments)
      .map(packet => Utils.createBuffer(packet));
  }
  encode_float(buffer) {
    return this.encoder.encode_float.apply(this.encoder, arguments)
      .map(packet => Utils.createBuffer(packet));
  }
}
/**
 * Abstract interface for sample-rate converters. The constructor only
 * records the conversion parameters; subclasses do the actual work.
 */
class BaseResampler {
  constructor(channels, sourceRate, targetRate, bps, float) {
    Object.assign(this, { channels, sourceRate, targetRate, bps, float });
  }
  /** Releases backend resources. */
  destroy() {}
  /** Resamples an interleaved buffer; returns the converted samples. */
  process_interleaved() {}
}
/**
 * Resampler backend backed by the Speex resampler from the bundled
 * `nopus` module.
 */
class SpeexResampler extends BaseResampler {
  constructor(channels, sourceRate, targetRate, bps, float) {
    super(channels, sourceRate, targetRate, bps, float);
    this.resampler = new (nopus().Resampler)(
      channels,
      sourceRate, targetRate,
      bps, float
    );
  }
  destroy() {
    return this.resampler.destroy.apply(this.resampler, arguments);
  }
  process_interleaved() {
    // Forwards all arguments (the interleaved PCM buffer) to nopus.
    return this.resampler.process_interleaved.apply(this.resampler, arguments);
  }
}
/**
 * Resampler backend using the pure-JS AudioResampler. Integer input is
 * normalized to Float32 before resampling; 24-bit PCM is unsupported.
 */
class InternalResampler extends BaseResampler {
  constructor(channels, sourceRate, targetRate, bps, float) {
    super(channels, sourceRate, targetRate, bps, float);
    this.resampler = new AudioResampler(channels, sourceRate, targetRate);
  }
  process_interleaved(buffer) {
    let view = null;
    if (this.float) {
      view = new Float32Array(buffer);
    } else if (this.bps == 8) {
      view = new Int8Array(buffer);
    } else if (this.bps == 16) {
      view = new Int16Array(buffer);
    } else if (this.bps == 32) {
      view = new Int32Array(buffer);
    }
    // 24-bit integer PCM has no typed-array view; reject it.
    if (!view) throw new Error("Unsupported audio format");
    if (!this.float) {
      // Normalize integer samples to [-1, 1] floats for the resampler.
      const floatView = new Float32Array(view.length);
      for (let i = 0; i < floatView.length; i++)
        floatView[i] = view[i] / 32767;
      view = floatView;
    }
    return this.resampler.process(view);
  }
}
// Monotonic clock in fractional milliseconds (process.hrtime based).
const hrtime = () => {
  const [seconds, nanos] = process.hrtime();
  return seconds * 1000 + nanos / 1e6;
};
/**
 * (Re)initializes the encoder worker from an options object.
 * Destroys any existing encoder/resampler, normalizes option defaults
 * (mutating the passed object), selects the encoding/resampling backends,
 * and resets the frame scheduler state.
 */
function initialize(_options) {
  if (this.encoder != null) {
    this.encoder.destroy();
  }
  if (this.resampler != null) {
    this.resampler.destroy();
    this.resampler = null;
  }
  const targetSampleRate = Constants.DISCORD_SAMPLE_RATE;
  // Frame duration clamped to the supported 20-60 ms range.
  _options.frameDuration = _options.frameDuration || 60;
  _options.frameDuration = Math.max(20, Math.min(_options.frameDuration, 60));
  _options.sampleRate = _options.sampleRate || targetSampleRate;
  _options.channels = _options.channels || 1;
  _options.float = _options.float || false;
  // "average", anything else will encode multichannel audio
  _options.downmix = _options.downmix;
  // "highres", anything else will just use setTimeout
  _options.timingPrecision = _options.timingPrecision || "normal";
  // "nopus", anything else will just use internal
  _options.resampler = _options.resampler || "internal";
  // "native", anything else will just use internal
  _options.engine = _options.engine || "internal";
  // proxy mode, passes packets straight to the muxer
  // packets should be opus encoded already
  // in this mode it only works as scheduler for packets
  _options.proxy = _options.proxy || false;
  // OPUS_AUTO -1000
  // OPUS_BITRATE_MAX -1
  _options.bitrate = _options.bitrate || null;
  this.options = _options;
  // Downmixing collapses all input channels into one encoded channel.
  this.encodingChannels = this.options.downmix ?
    1 :
    this.options.channels;
  if (!this.options.proxy) {
    // Fall back to the internal engine when node-opus can't be loaded.
    if (this.options.engine == "native") {
      const available = NativeEncodingEngine.isAvailable();
      if (!available) {
        console.warn("Unable to load native opus module (node-opus)");
        console.warn("Audio will be encoded using internal module");
        this.options.engine = "internal";
      }
    }
    const _encodingEngine =
      this.options.engine == "native" ?
        NativeEncodingEngine :
        InternalEncodingEngine;
    this.encoder = new _encodingEngine(
      targetSampleRate,
      this.encodingChannels,
      (nopus().OpusApplication).Audio,
      this.options.frameDuration
    );
    if (this.options.bitrate) {
      this.encoder.setBitrate(this.options.bitrate);
    }
    // Only resample when the input rate differs from Discord's 48 kHz.
    if (targetSampleRate != this.options.sampleRate) {
      const _resampler =
        this.options.resampler == "nopus" ?
          SpeexResampler :
          InternalResampler;
      this.resampler = new _resampler(this.encodingChannels,
        this.options.sampleRate, targetSampleRate,
        this.options.float ? 32 : 16, this.options.float
      );
    }
  }
  // Reset scheduler bookkeeping.
  this.failedPackets = 0;
  this.startTime = hrtime();
  this.lastFrame = 0;
  this.audioQueue = [];
}
/**
 * Kicks the frame scheduler after new audio has been queued. If the queue
 * was starved for over a second, the timing origin is reset so playback
 * resumes immediately instead of "catching up" on missed frames.
 */
function processQueue() {
  if (this.audioQueue.length <= 0) return;
  const elapsedSinceLastFrame =
    hrtime() - (this.lastFrame * this.options.frameDuration + this.startTime);
  if (elapsedSinceLastFrame > 1000) {
    // put queue into pause mode after 1 second;
    // reset scheduler when more data is available
    this.startTime = hrtime();
    this.lastFrame = 0;
  }
  setImmediate(this.timerCallback.bind(this));
}
/**
 * Frame scheduler tick: paces processAudio() so one frame is emitted per
 * options.frameDuration ms, using either coarse setTimeout timing or a
 * high-resolution spin-down depending on options.timingPrecision.
 */
function timerCallback() {
  if (this.encoder == null && !this.options.proxy)
    return;
  //throw new Error("Encoder is not initialized");
  if (!this.audioQueue) return;
  if (this.audioQueue && this.audioQueue.length <= 0) {
    // Queue drained: ask the producer for more data and stop ticking.
    this.sendNeedBuffer();
    return;
  }
  const frameDuration = this.options.frameDuration;
  const nextTime = (this.lastFrame + 1) * frameDuration + this.startTime;
  // normal precision timing, low cpu usage
  if (this.options.timingPrecision != "highres") {
    if (hrtime() < nextTime) {
      const timeleft = Math.round(nextTime - hrtime());
      // Re-arm a single pending timer for the remaining wait.
      if (this.nextFrameTimer)
        clearTimeout(this.nextFrameTimer);
      this.nextFrameTimer =
        setTimeout(this.timerCallback.bind(this), Math.max(timeleft, 0));
      return;
    }
    this.lastFrame++;
    this.processAudio();
    return setImmediate(this.timerCallback.bind(this));
  }
  // high precision timing, high cpu usage
  // Within a quarter-frame of the deadline, spin on setImmediate instead
  // of trusting setTimeout's coarse resolution.
  const hiresTimerThreshold = this.options.frameDuration / 4;
  if (hrtime() < nextTime) {
    const timeleft = nextTime - hrtime();
    if (timeleft <= hiresTimerThreshold) {
      return setImmediate(this.timerCallback.bind(this));
    }
    if (this.nextFrameTimer)
      clearTimeout(this.nextFrameTimer);
    this.nextFrameTimer =
      setTimeout(this.timerCallback.bind(this), timeleft / 4);
    return;
  }
  this.lastFrame++;
  this.processAudio();
  return setImmediate(this.timerCallback.bind(this));
}
/**
 * Dequeues one frame, applies optional downmix/volume, resamples if
 * needed, encodes to Opus, and ships the resulting packet(s).
 * In proxy mode the frame is forwarded untouched (already Opus).
 */
function processAudio() {
  const frame = this.audioQueue.shift();
  // Validate the frame BEFORE dereferencing it: the original read
  // `frame.chunk.data` first, which threw a TypeError for an empty queue
  // or a null chunk instead of just skipping the frame.
  if (!frame || !frame.chunk) return;
  const frameData =
    frame.chunk instanceof Buffer ? frame.chunk : frame.chunk.data;
  if (!frameData || !frame.sampleCount) return;
  if (this.options.proxy) {
    this.sendPacket(frame.chunk, frame.sampleCount);
    return;
  }
  // PCM samples are 2 bytes (int16) or 4 bytes (float32) each.
  if (frameData.length % 2 != 0 ||
      (this.options.float && frameData.length % 4 != 0))
    return console.log("check your audio payload, buffer size must be even");
  const frameBuffer = Utils.createArrayBuffer(frameData);
  const frameView = this.options.float ?
    new Float32Array(frameBuffer) : new Int16Array(frameBuffer);
  // Downmixing to mono shrinks the working buffer by the channel count.
  const bufferLength = this.options.downmix == "average" ?
    frameData.length / this.options.channels :
    frameData.length;
  let channel = new ArrayBuffer(bufferLength);
  let view = this.options.float ?
    new Float32Array(channel) : new Int16Array(channel);
  DiscordieProfiler.start("mix");
  // much optimize, var
  var channels = this.options.channels;
  // NOTE(review): options.volume is never defaulted in initialize(); when
  // unset this yields NaN and the (volume < 1) branches never run.
  var volume = this.options.volume / 100;
  var viewLength = view.length;
  if (this.options.downmix == "average") {
    // Average all channels into one, then apply volume.
    var sum = 0;
    for (var i = 0; i < viewLength; i++) {
      sum = 0;
      for (var c = 0; c < channels; c++) {
        sum += frameView[i * channels + c];
      }
      view[i] = sum / channels;
      if (volume < 1) view[i] *= volume;
    }
  } else {
    if (volume < 1) {
      for (var i = 0; i < viewLength; i++) {
        view[i] = frameView[i] * volume;
      }
    } else {
      // No mixing or attenuation needed; reuse the input buffer as-is.
      channel = frameBuffer;
      view = frameView;
    }
  }
  DiscordieProfiler.stop("mix");
  DiscordieProfiler.start("resample");
  if (this.resampler != null) {
    view = this.resampler.process_interleaved(channel);
  }
  DiscordieProfiler.stop("resample");
  DiscordieProfiler.start("encode");
  let encoded;
  // The resampler always outputs float32, so use the float encoder then.
  const encode = (this.options.float || this.resampler ?
    this.encoder.encode_float : this.encoder.encode).bind(this.encoder);
  encoded = encode(view);
  DiscordieProfiler.stop("encode");
  if (!encoded.length) {
    // Encoders buffer internally; repeated empty results usually mean the
    // input frames are smaller than the configured frame size.
    this.failedPackets++;
    if (this.failedPackets > 3) {
      console.log(
        "failed to encode packet, buffer size might be smaller than expected"
      );
    }
  } else {
    this.failedPackets = 0;
  }
  if (process.env.PROFILEVOICE) {
    // Accumulate and print per-stage CPU usage when profiling is enabled.
    let out = "";
    let sum = 0;
    const timediff = (hrtime() - this.startTime);
    const _print = (name, v) => {
      var cpu = v / timediff * 100;
      out += `[${name}] ${v.toFixed(2)}ms ${cpu.toFixed(2)}%cpu; `;
    };
    ["mix", "resample", "encode"].map(n => {
      const v = DiscordieProfiler.get(n);
      if (!this.prof) this.prof = {};
      if (!this.prof[n]) this.prof[n] = 0;
      this.prof[n] += v;
      const vacc = this.prof[n];
      n = n.substr(0, 3).toUpperCase();
      _print(n, vacc);
      sum += vacc;
    });
    _print("TOTAL", sum);
    out += "T: " + timediff.toFixed(2);
    console.log(out);
  }
  // After resampling, the true per-packet sample count is derived from
  // the resampled view length rather than the original frame.
  for (let i = 0; i < encoded.length; i++) {
    this.sendPacket(
      encoded[i],
      this.resampler ?
        (view.length / this.encodingChannels) :
        frame.sampleCount
    );
  }
}
/**
 * Routes an IPC message (from the parent process or the in-process
 * AudioEncoder) to the matching worker operation. Empty messages and
 * unknown ops are ignored; queue ops are no-ops before initialization.
 */
function onIPCMessage(msg) {
  if (!msg) return;
  const op = msg.op;
  if (op === "initialize") {
    this.initialize(msg.options);
  } else if (op === "enqueue") {
    if (this.audioQueue) {
      this.audioQueue.push(msg.frame);
      this.processQueue();
    }
  } else if (op === "enqueueMultiple") {
    if (this.audioQueue) {
      this.audioQueue.push.apply(this.audioQueue, msg.frames);
      this.processQueue();
    }
  } else if (op === "clearQueue") {
    if (this.audioQueue) {
      this.audioQueue.length = 0;
    }
  } else if (op === "set") {
    this.options[msg.key] = msg.value;
  } else if (op === "setBitrate") {
    if (this.encoder) {
      this.options.bitrate = msg.value;
      this.encoder.setBitrate(msg.value);
    }
  }
}
/**
 * Delivers a message to the consumer: over process IPC when running as a
 * forked child, or via the local EventEmitter when in-process.
 */
function sendIPC(data) {
  if (IPC) {
    process.send(data);
  } else {
    this.emit("message", data);
  }
}
/**
 * Ships an encoded opus packet (plus its sample count, used by the muxer
 * for timestamping) to the consumer as an "opuspacket" message.
 */
function sendPacket(packet, sampleCount) {
  const message = {
    op: "opuspacket",
    packet: packet,
    sampleCount: sampleCount
  };
  this.sendIPC(message);
}
/**
 * Notifies the consumer that the audio queue has drained and more data
 * should be written.
 */
function sendNeedBuffer() {
  this.sendIPC({ op: "needbuffer" });
}
// When forked, the worker functions run with EncoderWorker.prototype as
// `this`; the same prototype also backs the in-process variant.
process.on("message", onIPCMessage.bind(EncoderWorker.prototype));
// Expose the free functions as methods; `send` mirrors ChildProcess#send
// so AudioEncoder can talk to either transport uniformly.
Object.assign(EncoderWorker.prototype, {
  send: onIPCMessage,
  initialize: initialize,
  processQueue: processQueue,
  processAudio: processAudio,
  timerCallback: timerCallback,
  sendIPC: sendIPC,
  sendPacket: sendPacket,
  sendNeedBuffer: sendNeedBuffer,
});
module.exports = EncoderWorker;