Pushing changes
This commit is contained in:
20
node_modules/log4js/lib/appenders/categoryFilter.js
generated
vendored
Normal file
20
node_modules/log4js/lib/appenders/categoryFilter.js
generated
vendored
Normal file
@ -0,0 +1,20 @@
|
||||
"use strict";
|
||||
var log4js = require('../log4js');
|
||||
|
||||
function categoryFilter (excludes, appender) {
|
||||
if (typeof(excludes) === 'string') excludes = [excludes];
|
||||
return function(logEvent) {
|
||||
if (excludes.indexOf(logEvent.categoryName) === -1) {
|
||||
appender(logEvent);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
function configure(config, options) {
|
||||
log4js.loadAppender(config.appender.type);
|
||||
var appender = log4js.appenderMakers[config.appender.type](config.appender, options);
|
||||
return categoryFilter(config.exclude, appender);
|
||||
}
|
||||
|
||||
exports.appender = categoryFilter;
|
||||
exports.configure = configure;
|
153
node_modules/log4js/lib/appenders/clustered.js
generated
vendored
Executable file
153
node_modules/log4js/lib/appenders/clustered.js
generated
vendored
Executable file
@ -0,0 +1,153 @@
|
||||
"use strict";
|
||||
|
||||
var cluster = require('cluster');
|
||||
var log4js = require('../log4js');
|
||||
|
||||
/**
|
||||
* Takes a loggingEvent object, returns string representation of it.
|
||||
*/
|
||||
function serializeLoggingEvent(loggingEvent) {
|
||||
// JSON.stringify(new Error('test')) returns {}, which is not really useful for us.
|
||||
// The following allows us to serialize errors correctly.
|
||||
for (var i = 0; i < loggingEvent.data.length; i++) {
|
||||
var item = loggingEvent.data[i];
|
||||
// Validate that we really are in this case
|
||||
if (item && item.stack && JSON.stringify(item) === '{}') {
|
||||
loggingEvent.data[i] = {stack : item.stack};
|
||||
}
|
||||
}
|
||||
return JSON.stringify(loggingEvent);
|
||||
}
|
||||
|
||||
/**
|
||||
* Takes a string, returns an object with
|
||||
* the correct log properties.
|
||||
*
|
||||
* This method has been "borrowed" from the `multiprocess` appender
|
||||
* by `nomiddlename`
|
||||
* (https://github.com/nomiddlename/log4js-node/blob/master/lib/appenders/multiprocess.js)
|
||||
*
|
||||
* Apparently, node.js serializes everything to strings when using `process.send()`,
|
||||
* so we need smart deserialization that will recreate log date and level for further
|
||||
* processing by log4js internals.
|
||||
*/
|
||||
function deserializeLoggingEvent(loggingEventString) {
|
||||
|
||||
var loggingEvent;
|
||||
|
||||
try {
|
||||
|
||||
loggingEvent = JSON.parse(loggingEventString);
|
||||
loggingEvent.startTime = new Date(loggingEvent.startTime);
|
||||
loggingEvent.level = log4js.levels.toLevel(loggingEvent.level.levelStr);
|
||||
// Unwrap serialized errors
|
||||
for (var i = 0; i < loggingEvent.data.length; i++) {
|
||||
var item = loggingEvent.data[i];
|
||||
if (item && item.stack) {
|
||||
loggingEvent.data[i] = item.stack;
|
||||
}
|
||||
}
|
||||
|
||||
} catch (e) {
|
||||
|
||||
// JSON.parse failed, just log the contents probably a naughty.
|
||||
loggingEvent = {
|
||||
startTime: new Date(),
|
||||
categoryName: 'log4js',
|
||||
level: log4js.levels.ERROR,
|
||||
data: [ 'Unable to parse log:', loggingEventString ]
|
||||
};
|
||||
}
|
||||
return loggingEvent;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an appender.
|
||||
*
|
||||
* If the current process is a master (`cluster.isMaster`), then this will be a "master appender".
|
||||
* Otherwise this will be a worker appender, that just sends loggingEvents to the master process.
|
||||
*
|
||||
* If you are using this method directly, make sure to provide it with `config.actualAppenders`
|
||||
* array of actual appender instances.
|
||||
*
|
||||
* Or better use `configure(config, options)`
|
||||
*/
|
||||
function createAppender(config) {
|
||||
|
||||
if (cluster.isMaster) {
|
||||
|
||||
var masterAppender = function(loggingEvent) {
|
||||
|
||||
if (config.actualAppenders) {
|
||||
var size = config.actualAppenders.length;
|
||||
for(var i = 0; i < size; i++) {
|
||||
if (
|
||||
!config.appenders[i].category ||
|
||||
config.appenders[i].category === loggingEvent.categoryName
|
||||
) {
|
||||
// Relying on the index is not a good practice but otherwise
|
||||
// the change would have been bigger.
|
||||
config.actualAppenders[i](loggingEvent);
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Listen on new workers
|
||||
cluster.on('fork', function(worker) {
|
||||
|
||||
worker.on('message', function(message) {
|
||||
if (message.type && message.type === '::log-message') {
|
||||
var loggingEvent = deserializeLoggingEvent(message.event);
|
||||
|
||||
// Adding PID metadata
|
||||
loggingEvent.pid = worker.process.pid;
|
||||
loggingEvent.cluster = {
|
||||
master: process.pid,
|
||||
worker: worker.process.pid,
|
||||
workerId: worker.id
|
||||
};
|
||||
|
||||
masterAppender(loggingEvent);
|
||||
}
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
return masterAppender;
|
||||
|
||||
} else {
|
||||
|
||||
return function(loggingEvent) {
|
||||
// If inside the worker process, then send the logger event to master.
|
||||
if (cluster.isWorker) {
|
||||
// console.log("worker " + cluster.worker.id + " is sending message");
|
||||
process.send({ type: '::log-message', event: serializeLoggingEvent(loggingEvent)});
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
function configure(config, options) {
|
||||
|
||||
if (config.appenders && cluster.isMaster) {
|
||||
|
||||
var size = config.appenders.length;
|
||||
config.actualAppenders = new Array(size);
|
||||
|
||||
for(var i = 0; i < size; i++) {
|
||||
|
||||
log4js.loadAppender(config.appenders[i].type);
|
||||
config.actualAppenders[i] = log4js.appenderMakers[config.appenders[i].type](
|
||||
config.appenders[i],
|
||||
options
|
||||
);
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
return createAppender(config);
|
||||
}
|
||||
|
||||
exports.appender = createAppender;
|
||||
exports.configure = configure;
|
21
node_modules/log4js/lib/appenders/console.js
generated
vendored
Normal file
21
node_modules/log4js/lib/appenders/console.js
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
"use strict";
|
||||
var layouts = require('../layouts')
|
||||
, consoleLog = console.log.bind(console);
|
||||
|
||||
function consoleAppender (layout, timezoneOffset) {
|
||||
layout = layout || layouts.colouredLayout;
|
||||
return function(loggingEvent) {
|
||||
consoleLog(layout(loggingEvent, timezoneOffset));
|
||||
};
|
||||
}
|
||||
|
||||
function configure(config) {
|
||||
var layout;
|
||||
if (config.layout) {
|
||||
layout = layouts.layout(config.layout.type, config.layout);
|
||||
}
|
||||
return consoleAppender(layout, config.timezoneOffset);
|
||||
}
|
||||
|
||||
exports.appender = consoleAppender;
|
||||
exports.configure = configure;
|
90
node_modules/log4js/lib/appenders/dateFile.js
generated
vendored
Normal file
90
node_modules/log4js/lib/appenders/dateFile.js
generated
vendored
Normal file
@ -0,0 +1,90 @@
|
||||
"use strict";
|
||||
var streams = require('streamroller')
|
||||
, layouts = require('../layouts')
|
||||
, path = require('path')
|
||||
, os = require('os')
|
||||
, eol = os.EOL || '\n'
|
||||
, openFiles = [];
|
||||
|
||||
//close open files on process exit.
|
||||
process.on('exit', function() {
|
||||
openFiles.forEach(function (file) {
|
||||
file.end();
|
||||
});
|
||||
});
|
||||
|
||||
/**
|
||||
* File appender that rolls files according to a date pattern.
|
||||
* @filename base filename.
|
||||
* @pattern the format that will be added to the end of filename when rolling,
|
||||
* also used to check when to roll files - defaults to '.yyyy-MM-dd'
|
||||
* @layout layout function for log messages - defaults to basicLayout
|
||||
* @timezoneOffset optional timezone offset in minutes - defaults to system local
|
||||
*/
|
||||
function appender(filename, pattern, layout, options, timezoneOffset) {
|
||||
layout = layout || layouts.basicLayout;
|
||||
|
||||
var logFile = new streams.DateRollingFileStream(
|
||||
filename,
|
||||
pattern,
|
||||
options
|
||||
);
|
||||
openFiles.push(logFile);
|
||||
|
||||
return function(logEvent) {
|
||||
logFile.write(layout(logEvent, timezoneOffset) + eol, "utf8");
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
function configure(config, options) {
|
||||
var layout;
|
||||
|
||||
if (config.layout) {
|
||||
layout = layouts.layout(config.layout.type, config.layout);
|
||||
}
|
||||
|
||||
if (!config.alwaysIncludePattern) {
|
||||
config.alwaysIncludePattern = false;
|
||||
}
|
||||
|
||||
if (options && options.cwd && !config.absolute) {
|
||||
config.filename = path.join(options.cwd, config.filename);
|
||||
}
|
||||
|
||||
return appender(
|
||||
config.filename,
|
||||
config.pattern,
|
||||
layout,
|
||||
config,
|
||||
config.timezoneOffset
|
||||
);
|
||||
}
|
||||
|
||||
function shutdown(cb) {
|
||||
var completed = 0;
|
||||
var error;
|
||||
var complete = function(err) {
|
||||
error = error || err;
|
||||
completed++;
|
||||
if (completed >= openFiles.length) {
|
||||
cb(error);
|
||||
}
|
||||
};
|
||||
if (!openFiles.length) {
|
||||
return cb();
|
||||
}
|
||||
openFiles.forEach(function(file) {
|
||||
if (!file.write(eol, "utf-8")) {
|
||||
file.once('drain', function() {
|
||||
file.end(complete);
|
||||
});
|
||||
} else {
|
||||
file.end(complete);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
exports.appender = appender;
|
||||
exports.configure = configure;
|
||||
exports.shutdown = shutdown;
|
129
node_modules/log4js/lib/appenders/file.js
generated
vendored
Normal file
129
node_modules/log4js/lib/appenders/file.js
generated
vendored
Normal file
@ -0,0 +1,129 @@
|
||||
"use strict";
|
||||
var debug = require('debug')('log4js:file')
|
||||
, layouts = require('../layouts')
|
||||
, path = require('path')
|
||||
, fs = require('fs')
|
||||
, streams = require('streamroller')
|
||||
, os = require('os')
|
||||
, eol = os.EOL || '\n'
|
||||
, openFiles = []
|
||||
, levels = require('../levels');
|
||||
|
||||
//close open files on process exit.
|
||||
process.on('exit', function() {
|
||||
debug('Exit handler called.');
|
||||
openFiles.forEach(function (file) {
|
||||
file.end();
|
||||
});
|
||||
});
|
||||
|
||||
// On SIGHUP, close and reopen all files. This allows this appender to work with
|
||||
// logrotate. Note that if you are using logrotate, you should not set
|
||||
// `logSize`.
|
||||
process.on('SIGHUP', function() {
|
||||
debug('SIGHUP handler called.');
|
||||
openFiles.forEach(function(writer) {
|
||||
writer.closeTheStream(writer.openTheStream.bind(writer));
|
||||
});
|
||||
});
|
||||
|
||||
/**
|
||||
* File Appender writing the logs to a text file. Supports rolling of logs by size.
|
||||
*
|
||||
* @param file file log messages will be written to
|
||||
* @param layout a function that takes a logevent and returns a string
|
||||
* (defaults to basicLayout).
|
||||
* @param logSize - the maximum size (in bytes) for a log file,
|
||||
* if not provided then logs won't be rotated.
|
||||
* @param numBackups - the number of log files to keep after logSize
|
||||
* has been reached (default 5)
|
||||
* @param options - options to be passed to the underlying stream
|
||||
* @param timezoneOffset - optional timezone offset in minutes (default system local)
|
||||
*/
|
||||
function fileAppender (file, layout, logSize, numBackups, options, timezoneOffset) {
|
||||
file = path.normalize(file);
|
||||
layout = layout || layouts.basicLayout;
|
||||
numBackups = numBackups === undefined ? 5 : numBackups;
|
||||
//there has to be at least one backup if logSize has been specified
|
||||
numBackups = numBackups === 0 ? 1 : numBackups;
|
||||
|
||||
debug("Creating file appender (",
|
||||
file, ", ",
|
||||
logSize, ", ",
|
||||
numBackups, ", ",
|
||||
options, ", ",
|
||||
timezoneOffset, ")"
|
||||
);
|
||||
var writer = openTheStream(file, logSize, numBackups, options);
|
||||
|
||||
// push file to the stack of open handlers
|
||||
openFiles.push(writer);
|
||||
|
||||
return function(loggingEvent) {
|
||||
writer.write(layout(loggingEvent, timezoneOffset) + eol, "utf8");
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
function openTheStream(file, fileSize, numFiles, options) {
|
||||
var stream = new streams.RollingFileStream(
|
||||
file,
|
||||
fileSize,
|
||||
numFiles,
|
||||
options
|
||||
);
|
||||
stream.on("error", function (err) {
|
||||
console.error("log4js.fileAppender - Writing to file %s, error happened ", file, err);
|
||||
});
|
||||
return stream;
|
||||
}
|
||||
|
||||
|
||||
function configure(config, options) {
|
||||
var layout;
|
||||
if (config.layout) {
|
||||
layout = layouts.layout(config.layout.type, config.layout);
|
||||
}
|
||||
|
||||
if (options && options.cwd && !config.absolute) {
|
||||
config.filename = path.join(options.cwd, config.filename);
|
||||
}
|
||||
|
||||
return fileAppender(
|
||||
config.filename,
|
||||
layout,
|
||||
config.maxLogSize,
|
||||
config.backups,
|
||||
config,
|
||||
config.timezoneOffset
|
||||
);
|
||||
}
|
||||
|
||||
function shutdown(cb) {
|
||||
var completed = 0;
|
||||
var error;
|
||||
var complete = function(err) {
|
||||
error = error || err;
|
||||
completed++;
|
||||
if (completed >= openFiles.length) {
|
||||
cb(error);
|
||||
}
|
||||
};
|
||||
if (!openFiles.length) {
|
||||
return cb();
|
||||
}
|
||||
openFiles.forEach(function(file) {
|
||||
var stream = file;
|
||||
if (!stream.write(eol, "utf-8")) {
|
||||
stream.once('drain', function() {
|
||||
stream.end(complete);
|
||||
});
|
||||
} else {
|
||||
stream.end(complete);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
exports.appender = fileAppender;
|
||||
exports.configure = configure;
|
||||
exports.shutdown = shutdown;
|
194
node_modules/log4js/lib/appenders/fileSync.js
generated
vendored
Executable file
194
node_modules/log4js/lib/appenders/fileSync.js
generated
vendored
Executable file
@ -0,0 +1,194 @@
|
||||
"use strict";
|
||||
var debug = require('debug')('log4js:fileSync')
|
||||
, layouts = require('../layouts')
|
||||
, path = require('path')
|
||||
, fs = require('fs')
|
||||
, os = require('os')
|
||||
, eol = os.EOL || '\n'
|
||||
;
|
||||
|
||||
function RollingFileSync (filename, size, backups, options) {
|
||||
debug("In RollingFileStream");
|
||||
|
||||
function throwErrorIfArgumentsAreNotValid() {
|
||||
if (!filename || !size || size <= 0) {
|
||||
throw new Error("You must specify a filename and file size");
|
||||
}
|
||||
}
|
||||
|
||||
throwErrorIfArgumentsAreNotValid();
|
||||
|
||||
this.filename = filename;
|
||||
this.size = size;
|
||||
this.backups = backups || 1;
|
||||
this.options = options || { encoding: 'utf8', mode: parseInt('0644', 8), flags: 'a' };
|
||||
this.currentSize = 0;
|
||||
|
||||
function currentFileSize(file) {
|
||||
var fileSize = 0;
|
||||
try {
|
||||
fileSize = fs.statSync(file).size;
|
||||
} catch (e) {
|
||||
// file does not exist
|
||||
fs.appendFileSync(filename, '');
|
||||
}
|
||||
return fileSize;
|
||||
}
|
||||
|
||||
this.currentSize = currentFileSize(this.filename);
|
||||
}
|
||||
|
||||
RollingFileSync.prototype.shouldRoll = function() {
|
||||
debug("should roll with current size %d, and max size %d", this.currentSize, this.size);
|
||||
return this.currentSize >= this.size;
|
||||
};
|
||||
|
||||
RollingFileSync.prototype.roll = function(filename) {
|
||||
var that = this,
|
||||
nameMatcher = new RegExp('^' + path.basename(filename));
|
||||
|
||||
function justTheseFiles (item) {
|
||||
return nameMatcher.test(item);
|
||||
}
|
||||
|
||||
function index(filename_) {
|
||||
return parseInt(filename_.substring((path.basename(filename) + '.').length), 10) || 0;
|
||||
}
|
||||
|
||||
function byIndex(a, b) {
|
||||
if (index(a) > index(b)) {
|
||||
return 1;
|
||||
} else if (index(a) < index(b) ) {
|
||||
return -1;
|
||||
} else {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
function increaseFileIndex (fileToRename) {
|
||||
var idx = index(fileToRename);
|
||||
debug('Index of ' + fileToRename + ' is ' + idx);
|
||||
if (idx < that.backups) {
|
||||
//on windows, you can get a EEXIST error if you rename a file to an existing file
|
||||
//so, we'll try to delete the file we're renaming to first
|
||||
try {
|
||||
fs.unlinkSync(filename + '.' + (idx+1));
|
||||
} catch(e) {
|
||||
//ignore err: if we could not delete, it's most likely that it doesn't exist
|
||||
}
|
||||
|
||||
debug('Renaming ' + fileToRename + ' -> ' + filename + '.' + (idx+1));
|
||||
fs.renameSync(path.join(path.dirname(filename), fileToRename), filename + '.' + (idx + 1));
|
||||
}
|
||||
}
|
||||
|
||||
function renameTheFiles() {
|
||||
//roll the backups (rename file.n to file.n+1, where n <= numBackups)
|
||||
debug("Renaming the old files");
|
||||
|
||||
var files = fs.readdirSync(path.dirname(filename));
|
||||
files.filter(justTheseFiles).sort(byIndex).reverse().forEach(increaseFileIndex);
|
||||
}
|
||||
|
||||
debug("Rolling, rolling, rolling");
|
||||
renameTheFiles();
|
||||
};
|
||||
|
||||
RollingFileSync.prototype.write = function(chunk, encoding) {
|
||||
var that = this;
|
||||
|
||||
|
||||
function writeTheChunk() {
|
||||
debug("writing the chunk to the file");
|
||||
that.currentSize += chunk.length;
|
||||
fs.appendFileSync(that.filename, chunk);
|
||||
}
|
||||
|
||||
debug("in write");
|
||||
|
||||
|
||||
if (this.shouldRoll()) {
|
||||
this.currentSize = 0;
|
||||
this.roll(this.filename);
|
||||
}
|
||||
|
||||
writeTheChunk();
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* File Appender writing the logs to a text file. Supports rolling of logs by size.
|
||||
*
|
||||
* @param file file log messages will be written to
|
||||
* @param layout a function that takes a logevent and returns a string
|
||||
* (defaults to basicLayout).
|
||||
* @param logSize - the maximum size (in bytes) for a log file,
|
||||
* if not provided then logs won't be rotated.
|
||||
* @param numBackups - the number of log files to keep after logSize
|
||||
* has been reached (default 5)
|
||||
* @param timezoneOffset - optional timezone offset in minutes
|
||||
* (default system local)
|
||||
*/
|
||||
function fileAppender (file, layout, logSize, numBackups, timezoneOffset) {
|
||||
debug("fileSync appender created");
|
||||
var bytesWritten = 0;
|
||||
file = path.normalize(file);
|
||||
layout = layout || layouts.basicLayout;
|
||||
numBackups = numBackups === undefined ? 5 : numBackups;
|
||||
//there has to be at least one backup if logSize has been specified
|
||||
numBackups = numBackups === 0 ? 1 : numBackups;
|
||||
|
||||
function openTheStream(file, fileSize, numFiles) {
|
||||
var stream;
|
||||
|
||||
if (fileSize) {
|
||||
stream = new RollingFileSync(
|
||||
file,
|
||||
fileSize,
|
||||
numFiles
|
||||
);
|
||||
} else {
|
||||
stream = (function(f) {
|
||||
// create file if it doesn't exist
|
||||
if (!fs.existsSync(f))
|
||||
fs.appendFileSync(f, '');
|
||||
|
||||
return {
|
||||
write: function(data) {
|
||||
fs.appendFileSync(f, data);
|
||||
}
|
||||
};
|
||||
})(file);
|
||||
}
|
||||
|
||||
return stream;
|
||||
}
|
||||
|
||||
var logFile = openTheStream(file, logSize, numBackups);
|
||||
|
||||
return function(loggingEvent) {
|
||||
logFile.write(layout(loggingEvent, timezoneOffset) + eol);
|
||||
};
|
||||
}
|
||||
|
||||
function configure(config, options) {
|
||||
var layout;
|
||||
if (config.layout) {
|
||||
layout = layouts.layout(config.layout.type, config.layout);
|
||||
}
|
||||
|
||||
if (options && options.cwd && !config.absolute) {
|
||||
config.filename = path.join(options.cwd, config.filename);
|
||||
}
|
||||
|
||||
return fileAppender(
|
||||
config.filename,
|
||||
layout,
|
||||
config.maxLogSize,
|
||||
config.backups,
|
||||
config.timezoneOffset
|
||||
);
|
||||
}
|
||||
|
||||
exports.appender = fileAppender;
|
||||
exports.configure = configure;
|
155
node_modules/log4js/lib/appenders/gelf.js
generated
vendored
Normal file
155
node_modules/log4js/lib/appenders/gelf.js
generated
vendored
Normal file
@ -0,0 +1,155 @@
|
||||
"use strict";
|
||||
var zlib = require('zlib');
|
||||
var layouts = require('../layouts');
|
||||
var levels = require('../levels');
|
||||
var dgram = require('dgram');
|
||||
var util = require('util');
|
||||
var debug = require('debug')('log4js:gelf');
|
||||
|
||||
var LOG_EMERG=0; // system is unusable
|
||||
var LOG_ALERT=1; // action must be taken immediately
|
||||
var LOG_CRIT=2; // critical conditions
|
||||
var LOG_ERR=3; // error conditions
|
||||
var LOG_ERROR=3; // because people WILL typo
|
||||
var LOG_WARNING=4; // warning conditions
|
||||
var LOG_NOTICE=5; // normal, but significant, condition
|
||||
var LOG_INFO=6; // informational message
|
||||
var LOG_DEBUG=7; // debug-level message
|
||||
|
||||
var levelMapping = {};
|
||||
levelMapping[levels.ALL] = LOG_DEBUG;
|
||||
levelMapping[levels.TRACE] = LOG_DEBUG;
|
||||
levelMapping[levels.DEBUG] = LOG_DEBUG;
|
||||
levelMapping[levels.INFO] = LOG_INFO;
|
||||
levelMapping[levels.WARN] = LOG_WARNING;
|
||||
levelMapping[levels.ERROR] = LOG_ERR;
|
||||
levelMapping[levels.FATAL] = LOG_CRIT;
|
||||
|
||||
var client;
|
||||
|
||||
/**
|
||||
* GELF appender that supports sending UDP packets to a GELF compatible server such as Graylog
|
||||
*
|
||||
* @param layout a function that takes a logevent and returns a string (defaults to none).
|
||||
* @param host - host to which to send logs (default:localhost)
|
||||
* @param port - port at which to send logs to (default:12201)
|
||||
* @param hostname - hostname of the current host (default:os hostname)
|
||||
* @param facility - facility to log to (default:nodejs-server)
|
||||
*/
|
||||
/* jshint maxstatements:21 */
|
||||
function gelfAppender (layout, host, port, hostname, facility) {
|
||||
var config, customFields;
|
||||
if (typeof(host) === 'object') {
|
||||
config = host;
|
||||
host = config.host;
|
||||
port = config.port;
|
||||
hostname = config.hostname;
|
||||
facility = config.facility;
|
||||
customFields = config.customFields;
|
||||
}
|
||||
|
||||
host = host || 'localhost';
|
||||
port = port || 12201;
|
||||
hostname = hostname || require('os').hostname();
|
||||
layout = layout || layouts.messagePassThroughLayout;
|
||||
|
||||
var defaultCustomFields = customFields || {};
|
||||
|
||||
if(facility) {
|
||||
defaultCustomFields._facility = facility;
|
||||
}
|
||||
|
||||
client = dgram.createSocket("udp4");
|
||||
|
||||
process.on('exit', function() {
|
||||
if (client) client.close();
|
||||
});
|
||||
|
||||
/**
|
||||
* Add custom fields (start with underscore )
|
||||
* - if the first object passed to the logger contains 'GELF' field,
|
||||
* copy the underscore fields to the message
|
||||
* @param loggingEvent
|
||||
* @param msg
|
||||
*/
|
||||
function addCustomFields(loggingEvent, msg){
|
||||
|
||||
/* append defaultCustomFields firsts */
|
||||
Object.keys(defaultCustomFields).forEach(function(key) {
|
||||
// skip _id field for graylog2, skip keys not starts with UNDERSCORE
|
||||
if (key.match(/^_/) && key !== "_id") {
|
||||
msg[key] = defaultCustomFields[key];
|
||||
}
|
||||
});
|
||||
|
||||
/* append custom fields per message */
|
||||
var data = loggingEvent.data;
|
||||
if (!Array.isArray(data) || data.length === 0) return;
|
||||
var firstData = data[0];
|
||||
|
||||
if (!firstData.GELF) return; // identify with GELF field defined
|
||||
// Remove the GELF key, some gelf supported logging systems drop the message with it
|
||||
delete firstData.GELF;
|
||||
Object.keys(firstData).forEach(function(key) {
|
||||
// skip _id field for graylog2, skip keys not starts with UNDERSCORE
|
||||
if (key.match(/^_/) || key !== "_id") {
|
||||
msg[key] = firstData[key];
|
||||
}
|
||||
});
|
||||
|
||||
/* the custom field object should be removed, so it will not be looged by the later appenders */
|
||||
loggingEvent.data.shift();
|
||||
}
|
||||
|
||||
function preparePacket(loggingEvent) {
|
||||
var msg = {};
|
||||
addCustomFields(loggingEvent, msg);
|
||||
msg.short_message = layout(loggingEvent);
|
||||
|
||||
msg.version="1.1";
|
||||
msg.timestamp = msg.timestamp || new Date().getTime() / 1000; // log should use millisecond
|
||||
msg.host = hostname;
|
||||
msg.level = levelMapping[loggingEvent.level || levels.DEBUG];
|
||||
return msg;
|
||||
}
|
||||
|
||||
function sendPacket(packet) {
|
||||
client.send(packet, 0, packet.length, port, host, function(err) {
|
||||
if (err) { console.error(err); }
|
||||
});
|
||||
}
|
||||
|
||||
return function(loggingEvent) {
|
||||
var message = preparePacket(loggingEvent);
|
||||
zlib.gzip(new Buffer(JSON.stringify(message)), function(err, packet) {
|
||||
if (err) {
|
||||
console.error(err.stack);
|
||||
} else {
|
||||
if (packet.length > 8192) {
|
||||
debug("Message packet length (" + packet.length + ") is larger than 8k. Not sending");
|
||||
} else {
|
||||
sendPacket(packet);
|
||||
}
|
||||
}
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
function configure(config) {
|
||||
var layout;
|
||||
if (config.layout) {
|
||||
layout = layouts.layout(config.layout.type, config.layout);
|
||||
}
|
||||
return gelfAppender(layout, config);
|
||||
}
|
||||
|
||||
function shutdown(cb) {
|
||||
if (client) {
|
||||
client.close(cb);
|
||||
client = null;
|
||||
}
|
||||
}
|
||||
|
||||
exports.appender = gelfAppender;
|
||||
exports.configure = configure;
|
||||
exports.shutdown = shutdown;
|
90
node_modules/log4js/lib/appenders/hipchat.js
generated
vendored
Normal file
90
node_modules/log4js/lib/appenders/hipchat.js
generated
vendored
Normal file
@ -0,0 +1,90 @@
|
||||
"use strict";
|
||||
|
||||
var hipchat = require('hipchat-notifier');
|
||||
var layouts = require('../layouts');
|
||||
|
||||
exports.name = 'hipchat';
|
||||
exports.appender = hipchatAppender;
|
||||
exports.configure = hipchatConfigure;
|
||||
|
||||
/**
|
||||
@invoke as
|
||||
|
||||
log4js.configure({
|
||||
"appenders": [
|
||||
{
|
||||
"type" : "hipchat",
|
||||
"hipchat_token": "< User token with Notification Privileges >",
|
||||
"hipchat_room": "< Room ID or Name >",
|
||||
// optionl
|
||||
"hipchat_from": "[ additional from label ]",
|
||||
"hipchat_notify": "[ notify boolean to bug people ]",
|
||||
"hipchat_host" : "api.hipchat.com"
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
var logger = log4js.getLogger("hipchat");
|
||||
logger.warn("Test Warn message");
|
||||
|
||||
@invoke
|
||||
*/
|
||||
|
||||
function hipchatNotifierResponseCallback(err, response, body){
|
||||
if(err) {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
function hipchatAppender(config) {
|
||||
|
||||
var notifier = hipchat.make(config.hipchat_room, config.hipchat_token);
|
||||
|
||||
// @lint W074 This function's cyclomatic complexity is too high. (10)
|
||||
return function(loggingEvent){
|
||||
|
||||
var notifierFn;
|
||||
|
||||
notifier.setRoom(config.hipchat_room);
|
||||
notifier.setFrom(config.hipchat_from || '');
|
||||
notifier.setNotify(config.hipchat_notify || false);
|
||||
|
||||
if(config.hipchat_host) {
|
||||
notifier.setHost(config.hipchat_host);
|
||||
}
|
||||
|
||||
switch (loggingEvent.level.toString()) {
|
||||
case "TRACE":
|
||||
case "DEBUG":
|
||||
notifierFn = "info";
|
||||
break;
|
||||
case "WARN":
|
||||
notifierFn = "warning";
|
||||
break;
|
||||
case "ERROR":
|
||||
case "FATAL":
|
||||
notifierFn = "failure";
|
||||
break;
|
||||
default:
|
||||
notifierFn = "success";
|
||||
}
|
||||
|
||||
// @TODO, re-work in timezoneOffset ?
|
||||
var layoutMessage = config.layout(loggingEvent);
|
||||
|
||||
// dispatch hipchat api request, do not return anything
|
||||
// [overide hipchatNotifierResponseCallback]
|
||||
notifier[notifierFn](layoutMessage, config.hipchat_response_callback ||
|
||||
hipchatNotifierResponseCallback);
|
||||
};
|
||||
}
|
||||
|
||||
function hipchatConfigure(config) {
|
||||
var layout;
|
||||
|
||||
if (!config.layout) {
|
||||
config.layout = layouts.messagePassThroughLayout;
|
||||
}
|
||||
|
||||
return hipchatAppender(config, layout);
|
||||
}
|
125
node_modules/log4js/lib/appenders/logFacesAppender.js
generated
vendored
Normal file
125
node_modules/log4js/lib/appenders/logFacesAppender.js
generated
vendored
Normal file
@ -0,0 +1,125 @@
|
||||
/**
|
||||
* logFaces appender sends JSON formatted log events to logFaces receivers.
|
||||
* There are two types of receivers supported - raw UDP sockets (for server side apps),
|
||||
* and HTTP (for client side apps). Depending on the usage, this appender
|
||||
* requires either of the two:
|
||||
*
|
||||
* For UDP require 'dgram', see 'https://nodejs.org/api/dgram.html'
|
||||
* For HTTP require 'axios', see 'https://www.npmjs.com/package/axios'
|
||||
*
|
||||
* Make sure your project have relevant dependancy installed before using this appender.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
var util = require('util');
|
||||
var context = {};
|
||||
|
||||
function datagram(config){
|
||||
var sock = require('dgram').createSocket('udp4');
|
||||
var host = config.remoteHost || "127.0.0.1";
|
||||
var port = config.port || 55201;
|
||||
|
||||
return function(event){
|
||||
var buff = new Buffer(JSON.stringify(event));
|
||||
sock.send(buff, 0, buff.length, port, host, function(err, bytes) {
|
||||
if(err){
|
||||
console.error("log4js.logFacesAppender failed to %s:%d, error: %s",
|
||||
host, port, err);
|
||||
}
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
function servlet(config){
|
||||
var axios = require('axios').create();
|
||||
axios.defaults.baseURL = config.url;
|
||||
axios.defaults.timeout = config.timeout || 5000;
|
||||
axios.defaults.headers = {'Content-Type': 'application/json'};
|
||||
axios.defaults.withCredentials = true;
|
||||
|
||||
return function(lfsEvent){
|
||||
axios.post("", lfsEvent)
|
||||
.then(function(response){
|
||||
if(response.status != 200){
|
||||
console.error("log4js.logFacesAppender post to %s failed: %d",
|
||||
config.url, response.status);
|
||||
}
|
||||
})
|
||||
.catch(function(response){
|
||||
console.error("log4js.logFacesAppender post to %s excepted: %s",
|
||||
config.url, response.status);
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* For UDP (node.js) use the following configuration params:
|
||||
* {
|
||||
* "type": "logFacesAppender", // must be present for instantiation
|
||||
* "application": "LFS-TEST", // name of the application (domain)
|
||||
* "remoteHost": "127.0.0.1", // logFaces server address (hostname)
|
||||
* "port": 55201 // UDP receiver listening port
|
||||
* }
|
||||
*
|
||||
* For HTTP (browsers or node.js) use the following configuration params:
|
||||
* {
|
||||
* "type": "logFacesAppender", // must be present for instantiation
|
||||
* "application": "LFS-TEST", // name of the application (domain)
|
||||
* "url": "http://lfs-server/logs", // logFaces receiver servlet URL
|
||||
* }
|
||||
*/
|
||||
function logFacesAppender(config) {
|
||||
var send = config.send;
|
||||
if(send === undefined){
|
||||
send = (config.url === undefined) ? datagram(config) : servlet(config);
|
||||
}
|
||||
|
||||
return function log(event) {
|
||||
// convert to logFaces compact json format
|
||||
var lfsEvent = {
|
||||
a: config.application || "", // application name
|
||||
t: event.startTime.getTime(), // time stamp
|
||||
p: event.level.levelStr, // level (priority)
|
||||
g: event.categoryName, // logger name
|
||||
m: format(event.data) // message text
|
||||
};
|
||||
|
||||
// add context variables if exist
|
||||
Object.keys(context).forEach(function(key) {
|
||||
lfsEvent['p_' + key] = context[key];
|
||||
});
|
||||
|
||||
// send to server
|
||||
send(lfsEvent);
|
||||
};
|
||||
}
|
||||
|
||||
function configure(config) {
|
||||
return logFacesAppender(config);
|
||||
}
|
||||
|
||||
function setContext(key, value){
|
||||
context[key] = value;
|
||||
}
|
||||
|
||||
/**
 * Renders the log arguments into a single string with util.format,
 * expanding Error objects so that their stack traces are included.
 */
function format(logData) {
  var args = Array.isArray(logData) ? logData : Array.prototype.slice.call(arguments);
  return util.format.apply(util, wrapErrorsWithInspect(args));
}
|
||||
|
||||
/**
 * Wraps every Error in the list with an object whose inspect() result
 * includes the stack trace — util.format would otherwise drop the stack.
 * Non-Error items (and Errors without a stack) pass through untouched.
 */
function wrapErrorsWithInspect(items) {
  return items.map(function(item) {
    if (!(item instanceof Error) || !item.stack) {
      return item;
    }
    return {
      inspect: function() {
        return util.format(item) + '\n' + item.stack;
      }
    };
  });
}
|
||||
|
||||
// Module API: appender factory, log4js configure hook, context setter.
exports.appender = logFacesAppender;
exports.configure = configure;
exports.setContext = setContext;
23
node_modules/log4js/lib/appenders/logLevelFilter.js
generated
vendored
Normal file
23
node_modules/log4js/lib/appenders/logLevelFilter.js
generated
vendored
Normal file
@ -0,0 +1,23 @@
|
||||
"use strict";
|
||||
var levels = require('../levels')
|
||||
, log4js = require('../log4js');
|
||||
|
||||
/**
 * Builds a filtering appender that forwards an event to `appender` only
 * when its level lies within [minLevelString, maxLevelString].
 * The upper bound defaults to FATAL when not supplied.
 */
function logLevelFilter (minLevelString, maxLevelString, appender) {
  var lower = levels.toLevel(minLevelString);
  var upper = levels.toLevel(maxLevelString, levels.FATAL);
  return function(logEvent) {
    var level = logEvent.level;
    if (level.isGreaterThanOrEqualTo(lower) && level.isLessThanOrEqualTo(upper)) {
      appender(logEvent);
    }
  };
}
|
||||
|
||||
/**
 * log4js glue: loads and instantiates the wrapped appender, then wraps it
 * in a level filter built from config.level / config.maxLevel.
 */
function configure(config, options) {
  log4js.loadAppender(config.appender.type);
  var wrapped = log4js.appenderMakers[config.appender.type](config.appender, options);
  return logLevelFilter(config.level, config.maxLevel, wrapped);
}

exports.appender = logLevelFilter;
exports.configure = configure;
|
114
node_modules/log4js/lib/appenders/loggly.js
generated
vendored
Normal file
114
node_modules/log4js/lib/appenders/loggly.js
generated
vendored
Normal file
@ -0,0 +1,114 @@
|
||||
'use strict';
|
||||
var layouts = require('../layouts')
|
||||
, loggly = require('loggly')
|
||||
, os = require('os')
|
||||
, passThrough = layouts.messagePassThroughLayout
|
||||
, openRequests = 0
|
||||
, shutdownCB;
|
||||
|
||||
/**
 * True for any non-null value of type 'object' or 'function'
 * (i.e. anything that can carry properties).
 */
function isAnyObject(value) {
  if (value === null) {
    return false;
  }
  var t = typeof value;
  return t === 'object' || t === 'function';
}
|
||||
|
||||
/**
 * Counts an object's own enumerable keys (inherited keys are skipped).
 */
function numKeys(o) {
  var count = 0;
  for (var key in o) {
    if (o.hasOwnProperty(key)) {
      count += 1;
    }
  }
  return count;
}
|
||||
|
||||
/**
 * Splits logging arguments into message data and loggly tags.
 *
 * Any argument of the exact shape `{ tags: [...] }` — a single-key object
 * whose `tags` property is an array — is treated as a tag carrier and its
 * tags are collected; every other argument is kept as message data.
 *
 * @param msgListArgs array-like of logging arguments.
 * @returns {{ deTaggedData: Array, additionalTags: Array }}
 */
function processTags(msgListArgs) {
  // Normalize the (possibly array-like) arguments into a real array.
  var msgList = (msgListArgs.length === 1 ? [msgListArgs[0]] : Array.apply(null, msgListArgs));

  return msgList.reduce(function (accum, element) {
    // Strict key-count comparison (was `== 1`): same result here, but
    // keeps the file free of coercing equality operators.
    if (isAnyObject(element) && Array.isArray(element.tags) && numKeys(element) === 1) {
      accum.additionalTags = accum.additionalTags.concat(element.tags);
    } else {
      accum.deTaggedData.push(element);
    }
    return accum;
  }, { deTaggedData: [], additionalTags: [] });
}
|
||||
|
||||
/**
 * Loggly Appender. Sends logging events to Loggly using node-loggly, optionally adding tags.
 *
 * This appender scans the msg from the logging event and pulls out any argument of the
 * shape `{ tags: [] }` so that it's possible to add tags in a normal logging call.
 *
 * For example:
 *
 * logger.info({ tags: ['my-tag-1', 'my-tag-2'] }, 'Some message', someObj, ...)
 *
 * And then this appender will remove the tags param and append it to the config.tags.
 *
 * @param config object with loggly configuration data
 * {
 *   token: 'your-really-long-input-token',
 *   subdomain: 'your-subdomain',
 *   tags: ['loggly-tag1', 'loggly-tag2', .., 'loggly-tagn']
 * }
 * @param layout a function that takes a logevent and returns a string (defaults to objectLayout).
 */
function logglyAppender(config, layout) {
  var client = loggly.createClient(config);
  var render = layout || passThrough;

  return function(loggingEvent) {
    var split = processTags(loggingEvent.data);

    // Strip tag-carrier arguments from the event before the layout sees it.
    loggingEvent.data = split.deTaggedData;

    var msg = render(loggingEvent);

    // Track in-flight requests so shutdown() can wait for them to finish.
    openRequests++;

    client.log({
      msg: msg,
      level: loggingEvent.level.levelStr,
      category: loggingEvent.categoryName,
      hostname: os.hostname().toString(),
    }, split.additionalTags, function (error) {
      if (error) {
        console.error("log4js.logglyAppender - error occurred: ", error);
      }

      openRequests--;

      // If shutdown is pending and this was the last request, run it now.
      if (shutdownCB && openRequests === 0) {
        shutdownCB();
        shutdownCB = undefined;
      }
    });
  };
}
|
||||
|
||||
/**
 * log4js glue: resolves the optional layout from config, then builds the
 * loggly appender.
 */
function configure(config) {
  var layout;
  if (config.layout) {
    layout = layouts.layout(config.layout.type, config.layout);
  }
  return logglyAppender(config, layout);
}
|
||||
|
||||
/**
 * Waits until every in-flight loggly request has completed, then calls cb.
 */
function shutdown (cb) {
  if (openRequests === 0) {
    cb();
  } else {
    // Deferred: invoked by the last completing request's callback.
    shutdownCB = cb;
  }
}

exports.name = 'loggly';
exports.appender = logglyAppender;
exports.configure = configure;
exports.shutdown = shutdown;
|
69
node_modules/log4js/lib/appenders/logstashUDP.js
generated
vendored
Normal file
69
node_modules/log4js/lib/appenders/logstashUDP.js
generated
vendored
Normal file
@ -0,0 +1,69 @@
|
||||
"use strict";
|
||||
var layouts = require('../layouts')
|
||||
, dgram = require('dgram')
|
||||
, util = require('util');
|
||||
|
||||
/**
 * logstash UDP appender: ships each logging event to a logstash server
 * as a single JSON datagram.
 *
 * @param config { host, port, logType|category, fields } appender options.
 * @param layout renders the event message (defaults to dummyLayout).
 */
function logstashUDP (config, layout) {
  var udp = dgram.createSocket('udp4');
  layout = layout || layouts.dummyLayout;
  if(!config.fields) {
    config.fields = {};
  }
  return function log(loggingEvent) {

    /*
      https://gist.github.com/jordansissel/2996677
      {
        "message"    => "hello world",
        "@version"   => "1",
        "@timestamp" => "2014-04-22T23:03:14.111Z",
        "type"       => "stdin",
        "host"       => "hello.local"
      }
      @timestamp is the ISO8601 high-precision timestamp for the event.
      @version is the version number of this json schema
      Every other field is valid and fine.
    */

    // Build per-event fields from the configured defaults plus any object
    // passed as the second logging argument. Copying into a fresh object
    // (instead of writing into config.fields, as before) fixes a leak
    // where one event's extra fields were attached to every later event.
    var fields = {};
    var k;
    for (k in config.fields) {
      if (config.fields.hasOwnProperty(k)) {
        fields[k] = config.fields[k];
      }
    }
    if (loggingEvent.data.length > 1) {
      var secondEvData = loggingEvent.data[1];
      for (k in secondEvData) {
        fields[k] = secondEvData[k];
      }
    }
    fields.level = loggingEvent.level.levelStr;
    fields.category = loggingEvent.categoryName;

    var logObject = {
      "@version" : "1",
      "@timestamp" : (new Date(loggingEvent.startTime)).toISOString(),
      "type" : config.logType ? config.logType : config.category,
      "message" : layout(loggingEvent),
      "fields" : fields
    };
    sendLog(udp, config.host, config.port, logObject);
  };
}
|
||||
|
||||
/**
 * Serializes logObject as JSON and sends it to host:port over UDP.
 * Send failures are reported to stderr (UDP offers no delivery guarantee).
 */
function sendLog(udp, host, port, logObject) {
  // Buffer.from replaces the deprecated (and unsafe) new Buffer(string).
  var buffer = Buffer.from(JSON.stringify(logObject));
  udp.send(buffer, 0, buffer.length, port, host, function(err) {
    if(err) {
      console.error(
        // %d, not the invalid %p, so the port number is actually substituted.
        "log4js.logstashUDP - %s:%d Error: %s", host, port, util.inspect(err)
      );
    }
  });
}
|
||||
|
||||
/**
 * log4js glue: resolves the optional layout and builds the UDP appender.
 */
function configure(config) {
  var layout;
  if (config.layout) {
    layout = layouts.layout(config.layout.type, config.layout);
  }
  return logstashUDP(config, layout);
}

exports.appender = logstashUDP;
exports.configure = configure;
|
43
node_modules/log4js/lib/appenders/mailgun.js
generated
vendored
Normal file
43
node_modules/log4js/lib/appenders/mailgun.js
generated
vendored
Normal file
@ -0,0 +1,43 @@
|
||||
"use strict";
|
||||
var layouts = require('../layouts');
|
||||
var layout;
|
||||
var config;
|
||||
var mailgun;
|
||||
|
||||
/**
 * Mailgun appender: emails each logging event via the mailgun-js API.
 * Message fields (from/to/subject) come from the appender config; the
 * body is the laid-out event text.
 *
 * @param _config mailgun + message configuration.
 * @param _layout event formatter (defaults to basicLayout).
 */
function mailgunAppender(_config, _layout) {

  config = _config;
  layout = _layout || layouts.basicLayout;

  return function (loggingEvent) {

    var data = {
      from: _config.from,
      to: _config.to,
      subject: _config.subject,
      text: layout(loggingEvent, config.timezoneOffset)
    };

    mailgun.messages().send(data, function (error, body) {
      // Truthy check (was `error !== null`): mailgun may report success
      // with `undefined`, which the old test mistakenly logged as an error.
      if (error) console.error("log4js.mailgunAppender - Error happened", error);
    });
  };
}
|
||||
|
||||
/**
 * log4js glue: captures config, resolves the optional layout, creates the
 * mailgun client from config.apikey / config.domain, then builds the appender.
 */
function configure(_config) {
  config = _config;

  if (_config.layout) {
    layout = layouts.layout(_config.layout.type, _config.layout);
  }

  mailgun = require('mailgun-js')({
    apiKey: _config.apikey,
    domain: _config.domain
  });

  return mailgunAppender(_config, layout);
}

exports.appender = mailgunAppender;
exports.configure = configure;
|
157
node_modules/log4js/lib/appenders/multiprocess.js
generated
vendored
Normal file
157
node_modules/log4js/lib/appenders/multiprocess.js
generated
vendored
Normal file
@ -0,0 +1,157 @@
|
||||
"use strict";
|
||||
var log4js = require('../log4js')
|
||||
, debug = require('debug')('log4js:multiprocess')
|
||||
, net = require('net')
|
||||
, END_MSG = '__LOG4JS__'
|
||||
, servers = [];
|
||||
|
||||
/**
 * Creates a server, listening on config.loggerPort, config.loggerHost.
 * Output goes to config.actualAppender (config.appender is used to
 * set up that appender).
 */
function logServer(config) {

  /**
   * Takes a utf-8 string, returns an object with
   * the correct log properties.
   */
  function deserializeLoggingEvent(clientSocket, msg) {
    var loggingEvent;
    try {
      loggingEvent = JSON.parse(msg);
      loggingEvent.startTime = new Date(loggingEvent.startTime);
      loggingEvent.level = log4js.levels.toLevel(loggingEvent.level.levelStr);
    } catch (e) {
      // JSON.parse failed: surface the raw payload as an ERROR event
      // instead of letting a malformed client crash the server.
      loggingEvent = {
        startTime: new Date(),
        categoryName: 'log4js',
        level: log4js.levels.ERROR,
        data: [ 'Unable to parse log:', msg ]
      };
    }

    // Record where the event came from.
    loggingEvent.remoteAddress = clientSocket.remoteAddress;
    loggingEvent.remotePort = clientSocket.remotePort;

    return loggingEvent;
  }

  var actualAppender = config.actualAppender;
  var server = net.createServer(function serverCreated(clientSocket) {
    clientSocket.setEncoding('utf8');
    var pending = '';

    function logTheMessage(msg) {
      if (pending.length > 0) {
        actualAppender(deserializeLoggingEvent(clientSocket, msg));
      }
    }

    function chunkReceived(chunk) {
      pending += chunk || '';
      var boundary = pending.indexOf(END_MSG);
      if (boundary > -1) {
        var event = pending.substring(0, boundary);
        logTheMessage(event);
        pending = pending.substring(event.length + END_MSG.length) || '';
        // One chunk may carry several delimited events; keep consuming
        // until no complete message remains.
        chunkReceived();
      }
    }

    clientSocket.on('data', chunkReceived);
    clientSocket.on('end', chunkReceived);
  });

  server.listen(config.loggerPort || 5000, config.loggerHost || 'localhost', function() {
    servers.push(server);
    // Allow the process to exit if this is the only socket still active.
    server.unref();
  });

  return actualAppender;
}
|
||||
|
||||
/**
 * Client-side (worker) appender: connects to the master log server and
 * streams serialized events, queueing them while the socket is down.
 */
function workerAppender(config) {
  var connected = false;
  var queue = [];
  var socket;

  createSocket();

  function createSocket() {
    socket = net.createConnection(config.loggerPort || 5000, config.loggerHost || 'localhost');
    socket.on('connect', function() {
      emptyBuffer();
      connected = true;
    });
    socket.on('timeout', socket.end.bind(socket));
    // No 'error' listener needed: 'close' fires after errors anyway, and
    // reconnecting there covers both cases.
    socket.on('close', createSocket);
  }

  function emptyBuffer() {
    var evt;
    while ((evt = queue.shift())) {
      write(evt);
    }
  }

  function write(loggingEvent) {
    // JSON.stringify(new Error('test')) returns {}, which is useless;
    // detect that case and serialize just the stack instead.
    if (loggingEvent && loggingEvent.stack && JSON.stringify(loggingEvent) === '{}') {
      loggingEvent = {stack : loggingEvent.stack};
    }
    socket.write(JSON.stringify(loggingEvent), 'utf8');
    socket.write(END_MSG, 'utf8');
  }

  return function log(loggingEvent) {
    if (connected) {
      write(loggingEvent);
    } else {
      queue.push(loggingEvent);
    }
  };
}
|
||||
|
||||
/**
 * Entry point: master processes run the TCP log server; worker processes
 * get the socket-writing appender.
 */
function createAppender(config) {
  return config.mode === 'master' ? logServer(config) : workerAppender(config);
}
|
||||
|
||||
/**
 * log4js glue: on the master, instantiate the wrapped appender the server
 * forwards events to; then build the master or worker appender.
 */
function configure(config, options) {
  if (config.appender && config.mode === 'master') {
    log4js.loadAppender(config.appender.type);
    config.actualAppender = log4js.appenderMakers[config.appender.type](config.appender, options);
  }
  return createAppender(config);
}
|
||||
|
||||
/**
 * Closes every listening log server and invokes done() once all are
 * closed. Fix: with zero open servers (the normal case in worker
 * processes) done() is now called immediately — previously the forEach
 * body never ran and done() was never invoked, hanging shutdown.
 */
function shutdown(done) {
  var toBeClosed = servers.length;
  debug("multiprocess shutdown with ", toBeClosed, " servers to close.");
  if (toBeClosed === 0) {
    done();
    return;
  }
  servers.forEach(function(server) {
    server.close(function() {
      debug("server closed.");
      toBeClosed--;
      if (toBeClosed < 1) {
        debug("all servers closed.");
        done();
      }
    });
  });
}

exports.appender = createAppender;
exports.configure = configure;
exports.shutdown = shutdown;
|
44
node_modules/log4js/lib/appenders/slack.js
generated
vendored
Normal file
44
node_modules/log4js/lib/appenders/slack.js
generated
vendored
Normal file
@ -0,0 +1,44 @@
|
||||
"use strict";
|
||||
var Slack = require('slack-node');
|
||||
var layouts = require('../layouts');
|
||||
var layout;
|
||||
|
||||
var slack, config;
|
||||
|
||||
/**
 * Slack appender: posts each logging event to a Slack channel via the
 * chat.postMessage API.
 *
 * @param _config { channel_id, icon_url, username, timezoneOffset }.
 * @param _layout event formatter (defaults to basicLayout).
 */
function slackAppender(_config, _layout) {

  layout = _layout || layouts.basicLayout;

  return function (loggingEvent) {

    var data = {
      channel_id: _config.channel_id,
      text: layout(loggingEvent, _config.timezoneOffset),
      icon_url: _config.icon_url,
      username: _config.username
    };

    slack.api('chat.postMessage', {
      channel: data.channel_id,
      text: data.text,
      icon_url: data.icon_url,
      username: data.username
    }, function (err, response) {
      // Report instead of `throw err`: a throw inside an async API
      // callback cannot be caught by any caller and would crash the
      // whole process over a failed log delivery.
      if (err) {
        console.error("log4js.slackAppender - error occurred: ", err);
      }
    });

  };
}
|
||||
|
||||
/**
 * log4js glue: resolves the optional layout, creates the Slack client
 * from config.token, and builds the appender.
 */
function configure(_config) {
  if (_config.layout) {
    layout = layouts.layout(_config.layout.type, _config.layout);
  }

  slack = new Slack(_config.token);

  return slackAppender(_config, layout);
}

exports.name = 'slack';
exports.appender = slackAppender;
exports.configure = configure;
|
152
node_modules/log4js/lib/appenders/smtp.js
generated
vendored
Normal file
152
node_modules/log4js/lib/appenders/smtp.js
generated
vendored
Normal file
@ -0,0 +1,152 @@
|
||||
"use strict";
|
||||
|
||||
var layouts = require("../layouts");
|
||||
var mailer = require("nodemailer");
|
||||
var os = require('os');
|
||||
|
||||
var logEventBuffer = [];
|
||||
var subjectLayout;
|
||||
var layout;
|
||||
|
||||
var unsentCount = 0;
|
||||
var shutdownTimeout;
|
||||
|
||||
var sendInterval;
|
||||
var sendTimer;
|
||||
|
||||
var config;
|
||||
|
||||
/**
 * Drains the buffered events into one email and sends it through a
 * freshly created nodemailer transport. unsentCount is decremented only
 * after the transport reports back, so shutdown() can wait on it.
 */
function sendBuffer() {
  if (logEventBuffer.length === 0) {
    return;
  }

  var transportOpts = getTransportOptions(config);
  var transport = mailer.createTransport(transportOpts);
  var firstEvent = logEventBuffer[0];
  var count = logEventBuffer.length;

  // Concatenate every buffered event into the mail body.
  var body = "";
  while (logEventBuffer.length > 0) {
    body += layout(logEventBuffer.shift(), config.timezoneOffset) + "\n";
  }

  var msg = {
    to: config.recipients,
    subject: config.subject || subjectLayout(firstEvent),
    headers: {"Hostname": os.hostname()}
  };

  if (true === config.attachment.enable) {
    // Ship the log as an attachment with a fixed placeholder body.
    msg[config.html ? "html" : "text"] = config.attachment.message;
    msg.attachments = [
      {
        filename: config.attachment.filename,
        contentType: 'text/x-log',
        content: body
      }
    ];
  } else {
    msg[config.html ? "html" : "text"] = body;
  }

  if (config.sender) {
    msg.from = config.sender;
  }

  transport.sendMail(msg, function (error) {
    if (error) {
      console.error("log4js.smtpAppender - Error happened", error);
    }
    transport.close();
    unsentCount -= count;
  });
}
|
||||
|
||||
/**
 * Builds the nodemailer transport options from the module config: either
 * the raw config.SMTP block, or a nodemailer-<plugin>-transport module
 * configured via config.transport. Returns null when neither is set.
 */
function getTransportOptions() {
  if (config.SMTP) {
    return config.SMTP;
  }
  if (config.transport) {
    var plugin = config.transport.plugin || 'smtp';
    // e.g. nodemailer-smtp-transport, nodemailer-ses-transport, ...
    var transporter = require('nodemailer-' + plugin + '-transport');
    return transporter(config.transport.options);
  }
  return null;
}
|
||||
|
||||
/**
 * Arms a one-shot timer that flushes the buffer after sendInterval ms;
 * a no-op while a flush is already pending.
 */
function scheduleSend() {
  if (sendTimer) {
    return;
  }
  sendTimer = setTimeout(function () {
    sendTimer = null;
    sendBuffer();
  }, sendInterval);
}
|
||||
|
||||
/**
 * SMTP Appender. Sends logging events using SMTP protocol.
 * It can either send an email on each event or group several
 * logging events gathered during specified interval.
 *
 * @param _config appender configuration data
 *    config.sendInterval time between log emails (in seconds), if 0
 *    then every event sends an email
 *    config.shutdownTimeout time to give up remaining emails (in seconds; defaults to 5).
 * @param _layout a function that takes a logevent and returns a string (defaults to basicLayout).
 */
function smtpAppender(_config, _layout) {
  config = _config;

  // Normalize attachment settings so sendBuffer can rely on them.
  config.attachment = config.attachment || {};
  config.attachment.enable = !!config.attachment.enable;
  config.attachment.message = config.attachment.message || "See logs as attachment";
  config.attachment.filename = config.attachment.filename || "default.log";

  layout = _layout || layouts.basicLayout;
  subjectLayout = layouts.messagePassThroughLayout;
  sendInterval = config.sendInterval * 1000 || 0;
  shutdownTimeout = ('shutdownTimeout' in config ? config.shutdownTimeout : 5) * 1000;

  return function (loggingEvent) {
    unsentCount++;
    logEventBuffer.push(loggingEvent);
    if (sendInterval > 0) {
      // Batched mode: flush on a timer.
      scheduleSend();
    } else {
      // Immediate mode: flush on every event.
      sendBuffer();
    }
  };
}
|
||||
|
||||
/**
 * log4js glue: captures config, resolves the optional layout, and builds
 * the SMTP appender.
 */
function configure(_config) {
  config = _config;
  if (_config.layout) {
    layout = layouts.layout(_config.layout.type, _config.layout);
  }
  return smtpAppender(_config, layout);
}
|
||||
|
||||
/**
 * Flushes any still-pending batch after shutdownTimeout, then polls every
 * 100ms until all in-flight emails are acknowledged before invoking cb.
 */
function shutdown(cb) {
  if (shutdownTimeout > 0) {
    setTimeout(function () {
      if (sendTimer)
        clearTimeout(sendTimer);
      sendBuffer();
    }, shutdownTimeout);
  }
  (function checkDone() {
    if (unsentCount > 0) {
      setTimeout(checkDone, 100);
    } else {
      cb();
    }
  })();
}

exports.name = "smtp";
exports.appender = smtpAppender;
exports.configure = configure;
exports.shutdown = shutdown;
21
node_modules/log4js/lib/appenders/stderr.js
generated
vendored
Normal file
21
node_modules/log4js/lib/appenders/stderr.js
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
"use strict";
|
||||
|
||||
var layouts = require('../layouts');
|
||||
|
||||
/**
 * Appender that writes each laid-out event, newline-terminated, to
 * stderr. Defaults to the coloured layout.
 */
function stderrAppender(layout, timezoneOffset) {
  var render = layout || layouts.colouredLayout;
  return function(loggingEvent) {
    process.stderr.write(render(loggingEvent, timezoneOffset) + '\n');
  };
}
|
||||
|
||||
/**
 * log4js glue: resolves the optional layout and timezone offset, then
 * builds the stderr appender.
 */
function configure(config) {
  var layout;
  if (config.layout) {
    layout = layouts.layout(config.layout.type, config.layout);
  }
  return stderrAppender(layout, config.timezoneOffset);
}

exports.appender = stderrAppender;
exports.configure = configure;
|
21
node_modules/log4js/lib/appenders/stdout.js
generated
vendored
Normal file
21
node_modules/log4js/lib/appenders/stdout.js
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
"use strict";
|
||||
|
||||
var layouts = require('../layouts');
|
||||
|
||||
/**
 * Appender that writes each laid-out event, newline-terminated, to
 * stdout. Defaults to the coloured layout.
 */
function stdoutAppender(layout, timezoneOffset) {
  var render = layout || layouts.colouredLayout;
  return function(loggingEvent) {
    process.stdout.write(render(loggingEvent, timezoneOffset) + '\n');
  };
}
|
||||
|
||||
/**
 * log4js glue: resolves the optional layout and timezone offset, then
 * builds the stdout appender.
 */
function configure(config) {
  var layout;
  if (config.layout) {
    layout = layouts.layout(config.layout.type, config.layout);
  }
  return stdoutAppender(layout, config.timezoneOffset);
}

exports.appender = stdoutAppender;
exports.configure = configure;
|
Reference in New Issue
Block a user