Pushing changes

2017-03-23 23:52:08 -05:00
parent 6075860b82
commit ac667ec74f
1465 changed files with 345149 additions and 3 deletions

20
node_modules/log4js/lib/appenders/categoryFilter.js generated vendored Normal file

@@ -0,0 +1,20 @@
"use strict";
var log4js = require('../log4js');
function categoryFilter (excludes, appender) {
if (typeof(excludes) === 'string') excludes = [excludes];
return function(logEvent) {
if (excludes.indexOf(logEvent.categoryName) === -1) {
appender(logEvent);
}
};
}
function configure(config, options) {
log4js.loadAppender(config.appender.type);
var appender = log4js.appenderMakers[config.appender.type](config.appender, options);
return categoryFilter(config.exclude, appender);
}
exports.appender = categoryFilter;
exports.configure = configure;
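
A minimal configuration sketch for this filter, based on the fields its configure() reads above; the category name and the wrapped console appender are illustrative:

var log4js = require('log4js');
log4js.configure({
  appenders: [
    {
      type: 'categoryFilter',
      exclude: ['noisy-category'],     // events from these categories are dropped
      appender: { type: 'console' }    // everything else is passed to the wrapped appender
    }
  ]
});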

153
node_modules/log4js/lib/appenders/clustered.js generated vendored Executable file

@@ -0,0 +1,153 @@
"use strict";
var cluster = require('cluster');
var log4js = require('../log4js');
/**
* Takes a loggingEvent object, returns string representation of it.
*/
function serializeLoggingEvent(loggingEvent) {
// JSON.stringify(new Error('test')) returns {}, which is not really useful for us.
// The following allows us to serialize errors correctly.
for (var i = 0; i < loggingEvent.data.length; i++) {
var item = loggingEvent.data[i];
// Validate that we really are in this case
if (item && item.stack && JSON.stringify(item) === '{}') {
loggingEvent.data[i] = {stack : item.stack};
}
}
return JSON.stringify(loggingEvent);
}
/**
* Takes a string, returns an object with
* the correct log properties.
*
* This method has been "borrowed" from the `multiprocess` appender
* by `nomiddlename`
* (https://github.com/nomiddlename/log4js-node/blob/master/lib/appenders/multiprocess.js)
*
* Apparently, node.js serializes everything to strings when using `process.send()`,
* so we need smart deserialization that will recreate log date and level for further
* processing by log4js internals.
*/
function deserializeLoggingEvent(loggingEventString) {
var loggingEvent;
try {
loggingEvent = JSON.parse(loggingEventString);
loggingEvent.startTime = new Date(loggingEvent.startTime);
loggingEvent.level = log4js.levels.toLevel(loggingEvent.level.levelStr);
// Unwrap serialized errors
for (var i = 0; i < loggingEvent.data.length; i++) {
var item = loggingEvent.data[i];
if (item && item.stack) {
loggingEvent.data[i] = item.stack;
}
}
} catch (e) {
// JSON.parse failed; just log the raw contents, it was probably a malformed message.
loggingEvent = {
startTime: new Date(),
categoryName: 'log4js',
level: log4js.levels.ERROR,
data: [ 'Unable to parse log:', loggingEventString ]
};
}
return loggingEvent;
}
/**
* Creates an appender.
*
* If the current process is a master (`cluster.isMaster`), then this will be a "master appender".
* Otherwise this will be a worker appender, that just sends loggingEvents to the master process.
*
* If you are using this method directly, make sure to provide it with `config.actualAppenders`
* array of actual appender instances.
*
* Or better use `configure(config, options)`
*/
function createAppender(config) {
if (cluster.isMaster) {
var masterAppender = function(loggingEvent) {
if (config.actualAppenders) {
var size = config.actualAppenders.length;
for(var i = 0; i < size; i++) {
if (
!config.appenders[i].category ||
config.appenders[i].category === loggingEvent.categoryName
) {
// Relying on the index is not a good practice but otherwise
// the change would have been bigger.
config.actualAppenders[i](loggingEvent);
}
}
}
};
// Listen for newly forked workers
cluster.on('fork', function(worker) {
worker.on('message', function(message) {
if (message.type && message.type === '::log-message') {
var loggingEvent = deserializeLoggingEvent(message.event);
// Adding PID metadata
loggingEvent.pid = worker.process.pid;
loggingEvent.cluster = {
master: process.pid,
worker: worker.process.pid,
workerId: worker.id
};
masterAppender(loggingEvent);
}
});
});
return masterAppender;
} else {
return function(loggingEvent) {
// If inside the worker process, then send the logger event to master.
if (cluster.isWorker) {
// console.log("worker " + cluster.worker.id + " is sending message");
process.send({ type: '::log-message', event: serializeLoggingEvent(loggingEvent)});
}
};
}
}
function configure(config, options) {
if (config.appenders && cluster.isMaster) {
var size = config.appenders.length;
config.actualAppenders = new Array(size);
for(var i = 0; i < size; i++) {
log4js.loadAppender(config.appenders[i].type);
config.actualAppenders[i] = log4js.appenderMakers[config.appenders[i].type](
config.appenders[i],
options
);
}
}
return createAppender(config);
}
exports.appender = createAppender;
exports.configure = configure;
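
A configuration sketch for the clustered appender, assuming the standard log4js.configure() entry point; configure() above builds config.actualAppenders from the appenders array, and the optional category field restricts an entry to a single logger category:

var log4js = require('log4js');
log4js.configure({
  appenders: [
    {
      type: 'clustered',
      appenders: [
        { type: 'console' },
        { type: 'file', filename: 'logs/worker.log', category: 'worker' } // only 'worker' events
      ]
    }
  ]
});
// Worker processes log as usual; their events are forwarded to the master via process.send().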

21
node_modules/log4js/lib/appenders/console.js generated vendored Normal file

@@ -0,0 +1,21 @@
"use strict";
var layouts = require('../layouts')
, consoleLog = console.log.bind(console);
function consoleAppender (layout, timezoneOffset) {
layout = layout || layouts.colouredLayout;
return function(loggingEvent) {
consoleLog(layout(loggingEvent, timezoneOffset));
};
}
function configure(config) {
var layout;
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
}
return consoleAppender(layout, config.timezoneOffset);
}
exports.appender = consoleAppender;
exports.configure = configure;

90
node_modules/log4js/lib/appenders/dateFile.js generated vendored Normal file

@@ -0,0 +1,90 @@
"use strict";
var streams = require('streamroller')
, layouts = require('../layouts')
, path = require('path')
, os = require('os')
, eol = os.EOL || '\n'
, openFiles = [];
//close open files on process exit.
process.on('exit', function() {
openFiles.forEach(function (file) {
file.end();
});
});
/**
* File appender that rolls files according to a date pattern.
* @filename base filename.
* @pattern the format that will be added to the end of filename when rolling,
* also used to check when to roll files - defaults to '.yyyy-MM-dd'
* @layout layout function for log messages - defaults to basicLayout
* @timezoneOffset optional timezone offset in minutes - defaults to system local
*/
function appender(filename, pattern, layout, options, timezoneOffset) {
layout = layout || layouts.basicLayout;
var logFile = new streams.DateRollingFileStream(
filename,
pattern,
options
);
openFiles.push(logFile);
return function(logEvent) {
logFile.write(layout(logEvent, timezoneOffset) + eol, "utf8");
};
}
function configure(config, options) {
var layout;
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
}
if (!config.alwaysIncludePattern) {
config.alwaysIncludePattern = false;
}
if (options && options.cwd && !config.absolute) {
config.filename = path.join(options.cwd, config.filename);
}
return appender(
config.filename,
config.pattern,
layout,
config,
config.timezoneOffset
);
}
function shutdown(cb) {
var completed = 0;
var error;
var complete = function(err) {
error = error || err;
completed++;
if (completed >= openFiles.length) {
cb(error);
}
};
if (!openFiles.length) {
return cb();
}
openFiles.forEach(function(file) {
if (!file.write(eol, "utf-8")) {
file.once('drain', function() {
file.end(complete);
});
} else {
file.end(complete);
}
});
}
exports.appender = appender;
exports.configure = configure;
exports.shutdown = shutdown;
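
A configuration sketch using the fields configure() reads above; the filename and pattern values are illustrative:

var log4js = require('log4js');
log4js.configure({
  appenders: [
    {
      type: 'dateFile',
      filename: 'logs/app.log',
      pattern: '.yyyy-MM-dd',        // appended to the filename when the file rolls
      alwaysIncludePattern: false,
      timezoneOffset: 0
    }
  ]
});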

129
node_modules/log4js/lib/appenders/file.js generated vendored Normal file

@@ -0,0 +1,129 @@
"use strict";
var debug = require('debug')('log4js:file')
, layouts = require('../layouts')
, path = require('path')
, fs = require('fs')
, streams = require('streamroller')
, os = require('os')
, eol = os.EOL || '\n'
, openFiles = []
, levels = require('../levels');
//close open files on process exit.
process.on('exit', function() {
debug('Exit handler called.');
openFiles.forEach(function (file) {
file.end();
});
});
// On SIGHUP, close and reopen all files. This allows this appender to work with
// logrotate. Note that if you are using logrotate, you should not set
// `logSize`.
process.on('SIGHUP', function() {
debug('SIGHUP handler called.');
openFiles.forEach(function(writer) {
writer.closeTheStream(writer.openTheStream.bind(writer));
});
});
/**
* File Appender writing the logs to a text file. Supports rolling of logs by size.
*
* @param file file log messages will be written to
* @param layout a function that takes a logevent and returns a string
* (defaults to basicLayout).
* @param logSize - the maximum size (in bytes) for a log file,
* if not provided then logs won't be rotated.
* @param numBackups - the number of log files to keep after logSize
* has been reached (default 5)
* @param options - options to be passed to the underlying stream
* @param timezoneOffset - optional timezone offset in minutes (default system local)
*/
function fileAppender (file, layout, logSize, numBackups, options, timezoneOffset) {
file = path.normalize(file);
layout = layout || layouts.basicLayout;
numBackups = numBackups === undefined ? 5 : numBackups;
//there has to be at least one backup if logSize has been specified
numBackups = numBackups === 0 ? 1 : numBackups;
debug("Creating file appender (",
file, ", ",
logSize, ", ",
numBackups, ", ",
options, ", ",
timezoneOffset, ")"
);
var writer = openTheStream(file, logSize, numBackups, options);
// push file to the stack of open handlers
openFiles.push(writer);
return function(loggingEvent) {
writer.write(layout(loggingEvent, timezoneOffset) + eol, "utf8");
};
}
function openTheStream(file, fileSize, numFiles, options) {
var stream = new streams.RollingFileStream(
file,
fileSize,
numFiles,
options
);
stream.on("error", function (err) {
console.error("log4js.fileAppender - Writing to file %s, error happened ", file, err);
});
return stream;
}
function configure(config, options) {
var layout;
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
}
if (options && options.cwd && !config.absolute) {
config.filename = path.join(options.cwd, config.filename);
}
return fileAppender(
config.filename,
layout,
config.maxLogSize,
config.backups,
config,
config.timezoneOffset
);
}
function shutdown(cb) {
var completed = 0;
var error;
var complete = function(err) {
error = error || err;
completed++;
if (completed >= openFiles.length) {
cb(error);
}
};
if (!openFiles.length) {
return cb();
}
openFiles.forEach(function(file) {
var stream = file;
if (!stream.write(eol, "utf-8")) {
stream.once('drain', function() {
stream.end(complete);
});
} else {
stream.end(complete);
}
});
}
exports.appender = fileAppender;
exports.configure = configure;
exports.shutdown = shutdown;
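
A configuration sketch using the fields configure() reads above; sizes and counts are illustrative:

var log4js = require('log4js');
log4js.configure({
  appenders: [
    {
      type: 'file',
      filename: 'logs/app.log',
      maxLogSize: 10485760,   // bytes; omit to disable size-based rolling (e.g. when using logrotate)
      backups: 3              // number of rolled files to keep
    }
  ]
});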

194
node_modules/log4js/lib/appenders/fileSync.js generated vendored Executable file

@@ -0,0 +1,194 @@
"use strict";
var debug = require('debug')('log4js:fileSync')
, layouts = require('../layouts')
, path = require('path')
, fs = require('fs')
, os = require('os')
, eol = os.EOL || '\n'
;
function RollingFileSync (filename, size, backups, options) {
debug("In RollingFileSync");
function throwErrorIfArgumentsAreNotValid() {
if (!filename || !size || size <= 0) {
throw new Error("You must specify a filename and file size");
}
}
throwErrorIfArgumentsAreNotValid();
this.filename = filename;
this.size = size;
this.backups = backups || 1;
this.options = options || { encoding: 'utf8', mode: parseInt('0644', 8), flags: 'a' };
this.currentSize = 0;
function currentFileSize(file) {
var fileSize = 0;
try {
fileSize = fs.statSync(file).size;
} catch (e) {
// file does not exist
fs.appendFileSync(filename, '');
}
return fileSize;
}
this.currentSize = currentFileSize(this.filename);
}
RollingFileSync.prototype.shouldRoll = function() {
debug("should roll with current size %d, and max size %d", this.currentSize, this.size);
return this.currentSize >= this.size;
};
RollingFileSync.prototype.roll = function(filename) {
var that = this,
nameMatcher = new RegExp('^' + path.basename(filename));
function justTheseFiles (item) {
return nameMatcher.test(item);
}
function index(filename_) {
return parseInt(filename_.substring((path.basename(filename) + '.').length), 10) || 0;
}
function byIndex(a, b) {
if (index(a) > index(b)) {
return 1;
} else if (index(a) < index(b) ) {
return -1;
} else {
return 0;
}
}
function increaseFileIndex (fileToRename) {
var idx = index(fileToRename);
debug('Index of ' + fileToRename + ' is ' + idx);
if (idx < that.backups) {
//on windows, you can get a EEXIST error if you rename a file to an existing file
//so, we'll try to delete the file we're renaming to first
try {
fs.unlinkSync(filename + '.' + (idx+1));
} catch(e) {
//ignore err: if we could not delete, it's most likely that it doesn't exist
}
debug('Renaming ' + fileToRename + ' -> ' + filename + '.' + (idx+1));
fs.renameSync(path.join(path.dirname(filename), fileToRename), filename + '.' + (idx + 1));
}
}
function renameTheFiles() {
//roll the backups (rename file.n to file.n+1, where n <= numBackups)
debug("Renaming the old files");
var files = fs.readdirSync(path.dirname(filename));
files.filter(justTheseFiles).sort(byIndex).reverse().forEach(increaseFileIndex);
}
debug("Rolling, rolling, rolling");
renameTheFiles();
};
RollingFileSync.prototype.write = function(chunk, encoding) {
var that = this;
function writeTheChunk() {
debug("writing the chunk to the file");
that.currentSize += chunk.length;
fs.appendFileSync(that.filename, chunk);
}
debug("in write");
if (this.shouldRoll()) {
this.currentSize = 0;
this.roll(this.filename);
}
writeTheChunk();
};
/**
* File Appender writing the logs to a text file. Supports rolling of logs by size.
*
* @param file file log messages will be written to
* @param layout a function that takes a logevent and returns a string
* (defaults to basicLayout).
* @param logSize - the maximum size (in bytes) for a log file,
* if not provided then logs won't be rotated.
* @param numBackups - the number of log files to keep after logSize
* has been reached (default 5)
* @param timezoneOffset - optional timezone offset in minutes
* (default system local)
*/
function fileAppender (file, layout, logSize, numBackups, timezoneOffset) {
debug("fileSync appender created");
var bytesWritten = 0;
file = path.normalize(file);
layout = layout || layouts.basicLayout;
numBackups = numBackups === undefined ? 5 : numBackups;
//there has to be at least one backup if logSize has been specified
numBackups = numBackups === 0 ? 1 : numBackups;
function openTheStream(file, fileSize, numFiles) {
var stream;
if (fileSize) {
stream = new RollingFileSync(
file,
fileSize,
numFiles
);
} else {
stream = (function(f) {
// create file if it doesn't exist
if (!fs.existsSync(f))
fs.appendFileSync(f, '');
return {
write: function(data) {
fs.appendFileSync(f, data);
}
};
})(file);
}
return stream;
}
var logFile = openTheStream(file, logSize, numBackups);
return function(loggingEvent) {
logFile.write(layout(loggingEvent, timezoneOffset) + eol);
};
}
function configure(config, options) {
var layout;
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
}
if (options && options.cwd && !config.absolute) {
config.filename = path.join(options.cwd, config.filename);
}
return fileAppender(
config.filename,
layout,
config.maxLogSize,
config.backups,
config.timezoneOffset
);
}
exports.appender = fileAppender;
exports.configure = configure;
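
The synchronous variant takes the same basic fields as the file appender but writes with fs.appendFileSync and has no SIGHUP handling; a brief sketch:

var log4js = require('log4js');
log4js.configure({
  appenders: [
    { type: 'fileSync', filename: 'logs/app-sync.log', maxLogSize: 1048576, backups: 2 }
  ]
});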

155
node_modules/log4js/lib/appenders/gelf.js generated vendored Normal file

@@ -0,0 +1,155 @@
"use strict";
var zlib = require('zlib');
var layouts = require('../layouts');
var levels = require('../levels');
var dgram = require('dgram');
var util = require('util');
var debug = require('debug')('log4js:gelf');
var LOG_EMERG=0; // system is unusable
var LOG_ALERT=1; // action must be taken immediately
var LOG_CRIT=2; // critical conditions
var LOG_ERR=3; // error conditions
var LOG_ERROR=3; // because people WILL typo
var LOG_WARNING=4; // warning conditions
var LOG_NOTICE=5; // normal, but significant, condition
var LOG_INFO=6; // informational message
var LOG_DEBUG=7; // debug-level message
var levelMapping = {};
levelMapping[levels.ALL] = LOG_DEBUG;
levelMapping[levels.TRACE] = LOG_DEBUG;
levelMapping[levels.DEBUG] = LOG_DEBUG;
levelMapping[levels.INFO] = LOG_INFO;
levelMapping[levels.WARN] = LOG_WARNING;
levelMapping[levels.ERROR] = LOG_ERR;
levelMapping[levels.FATAL] = LOG_CRIT;
var client;
/**
* GELF appender that supports sending UDP packets to a GELF compatible server such as Graylog
*
* @param layout a function that takes a logevent and returns a string (defaults to none).
* @param host - host to which to send logs (default:localhost)
* @param port - port at which to send logs to (default:12201)
* @param hostname - hostname of the current host (default:os hostname)
* @param facility - facility to log to (default:nodejs-server)
*/
/* jshint maxstatements:21 */
function gelfAppender (layout, host, port, hostname, facility) {
var config, customFields;
if (typeof(host) === 'object') {
config = host;
host = config.host;
port = config.port;
hostname = config.hostname;
facility = config.facility;
customFields = config.customFields;
}
host = host || 'localhost';
port = port || 12201;
hostname = hostname || require('os').hostname();
layout = layout || layouts.messagePassThroughLayout;
var defaultCustomFields = customFields || {};
if(facility) {
defaultCustomFields._facility = facility;
}
client = dgram.createSocket("udp4");
process.on('exit', function() {
if (client) client.close();
});
/**
* Add custom fields (start with underscore )
* - if the first object passed to the logger contains 'GELF' field,
* copy the underscore fields to the message
* @param loggingEvent
* @param msg
*/
function addCustomFields(loggingEvent, msg){
/* append defaultCustomFields first */
Object.keys(defaultCustomFields).forEach(function(key) {
// skip the _id field for graylog2, and skip keys that do not start with an underscore
if (key.match(/^_/) && key !== "_id") {
msg[key] = defaultCustomFields[key];
}
});
/* append custom fields per message */
var data = loggingEvent.data;
if (!Array.isArray(data) || data.length === 0) return;
var firstData = data[0];
if (!firstData.GELF) return; // identify with GELF field defined
// Remove the GELF key; some GELF-compatible logging systems drop messages that contain it
delete firstData.GELF;
Object.keys(firstData).forEach(function(key) {
// skip the _id field for graylog2, and skip keys that do not start with an underscore
if (key.match(/^_/) || key !== "_id") {
msg[key] = firstData[key];
}
});
/* the custom field object should be removed, so it will not be logged by later appenders */
loggingEvent.data.shift();
}
function preparePacket(loggingEvent) {
var msg = {};
addCustomFields(loggingEvent, msg);
msg.short_message = layout(loggingEvent);
msg.version="1.1";
msg.timestamp = msg.timestamp || new Date().getTime() / 1000; // GELF timestamps are seconds since the epoch
msg.host = hostname;
msg.level = levelMapping[loggingEvent.level || levels.DEBUG];
return msg;
}
function sendPacket(packet) {
client.send(packet, 0, packet.length, port, host, function(err) {
if (err) { console.error(err); }
});
}
return function(loggingEvent) {
var message = preparePacket(loggingEvent);
zlib.gzip(new Buffer(JSON.stringify(message)), function(err, packet) {
if (err) {
console.error(err.stack);
} else {
if (packet.length > 8192) {
debug("Message packet length (" + packet.length + ") is larger than 8k. Not sending");
} else {
sendPacket(packet);
}
}
});
};
}
function configure(config) {
var layout;
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
}
return gelfAppender(layout, config);
}
function shutdown(cb) {
if (client) {
client.close(cb);
client = null;
}
}
exports.appender = gelfAppender;
exports.configure = configure;
exports.shutdown = shutdown;
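
A configuration sketch; gelfAppender() above reads these fields when it is given a config object (host, port and the custom field are illustrative):

var log4js = require('log4js');
log4js.configure({
  appenders: [
    {
      type: 'gelf',
      host: 'graylog.example.com',
      port: 12201,
      facility: 'nodejs-server',
      customFields: { _environment: 'production' }   // default custom field keys must start with an underscore
    }
  ]
});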

90
node_modules/log4js/lib/appenders/hipchat.js generated vendored Normal file

@@ -0,0 +1,90 @@
"use strict";
var hipchat = require('hipchat-notifier');
var layouts = require('../layouts');
exports.name = 'hipchat';
exports.appender = hipchatAppender;
exports.configure = hipchatConfigure;
/**
@invoke as
log4js.configure({
"appenders": [
{
"type" : "hipchat",
"hipchat_token": "< User token with Notification Privileges >",
"hipchat_room": "< Room ID or Name >",
// optional
"hipchat_from": "[ additional from label ]",
"hipchat_notify": "[ notify boolean to bug people ]",
"hipchat_host" : "api.hipchat.com"
}
]
});
var logger = log4js.getLogger("hipchat");
logger.warn("Test Warn message");
@invoke
*/
function hipchatNotifierResponseCallback(err, response, body){
if(err) {
throw err;
}
}
function hipchatAppender(config) {
var notifier = hipchat.make(config.hipchat_room, config.hipchat_token);
// @lint W074 This function's cyclomatic complexity is too high. (10)
return function(loggingEvent){
var notifierFn;
notifier.setRoom(config.hipchat_room);
notifier.setFrom(config.hipchat_from || '');
notifier.setNotify(config.hipchat_notify || false);
if(config.hipchat_host) {
notifier.setHost(config.hipchat_host);
}
switch (loggingEvent.level.toString()) {
case "TRACE":
case "DEBUG":
notifierFn = "info";
break;
case "WARN":
notifierFn = "warning";
break;
case "ERROR":
case "FATAL":
notifierFn = "failure";
break;
default:
notifierFn = "success";
}
// @TODO, re-work in timezoneOffset ?
var layoutMessage = config.layout(loggingEvent);
// dispatch hipchat api request, do not return anything
// [override hipchatNotifierResponseCallback]
notifier[notifierFn](layoutMessage, config.hipchat_response_callback ||
hipchatNotifierResponseCallback);
};
}
function hipchatConfigure(config) {
var layout;
if (!config.layout) {
config.layout = layouts.messagePassThroughLayout;
}
return hipchatAppender(config, layout);
}

125
node_modules/log4js/lib/appenders/logFacesAppender.js generated vendored Normal file

@@ -0,0 +1,125 @@
/**
* logFaces appender sends JSON formatted log events to logFaces receivers.
* There are two types of receivers supported - raw UDP sockets (for server side apps),
* and HTTP (for client side apps). Depending on the usage, this appender
* requires either of the two:
*
* For UDP require 'dgram', see 'https://nodejs.org/api/dgram.html'
* For HTTP require 'axios', see 'https://www.npmjs.com/package/axios'
*
* Make sure your project has the relevant dependency installed before using this appender.
*/
"use strict";
var util = require('util');
var context = {};
function datagram(config){
var sock = require('dgram').createSocket('udp4');
var host = config.remoteHost || "127.0.0.1";
var port = config.port || 55201;
return function(event){
var buff = new Buffer(JSON.stringify(event));
sock.send(buff, 0, buff.length, port, host, function(err, bytes) {
if(err){
console.error("log4js.logFacesAppender failed to %s:%d, error: %s",
host, port, err);
}
});
};
}
function servlet(config){
var axios = require('axios').create();
axios.defaults.baseURL = config.url;
axios.defaults.timeout = config.timeout || 5000;
axios.defaults.headers = {'Content-Type': 'application/json'};
axios.defaults.withCredentials = true;
return function(lfsEvent){
axios.post("", lfsEvent)
.then(function(response){
if(response.status != 200){
console.error("log4js.logFacesAppender post to %s failed: %d",
config.url, response.status);
}
})
.catch(function(response){
console.error("log4js.logFacesAppender post to %s excepted: %s",
config.url, response.status);
});
};
}
/**
* For UDP (node.js) use the following configuration params:
* {
* "type": "logFacesAppender", // must be present for instantiation
* "application": "LFS-TEST", // name of the application (domain)
* "remoteHost": "127.0.0.1", // logFaces server address (hostname)
* "port": 55201 // UDP receiver listening port
* }
*
* For HTTP (browsers or node.js) use the following configuration params:
* {
* "type": "logFacesAppender", // must be present for instantiation
* "application": "LFS-TEST", // name of the application (domain)
* "url": "http://lfs-server/logs", // logFaces receiver servlet URL
* }
*/
function logFacesAppender(config) {
var send = config.send;
if(send === undefined){
send = (config.url === undefined) ? datagram(config) : servlet(config);
}
return function log(event) {
// convert to logFaces compact json format
var lfsEvent = {
a: config.application || "", // application name
t: event.startTime.getTime(), // time stamp
p: event.level.levelStr, // level (priority)
g: event.categoryName, // logger name
m: format(event.data) // message text
};
// add context variables if exist
Object.keys(context).forEach(function(key) {
lfsEvent['p_' + key] = context[key];
});
// send to server
send(lfsEvent);
};
}
function configure(config) {
return logFacesAppender(config);
}
function setContext(key, value){
context[key] = value;
}
function format(logData) {
var data = Array.isArray(logData) ?
logData : Array.prototype.slice.call(arguments);
return util.format.apply(util, wrapErrorsWithInspect(data));
}
function wrapErrorsWithInspect(items) {
return items.map(function(item) {
if ((item instanceof Error) && item.stack) {
return { inspect: function() {
return util.format(item) + '\n' + item.stack;
}};
} else {
return item;
}
});
}
exports.appender = logFacesAppender;
exports.configure = configure;
exports.setContext = setContext;

23
node_modules/log4js/lib/appenders/logLevelFilter.js generated vendored Normal file

@@ -0,0 +1,23 @@
"use strict";
var levels = require('../levels')
, log4js = require('../log4js');
function logLevelFilter (minLevelString, maxLevelString, appender) {
var minLevel = levels.toLevel(minLevelString);
var maxLevel = levels.toLevel(maxLevelString, levels.FATAL);
return function(logEvent) {
var eventLevel = logEvent.level;
if (eventLevel.isGreaterThanOrEqualTo(minLevel) && eventLevel.isLessThanOrEqualTo(maxLevel)) {
appender(logEvent);
}
};
}
function configure(config, options) {
log4js.loadAppender(config.appender.type);
var appender = log4js.appenderMakers[config.appender.type](config.appender, options);
return logLevelFilter(config.level, config.maxLevel, appender);
}
exports.appender = logLevelFilter;
exports.configure = configure;
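
A sketch wrapping another appender so that only WARN and ERROR events reach it, matching the level/maxLevel fields configure() reads above:

var log4js = require('log4js');
log4js.configure({
  appenders: [
    {
      type: 'logLevelFilter',
      level: 'WARN',                 // minimum level passed through
      maxLevel: 'ERROR',             // maximum level passed through (defaults to FATAL)
      appender: { type: 'stderr' }
    }
  ]
});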

114
node_modules/log4js/lib/appenders/loggly.js generated vendored Normal file

@@ -0,0 +1,114 @@
'use strict';
var layouts = require('../layouts')
, loggly = require('loggly')
, os = require('os')
, passThrough = layouts.messagePassThroughLayout
, openRequests = 0
, shutdownCB;
function isAnyObject(value) {
return value !== null && (typeof value === 'object' || typeof value === 'function');
}
function numKeys(o) {
var res = 0;
for (var k in o) {
if (o.hasOwnProperty(k)) res++;
}
return res;
}
/**
* @param msg - array of args for logging.
* @returns { deTaggedMsg: [], additionalTags: [] }
*/
function processTags(msgListArgs) {
var msgList = (msgListArgs.length === 1 ? [msgListArgs[0]] : Array.apply(null, msgListArgs));
return msgList.reduce(function (accum, element, currentIndex, array) {
if (isAnyObject(element) && Array.isArray(element.tags) && numKeys(element) == 1) {
accum.additionalTags = accum.additionalTags.concat(element.tags);
} else {
accum.deTaggedData.push(element);
}
return accum;
}, { deTaggedData: [], additionalTags: [] });
}
/**
* Loggly Appender. Sends logging events to Loggly using node-loggly, optionally adding tags.
*
* This appender will scan the msg from the logging event, and pull out any argument of the
* shape `{ tags: [] }` so that it's possible to add tags in a normal logging call.
*
* For example:
*
* logger.info({ tags: ['my-tag-1', 'my-tag-2'] }, 'Some message', someObj, ...)
*
* This appender then removes the tags argument and appends those tags to config.tags.
*
* @param config object with loggly configuration data
* {
* token: 'your-really-long-input-token',
* subdomain: 'your-subdomain',
* tags: ['loggly-tag1', 'loggly-tag2', .., 'loggly-tagn']
* }
* @param layout a function that takes a logevent and returns a string (defaults to objectLayout).
*/
function logglyAppender(config, layout) {
var client = loggly.createClient(config);
if(!layout) layout = passThrough;
return function(loggingEvent) {
var result = processTags(loggingEvent.data);
var deTaggedData = result.deTaggedData;
var additionalTags = result.additionalTags;
// Replace the data property with the deTaggedData
loggingEvent.data = deTaggedData;
var msg = layout(loggingEvent);
openRequests++;
client.log({
msg: msg,
level: loggingEvent.level.levelStr,
category: loggingEvent.categoryName,
hostname: os.hostname().toString(),
}, additionalTags, function (error, result) {
if (error) {
console.error("log4js.logglyAppender - error occurred: ", error);
}
openRequests--;
if (shutdownCB && openRequests === 0) {
shutdownCB();
shutdownCB = undefined;
}
});
};
}
function configure(config) {
var layout;
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
}
return logglyAppender(config, layout);
}
function shutdown (cb) {
if (openRequests === 0) {
cb();
} else {
shutdownCB = cb;
}
}
exports.name = 'loggly';
exports.appender = logglyAppender;
exports.configure = configure;
exports.shutdown = shutdown;

69
node_modules/log4js/lib/appenders/logstashUDP.js generated vendored Normal file

@@ -0,0 +1,69 @@
"use strict";
var layouts = require('../layouts')
, dgram = require('dgram')
, util = require('util');
function logstashUDP (config, layout) {
var udp = dgram.createSocket('udp4');
var type = config.logType ? config.logType : config.category;
layout = layout || layouts.dummyLayout;
if(!config.fields) {
config.fields = {};
}
return function log(loggingEvent) {
/*
https://gist.github.com/jordansissel/2996677
{
"message" => "hello world",
"@version" => "1",
"@timestamp" => "2014-04-22T23:03:14.111Z",
"type" => "stdin",
"host" => "hello.local"
}
@timestamp is the ISO8601 high-precision timestamp for the event.
@version is the version number of this json schema
Every other field is valid and fine.
*/
if (loggingEvent.data.length > 1) {
var secondEvData = loggingEvent.data[1];
for (var k in secondEvData) {
config.fields[k] = secondEvData[k];
}
}
config.fields.level = loggingEvent.level.levelStr;
config.fields.category = loggingEvent.categoryName;
var logObject = {
"@version" : "1",
"@timestamp" : (new Date(loggingEvent.startTime)).toISOString(),
"type" : config.logType ? config.logType : config.category,
"message" : layout(loggingEvent),
"fields" : config.fields
};
sendLog(udp, config.host, config.port, logObject);
};
}
function sendLog(udp, host, port, logObject) {
var buffer = new Buffer(JSON.stringify(logObject));
udp.send(buffer, 0, buffer.length, port, host, function(err, bytes) {
if(err) {
console.error(
"log4js.logstashUDP - %s:%p Error: %s", host, port, util.inspect(err)
);
}
});
}
function configure(config) {
var layout;
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
}
return logstashUDP(config, layout);
}
exports.appender = logstashUDP;
exports.configure = configure;
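
A configuration sketch with the fields logstashUDP() reads above (host, port and the extra field are illustrative):

var log4js = require('log4js');
log4js.configure({
  appenders: [
    {
      type: 'logstashUDP',
      host: '127.0.0.1',
      port: 5959,
      logType: 'myApp',                       // becomes the "type" field of the logstash event
      fields: { environment: 'production' }   // merged into the "fields" object of every event
    }
  ]
});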

43
node_modules/log4js/lib/appenders/mailgun.js generated vendored Normal file

@@ -0,0 +1,43 @@
"use strict";
var layouts = require('../layouts');
var layout;
var config;
var mailgun;
function mailgunAppender(_config, _layout) {
config = _config;
layout = _layout || layouts.basicLayout;
return function (loggingEvent) {
var data = {
from: _config.from,
to: _config.to,
subject: _config.subject,
text: layout(loggingEvent, config.timezoneOffset)
};
mailgun.messages().send(data, function (error, body) {
if (error !== null) console.error("log4js.mailgunAppender - Error happened", error);
});
};
}
function configure(_config) {
config = _config;
if (_config.layout) {
layout = layouts.layout(_config.layout.type, _config.layout);
}
mailgun = require('mailgun-js')({
apiKey: _config.apikey,
domain: _config.domain
});
return mailgunAppender(_config, layout);
}
exports.appender = mailgunAppender;
exports.configure = configure;
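
A configuration sketch with the fields configure() and mailgunAppender() read above (credentials and addresses are placeholders):

var log4js = require('log4js');
log4js.configure({
  appenders: [
    {
      type: 'mailgun',
      apikey: 'key-xxxxxxxx',
      domain: 'example.com',
      from: 'logs@example.com',
      to: 'ops@example.com',
      subject: 'Log event'
    }
  ]
});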

157
node_modules/log4js/lib/appenders/multiprocess.js generated vendored Normal file

@@ -0,0 +1,157 @@
"use strict";
var log4js = require('../log4js')
, debug = require('debug')('log4js:multiprocess')
, net = require('net')
, END_MSG = '__LOG4JS__'
, servers = [];
/**
* Creates a server, listening on config.loggerPort, config.loggerHost.
* Output goes to config.actualAppender (config.appender is used to
* set up that appender).
*/
function logServer(config) {
/**
* Takes a utf-8 string, returns an object with
* the correct log properties.
*/
function deserializeLoggingEvent(clientSocket, msg) {
var loggingEvent;
try {
loggingEvent = JSON.parse(msg);
loggingEvent.startTime = new Date(loggingEvent.startTime);
loggingEvent.level = log4js.levels.toLevel(loggingEvent.level.levelStr);
} catch (e) {
// JSON.parse failed; just log the raw contents, it was probably a malformed message.
loggingEvent = {
startTime: new Date(),
categoryName: 'log4js',
level: log4js.levels.ERROR,
data: [ 'Unable to parse log:', msg ]
};
}
loggingEvent.remoteAddress = clientSocket.remoteAddress;
loggingEvent.remotePort = clientSocket.remotePort;
return loggingEvent;
}
var actualAppender = config.actualAppender,
server = net.createServer(function serverCreated(clientSocket) {
clientSocket.setEncoding('utf8');
var logMessage = '';
function logTheMessage(msg) {
if (logMessage.length > 0) {
actualAppender(deserializeLoggingEvent(clientSocket, msg));
}
}
function chunkReceived(chunk) {
var event;
logMessage += chunk || '';
if (logMessage.indexOf(END_MSG) > -1) {
event = logMessage.substring(0, logMessage.indexOf(END_MSG));
logTheMessage(event);
logMessage = logMessage.substring(event.length + END_MSG.length) || '';
//check for more, maybe it was a big chunk
chunkReceived();
}
}
clientSocket.on('data', chunkReceived);
clientSocket.on('end', chunkReceived);
});
server.listen(config.loggerPort || 5000, config.loggerHost || 'localhost', function() {
servers.push(server);
//allow the process to exit, if this is the only socket active
server.unref();
});
return actualAppender;
}
function workerAppender(config) {
var canWrite = false,
buffer = [],
socket;
createSocket();
function createSocket() {
socket = net.createConnection(config.loggerPort || 5000, config.loggerHost || 'localhost');
socket.on('connect', function() {
emptyBuffer();
canWrite = true;
});
socket.on('timeout', socket.end.bind(socket));
//don't bother listening for 'error', 'close' gets called after that anyway
socket.on('close', createSocket);
}
function emptyBuffer() {
var evt;
while ((evt = buffer.shift())) {
write(evt);
}
}
function write(loggingEvent) {
// JSON.stringify(new Error('test')) returns {}, which is not really useful for us.
// The following allows us to serialize errors correctly.
// Validate that we really are in this case
if (loggingEvent && loggingEvent.stack && JSON.stringify(loggingEvent) === '{}') {
loggingEvent = {stack : loggingEvent.stack};
}
socket.write(JSON.stringify(loggingEvent), 'utf8');
socket.write(END_MSG, 'utf8');
}
return function log(loggingEvent) {
if (canWrite) {
write(loggingEvent);
} else {
buffer.push(loggingEvent);
}
};
}
function createAppender(config) {
if (config.mode === 'master') {
return logServer(config);
} else {
return workerAppender(config);
}
}
function configure(config, options) {
var actualAppender;
if (config.appender && config.mode === 'master') {
log4js.loadAppender(config.appender.type);
actualAppender = log4js.appenderMakers[config.appender.type](config.appender, options);
config.actualAppender = actualAppender;
}
return createAppender(config);
}
function shutdown(done) {
var toBeClosed = servers.length;
debug("multiprocess shutdown with ", toBeClosed, " servers to close.");
servers.forEach(function(server) {
server.close(function() {
debug("server closed.");
toBeClosed--;
if (toBeClosed < 1) {
debug("all servers closed.");
done();
}
});
});
}
exports.appender = createAppender;
exports.configure = configure;
exports.shutdown = shutdown;
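
A sketch of the two modes configure() distinguishes above; the port and filename are illustrative:

var log4js = require('log4js');
// In the master process: listen on a TCP port and write received events to a real appender.
log4js.configure({
  appenders: [
    {
      type: 'multiprocess',
      mode: 'master',
      loggerPort: 5000,
      loggerHost: 'localhost',
      appender: { type: 'file', filename: 'logs/all.log' }
    }
  ]
});
// In worker processes, use the same type with mode 'worker'; events are sent to the master over the socket:
// log4js.configure({ appenders: [ { type: 'multiprocess', mode: 'worker', loggerPort: 5000, loggerHost: 'localhost' } ] });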

44
node_modules/log4js/lib/appenders/slack.js generated vendored Normal file

@@ -0,0 +1,44 @@
"use strict";
var Slack = require('slack-node');
var layouts = require('../layouts');
var layout;
var slack, config;
function slackAppender(_config, _layout) {
layout = _layout || layouts.basicLayout;
return function (loggingEvent) {
var data = {
channel_id: _config.channel_id,
text: layout(loggingEvent, _config.timezoneOffset),
icon_url: _config.icon_url,
username: _config.username
};
slack.api('chat.postMessage', {
channel: data.channel_id,
text: data.text,
icon_url: data.icon_url,username: data.username}, function (err, response) {
if (err) { throw err; }
});
};
}
function configure(_config) {
if (_config.layout) {
layout = layouts.layout(_config.layout.type, _config.layout);
}
slack = new Slack(_config.token);
return slackAppender(_config, layout);
}
exports.name = 'slack';
exports.appender = slackAppender;
exports.configure = configure;
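
A configuration sketch with the fields slackAppender() and configure() read above (the token and channel are placeholders):

var log4js = require('log4js');
log4js.configure({
  appenders: [
    {
      type: 'slack',
      token: 'xoxb-your-token',
      channel_id: '#errors',
      username: 'log4js',
      icon_url: 'https://example.com/bot.png'
    }
  ]
});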

152
node_modules/log4js/lib/appenders/smtp.js generated vendored Normal file

@@ -0,0 +1,152 @@
"use strict";
var layouts = require("../layouts");
var mailer = require("nodemailer");
var os = require('os');
var logEventBuffer = [];
var subjectLayout;
var layout;
var unsentCount = 0;
var shutdownTimeout;
var sendInterval;
var sendTimer;
var config;
function sendBuffer() {
if (logEventBuffer.length > 0) {
var transportOpts = getTransportOptions(config);
var transport = mailer.createTransport(transportOpts);
var firstEvent = logEventBuffer[0];
var body = "";
var count = logEventBuffer.length;
while (logEventBuffer.length > 0) {
body += layout(logEventBuffer.shift(), config.timezoneOffset) + "\n";
}
var msg = {
to: config.recipients,
subject: config.subject || subjectLayout(firstEvent),
headers: {"Hostname": os.hostname()}
};
if (true === config.attachment.enable) {
msg[config.html ? "html" : "text"] = config.attachment.message;
msg.attachments = [
{
filename: config.attachment.filename,
contentType: 'text/x-log',
content: body
}
];
} else {
msg[config.html ? "html" : "text"] = body;
}
if (config.sender) {
msg.from = config.sender;
}
transport.sendMail(msg, function (error) {
if (error) {
console.error("log4js.smtpAppender - Error happened", error);
}
transport.close();
unsentCount -= count;
});
}
}
function getTransportOptions() {
var transportOpts = null;
if (config.SMTP) {
transportOpts = config.SMTP;
} else if (config.transport) {
var plugin = config.transport.plugin || 'smtp';
var transportModule = 'nodemailer-' + plugin + '-transport';
var transporter = require(transportModule);
transportOpts = transporter(config.transport.options);
}
return transportOpts;
}
function scheduleSend() {
if (!sendTimer) {
sendTimer = setTimeout(function () {
sendTimer = null;
sendBuffer();
}, sendInterval);
}
}
/**
* SMTP Appender. Sends logging events using SMTP protocol.
* It can either send an email on each event or group several
* logging events gathered during specified interval.
*
* @param _config appender configuration data
* config.sendInterval time between log emails (in seconds), if 0
* then every event sends an email
* config.shutdownTimeout time to give up remaining emails (in seconds; defaults to 5).
* @param _layout a function that takes a logevent and returns a string (defaults to basicLayout).
*/
function smtpAppender(_config, _layout) {
config = _config;
if (!config.attachment) {
config.attachment = {};
}
config.attachment.enable = !!config.attachment.enable;
config.attachment.message = config.attachment.message || "See logs as attachment";
config.attachment.filename = config.attachment.filename || "default.log";
layout = _layout || layouts.basicLayout;
subjectLayout = layouts.messagePassThroughLayout;
sendInterval = config.sendInterval * 1000 || 0;
shutdownTimeout = ('shutdownTimeout' in config ? config.shutdownTimeout : 5) * 1000;
return function (loggingEvent) {
unsentCount++;
logEventBuffer.push(loggingEvent);
if (sendInterval > 0) {
scheduleSend();
} else {
sendBuffer();
}
};
}
function configure(_config) {
config = _config;
if (_config.layout) {
layout = layouts.layout(_config.layout.type, _config.layout);
}
return smtpAppender(_config, layout);
}
function shutdown(cb) {
if (shutdownTimeout > 0) {
setTimeout(function () {
if (sendTimer)
clearTimeout(sendTimer);
sendBuffer();
}, shutdownTimeout);
}
(function checkDone() {
if (unsentCount > 0) {
setTimeout(checkDone, 100);
} else {
cb();
}
})();
}
exports.name = "smtp";
exports.appender = smtpAppender;
exports.configure = configure;
exports.shutdown = shutdown;
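
A configuration sketch with the fields smtpAppender() reads above; the SMTP transport settings are placeholders:

var log4js = require('log4js');
log4js.configure({
  appenders: [
    {
      type: 'smtp',
      recipients: 'ops@example.com',
      sender: 'logs@example.com',
      subject: 'Application log',
      sendInterval: 60,                              // seconds; 0 sends one email per event
      SMTP: { host: 'smtp.example.com', port: 25 }   // passed straight to nodemailer.createTransport()
    }
  ]
});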

21
node_modules/log4js/lib/appenders/stderr.js generated vendored Normal file

@@ -0,0 +1,21 @@
"use strict";
var layouts = require('../layouts');
function stderrAppender(layout, timezoneOffset) {
layout = layout || layouts.colouredLayout;
return function(loggingEvent) {
process.stderr.write(layout(loggingEvent, timezoneOffset) + '\n');
};
}
function configure(config) {
var layout;
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
}
return stderrAppender(layout, config.timezoneOffset);
}
exports.appender = stderrAppender;
exports.configure = configure;

21
node_modules/log4js/lib/appenders/stdout.js generated vendored Normal file

@@ -0,0 +1,21 @@
"use strict";
var layouts = require('../layouts');
function stdoutAppender(layout, timezoneOffset) {
layout = layout || layouts.colouredLayout;
return function(loggingEvent) {
process.stdout.write(layout(loggingEvent, timezoneOffset) + '\n');
};
}
function configure(config) {
var layout;
if (config.layout) {
layout = layouts.layout(config.layout.type, config.layout);
}
return stdoutAppender(layout, config.timezoneOffset);
}
exports.appender = stdoutAppender;
exports.configure = configure;

262
node_modules/log4js/lib/connect-logger.js generated vendored Executable file

@@ -0,0 +1,262 @@
"use strict";
var levels = require("./levels");
var DEFAULT_FORMAT = ':remote-addr - -' +
' ":method :url HTTP/:http-version"' +
' :status :content-length ":referrer"' +
' ":user-agent"';
/**
* Log requests with the given `options` or a `format` string.
*
* Options:
*
* - `format` Format string, see below for tokens
* - `level` A log4js levels instance. Supports also 'auto'
*
* Tokens:
*
* - `:req[header]` ex: `:req[Accept]`
* - `:res[header]` ex: `:res[Content-Length]`
* - `:http-version`
* - `:response-time`
* - `:remote-addr`
* - `:date`
* - `:method`
* - `:url`
* - `:referrer`
* - `:user-agent`
* - `:status`
*
* @param {String|Function|Object} format or options
* @return {Function}
* @api public
*/
function getLogger(logger4js, options) {
if ('object' == typeof options) {
options = options || {};
} else if (options) {
options = { format: options };
} else {
options = {};
}
var thislogger = logger4js
, level = levels.toLevel(options.level, levels.INFO)
, fmt = options.format || DEFAULT_FORMAT
, nolog = options.nolog ? createNoLogCondition(options.nolog) : null;
return function (req, res, next) {
// mount safety
if (req._logging) return next();
// nologs
if (nolog && nolog.test(req.originalUrl)) return next();
if (thislogger.isLevelEnabled(level) || options.level === 'auto') {
var start = new Date()
, statusCode
, writeHead = res.writeHead
, url = req.originalUrl;
// flag as logging
req._logging = true;
// proxy for statusCode.
res.writeHead = function(code, headers){
res.writeHead = writeHead;
res.writeHead(code, headers);
res.__statusCode = statusCode = code;
res.__headers = headers || {};
//status code response level handling
if(options.level === 'auto'){
level = levels.INFO;
if(code >= 300) level = levels.WARN;
if(code >= 400) level = levels.ERROR;
} else {
level = levels.toLevel(options.level, levels.INFO);
}
};
//hook on end request to emit the log entry of the HTTP request.
res.on('finish', function() {
res.responseTime = new Date() - start;
//status code response level handling
if(res.statusCode && options.level === 'auto'){
level = levels.INFO;
if(res.statusCode >= 300) level = levels.WARN;
if(res.statusCode >= 400) level = levels.ERROR;
}
if (thislogger.isLevelEnabled(level)) {
var combined_tokens = assemble_tokens(req, res, options.tokens || []);
if (typeof fmt === 'function') {
var line = fmt(req, res, function(str){ return format(str, combined_tokens); });
if (line) thislogger.log(level, line);
} else {
thislogger.log(level, format(fmt, combined_tokens));
}
}
});
}
//ensure next gets always called
next();
};
}
/**
* Adds custom {token, replacement} objects to defaults,
* overwriting the defaults if any tokens clash
*
* @param {IncomingMessage} req
* @param {ServerResponse} res
* @param {Array} custom_tokens
* [{ token: string-or-regexp, replacement: string-or-replace-function }]
* @return {Array}
*/
function assemble_tokens(req, res, custom_tokens) {
var array_unique_tokens = function(array) {
var a = array.concat();
for(var i=0; i<a.length; ++i) {
for(var j=i+1; j<a.length; ++j) {
if(a[i].token == a[j].token) { // not === because token can be regexp object
a.splice(j--, 1);
}
}
}
return a;
};
var default_tokens = [];
default_tokens.push({ token: ':url', replacement: getUrl(req) });
default_tokens.push({ token: ':protocol', replacement: req.protocol });
default_tokens.push({ token: ':hostname', replacement: req.hostname });
default_tokens.push({ token: ':method', replacement: req.method });
default_tokens.push({ token: ':status', replacement: res.__statusCode || res.statusCode });
default_tokens.push({ token: ':response-time', replacement: res.responseTime });
default_tokens.push({ token: ':date', replacement: new Date().toUTCString() });
default_tokens.push({
token: ':referrer',
replacement: req.headers.referer || req.headers.referrer || ''
});
default_tokens.push({
token: ':http-version',
replacement: req.httpVersionMajor + '.' + req.httpVersionMinor
});
default_tokens.push({
token: ':remote-addr',
replacement:
req.headers['x-forwarded-for'] ||
req.ip ||
req._remoteAddress ||
(req.socket &&
(req.socket.remoteAddress ||
(req.socket.socket && req.socket.socket.remoteAddress)
)
)
}
);
default_tokens.push({ token: ':user-agent', replacement: req.headers['user-agent'] });
default_tokens.push({
token: ':content-length',
replacement:
(res._headers && res._headers['content-length']) ||
(res.__headers && res.__headers['Content-Length']) ||
'-'
}
);
default_tokens.push({ token: /:req\[([^\]]+)\]/g, replacement: function(_, field) {
return req.headers[field.toLowerCase()];
} });
default_tokens.push({ token: /:res\[([^\]]+)\]/g, replacement: function(_, field) {
return res._headers ?
(res._headers[field.toLowerCase()] || res.__headers[field])
: (res.__headers && res.__headers[field]);
} });
return array_unique_tokens(custom_tokens.concat(default_tokens));
}
/**
* Return request url path,
* extracting it into a helper keeps the cyclomatic complexity of the
* assemble_tokens function low enough to pass the lint checks.
*
* @param {IncomingMessage} req
* @return {String}
* @api private
*/
function getUrl(req){
return req.originalUrl || req.url;
}
/**
* Return formatted log line.
*
* @param {String} str
* @param {IncomingMessage} req
* @param {ServerResponse} res
* @return {String}
* @api private
*/
function format(str, tokens) {
for (var i = 0; i < tokens.length; i++) {
str = str.replace(tokens[i].token, tokens[i].replacement);
}
return str;
}
/**
* Return RegExp Object about nolog
*
* @param {String} nolog
* @return {RegExp}
* @api private
*
* syntax
* 1. String
* 1.1 "\\.gif"
* NOT LOGGING http://example.com/hoge.gif and http://example.com/hoge.gif?fuga
* LOGGING http://example.com/hoge.agif
* 1.2 in "\\.gif|\\.jpg$"
* NOT LOGGING http://example.com/hoge.gif and
* http://example.com/hoge.gif?fuga and http://example.com/hoge.jpg?fuga
* LOGGING http://example.com/hoge.agif,
* http://example.com/hoge.ajpg and http://example.com/hoge.jpg?hoge
* 1.3 in "\\.(gif|jpe?g|png)$"
* NOT LOGGING http://example.com/hoge.gif and http://example.com/hoge.jpeg
* LOGGING http://example.com/hoge.gif?uid=2 and http://example.com/hoge.jpg?pid=3
* 2. RegExp
* 2.1 in /\.(gif|jpe?g|png)$/
* SAME AS 1.3
* 3. Array
* 3.1 ["\\.jpg$", "\\.png", "\\.gif"]
* SAME AS "\\.jpg|\\.png|\\.gif"
*/
function createNoLogCondition(nolog) {
var regexp = null;
if (nolog) {
if (nolog instanceof RegExp) {
regexp = nolog;
}
if (typeof nolog === 'string') {
regexp = new RegExp(nolog);
}
if (Array.isArray(nolog)) {
var regexpsAsStrings = nolog.map(
function convertToStrings(o) {
return o.source ? o.source : o;
}
);
regexp = new RegExp(regexpsAsStrings.join('|'));
}
}
return regexp;
}
exports.connectLogger = getLogger;
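
A usage sketch with Express (any connect-style framework works), assuming the main log4js module re-exports this function as log4js.connectLogger as it does in this release line; the format string and nolog pattern are illustrative:

var log4js = require('log4js');
var express = require('express');
var app = express();
var httpLogger = log4js.getLogger('http');
// 'auto' picks INFO/WARN/ERROR from the response status code, as implemented above.
app.use(log4js.connectLogger(httpLogger, {
  level: 'auto',
  format: ':method :url :status :response-time ms',
  nolog: '\\.(gif|jpe?g|png)$'     // skip logging for static image requests
}));
app.listen(3000);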

74
node_modules/log4js/lib/date_format.js generated vendored Normal file

@@ -0,0 +1,74 @@
"use strict";
exports.ISO8601_FORMAT = "yyyy-MM-dd hh:mm:ss.SSS";
exports.ISO8601_WITH_TZ_OFFSET_FORMAT = "yyyy-MM-ddThh:mm:ss.SSSO";
exports.DATETIME_FORMAT = "dd MM yyyy hh:mm:ss.SSS";
exports.ABSOLUTETIME_FORMAT = "hh:mm:ss.SSS";
function padWithZeros(vNumber, width) {
var numAsString = vNumber + "";
while (numAsString.length < width) {
numAsString = "0" + numAsString;
}
return numAsString;
}
function addZero(vNumber) {
return padWithZeros(vNumber, 2);
}
/**
* Formats the timezone offset
* Thanks to http://www.svendtofte.com/code/date_format/
* @private
*/
function offset(timezoneOffset) {
// Difference to Greenwich time (GMT) in hours
var os = Math.abs(timezoneOffset);
var h = String(Math.floor(os/60));
var m = String(os%60);
if (h.length == 1) {
h = "0" + h;
}
if (m.length == 1) {
m = "0" + m;
}
return timezoneOffset < 0 ? "+"+h+m : "-"+h+m;
}
exports.asString = function(/*format,*/ date, timezoneOffset) {
/*jshint -W071 */
var format = exports.ISO8601_FORMAT;
if (typeof(date) === "string") {
format = arguments[0];
date = arguments[1];
timezoneOffset = arguments[2];
}
// make the date independent of the system timezone by working with UTC
if (timezoneOffset === undefined) {
timezoneOffset = date.getTimezoneOffset();
}
date.setUTCMinutes(date.getUTCMinutes() - timezoneOffset);
var vDay = addZero(date.getUTCDate());
var vMonth = addZero(date.getUTCMonth()+1);
var vYearLong = addZero(date.getUTCFullYear());
var vYearShort = addZero(date.getUTCFullYear().toString().substring(2,4));
var vYear = (format.indexOf("yyyy") > -1 ? vYearLong : vYearShort);
var vHour = addZero(date.getUTCHours());
var vMinute = addZero(date.getUTCMinutes());
var vSecond = addZero(date.getUTCSeconds());
var vMillisecond = padWithZeros(date.getUTCMilliseconds(), 3);
var vTimeZone = offset(timezoneOffset);
date.setUTCMinutes(date.getUTCMinutes() + timezoneOffset);
var formatted = format
.replace(/dd/g, vDay)
.replace(/MM/g, vMonth)
.replace(/y{1,4}/g, vYear)
.replace(/hh/g, vHour)
.replace(/mm/g, vMinute)
.replace(/ss/g, vSecond)
.replace(/SSS/g, vMillisecond)
.replace(/O/g, vTimeZone);
return formatted;
};
/*jshint +W071 */
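
A small usage sketch of asString(); the first form uses the default ISO8601 format and the system timezone, the second supplies a format string and an explicit offset in minutes (the require path is illustrative):

var dateFormat = require('./date_format');   // from outside the package: require('log4js/lib/date_format')
var d = new Date(Date.UTC(2017, 2, 23, 23, 52, 8, 123));
console.log(dateFormat.asString(d));          // e.g. "2017-03-23 18:52:08.123" on a UTC-5 machine
console.log(dateFormat.asString(dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT, d, 300));
// "2017-03-23T18:52:08.123-0500"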

372
node_modules/log4js/lib/layouts.js generated vendored Normal file

@@ -0,0 +1,372 @@
"use strict";
var dateFormat = require('./date_format')
, os = require('os')
, eol = os.EOL || '\n'
, util = require('util')
, semver = require('semver')
, replacementRegExp = /%[sdj]/g
, layoutMakers = {
"messagePassThrough": function() { return messagePassThroughLayout; },
"basic": function() { return basicLayout; },
"colored": function() { return colouredLayout; },
"coloured": function() { return colouredLayout; },
"pattern": function (config) {
return patternLayout(config && config.pattern, config && config.tokens);
},
"dummy": function() { return dummyLayout; }
}
, colours = {
ALL: "grey",
TRACE: "blue",
DEBUG: "cyan",
INFO: "green",
WARN: "yellow",
ERROR: "red",
FATAL: "magenta",
OFF: "grey"
};
function wrapErrorsWithInspect(items) {
return items.map(function(item) {
if ((item instanceof Error) && item.stack) {
return { inspect: function() {
if (semver.satisfies(process.version, '>=6')) {
return util.format(item);
} else {
return util.format(item) + '\n' + item.stack;
}
} };
} else {
return item;
}
});
}
function formatLogData(logData) {
var data = logData;
if (!Array.isArray(data)) {
var numArgs = arguments.length;
data = new Array(numArgs);
for (var i = 0; i < numArgs; i++) {
data[i] = arguments[i];
}
}
return util.format.apply(util, wrapErrorsWithInspect(data));
}
var styles = {
//styles
'bold' : [1, 22],
'italic' : [3, 23],
'underline' : [4, 24],
'inverse' : [7, 27],
//grayscale
'white' : [37, 39],
'grey' : [90, 39],
'black' : [90, 39],
//colors
'blue' : [34, 39],
'cyan' : [36, 39],
'green' : [32, 39],
'magenta' : [35, 39],
'red' : [31, 39],
'yellow' : [33, 39]
};
function colorizeStart(style) {
return style ? '\x1B[' + styles[style][0] + 'm' : '';
}
function colorizeEnd(style) {
return style ? '\x1B[' + styles[style][1] + 'm' : '';
}
/**
* Taken from masylum's fork (https://github.com/masylum/log4js-node)
*/
function colorize (str, style) {
return colorizeStart(style) + str + colorizeEnd(style);
}
function timestampLevelAndCategory(loggingEvent, colour, timezoneOffest) {
var output = colorize(
formatLogData(
'[%s] [%s] %s - '
, dateFormat.asString(loggingEvent.startTime, timezoneOffest)
, loggingEvent.level
, loggingEvent.categoryName
)
, colour
);
return output;
}
/**
* BasicLayout is a simple layout for storing the logs. The logs are stored
* in following format:
* <pre>
* [startTime] [logLevel] categoryName - message\n
* </pre>
*
* @author Stephan Strittmatter
*/
function basicLayout (loggingEvent, timezoneOffset) {
return timestampLevelAndCategory(
loggingEvent,
undefined,
timezoneOffset
) + formatLogData(loggingEvent.data);
}
/**
* colouredLayout - taken from masylum's fork.
* same as basicLayout, but with colours.
*/
function colouredLayout (loggingEvent, timezoneOffset) {
return timestampLevelAndCategory(
loggingEvent,
colours[loggingEvent.level.toString()],
timezoneOffset
) + formatLogData(loggingEvent.data);
}
function messagePassThroughLayout (loggingEvent) {
return formatLogData(loggingEvent.data);
}
function dummyLayout(loggingEvent) {
return loggingEvent.data[0];
}
/**
* PatternLayout
* Format for specifiers is %[padding].[truncation][field]{[format]}
* e.g. %5.10p - left pad the log level by 5 characters, up to a max of 10
* Fields can be any of:
* - %r time in toLocaleTimeString format
* - %p log level
* - %c log category
* - %h hostname
* - %m log data
* - %d date in various formats
* - %% %
* - %n newline
* - %z pid
* - %x{<tokenname>} add dynamic tokens to your log. Tokens are specified in the tokens parameter
* You can use %[ and %] to define a colored block.
*
* Tokens are specified as simple key:value objects.
* The key represents the token name whereas the value can be a string or function
* which is called to extract the value to put in the log message. If token is not
* found, it doesn't replace the field.
*
* A sample token would be: { "pid" : function() { return process.pid; } }
*
* Takes a pattern string, array of tokens and returns a layout function.
* @param {String} Log format pattern String
* @param {object} map object of different tokens
* @param {number} timezone offset in minutes
* @return {Function}
* @author Stephan Strittmatter
* @author Jan Schmidle
*/
function patternLayout (pattern, tokens, timezoneOffset) {
// jshint maxstatements:22
var TTCC_CONVERSION_PATTERN = "%r %p %c - %m%n";
var regex = /%(-?[0-9]+)?(\.?[0-9]+)?([\[\]cdhmnprzxy%])(\{([^\}]+)\})?|([^%]+)/;
pattern = pattern || TTCC_CONVERSION_PATTERN;
function categoryName(loggingEvent, specifier) {
var loggerName = loggingEvent.categoryName;
if (specifier) {
var precision = parseInt(specifier, 10);
var loggerNameBits = loggerName.split(".");
if (precision < loggerNameBits.length) {
loggerName = loggerNameBits.slice(loggerNameBits.length - precision).join(".");
}
}
return loggerName;
}
function formatAsDate(loggingEvent, specifier) {
var format = dateFormat.ISO8601_FORMAT;
if (specifier) {
format = specifier;
// Pick up special cases
if (format == "ISO8601") {
format = dateFormat.ISO8601_FORMAT;
} else if (format == "ISO8601_WITH_TZ_OFFSET") {
format = dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT;
} else if (format == "ABSOLUTE") {
format = dateFormat.ABSOLUTETIME_FORMAT;
} else if (format == "DATE") {
format = dateFormat.DATETIME_FORMAT;
}
}
// Format the date
return dateFormat.asString(format, loggingEvent.startTime, timezoneOffset);
}
function hostname() {
return os.hostname().toString();
}
function formatMessage(loggingEvent) {
return formatLogData(loggingEvent.data);
}
function endOfLine() {
return eol;
}
function logLevel(loggingEvent) {
return loggingEvent.level.toString();
}
function startTime(loggingEvent) {
return dateFormat.asString('hh:mm:ss', loggingEvent.startTime, timezoneOffset);
}
function startColour(loggingEvent) {
return colorizeStart(colours[loggingEvent.level.toString()]);
}
function endColour(loggingEvent) {
return colorizeEnd(colours[loggingEvent.level.toString()]);
}
function percent() {
return '%';
}
function pid(loggingEvent) {
if (loggingEvent && loggingEvent.pid) {
return loggingEvent.pid;
} else {
return process.pid;
}
}
function clusterInfo(loggingEvent, specifier) {
if (loggingEvent.cluster && specifier) {
return specifier
.replace('%m', loggingEvent.cluster.master)
.replace('%w', loggingEvent.cluster.worker)
.replace('%i', loggingEvent.cluster.workerId);
} else if (loggingEvent.cluster) {
return loggingEvent.cluster.worker+'@'+loggingEvent.cluster.master;
} else {
return pid();
}
}
function userDefined(loggingEvent, specifier) {
if (typeof(tokens[specifier]) !== 'undefined') {
if (typeof(tokens[specifier]) === 'function') {
return tokens[specifier](loggingEvent);
} else {
return tokens[specifier];
}
}
return null;
}
var replacers = {
'c': categoryName,
'd': formatAsDate,
'h': hostname,
'm': formatMessage,
'n': endOfLine,
'p': logLevel,
'r': startTime,
'[': startColour,
']': endColour,
'y': clusterInfo,
'z': pid,
'%': percent,
'x': userDefined
};
function replaceToken(conversionCharacter, loggingEvent, specifier) {
return replacers[conversionCharacter](loggingEvent, specifier);
}
function truncate(truncation, toTruncate) {
var len;
if (truncation) {
len = parseInt(truncation.substr(1), 10);
return toTruncate.substring(0, len);
}
return toTruncate;
}
function pad(padding, toPad) {
var len;
if (padding) {
if (padding.charAt(0) == "-") {
len = parseInt(padding.substr(1), 10);
// Right pad with spaces
while (toPad.length < len) {
toPad += " ";
}
} else {
len = parseInt(padding, 10);
// Left pad with spaces
while (toPad.length < len) {
toPad = " " + toPad;
}
}
}
return toPad;
}
function truncateAndPad(toTruncAndPad, truncation, padding) {
var replacement = toTruncAndPad;
replacement = truncate(truncation, replacement);
replacement = pad(padding, replacement);
return replacement;
}
return function(loggingEvent) {
var formattedString = "";
var result;
var searchString = pattern;
while ((result = regex.exec(searchString))) {
var matchedString = result[0];
var padding = result[1];
var truncation = result[2];
var conversionCharacter = result[3];
var specifier = result[5];
var text = result[6];
// Check if the pattern matched was just normal text
if (text) {
formattedString += "" + text;
} else {
// Create a raw replacement string based on the conversion
// character and specifier
var replacement = replaceToken(conversionCharacter, loggingEvent, specifier);
formattedString += truncateAndPad(replacement, truncation, padding);
}
searchString = searchString.substr(result.index + result[0].length);
}
return formattedString;
};
}
module.exports = {
basicLayout: basicLayout,
messagePassThroughLayout: messagePassThroughLayout,
patternLayout: patternLayout,
colouredLayout: colouredLayout,
coloredLayout: colouredLayout,
dummyLayout: dummyLayout,
addLayout: function(name, serializerGenerator) {
layoutMakers[name] = serializerGenerator;
},
layout: function(name, config) {
return layoutMakers[name] && layoutMakers[name](config);
}
};
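// Illustrative usage sketch (not part of the original module): build a pattern
// layout with a custom token and a date specifier. The pattern string and the
// "pid" token below are examples only.
//
//   var layouts = require('log4js').layouts;
//   var layout = layouts.patternLayout(
//     '%d{ISO8601} [%5.5p] %c %x{pid} - %m%n',
//     { pid: function() { return process.pid; } }
//   );
//   // layout(loggingEvent) returns the formatted line for a logging event.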

66
node_modules/log4js/lib/levels.js generated vendored Normal file
View File

@ -0,0 +1,66 @@
"use strict";
function Level(level, levelStr) {
this.level = level;
this.levelStr = levelStr;
}
/**
 * Converts the given string (or Level) to the corresponding Level.
 * @param {String|Log4js.Level} sArg string value of a Level, or a Level instance
 * @param {Log4js.Level} defaultLevel default Level to return if sArg is missing or unrecognised
 * @return {Log4js.Level} the matching Level object
*/
function toLevel(sArg, defaultLevel) {
if (!sArg) {
return defaultLevel;
}
if (sArg instanceof Level) {
module.exports[sArg.toString()] = sArg;
return sArg;
}
if (typeof sArg === "string") {
return module.exports[sArg.toUpperCase()] || defaultLevel;
}
  return toLevel(sArg.toString(), defaultLevel);
}
Level.prototype.toString = function() {
return this.levelStr;
};
Level.prototype.isLessThanOrEqualTo = function(otherLevel) {
if (typeof otherLevel === "string") {
otherLevel = toLevel(otherLevel);
}
return this.level <= otherLevel.level;
};
Level.prototype.isGreaterThanOrEqualTo = function(otherLevel) {
if (typeof otherLevel === "string") {
otherLevel = toLevel(otherLevel);
}
return this.level >= otherLevel.level;
};
Level.prototype.isEqualTo = function(otherLevel) {
if (typeof otherLevel === "string") {
otherLevel = toLevel(otherLevel);
}
return this.level === otherLevel.level;
};
module.exports = {
ALL: new Level(Number.MIN_VALUE, "ALL"),
TRACE: new Level(5000, "TRACE"),
DEBUG: new Level(10000, "DEBUG"),
INFO: new Level(20000, "INFO"),
WARN: new Level(30000, "WARN"),
ERROR: new Level(40000, "ERROR"),
FATAL: new Level(50000, "FATAL"),
MARK: new Level(9007199254740992, "MARK"), // 2^53
OFF: new Level(Number.MAX_VALUE, "OFF"),
toLevel: toLevel,
Level: Level
};
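// Illustrative usage sketch (not part of the original module): convert strings
// to Level objects and compare them. The results in the comments follow from
// the definitions above.
//
//   var levels = require('log4js').levels;
//   var lvl = levels.toLevel('info', levels.DEBUG);   // => levels.INFO
//   lvl.isGreaterThanOrEqualTo('debug');              // => true
//   lvl.isLessThanOrEqualTo(levels.ERROR);            // => true
//   levels.toLevel('no-such-level', levels.WARN);     // => levels.WARN (the default)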

514
node_modules/log4js/lib/log4js.js generated vendored Normal file
View File

@ -0,0 +1,514 @@
"use strict";
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * @fileoverview log4js is a library for logging in JavaScript in a similar manner
 * to log4j for Java. The API should be nearly the same.
*
* <h3>Example:</h3>
* <pre>
* var logging = require('log4js');
* //add an appender that logs all messages to stdout.
* logging.addAppender(logging.consoleAppender());
* //add an appender that logs "some-category" to a file
* logging.addAppender(logging.fileAppender("file.log"), "some-category");
* //get a logger
* var log = logging.getLogger("some-category");
* log.setLevel(logging.levels.TRACE); //set the Level
*
* ...
*
* //call the log
* log.trace("trace me" );
* </pre>
*
 * NOTE: the authors below are the original browser-based log4js authors;
 * don't try to contact them about bugs in this version :)
* @version 1.0
* @author Stephan Strittmatter - http://jroller.com/page/stritti
* @author Seth Chisamore - http://www.chisamore.com
* @since 2005-05-20
* @static
* Website: http://log4js.berlios.de
*/
var events = require('events')
, fs = require('fs')
, path = require('path')
, util = require('util')
, layouts = require('./layouts')
, levels = require('./levels')
, loggerModule = require('./logger')
, LoggingEvent = loggerModule.LoggingEvent
, Logger = loggerModule.Logger
, ALL_CATEGORIES = '[all]'
, appenders = {}
, loggers = {}
, appenderMakers = {}
, appenderShutdowns = {}
, defaultConfig = {
appenders: [
{ type: "stdout" }
],
replaceConsole: false
};
function hasLogger(logger) {
return loggers.hasOwnProperty(logger);
}
levels.forName = function(levelStr, levelVal) {
var level;
if (typeof levelStr === "string" && typeof levelVal === "number") {
var levelUpper = levelStr.toUpperCase();
level = new levels.Level(levelVal, levelUpper);
loggerModule.addLevelMethods(level);
}
return level;
};
levels.getLevel = function(levelStr) {
  var level;
  if (typeof levelStr === "string") {
    level = levels.toLevel(levelStr);
  }
  return level;
};
function getBufferedLogger(categoryName) {
var base_logger = getLogger(categoryName);
var logger = {};
logger.temp = [];
logger.target = base_logger;
  logger.flush = function () {
    for (var i = 0; i < logger.temp.length; i++) {
      var log = logger.temp[i];
      logger.target[log.level](log.message);
    }
    // reset the buffer; deleting entries in place leaves holes that would break a later flush
    logger.temp = [];
  };
logger.trace = function (message) { logger.temp.push({level: 'trace', message: message}); };
logger.debug = function (message) { logger.temp.push({level: 'debug', message: message}); };
logger.info = function (message) { logger.temp.push({level: 'info', message: message}); };
logger.warn = function (message) { logger.temp.push({level: 'warn', message: message}); };
logger.error = function (message) { logger.temp.push({level: 'error', message: message}); };
logger.fatal = function (message) { logger.temp.push({level: 'fatal', message: message}); };
return logger;
}
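// Illustrative usage sketch (not part of the original source): buffer log calls
// until setup has finished, then flush them in order through the real logger.
// The category name 'startup' is an example only.
//
//   var log4js = require('log4js');
//   var buffered = log4js.getBufferedLogger('startup');
//   buffered.info('reading config');
//   buffered.warn('optional section missing');
//   // ... once appenders are configured ...
//   buffered.flush();   // replays both messages through the 'startup' logger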
function normalizeCategory (category) {
return category + '.';
}
function doesLevelEntryContainsLogger (levelCategory, loggerCategory) {
var normalizedLevelCategory = normalizeCategory(levelCategory);
var normalizedLoggerCategory = normalizeCategory(loggerCategory);
return normalizedLoggerCategory.substring(0, normalizedLevelCategory.length) == normalizedLevelCategory; //jshint ignore:line
}
function doesAppenderContainsLogger (appenderCategory, loggerCategory) {
var normalizedAppenderCategory = normalizeCategory(appenderCategory);
var normalizedLoggerCategory = normalizeCategory(loggerCategory);
return normalizedLoggerCategory.substring(0, normalizedAppenderCategory.length) == normalizedAppenderCategory; //jshint ignore:line
}
/**
* Get a logger instance. Instance is cached on categoryName level.
* @param {String} categoryName name of category to log to.
* @return {Logger} instance of logger for the category
* @static
*/
function getLogger (loggerCategoryName) {
// Use default logger if categoryName is not specified or invalid
if (typeof loggerCategoryName !== "string") {
loggerCategoryName = Logger.DEFAULT_CATEGORY;
}
if (!hasLogger(loggerCategoryName)) {
var level;
/* jshint -W073 */
// If there's a "levels" entry in the configuration
if (levels.config) {
// Goes through the categories in the levels configuration entry,
// starting with the "higher" ones.
var keys = Object.keys(levels.config).sort();
for (var idx = 0; idx < keys.length; idx++) {
var levelCategory = keys[idx];
if (doesLevelEntryContainsLogger(levelCategory, loggerCategoryName)) {
// level for the logger
level = levels.config[levelCategory];
}
}
}
/* jshint +W073 */
// Create the logger for this name if it doesn't already exist
loggers[loggerCategoryName] = new Logger(loggerCategoryName, level);
/* jshint -W083 */
var appenderList;
for(var appenderCategory in appenders) {
if (doesAppenderContainsLogger(appenderCategory, loggerCategoryName)) {
appenderList = appenders[appenderCategory];
appenderList.forEach(function(appender) {
loggers[loggerCategoryName].addListener("log", appender);
});
}
}
/* jshint +W083 */
if (appenders[ALL_CATEGORIES]) {
appenderList = appenders[ALL_CATEGORIES];
appenderList.forEach(function(appender) {
loggers[loggerCategoryName].addListener("log", appender);
});
}
}
return loggers[loggerCategoryName];
}
/**
* args are appender, optional shutdown function, then zero or more categories
*/
function addAppender () {
var args = Array.prototype.slice.call(arguments);
var appender = args.shift();
//check for a shutdown fn
if (args.length > 0 && typeof args[0] === 'function') {
appenderShutdowns[appender] = args.shift();
}
if (args.length === 0 || args[0] === undefined) {
args = [ ALL_CATEGORIES ];
}
//argument may already be an array
if (Array.isArray(args[0])) {
args = args[0];
}
args.forEach(function(appenderCategory) {
addAppenderToCategory(appender, appenderCategory);
if (appenderCategory === ALL_CATEGORIES) {
addAppenderToAllLoggers(appender);
} else {
for(var loggerCategory in loggers) {
if (doesAppenderContainsLogger(appenderCategory,loggerCategory)) {
loggers[loggerCategory].addListener("log", appender);
}
}
}
});
}
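// Illustrative usage sketch (not part of the original source): an appender is a
// function that receives logging events. The memoryAppender helper and the
// 'http' category below are hypothetical.
//
//   var log4js = require('log4js');
//   var captured = [];
//   function memoryAppender(loggingEvent) {
//     captured.push(loggingEvent);
//   }
//   log4js.addAppender(memoryAppender);           // attach to all categories
//   log4js.addAppender(memoryAppender, 'http');   // attach to 'http' and its sub-categories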
function addAppenderToAllLoggers(appender) {
for (var logger in loggers) {
if (hasLogger(logger)) {
loggers[logger].addListener("log", appender);
}
}
}
function addAppenderToCategory(appender, category) {
if (!appenders[category]) {
appenders[category] = [];
}
appenders[category].push(appender);
}
function clearAppenders () {
//if we're calling clearAppenders, we're probably getting ready to write
//so turn log writes back on, just in case this is after a shutdown
loggerModule.enableAllLogWrites();
appenders = {};
for (var logger in loggers) {
if (hasLogger(logger)) {
loggers[logger].removeAllListeners("log");
}
}
}
function configureAppenders(appenderList, options) {
clearAppenders();
if (appenderList) {
appenderList.forEach(function(appenderConfig) {
loadAppender(appenderConfig.type);
var appender;
appenderConfig.makers = appenderMakers;
try {
appender = appenderMakers[appenderConfig.type](appenderConfig, options);
addAppender(appender, appenderConfig.category);
} catch(e) {
throw new Error("log4js configuration problem for " + util.inspect(appenderConfig), e);
}
});
}
}
function configureLevels(_levels) {
levels.config = _levels; // Keep it so we can create loggers later using this cfg
if (_levels) {
var keys = Object.keys(levels.config).sort();
for (var idx in keys) {
var category = keys[idx];
if(category === ALL_CATEGORIES) {
setGlobalLogLevel(_levels[category]);
}
/* jshint -W073 */
for(var loggerCategory in loggers) {
if (doesLevelEntryContainsLogger(category, loggerCategory)) {
loggers[loggerCategory].setLevel(_levels[category]);
}
}
/* jshint +W073 */
}
}
}
function setGlobalLogLevel(level) {
Logger.prototype.level = levels.toLevel(level, levels.TRACE);
}
/**
* Get the default logger instance.
* @return {Logger} instance of default logger
* @static
*/
function getDefaultLogger () {
return getLogger(Logger.DEFAULT_CATEGORY);
}
var configState = {};
function loadConfigurationFile(filename) {
if (filename) {
return JSON.parse(fs.readFileSync(filename, "utf8"));
}
return undefined;
}
function configureOnceOff(config, options) {
if (config) {
try {
restoreConsole();
configureLevels(config.levels);
configureAppenders(config.appenders, options);
if (config.replaceConsole) {
replaceConsole();
}
} catch (e) {
throw new Error(
"Problem reading log4js config " + util.inspect(config) +
". Error was \"" + e.message + "\" (" + e.stack + ")"
);
}
}
}
function reloadConfiguration(options) {
var mtime = getMTime(configState.filename);
if (!mtime) return;
if (configState.lastMTime && (mtime.getTime() > configState.lastMTime.getTime())) {
configureOnceOff(loadConfigurationFile(configState.filename), options);
}
configState.lastMTime = mtime;
}
function getMTime(filename) {
var mtime;
try {
    mtime = fs.statSync(filename).mtime;
} catch (e) {
getLogger('log4js').warn('Failed to load configuration file ' + filename);
}
return mtime;
}
function initReloadConfiguration(filename, options) {
if (configState.timerId) {
clearInterval(configState.timerId);
delete configState.timerId;
}
configState.filename = filename;
configState.lastMTime = getMTime(filename);
configState.timerId = setInterval(reloadConfiguration, options.reloadSecs*1000, options);
}
function configure(configurationFileOrObject, options) {
var config = configurationFileOrObject;
config = config || process.env.LOG4JS_CONFIG;
options = options || {};
if (config === undefined || config === null || typeof(config) === 'string') {
if (options.reloadSecs) {
initReloadConfiguration(config, options);
}
config = loadConfigurationFile(config) || defaultConfig;
} else {
if (options.reloadSecs) {
getLogger('log4js').warn(
'Ignoring configuration reload parameter for "object" configuration.'
);
}
}
configureOnceOff(config, options);
}
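// Illustrative usage sketch (hypothetical configuration, not part of the original
// source): configure accepts a filename, a config object, or nothing (falling back
// to LOG4JS_CONFIG or the default stdout appender). The file appender settings and
// the category names below are examples only.
//
//   var log4js = require('log4js');
//   log4js.configure({
//     appenders: [
//       { type: 'stdout' },
//       { type: 'file', filename: 'app.log', category: 'app' }
//     ],
//     levels: { '[all]': 'INFO', 'app': 'DEBUG' },
//     replaceConsole: false
//   });
//   // or, with automatic reloading of a JSON config file:
//   // log4js.configure('log4js.json', { reloadSecs: 60 });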
var originalConsoleFunctions = {
log: console.log,
debug: console.debug,
info: console.info,
warn: console.warn,
error: console.error
};
function replaceConsole(logger) {
function replaceWith(fn) {
return function() {
fn.apply(logger, arguments);
};
}
logger = logger || getLogger("console");
['log','debug','info','warn','error'].forEach(function (item) {
console[item] = replaceWith(item === 'log' ? logger.info : logger[item]);
});
}
function restoreConsole() {
['log', 'debug', 'info', 'warn', 'error'].forEach(function (item) {
console[item] = originalConsoleFunctions[item];
});
}
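// Illustrative usage sketch (not part of the original source): route console
// output through a log4js logger and restore the originals afterwards.
//
//   var log4js = require('log4js');
//   log4js.replaceConsole(log4js.getLogger('console'));
//   console.log('hello');    // emitted as an INFO event on the 'console' category
//   log4js.restoreConsole(); // puts the original console functions back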
/**
 * Load an appender module based on the provided appender name or filepath. It will
 * first try to require the appender from the log4js "lib/appenders" directory; if
 * that fails, it will attempt to require the appender as a complete path.
*
* @param {string} appender The filepath for the appender.
* @returns {Object|null} The required appender or null if appender could not be loaded.
* @private
*/
function requireAppender(appender) {
var appenderModule;
try {
appenderModule = require('./appenders/' + appender);
} catch (e) {
appenderModule = require(appender);
}
return appenderModule;
}
/**
 * Load an appender, given the path of the appender module to load. If appenderModule is defined,
* it will be used in place of requiring the appender module.
*
* @param {string} appender The path to the appender module.
* @param {Object|void} [appenderModule] The pre-required appender module. When provided,
* instead of requiring the appender by its path, this object will be used.
* @returns {void}
* @private
*/
function loadAppender(appender, appenderModule) {
appenderModule = appenderModule || requireAppender(appender);
if (!appenderModule) {
throw new Error("Invalid log4js appender: " + util.inspect(appender));
}
module.exports.appenders[appender] = appenderModule.appender.bind(appenderModule);
if (appenderModule.shutdown) {
appenderShutdowns[appender] = appenderModule.shutdown.bind(appenderModule);
}
appenderMakers[appender] = appenderModule.configure.bind(appenderModule);
}
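// Illustrative usage sketch (hypothetical appender module, not part of the
// original source): a pre-required module can be registered directly, as long
// as it exposes `appender` and `configure` (and optionally `shutdown`).
//
//   var log4js = require('log4js');
//   var myAppenderModule = {
//     appender: function(config) {
//       return function(loggingEvent) { /* write the event somewhere */ };
//     },
//     configure: function(config, options) { return this.appender(config); }
//   };
//   log4js.loadAppender('my-appender', myAppenderModule);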
/**
* Shutdown all log appenders. This will first disable all writing to appenders
 * and then call the shutdown function of each appender.
 *
 * @param {Function} cb - The callback to be invoked once all appenders have
 * shut down. If an error occurs, the callback will be given the error object
 * as the first argument.
* @returns {void}
*/
function shutdown(cb) {
  // First, disable all writing to appenders. This ensures appenders can be
  // drained and are not kept busy by run-away log writes.
loggerModule.disableAllLogWrites();
//turn off config reloading
if (configState.timerId) {
clearInterval(configState.timerId);
}
// Call each of the shutdown functions in parallel
var completed = 0;
var error;
var shutdownFcts = [];
var complete = function(err) {
error = error || err;
completed++;
if (completed >= shutdownFcts.length) {
cb(error);
}
};
for (var category in appenderShutdowns) {
if (appenderShutdowns.hasOwnProperty(category)) {
shutdownFcts.push(appenderShutdowns[category]);
}
}
if (!shutdownFcts.length) {
return cb();
}
shutdownFcts.forEach(function(shutdownFct) { shutdownFct(complete); });
}
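// Illustrative usage sketch (not part of the original source): flush and close
// all appenders before the process exits.
//
//   var log4js = require('log4js');
//   log4js.shutdown(function(err) {
//     if (err) { console.error('log4js shutdown failed:', err); }
//     process.exit(err ? 1 : 0);
//   });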
module.exports = {
getBufferedLogger: getBufferedLogger,
getLogger: getLogger,
getDefaultLogger: getDefaultLogger,
hasLogger: hasLogger,
addAppender: addAppender,
loadAppender: loadAppender,
clearAppenders: clearAppenders,
configure: configure,
shutdown: shutdown,
replaceConsole: replaceConsole,
restoreConsole: restoreConsole,
levels: levels,
setGlobalLogLevel: setGlobalLogLevel,
layouts: layouts,
appenders: {},
appenderMakers: appenderMakers,
connectLogger: require('./connect-logger').connectLogger
};
//set ourselves up
configure();

119
node_modules/log4js/lib/logger.js generated vendored Normal file
View File

@ -0,0 +1,119 @@
"use strict";
var levels = require('./levels')
, util = require('util')
, events = require('events')
, DEFAULT_CATEGORY = '[default]';
var logWritesEnabled = true;
/**
* Models a logging event.
* @constructor
* @param {String} categoryName name of category
* @param {Log4js.Level} level level of message
* @param {Array} data objects to log
* @param {Log4js.Logger} logger the associated logger
* @author Seth Chisamore
*/
function LoggingEvent (categoryName, level, data, logger) {
this.startTime = new Date();
this.categoryName = categoryName;
this.data = data;
this.level = level;
this.logger = logger;
}
/**
* Logger to log messages.
* use {@see Log4js#getLogger(String)} to get an instance.
* @constructor
* @param name name of category to log to
* @author Stephan Strittmatter
*/
function Logger (name, level) {
this.category = name || DEFAULT_CATEGORY;
if (level) {
this.setLevel(level);
}
}
util.inherits(Logger, events.EventEmitter);
Logger.DEFAULT_CATEGORY = DEFAULT_CATEGORY;
Logger.prototype.level = levels.TRACE;
Logger.prototype.setLevel = function(level) {
this.level = levels.toLevel(level, this.level || levels.TRACE);
};
Logger.prototype.removeLevel = function() {
delete this.level;
};
Logger.prototype.log = function() {
var logLevel = levels.toLevel(arguments[0], levels.INFO);
if (!this.isLevelEnabled(logLevel)) {
return;
}
var numArgs = arguments.length - 1;
var args = new Array(numArgs);
for (var i = 0; i < numArgs; i++) {
args[i] = arguments[i + 1];
}
this._log(logLevel, args);
};
Logger.prototype.isLevelEnabled = function(otherLevel) {
return this.level.isLessThanOrEqualTo(otherLevel);
};
['Trace','Debug','Info','Warn','Error','Fatal', 'Mark'].forEach(addLevelMethods);
function addLevelMethods(level) {
level = levels.toLevel(level);
var levelStrLower = level.toString().toLowerCase();
var levelMethod = levelStrLower.replace(/_([a-z])/g, function(g) { return g[1].toUpperCase(); } );
var isLevelMethod = levelMethod[0].toUpperCase() + levelMethod.slice(1);
Logger.prototype['is'+isLevelMethod+'Enabled'] = function() {
return this.isLevelEnabled(level);
};
Logger.prototype[levelMethod] = function () {
if (logWritesEnabled && this.isLevelEnabled(level)) {
var numArgs = arguments.length;
var args = new Array(numArgs);
for (var i = 0; i < numArgs; i++) {
args[i] = arguments[i];
}
this._log(level, args);
}
};
}
Logger.prototype._log = function(level, data) {
var loggingEvent = new LoggingEvent(this.category, level, data, this);
this.emit('log', loggingEvent);
};
/**
* Disable all log writes.
* @returns {void}
*/
function disableAllLogWrites() {
logWritesEnabled = false;
}
/**
* Enable log writes.
* @returns {void}
*/
function enableAllLogWrites() {
logWritesEnabled = true;
}
exports.LoggingEvent = LoggingEvent;
exports.Logger = Logger;
exports.disableAllLogWrites = disableAllLogWrites;
exports.enableAllLogWrites = enableAllLogWrites;
exports.addLevelMethods = addLevelMethods;
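// Illustrative usage sketch (not part of the original module): a Logger is an
// EventEmitter that emits 'log' events carrying LoggingEvent objects; level
// methods such as debug/info are generated by addLevelMethods above. The
// require path below assumes the module is loaded from within the package.
//
//   var Logger = require('log4js/lib/logger').Logger;
//   var log = new Logger('example');
//   log.setLevel('DEBUG');
//   log.addListener('log', function(loggingEvent) {
//     console.log(loggingEvent.level.toString(), loggingEvent.data);
//   });
//   log.debug('something happened');   // listener prints: DEBUG [ 'something happened' ]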