Pushing changes
33  node_modules/log4js/test/tape/default-settings-test.js  (generated, vendored, normal file)
@@ -0,0 +1,33 @@
"use strict";
var test = require('tape')
, sandbox = require('sandboxed-module');

test('default settings', function(t) {
  var output = []
  , log4js = sandbox.require(
    '../../lib/log4js',
    {
      requires: {
        './appenders/stdout': {
          'name': 'stdout',
          'appender': function () {
            return function(evt) {
              output.push(evt);
            };
          },
          'configure': function (config) {
            return this.appender();
          }
        }
      }
    }
  )
  , logger = log4js.getLogger("default-settings");

  logger.info("This should go to stdout.");

  t.plan(2);
  t.equal(output.length, 1, "It should log to stdout.");
  t.equal(output[0].data[0], "This should go to stdout.", "It should log the message.");
  t.end();
});
37  node_modules/log4js/test/tape/file-sighup-test.js  (generated, vendored, normal file)
@@ -0,0 +1,37 @@
"use strict";
var test = require('tape')
, sandbox = require('sandboxed-module');

test('file appender SIGHUP', function(t) {
  var closeCalled = 0
  , openCalled = 0
  , appender = sandbox.require(
    '../../lib/appenders/file',
    {
      'requires': {
        'streamroller': {
          'RollingFileStream': function() {
            this.openTheStream = function() {
              openCalled++;
            };

            this.closeTheStream = function(cb) {
              closeCalled++;
              cb();
            };

            this.on = function() {};
          }
        }
      }
    }
  ).appender('sighup-test-file');

  process.kill(process.pid, 'SIGHUP');
  t.plan(2);
  setTimeout(function() {
    t.equal(openCalled, 1, 'open should be called once');
    t.equal(closeCalled, 1, 'close should be called once');
    t.end();
  }, 10);
});
30  node_modules/log4js/test/tape/multiprocess-shutdown-test.js  (generated, vendored, normal file)
@@ -0,0 +1,30 @@
"use strict";
var test = require('tape')
, log4js = require('../../lib/log4js')
, net = require('net');

test('multiprocess appender shutdown (master)', function(t) {
  log4js.configure({
    appenders: [
      {
        type: "multiprocess",
        mode: "master",
        loggerPort: 12345,
        appender: { type: "stdout" }
      }
    ]
  });

  t.timeoutAfter(1000, "shutdown did not happen within 1000ms");
  setTimeout(function() {
    log4js.shutdown(function() {
      var connection = net.connect({ port: 12345 }, function() {
        t.fail("connection should not still work");
        t.end();
      }).on('error', function(err) {
        t.ok(err, 'we got a connection error');
        t.end();
      });
    });
  }, 500);
});
33  node_modules/log4js/test/tape/reload-shutdown-test.js  (generated, vendored, normal file)
@@ -0,0 +1,33 @@
"use strict";
var test = require('tape')
, path = require('path')
, sandbox = require('sandboxed-module');

test('Reload configuration shutdown hook', function(t) {
  var timerId
  , log4js = sandbox.require(
    '../../lib/log4js',
    {
      globals: {
        clearInterval: function(id) {
          timerId = id;
        },
        setInterval: function(fn, time) {
          return "1234";
        }
      }
    }
  );

  log4js.configure(
    path.join(__dirname, 'test-config.json'),
    { reloadSecs: 30 }
  );

  t.plan(1);
  log4js.shutdown(function() {
    t.equal(timerId, "1234", "Shutdown should clear the reload timer");
    t.end();
  });

});
22  node_modules/log4js/test/tape/stderrAppender-test.js  (generated, vendored, normal file)
@@ -0,0 +1,22 @@
"use strict";
var test = require('tape')
, layouts = require('../../lib/layouts')
, sandbox = require('sandboxed-module');

test('stderr appender', function(t) {
  var output = []
  , appender = sandbox.require(
    '../../lib/appenders/stderr',
    {
      globals: {
        process: { stderr: { write : function(data) { output.push(data); } } }
      }
    }
  ).appender(layouts.messagePassThroughLayout);

  appender({ data: ["biscuits"] });
  t.plan(2);
  t.equal(output.length, 1, 'There should be one message.');
  t.equal(output[0], 'biscuits\n', 'The message should be biscuits.');
  t.end();
});
22  node_modules/log4js/test/tape/stdoutAppender-test.js  (generated, vendored, normal file)
@@ -0,0 +1,22 @@
"use strict";
var test = require('tape')
, layouts = require('../../lib/layouts')
, sandbox = require('sandboxed-module');

test('stdout appender', function(t) {
  var output = []
  , appender = sandbox.require(
    '../../lib/appenders/stdout',
    {
      globals: {
        process: { stdout: { write : function(data) { output.push(data); } } }
      }
    }
  ).appender(layouts.messagePassThroughLayout);

  appender({ data: ["cheese"] });
  t.plan(2);
  t.equal(output.length, 1, 'There should be one message.');
  t.equal(output[0], 'cheese\n', 'The message should be cheese.');
  t.end();
});
5  node_modules/log4js/test/tape/test-config.json  (generated, vendored, normal file)
@@ -0,0 +1,5 @@
{
  "appenders": [
    { "type": "stdout" }
  ]
}
84  node_modules/log4js/test/vows/categoryFilter-test.js  (generated, vendored, normal file)
@@ -0,0 +1,84 @@
'use strict';

var vows = require('vows')
, fs = require('fs')
, assert = require('assert')
, EOL = require('os').EOL || '\n';

function remove(filename) {
  try {
    fs.unlinkSync(filename);
  } catch (e) {
    //doesn't really matter if it failed
  }
}

vows.describe('log4js categoryFilter').addBatch({
  'appender': {
    topic: function() {

      var log4js = require('../../lib/log4js'), logEvents = [], webLogger, appLogger;
      log4js.clearAppenders();
      var appender = require('../../lib/appenders/categoryFilter')
        .appender(
          ['app'],
          function(evt) { logEvents.push(evt); }
        );
      log4js.addAppender(appender, ["app","web"]);

      webLogger = log4js.getLogger("web");
      appLogger = log4js.getLogger("app");

      webLogger.debug('This should get logged');
      appLogger.debug('This should not');
      webLogger.debug('Hello again');
      log4js.getLogger('db').debug('This shouldn\'t be included by the appender anyway');

      return logEvents;
    },
    'should only pass matching category' : function(logEvents) {
      assert.equal(logEvents.length, 2);
      assert.equal(logEvents[0].data[0], 'This should get logged');
      assert.equal(logEvents[1].data[0], 'Hello again');
    }
  },

  'configure': {
    topic: function() {
      var log4js = require('../../lib/log4js')
      , logger, weblogger;

      remove(__dirname + '/categoryFilter-web.log');
      remove(__dirname + '/categoryFilter-noweb.log');

      log4js.configure('test/vows/with-categoryFilter.json');
      logger = log4js.getLogger("app");
      weblogger = log4js.getLogger("web");

      logger.info('Loading app');
      logger.debug('Initialising indexes');
      weblogger.info('00:00:00 GET / 200');
      weblogger.warn('00:00:00 GET / 500');
      //wait for the file system to catch up
      setTimeout(this.callback, 500);
    },
    'tmp-tests.log': {
      topic: function() {
        fs.readFile(__dirname + '/categoryFilter-noweb.log', 'utf8', this.callback);
      },
      'should contain all log messages': function(contents) {
        var messages = contents.trim().split(EOL);
        assert.deepEqual(messages, ['Loading app','Initialising indexes']);
      }
    },
    'tmp-tests-web.log': {
      topic: function() {
        fs.readFile(__dirname + '/categoryFilter-web.log','utf8',this.callback);
      },
      'should contain only error and warning log messages': function(contents) {
        var messages = contents.trim().split(EOL);
        assert.deepEqual(messages, ['00:00:00 GET / 200','00:00:00 GET / 500']);
      }
    }
  }
}).export(module);
166  node_modules/log4js/test/vows/clusteredAppender-test.js  (generated, vendored, normal file)
@@ -0,0 +1,166 @@
|
||||
"use strict";
|
||||
var assert = require('assert');
|
||||
var vows = require('vows');
|
||||
var layouts = require('../../lib/layouts');
|
||||
var sandbox = require('sandboxed-module');
|
||||
var LoggingEvent = require('../../lib/logger').LoggingEvent;
|
||||
var cluster = require('cluster');
|
||||
|
||||
vows.describe('log4js cluster appender').addBatch({
|
||||
'when in master mode': {
|
||||
topic: function() {
|
||||
|
||||
var registeredClusterEvents = [];
|
||||
var loggingEvents = [];
|
||||
var onChildProcessForked;
|
||||
var onMasterReceiveChildMessage;
|
||||
|
||||
// Fake cluster module, so no real cluster listeners be really added
|
||||
var fakeCluster = {
|
||||
|
||||
on: function(event, callback) {
|
||||
registeredClusterEvents.push(event);
|
||||
onChildProcessForked = callback;
|
||||
},
|
||||
|
||||
isMaster: true,
|
||||
isWorker: false,
|
||||
|
||||
};
|
||||
var fakeWorker = {
|
||||
on: function(event, callback) {
|
||||
onMasterReceiveChildMessage = callback;
|
||||
},
|
||||
process: {
|
||||
pid: 123
|
||||
},
|
||||
id: 'workerid'
|
||||
};
|
||||
|
||||
var fakeActualAppender = function(loggingEvent) {
|
||||
loggingEvents.push(loggingEvent);
|
||||
};
|
||||
|
||||
// Load appender and fake modules in it
|
||||
var appenderModule = sandbox.require('../../lib/appenders/clustered', {
|
||||
requires: {
|
||||
'cluster': fakeCluster,
|
||||
}
|
||||
});
|
||||
|
||||
var masterAppender = appenderModule.appender({
|
||||
actualAppenders: [fakeActualAppender, fakeActualAppender, fakeActualAppender],
|
||||
appenders: [{}, {category: "test"}, {category: "wovs"}]
|
||||
});
|
||||
|
||||
// Actual test - log message using masterAppender
|
||||
masterAppender(new LoggingEvent('wovs', 'Info', ['masterAppender test']));
|
||||
|
||||
// Simulate a 'fork' event to register the master's message handler on our fake worker.
|
||||
onChildProcessForked(fakeWorker);
|
||||
// Simulate a cluster message received by the masterAppender.
|
||||
var simulatedLoggingEvent = new LoggingEvent(
|
||||
'wovs',
|
||||
'Error',
|
||||
[
|
||||
'message deserialization test',
|
||||
{stack: 'my wrapped stack'}
|
||||
]
|
||||
);
|
||||
onMasterReceiveChildMessage({
|
||||
type : '::log-message',
|
||||
event : JSON.stringify(simulatedLoggingEvent)
|
||||
});
|
||||
|
||||
var returnValue = {
|
||||
registeredClusterEvents: registeredClusterEvents,
|
||||
loggingEvents: loggingEvents,
|
||||
};
|
||||
|
||||
return returnValue;
|
||||
},
|
||||
|
||||
"should register 'fork' event listener on 'cluster'": function(topic) {
|
||||
assert.equal(topic.registeredClusterEvents[0], 'fork');
|
||||
},
|
||||
|
||||
"should log using actual appender": function(topic) {
|
||||
assert.equal(topic.loggingEvents.length, 4);
|
||||
assert.equal(topic.loggingEvents[0].data[0], 'masterAppender test');
|
||||
assert.equal(topic.loggingEvents[1].data[0], 'masterAppender test');
|
||||
assert.equal(topic.loggingEvents[2].data[0], 'message deserialization test');
|
||||
assert.equal(topic.loggingEvents[2].data[1], 'my wrapped stack');
|
||||
assert.equal(topic.loggingEvents[3].data[0], 'message deserialization test');
|
||||
assert.equal(topic.loggingEvents[3].data[1], 'my wrapped stack');
|
||||
},
|
||||
|
||||
},
|
||||
|
||||
'when in worker mode': {
|
||||
|
||||
topic: function() {
|
||||
|
||||
var registeredProcessEvents = [];
|
||||
|
||||
// Fake cluster module, to fake we're inside a worker process
|
||||
var fakeCluster = {
|
||||
|
||||
isMaster: false,
|
||||
isWorker: true,
|
||||
|
||||
};
|
||||
|
||||
var fakeProcess = {
|
||||
|
||||
send: function(data) {
|
||||
registeredProcessEvents.push(data);
|
||||
},
|
||||
|
||||
};
|
||||
|
||||
// Load appender and fake modules in it
|
||||
var appenderModule = sandbox.require('../../lib/appenders/clustered', {
|
||||
requires: {
|
||||
'cluster': fakeCluster,
|
||||
},
|
||||
globals: {
|
||||
'process': fakeProcess,
|
||||
}
|
||||
});
|
||||
|
||||
var workerAppender = appenderModule.appender();
|
||||
|
||||
// Actual test - log message using masterAppender
|
||||
workerAppender(new LoggingEvent('wovs', 'Info', ['workerAppender test']));
|
||||
workerAppender(new LoggingEvent('wovs', 'Info', [new Error('Error test')]));
|
||||
|
||||
var returnValue = {
|
||||
registeredProcessEvents: registeredProcessEvents,
|
||||
};
|
||||
|
||||
return returnValue;
|
||||
|
||||
},
|
||||
|
||||
"worker appender should call process.send" : function(topic) {
|
||||
assert.equal(topic.registeredProcessEvents[0].type, '::log-message');
|
||||
assert.equal(
|
||||
JSON.parse(topic.registeredProcessEvents[0].event).data[0],
|
||||
"workerAppender test"
|
||||
);
|
||||
},
|
||||
|
||||
"worker should serialize an Error correctly" : function(topic) {
|
||||
assert.equal(topic.registeredProcessEvents[1].type, '::log-message');
|
||||
assert(JSON.parse(topic.registeredProcessEvents[1].event).data[0].stack);
|
||||
var actual = JSON.parse(topic.registeredProcessEvents[1].event).data[0].stack;
|
||||
var expectedRegex = /^Error: Error test/;
|
||||
assert(
|
||||
actual.match(expectedRegex),
|
||||
"Expected: \n\n " + actual + "\n\n to match " + expectedRegex
|
||||
);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}).exportTo(module);
|
149  node_modules/log4js/test/vows/configuration-test.js  (generated, vendored, normal file)
@@ -0,0 +1,149 @@
|
||||
"use strict";
|
||||
var assert = require('assert')
|
||||
, vows = require('vows')
|
||||
, sandbox = require('sandboxed-module');
|
||||
|
||||
function makeTestAppender() {
|
||||
return {
|
||||
configure: function(config, options) {
|
||||
this.configureCalled = true;
|
||||
this.config = config;
|
||||
this.options = options;
|
||||
return this.appender();
|
||||
},
|
||||
appender: function() {
|
||||
var self = this;
|
||||
return function(logEvt) { self.logEvt = logEvt; };
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
vows.describe('log4js configure').addBatch({
|
||||
'appenders': {
|
||||
'when specified by type': {
|
||||
topic: function() {
|
||||
var testAppender = makeTestAppender(),
|
||||
log4js = sandbox.require(
|
||||
'../../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'./appenders/cheese': testAppender
|
||||
}
|
||||
}
|
||||
);
|
||||
log4js.configure(
|
||||
{
|
||||
appenders: [
|
||||
{ type: "cheese", flavour: "gouda" }
|
||||
]
|
||||
},
|
||||
{ pants: "yes" }
|
||||
);
|
||||
return testAppender;
|
||||
},
|
||||
'should load appender': function(testAppender) {
|
||||
assert.ok(testAppender.configureCalled);
|
||||
},
|
||||
'should pass config to appender': function(testAppender) {
|
||||
assert.equal(testAppender.config.flavour, 'gouda');
|
||||
},
|
||||
'should pass log4js options to appender': function(testAppender) {
|
||||
assert.equal(testAppender.options.pants, 'yes');
|
||||
}
|
||||
},
|
||||
'when core appender loaded via loadAppender': {
|
||||
topic: function() {
|
||||
var testAppender = makeTestAppender(),
|
||||
log4js = sandbox.require(
|
||||
'../../lib/log4js',
|
||||
{ requires: { './appenders/cheese': testAppender } }
|
||||
);
|
||||
|
||||
log4js.loadAppender('cheese');
|
||||
return log4js;
|
||||
},
|
||||
'should load appender from ../../lib/appenders': function(log4js) {
|
||||
assert.ok(log4js.appenders.cheese);
|
||||
},
|
||||
'should add appender configure function to appenderMakers' : function(log4js) {
|
||||
assert.isFunction(log4js.appenderMakers.cheese);
|
||||
}
|
||||
},
|
||||
'when appender in node_modules loaded via loadAppender': {
|
||||
topic: function() {
|
||||
var testAppender = makeTestAppender(),
|
||||
log4js = sandbox.require(
|
||||
'../../lib/log4js',
|
||||
{ requires: { 'some/other/external': testAppender } }
|
||||
);
|
||||
log4js.loadAppender('some/other/external');
|
||||
return log4js;
|
||||
},
|
||||
'should load appender via require': function(log4js) {
|
||||
assert.ok(log4js.appenders['some/other/external']);
|
||||
},
|
||||
'should add appender configure function to appenderMakers': function(log4js) {
|
||||
assert.isFunction(log4js.appenderMakers['some/other/external']);
|
||||
}
|
||||
},
|
||||
'when appender object loaded via loadAppender': {
|
||||
topic: function() {
|
||||
var testAppender = makeTestAppender(),
|
||||
log4js = sandbox.require('../../lib/log4js');
|
||||
|
||||
log4js.loadAppender('some/other/external', testAppender);
|
||||
return log4js;
|
||||
},
|
||||
'should load appender with provided object': function(log4js) {
|
||||
assert.ok(log4js.appenders['some/other/external']);
|
||||
},
|
||||
'should add appender configure function to appenderMakers': function(log4js) {
|
||||
assert.isFunction(log4js.appenderMakers['some/other/external']);
|
||||
}
|
||||
},
|
||||
'when configuration file loaded via LOG4JS_CONFIG environment variable': {
|
||||
topic: function() {
|
||||
process.env.LOG4JS_CONFIG = 'some/path/to/mylog4js.json';
|
||||
var fileRead = 0,
|
||||
modulePath = 'some/path/to/mylog4js.json',
|
||||
pathsChecked = [],
|
||||
mtime = new Date(),
|
||||
fakeFS = {
|
||||
config: { appenders: [ { type: 'console', layout: { type: 'messagePassThrough' } } ],
|
||||
levels: { 'a-test' : 'INFO' } },
|
||||
readdirSync: function(dir) {
|
||||
return require('fs').readdirSync(dir);
|
||||
},
|
||||
readFileSync: function (file, encoding) {
|
||||
fileRead += 1;
|
||||
assert.isString(file);
|
||||
assert.equal(file, modulePath);
|
||||
assert.equal(encoding, 'utf8');
|
||||
return JSON.stringify(fakeFS.config);
|
||||
},
|
||||
statSync: function (path) {
|
||||
pathsChecked.push(path);
|
||||
if (path === modulePath) {
|
||||
return { mtime: mtime };
|
||||
} else {
|
||||
throw new Error("no such file");
|
||||
}
|
||||
}
|
||||
},
|
||||
log4js = sandbox.require(
|
||||
'../../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'fs': fakeFS,
|
||||
}
|
||||
}
|
||||
);
|
||||
delete process.env.LOG4JS_CONFIG;
|
||||
return fileRead;
|
||||
},
|
||||
'should load the specified local configuration file' : function(fileRead) {
|
||||
assert.equal(fileRead, 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
}).exportTo(module);
|
173  node_modules/log4js/test/vows/configureNoLevels-test.js  (generated, vendored, normal file)
@@ -0,0 +1,173 @@
|
||||
"use strict";
|
||||
// This test shows unexpected behaviour for log4js.configure() in log4js-node@0.4.3 and earlier:
|
||||
// 1) log4js.configure(), log4js.configure(null),
|
||||
// log4js.configure({}), log4js.configure(<some object with no levels prop>)
|
||||
// all set all loggers levels to trace, even if they were previously set to something else.
|
||||
// 2) log4js.configure({levels:{}}), log4js.configure({levels: {foo:
|
||||
// bar}}) leaves previously set logger levels intact.
|
||||
//
|
||||
|
||||
// Basic set up
|
||||
var vows = require('vows');
|
||||
var assert = require('assert');
|
||||
var toLevel = require('../../lib/levels').toLevel;
|
||||
|
||||
// uncomment one or other of the following to see progress (or not) while running the tests
|
||||
// var showProgress = console.log;
|
||||
var showProgress = function() {};
|
||||
|
||||
|
||||
// Define the array of levels as string to iterate over.
|
||||
var strLevels= ['Trace','Debug','Info','Warn','Error','Fatal'];
|
||||
|
||||
// setup the configurations we want to test
|
||||
var configs = {
|
||||
'nop': 'nop', // special case where the iterating vows generator will not call log4js.configure
|
||||
'is undefined': undefined,
|
||||
'is null': null,
|
||||
'is empty': {},
|
||||
'has no levels': {foo: 'bar'},
|
||||
'has null levels': {levels: null},
|
||||
'has empty levels': {levels: {}},
|
||||
'has random levels': {levels: {foo: 'bar'}},
|
||||
'has some valid levels': {levels: {A: 'INFO'}}
|
||||
};
|
||||
|
||||
// Set up the basic vows batches for this test
|
||||
var batches = [];
|
||||
|
||||
|
||||
function getLoggerName(level) {
|
||||
return level+'-logger';
|
||||
}
|
||||
|
||||
// the common vows top-level context, whether log4js.configure is called or not
|
||||
// just making sure that the code is common,
|
||||
// so that there are no spurious errors in the tests themselves.
|
||||
function getTopLevelContext(nop, configToTest, name) {
|
||||
return {
|
||||
topic: function() {
|
||||
var log4js = require('../../lib/log4js');
|
||||
// create loggers for each level,
|
||||
// keeping the level in the logger's name for traceability
|
||||
strLevels.forEach(function(l) {
|
||||
log4js.getLogger(getLoggerName(l)).setLevel(l);
|
||||
});
|
||||
|
||||
if (!nop) {
|
||||
showProgress('** Configuring log4js with', configToTest);
|
||||
log4js.configure(configToTest);
|
||||
}
|
||||
else {
|
||||
showProgress('** Not configuring log4js');
|
||||
}
|
||||
return log4js;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
showProgress('Populating batch object...');
|
||||
|
||||
function checkForMismatch(topic) {
|
||||
var er = topic.log4js.levels.toLevel(topic.baseLevel)
|
||||
.isLessThanOrEqualTo(topic.log4js.levels.toLevel(topic.comparisonLevel));
|
||||
|
||||
assert.equal(
|
||||
er,
|
||||
topic.expectedResult,
|
||||
'Mismatch: for setLevel(' + topic.baseLevel +
|
||||
') was expecting a comparison with ' + topic.comparisonLevel +
|
||||
' to be ' + topic.expectedResult
|
||||
);
|
||||
}
|
||||
|
||||
function checkExpectedResult(topic) {
|
||||
var result = topic.log4js
|
||||
.getLogger(getLoggerName(topic.baseLevel))
|
||||
.isLevelEnabled(topic.log4js.levels.toLevel(topic.comparisonLevel));
|
||||
|
||||
assert.equal(
|
||||
result,
|
||||
topic.expectedResult,
|
||||
'Failed: ' + getLoggerName(topic.baseLevel) +
|
||||
'.isLevelEnabled( ' + topic.comparisonLevel + ' ) returned ' + result
|
||||
);
|
||||
}
|
||||
|
||||
function setupBaseLevelAndCompareToOtherLevels(baseLevel) {
|
||||
var baseLevelSubContext = 'and checking the logger whose level was set to '+baseLevel ;
|
||||
var subContext = { topic: baseLevel };
|
||||
batch[context][baseLevelSubContext] = subContext;
|
||||
|
||||
// each logging level has strLevels sub-contexts,
|
||||
// to exhaustively test all the combinations of
|
||||
// setLevel(baseLevel) and isLevelEnabled(comparisonLevel) per config
|
||||
strLevels.forEach(compareToOtherLevels(subContext));
|
||||
}
|
||||
|
||||
function compareToOtherLevels(subContext) {
|
||||
var baseLevel = subContext.topic;
|
||||
|
||||
return function (comparisonLevel) {
|
||||
var comparisonLevelSubContext = 'with isLevelEnabled('+comparisonLevel+')';
|
||||
|
||||
// calculate this independently of log4js, but we'll add a vow
|
||||
// later on to check that we're not mismatched with log4js
|
||||
var expectedResult = strLevels.indexOf(baseLevel) <= strLevels.indexOf(comparisonLevel);
|
||||
|
||||
// the topic simply gathers all the parameters for the vow
|
||||
// into an object, to simplify the vow's work.
|
||||
subContext[comparisonLevelSubContext] = {
|
||||
topic: function(baseLevel, log4js) {
|
||||
return {
|
||||
comparisonLevel: comparisonLevel,
|
||||
baseLevel: baseLevel,
|
||||
log4js: log4js,
|
||||
expectedResult: expectedResult
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
var vow = 'should return '+expectedResult;
|
||||
subContext[comparisonLevelSubContext][vow] = checkExpectedResult;
|
||||
|
||||
// the extra vow to check the comparison between baseLevel and
|
||||
// comparisonLevel we performed earlier matches log4js'
|
||||
// comparison too
|
||||
var subSubContext = subContext[comparisonLevelSubContext];
|
||||
subSubContext['finally checking for comparison mismatch with log4js'] = checkForMismatch;
|
||||
};
|
||||
}
|
||||
|
||||
// Populating the batches programmatically, as there are
|
||||
// (configs.length x strLevels.length x strLevels.length) = 324
|
||||
// possible test combinations
|
||||
for (var cfg in configs) {
|
||||
var configToTest = configs[cfg];
|
||||
var nop = configToTest === 'nop';
|
||||
var context;
|
||||
if (nop) {
|
||||
context = 'Setting up loggers with initial levels, then NOT setting a configuration,';
|
||||
}
|
||||
else {
|
||||
context = 'Setting up loggers with initial levels, then setting a configuration which '+cfg+',';
|
||||
}
|
||||
|
||||
showProgress('Setting up the vows batch and context for '+context);
|
||||
// each config to be tested has its own vows batch with a single top-level context
|
||||
var batch={};
|
||||
batch[context]= getTopLevelContext(nop, configToTest, context);
|
||||
batches.push(batch);
|
||||
|
||||
// each top-level context has strLevels sub-contexts, one per logger
|
||||
// which has set to a specific level in the top-level context's topic
|
||||
strLevels.forEach(setupBaseLevelAndCompareToOtherLevels);
|
||||
}
|
||||
|
||||
showProgress('Running tests');
|
||||
var v = vows.describe('log4js.configure(), with or without a "levels" property');
|
||||
|
||||
batches.forEach(function(batch) {v=v.addBatch(batch);});
|
||||
|
||||
v.export(module);
|
||||
|
303  node_modules/log4js/test/vows/connect-logger-test.js  (generated, vendored, normal file)
@@ -0,0 +1,303 @@
|
||||
/* jshint maxparams:7 */
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, util = require('util')
|
||||
, EE = require('events').EventEmitter
|
||||
, levels = require('../../lib/levels');
|
||||
|
||||
function MockLogger() {
|
||||
|
||||
var that = this;
|
||||
this.messages = [];
|
||||
|
||||
this.log = function(level, message, exception) {
|
||||
that.messages.push({ level: level, message: message });
|
||||
};
|
||||
|
||||
this.isLevelEnabled = function(level) {
|
||||
return level.isGreaterThanOrEqualTo(that.level);
|
||||
};
|
||||
|
||||
this.level = levels.TRACE;
|
||||
|
||||
}
|
||||
|
||||
function MockRequest(remoteAddr, method, originalUrl, headers) {
|
||||
|
||||
this.socket = { remoteAddress: remoteAddr };
|
||||
this.originalUrl = originalUrl;
|
||||
this.method = method;
|
||||
this.httpVersionMajor = '5';
|
||||
this.httpVersionMinor = '0';
|
||||
this.headers = headers || {};
|
||||
|
||||
var self = this;
|
||||
Object.keys(this.headers).forEach(function(key) {
|
||||
self.headers[key.toLowerCase()] = self.headers[key];
|
||||
});
|
||||
}
|
||||
|
||||
function MockResponse() {
|
||||
var r = this;
|
||||
this.end = function(chunk, encoding) {
|
||||
r.emit('finish');
|
||||
};
|
||||
|
||||
this.writeHead = function(code, headers) {
|
||||
this.statusCode = code;
|
||||
this._headers = headers;
|
||||
};
|
||||
}
|
||||
|
||||
util.inherits(MockResponse, EE);
|
||||
|
||||
function request(cl, method, url, code, reqHeaders, resHeaders) {
|
||||
var req = new MockRequest('my.remote.addr', method, url, reqHeaders);
|
||||
var res = new MockResponse();
|
||||
cl(req, res, function() {});
|
||||
res.writeHead(code, resHeaders);
|
||||
res.end('chunk','encoding');
|
||||
}
|
||||
|
||||
vows.describe('log4js connect logger').addBatch({
|
||||
'getConnectLoggerModule': {
|
||||
topic: function() {
|
||||
var clm = require('../../lib/connect-logger');
|
||||
return clm;
|
||||
},
|
||||
|
||||
'should return a "connect logger" factory' : function(clm) {
|
||||
assert.isObject(clm);
|
||||
},
|
||||
|
||||
'take a log4js logger and return a "connect logger"' : {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
var cl = clm.connectLogger(ml);
|
||||
return cl;
|
||||
},
|
||||
|
||||
'should return a "connect logger"': function(cl) {
|
||||
assert.isFunction(cl);
|
||||
}
|
||||
},
|
||||
|
||||
'log events' : {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
var cl = clm.connectLogger(ml);
|
||||
var cb = this.callback;
|
||||
request(cl, 'GET', 'http://url', 200);
|
||||
setTimeout(function() {
|
||||
cb(null, ml.messages);
|
||||
},10);
|
||||
},
|
||||
|
||||
'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 1);
|
||||
assert.ok(levels.INFO.isEqualTo(messages[0].level));
|
||||
assert.include(messages[0].message, 'GET');
|
||||
assert.include(messages[0].message, 'http://url');
|
||||
assert.include(messages[0].message, 'my.remote.addr');
|
||||
assert.include(messages[0].message, '200');
|
||||
}
|
||||
},
|
||||
|
||||
'log events with level below logging level' : {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
ml.level = levels.FATAL;
|
||||
var cl = clm.connectLogger(ml);
|
||||
request(cl, 'GET', 'http://url', 200);
|
||||
return ml.messages;
|
||||
},
|
||||
|
||||
'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.isEmpty(messages);
|
||||
}
|
||||
},
|
||||
|
||||
'log events with non-default level and custom format' : {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
var cb = this.callback;
|
||||
ml.level = levels.INFO;
|
||||
var cl = clm.connectLogger(ml, { level: levels.INFO, format: ':method :url' } );
|
||||
request(cl, 'GET', 'http://url', 200);
|
||||
setTimeout(function() {
|
||||
cb(null, ml.messages);
|
||||
},10); },
|
||||
|
||||
'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 1);
|
||||
assert.ok(levels.INFO.isEqualTo(messages[0].level));
|
||||
assert.equal(messages[0].message, 'GET http://url');
|
||||
}
|
||||
},
|
||||
|
||||
'logger with options as string': {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
var cb = this.callback;
|
||||
ml.level = levels.INFO;
|
||||
var cl = clm.connectLogger(ml, ':method :url');
|
||||
request(cl, 'POST', 'http://meh', 200);
|
||||
setTimeout(function() {
|
||||
cb(null, ml.messages);
|
||||
},10);
|
||||
},
|
||||
'should use the passed in format': function(messages) {
|
||||
assert.equal(messages[0].message, 'POST http://meh');
|
||||
}
|
||||
},
|
||||
|
||||
'auto log levels': {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
var cb = this.callback;
|
||||
ml.level = levels.INFO;
|
||||
var cl = clm.connectLogger(ml, { level: 'auto', format: ':method :url' });
|
||||
request(cl, 'GET', 'http://meh', 200);
|
||||
request(cl, 'GET', 'http://meh', 201);
|
||||
request(cl, 'GET', 'http://meh', 302);
|
||||
request(cl, 'GET', 'http://meh', 404);
|
||||
request(cl, 'GET', 'http://meh', 500);
|
||||
setTimeout(function() {
|
||||
cb(null, ml.messages);
|
||||
},10);
|
||||
},
|
||||
|
||||
'should use INFO for 2xx': function(messages) {
|
||||
assert.ok(levels.INFO.isEqualTo(messages[0].level));
|
||||
assert.ok(levels.INFO.isEqualTo(messages[1].level));
|
||||
},
|
||||
|
||||
'should use WARN for 3xx': function(messages) {
|
||||
assert.ok(levels.WARN.isEqualTo(messages[2].level));
|
||||
},
|
||||
|
||||
'should use ERROR for 4xx': function(messages) {
|
||||
assert.ok(levels.ERROR.isEqualTo(messages[3].level));
|
||||
},
|
||||
|
||||
'should use ERROR for 5xx': function(messages) {
|
||||
assert.ok(levels.ERROR.isEqualTo(messages[4].level));
|
||||
}
|
||||
},
|
||||
|
||||
'format using a function': {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
var cb = this.callback;
|
||||
ml.level = levels.INFO;
|
||||
var cl = clm.connectLogger(ml, function(req, res, formatFn) { return "I was called"; });
|
||||
request(cl, 'GET', 'http://blah', 200);
|
||||
setTimeout(function() {
|
||||
cb(null, ml.messages);
|
||||
},10);
|
||||
},
|
||||
|
||||
'should call the format function': function(messages) {
|
||||
assert.equal(messages[0].message, 'I was called');
|
||||
}
|
||||
},
|
||||
|
||||
'format that includes request headers': {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
var cb = this.callback;
|
||||
ml.level = levels.INFO;
|
||||
var cl = clm.connectLogger(ml, ':req[Content-Type]');
|
||||
request(
|
||||
cl,
|
||||
'GET', 'http://blah', 200,
|
||||
{ 'Content-Type': 'application/json' }
|
||||
);
|
||||
setTimeout(function() {
|
||||
cb(null, ml.messages);
|
||||
},10);
|
||||
},
|
||||
'should output the request header': function(messages) {
|
||||
assert.equal(messages[0].message, 'application/json');
|
||||
}
|
||||
},
|
||||
|
||||
'format that includes response headers': {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
var cb = this.callback;
|
||||
ml.level = levels.INFO;
|
||||
var cl = clm.connectLogger(ml, ':res[Content-Type]');
|
||||
request(
|
||||
cl,
|
||||
'GET', 'http://blah', 200,
|
||||
null,
|
||||
{ 'Content-Type': 'application/cheese' }
|
||||
);
|
||||
setTimeout(function() {
|
||||
cb(null, ml.messages);
|
||||
},10);
|
||||
},
|
||||
|
||||
'should output the response header': function(messages) {
|
||||
assert.equal(messages[0].message, 'application/cheese');
|
||||
}
|
||||
},
|
||||
|
||||
'log events with custom token' : {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
var cb = this.callback;
|
||||
ml.level = levels.INFO;
|
||||
var cl = clm.connectLogger(ml, {
|
||||
level: levels.INFO,
|
||||
format: ':method :url :custom_string',
|
||||
tokens: [{
|
||||
token: ':custom_string', replacement: 'fooBAR'
|
||||
}]
|
||||
});
|
||||
request(cl, 'GET', 'http://url', 200);
|
||||
setTimeout(function() {
|
||||
cb(null, ml.messages);
|
||||
},10);
|
||||
},
|
||||
|
||||
'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 1);
|
||||
assert.ok(levels.INFO.isEqualTo(messages[0].level));
|
||||
assert.equal(messages[0].message, 'GET http://url fooBAR');
|
||||
}
|
||||
},
|
||||
|
||||
'log events with custom override token' : {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
var cb = this.callback;
|
||||
ml.level = levels.INFO;
|
||||
var cl = clm.connectLogger(ml, {
|
||||
level: levels.INFO,
|
||||
format: ':method :url :date',
|
||||
tokens: [{
|
||||
token: ':date', replacement: "20150310"
|
||||
}]
|
||||
});
|
||||
request(cl, 'GET', 'http://url', 200);
|
||||
setTimeout(function() {
|
||||
cb(null, ml.messages);
|
||||
},10);
|
||||
},
|
||||
|
||||
'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 1);
|
||||
assert.ok(levels.INFO.isEqualTo(messages[0].level));
|
||||
assert.equal(messages[0].message, 'GET http://url 20150310');
|
||||
}
|
||||
}
|
||||
}
|
||||
}).export(module);
|
33  node_modules/log4js/test/vows/consoleAppender-test.js  (generated, vendored, normal file)
@@ -0,0 +1,33 @@
"use strict";
var assert = require('assert')
, vows = require('vows')
, layouts = require('../../lib/layouts')
, sandbox = require('sandboxed-module');

vows.describe('../../lib/appenders/console').addBatch({
  'appender': {
    topic: function() {
      var messages = []
      , fakeConsole = {
        log: function(msg) { messages.push(msg); }
      }
      , appenderModule = sandbox.require(
        '../../lib/appenders/console',
        {
          globals: {
            'console': fakeConsole
          }
        }
      )
      , appender = appenderModule.appender(layouts.messagePassThroughLayout);

      appender({ data: ["blah"] });
      return messages;
    },

    'should output to console': function(messages) {
      assert.equal(messages[0], 'blah');
    }
  }

}).exportTo(module);
223  node_modules/log4js/test/vows/dateFileAppender-test.js  (generated, vendored, normal file)
@@ -0,0 +1,223 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, path = require('path')
|
||||
, fs = require('fs')
|
||||
, sandbox = require('sandboxed-module')
|
||||
, log4js = require('../../lib/log4js')
|
||||
, EOL = require('os').EOL || '\n';
|
||||
|
||||
function removeFile(filename) {
|
||||
return function() {
|
||||
fs.unlink(path.join(__dirname, filename), function(err) {
|
||||
if (err) {
|
||||
console.log("Could not delete ", filename, err);
|
||||
}
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
vows.describe('../../lib/appenders/dateFile').addBatch({
|
||||
'appender': {
|
||||
'adding multiple dateFileAppenders': {
|
||||
topic: function () {
|
||||
var listenersCount = process.listeners('exit').length,
|
||||
dateFileAppender = require('../../lib/appenders/dateFile'),
|
||||
count = 5,
|
||||
logfile;
|
||||
|
||||
while (count--) {
|
||||
logfile = path.join(__dirname, 'datefa-default-test' + count + '.log');
|
||||
log4js.addAppender(dateFileAppender.appender(logfile));
|
||||
}
|
||||
|
||||
return listenersCount;
|
||||
},
|
||||
teardown: function() {
|
||||
removeFile('datefa-default-test0.log')();
|
||||
removeFile('datefa-default-test1.log')();
|
||||
removeFile('datefa-default-test2.log')();
|
||||
removeFile('datefa-default-test3.log')();
|
||||
removeFile('datefa-default-test4.log')();
|
||||
},
|
||||
|
||||
'should only add one `exit` listener': function (initialCount) {
|
||||
assert.equal(process.listeners('exit').length, initialCount + 1);
|
||||
},
|
||||
|
||||
},
|
||||
|
||||
'exit listener': {
|
||||
topic: function() {
|
||||
var exitListener
|
||||
, openedFiles = []
|
||||
, dateFileAppender = sandbox.require(
|
||||
'../../lib/appenders/dateFile',
|
||||
{
|
||||
globals: {
|
||||
process: {
|
||||
on: function(evt, listener) {
|
||||
exitListener = listener;
|
||||
}
|
||||
}
|
||||
},
|
||||
requires: {
|
||||
'streamroller': {
|
||||
DateRollingFileStream: function(filename) {
|
||||
openedFiles.push(filename);
|
||||
|
||||
this.end = function() {
|
||||
openedFiles.shift();
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
for (var i=0; i < 5; i += 1) {
|
||||
dateFileAppender.appender('test' + i);
|
||||
}
|
||||
assert.isNotEmpty(openedFiles);
|
||||
exitListener();
|
||||
return openedFiles;
|
||||
},
|
||||
'should close all open files': function(openedFiles) {
|
||||
assert.isEmpty(openedFiles);
|
||||
}
|
||||
},
|
||||
|
||||
'with default settings': {
|
||||
topic: function() {
|
||||
var that = this,
|
||||
testFile = path.join(__dirname, 'date-appender-default.log'),
|
||||
appender = require('../../lib/appenders/dateFile').appender(testFile),
|
||||
logger = log4js.getLogger('default-settings');
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(appender, 'default-settings');
|
||||
|
||||
logger.info("This should be in the file.");
|
||||
|
||||
setTimeout(function() {
|
||||
fs.readFile(testFile, "utf8", that.callback);
|
||||
}, 100);
|
||||
|
||||
},
|
||||
teardown: removeFile('date-appender-default.log'),
|
||||
|
||||
'should write to the file': function(contents) {
|
||||
assert.include(contents, 'This should be in the file');
|
||||
},
|
||||
|
||||
'should use the basic layout': function(contents) {
|
||||
assert.match(
|
||||
contents,
|
||||
/\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] default-settings - /
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}).addBatch({
|
||||
'configure': {
|
||||
'with dateFileAppender': {
|
||||
topic: function() {
|
||||
var log4js = require('../../lib/log4js')
|
||||
, logger;
|
||||
//this config file defines one file appender (to ./date-file-test.log)
|
||||
//and sets the log level for "tests" to WARN
|
||||
log4js.configure('test/vows/with-dateFile.json');
|
||||
logger = log4js.getLogger('tests');
|
||||
logger.info('this should not be written to the file');
|
||||
logger.warn('this should be written to the file');
|
||||
|
||||
fs.readFile(path.join(__dirname, 'date-file-test.log'), 'utf8', this.callback);
|
||||
},
|
||||
teardown: removeFile('date-file-test.log'),
|
||||
|
||||
'should load appender configuration from a json file': function(err, contents) {
|
||||
if (err) {
|
||||
throw err;
|
||||
}
|
||||
assert.include(contents, 'this should be written to the file' + EOL);
|
||||
assert.equal(contents.indexOf('this should not be written to the file'), -1);
|
||||
}
|
||||
},
|
||||
'with options.alwaysIncludePattern': {
|
||||
topic: function() {
|
||||
var self = this
|
||||
, log4js = require('../../lib/log4js')
|
||||
, format = require('../../lib/date_format')
|
||||
, logger
|
||||
, options = {
|
||||
"appenders": [
|
||||
{
|
||||
"category": "tests",
|
||||
"type": "dateFile",
|
||||
"filename": "test/vows/date-file-test",
|
||||
"pattern": "-from-MM-dd.log",
|
||||
"alwaysIncludePattern": true,
|
||||
"layout": {
|
||||
"type": "messagePassThrough"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
, thisTime = format.asString(options.appenders[0].pattern, new Date());
|
||||
fs.writeFileSync(
|
||||
path.join(__dirname, 'date-file-test' + thisTime),
|
||||
"this is existing data" + EOL,
|
||||
'utf8'
|
||||
);
|
||||
log4js.clearAppenders();
|
||||
log4js.configure(options);
|
||||
logger = log4js.getLogger('tests');
|
||||
logger.warn('this should be written to the file with the appended date');
|
||||
this.teardown = removeFile('date-file-test' + thisTime);
|
||||
//wait for filesystem to catch up
|
||||
setTimeout(function() {
|
||||
fs.readFile(path.join(__dirname, 'date-file-test' + thisTime), 'utf8', self.callback);
|
||||
}, 100);
|
||||
},
|
||||
'should create file with the correct pattern': function(contents) {
|
||||
assert.include(contents, 'this should be written to the file with the appended date');
|
||||
},
|
||||
'should not overwrite the file on open (bug found in issue #132)': function(contents) {
|
||||
assert.include(contents, 'this is existing data');
|
||||
}
|
||||
},
|
||||
'with cwd option': {
|
||||
topic: function () {
|
||||
var fileOpened,
|
||||
appender = sandbox.require(
|
||||
'../../lib/appenders/dateFile',
|
||||
{ requires:
|
||||
{ 'streamroller':
|
||||
{ DateRollingFileStream:
|
||||
function(file) {
|
||||
fileOpened = file;
|
||||
return {
|
||||
on: function() {},
|
||||
end: function() {}
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
appender.configure(
|
||||
{
|
||||
filename: "whatever.log",
|
||||
maxLogSize: 10
|
||||
},
|
||||
{ cwd: '/absolute/path/to' }
|
||||
);
|
||||
return fileOpened;
|
||||
},
|
||||
'should prepend options.cwd to config.filename': function (fileOpened) {
|
||||
var expected = path.sep + path.join("absolute", "path", "to", "whatever.log");
|
||||
assert.equal(fileOpened, expected);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}).exportTo(module);
|
58  node_modules/log4js/test/vows/date_format-test.js  (generated, vendored, normal file)
@@ -0,0 +1,58 @@
"use strict";
var vows = require('vows')
, assert = require('assert')
, dateFormat = require('../../lib/date_format');

function createFixedDate() {
  return new Date(2010, 0, 11, 14, 31, 30, 5);
}

vows.describe('date_format').addBatch({
  'Date extensions': {
    topic: createFixedDate,
    'should format a date as string using a pattern': function(date) {
      assert.equal(
        dateFormat.asString(dateFormat.DATETIME_FORMAT, date),
        "11 01 2010 14:31:30.005"
      );
    },
    'should default to the ISO8601 format': function(date) {
      assert.equal(
        dateFormat.asString(date),
        '2010-01-11 14:31:30.005'
      );
    },
    'should provide a ISO8601 with timezone offset format': function() {
      var date = createFixedDate();
      date.setMinutes(date.getMinutes() - date.getTimezoneOffset() - 660);
      date.getTimezoneOffset = function() { return -660; };
      assert.equal(
        dateFormat.asString(dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT, date),
        "2010-01-11T14:31:30.005+1100"
      );
      date = createFixedDate();
      date.setMinutes(date.getMinutes() - date.getTimezoneOffset() + 120);
      date.getTimezoneOffset = function() { return 120; };
      assert.equal(
        dateFormat.asString(dateFormat.ISO8601_WITH_TZ_OFFSET_FORMAT, date),
        "2010-01-11T14:31:30.005-0200"
      );

    },
    'should provide a just-the-time format': function(date) {
      assert.equal(
        dateFormat.asString(dateFormat.ABSOLUTETIME_FORMAT, date),
        '14:31:30.005'
      );
    },
    'should provide a custom format': function() {
      var date = createFixedDate();
      date.setMinutes(date.getMinutes() - date.getTimezoneOffset() + 120);
      date.getTimezoneOffset = function() { return 120; };
      assert.equal(
        dateFormat.asString("O.SSS.ss.mm.hh.dd.MM.yy", date),
        '-0200.005.30.31.14.11.01.10'
      );
    }
  }
}).export(module);
450  node_modules/log4js/test/vows/fileAppender-test.js  (generated, vendored, normal file)
@@ -0,0 +1,450 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, fs = require('fs')
|
||||
, path = require('path')
|
||||
, sandbox = require('sandboxed-module')
|
||||
, log4js = require('../../lib/log4js')
|
||||
, assert = require('assert')
|
||||
, zlib = require('zlib')
|
||||
, EOL = require('os').EOL || '\n';
|
||||
|
||||
log4js.clearAppenders();
|
||||
|
||||
function remove(filename) {
|
||||
try {
|
||||
fs.unlinkSync(filename);
|
||||
} catch (e) {
|
||||
//doesn't really matter if it failed
|
||||
}
|
||||
}
|
||||
|
||||
vows.describe('log4js fileAppender').addBatch({
|
||||
'adding multiple fileAppenders': {
|
||||
topic: function () {
|
||||
var listenersCount = process.listeners('exit').length
|
||||
, logger = log4js.getLogger('default-settings')
|
||||
, count = 5, logfile;
|
||||
|
||||
while (count--) {
|
||||
logfile = path.join(__dirname, 'fa-default-test' + count + '.log');
|
||||
log4js.addAppender(
|
||||
require('../../lib/appenders/file').appender(logfile),
|
||||
'default-settings'
|
||||
);
|
||||
}
|
||||
|
||||
return listenersCount;
|
||||
},
|
||||
|
||||
'does not add more than one `exit` listener': function (initialCount) {
|
||||
assert.equal(initialCount + 1, process.listeners('exit').length);
|
||||
}
|
||||
},
|
||||
|
||||
'exit listener': {
|
||||
topic: function() {
|
||||
var exitListener
|
||||
, openedFiles = []
|
||||
, fileAppender = sandbox.require(
|
||||
'../../lib/appenders/file',
|
||||
{
|
||||
globals: {
|
||||
process: {
|
||||
on: function(evt, listener) {
|
||||
if (evt == 'exit') {
|
||||
exitListener = listener;
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
requires: {
|
||||
'streamroller': {
|
||||
RollingFileStream: function(filename) {
|
||||
openedFiles.push(filename);
|
||||
|
||||
this.end = function() {
|
||||
openedFiles.shift();
|
||||
};
|
||||
|
||||
this.on = function() {};
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
for (var i=0; i < 5; i += 1) {
|
||||
fileAppender.appender('test' + i, null, 100);
|
||||
}
|
||||
assert.isNotEmpty(openedFiles);
|
||||
exitListener();
|
||||
return openedFiles;
|
||||
},
|
||||
'should close all open files': function(openedFiles) {
|
||||
assert.isEmpty(openedFiles);
|
||||
}
|
||||
},
|
||||
|
||||
'with default fileAppender settings': {
|
||||
topic: function() {
|
||||
var that = this
|
||||
, testFile = path.join(__dirname, 'fa-default-test.log')
|
||||
, logger = log4js.getLogger('default-settings');
|
||||
remove(testFile);
|
||||
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(
|
||||
require('../../lib/appenders/file').appender(testFile),
|
||||
'default-settings'
|
||||
);
|
||||
|
||||
logger.info("This should be in the file.");
|
||||
|
||||
setTimeout(function() {
|
||||
fs.readFile(testFile, "utf8", that.callback);
|
||||
}, 100);
|
||||
},
|
||||
'should write log messages to the file': function (err, fileContents) {
|
||||
assert.include(fileContents, "This should be in the file." + EOL);
|
||||
},
|
||||
'log messages should be in the basic layout format': function(err, fileContents) {
|
||||
assert.match(
|
||||
fileContents,
|
||||
/\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] default-settings - /
|
||||
);
|
||||
}
|
||||
},
|
||||
'fileAppender subcategories': {
|
||||
topic: function() {
|
||||
var that = this;
|
||||
|
||||
log4js.clearAppenders();
|
||||
|
||||
function addAppender(cat) {
|
||||
var testFile = path.join(
|
||||
__dirname,
|
||||
'fa-subcategories-test-'+cat.join('-').replace(/\./g, "_")+'.log'
|
||||
);
|
||||
remove(testFile);
|
||||
log4js.addAppender(require('../../lib/appenders/file').appender(testFile), cat);
|
||||
return testFile;
|
||||
}
|
||||
|
||||
var file_sub1 = addAppender([ 'sub1']);
|
||||
|
||||
var file_sub1_sub12$sub1_sub13 = addAppender([ 'sub1.sub12', 'sub1.sub13' ]);
|
||||
|
||||
var file_sub1_sub12 = addAppender([ 'sub1.sub12' ]);
|
||||
|
||||
|
||||
var logger_sub1_sub12_sub123 = log4js.getLogger('sub1.sub12.sub123');
|
||||
|
||||
var logger_sub1_sub13_sub133 = log4js.getLogger('sub1.sub13.sub133');
|
||||
|
||||
var logger_sub1_sub14 = log4js.getLogger('sub1.sub14');
|
||||
|
||||
var logger_sub2 = log4js.getLogger('sub2');
|
||||
|
||||
|
||||
logger_sub1_sub12_sub123.info('sub1_sub12_sub123');
|
||||
|
||||
logger_sub1_sub13_sub133.info('sub1_sub13_sub133');
|
||||
|
||||
logger_sub1_sub14.info('sub1_sub14');
|
||||
|
||||
logger_sub2.info('sub2');
|
||||
|
||||
|
||||
setTimeout(function() {
|
||||
that.callback(null, {
|
||||
file_sub1: fs.readFileSync(file_sub1).toString(),
|
||||
file_sub1_sub12$sub1_sub13: fs.readFileSync(file_sub1_sub12$sub1_sub13).toString(),
|
||||
file_sub1_sub12: fs.readFileSync(file_sub1_sub12).toString()
|
||||
});
|
||||
}, 3000);
|
||||
},
|
||||
'check file contents': function (err, fileContents) {
|
||||
|
||||
// everything but category 'sub2'
|
||||
assert.match(
|
||||
fileContents.file_sub1,
|
||||
/^(\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] (sub1.sub12.sub123 - sub1_sub12_sub123|sub1.sub13.sub133 - sub1_sub13_sub133|sub1.sub14 - sub1_sub14)[\s\S]){3}$/ // jshint ignore:line
|
||||
);
|
||||
assert.ok(
|
||||
fileContents.file_sub1.match(/sub123/) &&
|
||||
fileContents.file_sub1.match(/sub133/) &&
|
||||
fileContents.file_sub1.match(/sub14/)
|
||||
);
|
||||
assert.ok(!fileContents.file_sub1.match(/sub2/));
|
||||
|
||||
// only catgories starting with 'sub1.sub12' and 'sub1.sub13'
|
||||
assert.match(
|
||||
fileContents.file_sub1_sub12$sub1_sub13,
|
||||
/^(\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] (sub1.sub12.sub123 - sub1_sub12_sub123|sub1.sub13.sub133 - sub1_sub13_sub133)[\s\S]){2}$/ //jshint ignore:line
|
||||
);
|
||||
assert.ok(
|
||||
fileContents.file_sub1_sub12$sub1_sub13.match(/sub123/) &&
|
||||
fileContents.file_sub1_sub12$sub1_sub13.match(/sub133/)
|
||||
);
|
||||
assert.ok(!fileContents.file_sub1_sub12$sub1_sub13.match(/sub14|sub2/));
|
||||
|
||||
// only catgories starting with 'sub1.sub12'
|
||||
assert.match(
|
||||
fileContents.file_sub1_sub12,
|
||||
/^(\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] (sub1.sub12.sub123 - sub1_sub12_sub123)[\s\S]){1}$/ //jshint ignore:line
|
||||
);
|
||||
assert.ok(!fileContents.file_sub1_sub12.match(/sub14|sub2|sub13/));
|
||||
|
||||
}
|
||||
},
|
||||
'with a max file size and no backups': {
|
||||
topic: function() {
|
||||
var testFile = path.join(__dirname, 'fa-maxFileSize-test.log')
|
||||
, logger = log4js.getLogger('max-file-size')
|
||||
, that = this;
|
||||
remove(testFile);
|
||||
remove(testFile + '.1');
|
||||
//log file of 100 bytes maximum, no backups
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(
|
||||
require('../../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 100, 0),
|
||||
'max-file-size'
|
||||
);
|
||||
logger.info("This is the first log message.");
|
||||
logger.info("This is an intermediate log message.");
|
||||
logger.info("This is the second log message.");
|
||||
//wait for the file system to catch up
|
||||
setTimeout(function() {
|
||||
fs.readFile(testFile, "utf8", that.callback);
|
||||
}, 100);
|
||||
},
|
||||
'log file should only contain the second message': function(err, fileContents) {
|
||||
assert.include(fileContents, "This is the second log message.");
|
||||
assert.equal(fileContents.indexOf("This is the first log message."), -1);
|
||||
},
|
||||
'the number of files': {
|
||||
topic: function() {
|
||||
fs.readdir(__dirname, this.callback);
|
||||
},
|
||||
'starting with the test file name should be two': function(err, files) {
|
||||
//there will always be one backup if you've specified a max log size
|
||||
var logFiles = files.filter(
|
||||
function(file) { return file.indexOf('fa-maxFileSize-test.log') > -1; }
|
||||
);
|
||||
assert.equal(logFiles.length, 2);
|
||||
}
|
||||
}
|
||||
},
|
||||
'with a max file size and 2 backups': {
|
||||
topic: function() {
|
||||
var testFile = path.join(__dirname, 'fa-maxFileSize-with-backups-test.log')
|
||||
, logger = log4js.getLogger('max-file-size-backups');
|
||||
remove(testFile);
|
||||
remove(testFile+'.1');
|
||||
remove(testFile+'.2');
|
||||
|
||||
//log file of 50 bytes maximum, 2 backups
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(
|
||||
require('../../lib/appenders/file').appender(testFile, log4js.layouts.basicLayout, 50, 2),
|
||||
'max-file-size-backups'
|
||||
);
|
||||
logger.info("This is the first log message.");
|
||||
logger.info("This is the second log message.");
|
||||
logger.info("This is the third log message.");
|
||||
logger.info("This is the fourth log message.");
|
||||
var that = this;
|
||||
//give the system a chance to open the stream
|
||||
setTimeout(function() {
|
||||
fs.readdir(__dirname, function(err, files) {
|
||||
if (files) {
|
||||
that.callback(null, files.sort());
|
||||
} else {
|
||||
that.callback(err, files);
|
||||
}
|
||||
});
|
||||
}, 200);
|
||||
},
|
||||
'the log files': {
|
||||
topic: function(files) {
|
||||
var logFiles = files.filter(
|
||||
function(file) { return file.indexOf('fa-maxFileSize-with-backups-test.log') > -1; }
|
||||
);
|
||||
return logFiles;
|
||||
},
|
||||
'should be 3': function (files) {
|
||||
assert.equal(files.length, 3);
|
||||
},
|
||||
'should be named in sequence': function (files) {
|
||||
assert.deepEqual(files, [
|
||||
'fa-maxFileSize-with-backups-test.log',
|
||||
'fa-maxFileSize-with-backups-test.log.1',
|
||||
'fa-maxFileSize-with-backups-test.log.2'
|
||||
]);
|
||||
},
|
||||
'and the contents of the first file': {
|
||||
topic: function(logFiles) {
|
||||
fs.readFile(path.join(__dirname, logFiles[0]), "utf8", this.callback);
|
||||
},
|
||||
'should be the last log message': function(contents) {
|
||||
assert.include(contents, 'This is the fourth log message.');
|
||||
}
|
||||
},
|
||||
'and the contents of the second file': {
|
||||
topic: function(logFiles) {
|
||||
fs.readFile(path.join(__dirname, logFiles[1]), "utf8", this.callback);
|
||||
},
|
||||
'should be the third log message': function(contents) {
|
||||
assert.include(contents, 'This is the third log message.');
|
||||
}
|
||||
},
|
||||
'and the contents of the third file': {
|
||||
topic: function(logFiles) {
|
||||
fs.readFile(path.join(__dirname, logFiles[2]), "utf8", this.callback);
|
||||
},
|
||||
'should be the second log message': function(contents) {
|
||||
assert.include(contents, 'This is the second log message.');
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
'with a max file size and 2 compressed backups': {
|
||||
topic: function() {
|
||||
var testFile = path.join(__dirname, 'fa-maxFileSize-with-backups-compressed-test.log')
|
||||
, logger = log4js.getLogger('max-file-size-backups');
|
||||
remove(testFile);
|
||||
remove(testFile+'.1.gz');
|
||||
remove(testFile+'.2.gz');
|
||||
|
||||
//log file of 50 bytes maximum, 2 backups
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(
|
||||
require('../../lib/appenders/file').appender(
|
||||
testFile, log4js.layouts.basicLayout, 50, 2, { compress: true }
|
||||
),
|
||||
'max-file-size-backups'
|
||||
);
|
||||
logger.info("This is the first log message.");
|
||||
logger.info("This is the second log message.");
|
||||
logger.info("This is the third log message.");
|
||||
logger.info("This is the fourth log message.");
|
||||
var that = this;
|
||||
//give the system a chance to open the stream
|
||||
setTimeout(function() {
|
||||
fs.readdir(__dirname, function(err, files) {
|
||||
if (files) {
|
||||
that.callback(null, files.sort());
|
||||
} else {
|
||||
that.callback(err, files);
|
||||
}
|
||||
});
|
||||
}, 1000);
|
||||
},
|
||||
'the log files': {
|
||||
topic: function(files) {
|
||||
var logFiles = files.filter(
|
||||
function(file) {
|
||||
return file.indexOf('fa-maxFileSize-with-backups-compressed-test.log') > -1;
|
||||
}
|
||||
);
|
||||
return logFiles;
|
||||
},
|
||||
'should be 3': function (files) {
|
||||
assert.equal(files.length, 3);
|
||||
},
|
||||
'should be named in sequence': function (files) {
|
||||
assert.deepEqual(files, [
|
||||
'fa-maxFileSize-with-backups-compressed-test.log',
|
||||
'fa-maxFileSize-with-backups-compressed-test.log.1.gz',
|
||||
'fa-maxFileSize-with-backups-compressed-test.log.2.gz'
|
||||
]);
|
||||
},
|
||||
'and the contents of the first file': {
|
||||
topic: function(logFiles) {
|
||||
fs.readFile(path.join(__dirname, logFiles[0]), "utf8", this.callback);
|
||||
},
|
||||
'should be the last log message': function(contents) {
|
||||
assert.include(contents, 'This is the fourth log message.');
|
||||
}
|
||||
},
|
||||
'and the contents of the second file': {
|
||||
topic: function(logFiles) {
|
||||
zlib.gunzip(fs.readFileSync(path.join(__dirname, logFiles[1])), this.callback);
|
||||
},
|
||||
'should be the third log message': function(contents) {
|
||||
assert.include(contents.toString('utf8'), 'This is the third log message.');
|
||||
}
|
||||
},
|
||||
'and the contents of the third file': {
|
||||
topic: function(logFiles) {
|
||||
zlib.gunzip(fs.readFileSync(path.join(__dirname, logFiles[2])), this.callback);
|
||||
},
|
||||
'should be the second log message': function(contents) {
|
||||
assert.include(contents.toString('utf8'), 'This is the second log message.');
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}).addBatch({
|
||||
'configure' : {
|
||||
'with fileAppender': {
|
||||
topic: function() {
|
||||
var log4js = require('../../lib/log4js')
|
||||
, logger;
|
||||
//this config file defines one file appender (to ./tmp-tests.log)
|
||||
//and sets the log level for "tests" to WARN
|
||||
log4js.configure('./test/vows/log4js.json');
|
||||
logger = log4js.getLogger('tests');
|
||||
logger.info('this should not be written to the file');
|
||||
logger.warn('this should be written to the file');
|
||||
|
||||
fs.readFile('tmp-tests.log', 'utf8', this.callback);
|
||||
},
|
||||
'should load appender configuration from a json file': function (err, contents) {
|
||||
assert.include(contents, 'this should be written to the file' + EOL);
|
||||
assert.equal(contents.indexOf('this should not be written to the file'), -1);
|
||||
}
|
||||
}
|
||||
}
|
||||
}).addBatch({
|
||||
'when underlying stream errors': {
|
||||
topic: function() {
|
||||
var consoleArgs
|
||||
, errorHandler
|
||||
, fileAppender = sandbox.require(
|
||||
'../../lib/appenders/file',
|
||||
{
|
||||
globals: {
|
||||
console: {
|
||||
error: function() {
|
||||
consoleArgs = Array.prototype.slice.call(arguments);
|
||||
}
|
||||
}
|
||||
},
|
||||
requires: {
|
||||
'streamroller': {
|
||||
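// stub RollingFileStream: it only captures the 'error' handler so the test below can invoke it directly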
RollingFileStream: function(filename) {
|
||||
|
||||
this.end = function() {};
|
||||
this.on = function(evt, cb) {
|
||||
if (evt === 'error') {
|
||||
errorHandler = cb;
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
fileAppender.appender('test1.log', null, 100);
|
||||
errorHandler({ error: 'aargh' });
|
||||
return consoleArgs;
|
||||
},
|
||||
'should log the error to console.error': function(consoleArgs) {
|
||||
assert.isNotEmpty(consoleArgs);
|
||||
assert.equal(consoleArgs[0], 'log4js.fileAppender - Writing to file %s, error happened ');
|
||||
assert.equal(consoleArgs[1], 'test1.log');
|
||||
assert.equal(consoleArgs[2].error, 'aargh');
|
||||
}
|
||||
}
|
||||
|
||||
}).export(module);
|
197
node_modules/log4js/test/vows/fileSyncAppender-test.js
generated
vendored
Normal file
@ -0,0 +1,197 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, fs = require('fs')
|
||||
, path = require('path')
|
||||
, sandbox = require('sandboxed-module')
|
||||
, log4js = require('../../lib/log4js')
|
||||
, assert = require('assert')
|
||||
, EOL = require('os').EOL || '\n';
|
||||
|
||||
log4js.clearAppenders();
|
||||
|
||||
function remove(filename) {
|
||||
try {
|
||||
fs.unlinkSync(filename);
|
||||
} catch (e) {
|
||||
//doesn't really matter if it failed
|
||||
}
|
||||
}
|
||||
|
||||
vows.describe('log4js fileSyncAppender').addBatch({
|
||||
'with default fileSyncAppender settings': {
|
||||
topic: function() {
|
||||
var that = this
|
||||
, testFile = path.join(__dirname, '/fa-default-sync-test.log')
|
||||
, logger = log4js.getLogger('default-settings');
|
||||
remove(testFile);
|
||||
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(
|
||||
require('../../lib/appenders/fileSync').appender(testFile),
|
||||
'default-settings'
|
||||
);
|
||||
|
||||
logger.info("This should be in the file.");
|
||||
|
||||
fs.readFile(testFile, "utf8", that.callback);
|
||||
},
|
||||
'should write log messages to the file': function (err, fileContents) {
|
||||
assert.include(fileContents, "This should be in the file." + EOL);
|
||||
},
|
||||
'log messages should be in the basic layout format': function(err, fileContents) {
|
||||
assert.match(
|
||||
fileContents,
|
||||
/\[\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}\.\d{3}\] \[INFO\] default-settings - /
|
||||
);
|
||||
}
|
||||
},
|
||||
'with a max file size and no backups': {
|
||||
topic: function() {
|
||||
var testFile = path.join(__dirname, '/fa-maxFileSize-sync-test.log')
|
||||
, logger = log4js.getLogger('max-file-size')
|
||||
, that = this;
|
||||
remove(testFile);
|
||||
remove(testFile + '.1');
|
||||
//log file of 100 bytes maximum, no backups
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(
|
||||
require(
|
||||
'../../lib/appenders/fileSync'
|
||||
).appender(
|
||||
testFile,
|
||||
log4js.layouts.basicLayout,
|
||||
100,
|
||||
0
|
||||
),
|
||||
'max-file-size'
|
||||
);
|
||||
logger.info("This is the first log message.");
|
||||
logger.info("This is an intermediate log message.");
|
||||
logger.info("This is the second log message.");
|
||||
|
||||
fs.readFile(testFile, "utf8", that.callback);
|
||||
},
|
||||
'log file should only contain the second message': function (err, fileContents) {
|
||||
assert.include(fileContents, "This is the second log message." + EOL);
|
||||
assert.equal(fileContents.indexOf("This is the first log message."), -1);
|
||||
},
|
||||
'the number of files': {
|
||||
topic: function() {
|
||||
fs.readdir(__dirname, this.callback);
|
||||
},
|
||||
'starting with the test file name should be two': function(err, files) {
|
||||
//there will always be one backup if you've specified a max log size
|
||||
var logFiles = files.filter(
|
||||
function(file) { return file.indexOf('fa-maxFileSize-sync-test.log') > -1; }
|
||||
);
|
||||
assert.equal(logFiles.length, 2);
|
||||
}
|
||||
}
|
||||
},
|
||||
'with a max file size and 2 backups': {
|
||||
topic: function() {
|
||||
var testFile = path.join(__dirname, '/fa-maxFileSize-with-backups-sync-test.log')
|
||||
, logger = log4js.getLogger('max-file-size-backups');
|
||||
remove(testFile);
|
||||
remove(testFile+'.1');
|
||||
remove(testFile+'.2');
|
||||
|
||||
//log file of 50 bytes maximum, 2 backups
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(
|
||||
require('../../lib/appenders/fileSync').appender(
|
||||
testFile,
|
||||
log4js.layouts.basicLayout,
|
||||
50,
|
||||
2
|
||||
),
|
||||
'max-file-size-backups'
|
||||
);
|
||||
logger.info("This is the first log message.");
|
||||
logger.info("This is the second log message.");
|
||||
logger.info("This is the third log message.");
|
||||
logger.info("This is the fourth log message.");
|
||||
var that = this;
|
||||
|
||||
fs.readdir(__dirname, function(err, files) {
|
||||
if (files) {
|
||||
that.callback(null, files.sort());
|
||||
} else {
|
||||
that.callback(err, files);
|
||||
}
|
||||
});
|
||||
},
|
||||
'the log files': {
|
||||
topic: function(files) {
|
||||
var logFiles = files.filter(
|
||||
function(file) { return file.indexOf('fa-maxFileSize-with-backups-sync-test.log') > -1; }
|
||||
);
|
||||
return logFiles;
|
||||
},
|
||||
'should be 3': function (files) {
|
||||
assert.equal(files.length, 3);
|
||||
},
|
||||
'should be named in sequence': function (files) {
|
||||
assert.deepEqual(files, [
|
||||
'fa-maxFileSize-with-backups-sync-test.log',
|
||||
'fa-maxFileSize-with-backups-sync-test.log.1',
|
||||
'fa-maxFileSize-with-backups-sync-test.log.2'
|
||||
]);
|
||||
},
|
||||
'and the contents of the first file': {
|
||||
topic: function(logFiles) {
|
||||
fs.readFile(path.join(__dirname, logFiles[0]), "utf8", this.callback);
|
||||
},
|
||||
'should be the last log message': function(contents) {
|
||||
assert.include(contents, 'This is the fourth log message.');
|
||||
}
|
||||
},
|
||||
'and the contents of the second file': {
|
||||
topic: function(logFiles) {
|
||||
fs.readFile(path.join(__dirname, logFiles[1]), "utf8", this.callback);
|
||||
},
|
||||
'should be the third log message': function(contents) {
|
||||
assert.include(contents, 'This is the third log message.');
|
||||
}
|
||||
},
|
||||
'and the contents of the third file': {
|
||||
topic: function(logFiles) {
|
||||
fs.readFile(path.join(__dirname, logFiles[2]), "utf8", this.callback);
|
||||
},
|
||||
'should be the second log message': function(contents) {
|
||||
assert.include(contents, 'This is the second log message.');
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}).addBatch({
|
||||
'configure' : {
|
||||
'with fileSyncAppender': {
|
||||
topic: function() {
|
||||
var log4js = require('../../lib/log4js')
|
||||
, logger;
|
||||
//this config defines one file appender (to ./tmp-sync-tests.log)
|
||||
//and sets the log level for "tests" to WARN
|
||||
log4js.configure({
|
||||
appenders: [{
|
||||
category: "tests",
|
||||
type: "file",
|
||||
filename: "tmp-sync-tests.log",
|
||||
layout: { type: "messagePassThrough" }
|
||||
}],
|
||||
|
||||
levels: { tests: "WARN" }
|
||||
});
|
||||
logger = log4js.getLogger('tests');
|
||||
logger.info('this should not be written to the file');
|
||||
logger.warn('this should be written to the file');
|
||||
|
||||
fs.readFile('tmp-sync-tests.log', 'utf8', this.callback);
|
||||
},
|
||||
'should load appender configuration from a json file': function(err, contents) {
|
||||
assert.include(contents, 'this should be written to the file' + EOL);
|
||||
assert.equal(contents.indexOf('this should not be written to the file'), -1);
|
||||
}
|
||||
}
|
||||
}
|
||||
}).export(module);
|
257
node_modules/log4js/test/vows/gelfAppender-test.js
generated
vendored
Normal file
@ -0,0 +1,257 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, sandbox = require('sandboxed-module')
|
||||
, log4js = require('../../lib/log4js')
|
||||
, realLayouts = require('../../lib/layouts')
|
||||
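// setupLogging wires the gelf appender up with faked dgram, zlib and layouts modules, so each test can inspect exactly what would have been sent over UDP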
, setupLogging = function(options, category, compressedLength) {
|
||||
var fakeDgram = {
|
||||
sent: false,
|
||||
socket: {
|
||||
packetLength: 0,
|
||||
closed: false,
|
||||
close: function() {
|
||||
this.closed = true;
|
||||
},
|
||||
send: function(pkt, offset, pktLength, port, host) {
|
||||
fakeDgram.sent = true;
|
||||
this.packet = pkt;
|
||||
this.offset = offset;
|
||||
this.packetLength = pktLength;
|
||||
this.port = port;
|
||||
this.host = host;
|
||||
}
|
||||
},
|
||||
createSocket: function(type) {
|
||||
this.type = type;
|
||||
return this.socket;
|
||||
}
|
||||
}
|
||||
, fakeZlib = {
|
||||
gzip: function(objectToCompress, callback) {
|
||||
fakeZlib.uncompressed = objectToCompress;
|
||||
if (this.shouldError) {
|
||||
callback({ stack: "oh noes" });
|
||||
return;
|
||||
}
|
||||
|
||||
if (compressedLength) {
|
||||
callback(null, { length: compressedLength });
|
||||
} else {
|
||||
callback(null, "I've been compressed");
|
||||
}
|
||||
}
|
||||
}
|
||||
, exitHandler
|
||||
, fakeConsole = {
|
||||
error: function(message) {
|
||||
this.message = message;
|
||||
}
|
||||
}
|
||||
, fakeLayouts = {
|
||||
layout: function(type, options) {
|
||||
this.type = type;
|
||||
this.options = options;
|
||||
return realLayouts.messagePassThroughLayout;
|
||||
},
|
||||
messagePassThroughLayout: realLayouts.messagePassThroughLayout
|
||||
}
|
||||
, appender = sandbox.require('../../lib/appenders/gelf', {
|
||||
requires: {
|
||||
dgram: fakeDgram,
|
||||
zlib: fakeZlib,
|
||||
'../layouts': fakeLayouts
|
||||
},
|
||||
globals: {
|
||||
process: {
|
||||
on: function(evt, handler) {
|
||||
if (evt === 'exit') {
|
||||
exitHandler = handler;
|
||||
}
|
||||
}
|
||||
},
|
||||
console: fakeConsole
|
||||
}
|
||||
});
|
||||
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(appender.configure(options || {}), category || "gelf-test");
|
||||
return {
|
||||
dgram: fakeDgram,
|
||||
compress: fakeZlib,
|
||||
exitHandler: exitHandler,
|
||||
console: fakeConsole,
|
||||
layouts: fakeLayouts,
|
||||
logger: log4js.getLogger(category || "gelf-test")
|
||||
};
|
||||
};
|
||||
|
||||
vows.describe('log4js gelfAppender').addBatch({
|
||||
|
||||
'with default gelfAppender settings': {
|
||||
topic: function() {
|
||||
var setup = setupLogging();
|
||||
setup.logger.info("This is a test");
|
||||
return setup;
|
||||
},
|
||||
'the dgram packet': {
|
||||
topic: function(setup) {
|
||||
return setup.dgram;
|
||||
},
|
||||
'should be sent via udp to the localhost gelf server': function(dgram) {
|
||||
assert.equal(dgram.type, "udp4");
|
||||
assert.equal(dgram.socket.host, "localhost");
|
||||
assert.equal(dgram.socket.port, 12201);
|
||||
assert.equal(dgram.socket.offset, 0);
|
||||
assert.ok(dgram.socket.packetLength > 0, "Received blank message");
|
||||
},
|
||||
'should be compressed': function(dgram) {
|
||||
assert.equal(dgram.socket.packet, "I've been compressed");
|
||||
}
|
||||
},
|
||||
'the uncompressed log message': {
|
||||
topic: function(setup) {
|
||||
var message = JSON.parse(setup.compress.uncompressed);
|
||||
return message;
|
||||
},
|
||||
'should be in the gelf format': function(message) {
|
||||
assert.equal(message.version, '1.1');
|
||||
assert.equal(message.host, require('os').hostname());
|
||||
assert.equal(message.level, 6); //INFO
|
||||
assert.equal(message.short_message, 'This is a test');
|
||||
}
|
||||
}
|
||||
},
|
||||
'with a message longer than 8k': {
|
||||
topic: function() {
|
||||
var setup = setupLogging(undefined, undefined, 10240);
|
||||
setup.logger.info("Blah.");
|
||||
return setup;
|
||||
},
|
||||
'the dgram packet': {
|
||||
topic: function(setup) {
|
||||
return setup.dgram;
|
||||
},
|
||||
'should not be sent': function(dgram) {
|
||||
assert.equal(dgram.sent, false);
|
||||
}
|
||||
}
|
||||
},
|
||||
'with non-default options': {
|
||||
topic: function() {
|
||||
var setup = setupLogging({
|
||||
host: 'somewhere',
|
||||
port: 12345,
|
||||
hostname: 'cheese',
|
||||
facility: 'nonsense'
|
||||
});
|
||||
setup.logger.debug("Just testing.");
|
||||
return setup;
|
||||
},
|
||||
'the dgram packet': {
|
||||
topic: function(setup) {
|
||||
return setup.dgram;
|
||||
},
|
||||
'should pick up the options': function(dgram) {
|
||||
assert.equal(dgram.socket.host, 'somewhere');
|
||||
assert.equal(dgram.socket.port, 12345);
|
||||
}
|
||||
},
|
||||
'the uncompressed packet': {
|
||||
topic: function(setup) {
|
||||
var message = JSON.parse(setup.compress.uncompressed);
|
||||
return message;
|
||||
},
|
||||
'should pick up the options': function(message) {
|
||||
assert.equal(message.host, 'cheese');
|
||||
assert.equal(message._facility, 'nonsense');
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
'on process.exit': {
|
||||
topic: function() {
|
||||
var setup = setupLogging();
|
||||
setup.exitHandler();
|
||||
return setup;
|
||||
},
|
||||
'should close open sockets': function(setup) {
|
||||
assert.isTrue(setup.dgram.socket.closed);
|
||||
}
|
||||
},
|
||||
|
||||
'on zlib error': {
|
||||
topic: function() {
|
||||
var setup = setupLogging();
|
||||
setup.compress.shouldError = true;
|
||||
setup.logger.info('whatever');
|
||||
return setup;
|
||||
},
|
||||
'should output to console.error': function(setup) {
|
||||
assert.equal(setup.console.message, 'oh noes');
|
||||
}
|
||||
},
|
||||
|
||||
'with layout in configuration': {
|
||||
topic: function() {
|
||||
var setup = setupLogging({
|
||||
layout: {
|
||||
type: 'madeuplayout',
|
||||
earlgrey: 'yes, please'
|
||||
}
|
||||
});
|
||||
return setup;
|
||||
},
|
||||
'should pass options to layout': function(setup) {
|
||||
assert.equal(setup.layouts.type, 'madeuplayout');
|
||||
assert.equal(setup.layouts.options.earlgrey, 'yes, please');
|
||||
}
|
||||
},
|
||||
|
||||
'with custom fields options': {
|
||||
topic: function() {
|
||||
var setup = setupLogging({
|
||||
host: 'somewhere',
|
||||
port: 12345,
|
||||
hostname: 'cheese',
|
||||
facility: 'nonsense',
|
||||
customFields: {
|
||||
_every1: 'Hello every one',
|
||||
_every2: 'Hello every two'
|
||||
}
|
||||
});
|
||||
var myFields = {
|
||||
GELF: true,
|
||||
_every2: 'Overwritten!',
|
||||
_myField: 'This is my field!'
|
||||
};
|
||||
setup.logger.debug(myFields, "Just testing.");
|
||||
return setup;
|
||||
},
|
||||
'the dgram packet': {
|
||||
topic: function(setup) {
|
||||
return setup.dgram;
|
||||
},
|
||||
'should pick up the options': function(dgram) {
|
||||
assert.equal(dgram.socket.host, 'somewhere');
|
||||
assert.equal(dgram.socket.port, 12345);
|
||||
}
|
||||
},
|
||||
'the uncompressed packet': {
|
||||
topic: function(setup) {
|
||||
var message = JSON.parse(setup.compress.uncompressed);
|
||||
return message;
|
||||
},
|
||||
'should pick up the options': function(message) {
|
||||
assert.equal(message.host, 'cheese');
|
||||
assert.isUndefined(message.GELF); // make sure flag was removed
|
||||
assert.equal(message._facility, 'nonsense');
|
||||
assert.equal(message._every1, 'Hello every one'); // the default value
|
||||
assert.equal(message._every2, 'Overwritten!'); // the overwritten value
|
||||
assert.equal(message._myField, 'This is my field!'); // the value for this message only
|
||||
assert.equal(message.short_message, 'Just testing.'); // skip the field object
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}).export(module);
|
121
node_modules/log4js/test/vows/global-log-level-test.js
generated
vendored
Normal file
@ -0,0 +1,121 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, assert = require('assert');
|
||||
|
||||
vows.describe('log4js global loglevel').addBatch({
|
||||
'global loglevel' : {
|
||||
topic: function() {
|
||||
var log4js = require('../../lib/log4js');
|
||||
return log4js;
|
||||
},
|
||||
|
||||
'set global loglevel on creation': function(log4js) {
|
||||
var log1 = log4js.getLogger('log1');
|
||||
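// pick a level that is guaranteed to differ from the logger's current level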
var level = 'OFF';
|
||||
if (log1.level.toString() == level) {
|
||||
level = 'TRACE';
|
||||
}
|
||||
assert.notEqual(log1.level.toString(), level);
|
||||
|
||||
log4js.setGlobalLogLevel(level);
|
||||
assert.equal(log1.level.toString(), level);
|
||||
|
||||
var log2 = log4js.getLogger('log2');
|
||||
assert.equal(log2.level.toString(), level);
|
||||
},
|
||||
|
||||
'global change loglevel': function(log4js) {
|
||||
var log1 = log4js.getLogger('log1');
|
||||
var log2 = log4js.getLogger('log2');
|
||||
var level = 'OFF';
|
||||
if (log1.level.toString() == level) {
|
||||
level = 'TRACE';
|
||||
}
|
||||
assert.notEqual(log1.level.toString(), level);
|
||||
|
||||
log4js.setGlobalLogLevel(level);
|
||||
assert.equal(log1.level.toString(), level);
|
||||
assert.equal(log2.level.toString(), level);
|
||||
},
|
||||
|
||||
'override loglevel': function(log4js) {
|
||||
var log1 = log4js.getLogger('log1');
|
||||
var log2 = log4js.getLogger('log2');
|
||||
var level = 'OFF';
|
||||
if (log1.level.toString() == level) {
|
||||
level = 'TRACE';
|
||||
}
|
||||
assert.notEqual(log1.level.toString(), level);
|
||||
|
||||
var oldLevel = log1.level.toString();
|
||||
assert.equal(log2.level.toString(), oldLevel);
|
||||
|
||||
log2.setLevel(level);
|
||||
assert.equal(log1.level.toString(), oldLevel);
|
||||
assert.equal(log2.level.toString(), level);
|
||||
assert.notEqual(oldLevel, level);
|
||||
|
||||
log2.removeLevel();
|
||||
assert.equal(log1.level.toString(), oldLevel);
|
||||
assert.equal(log2.level.toString(), oldLevel);
|
||||
},
|
||||
|
||||
'preload loglevel': function(log4js) {
|
||||
var log1 = log4js.getLogger('log1');
|
||||
var level = 'OFF';
|
||||
if (log1.level.toString() == level) {
|
||||
level = 'TRACE';
|
||||
}
|
||||
assert.notEqual(log1.level.toString(), level);
|
||||
|
||||
var oldLevel = log1.level.toString();
|
||||
log4js.getLogger('log2').setLevel(level);
|
||||
|
||||
assert.equal(log1.level.toString(), oldLevel);
|
||||
|
||||
// get again same logger but as different variable
|
||||
var log2 = log4js.getLogger('log2');
|
||||
assert.equal(log2.level.toString(), level);
|
||||
assert.notEqual(oldLevel, level);
|
||||
|
||||
log2.removeLevel();
|
||||
assert.equal(log1.level.toString(), oldLevel);
|
||||
assert.equal(log2.level.toString(), oldLevel);
|
||||
},
|
||||
|
||||
'set level on all categories': function(log4js) {
|
||||
// Get 2 loggers
|
||||
var log1 = log4js.getLogger('log1');
|
||||
var log2 = log4js.getLogger('log2');
|
||||
|
||||
// First a test with 2 categories with different levels
|
||||
var config = {
|
||||
'levels': {
|
||||
'log1': 'ERROR',
|
||||
'log2': 'WARN'
|
||||
}
|
||||
};
|
||||
log4js.configure(config);
|
||||
|
||||
// Check if the levels are set correctly
|
||||
assert.equal('ERROR', log1.level.toString());
|
||||
assert.equal('WARN', log2.level.toString());
|
||||
|
||||
log1.removeLevel();
|
||||
log2.removeLevel();
|
||||
|
||||
// Almost identical test, but now we set
|
||||
// level on all categories
|
||||
var config2 = {
|
||||
'levels': {
|
||||
'[all]': 'DEBUG'
|
||||
}
|
||||
};
|
||||
log4js.configure(config2);
|
||||
|
||||
// Check if the loggers got the DEBUG level
|
||||
assert.equal('DEBUG', log1.level.toString());
|
||||
assert.equal('DEBUG', log2.level.toString());
|
||||
}
|
||||
}
|
||||
}).export(module);
|
112
node_modules/log4js/test/vows/hipchatAppender-test.js
generated
vendored
Normal file
@ -0,0 +1,112 @@
|
||||
"use strict";
|
||||
var vows = require('vows'),
|
||||
assert = require('assert'),
|
||||
log4js = require('../../lib/log4js'),
|
||||
sandbox = require('sandboxed-module');
|
||||
|
||||
function setupLogging(category, options) {
|
||||
var lastRequest = {};
|
||||
|
||||
var fakeRequest = function(args, level){
|
||||
lastRequest.notifier = this;
|
||||
lastRequest.body = args[0];
|
||||
lastRequest.callback = args[1];
|
||||
lastRequest.level = level;
|
||||
};
|
||||
|
||||
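// fakeHipchatNotifier mimics the hipchat-notifier API; each info/warning/failure/success call is recorded in lastRequest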
var fakeHipchatNotifier = {
|
||||
'make': function(room, token, from, host, notify){
|
||||
return {
|
||||
'room': room,
|
||||
'token': token,
|
||||
'from': from || '',
|
||||
'host': host || 'api.hipchat.com',
|
||||
'notify': notify || false,
|
||||
'setRoom': function(val){ this.room = val; },
|
||||
'setFrom': function(val){ this.from = val; },
|
||||
'setHost': function(val){ this.host = val; },
|
||||
'setNotify': function(val){ this.notify = val; },
|
||||
'info': function(){ fakeRequest.call(this, arguments, 'info'); },
|
||||
'warning': function(){ fakeRequest.call(this, arguments, 'warning'); },
|
||||
'failure': function(){ fakeRequest.call(this, arguments, 'failure'); },
|
||||
'success': function(){ fakeRequest.call(this, arguments, 'success'); }
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
var hipchatModule = sandbox.require('../../lib/appenders/hipchat', {
|
||||
requires: {
|
||||
'hipchat-notifier': fakeHipchatNotifier
|
||||
}
|
||||
});
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(hipchatModule.configure(options), category);
|
||||
|
||||
return {
|
||||
logger: log4js.getLogger(category),
|
||||
lastRequest: lastRequest
|
||||
};
|
||||
}
|
||||
|
||||
vows.describe('HipChat appender').addBatch({
|
||||
'when logging to HipChat v2 API': {
|
||||
topic: function() {
|
||||
var customCallback = function(err, res, body){ return 'works'; };
|
||||
|
||||
var setup = setupLogging('myCategory', {
|
||||
"type": "hipchat",
|
||||
"hipchat_token": "User_Token_With_Notification_Privs",
|
||||
"hipchat_room": "Room_ID_Or_Name",
|
||||
"hipchat_from": "Log4js_Test",
|
||||
"hipchat_notify": true,
|
||||
"hipchat_host": "hipchat.your-company.tld",
|
||||
"hipchat_response_callback": customCallback
|
||||
});
|
||||
setup.logger.warn('Log event #1');
|
||||
return setup;
|
||||
},
|
||||
'a request to hipchat_host should be sent': function (topic) {
|
||||
assert.equal(topic.lastRequest.notifier.host, "hipchat.your-company.tld");
|
||||
assert.equal(topic.lastRequest.notifier.notify, true);
|
||||
assert.equal(topic.lastRequest.body, 'Log event #1');
|
||||
assert.equal(topic.lastRequest.level, 'warning');
|
||||
},
|
||||
'a custom callback to the HipChat response is supported': function(topic) {
|
||||
assert.equal(topic.lastRequest.callback(), 'works');
|
||||
}
|
||||
},
|
||||
'when missing options': {
|
||||
topic: function() {
|
||||
var setup = setupLogging('myLogger', {
|
||||
"type": "hipchat",
|
||||
});
|
||||
setup.logger.error('Log event #2');
|
||||
return setup;
|
||||
},
|
||||
'it sets some defaults': function (topic) {
|
||||
assert.equal(topic.lastRequest.notifier.host, "api.hipchat.com");
|
||||
assert.equal(topic.lastRequest.notifier.notify, false);
|
||||
assert.equal(topic.lastRequest.body, 'Log event #2');
|
||||
assert.equal(topic.lastRequest.level, 'failure');
|
||||
}
|
||||
},
|
||||
'when basicLayout is provided': {
|
||||
topic: function() {
|
||||
var setup = setupLogging('myLogger', {
|
||||
"type": "hipchat",
|
||||
"layout": log4js.layouts.basicLayout
|
||||
});
|
||||
setup.logger.debug('Log event #3');
|
||||
return setup;
|
||||
},
|
||||
'it should include the timestamp': function (topic) {
|
||||
|
||||
// basicLayout adds [TIMESTAMP] [LEVEL] category - message
|
||||
// e.g. [2016-06-10 11:50:53.819] [DEBUG] myLogger - Log event #3
|
||||
|
||||
assert.match(topic.lastRequest.body, /^\[[^\]]+\] \[[^\]]+\].*Log event \#3$/);
|
||||
assert.equal(topic.lastRequest.level, 'info');
|
||||
}
|
||||
}
|
||||
|
||||
}).export(module);
|
338
node_modules/log4js/test/vows/layouts-test.js
generated
vendored
Normal file
@ -0,0 +1,338 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, os = require('os')
|
||||
, semver = require('semver')
|
||||
, EOL = os.EOL || '\n';
|
||||
|
||||
//used for patternLayout tests.
|
||||
function test(args, pattern, value) {
|
||||
var layout = args[0]
|
||||
, event = args[1]
|
||||
, tokens = args[2];
|
||||
|
||||
assert.equal(layout(pattern, tokens)(event), value);
|
||||
}
|
||||
|
||||
vows.describe('log4js layouts').addBatch({
|
||||
'colouredLayout': {
|
||||
topic: function() {
|
||||
return require('../../lib/layouts').colouredLayout;
|
||||
},
|
||||
|
||||
'should apply level colour codes to output': function(layout) {
|
||||
var output = layout({
|
||||
data: ["nonsense"],
|
||||
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
|
||||
categoryName: "cheese",
|
||||
level: {
|
||||
toString: function() { return "ERROR"; }
|
||||
}
|
||||
});
|
||||
assert.equal(output, '\x1B[31m[2010-12-05 14:18:30.045] [ERROR] cheese - \x1B[39mnonsense');
|
||||
},
|
||||
'should support the console.log format for the message': function(layout) {
|
||||
var output = layout({
|
||||
data: ["thing %d", 2],
|
||||
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
|
||||
categoryName: "cheese",
|
||||
level: {
|
||||
toString: function() { return "ERROR"; }
|
||||
}
|
||||
});
|
||||
assert.equal(output, '\x1B[31m[2010-12-05 14:18:30.045] [ERROR] cheese - \x1B[39mthing 2');
|
||||
}
|
||||
},
|
||||
|
||||
'messagePassThroughLayout': {
|
||||
topic: function() {
|
||||
return require('../../lib/layouts').messagePassThroughLayout;
|
||||
},
|
||||
'should take a logevent and output only the message' : function(layout) {
|
||||
assert.equal(layout({
|
||||
data: ["nonsense"],
|
||||
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
|
||||
categoryName: "cheese",
|
||||
level: {
|
||||
colour: "green",
|
||||
toString: function() { return "ERROR"; }
|
||||
}
|
||||
}), "nonsense");
|
||||
},
|
||||
'should support the console.log format for the message' : function(layout) {
|
||||
assert.equal(layout({
|
||||
data: ["thing %d", 1, "cheese"],
|
||||
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
|
||||
categoryName: "cheese",
|
||||
level : {
|
||||
colour: "green",
|
||||
toString: function() { return "ERROR"; }
|
||||
}
|
||||
}), "thing 1 cheese");
|
||||
},
|
||||
'should output the first item even if it is not a string': function(layout) {
|
||||
assert.equal(layout({
|
||||
data: [ { thing: 1} ],
|
||||
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
|
||||
categoryName: "cheese",
|
||||
level: {
|
||||
colour: "green",
|
||||
toString: function() { return "ERROR"; }
|
||||
}
|
||||
}), "{ thing: 1 }");
|
||||
},
|
||||
'should print the stacks of a passed error objects': function(layout) {
|
||||
assert.isArray(
|
||||
layout({
|
||||
data: [ new Error() ],
|
||||
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
|
||||
categoryName: "cheese",
|
||||
level: {
|
||||
colour: "green",
|
||||
toString: function() { return "ERROR"; }
|
||||
}
|
||||
}).match(
|
||||
new RegExp('' +
|
||||
/Error\s+at Object\..*\s+/.source +
|
||||
/\((.*)test[\\\/]vows[\\\/]layouts-test\.js/.source +
|
||||
/\:\d+\:\d+\)\s+at runTest/.source
|
||||
)
|
||||
),
|
||||
'regexp did not return a match'
|
||||
);
|
||||
},
|
||||
'with passed augmented errors': {
|
||||
topic: function(layout){
|
||||
var e = new Error("My Unique Error Message");
|
||||
e.augmented = "My Unique attribute value";
|
||||
e.augObj = { at1: "at2" };
|
||||
return layout({
|
||||
data: [ e ],
|
||||
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
|
||||
categoryName: "cheese",
|
||||
level: {
|
||||
colour: "green",
|
||||
toString: function() { return "ERROR"; }
|
||||
}
|
||||
});
|
||||
},
|
||||
'should print the contained error message': function(layoutOutput) {
|
||||
var m = layoutOutput.match(/Error: My Unique Error Message/);
|
||||
assert.isArray(m);
|
||||
},
|
||||
'should print error augmented string attributes': function(layoutOutput) {
|
||||
var m = layoutOutput.match(/augmented:\s'My Unique attribute value'/);
|
||||
assert.isArray(m);
|
||||
},
|
||||
'should print error augmented object attributes': function(layoutOutput) {
|
||||
var m = layoutOutput.match(/augObj:\s\{ at1: 'at2' \}/);
|
||||
assert.isArray(m);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
},
|
||||
|
||||
'basicLayout': {
|
||||
topic: function() {
|
||||
var layout = require('../../lib/layouts').basicLayout,
|
||||
event = {
|
||||
data: ['this is a test'],
|
||||
startTime: new Date(2010, 11, 5, 14, 18, 30, 45),
|
||||
categoryName: "tests",
|
||||
level: {
|
||||
toString: function() { return "DEBUG"; }
|
||||
}
|
||||
};
|
||||
return [layout, event];
|
||||
},
|
||||
'should take a logevent and output a formatted string': function(args) {
|
||||
var layout = args[0], event = args[1];
|
||||
assert.equal(layout(event), "[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test");
|
||||
},
|
||||
'should output a stacktrace, message if the event has an error attached': function(args) {
|
||||
var i, layout = args[0], event = args[1], output, lines,
|
||||
error = new Error("Some made-up error"),
|
||||
stack = error.stack.split(/\n/);
|
||||
|
||||
event.data = ['this is a test', error];
|
||||
output = layout(event);
|
||||
lines = output.split(/\n/);
|
||||
|
||||
if (semver.satisfies(process.version, '>=6')) {
|
||||
assert.equal(lines.length, stack.length);
|
||||
assert.equal(
|
||||
lines[0],
|
||||
"[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test Error: Some made-up error"
|
||||
);
|
||||
for (i = 1; i < stack.length; i++) {
|
||||
assert.equal(lines[i], stack[i]);
|
||||
}
|
||||
} else {
|
||||
assert.equal(lines.length - 1, stack.length);
|
||||
assert.equal(
|
||||
lines[0],
|
||||
"[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test [Error: Some made-up error]"
|
||||
);
|
||||
for (i = 1; i < stack.length; i++) {
|
||||
assert.equal(lines[i+2], stack[i+1]);
|
||||
}
|
||||
}
|
||||
|
||||
},
|
||||
'should output any extra data in the log event as util.inspect strings': function(args) {
|
||||
var layout = args[0], event = args[1], output, lines;
|
||||
event.data = ['this is a test', {
|
||||
name: 'Cheese',
|
||||
message: 'Gorgonzola smells.'
|
||||
}];
|
||||
output = layout(event);
|
||||
assert.equal(
|
||||
output,
|
||||
"[2010-12-05 14:18:30.045] [DEBUG] tests - this is a test " +
|
||||
"{ name: 'Cheese', message: 'Gorgonzola smells.' }"
|
||||
);
|
||||
}
|
||||
},
|
||||
|
||||
'patternLayout': {
|
||||
topic: function() {
|
||||
var event = {
|
||||
data: ['this is a test'],
|
||||
startTime: new Date('2010-12-05T14:18:30.045Z'), //new Date(2010, 11, 5, 14, 18, 30, 45),
|
||||
categoryName: "multiple.levels.of.tests",
|
||||
level: {
|
||||
toString: function() { return "DEBUG"; }
|
||||
}
|
||||
}, layout = require('../../lib/layouts').patternLayout
|
||||
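// custom tokens for %x{}: a plain string, a function, and a function that reads the logEvent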
, tokens = {
|
||||
testString: 'testStringToken',
|
||||
testFunction: function() { return 'testFunctionToken'; },
|
||||
fnThatUsesLogEvent: function(logEvent) { return logEvent.level.toString(); }
|
||||
};
|
||||
|
||||
//override getTimezoneOffset
|
||||
event.startTime.getTimezoneOffset = function() { return 0; };
|
||||
return [layout, event, tokens];
|
||||
},
|
||||
|
||||
'should default to "time logLevel loggerName - message"': function(args) {
|
||||
test(args, null, "14:18:30 DEBUG multiple.levels.of.tests - this is a test" + EOL);
|
||||
},
|
||||
'%r should output time only': function(args) {
|
||||
test(args, '%r', '14:18:30');
|
||||
},
|
||||
'%p should output the log level': function(args) {
|
||||
test(args, '%p', 'DEBUG');
|
||||
},
|
||||
'%c should output the log category': function(args) {
|
||||
test(args, '%c', 'multiple.levels.of.tests');
|
||||
},
|
||||
'%m should output the log data': function(args) {
|
||||
test(args, '%m', 'this is a test');
|
||||
},
|
||||
'%n should output a new line': function(args) {
|
||||
test(args, '%n', EOL);
|
||||
},
|
||||
'%h should output hostname' : function(args) {
|
||||
test(args, '%h', os.hostname().toString());
|
||||
},
|
||||
'%z should output pid' : function(args) {
|
||||
test(args, '%z', process.pid);
|
||||
},
|
||||
'%c should handle category names like java-style package names': function(args) {
|
||||
test(args, '%c{1}', 'tests');
|
||||
test(args, '%c{2}', 'of.tests');
|
||||
test(args, '%c{3}', 'levels.of.tests');
|
||||
test(args, '%c{4}', 'multiple.levels.of.tests');
|
||||
test(args, '%c{5}', 'multiple.levels.of.tests');
|
||||
test(args, '%c{99}', 'multiple.levels.of.tests');
|
||||
},
|
||||
'%d should output the date in ISO8601 format': function(args) {
|
||||
test(args, '%d', '2010-12-05 14:18:30.045');
|
||||
},
|
||||
'%d should allow for format specification': function(args) {
|
||||
test(args, '%d{ISO8601_WITH_TZ_OFFSET}', '2010-12-05T14:18:30.045-0000');
|
||||
test(args, '%d{ISO8601}', '2010-12-05 14:18:30.045');
|
||||
test(args, '%d{ABSOLUTE}', '14:18:30.045');
|
||||
test(args, '%d{DATE}', '05 12 2010 14:18:30.045');
|
||||
test(args, '%d{yy MM dd hh mm ss}', '10 12 05 14 18 30');
|
||||
test(args, '%d{yyyy MM dd}', '2010 12 05');
|
||||
test(args, '%d{yyyy MM dd hh mm ss SSS}', '2010 12 05 14 18 30 045');
|
||||
},
|
||||
'%% should output %': function(args) {
|
||||
test(args, '%%', '%');
|
||||
},
|
||||
'should output anything not preceded by % as literal': function(args) {
|
||||
test(args, 'blah blah blah', 'blah blah blah');
|
||||
},
|
||||
'should output the original string if no replacer matches the token': function(args) {
|
||||
test(args, '%a{3}', 'a{3}');
|
||||
},
|
||||
'should handle complicated patterns': function(args) {
|
||||
test(args,
|
||||
'%m%n %c{2} at %d{ABSOLUTE} cheese %p%n',
|
||||
'this is a test'+ EOL +' of.tests at 14:18:30.045 cheese DEBUG' + EOL
|
||||
);
|
||||
},
|
||||
'should truncate fields if specified': function(args) {
|
||||
test(args, '%.4m', 'this');
|
||||
test(args, '%.7m', 'this is');
|
||||
test(args, '%.9m', 'this is a');
|
||||
test(args, '%.14m', 'this is a test');
|
||||
test(args, '%.2919102m', 'this is a test');
|
||||
},
|
||||
'should pad fields if specified': function(args) {
|
||||
test(args, '%10p', ' DEBUG');
|
||||
test(args, '%8p', ' DEBUG');
|
||||
test(args, '%6p', ' DEBUG');
|
||||
test(args, '%4p', 'DEBUG');
|
||||
test(args, '%-4p', 'DEBUG');
|
||||
test(args, '%-6p', 'DEBUG ');
|
||||
test(args, '%-8p', 'DEBUG ');
|
||||
test(args, '%-10p', 'DEBUG ');
|
||||
},
|
||||
'%[%r%] should output colored time': function(args) {
|
||||
test(args, '%[%r%]', '\x1B[36m14:18:30\x1B[39m');
|
||||
},
|
||||
'%x{testString} should output the string stored in tokens': function(args) {
|
||||
test(args, '%x{testString}', 'testStringToken');
|
||||
},
|
||||
'%x{testFunction} should output the result of the function stored in tokens': function(args) {
|
||||
test(args, '%x{testFunction}', 'testFunctionToken');
|
||||
},
|
||||
'%x{doesNotExist} should output null when the token is not defined': function(args) {
|
||||
test(args, '%x{doesNotExist}', 'null');
|
||||
},
|
||||
'%x{fnThatUsesLogEvent} should be able to use the logEvent': function(args) {
|
||||
test(args, '%x{fnThatUsesLogEvent}', 'DEBUG');
|
||||
},
|
||||
'%x with no token name should output null': function(args) {
|
||||
test(args, '%x', 'null');
|
||||
}
|
||||
},
|
||||
'layout makers': {
|
||||
topic: require('../../lib/layouts'),
|
||||
'should have a maker for each layout': function(layouts) {
|
||||
assert.ok(layouts.layout("messagePassThrough"));
|
||||
assert.ok(layouts.layout("basic"));
|
||||
assert.ok(layouts.layout("colored"));
|
||||
assert.ok(layouts.layout("coloured"));
|
||||
assert.ok(layouts.layout("pattern"));
|
||||
}
|
||||
},
|
||||
'add layout': {
|
||||
topic: require('../../lib/layouts'),
|
||||
'should be able to add a layout': function(layouts) {
|
||||
layouts.addLayout('test_layout', function(config){
|
||||
assert.equal(config, 'test_config');
|
||||
return function(logEvent) {
|
||||
return "TEST LAYOUT >"+logEvent.data;
|
||||
};
|
||||
});
|
||||
var serializer = layouts.layout('test_layout', 'test_config');
|
||||
assert.ok(serializer);
|
||||
assert.equal(serializer({data: "INPUT"}), "TEST LAYOUT >INPUT");
|
||||
}
|
||||
}
|
||||
}).export(module);
|
464
node_modules/log4js/test/vows/levels-test.js
generated
vendored
Normal file
@ -0,0 +1,464 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, levels = require('../../lib/levels');
|
||||
|
||||
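// assertThat(level) returns a fluent helper whose methods run the given comparison against every level in the supplied list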
function assertThat(level) {
|
||||
function assertForEach(assertion, test, otherLevels) {
|
||||
otherLevels.forEach(function(other) {
|
||||
assertion.call(assert, test.call(level, other));
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
isLessThanOrEqualTo: function(levels) {
|
||||
assertForEach(assert.isTrue, level.isLessThanOrEqualTo, levels);
|
||||
},
|
||||
isNotLessThanOrEqualTo: function(levels) {
|
||||
assertForEach(assert.isFalse, level.isLessThanOrEqualTo, levels);
|
||||
},
|
||||
isGreaterThanOrEqualTo: function(levels) {
|
||||
assertForEach(assert.isTrue, level.isGreaterThanOrEqualTo, levels);
|
||||
},
|
||||
isNotGreaterThanOrEqualTo: function(levels) {
|
||||
assertForEach(assert.isFalse, level.isGreaterThanOrEqualTo, levels);
|
||||
},
|
||||
isEqualTo: function(levels) {
|
||||
assertForEach(assert.isTrue, level.isEqualTo, levels);
|
||||
},
|
||||
isNotEqualTo: function(levels) {
|
||||
assertForEach(assert.isFalse, level.isEqualTo, levels);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
vows.describe('levels').addBatch({
|
||||
'values': {
|
||||
topic: levels,
|
||||
'should define some levels': function(levels) {
|
||||
assert.isNotNull(levels.ALL);
|
||||
assert.isNotNull(levels.TRACE);
|
||||
assert.isNotNull(levels.DEBUG);
|
||||
assert.isNotNull(levels.INFO);
|
||||
assert.isNotNull(levels.WARN);
|
||||
assert.isNotNull(levels.ERROR);
|
||||
assert.isNotNull(levels.FATAL);
|
||||
assert.isNotNull(levels.MARK);
|
||||
assert.isNotNull(levels.OFF);
|
||||
},
|
||||
'ALL': {
|
||||
topic: levels.ALL,
|
||||
'should be less than the other levels': function(all) {
|
||||
assertThat(all).isLessThanOrEqualTo(
|
||||
[
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]
|
||||
);
|
||||
},
|
||||
'should be greater than no levels': function(all) {
|
||||
assertThat(all).isNotGreaterThanOrEqualTo(
|
||||
[
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]
|
||||
);
|
||||
},
|
||||
'should only be equal to ALL': function(all) {
|
||||
assertThat(all).isEqualTo([levels.toLevel("ALL")]);
|
||||
assertThat(all).isNotEqualTo(
|
||||
[
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]
|
||||
);
|
||||
}
|
||||
},
|
||||
'TRACE': {
|
||||
topic: levels.TRACE,
|
||||
'should be less than DEBUG': function(trace) {
|
||||
assertThat(trace).isLessThanOrEqualTo(
|
||||
[
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]
|
||||
);
|
||||
assertThat(trace).isNotLessThanOrEqualTo([levels.ALL]);
|
||||
},
|
||||
'should be greater than ALL': function(trace) {
|
||||
assertThat(trace).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE]);
|
||||
assertThat(trace).isNotGreaterThanOrEqualTo(
|
||||
[
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]
|
||||
);
|
||||
},
|
||||
'should only be equal to TRACE': function(trace) {
|
||||
assertThat(trace).isEqualTo([levels.toLevel("TRACE")]);
|
||||
assertThat(trace).isNotEqualTo(
|
||||
[
|
||||
levels.ALL,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]
|
||||
);
|
||||
}
|
||||
},
|
||||
'DEBUG': {
|
||||
topic: levels.DEBUG,
|
||||
'should be less than INFO': function(debug) {
|
||||
assertThat(debug).isLessThanOrEqualTo(
|
||||
[
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]
|
||||
);
|
||||
assertThat(debug).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE]);
|
||||
},
|
||||
'should be greater than TRACE': function(debug) {
|
||||
assertThat(debug).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE]);
|
||||
assertThat(debug).isNotGreaterThanOrEqualTo(
|
||||
[
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]
|
||||
);
|
||||
},
|
||||
'should only be equal to DEBUG': function(trace) {
|
||||
assertThat(trace).isEqualTo([levels.toLevel("DEBUG")]);
|
||||
assertThat(trace).isNotEqualTo(
|
||||
[
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]
|
||||
);
|
||||
}
|
||||
},
|
||||
'INFO': {
|
||||
topic: levels.INFO,
|
||||
'should be less than WARN': function(info) {
|
||||
assertThat(info).isLessThanOrEqualTo([
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]);
|
||||
assertThat(info).isNotLessThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG]);
|
||||
},
|
||||
'should be greater than DEBUG': function(info) {
|
||||
assertThat(info).isGreaterThanOrEqualTo([levels.ALL, levels.TRACE, levels.DEBUG]);
|
||||
assertThat(info).isNotGreaterThanOrEqualTo([
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]);
|
||||
},
|
||||
'should only be equal to INFO': function(trace) {
|
||||
assertThat(trace).isEqualTo([levels.toLevel("INFO")]);
|
||||
assertThat(trace).isNotEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]);
|
||||
}
|
||||
},
|
||||
'WARN': {
|
||||
topic: levels.WARN,
|
||||
'should be less than ERROR': function(warn) {
|
||||
assertThat(warn).isLessThanOrEqualTo([levels.ERROR, levels.FATAL, levels.MARK, levels.OFF]);
|
||||
assertThat(warn).isNotLessThanOrEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO
|
||||
]);
|
||||
},
|
||||
'should be greater than INFO': function(warn) {
|
||||
assertThat(warn).isGreaterThanOrEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO
|
||||
]);
|
||||
assertThat(warn).isNotGreaterThanOrEqualTo([
|
||||
levels.ERROR, levels.FATAL, levels.MARK, levels.OFF
|
||||
]);
|
||||
},
|
||||
'should only be equal to WARN': function(trace) {
|
||||
assertThat(trace).isEqualTo([levels.toLevel("WARN")]);
|
||||
assertThat(trace).isNotEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.OFF
|
||||
]);
|
||||
}
|
||||
},
|
||||
'ERROR': {
|
||||
topic: levels.ERROR,
|
||||
'should be less than FATAL': function(error) {
|
||||
assertThat(error).isLessThanOrEqualTo([levels.FATAL, levels.MARK, levels.OFF]);
|
||||
assertThat(error).isNotLessThanOrEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN
|
||||
]);
|
||||
},
|
||||
'should be greater than WARN': function(error) {
|
||||
assertThat(error).isGreaterThanOrEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN
|
||||
]);
|
||||
assertThat(error).isNotGreaterThanOrEqualTo([levels.FATAL, levels.MARK, levels.OFF]);
|
||||
},
|
||||
'should only be equal to ERROR': function(trace) {
|
||||
assertThat(trace).isEqualTo([levels.toLevel("ERROR")]);
|
||||
assertThat(trace).isNotEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.FATAL,
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]);
|
||||
}
|
||||
},
|
||||
'FATAL': {
|
||||
topic: levels.FATAL,
|
||||
'should be less than OFF': function(fatal) {
|
||||
assertThat(fatal).isLessThanOrEqualTo([levels.MARK, levels.OFF]);
|
||||
assertThat(fatal).isNotLessThanOrEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR
|
||||
]);
|
||||
},
|
||||
'should be greater than ERROR': function(fatal) {
|
||||
assertThat(fatal).isGreaterThanOrEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR
|
||||
]);
|
||||
assertThat(fatal).isNotGreaterThanOrEqualTo([levels.MARK, levels.OFF]);
|
||||
},
|
||||
'should only be equal to FATAL': function(fatal) {
|
||||
assertThat(fatal).isEqualTo([levels.toLevel("FATAL")]);
|
||||
assertThat(fatal).isNotEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.MARK,
|
||||
levels.OFF
|
||||
]);
|
||||
}
|
||||
},
|
||||
'MARK': {
|
||||
topic: levels.MARK,
|
||||
'should be less than OFF': function(mark) {
|
||||
assertThat(mark).isLessThanOrEqualTo([levels.OFF]);
|
||||
assertThat(mark).isNotLessThanOrEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.FATAL,
|
||||
levels.ERROR
|
||||
]);
|
||||
},
|
||||
'should be greater than FATAL': function(mark) {
|
||||
assertThat(mark).isGreaterThanOrEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL
|
||||
]);
|
||||
assertThat(mark).isNotGreaterThanOrEqualTo([levels.OFF]);
|
||||
},
|
||||
'should only be equal to MARK': function(mark) {
|
||||
assertThat(mark).isEqualTo([levels.toLevel("MARK")]);
|
||||
assertThat(mark).isNotEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.OFF
|
||||
]);
|
||||
}
|
||||
},
|
||||
'OFF': {
|
||||
topic: levels.OFF,
|
||||
'should not be less than anything': function(off) {
|
||||
assertThat(off).isNotLessThanOrEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.MARK
|
||||
]);
|
||||
},
|
||||
'should be greater than everything': function(off) {
|
||||
assertThat(off).isGreaterThanOrEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.MARK
|
||||
]);
|
||||
},
|
||||
'should only be equal to OFF': function(off) {
|
||||
assertThat(off).isEqualTo([levels.toLevel("OFF")]);
|
||||
assertThat(off).isNotEqualTo([
|
||||
levels.ALL,
|
||||
levels.TRACE,
|
||||
levels.DEBUG,
|
||||
levels.INFO,
|
||||
levels.WARN,
|
||||
levels.ERROR,
|
||||
levels.FATAL,
|
||||
levels.MARK
|
||||
]);
|
||||
}
|
||||
}
|
||||
},
|
||||
'isGreaterThanOrEqualTo': {
|
||||
topic: levels.INFO,
|
||||
'should handle string arguments': function(info) {
|
||||
assertThat(info).isGreaterThanOrEqualTo(["all", "trace", "debug"]);
|
||||
assertThat(info).isNotGreaterThanOrEqualTo(['warn', 'ERROR', 'Fatal', 'MARK', 'off']);
|
||||
}
|
||||
},
|
||||
'isLessThanOrEqualTo': {
|
||||
topic: levels.INFO,
|
||||
'should handle string arguments': function(info) {
|
||||
assertThat(info).isNotLessThanOrEqualTo(["all", "trace", "debug"]);
|
||||
assertThat(info).isLessThanOrEqualTo(['warn', 'ERROR', 'Fatal', 'MARK', 'off']);
|
||||
}
|
||||
},
|
||||
'isEqualTo': {
|
||||
topic: levels.INFO,
|
||||
'should handle string arguments': function(info) {
|
||||
assertThat(info).isEqualTo(["info", "INFO", "iNfO"]);
|
||||
}
|
||||
},
|
||||
'toLevel': {
|
||||
'with lowercase argument': {
|
||||
topic: levels.toLevel("debug"),
|
||||
'should take the string and return the corresponding level': function(level) {
|
||||
assert.equal(level, levels.DEBUG);
|
||||
}
|
||||
},
|
||||
'with uppercase argument': {
|
||||
topic: levels.toLevel("DEBUG"),
|
||||
'should take the string and return the corresponding level': function(level) {
|
||||
assert.equal(level, levels.DEBUG);
|
||||
}
|
||||
},
|
||||
'with varying case': {
|
||||
topic: levels.toLevel("DeBuG"),
|
||||
'should take the string and return the corresponding level': function(level) {
|
||||
assert.equal(level, levels.DEBUG);
|
||||
}
|
||||
},
|
||||
'with unrecognised argument': {
|
||||
topic: levels.toLevel("cheese"),
|
||||
'should return undefined': function(level) {
|
||||
assert.isUndefined(level);
|
||||
}
|
||||
},
|
||||
'with unrecognised argument and default value': {
|
||||
topic: levels.toLevel("cheese", levels.DEBUG),
|
||||
'should return default value': function(level) {
|
||||
assert.equal(level, levels.DEBUG);
|
||||
}
|
||||
}
|
||||
}
|
||||
}).export(module);
|
77
node_modules/log4js/test/vows/log-abspath-test.js
generated
vendored
Normal file
@ -0,0 +1,77 @@
"use strict";
var vows = require('vows')
  , assert = require('assert')
  , path = require('path')
  , sandbox = require('sandboxed-module');

vows.describe('log4js-abspath').addBatch({
  'options': {
    topic: function() {
      var appenderOptions,
        log4js = sandbox.require(
          '../../lib/log4js',
          { requires:
            { './appenders/fake':
              { name: "fake",
                appender: function() {},
                configure: function(configuration, options) {
                  appenderOptions = options;
                  return function() {};
                }
              }
            }
          }
        ),
        config = {
          "appenders": [
            {
              "type" : "fake",
              "filename" : "cheesy-wotsits.log"
            }
          ]
        };

      log4js.configure(config, {
        cwd: '/absolute/path/to'
      });
      return appenderOptions;
    },
    'should be passed to appenders during configuration': function(options) {
      assert.equal(options.cwd, '/absolute/path/to');
    }
  },

  'file appender': {
    topic: function() {
      var fileOpened,
        fileAppender = sandbox.require(
          '../../lib/appenders/file',
          { requires:
            { 'streamroller':
              { RollingFileStream:
                function(file) {
                  fileOpened = file;
                  return {
                    on: function() {},
                    end: function() {}
                  };
                }
              }
            }
          }
        );
      fileAppender.configure(
        {
          filename: "whatever.log",
          maxLogSize: 10
        },
        { cwd: '/absolute/path/to' }
      );
      return fileOpened;
    },
    'should prepend options.cwd to config.filename': function(fileOpened) {
      var expected = path.sep + path.join("absolute", "path", "to", "whatever.log");
      assert.equal(fileOpened, expected);
    }
  },
}).export(module);
16
node_modules/log4js/test/vows/log4js.json
generated
vendored
Normal file
@ -0,0 +1,16 @@
{
  "appenders": [
    {
      "category": "tests",
      "type": "file",
      "filename": "tmp-tests.log",
      "layout": {
        "type": "messagePassThrough"
      }
    }
  ],

  "levels": {
    "tests": "WARN"
  }
}
92
node_modules/log4js/test/vows/logFacesAppender-test.js
generated
vendored
Normal file
@ -0,0 +1,92 @@
|
||||
"use strict";
|
||||
var vows = require('vows'),
|
||||
assert = require('assert'),
|
||||
log4js = require('../../lib/log4js'),
|
||||
sandbox = require('sandboxed-module');
|
||||
|
||||
var log = log4js.getLogger('lfstest');
|
||||
|
||||
function setupLogging(category, options) {
|
||||
var sent = {};
|
||||
|
||||
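// fake() replaces the appender's send function and copies every outgoing event's fields into sent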
function fake(event){
|
||||
Object.keys(event).forEach(function(key) {
|
||||
sent[key] = event[key];
|
||||
});
|
||||
}
|
||||
|
||||
var lfsModule = require('../../lib/appenders/logFacesAppender');
|
||||
options.send = fake;
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(lfsModule.configure(options), category);
|
||||
lfsModule.setContext("foo", "bar");
|
||||
lfsModule.setContext("bar", "foo");
|
||||
|
||||
return {
|
||||
logger: log4js.getLogger(category),
|
||||
results: sent
|
||||
};
|
||||
}
|
||||
|
||||
vows.describe('logFaces appender').addBatch({
|
||||
'when using HTTP receivers': {
|
||||
topic: function() {
|
||||
var setup = setupLogging('myCategory', {
|
||||
"type": "logFacesAppender",
|
||||
"application": "LFS-HTTP",
|
||||
"url": "http://localhost/receivers/rx1"
|
||||
});
|
||||
|
||||
setup.logger.warn('Log event #1');
|
||||
return setup;
|
||||
},
|
||||
'an event should be sent': function (topic) {
|
||||
var event = topic.results;
|
||||
assert.equal(event.a, 'LFS-HTTP');
|
||||
assert.equal(event.m, 'Log event #1');
|
||||
assert.equal(event.g, 'myCategory');
|
||||
assert.equal(event.p, 'WARN');
|
||||
assert.equal(event.p_foo, 'bar');
|
||||
assert.equal(event.p_bar, 'foo');
|
||||
|
||||
// Assert timestamp, up to hours resolution.
|
||||
var date = new Date(event.t);
|
||||
assert.equal(
|
||||
date.toISOString().substring(0, 14),
|
||||
new Date().toISOString().substring(0, 14)
|
||||
);
|
||||
}
|
||||
},
|
||||
|
||||
'when using UDP receivers': {
|
||||
topic: function() {
|
||||
var setup = setupLogging('udpCategory', {
|
||||
"type": "logFacesAppender",
|
||||
"application": "LFS-UDP",
|
||||
"remoteHost": "127.0.0.1",
|
||||
"port": 55201
|
||||
});
|
||||
|
||||
setup.logger.error('Log event #2');
|
||||
return setup;
|
||||
},
|
||||
'an event should be sent': function (topic) {
|
||||
var event = topic.results;
|
||||
assert.equal(event.a, 'LFS-UDP');
|
||||
assert.equal(event.m, 'Log event #2');
|
||||
assert.equal(event.g, 'udpCategory');
|
||||
assert.equal(event.p, 'ERROR');
|
||||
assert.equal(event.p_foo, 'bar');
|
||||
assert.equal(event.p_bar, 'foo');
|
||||
|
||||
// Assert timestamp, up to hours resolution.
|
||||
var date = new Date(event.t);
|
||||
assert.equal(
|
||||
date.toISOString().substring(0, 14),
|
||||
new Date().toISOString().substring(0, 14)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}).export(module);
|
93
node_modules/log4js/test/vows/logLevelFilter-test.js
generated
vendored
Normal file
@@ -0,0 +1,93 @@
"use strict";
var vows = require('vows')
, fs = require('fs')
, assert = require('assert')
, os = require('os')
, EOL = require('os').EOL || '\n';

function remove(filename) {
  try {
    fs.unlinkSync(filename);
  } catch (e) {
    //doesn't really matter if it failed
  }
}

vows.describe('log4js logLevelFilter').addBatch({
  'appender': {
    topic: function() {
      var log4js = require('../../lib/log4js'), logEvents = [], logger;
      log4js.clearAppenders();
      log4js.addAppender(
        require('../../lib/appenders/logLevelFilter')
        .appender(
          'ERROR',
          undefined,
          function(evt) { logEvents.push(evt); }
        ),
        "logLevelTest"
      );

      logger = log4js.getLogger("logLevelTest");
      logger.debug('this should not trigger an event');
      logger.warn('neither should this');
      logger.error('this should, though');
      logger.fatal('so should this');
      return logEvents;
    },
    'should only pass log events greater than or equal to its own level' : function(logEvents) {
      assert.equal(logEvents.length, 2);
      assert.equal(logEvents[0].data[0], 'this should, though');
      assert.equal(logEvents[1].data[0], 'so should this');
    }
  },

  'configure': {
    topic: function() {
      var log4js = require('../../lib/log4js')
      , logger;

      remove(__dirname + '/logLevelFilter.log');
      remove(__dirname + '/logLevelFilter-warnings.log');
      remove(__dirname + '/logLevelFilter-debugs.log');

      log4js.configure('test/vows/with-logLevelFilter.json');
      logger = log4js.getLogger("tests");
      logger.debug('debug');
      logger.info('info');
      logger.error('error');
      logger.warn('warn');
      logger.debug('debug');
      logger.trace('trace');
      //wait for the file system to catch up
      setTimeout(this.callback, 500);
    },
    'tmp-tests.log': {
      topic: function() {
        fs.readFile(__dirname + '/logLevelFilter.log', 'utf8', this.callback);
      },
      'should contain all log messages': function (contents) {
        var messages = contents.trim().split(EOL);
        assert.deepEqual(messages, ['debug','info','error','warn','debug','trace']);
      }
    },
    'tmp-tests-warnings.log': {
      topic: function() {
        fs.readFile(__dirname + '/logLevelFilter-warnings.log','utf8',this.callback);
      },
      'should contain only error and warning log messages': function(contents) {
        var messages = contents.trim().split(EOL);
        assert.deepEqual(messages, ['error','warn']);
      }
    },
    'tmp-tests-debugs.log': {
      topic: function() {
        fs.readFile(__dirname + '/logLevelFilter-debugs.log','utf8',this.callback);
      },
      'should contain only trace and debug log messages': function(contents) {
        var messages = contents.trim().split(EOL);
        assert.deepEqual(messages, ['debug','debug','trace']);
      }
    }
  }
}).export(module);
81
node_modules/log4js/test/vows/logger-test.js
generated
vendored
Normal file
@@ -0,0 +1,81 @@
"use strict";
var vows = require('vows')
, assert = require('assert')
, levels = require('../../lib/levels')
, loggerModule = require('../../lib/logger')
, Logger = loggerModule.Logger;

vows.describe('../../lib/logger').addBatch({
  'constructor with no parameters': {
    topic: new Logger(),
    'should use default category': function(logger) {
      assert.equal(logger.category, Logger.DEFAULT_CATEGORY);
    },
    'should use TRACE log level': function(logger) {
      assert.equal(logger.level, levels.TRACE);
    }
  },

  'constructor with category': {
    topic: new Logger('cheese'),
    'should use category': function(logger) {
      assert.equal(logger.category, 'cheese');
    },
    'should use TRACE log level': function(logger) {
      assert.equal(logger.level, levels.TRACE);
    }
  },

  'constructor with category and level': {
    topic: new Logger('cheese', 'debug'),
    'should use category': function(logger) {
      assert.equal(logger.category, 'cheese');
    },
    'should use level': function(logger) {
      assert.equal(logger.level, levels.DEBUG);
    }
  },

  'isLevelEnabled': {
    topic: new Logger('cheese', 'info'),
    'should provide a level enabled function for all levels': function(logger) {
      assert.isFunction(logger.isTraceEnabled);
      assert.isFunction(logger.isDebugEnabled);
      assert.isFunction(logger.isInfoEnabled);
      assert.isFunction(logger.isWarnEnabled);
      assert.isFunction(logger.isErrorEnabled);
      assert.isFunction(logger.isFatalEnabled);
    },
    'should return the right values': function(logger) {
      assert.isFalse(logger.isTraceEnabled());
      assert.isFalse(logger.isDebugEnabled());
      assert.isTrue(logger.isInfoEnabled());
      assert.isTrue(logger.isWarnEnabled());
      assert.isTrue(logger.isErrorEnabled());
      assert.isTrue(logger.isFatalEnabled());
    }
  },

  'should emit log events': {
    topic: function() {
      var events = [],
      logger = new Logger();
      logger.addListener('log', function (logEvent) { events.push(logEvent); });
      logger.debug('Event 1');
      loggerModule.disableAllLogWrites();
      logger.debug('Event 2');
      loggerModule.enableAllLogWrites();
      logger.debug('Event 3');
      return events;
    },

    'when log writes are enabled': function(events) {
      assert.equal(events[0].data[0], 'Event 1');
    },

    'but not when log writes are disabled': function(events) {
      assert.equal(events.length, 2);
      assert.equal(events[1].data[0], 'Event 3');
    }
  }
}).exportTo(module);
636
node_modules/log4js/test/vows/logging-test.js
generated
vendored
Normal file
@ -0,0 +1,636 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, sandbox = require('sandboxed-module');
|
||||
|
||||
function setupConsoleTest() {
|
||||
var fakeConsole = {}
|
||||
, logEvents = []
|
||||
, log4js;
|
||||
|
||||
['trace','debug','log','info','warn','error'].forEach(function(fn) {
|
||||
fakeConsole[fn] = function() {
|
||||
throw new Error("this should not be called.");
|
||||
};
|
||||
});
|
||||
|
||||
log4js = sandbox.require(
|
||||
'../../lib/log4js',
|
||||
{
|
||||
globals: {
|
||||
console: fakeConsole
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(function(evt) {
|
||||
logEvents.push(evt);
|
||||
});
|
||||
|
||||
return { log4js: log4js, logEvents: logEvents, fakeConsole: fakeConsole };
|
||||
}
|
||||
|
||||
vows.describe('log4js').addBatch({
|
||||
|
||||
'getBufferedLogger': {
|
||||
topic: function () {
|
||||
var log4js = require('../../lib/log4js');
|
||||
log4js.clearAppenders();
|
||||
var logger = log4js.getBufferedLogger('tests');
|
||||
return logger;
|
||||
},
|
||||
|
||||
'should take a category and return a logger': function (logger) {
|
||||
assert.equal(logger.target.category, 'tests');
|
||||
assert.isFunction(logger.flush);
|
||||
assert.isFunction(logger.trace);
|
||||
assert.isFunction(logger.debug);
|
||||
assert.isFunction(logger.info);
|
||||
assert.isFunction(logger.warn);
|
||||
assert.isFunction(logger.error);
|
||||
assert.isFunction(logger.fatal);
|
||||
},
|
||||
|
||||
'cache events': {
|
||||
topic: function () {
|
||||
var log4js = require('../../lib/log4js');
|
||||
log4js.clearAppenders();
|
||||
var logger = log4js.getBufferedLogger('tests1');
|
||||
var events = [];
|
||||
logger.target.addListener("log", function (logEvent) { events.push(logEvent); });
|
||||
logger.debug("Debug event");
|
||||
logger.trace("Trace event 1");
|
||||
logger.trace("Trace event 2");
|
||||
logger.warn("Warning event");
|
||||
logger.error("Aargh!", new Error("Pants are on fire!"));
|
||||
logger.error(
|
||||
"Simulated CouchDB problem",
|
||||
{ err: 127, cause: "incendiary underwear" }
|
||||
);
|
||||
return events;
|
||||
},
|
||||
|
||||
'should not emit log events if .flush() is not called.': function (events) {
|
||||
assert.equal(events.length, 0);
|
||||
}
|
||||
},
|
||||
|
||||
'log events after flush() is called': {
|
||||
topic: function () {
|
||||
var log4js = require('../../lib/log4js');
|
||||
log4js.clearAppenders();
|
||||
var logger = log4js.getBufferedLogger('tests2');
|
||||
logger.target.setLevel("TRACE");
|
||||
var events = [];
|
||||
logger.target.addListener("log", function (logEvent) { events.push(logEvent); });
|
||||
logger.debug("Debug event");
|
||||
logger.trace("Trace event 1");
|
||||
logger.trace("Trace event 2");
|
||||
logger.warn("Warning event");
|
||||
logger.error("Aargh!", new Error("Pants are on fire!"));
|
||||
logger.error(
|
||||
"Simulated CouchDB problem",
|
||||
{ err: 127, cause: "incendiary underwear" }
|
||||
);
|
||||
logger.flush();
|
||||
return events;
|
||||
},
|
||||
|
||||
'should emit log events when .flush() is called.': function (events) {
|
||||
assert.equal(events.length, 6);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
'getLogger': {
|
||||
topic: function() {
|
||||
var log4js = require('../../lib/log4js');
|
||||
log4js.clearAppenders();
|
||||
var logger = log4js.getLogger('tests');
|
||||
logger.setLevel("DEBUG");
|
||||
return logger;
|
||||
},
|
||||
|
||||
'should take a category and return a logger': function(logger) {
|
||||
assert.equal(logger.category, 'tests');
|
||||
assert.equal(logger.level.toString(), "DEBUG");
|
||||
assert.isFunction(logger.debug);
|
||||
assert.isFunction(logger.info);
|
||||
assert.isFunction(logger.warn);
|
||||
assert.isFunction(logger.error);
|
||||
assert.isFunction(logger.fatal);
|
||||
},
|
||||
|
||||
'log events' : {
|
||||
topic: function(logger) {
|
||||
var events = [];
|
||||
logger.addListener("log", function (logEvent) { events.push(logEvent); });
|
||||
logger.debug("Debug event");
|
||||
logger.trace("Trace event 1");
|
||||
logger.trace("Trace event 2");
|
||||
logger.warn("Warning event");
|
||||
logger.error("Aargh!", new Error("Pants are on fire!"));
|
||||
logger.error("Simulated CouchDB problem", { err: 127, cause: "incendiary underwear" });
|
||||
return events;
|
||||
},
|
||||
|
||||
'should emit log events': function(events) {
|
||||
assert.equal(events[0].level.toString(), 'DEBUG');
|
||||
assert.equal(events[0].data[0], 'Debug event');
|
||||
assert.instanceOf(events[0].startTime, Date);
|
||||
},
|
||||
|
||||
'should not emit events of a lower level': function(events) {
|
||||
assert.equal(events.length, 4);
|
||||
assert.equal(events[1].level.toString(), 'WARN');
|
||||
},
|
||||
|
||||
'should include the error if passed in': function(events) {
|
||||
assert.instanceOf(events[2].data[1], Error);
|
||||
assert.equal(events[2].data[1].message, 'Pants are on fire!');
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
'when shutdown is called': {
|
||||
topic: function() {
|
||||
var callback = this.callback;
|
||||
var events = {
|
||||
appenderShutdownCalled: false,
|
||||
shutdownCallbackCalled: false
|
||||
},
|
||||
log4js = sandbox.require(
|
||||
'../../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'./appenders/file':
|
||||
{
|
||||
name: "file",
|
||||
appender: function() {},
|
||||
configure: function(configuration) {
|
||||
return function() {};
|
||||
},
|
||||
shutdown: function(cb) {
|
||||
events.appenderShutdownCalled = true;
|
||||
cb();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
),
|
||||
config = { appenders:
|
||||
[ { "type" : "file",
|
||||
"filename" : "cheesy-wotsits.log",
|
||||
"maxLogSize" : 1024,
|
||||
"backups" : 3
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
log4js.configure(config);
|
||||
log4js.shutdown(function shutdownCallback() {
|
||||
events.shutdownCallbackCalled = true;
|
||||
// Re-enable log writing so other tests that use logger are not
|
||||
// affected.
|
||||
require('../../lib/logger').enableAllLogWrites();
|
||||
callback(null, events);
|
||||
});
|
||||
},
|
||||
|
||||
'should invoke appender shutdowns': function(events) {
|
||||
assert.ok(events.appenderShutdownCalled);
|
||||
},
|
||||
|
||||
'should call callback': function(events) {
|
||||
assert.ok(events.shutdownCallbackCalled);
|
||||
}
|
||||
},
|
||||
|
||||
'invalid configuration': {
|
||||
'should throw an exception': function() {
|
||||
assert.throws(function() {
|
||||
require('log4js').configure({ "type": "invalid" });
|
||||
});
|
||||
}
|
||||
},
|
||||
|
||||
'configuration when passed as object': {
|
||||
topic: function() {
|
||||
var appenderConfig,
|
||||
log4js = sandbox.require(
|
||||
'../../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'./appenders/file':
|
||||
{
|
||||
name: "file",
|
||||
appender: function() {},
|
||||
configure: function(configuration) {
|
||||
appenderConfig = configuration;
|
||||
return function() {};
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
),
|
||||
config = { appenders:
|
||||
[ { "type" : "file",
|
||||
"filename" : "cheesy-wotsits.log",
|
||||
"maxLogSize" : 1024,
|
||||
"backups" : 3
|
||||
}
|
||||
]
|
||||
};
|
||||
log4js.configure(config);
|
||||
return appenderConfig;
|
||||
},
|
||||
'should be passed to appender config': function(configuration) {
|
||||
assert.equal(configuration.filename, 'cheesy-wotsits.log');
|
||||
}
|
||||
},
|
||||
|
||||
'configuration that causes an error': {
|
||||
topic: function() {
|
||||
var log4js = sandbox.require(
|
||||
'../../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'./appenders/file':
|
||||
{
|
||||
name: "file",
|
||||
appender: function() {},
|
||||
configure: function(configuration) {
|
||||
throw new Error("oh noes");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
),
|
||||
config = { appenders:
|
||||
[ { "type" : "file",
|
||||
"filename" : "cheesy-wotsits.log",
|
||||
"maxLogSize" : 1024,
|
||||
"backups" : 3
|
||||
}
|
||||
]
|
||||
};
|
||||
try {
|
||||
log4js.configure(config);
|
||||
} catch (e) {
|
||||
return e;
|
||||
}
|
||||
},
|
||||
'should wrap error in a meaningful message': function(e) {
|
||||
assert.ok(e.message.indexOf('log4js configuration problem for') > -1);
|
||||
}
|
||||
},
|
||||
|
||||
'configuration when passed as filename': {
|
||||
topic: function() {
|
||||
var appenderConfig,
|
||||
configFilename,
|
||||
log4js = sandbox.require(
|
||||
'../../lib/log4js',
|
||||
{ requires:
|
||||
{ 'fs':
|
||||
{ statSync:
|
||||
function() {
|
||||
return { mtime: Date.now() };
|
||||
},
|
||||
readFileSync:
|
||||
function(filename) {
|
||||
configFilename = filename;
|
||||
return JSON.stringify({
|
||||
appenders: [
|
||||
{ type: "file"
|
||||
, filename: "whatever.log"
|
||||
}
|
||||
]
|
||||
});
|
||||
},
|
||||
readdirSync:
|
||||
function() {
|
||||
return ['file'];
|
||||
}
|
||||
},
|
||||
'./appenders/file':
|
||||
{ name: "file",
|
||||
appender: function() {},
|
||||
configure: function(configuration) {
|
||||
appenderConfig = configuration;
|
||||
return function() {};
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
log4js.configure("/path/to/cheese.json");
|
||||
return [ configFilename, appenderConfig ];
|
||||
},
|
||||
'should read the config from a file': function(args) {
|
||||
assert.equal(args[0], '/path/to/cheese.json');
|
||||
},
|
||||
'should pass config to appender': function(args) {
|
||||
assert.equal(args[1].filename, "whatever.log");
|
||||
}
|
||||
},
|
||||
|
||||
'with no appenders defined' : {
|
||||
topic: function() {
|
||||
var logger,
|
||||
that = this,
|
||||
fakeConsoleAppender = {
|
||||
name: "console",
|
||||
appender: function() {
|
||||
return function(evt) {
|
||||
that.callback(null, evt);
|
||||
};
|
||||
},
|
||||
configure: function() {
|
||||
return fakeConsoleAppender.appender();
|
||||
}
|
||||
},
|
||||
log4js = sandbox.require(
|
||||
'../../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'./appenders/stdout': fakeConsoleAppender
|
||||
}
|
||||
}
|
||||
);
|
||||
logger = log4js.getLogger("some-logger");
|
||||
logger.debug("This is a test");
|
||||
},
|
||||
'should default to the stdout appender': function(evt) {
|
||||
assert.equal(evt.data[0], "This is a test");
|
||||
}
|
||||
},
|
||||
|
||||
'addAppender' : {
|
||||
topic: function() {
|
||||
var log4js = require('../../lib/log4js');
|
||||
log4js.clearAppenders();
|
||||
return log4js;
|
||||
},
|
||||
'without a category': {
|
||||
'should register the function as a listener for all loggers': function (log4js) {
|
||||
var appenderEvent,
|
||||
appender = function(evt) { appenderEvent = evt; },
|
||||
logger = log4js.getLogger("tests");
|
||||
|
||||
log4js.addAppender(appender);
|
||||
logger.debug("This is a test");
|
||||
assert.equal(appenderEvent.data[0], "This is a test");
|
||||
assert.equal(appenderEvent.categoryName, "tests");
|
||||
assert.equal(appenderEvent.level.toString(), "DEBUG");
|
||||
},
|
||||
'if an appender for a category is defined': {
|
||||
'should register for that category': function (log4js) {
|
||||
var otherEvent,
|
||||
appenderEvent,
|
||||
cheeseLogger;
|
||||
|
||||
log4js.addAppender(function (evt) { appenderEvent = evt; });
|
||||
log4js.addAppender(function (evt) { otherEvent = evt; }, 'cheese');
|
||||
|
||||
cheeseLogger = log4js.getLogger('cheese');
|
||||
cheeseLogger.debug('This is a test');
|
||||
assert.deepEqual(appenderEvent, otherEvent);
|
||||
assert.equal(otherEvent.data[0], 'This is a test');
|
||||
assert.equal(otherEvent.categoryName, 'cheese');
|
||||
|
||||
otherEvent = undefined;
|
||||
appenderEvent = undefined;
|
||||
log4js.getLogger('pants').debug("this should not be propagated to otherEvent");
|
||||
assert.isUndefined(otherEvent);
|
||||
assert.equal(appenderEvent.data[0], "this should not be propagated to otherEvent");
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
'with a category': {
|
||||
'should only register the function as a listener for that category': function(log4js) {
|
||||
var appenderEvent,
|
||||
appender = function(evt) { appenderEvent = evt; },
|
||||
logger = log4js.getLogger("tests");
|
||||
|
||||
log4js.addAppender(appender, 'tests');
|
||||
logger.debug('this is a category test');
|
||||
assert.equal(appenderEvent.data[0], 'this is a category test');
|
||||
|
||||
appenderEvent = undefined;
|
||||
log4js.getLogger('some other category').debug('Cheese');
|
||||
assert.isUndefined(appenderEvent);
|
||||
}
|
||||
},
|
||||
|
||||
'with multiple categories': {
|
||||
'should register the function as a listener for all the categories': function(log4js) {
|
||||
var appenderEvent,
|
||||
appender = function(evt) { appenderEvent = evt; },
|
||||
logger = log4js.getLogger('tests');
|
||||
|
||||
log4js.addAppender(appender, 'tests', 'biscuits');
|
||||
|
||||
logger.debug('this is a test');
|
||||
assert.equal(appenderEvent.data[0], 'this is a test');
|
||||
appenderEvent = undefined;
|
||||
|
||||
var otherLogger = log4js.getLogger('biscuits');
|
||||
otherLogger.debug("mmm... garibaldis");
|
||||
assert.equal(appenderEvent.data[0], "mmm... garibaldis");
|
||||
|
||||
appenderEvent = undefined;
|
||||
|
||||
log4js.getLogger("something else").debug("pants");
|
||||
assert.isUndefined(appenderEvent);
|
||||
},
|
||||
'should register the function when the list of categories is an array': function(log4js) {
|
||||
var appenderEvent,
|
||||
appender = function(evt) { appenderEvent = evt; };
|
||||
|
||||
log4js.addAppender(appender, ['tests', 'pants']);
|
||||
|
||||
log4js.getLogger('tests').debug('this is a test');
|
||||
assert.equal(appenderEvent.data[0], 'this is a test');
|
||||
|
||||
appenderEvent = undefined;
|
||||
|
||||
log4js.getLogger('pants').debug("big pants");
|
||||
assert.equal(appenderEvent.data[0], "big pants");
|
||||
|
||||
appenderEvent = undefined;
|
||||
|
||||
log4js.getLogger("something else").debug("pants");
|
||||
assert.isUndefined(appenderEvent);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
'default setup': {
|
||||
topic: function() {
|
||||
var appenderEvents = [],
|
||||
fakeConsole = {
|
||||
'name': 'stdout',
|
||||
'appender': function () {
|
||||
return function(evt) {
|
||||
appenderEvents.push(evt);
|
||||
};
|
||||
},
|
||||
'configure': function (config) {
|
||||
return fakeConsole.appender();
|
||||
}
|
||||
},
|
||||
globalConsole = {
|
||||
log: function() { }
|
||||
},
|
||||
log4js = sandbox.require(
|
||||
'../../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'./appenders/stdout': fakeConsole
|
||||
},
|
||||
globals: {
|
||||
console: globalConsole
|
||||
}
|
||||
}
|
||||
),
|
||||
logger = log4js.getLogger('a-test');
|
||||
|
||||
logger.debug("this is a test");
|
||||
globalConsole.log("this should not be logged");
|
||||
|
||||
return appenderEvents;
|
||||
},
|
||||
|
||||
'should configure a stdout appender': function(appenderEvents) {
|
||||
assert.equal(appenderEvents[0].data[0], 'this is a test');
|
||||
},
|
||||
|
||||
'should not replace console.log with log4js version': function(appenderEvents) {
|
||||
assert.equal(appenderEvents.length, 1);
|
||||
}
|
||||
},
|
||||
|
||||
'console' : {
|
||||
topic: setupConsoleTest,
|
||||
|
||||
'when replaceConsole called': {
|
||||
topic: function(test) {
|
||||
test.log4js.replaceConsole();
|
||||
|
||||
test.fakeConsole.log("Some debug message someone put in a module");
|
||||
test.fakeConsole.debug("Some debug");
|
||||
test.fakeConsole.error("An error");
|
||||
test.fakeConsole.info("some info");
|
||||
test.fakeConsole.warn("a warning");
|
||||
|
||||
test.fakeConsole.log("cheese (%s) and biscuits (%s)", "gouda", "garibaldis");
|
||||
test.fakeConsole.log({ lumpy: "tapioca" });
|
||||
test.fakeConsole.log("count %d", 123);
|
||||
test.fakeConsole.log("stringify %j", { lumpy: "tapioca" });
|
||||
|
||||
return test.logEvents;
|
||||
},
|
||||
|
||||
'should replace console.log methods with log4js ones': function(logEvents) {
|
||||
assert.equal(logEvents.length, 9);
|
||||
assert.equal(logEvents[0].data[0], "Some debug message someone put in a module");
|
||||
assert.equal(logEvents[0].level.toString(), "INFO");
|
||||
assert.equal(logEvents[1].data[0], "Some debug");
|
||||
assert.equal(logEvents[1].level.toString(), "DEBUG");
|
||||
assert.equal(logEvents[2].data[0], "An error");
|
||||
assert.equal(logEvents[2].level.toString(), "ERROR");
|
||||
assert.equal(logEvents[3].data[0], "some info");
|
||||
assert.equal(logEvents[3].level.toString(), "INFO");
|
||||
assert.equal(logEvents[4].data[0], "a warning");
|
||||
assert.equal(logEvents[4].level.toString(), "WARN");
|
||||
assert.equal(logEvents[5].data[0], "cheese (%s) and biscuits (%s)");
|
||||
assert.equal(logEvents[5].data[1], "gouda");
|
||||
assert.equal(logEvents[5].data[2], "garibaldis");
|
||||
}
|
||||
},
|
||||
'when turned off': {
|
||||
topic: function(test) {
|
||||
test.log4js.restoreConsole();
|
||||
try {
|
||||
test.fakeConsole.log("This should cause the error described in the setup");
|
||||
} catch (e) {
|
||||
return e;
|
||||
}
|
||||
},
|
||||
'should call the original console methods': function (err) {
|
||||
assert.instanceOf(err, Error);
|
||||
assert.equal(err.message, "this should not be called.");
|
||||
}
|
||||
}
|
||||
},
|
||||
'console configuration': {
|
||||
topic: setupConsoleTest,
|
||||
'when disabled': {
|
||||
topic: function(test) {
|
||||
test.log4js.replaceConsole();
|
||||
test.log4js.configure({ replaceConsole: false });
|
||||
try {
|
||||
test.fakeConsole.log("This should cause the error described in the setup");
|
||||
} catch (e) {
|
||||
return e;
|
||||
}
|
||||
},
|
||||
'should allow for turning off console replacement': function (err) {
|
||||
assert.instanceOf(err, Error);
|
||||
assert.equal(err.message, 'this should not be called.');
|
||||
}
|
||||
},
|
||||
'when enabled': {
|
||||
topic: function(test) {
|
||||
test.log4js.restoreConsole();
|
||||
test.log4js.configure({ replaceConsole: true });
|
||||
//log4js.configure clears all appenders
|
||||
test.log4js.addAppender(function(evt) {
|
||||
test.logEvents.push(evt);
|
||||
});
|
||||
|
||||
test.fakeConsole.debug("Some debug");
|
||||
return test.logEvents;
|
||||
},
|
||||
|
||||
'should allow for turning on console replacement': function (logEvents) {
|
||||
assert.equal(logEvents.length, 1);
|
||||
assert.equal(logEvents[0].level.toString(), "DEBUG");
|
||||
assert.equal(logEvents[0].data[0], "Some debug");
|
||||
}
|
||||
}
|
||||
},
|
||||
'configuration persistence' : {
|
||||
topic: function() {
|
||||
var logEvent,
|
||||
firstLog4js = require('../../lib/log4js'),
|
||||
secondLog4js;
|
||||
|
||||
firstLog4js.clearAppenders();
|
||||
firstLog4js.addAppender(function(evt) { logEvent = evt; });
|
||||
|
||||
secondLog4js = require('../../lib/log4js');
|
||||
secondLog4js.getLogger().info("This should go to the appender defined in firstLog4js");
|
||||
|
||||
return logEvent;
|
||||
},
|
||||
'should maintain appenders between requires': function (logEvent) {
|
||||
assert.equal(logEvent.data[0], "This should go to the appender defined in firstLog4js");
|
||||
}
|
||||
},
|
||||
|
||||
'getDefaultLogger': {
|
||||
topic: function() {
|
||||
return require('../../lib/log4js').getDefaultLogger();
|
||||
},
|
||||
'should return a logger': function(logger) {
|
||||
assert.ok(logger.info);
|
||||
assert.ok(logger.debug);
|
||||
assert.ok(logger.error);
|
||||
}
|
||||
}
|
||||
}).export(module);
|
141
node_modules/log4js/test/vows/logglyAppender-test.js
generated
vendored
Normal file
@ -0,0 +1,141 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, log4js = require('../../lib/log4js')
|
||||
, sandbox = require('sandboxed-module')
|
||||
;
|
||||
|
||||
function setupLogging(category, options) {
|
||||
var msgs = [];
|
||||
|
||||
var fakeLoggly = {
|
||||
createClient: function(options) {
|
||||
return {
|
||||
config: options,
|
||||
log: function(msg, tags, cb) {
|
||||
msgs.push({
|
||||
msg: msg,
|
||||
tags: tags,
|
||||
cb: cb
|
||||
});
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
var fakeLayouts = {
|
||||
layout: function(type, config) {
|
||||
this.type = type;
|
||||
this.config = config;
|
||||
return log4js.layouts.messagePassThroughLayout;
|
||||
},
|
||||
basicLayout: log4js.layouts.basicLayout,
|
||||
messagePassThroughLayout: log4js.layouts.messagePassThroughLayout
|
||||
};
|
||||
|
||||
var fakeConsole = {
|
||||
errors: [],
|
||||
error: function(msg, value) {
|
||||
this.errors.push({ msg: msg, value: value });
|
||||
}
|
||||
};
|
||||
|
||||
var logglyModule = sandbox.require('../../lib/appenders/loggly', {
|
||||
requires: {
|
||||
'loggly': fakeLoggly,
|
||||
'../layouts': fakeLayouts
|
||||
},
|
||||
globals: {
|
||||
console: fakeConsole
|
||||
}
|
||||
});
|
||||
|
||||
log4js.addAppender(
|
||||
logglyModule.configure(options),
|
||||
logglyModule.shutdown,
|
||||
category);
|
||||
|
||||
return {
|
||||
logger: log4js.getLogger(category),
|
||||
loggly: fakeLoggly,
|
||||
layouts: fakeLayouts,
|
||||
console: fakeConsole,
|
||||
results: msgs
|
||||
};
|
||||
}
|
||||
|
||||
log4js.clearAppenders();
|
||||
|
||||
function setupTaggedLogging() {
|
||||
return setupLogging('loggly', {
|
||||
token: 'your-really-long-input-token',
|
||||
subdomain: 'your-subdomain',
|
||||
tags: ['loggly-tag1', 'loggly-tag2', 'loggly-tagn']
|
||||
});
|
||||
}
|
||||
|
||||
vows.describe('log4js logglyAppender').addBatch({
|
||||
'with minimal config': {
|
||||
topic: function() {
|
||||
var setup = setupTaggedLogging();
|
||||
setup.logger.log('trace', 'Log event #1', 'Log 2', { tags: ['tag1', 'tag2'] });
|
||||
return setup;
|
||||
},
|
||||
'has a results.length of 1': function(topic) {
|
||||
assert.equal(topic.results.length, 1);
|
||||
},
|
||||
'has a result msg with both args concatenated': function(topic) {
|
||||
assert.equal(topic.results[0].msg.msg, 'Log event #1 Log 2');
|
||||
},
|
||||
'has a result tags with the arg that contains tags': function(topic) {
|
||||
assert.deepEqual(topic.results[0].tags, ['tag1', 'tag2']);
|
||||
}
|
||||
}
|
||||
}).addBatch({
|
||||
'config with object with tags and other keys': {
|
||||
topic: function() {
|
||||
var setup = setupTaggedLogging();
|
||||
|
||||
// ignore this tags object b/c there are 2 keys
|
||||
setup.logger.log('trace', 'Log event #1', { other: 'other', tags: ['tag1', 'tag2'] });
|
||||
return setup;
|
||||
},
|
||||
'has a results.length of 1': function(topic) {
|
||||
assert.equal(topic.results.length, 1);
|
||||
},
|
||||
'has a result msg with the args concatenated': function(topic) {
|
||||
assert.equal(topic.results[0].msg.msg,
|
||||
'Log event #1 { other: \'other\', tags: [ \'tag1\', \'tag2\' ] }');
|
||||
},
|
||||
'has a result tags with the arg that contains no tags': function(topic) {
|
||||
assert.deepEqual(topic.results[0].tags, []);
|
||||
}
|
||||
}
|
||||
}).addBatch({
|
||||
'with shutdown callback': {
|
||||
topic: function() {
|
||||
var setup = setupTaggedLogging();
|
||||
|
||||
setup.logger.log('trace', 'Log event #1', 'Log 2', {
|
||||
tags: ['tag1', 'tag2']
|
||||
});
|
||||
|
||||
return setup;
|
||||
},
|
||||
'after the last message has been sent': {
|
||||
topic: function (topic) {
|
||||
var that = this;
|
||||
|
||||
log4js.shutdown(this.callback);
|
||||
topic.results[0].cb();
|
||||
|
||||
// setTimeout(function() {
|
||||
// that.callback(new Error('Shutdown callback has not been called'));
|
||||
// }, 0);
|
||||
},
|
||||
'calls `log4js.shutdown`s callback function.': function(error, result) {
|
||||
assert.equal(error, undefined);
|
||||
}
|
||||
}
|
||||
}
|
||||
}).export(module);
|
135
node_modules/log4js/test/vows/logstashUDP-test.js
generated
vendored
Normal file
@ -0,0 +1,135 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, log4js = require('../../lib/log4js')
|
||||
, sandbox = require('sandboxed-module')
|
||||
;
|
||||
|
||||
function setupLogging(category, options) {
|
||||
var udpSent = {};
|
||||
|
||||
var fakeDgram = {
|
||||
createSocket: function (type) {
|
||||
return {
|
||||
send: function(buffer, offset, length, port, host, callback) {
|
||||
udpSent.date = new Date();
|
||||
udpSent.host = host;
|
||||
udpSent.port = port;
|
||||
udpSent.length = length;
|
||||
udpSent.offset = 0;
|
||||
udpSent.buffer = buffer;
|
||||
callback(undefined, length);
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
var logstashModule = sandbox.require('../../lib/appenders/logstashUDP', {
|
||||
requires: {
|
||||
'dgram': fakeDgram
|
||||
}
|
||||
});
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(logstashModule.configure(options), category);
|
||||
|
||||
return {
|
||||
logger: log4js.getLogger(category),
|
||||
results: udpSent
|
||||
};
|
||||
}
|
||||
|
||||
vows.describe('logstashUDP appender').addBatch({
|
||||
'when logging with logstash via UDP': {
|
||||
topic: function() {
|
||||
var setup = setupLogging('myCategory', {
|
||||
"host": "127.0.0.1",
|
||||
"port": 10001,
|
||||
"type": "logstashUDP",
|
||||
"logType": "myAppType",
|
||||
"category": "myLogger",
|
||||
"fields": {
|
||||
"field1": "value1",
|
||||
"field2": "value2"
|
||||
},
|
||||
"layout": {
|
||||
"type": "pattern",
|
||||
"pattern": "%m"
|
||||
}
|
||||
});
|
||||
setup.logger.log('trace', 'Log event #1');
|
||||
return setup;
|
||||
},
|
||||
'an UDP packet should be sent': function (topic) {
|
||||
assert.equal(topic.results.host, "127.0.0.1");
|
||||
assert.equal(topic.results.port, 10001);
|
||||
assert.equal(topic.results.offset, 0);
|
||||
var json = JSON.parse(topic.results.buffer.toString());
|
||||
assert.equal(json.type, 'myAppType');
|
||||
var fields = {
|
||||
field1: 'value1',
|
||||
field2: 'value2',
|
||||
level: 'TRACE',
|
||||
category: 'myCategory'
|
||||
};
|
||||
assert.equal(JSON.stringify(json.fields), JSON.stringify(fields));
|
||||
assert.equal(json.message, 'Log event #1');
|
||||
// Assert timestamp, up to hours resolution.
|
||||
var date = new Date(json['@timestamp']);
|
||||
assert.equal(
|
||||
date.toISOString().substring(0, 14),
|
||||
topic.results.date.toISOString().substring(0, 14)
|
||||
);
|
||||
}
|
||||
},
|
||||
|
||||
'when missing some options': {
|
||||
topic: function() {
|
||||
var setup = setupLogging('myLogger', {
|
||||
"host": "127.0.0.1",
|
||||
"port": 10001,
|
||||
"type": "logstashUDP",
|
||||
"category": "myLogger",
|
||||
"layout": {
|
||||
"type": "pattern",
|
||||
"pattern": "%m"
|
||||
}
|
||||
});
|
||||
setup.logger.log('trace', 'Log event #1');
|
||||
return setup;
|
||||
},
|
||||
'it sets some defaults': function (topic) {
|
||||
var json = JSON.parse(topic.results.buffer.toString());
|
||||
assert.equal(json.type, 'myLogger');
|
||||
assert.equal(
|
||||
JSON.stringify(json.fields),
|
||||
JSON.stringify({'level': 'TRACE', 'category': 'myLogger'})
|
||||
);
|
||||
}
|
||||
},
|
||||
|
||||
'when extra fields provided': {
|
||||
topic: function() {
|
||||
var setup = setupLogging('myLogger', {
|
||||
"host": "127.0.0.1",
|
||||
"port": 10001,
|
||||
"type": "logstashUDP",
|
||||
"category": "myLogger",
|
||||
"layout": {
|
||||
"type": "dummy"
|
||||
}
|
||||
});
|
||||
setup.logger.log('trace', 'Log event #1', {'extra1': 'value1', 'extra2': 'value2'});
|
||||
return setup;
|
||||
},'they should be added to fields structure': function (topic) {
|
||||
var json = JSON.parse(topic.results.buffer.toString());
|
||||
var fields = {
|
||||
'extra1': 'value1',
|
||||
'extra2': 'value2',
|
||||
'level': 'TRACE',
|
||||
'category': 'myLogger'
|
||||
};
|
||||
assert.equal(JSON.stringify(json.fields), JSON.stringify(fields));
|
||||
}
|
||||
}
|
||||
|
||||
}).export(module);
|
190
node_modules/log4js/test/vows/mailgunAppender-test.js
generated
vendored
Normal file
@ -0,0 +1,190 @@
|
||||
"use strict";
|
||||
var vows = require('vows');
|
||||
var assert = require('assert');
|
||||
var log4js = require('../../lib/log4js');
|
||||
var sandbox = require('sandboxed-module');
|
||||
|
||||
function setupLogging(category, options) {
|
||||
var msgs = [];
|
||||
|
||||
var mailgunCredentials = {
|
||||
apiKey: options.apikey,
|
||||
domain: options.domain
|
||||
};
|
||||
|
||||
var fakeMailgun = function (conf) {
|
||||
return {
|
||||
messages: function () {
|
||||
return {
|
||||
config: options,
|
||||
send: function (data, callback) {
|
||||
msgs.push(data);
|
||||
callback(false, {status:"OK"});
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
var fakeLayouts = {
|
||||
layout: function (type, config) {
|
||||
this.type = type;
|
||||
this.config = config;
|
||||
return log4js.layouts.messagePassThroughLayout;
|
||||
},
|
||||
basicLayout: log4js.layouts.basicLayout,
|
||||
messagePassThroughLayout: log4js.layouts.messagePassThroughLayout
|
||||
};
|
||||
|
||||
var fakeConsole = {
|
||||
errors: [],
|
||||
logs: [],
|
||||
error: function (msg, value) {
|
||||
this.errors.push({msg: msg, value: value});
|
||||
},
|
||||
log: function (msg, value) {
|
||||
this.logs.push({msg: msg, value: value});
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
var mailgunModule = sandbox.require('../../lib/appenders/mailgun', {
|
||||
requires: {
|
||||
'mailgun-js': fakeMailgun,
|
||||
'../layouts': fakeLayouts
|
||||
},
|
||||
globals: {
|
||||
console: fakeConsole
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
log4js.addAppender(mailgunModule.configure(options), category);
|
||||
|
||||
return {
|
||||
logger: log4js.getLogger(category),
|
||||
mailer: fakeMailgun,
|
||||
layouts: fakeLayouts,
|
||||
console: fakeConsole,
|
||||
mails: msgs,
|
||||
credentials: mailgunCredentials
|
||||
};
|
||||
}
|
||||
|
||||
function checkMessages(result) {
|
||||
for (var i = 0; i < result.mails.length; ++i) {
|
||||
assert.equal(result.mails[i].from, 'sender@domain.com');
|
||||
assert.equal(result.mails[i].to, 'recepient@domain.com');
|
||||
assert.equal(result.mails[i].subject, 'This is subject');
|
||||
assert.ok(new RegExp('.+Log event #' + (i + 1)).test(result.mails[i].text));
|
||||
}
|
||||
}
|
||||
|
||||
log4js.clearAppenders();
|
||||
|
||||
vows.describe('log4js mailgunAppender').addBatch({
|
||||
'mailgun setup': {
|
||||
topic: setupLogging('mailgun setup', {
|
||||
apikey: 'APIKEY',
|
||||
domain: 'DOMAIN',
|
||||
from: 'sender@domain.com',
|
||||
to: 'recepient@domain.com',
|
||||
subject: 'This is subject'
|
||||
}),
|
||||
'mailgun credentials should match': function(result){
|
||||
assert.equal(result.credentials.apiKey, 'APIKEY');
|
||||
assert.equal(result.credentials.domain, 'DOMAIN');
|
||||
}
|
||||
},
|
||||
|
||||
'basic usage': {
|
||||
topic: function(){
|
||||
var setup = setupLogging('basic usage', {
|
||||
apikey: 'APIKEY',
|
||||
domain: 'DOMAIN',
|
||||
from: 'sender@domain.com',
|
||||
to: 'recepient@domain.com',
|
||||
subject: 'This is subject'
|
||||
});
|
||||
|
||||
setup.logger.info("Log event #1");
|
||||
return setup;
|
||||
},
|
||||
'there should be one message only': function (result) {
|
||||
assert.equal(result.mails.length, 1);
|
||||
},
|
||||
'message should contain proper data': function (result) {
|
||||
checkMessages(result);
|
||||
}
|
||||
},
|
||||
'config with layout': {
|
||||
topic: function () {
|
||||
var setup = setupLogging('config with layout', {
|
||||
layout: {
|
||||
type: "tester"
|
||||
}
|
||||
});
|
||||
return setup;
|
||||
},
|
||||
'should configure layout': function (result) {
|
||||
assert.equal(result.layouts.type, 'tester');
|
||||
}
|
||||
},
|
||||
'error when sending email': {
|
||||
topic: function () {
|
||||
var setup = setupLogging('separate email for each event', {
|
||||
apikey: 'APIKEY',
|
||||
domain: 'DOMAIN',
|
||||
from: 'sender@domain.com',
|
||||
to: 'recepient@domain.com',
|
||||
subject: 'This is subject'
|
||||
});
|
||||
|
||||
setup.mailer.messages = function () {
|
||||
return {
|
||||
send: function (msg, cb) {
|
||||
cb({msg: "log4js.mailgunAppender - Error happened"}, null);
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
setup.logger.info("This will break");
|
||||
return setup.console;
|
||||
},
|
||||
'should be logged to console': function (cons) {
|
||||
assert.equal(cons.errors.length, 1);
|
||||
assert.equal(cons.errors[0].msg, 'log4js.mailgunAppender - Error happened');
|
||||
}
|
||||
},
|
||||
'separate email for each event': {
|
||||
topic: function () {
|
||||
var self = this;
|
||||
var setup = setupLogging('separate email for each event', {
|
||||
apikey: 'APIKEY',
|
||||
domain: 'DOMAIN',
|
||||
from: 'sender@domain.com',
|
||||
to: 'recepient@domain.com',
|
||||
subject: 'This is subject'
|
||||
});
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #1');
|
||||
}, 0);
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #2');
|
||||
}, 500);
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #3');
|
||||
}, 1100);
|
||||
setTimeout(function () {
|
||||
self.callback(null, setup);
|
||||
}, 3000);
|
||||
},
|
||||
'there should be three messages': function (result) {
|
||||
assert.equal(result.mails.length, 3);
|
||||
},
|
||||
'messages should contain proper data': function (result) {
|
||||
checkMessages(result);
|
||||
}
|
||||
}
|
||||
|
||||
}).export(module);
|
317
node_modules/log4js/test/vows/multiprocess-test.js
generated
vendored
Normal file
@ -0,0 +1,317 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, sandbox = require('sandboxed-module')
|
||||
, assert = require('assert')
|
||||
;
|
||||
|
||||
function makeFakeNet() {
|
||||
return {
|
||||
logEvents: [],
|
||||
data: [],
|
||||
cbs: {},
|
||||
createConnectionCalled: 0,
|
||||
fakeAppender: function(logEvent) {
|
||||
this.logEvents.push(logEvent);
|
||||
},
|
||||
createConnection: function(port, host) {
|
||||
var fakeNet = this;
|
||||
this.port = port;
|
||||
this.host = host;
|
||||
this.createConnectionCalled += 1;
|
||||
return {
|
||||
on: function(evt, cb) {
|
||||
fakeNet.cbs[evt] = cb;
|
||||
},
|
||||
write: function(data, encoding) {
|
||||
fakeNet.data.push(data);
|
||||
fakeNet.encoding = encoding;
|
||||
},
|
||||
end: function() {
|
||||
fakeNet.closeCalled = true;
|
||||
}
|
||||
};
|
||||
},
|
||||
createServer: function(cb) {
|
||||
var fakeNet = this;
|
||||
cb({
|
||||
remoteAddress: '1.2.3.4',
|
||||
remotePort: '1234',
|
||||
setEncoding: function(encoding) {
|
||||
fakeNet.encoding = encoding;
|
||||
},
|
||||
on: function(event, cb) {
|
||||
fakeNet.cbs[event] = cb;
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
listen: function(port, host) {
|
||||
fakeNet.port = port;
|
||||
fakeNet.host = host;
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
vows.describe('Multiprocess Appender').addBatch({
|
||||
'worker': {
|
||||
topic: function() {
|
||||
var fakeNet = makeFakeNet(),
|
||||
appender = sandbox.require(
|
||||
'../../lib/appenders/multiprocess',
|
||||
{
|
||||
requires: {
|
||||
'net': fakeNet
|
||||
}
|
||||
}
|
||||
).appender({ mode: 'worker', loggerPort: 1234, loggerHost: 'pants' });
|
||||
|
||||
//don't need a proper log event for the worker tests
|
||||
appender('before connect');
|
||||
fakeNet.cbs.connect();
|
||||
appender('after connect');
|
||||
fakeNet.cbs.close(true);
|
||||
appender('after error, before connect');
|
||||
fakeNet.cbs.connect();
|
||||
appender('after error, after connect');
|
||||
appender(new Error('Error test'));
|
||||
|
||||
return fakeNet;
|
||||
},
|
||||
'should open a socket to the loggerPort and loggerHost': function(net) {
|
||||
assert.equal(net.port, 1234);
|
||||
assert.equal(net.host, 'pants');
|
||||
},
|
||||
'should buffer messages written before socket is connected': function(net) {
|
||||
assert.equal(net.data[0], JSON.stringify('before connect'));
|
||||
},
|
||||
'should write log messages to socket as json strings with a terminator string': function(net) {
|
||||
assert.equal(net.data[0], JSON.stringify('before connect'));
|
||||
assert.equal(net.data[1], '__LOG4JS__');
|
||||
assert.equal(net.data[2], JSON.stringify('after connect'));
|
||||
assert.equal(net.data[3], '__LOG4JS__');
|
||||
assert.equal(net.encoding, 'utf8');
|
||||
},
|
||||
'should attempt to re-open the socket on error': function(net) {
|
||||
assert.equal(net.data[4], JSON.stringify('after error, before connect'));
|
||||
assert.equal(net.data[5], '__LOG4JS__');
|
||||
assert.equal(net.data[6], JSON.stringify('after error, after connect'));
|
||||
assert.equal(net.data[7], '__LOG4JS__');
|
||||
assert.equal(net.createConnectionCalled, 2);
|
||||
},
|
||||
'should serialize an Error correctly': function(net) {
|
||||
assert(
|
||||
JSON.parse(net.data[8]).stack,
|
||||
"Expected:\n\n" + net.data[8] + "\n\n to have a 'stack' property"
|
||||
);
|
||||
var actual = JSON.parse(net.data[8]).stack;
|
||||
var expectedRegex = /^Error: Error test/;
|
||||
assert(
|
||||
actual.match(expectedRegex),
|
||||
"Expected: \n\n " + actual + "\n\n to match " + expectedRegex
|
||||
);
|
||||
|
||||
}
|
||||
},
|
||||
'worker with timeout': {
|
||||
topic: function() {
|
||||
var fakeNet = makeFakeNet(),
|
||||
appender = sandbox.require(
|
||||
'../../lib/appenders/multiprocess',
|
||||
{
|
||||
requires: {
|
||||
'net': fakeNet
|
||||
}
|
||||
}
|
||||
).appender({ mode: 'worker' });
|
||||
|
||||
//don't need a proper log event for the worker tests
|
||||
appender('before connect');
|
||||
fakeNet.cbs.connect();
|
||||
appender('after connect');
|
||||
fakeNet.cbs.timeout();
|
||||
appender('after timeout, before close');
|
||||
fakeNet.cbs.close();
|
||||
appender('after close, before connect');
|
||||
fakeNet.cbs.connect();
|
||||
appender('after close, after connect');
|
||||
|
||||
return fakeNet;
|
||||
},
|
||||
'should attempt to re-open the socket': function(net) {
|
||||
//skipping the __LOG4JS__ separators
|
||||
assert.equal(net.data[0], JSON.stringify('before connect'));
|
||||
assert.equal(net.data[2], JSON.stringify('after connect'));
|
||||
assert.equal(net.data[4], JSON.stringify('after timeout, before close'));
|
||||
assert.equal(net.data[6], JSON.stringify('after close, before connect'));
|
||||
assert.equal(net.data[8], JSON.stringify('after close, after connect'));
|
||||
assert.equal(net.createConnectionCalled, 2);
|
||||
}
|
||||
},
|
||||
'worker defaults': {
|
||||
topic: function() {
|
||||
var fakeNet = makeFakeNet(),
|
||||
appender = sandbox.require(
|
||||
'../../lib/appenders/multiprocess',
|
||||
{
|
||||
requires: {
|
||||
'net': fakeNet
|
||||
}
|
||||
}
|
||||
).appender({ mode: 'worker' });
|
||||
|
||||
return fakeNet;
|
||||
},
|
||||
'should open a socket to localhost:5000': function(net) {
|
||||
assert.equal(net.port, 5000);
|
||||
assert.equal(net.host, 'localhost');
|
||||
}
|
||||
},
|
||||
'master': {
|
||||
topic: function() {
|
||||
var fakeNet = makeFakeNet(),
|
||||
appender = sandbox.require(
|
||||
'../../lib/appenders/multiprocess',
|
||||
{
|
||||
requires: {
|
||||
'net': fakeNet
|
||||
}
|
||||
}
|
||||
).appender({ mode: 'master',
|
||||
loggerHost: 'server',
|
||||
loggerPort: 1234,
|
||||
actualAppender: fakeNet.fakeAppender.bind(fakeNet)
|
||||
});
|
||||
|
||||
appender('this should be sent to the actual appender directly');
|
||||
|
||||
return fakeNet;
|
||||
},
|
||||
'should listen for log messages on loggerPort and loggerHost': function(net) {
|
||||
assert.equal(net.port, 1234);
|
||||
assert.equal(net.host, 'server');
|
||||
},
|
||||
'should return the underlying appender': function(net) {
|
||||
assert.equal(net.logEvents[0], 'this should be sent to the actual appender directly');
|
||||
},
|
||||
'when a client connects': {
|
||||
topic: function(net) {
|
||||
var logString = JSON.stringify(
|
||||
{ level: { level: 10000, levelStr: 'DEBUG' }
|
||||
, data: ['some debug']}
|
||||
) + '__LOG4JS__';
|
||||
|
||||
net.cbs.data(
|
||||
JSON.stringify(
|
||||
{ level: { level: 40000, levelStr: 'ERROR' }
|
||||
, data: ['an error message'] }
|
||||
) + '__LOG4JS__'
|
||||
);
|
||||
net.cbs.data(logString.substring(0, 10));
|
||||
net.cbs.data(logString.substring(10));
|
||||
net.cbs.data(logString + logString + logString);
|
||||
net.cbs.end(
|
||||
JSON.stringify(
|
||||
{ level: { level: 50000, levelStr: 'FATAL' }
|
||||
, data: ["that's all folks"] }
|
||||
) + '__LOG4JS__'
|
||||
);
|
||||
net.cbs.data('bad message__LOG4JS__');
|
||||
return net;
|
||||
},
|
||||
'should parse log messages into log events and send to appender': function(net) {
|
||||
assert.equal(net.logEvents[1].level.toString(), 'ERROR');
|
||||
assert.equal(net.logEvents[1].data[0], 'an error message');
|
||||
assert.equal(net.logEvents[1].remoteAddress, '1.2.3.4');
|
||||
assert.equal(net.logEvents[1].remotePort, '1234');
|
||||
},
|
||||
'should parse log messages split into multiple chunks': function(net) {
|
||||
assert.equal(net.logEvents[2].level.toString(), 'DEBUG');
|
||||
assert.equal(net.logEvents[2].data[0], 'some debug');
|
||||
assert.equal(net.logEvents[2].remoteAddress, '1.2.3.4');
|
||||
assert.equal(net.logEvents[2].remotePort, '1234');
|
||||
},
|
||||
'should parse multiple log messages in a single chunk': function(net) {
|
||||
assert.equal(net.logEvents[3].data[0], 'some debug');
|
||||
assert.equal(net.logEvents[4].data[0], 'some debug');
|
||||
assert.equal(net.logEvents[5].data[0], 'some debug');
|
||||
},
|
||||
'should handle log messages sent as part of end event': function(net) {
|
||||
assert.equal(net.logEvents[6].data[0], "that's all folks");
|
||||
},
|
||||
'should handle unparseable log messages': function(net) {
|
||||
assert.equal(net.logEvents[7].level.toString(), 'ERROR');
|
||||
assert.equal(net.logEvents[7].categoryName, 'log4js');
|
||||
assert.equal(net.logEvents[7].data[0], 'Unable to parse log:');
|
||||
assert.equal(net.logEvents[7].data[1], 'bad message');
|
||||
}
|
||||
}
|
||||
},
|
||||
'master defaults': {
|
||||
topic: function() {
|
||||
var fakeNet = makeFakeNet(),
|
||||
appender = sandbox.require(
|
||||
'../../lib/appenders/multiprocess',
|
||||
{
|
||||
requires: {
|
||||
'net': fakeNet
|
||||
}
|
||||
}
|
||||
).appender({ mode: 'master' });
|
||||
|
||||
return fakeNet;
|
||||
},
|
||||
'should listen for log messages on localhost:5000': function(net) {
|
||||
assert.equal(net.port, 5000);
|
||||
assert.equal(net.host, 'localhost');
|
||||
}
|
||||
}
|
||||
}).addBatch({
|
||||
'configure': {
|
||||
topic: function() {
|
||||
var results = {}
|
||||
, fakeNet = makeFakeNet()
|
||||
, appender = sandbox.require(
|
||||
'../../lib/appenders/multiprocess',
|
||||
{
|
||||
requires: {
|
||||
'net': fakeNet,
|
||||
'../log4js': {
|
||||
loadAppender: function(app) {
|
||||
results.appenderLoaded = app;
|
||||
},
|
||||
appenderMakers: {
|
||||
'madeupappender': function(config, options) {
|
||||
results.config = config;
|
||||
results.options = options;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
).configure(
|
||||
{
|
||||
mode: 'master',
|
||||
appender: {
|
||||
type: 'madeupappender',
|
||||
cheese: 'gouda'
|
||||
}
|
||||
},
|
||||
{ crackers: 'jacobs' }
|
||||
);
|
||||
|
||||
return results;
|
||||
|
||||
},
|
||||
'should load underlying appender for master': function(results) {
|
||||
assert.equal(results.appenderLoaded, 'madeupappender');
|
||||
},
|
||||
'should pass config to underlying appender': function(results) {
|
||||
assert.equal(results.config.cheese, 'gouda');
|
||||
},
|
||||
'should pass options to underlying appender': function(results) {
|
||||
assert.equal(results.options.crackers, 'jacobs');
|
||||
}
|
||||
}
|
||||
}).exportTo(module);
|
138
node_modules/log4js/test/vows/newLevel-test.js
generated
vendored
Normal file
@ -0,0 +1,138 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, Level = require('../../lib/levels')
|
||||
, log4js = require('../../lib/log4js')
|
||||
, loggerModule = require('../../lib/logger')
|
||||
, Logger = loggerModule.Logger;
|
||||
|
||||
vows.describe('../../lib/logger').addBatch({
|
||||
'creating a new log level': {
|
||||
topic: function () {
|
||||
Level.forName("DIAG", 6000);
|
||||
return new Logger();
|
||||
},
|
||||
|
||||
'should export new log level in levels module': function (logger) {
|
||||
assert.isDefined(Level.DIAG);
|
||||
assert.equal(Level.DIAG.levelStr, "DIAG");
|
||||
assert.equal(Level.DIAG.level, 6000);
|
||||
},
|
||||
|
||||
'should create named function on logger prototype': function(logger) {
|
||||
assert.isFunction(logger.diag);
|
||||
},
|
||||
|
||||
'should create isLevelEnabled function on logger prototype': function(logger) {
|
||||
assert.isFunction(logger.isDiagEnabled);
|
||||
},
|
||||
},
|
||||
|
||||
'creating a new log level with underscores': {
|
||||
topic: function () {
|
||||
Level.forName("NEW_LEVEL_OTHER", 6000);
|
||||
return new Logger();
|
||||
},
|
||||
|
||||
'should export new log level to levels module': function (logger) {
|
||||
assert.isDefined(Level.NEW_LEVEL_OTHER);
|
||||
assert.equal(Level.NEW_LEVEL_OTHER.levelStr, "NEW_LEVEL_OTHER");
|
||||
assert.equal(Level.NEW_LEVEL_OTHER.level, 6000);
|
||||
},
|
||||
|
||||
'should create named function on logger prototype in camel case': function(logger) {
|
||||
assert.isFunction(logger.newLevelOther);
|
||||
},
|
||||
|
||||
'should create named isLevelEnabled function on logger prototype in camel case':
|
||||
function(logger) {
|
||||
assert.isFunction(logger.isNewLevelOtherEnabled);
|
||||
}
|
||||
},
|
||||
|
||||
'creating log events containing newly created log level': {
|
||||
topic: function() {
|
||||
var events = [],
|
||||
logger = new Logger();
|
||||
logger.addListener("log", function (logEvent) { events.push(logEvent); });
|
||||
|
||||
logger.log(Level.forName("LVL1", 6000), "Event 1");
|
||||
logger.log(Level.getLevel("LVL1"), "Event 2");
|
||||
logger.log("LVL1", "Event 3");
|
||||
logger.lvl1("Event 4");
|
||||
|
||||
logger.setLevel(Level.forName("LVL2", 7000));
|
||||
logger.lvl1("Event 5");
|
||||
|
||||
return events;
|
||||
},
|
||||
|
||||
'should show log events with new log level': function(events) {
|
||||
assert.equal(events[0].level.toString(), "LVL1");
|
||||
assert.equal(events[0].data[0], "Event 1");
|
||||
|
||||
assert.equal(events[1].level.toString(), "LVL1");
|
||||
assert.equal(events[1].data[0], "Event 2");
|
||||
|
||||
assert.equal(events[2].level.toString(), "LVL1");
|
||||
assert.equal(events[2].data[0], "Event 3");
|
||||
|
||||
assert.equal(events[3].level.toString(), "LVL1");
|
||||
assert.equal(events[3].data[0], "Event 4");
|
||||
},
|
||||
|
||||
'should not be present if min log level is greater than newly created level':
|
||||
function(events) {
|
||||
assert.equal(events.length, 4);
|
||||
}
|
||||
},
|
||||
|
||||
'creating a new log level with incorrect parameters': {
|
||||
topic: function() {
|
||||
log4js.levels.forName(9000, "FAIL_LEVEL_1");
|
||||
log4js.levels.forName("FAIL_LEVEL_2");
|
||||
return new Logger();
|
||||
},
|
||||
|
||||
'should fail to create the level': function(logger) {
|
||||
assert.isUndefined(Level.FAIL_LEVEL_1);
|
||||
assert.isUndefined(Level.FAIL_LEVEL_2);
|
||||
}
|
||||
},
|
||||
|
||||
'calling log with an undefined log level': {
|
||||
topic: function() {
|
||||
var events = [],
|
||||
logger = new Logger();
|
||||
logger.addListener("log", function (logEvent) { events.push(logEvent); });
|
||||
|
||||
logger.log("LEVEL_DOES_NEXT_EXIST", "Event 1");
|
||||
logger.log(Level.forName("LEVEL_DOES_NEXT_EXIST"), "Event 2");
|
||||
|
||||
return events;
|
||||
},
|
||||
|
||||
'should fallback to the default log level (INFO)': function(events) {
|
||||
assert.equal(events[0].level.toString(), "INFO");
|
||||
assert.equal(events[1].level.toString(), "INFO");
|
||||
}
|
||||
},
|
||||
|
||||
'creating a new level with an existing level name': {
|
||||
topic: function() {
|
||||
var events = [],
|
||||
logger = new Logger();
|
||||
logger.addListener("log", function (logEvent) { events.push(logEvent); });
|
||||
|
||||
logger.log(log4js.levels.forName("MY_LEVEL", 9000), "Event 1");
|
||||
logger.log(log4js.levels.forName("MY_LEVEL", 8000), "Event 1");
|
||||
|
||||
return events;
|
||||
},
|
||||
|
||||
'should override the existing log level': function(events) {
|
||||
assert.equal(events[0].level.level, 9000);
|
||||
assert.equal(events[1].level.level, 8000);
|
||||
}
|
||||
}
|
||||
}).exportTo(module);
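The batch above covers Level.forName end to end; for reference, a minimal standalone sketch of the same custom-level flow, assuming the public log4js.levels export wires up the same prototype helpers these vows assert (the category name is illustrative):

var log4js = require('log4js');

// Register a DIAG level with weight 6000, as in the batch above; forName also
// adds logger.diag() and logger.isDiagEnabled() to the logger prototype.
log4js.levels.forName('DIAG', 6000);

var logger = log4js.getLogger('diagnostics');
logger.diag('custom-level event');
console.log(logger.isDiagEnabled()); // true while the logger level is at or below 6000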
297 node_modules/log4js/test/vows/nolog-test.js generated vendored Normal file
@@ -0,0 +1,297 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, util = require('util')
|
||||
, EE = require('events').EventEmitter
|
||||
, levels = require('../../lib/levels');
|
||||
|
||||
function MockLogger() {
|
||||
|
||||
var that = this;
|
||||
this.messages = [];
|
||||
|
||||
this.log = function(level, message, exception) {
|
||||
that.messages.push({ level: level, message: message });
|
||||
};
|
||||
|
||||
this.isLevelEnabled = function(level) {
|
||||
return level.isGreaterThanOrEqualTo(that.level);
|
||||
};
|
||||
|
||||
this.level = levels.TRACE;
|
||||
|
||||
}
|
||||
|
||||
function MockRequest(remoteAddr, method, originalUrl) {
|
||||
|
||||
this.socket = { remoteAddress: remoteAddr };
|
||||
this.originalUrl = originalUrl;
|
||||
this.method = method;
|
||||
this.httpVersionMajor = '5';
|
||||
this.httpVersionMinor = '0';
|
||||
this.headers = {};
|
||||
}
|
||||
|
||||
function MockResponse(statusCode) {
|
||||
var r = this;
|
||||
this.statusCode = statusCode;
|
||||
|
||||
this.end = function(chunk, encoding) {
|
||||
r.emit('finish');
|
||||
};
|
||||
}
|
||||
util.inherits(MockResponse, EE);
|
||||
|
||||
vows.describe('log4js connect logger').addBatch({
|
||||
'getConnectLoggerModule': {
|
||||
topic: function() {
|
||||
var clm = require('../../lib/connect-logger');
|
||||
return clm;
|
||||
},
|
||||
|
||||
'should return a "connect logger" factory' : function(clm) {
|
||||
assert.isObject(clm);
|
||||
},
|
||||
|
||||
'nolog String' : {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
var cl = clm.connectLogger(ml, { nolog: "\\.gif" });
|
||||
return {cl: cl, ml: ml};
|
||||
},
|
||||
|
||||
'check unmatch url request': {
|
||||
topic: function(d){
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif
|
||||
var res = new MockResponse(200);
|
||||
var cb = this.callback;
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
setTimeout(function() {
|
||||
cb(null, d.ml.messages);
|
||||
},10);
|
||||
},
|
||||
'check message': function(messages){
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 1);
|
||||
assert.ok(levels.INFO.isEqualTo(messages[0].level));
|
||||
assert.include(messages[0].message, 'GET');
|
||||
assert.include(messages[0].message, 'http://url');
|
||||
assert.include(messages[0].message, 'my.remote.addr');
|
||||
assert.include(messages[0].message, '200');
|
||||
messages.pop();
|
||||
}
|
||||
},
|
||||
|
||||
'check match url request': {
|
||||
topic: function(d) {
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif'); // gif
|
||||
var res = new MockResponse(200);
|
||||
var cb = this.callback;
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
setTimeout(function() {
|
||||
cb(null, d.ml.messages);
|
||||
},10);
|
||||
},
|
||||
'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 0);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
'nolog Strings' : {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
var cl = clm.connectLogger(ml, {nolog: "\\.gif|\\.jpe?g"});
|
||||
return {cl: cl, ml: ml};
|
||||
},
|
||||
|
||||
'check unmatch url request (png)': {
|
||||
topic: function(d){
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif
|
||||
var res = new MockResponse(200);
|
||||
var cb = this.callback;
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
setTimeout(function() {
|
||||
cb(null, d.ml.messages);
|
||||
}, 10);
|
||||
},
|
||||
'check message': function(messages){
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 1);
|
||||
assert.ok(levels.INFO.isEqualTo(messages[0].level));
|
||||
assert.include(messages[0].message, 'GET');
|
||||
assert.include(messages[0].message, 'http://url');
|
||||
assert.include(messages[0].message, 'my.remote.addr');
|
||||
assert.include(messages[0].message, '200');
|
||||
messages.pop();
|
||||
}
|
||||
},
|
||||
|
||||
'check match url request (gif)': {
|
||||
topic: function(d) {
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif'); // gif
|
||||
var res = new MockResponse(200);
|
||||
var cb = this.callback;
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
setTimeout(function() {
|
||||
cb(null, d.ml.messages);
|
||||
}, 10);
|
||||
},
|
||||
'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 0);
|
||||
}
|
||||
},
|
||||
'check match url request (jpeg)': {
|
||||
topic: function(d) {
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.jpeg'); // jpeg
|
||||
var res = new MockResponse(200);
|
||||
var cb = this.callback;
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
setTimeout(function() {
|
||||
cb(null, d.ml.messages);
|
||||
}, 10);
|
||||
},
|
||||
'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 0);
|
||||
}
|
||||
}
|
||||
},
|
||||
'nolog Array<String>' : {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
var cl = clm.connectLogger(ml, {nolog: ["\\.gif", "\\.jpe?g"]});
|
||||
return {cl: cl, ml: ml};
|
||||
},
|
||||
|
||||
'check unmatch url request (png)': {
|
||||
topic: function(d){
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif
|
||||
var res = new MockResponse(200);
|
||||
var cb = this.callback;
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
setTimeout(function() {
|
||||
cb(null, d.ml.messages);
|
||||
}, 10);
|
||||
},
|
||||
'check message': function(messages){
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 1);
|
||||
assert.ok(levels.INFO.isEqualTo(messages[0].level));
|
||||
assert.include(messages[0].message, 'GET');
|
||||
assert.include(messages[0].message, 'http://url');
|
||||
assert.include(messages[0].message, 'my.remote.addr');
|
||||
assert.include(messages[0].message, '200');
|
||||
messages.pop();
|
||||
}
|
||||
},
|
||||
|
||||
'check match url request (gif)': {
|
||||
topic: function(d) {
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif'); // gif
|
||||
var res = new MockResponse(200);
|
||||
var cb = this.callback;
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
setTimeout(function() {
|
||||
cb(null, d.ml.messages);
|
||||
}, 10);
|
||||
},
|
||||
'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 0);
|
||||
}
|
||||
},
|
||||
|
||||
'check match url request (jpeg)': {
|
||||
topic: function(d) {
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.jpeg'); // jpeg
|
||||
var res = new MockResponse(200);
|
||||
var cb = this.callback;
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
setTimeout(function() {
|
||||
cb(null, d.ml.messages);
|
||||
}, 10);
|
||||
},
|
||||
'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 0);
|
||||
}
|
||||
},
|
||||
},
|
||||
'nolog RegExp' : {
|
||||
topic: function(clm) {
|
||||
var ml = new MockLogger();
|
||||
var cl = clm.connectLogger(ml, {nolog: /\.gif|\.jpe?g/});
|
||||
return {cl: cl, ml: ml};
|
||||
},
|
||||
|
||||
'check unmatch url request (png)': {
|
||||
topic: function(d){
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.png'); // not gif
|
||||
var res = new MockResponse(200);
|
||||
var cb = this.callback;
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
setTimeout(function() {
|
||||
cb(null, d.ml.messages);
|
||||
}, 10);
|
||||
},
|
||||
'check message': function(messages){
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 1);
|
||||
assert.ok(levels.INFO.isEqualTo(messages[0].level));
|
||||
assert.include(messages[0].message, 'GET');
|
||||
assert.include(messages[0].message, 'http://url');
|
||||
assert.include(messages[0].message, 'my.remote.addr');
|
||||
assert.include(messages[0].message, '200');
|
||||
messages.pop();
|
||||
}
|
||||
},
|
||||
|
||||
'check match url request (gif)': {
|
||||
topic: function(d) {
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.gif'); // gif
|
||||
var res = new MockResponse(200);
|
||||
var cb = this.callback;
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
setTimeout(function() {
|
||||
cb(null, d.ml.messages);
|
||||
}, 10);
|
||||
},
|
||||
'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 0);
|
||||
}
|
||||
},
|
||||
|
||||
'check match url request (jpeg)': {
|
||||
topic: function(d) {
|
||||
var req = new MockRequest('my.remote.addr', 'GET', 'http://url/hoge.jpeg'); // jpeg
|
||||
var res = new MockResponse(200);
|
||||
var cb = this.callback;
|
||||
d.cl(req, res, function() { });
|
||||
res.end('chunk', 'encoding');
|
||||
setTimeout(function() {
|
||||
cb(null, d.ml.messages);
|
||||
}, 10);
|
||||
},
|
||||
'check message': function(messages) {
|
||||
assert.isArray(messages);
|
||||
assert.equal(messages.length, 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}).export(module);
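The mocked contexts above pin down how nolog filters access-log entries; in an application the same option goes to the connect/Express middleware. A brief sketch, assuming the public log4js.connectLogger export of lib/connect-logger (the Express app and category name are illustrative):

var log4js = require('log4js');
var express = require('express');

var app = express();
// Skip access-log entries for image requests, mirroring the patterns tested above;
// nolog accepts a string, an array of strings, or a RegExp.
app.use(log4js.connectLogger(log4js.getLogger('http'), {
  nolog: ['\\.gif', '\\.jpe?g']
}));
app.listen(3000);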
340 node_modules/log4js/test/vows/reloadConfiguration-test.js generated vendored Normal file
@@ -0,0 +1,340 @@
|
||||
"use strict";
|
||||
var vows = require('vows')
|
||||
, assert = require('assert')
|
||||
, sandbox = require('sandboxed-module');
|
||||
|
||||
function setupConsoleTest() {
|
||||
var fakeConsole = {}
|
||||
, logEvents = []
|
||||
, log4js;
|
||||
|
||||
['trace','debug','log','info','warn','error'].forEach(function(fn) {
|
||||
fakeConsole[fn] = function() {
|
||||
throw new Error("this should not be called.");
|
||||
};
|
||||
});
|
||||
|
||||
log4js = sandbox.require(
|
||||
'../../lib/log4js',
|
||||
{
|
||||
globals: {
|
||||
console: fakeConsole
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
log4js.clearAppenders();
|
||||
log4js.addAppender(function(evt) {
|
||||
logEvents.push(evt);
|
||||
});
|
||||
|
||||
return { log4js: log4js, logEvents: logEvents, fakeConsole: fakeConsole };
|
||||
}
|
||||
|
||||
vows.describe('reload configuration').addBatch({
|
||||
'with config file changing' : {
|
||||
topic: function() {
|
||||
var pathsChecked = [],
|
||||
logEvents = [],
|
||||
logger,
|
||||
modulePath = 'path/to/log4js.json',
|
||||
fakeFS = {
|
||||
lastMtime: Date.now(),
|
||||
config: {
|
||||
appenders: [
|
||||
{ type: 'console', layout: { type: 'messagePassThrough' } }
|
||||
],
|
||||
levels: { 'a-test' : 'INFO' }
|
||||
},
|
||||
readFileSync: function (file, encoding) {
|
||||
assert.equal(file, modulePath);
|
||||
assert.equal(encoding, 'utf8');
|
||||
return JSON.stringify(fakeFS.config);
|
||||
},
|
||||
statSync: function (path) {
|
||||
pathsChecked.push(path);
|
||||
if (path === modulePath) {
|
||||
fakeFS.lastMtime += 1;
|
||||
return { mtime: new Date(fakeFS.lastMtime) };
|
||||
} else {
|
||||
throw new Error("no such file");
|
||||
}
|
||||
}
|
||||
},
|
||||
fakeConsole = {
|
||||
'name': 'console',
|
||||
'appender': function () {
|
||||
return function(evt) { logEvents.push(evt); };
|
||||
},
|
||||
'configure': function (config) {
|
||||
return fakeConsole.appender();
|
||||
}
|
||||
},
|
||||
setIntervalCallback,
|
||||
fakeSetInterval = function(cb, timeout) {
|
||||
setIntervalCallback = cb;
|
||||
},
|
||||
log4js = sandbox.require(
|
||||
'../../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'fs': fakeFS,
|
||||
'./appenders/console': fakeConsole
|
||||
},
|
||||
globals: {
|
||||
'console': fakeConsole,
|
||||
'setInterval' : fakeSetInterval,
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
log4js.configure('path/to/log4js.json', { reloadSecs: 30 });
|
||||
logger = log4js.getLogger('a-test');
|
||||
logger.info("info1");
|
||||
logger.debug("debug2 - should be ignored");
|
||||
fakeFS.config.levels['a-test'] = "DEBUG";
|
||||
setIntervalCallback();
|
||||
logger.info("info3");
|
||||
logger.debug("debug4");
|
||||
|
||||
return logEvents;
|
||||
},
|
||||
'should configure log4js from first log4js.json found': function(logEvents) {
|
||||
assert.equal(logEvents[0].data[0], 'info1');
|
||||
assert.equal(logEvents[1].data[0], 'info3');
|
||||
assert.equal(logEvents[2].data[0], 'debug4');
|
||||
assert.equal(logEvents.length, 3);
|
||||
}
|
||||
},
|
||||
|
||||
'with config file staying the same' : {
|
||||
topic: function() {
|
||||
var pathsChecked = [],
|
||||
fileRead = 0,
|
||||
logEvents = [],
|
||||
logger,
|
||||
modulePath = require('path').normalize(__dirname + '/../../lib/log4js.json'),
|
||||
mtime = new Date(),
|
||||
fakeFS = {
|
||||
config: {
|
||||
appenders: [
|
||||
{ type: 'console', layout: { type: 'messagePassThrough' } }
|
||||
],
|
||||
levels: { 'a-test' : 'INFO' }
|
||||
},
|
||||
readFileSync: function (file, encoding) {
|
||||
fileRead += 1;
|
||||
assert.isString(file);
|
||||
assert.equal(file, modulePath);
|
||||
assert.equal(encoding, 'utf8');
|
||||
return JSON.stringify(fakeFS.config);
|
||||
},
|
||||
statSync: function (path) {
|
||||
pathsChecked.push(path);
|
||||
if (path === modulePath) {
|
||||
return { mtime: mtime };
|
||||
} else {
|
||||
throw new Error("no such file");
|
||||
}
|
||||
}
|
||||
},
|
||||
fakeConsole = {
|
||||
'name': 'console',
|
||||
'appender': function () {
|
||||
return function(evt) { logEvents.push(evt); };
|
||||
},
|
||||
'configure': function (config) {
|
||||
return fakeConsole.appender();
|
||||
}
|
||||
},
|
||||
setIntervalCallback,
|
||||
fakeSetInterval = function(cb, timeout) {
|
||||
setIntervalCallback = cb;
|
||||
},
|
||||
log4js = sandbox.require(
|
||||
'../../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'fs': fakeFS,
|
||||
'./appenders/console': fakeConsole
|
||||
},
|
||||
globals: {
|
||||
'console': fakeConsole,
|
||||
'setInterval' : fakeSetInterval,
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
log4js.configure(modulePath, { reloadSecs: 3 });
|
||||
logger = log4js.getLogger('a-test');
|
||||
logger.info("info1");
|
||||
logger.debug("debug2 - should be ignored");
|
||||
setIntervalCallback();
|
||||
logger.info("info3");
|
||||
logger.debug("debug4");
|
||||
|
||||
return [ pathsChecked, logEvents, modulePath, fileRead ];
|
||||
},
|
||||
'should only read the configuration file once': function(args) {
|
||||
var fileRead = args[3];
|
||||
assert.equal(fileRead, 1);
|
||||
},
|
||||
'should configure log4js from first log4js.json found': function(args) {
|
||||
var logEvents = args[1];
|
||||
assert.equal(logEvents.length, 2);
|
||||
assert.equal(logEvents[0].data[0], 'info1');
|
||||
assert.equal(logEvents[1].data[0], 'info3');
|
||||
}
|
||||
},
|
||||
|
||||
'when config file is removed': {
|
||||
topic: function() {
|
||||
var pathsChecked = [],
|
||||
fileRead = 0,
|
||||
logEvents = [],
|
||||
logger,
|
||||
modulePath = require('path').normalize(__dirname + '/../../lib/log4js.json'),
|
||||
mtime = new Date(),
|
||||
fakeFS = {
|
||||
config: {
|
||||
appenders: [
|
||||
{ type: 'console', layout: { type: 'messagePassThrough' } }
|
||||
],
|
||||
levels: { 'a-test' : 'INFO' }
|
||||
},
|
||||
readFileSync: function (file, encoding) {
|
||||
fileRead += 1;
|
||||
assert.isString(file);
|
||||
assert.equal(file, modulePath);
|
||||
assert.equal(encoding, 'utf8');
|
||||
return JSON.stringify(fakeFS.config);
|
||||
},
|
||||
statSync: function (path) {
|
||||
this.statSync = function() {
|
||||
throw new Error("no such file");
|
||||
};
|
||||
return { mtime: new Date() };
|
||||
}
|
||||
},
|
||||
fakeConsole = {
|
||||
'name': 'console',
|
||||
'appender': function () {
|
||||
return function(evt) { logEvents.push(evt); };
|
||||
},
|
||||
'configure': function (config) {
|
||||
return fakeConsole.appender();
|
||||
}
|
||||
},
|
||||
setIntervalCallback,
|
||||
fakeSetInterval = function(cb, timeout) {
|
||||
setIntervalCallback = cb;
|
||||
},
|
||||
log4js = sandbox.require(
|
||||
'../../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'fs': fakeFS,
|
||||
'./appenders/console': fakeConsole
|
||||
},
|
||||
globals: {
|
||||
'console': fakeConsole,
|
||||
'setInterval' : fakeSetInterval,
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
log4js.configure(modulePath, { reloadSecs: 3 });
|
||||
logger = log4js.getLogger('a-test');
|
||||
logger.info("info1");
|
||||
logger.debug("debug2 - should be ignored");
|
||||
setIntervalCallback();
|
||||
logger.info("info3");
|
||||
logger.debug("debug4");
|
||||
|
||||
return [ pathsChecked, logEvents, modulePath, fileRead ];
|
||||
},
|
||||
'should only read the configuration file once': function(args) {
|
||||
var fileRead = args[3];
|
||||
assert.equal(fileRead, 1);
|
||||
},
|
||||
'should not clear configuration when config file not found': function(args) {
|
||||
var logEvents = args[1];
|
||||
assert.equal(logEvents.length, 3);
|
||||
assert.equal(logEvents[0].data[0], 'info1');
|
||||
assert.equal(logEvents[1].level.toString(), 'WARN');
|
||||
assert.include(logEvents[1].data[0], 'Failed to load configuration file');
|
||||
assert.equal(logEvents[2].data[0], 'info3');
|
||||
}
|
||||
},
|
||||
|
||||
'when passed an object': {
|
||||
topic: function() {
|
||||
var test = setupConsoleTest();
|
||||
test.log4js.configure({}, { reloadSecs: 30 });
|
||||
return test.logEvents;
|
||||
},
|
||||
'should log a warning': function(events) {
|
||||
assert.equal(events[0].level.toString(), 'WARN');
|
||||
assert.equal(
|
||||
events[0].data[0],
|
||||
'Ignoring configuration reload parameter for "object" configuration.'
|
||||
);
|
||||
}
|
||||
},
|
||||
|
||||
'when called twice with reload options': {
|
||||
topic: function() {
|
||||
var modulePath = require('path').normalize(__dirname + '/../../lib/log4js.json'),
|
||||
fakeFS = {
|
||||
readFileSync: function (file, encoding) {
|
||||
return JSON.stringify({});
|
||||
},
|
||||
statSync: function (path) {
|
||||
return { mtime: new Date() };
|
||||
}
|
||||
},
|
||||
fakeConsole = {
|
||||
'name': 'console',
|
||||
'appender': function () {
|
||||
return function(evt) { };
|
||||
},
|
||||
'configure': function (config) {
|
||||
return fakeConsole.appender();
|
||||
}
|
||||
},
|
||||
setIntervalCallback,
|
||||
intervalCleared = false,
|
||||
clearedId,
|
||||
fakeSetInterval = function(cb, timeout) {
|
||||
setIntervalCallback = cb;
|
||||
return 1234;
|
||||
},
|
||||
log4js = sandbox.require(
|
||||
'../../lib/log4js',
|
||||
{
|
||||
requires: {
|
||||
'fs': fakeFS,
|
||||
'./appenders/console': fakeConsole
|
||||
},
|
||||
globals: {
|
||||
'console': fakeConsole,
|
||||
'setInterval' : fakeSetInterval,
|
||||
'clearInterval': function(interval) {
|
||||
intervalCleared = true;
|
||||
clearedId = interval;
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
log4js.configure(modulePath, { reloadSecs: 3 });
|
||||
log4js.configure(modulePath, { reloadSecs: 15 });
|
||||
|
||||
return { cleared: intervalCleared, id: clearedId };
|
||||
},
|
||||
'should clear the previous interval': function(result) {
|
||||
assert.isTrue(result.cleared);
|
||||
assert.equal(result.id, 1234);
|
||||
}
|
||||
}
|
||||
}).exportTo(module);
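Outside the sandbox, the reload behaviour exercised above is enabled through the second argument to configure. A short sketch (the file path is illustrative):

var log4js = require('log4js');

// Re-check path/to/log4js.json every 30 seconds and re-apply it when its mtime changes;
// passing an object instead of a path makes the reload option a no-op and logs a warning.
log4js.configure('path/to/log4js.json', { reloadSecs: 30 });

var logger = log4js.getLogger('a-test');
logger.info('logged under whatever level the current config gives a-test');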
100 node_modules/log4js/test/vows/setLevel-asymmetry-test.js generated vendored Normal file
@@ -0,0 +1,100 @@
|
||||
"use strict";
|
||||
/* jshint loopfunc: true */
|
||||
// This test shows an asymmetry between setLevel and isLevelEnabled
|
||||
// (in log4js-node@0.4.3 and earlier):
|
||||
// 1) setLevel("foo") works, but setLevel(log4js.levels.foo) silently
|
||||
// does not (sets the level to TRACE).
|
||||
// 2) isLevelEnabled("foo") works as does isLevelEnabled(log4js.levels.foo).
|
||||
//
|
||||
|
||||
// Basic set up
|
||||
var vows = require('vows');
|
||||
var assert = require('assert');
|
||||
var log4js = require('../../lib/log4js');
|
||||
var logger = log4js.getLogger('test-setLevel-asymmetry');
|
||||
|
||||
// uncomment one or other of the following to see progress (or not) while running the tests
|
||||
// var showProgress = console.log;
|
||||
var showProgress = function() {};
|
||||
|
||||
|
||||
// Define the array of levels as string to iterate over.
|
||||
var strLevels= ['Trace','Debug','Info','Warn','Error','Fatal'];
|
||||
|
||||
var log4jsLevels =[];
|
||||
// populate an array with the log4js.levels that match the strLevels.
|
||||
// Would be nice if we could iterate over log4js.levels instead,
|
||||
// but log4js.levels.toLevel prevents that for now.
|
||||
strLevels.forEach(function(l) {
|
||||
log4jsLevels.push(log4js.levels.toLevel(l));
|
||||
});
|
||||
|
||||
|
||||
// We are going to iterate over this object's properties to define an exhaustive list of vows.
|
||||
var levelTypes = {
|
||||
'string': strLevels,
|
||||
'log4js.levels.level': log4jsLevels,
|
||||
};
|
||||
|
||||
// Set up the basic vows batch for this test
|
||||
var batch = {
|
||||
setLevel: {
|
||||
}
|
||||
};
|
||||
|
||||
showProgress('Populating batch object...');
|
||||
|
||||
// Populating the batch object programmatically,
|
||||
// as I don't have the patience to manually populate it with
|
||||
// the (strLevels.length x levelTypes.length) ^ 2 = 144 possible test combinations
|
||||
for (var type in levelTypes) {
|
||||
var context = 'is called with a '+type;
|
||||
var levelsToTest = levelTypes[type];
|
||||
showProgress('Setting up the vows context for '+context);
|
||||
|
||||
batch.setLevel[context]= {};
|
||||
levelsToTest.forEach( function(level) {
|
||||
var subContext = 'of '+level;
|
||||
var log4jsLevel=log4js.levels.toLevel(level.toString());
|
||||
|
||||
showProgress('Setting up the vows sub-context for '+subContext);
|
||||
batch.setLevel[context][subContext] = {topic: level};
|
||||
for (var comparisonType in levelTypes) {
|
||||
levelTypes[comparisonType].forEach(function(comparisonLevel) {
|
||||
var t = type;
|
||||
var ct = comparisonType;
|
||||
var expectedResult = log4jsLevel.isLessThanOrEqualTo(comparisonLevel);
|
||||
var vow = 'isLevelEnabled(' + comparisonLevel +
|
||||
') called with a ' + comparisonType +
|
||||
' should return ' + expectedResult;
|
||||
showProgress('Setting up the vows vow for '+vow);
|
||||
|
||||
batch.setLevel[context][subContext][vow] = function(levelToSet) {
|
||||
logger.setLevel(levelToSet);
|
||||
showProgress(
|
||||
'*** Checking setLevel( ' + level +
|
||||
' ) of type ' + t +
|
||||
', and isLevelEnabled( ' + comparisonLevel +
|
||||
' ) of type ' + ct + '. Expecting: ' + expectedResult
|
||||
);
|
||||
assert.equal(
|
||||
logger.isLevelEnabled(comparisonLevel),
|
||||
expectedResult,
|
||||
'Failed: calling setLevel( ' + level +
|
||||
' ) with type ' + type +
|
||||
', isLevelEnabled( ' + comparisonLevel +
|
||||
' ) of type ' + comparisonType +
|
||||
' did not return ' + expectedResult
|
||||
);
|
||||
};
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
showProgress('Running tests...');
|
||||
|
||||
vows.describe('log4js setLevel asymmetry fix').addBatch(batch).export(module);
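The generated batch boils down to one symmetry check: setLevel and isLevelEnabled should accept the same inputs. A compact sketch of what each generated vow asserts (level names as used in strLevels above):

var log4js = require('log4js');
var logger = log4js.getLogger('test-setLevel-asymmetry');

logger.setLevel('Warn');                                  // string form
console.log(logger.isLevelEnabled('Info'));               // false: INFO is below WARN
console.log(logger.isLevelEnabled(log4js.levels.ERROR));  // true: ERROR is at or above WARN

logger.setLevel(log4js.levels.WARN);                      // Level-object form must behave the same
console.log(logger.isLevelEnabled('Error'));              // true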
168 node_modules/log4js/test/vows/slackAppender-test.js generated vendored Normal file
@@ -0,0 +1,168 @@
|
||||
"use strict";
|
||||
var vows = require('vows');
|
||||
var assert = require('assert');
|
||||
var log4js = require('../../lib/log4js');
|
||||
var sandbox = require('sandboxed-module');
|
||||
|
||||
function setupLogging(category, options) {
|
||||
var msgs = [];
|
||||
|
||||
var slackCredentials = {
|
||||
token: options.token,
|
||||
channel_id: options.channel_id,
|
||||
username: options.username,
|
||||
format: options.format,
|
||||
icon_url: options.icon_url
|
||||
};
|
||||
var fakeSlack = (function (key) {
|
||||
function constructor() {
|
||||
return {
|
||||
options: key,
|
||||
api: function (action, data, callback) {
|
||||
msgs.push(data);
|
||||
callback(false, {status: "sent"});
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
return constructor(key);
|
||||
});
|
||||
|
||||
var fakeLayouts = {
|
||||
layout: function (type, config) {
|
||||
this.type = type;
|
||||
this.config = config;
|
||||
return log4js.layouts.messagePassThroughLayout;
|
||||
},
|
||||
basicLayout: log4js.layouts.basicLayout,
|
||||
coloredLayout: log4js.layouts.coloredLayout,
|
||||
messagePassThroughLayout: log4js.layouts.messagePassThroughLayout
|
||||
};
|
||||
|
||||
var fakeConsole = {
|
||||
errors: [],
|
||||
logs: [],
|
||||
error: function (msg, value) {
|
||||
this.errors.push({msg: msg, value: value});
|
||||
},
|
||||
log: function (msg, value) {
|
||||
this.logs.push({msg: msg, value: value});
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
var slackModule = sandbox.require('../../lib/appenders/slack', {
|
||||
requires: {
|
||||
'slack-node': fakeSlack,
|
||||
'../layouts': fakeLayouts
|
||||
},
|
||||
globals: {
|
||||
console: fakeConsole
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
log4js.addAppender(slackModule.configure(options), category);
|
||||
|
||||
return {
|
||||
logger: log4js.getLogger(category),
|
||||
mailer: fakeSlack,
|
||||
layouts: fakeLayouts,
|
||||
console: fakeConsole,
|
||||
messages: msgs,
|
||||
credentials: slackCredentials
|
||||
};
|
||||
}
|
||||
|
||||
function checkMessages(result) {
|
||||
for (var i = 0; i < result.messages.length; ++i) {
|
||||
assert.equal(result.messages[i].channel, '#CHANNEL');
|
||||
assert.equal(result.messages[i].username, 'USERNAME');
|
||||
assert.ok(new RegExp('.+Log event #' + (i + 1)).test(result.messages[i].text));
|
||||
}
|
||||
}
|
||||
|
||||
log4js.clearAppenders();
|
||||
|
||||
vows.describe('log4js slackAppender').addBatch({
|
||||
'slack setup': {
|
||||
topic: setupLogging('slack setup', {
|
||||
token: 'TOKEN',
|
||||
channel_id: "#CHANNEL",
|
||||
username: "USERNAME",
|
||||
format: "FORMAT",
|
||||
icon_url: "ICON_URL"
|
||||
}),
|
||||
'slack credentials should match': function (result) {
|
||||
assert.equal(result.credentials.token, 'TOKEN');
|
||||
assert.equal(result.credentials.channel_id, '#CHANNEL');
|
||||
assert.equal(result.credentials.username, 'USERNAME');
|
||||
assert.equal(result.credentials.format, 'FORMAT');
|
||||
assert.equal(result.credentials.icon_url, 'ICON_URL');
|
||||
}
|
||||
},
|
||||
|
||||
'basic usage': {
|
||||
topic: function () {
|
||||
var setup = setupLogging('basic usage', {
|
||||
token: 'TOKEN',
|
||||
channel_id: "#CHANNEL",
|
||||
username: "USERNAME",
|
||||
format: "FORMAT",
|
||||
icon_url: "ICON_URL",
|
||||
});
|
||||
|
||||
setup.logger.info("Log event #1");
|
||||
return setup;
|
||||
},
|
||||
'there should be one message only': function (result) {
|
||||
assert.equal(result.messages.length, 1);
|
||||
},
|
||||
'message should contain proper data': function (result) {
|
||||
checkMessages(result);
|
||||
}
|
||||
},
|
||||
'config with layout': {
|
||||
topic: function () {
|
||||
var setup = setupLogging('config with layout', {
|
||||
layout: {
|
||||
type: "tester"
|
||||
}
|
||||
});
|
||||
return setup;
|
||||
},
|
||||
'should configure layout': function (result) {
|
||||
assert.equal(result.layouts.type, 'tester');
|
||||
}
|
||||
},
|
||||
'separate notification for each event': {
|
||||
topic: function () {
|
||||
var self = this;
|
||||
var setup = setupLogging('separate notification for each event', {
|
||||
token: 'TOKEN',
|
||||
channel_id: "#CHANNEL",
|
||||
username: "USERNAME",
|
||||
format: "FORMAT",
|
||||
icon_url: "ICON_URL",
|
||||
});
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #1');
|
||||
}, 0);
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #2');
|
||||
}, 500);
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #3');
|
||||
}, 1100);
|
||||
setTimeout(function () {
|
||||
self.callback(null, setup);
|
||||
}, 3000);
|
||||
},
|
||||
'there should be three messages': function (result) {
|
||||
assert.equal(result.messages.length, 3);
|
||||
},
|
||||
'messages should contain proper data': function (result) {
|
||||
checkMessages(result);
|
||||
}
|
||||
}
|
||||
}).export(module);
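Outside the sandbox the appender above is reached through configuration rather than setupLogging. A hedged sketch (the "slack" type name is inferred from the module path lib/appenders/slack, and all credential values are placeholders):

var log4js = require('log4js');

log4js.configure({
  appenders: [
    {
      type: 'slack',            // assumed type name for lib/appenders/slack
      token: 'TOKEN',           // Slack API token (placeholder)
      channel_id: '#CHANNEL',
      username: 'USERNAME',
      icon_url: 'ICON_URL',
      category: 'slack-alerts'
    }
  ]
});

log4js.getLogger('slack-alerts').info('Log event #1');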
318 node_modules/log4js/test/vows/smtpAppender-test.js generated vendored Normal file
@@ -0,0 +1,318 @@
|
||||
"use strict";
|
||||
var vows = require('vows');
|
||||
var assert = require('assert');
|
||||
var log4js = require('../../lib/log4js');
|
||||
var sandbox = require('sandboxed-module');
|
||||
|
||||
function setupLogging(category, options) {
|
||||
var msgs = [];
|
||||
|
||||
var fakeMailer = {
|
||||
createTransport: function (name, options) {
|
||||
return {
|
||||
config: options,
|
||||
sendMail: function (msg, callback) {
|
||||
msgs.push(msg);
|
||||
callback(null, true);
|
||||
},
|
||||
close: function () {
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
var fakeLayouts = {
|
||||
layout: function (type, config) {
|
||||
this.type = type;
|
||||
this.config = config;
|
||||
return log4js.layouts.messagePassThroughLayout;
|
||||
},
|
||||
basicLayout: log4js.layouts.basicLayout,
|
||||
messagePassThroughLayout: log4js.layouts.messagePassThroughLayout
|
||||
};
|
||||
|
||||
var fakeConsole = {
|
||||
errors: [],
|
||||
error: function (msg, value) {
|
||||
this.errors.push({msg: msg, value: value});
|
||||
}
|
||||
};
|
||||
|
||||
var fakeTransportPlugin = function () {
|
||||
};
|
||||
|
||||
var smtpModule = sandbox.require('../../lib/appenders/smtp', {
|
||||
requires: {
|
||||
'nodemailer': fakeMailer,
|
||||
'nodemailer-sendmail-transport': fakeTransportPlugin,
|
||||
'nodemailer-smtp-transport': fakeTransportPlugin,
|
||||
'../layouts': fakeLayouts
|
||||
},
|
||||
globals: {
|
||||
console: fakeConsole
|
||||
}
|
||||
});
|
||||
|
||||
log4js.addAppender(smtpModule.configure(options), category);
|
||||
|
||||
return {
|
||||
logger: log4js.getLogger(category),
|
||||
mailer: fakeMailer,
|
||||
layouts: fakeLayouts,
|
||||
console: fakeConsole,
|
||||
results: msgs
|
||||
};
|
||||
}
|
||||
|
||||
function checkMessages(result, sender, subject) {
|
||||
for (var i = 0; i < result.results.length; ++i) {
|
||||
assert.equal(result.results[i].from, sender);
|
||||
assert.equal(result.results[i].to, 'recipient@domain.com');
|
||||
assert.equal(result.results[i].subject, subject ? subject : 'Log event #' + (i + 1));
|
||||
assert.ok(new RegExp('.+Log event #' + (i + 1) + '\n$').test(result.results[i].text));
|
||||
}
|
||||
}
|
||||
|
||||
log4js.clearAppenders();
|
||||
vows.describe('log4js smtpAppender').addBatch({
|
||||
'minimal config': {
|
||||
topic: function () {
|
||||
var setup = setupLogging('minimal config', {
|
||||
recipients: 'recipient@domain.com',
|
||||
SMTP: {
|
||||
port: 25,
|
||||
auth: {
|
||||
user: 'user@domain.com'
|
||||
}
|
||||
}
|
||||
});
|
||||
setup.logger.info('Log event #1');
|
||||
return setup;
|
||||
},
|
||||
'there should be one message only': function (result) {
|
||||
assert.equal(result.results.length, 1);
|
||||
},
|
||||
'message should contain proper data': function (result) {
|
||||
checkMessages(result);
|
||||
}
|
||||
},
|
||||
'fancy config': {
|
||||
topic: function () {
|
||||
var setup = setupLogging('fancy config', {
|
||||
recipients: 'recipient@domain.com',
|
||||
sender: 'sender@domain.com',
|
||||
subject: 'This is subject',
|
||||
SMTP: {
|
||||
port: 25,
|
||||
auth: {
|
||||
user: 'user@domain.com'
|
||||
}
|
||||
}
|
||||
});
|
||||
setup.logger.info('Log event #1');
|
||||
return setup;
|
||||
},
|
||||
'there should be one message only': function (result) {
|
||||
assert.equal(result.results.length, 1);
|
||||
},
|
||||
'message should contain proper data': function (result) {
|
||||
checkMessages(result, 'sender@domain.com', 'This is subject');
|
||||
}
|
||||
},
|
||||
'config with layout': {
|
||||
topic: function () {
|
||||
var setup = setupLogging('config with layout', {
|
||||
layout: {
|
||||
type: "tester"
|
||||
}
|
||||
});
|
||||
return setup;
|
||||
},
|
||||
'should configure layout': function (result) {
|
||||
assert.equal(result.layouts.type, 'tester');
|
||||
}
|
||||
},
|
||||
'separate email for each event': {
|
||||
topic: function () {
|
||||
var self = this;
|
||||
var setup = setupLogging('separate email for each event', {
|
||||
recipients: 'recipient@domain.com',
|
||||
SMTP: {
|
||||
port: 25,
|
||||
auth: {
|
||||
user: 'user@domain.com'
|
||||
}
|
||||
}
|
||||
});
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #1');
|
||||
}, 0);
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #2');
|
||||
}, 500);
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #3');
|
||||
}, 1100);
|
||||
setTimeout(function () {
|
||||
self.callback(null, setup);
|
||||
}, 3000);
|
||||
},
|
||||
'there should be three messages': function (result) {
|
||||
assert.equal(result.results.length, 3);
|
||||
},
|
||||
'messages should contain proper data': function (result) {
|
||||
checkMessages(result);
|
||||
}
|
||||
},
|
||||
'multiple events in one email': {
|
||||
topic: function () {
|
||||
var self = this;
|
||||
var setup = setupLogging('multiple events in one email', {
|
||||
recipients: 'recipient@domain.com',
|
||||
sendInterval: 1,
|
||||
SMTP: {
|
||||
port: 25,
|
||||
auth: {
|
||||
user: 'user@domain.com'
|
||||
}
|
||||
}
|
||||
});
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #1');
|
||||
}, 0);
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #2');
|
||||
}, 100);
|
||||
setTimeout(function () {
|
||||
setup.logger.info('Log event #3');
|
||||
}, 1500);
|
||||
setTimeout(function () {
|
||||
self.callback(null, setup);
|
||||
}, 3000);
|
||||
},
|
||||
'there should be two messages': function (result) {
|
||||
assert.equal(result.results.length, 2);
|
||||
},
|
||||
'messages should contain proper data': function (result) {
|
||||
assert.equal(result.results[0].to, 'recipient@domain.com');
|
||||
assert.equal(result.results[0].subject, 'Log event #1');
|
||||
assert.equal(
|
||||
result.results[0].text.match(new RegExp('.+Log event #[1-2]$', 'gm')).length,
|
||||
2
|
||||
);
|
||||
assert.equal(result.results[1].to, 'recipient@domain.com');
|
||||
assert.equal(result.results[1].subject, 'Log event #3');
|
||||
assert.ok(new RegExp('.+Log event #3\n$').test(result.results[1].text));
|
||||
}
|
||||
},
|
||||
'error when sending email': {
|
||||
topic: function () {
|
||||
var setup = setupLogging('error when sending email', {
|
||||
recipients: 'recipient@domain.com',
|
||||
sendInterval: 0,
|
||||
SMTP: {port: 25, auth: {user: 'user@domain.com'}}
|
||||
});
|
||||
|
||||
setup.mailer.createTransport = function () {
|
||||
return {
|
||||
sendMail: function (msg, cb) {
|
||||
cb({message: "oh noes"});
|
||||
},
|
||||
close: function () {
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
setup.logger.info("This will break");
|
||||
return setup.console;
|
||||
},
|
||||
'should be logged to console': function (cons) {
|
||||
assert.equal(cons.errors.length, 1);
|
||||
assert.equal(cons.errors[0].msg, "log4js.smtpAppender - Error happened");
|
||||
assert.equal(cons.errors[0].value.message, 'oh noes');
|
||||
}
|
||||
},
|
||||
'transport full config': {
|
||||
topic: function () {
|
||||
var setup = setupLogging('transport full config', {
|
||||
recipients: 'recipient@domain.com',
|
||||
transport: {
|
||||
plugin: 'sendmail',
|
||||
options: {
|
||||
path: '/usr/sbin/sendmail'
|
||||
}
|
||||
}
|
||||
});
|
||||
setup.logger.info('Log event #1');
|
||||
return setup;
|
||||
},
|
||||
'there should be one message only': function (result) {
|
||||
assert.equal(result.results.length, 1);
|
||||
},
|
||||
'message should contain proper data': function (result) {
|
||||
checkMessages(result);
|
||||
}
|
||||
},
|
||||
'transport no-options config': {
|
||||
topic: function () {
|
||||
var setup = setupLogging('transport no-options config', {
|
||||
recipients: 'recipient@domain.com',
|
||||
transport: {
|
||||
plugin: 'sendmail'
|
||||
}
|
||||
});
|
||||
setup.logger.info('Log event #1');
|
||||
return setup;
|
||||
},
|
||||
'there should be one message only': function (result) {
|
||||
assert.equal(result.results.length, 1);
|
||||
},
|
||||
'message should contain proper data': function (result) {
|
||||
checkMessages(result);
|
||||
}
|
||||
},
|
||||
'transport no-plugin config': {
|
||||
topic: function () {
|
||||
var setup = setupLogging('transport no-plugin config', {
|
||||
recipients: 'recipient@domain.com',
|
||||
transport: {
|
||||
}
|
||||
});
|
||||
setup.logger.info('Log event #1');
|
||||
return setup;
|
||||
},
|
||||
'there should be one message only': function (result) {
|
||||
assert.equal(result.results.length, 1);
|
||||
},
|
||||
'message should contain proper data': function (result) {
|
||||
checkMessages(result);
|
||||
}
|
||||
},
|
||||
'attachment config': {
|
||||
topic: function () {
|
||||
var setup = setupLogging('attachment config', {
|
||||
recipients: 'recipient@domain.com',
|
||||
attachment: {
|
||||
enable: true
|
||||
},
|
||||
SMTP: {
|
||||
port: 25,
|
||||
auth: {
|
||||
user: 'user@domain.com'
|
||||
}
|
||||
}
|
||||
});
|
||||
setup.logger.info('Log event #1');
|
||||
return setup;
|
||||
},
|
||||
'message should contain proper data': function (result) {
|
||||
assert.equal(result.results.length, 1);
|
||||
assert.equal(result.results[0].attachments.length, 1);
|
||||
var attachment = result.results[0].attachments[0];
|
||||
assert.equal(result.results[0].text, "See logs as attachment");
|
||||
assert.equal(attachment.filename, "default.log");
|
||||
assert.equal(attachment.contentType, "text/x-log");
|
||||
assert.ok(new RegExp('.+Log event #' + 1 + '\n$').test(attachment.content));
|
||||
}
|
||||
}
|
||||
}).export(module);
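The options handed to setupLogging above mirror a real configuration for the SMTP appender. A sketch under the same assumptions (addresses are placeholders; sendInterval buffers events into a single mail, as the 'multiple events in one email' context checks):

var log4js = require('log4js');

log4js.configure({
  appenders: [
    {
      type: 'smtp',
      recipients: 'recipient@domain.com',
      sender: 'sender@domain.com',
      subject: 'Application log',
      sendInterval: 60,          // seconds of buffering per email; 0 sends one email per event
      SMTP: { port: 25, auth: { user: 'user@domain.com' } },
      category: 'mail-alerts'
    }
  ]
});

log4js.getLogger('mail-alerts').error('Log event #1');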
86 node_modules/log4js/test/vows/subcategories-test.js generated vendored Normal file
@@ -0,0 +1,86 @@
"use strict";
var assert = require('assert')
, vows = require('vows')
, sandbox = require('sandboxed-module')
, log4js = require('../../lib/log4js')
, levels = require('../../lib/levels');

vows.describe('subcategories').addBatch({
  'loggers created after levels configuration is loaded': {
    topic: function() {

      log4js.configure({
        "levels": {
          "sub1": "WARN",
          "sub1.sub11": "TRACE",
          "sub1.sub11.sub111": "WARN",
          "sub1.sub12": "INFO"
        }
      }, { reloadSecs: 30 });

      return {
        "sub1": log4js.getLogger('sub1'), // WARN
        "sub11": log4js.getLogger('sub1.sub11'), // TRACE
        "sub111": log4js.getLogger('sub1.sub11.sub111'), // WARN
        "sub12": log4js.getLogger('sub1.sub12'), // INFO

        "sub13": log4js.getLogger('sub1.sub13'), // Inherits sub1: WARN
        "sub112": log4js.getLogger('sub1.sub11.sub112'), // Inherits sub1.sub11: TRACE
        "sub121": log4js.getLogger('sub1.sub12.sub121'), // Inherits sub12: INFO
        "sub0": log4js.getLogger('sub0') // Not defined, not inherited: TRACE
      };
    },
    'check logger levels': function(loggers) {
      assert.equal(loggers.sub1.level, levels.WARN);
      assert.equal(loggers.sub11.level, levels.TRACE);
      assert.equal(loggers.sub111.level, levels.WARN);
      assert.equal(loggers.sub12.level, levels.INFO);

      assert.equal(loggers.sub13.level, levels.WARN);
      assert.equal(loggers.sub112.level, levels.TRACE);
      assert.equal(loggers.sub121.level, levels.INFO);
      assert.equal(loggers.sub0.level, levels.TRACE);
    }
  },
  'loggers created before levels configuration is loaded': {
    topic: function() {

      var loggers = {
        "sub1": log4js.getLogger('sub1'), // WARN
        "sub11": log4js.getLogger('sub1.sub11'), // TRACE
        "sub111": log4js.getLogger('sub1.sub11.sub111'), // WARN
        "sub12": log4js.getLogger('sub1.sub12'), // INFO

        "sub13": log4js.getLogger('sub1.sub13'), // Inherits sub1: WARN
        "sub112": log4js.getLogger('sub1.sub11.sub112'), // Inherits sub1.sub11: TRACE
        "sub121": log4js.getLogger('sub1.sub12.sub121'), // Inherits sub12: INFO
        "sub0": log4js.getLogger('sub0') // Not defined, not inherited: TRACE
      };

      log4js.configure({
        "levels": {
          "sub1": "WARN",
          "sub1.sub11": "TRACE",
          "sub1.sub11.sub111": "WARN",
          "sub1.sub12": "INFO"
        }
      }, { reloadSecs: 30 });

      return loggers;
    },
    'check logger levels': function(loggers) {
      assert.equal(loggers.sub1.level, levels.WARN);
      assert.equal(loggers.sub11.level, levels.TRACE);
      assert.equal(loggers.sub111.level, levels.WARN);
      assert.equal(loggers.sub12.level, levels.INFO);

      assert.equal(loggers.sub13.level, levels.WARN);
      assert.equal(loggers.sub112.level, levels.TRACE);
      assert.equal(loggers.sub121.level, levels.INFO);
      assert.equal(loggers.sub0.level, levels.TRACE);
    }
  }
}).exportTo(module);
23 node_modules/log4js/test/vows/with-categoryFilter.json generated vendored Normal file
@@ -0,0 +1,23 @@
{
  "appenders": [
    {
      "type": "categoryFilter",
      "exclude": "web",
      "appender": {
        "type": "file",
        "filename": "test/vows/categoryFilter-noweb.log",
        "layout": {
          "type": "messagePassThrough"
        }
      }
    },
    {
      "category": "web",
      "type": "file",
      "filename": "test/vows/categoryFilter-web.log",
      "layout": {
        "type": "messagePassThrough"
      }
    }
  ]
}
17 node_modules/log4js/test/vows/with-dateFile.json generated vendored Normal file
@@ -0,0 +1,17 @@
{
  "appenders": [
    {
      "category": "tests",
      "type": "dateFile",
      "filename": "test/vows/date-file-test.log",
      "pattern": "-from-MM-dd",
      "layout": {
        "type": "messagePassThrough"
      }
    }
  ],

  "levels": {
    "tests": "WARN"
  }
}
10 node_modules/log4js/test/vows/with-log-rolling.json generated vendored Normal file
@@ -0,0 +1,10 @@
{
  "appenders": [
    {
      "type": "file",
      "filename": "tmp-test.log",
      "maxLogSize": 1024,
      "backups": 3
    }
  ]
}
41 node_modules/log4js/test/vows/with-logLevelFilter.json generated vendored Normal file
@@ -0,0 +1,41 @@
{
  "appenders": [
    {
      "category": "tests",
      "type": "logLevelFilter",
      "level": "WARN",
      "appender": {
        "type": "file",
        "filename": "test/vows/logLevelFilter-warnings.log",
        "layout": {
          "type": "messagePassThrough"
        }
      }
    },
    {
      "category": "tests",
      "type": "logLevelFilter",
      "level": "TRACE",
      "maxLevel": "DEBUG",
      "appender": {
        "type": "file",
        "filename": "test/vows/logLevelFilter-debugs.log",
        "layout": {
          "type": "messagePassThrough"
        }
      }
    },
    {
      "category": "tests",
      "type": "file",
      "filename": "test/vows/logLevelFilter.log",
      "layout": {
        "type": "messagePassThrough"
      }
    }
  ],

  "levels": {
    "tests": "TRACE"
  }
}
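Loaded as-is, this configuration fans the 'tests' category out three ways: WARN and above to logLevelFilter-warnings.log, TRACE through DEBUG to logLevelFilter-debugs.log, and everything at or above the category level TRACE to logLevelFilter.log. A small usage sketch:

var log4js = require('log4js');
log4js.configure('test/vows/with-logLevelFilter.json');

var logger = log4js.getLogger('tests');
logger.debug('written to logLevelFilter.log and logLevelFilter-debugs.log');
logger.warn('written to logLevelFilter.log and logLevelFilter-warnings.log');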