简介
- 在载入 log4js 模块后需要马上配置该模块,否则 log4js 会引用默认配置或者 LOG4JS_CONFIG (如果已经定义) -- 参考(https://blog.csdn.net/wonder233/article/details/80738658)
- Config 字段: levels appenders categories pm2 pm2InstanceVar disableClustering
-
levels
- 默认优先级 ALL(Number.MIN_VALUE) < TRACE(5000) < DEBUG(10000) < INFO(20000) < WARN(30000) < ERROR(40000) < FATAL(50000) < MARK(9007199254740992) < OFF(Number.MAX_VALUE)
- OFF 不是日志等级,调用 logger.off('...') 将会关闭日志功能
- 默认优先级可以更改
- node_modules/log4js/lib/levels.js 中有默认优先级的颜色范围,优先级整数(数字越小等级越低),
- { ..., LevelsName: { value: 1234, colour: 'yellow' }, ...}
- 可选颜色: 'white', 'grey', 'black','blue', 'cyan', 'green','magenta', 'red', 'yellow'
- 颜色自定义在:node_modules/log4js/lib/layouts.js : const styles = { ... }
-
appenders
- 定义插件的参数到插件实例,将会指定引用的插件类型(type),插件的参数(除开 type 字段,其他字段都会传入插件模块,具体的字段名称需要查看插件内部实现)
- 参考: https://log4js-node.github.io/log4js-node/appenders.html
- Core appender Type: categoryFilter console dateFile file fileSync logLevelFilter multiFile multiprocess recording stderr stdout tcp tcp-server
- Optional appender Type: gelf hipchat logFaces-HTTP logFaces-UDP loggly logstashHTTP logstashUDP mailgun rabbitmq redis slack smtp
- 当 type 字段的值没在上述范围内,则将会加载 type 字段指向的目标目录的模块,如果自己要写插件,则可先参考 https://log4js-node.github.io/log4js-node/writing-appenders.html
-
categories
- 定义将哪些日志类型的 log 输出到哪些插件
- default 一定要配置的,这是所有的 log 都会输出到此类
- 可以配置输出源(appender),输出等级 level, 是否显示调用栈的状态(enableCallStack -- 显示文件名,行号)
- 格式 default: { appenders: [ 'out', 'app' ], level: 'debug' }
-
pm2
- 如果你使用了 pm2,一定要 enable 此选项
- 而且一定要安装 pm2 install pm2-intercom
-
pm2InstanceVar
- 默认 NODE_APP_INSTANCE
- 如果您正在使用pm2并更改了NODE_APP_INSTANCE变量的默认名称,请设置此项。
-
disableClustering
- set this to true if you liked the way log4js used to just ignore clustered environments, or you’re having trouble with PM2 logging.
- Each worker process will do its own logging. Be careful with this if you’re logging to files, weirdness can occur.
-
- Loggers API
- 打印 log :<level>( ... ) 如:debug( 'print some logs' )
- 查看 level 等级的 log 是否打开:is<Level>Enabled()
- addContext( key, value ) 添加一个键值对,将会出现在所有 log 中,目前只有 logFaces 插件支持
- removeContext( key ): addContext 的 anti-action
- clearContext: remove all addContext
- level:打印等级(将会覆盖所有 appender 中的)
- 打印 log :
- shutdown
- addLayout
插件 API
- file
- 三种 file( File Appender ), dateFile( Date Rolling File Appender ), fileSync( Synchronous File Appender )
- File Appender 与 Synchronous File Appender 的区别在于写入方式:前者异步写入日志,后者同步写入日志
- 记录日志的方法:dateFile 根据时间来轮询,file 和 filesync 根据文件大小来轮询
- file(sync) 的参数:filename, maxLogSize, backups, layout, ( next params will passed to underlying nodejs core stream implementation ) encoding(default “utf-8”), mode- integer (default 0644), flags - string (default ‘a’)
- dateFile 的参数: filename, pattern, layout, ( next params will passed to underlying nodejs core stream implementation ), encoding, mode, flags, compress( true/false 是否压缩备份 ), alwaysIncludePattern( true/false 在日志文件名称中是否包含 pattern ), daysToKeep( 滚动天数限制 ), keepFileExt( 滚动保存时候保留文件扩展名称 )
- ...
实操
将 log 保存到 mongodb 数据库
- npm 安装 log4js、lx-helpers、mongodb。
- 我参考 log4js-node-mongodb 将 lib 下的代码拷贝一份做了一定修改,以适应当前 log4js 版本,和 mongodb 版本,参考以下 mongodbAppender.js 中的代码。
修改记录
- 20190705 mongodb.MongoClient.connect 回调函数返回的参数在新 mongodb 库中变成了 MongoClient 类型,而不是 database 类型,所以需要再调用
client.db
获取 database 实例;
mongodbAppender.js
const util = require('util');
const log4js = require('log4js');
const lxHelpers = require('lx-helpers');
const mongodb = require('mongodb');
/**
 * Default layout: printf-style formats the event's `data` array into a
 * single string (util.format semantics: first element is the template).
 *
 * @param {Object} loggingEvent log4js logging event carrying a `data` array.
 * @returns {string} the formatted log message
 */
function messagePassThroughLayout(loggingEvent) {
  const { data } = loggingEvent;
  return util.format(...data);
}
/**
 * Returns a function to log data in mongodb.
 *
 * The connection is opened asynchronously; events logged before the
 * collection handle is ready are cached in memory and flushed afterwards.
 *
 * @param {Object} config The configuration object.
 * @param {string} config.connectionString The connection string to the mongo db.
 * @param {Object=} config.layout The log4js layout used for string events.
 * @param {string=} config.write The write mode: default fast (w:0), 'normal' (w:1), 'safe' (w:1 + journal).
 * @param {string=} config.collectionName Target collection (default 'log').
 * @param {Object=} config.connectionOptions Options forwarded to MongoClient.connect.
 * @returns {Function} the appender: (loggingEvent) => void
 */
function appender( config ){
  if( !config || !config.connectionString ){
    throw new Error('connectionString is missing. Cannot connect to mongodb.');
  }
  let collection;        // set once the async connection succeeds
  const cache = [];      // events buffered until `collection` is ready
  const layout = config.layout || messagePassThroughLayout;
  const collectionName = config.collectionName || 'log';
  const connectionOptions = config.connectionOptions || {};

  // Error wrapper so errors keep name/message when persisted (see below).
  function ERROR(err) {
    Error.call(this);
    Error.captureStackTrace(this, this.constructor);
    this.name = err.toString();
    this.message = err.message || 'error';
  }

  // Recursively clones `src`, rewriting keys MongoDB forbids:
  // a leading '$' becomes '_dollar_' and every '.' becomes '_dot_'.
  function replaceKeys(src) {
    let result = {};
    function mixin(dest, source, cloneFunc) {
      if (lxHelpers.isObject(source)) {
        lxHelpers.forEach(source, function (value, key) {
          // replace $ at start
          if (key[0] === '$') {
            key = key.replace('$', '_dollar_');
          }
          // replace all dots
          key = key.replace(/\./g, '_dot_');
          dest[key] = cloneFunc ? cloneFunc(value) : value;
        });
      }
      return dest;
    }
    // Primitives, functions (typeof !== 'object') and BSON-native objects
    // pass through untouched. The former `typeof src === 'function'` test
    // was unreachable after the `typeof src !== 'object'` check and has
    // been dropped.
    if( ( !src )
      || ( typeof src !== 'object' )
      || ( src instanceof Date )
      || ( src instanceof RegExp )
      || ( src instanceof mongodb.ObjectID ) ){
      return src;
    }
    // wrap Errors in a new object because otherwise they are saved as an empty object {}
    if( lxHelpers.getType(src) === 'error' ){
      return new ERROR(src);
    }
    // Arrays are cloned element by element.
    if( lxHelpers.isArray( src ) ){
      result = [];
      lxHelpers.arrayForEach(src, function (item) {
        result.push( replaceKeys( item ) );
      });
    }
    return mixin( result, src, replaceKeys );
  }

  // Maps config.write to a MongoDB write concern.
  function getOptions() {
    const options = { w: 0 };
    if (config.write === 'normal') {
      options.w = 1;
    }
    if (config.write === 'safe') {
      options.w = 1;
      options.journal = true;
    }
    return options;
  }

  // Inserts one logging event, or caches it while the connection is pending.
  function insert( loggingEvent ){
    const options = getOptions();
    if (!collection) {
      cache.push(loggingEvent);
      return;
    }
    const doc = {
      timestamp: loggingEvent.startTime,
      data: loggingEvent.data,
      level: loggingEvent.level,
      category: loggingEvent.categoryName,
    };
    if (options.w === 0) {
      // fast write: fire and forget, no callback
      collection.insertOne( doc, options );
    } else {
      // acknowledged ("safe") write: report failures.
      // insertOne replaces the deprecated collection.insert().
      collection.insertOne( doc, options, function (error) {
        if (error) {
          console.error('log: Error writing data to log!');
          console.error(error);
          // fixed format string: '%' -> '%s' for the collection name
          console.log('log: Connection: %s, collection: %s, data: %j', config.connectionString, collectionName, loggingEvent);
        }
      } );
    }
  }

  // check connection string prefix
  if (config.connectionString.indexOf('mongodb://') !== 0) {
    config.connectionString = 'mongodb://' + config.connectionString;
  }

  // connect to mongodb. Recent drivers pass a MongoClient (not a Db) to the
  // callback, so the database handle must be obtained via client.db().
  mongodb.MongoClient.connect( config.connectionString, connectionOptions, ( err, cli ) => {
    if( err ){
      console.error( err );
      throw new Error( 'This code not compatible latest mongodb');
    }
    if( cli.s.options.dbName == null ) {
      throw new Error( 'This code not compatible latest mongodb');
    }
    const db = cli.db( cli.s.options.dbName );
    collection = db.collection( config.collectionName || 'log' );
    // flush events that arrived before the connection was ready
    cache.forEach( ( loggingEvent ) => {
      setImmediate( () => {
        insert(loggingEvent);
      } );
    } );
  } );

  return function (loggingEvent) {
    // get the information to log
    if( Object.prototype.toString.call(loggingEvent.data[0])
      === '[object String]') {
      // format string with layout
      loggingEvent.data = layout( loggingEvent );
    }else if( loggingEvent.data.length === 1 ){
      loggingEvent.data = loggingEvent.data[0];
    }else{
      console.log( 'unknown type' );
    }
    loggingEvent.data = replaceKeys( loggingEvent.data );
    // save in db
    insert(loggingEvent);
  };
}
/**
 * log4js entry point for this appender module: resolves the configured
 * layout (when one is given) into a layout function, then builds the
 * appender itself.
 *
 * @param {Object} config appender configuration from log4js.configure()
 * @returns {Function} the appender function
 */
function configure(config) {
  const layoutSpec = config.layout;
  if (layoutSpec) {
    config.layout = log4js.layouts.layout(layoutSpec.type, layoutSpec);
  }
  return appender(config);
}

module.exports.appender = appender;
module.exports.configure = configure;
app.js
// app.js — wires the custom mongodb appender into log4js over TLS.
const fs = require('fs');
const log4js = require('log4js');

// Client-side TLS material for the mongod connection.
const caCert = fs.readFileSync('./ssl/CA.crt', 'utf8');
const clientCert = fs.readFileSync('./ssl/cli.crt', 'utf8');
const clientKey = fs.readFileSync('./ssl/cli.key', 'utf8');

log4js.configure({
  appenders: {
    // type points at the local module implementing the appender
    mongodb: {
      type: 'log/mongodbAppender',
      connectionString: '192.168.1.200:9002/log?ssl=true',
      collectionName: 'log',
      connectionOptions: {
        // useNewUrlParser: true,
        ssl: true,
        sslValidate: true,
        sslCA: caCert,
        sslCert: clientCert,
        sslKey: clientKey,
        checkServerIdentity: false,
      },
    },
  },
  categories: {
    // every logger routes to the mongodb appender at debug and above
    default: { appenders: ['mongodb'], level: 'debug' },
  },
});

const logger = log4js.getLogger();
logger.level = 'debug';
logger.debug("Some debug messages", 'aaa');
logger.fatal({
  whatever: 'foo',
});
将 log 保存到 redis 数据库
不需要用到 lxHelper 这个库了,ES6+ 判断是否为 Array 可以用 Array.isArray
, 因为担心 mongodb 存储会影响磁盘 iops 因此应该用 redis 会好一点。
redisAppender.js
const util = require( 'util' );
const redis = require("redis");
// function messagePassThroughLayout(loggingEvent) {
// return util.format(...loggingEvent.data);
// }
// Published redis client: non-null only while connected AND the db SELECT
// has succeeded; the appender writes through this handle.
let lvOutsideClient = null;
// Client currently being (re)connected; may not be usable yet.
let lvInsideClient = null;
// Resolved layout — currently never assigned; layout handling is commented
// out above. NOTE(review): confirm whether layout support should be wired in.
let lvLayout = null;
// Events buffered in memory while the redis connection is down.
let lvCache = [];
/**
 * Returns a function to log data in redis.
 *
 * Each logging event is JSON-serialized and LPUSHed to a list named after
 * the event's level (e.g. "DEBUG"). While the connection is down, events
 * are cached in memory and flushed once the client reconnects.
 *
 * @param {Object} config The configuration object.
 * @param {string} config.host The redis host.
 * @param {string} config.port The redis port.
 * @param {number} config.dbIndex The redis database index to SELECT.
 * @param {Object=} config.redisOption Optional; when present, host and port
 *   are ignored and this object is passed to redis.createClient() directly.
 * @returns {Function}
 */
function appender( config ){
  if( ( config == null )
    || ( ( config.redisOption == null )
      && ( ( config.host == null )
        || ( config.port == null ) ) )
    || ( config.dbIndex == null )
  ){
    throw new Error( 'Please provide full params' );
  }
  let tvReconnLoop = null;    // handle of the 20s reconnect interval
  let tvReconnectTimes = 0;   // 0 only before the very first connect attempt

  // Drain the in-memory cache into redis. A document whose push fails is
  // put back at the front of the cache so nothing is lost.
  // (const declaration fixes the original implicit-global assignment.)
  const tfPersistenceCache = ( ) => {
    if( ( lvCache.length != 0 ) && ( lvOutsideClient ) ){
      const tvLen = lvCache.length;
      for( let i = 0; i < tvLen; i++ ){
        const tvDocument = lvCache.shift();
        lvOutsideClient.lpush( tvDocument.keys,
          tvDocument.document, ( err ) => {
            if( err ){
              lvCache.splice(0, 0, tvDocument);
            }
          } );
      }
    }
  };

  // Create a client and install connection-lifecycle handlers. The client
  // is published to lvOutsideClient only after SELECT succeeds.
  const tfConnectToRedis = () => {
    if( lvOutsideClient ) return;
    if( config.redisOption )
      lvInsideClient = redis.createClient( config.redisOption );
    else lvInsideClient = redis.createClient( config.port, config.host );
    console.log( '-> connect' )
    lvInsideClient.on('connect', (err) => {
      console.log( '-> connected' )
      if( tvReconnLoop )
        clearInterval( tvReconnLoop );
      tvReconnLoop = null;
      lvInsideClient.select( config.dbIndex, ( err, res ) => {
        if( err ){
          tfConnectServerPerSeconds();
          return;
        }
        lvOutsideClient = lvInsideClient;
        tfPersistenceCache();
      } );
    });
    lvInsideClient.on('reconnecting', (err) => {
      console.log( '-> reconnecting' )
    } )
    lvInsideClient.on('error', (err) => {
      console.log( '-> error' )
      lvOutsideClient = null;
      tfConnectServerPerSeconds();
    });
    lvInsideClient.on('end', (err) => {
      console.log( '-> end' )
      lvOutsideClient = null;
      tfConnectServerPerSeconds();
    });
  };

  // First call connects immediately; subsequent calls arm a single 20s
  // retry loop (tvReconnLoop guards against stacking intervals).
  const tfConnectServerPerSeconds = () => {
    if( tvReconnectTimes == 0 ){
      tfConnectToRedis();
      tvReconnectTimes++;
    }else{
      if( tvReconnLoop ) return;
      tvReconnectTimes++;
      tvReconnLoop = setInterval( () => {
        tfConnectToRedis();
      }, 20000 )
    }
  };

  tfConnectServerPerSeconds();
  // lvLayout = config.layout || messagePassThroughLayout ;

  // The appender: serialize the whole event and LPUSH it under its level
  // name; while disconnected, buffer it in lvCache instead.
  return ( loggingEvent ) => {
    const tvDocument = JSON.stringify( loggingEvent );
    console.log( `Document: ${tvDocument}` );
    if( lvOutsideClient )
      lvOutsideClient.lpush( loggingEvent.level.levelStr, tvDocument, () => {} )
    else
      lvCache.push( {
        keys: loggingEvent.level.levelStr,
        document: tvDocument
      } )
  };
}
/**
 * log4js entry point for the redis appender module.
 *
 * @param {Object} config appender configuration from log4js.configure()
 * @param {Object} layouts log4js layouts registry
 * @returns {Function} the appender function
 */
function configure( config, layouts ){
  // NOTE(review): the resolved layout is computed but never consumed by
  // appender() — presumably reserved for future use; confirm before removing.
  const layout = config.layout
    ? layouts.layout(config.layout.type, config.layout)
    : layouts.basicLayout;
  return appender(config);
}
module.exports.configure = configure;