log4js notes and practice (saving logs to MongoDB or Redis)

Introduction

  1. Configure log4js right after requiring the module; otherwise log4js falls back to its default configuration, or to LOG4JS_CONFIG if that environment variable is defined -- see https://blog.csdn.net/wonder233/article/details/80738658
  2. Config fields: levels, appenders, categories, pm2, pm2InstanceVar, disableClustering
    1. levels
      1. Default priorities: ALL(Number.MIN_VALUE) < TRACE(5000) < DEBUG(10000) < INFO(20000) < WARN(30000) < ERROR(40000) < FATAL(50000) < MARK(9007199254740992) < OFF(Number.MAX_VALUE)
      2. OFF is not meant as a level for actual logging; it is used to switch logging off.
      3. The default priorities can be changed.
      4. node_modules/log4js/lib/levels.js holds the default levels with their colours and integer priorities (a smaller number means a lower level),
      5. in the form { ..., levelName: { value: 1234, colour: 'yellow' }, ... }
      6. Available colours: 'white', 'grey', 'black', 'blue', 'cyan', 'green', 'magenta', 'red', 'yellow'
      7. The colour styles themselves are defined in node_modules/log4js/lib/layouts.js: const styles = { ... }
    2. appenders
      1. Defines named appender instances and their parameters: the type field selects which appender module to use, and every field other than type is passed into that module (check the appender's implementation for the exact field names).
      2. Reference: https://log4js-node.github.io/log4js-node/appenders.html
      3. Core appender types: categoryFilter, console, dateFile, file, fileSync, logLevelFilter, multiFile, multiprocess, recording, stderr, stdout, tcp, tcp-server
      4. Optional appender types: gelf, hipchat, logFaces-HTTP, logFaces-UDP, loggly, logstashHTTP, logstashUDP, mailgun, rabbitmq, redis, slack, smtp
      5. If the value of type is not in the lists above, log4js loads the module from the path that type points to. To write your own appender, start from https://log4js-node.github.io/log4js-node/writing-appenders.html
    3. categories
      1. Defines which categories of log events are routed to which appenders.
      2. default must always be configured; loggers whose category is not configured fall back to it.
      3. Each category can configure its appenders, its level, and enableCallStack (whether to record the call stack, so the file name and line number can be shown).
      4. Format: default: { appenders: [ 'out', 'app' ], level: 'debug' } (a full configuration sketch follows this list).
    4. pm2
      1. If you run under pm2, you must enable this option,
      2. and you must also install pm2-intercom: pm2 install pm2-intercom
    5. pm2InstanceVar
      1. Defaults to NODE_APP_INSTANCE.
      2. Set this if you use pm2 and have changed the default name of the NODE_APP_INSTANCE variable.
    6. disableClustering
      1. set this to true if you liked the way log4js used to just ignore clustered environments, or you’re having trouble with PM2 logging.
      2. Each worker process will do its own logging. Be careful with this if you’re logging to files, weirdness can occur.
  3. Loggers API
    1. Logging: <level>( ... ), e.g. logger.debug( 'print some logs' )
    2. Check whether a level is enabled: isLevelEnabled(level), or shortcuts such as isDebugEnabled()
    3. addContext( key, value ): adds a key/value pair that is attached to every log event; currently only the logFaces appenders make use of it
    4. removeContext( key ): the inverse of addContext
    5. clearContext(): removes all pairs added with addContext
    6. level: the logging level of this logger (overrides the configured category level)
  4. shutdown: log4js.shutdown(callback) flushes and closes all appenders; call it before the process exits
  5. addLayout: log4js.addLayout(name, fn) registers a custom layout (all of the above are pulled together in the sketch below)
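
A minimal sketch pulling the configuration fields and the Loggers API together. The appender names out/app, the custom audit level and the file name app.log are made up for illustration, and enableCallStack plus custom-level logger methods assume a reasonably recent log4js version:

const log4js = require('log4js');

// Custom layouts must be registered before configure() references them.
log4js.addLayout('json', config => logEvent => JSON.stringify(logEvent) + (config.separator || ''));

log4js.configure({
    // Optional: a custom level between DEBUG and INFO, using one of the allowed colours.
    levels: {
        audit: { value: 15000, colour: 'yellow' }
    },
    appenders: {
        out: { type: 'stdout', layout: { type: 'json', separator: ',' } },
        app: { type: 'file', filename: 'app.log', maxLogSize: 10485760, backups: 3 }
    },
    categories: {
        default: { appenders: [ 'out', 'app' ], level: 'debug', enableCallStack: true }
    }
    // pm2: true,                      // enable when running under pm2 (also run: pm2 install pm2-intercom)
    // pm2InstanceVar: 'INSTANCE_ID',  // only if you renamed NODE_APP_INSTANCE
});

const logger = log4js.getLogger();          // uses the 'default' category
logger.addContext('requestId', 'abc-123');  // attached to every event (used by the logFaces appenders)
if (logger.isDebugEnabled()) {
    logger.debug('print some logs');
}
logger.audit('custom level in action');     // method generated from the custom level name
logger.removeContext('requestId');

// Flush and close all appenders before exiting.
log4js.shutdown(() => process.exit(0));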

Appender API

  1. file
    1. There are three file appenders: file (File Appender), dateFile (Date Rolling File Appender) and fileSync (Synchronous File Appender).
    2. The difference between the File Appender and the Synchronous File Appender is that the latter writes to the log file synchronously.
    3. Rolling strategy: dateFile rolls by time, while file and fileSync roll by file size.
    4. Parameters for file (and fileSync): filename, maxLogSize, backups, layout, then parameters passed through to the underlying Node.js core stream implementation: encoding (default 'utf-8'), mode (integer, default 0644), flags (string, default 'a').
    5. Parameters for dateFile: filename, pattern, layout, the same stream pass-through parameters (encoding, mode, flags), plus compress (true/false, gzip rolled backups), alwaysIncludePattern (true/false, include the pattern in the log file name), daysToKeep (how many days of rolled files to keep), keepFileExt (keep the file extension when rolling). A sketch follows this list.
  2. ...
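
As mentioned above, a sketch of rolling by size versus rolling by date. The filenames and limits are illustrative, and the parameter names follow the list above (newer log4js versions may rename some of them):

const log4js = require('log4js');

log4js.configure({
    appenders: {
        // Rolls by size: once app.log exceeds maxLogSize bytes a new file is started,
        // keeping up to `backups` old files (app.log.1, app.log.2, ...).
        sized: { type: 'file', filename: 'logs/app.log', maxLogSize: 10485760, backups: 5 },
        // Rolls by date: the pattern becomes the suffix of the rolled file.
        daily: {
            type: 'dateFile',
            filename: 'logs/app-daily.log',
            pattern: '.yyyy-MM-dd',
            alwaysIncludePattern: false,
            daysToKeep: 14,
            keepFileExt: true,
            compress: true
        }
    },
    categories: {
        default: { appenders: [ 'sized', 'daily' ], level: 'info' }
    }
});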

Hands-on

Saving logs to MongoDB

  1. Install log4js, lx-helpers and mongodb via npm.
  2. I copied the lib code from log4js-node-mongodb and adapted it to the current log4js and mongodb versions; see mongodbAppender.js below.

Change log

  1. 2019-07-05: in newer mongodb drivers the mongodb.MongoClient.connect callback receives a MongoClient instance rather than a database instance, so client.db() has to be called to obtain the database instance (see the sketch below);
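
For reference, the shape of the newer driver API that the change above adapts to; the connection string and the 'log' database/collection names are placeholders:

const { MongoClient } = require('mongodb');

// In mongodb 3.x the connect callback yields a MongoClient, not a Db,
// so the database handle must be obtained explicitly via client.db().
MongoClient.connect('mongodb://localhost:27017/log', (err, client) => {
    if (err) throw err;
    const db = client.db('log');               // database instance
    const collection = db.collection('log');   // collection used by the appender
    collection.insertOne({ msg: 'hello' }, () => client.close());
});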

mongodbAppender.js

const util = require('util');
const log4js = require('log4js');
const lxHelpers = require('lx-helpers');
const mongodb = require('mongodb');

function messagePassThroughLayout(loggingEvent) {
  return util.format(...loggingEvent.data);
}
/**
 * Returns a function to log data in mongodb.
 *
 * @param {Object} config The configuration object.
 * @param {string} config.connectionString The connection string to the mongo db.
 * @param {string=} config.collectionName The collection to write to (default 'log').
 * @param {Object=} config.connectionOptions Options passed to MongoClient.connect.
 * @param {Object=} config.layout The log4js layout.
 * @param {string=} config.write The write mode ('normal' or 'safe'; anything else means unacknowledged fast writes).
 * @returns {Function}
 */
function appender( config ){
    if( !config || !config.connectionString ){
        throw new Error('connectionString is missing. Cannot connect to mongodb.');
    }

    var collection;
    var cache = [];
    var layout = config.layout || messagePassThroughLayout ;
    var collectionName = config.collectionName || 'log';
    var connectionOptions = config.connectionOptions || {};

    function ERROR(err) {
    Error.call(this);
    Error.captureStackTrace(this, this.constructor);

    this.name = err.toString();
    this.message = err.message || 'error';
    }

    function replaceKeys(src) {
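    // MongoDB field names must not start with '$' or contain '.', so sanitize keys recursively.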
    var result = {};

    function mixin(dest, source, cloneFunc) {
        if (lxHelpers.isObject(source)) {
            lxHelpers.forEach(source, function (value, key) {
                // replace $ at start
                if (key[0] === '$') {
                    key = key.replace('$', '_dollar_');
                }

                // replace all dots
                key = key.replace(/\./g, '_dot_');

                dest[key] = cloneFunc ? cloneFunc(value) : value;
            });
        }

        return dest;
    }

    if( ( !src )
     || ( typeof src !== 'object' )
     || ( typeof src === 'function' )
     || ( src instanceof Date )
     || ( src instanceof RegExp )
     || ( src instanceof mongodb.ObjectID ) ){
        return src;
    }

    // wrap Errors in a new object because otherwise they are saved as an empty object {}
    if( lxHelpers.getType(src) === 'error' ){
        return new ERROR(src);
    }

    // Array
    if( lxHelpers.isArray( src ) ){
        result = [];

        lxHelpers.arrayForEach(src, function (item) {
            result.push( replaceKeys( item ) );
        });
    }

    return mixin( result, src, replaceKeys );
    }

    function getOptions() {
    var options = { w: 0 };

    if (config.write === 'normal') {
        options.w = 1;
    }

    if (config.write === 'safe') {
        options.w = 1;
        options.journal = true;
    }

    return options;
    }

    function insert( loggingEvent ){
    // if( loggingEvent.data == null ) return;

    var options = getOptions();

    if (collection) {
        if (options.w === 0) {
            // fast write
            collection.insertOne( {
                timestamp: loggingEvent.startTime,
                data: loggingEvent.data,
                level: loggingEvent.level,
                category: loggingEvent.categoryName,
            }, options );
        } else {
            // acknowledged write ('normal' or 'safe' mode): report failures
            collection.insertOne( {
                timestamp: loggingEvent.startTime,
                data: loggingEvent.data,
                level: loggingEvent.level,
                category: loggingEvent.categoryName,
            }, options, function (error) {
                if (error) {
                    console.error('log: Error writing data to log!');
                    console.error(error);
                    console.log('log: Connection: %s, collection: %s, data: %j', config.connectionString, collectionName, loggingEvent);
                }
            } );
        }
    } else {
        cache.push(loggingEvent);
    }
    }

    // check connection string
    if (config.connectionString.indexOf('mongodb://') !== 0) {
    config.connectionString = 'mongodb://' + config.connectionString;
    }

    // connect to mongodb
    mongodb.MongoClient.connect( config.connectionString, connectionOptions, ( err, cli ) => {

    if( err ){
        console.error( err );
        throw new Error( 'Could not connect to mongodb: ' + err.message );
    }

    if( cli.s.options.dbName == null ) {
        throw new Error( 'Cannot read the database name from the MongoClient; this code may not be compatible with your mongodb driver version' );
    }
    let db = cli.db( cli.s.options.dbName );
    collection = db.collection( config.collectionName || 'log' );

    // process cache
    cache.forEach( ( loggingEvent ) => {
        setImmediate( () => {
            insert(loggingEvent);
        } );
    } );
    } );

    return function (loggingEvent) {
    // get the information to log
    if( Object.prototype.toString.call(loggingEvent.data[0])
        === '[object String]') {
        // format string with layout
        loggingEvent.data = layout( loggingEvent );
    }else if( loggingEvent.data.length === 1 ){
        loggingEvent.data = loggingEvent.data[0];
    }else{
        console.log( 'unknown type' );
    }
    loggingEvent.data = replaceKeys( loggingEvent.data );
    // save in db
    insert(loggingEvent);
    };
}

function configure(config) {
    if( config.layout ){
    config.layout = log4js.layouts.layout(
        config.layout.type, config.layout );
    }

    return appender(config);
}

module.exports.appender = appender;
module.exports.configure = configure;

app.js

const fs = require( 'fs' );
const log4js = require('log4js');

const lvCA = fs.readFileSync( './ssl/CA.crt', 'utf8' );
const lvCert = fs.readFileSync( './ssl/cli.crt', 'utf8' );
const lvKey = fs.readFileSync( './ssl/cli.key', 'utf8' );

log4js.configure( {
    appenders: {
    mongodb: {
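        // 'type' points at the custom appender module shown above (here log/mongodbAppender.js)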
        type: 'log/mongodbAppender',
        connectionString: '192.168.1.200:9002/log?ssl=true',
        collectionName: 'log',
        connectionOptions: {
            // useNewUrlParser: true,
            ssl: true,
            sslValidate: true,
            sslCA: lvCA,
            sslCert: lvCert,
            sslKey: lvKey,
            checkServerIdentity: false,
        }
    }
    },
    categories: {
    default: { appenders: [ 'mongodb' ], level: 'debug' }
    }
} )

const logger = log4js.getLogger();
logger.level = 'debug';

logger.debug("Some debug messages", 'aaa');
logger.fatal({
  whatever: 'foo'
})

Saving logs to Redis

The lx-helpers library is no longer needed here: with ES6+, Array.isArray covers the array check. And since writing every log entry to MongoDB can eat into disk IOPS, Redis should be the better fit.

redisAppender.js

const util = require( 'util' );
const redis = require("redis");


// function messagePassThroughLayout(loggingEvent) {
//   return util.format(...loggingEvent.data);
// }

let lvOutsideClient = null;
let lvInsideClient = null;
let lvLayout = null;
let lvCache = [];
/**
 * Returns a function to log data in redis.
 *
 * @param {Object} config The configuration object.
 * @param {string} config.host The redis host.
 * @param {string} config.port The redis port.
 * @param {number} config.dbIndex The redis database index to select.
 * @param {Object=} config.redisOption Optional. If present, host and port are
 * ignored and all redis options are taken from this object.
 * @returns {Function}
 */
function appender( config ){
    if( ( config == null )
    || ( ( config.redisOption == null )
    && ( ( config.host == null )
    || ( config.port == null ) ) )
    || ( config.dbIndex == null )
    ){
        throw new Error( 'Please provide full params' );
    }

    let tvReconnLoop = null;
    let tvReconnectTimes = 0;

    // Flush events that were cached while the redis connection was unavailable.
    const tfPersistenceCache = () => {
        if( ( lvCache.length != 0 ) && ( lvOutsideClient ) ){

            let tvLen = lvCache.length;
            for( let i = 0; i < tvLen; i++ ){
                let tvDocument = lvCache.shift();

                lvOutsideClient.lpush( tvDocument.keys,
                tvDocument.document, ( err ) => {
                    if( err ){
                        lvCache.splice(0, 0, tvDocument);
                    }
                } );
            }
        }
    }

    const tfConnectToRedis = () => {
        if( lvOutsideClient ) return;

        if( config.redisOption )
            lvInsideClient = redis.createClient( config.redisOption );
        else lvInsideClient = redis.createClient( config.port, config.host );

        console.log( '-> connect' )
        lvInsideClient.on('connect', (err) => {
            console.log( '-> connected' )
            if( tvReconnLoop )
                clearInterval( tvReconnLoop );
            tvReconnLoop = null;
            lvInsideClient.select( config.dbIndex, ( err, res ) => {
                if( err ){
                    tfConnectServerPerSeconds();
                    return;
                }
                lvOutsideClient = lvInsideClient;
                tfPersistenceCache();
            } );
        });

        lvInsideClient.on('reconnecting', (err) => {
            console.log( '-> reconnecting' )
        } )

        lvInsideClient.on('error', (err) => {
            console.log( '-> error' )
            lvOutsideClient = null;
            tfConnectServerPerSeconds();
        });


        lvInsideClient.on('end', (err) => {
            console.log( '-> end' )
            lvOutsideClient = null;
            tfConnectServerPerSeconds();
        });

    }

    // Throttled (re)connect: the first call connects immediately; after that,
    // failed connections are retried every 20 seconds.
    const tfConnectServerPerSeconds = () => {

        if( tvReconnectTimes == 0 ){
            tfConnectToRedis();
            tvReconnectTimes++;
        }else{
            if( tvReconnLoop ) return;
            tvReconnectTimes++;
            tvReconnLoop = setInterval( () => {
                tfConnectToRedis();
            }, 20000 )
        }
    }

    tfConnectServerPerSeconds();

    // lvLayout = config.layout || messagePassThroughLayout ;

    return ( loggingEvent ) => {
        let tvDocument = JSON.stringify( loggingEvent );
        console.log( `Document: ${tvDocument}` );
        if( lvOutsideClient )
            lvOutsideClient.lpush( loggingEvent.level.levelStr, tvDocument, () => {}   )
        else
            lvCache.push( {
                keys: loggingEvent.level.levelStr,
                document: tvDocument
            } )
    };
}

function configure( config, layouts ){
    let layout = layouts.basicLayout;

    if (config.layout) {
        layout = layouts.layout(config.layout.type, config.layout);
    }

    return appender(config);
}

module.exports.configure = configure;
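
For completeness, wiring the redis appender into an app looks much like the mongodb case. A minimal sketch, assuming the file above is saved as log/redisAppender.js and a local redis listening on the default port (both assumptions):

const log4js = require('log4js');

log4js.configure({
    appenders: {
        redis: {
            type: 'log/redisAppender',   // path to the custom appender above
            host: '127.0.0.1',
            port: 6379,
            dbIndex: 1
            // redisOption: { ... }      // alternatively, pass a full node_redis options object here
        }
    },
    categories: {
        default: { appenders: [ 'redis' ], level: 'debug' }
    }
});

const logger = log4js.getLogger();
logger.debug('Some debug messages');
// Each event is LPUSHed onto a redis list named after its level, e.g. LRANGE DEBUG 0 -1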

References

  1. log4js official site
  2. log4js-node-mongodb
  3. node-mongodb-native API