Intercept Node.js Console Log And Insert Into MongoDB

Easily Manageable And Searchable Application Logs Stored In A Database

Your Node.js application is running in production, leaving hard-to-search, ever-growing log files in its wake. You want all your logs in a centralized, easily searchable place. For that, you have two options: 1 - Buy a subscription to a cloud log management service such as Papertrail. 2 - Spin up your own log database and dump all your application logs into it.

Option 1 is the way to go if you want all the features it offers and, most importantly, can afford the price tag. Otherwise, option 2, though very basic, is something you may want to try.

To Start

You need to have:

  • MongoDB installed and running
  • the mongoose npm package installed
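
As a quick sanity check, you can confirm that mongoose can reach your local MongoDB instance before going any further. A minimal sketch, assuming MongoDB is listening on the default local port and you will use a database named loggingtutorial as in the rest of this tutorial:

// check-connection.js (hypothetical scratch file)
const mongoose = require('mongoose');

mongoose.connect('mongodb://127.0.0.1/loggingtutorial')
  .then(() => {
    console.log('connected to MongoDB');
    process.exit(0);
  })
  .catch((e) => {
    console.error('connection failed', e);
    process.exit(1);
  });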

Log Schema

Add a file log.js and define the log schema in it.

const mongoose = require( 'mongoose' );

const logSchema = new mongoose.Schema({
  text: String,
  tags: {type: Array, index: true},
  timestamp: {type: Date, index: true, default: Date.now, expires: '30d'}
});

module.exports = mongoose.model("Logs", logSchema); 

tags is an indexed array field. timestamp is a date field, also indexed, defaulting to Date.now, with a time-to-live of 30 days (remove expires if you want to keep the logs forever).
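
Before wiring up the interceptor, you can insert one document by hand to confirm the model works. A minimal sketch, assuming the log.js file above sits next to this scratch script and MongoDB runs locally:

const mongoose = require('mongoose');
const Log = require('./log.js');

mongoose.connect('mongodb://127.0.0.1/loggingtutorial');

// insert a single test document; the TTL index will remove it after 30 days
Log.create({ text: ' schema smoke test #test', tags: ['#test'] })
  .then((doc) => console.log('saved log', doc._id))
  .catch((e) => console.error(e));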

Console Log Interceptor

Now add another file, console-log-interceptor.js, and fill it with the code below:

const mongoose = require( 'mongoose' );
const Log = require('/path/to/log.js');

mongoose.connect('mongodb://127.0.0.1/loggingtutorial');

// https://gist.github.com/pguillory/729616
function hook_stdout(callback) {
  process.stdout.write = (function (write) {
    return function (string) {
      write.apply(process.stdout, arguments)
      callback(string)
    }
  })(process.stdout.write)  
}

function logToDb(log) {
  // Model.create() already returns a promise, so no .exec() call is needed
  Log.create(log)
  .catch((e) => {
    console.error(e); // goes to stderr, so it won't re-trigger the stdout hook
  })
}

function extractTags(str){  
  let tags = str.split(' ').filter(v => v.startsWith('#'));  
  tags = tags.length ? tags.map((t) => t.toLowerCase()) : null;
  return tags;
}

module.exports = function (serverName) {
  const serverNameTag = serverName ? `#${serverName}` : '';
  hook_stdout(function (str) {
    let text = ` ${str.trim()} ${serverNameTag}` //add empty space at the start so that first hash tag can be extracted if present
    const log = {
      text,
      tags: extractTags(text)
    }
    logToDb(log);
  })
}

Here’s the explanation of the code:

- mongoose.connect

Connects to the loggingtutorial database.

- hook_stdout

Taken and modified from this gist, it’s the function that actually hooks/intercepts process.stdout.write (and, in turn, console.log, console.debug and console.info) and calls our provided callback with the intercepted string. write.apply(process.stdout, arguments) continues with the original process.stdout.write, as if no interception had occurred.

(Word of caution: don’t call console.log from within the hook_stdout callback, or it will trigger recursive calls, resulting in a ‘Maximum call stack size exceeded’ error.)
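
To see the hook in isolation, here’s a tiny sketch with no database involved, assuming you’ve copied hook_stdout into a scratch file; every line written to stdout is mirrored to stderr:

hook_stdout(function (str) {
  // mirroring to stderr is safe here, since only stdout is hooked
  process.stderr.write(`[intercepted] ${str}`);
});

console.log('hello world'); // still printed to stdout, and also mirrored to stderr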

- module.exports

Is assigned a function that other files call to wire up the interceptor. It expects a serverName argument which, if present, is treated as a hashtag (serverNameTag).

Next, it calls hook_stdout with a callback that receives the intercepted string as str. The callback prepares the log data (text and tags) and calls logToDb.

- logToDb

Inserts the log document into MongoDB.

- extractTags

Splits the text on spaces, filters for values starting with #, and converts all tags to lower case. Returns the array of hashtags, or null if there are none.

Though this hashtag extraction approach is easier than a complex regex, the simplicity has its shortcomings. Each tag must have a space on either side, with no comma, dot, exclamation mark or anything else you don’t want to be part of the tag. A regex-based alternative is sketched after the examples below.

//incorrect
console.log('this is a #tag, #thank_you'); // tags: ["#tag,", "#thank_you"]

//correct
console.log('this is a #tag , #thank_you');
//OR
console.log('this is a #tag #thank_you'); // tags: ["#tag", "#thank_you"]
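
If that restriction bothers you, a regex-based variant can pick tags out regardless of surrounding punctuation. This is only a sketch (the name extractTagsRegex and the allowed tag characters are assumptions, not part of the code above):

function extractTagsRegex(str) {
  // match # followed by letters, digits or underscores, ignoring surrounding punctuation
  const matches = str.match(/#[a-z0-9_]+/gi);
  return matches ? matches.map((t) => t.toLowerCase()) : null;
}

extractTagsRegex('this is a #tag, #Thank_You!'); // ["#tag", "#thank_you"]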

How To Use

From any other file, simply require console-log-interceptor.js and call the exported function with an optional server/script name.

// in http_server_1
require('/path/to/console-log-interceptor.js')('http_server_1');
console.log("test #log");

// in http_server_2
require('/path/to/console-log-interceptor.js')('http_server_2');
console.log("test #log #lorem #ipsum");

// in migration_script
require('/path/to/console-log-interceptor.js')('migration_script');
console.log("migration #successful");

// in cronjob
require('/path/to/console-log-interceptor.js')('cronjob');
const err = new Error("server error");
console.log("#update #error while updating records", err);

In the database, these logs will be stored as:

{
    "_id" : ObjectId("5cbf635d6740d2278863c2ac"),
    "text" : " test #log #http_server_1",
    "timestamp" : ISODate("2019-04-23T19:11:25.592Z"),
    "tags" : [ 
        "#log", 
        "#http_server_1"
    ],
    "__v" : 0
}


{
    "_id" : ObjectId("5cbf637e5e081227b22c185d"),
    "text" : " test #log #lorem #ipsum #http_server_2",
    "timestamp" : ISODate("2019-04-23T19:11:58.241Z"),
    "tags" : [ 
        "#log", 
        "#lorem", 
        "#ipsum", 
        "#http_server_2"
    ],
    "__v" : 0
}


{
    "_id" : ObjectId("5cbf639be13df327ca6aa1cd"),
    "text" : " migration #successful #migration_script",
    "timestamp" : ISODate("2019-04-23T19:12:27.408Z"),
    "tags" : [ 
        "#successful", 
        "#migration_script"
    ],
    "__v" : 0
}


{
    "_id" : ObjectId("5cbf641e167442282bfe20c5"),
    "text" : " #update #error while updating records Error: server error\n    at Object.<anonymous> (/home/user/Desktop/project/cronjob.js:21:13)\n    at Module._compile (internal/modules/cjs/loader.js:688:30)\n    at Object.Module._extensions..js (internal/modules/cjs/loader.js:699:10)\n    at Module.load (internal/modules/cjs/loader.js:598:32)\n    at tryModuleLoad (internal/modules/cjs/loader.js:537:12)\n    at Function.Module._load (internal/modules/cjs/loader.js:529:3)\n    at Function.Module.runMain (internal/modules/cjs/loader.js:741:12)\n    at startup (internal/bootstrap/node.js:285:19)\n    at bootstrapNodeJSCore (internal/bootstrap/node.js:739:3) #cronjob",
    "timestamp" : ISODate("2019-04-23T19:14:38.595Z"),
    "tags" : [ 
        "#update", 
        "#error", 
        "#cronjob"
    ],
    "__v" : 0
}

That’s about it! You now have the logs from all these different scripts and servers in one place, in a searchable format.

You can search and narrow down these logs by a combination of tags and time through MongoDB queries.

Examples:

  db.getCollection('logs').find({tags: {$all: ["#cronjob", "#error"]}}); //where both these tags exist

  db.getCollection('logs').find({tags: {$in: ["#cronjob", "#error"]}}); //where either tag exists

  db.getCollection('logs').find({tags: "#cronjob", "timestamp" : {$lt: ISODate("2019-04-24T00:00:00.000Z"), $gte: ISODate("2019-04-23T00:00:00.000Z")}}) //logs with #cronjob tag, within certain time

Much easier than direct database queries, however, is to build a small frontend app with a text field and a date picker, and query these logs through an API on the backend.
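
For example, here’s a minimal sketch of such a backend endpoint, assuming Express is installed and reusing the Log model from above (the route and query parameters are just illustrative):

const express = require('express');
const mongoose = require('mongoose');
const Log = require('/path/to/log.js');

mongoose.connect('mongodb://127.0.0.1/loggingtutorial');

const app = express();

// GET /logs?tags=%23cronjob,%23error&from=2019-04-23&to=2019-04-24
app.get('/logs', async (req, res) => {
  const query = {};
  if (req.query.tags) query.tags = { $all: req.query.tags.split(',') };
  if (req.query.from || req.query.to) {
    query.timestamp = {};
    if (req.query.from) query.timestamp.$gte = new Date(req.query.from);
    if (req.query.to) query.timestamp.$lt = new Date(req.query.to);
  }
  const logs = await Log.find(query).sort({ timestamp: -1 }).limit(100);
  res.json(logs);
});

app.listen(3000);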