How to filter a simple message via Logstash to Elasticsearch, dividing the message into multiple fields -


this input file:

{"meta":"","level":"error","message":"clienterrorhandler: erro não previsto ou mapeado durante chamada dos serviços.","timestamp":"2017-04-06t16:08:37.861z"} {"meta":"","level":"error","message":"clienterrorhandler: erro não previsto ou mapeado durante chamada dos serviços.","timestamp":"2017-04-06t19:40:17.682z"} 

Basically, such logs are the output of a Node.js application via the winston module. My doubt focuses on how to adjust the Logstash filter so that 4 fields are created in Elasticsearch.

My intention is to see "columns" (properties or fields may be better words in the Elasticsearch context, I guess): level (e.g. error), message_source (e.g. clientErrorHandler), message_content (e.g. erro não ... serviços), and error_time without milliseconds (e.g. 2017-04-06T19:40:17).

I got stuck on this point:

1 - I used this logstash.conf:

input {
  file {
    path => "/home/demetrio/dev/testes_manuais/elk/logs/*"
    start_position => "beginning"
  }
}

filter {
  grok {
    # Grok pattern names are case-sensitive and upper-case. The lower-case
    # names (%{syslog5424sd:...}) never match any built-in pattern, which is
    # why every event was tagged _grokparsefailure.
    # NOTE(review): since the log line is a JSON string, a json codec/filter
    # would be a better fit than grok here — see the accepted approach below.
    match => {
      "message" => '%{SYSLOG5424SD:loglevel} %{TIMESTAMP_ISO8601:date} %{GREEDYDATA:content}'
    }
  }

  date {
    # Joda-time format tokens are case-sensitive:
    # MM = month, HH = 24-hour clock, SSS = milliseconds.
    match => [ "date", "yyyy-MM-dd HH:mm:ss.SSS" ]
    locale => en
  }
}

output {
  stdout {
    codec => plain {
      # Charset names are upper-case identifiers in Logstash.
      charset => "ISO-8859-1"
    }
  }
  elasticsearch {
    hosts => "http://127.0.0.1:9200"
    index => "dmz-logs-indice"
  }
}

2 - I searched Elasticsearch via the Kibana DevTools console:

GET _search
{
  "query": {
    "match_all": {}
  }
}

and saw:

{   "took": 5,   "timed_out": false,   "_shards": {     "total": 6,     "successful": 6,     "failed": 0   },   "hits": {     "total": 3,     "max_score": 1,     "hits": [       {         "_index": ".kibana",         "_type": "config",         "_id": "5.3.0",         "_score": 1,         "_source": {           "buildnum": 14823         }       },       {         "_index": "dmz-logs-indice",         "_type": "logs",         "_id": "avtjlz5x6gscwn5fxxa_",         "_score": 1,         "_source": {           "path": "/home/demetrio/dev/testes_manuais/elk/logs/logs.log",           "@timestamp": "2017-04-07t16:09:36.996z",           "@version": "1",           "host": "nodejs",           "message": """{"meta":"","level":"error","message":"clienterrorhandler: erro não previsto ou mapeado durante chamada dos serviços.","timestamp":"2017-04-06t16:08:37.861z"}""",           "tags": [             "_grokparsefailure"           ]         }       },       {         "_index": "dmz-logs-indice",         "_type": "logs",         "_id": "avtjlz5x6gscwn5fxxba",         "_score": 1,         "_source": {           "path": "/home/demetrio/dev/testes_manuais/elk/logs/logs.log",           "@timestamp": "2017-04-07t16:09:36.998z",           "@version": "1",           "host": "nodejs",           "message": """{"meta":"","level":"error","message":"clienterrorhandler: erro não previsto ou mapeado durante chamada dos serviços.","timestamp":"2017-04-06t19:40:17.682z"}""",           "tags": [             "_grokparsefailure"           ]         }       }     ]   } } 

I guess I should use regular expressions or grok in order to divide the message into 4 pieces:

1 - level; 2 - the part of the message that comes before ":"; 3 - the part of the message that comes after ":"; 4 - the timestamp

and, if possible, provide better column (field/property) labels like:

1 - level 2 - message_source 3 - message_content 4 - error_time

and remove the timestamp's milliseconds (the fractional-second part).

P.S. In case a future reader is interested in how the logging is done in Node.js, here it is:

...

// Winston 2.x logger setup. The winston API names are camelCase
// (emitErrs, Logger, transports.File, ...); the lower-cased forms in the
// scraped snippet would throw at runtime.
var winston = require('winston');
winston.emitErrs = true;

var logger = new winston.Logger({
  transports: [
    new winston.transports.File({
      level: 'error',
      filename: './logs/logs.log',
      handleExceptions: true,
      json: true,
      maxsize: 5242880, // 5MB (winston 2.x spells this option all-lowercase)
      maxFiles: 5,
      colorize: false,
      prettyPrint: true
    })
  ],
  exitOnError: false
});

// ...

// Express error-handling middleware: logs the unexpected error and
// answers the client with a generic 500 payload.
function clientErrorHandler(err, req, res, next) {
  logger.log("error", "clienterrorhandler: erro não previsto ou mapeado durante chamada dos serviços.", err.message);

  res.send(500, { error: 'erro genérico!' });
}

app.use(clientErrorHandler);

P.S. 2: I read questions like "filter a specific message with Logstash before sending it to Elasticsearch", but I am still stuck.

Since your application outputs the log as a JSON string, you can configure Logstash to parse the log as JSON. This is as simple as adding codec => "json" to the file input configuration.

Below is an example configuration for your scenario:

input {
  file {
    path => "/home/demetrio/dev/testes_manuais/elk/logs/*"
    start_position => "beginning"
    codec => "json"
  }
}

filter {
  # Promote the log's own `timestamp` field into @timestamp so Kibana
  # orders events by when they were logged, then drop the duplicate field.
  date {
    match => [ "timestamp", "iso8601" ]
    remove_field => [ "timestamp" ]
  }
}

output {
  stdout {
    # rubydebug prints every field of the event — handy while debugging.
    codec => rubydebug
  }

  elasticsearch {
    hosts => "http://127.0.0.1:9200"
    index => "dmz-logs-indice"
  }
}

This is a sample of the stdout output from Logstash:

{           "path" => "/home/demetrio/dev/testes_manuais/elk/logs/demo.log",     "@timestamp" => 2017-04-06t19:40:17.682z,          "level" => "error",           "meta" => "",       "@version" => "1",           "host" => "dbf718c4b8e4",        "message" => "clienterrorhandler: erro não previsto ou mapeado durante chamada dos serviços.", } 

Comments

Popular posts from this blog

Command prompt result in label. Python 2.7 -

javascript - How do I use URL parameters to change link href on page? -

amazon web services - AWS Route53 Trying To Get Site To Resolve To www -