Short notes on using Logstash to import logs into an Elasticsearch cluster.
Processing custom HTTP logs (grok)
The grok pattern used for my custom logs (which include the extra time_firstbyte and time_processing fields), shown as part of a complete Logstash configuration:
input {
  stdin {}              # handy for pasting test lines interactively
  tcp { port => 3333 }  # receive log lines over TCP
}

filter {
  # Parse the custom access-log format; time_firstbyte and time_processing
  # are the extra fields appended after the user agent.
  grok {
    match => [
      "message",
      "%{IP:client} %{NOTSPACE} %{NOTSPACE} \[(?<timestamp>%{MONTHDAY}/%{MONTH}/%{YEAR} %{TIME})\] \"%{WORD:verb} %{NOTSPACE:url} HTTP/%{NUMBER:http_version}\" %{NUMBER:code} (?:%{NUMBER:bytes}|-) (?:\"(?:%{URI:referrer}|-)\"|%{QS:referrer}) %{QS:agent} %{NUMBER:time_firstbyte} %{NUMBER:time_processing}"
    ]
  }
  # Use the parsed timestamp as the event's @timestamp (Joda format: "yyyy" is the calendar year).
  date { match => [ "timestamp", "dd/MMM/yyyy HH:mm:ss" ] }
  # Add GeoIP fields based on the client IP.
  geoip { source => "client" }
}

output {
  elasticsearch { host => "cluster.local" protocol => "transport" }
  stdout { codec => rubydebug }
}
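
Because the configuration also reads from stdin and echoes parsed events with the rubydebug codec, the pattern can be tested by running Logstash with this file and pasting a log line. The line below is purely illustrative (the IP, URL, user agent and timings are made up), but it matches the grok pattern above and should come back with client, verb, url, http_version, code, bytes, referrer, agent, time_firstbyte and time_processing populated:

  192.168.1.10 - frank [07/Mar/2023 13:27:09] "GET /index.html HTTP/1.1" 200 2326 "https://example.com/start" "Mozilla/5.0" 0.012 0.034

Note that grok's %{NUMBER} captures are stored as strings. If you want to aggregate on the timing fields in Elasticsearch (for example, averaging time_firstbyte in Kibana), a minimal sketch using the mutate filter's older array-style convert syntax, placed after the grok inside the filter block, would cast them to floats:

  # Sketch: cast the timing fields to floats so Elasticsearch indexes them as numbers.
  mutate {
    convert => [ "time_firstbyte", "float", "time_processing", "float" ]
  }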