docker-compose-files/elk/logstash/pipeline/logstash.conf

# Example pipeline to parse Hyperledger Fabric (HLF) logs
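# Log lines are received as plain text over TCP on port 5000.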
input {
  tcp {
    port => 5000
  }
}
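# Quick manual test (assuming port 5000 is published to the host; some nc
# variants need -N or -q1 to exit after sending):
#   echo 'hello from nc' | nc localhost 5000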
## Add your filters / logstash plugins configuration here
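# Alternative grok patterns, left here commented out for reference: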
#"message" =>"%{TIMESTAMP_ISO8601:logtime}\s\[%{DATA:logthread}\]\s%{LOGLEVEL:loglevel}\s\s%{DATA:logclass}\s\[\].{4}%{GREEDYDATA:logcontent}"
#"message" =>"%{NOTSPACE:host} %{NOTSPACE:color1} ^[[36m[%{NUMBER:number} %{SYSLOGTIMESTAMP:timestamp}] [%{NOTSPACE:component}] %{NOTSPACE:func} -> %{LOGLEVEL:loglevel}^[[0m %{GREEDYDATA:content}"
# Test for parsing HLF logs
filter {
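  # Strip ANSI color escape sequences (ESC[...m / ESC[...K) that colored
  # container logs may contain, so the rest of the filter sees plain text.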
  mutate {
    gsub => ["message", "\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[m|K]", ""]
  }
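  # Minimal grok: copy the whole cleaned-up line into the "content" field
  # and drop the original "message" field.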
  grok {
    #"message" =>"%{NOTSPACE:host} %{NOTSPACE:color1} ^[[36m[%{NUMBER:number} %{SYSLOGTIMESTAMP:timestamp}] [%{NOTSPACE:component}] %{NOTSPACE:func} -> %{LOGLEVEL:loglevel}^[[0m %{GREEDYDATA:content}"
    match => {
      "message" => "%{GREEDYDATA:content}"
    }
    remove_field => ["message"]
  }
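  # A stricter, field-extracting pattern for a classic Fabric log line such as
  #   2018-08-18 01:11:05.123 UTC [msp] GetLocalMSP -> DEBU 001 ...
  # could look like the hypothetical sketch below; verify it against your own
  # Fabric version's log layout before swapping it in for the grok above:
  #grok {
  #  match => {
  #    "message" => "%{TIMESTAMP_ISO8601:logtime} %{WORD:tz} \[%{DATA:component}\] %{NOTSPACE:func} -> %{WORD:loglevel} %{GREEDYDATA:content}"
  #  }
  #}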
  # Optionally drop events that grok could not parse:
  #if "_grokparsefailure" in [tags] {
  #  drop { }
  #}
}
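# Send all events to the Elasticsearch service; a new index is created per day.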
output {
  elasticsearch {
    hosts => "elasticsearch:9200"
    index => "logstash-%{+YYYY.MM.dd}"
  }
  # Uncomment to also print each event to stdout as JSON while debugging:
  #stdout {
  #  codec => json
  #}
}
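# Note: with the official Logstash Docker image, pipeline files like this one
# are usually mounted under /usr/share/logstash/pipeline/ (an assumption based
# on this directory's name; check the accompanying docker-compose file for the
# exact mount point).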