Add elk case

pull/124/head
Baohua Yang 2018-08-17 10:11:05 -07:00
parent f05cd6ccf8
commit 43e32414c5
4 changed files with 103 additions and 0 deletions

1
elk/.env 100644

@ -0,0 +1 @@
ELK_VERSION=6.3.2
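docker-compose reads this `.env` automatically, and a variable set in the shell takes precedence over it, so the pinned version can be overridden per run. A minimal sketch (the `6.4.0` tag is only illustrative):

```bash
$ ELK_VERSION=6.4.0 docker-compose up -d   # shell env overrides the .env pin
```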

12
elk/README.md 100644

@ -0,0 +1,12 @@
Example ELK Stack
===
## Usage
```bash
$ docker-compose up
$ nc localhost 5000 < PATH_TO_LOG_FILE
$ # Visit localhost:5601 for the Kibana dashboard
$ docker-compose down
```
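For a quick smoke test after `docker-compose up`, a single line can be pushed into the pipeline's TCP input and queried back from Elasticsearch. A sketch, assuming the default ports above; the sample log line and query string are illustrative:

```bash
$ echo 'sample HLF log line' | nc localhost 5000   # some nc variants need -q0 or -N to exit on EOF
$ curl 'localhost:9200/logstash-*/_search?q=content:sample&pretty'
```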

45
elk/docker-compose.yml 100644

@ -0,0 +1,45 @@
version: '2'

services:
  elasticsearch:
    image: "docker.elastic.co/elasticsearch/elasticsearch:${ELK_VERSION}"
    container_name: elasticsearch
    ports:
      - "9200:9200"  # REST API
      - "9300:9300"  # inter-node transport
    environment:
      ES_JAVA_OPTS: "-Xmx256m -Xms256m"
    #command: ["bash", "-c", "echo 'discovery.type: single-node'>>/usr/share/elasticsearch/config/elasticsearch.yml; /usr/local/bin/docker-entrypoint.sh eswrapper"]
    command: ["bash", "-c", "/usr/local/bin/docker-entrypoint.sh eswrapper"]
    networks:
      - elk

  logstash:
    image: "docker.elastic.co/logstash/logstash:${ELK_VERSION}"
    container_name: logstash
    volumes:
      - ./logstash/pipeline:/usr/share/logstash/pipeline:ro
    ports:
      - "5000:5000"  # TCP input defined in the pipeline config
    environment:
      LS_JAVA_OPTS: "-Xmx256m -Xms256m"
    depends_on:
      - elasticsearch
    command: ["bash", "-c", "echo 'path.config: /usr/share/logstash/pipeline'>>/usr/share/logstash/config/logstash.yml; /usr/local/bin/docker-entrypoint"]
    networks:
      - elk

  kibana:
    image: "docker.elastic.co/kibana/kibana:${ELK_VERSION}"
    container_name: kibana
    ports:
      - "5601:5601"  # web UI
    depends_on:
      - elasticsearch
    networks:
      - elk

networks:
  elk:
    driver: bridge
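A quick way to confirm the three published ports above come up as wired, a sketch against the default endpoints:

```bash
$ curl -s localhost:9200/_cluster/health?pretty            # Elasticsearch REST API
$ curl -s -o /dev/null -w '%{http_code}\n' localhost:5601  # Kibana UI, expect 200 once ready
$ docker logs logstash 2>&1 | tail                         # pipeline startup messages
```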

45
elk/logstash/pipeline/logstash.conf 100644

@ -0,0 +1,45 @@
# Example pipeline to parse Hyperledger Fabric (HLF) logs.
input {
  tcp {
    port => 5000
  }
}

## Add your filters / logstash plugins configuration here.
## Candidate grok patterns for structured HLF logs:
#"message" => "%{TIMESTAMP_ISO8601:logtime}\s\[%{DATA:logthread}\]\s%{LOGLEVEL:loglevel}\s\s%{DATA:logclass}\s\[\].{4}%{GREEDYDATA:logcontent}"
#"message" => "%{NOTSPACE:host} %{NOTSPACE:color1} ^[[36m[%{NUMBER:number} %{SYSLOGTIMESTAMP:timestamp}] [%{NOTSPACE:component}] %{NOTSPACE:func} -> %{LOGLEVEL:loglevel}^[[0m %{GREEDYDATA:content}"

# Test for parsing HLF logs
filter {
  # Strip ANSI color escape sequences before matching.
  mutate {
    gsub => ["message", "\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mK]", ""]
  }
  grok {
    match => {
      "message" => "%{GREEDYDATA:content}"
    }
    remove_field => ["message"]
  }
  # Optionally drop events that grok failed to parse:
  #if "_grokparsefailure" in [tags] {
  #  drop { }
  #}
}

output {
  elasticsearch {
    hosts => "elasticsearch:9200"
    index => "logstash-%{+YYYY.MM.dd}"
  }
  # Uncomment to also dump events to stdout for debugging:
  #stdout {
  #  codec => json
  #}
}
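The `gsub` above strips the ANSI color escapes that Fabric wraps around its log lines. The same regex can be exercised outside Logstash, e.g. with perl; a sketch, where the colored sample line is invented:

```bash
$ printf '\x1b[36m[001] [peer] main -> INFO\x1b[0m hello\n' \
    | perl -pe 's/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mK]//g'
[001] [peer] main -> INFO hello
```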