diff --git a/elk/.env b/elk/.env new file mode 100644 index 00000000..1b796c2e --- /dev/null +++ b/elk/.env @@ -0,0 +1 @@ +ELK_VERSION=6.3.2 diff --git a/elk/README.md b/elk/README.md new file mode 100644 index 00000000..ab43f9d6 --- /dev/null +++ b/elk/README.md @@ -0,0 +1,12 @@ +Example ELK Stack +=== + + +## Usage + +```bash +$ docker-compose up +$ nc localhost 5000 +$ # Visit localhost:5601 for the kibana dashboard +$ docker-compose down +``` diff --git a/elk/docker-compose.yml b/elk/docker-compose.yml new file mode 100644 index 00000000..ce9087df --- /dev/null +++ b/elk/docker-compose.yml @@ -0,0 +1,45 @@ +version: '2' + +services: + + elasticsearch: + image: "docker.elastic.co/elasticsearch/elasticsearch:${ELK_VERSION}" + container_name: elasticsearch + ports: + - "9200:9200" + - "9300:9300" + environment: + ES_JAVA_OPTS: "-Xmx256m -Xms256m" + #command: ["bash", "-c", "echo 'discovery.type: single-node'>>/usr/share/elasticsearch/config/elasticsearch.yml; /usr/local/bin/docker-entrypoint.sh eswrapper"] + command: ["bash", "-c", "/usr/local/bin/docker-entrypoint.sh eswrapper"] + networks: + - elk + + logstash: + image: "docker.elastic.co/logstash/logstash:${ELK_VERSION}" + container_name: logstash + volumes: + - ./logstash/pipeline:/usr/share/logstash/pipeline:ro + ports: + - "5000:5000" + environment: + LS_JAVA_OPTS: "-Xmx256m -Xms256m" + depends_on: + - elasticsearch + command: ["bash", "-c", "echo 'path.config: /usr/share/logstash/pipeline'>>/usr/share/logstash/config/logstash.yml; /usr/local/bin/docker-entrypoint"] + networks: + - elk + + kibana: + image: "docker.elastic.co/kibana/kibana:${ELK_VERSION}" + container_name: kibana + ports: + - "5601:5601" + depends_on: + - elasticsearch + networks: + - elk + +networks: + elk: + driver: bridge \ No newline at end of file diff --git a/elk/logstash/pipeline/logstash.conf b/elk/logstash/pipeline/logstash.conf new file mode 100644 index 00000000..e5f558d9 --- /dev/null +++ b/elk/logstash/pipeline/logstash.conf @@ 
-0,0 +1,45 @@ +# example to parse HLF logs +# Pipeline flow: raw TCP input on port 5000 -> mutate/gsub strips ANSI escape sequences +# from 'message' -> grok copies the whole line into 'content' and removes 'message' -> +# documents are indexed into Elasticsearch under a daily logstash-YYYY.MM.dd index. + +input { + tcp { + port => 5000 + } +} + +## Add your filters / logstash plugins configuration here + +#"message" =>"%{TIMESTAMP_ISO8601:logtime}\s\[%{DATA:logthread}\]\s%{LOGLEVEL:loglevel}\s\s%{DATA:logclass}\s\[\].{4}%{GREEDYDATA:logcontent}" + +#"message" =>"%{NOTSPACE:host} %{NOTSPACE:color1} ^[[36m[%{NUMBER:number} %{SYSLOGTIMESTAMP:timestamp}] [%{NOTSPACE:component}] %{NOTSPACE:func} -> %{LOGLEVEL:loglevel}^[[0m %{GREEDYDATA:content}" + +# Test for parsing HLF logs +filter{ + mutate { + # Remove ANSI escape/color sequences (ESC '[' ... 'm' or 'K') before grok parsing. + gsub => ["message", "\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[m|K]", ""] + } + grok { + #"message" =>"%{NOTSPACE:host} %{NOTSPACE:color1} ^[[36m[%{NUMBER:number} %{SYSLOGTIMESTAMP:timestamp}] [%{NOTSPACE:component}] %{NOTSPACE:func} -> %{LOGLEVEL:loglevel}^[[0m %{GREEDYDATA:content}" + # NOTE(review): this active pattern is a catch-all — it captures the entire line as + # 'content' without structured parsing; the commented patterns above are HLF-specific + # alternatives, presumably kept for later use — confirm before removing. + match => { + "message" => "%{GREEDYDATA:content}" + } + remove_field => ["message"] + } +} + + + +#if "_grokparsefailure" in [tags] { +# drop { } +#} + +output { + # 'elasticsearch' resolves via the compose service name on the shared 'elk' network. + elasticsearch { + hosts => "elasticsearch:9200" + index => "logstash-%{+YYYY.MM.dd}" + } +} + + +#stdout { +# codec => json +#}