Este commit está contenido en:
Your Name
2020-05-27 17:46:42 +00:00
padre d5a1551995
commit cc05427130
Se han modificado 5 ficheros con 290 adiciones y 0 borrados

29
production/elk/apache.conf Archivo normal
Ver fichero

@@ -0,0 +1,29 @@
# Logstash pipeline: tail an Apache access log and ship events to Elasticsearch.
# NOTE(review): docker-compose.yml starts logstash with "-f nginx.conf", so this
# file appears unused by the stack as committed — confirm before relying on it.
input {
  file {
    # The log file is bind-mounted into the container root (see docker-compose.yml).
    path => "/access.log"
    # start_position => "beginning"
    # "tail" follows the file as it grows. The completed-file options below are
    # only honoured in "read" mode, but "log" requires a log path either way —
    # added file_completed_log_path for consistency with nginx.conf.
    mode => "tail"
    file_completed_action => "log"
    file_completed_log_path => "/dev/null"
  }
}
filter {
  if [path] =~ "access" {
    mutate { replace => { "type" => "apache_access" } }
    grok {
      # Parse the standard Apache/nginx "combined" access-log format.
      match => { "message" => "%{COMBINEDAPACHELOG}" }
    }
  }
  date {
    # Promote the request time parsed by grok to the event @timestamp.
    match => [ "timestamp" , "dd/MMM/yyyy:HH:mm:ss Z" ]
  }
}
output {
  elasticsearch {
    hosts => ["elasticsearch:9200"]
    # NOTE(review): index "nginx" in a file named apache.conf looks copy-pasted
    # from nginx.conf — an "apache" index was probably intended. Left unchanged
    # because Kibana index patterns may already point at "nginx".
    index => "nginx"
    document_type => "access_log"
  }
  # stdout { codec => rubydebug }
}

Ver fichero

@@ -0,0 +1,83 @@
# ELK stack (Elasticsearch + Kibana + Logstash) reading an nginx access log.
# NOTE(review): indentation was reconstructed — the committed copy appears in
# the diff with all leading whitespace stripped, which is not valid YAML.
version: '2'
services:
  elasticsearch:
    image: docker.elastic.co/elasticsearch/elasticsearch:7.6.2
    hostname: elasticsearch
    container_name: elasticsearch
    restart: always
    environment:
      # Single-node cluster: the one node is also the initial master.
      - node.name=elastic
      - cluster.name=cluster01
      - cluster.initial_master_nodes=elastic
      - bootstrap.memory_lock=true
      # Quoted because the value contains a space.
      - "ES_JAVA_OPTS=-Xms1g -Xmx1g"
    ulimits:
      # Required for bootstrap.memory_lock=true to take effect.
      memlock:
        soft: -1
        hard: -1
    volumes:
      - ./elastic:/usr/share/elasticsearch/data
      # - ./elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml
    expose:
      - 9200
      - 9300
    networks:
      mynet:
        ipv4_address: 172.1.0.101
  kibana:
    image: docker.elastic.co/kibana/kibana:7.6.2
    hostname: kibana
    container_name: kibana
    restart: always
    environment:
      SERVER_NAME: kibana.hatthieves.es
      ELASTICSEARCH_HOSTS: http://elasticsearch:9200
    expose:
      - 5601
    networks:
      mynet:
        ipv4_address: 172.1.0.102
  logstash:
    image: docker.elastic.co/logstash/logstash:7.6.2
    hostname: logstash
    container_name: logstash
    restart: always
    # Runs the nginx.conf pipeline only; apache.conf / filebeat.conf in this
    # directory are not wired in here.
    entrypoint:
      - logstash
      - -f
      - nginx.conf
    # expose:
    #   - 5044
    volumes:
      - ./nginx.conf:/usr/share/logstash/nginx.conf:ro
      # Persist the file-input sincedb so logstash does not re-read the log.
      - ./file:/usr/share/logstash/data/plugins/inputs/file
      - /opt/docker/production/nginx/logs/access.log:/access.log
    networks:
      mynet:
        ipv4_address: 172.1.0.103
  # filebeat:
  #   image: docker.elastic.co/beats/filebeat:7.6.2
  #   hostname: filebeat
  #   container_name: filebeat
  #   restart: always
  #   entrypoint:
  #     - /bin/sleep
  #     - infinity
  ##    - /usr/local/bin/docker-entrypoint -e
  #   volumes:
  #     - ./filebeat.yml:/usr/share/filebeat/filebeat.yml
  #     - /opt/docker/nginx/logs/access.log:/access.log
  #   networks:
  #     mynet:
  #       ipv4_address: 172.1.0.104
networks:
  mynet:
    driver: bridge
    ipam:
      config:
        # NOTE(review): 172.1.0.0/24 is PUBLIC address space (RFC1918 private
        # range is 172.16.0.0–172.31.255.255). Consider e.g. 172.28.0.0/24;
        # left unchanged in case the fixed ipv4_address values are referenced
        # elsewhere.
        - subnet: 172.1.0.0/24

129
production/elk/filebeat.conf Archivo normal
Ver fichero

@@ -0,0 +1,129 @@
# Logstash pipeline that receives events from Filebeat (beats input on 5044),
# normalises beat/LSF field differences, parses Apache-module and syslog
# events, and indexes everything into the "apache" index.
# NOTE(review): not referenced by docker-compose.yml (logstash runs nginx.conf)
# — confirm how/whether this pipeline is deployed.
input {
beats {
port => "5044"
}
}
filter {
## beat and LSF compatibility
## https://discuss.elastic.co/t/problem-with-transfer-filebeat-6-1-3-logstash-6-1-3-elasticsearch-6-1-3/136264/6
## https://discuss.elastic.co/t/logstash-errors-after-upgrading-to-filebeat-6-3-0/135984/6
# Copy the beat's "source" into "file", and replace the structured [host]
# object (sent by newer beats) with the flat hostname string older consumers
# expect.
if [beat][hostname] {
if [source] {
if ![file] {
mutate {
add_field => {
"file" => "%{source}"
}
}
}
}
mutate {
remove_field => [ "[host]" ]
}
mutate {
add_field => {
"host" => "%{[beat][hostname]}"
}
}
}
# ## apache2 module
# ## filebeat apache module https://www.elastic.co/guide/en/logstash/current/logstash-config-for-filebeat-modules.html
# NOTE(review): filebeat 7.x ships the module as "apache" (and populates
# [event][module] rather than [fileset][module]) — verify this condition still
# matches events from the 7.6.2 beat, or these branches may never fire.
if [fileset][module] == "apache2" {
if [fileset][name] == "access" {
# Two patterns: a full combined-format request line, and the degenerate
# '"-"' line Apache logs for connections that sent no request.
grok {
match => { "message" => ["%{IPORHOST:[apache2][access][remote_ip]} - %{DATA:[apache2][access][user_name]} \[%{HTTPDATE:[apache2][access][time]}\] \"%{WORD:[apache2][access][method]} %{DATA:[apache2][access][url]} HTTP/%{NUMBER:[apache2][access][http_version]}\" %{NUMBER:[apache2][access][response_code]} %{NUMBER:[apache2][access][body_sent][bytes]}( \"%{DATA:[apache2][access][referrer]}\")?( \"%{DATA:[apache2][access][agent]}\")?",
"%{IPORHOST:[apache2][access][remote_ip]} - %{DATA:[apache2][access][user_name]} \\[%{HTTPDATE:[apache2][access][time]}\\] \"-\" %{NUMBER:[apache2][access][response_code]} -" ] }
remove_field => "message"
}
# Preserve the ingestion time before @timestamp is overwritten below.
mutate {
add_field => { "read_timestamp" => "%{@timestamp}" }
}
# NOTE(review): "YYYY" is Joda week-year, not calendar year ("yyyy") — can
# mis-date events near year boundaries; pattern kept as committed (it
# matches Elastic's published filebeat-modules example).
date {
match => [ "[apache2][access][time]", "dd/MMM/YYYY:H:m:s Z" ]
remove_field => "[apache2][access][time]"
}
# Expand the raw User-Agent string into structured fields.
useragent {
source => "[apache2][access][agent]"
target => "[apache2][access][user_agent]"
remove_field => "[apache2][access][agent]"
}
# Geo-locate the client IP.
geoip {
source => "[apache2][access][remote_ip]"
target => "[apache2][access][geoip]"
}
}
else if [fileset][name] == "error" {
# Two patterns: Apache 2.2-style error lines and the richer 2.4-style
# lines (module:level, pid/tid, optional client).
grok {
match => { "message" => ["\[%{APACHE_TIME:[apache2][error][timestamp]}\] \[%{LOGLEVEL:[apache2][error][level]}\]( \[client %{IPORHOST:[apache2][error][client]}\])? %{GREEDYDATA:[apache2][error][message]}",
"\[%{APACHE_TIME:[apache2][error][timestamp]}\] \[%{DATA:[apache2][error][module]}:%{LOGLEVEL:[apache2][error][level]}\] \[pid %{NUMBER:[apache2][error][pid]}(:tid %{NUMBER:[apache2][error][tid]})?\]( \[client %{IPORHOST:[apache2][error][client]}\])? %{GREEDYDATA:[apache2][error][message1]}" ] }
pattern_definitions => {
"APACHE_TIME" => "%{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{YEAR}"
}
remove_field => "message"
}
# Unify the 2.4-pattern capture name with the 2.2 one.
mutate {
rename => { "[apache2][error][message1]" => "[apache2][error][message]" }
}
date {
match => [ "[apache2][error][timestamp]", "EEE MMM dd H:m:s YYYY", "EEE MMM dd H:m:s.SSSSSS YYYY" ]
remove_field => "[apache2][error][timestamp]"
}
}
}
## syslog, there might be a module for this as well
if [type] == "syslog" {
### "$RepeatedMsgReduction off" /etc/rsyslog.conf
#if [message] =~ /last message repeated [0-9]+ times/ {
# drop { }
#}
## enable high precision timestamps
# comment out $ActionFileDefaultTemplate RSYSLOG_TraditionalFileFormat
# Accepts both traditional syslog timestamps and ISO8601 (high-precision).
grok {
match => { "message" => "(?:%{SYSLOGTIMESTAMP:syslog_timestamp}|%{TIMESTAMP_ISO8601:syslog_timestamp}) %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" }
add_field => {
"syslog_received_at" => "%{@timestamp}"
"syslog_received_from" => "%{host}"
}
}
# Decode the syslog priority/facility/severity fields.
syslog_pri {
}
# Traditional syslog timestamps carry no zone; assume Europe/Madrid.
date {
match => [ "syslog_timestamp", "MMM d HH:mm:ss", "MMM dd HH:mm:ss", "ISO8601" ]
timezone => "Europe/Madrid"
}
mutate {
replace => { "syslog_timestamp" => "%{@timestamp}" }
}
# for check grok data type conversion bug???
mutate {
convert => {
"syslog_pid" => "integer"
}
}
}
## old apache filter
# if [type] == "apache" {
# grok {
# match => { "message" => "%{COMBINEDAPACHELOG}" }
# }
# date {
# match => [ "timestamp" , "dd/MMM/yyyy:HH:mm:ss Z" ]
# timezone => "America/New York"
# }
# }
}
output {
elasticsearch {
hosts => ["elasticsearch:9200"]
index => "apache"
document_type => "log"
}
# stdout { codec => rubydebug }
}

19
production/elk/filebeat.yml Archivo normal
Ver fichero

@@ -0,0 +1,19 @@
# Filebeat 7.6 configuration: ship /access.log to Logstash on port 5044.
# NOTE(review): indentation reconstructed — the committed copy appears in the
# diff with leading whitespace stripped, which is not valid YAML.
#
# NOTE(review): the top-level `prospectors` key below is legacy Filebeat 5.x
# syntax; unprefixed at the root it is not a valid 7.x setting, and the path
# it names is already covered by filebeat.inputs. Commented out — restore
# under `filebeat.inputs` if the tag was still wanted.
# prospectors:
#   - paths:
#       - /access.log
#     tags: [apache_access]
#
# NOTE(review): both the raw log input below and the apache module read the
# same /access.log — each line will be shipped twice. Consider disabling one.
filebeat.inputs:
  - type: log
    paths:
      - /access.log
    # tail_files: true
filebeat.modules:
  - module: apache
    access:
      enabled: true
      var.paths: ["/access.log"]
# Send to Logstash (beats input), not directly to Elasticsearch.
output.logstash:
  hosts: ["logstash:5044"]

30
production/elk/nginx.conf Archivo normal
Ver fichero

@@ -0,0 +1,30 @@
# Logstash pipeline: follow the nginx access log and index it into
# Elasticsearch. This is the pipeline docker-compose.yml actually runs
# (entrypoint: logstash -f nginx.conf).
input {
  file {
    # Bind-mounted into the container root by docker-compose.yml.
    path => "/access.log"
    # start_position => "beginning"
    # Follow the file as it grows; the completed-file settings only apply in
    # "read" mode, and "log" action discards the completion log to /dev/null.
    mode => "tail"
    file_completed_action => "log"
    file_completed_log_path => "/dev/null"
  }
}
filter {
  if [path] =~ "access" {
    mutate { replace => { "type" => "apache_access" } }
    grok {
      # nginx's default "combined" format matches Apache's, so
      # COMBINEDAPACHELOG parses it as-is.
      match => { "message" => "%{COMBINEDAPACHELOG}" }
    }
  }
  date {
    # Promote the request time parsed by grok to the event @timestamp.
    match => [ "timestamp" , "dd/MMM/yyyy:HH:mm:ss Z" ]
  }
}
output {
  elasticsearch {
    hosts => ["elasticsearch:9200"]
    index => "nginx"
    document_type => "access_log"
  }
  # stdout { codec => rubydebug }
}