ubuntu@master:~$ git clone https://github.com/deviantony/docker-elk.git
ubuntu@master:~$ cd docker-elk
ubuntu@master:~/docker-elk$ sudo docker-compose up -d
Creating dockerelk_elasticsearch_1 ... done
WARNING: Image for service kibana was built because it did not already exist. To rebuild this image you must use `docker-compose build` or `docker-compose up --build`.
Creating dockerelk_elasticsearch_1 ...
Creating dockerelk_logstash_1 ... done
Creating dockerelk_kibana_1 ... done
ubuntu@master:~/docker-elk$ sudo docker-compose ps
Name Command State Ports
-------------------------------------------------------------------------------------------------------------------------------------------
dockerelk_elasticsearch_1 /usr/local/bin/docker-entr ... Up 0.0.0.0:9200->9200/tcp, 0.0.0.0:9300->9300/tcp
dockerelk_kibana_1 /usr/local/bin/dumb-init - ... Up 0.0.0.0:5601->5601/tcp
dockerelk_logstash_1 /usr/local/bin/docker-entr ... Up 0.0.0.0:5044->5044/tcp, 0.0.0.0:5044->5044/udp, 0.0.0.0:9600->9600/tcp
ubuntu@master:~$ curl -u elastic:changeme http://localhost:9200
{
"name" : "9da3f6728c23",
"cluster_name" : "docker-cluster",
"cluster_uuid" : "SvBevg8qTyqBI5k9kaL2MQ",
"version" : {
"number" : "7.6.2",
"build_flavor" : "default",
"build_type" : "docker",
"build_hash" : "ef48eb35cf30adf4db14086e8aabd07ef6fb113f",
"build_date" : "2020-03-26T06:34:37.794943Z",
"build_snapshot" : false,
"lucene_version" : "8.4.0",
"minimum_wire_compatibility_version" : "6.8.0",
"minimum_index_compatibility_version" : "6.0.0-beta1"
},
"tagline" : "You Know, for Search"
}
ubuntu@master:~$ curl -L -O https://artifacts.elastic.co/downloads/beats/filebeat/filebeat-7.7.0-amd64.deb
% Total % Received % Xferd Average Speed Time Time Time Current
Dload Upload Total Spent Left Speed
100 26.8M 100 26.8M 0 0 23.8M 0 0:00:01 0:00:01 --:--:-- 23.8M
ubuntu@master:~$ sudo dpkg -i filebeat-7.7.0-amd64.deb
Selecting previously unselected package filebeat.
(Reading database ... 207642 files and directories currently installed.)
Preparing to unpack filebeat-7.7.0-amd64.deb ...
Unpacking filebeat (7.7.0) ...
Setting up filebeat (7.7.0) ...
Processing triggers for systemd (229-4ubuntu21.27) ...
Processing triggers for ureadahead (0.100.0-19.1) ...
ubuntu@master:/etc/filebeat$ sudo vi filebeat.yml
Commented out the following section:
########################
#-------------------------- Elasticsearch output ------------------------------
#output.elasticsearch:
# Array of hosts to connect to.
# hosts: ["localhost:9200"]
and added the following lines:
##########################
#----------------------------- Logstash output --------------------------------
output.logstash:
# The Logstash hosts
hosts: ["localhost:5044"]
###########################
filebeat.inputs:
# Each - is an input. Most options can be set at the input level, so
# you can use different inputs for various configurations.
# Below are the input specific configurations.
- type: log
# Change to true to enable this input configuration.
enabled: true
# Paths that should be crawled and fetched. Glob based paths.
paths:
#- /var/log/*.log
#- c:\programdata\elasticsearch\logs\*
- /srv/nfs/k8sdata/log/app/app-api-*/app.log
Now we need to change the Logstash input so that it can receive the content forwarded by Filebeat.
##############################################################################
For that, edit the logstash.conf file at ./logstash/pipeline/ inside the docker-elk folder.
ubuntu@master:~/docker-elk$ cat ./logstash/pipeline/logstash.conf
input {
beats {
port => 5044
}
}
## Add your filters / logstash plugins configuration here
filter {
grok {
match => { "message" => ["%{DATE_EU:date} %{TIME:logTime} *\[%{DATA:requestId}] %{LOGLEVEL:logLevel} %{NUMBER:processId} *\[%{DATA:threadName}] %{JAVACLASS:className} *\[%{DATA:origin}] :%{GREEDYDATA:message}"] }
}
}
output {
elasticsearch {
hosts => "elasticsearch:9200"
user => "elastic"
password => "changeme"
manage_template => false
index => "%{[@metadata][beat]}-%{[@metadata][version]}-%{+YYYY.MM.dd}"
}
}
ubuntu@master:~/docker-elk$ sudo docker-compose restart logstash
Restarting dockerelk_logstash_1 ... done
ubuntu@master:~/docker-elk$
ubuntu@master:~/docker-elk$ sudo docker exec -it dockerelk_logstash_1 sh
sh-4.2$
sh-4.2$
sh-4.2$ cat /usr/share/logstash/pipeline/logstash.conf
input {
beats {
port => 5044
}
}
## Add your filters / logstash plugins configuration here
filter {
grok {
match => { "message" => ["%{DATE_EU:date} %{TIME:logTime} *\[%{DATA:requestId}] %{LOGLEVEL:logLevel} %{NUMBER:processId} *\[%{DATA:threadName}] %{JAVACLASS:className} *\[%{DATA:origin}] :%{GREEDYDATA:message}"] }
}
}
output {
elasticsearch {
hosts => "elasticsearch:9200"
user => "elastic"
password => "changeme"
manage_template => false
index => "%{[@metadata][beat]}-%{[@metadata][version]}-%{+YYYY.MM.dd}"
}
}
ubuntu@master:~$ cd docker-elk
ubuntu@master:~/docker-elk$ sudo docker-compose up -d
Creating dockerelk_elasticsearch_1 ... done
WARNING: Image for service kibana was built because it did not already exist. To rebuild this image you must use `docker-compose build` or `docker-compose up --build`.
Creating dockerelk_elasticsearch_1 ...
Creating dockerelk_logstash_1 ... done
Creating dockerelk_kibana_1 ... done
ubuntu@master:~/docker-elk$ sudo docker-compose ps
Name Command State Ports
-------------------------------------------------------------------------------------------------------------------------------------------
dockerelk_elasticsearch_1 /usr/local/bin/docker-entr ... Up 0.0.0.0:9200->9200/tcp, 0.0.0.0:9300->9300/tcp
dockerelk_kibana_1 /usr/local/bin/dumb-init - ... Up 0.0.0.0:5601->5601/tcp
dockerelk_logstash_1 /usr/local/bin/docker-entr ... Up 0.0.0.0:5044->5044/tcp, 0.0.0.0:5044->5044/udp, 0.0.0.0:9600->9600/tcp
ubuntu@master:~$ curl -u elastic:changeme http://localhost:9200
{
"name" : "9da3f6728c23",
"cluster_name" : "docker-cluster",
"cluster_uuid" : "SvBevg8qTyqBI5k9kaL2MQ",
"version" : {
"number" : "7.6.2",
"build_flavor" : "default",
"build_type" : "docker",
"build_hash" : "ef48eb35cf30adf4db14086e8aabd07ef6fb113f",
"build_date" : "2020-03-26T06:34:37.794943Z",
"build_snapshot" : false,
"lucene_version" : "8.4.0",
"minimum_wire_compatibility_version" : "6.8.0",
"minimum_index_compatibility_version" : "6.0.0-beta1"
},
"tagline" : "You Know, for Search"
}
ubuntu@master:~$ curl -L -O https://artifacts.elastic.co/downloads/beats/filebeat/filebeat-7.7.0-amd64.deb
% Total % Received % Xferd Average Speed Time Time Time Current
Dload Upload Total Spent Left Speed
100 26.8M 100 26.8M 0 0 23.8M 0 0:00:01 0:00:01 --:--:-- 23.8M
ubuntu@master:~$ sudo dpkg -i filebeat-7.7.0-amd64.deb
Selecting previously unselected package filebeat.
(Reading database ... 207642 files and directories currently installed.)
Preparing to unpack filebeat-7.7.0-amd64.deb ...
Unpacking filebeat (7.7.0) ...
Setting up filebeat (7.7.0) ...
Processing triggers for systemd (229-4ubuntu21.27) ...
Processing triggers for ureadahead (0.100.0-19.1) ...
ubuntu@master:/etc/filebeat$ sudo vi filebeat.yml
Commented out the following section:
########################
#-------------------------- Elasticsearch output ------------------------------
#output.elasticsearch:
# Array of hosts to connect to.
# hosts: ["localhost:9200"]
and added the following lines:
##########################
#----------------------------- Logstash output --------------------------------
output.logstash:
# The Logstash hosts
hosts: ["localhost:5044"]
###########################
filebeat.inputs:
# Each - is an input. Most options can be set at the input level, so
# you can use different inputs for various configurations.
# Below are the input specific configurations.
- type: log
# Change to true to enable this input configuration.
enabled: true
# Paths that should be crawled and fetched. Glob based paths.
paths:
#- /var/log/*.log
#- c:\programdata\elasticsearch\logs\*
- /srv/nfs/k8sdata/log/app/app-api-*/app.log
Now we need to change the Logstash input so that it can receive the content forwarded by Filebeat.
##############################################################################
For that, edit the logstash.conf file at ./logstash/pipeline/ inside the docker-elk folder.
ubuntu@master:~/docker-elk$ cat ./logstash/pipeline/logstash.conf
input {
beats {
port => 5044
}
}
## Add your filters / logstash plugins configuration here
filter {
grok {
match => { "message" => ["%{DATE_EU:date} %{TIME:logTime} *\[%{DATA:requestId}] %{LOGLEVEL:logLevel} %{NUMBER:processId} *\[%{DATA:threadName}] %{JAVACLASS:className} *\[%{DATA:origin}] :%{GREEDYDATA:message}"] }
}
}
output {
elasticsearch {
hosts => "elasticsearch:9200"
user => "elastic"
password => "changeme"
manage_template => false
index => "%{[@metadata][beat]}-%{[@metadata][version]}-%{+YYYY.MM.dd}"
}
}
ubuntu@master:~/docker-elk$ sudo docker-compose restart logstash
Restarting dockerelk_logstash_1 ... done
ubuntu@master:~/docker-elk$
ubuntu@master:~/docker-elk$ sudo docker exec -it dockerelk_logstash_1 sh
sh-4.2$
sh-4.2$
sh-4.2$ cat /usr/share/logstash/pipeline/logstash.conf
input {
beats {
port => 5044
}
}
## Add your filters / logstash plugins configuration here
filter {
grok {
match => { "message" => ["%{DATE_EU:date} %{TIME:logTime} *\[%{DATA:requestId}] %{LOGLEVEL:logLevel} %{NUMBER:processId} *\[%{DATA:threadName}] %{JAVACLASS:className} *\[%{DATA:origin}] :%{GREEDYDATA:message}"] }
}
}
output {
elasticsearch {
hosts => "elasticsearch:9200"
user => "elastic"
password => "changeme"
manage_template => false
index => "%{[@metadata][beat]}-%{[@metadata][version]}-%{+YYYY.MM.dd}"
}
}
Comments