Building an ELK Logging Stack

Directory structure

root@ubuntu:~/log-collect/ELK# tree
├── docker-compose.yml
├── .env
├── elasticsearch
│   ├── data
│   └── Dockerfile
├── filebeat
│   ├── config
│   │   └── filebeat.yml
│   └── Dockerfile
├── kibana
│   ├── data
│   ├── Dockerfile
│   └── kibana.yml
└── logstash
    ├── config
    │   ├── log4j2.properties
    │   ├── logstash.yml
    │   └── pipelines.yml
    ├── default_config
    │   ├── 02-beats-input.conf
    │   ├── 10-syslog-filter.conf
    │   └── 30-elasticsearch-output.conf
    ├── Dockerfile
    └── pipeline
        └── logstash.conf

Create the directories

mkdir -p ELK/elasticsearch/data
mkdir -p ELK/kibana/data
mkdir -p ELK/filebeat/config
mkdir -p ELK/logstash/config
mkdir -p ELK/logstash/pipeline

elasticsearch

Dockerfile

ARG ELK_VERSION

FROM docker.elastic.co/elasticsearch/elasticsearch:${ELK_VERSION}

filebeat

Dockerfile

ARG ELK_VERSION

FROM docker.elastic.co/beats/filebeat:${ELK_VERSION}

# Home path: [/usr/share/filebeat]
# Config path: [/usr/share/filebeat]
# Data path: [/usr/share/filebeat/data]
# Logs path: [/usr/share/filebeat/logs]

filebeat.yml

filebeat.autodiscover:
  providers:
    - type: docker
      labels.dedot: true
      hints.enabled: true
      templates:
        - condition:
            contains:
              container.labels.collect_logs_with_filebeat: "true" # label set in the service's docker-compose file
              docker.container.name: "test_golang_app_2"
          config:
            - type: container
              format: docker # auto, docker, cli
              # stream: stdout # all, stdout, stderr
              # containers.ids:
              #   - "${data.docker.container.id}"
              paths:
                - "/var/lib/docker/containers/${data.docker.container.id}/*.log"

filebeat.config.modules:
  path: ${path.config}/modules.d/*.yml
  reload.enabled: false

setup.template.settings:
  index.number_of_shards: 1

setup.kibana:
  host: "kibana:5601"

output.elasticsearch:
  enabled: false
  hosts: ["elasticsearch:9200"]
  username: "elastic"
  password: "elastic"

output.logstash:
  enabled: true
  hosts: ["logstash:5044"]

processors:
  - drop_fields:
      fields: ["agent.ephemeral_id", "agent.hostname", "agent.id", "agent.name", "agent.version", "docker.container.labels.com_docker_compose_version", "ecs.version", "host.name", "input.type", "kubernetes.container.image", "log.offset", "docker.container.labels.com_docker_compose_project_working_dir", "log.file.path"]
      ignore_missing: false

monitoring.enabled: false
logging.metrics.enabled: false
logging.level: debug
logging.selectors: ["*"]
logging.to_files: true
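
Only containers that satisfy the condition above get their logs collected. As an illustrative sketch (the container name comes from the condition; the image and the elk_elk-nginx network name, which Compose derives from the project directory, are assumptions), a target service could be started like so:

# hypothetical target container carrying the label the autodiscover condition matches on
docker run -d --name test_golang_app_2 \
  --label collect_logs_with_filebeat=true \
  --network elk_elk-nginx \
  mingrammer/flog -f json -l -d 1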

kibana

Dockerfile

ARG ELK_VERSION

FROM docker.elastic.co/kibana/kibana:${ELK_VERSION}

kibana.yml

#
# ** THIS IS AN AUTO-GENERATED FILE **
#

# Default Kibana configuration for docker target
server.name: kibana
server.host: "0"
# server.basePath: "/efk"
elasticsearch.hosts: [ "http://elasticsearch:9200" ]
monitoring.ui.container.elasticsearch.enabled: true
i18n.locale: "zh-CN"
elasticsearch.username: "kibana_system"
elasticsearch.password: "kibana_system"

# To serve Kibana behind nginx under a base path:
# $ vim nginx.conf
#   location /efk/ {
#       proxy_pass http://192.168.1.11:5601/;
#   }

# Then configure Kibana to match:
# $ vim ./kibana/kibana.yml
# server.basePath: "/efk"
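
Once the stack is running, Kibana's status API gives a quick health check; for example (using the elastic superuser credentials as configured elsewhere in this post):

curl -u elastic:elastic http://127.0.0.1:5601/api/status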

logstash

Dockerfile

ARG ELK_VERSION

# https://www.docker.elastic.co/
FROM docker.elastic.co/logstash/logstash:${ELK_VERSION}

# Add your logstash plugins setup here
# Example: RUN logstash-plugin install logstash-filter-json

# CMD ["/usr/share/logstash/bin/logstash", "--path.settings", "/etc/logstash", "-t"]

logstash.yml

---
## Default Logstash configuration from Logstash base image.
## https://github.com/elastic/logstash/blob/master/docker/data/logstash/config/logstash-full.yml
#
http.host: "0.0.0.0"
# http.host: "logstash_cm" # :5000, 5044 - fatal! SocketError: initialize: name or service not known

xpack.monitoring.elasticsearch.hosts: [ "http://elasticsearch:9200" ]

## X-Pack security credentials
#
xpack.monitoring.enabled: true
xpack.monitoring.elasticsearch.username: elastic
xpack.monitoring.elasticsearch.password: elastic
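
Besides shipping monitoring data to Elasticsearch, the Logstash node can be inspected directly through its own monitoring API on port 9600 (published in docker-compose.yml below); a quick probe once the containers are up:

curl -s "http://127.0.0.1:9600/_node/pipelines?pretty"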

pipelines.yml

- pipeline.id: main
  # pipeline.workers: 1
  # pipeline.batch.size: 1
  path.config: "/usr/share/logstash/pipeline/logstash.conf"

log4j2.properties


status = error
name = LogstashPropertiesConfig

appender.console.type = Console
appender.console.name = plain_console
appender.console.layout.type = PatternLayout
appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c]%notEmpty{[%X{pipeline.id}]}%notEmpty{[%X{plugin.id}]} %m%n

appender.json_console.type = Console
appender.json_console.name = json_console
appender.json_console.layout.type = JSONLayout
appender.json_console.layout.compact = true
appender.json_console.layout.eventEol = true

rootLogger.level = ${sys:ls.log.level}
rootLogger.appenderRef.console.ref = ${sys:ls.log.format}_console

logstash.conf

input {
  beats {
    # ssl => false # port 5015
    port => 5044
  }
  tcp {
    port => 5001
  }
}

filter {
  mutate {
    add_tag => [ "logstash_filter_applied" ]
  }
  # mutate {
  #   add_field => [ "%{@agent.type}", "%{@container.name}", "%{@log.file.path}", "%{@message}", "%{@stream}" ]
  # }
  # grok {
  #   match => { "message" => ["%{IPORHOST:remote_ip} - %{DATA:user_name} \[%{HTTPDATE:access_time}\] \"%{WORD:http_method} %{DATA:url} HTTP/%{NUMBER:http_version}\" %{NUMBER:response_code} %{NUMBER:body_sent_bytes} \"%{DATA:referrer}\" \"%{DATA:agent}\""] }
  #   remove_field => "message"
  # }

  if "beats_input_codec_plain_applied" in [tags] {
    mutate {
      remove_tag => ["beats_input_codec_plain_applied"]
    }
  }
}

output {
  elasticsearch {
    hosts => "elasticsearch:9200"
    user => "elastic"
    password => "elastic"

    index => "from_logstash-%{[@metadata][beat]}-%{+YYYY.MM.dd}"

    ecs_compatibility => disabled
    # sniffing => true
    # manage_template => true
  }
}
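
The extra tcp input on port 5001 makes it easy to smoke-test the pipeline without going through Filebeat. A minimal probe, assuming netcat is installed on the host and port 5001 is published as in docker-compose.yml below:

echo 'hello from the tcp input' | nc -w1 127.0.0.1 5001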

docker-compose.yml

version: "3.7"
services:
  flog:
    image: mingrammer/flog
    container_name: flog
    command: "-f json -l -d 1"
    networks:
      - elk-nginx

  elasticsearch:
    image: elasticsearch:${ELK_VERSION}
    container_name: elasticsearch_${ELK_VERSION}
    build:
      context: ./elasticsearch
      args:
        ELK_VERSION: $ELK_VERSION
    mem_limit: 1G
    ports:
      - "9200:9200"
    expose:
      - 9200
    volumes:
      - ./elasticsearch/data:/usr/share/elasticsearch/data
    environment:
      - discovery.type=single-node
      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
      - xpack.security.enabled=true
    networks:
      - elk-nginx

  kibana:
    image: kibana:${ELK_VERSION}
    container_name: kibana_${ELK_VERSION}
    build:
      context: ./kibana
      args:
        ELK_VERSION: $ELK_VERSION
    mem_limit: 1G
    volumes:
      - ./kibana/kibana.yml:/usr/share/kibana/config/kibana.yml
      - ./kibana/data:/usr/share/kibana/data
    links:
      - "elasticsearch"
    ports:
      - "5601:5601"
    networks:
      - elk-nginx

  logstash:
    image: logstash:${ELK_VERSION}
    container_name: logstash_${ELK_VERSION}
    build:
      context: ./logstash
      args:
        ELK_VERSION: $ELK_VERSION
    volumes:
      - ./logstash/config:/usr/share/logstash/config
      - ./logstash/pipeline:/usr/share/logstash/pipeline
    environment:
      - STDOUT=true
      - "LS_JAVA_OPTS=-Xms512m -Xmx512m"
    links:
      - elasticsearch
    depends_on:
      - elasticsearch
      - kibana
    ports:
      - "5044:5044" # Logstash Beats [filebeat] input
      - "5001:5001/tcp" # Logstash TCP input
      - "5001:5001/udp" # Logstash UDP input
      - "9600:9600" # Logstash monitoring API
    networks:
      - elk-nginx
    labels:
      co.elastic.logs/enabled: "false" # for Filebeat

  filebeat:
    image: filebeat:${ELK_VERSION}
    container_name: filebeat_${ELK_VERSION}
    build:
      context: filebeat/
      args:
        ELK_VERSION: $ELK_VERSION
    entrypoint: filebeat -e -strict.perms=false
    user: root
    volumes:
      - ./filebeat/config/filebeat.yml:/usr/share/filebeat/filebeat.yml
      - /var/lib/docker/containers:/var/lib/docker/containers:ro
      - /var/run/docker.sock:/var/run/docker.sock:ro
    depends_on:
      - elasticsearch
      - logstash
      - kibana
    networks:
      - elk-nginx
    labels:
      co.elastic.logs/enabled: "false"

networks:
  elk-nginx:
    driver: bridge

.env

ELK_VERSION=7.16.3

Fix file permissions

chmod -R 777 ./
chmod 755 filebeat/config/filebeat.yml
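
With the directory tree, configs, and permissions in place, build the images and start the stack (docker-compose v1 syntax, matching the version: "3.7" compose file):

docker-compose up -d --build
curl -s http://127.0.0.1:9200 # with xpack.security.enabled=true this returns a 401 until the passwords below are set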

Configure password access

Elasticsearch security authentication

# Reset the built-in users' passwords. auto: generate random passwords; interactive: set each one manually
docker exec -it elasticsearch_7.16.3 bash
./bin/elasticsearch-setup-passwords interactive

--- Please confirm that you would like to continue [y/N]y

Enter password for [elastic]: <-- enter a password for the elastic user>
Reenter password for [elastic]: <-- repeat the password>
Enter password for [apm_system]: <-- enter a password for the apm_system user>
Reenter password for [apm_system]: <-- repeat the password>
Enter password for [kibana_system]: <-- enter a password for the kibana_system user>
Reenter password for [kibana_system]: <-- repeat the password>
Enter password for [logstash_system]: <-- enter a password for the logstash_system user>
Reenter password for [logstash_system]: <-- repeat the password>
Enter password for [beats_system]: <-- enter a password for the beats_system user>
Reenter password for [beats_system]: <-- repeat the password>
Enter password for [remote_monitoring_user]: <-- enter a password for the remote_monitoring_user user>
Reenter password for [remote_monitoring_user]: <-- repeat the password>
Changed password for user [apm_system]
Changed password for user [kibana_system]
Changed password for user [logstash_system]
Changed password for user [beats_system]
Changed password for user [remote_monitoring_user]
Changed password for user [elastic]

exit
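
A single user's password can also be changed afterwards through the security REST API; for example, resetting the elastic user's password: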
curl -H "Content-Type:application/json" -XPOST -u elastic 'http://127.0.0.1:9200/_security/user/elastic/_password' -d '{ "password" : "123456" }'
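
If the call succeeds, the new credentials can be verified against the authenticate endpoint (assuming the 123456 password from the request above):

curl -u elastic:123456 http://127.0.0.1:9200/_security/_authenticate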