This article walks through setting up EFK (Elasticsearch + Fluentd + Kibana) with docker-compose. If you don't have docker-compose installed yet, see my other article on installing docker-compose: https://www.jianshu.com/p/778af797f635
A look at the end result:

Directory structure:

.
├── docker-compose.yaml
└── fluentd
    ├── Dockerfile
    └── conf
        └── fluent.conf
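If you are starting from scratch, this layout can be created with two commands (the names match the tree above; the file contents follow in the sections below):

mkdir -p fluentd/conf
touch docker-compose.yaml fluentd/Dockerfile fluentd/conf/fluent.conf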
docker-compose.yaml:
version: '2'
services:
  web:
    image: httpd
    ports:
      - "1080:80" # mapped to 1080 to avoid clashing with the host's default port 80
    links:
      - fluentd
    logging:
      driver: "fluentd"
      options:
        fluentd-address: localhost:24224
        tag: httpd.access
  fluentd:
    build: ./fluentd
    volumes:
      - ./fluentd/conf/:/etc/fluent/
    links:
      - "elasticsearch"
    ports:
      - "24224:24224"
      - "24224:24224/udp"
  elasticsearch:
    image: elasticsearch:7.6.0
    environment:
      - discovery.type=single-node
    expose:
      - 9200
    ports:
      - "9200:9200"
  kibana:
    image: kibana:7.6.0
    links:
      - "elasticsearch"
    ports:
      - "5601:5601"
Dockerfile:
FROM fluent/fluentd:v1.3.2
#ADD fluent.conf /etc/fluent/  (not needed: the conf directory is mounted by docker-compose)

# gem needs root to install plugins into the image
USER root
# write a Gemfile pointing at the Tsinghua RubyGems mirror, then install bundler
RUN echo "source 'https://mirrors.tuna.tsinghua.edu.cn/rubygems/'" > Gemfile && gem install bundler
# install the Elasticsearch output plugin referenced in fluent.conf
#RUN gem install fluent-plugin-kafka -v 0.12.3 --no-document
RUN gem install fluent-plugin-elasticsearch -v 4.0.3 --no-document
CMD ["fluentd"]
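After changing the Dockerfile you can rebuild just the fluentd image and check that the plugin made it in. This is a sketch that assumes the image's entrypoint passes an arbitrary command through:

docker-compose build fluentd
docker-compose run --rm fluentd gem list fluent-plugin-elasticsearch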
fluent.conf:
<source>
  @type forward
  port 24224
  bind 0.0.0.0
</source>

<match *.**>
  @type copy
  <store>
    @type elasticsearch
    host elasticsearch
    port 9200
    logstash_format true
    logstash_prefix fluentd
    logstash_dateformat %Y%m%d
    include_tag_key true
    type_name access_log
    tag_key @log_name
    flush_interval 1s
  </store>
  <store>
    @type stdout
  </store>
</match>
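Once logs are flowing, the copy output lets you check both sides: the stdout store shows up in the fluentd container's logs, and the elasticsearch store creates daily indices named after logstash_prefix and logstash_dateformat (e.g. fluentd-20200301):

# tail the fluentd container's stdout copy of the events
docker-compose logs -f fluentd

# list Elasticsearch indices; a fluentd-YYYYMMDD index should appear
curl 'http://localhost:9200/_cat/indices?v'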
Nginx: configure forwarding to Kibana and add basic authentication

Generate the user credentials file /etc/ssl/certs/pwd.db (a sketch for creating it follows the two directives below) and reference it in the nginx config:
auth_basic "kibana";
auth_basic_user_file /etc/ssl/certs/pwd.db;
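One way to create pwd.db is with htpasswd from apache2-utils/httpd-tools; the openssl variant is an alternative if htpasswd isn't installed. The user name kibana_user is just an example:

# prompts for a password and writes the file
sudo htpasswd -c /etc/ssl/certs/pwd.db kibana_user

# alternative: hash a password with openssl and write the entry yourself
printf "kibana_user:%s\n" "$(openssl passwd -apr1)" | sudo tee /etc/ssl/certs/pwd.db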
server {
    listen 443 ssl;
    server_name jssasa.xyz;
    root /opt/lark;

    # ssl_certificate and ssl_certificate_key must also be set for listen 443 ssl
    # (omitted here)
    auth_basic "kibana";
    auth_basic_user_file /etc/ssl/certs/pwd.db;

    location / {
        proxy_pass http://172.31.24.87:5601;
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection 'upgrade';
        proxy_set_header Host $host;
        proxy_cache_bypass $http_upgrade;
    }
}
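After saving the config, validate it and reload nginx:

sudo nginx -t && sudo nginx -s reload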
