The previous pipeline was: Filebeat → Redis → Logstash → Elasticsearch.
The new pipeline is: Filebeat → Kafka (coordinated by ZooKeeper) → Logstash → Elasticsearch.
The ZooKeeper cluster is monitored through the web UI provided by ZK UI.
The Kafka cluster is monitored through the web UI provided by Kafka Eagle.
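Before pointing Filebeat at the brokers, make sure the ktopic topic exists (unless the brokers have auto.create.topics.enable turned on). Below is a minimal sketch using the kafka-python package, assuming it is installed and the three brokers listed later in filebeat.yml are reachable; the partition and replication counts are illustrative, not values from the original setup.
create_topic.py
# One-off helper to create the topic Filebeat will write to
# (sketch; assumes kafka-python is installed: pip install kafka-python)
from kafka.admin import KafkaAdminClient, NewTopic
from kafka.errors import TopicAlreadyExistsError

admin = KafkaAdminClient(
    bootstrap_servers=["localhost:9092", "localhost:9093", "localhost:9094"]
)

try:
    # 3 partitions / replication factor 3 are illustrative values
    admin.create_topics([NewTopic(name="ktopic", num_partitions=3, replication_factor=3)])
    print("topic ktopic created")
except TopicAlreadyExistsError:
    print("topic ktopic already exists")
finally:
    admin.close()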
filebeat.yml
# The rest of the configuration stays the same
# Previously the output went to Redis
#output.redis:
#  hosts: ["172.17.107.187:6370"]
#  key: log_messages
#  password: foobar2000
#  db: 0

# Now the output goes to Kafka
output.kafka:
  hosts: ["localhost:9092", "localhost:9093", "localhost:9094"]
  topic: ktopic
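To confirm that Filebeat is actually shipping events into Kafka, you can read a few messages back from ktopic. Here is a quick sketch with the kafka-python package (assumed installed); the broker address and topic name come from the config above.
check_topic.py
# Print a few events Filebeat has written to ktopic
# (sketch; assumes kafka-python is installed)
from kafka import KafkaConsumer

consumer = KafkaConsumer(
    "ktopic",
    bootstrap_servers=["localhost:9092"],
    auto_offset_reset="earliest",   # read from the beginning of the topic
    consumer_timeout_ms=10000,      # give up after 10s with no new messages
    value_deserializer=lambda v: v.decode("utf-8"),
)

for i, message in enumerate(consumer):
    print(message.value)            # Filebeat sends each event as a JSON string
    if i >= 4:                      # a handful of messages is enough for a sanity check
        break

consumer.close()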
logstash.conf
# Previously the input came from Redis
#input {
#  redis {
#    host => "172.17.107.187"
#    port => 6370
#    password => "foobar2000"
#    data_type => "list"
#    key => "log_messages"
#    db => 0
#  }
#}

# Now the input comes from Kafka
input {
  kafka {
    bootstrap_servers => "localhost:9092"
    auto_offset_reset => "latest"
    # ideally matches the number of partitions in the topic
    consumer_threads => 5
    # note: when topics_pattern is set, the topics option is ignored,
    # so ".*" subscribes to every topic rather than just ktopic
    topics_pattern => ".*"
    topics => ["ktopic"]
    decorate_events => true
    codec => json {
      charset => "UTF-8"
    }
  }
}
# The rest of the configuration stays the same
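Once Logstash is restarted, a quick end-to-end check is to list the indices in Elasticsearch and watch the document counts grow. Below is a small sketch using the requests package; the http://localhost:9200 address is an assumption, since the Elasticsearch endpoint is not shown in the configs above.
check_es.py
# List Elasticsearch indices to confirm events are arriving
# (sketch; the http://localhost:9200 address is assumed, adjust to your cluster)
import requests

resp = requests.get("http://localhost:9200/_cat/indices?v")
resp.raise_for_status()
print(resp.text)   # each line shows index name, doc count, store size, ...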