前两节已经成功完成 EK(Elasticsearch + Kibana)的搭建,还剩最后一个日志上传的功能
依次执行如下命令
cd /home/elk
wget https://artifacts.elastic.co/downloads/logstash/logstash-6.2.4.tar.gz
tar zxvf logstash-6.2.4.tar.gz
mv logstash-6.2.4 logstash
创建配置文件
cd logstash/data
vi logstash-simple.conf
监听8082端口
# Test pipeline: accepts events typed on stdin (for interactive smoke
# testing) and events arriving on TCP port 8082 as newline-delimited
# JSON, then indexes everything into the local Elasticsearch and also
# pretty-prints each event to the console.
input {
# Interactive input so the pipeline can be tested from the keyboard.
stdin { }
tcp {
# Port for log shippers (e.g. a logback/log4j TCP appender) to connect to.
port => 8082
# Each line received is decoded as one JSON document.
codec => json_lines
}
}
output {
elasticsearch {
# Local single-node Elasticsearch from the earlier sections.
hosts => ["127.0.0.1:9200"]
# Target index name.
# NOTE(review): "myfistlog" looks like a typo for "myfirstlog" — confirm
# before reusing; this literal becomes the Elasticsearch index name.
index => "myfistlog"
}
# Echo every event to stdout in human-readable form for debugging.
stdout { codec => rubydebug }
}
如果搭配filebeat使用,配置如下
# Filebeat pipeline: receives events shipped by Filebeat on port 8082,
# JSON-parses the payload of entries tagged logtype=claimzuul, and
# indexes them into a daily Elasticsearch index.
input {
beats {
# Filebeat's output.logstash section must point at this host:port.
port => 8082
}
}
filter {
# Only process entries whose Filebeat "fields.logtype" is "claimzuul"
# (set in filebeat.yml under the prospector's "fields" section).
if [fields][logtype] == "claimzuul" {
json {
# Parse the raw log line (expected to be JSON) ...
source => "message"
# ... and store the parsed result under the "data" field.
target => "data"
}
}
}
output {
if [fields][logtype] == "claimzuul"{
elasticsearch {
hosts => ["127.0.0.1:9200"]
# One index per day, e.g. "claimzuul-2018.12.27".
index => "claimzuul-%{+YYYY.MM.dd}"
}
}
}
启动
[root@insure bin]# ./logstash -f /home/elk/logstash/data/logstash-simple.conf & Sending Logstash's logs to /home/elk/logstash/logs which is now configured via log4j2.properties [2018-12-27T15:08:00,686][INFO ][logstash.modules.scaffold] Initializing module {:module_name=>"netflow", :directory=>"/home/elk/logstash/modules/netflow/configuration"} [2018-12-27T15:08:00,706][INFO ][logstash.modules.scaffold] Initializing module {:module_name=>"fb_apache", :directory=>"/home/elk/logstash/modules/fb_apache/configuration"} [2018-12-27T15:08:00,777][INFO ][logstash.setting.writabledirectory] Creating directory {:setting=>"path.queue", :path=>"/home/elk/logstash/data/queue"} [2018-12-27T15:08:00,780][INFO ][logstash.setting.writabledirectory] Creating directory {:setting=>"path.dead_letter_queue", :path=>"/home/elk/logstash/data/dead_letter_queue"} [2018-12-27T15:08:01,163][WARN ][logstash.config.source.multilocal] Ignoring the 'pipelines.yml' file because modules or command line options are specified [2018-12-27T15:08:01,195][INFO ][logstash.agent ] No persistent UUID file found. 
Generating new UUID {:uuid=>"d4f319cf-97a7-4f90-83c6-8e62e8205fa5", :path=>"/home/elk/logstash/data/uuid"} [2018-12-27T15:08:01,697][INFO ][logstash.runner ] Starting Logstash {"logstash.version"=>"6.2.4"} [2018-12-27T15:08:02,151][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600} [2018-12-27T15:08:04,880][INFO ][logstash.pipeline ] Starting pipeline {:pipeline_id=>"main", "pipeline.workers"=>8, "pipeline.batch.size"=>125, "pipeline.batch.delay"=>50} [2018-12-27T15:08:05,359][INFO ][logstash.outputs.elasticsearch] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://127.0.0.1:9200/]}} [2018-12-27T15:08:05,371][INFO ][logstash.outputs.elasticsearch] Running health check to see if an Elasticsearch connection is working {:healthcheck_url=>http://127.0.0.1:9200/, :path=>"/"} [2018-12-27T15:08:05,569][WARN ][logstash.outputs.elasticsearch] Restored connection to ES instance {:url=>"http://127.0.0.1:9200/"} [2018-12-27T15:08:05,624][INFO ][logstash.outputs.elasticsearch] ES Output version determined {:es_version=>6} [2018-12-27T15:08:05,628][WARN ][logstash.outputs.elasticsearch] Detected a 6.x and above cluster: the `type` event field won't be used to determine the document _type {:es_version=>6} [2018-12-27T15:08:05,679][INFO ][logstash.outputs.elasticsearch] Using mapping template from {:path=>nil} [2018-12-27T15:08:05,698][INFO ][logstash.outputs.elasticsearch] Attempting to install template {:manage_template=>{"template"=>"logstash-*", "version"=>60001, "settings"=>{"index.refresh_interval"=>"5s"}, "mappings"=>{"_default_"=>{"dynamic_templates"=>[{"message_field"=>{"path_match"=>"message", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false}}}, {"string_fields"=>{"match"=>"*", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false, "fields"=>{"keyword"=>{"type"=>"keyword", "ignore_above"=>256}}}}}], "properties"=>{"@timestamp"=>{"type"=>"date"}, 
"@version"=>{"type"=>"keyword"}, "geoip"=>{"dynamic"=>true, "properties"=>{"ip"=>{"type"=>"ip"}, "location"=>{"type"=>"geo_point"}, "latitude"=>{"type"=>"half_float"}, "longitude"=>{"type"=>"half_float"}}}}}}}} [2018-12-27T15:08:05,729][INFO ][logstash.outputs.elasticsearch] Installing elasticsearch template to _template/logstash [2018-12-27T15:08:05,914][INFO ][logstash.outputs.elasticsearch] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>["//127.0.0.1:9200"]} [2018-12-27T15:08:05,993][INFO ][logstash.pipeline ] Pipeline started successfully {:pipeline_id=>"main", :thread=>"#<Thread:0x18db84e4 run>"} The stdin plugin is now waiting for input: [2018-12-27T15:08:06,077][INFO ][logstash.agent ] Pipelines running {:count=>1, :pipelines=>["main"]}