环境配置参考 ELK环境配置
需要在三台机器上全部部署
安装logstash
# Unpack logstash into /opt/elk explicitly (-C), then move it into place.
# The original tar line extracted into the *current* directory while the mv
# expected the tree under /opt/elk — it only worked if you happened to be
# cd'ed into /opt/elk. Pinning the target with -C removes that assumption.
tar zxf logstash-5.5.2.tar.gz -C /opt/elk
mv /opt/elk/logstash-5.5.2 /usr/local/logstash
配置logstash
-
配置broker (可以不用配置,使用filebeat上传数据流至kafka)
cd /usr/local/logstash/config vim beat_to_kafka.conf input { beats { port => 5044 } } filter { } # topic_id改成按beat中配置的document_type来输出到不同的topic中, 供kibana分组过滤用 output { kafka { bootstrap_servers => "10.8.189.101:9092,10.8.189.102:9092,10.8.189.103:9092" topic_id => '%{[type]}' } }
-
配置indexer集群
cd /usr/local/logstash/config vim kafka_to_es.conf input { kafka { bootstrap_servers => "10.8.189.101:9092,10.8.189.102:9092,10.8.189.103:9092" group_id => "logstash" topics => ["drds-sql","drds-slow","sc_user","sc_channel","sc_order","sc_inventory","sc_message","sc_file","sc_marketing","rms",'scm','engineering'] consumer_threads => 50 decorate_events => true } } filter { } output { elasticsearch { hosts => ["10.8.189.101:9200","10.8.189.102:9200","10.8.189.103:9200"] index => "logstash-%{+YYYY.MM.dd.hh}" manage_template => true template_overwrite => true template_name => "drdsLogstash" flush_size => 50000 idle_flush_time => 10 } }
启动logstash
# Start logstash in the background. Redirect BOTH stdout and stderr (2>&1) —
# the original only redirected stdout, so startup errors were silently lost.
# The beat_to_kafka pipeline is optional (filebeat can ship directly to kafka),
# hence it stays commented out.
### /usr/local/logstash/bin/logstash -f /usr/local/logstash/config/beat_to_kafka.conf > /var/log/beat_to_kafka.log 2>&1 &
/usr/local/logstash/bin/logstash -f /usr/local/logstash/config/kafka_to_es.conf > /var/log/kafka_to_es.log 2>&1 &
# When running MORE THAN ONE logstash instance on the same host, give each its
# own --path.data; otherwise the second instance fails to acquire the lock on
# the shared default data directory.
### /usr/local/logstash/bin/logstash -f /usr/local/logstash/config/beat_to_kafka.conf --path.data /home/logstash_data/beat_to_kafka > /var/log/beat_to_kafka.log 2>&1 &
/usr/local/logstash/bin/logstash -f /usr/local/logstash/config/kafka_to_es.conf --path.data /home/logstash_data/kafka_to_es > /var/log/kafka_to_es.log 2>&1 &
启动多个配置文件
# Start multiple pipeline configs at once: create a conf.d directory and place
# every config file to be started into it.
mkdir -p /usr/local/logstash/conf.d
# NOTE(review): in logstash 5.x, pointing -f at a directory concatenates ALL
# files into ONE pipeline — events pass through every filter/output unless the
# configs guard with conditionals (e.g. on [type]). Confirm this is intended.
# 2>&1 added so startup errors are not lost (original dropped stderr).
/usr/local/logstash/bin/logstash -f /usr/local/logstash/conf.d --path.data /home/logstash_data/kafka_to_es > /var/log/kafka_to_es.log 2>&1 &