
CentOS ELK environment setup #40

Open 499689317 opened 1 year ago

499689317 commented 1 year ago

Install Java 8

  1. Remove any previously installed Java packages: rpm -aq | grep java | xargs rpm -e --nodeps
  2. Install OpenJDK 1.8.0 with yum: yum install -y java-1.8.0-openjdk.x86_64
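  A quick check confirms the JDK landed on the PATH (a minimal verification sketch; the exact build string will differ):

    java -version                 # should report: openjdk version "1.8.0_..."
    readlink -f $(which java)     # shows the JRE path yum installed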

Install Logstash

  1. Download the package: curl -L -O https://artifacts.elastic.co/downloads/logstash/logstash-7.3.0.tar.gz
  2. Extract it: tar -xzvf logstash-7.3.0.tar.gz
  3. Start Logstash with a minimal stdin/stdout pipeline to confirm it works:
    ./bin/logstash -e 'input { stdin {} } output { stdout {} }'
  4. Edit the pipeline config: vi config/logstash.conf (the full pipeline is in the comment below; see the run sketch after this list)
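  With config/logstash.conf in place, the config can be validated and run from the install directory; a minimal sketch, assuming the same relative path as step 4:

    ./bin/logstash -f config/logstash.conf --config.test_and_exit     # syntax check only, then exit
    ./bin/logstash -f config/logstash.conf --config.reload.automatic  # run and pick up config edits without a restart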

Install Filebeat via rpm

  1. Download the rpm package: curl -L -O https://artifacts.elastic.co/downloads/beats/filebeat/filebeat-5.1.1-x86_64.rpm
  2. Install it: sudo rpm -vi filebeat-5.1.1-x86_64.rpm
  3. Start Filebeat: sudo /etc/init.d/filebeat start
  4. Edit the config: sudo vi /etc/filebeat/filebeat.yml (the full config is in the comment below)
  5. Check the runtime log: cat /var/log/filebeat/filebeat
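  On CentOS the rpm registers Filebeat as a SysV init service, so it can also be enabled at boot and inspected with the usual tools (a sketch, assuming chkconfig/service are available on this box):

    sudo chkconfig filebeat on          # start Filebeat on boot
    sudo service filebeat status        # confirm it is running
    tail -f /var/log/filebeat/filebeat  # follow the runtime log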
499689317 commented 1 year ago

Logstash configuration

input {
  # Read events from Kafka
  kafka {
    bootstrap_servers => "127.0.0.1:9092"
    topics => "filebeat-topic"
    group_id => "filebeat-group"
    decorate_events => true
    consumer_threads => 1
    auto_offset_reset => "earliest"
    codec => "json"
  }
}

filter {
  # Extract each log line's timestamp into the logtimefix field
  grok {
    match => { "message" => "(?<logtimefix>(\d{4}\/\d{1,2}\/\d{1,2} \d{1,2}:\d{1,2}:\d{1,2}))" }
  }
  # Overwrite the @timestamp field with the parsed log time
  date {
    match => ["logtimefix", "yyyy/MM/dd HH:mm:ss"]
    target => "@timestamp"
    remove_field => ["logtimefix"]
  }
  # Drop fields that are not needed
  mutate {
    remove_field => ["beat", "offset", "input_type",  "@version", "source"]
  }
}
# Output logs to Elasticsearch
output {
  elasticsearch {
    hosts => "127.0.0.1:9002"
    index => "logstash-%{type}-%{+YYYY.MM.dd}"
    codec => "json"
    ilm_enabled => false
    user => "admin"
    password => "password"
  }
  # Also write to stdout for debugging
  stdout {}
}
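Once the pipeline is running, the daily indices can be checked against the same Elasticsearch endpoint and credentials used in the output block above (a verification sketch; host, port, user, and the push-test type are taken from this config and the Filebeat config below, and may differ in your setup):

  curl -u admin:password 'http://127.0.0.1:9002/_cat/indices/logstash-*?v'
  curl -u admin:password 'http://127.0.0.1:9002/logstash-push-test-*/_search?size=1&pretty'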
499689317 commented 1 year ago

Filebeat configuration


filebeat.prospectors:
- type: log
  enabled: true
  encoding: utf-8
  paths:
    - /home/ec2-user/var/log/push*
  document_type: push-test
- type: log
  enabled: true
  encoding: utf-8
  paths:
    - /home/ec2-user/var/log/run*
  document_type: run-test
#-------------------------- Elasticsearch output ------------------------------
#output.elasticsearch:
  # Array of hosts to connect to.
  #hosts: ["localhost:9200"]

  # Optional protocol and basic auth credentials.
  #protocol: "https"
  #username: "elastic"
  #password: "changeme"

#----------------------------- Logstash output --------------------------------
#output.logstash:
  # The Logstash hosts
  #hosts: ["localhost:5044"]

  # Optional SSL. By default is off.
  # List of root certificates for HTTPS server verifications
  #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"]

  # Certificate for SSL client authentication
  #ssl.certificate: "/etc/pki/client/cert.pem"

  # Client Certificate Key
  #ssl.key: "/etc/pki/client/cert.key"

output.kafka:
  enabled: true
  hosts: ["127.0.0.1:9092"]
  topic: "filebeat-topic"
  partition.hash:
    reachable_only: true
  compression: gzip
  max_message_bytes: 1000000
  required_acks: 1
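
Before pointing Logstash at the topic, it is worth confirming events actually arrive in Kafka; a minimal sketch using Kafka's console tools (assuming a local Kafka install run from its installation directory, and Kafka 2.2+ for the --bootstrap-server flag on kafka-topics.sh):

  ./bin/kafka-topics.sh --bootstrap-server 127.0.0.1:9092 --list
  ./bin/kafka-console-consumer.sh --bootstrap-server 127.0.0.1:9092 --topic filebeat-topic --from-beginning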