Deploying Logstash with Helm


Official Helm chart repository:

https://helm.elastic.co
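
Add the chart repository first (the repo alias "elastic" is just a conventional name):

helm repo add elastic https://helm.elastic.co
helm repo update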

 

Logs are pulled in from a Kafka message queue. The chart values below (values.yaml) configure two Kafka inputs, the JSON/grok filters, and the Elasticsearch output.

 

---
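# Values for the official elastic/logstash chart (https://helm.elastic.co)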
replicas: 2
extraEnvs:
  - name: TZ 
    value: Asia/Shanghai
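# Entries under logstashConfig are mounted as files under /usr/share/logstash/config/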
logstashConfig:
  patterns.conf: |
    JAVA_TIME 20%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:?%{MINUTE}(?::?%{SECOND})
    COMMONAPACHELOGS %{IPORHOST:clientip} %{USER:ident} %{USER:auth} \[%{HTTPDATE:local_time}\] "(?:%{WORD:verb} %{NOTSPACE:request}(?: HTTP/%{NUMBER:httpversion})?|%{DATA:rawrequest})" %{NUMBER:response} (?:%{NUMBER:bytes}|-)
    COMBINEDAPACHELOGS %{COMMONAPACHELOGS} %{QS:referrer} %{QS:agent}
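  # Main Logstash settings; self-monitoring data is shipped to Elasticsearch as the logstash_system user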
  logstash.yml: |
    http.host: 0.0.0.0
    xpack.monitoring.enabled: true
    xpack.monitoring.elasticsearch.username: logstash_system
    xpack.monitoring.elasticsearch.password: "****"
    xpack.monitoring.elasticsearch.hosts: ["elasticsearch-master.elasticsearch.svc.cluster.local:9200"]
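# Pipeline definitions under logstashPipeline are mounted under /usr/share/logstash/pipeline/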
logstashPipeline:
  logstash.conf: |
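    # Two Kafka consumers, one per topic family; the log shipper is assumed
    # to publish JSON events to topics matching nginx_* and java_*.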
    input{
        kafka{
            bootstrap_servers => ["***.***.***.***:9092,***.***.***.***:9092,***.***.***.***:9092"]
            topics_pattern => "nginx_.*"
            client_id => "${HOSTNAME}-nginx"
            type => "nginx"
        }
    }
     
    input{
        kafka{
            bootstrap_servers => ["***.***.***.***:9092,***.***.***.***:9092,***.***.***.***:9092"]
            topics_pattern => "java_.*"
            client_id => "${HOSTNAME}-java"
            type => "java"
        }
    }
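    # Parse the JSON wrapper from the shipper, then grok the raw message per log type.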
     
    filter {
        if [type] == "java" {
            json {
                source => "message"
                remove_field => [ "beat","stream","offset","source","prospector" ]
            }
            grok {
                patterns_dir => "/usr/share/logstash/config/patterns.conf"
                match => { "message" => "(%{JAVA_TIME:local_time} \[%{JAVALOGMESSAGE:thread}\] %{LOGLEVEL:level} %{JAVALOGMESSAGE:java_class} - %{JAVALOGMESSAGE}|%{LOGLEVEL:level} %{JAVA_TIME:local_time} %{JAVALOGMESSAGE:thread} %{JAVALOGMESSAGE:java_class} : %{JAVALOGMESSAGE})" }
            }
        }
 
        if [type] == "nginx" {
            json {
                source => "message"
                remove_field => [ "beat","stream","offset","source","prospector" ]
            }
            grok {
                patterns_dir => "/usr/share/logstash/config/patterns.conf"
                match => { "message" => "%{COMBINEDAPACHELOGS}" }
            }
        }
         
        # Normalize local_time into @timestamp; the nginx HTTPDATE month
        # abbreviations are English, so the locale must be "en" (not "cn").
        date {
            locale => "en"
            match => [ "local_time", "ISO8601", "dd/MMM/yyyy:HH:mm:ss Z" ]
            remove_field => [ "local_time" ]
        }
    }
     
    output {
        elasticsearch {
            hosts => ["elasticsearch-master.elasticsearch.svc.cluster.local:9200"]
            index => "%{index}-%{topic}-%{+YYYY-MM-dd}"
            user => "elastic"
            password => "***"
        }
    }
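# Per-pod PersistentVolumeClaim template for the Logstash data directory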
 
volumeClaimTemplate:
  accessModes: [ "ReadWriteOnce" ]
  #storageClassName: "cloud-ssd"
  resources:
    requests:
      storage: 20Gi
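# JVM heap: keep -Xms and -Xmx equal and comfortably below the container memory limit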
       
logstashJavaOpts: "-Xmx2g -Xms2g"
 
resources:
  requests:
    cpu: "1000m"
    memory: "1536Mi"
  limits:
    cpu: "8000m"
    memory: "2536Mi"
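
With the values above saved as values.yaml (the file name, release name, and namespace below are placeholders), installation is roughly:

helm install logstash elastic/logstash -f values.yaml -n logging --create-namespace
kubectl -n logging get pods -w

If needed, pin the chart with --version so it matches your Elasticsearch stack version.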

 
