docker-compose 搭建elk (单机版)

1. 编辑 docker-compose.yml 文件


version: '3.2'

services:

    elasticsearch:
        image: elasticsearch:7.17.4
        container_name: elasticsearch
        restart: always
        volumes:
            # Sync the container clock with the host (read-only is sufficient).
            - /etc/localtime:/etc/localtime:ro
            - /你的目录/elk/elasticsearch/plugins:/usr/share/elasticsearch/plugins # plugin directory
            - /你的目录/elk/elasticsearch/data:/usr/share/elasticsearch/data # data directory
            - '/你的目录/elk/elasticsearch/config/elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml' # main config
        ports:
            - '9200:9200' # HTTP API
            - '9300:9300' # transport
        environment:
            - 'cluster.name=elasticsearch' # cluster name
            - 'discovery.type=single-node' # single-node mode, no cluster discovery
            # Elasticsearch requires Xms == Xmx (the original -Xms1024m -Xmx2048m
            # violates that recommendation and causes heap resizing pauses).
            - 'ES_JAVA_OPTS=-Xms1024m -Xmx1024m'
        networks:
            - elk

    logstash:
        image: logstash:7.17.4
        container_name: logstash
        restart: always
        volumes:
            - /etc/localtime:/etc/localtime:ro
            - '/你的目录/elk/logstash/pipelines.yml:/usr/share/logstash/config/pipelines.yml'
            - '/你的目录/elk/logstash/logstash.yml:/usr/share/logstash/config/logstash.yml'
            # Container-side names must match path.config in pipelines.yml
            # (the original mounted logstash-audit.conf / logstash-user-action.conf,
            # which pipelines.yml never references, so no pipeline could start).
            - '/你的目录/elk/logstash/piao-one.conf:/usr/share/logstash/pipeline/piao-one.conf'
            - '/你的目录/elk/logstash/piao-two.conf:/usr/share/logstash/pipeline/piao-two.conf'
            - /你的目录/laravel/storage/logs:/piao_1 # project 1 logs
            - /你的目录/laravel-two/storage/logs:/piao_2 # project 2 logs
        ports:
            - '5044:5044' # beats input
            - '50000:50000/tcp'
            - '50000:50000/udp'
            - '9600:9600' # monitoring API
        environment:
            LS_JAVA_OPTS: '-Xms1024m -Xmx1024m'
            TZ: 'Asia/Shanghai'
            MONITORING_ENABLED: 'false'
        links:
            # Legacy alias so "es" also resolves to elasticsearch; services on the
            # same user-defined network already resolve each other by service name.
            - elasticsearch:es
        networks:
            - elk
        depends_on:
            - elasticsearch

    kibana:
        image: kibana:7.17.4
        container_name: kibana
        restart: always
        volumes:
            - /etc/localtime:/etc/localtime:ro
            - /你的目录/elk/kibana/conf/kibana.yml:/usr/share/kibana/config/kibana.yml
        ports:
            - '5601:5601' # Kibana web UI
        links:
            - elasticsearch:es
        environment:
            # Kibana 7.x reads ELASTICSEARCH_HOSTS; the original used
            # ELASTICSEARCH_URL (6.x only) plus a bare "elasticsearch.hosts=..."
            # entry, which is not a valid environment variable and is ignored.
            - ELASTICSEARCH_HOSTS=http://elasticsearch:9200
            - I18N_LOCALE=zh-CN # Chinese UI
        networks:
            - elk
        depends_on:
            - elasticsearch

networks:
    elk:
        name: elk
        driver: bridge


/etc/localtime:/etc/localtime 主要目的是将 Docker 容器中的时间与宿主机同步

2. 按照 docker-compose.yml 里面的目录和文件,创建对应的文件和目录;自行创建,不过多表述。

3. 对应配置文件

elasticsearch.yml

# Minimal Elasticsearch settings for the single-node Docker deployment.
# Bind on all interfaces so the published 9200/9300 ports are reachable.
network.host: 0.0.0.0
cluster.name: 'docker-cluster'

kibana.yml

# Default Kibana configuration for the Docker image.
# Listen on all interfaces so the published port 5601 is reachable.
server.host: "0.0.0.0"
server.shutdownTimeout: "5s"
# Reach Elasticsearch through the compose service name on the shared network.
elasticsearch.hosts: [ "http://elasticsearch:9200" ]
monitoring.ui.container.elasticsearch.enabled: true

logstash.yml

# Expose the Logstash HTTP/monitoring API on all interfaces (port 9600).
http.host: '0.0.0.0'
# Ship pipeline monitoring data to Elasticsearch via the compose service name.
xpack.monitoring.elasticsearch.hosts: ['http://elasticsearch:9200']

pipelines.yml

# Each entry defines an independent Logstash pipeline; path.config must point
# at the config file mounted into /usr/share/logstash/pipeline/.
- pipeline.id: "piao-one"
  path.config: "/usr/share/logstash/pipeline/piao-one.conf"
  pipeline.workers: 3
- pipeline.id: "piao-two"
  path.config: "/usr/share/logstash/pipeline/piao-two.conf"
  pipeline.workers: 3

piao-one.conf

input {
  file {
    path => "/piao_1/*.log"
    # Read existing files from the beginning on first run; afterwards only new lines.
    start_position => "beginning"
    # Seconds between checks of the files for new content (numeric, not a string).
    stat_interval => 3
    type => "piao_1"
    # sincedb_path is a string option, not an array; the original array form
    # fails plugin option validation.
    sincedb_path => "/tmp/piao-1a.data"
    # Join continuation lines (e.g. stack traces) onto the preceding line
    # that starts with "[YYYY".
    codec => multiline {
      pattern => "^\[\d{4}"
      what => "previous"
      negate => true
      auto_flush_interval => 1
    }
  }
}

filter {
  # Extract the timestamp at the start of each log entry into "logdate".
  grok {
    match => { "message" => "%{TIMESTAMP_ISO8601:logdate}" }
  }
  # Parse logdate into @timestamp, treating the log time as UTC.
  # NOTE(review): using timezone => "Asia/Shanghai" here would make the
  # -8h ruby shifts below unnecessary — confirm the intended Kibana display.
  date {
    match => [ "logdate", "YYYY-MM-dd HH:mm:ss" ]
    target => "@timestamp"
    timezone => "+00:00"
  }
  # Shift @timestamp back 8 hours so Kibana (browser at +08:00) shows wall-clock time.
  ruby {
    code => "event.set('timestamp', event.get('@timestamp').time.localtime - 8*60*60)"
  }
  ruby {
    code => "event.set('@timestamp', event.get('timestamp'))"
    remove_field => ["timestamp"]
  }
  # Apply the same shift to create_time, but only when the field exists.
  # The original ran unconditionally and raised a ruby exception (tagging
  # every event _rubyexception) because grok above never sets create_time.
  if [create_time] {
    ruby {
      code => "event.set('ct', event.get('create_time').time.localtime - 8*60*60)"
    }
    ruby {
      code => "event.set('create_time', event.get('ct'))"
      remove_field => ["ct"]
    }
  }
  mutate {
    remove_field => ["logdate"]
  }
}

output {
  if [type] == "piao_1" {
    elasticsearch {
      hosts => ["elasticsearch:9200"]
      # Daily index, e.g. piao-1-2024.01.31
      index => "piao-1-%{+YYYY.MM.dd}"
    }
  }
}

piao-two.conf

input {
  file {
    path => "/piao_2/*.log"
    # Read existing files from the beginning on first run; afterwards only new lines.
    start_position => "beginning"
    # Seconds between checks of the files for new content (numeric, not a string).
    stat_interval => 3
    type => "piao_2"
    # sincedb_path is a string option, not an array; the original array form
    # fails plugin option validation.
    sincedb_path => "/tmp/piao-2a.data"
    # Join continuation lines (e.g. stack traces) onto the preceding line
    # that starts with "[YYYY".
    codec => multiline {
      pattern => "^\[\d{4}"
      what => "previous"
      negate => true
      auto_flush_interval => 1
    }
  }
}

filter {
  # Extract the timestamp at the start of each log entry into "logdate".
  grok {
    match => { "message" => "%{TIMESTAMP_ISO8601:logdate}" }
  }
  # Parse logdate into @timestamp, treating the log time as UTC.
  # NOTE(review): using timezone => "Asia/Shanghai" here would make the
  # -8h ruby shifts below unnecessary — confirm the intended Kibana display.
  date {
    match => [ "logdate", "YYYY-MM-dd HH:mm:ss" ]
    target => "@timestamp"
    timezone => "+00:00"
  }
  # Shift @timestamp back 8 hours so Kibana (browser at +08:00) shows wall-clock time.
  ruby {
    code => "event.set('timestamp', event.get('@timestamp').time.localtime - 8*60*60)"
  }
  ruby {
    code => "event.set('@timestamp', event.get('timestamp'))"
    remove_field => ["timestamp"]
  }
  # Apply the same shift to create_time, but only when the field exists.
  # The original ran unconditionally and raised a ruby exception (tagging
  # every event _rubyexception) because grok above never sets create_time.
  if [create_time] {
    ruby {
      code => "event.set('ct', event.get('create_time').time.localtime - 8*60*60)"
    }
    ruby {
      code => "event.set('create_time', event.get('ct'))"
      remove_field => ["ct"]
    }
  }
  mutate {
    remove_field => ["logdate"]
  }
}

output {
  if [type] == "piao_2" {
    elasticsearch {
      hosts => ["elasticsearch:9200"]
      # Daily index, e.g. piao-2-2024.01.31
      index => "piao-2-%{+YYYY.MM.dd}"
    }
  }
}


一定要注意对应的文件路径要正确!

4. 启动容器:docker-compose up -d

5. 访问 Kibana:http://IP:5601

最后编辑于
©著作权归作者所有,转载或内容合作请联系作者
平台声明:文章内容(如有图片或视频亦包括在内)由作者上传并发布,文章内容仅代表作者本人观点,简书系信息发布平台,仅提供信息存储服务。

推荐阅读更多精彩内容