1. 准备工作
docker-desktop
docker运行elk命令并将配置文件挂载到指定位置
# 1.先将run命令中的-v参数去掉,先使其正常运行
# 2.再将镜像内的配置文件等目录复制到指定目录,方便管理,
# 3.复制后删除未挂载目录的容器,再运行带-v的run命令
# 举例
# Step 1: start Elasticsearch WITHOUT bind mounts so the image can populate
# its default directories first.
docker run --name elasticsearch7.4.2 \
  -p 9200:9200 -p 9300:9300 \
  -e "ES_JAVA_OPTS=-Xms1024m -Xmx1024m" \
  -e "discovery.type=single-node" \
  -d elasticsearch:7.4.2

# Step 2: copy every directory that will be bind-mounted out to the host
# (previously only `data` was copied, leaving plugins/logs unseeded).
docker cp elasticsearch7.4.2:/usr/share/elasticsearch/data E:/docker/elasticsearch7.4.2/data
docker cp elasticsearch7.4.2:/usr/share/elasticsearch/plugins E:/docker/elasticsearch7.4.2/plugins
docker cp elasticsearch7.4.2:/usr/share/elasticsearch/logs E:/docker/elasticsearch7.4.2/logs

# Step 3: remove the temporary container; without this the next `docker run`
# fails with a name conflict on elasticsearch7.4.2.
docker rm -f elasticsearch7.4.2

# Step 4: re-create the container with the host directories bind-mounted.
docker run --name elasticsearch7.4.2 \
  -p 9200:9200 -p 9300:9300 \
  -e "ES_JAVA_OPTS=-Xms1024m -Xmx1024m" \
  -e "discovery.type=single-node" \
  -v E:/docker/elasticsearch7.4.2/data:/usr/share/elasticsearch/data \
  -v E:/docker/elasticsearch7.4.2/plugins:/usr/share/elasticsearch/plugins \
  -v E:/docker/elasticsearch7.4.2/logs:/usr/share/elasticsearch/logs \
  -d elasticsearch:7.4.2
# Kibana: same copy-then-mount procedure as Elasticsearch.
# NOTE(review): copy /usr/share/kibana/{data,config,plugins} out of an
# unmounted container first — an empty config mount leaves Kibana without
# its kibana.yml. (Fixed: `-d` was passed twice, before the name and
# before the image.)
docker run -d --name kibana7.4.2 \
  -p 5601:5601 \
  -v E:/docker/kibana7.4.2/data:/usr/share/kibana/data \
  -v E:/docker/kibana7.4.2/config:/usr/share/kibana/config \
  -v E:/docker/kibana7.4.2/plugins:/usr/share/kibana/plugins \
  kibana:7.4.2
# Logstash: 5044 = beats input, 9600 = monitoring API, 5000 = custom TCP
# input used by the logback LogstashTcpSocketAppender.
# (Fixed: `-d` was passed twice, before the name and before the image.)
docker run -d --name logstash7.4.2 \
  -p 5044:5044 \
  -p 9600:9600 \
  -p 5000:5000 \
  -v E:/docker/logstash7.4.2/pipeline:/usr/share/logstash/pipeline \
  -v E:/docker/logstash7.4.2/config:/usr/share/logstash/config \
  -v E:/docker/logstash7.4.2/data:/usr/share/logstash/data \
  logstash:7.4.2
修改配置文件
kibana的kibana.yml
# Default Kibana configuration for docker target
server.name: kibana
# "0" binds the server to all interfaces inside the container.
server.host: "0"
# NOTE(review): presumably the Docker host / WSL gateway IP where ES port 9200
# is published — confirm it is reachable from inside the Kibana container.
elasticsearch.hosts: [ "http://172.21.80.1:9200" ]
xpack.monitoring.ui.container.elasticsearch.enabled: true
logstash的logstash.yml
# Bind the Logstash HTTP (monitoring) API to all interfaces in the container.
http.host: "0.0.0.0"
# NOTE(review): presumably the Docker host IP where Elasticsearch is published — confirm.
xpack.monitoring.elasticsearch.hosts: [ "http://172.21.80.1:9200" ]
logstash的logstash-sample.conf
# Pipeline: receive newline-delimited JSON events over TCP and index them
# into Elasticsearch (plus echo to stdout for debugging).
input {
  tcp {
    port => 5000          # custom port (do not use 5044/9600 — already mapped for beats/monitoring)
    codec => json_lines   # JSON decoding is required for events from logstash-logback-encoder
  }
}
output {
  # Echo each event to stdout (visible via `docker logs`) for debugging.
  stdout {
    codec => json_lines
  }
  elasticsearch {
    # NOTE(review): presumably the Docker host IP where ES is published — confirm.
    hosts => "172.21.80.1:9200"
    # One index per day, e.g. elk-test-2024.01.31.
    index => "elk-test-%{+YYYY.MM.dd}"
  }
}
logstash的pipelines.yml
# This file is where you define your pipelines. You can define multiple.
# For more information on multiple pipelines, see the documentation:
# https://www.elastic.co/guide/en/logstash/current/multiple-pipelines.html
- pipeline.id: main
  # Directory inside the container holding the *.conf pipeline files
  # (bind-mounted from E:/docker/logstash7.4.2/pipeline).
  path.config: "/usr/share/logstash/pipeline"
验证启动是否成功
kibana首页
2. BasicController
package com.wl.test.controller;
import com.wl.test.model.User;
import lombok.extern.slf4j.Slf4j;
import org.springframework.web.bind.annotation.*;
@RestController
@Slf4j
public class BasicController {
// http://127.0.0.1:8080/user
@RequestMapping("/user")
public User user() {
User user = new User();
user.setName("张三");
user.setAge(666);
log.info("用户信息:{}", user);
log.debug("用户信息2:{}", user);
log.warn("用户信息3:{}", user);
log.error("用户信息4:{}", user);
return user;
}
}
3. pom
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.wl.test</groupId>
    <artifactId>elk-test</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <name>elk-test</name>
    <description>elk-test</description>
    <properties>
        <java.version>1.8</java.version>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
        <spring-boot.version>2.6.13</spring-boot.version>
    </properties>
    <dependencies>
        <!-- JSON encoder + TCP appender used by logback.xml to ship logs to Logstash. -->
        <dependency>
            <groupId>net.logstash.logback</groupId>
            <artifactId>logstash-logback-encoder</artifactId>
            <version>7.2</version>
        </dependency>
        <!-- NOTE(review): explicit version overrides the one managed by
             spring-boot-dependencies; consider dropping <version> so the BOM
             picks a lombok release matched to this Boot version. -->
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <version>1.18.18</version>
        </dependency>
        <!-- Exclude Boot's default logging starter; logback is re-added
             explicitly below so the custom logback.xml fully controls logging. -->
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter</artifactId>
            <exclusions>
                <exclusion>
                    <groupId>org.springframework.boot</groupId>
                    <artifactId>spring-boot-starter-logging</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
        </dependency>
        <dependency>
            <groupId>ch.qos.logback</groupId>
            <artifactId>logback-classic</artifactId>
        </dependency>
        <dependency>
            <groupId>ch.qos.logback</groupId>
            <artifactId>logback-core</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>
    </dependencies>
    <!-- Import the Spring Boot BOM so starter versions are managed centrally. -->
    <dependencyManagement>
        <dependencies>
            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-dependencies</artifactId>
                <version>${spring-boot.version}</version>
                <type>pom</type>
                <scope>import</scope>
            </dependency>
        </dependencies>
    </dependencyManagement>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.8.1</version>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                    <encoding>UTF-8</encoding>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
                <version>${spring-boot.version}</version>
                <configuration>
                    <mainClass>com.wl.test.ElkTestApplication</mainClass>
                    <!-- NOTE(review): skip=true disables the repackage execution below,
                         so `mvn package` produces a plain (non-executable) jar. If the
                         app is only run from the IDE this is fine — otherwise remove it. -->
                    <skip>true</skip>
                </configuration>
                <executions>
                    <execution>
                        <id>repackage</id>
                        <goals>
                            <goal>repackage</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>
</project>
4. application.yml
# Restored the YAML nesting lost in the paste — without indentation this
# document is invalid (port/application/config would all be top-level keys).
server:
  # HTTP port the embedded server listens on.
  port: 8080

spring:
  application:
    name: elk-test

logging:
  # Use the custom logback config from the classpath instead of Boot's default.
  config: classpath:logback.xml
5. logback.xml
<?xml version="1.0" encoding="UTF-8"?>
<!--
  Logback configuration: colored console output, size/time-rolled files
  split by level (written through async appenders on the root logger), and
  a TCP appender that ships JSON events to Logstash.
-->
<configuration>
    <!-- Host directory and file-name stem for the rolling log files. -->
    <property name="LOG_PATH" value="E:/docker/logs"/>
    <property name="FILE_NAME" value="elk-test"/>

    <!-- Spring Boot's default logback settings (conversion rules etc.). -->
    <include resource="org/springframework/boot/logging/logback/defaults.xml"/>

    <!-- Console appender with colored level / logger name. -->
    <appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder" charset="UTF-8">
            <pattern>%d{yyyy-MM-dd HH:mm:ss} [%thread] %highlight(%-5level) %cyan(%logger{50}:%L) - %msg%n</pattern>
        </encoder>
    </appender>

    <!-- INFO file: no level filter, so it receives every event routed to it. -->
    <appender name="FILE_INFO" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <!-- When a file exceeds maxFileSize, %i increments and a new file starts. -->
            <FileNamePattern>${LOG_PATH}/${FILE_NAME}-%d{yyyy-MM-dd}.%i-info.log</FileNamePattern>
            <!-- Days of rolled history to keep. -->
            <maxHistory>30</maxHistory>
            <maxFileSize>100MB</maxFileSize>
        </rollingPolicy>
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder" charset="UTF-8">
            <pattern>%d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{50}:%L - %msg%n</pattern>
        </encoder>
    </appender>

    <!-- WARN-only file. -->
    <appender name="FILE_WARN" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>WARN</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <FileNamePattern>${LOG_PATH}/${FILE_NAME}-%d{yyyy-MM-dd}.%i-warn.log</FileNamePattern>
            <!-- Days of rolled history to keep. -->
            <maxHistory>30</maxHistory>
            <maxFileSize>100MB</maxFileSize>
        </rollingPolicy>
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder" charset="UTF-8">
            <pattern>%d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{50}:%L - %msg%n</pattern>
        </encoder>
    </appender>

    <!-- ERROR-only file. -->
    <appender name="FILE_ERROR" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>ERROR</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <FileNamePattern>${LOG_PATH}/${FILE_NAME}-%d{yyyy-MM-dd}.%i-error.log</FileNamePattern>
            <!-- Days of rolled history to keep. -->
            <maxHistory>30</maxHistory>
            <maxFileSize>100MB</maxFileSize>
        </rollingPolicy>
        <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder" charset="UTF-8">
            <pattern>%d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{50}:%L - %msg%n</pattern>
        </encoder>
    </appender>

    <!-- Async wrappers so file I/O does not block application threads.
         FIX: includeCallerData=true added — without it AsyncAppender drops
         caller data and the %L line number in the file patterns renders as "?". -->
    <appender name="info-asyn" class="ch.qos.logback.classic.AsyncAppender">
        <appender-ref ref="FILE_INFO"/>
        <queueSize>512</queueSize> <!-- async queue capacity -->
        <includeCallerData>true</includeCallerData>
    </appender>
    <appender name="warn-asyn" class="ch.qos.logback.classic.AsyncAppender">
        <appender-ref ref="FILE_WARN"/>
        <queueSize>512</queueSize> <!-- async queue capacity -->
        <includeCallerData>true</includeCallerData>
    </appender>
    <appender name="error-asyn" class="ch.qos.logback.classic.AsyncAppender">
        <appender-ref ref="FILE_ERROR"/>
        <queueSize>512</queueSize>
        <includeCallerData>true</includeCallerData>
    </appender>

    <!-- Ships JSON events to Logstash over TCP; the port is the one defined
         in the Logstash pipeline .conf (tcp input). -->
    <appender name="LOGSTASH" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
        <destination>172.21.80.1:5000</destination>
        <!-- An encoder is mandatory; several implementations are available. -->
        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
            <!-- Extra field added to every event (usable for index naming in
                 Logstash); keep the value lowercase. -->
            <customFields>{"appname":"elk-test"}</customFields>
        </encoder>
    </appender>

    <!-- Application logger: additivity=false, so these events do NOT also
         propagate to root (no duplicate console/Logstash output). -->
    <logger name="com.wl.test" additivity="false">
        <appender-ref ref="CONSOLE"/>
        <appender-ref ref="FILE_INFO"/>
        <appender-ref ref="FILE_WARN"/>
        <appender-ref ref="FILE_ERROR"/>
        <appender-ref ref="LOGSTASH"/>
    </logger>

    <!-- Everything else goes through the async file appenders. -->
    <root level="info">
        <appender-ref ref="CONSOLE"/>
        <appender-ref ref="info-asyn"/>
        <appender-ref ref="warn-asyn"/>
        <appender-ref ref="error-asyn"/>
        <appender-ref ref="LOGSTASH"/>
    </root>
</configuration>
6. User.java
package com.wl.test.model;
import lombok.Data;
/**
 * Sample payload returned by the demo controller and printed in the logs.
 * Lombok {@code @Data} generates getters/setters, equals/hashCode and toString.
 */
@Data
public class User {
    // Display name; may contain non-ASCII text (the demo uses Chinese).
    private String name;
    // Boxed Integer, so the field can be null when unset.
    private Integer age;
}
7. ElkTestApplication.java
package com.wl.test;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
/** Spring Boot entry point for the ELK logging demo. */
@SpringBootApplication
public class ElkTestApplication {

    /** Bootstraps the application context and starts the embedded server. */
    public static void main(String[] args) {
        SpringApplication application = new SpringApplication(ElkTestApplication.class);
        application.run(args);
    }
}
8. kibana创建indexPattern
9. 效果展示(需先创建)
访问接口:http://127.0.0.1:8080/user
控制台
日志页面