First, define the message entity that determines which fields are reported to ES:
import lombok.Data;

/**
 * Log document written to Elasticsearch.
 */
@Data
public class ESLogVo {
    private String host;
    private String ip;
    private String env;
    private String message;
    private String timestamp;
    private String logger;
    private String level;
    private String thread;
    private String throwable;
    private Location location;
    private String traceId;
    private String rpcId;
}

/**
 * Caller location (class, method, file, line) of the log statement.
 */
@Data
class Location {
    private String className;
    private String method;
    private String file;
    private String line;
}
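For reference, a document indexed from these two classes looks roughly like the following. All field values here are illustrative, and traceId/rpcId only appear when the corresponding MDC keys are set:

{
  "host": "app-server-01",
  "ip": "192.168.123.21",
  "env": "dev",
  "level": "ERROR",
  "logger": "com.herohua.logback.TestLog",
  "thread": "main",
  "message": "这是一条异常的消息",
  "throwable": "java.lang.Exception: sss\n\tat ...",
  "timestamp": "2023-06-01 10:15:30",
  "location": {
    "className": "com.herohua.logback.TestLog",
    "method": "testLog",
    "file": "TestLog.java",
    "line": "18"
  },
  "traceId": "c0a87b0b-...",
  "rpcId": "0.1"
}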
Next, write ESAppender, extending logback's UnsynchronizedAppenderBase and overriding the append, start and stop methods. append receives an ILoggingEvent object that encapsulates the log record being written; start and stop are where the ES client connection is created and released.
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.classic.spi.IThrowableProxy;
import ch.qos.logback.classic.spi.ThrowableProxyUtil;
import ch.qos.logback.core.UnsynchronizedAppenderBase;
import com.alibaba.fastjson.JSON;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.time.FastDateFormat;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.transport.client.PreBuiltTransportClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;

@Slf4j
public class ESAppender extends UnsynchronizedAppenderBase<ILoggingEvent> {

    private static final FastDateFormat TIMESTAMP_FORMAT = FastDateFormat.getInstance("yyyy-MM-dd HH:mm:ss");
    private static final FastDateFormat INDEX_NAME_DATE_FORMAT = FastDateFormat.getInstance("yyyy.MM.dd");

    // The client is a TransportClient
    private TransportClient client;

    String esIndex = "java-log-#date#";
    String esType = "java-log";
    boolean isLocation = true;
    String env = "";
    String esAddress = "";

    public void setEsIndex(String esIndex) {
        this.esIndex = esIndex;
    }

    public void setEsType(String esType) {
        this.esType = esType;
    }

    public void setEnv(String env) {
        this.env = env;
    }

    public void setEsAddress(String esAddress) {
        this.esAddress = esAddress;
    }

    public void setIsLocation(boolean isLocation) {
        this.isLocation = isLocation;
    }

    @SneakyThrows
    @Override
    protected void append(ILoggingEvent event) {
        // 1. Build the ES log document
        ESLogVo esLogVo = new ESLogVo();
        esLogVo.setHost(InetAddress.getLocalHost().getHostName());
        esLogVo.setIp(InetAddress.getLocalHost().getHostAddress());
        esLogVo.setEnv(this.env);
        esLogVo.setLevel(event.getLevel().toString());
        // Caller location is only available when includeCallerData is enabled on the wrapping AsyncAppender
        if (isLocation) {
            Location location = new Location();
            StackTraceElement[] callerDataArray = event.getCallerData();
            if (callerDataArray != null && callerDataArray.length > 0) {
                StackTraceElement immediateCallerData = callerDataArray[0];
                location.setClassName(immediateCallerData.getClassName());
                location.setMethod(immediateCallerData.getMethodName());
                location.setFile(immediateCallerData.getFileName());
                location.setLine(Integer.toString(immediateCallerData.getLineNumber()));
            }
            esLogVo.setLocation(location);
        }
        IThrowableProxy tp = event.getThrowableProxy();
        if (tp != null) {
            String throwable = ThrowableProxyUtil.asString(tp);
            esLogVo.setThrowable(throwable);
        }
        esLogVo.setLogger(event.getLoggerName());
        esLogVo.setMessage(event.getFormattedMessage());
        esLogVo.setTimestamp(TIMESTAMP_FORMAT.format(new Date(event.getTimeStamp())));
        esLogVo.setThread(event.getThreadName());
        // Trace fields come from the MDC; the key names "traceId" and "rpcId" are an assumption of this example
        Map<String, String> mdcPropertyMap = event.getMDCPropertyMap();
        esLogVo.setTraceId(mdcPropertyMap.get("traceId"));
        esLogVo.setRpcId(mdcPropertyMap.get("rpcId"));
        // 2. Write the log document into ES
        String jsonString = JSON.toJSONString(esLogVo);
        String indexName = esIndex.replace("#date#", INDEX_NAME_DATE_FORMAT.format(new Date(event.getTimeStamp())));
        IndexResponse indexResponse = client.prepareIndex(indexName, esType).setSource(jsonString, XContentType.JSON).get();
        //log.info("result: " + indexResponse);
    }

    @Override
    public void start() {
        super.start();
        // Create the ES client
        Settings settings = Settings.builder()
                .put("cluster.name", "herohua's cluster")
                .put("client.transport.sniff", true)
                .build();
        client = new PreBuiltTransportClient(settings);
        try {
            List<String> addresses = Arrays.asList(esAddress.split(","));
            for (String address : addresses) {
                client.addTransportAddresses(new InetSocketTransportAddress(
                        InetAddress.getByName(address.substring(0, address.indexOf(":")).trim()),
                        Integer.parseInt(address.substring(address.indexOf(":") + 1).trim())));
            }
        } catch (UnknownHostException e) {
            addError("Failed to resolve ES address: " + esAddress, e);
        }
    }

    @Override
    public void stop() {
        super.stop();
        // Close the client
        if (client != null) {
            client.close();
        }
    }

    private static final Logger logger = LoggerFactory.getLogger(ESAppender.class);

    public static void main(String[] args) throws InterruptedException {
        logger.info("test info message!");
        logger.error("这是一条异常的消息", new Exception("sss"));
        logger.debug("debug hello 世界消息");
        logger.warn("注意注意");
        // Logging is asynchronous, so wait for the events to be flushed before the JVM exits
        TimeUnit.SECONDS.sleep(10);
    }
}
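To compile the appender as written, roughly the following Maven dependencies are needed. This is a sketch: the group/artifact coordinates are real, but the versions are illustrative and the transport client version must match your ES cluster (the code above assumes a 5.x-style TransportClient).

<dependencies>
    <dependency>
        <groupId>ch.qos.logback</groupId>
        <artifactId>logback-classic</artifactId>
        <version>1.2.3</version>
    </dependency>
    <dependency>
        <groupId>org.elasticsearch.client</groupId>
        <artifactId>transport</artifactId>
        <version>5.6.16</version>
    </dependency>
    <dependency>
        <groupId>com.alibaba</groupId>
        <artifactId>fastjson</artifactId>
        <version>1.2.83</version>
    </dependency>
    <dependency>
        <groupId>org.apache.commons</groupId>
        <artifactId>commons-lang3</artifactId>
        <version>3.12.0</version>
    </dependency>
    <dependency>
        <groupId>org.projectlombok</groupId>
        <artifactId>lombok</artifactId>
        <version>1.18.24</version>
        <scope>provided</scope>
    </dependency>
</dependencies>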
Configure logback.xml:
<?xml version="1.0" encoding="utf-8"?>
<configuration scan="true" scanPeriod="30 seconds" debug="true">
    <contextName>logback-api</contextName>
    <property name="logback.system" value="logback-system" />
    <property name="logback.path" value="../logs/logback-system" />
    <property name="logback.level" value="DEBUG" />
    <property name="logback.pattern" value="%d{yyyy-MM-dd HH:mm:ss} [%t] %-5p -%m%n" />
    <property name="logback.env" value="dev" />
    <property name="logback.isLocation" value="true" />
    <property name="logback.esAddress" value="192.168.123.11:9300,192.168.123.12:9300,192.168.123.13:9300" />

    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
        <encoder class="ch.qos.logback.core.encoder.LayoutWrappingEncoder">
            <charset>UTF-8</charset>
            <layout class="ch.qos.logback.classic.PatternLayout">
                <pattern>${logback.pattern}</pattern>
            </layout>
        </encoder>
    </appender>

    <!-- Log search engine -->
    <appender name="ES" class="com.herohua.logback.ESAppender">
        <!-- Index name; the #date# placeholder is replaced with the event date in yyyy.MM.dd format -->
        <esIndex>java-log-#date#</esIndex>
        <!-- Index type -->
        <esType>${logback.system}</esType>
        <!-- Runtime environment -->
        <env>${logback.env}</env>
        <!-- Whether to record the caller location (class/method/file/line) -->
        <isLocation>${logback.isLocation}</isLocation>
        <!-- ES addresses -->
        <esAddress>${logback.esAddress}</esAddress>
    </appender>

    <appender name="ASYNC_ES" class="ch.qos.logback.classic.AsyncAppender">
        <!-- By default, when the BlockingQueue drops to 20% remaining capacity, TRACE, DEBUG and INFO events are discarded and only WARN and above are kept; 0 disables discarding -->
        <discardingThreshold>0</discardingThreshold>
        <!-- Maximum capacity of the BlockingQueue, 256 by default -->
        <queueSize>256</queueSize>
        <appender-ref ref="ES" />
        <!-- Must be true for caller data (e.g. the line number) to reach the ES appender -->
        <includeCallerData>true</includeCallerData>
    </appender>

    <logger name="com.herohua.logback" additivity="true">
        <level value="${logback.level}" />
        <appender-ref ref="ASYNC_ES" />
    </logger>

    <root level="${logback.level}">
        <appender-ref ref="STDOUT" />
    </root>
</configuration>
Define the ES mapping template. Each day's logs are written into a separate index, so register this dynamic template before deploying the project:
PUT _template/java-log
{
  "template": "java-log-*",
  "order": 0,
  "settings": {
    "index": {
      "refresh_interval": "5s"
    }
  },
  "mappings": {
    "_default_": {
      "dynamic_templates": [
        {
          "message_field": {
            "match_mapping_type": "string",
            "path_match": "message",
            "mapping": {
              "norms": false,
              "type": "text",
              "analyzer": "ik_max_word",
              "search_analyzer": "ik_max_word"
            }
          }
        },
        {
          "throwable_field": {
            "match_mapping_type": "string",
            "path_match": "throwable",
            "mapping": {
              "norms": false,
              "type": "text",
              "analyzer": "ik_max_word",
              "search_analyzer": "ik_max_word"
            }
          }
        },
        {
          "string_field": {
            "match_mapping_type": "string",
            "match": "*",
            "mapping": {
              "norms": false,
              "type": "text",
              "analyzer": "ik_max_word",
              "search_analyzer": "ik_max_word",
              "fields": {
                "keyword": {
                  "type": "keyword"
                }
              }
            }
          }
        }
      ],
      "_all": {
        "enabled": false
      },
      "properties": {
        "env": {
          "type": "keyword"
        },
        "host": {
          "type": "keyword"
        },
        "ip": {
          "type": "ip"
        },
        "level": {
          "type": "keyword"
        },
        "location": {
          "properties": {
            "line": {
              "type": "integer"
            }
          }
        },
        "timestamp": {
          "type": "date",
          "format": "yyyy-MM-dd HH:mm:ss"
        }
      }
    }
  }
}
Test method:
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.concurrent.TimeUnit;

public class TestLog {

    private static final Logger logger = LoggerFactory.getLogger(TestLog.class);

    @Test
    public void testLog() throws InterruptedException {
        logger.info("test info message!");
        logger.error("这是一条异常的消息", new Exception("sss"));
        logger.debug("debug hello 世界消息");
        logger.warn("注意注意");
        // The ES appender is wrapped in an AsyncAppender, so wait for the events to be flushed before the test JVM exits
        TimeUnit.SECONDS.sleep(10);
    }
}
The indexed log entries can then be viewed from Kibana's Discover panel.
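For a quick check outside of Kibana, a minimal sketch of a search against the daily indices is shown below. It assumes the java-log-yyyy.MM.dd naming convention used above and filters for ERROR-level entries whose message matches a Chinese keyword (analyzed by ik_max_word); the search term is illustrative.

GET java-log-*/_search
{
  "query": {
    "bool": {
      "filter": [
        { "term": { "level": "ERROR" } }
      ],
      "must": [
        { "match": { "message": "异常" } }
      ]
    }
  },
  "sort": [
    { "timestamp": { "order": "desc" } }
  ]
}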