docker-compose.yml
version: '2'
services:
  elasticsearch:
    image: elasticsearch:latest
    command: elasticsearch
    volumes:
      - $PWD/esdata:/usr/share/elasticsearch/data
    container_name: elasticsearch
    hostname: elasticsearch
    restart: always
    ports:
      - "9200:9200"
      - "9300:9300"
  logstash:
    image: 192.168.1.13:5000/logstash-gaia
    command: logstash -f /etc/logstash/conf.d/logstash.conf
    volumes:
      - $PWD/logstash:/etc/logstash/conf.d
    container_name: logstash
    hostname: logstash
    restart: always
    depends_on:
      - elasticsearch
    ports:
      - "7001-7005:7001-7005"
  kibana:
    image: kibana:latest
    environment:
      - ELASTICSEARCH_URL=http://elasticsearch:9200
    container_name: kibana
    hostname: kibana
    depends_on:
      - elasticsearch
    restart: always
    ports:
      - "5601:5601"
The stack starts up fine, but logstash is very slow to start. It turned out that it was installing plugins on every startup, so I installed the required plugins up front and baked them into a custom Docker image. The Dockerfile is as follows:
Dockerfile for 192.168.1.13:5000/logstash-gaia
FROM logstash:5.1.2
RUN logstash-plugin install --no-verify --local logstash-input-log4j logstash-output-elasticsearch logstash-input-tcp
logstash.conf
input {
  stdin {}
  log4j {
    mode => "server"
    host => "0.0.0.0"
    port => 7001
    type => "log4j"
  }
  tcp {
    host => '0.0.0.0'
    port => 7002
    codec => 'json_lines'
  }
}
output {
  elasticsearch { hosts => ["elasticsearch:9200"] }
}
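To sanity-check the tcp input before wiring up an application, you can push a JSON line to port 7002 by hand. Below is a minimal sketch, assuming logstash is reachable at 192.168.1.13 as in the compose file above; the class name and event fields are arbitrary examples.
TcpInputSmokeTest.java
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.net.Socket;
import java.nio.charset.StandardCharsets;

public class TcpInputSmokeTest {
    public static void main(String[] args) throws Exception {
        // json_lines codec expects one JSON document per line, terminated by '\n'
        String event = "{\"message\":\"hello logstash\",\"level\":\"INFO\"}\n";
        try (Socket socket = new Socket("192.168.1.13", 7002);
             Writer out = new OutputStreamWriter(socket.getOutputStream(), StandardCharsets.UTF_8)) {
            out.write(event);
            out.flush();
        }
    }
}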
The most frustrating part is that the log4j input is buggy in this logstash version. So I switched to logback and shipped the logs over TCP instead.
logback.xml
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
  <include resource="org/springframework/boot/logging/logback/base.xml" />
  <!-- Roll over to a new log file every day -->
  <appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
    <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
      <!-- Output file name pattern for the log files -->
      <FileNamePattern>${user.dir}/logs/accounts.%d{yyyy-MM-dd}.log</FileNamePattern>
      <!-- Number of days of log files to keep -->
      <MaxHistory>30</MaxHistory>
    </rollingPolicy>
    <!-- Maximum size of a single log file -->
    <triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
      <MaxFileSize>10MB</MaxFileSize>
    </triggeringPolicy>
    <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
      <!-- Output format: %d is the date, %thread the thread name, %-5level the level padded to 5 characters, %msg the log message, %n a newline -->
      <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg %n</pattern>
    </encoder>
  </appender>
  <appender name="logstash" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
    <destination>192.168.1.13:7002</destination>
    <encoder class="net.logstash.logback.encoder.LogstashEncoder" />
    <keepAliveDuration>5 minutes</keepAliveDuration>
  </appender>
  <root level="INFO">
    <appender-ref ref="logstash" />
  </root>
</configuration>
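LogstashTcpSocketAppender and LogstashEncoder come from the logstash-logback-encoder library (net.logstash.logback), which must be on the application's classpath. The encoder writes each event as a JSON document and by default also includes MDC entries, so contextual fields can be attached without touching the pattern. A minimal sketch, assuming the logback.xml above is on the classpath; the requestId key is just an example, not something the configuration requires.
MdcExample.java
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;

public class MdcExample {
    private static final Logger logger = LoggerFactory.getLogger(MdcExample.class);

    public static void main(String[] args) {
        // MDC entries become extra fields in the JSON event emitted by LogstashEncoder
        MDC.put("requestId", "demo-123");
        try {
            logger.info("hello logstash with MDC context");
        } finally {
            MDC.remove("requestId");
        }
    }
}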
LogbackTest.java
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LogbackTest {

    private Logger logger = LoggerFactory.getLogger(LogbackTest.class);

    @Before
    public void setUp() {
    }

    @Test
    public void testLog() {
        int i = 0;
        while (i++ < 100) {
            // DEBUG messages are filtered out by the root level INFO in logback.xml
            logger.debug("hello logstash, this is a message from logback");
            if (i % 2 == 0) {
                logger.info("hello logstash," + System.currentTimeMillis());
            }
            try {
                Thread.sleep(2000);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }
    }

    @Test
    public void testException() {
        //logger.error("error", new RuntimeException("sorry, error"));
    }
}
run & test
- Log indexing: open Kibana at 192.168.1.13:5601. On the first visit you need to create an index pattern; once it is set up, you can browse the logs on the Discover page.
- Log search
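Besides Kibana, you can confirm that events actually reached Elasticsearch by querying its REST API directly. A minimal sketch, assuming the default logstash-* index naming used by the elasticsearch output and the host/port from the compose file above; the query string is just an example.
ElasticsearchCheck.java
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class ElasticsearchCheck {
    public static void main(String[] args) throws Exception {
        // Search all logstash-* indices for recently shipped events
        URL url = new URL("http://192.168.1.13:9200/logstash-*/_search?q=message:hello&size=5");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");
        try (BufferedReader in = new BufferedReader(
                new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
            String line;
            while ((line = in.readLine()) != null) {
                System.out.println(line);
            }
        }
    }
}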