1. Installing Elasticsearch with Docker

Pull the image (this project uses version 6.5.0)
docker pull elasticsearch:6.5.0
Check the image
Run the docker images command to confirm the image was pulled successfully.
Start the container
docker run --name elasticsearch -d -e ES_JAVA_OPTS="-Xms512m -Xmx512m" -p 9200:9200 -p 9300:9300 elasticsearch:6.5.0

Once it is running, open host-ip:9200 in a browser to verify that Elasticsearch started successfully.
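A quick check from the command line works as well (a minimal example; host-ip stands for the IP of your Docker host):
curl http://host-ip:9200
# a successful start returns a small JSON document with fields such as cluster_name and version.number (6.5.0 here)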
Modify the Elasticsearch configuration
sudo docker exec -it container-id /bin/bash  # enter the container with administrator privileges
Edit the configuration with vi elasticsearch-6.5/config/elasticsearch.yml; the contents are as follows:

    # cluster name (choose whatever fits your setup)
    cluster.name: "my-cluster"
    # name of this node
    node.name: master
    # whether this node may be elected master
    node.master: true
    # whether this node stores data
    node.data: true
    # settings required by the head plugin
    http.cors.enabled: true
    http.cors.allow-origin: "*"
    transport.tcp.port: 9300
    http.port: 9200
    # addresses the node listens on (0.0.0.0 allows access from any IP)
    network.host: 0.0.0.0
Restart Elasticsearch
docker restart container-id
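To confirm the node came back up with the new settings, you can also query the cluster health endpoint (host-ip again stands for the Docker host's IP):
curl http://host-ip:9200/_cluster/health?pretty
# the response should report cluster_name "my-cluster"; a status of green or yellow means the single node is healthy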

Installing elasticsearch-head with Docker

Pull the image
docker pull mobz/elasticsearch-head:5
Start the container
docker run -d -p 9100:9100 docker.io/mobz/elasticsearch-head:5
Verify
Open host-ip:9100 in a browser; in the head UI, connect to http://host-ip:9200 (this is what the http.cors settings above were added for).

2. Configuring log4j to write logs to Elasticsearch

Here is the project's pom.xml first; getting the dependency versions to line up caused quite a few problems during setup.
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>2.2.0.RELEASE</version>
        <relativePath/> <!-- lookup parent from repository -->
    </parent>
    <groupId>com.log4jtest</groupId>
    <artifactId>log4jtest</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <name>log4jtest</name>
    <description>Demo project for Spring Boot</description>

    <properties>
        <java.version>1.8</java.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
            <exclusions>
                <exclusion>
                    <groupId>org.junit.vintage</groupId>
                    <artifactId>junit-vintage-engine</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <optional>true</optional>
        </dependency>
        <!-- core Elasticsearch dependencies -->
        <dependency>
            <groupId>org.elasticsearch</groupId>
            <artifactId>elasticsearch</artifactId>
            <version>6.5.0</version>
        </dependency>
        <dependency>
            <groupId>org.elasticsearch.client</groupId>
            <artifactId>transport</artifactId>
            <version>6.5.0</version>
        </dependency>
        <!-- https://mvnrepository.com/artifact/org.elasticsearch.plugin/transport-netty4-client -->
        <dependency>
            <groupId>org.elasticsearch.plugin</groupId>
            <artifactId>transport-netty4-client</artifactId>
            <version>6.5.0</version>
        </dependency>

        <dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-api</artifactId>
            <version>2.11.1</version>
        </dependency>
        <dependency>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-core</artifactId>
            <version>2.11.1</version>
        </dependency>
        <dependency>
            <groupId>log4j</groupId>
            <artifactId>log4j</artifactId>
            <version>1.2.17</version>
        </dependency>
        <!-- https://mvnrepository.com/artifact/com.alibaba/fastjson -->
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>1.2.62</version>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
            </plugin>
        </plugins>
    </build>

</project>
Configure log4j.properties (file name: log4j.properties)
    # root logger level and appenders
    # the ES appender is added here alongside the original console and file appenders
    log4j.rootLogger=DEBUG,CONSOLE,DAILY_ROLLING_FILE,ES
    # first appender: console output
    log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender
    log4j.appender.CONSOLE.Threshold=debug
    log4j.appender.CONSOLE.Target=System.out
    log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout
    log4j.appender.CONSOLE.layout.ConversionPattern=[%-5p] %d{yyyy-MM-dd HH:mm:ss,SSS} [%c{1}:%L] %m%n
    # second appender: daily rolling file output
    log4j.appender.DAILY_ROLLING_FILE=org.apache.log4j.DailyRollingFileAppender
    log4j.appender.DAILY_ROLLING_FILE.Append=true
    log4j.appender.DAILY_ROLLING_FILE.Threshold=debug
    log4j.appender.DAILY_ROLLING_FILE.Encoding=UTF-8
    # ${com.gysoft.app.log.dir} can be supplied as a JVM/Tomcat system property; it resolves to the log output directory
    log4j.appender.DAILY_ROLLING_FILE.File=E:/super.log
    log4j.appender.DAILY_ROLLING_FILE.DatePattern='.'yyyy-MM-dd
    log4j.appender.DAILY_ROLLING_FILE.layout=org.apache.log4j.PatternLayout
    log4j.appender.DAILY_ROLLING_FILE.layout.ConversionPattern=[%-5p] %d{yyyy-MM-dd HH:mm:ss,SSS} [%c{1}]:%L %m%n
    # third appender: ship logs to the ES cluster
    # com.log4jtest.config.EsAppender is the fully qualified name of the custom appender class defined below
    log4j.appender.ES=com.log4jtest.config.EsAppender
    # ES cluster name
    log4j.appender.ES.clusterName=my-cluster
    # ES cluster addresses, in the form ip:port,ip2:port2
    log4j.appender.ES.address=192.168.59.130:9300
    # index the log documents are written to
    log4j.appender.ES.index=mytest
    # document type
    log4j.appender.ES.type=doc
    # buffer size: logs are flushed to ES once the buffer holds bufferSize entries; it defaults to 1, which keeps logs in ES as fresh as possible
    # a larger bufferSize reduces the request rate against ES, at the cost of logs arriving with more delay
    log4j.appender.ES.bufferSize=1
Next, add the custom appender class. log4j instantiates it from the configuration above and populates clusterName, address, index, type and bufferSize through the setters generated by Lombok's @Data.
import com.alibaba.fastjson.JSON;
import lombok.Data;
import org.apache.log4j.AppenderSkeleton;
import org.apache.log4j.spi.ErrorCode;
import org.apache.log4j.spi.LocationInfo;
import org.apache.log4j.spi.LoggingEvent;
import org.apache.log4j.spi.ThrowableInformation;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.transport.client.PreBuiltTransportClient;
import org.springframework.util.CollectionUtils;

import java.io.IOException;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Date;
import java.util.Enumeration;
import java.util.List;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;

@Data
public class EsAppender extends AppenderSkeleton {
    private static final String COLON_SEPARATOR = ":";
    private static final String COMMA_SEPARATOR = ",";
    private static final int DEFAULT_BUFFER_SIZE = 1;

    /**
     * ES cluster name
     */
    private String clusterName;
    /**
     * ES cluster addresses, in the form
     * ip:port,ip2:port2,ip3:port3
     */
    private String address;
    /**
     * Size of the log buffer. With a bufferSize of 1 every log event is synced to ES immediately;
     * otherwise events are only flushed once the buffer holds bufferSize entries.
     * bufferSize defaults to 1.
     */
    private int bufferSize;
    /**
     * Buffered log documents waiting to be flushed
     */
    private List<XContentBuilder> buffers;
    /**
     * Documents to be removed from the buffer after a flush
     */
    private List<XContentBuilder> removes;
    /**
     * Client used to talk to the ES cluster
     */
    private TransportClient client;
    /**
     * Target index
     */
    private String index;
    /**
     * Target document type
     */
    private String type;

    public EsAppender() {
        buffers = new ArrayList<>();
        removes = new ArrayList<>();
    }

    @Override
    protected void append(LoggingEvent event) {
        parseLog(event);
        if (buffers.size() >= (bufferSize == 0 ? DEFAULT_BUFFER_SIZE : bufferSize)) {
            flushBuffer();
        }
    }

    private void parseLog(LoggingEvent event) {
        LocationInfo locationInfo = event.getLocationInformation();
        ThrowableInformation throwableInformation = event.getThrowableInformation();
        // a throwable is attached: include the exception details in the document
        if (throwableInformation != null) {
            Throwable throwable = throwableInformation.getThrowable();
            try {
                buffers.add(jsonBuilder()
                        .startObject()
                        .field("className", locationInfo.getClassName())
                        .field("fileName", locationInfo.getFileName())
                        .field("lineNumber", locationInfo.getLineNumber())
                        .field("methodName", locationInfo.getMethodName())
                        .field("serverIp", getIp())
                        .field("logName", event.getLogger().getName())
                        .field("logLevel", event.getLevel().toString())
                        .field("logThread", event.getThreadName())
                        .field("logMills", new Date(event.getTimeStamp()))
                        .field("logMessage", JSON.toJSONString(event.getMessage()))
                        .field("throwMessage", throwable.getMessage())
                        .field("throwDetailMessage", throwable.toString())
                        .field("throwStackTrace", JSON.toJSONString(throwable.getStackTrace()))
                        .endObject());
            } catch (IOException e) {
                errorHandler.error("Error parseLog", e, ErrorCode.GENERIC_FAILURE);
            }
        } else {
            // plain log event without a throwable
            try {
                buffers.add(jsonBuilder()
                        .startObject()
                        .field("className", locationInfo.getClassName())
                        .field("fileName", locationInfo.getFileName())
                        .field("lineNumber", locationInfo.getLineNumber())
                        .field("methodName", locationInfo.getMethodName())
                        .field("serverIp", getIp())
                        .field("logName", event.getLogger().getName())
                        .field("logLevel", event.getLevel().toString())
                        .field("logThread", event.getThreadName())
                        .field("logMills", new Date(event.getTimeStamp()))
                        .field("logMessage", JSON.toJSONString(event.getMessage()))
                        .endObject());
            } catch (IOException e) {
                errorHandler.error("Error parseLog", e, ErrorCode.GENERIC_FAILURE);
            }
        }
    }


    /**
     * Flush the buffered log documents to ES in a single bulk request.
     */
    private void flushBuffer() {
        if (!CollectionUtils.isEmpty(buffers)) {
            BulkRequestBuilder bulkRequestBuilder = getClient().prepareBulk();
            for (XContentBuilder xContentBuilder : buffers) {
                bulkRequestBuilder.add(getClient().prepareIndex(index, type).setSource(xContentBuilder));
                removes.add(xContentBuilder);
            }
            bulkRequestBuilder.get();
            buffers.removeAll(removes);
            removes.clear();
        }
    }

    @Override
    public void close() {
        flushBuffer();
        try {
            if (client != null) {
                client.close();
            }
        } catch (Exception e) {
            errorHandler.error("Error closing client", e, ErrorCode.GENERIC_FAILURE);
        }
        this.closed = true;
    }

    @Override
    public boolean requiresLayout() {
        return false;
    }

    private TransportClient getClient() {
        if (client == null) {
            try {
                System.setProperty("es.set.netty.runtime.available.processors", "false");
                Settings settings = Settings.builder().put("cluster.name", clusterName).build();
                client = new PreBuiltTransportClient(settings);
                String[] addressArr = address.split(COMMA_SEPARATOR);
                for (String address : addressArr) {
                    String[] arr = address.split(COLON_SEPARATOR);
                    client.addTransportAddresses(new TransportAddress(InetAddress.getByName(arr[0]), Integer.parseInt(arr[1])));
                }
            } catch (Exception e) {
                errorHandler.error("Error getClient", e, ErrorCode.GENERIC_FAILURE);
            }
        }
        return client;
    }

    private String getIp() throws UnknownHostException {
        try {
            InetAddress candidateAddress = null;
            // iterate over all network interfaces
            for (Enumeration ifaces = NetworkInterface.getNetworkInterfaces(); ifaces.hasMoreElements(); ) {
                NetworkInterface iface = (NetworkInterface) ifaces.nextElement();
                // iterate over all IP addresses bound to this interface
                for (Enumeration inetAddrs = iface.getInetAddresses(); inetAddrs.hasMoreElements(); ) {
                    InetAddress inetAddr = (InetAddress) inetAddrs.nextElement();
                    // skip loopback addresses
                    if (!inetAddr.isLoopbackAddress()) {
                        if (inetAddr.isSiteLocalAddress()) {
                            // a site-local address is exactly what we want
                            return inetAddr.getHostAddress();
                        } else if (candidateAddress == null) {
                            // no site-local address found yet; remember this one as a candidate
                            candidateAddress = inetAddr;
                        }
                    }
                }
            }
            if (candidateAddress != null) {
                return candidateAddress.getHostAddress();
            }
            // no non-loopback address found; fall back to the JDK-supplied local host address
            InetAddress jdkSuppliedAddress = InetAddress.getLocalHost();
            if (jdkSuppliedAddress == null) {
                throw new UnknownHostException("The JDK InetAddress.getLocalHost() method unexpectedly returned null.");
            }
            return jdkSuppliedAddress.getHostAddress();
        } catch (Exception e) {
            UnknownHostException unknownHostException = new UnknownHostException(
                    "Failed to determine LAN address: " + e);
            unknownHostException.initCause(e);
            throw unknownHostException;
        }
    }
}
Finally, let's write a small HTTP endpoint to test the whole setup.
import lombok.extern.log4j.Log4j;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

//import org.apache.log4j.Logger;

@SpringBootApplication
@RestController
@RequestMapping("/")
@Log4j
public class Log4jtestApplication {

//    private Logger logger = Logger.getLogger(Log4jtestApplication.class);


    public static void main(String[] args) {
        SpringApplication.run(Log4jtestApplication.class, args);
    }


    @RequestMapping(value = "/log")
    public String welcome() {
        try {
            Integer a = null;
            int b = 1;
            System.out.println(a + b);
        } catch (Exception e) {
            // log.error("an error log with throwable info: " + e.getMessage(), e);
            log.error("test adding a log entry without throwable information");
        }
        return "success";
    }
}
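With the application running, the endpoint can be exercised from the command line (a minimal example; 8080 is assumed to be the default Spring Boot port):
curl http://localhost:8080/log
# returns "success"; each call sends the log.error entry above to Elasticsearch through the ES appender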
After calling this endpoint, the log index shows up at host-ip:9100 (the elasticsearch-head UI) with a few documents in it.
The documents themselves can be viewed in a browser at http://host-ip:9200/index-name/type/_search.
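For example, with the index and type configured earlier (mytest and doc), a pretty-printed search looks like this:
curl http://host-ip:9200/mytest/doc/_search?pretty
# the hits contain the fields written by EsAppender, such as className, logLevel, logMessage and serverIp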