• Spring Boot 2.7.x: integrating Log4j2 and writing logs to a custom MySQL table


    Before reading this, please first see the earlier post [Spring Boot integration with Log4j2].
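
    As a quick reminder of the setup covered there, the Maven dependencies typically look roughly like the following; the exact coordinates and the Druid version shown are assumptions on my part, so adjust them to your project (Spring Boot manages the starter versions):

    <!-- replace the default Logback starter with Log4j2; Druid and the MySQL driver are used by the connection factory shown later -->
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-web</artifactId>
        <exclusions>
            <exclusion>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-starter-logging</artifactId>
            </exclusion>
        </exclusions>
    </dependency>
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-log4j2</artifactId>
    </dependency>
    <dependency>
        <groupId>com.alibaba</groupId>
        <artifactId>druid</artifactId>
        <version>1.2.8</version>
    </dependency>
    <dependency>
        <groupId>mysql</groupId>
        <artifactId>mysql-connector-java</artifactId>
    </dependency>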

    This post does not deal with abstraction or other design refinements; it is limited to showing how to configure Log4j2 and write logs into a MySQL database with a custom table structure.

    First, let's look at the Log4j2 configuration:

    <?xml version="1.0" encoding="UTF-8"?>
    <!-- Log levels ordered by priority: OFF > FATAL > ERROR > WARN > INFO > DEBUG > TRACE > ALL -->
    <!-- The "status" attribute on configuration controls Log4j2's own internal logging; it can be omitted.
         Set it to "trace" to see Log4j2's detailed internal output. -->
    <!-- monitorInterval: Log4j2 can detect changes to this file and reconfigure itself; this is the check interval in seconds -->
    <configuration status="WARN" monitorInterval="30">
        <!-- Log output directory; this configuration writes logs to the given folder under the Tomcat (working) directory -->
        <properties>
            <property name="LOG_HOME">./logs/demo/log4j2/</property>
        </properties>
        <!-- First define all of the appenders -->
        <appenders>
            <!-- Levels from highest to lowest: OFF, FATAL, ERROR, WARN, INFO, DEBUG, ALL -->
            <!-- Filter results: DENY - the event is dropped immediately and no further filters run;
                 NEUTRAL - the next filter in the chain handles the event;
                 ACCEPT - the event is accepted immediately and the remaining filters are skipped -->
            <!-- Pattern conversion characters:
                 %d{yyyy-MM-dd HH:mm:ss,SSS} : timestamp
                 %p : log level
                 %c : logger name
                 %m : log message, i.e. the argument of logger.info("message")
                 %n : line separator
                 %C : Java class name
                 %L : line number of the logging call
                 %M : method name of the logging call
                 hostName : local host name
                 hostAddress : local IP address -->
            <!-- Console appender -->
            <console name="Console" target="SYSTEM_OUT">
                <!-- Log output pattern -->
                <PatternLayout pattern="[%d{HH:mm:ss:SSS}] - [%t] [%p] - %logger{1.} - %m%n"/>
                <!--<PatternLayout pattern="[%d{HH:mm:ss:SSS}] - (%F:%l) - %m%n"/>-->
                <!--<PatternLayout pattern="[%d{HH:mm:ss:SSS}] (%F:%L) %m%n" />-->
            </console>
            <!-- Rolling file appenders: once the current file exceeds the configured size it is rolled into a
                 compressed archive named by the filePattern, and old archives are cleaned up by the Delete action. -->
            <!-- trace.log: the date pattern in filePattern must not contain colons, since colons are not allowed
                 in file names; with the ThresholdFilter below this appender accepts trace level and above. -->
    
            <RollingFile name="RollingFileTrace" immediateFlush="true" fileName="${LOG_HOME}/trace.log"
                         filePattern="${LOG_HOME}/trace_%d{yyyy-MM-dd-HH}-%i.log.zip">
                <ThresholdFilter level="trace" onMatch="ACCEPT" onMismatch="DENY"/>
    
                <PatternLayout pattern="[%d{HH:mm:ss:SSS}] - [%t] [%p] - %logger{36} - %m%n"/>
    
                <Policies>
                    <TimeBasedTriggeringPolicy interval="1" modulate="true"/>
    
                    <SizeBasedTriggeringPolicy size="10 MB"/>
    
                </Policies>
                <!-- If DefaultRolloverStrategy is not set, the default keeps at most 7 files per rollover period (the %i counter); here it is raised to 20 -->
                <DefaultRolloverStrategy max="20">
                    <!-- The age unit must match the filePattern granularity: the pattern is precise to the hour (HH),
                         so write the age in hours (e.g. 168H); a value in days (xd) would not take effect here.
                         Also keep the value comfortably above 2, otherwise the most recent files may still be in use
                         when the delete runs and the deletion will fail. -->
    
                    <Delete basePath="${LOG_HOME}" maxDepth="2">
                        <IfFileName glob="trace_*.zip"/>
                        <!-- The retention period should use the same unit as filePattern -->
                        <!-- If filePattern were yyyy-MM-dd-HH:mm:ss, age could be as small as 5s, i.e. keep logs for 5 seconds -->
                        <IfLastModified age="168H"/>
    
                    </Delete>
    
                </DefaultRolloverStrategy>
    
            </RollingFile>
    
            <RollingFile name="RollingFileDebug" immediateFlush="true" fileName="${LOG_HOME}/debug.log"
                         filePattern="${LOG_HOME}/debug_%d{yyyy-MM-dd-HH}-%i.log.zip">
                <ThresholdFilter level="debug" onMatch="ACCEPT" onMismatch="DENY"/>
    
                <PatternLayout pattern="[%d{HH:mm:ss:SSS}] - [%t] [%p] - %logger{36} - %m%n"/>
    
                <Policies>
                    <TimeBasedTriggeringPolicy interval="1" modulate="true"/>
    
                    <SizeBasedTriggeringPolicy size="10 MB"/>
    
                </Policies>
    
                <DefaultRolloverStrategy max="20">
                    <Delete basePath="${LOG_HOME}" maxDepth="2">
                        <IfFileName glob="debug_*.zip"/>
    
                        <IfLastModified age="168H"/>
    
                    </Delete>
    
                </DefaultRolloverStrategy>
    
            </RollingFile>

            <!-- INFO log -->
            <RollingFile name="RollingFileInfo" immediateFlush="true"
                         fileName="${LOG_HOME}/info.log"
                         filePattern="${LOG_HOME}/info_%d{yyyy-MM-dd-HH}-%i.log.zip">
                <!-- Only accept events at this level and above (onMatch); deny everything else (onMismatch) -->
                <ThresholdFilter level="info" onMatch="ACCEPT" onMismatch="DENY"/>
                <PatternLayout pattern="[%d{HH:mm:ss:SSS}] - [%t] [%p] - %logger{36} - %m%n"/>
                <Policies>
                    <TimeBasedTriggeringPolicy interval="1" modulate="true"/>
    
                    <SizeBasedTriggeringPolicy size="10 MB"/>
    
                </Policies>
                <DefaultRolloverStrategy max="20">
                    <Delete basePath="${LOG_HOME}" maxDepth="2">
                        <IfFileName glob="info_*.zip"/>
    
                        <IfLastModified age="168H"/>
    
                    </Delete>
    
                </DefaultRolloverStrategy>
            </RollingFile>

            <!-- WARN log -->
            <RollingFile name="RollingFileWarn" immediateFlush="true"
                         fileName="${LOG_HOME}/warn.log"
                         filePattern="${LOG_HOME}/warn_%d{yyyy-MM-dd-HH}-%i.log.zip">
                <ThresholdFilter level="warn" onMatch="ACCEPT" onMismatch="DENY"/>
                <PatternLayout pattern="[%d{HH:mm:ss:SSS}] - [%t] [%p] - %logger{36} - %m%n"/>
                <Policies>
                    <TimeBasedTriggeringPolicy interval="1" modulate="true"/>
    
                    <SizeBasedTriggeringPolicy size="10 MB"/>
    
                </Policies>
                <DefaultRolloverStrategy max="20">
                    <Delete basePath="${LOG_HOME}" maxDepth="2">
                        <IfFileName glob="warn_*.zip"/>
    
                        <IfLastModified age="168H"/>
    
                    </Delete>
    
                </DefaultRolloverStrategy>
            </RollingFile>

            <!-- ERROR log -->
            <RollingFile name="RollingFileError" immediateFlush="true"
                         fileName="${LOG_HOME}/error.log"
                         filePattern="${LOG_HOME}/error_%d{yyyy-MM-dd-HH}-%i.log.zip">
                <ThresholdFilter level="error" onMatch="ACCEPT" onMismatch="DENY"/>
                <PatternLayout pattern="[%d{HH:mm:ss:SSS}] - [%t] [%p] - %logger{36} - %m%n"/>
                <Policies>
                    <TimeBasedTriggeringPolicy interval="1" modulate="true"/>
    
                    <SizeBasedTriggeringPolicy size="10 MB"/>
    
                </Policies>
                <DefaultRolloverStrategy max="20">
                    <Delete basePath="${LOG_HOME}" maxDepth="2">
                        <IfFileName glob="error_*.zip"/>
    
                        <IfLastModified age="168H"/>
    
                    </Delete>
    
                </DefaultRolloverStrategy>
            </RollingFile>
    
            <JDBC name="db" tableName="logs">
                <!-- The built-in DriverManager connection source has been replaced by the custom connection factory below:
                <DriverManager
                        connectionString="jdbc:mysql://localhost:3306/log4j2"
                        userName="root"
                        password="password"
                        driverClassName="com.mysql.cj.jdbc.Driver"
                />
                -->
                <ConnectionFactory
                        class="nirvana.core.logger.LogConnectionFactory" method="getConnection"/>
                <ColumnMapping name="ID" pattern="%u"/>
                <ColumnMapping name="TRACE_ID"/>
                <ColumnMapping name="DATE_TIME"/>
                <ColumnMapping name="CLASS"/>
                <ColumnMapping name="LEVEL"/>
                <ColumnMapping name="MESSAGE"/>
                <ColumnMapping name="EXCEPTION"/>
                <ColumnMapping name="IP"/>
                <MessageLayout/>
            </JDBC>
        </appenders>

        <!-- Then define the loggers; an appender only takes effect when it is referenced by a logger -->
        <loggers>
            <!-- Filter out noisy DEBUG output from Spring and MyBatis -->
            <logger name="org.springframework" level="INFO"/>
    
            <logger name="org.mybatis" level="INFO"/>
    
            <root level="info">
                <appender-ref ref="Console"/>
    
                <appender-ref ref="RollingFileDebug"/>
    
                <appender-ref ref="RollingFileTrace"/>
    
                <appender-ref ref="RollingFileInfo"/>
    
                <appender-ref ref="RollingFileWarn"/>
    
                <appender-ref ref="RollingFileError"/>
    
            </root>
            <logger name="demo.mvc" level="warn">
                <appender-ref ref="db"/>
            </logger>
    
        </loggers>
    </configuration>
    
    

    Note the following fragment:

    <ConnectionFactory class="nirvana.core.logger.LogConnectionFactory" method="getConnection"/>
    <ColumnMapping name="ID" pattern="%u"/>
    <ColumnMapping name="TRACE_ID"/>
    <ColumnMapping name="DATE_TIME"/>
    <ColumnMapping name="CLASS"/>
    <ColumnMapping name="LEVEL"/>
    <ColumnMapping name="MESSAGE"/>
    <ColumnMapping name="EXCEPTION"/>
    <ColumnMapping name="IP"/>
    

    Here we did not use the official JDBC connection configuration (the DriverManager element); instead we plugged in our own nirvana.core.logger.LogConnectionFactory.

    The code of nirvana.core.logger.LogConnectionFactory:

    package nirvana.core.logger;
    
    import com.alibaba.druid.pool.DruidDataSource;
    import org.springframework.boot.env.PropertiesPropertySourceLoader;
    import org.springframework.boot.env.PropertySourceLoader;
    import org.springframework.boot.env.YamlPropertySourceLoader;
    import org.springframework.core.env.PropertySource;
    import org.springframework.core.io.ClassPathResource;

    import javax.sql.DataSource;
    import java.io.IOException;
    import java.sql.Connection;
    import java.sql.SQLException;
    import java.util.List;
    import java.util.Objects;
    import java.util.Properties;
    
    /**
     * ConnectionFactory
     *
     * @author linkanyway
     * @version 1.0
     * @date 2022/05/26 20:05
     */
    
    public class LogConnectionFactory {
        private DataSource dataSource;
        private static final String PRE_FIX = "spring.datasource.";
    
    
        /**
         * constructor
         */
        private LogConnectionFactory() {
            // prefer application.yml; fall back to application.properties
            ClassPathResource resource = new ClassPathResource ("application.yml");
            boolean useYaml = resource.exists ();
            if (!useYaml) {
                resource = new ClassPathResource ("application.properties");
                if (!resource.exists ()) {
                    throw new RuntimeException ("no application configuration file found");
                }
            }

            // pick the loader that matches the file format
            PropertySourceLoader loader = useYaml
                    ? new YamlPropertySourceLoader ()
                    : new PropertiesPropertySourceLoader ();
            try {
                List<PropertySource<?>> list = loader.load ("log-datasource", resource);
                PropertySource<?> prop = list.get (0);
                Properties props = new Properties ();
                props.setProperty ("druid.url",
                        Objects.requireNonNull (prop.getProperty (PRE_FIX + "url")).toString ());
                props.setProperty ("druid.username",
                        Objects.requireNonNull (prop.getProperty (PRE_FIX + "username")).toString ());
                props.setProperty ("druid.password",
                        Objects.requireNonNull (prop.getProperty (PRE_FIX + "password")).toString ());
                props.setProperty ("druid.driverClassName",
                        Objects.requireNonNull (prop.getProperty (PRE_FIX + "driver-class-name")).toString ());
                this.dataSource = new DruidDataSource ();
                ((DruidDataSource) this.dataSource).configFromPropety (props);
            } catch (IOException e) {
                throw new RuntimeException (e);
            }
        }
    
        /**
         * get connection
         *
         * @return
         * @throws SQLException
         */
        public static Connection getConnection() throws SQLException {
            return Singleton.INSTANCE.dataSource.getConnection ();
        }
    
    
        /**
         * only used inner
         */
        private interface Singleton {
            LogConnectionFactory INSTANCE = new LogConnectionFactory ();
        }
    }
    
    

    This is just a rough class that reads the datasource settings from the properties or YAML file; combined with the factory declared in the configuration file, it gives Log4j2 a way to obtain MySQL connections.
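
    For reference, the factory reads the standard Spring datasource keys; a minimal application.yml would look like the snippet below (the values are placeholders matching the commented-out DriverManager settings in the configuration above):

    spring:
      datasource:
        url: jdbc:mysql://localhost:3306/log4j2
        username: root
        password: password
        driver-class-name: com.mysql.cj.jdbc.Driver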

    Create the custom table structure:

    /*
     Navicat Premium Data Transfer

     Source Server         : localhost
     Source Server Type    : MySQL
     Source Server Version : 80023
     Source Host           : localhost:3306
     Source Schema         : log4j2

     Target Server Type    : MySQL
     Target Server Version : 80023
     File Encoding         : 65001

     Date: 27/05/2022 11:35:19
    */

    SET NAMES utf8mb4;
    SET FOREIGN_KEY_CHECKS = 0;

    -- ----------------------------
    -- Table structure for logs
    -- ----------------------------
    DROP TABLE IF EXISTS `logs`;
    CREATE TABLE `logs` (
      `ID` varchar(50) NOT NULL,
      `TRACE_ID` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci DEFAULT NULL,
      `DATE_TIME` timestamp NULL DEFAULT NULL,
      `CLASS` varchar(100) DEFAULT NULL,
      `LEVEL` varchar(10) DEFAULT NULL,
      `MESSAGE` text,
      `EXCEPTION` text,
      `IP` text,
      PRIMARY KEY (`ID`)
    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

    SET FOREIGN_KEY_CHECKS = 1;
    

    This table structure corresponds to the following part of the configuration file:

    <ColumnMapping name="ID" pattern="%u"/>
    <ColumnMapping name="TRACE_ID"/>
    <ColumnMapping name="DATE_TIME"/>
    <ColumnMapping name="CLASS"/>
    <ColumnMapping name="LEVEL"/>
    <ColumnMapping name="MESSAGE"/>
    <ColumnMapping name="EXCEPTION"/>
    <ColumnMapping name="IP"/>
    

    To see how the table columns line up with the log fields, read on.

    The core caller class LoggerManager:

    package nirvana.core.logger;
    
    import nirvana.core.context.WebContext;
    import nirvana.core.utils.NetUtils;
    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;
    import org.apache.logging.log4j.message.StringMapMessage;
    import org.apache.logging.log4j.util.Strings;
    import org.jetbrains.annotations.NotNull;

    import java.io.PrintWriter;
    import java.io.StringWriter;
    import java.time.LocalDateTime;
    import java.util.UUID;
    
    /**
     * LogManager
     *
     * @author linkanyway
     * @version 1.0
     * @date 2022/05/27 11:06
     */
    public class LoggerManager {
    
    
        /**
         * todo: use normal class
         * logger
         */
        public Logger logger;
    
        /**
         * get logger
         *
         * @param loggerName
         * @return
         */
        public static LoggerManager getLogger(String loggerName) {
            return new LoggerManager (loggerName);
        }
    
        private LoggerManager(String loggerName) {
            this.logger = LogManager.getLogger (loggerName);
        }
    
        /**
         * error
         *
         * @param message error message
         * @param ex      exception instance
         */
        public void error(String message, @NotNull Exception ex) {
    
            StringMapMessage msg = generate (message, "error", ex);
            logger.error (msg);
        }
    
        /**
         * info
         *
         * @param message info message
         */
        public void info(String message) {
            StringMapMessage msg = generate (message, "info");
            logger.info (msg);
        }
    
        /**
         * trace
         *
         * @param message trace message
         */
        public void trace(String message) {
            StringMapMessage msg = generate (message, "trace");
            logger.trace (msg);
        }
    
    
        /**
         * debug
         *
         * @param message debug message
         */
        public void debug(String message) {
            StringMapMessage msg = generate (message, "debug");
            logger.debug (msg);
        }
    
        /**
         * warn
         *
         * @param message warn message
         */
        public void warn(String message) {
            StringMapMessage msg = generate (message, "warn");
            logger.warn (msg);
        }
    
        /**
         * generate map message
         *
         * @param message message content
         * @param level   level
         * @return StringMapMessage instance
         */
        private StringMapMessage generate(String message, String level) {
            return generate (message, level, null);
    
        }
    
    
        /**
         * get caller information
         *
         * @return caller class name
         */
        private String getCallerName() {
            StackTraceElement[] traces = Thread.currentThread ().getStackTrace ();
            // skip frames inside this logger's package and return the first external caller
            for (int i = 1; i < traces.length; i++) {
                StackTraceElement element = traces[i];
                if (!element.getClassName ().startsWith (this.getClass ().getPackageName ())) {
                    return element.getClassName ();
                }
            }
            return Strings.EMPTY;
        }
    
    
        /**
         * generate message
         *
         * @param message message content
         * @param level   level
         * @param ex      exception
         * @return StringMapMessage instance
         */
        private StringMapMessage generate(String message, String level, Exception ex) {
            String sourceClassName = getCallerName ();
            StringMapMessage msg = new StringMapMessage ();
            if (ex != null) {
                // render the stack trace into a string; fall back to toString() so logging itself never throws
                try {
                    StringWriter sw = new StringWriter ();
                    PrintWriter pw = new PrintWriter (sw, true);
                    ex.printStackTrace (pw);
                    msg.put ("EXCEPTION", sw.getBuffer ().toString ());
                } catch (Exception e) {
                    msg.put ("EXCEPTION", ex.toString ());
                }
            } else {
                msg.put ("EXCEPTION", "");
            }
    
            msg.put ("LEVEL", level);
            msg.put ("ID", UUID.randomUUID ().toString ());
            msg.put ("TRACE_ID", WebContext.getRequestId ());
            msg.put ("DATE_TIME", LocalDateTime.now ().toString ());
            msg.put ("CLASS", sourceClassName);
            msg.put ("MESSAGE", message);
            msg.put ("IP", NetUtils.getRemoteIp ());
            return msg;
        }
    
    }
    
    

    Note this part of the code:

    
            msg.put ("LEVEL", level);
            msg.put ("ID", UUID.randomUUID ().toString ());
            msg.put ("TRACE_ID", WebContext.getRequestId ());
            msg.put ("DATE_TIME", LocalDateTime.now ().toString ());
            msg.put ("CLASS", sourceClassName);
            msg.put ("MESSAGE", message);
            msg.put ("IP", NetUtils.getRemoteIp ());
    

    In fact, we simply use StringMapMessage to carry a custom message structure: because the JDBC appender declares <MessageLayout/>, each <ColumnMapping> without a pattern is filled from the map entry whose key matches the column name.
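
    As a usage sketch (the class and method below are purely illustrative), code in the demo.mvc package obtains a LoggerManager and logs through it; because the logger name falls under the <logger name="demo.mvc" level="warn"> entry in the configuration, warn and error messages are routed to the "db" JDBC appender and end up in the logs table:

    package demo.mvc;

    import nirvana.core.logger.LoggerManager;

    public class OrderService {

        // the logger name starts with "demo.mvc", so it inherits the db appender configured above
        private static final LoggerManager LOG = LoggerManager.getLogger ("demo.mvc.OrderService");

        public void createOrder(String orderId) {
            LOG.warn ("creating order " + orderId);                  // >= warn, so it is written to MySQL
            try {
                // ... business logic ...
            } catch (Exception ex) {
                LOG.error ("failed to create order " + orderId, ex); // stack trace goes into the EXCEPTION column
            }
        }
    }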

    The following code obtains the calling class: it simply walks the current stack trace and returns the first class whose package differs from that of LoggerManager. In a real project you will likely want your own rules for picking the caller out of the stack; a StackWalker-based alternative is sketched after the snippet.

        /**
         * get caller information
         *
         * @return caller class name
         */
        private String getCallerName() {
            StackTraceElement[] traces = Thread.currentThread ().getStackTrace ();
            // skip frames inside this logger's package and return the first external caller
            for (int i = 1; i < traces.length; i++) {
                StackTraceElement element = traces[i];
                if (!element.getClassName ().startsWith (this.getClass ().getPackageName ())) {
                    return element.getClassName ();
                }
            }
            return Strings.EMPTY;
        }
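
    If you are on Java 9 or later, the same idea can be expressed with java.lang.StackWalker, which avoids materialising the full stack-trace array. The method below is only a sketch of a drop-in replacement for getCallerName() above; it relies on the class's own package name and the Strings.EMPTY constant already used in LoggerManager:

        private String getCallerName() {
            String ownPackage = this.getClass ().getPackageName ();
            // walk the live stack and return the first frame outside this logger's package
            return StackWalker.getInstance ()
                    .walk (frames -> frames
                            .map (StackWalker.StackFrame::getClassName)
                            .filter (name -> !name.startsWith (ownPackage))
                            .findFirst ())
                    .orElse (Strings.EMPTY);
        }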
    
  • Original post: https://www.cnblogs.com/linkanyway/p/log4j2-customized-db-structure.html