The default configuration file name is logback-spring.xml. To use a different file, point Spring Boot at it with the following property:
logging.config=classpath:logback.xml
Basic configuration
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
<appender name="stdout" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d{yy-MMMM-dd HH:mm:ss:SSS} %5p %t %c{2}:%L - %m%n</pattern>
</encoder>
</appender>
<root level="INFO">
<appender-ref ref="stdout"/>
</root>
</configuration>
<logger name="cn.netkiller.controller"/>
<logger name="cn.netkiller.controller.HomeController" level="WARN" additivity="false">
<appender-ref ref="console"/>
</logger>
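The first <logger> above only names the cn.netkiller.controller package: it inherits the root level, and because additivity defaults to true its events are still handled by the root appenders. The second pins HomeController to WARN, attaches the console appender and switches additivity off, so matching events are written exactly once. A minimal sketch of the effect (the class name SomeOtherController is made up purely for illustration):
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class AdditivityDemo {
    public static void main(String[] args) {
        Logger other = LoggerFactory.getLogger("cn.netkiller.controller.SomeOtherController");
        Logger home = LoggerFactory.getLogger("cn.netkiller.controller.HomeController");

        other.info("handled by the root appenders (additivity defaults to true)");
        home.info("dropped: the HomeController logger is set to WARN");
        home.warn("written once, to the 'console' appender only (additivity=\"false\")");
    }
}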
scan: when set to true, the configuration file is reloaded whenever it changes; the default is true.
scanPeriod: the interval at which the configuration file is checked for changes; if no time unit is given, milliseconds are assumed. It only takes effect when scan is true; the default interval is 1 minute.
debug: when set to true, logback prints its internal status messages so you can watch what it is doing; the default is false.
Every logger is attached to a logger context, whose default name is "default". The context can be given a different name to distinguish the logs of different applications; once set, it can be printed with %contextName.
<contextName>logback</contextName>
The <property> element defines a variable. It has two attributes, name and value: name is the variable's name and value is its value. The definition is inserted into the logger context and can then be referenced with "${}".
<property name="log.path" value="/tmp" />
<encoder> defines how log events are formatted (see the short sketch after this list):
%d{HH:mm:ss.SSS} — the timestamp of the log event
%thread — the name of the thread that produced the event, useful in web applications and asynchronous tasks
%-5level — the log level, left-aligned and padded to 5 characters
%logger{36} — the name of the logger that emitted the event
%msg — the log message
%n — the platform line separator
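As a quick sanity check of these conversion words, here is a minimal, hypothetical logger call together with roughly what the console pattern used in the examples below would print (timestamp and thread will of course differ):
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class PatternDemo {
    private static final Logger logger = LoggerFactory.getLogger(PatternDemo.class);

    public static void main(String[] args) {
        // With the pattern %d{HH:mm:ss.SSS} %contextName [%thread] %-5level %logger{36} - %msg%n
        // this prints something like:
        // 14:05:31.123 logback [main] INFO  PatternDemo - order 42 created
        logger.info("order {} created", 42);
    }
}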
A <fileNamePattern> such as ${log.path}/logback.%d{yyyy-MM-dd}.log defines how the log is rolled: each day's output is archived into its own file. In the same way, %d{yyyy-MM-dd_HH-mm} rolls the log every minute.
<maxHistory>30</maxHistory> keeps only the last 30 days of archives, preventing the logs from filling the disk.
<totalSizeCap>1GB</totalSizeCap> caps the total size of the archived logs; once the cap is reached, the oldest archives are deleted.
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<contextName>logback</contextName>
<property name="log.path" value="target" />
<!-- Console output -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d{HH:mm:ss.SSS} %contextName [%thread] %-5level %logger{36} - %msg%n</pattern>
</encoder>
</appender>
<!-- File output -->
<appender name="file" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${log.path}/spring.%d{yyyy-MM-dd}.log</fileNamePattern>
</rollingPolicy>
<encoder>
<pattern>%d{HH:mm:ss.SSS} %contextName [%thread] %-5level %logger{36} - %msg%n</pattern>
</encoder>
</appender>
<root level="info">
<appender-ref ref="console" />
<appender-ref ref="file" />
</root>
</configuration>
Splitting log files by date
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
<include resource="org/springframework/boot/logging/logback/defaults.xml" />
<include resource="org/springframework/boot/logging/logback/file-appender.xml" />
<appender name="dailyRollingFileAppender" class="ch.qos.logback.core.rolling.RollingFileAppender">
<File>logs/spring.log</File>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- daily rollover -->
<FileNamePattern>spring.%d{yyyy-MM-dd}.log</FileNamePattern>
<!-- keep 60 days' worth of history -->
<maxHistory>60</maxHistory>
</rollingPolicy>
<encoder>
<Pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{35} - %msg %n</Pattern>
</encoder>
</appender>
<root level="INFO">
<appender-ref ref="FILE" />
<appender-ref ref="dailyRollingFileAppender" />
</root>
</configuration>
Splitting logs by level: the following configuration routes info, error, and debug output to separate files (see the note and sketch after the configuration).
<configuration scan="true" scanPeriod="10 seconds">
<!-- Console output -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d %p (%file:%line\)- %m%n</pattern>
<charset>UTF-8</charset>
</encoder>
</appender>
<!-- INFO log output -->
<appender name="INFO_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<encoder>
<pattern>%d %p (%file:%line\)- %m%n</pattern>
<charset>UTF-8</charset>
</encoder>
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>INFO</level>
</filter>
<File>${LOG_PATH}/www.netkiller.cn.info.log</File>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${LOG_PATH}/www.netkiller.cn.info-%d{yyyyMMdd}.log.%i</fileNamePattern>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>10MB</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
<maxHistory>30</maxHistory>
</rollingPolicy>
<layout class="ch.qos.logback.classic.PatternLayout">
<Pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} -%msg%n
</Pattern>
</layout>
</appender>
<!-- DEBUG log output -->
<appender name="DEBUG_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<encoder>
<pattern>%d %p (%file:%line\)- %m%n</pattern>
<charset>UTF-8</charset>
</encoder>
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>DEBUG</level>
</filter>
<File>${LOG_PATH}/www.netkiller.cn.debug.log</File>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${LOG_PATH}/www.netkiller.cn.debug-%d{yyyyMMdd}.log.%i</fileNamePattern>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>10MB</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
<maxHistory>30</maxHistory>
</rollingPolicy>
<layout class="ch.qos.logback.classic.PatternLayout">
<Pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} -%msg%n
</Pattern>
</layout>
</appender>
<!-- ERROR log output -->
<appender name="ERROR_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<encoder>
<pattern>%d %p (%file:%line\)- %m%n</pattern>
<charset>UTF-8</charset>
</encoder>
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>ERROR</level>
</filter>
<File>${LOG_PATH}/www.netkiller.cn.error.log</File>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${LOG_PATH}/www.netkiller.cn.error-%d{yyyyMMdd}.log.%i</fileNamePattern>
<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>10MB</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
<maxHistory>30</maxHistory>
</rollingPolicy>
<layout class="ch.qos.logback.classic.PatternLayout">
<Pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} -%msg%n</Pattern>
</layout>
</appender>
<root level="DEBUG">
<!--
<appender-ref ref="STDOUT" />
<appender-ref ref="INFO_FILE" />
<appender-ref ref="ERROR_FILE" />
<appender-ref ref="DEBUG_FILE" />
-->
<appender-ref ref="ERROR_FILE" />
<appender-ref ref="INFO_FILE" />
<appender-ref ref="DEBUG_FILE" />
</root>
</configuration>
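Note that ThresholdFilter is a lower bound: each appender accepts events at its configured level and above, so the three files overlap rather than each holding exactly one level. A minimal sketch of where plain SLF4J calls end up with the configuration above:
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LevelSplitDemo {
    private static final Logger logger = LoggerFactory.getLogger(LevelSplitDemo.class);

    public static void main(String[] args) {
        logger.debug("debug detail");    // debug file only
        logger.info("business event");   // debug + info files
        logger.error("something broke"); // debug + info + error files
    }
}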
Saving specific log output to a designated location
package cn.netkiller;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.Marker;
import org.slf4j.MarkerFactory;
public class Application {
public static void main(String[] args) {
final Logger logger = LoggerFactory.getLogger(Application.class);
Marker notifyAdmin = MarkerFactory.getMarker("netkiller");
logger.info("AAAAAAAAA");
logger.info(notifyAdmin, "BBBBBBBBB");
logger.error(notifyAdmin, "This is a serious error requiring the admin's attention", new Exception("Just testing"));
}
}
Only log events carrying the matching marker are accepted; with a RollingFileAppender they can be written to a dedicated file.
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
<include resource="org/springframework/boot/logging/logback/defaults.xml" />
<include resource="org/springframework/boot/logging/logback/file-appender.xml" />
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<filter class="ch.qos.logback.core.filter.EvaluatorFilter">
<evaluator class="ch.qos.logback.classic.boolex.OnMarkerEvaluator">
<marker>netkiller</marker>
</evaluator>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
<encoder>
<pattern>%date{yyyy-MM-dd HH:mm:ss} %-4relative [%thread] %-5level %logger{35} : %msg %n</pattern>
</encoder>
</appender>
<root level="INFO">
<appender-ref ref="STDOUT" />
<appender-ref ref="FILE" />
</root>
</configuration>
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
<include resource="org/springframework/boot/logging/logback/defaults.xml" />
<include resource="org/springframework/boot/logging/logback/file-appender.xml" />
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%date{yyyy-MM-dd HH:mm:ss} %-4relative [%thread] %-5level %logger{35} : %msg %n</pattern>
</encoder>
</appender>
<root level="INFO">
<appender-ref ref="STDOUT" />
<appender-ref ref="FILE" />
</root>
</configuration>
Generating one log file per userId
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
<include resource="org/springframework/boot/logging/logback/defaults.xml" />
<include resource="org/springframework/boot/logging/logback/file-appender.xml" />
<property name="log.pattern" value="%d{yyyy-MM-dd HH:mm:ss} - [%25.25(%thread)] - [%-5level] - %-30.30(%logger{30}) : %msg%n" />
<appender name="siftingAppender" class="ch.qos.logback.classic.sift.SiftingAppender">
<discriminator>
<key>userId</key>
<defaultValue>unknown</defaultValue>
</discriminator>
<sift>
<appender name="${userId}" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${log.path}/${userId}.%d{yyyy-MM-dd}.log</fileNamePattern>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
</appender>
</sift>
</appender>
<root level="INFO">
<appender-ref ref="siftingAppender" />
</root>
</configuration>
package cn.netkiller.log;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;
public class LogTest {
private static final Logger logger = LoggerFactory.getLogger(LogTest.class);
public LogTest() {
// TODO Auto-generated constructor stub
}
public static void main(String[] args) {
MDC.put("userId","0001");
logger.info("0001用户");
MDC.clear();
MDC.put("userId","0002");
logger.info("0002用户");
MDC.clear();
}
}
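Because web containers reuse threads, it is safer in request-scoped code to clear the MDC key in a finally block so the userId does not leak into the next request handled by the same thread. A minimal sketch, not part of the original example:
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;

public class MdcScopeDemo {
    private static final Logger logger = LoggerFactory.getLogger(MdcScopeDemo.class);

    public static void handle(String userId) {
        MDC.put("userId", userId);
        try {
            logger.info("processing request"); // routed to ${log.path}/<userId>.<date>.log by the SiftingAppender
        } finally {
            MDC.remove("userId"); // always clean up before the thread is reused
        }
    }
}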
Logs can also be shipped to Logstash over TCP using logstash-logback-encoder: https://github.com/logfellow/logstash-logback-encoder/
Configure Logstash with a TCP input to receive the events:
input {
tcp {
mode => "server"
host => "127.0.0.1"
port => 4567
codec => json_lines
}
}
Add the dependency to the Maven pom.xml:
<dependency>
<groupId>net.logstash.logback</groupId>
<artifactId>logstash-logback-encoder</artifactId>
<version>7.2</version>
</dependency>
Then add a logback.xml file under resources:
<?xml version="1.0" encoding="UTF-8"?> <configuration scan="true" scanPeriod="60 seconds" debug="true"> <include resource="org/springframework/boot/logging/logback/defaults.xml" /> <include resource="org/springframework/boot/logging/logback/console-appender.xml" /> <include resource="org/springframework/boot/logging/logback/file-appender.xml" /> <appender name="logstash" class="net.logstash.logback.appender.LogstashTcpSocketAppender"> <destination>127.0.0.1:4567</destination> <encoder class="net.logstash.logback.encoder.LogstashEncoder"> <providers> <timestamp /> <logLevel /> <threadName /> <loggerName /> <message /> </providers> </encoder> </appender> <root level="info"> <appender-ref ref="CONSOLE" /> <appender-ref ref="logstash" /> </root> </configuration>
[root@netkiller ~]# cat /etc/logstash/conf.d/file.conf
input {
tcp {
port => 4567
codec => json_lines
}
}
filter {
ruby {
code => "event.set('datetime', event.get('@timestamp').time.localtime.strftime('%Y-%m-%d %H:%M:%S'))"
}
}
output {
if "finance" in [tags] {
file {
path => "/opt/log/%{app}.finance.%{+yyyy}-%{+MM}-%{+dd}.log"
codec => line { format => "[%{datetime}] %{level} %{message} %{tags}"}
}
} else if "market" in [tags] {
file {
path => "/opt/log/%{app}.market.%{+yyyy}-%{+MM}-%{+dd}.log"
codec => line { format => "[%{datetime}] %{level} %{message} %{tags}"}
}
} else {
file {
path => "/opt/log/%{app}.unknow.%{+yyyy}-%{+MM}-%{+dd}.log"
codec => line { format => "[%{datetime}] %{level} %{message} %{tags}"}
}
}
}
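The finance/market conditionals above work because LogstashEncoder, with its default providers, exposes the SLF4J markers attached to a log event in the JSON "tags" field, which Logstash then matches on. A minimal sketch of producing such an event (the marker name mirrors the Logstash configuration):
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.Marker;
import org.slf4j.MarkerFactory;

public class TagDemo {
    private static final Logger logger = LoggerFactory.getLogger(TagDemo.class);

    public static void main(String[] args) {
        Marker finance = MarkerFactory.getMarker("finance");
        logger.info(finance, "payment settled"); // arrives in Logstash with tags = ["finance"]
    }
}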
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="false" scanPeriod="60 seconds" debug="false">
<include resource="org/springframework/boot/logging/logback/defaults.xml" />
<include resource="org/springframework/boot/logging/logback/console-appender.xml" />
<include resource="org/springframework/boot/logging/logback/file-appender.xml" />
<logger name="org.springframework.web" level="INFO" />
<logger name="org.springboot.sample" level="TRACE" />
<property name="log.pattern" value="%date{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{35}.%method: %msg%n" />
<springProperty scope="context" name="app" source="spring.application.name" defaultValue="spring-boot-fusion" />
<property name="log.path" value="/tmp" />
<appender name="siftingAppender" class="ch.qos.logback.classic.sift.SiftingAppender">
<discriminator>
<key>userId</key>
<defaultValue>unknown</defaultValue>
</discriminator>
<sift>
<appender name="${userId}" class="ch.qos.logback.core.rolling.RollingFileAppender">
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<fileNamePattern>${log.path}/${userId}.%d{yyyy-MM-dd}.log</fileNamePattern>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
</appender>
</sift>
</appender>
<springProfile name="prod">
<appender name="logstash" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
<destination>172.18.200.10:4567</destination>
<keepAliveDuration>5 minutes</keepAliveDuration>
<reconnectionDelay>3 second</reconnectionDelay>
<writeBufferSize>8192</writeBufferSize>
<includeCallerData>true</includeCallerData>
<encoder class="net.logstash.logback.encoder.LogstashEncoder">
<shortenedLoggerNameLength>36</shortenedLoggerNameLength>
<timestampPattern>yyyy-MM-dd HH:mm:ss.SSS</timestampPattern>
<timeZone>Asia/Shanghai</timeZone>
<fieldNames>
<timestamp>@timestamp</timestamp>
<version>@version</version>
<message>message</message>
<logger>logger_name</logger>
<!-- <thread>thread_name</thread> -->
<level>level</level>
<thread>[ignore]</thread>
<levelValue>[ignore]</levelValue>
</fieldNames>
</encoder>
<filter class="ch.qos.logback.core.filter.EvaluatorFilter">
<evaluator class="ch.qos.logback.classic.boolex.OnMarkerEvaluator">
<marker>finance</marker>
<marker>market</marker>
<marker>customer</marker>
</evaluator>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
</appender>
</springProfile>
<root level="info">
<springProfile name="dev">
<appender-ref ref="CONSOLE" />
</springProfile>
<springProfile name="test">
<appender-ref ref="CONSOLE" />
<appender-ref ref="FILE" />
</springProfile>
<springProfile name="prod">
<appender-ref ref="CONSOLE" />
<appender-ref ref="logstash" />
</springProfile>
</root>
</configuration>
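Which <springProfile> branch takes effect depends on the active Spring profile: dev logs to the console only, test adds the file appender, and prod ships to Logstash. A minimal, hypothetical way to activate the prod profile from the main class (passing --spring.profiles.active=prod or setting SPRING_PROFILES_ACTIVE works just as well):
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

@SpringBootApplication
public class DemoApplication {
    public static void main(String[] args) {
        SpringApplication app = new SpringApplication(DemoApplication.class);
        app.setAdditionalProfiles("prod"); // enables the <springProfile name="prod"> section above
        app.run(args);
    }
}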
package cn.netkiller.controller;
import java.util.concurrent.TimeUnit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;
import org.slf4j.Marker;
import org.slf4j.MarkerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import cn.netkiller.LogMarker;
import lombok.extern.slf4j.Slf4j;
@RestController
@Slf4j
public class HomeController {
private static final Logger logger = LoggerFactory.getLogger(HomeController.class);
public HomeController() {
// TODO Auto-generated constructor stub
}
@GetMapping("/")
public String index() {
Marker finance = MarkerFactory.getMarker(LogMarker.finance.toString());
Marker customer = MarkerFactory.getMarker(LogMarker.customer.toString());
Marker market = MarkerFactory.getMarker(LogMarker.market.toString());
logger.info("AAAAAAAAA");
logger.info(finance, "test");
logger.info(finance, "finance");
logger.info(customer, "customer");
logger.info(market, "market");
MDC.put("userId", "0001");
logger.info("0001用户");
MDC.clear();
MDC.put("userId", "0002");
logger.info("0002用户");
MDC.clear();
return "Hello world!!!";
}
}
To forward logs to Fluentd / Fluent Bit, add the fluent-logger and logback-more-appenders dependencies to pom.xml:
<dependency>
<groupId>org.fluentd</groupId>
<artifactId>fluent-logger</artifactId>
<version>0.3.4</version>
</dependency>
<dependency>
<groupId>com.sndyuk</groupId>
<artifactId>logback-more-appenders</artifactId>
<version>1.8.7</version>
</dependency>
Install fluent-bit:
dnf install -y fluent-bit
Start fluent-bit with a forward input and stdout output:
[root@netkiller ~]# fluent-bit -i forward -o stdout
Fluent Bit v1.9.7
* Copyright (C) 2015-2022 The Fluent Bit Authors
* Fluent Bit is a CNCF sub-project under the umbrella of Fluentd
* https://fluentbit.io
[2022/09/24 23:25:25] [ info] [fluent bit] version=1.9.7, commit=, pid=1191240
[2022/09/24 23:25:25] [ info] [storage] version=1.2.0, type=memory-only, sync=normal, checksum=disabled, max_chunks_up=128
[2022/09/24 23:25:25] [ info] [cmetrics] version=0.3.5
[2022/09/24 23:25:25] [ info] [input:forward:forward.0] listening on 0.0.0.0:24224
[2022/09/24 23:25:25] [ info] [sp] stream processor started
[2022/09/24 23:25:25] [ info] [output:stdout:stdout.0] worker #0 started
Configure a DataFluentAppender in logback.xml to forward log events to the local fluent-bit instance:
<?xml version="1.0" encoding="UTF-8"?>
<configuration debug="true">
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%date - %level - [%thread] - %logger - [%file:%line] - %msg%n</pattern>
</encoder>
</appender>
<appender name="FLUENT" class="ch.qos.logback.more.appenders.DataFluentAppender">
<tag>development</tag>
<label>normal</label>
<remoteHost>localhost</remoteHost>
<port>24224</port>
<maxQueueSize>20</maxQueueSize>
</appender>
<logger name="cn.netkiller.log" level="DEBUG"/>
<root level="DEBUG">
<appender-ref ref="STDOUT" />
<appender-ref ref="FLUENT" />
</root>
</configuration>
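With this in place, any SLF4J call is forwarded to the local fluent-bit forward input on port 24224 and appears under the development.normal tag. A minimal sketch:
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class FluentDemo {
    private static final Logger logger = LoggerFactory.getLogger("cn.netkiller.log.FluentDemo");

    public static void main(String[] args) {
        logger.debug("hello fluent-bit"); // shows up on fluent-bit's stdout as development.normal
    }
}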
fluent-bit prints the received events to its stdout:
[0] development.normal: [1664033186.000000000, {"level"=>"INFO", "logger"=>"cn.netkiller.Application", "thread"=>"main", "message"=>"Starting Application using Java 18 on MacBook-Pro-Neo.local with PID 85696 (/Users/neo/workspace/bottleneck/target/classes started by neo in /Users/neo/workspace/bottleneck)"}]
[1] development.normal: [1664033186.000000000, {"level"=>"INFO", "logger"=>"cn.netkiller.Application", "thread"=>"main", "message"=>"The following 1 profile is active: "prod""}]
[0] development.normal: [1664033187.000000000, {"level"=>"INFO", "logger"=>"org.springframework.data.repository.config.RepositoryConfigurationDelegate", "thread"=>"main", "message"=>"Multiple Spring Data modules found, entering strict repository configuration mode"}]
[1] development.normal: [1664033187.000000000, {"level"=>"INFO", "logger"=>"org.springframework.data.repository.config.RepositoryConfigurationDelegate", "thread"=>"main", "message"=>"Bootstrapping Spring Data Redis repositories in DEFAULT mode."}]
[2] development.normal: [1664033187.000000000, {"level"=>"INFO", "logger"=>"org.springframework.data.repository.config.RepositoryConfigurationDelegate", "thread"=>"main", "message"=>"Finished Spring Data repository scanning in 6 ms. Found 0 Redis repository interfaces."}]
[0] development.normal: [1664033188.000000000, {"level"=>"INFO", "logger"=>"org.springframework.boot.web.embedded.tomcat.TomcatWebServer", "thread"=>"main", "message"=>"Tomcat initialized with port(s): 8080 (http)"}]
[1] development.normal: [1664033188.000000000, {"level"=>"INFO", "logger"=>"org.apache.catalina.core.StandardService", "thread"=>"main", "message"=>"Starting service [Tomcat]"}]
[2] development.normal: [1664033188.000000000, {"level"=>"INFO", "logger"=>"org.apache.catalina.core.StandardEngine", "thread"=>"main", "message"=>"Starting Servlet engine: [Apache Tomcat/9.0.65]"}]
[3] development.normal: [1664033188.000000000, {"level"=>"INFO", "logger"=>"org.apache.catalina.core.ContainerBase.[Tomcat].[localhost].[/]", "thread"=>"main", "message"=>"Initializing Spring embedded WebApplicationContext"}]
[4] development.normal: [1664033188.000000000, {"level"=>"INFO", "logger"=>"org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext", "thread"=>"main", "message"=>"Root WebApplicationContext: initialization completed in 2133 ms"}]
[0] development.normal: [1664033189.000000000, {"level"=>"INFO", "logger"=>"org.springframework.boot.actuate.endpoint.web.EndpointLinksResolver", "thread"=>"main", "message"=>"Exposing 14 endpoint(s) beneath base path '/actuator'"}]
[0] development.normal: [1664033189.000000000, {"level"=>"INFO", "logger"=>"org.springframework.boot.web.embedded.tomcat.TomcatWebServer", "thread"=>"main", "message"=>"Tomcat started on port(s): 8080 (http) with context path ''"}]
[1] development.normal: [1664033189.000000000, {"level"=>"INFO", "logger"=>"cn.netkiller.Application", "thread"=>"main", "message"=>"Started Application in 4.224 seconds (JVM running for 4.918)"}]
Logs can also be pushed to Grafana Loki with loki-logback-appender: https://loki4j.github.io/loki-logback-appender/
<dependency>
<groupId>com.github.loki4j</groupId>
<artifactId>loki-logback-appender</artifactId>
<version>1.3.2</version>
</dependency>
<appender name="LOKI" class="com.github.loki4j.logback.Loki4jAppender">
<http>
<url>http://localhost:3100/loki/api/v1/push</url>
</http>
<format>
<label>
<pattern>app=my-app,host=${HOSTNAME},level=%level</pattern>
</label>
<message>
<pattern>l=%level h=${HOSTNAME} c=%logger{20} t=%thread | %msg %ex</pattern>
</message>
<sortByTime>true</sortByTime>
</format>
</appender>
<root level="DEBUG">
<appender-ref ref="LOKI" />
</root>
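The Loki4jAppender sends events to Loki's HTTP push API with the labels defined above (app, host, level). On the application side it is again plain SLF4J; a minimal sketch:
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LokiDemo {
    private static final Logger logger = LoggerFactory.getLogger(LokiDemo.class);

    public static void main(String[] args) {
        // Ends up in Loki under labels app=my-app, host=<hostname>, level=INFO
        logger.info("hello Loki");
    }
}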