(improvement)(chat) Introduce a separate log file to record key outputs in the pipeline. (#559)

This commit is contained in:
lexluo09
2023-12-20 22:45:20 +08:00
committed by GitHub
parent 2631352c30
commit 7b580b7c94
9 changed files with 142 additions and 42 deletions
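All nine files apply one pattern: each pipeline class obtains the named logger keyPipeline via LoggerFactory.getLogger("keyPipeline") and records key inputs (prompts, requests) and outputs (model responses, generated SQL) through it, while the logback configs at the end of this commit route that logger to its own file. A minimal sketch of the pattern as the diffs below use it (the class and the callModel helper are hypothetical):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ExamplePipelineStage {

    // Named logger: resolved by the <logger name="keyPipeline"> entry in logback.xml,
    // not by this class's fully qualified name.
    private static final Logger keyPipelineLog = LoggerFactory.getLogger("keyPipeline");

    public String run(String prompt) {
        // Record the key input before the expensive call...
        keyPipelineLog.info("request prompt:{}", prompt);
        String response = callModel(prompt); // hypothetical stand-in for a real LLM call
        // ...and the key output after it, so keyPipeline.log captures the whole exchange.
        keyPipelineLog.info("model response:{}", response);
        return response;
    }

    private String callModel(String prompt) {
        return "SELECT 1"; // stub
    }
}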

View File

@@ -15,6 +15,8 @@ import dev.langchain4j.model.chat.ChatLanguageModel;
import java.util.Map;
import java.util.Objects;
import lombok.extern.slf4j.Slf4j;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
/**
@@ -24,6 +26,8 @@ import org.springframework.stereotype.Component;
@Component
public class JavaLLMProxy implements LLMProxy {
private static final Logger keyPipelineLog = LoggerFactory.getLogger("keyPipeline");
@Override
public boolean isSkip(QueryContext queryContext) {
ChatLanguageModel chatLanguageModel = ContextUtils.getBean(ChatLanguageModel.class);
@@ -53,14 +57,13 @@ public class JavaLLMProxy implements LLMProxy {
FunctionPromptGenerator promptGenerator = ContextUtils.getBean(FunctionPromptGenerator.class);
+ChatLanguageModel chatLanguageModel = ContextUtils.getBean(ChatLanguageModel.class);
String functionCallPrompt = promptGenerator.generateFunctionCallPrompt(functionReq.getQueryText(),
functionReq.getPluginConfigs());
-ChatLanguageModel chatLanguageModel = ContextUtils.getBean(ChatLanguageModel.class);
-String functionSelect = chatLanguageModel.generate(functionCallPrompt);
-return OutputFormat.functionCallParse(functionSelect);
+keyPipelineLog.info("functionCallPrompt:{}", functionCallPrompt);
+String response = chatLanguageModel.generate(functionCallPrompt);
+keyPipelineLog.info("functionCall response:{}", response);
+return OutputFormat.functionCallParse(response);
}
}

View File

@@ -14,6 +14,8 @@ import java.net.URI;
import java.net.URL;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
@@ -30,6 +32,8 @@ import org.springframework.web.util.UriComponentsBuilder;
@Component
public class PythonLLMProxy implements LLMProxy {
private static final Logger keyPipelineLog = LoggerFactory.getLogger("keyPipeline");
@Override
public boolean isSkip(QueryContext queryContext) {
LLMParserConfig llmParserConfig = ContextUtils.getBean(LLMParserConfig.class);
@@ -41,9 +45,9 @@ public class PythonLLMProxy implements LLMProxy {
}
public LLMResp query2sql(LLMReq llmReq, String modelClusterKey) {
long startTime = System.currentTimeMillis();
log.info("requestLLM request, modelId:{},llmReq:{}", modelClusterKey, llmReq);
keyPipelineLog.info("modelClusterKey:{},llmReq:{}", modelClusterKey, llmReq);
try {
LLMParserConfig llmParserConfig = ContextUtils.getBean(LLMParserConfig.class);
@@ -57,6 +61,7 @@ public class PythonLLMProxy implements LLMProxy {
log.info("requestLLM response,cost:{}, questUrl:{} \n entity:{} \n body:{}",
System.currentTimeMillis() - startTime, url, entity, responseEntity.getBody());
keyPipelineLog.info("LLMResp:{}", responseEntity.getBody());
return responseEntity.getBody();
} catch (Exception e) {
log.error("requestLLM error", e);
@@ -75,10 +80,12 @@ public class PythonLLMProxy implements LLMProxy {
RestTemplate restTemplate = ContextUtils.getBean(RestTemplate.class);
try {
log.info("requestFunction functionReq:{}", JsonUtil.toString(functionReq));
keyPipelineLog.info("requestFunction functionReq:{}", JsonUtil.toString(functionReq));
ResponseEntity<FunctionResp> responseEntity = restTemplate.exchange(requestUrl, HttpMethod.POST, entity,
FunctionResp.class);
log.info("requestFunction responseEntity:{},cost:{}", responseEntity,
System.currentTimeMillis() - startTime);
keyPipelineLog.info("response:{}", responseEntity.getBody());
return responseEntity.getBody();
} catch (Exception e) {
log.error("requestFunction error", e);

View File

@@ -17,6 +17,8 @@ import java.util.concurrent.CopyOnWriteArrayList;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.tuple.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
@@ -25,6 +27,7 @@ import org.springframework.stereotype.Service;
@Slf4j
public class OnePassSCSqlGeneration implements SqlGeneration, InitializingBean {
private static final Logger keyPipelineLog = LoggerFactory.getLogger("keyPipeline");
@Autowired
private ChatLanguageModel chatLanguageModel;
@@ -40,6 +43,7 @@ public class OnePassSCSqlGeneration implements SqlGeneration, InitializingBean {
@Override
public Map<String, Double> generation(LLMReq llmReq, String modelClusterKey) {
//1.retriever sqlExamples and generate exampleListPool
keyPipelineLog.info("modelClusterKey:{},llmReq:{}", modelClusterKey, llmReq);
List<Map<String, String>> sqlExamples = sqlExampleLoader.retrieverSqlExamples(llmReq.getQueryText(),
optimizationConfig.getText2sqlCollectionName(), optimizationConfig.getText2sqlExampleNum());
@@ -52,8 +56,11 @@ public class OnePassSCSqlGeneration implements SqlGeneration, InitializingBean {
linkingSqlPromptPool.parallelStream().forEach(linkingSqlPrompt -> {
Prompt prompt = PromptTemplate.from(JsonUtil.toString(linkingSqlPrompt))
.apply(new HashMap<>());
keyPipelineLog.info("request prompt:{}", prompt.toSystemMessage());
Response<AiMessage> response = chatLanguageModel.generate(prompt.toSystemMessage());
-llmResults.add(response.content().text());
+String result = response.content().text();
+llmResults.add(result);
+keyPipelineLog.info("model response:{}", result);
}
);
//3.format response.
@@ -64,7 +71,7 @@ public class OnePassSCSqlGeneration implements SqlGeneration, InitializingBean {
List<String> sqlList = llmResults.stream()
.map(llmResult -> OutputFormat.getSql(llmResult)).collect(Collectors.toList());
Pair<String, Map<String, Double>> sqlMap = OutputFormat.selfConsistencyVote(sqlList);
log.info("linkingMap result:{},sqlMap:{}", linkingMap, sqlMap);
keyPipelineLog.info("linkingMap:{} sqlMap:{}", linkingMap, sqlMap);
return sqlMap.getRight();
}
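OnePassSCSqlGeneration fans its prompts out with parallelStream, so the result pool and the new per-call logging run on several threads at once; that is why the class imports CopyOnWriteArrayList rather than using a plain ArrayList. A minimal sketch of the fan-out shape, with a stub standing in for chatLanguageModel.generate (all names except CopyOnWriteArrayList are hypothetical):

import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;

public class ParallelGenerationSketch {
    public static void main(String[] args) {
        List<String> prompts = List.of("prompt-1", "prompt-2", "prompt-3");
        // CopyOnWriteArrayList tolerates concurrent add() calls from parallelStream
        // workers; a plain ArrayList would not be safe here.
        List<String> llmResults = new CopyOnWriteArrayList<>();
        prompts.parallelStream().forEach(prompt -> {
            String result = generate(prompt); // stand-in for chatLanguageModel.generate(...)
            llmResults.add(result);
        });
        System.out.println(llmResults);
    }

    private static String generate(String prompt) {
        return "SELECT 1 /* " + prompt + " */"; // stub
    }
}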

View File

@@ -14,6 +14,8 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import lombok.extern.slf4j.Slf4j;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
@@ -22,6 +24,7 @@ import org.springframework.stereotype.Service;
@Slf4j
public class OnePassSqlGeneration implements SqlGeneration, InitializingBean {
private static final Logger keyPipelineLog = LoggerFactory.getLogger("keyPipeline");
@Autowired
private ChatLanguageModel chatLanguageModel;
@@ -37,6 +40,7 @@ public class OnePassSqlGeneration implements SqlGeneration, InitializingBean {
@Override
public Map<String, Double> generation(LLMReq llmReq, String modelClusterKey) {
//1.retriever sqlExamples
keyPipelineLog.info("modelClusterKey:{},llmReq:{}", modelClusterKey, llmReq);
List<Map<String, String>> sqlExamples = sqlExampleLoader.retrieverSqlExamples(llmReq.getQueryText(),
optimizationConfig.getText2sqlCollectionName(), optimizationConfig.getText2sqlExampleNum());
@@ -44,15 +48,16 @@ public class OnePassSqlGeneration implements SqlGeneration, InitializingBean {
String promptStr = sqlPromptGenerator.generatorLinkingAndSqlPrompt(llmReq, sqlExamples);
Prompt prompt = PromptTemplate.from(JsonUtil.toString(promptStr)).apply(new HashMap<>());
keyPipelineLog.info("request prompt:{}", prompt.toSystemMessage());
Response<AiMessage> response = chatLanguageModel.generate(prompt.toSystemMessage());
+String result = response.content().text();
+keyPipelineLog.info("model response:{}", result);
//3.format response.
-String llmResult = response.content().text();
String schemaLinkStr = OutputFormat.getSchemaLinks(response.content().text());
String sql = OutputFormat.getSql(response.content().text());
Map<String, Double> sqlMap = new HashMap<>();
sqlMap.put(sql, 1D);
log.info("llmResult:{},schemaLinkStr:{},sql:{}", llmResult, schemaLinkStr, sql);
keyPipelineLog.info("schemaLinkStr:{},sqlMap:{}", schemaLinkStr, sqlMap);
return sqlMap;
}

View File

@@ -14,16 +14,17 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.tuple.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
@Service
@Slf4j
public class TwoPassSCSqlGeneration implements SqlGeneration, InitializingBean {
private static final Logger keyPipelineLog = LoggerFactory.getLogger("keyPipeline");
@Autowired
private ChatLanguageModel chatLanguageModel;
@@ -39,6 +40,7 @@ public class TwoPassSCSqlGeneration implements SqlGeneration, InitializingBean {
@Override
public Map<String, Double> generation(LLMReq llmReq, String modelClusterKey) {
//1.retriever sqlExamples and generate exampleListPool
keyPipelineLog.info("modelClusterKey:{},llmReq:{}", modelClusterKey, llmReq);
List<Map<String, String>> sqlExamples = sqlExampleLoader.retrieverSqlExamples(llmReq.getQueryText(),
optimizationConfig.getText2sqlCollectionName(), optimizationConfig.getText2sqlExampleNum());
@@ -51,8 +53,10 @@ public class TwoPassSCSqlGeneration implements SqlGeneration, InitializingBean {
linkingPromptPool.parallelStream().forEach(
linkingPrompt -> {
Prompt prompt = PromptTemplate.from(JsonUtil.toString(linkingPrompt)).apply(new HashMap<>());
keyPipelineLog.info("step one request prompt:{}", prompt.toSystemMessage());
Response<AiMessage> linkingResult = chatLanguageModel.generate(prompt.toSystemMessage());
String result = linkingResult.content().text();
keyPipelineLog.info("step one model response:{}", result);
linkingResults.add(OutputFormat.getSchemaLink(result));
}
);
@@ -63,13 +67,15 @@ public class TwoPassSCSqlGeneration implements SqlGeneration, InitializingBean {
List<String> sqlTaskPool = new CopyOnWriteArrayList<>();
sqlPromptPool.parallelStream().forEach(sqlPrompt -> {
Prompt linkingPrompt = PromptTemplate.from(JsonUtil.toString(sqlPrompt)).apply(new HashMap<>());
keyPipelineLog.info("step two request prompt:{}", linkingPrompt.toSystemMessage());
Response<AiMessage> sqlResult = chatLanguageModel.generate(linkingPrompt.toSystemMessage());
String result = sqlResult.content().text();
keyPipelineLog.info("step two model response:{}", result);
sqlTaskPool.add(result);
});
//4.format response.
Pair<String, Map<String, Double>> sqlMap = OutputFormat.selfConsistencyVote(sqlTaskPool);
log.info("linkingMap result:{},sqlMap:{}", linkingMap, sqlMap);
keyPipelineLog.info("linkingMap:{} sqlMap:{}", linkingMap, sqlMap);
return sqlMap.getRight();
}
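Both self-consistency generators finish by handing the candidate pool to OutputFormat.selfConsistencyVote, whose implementation is not part of this commit; judging from the name and the Pair<String, Map<String, Double>> it returns, it scores duplicate generations against each other. A purely hypothetical sketch of such a vote, shown only to motivate the sqlMap values being logged (the real method also returns a winning candidate as the left side of the Pair):

import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class SelfConsistencyVoteSketch {
    // Hypothetical vote: score each distinct candidate by its share of the pool,
    // so identical generations reinforce each other.
    static Map<String, Double> vote(List<String> candidates) {
        Map<String, Double> scores = new HashMap<>();
        for (String sql : candidates) {
            scores.merge(sql, 1.0 / candidates.size(), Double::sum);
        }
        return scores;
    }

    public static void main(String[] args) {
        System.out.println(vote(List.of("SELECT a FROM t", "SELECT a FROM t", "SELECT b FROM t")));
        // e.g. {SELECT a FROM t=0.666..., SELECT b FROM t=0.333...} (map order may vary)
    }
}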

View File

@@ -14,6 +14,8 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import lombok.extern.slf4j.Slf4j;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
@@ -22,6 +24,7 @@ import org.springframework.stereotype.Service;
@Slf4j
public class TwoPassSqlGeneration implements SqlGeneration, InitializingBean {
private static final Logger keyPipelineLog = LoggerFactory.getLogger("keyPipeline");
@Autowired
private ChatLanguageModel chatLanguageModel;
@@ -36,23 +39,27 @@ public class TwoPassSqlGeneration implements SqlGeneration, InitializingBean {
@Override
public Map<String, Double> generation(LLMReq llmReq, String modelClusterKey) {
keyPipelineLog.info("modelClusterKey:{},llmReq:{}", modelClusterKey, llmReq);
List<Map<String, String>> sqlExamples = sqlExampleLoader.retrieverSqlExamples(llmReq.getQueryText(),
optimizationConfig.getText2sqlCollectionName(), optimizationConfig.getText2sqlExampleNum());
String linkingPromptStr = sqlPromptGenerator.generateLinkingPrompt(llmReq, sqlExamples);
Prompt prompt = PromptTemplate.from(JsonUtil.toString(linkingPromptStr)).apply(new HashMap<>());
keyPipelineLog.info("step one request prompt:{}", prompt.toSystemMessage());
Response<AiMessage> response = chatLanguageModel.generate(prompt.toSystemMessage());
keyPipelineLog.info("step one model response:{}", response.content().text());
String schemaLinkStr = OutputFormat.getSchemaLink(response.content().text());
String generateSqlPrompt = sqlPromptGenerator.generateSqlPrompt(llmReq, schemaLinkStr, sqlExamples);
Prompt sqlPrompt = PromptTemplate.from(JsonUtil.toString(generateSqlPrompt)).apply(new HashMap<>());
keyPipelineLog.info("step two request prompt:{}", sqlPrompt.toSystemMessage());
Response<AiMessage> sqlResult = chatLanguageModel.generate(sqlPrompt.toSystemMessage());
+String result = sqlResult.content().text();
+keyPipelineLog.info("step two model response:{}", result);
Map<String, Double> sqlMap = new HashMap<>();
-sqlMap.put(sqlResult.content().text(), 1D);
+sqlMap.put(result, 1D);
keyPipelineLog.info("schemaLinkStr:{},sqlMap:{}", schemaLinkStr, sqlMap);
return sqlMap;
}

View File

@@ -6,8 +6,12 @@ import com.tencent.supersonic.chat.api.pojo.QueryContext;
import com.tencent.supersonic.chat.api.pojo.SemanticParseInfo;
import com.tencent.supersonic.chat.api.pojo.request.QueryReq;
import com.tencent.supersonic.chat.api.pojo.response.ParseResp;
import com.tencent.supersonic.chat.api.pojo.response.SqlInfo;
import com.tencent.supersonic.chat.query.QueryManager;
import com.tencent.supersonic.chat.query.llm.s2sql.LLMSqlQuery;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.CollectionUtils;
import java.util.List;
import java.util.Objects;
@@ -19,6 +23,8 @@ import java.util.stream.Collectors;
**/
public class SqlInfoProcessor implements ParseResultProcessor {
private static final Logger keyPipelineLog = LoggerFactory.getLogger("keyPipeline");
@Override
public void process(ParseResp parseResp, QueryContext queryContext, ChatContext chatContext) {
QueryReq queryReq = queryContext.getRequest();
@@ -52,7 +58,12 @@ public class SqlInfoProcessor implements ParseResultProcessor {
if (StringUtils.isBlank(explainSql)) {
return;
}
-parseInfo.getSqlInfo().setQuerySQL(explainSql);
+SqlInfo sqlInfo = parseInfo.getSqlInfo();
+if (semanticQuery instanceof LLMSqlQuery) {
+keyPipelineLog.info("s2sql:{}\ncorrectS2SQL:{}\nquerySQL:{}", sqlInfo.getS2SQL(), sqlInfo.getCorrectS2SQL(),
+explainSql);
+}
+sqlInfo.setQuerySQL(explainSql);
}
}

View File

@@ -3,7 +3,7 @@
<contextName>logback</contextName>
<!-- <property name="LOG_PATH" value="${logback.logdir:-logs}"/>-->
<property name="LOG_PATH" value="${LOG_PATH:-logs}"/>
<property name="LOG_APPNAME" value="headless"/>
<property name="LOG_APPNAME" value="chat"/>
<!--输出到控制台-->
<appender name="consoleLog" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
@@ -29,7 +29,7 @@
<!--日志输出编码格式化-->
<encoder>
<charset>UTF-8</charset>
<pattern>%d [%thread] %-5level [%X{TRACE_ID}] %logger{36} %line - %msg%n</pattern>
<pattern>%d [%thread] %-5level [%X{traceId}] %logger{36} %line - %msg%n</pattern>
</encoder>
</appender>
@@ -46,7 +46,8 @@
<!--滚动策略,按照时间滚动 TimeBasedRollingPolicy-->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!--文件路径,定义了日志的切分方式——把每一天的日志归档到一个文件中,以防止日志填满整个磁盘空间-->
<FileNamePattern>${LOG_PATH}/error.${LOG_APPNAME}.%d{yyyy-MM-dd}.log.gz</FileNamePattern>
<FileNamePattern>${LOG_PATH}/error.${LOG_APPNAME}.%d{yyyy-MM-dd}.log.gz
</FileNamePattern>
<!--只保留最近90天的日志-->
<maxHistory>90</maxHistory>
<!--用来指定日志文件的上限大小,那么到了这个值,就会删除旧的日志-->
@@ -55,16 +56,10 @@
<!--日志输出编码格式化-->
<encoder>
<charset>UTF-8</charset>
<pattern>%d [%thread] %-5level [%X{TRACE_ID}] %logger{36} - %msg%n</pattern>
<pattern>%d [%thread] %-5level [%X{traceId}] %logger{36} %line - %msg%n</pattern>
</encoder>
</appender>
<root level="INFO">
<appender-ref ref="fileInfoLog"/>
<appender-ref ref="fileErrorLog"/>
<appender-ref ref="consoleLog"/>
</root>
<appender name="serviceLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!--日志名称如果没有File 属性那么只会使用FileNamePattern的文件路径规则
如果同时有<File>和<FileNamePattern>,那么当天日志是<File>,明天会自动把今天
@@ -74,7 +69,8 @@
<!--滚动策略,按照时间滚动 TimeBasedRollingPolicy-->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!--文件路径,定义了日志的切分方式——把每一天的日志归档到一个文件中,以防止日志填满整个磁盘空间-->
<FileNamePattern>${LOG_PATH}/serviceinfo.${LOG_APPNAME}.%d{yyyy-MM-dd}.log.gz</FileNamePattern>
<FileNamePattern>${LOG_PATH}/serviceinfo.${LOG_APPNAME}.%d{yyyy-MM-dd}.log.gz
</FileNamePattern>
<!--只保留最近30天的日志-->
<maxHistory>30</maxHistory>
<!--用来指定日志文件的上限大小,那么到了这个值,就会删除旧的日志-->
@@ -83,11 +79,42 @@
<!--日志输出编码格式化-->
<encoder>
<charset>UTF-8</charset>
<pattern>%d [%thread] %-5level [%X{TRACE_ID}] %logger{36} %line - %msg%n</pattern>
<pattern>%d [%thread] %-5level [%X{traceId}] %logger{36} %line - %msg%n</pattern>
</encoder>
</appender>
<logger name="com.tencent.supersonic" level="INFO" additivity="true">
<appender-ref ref="serviceLog"/>
</logger>
<!-- 业务日志输出 -->
<appender name="keyPipelineAppender" class="ch.qos.logback.core.rolling.RollingFileAppender">
<File>${LOG_PATH}/keyPipeline.log</File>
<!-- 循环政策:基于时间创建日志文件 -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- 日志文件名格式 -->
<fileNamePattern>${LOG_PATH}/keyPipeline.%d{yyyy-MM-dd}.log</fileNamePattern>
<!--只保留最近30天的日志-->
<maxHistory>30</maxHistory>
<cleanHistoryOnStart>true</cleanHistoryOnStart>
</rollingPolicy>
<encoder>
<charset>UTF-8</charset>
<pattern>%d [%thread] %-5level [%X{traceId}] %logger{36} %line - %msg%n</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<!-- 过滤的级别 -->
<level>INFO</level>
</filter>
</appender>
<!--keyPipeline相关日志-->
<logger name="keyPipeline" level="info" additivity="false">
<appender-ref ref="keyPipelineAppender"/>
</logger>
<root level="INFO">
<appender-ref ref="fileInfoLog"/>
<appender-ref ref="fileErrorLog"/>
<appender-ref ref="consoleLog"/>
</root>
</configuration>

View File

@@ -1,13 +1,13 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true">
<contextName>logback</contextName>
<!-- <property name="LOG_PATH" value="${logback.logdir:-logs}"/>-->
<!-- <property name="LOG_PATH" value="${logback.logdir:-logs}"/>-->
<property name="LOG_PATH" value="${LOG_PATH:-logs}"/>
<property name="LOG_APPNAME" value="chat"/>
<!--输出到控制台-->
<appender name="consoleLog" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d{HH:mm:ss} [%thread] %-5level %logger{36} %line - %msg%n</pattern>
<pattern>%d{HH:mm:ss} [%thread] %-5level %logger{36} %line - %msg%n</pattern>
</encoder>
</appender>
@@ -46,7 +46,8 @@
<!--滚动策略,按照时间滚动 TimeBasedRollingPolicy-->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!--文件路径,定义了日志的切分方式——把每一天的日志归档到一个文件中,以防止日志填满整个磁盘空间-->
<FileNamePattern>${LOG_PATH}/error.${LOG_APPNAME}.%d{yyyy-MM-dd}.log.gz</FileNamePattern>
<FileNamePattern>${LOG_PATH}/error.${LOG_APPNAME}.%d{yyyy-MM-dd}.log.gz
</FileNamePattern>
<!--只保留最近90天的日志-->
<maxHistory>90</maxHistory>
<!--用来指定日志文件的上限大小,那么到了这个值,就会删除旧的日志-->
@@ -59,12 +60,6 @@
</encoder>
</appender>
<root level="INFO">
<appender-ref ref="fileInfoLog"/>
<appender-ref ref="fileErrorLog"/>
<appender-ref ref="consoleLog"/>
</root>
<appender name="serviceLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!--日志名称如果没有File 属性那么只会使用FileNamePattern的文件路径规则
如果同时有<File>和<FileNamePattern>,那么当天日志是<File>,明天会自动把今天
@@ -74,7 +69,8 @@
<!--滚动策略,按照时间滚动 TimeBasedRollingPolicy-->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!--文件路径,定义了日志的切分方式——把每一天的日志归档到一个文件中,以防止日志填满整个磁盘空间-->
<FileNamePattern>${LOG_PATH}/serviceinfo.${LOG_APPNAME}.%d{yyyy-MM-dd}.log.gz</FileNamePattern>
<FileNamePattern>${LOG_PATH}/serviceinfo.${LOG_APPNAME}.%d{yyyy-MM-dd}.log.gz
</FileNamePattern>
<!--只保留最近30天的日志-->
<maxHistory>30</maxHistory>
<!--用来指定日志文件的上限大小,那么到了这个值,就会删除旧的日志-->
@@ -90,4 +86,35 @@
<logger name="com.tencent.supersonic" level="INFO" additivity="true">
<appender-ref ref="serviceLog"/>
</logger>
<!-- 业务日志输出 -->
<appender name="keyPipelineAppender" class="ch.qos.logback.core.rolling.RollingFileAppender">
<File>${LOG_PATH}/keyPipeline.log</File>
<!-- 循环政策:基于时间创建日志文件 -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- 日志文件名格式 -->
<fileNamePattern>${LOG_PATH}/keyPipeline.%d{yyyy-MM-dd}.log</fileNamePattern>
<!--只保留最近30天的日志-->
<maxHistory>30</maxHistory>
<cleanHistoryOnStart>true</cleanHistoryOnStart>
</rollingPolicy>
<encoder>
<charset>UTF-8</charset>
<pattern>%d [%thread] %-5level [%X{traceId}] %logger{36} %line - %msg%n</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<!-- 过滤的级别 -->
<level>INFO</level>
</filter>
</appender>
<!--keyPipeline相关日志-->
<logger name="keyPipeline" level="info" additivity="false">
<appender-ref ref="keyPipelineAppender"/>
</logger>
<root level="INFO">
<appender-ref ref="fileInfoLog"/>
<appender-ref ref="fileErrorLog"/>
<appender-ref ref="consoleLog"/>
</root>
</configuration>