(improvement)(headless) Optimize logging messages to make them cleaner.

Author: jerryjzhang
Date:   2024-06-30 13:16:39 +08:00
parent 6687856d59
commit 38b4154b9e
8 changed files with 28 additions and 24 deletions
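The hunks below share one pattern: fine-grained trace messages are downgraded from info to debug, and the recurring reload/persist jobs replace separate start/end messages with a single info line carrying the elapsed time. A minimal sketch of that timing pattern, assuming an slf4j logger (the ReloadJob class and doReload method below are illustrative, not part of the commit):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ReloadJob {

        private static final Logger log = LoggerFactory.getLogger(ReloadJob.class);

        public void reload() {
            long startTime = System.currentTimeMillis();
            doReload();
            long duration = System.currentTimeMillis() - startTime;
            // One info line per run replaces the old "start"/"end" pair.
            log.info("Reload finished in {} milliseconds", duration);
        }

        private void doReload() {
            // Step-level detail stays at debug so routine runs keep logs quiet.
            log.debug("reloading internal state");
        }
    }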

@@ -137,7 +137,7 @@ public class SearchService {
     }
 
     public static void clear() {
-        log.info("clear all trie");
+        log.debug("clear all trie");
         trie = new BinTrie<>();
         suffixTrie = new BinTrie<>();
     }

@@ -86,8 +86,6 @@ public class HanlpHelper {
      */
     public static boolean reloadCustomDictionary() throws IOException {
-        log.info("reloadCustomDictionary start");
-
         final long startTime = System.currentTimeMillis();
         if (HanLP.Config.CustomDictionaryPath == null || HanLP.Config.CustomDictionaryPath.length == 0) {
@@ -106,7 +104,10 @@ public class HanlpHelper {
         SearchService.clear();
         boolean reload = getDynamicCustomDictionary().reload();
-        log.info("reloadCustomDictionary end ,cost:{},reload:{}", System.currentTimeMillis() - startTime, reload);
+        if (reload) {
+            log.info("Custom dictionary has been reloaded in {} milliseconds",
+                    System.currentTimeMillis() - startTime);
+        }
         return reload;
     }
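Note that the rewritten method logs the duration only when reload() returns true; a failed reload produces no info line here and simply returns false to the caller.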

@@ -35,7 +35,7 @@ public class LLMSqlParser implements SemanticParser {
         if (dataSetId == null) {
             return;
         }
-        log.info("Generate query statement for dataSetId:{}", dataSetId);
+        log.info("Try generating query statement for dataSetId:{}", dataSetId);
         //3.invoke LLM service to do parsing.
         tryParse(queryCtx, dataSetId);
@@ -51,11 +51,11 @@ public class LLMSqlParser implements SemanticParser {
         LLMReq llmReq = requestService.getLlmReq(queryCtx, dataSetId);
-        int currentRetry = 0;
+        int currentRetry = 1;
         Map<String, LLMSqlResp> sqlRespMap = new HashMap<>();
         ParseResult parseResult = null;
-        while (currentRetry < maxRetries) {
-            log.info("currentRetry:{},start runText2SQL", currentRetry);
+        while (currentRetry <= maxRetries) {
+            log.info("currentRetryRound:{}, start runText2SQL", currentRetry);
             try {
                 LLMResp llmResp = requestService.runText2SQL(llmReq);
                 if (Objects.nonNull(llmResp)) {
@@ -68,7 +68,7 @@ public class LLMSqlParser implements SemanticParser {
                     }
                 }
             } catch (Exception e) {
-                log.error("currentRetry:{},runText2SQL error", currentRetry, e);
+                log.error("currentRetryRound:{}, runText2SQL failed", currentRetry, e);
             }
             currentRetry++;
         }
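Both loop bounds change together here, so the number of attempts is unchanged: a 1-based currentRetry with <= maxRetries iterates exactly as often as the old 0-based counter with < maxRetries, while the logged round numbers now start at 1.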

@@ -226,14 +226,14 @@ public class DataSourceNode extends SemanticNode {
         boolean isAllMatch = checkMatch(sourceMeasure, queryDimension, measures, dimension, metricCommand, scope,
                 engineType);
         if (isAllMatch) {
-            log.info("baseDataSource match all ");
+            log.debug("baseDataSource match all ");
             return dataSources;
         }
         // find all dataSource has the same identifiers
         List<DataSource> linkDataSources = getLinkDataSourcesByJoinRelation(queryDimension, measures,
                 baseDataSource, schema);
         if (CollectionUtils.isEmpty(linkDataSources)) {
-            log.info("baseDataSource get by identifiers ");
+            log.debug("baseDataSource get by identifiers ");
             Set<String> baseIdentifiers = baseDataSource.getIdentifiers().stream().map(i -> i.getName())
                     .collect(Collectors.toSet());
             if (baseIdentifiers.isEmpty()) {
@@ -279,7 +279,7 @@ public class DataSourceNode extends SemanticNode {
         dimension.retainAll(queryDimension);
         if (dimension.size() < queryDimension.size()) {
-            log.info("baseDataSource not match all dimension");
+            log.debug("baseDataSource not match all dimension");
             isAllMatch = false;
         }
         queryDimension.removeAll(dimension);

@@ -75,7 +75,7 @@ public class RetrieveServiceImpl implements RetrieveService {
         dataSetService.getModelIdToDataSetIds(new ArrayList<>(dataSetIdToName.keySet()), User.getFakeUser());
         // 2.detect by segment
         List<S2Term> originals = knowledgeBaseService.getTerms(queryText, modelIdToDataSetIds);
-        log.info("hanlp parse result: {}", originals);
+        log.debug("hanlp parse result: {}", originals);
         Set<Long> dataSetIds = queryReq.getDataSetIds();
         QueryContext queryContext = new QueryContext();
@@ -100,7 +100,7 @@ public class RetrieveServiceImpl implements RetrieveService {
             return Lists.newArrayList();
         }
         Map.Entry<MatchText, List<HanlpMapResult>> searchTextEntry = mostSimilarSearchResult.get();
-        log.info("searchTextEntry:{},queryReq:{}", searchTextEntry, queryReq);
+        log.debug("searchTextEntry:{},queryReq:{}", searchTextEntry, queryReq);
         Set<SearchResult> searchResults = new LinkedHashSet();
         DataSetInfoStat dataSetInfoStat = NatureHelper.getDataSetStat(originals);
@@ -273,7 +273,7 @@ public class RetrieveServiceImpl implements RetrieveService {
             Map.Entry<MatchText, List<HanlpMapResult>> searchTextEntry,
             Set<SearchResult> searchResults) {
         boolean existMetric = false;
-        log.info("searchMetricAndDimension searchTextEntry:{}", searchTextEntry);
+        log.debug("searchMetricAndDimension searchTextEntry:{}", searchTextEntry);
         MatchText matchText = searchTextEntry.getKey();
         List<HanlpMapResult> hanlpMapResults = searchTextEntry.getValue();
@@ -301,7 +301,7 @@ public class RetrieveServiceImpl implements RetrieveService {
                 //visibility to filter metrics
                 searchResults.add(searchResult);
             }
-            log.info("parseResult:{},dimensionMetricClassIds:{},possibleDataSets:{}", hanlpMapResult,
-                    dimensionMetricClassIds, possibleDataSets);
+            log.debug("parseResult:{},dimensionMetricClassIds:{},possibleDataSets:{}", hanlpMapResult,
+                    dimensionMetricClassIds, possibleDataSets);
         }
         log.info("searchMetricAndDimension searchResults:{}", searchResults);

@@ -110,7 +110,7 @@ public class S2SemanticLayerService implements SemanticLayerService {
         //2.query from cache
         QueryCache queryCache = ComponentFactory.getQueryCache();
         String cacheKey = queryCache.getCacheKey(queryReq);
-        log.info("cacheKey:{}", cacheKey);
+        log.debug("cacheKey:{}", cacheKey);
         Object query = queryCache.query(queryReq, cacheKey);
         if (Objects.nonNull(query)) {
             SemanticQueryResp queryResp = (SemanticQueryResp) query;

@@ -45,11 +45,12 @@ public class MetaEmbeddingTask implements CommandLineRunner {
     private void embeddingStorePersistFile() {
         if (embeddingStoreFactory instanceof InMemoryEmbeddingStoreFactory) {
-            log.info("start persistFile");
+            long startTime = System.currentTimeMillis();
             InMemoryEmbeddingStoreFactory inMemoryFactory =
                     (InMemoryEmbeddingStoreFactory) embeddingStoreFactory;
             inMemoryFactory.persistFile();
-            log.info("end persistFile");
+            long duration = System.currentTimeMillis() - startTime;
+            log.info("Embedding file has been regularly persisted in {} milliseconds", duration);
         }
     }
@@ -63,7 +64,7 @@ public class MetaEmbeddingTask implements CommandLineRunner {
      */
     @Scheduled(cron = "${s2.reload.meta.embedding.corn:0 0 */2 * * ?}")
     public void reloadMetaEmbedding() {
-        log.info("reload.meta.embedding start");
+        long startTime = System.currentTimeMillis();
         try {
             List<DataItem> metricDataItems = metricService.getDataEvent().getDataItems();
@@ -74,10 +75,10 @@ public class MetaEmbeddingTask implements CommandLineRunner {
             embeddingService.addQuery(embeddingConfig.getMetaCollectionName(),
                     TextSegmentConvert.convertToEmbedding(dimensionDataItems));
         } catch (Exception e) {
-            log.error("reload.meta.embedding error", e);
+            log.error("Failed to reload meta embedding.", e);
         }
-        log.info("reload.meta.embedding end");
+        long duration = System.currentTimeMillis() - startTime;
+        log.info("Embedding has been regularly reloaded in {} milliseconds", duration);
     }
 
     @Override

@@ -37,15 +37,17 @@ public class DictWordService {
     }
 
     public void reloadDictWord() {
+        long startTime = System.currentTimeMillis();
         List<DictWord> dictWords = getAllDictWords();
         List<DictWord> preDictWords = getPreDictWords();
         if (org.apache.commons.collections.CollectionUtils.isEqualCollection(dictWords, preDictWords)) {
             log.debug("Dictionary hasn't been reloaded.");
             return;
         }
-        log.info("Dictionary has been reloaded.");
         setPreDictWords(dictWords);
         knowledgeBaseService.updateOnlineKnowledge(getAllDictWords());
+        long duration = System.currentTimeMillis() - startTime;
+        log.info("Dictionary has been regularly reloaded in {} milliseconds", duration);
     }
 
     public List<DictWord> getAllDictWords() {