(improvement)(headless) Optimize logging messages to make them cleaner.

jerryjzhang
2024-06-30 13:16:39 +08:00
parent 6687856d59
commit 38b4154b9e
8 changed files with 28 additions and 24 deletions

View File

@@ -137,7 +137,7 @@ public class SearchService {
     }

     public static void clear() {
-        log.info("clear all trie");
+        log.debug("clear all trie");
         trie = new BinTrie<>();
         suffixTrie = new BinTrie<>();
     }
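The recurring pattern in this commit: per-request diagnostics are demoted from INFO to DEBUG, so default production logs stay quiet while the messages remain recoverable by raising a single logger's level. A minimal SLF4J sketch of that pattern, with an illustrative class name not taken from the repository:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class TrieHolder {
        private static final Logger log = LoggerFactory.getLogger(TrieHolder.class);

        public void clear() {
            // Routine internal housekeeping: DEBUG, not INFO.
            log.debug("clear all trie");
        }
    }

With Logback, for example, <logger name="TrieHolder" level="DEBUG"/> brings the message back without a code change.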

View File

@@ -86,8 +86,6 @@ public class HanlpHelper {
      */
     public static boolean reloadCustomDictionary() throws IOException {
-        log.info("reloadCustomDictionary start");
         final long startTime = System.currentTimeMillis();
         if (HanLP.Config.CustomDictionaryPath == null || HanLP.Config.CustomDictionaryPath.length == 0) {
@@ -106,7 +104,10 @@ public class HanlpHelper {
         SearchService.clear();
         boolean reload = getDynamicCustomDictionary().reload();
-        log.info("reloadCustomDictionary end ,cost:{},reload:{}", System.currentTimeMillis() - startTime, reload);
+        if (reload) {
+            log.info("Custom dictionary has been reloaded in {} milliseconds",
+                    System.currentTimeMillis() - startTime);
+        }
         return reload;
     }
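The reload path replaces paired start/end messages with a single success message carrying the elapsed time. A sketch of that shape under stated assumptions; the BooleanSupplier stands in for the real getDynamicCustomDictionary().reload() call:

    import java.util.function.BooleanSupplier;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class TimedReload {
        private static final Logger log = LoggerFactory.getLogger(TimedReload.class);

        public boolean reload(BooleanSupplier doReload) {
            final long startTime = System.currentTimeMillis();
            boolean reloaded = doReload.getAsBoolean();
            if (reloaded) {
                // One INFO line, emitted only when the reload actually happened.
                log.info("Custom dictionary has been reloaded in {} milliseconds",
                        System.currentTimeMillis() - startTime);
            }
            return reloaded;
        }
    }

One trade-off worth noting: a failed reload (reload == false) now produces no log line at all, whereas the old "end ,cost" message fired either way.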

View File

@@ -35,7 +35,7 @@ public class LLMSqlParser implements SemanticParser {
         if (dataSetId == null) {
             return;
         }
-        log.info("Generate query statement for dataSetId:{}", dataSetId);
+        log.info("Try generating query statement for dataSetId:{}", dataSetId);
         //3.invoke LLM service to do parsing.
         tryParse(queryCtx, dataSetId);
@@ -51,11 +51,11 @@ public class LLMSqlParser implements SemanticParser {
         LLMReq llmReq = requestService.getLlmReq(queryCtx, dataSetId);
-        int currentRetry = 0;
+        int currentRetry = 1;
         Map<String, LLMSqlResp> sqlRespMap = new HashMap<>();
         ParseResult parseResult = null;
-        while (currentRetry < maxRetries) {
-            log.info("currentRetry:{},start runText2SQL", currentRetry);
+        while (currentRetry <= maxRetries) {
+            log.info("currentRetryRound:{}, start runText2SQL", currentRetry);
             try {
                 LLMResp llmResp = requestService.runText2SQL(llmReq);
                 if (Objects.nonNull(llmResp)) {
@@ -68,7 +68,7 @@ public class LLMSqlParser implements SemanticParser {
                     }
                 }
             } catch (Exception e) {
-                log.error("currentRetry:{},runText2SQL error", currentRetry, e);
+                log.error("currentRetryRound:{}, runText2SQL failed", currentRetry, e);
             }
             currentRetry++;
         }
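Moving the counter from 0-based with a strict bound to 1-based with an inclusive bound keeps the attempt count identical (maxRetries iterations either way) while making the logged round number read naturally as 1..maxRetries. A self-contained sketch; attempt() is a hypothetical stand-in for requestService.runText2SQL(llmReq):

    public class RetryDemo {
        private static int calls = 0;

        // Hypothetical workload: succeeds on the second round.
        private static boolean attempt() {
            return ++calls == 2;
        }

        public static void main(String[] args) {
            int maxRetries = 3;
            int currentRetry = 1;                    // 1-based round counter
            while (currentRetry <= maxRetries) {     // still maxRetries attempts
                System.out.printf("currentRetryRound:%d, start runText2SQL%n", currentRetry);
                if (attempt()) {
                    break;                           // success: stop retrying
                }
                currentRetry++;
            }
        }
    }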

View File

@@ -226,14 +226,14 @@ public class DataSourceNode extends SemanticNode {
         boolean isAllMatch = checkMatch(sourceMeasure, queryDimension, measures, dimension, metricCommand, scope,
                 engineType);
         if (isAllMatch) {
-            log.info("baseDataSource match all ");
+            log.debug("baseDataSource match all ");
             return dataSources;
         }
         // find all dataSource has the same identifiers
         List<DataSource> linkDataSources = getLinkDataSourcesByJoinRelation(queryDimension, measures,
                 baseDataSource, schema);
         if (CollectionUtils.isEmpty(linkDataSources)) {
-            log.info("baseDataSource get by identifiers ");
+            log.debug("baseDataSource get by identifiers ");
             Set<String> baseIdentifiers = baseDataSource.getIdentifiers().stream().map(i -> i.getName())
                     .collect(Collectors.toSet());
             if (baseIdentifiers.isEmpty()) {
@@ -279,7 +279,7 @@ public class DataSourceNode extends SemanticNode {
         dimension.retainAll(queryDimension);
         if (dimension.size() < queryDimension.size()) {
-            log.info("baseDataSource not match all dimension");
+            log.debug("baseDataSource not match all dimension");
             isAllMatch = false;
         }
         queryDimension.removeAll(dimension);

View File

@@ -75,7 +75,7 @@ public class RetrieveServiceImpl implements RetrieveService {
                 dataSetService.getModelIdToDataSetIds(new ArrayList<>(dataSetIdToName.keySet()), User.getFakeUser());
         // 2.detect by segment
         List<S2Term> originals = knowledgeBaseService.getTerms(queryText, modelIdToDataSetIds);
-        log.info("hanlp parse result: {}", originals);
+        log.debug("hanlp parse result: {}", originals);
         Set<Long> dataSetIds = queryReq.getDataSetIds();
         QueryContext queryContext = new QueryContext();
@@ -100,7 +100,7 @@ public class RetrieveServiceImpl implements RetrieveService {
             return Lists.newArrayList();
         }
         Map.Entry<MatchText, List<HanlpMapResult>> searchTextEntry = mostSimilarSearchResult.get();
-        log.info("searchTextEntry:{},queryReq:{}", searchTextEntry, queryReq);
+        log.debug("searchTextEntry:{},queryReq:{}", searchTextEntry, queryReq);
         Set<SearchResult> searchResults = new LinkedHashSet();
         DataSetInfoStat dataSetInfoStat = NatureHelper.getDataSetStat(originals);
@@ -273,7 +273,7 @@ public class RetrieveServiceImpl implements RetrieveService {
             Map.Entry<MatchText, List<HanlpMapResult>> searchTextEntry,
             Set<SearchResult> searchResults) {
         boolean existMetric = false;
-        log.info("searchMetricAndDimension searchTextEntry:{}", searchTextEntry);
+        log.debug("searchMetricAndDimension searchTextEntry:{}", searchTextEntry);
         MatchText matchText = searchTextEntry.getKey();
         List<HanlpMapResult> hanlpMapResults = searchTextEntry.getValue();
@@ -301,7 +301,7 @@ public class RetrieveServiceImpl implements RetrieveService {
                 //visibility to filter metrics
                 searchResults.add(searchResult);
             }
-            log.info("parseResult:{},dimensionMetricClassIds:{},possibleDataSets:{}", hanlpMapResult,
+            log.debug("parseResult:{},dimensionMetricClassIds:{},possibleDataSets:{}", hanlpMapResult,
                     dimensionMetricClassIds, possibleDataSets);
         }
         log.info("searchMetricAndDimension searchResults:{}", searchResults);

View File

@@ -110,7 +110,7 @@ public class S2SemanticLayerService implements SemanticLayerService {
         //2.query from cache
         QueryCache queryCache = ComponentFactory.getQueryCache();
         String cacheKey = queryCache.getCacheKey(queryReq);
-        log.info("cacheKey:{}", cacheKey);
+        log.debug("cacheKey:{}", cacheKey);
         Object query = queryCache.query(queryReq, cacheKey);
         if (Objects.nonNull(query)) {
             SemanticQueryResp queryResp = (SemanticQueryResp) query;

View File

@@ -45,11 +45,12 @@ public class MetaEmbeddingTask implements CommandLineRunner {
     private void embeddingStorePersistFile() {
         if (embeddingStoreFactory instanceof InMemoryEmbeddingStoreFactory) {
-            log.info("start persistFile");
+            long startTime = System.currentTimeMillis();
             InMemoryEmbeddingStoreFactory inMemoryFactory =
                     (InMemoryEmbeddingStoreFactory) embeddingStoreFactory;
             inMemoryFactory.persistFile();
-            log.info("end persistFile");
+            long duration = System.currentTimeMillis() - startTime;
+            log.info("Embedding file has been regularly persisted in {} milliseconds", duration);
         }
     }
@@ -63,7 +64,7 @@ public class MetaEmbeddingTask implements CommandLineRunner {
      */
     @Scheduled(cron = "${s2.reload.meta.embedding.corn:0 0 */2 * * ?}")
     public void reloadMetaEmbedding() {
-        log.info("reload.meta.embedding start");
+        long startTime = System.currentTimeMillis();
         try {
             List<DataItem> metricDataItems = metricService.getDataEvent().getDataItems();
@@ -74,10 +75,10 @@ public class MetaEmbeddingTask implements CommandLineRunner {
             embeddingService.addQuery(embeddingConfig.getMetaCollectionName(),
                     TextSegmentConvert.convertToEmbedding(dimensionDataItems));
         } catch (Exception e) {
-            log.error("reload.meta.embedding error", e);
+            log.error("Failed to reload meta embedding.", e);
         }
-        log.info("reload.meta.embedding end");
+        long duration = System.currentTimeMillis() - startTime;
+        log.info("Embedding has been regularly reloaded in {} milliseconds", duration);
     }

     @Override
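Both scheduled jobs in this file now time themselves and report a single duration line instead of start/end pairs. A minimal Spring sketch of that shape; class and method names are illustrative, and the real code reads its cron expression from the s2.reload.meta.embedding.corn property (spelled "corn" in the source):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import org.springframework.scheduling.annotation.Scheduled;
    import org.springframework.stereotype.Component;

    @Component
    public class TimedJob {
        private static final Logger log = LoggerFactory.getLogger(TimedJob.class);

        @Scheduled(cron = "0 0 */2 * * ?") // every two hours
        public void run() {
            long startTime = System.currentTimeMillis();
            try {
                doWork(); // hypothetical workload
            } catch (Exception e) {
                log.error("Failed to run scheduled job.", e);
            }
            long duration = System.currentTimeMillis() - startTime;
            log.info("Job has been regularly run in {} milliseconds", duration);
        }

        private void doWork() {
        }
    }

Note the duration is logged outside the try/catch, mirroring the diff: a failed run still reports how long it took before failing.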

View File

@@ -37,15 +37,17 @@ public class DictWordService {
     }

     public void reloadDictWord() {
+        long startTime = System.currentTimeMillis();
         List<DictWord> dictWords = getAllDictWords();
         List<DictWord> preDictWords = getPreDictWords();
         if (org.apache.commons.collections.CollectionUtils.isEqualCollection(dictWords, preDictWords)) {
             log.debug("Dictionary hasn't been reloaded.");
             return;
         }
-        log.info("Dictionary has been reloaded.");
         setPreDictWords(dictWords);
         knowledgeBaseService.updateOnlineKnowledge(getAllDictWords());
+        long duration = System.currentTimeMillis() - startTime;
+        log.info("Dictionary has been regularly reloaded in {} milliseconds", duration);
     }

     public List<DictWord> getAllDictWords() {
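The reload now short-circuits when the dictionary is unchanged, so the INFO line appears only for real reloads. A sketch of that guard using Apache Commons Collections; the class is hypothetical, but the isEqualCollection call is the same one the source uses:

    import java.util.ArrayList;
    import java.util.List;
    import org.apache.commons.collections.CollectionUtils;

    public class ReloadGuard {
        private List<String> preWords = new ArrayList<>();

        /** Returns true only if a reload was actually needed. */
        public boolean reloadIfChanged(List<String> words) {
            // Order-insensitive comparison with cardinality, as in the source.
            if (CollectionUtils.isEqualCollection(words, preWords)) {
                return false; // unchanged: skip the expensive knowledge update
            }
            preWords = new ArrayList<>(words);
            return true;
        }
    }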