[improvement][headless&chat]Move `EntityInfoProcessor` from `chat` to `headless` module and optimize code.
jerryjzhang
2024-07-27 16:40:05 +08:00
parent ebbb519c07
commit e5504473a4
19 changed files with 108 additions and 134 deletions

View File

@@ -5,5 +5,4 @@ package com.tencent.supersonic.chat.server.processor;
*/
public interface ResultProcessor {
}

View File

@@ -5,7 +5,8 @@ import com.tencent.supersonic.chat.server.processor.ResultProcessor;
import com.tencent.supersonic.headless.api.pojo.response.QueryResult;
/**
* A ExecuteResultProcessor wraps things up before returning results to users in execute stage.
* An ExecuteResultProcessor wraps things up before returning
* execution results to the users.
*/
public interface ExecuteResultProcessor extends ResultProcessor {
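For orientation, a concrete execute-stage processor only has to implement the single process callback. The sketch below is illustrative and not part of this commit: the method signature is not visible in this hunk, so the ExecuteContext parameter name and its package are assumptions modeled on the parse-stage counterpart further down.

// Hypothetical example; ExecuteContext location and the process(...) signature are assumed.
package com.tencent.supersonic.chat.server.processor.execute;

import com.tencent.supersonic.chat.server.pojo.ExecuteContext;
import com.tencent.supersonic.headless.api.pojo.response.QueryResult;
import lombok.extern.slf4j.Slf4j;

@Slf4j
public class ExecuteLogProcessor implements ExecuteResultProcessor {

    @Override
    public void process(ExecuteContext executeContext, QueryResult queryResult) {
        // Runs after the query has been executed, right before the result is
        // handed back to the user; a real processor would enrich queryResult here.
        log.info("execute stage finished, result: {}", queryResult);
    }
}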

View File

@@ -1,43 +0,0 @@
package com.tencent.supersonic.chat.server.processor.parse;
import com.tencent.supersonic.chat.server.pojo.ParseContext;
import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.headless.api.pojo.DataSetSchema;
import com.tencent.supersonic.headless.api.pojo.EntityInfo;
import com.tencent.supersonic.headless.api.pojo.SemanticParseInfo;
import com.tencent.supersonic.headless.api.pojo.response.ParseResp;
import com.tencent.supersonic.headless.chat.query.QueryManager;
import com.tencent.supersonic.headless.server.facade.service.SemanticLayerService;
import org.springframework.util.CollectionUtils;
import java.util.List;
/**
* EntityInfoProcessor fills core attributes of an entity so that
* users get to know which entity is parsed out.
*/
public class EntityInfoProcessor implements ParseResultProcessor {
@Override
public void process(ParseContext parseContext, ParseResp parseResp) {
List<SemanticParseInfo> selectedParses = parseResp.getSelectedParses();
if (CollectionUtils.isEmpty(selectedParses)) {
return;
}
selectedParses.forEach(parseInfo -> {
String queryMode = parseInfo.getQueryMode();
if (QueryManager.containsRuleQuery(queryMode) || "PLAIN".equals(queryMode)) {
return;
}
//1. set entity info
SemanticLayerService semanticService = ContextUtils.getBean(SemanticLayerService.class);
DataSetSchema dataSetSchema = semanticService.getDataSetSchema(parseInfo.getDataSetId());
EntityInfo entityInfo = semanticService.getEntityInfo(parseInfo, dataSetSchema, parseContext.getUser());
if (QueryManager.isTagQuery(queryMode)
|| QueryManager.isMetricQuery(queryMode)) {
parseInfo.setEntityInfo(entityInfo);
}
});
}
}

View File

@@ -1,9 +1,14 @@
package com.tencent.supersonic.chat.server.processor.parse;
import com.tencent.supersonic.chat.server.pojo.ParseContext;
import com.tencent.supersonic.chat.server.processor.ResultProcessor;
import com.tencent.supersonic.headless.api.pojo.response.ParseResp;
public interface ParseResultProcessor {
/**
* A ParseResultProcessor wraps things up before returning
* parsing results to the users.
*/
public interface ParseResultProcessor extends ResultProcessor {
void process(ParseContext parseContext, ParseResp parseResp);
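As a point of reference, a minimal parse-stage processor could look like the hypothetical sketch below (ParseCountLogProcessor does not exist in the codebase); it relies only on the process signature above and the getSelectedParses() accessor that appears elsewhere in this diff.

// Hypothetical example, shown only to illustrate the ParseResultProcessor contract.
package com.tencent.supersonic.chat.server.processor.parse;

import com.tencent.supersonic.chat.server.pojo.ParseContext;
import com.tencent.supersonic.headless.api.pojo.response.ParseResp;
import lombok.extern.slf4j.Slf4j;
import org.springframework.util.CollectionUtils;

@Slf4j
public class ParseCountLogProcessor implements ParseResultProcessor {

    @Override
    public void process(ParseContext parseContext, ParseResp parseResp) {
        // Runs after parsing completes and before the parse result is returned to the user.
        int count = CollectionUtils.isEmpty(parseResp.getSelectedParses())
                ? 0 : parseResp.getSelectedParses().size();
        log.info("parse stage produced {} selected parse(s)", count);
    }
}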

View File

@@ -213,7 +213,7 @@ public class ChatQueryServiceImpl implements ChatQueryService {
if (Objects.nonNull(parseInfo.getSqlInfo())
&& StringUtils.isNotBlank(parseInfo.getSqlInfo().getCorrectedS2SQL())) {
String correctorSql = parseInfo.getSqlInfo().getCorrectedS2SQL();
fields = SqlSelectHelper.getAllFields(correctorSql);
fields = SqlSelectHelper.getAllSelectFields(correctorSql);
}
if (LLMSqlQuery.QUERY_MODE.equalsIgnoreCase(parseInfo.getQueryMode())
&& checkMetricReplace(fields, chatQueryDataReq.getMetrics())) {

View File

@@ -244,7 +244,7 @@ public class SqlSelectHelper {
return plainSelects;
}
public static List<String> getAllFields(String sql) {
public static List<String> getAllSelectFields(String sql) {
List<PlainSelect> plainSelects = getPlainSelects(getPlainSelect(sql));
Set<String> results = new HashSet<>();
for (PlainSelect plainSelect : plainSelects) {

View File

@@ -28,8 +28,8 @@ public class SqlValidHelper {
}
//2. all fields
List<String> thisAllFields = SqlSelectHelper.getAllFields(thisSql);
List<String> otherAllFields = SqlSelectHelper.getAllFields(otherSql);
List<String> thisAllFields = SqlSelectHelper.getAllSelectFields(thisSql);
List<String> otherAllFields = SqlSelectHelper.getAllSelectFields(otherSql);
if (!CollectionUtils.isEqualCollection(thisAllFields, otherAllFields)) {
return false;

View File

@@ -24,12 +24,12 @@ class SqlAddHelperTest {
String sql = "select 部门,sum (访问次数) from 超音数 where 数据日期 = '2023-08-08' "
+ "and 用户 =alice and 发布日期 ='11' group by 部门 limit 1";
sql = SqlAddHelper.addWhere(sql, "column_a", 123444555);
List<String> selectFields = SqlSelectHelper.getAllFields(sql);
List<String> selectFields = SqlSelectHelper.getAllSelectFields(sql);
Assert.assertEquals(selectFields.contains("column_a"), true);
sql = SqlAddHelper.addWhere(sql, "column_b", "123456666");
selectFields = SqlSelectHelper.getAllFields(sql);
selectFields = SqlSelectHelper.getAllSelectFields(sql);
Assert.assertEquals(selectFields.contains("column_b"), true);

View File

@@ -131,55 +131,55 @@ class SqlSelectHelperTest {
@Test
void testGetAllFields() {
List<String> allFields = SqlSelectHelper.getAllFields(
List<String> allFields = SqlSelectHelper.getAllSelectFields(
"SELECT department, user_id, field_a FROM s2 WHERE sys_imp_date = '2023-08-08'"
+ " AND user_id = 'alice' AND publish_date = '11' ORDER BY pv DESC LIMIT 1");
Assert.assertEquals(allFields.size(), 6);
allFields = SqlSelectHelper.getAllFields(
allFields = SqlSelectHelper.getAllSelectFields(
"SELECT department, user_id, field_a FROM s2 WHERE sys_imp_date >= '2023-08-08'"
+ " AND user_id = 'alice' AND publish_date = '11' ORDER BY pv DESC LIMIT 1");
Assert.assertEquals(allFields.size(), 6);
allFields = SqlSelectHelper.getAllFields(
allFields = SqlSelectHelper.getAllSelectFields(
"select 部门,sum (访问次数) from 超音数 where 数据日期 = '2023-08-08' and 用户 = 'alice'"
+ " and 发布日期 ='11' group by 部门 limit 1");
Assert.assertEquals(allFields.size(), 5);
allFields = SqlSelectHelper.getAllFields(
allFields = SqlSelectHelper.getAllSelectFields(
"SELECT user_name FROM 超音数 WHERE sys_imp_date <= '2023-09-03' AND "
+ "sys_imp_date >= '2023-08-04' GROUP BY user_name ORDER BY sum(pv) DESC LIMIT 10 ");
Assert.assertEquals(allFields.size(), 3);
allFields = SqlSelectHelper.getAllFields(
allFields = SqlSelectHelper.getAllSelectFields(
"SELECT user_name FROM 超音数 WHERE sys_imp_date <= '2023-09-03' AND "
+ "sys_imp_date >= '2023-08-04' GROUP BY user_name HAVING sum(pv) > 1000");
Assert.assertEquals(allFields.size(), 3);
allFields = SqlSelectHelper.getAllFields(
allFields = SqlSelectHelper.getAllSelectFields(
"SELECT department, user_id, field_a FROM s2 WHERE "
+ "(user_id = 'alice' AND publish_date = '11') and sys_imp_date "
+ "= '2023-08-08' ORDER BY pv DESC LIMIT 1");
Assert.assertEquals(allFields.size(), 6);
allFields = SqlSelectHelper.getAllFields(
allFields = SqlSelectHelper.getAllSelectFields(
"SELECT * FROM CSpider WHERE (评分 < (SELECT min(评分) FROM CSpider WHERE 语种 = '英文' ))"
+ " AND 数据日期 = '2023-10-12'");
Assert.assertEquals(allFields.size(), 3);
allFields = SqlSelectHelper.getAllFields("SELECT sum(销量) / (SELECT sum(销量) FROM 营销 "
allFields = SqlSelectHelper.getAllSelectFields("SELECT sum(销量) / (SELECT sum(销量) FROM 营销 "
+ "WHERE MONTH(数据日期) = 9) FROM 营销 WHERE 国家中文名 = '中国' AND MONTH(数据日期) = 9");
Assert.assertEquals(allFields.size(), 3);
allFields = SqlSelectHelper.getAllFields(
allFields = SqlSelectHelper.getAllSelectFields(
"SELECT 用户, 页面 FROM 超音数用户部门 GROUP BY 用户, 页面 ORDER BY count(*) DESC");
Assert.assertEquals(allFields.size(), 2);

View File

@@ -87,7 +87,7 @@ public class SemanticSchema implements Serializable {
public List<SchemaElement> getMetrics() {
List<SchemaElement> metrics = new ArrayList<>();
dataSetSchemaList.stream().forEach(d -> metrics.addAll(d.getMetrics()));
dataSetSchemaList.forEach(d -> metrics.addAll(d.getMetrics()));
return metrics;
}

View File

@@ -57,7 +57,7 @@ public class ChatQueryContext {
public List<SemanticQuery> getCandidateQueries() {
ParserConfig parserConfig = ContextUtils.getBean(ParserConfig.class);
int parseShowCount = Integer.valueOf(parserConfig.getParameterValue(ParserConfig.PARSER_SHOW_COUNT));
int parseShowCount = Integer.parseInt(parserConfig.getParameterValue(ParserConfig.PARSER_SHOW_COUNT));
candidateQueries = candidateQueries.stream()
.sorted(Comparator.comparing(semanticQuery -> semanticQuery.getParseInfo().getScore(),
Comparator.reverseOrder()))

View File

@@ -478,8 +478,7 @@ public class S2SemanticLayerService implements SemanticLayerService {
SemanticQueryResp queryResultWithColumns =
getQueryResultWithSchemaResp(entityInfo, dataSetSchema, user);
if (queryResultWithColumns != null) {
if (!org.springframework.util.CollectionUtils.isEmpty(queryResultWithColumns.getResultList())
&& queryResultWithColumns.getResultList().size() > 0) {
if (!CollectionUtils.isEmpty(queryResultWithColumns.getResultList())) {
Map<String, Object> result = queryResultWithColumns.getResultList().get(0);
for (Map.Entry<String, Object> entry : result.entrySet()) {
String entryKey = getEntryKey(entry);

View File

@@ -0,0 +1,31 @@
package com.tencent.supersonic.headless.server.processor;
import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.headless.api.pojo.DataSetSchema;
import com.tencent.supersonic.headless.api.pojo.EntityInfo;
import com.tencent.supersonic.headless.api.pojo.response.ParseResp;
import com.tencent.supersonic.headless.chat.ChatQueryContext;
import com.tencent.supersonic.headless.chat.query.QueryManager;
import com.tencent.supersonic.headless.server.facade.service.SemanticLayerService;
/**
* EntityInfoProcessor fills core attributes of an entity so that
* users get to know which entity is parsed out.
*/
public class EntityInfoProcessor implements ResultProcessor {
@Override
public void process(ParseResp parseResp, ChatQueryContext chatQueryContext) {
parseResp.getSelectedParses().forEach(parseInfo -> {
String queryMode = parseInfo.getQueryMode();
if (!QueryManager.isTagQuery(queryMode) && !QueryManager.isMetricQuery(queryMode)) {
return;
}
SemanticLayerService semanticService = ContextUtils.getBean(SemanticLayerService.class);
DataSetSchema dataSetSchema = semanticService.getDataSetSchema(parseInfo.getDataSetId());
EntityInfo entityInfo = semanticService.getEntityInfo(parseInfo, dataSetSchema, chatQueryContext.getUser());
parseInfo.setEntityInfo(entityInfo);
});
}
}

View File

@@ -8,21 +8,19 @@ import com.tencent.supersonic.common.pojo.enums.FilterOperatorEnum;
import com.tencent.supersonic.common.pojo.enums.QueryType;
import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.headless.api.pojo.DataSetSchema;
import com.tencent.supersonic.headless.api.pojo.SchemaElement;
import com.tencent.supersonic.headless.api.pojo.SemanticParseInfo;
import com.tencent.supersonic.headless.api.pojo.SemanticSchema;
import com.tencent.supersonic.headless.api.pojo.SqlInfo;
import com.tencent.supersonic.headless.api.pojo.request.QueryFilter;
import com.tencent.supersonic.headless.api.pojo.response.ParseResp;
import com.tencent.supersonic.headless.chat.ChatQueryContext;
import com.tencent.supersonic.headless.chat.query.SemanticQuery;
import com.tencent.supersonic.headless.server.service.SchemaService;
import com.tencent.supersonic.headless.server.facade.service.SemanticLayerService;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.springframework.util.CollectionUtils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
@@ -40,94 +38,79 @@ public class ParseInfoProcessor implements ResultProcessor {
@Override
public void process(ParseResp parseResp, ChatQueryContext chatQueryContext) {
List<SemanticQuery> candidateQueries = chatQueryContext.getCandidateQueries();
if (CollectionUtils.isEmpty(candidateQueries)) {
return;
}
List<SemanticParseInfo> candidateParses = candidateQueries.stream()
.map(SemanticQuery::getParseInfo).collect(Collectors.toList());
candidateParses.forEach(this::updateParseInfo);
parseResp.getSelectedParses().forEach(this::updateParseInfo);
}
public void updateParseInfo(SemanticParseInfo parseInfo) {
SqlInfo sqlInfo = parseInfo.getSqlInfo();
String correctS2SQL = sqlInfo.getCorrectedS2SQL();
if (StringUtils.isBlank(correctS2SQL)) {
String s2SQL = sqlInfo.getCorrectedS2SQL();
if (StringUtils.isBlank(s2SQL)) {
return;
}
List<FieldExpression> expressions = SqlSelectHelper.getFilterExpression(correctS2SQL);
List<FieldExpression> expressions = SqlSelectHelper.getFilterExpression(s2SQL);
//set dataInfo
//extract date filter from S2SQL
try {
if (!org.apache.commons.collections.CollectionUtils.isEmpty(expressions)) {
DateConf dateInfo = getDateInfo(expressions);
if (dateInfo != null && parseInfo.getDateInfo() == null) {
parseInfo.setDateInfo(dateInfo);
}
if (parseInfo.getDateInfo() == null && !CollectionUtils.isEmpty(expressions)) {
parseInfo.setDateInfo(extractDateFilter(expressions));
}
} catch (Exception e) {
log.error("set dateInfo error :", e);
log.error("failed to extract date range:", e);
}
if (correctS2SQL.equals(sqlInfo.getParsedS2SQL())) {
return;
}
//set filter
//extract dimension filters from S2SQL
Long dataSetId = parseInfo.getDataSetId();
SemanticLayerService semanticLayerService = ContextUtils.getBean(SemanticLayerService.class);
DataSetSchema dsSchema = semanticLayerService.getDataSetSchema(dataSetId);
try {
Map<String, SchemaElement> fieldNameToElement = getNameToElement(dataSetId);
List<QueryFilter> result = getDimensionFilter(fieldNameToElement, expressions);
parseInfo.getDimensionFilters().addAll(result);
Map<String, SchemaElement> fieldNameToElement = getNameToElement(dsSchema);
parseInfo.getDimensionFilters().addAll(extractDimensionFilter(fieldNameToElement, expressions));
} catch (Exception e) {
log.error("set dimensionFilter error :", e);
log.error("failed to extract dimension filters:", e);
}
SemanticSchema semanticSchema = ContextUtils.getBean(SchemaService.class).getSemanticSchema();
if (Objects.isNull(semanticSchema)) {
return;
}
List<String> allFields = getFieldsExceptDate(SqlSelectHelper.getAllFields(sqlInfo.getCorrectedS2SQL()));
Set<SchemaElement> metrics = getElements(dataSetId, allFields, semanticSchema.getMetrics());
//extract metrics from S2SQL
List<String> allFields = filterDateField(SqlSelectHelper.getAllSelectFields(s2SQL));
Set<SchemaElement> metrics = matchSchemaElements(allFields, dsSchema.getMetrics());
parseInfo.setMetrics(metrics);
//extract dimensions from S2SQL
if (QueryType.METRIC.equals(parseInfo.getQueryType())) {
List<String> groupByFields = SqlSelectHelper.getGroupByFields(sqlInfo.getCorrectedS2SQL());
List<String> groupByDimensions = getFieldsExceptDate(groupByFields);
parseInfo.setDimensions(getElements(dataSetId, groupByDimensions, semanticSchema.getDimensions()));
List<String> groupByFields = SqlSelectHelper.getGroupByFields(s2SQL);
List<String> groupByDimensions = filterDateField(groupByFields);
parseInfo.setDimensions(matchSchemaElements(groupByDimensions, dsSchema.getDimensions()));
} else if (QueryType.DETAIL.equals(parseInfo.getQueryType())) {
List<String> selectFields = SqlSelectHelper.getSelectFields(sqlInfo.getCorrectedS2SQL());
List<String> selectDimensions = getFieldsExceptDate(selectFields);
parseInfo.setDimensions(getElements(dataSetId, selectDimensions, semanticSchema.getDimensions()));
List<String> selectFields = SqlSelectHelper.getSelectFields(s2SQL);
List<String> selectDimensions = filterDateField(selectFields);
parseInfo.setDimensions(matchSchemaElements(selectDimensions, dsSchema.getDimensions()));
}
}
private Set<SchemaElement> getElements(Long dataSetId, List<String> allFields, List<SchemaElement> elements) {
private Set<SchemaElement> matchSchemaElements(List<String> allFields, Set<SchemaElement> elements) {
return elements.stream()
.filter(schemaElement -> {
if (CollectionUtils.isEmpty(schemaElement.getAlias())) {
return dataSetId.equals(schemaElement.getDataSet()) && allFields.contains(
schemaElement.getName());
return allFields.contains(schemaElement.getName());
}
Set<String> allFieldsSet = new HashSet<>(allFields);
Set<String> aliasSet = new HashSet<>(schemaElement.getAlias());
List<String> intersection = allFieldsSet.stream()
.filter(aliasSet::contains).collect(Collectors.toList());
return dataSetId.equals(schemaElement.getDataSet()) && (allFields.contains(
schemaElement.getName()) || !CollectionUtils.isEmpty(intersection));
return allFields.contains(schemaElement.getName())
|| !CollectionUtils.isEmpty(intersection);
}
).collect(Collectors.toSet());
}
private List<String> getFieldsExceptDate(List<String> allFields) {
if (org.springframework.util.CollectionUtils.isEmpty(allFields)) {
return new ArrayList<>();
}
private List<String> filterDateField(List<String> allFields) {
return allFields.stream()
.filter(entry -> !TimeDimensionEnum.DAY.getChName().equalsIgnoreCase(entry))
.collect(Collectors.toList());
}
private List<QueryFilter> getDimensionFilter(Map<String, SchemaElement> fieldNameToElement,
List<FieldExpression> fieldExpressions) {
private List<QueryFilter> extractDimensionFilter(Map<String, SchemaElement> fieldNameToElement,
List<FieldExpression> fieldExpressions) {
List<QueryFilter> result = Lists.newArrayList();
for (FieldExpression expression : fieldExpressions) {
QueryFilter dimensionFilter = new QueryFilter();
@@ -148,7 +131,7 @@ public class ParseInfoProcessor implements ResultProcessor {
return result;
}
private DateConf getDateInfo(List<FieldExpression> fieldExpressions) {
private DateConf extractDateFilter(List<FieldExpression> fieldExpressions) {
List<FieldExpression> dateExpressions = fieldExpressions.stream()
.filter(expression -> TimeDimensionEnum.DAY.getChName().equalsIgnoreCase(expression.getFieldName()))
.collect(Collectors.toList());
@@ -193,10 +176,9 @@ public class ParseInfoProcessor implements ResultProcessor {
return dateExpressions.size() > 1 && Objects.nonNull(dateExpressions.get(1).getFieldValue());
}
protected Map<String, SchemaElement> getNameToElement(Long dataSetId) {
SemanticSchema semanticSchema = ContextUtils.getBean(SchemaService.class).getSemanticSchema();
List<SchemaElement> dimensions = semanticSchema.getDimensions(dataSetId);
List<SchemaElement> metrics = semanticSchema.getMetrics(dataSetId);
protected Map<String, SchemaElement> getNameToElement(DataSetSchema dsSchema) {
Set<SchemaElement> dimensions = dsSchema.getDimensions();
Set<SchemaElement> metrics = dsSchema.getMetrics();
List<SchemaElement> allElements = Lists.newArrayList();
allElements.addAll(dimensions);
@@ -214,7 +196,7 @@ public class ParseInfoProcessor implements ResultProcessor {
}
return result.stream();
})
.collect(Collectors.toMap(pair -> pair.getLeft(), pair -> pair.getRight(),
.collect(Collectors.toMap(Pair::getLeft, Pair::getRight,
(value1, value2) -> value2));
}

View File

@@ -33,10 +33,10 @@ import java.util.stream.Collectors;
public class ChatWorkflowEngine {
private static final Logger keyPipelineLog = LoggerFactory.getLogger("keyPipeline");
private List<SchemaMapper> schemaMappers = ComponentFactory.getSchemaMappers();
private List<SemanticParser> semanticParsers = ComponentFactory.getSemanticParsers();
private List<SemanticCorrector> semanticCorrectors = ComponentFactory.getSemanticCorrectors();
private List<ResultProcessor> resultProcessors = ComponentFactory.getResultProcessors();
private final List<SchemaMapper> schemaMappers = ComponentFactory.getSchemaMappers();
private final List<SemanticParser> semanticParsers = ComponentFactory.getSemanticParsers();
private final List<SemanticCorrector> semanticCorrectors = ComponentFactory.getSemanticCorrectors();
private final List<ResultProcessor> resultProcessors = ComponentFactory.getResultProcessors();
public void execute(ChatQueryContext queryCtx, ParseResp parseResult) {
queryCtx.setChatWorkflowState(ChatWorkflowState.MAPPING);
@@ -44,7 +44,7 @@ public class ChatWorkflowEngine {
switch (queryCtx.getChatWorkflowState()) {
case MAPPING:
performMapping(queryCtx);
if (queryCtx.getMapInfo().getMatchedDataSetInfos().size() == 0) {
if (queryCtx.getMapInfo().getMatchedDataSetInfos().isEmpty()) {
parseResult.setState(ParseResp.ParseState.FAILED);
parseResult.setErrorMsg("No semantic entities can be mapped against user question.");
queryCtx.setChatWorkflowState(ChatWorkflowState.FINISHED);
@@ -54,7 +54,7 @@ public class ChatWorkflowEngine {
break;
case PARSING:
performParsing(queryCtx);
if (queryCtx.getCandidateQueries().size() == 0) {
if (queryCtx.getCandidateQueries().isEmpty()) {
parseResult.setState(ParseResp.ParseState.FAILED);
parseResult.setErrorMsg("No semantic queries can be parsed out.");
queryCtx.setChatWorkflowState(ChatWorkflowState.FINISHED);

View File

@@ -78,7 +78,7 @@ public class QueryReqConverter {
querySQLReq.setSql(SqlReplaceHelper.replaceAggAliasOrderItem(querySQLReq.getSql()));
log.debug("replaceOrderAggSameAlias {} -> {}", reqSql, querySQLReq.getSql());
//4.build MetricTables
List<String> allFields = SqlSelectHelper.getAllFields(querySQLReq.getSql());
List<String> allFields = SqlSelectHelper.getAllSelectFields(querySQLReq.getSql());
List<MetricSchemaResp> metricSchemas = getMetrics(semanticSchemaResp, allFields);
List<String> metrics = metricSchemas.stream().map(m -> m.getBizName()).collect(Collectors.toList());
QueryStructReq queryStructReq = new QueryStructReq();

View File

@@ -124,7 +124,7 @@ public class QueryStructUtils {
}
public Set<String> getResName(QuerySqlReq querySqlReq) {
return new HashSet<>(SqlSelectHelper.getAllFields(querySqlReq.getSql()));
return new HashSet<>(SqlSelectHelper.getAllSelectFields(querySqlReq.getSql()));
}
public Set<String> getBizNameFromSql(QuerySqlReq querySqlReq,

View File

@@ -141,7 +141,7 @@ public class StatUtils {
public void initSqlStatInfo(QuerySqlReq querySqlReq, User facadeUser) {
QueryStat queryStatInfo = new QueryStat();
List<String> aggFields = SqlSelectHelper.getAggregateFields(querySqlReq.getSql());
List<String> allFields = SqlSelectHelper.getAllFields(querySqlReq.getSql());
List<String> allFields = SqlSelectHelper.getAllSelectFields(querySqlReq.getSql());
List<String> dimensions = allFields.stream().filter(aggFields::contains).collect(Collectors.toList());
String userName = getUserName(facadeUser);

View File

@@ -47,7 +47,8 @@ com.tencent.supersonic.headless.core.cache.QueryCache=\
### headless-server SPIs
com.tencent.supersonic.headless.server.processor.ResultProcessor=\
com.tencent.supersonic.headless.server.processor.ParseInfoProcessor
com.tencent.supersonic.headless.server.processor.ParseInfoProcessor,\
com.tencent.supersonic.headless.server.processor.EntityInfoProcessor
### chat-server SPIs
@@ -66,7 +67,6 @@ com.tencent.supersonic.chat.server.plugin.recognize.PluginRecognizer=\
com.tencent.supersonic.chat.server.processor.parse.ParseResultProcessor=\
com.tencent.supersonic.chat.server.processor.parse.QueryRecommendProcessor,\
com.tencent.supersonic.chat.server.processor.parse.EntityInfoProcessor,\
com.tencent.supersonic.chat.server.processor.parse.TimeCostProcessor
com.tencent.supersonic.chat.server.processor.execute.ExecuteResultProcessor=\
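The SPI file above (shown truncated) maps each processor interface to the implementations that should be instantiated for it: this commit removes the chat-side EntityInfoProcessor entry and registers the headless one under the headless ResultProcessor key instead. Purely as an illustration of how a key=implementations file with backslash continuations can be consumed (the project's actual ComponentFactory may work differently, and the resource name below is hypothetical), a generic loader might look like this:

// Illustrative loader, not the project's ComponentFactory.
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

public class ComponentsFileLoaderDemo {

    @SuppressWarnings("unchecked")
    public static <T> List<T> load(Class<T> spi, String resource) throws Exception {
        Properties props = new Properties();
        try (InputStream in = ComponentsFileLoaderDemo.class
                .getClassLoader().getResourceAsStream(resource)) {
            if (in != null) {
                // Properties honors the trailing-backslash line continuations used in the file.
                props.load(in);
            }
        }
        List<T> instances = new ArrayList<>();
        for (String className : props.getProperty(spi.getName(), "").split(",")) {
            if (className.trim().isEmpty()) {
                continue;
            }
            // Each listed implementation is expected to have a public no-arg constructor.
            instances.add((T) Class.forName(className.trim())
                    .getDeclaredConstructor().newInstance());
        }
        return instances;
    }
}

A call such as load(ResultProcessor.class, "s2-components.properties") (hypothetical resource name) would then yield ParseInfoProcessor and EntityInfoProcessor instances for the headless ResultProcessor interface.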