[improvement][headless] Merge the functionality of the QueryConverter abstraction into QueryParser.

This commit is contained in:
jerryjzhang
2024-12-19 21:41:47 +08:00
parent 8b69d57c4b
commit 4cb2256351
39 changed files with 440 additions and 500 deletions

View File

@@ -65,7 +65,7 @@ public class LLMSqlCorrector extends BaseSemanticCorrector {
return; return;
} }
Text2SQLExemplar exemplar = (Text2SQLExemplar)semanticParseInfo.getProperties() Text2SQLExemplar exemplar = (Text2SQLExemplar) semanticParseInfo.getProperties()
.get(Text2SQLExemplar.PROPERTY_KEY); .get(Text2SQLExemplar.PROPERTY_KEY);
ChatLanguageModel chatLanguageModel = ChatLanguageModel chatLanguageModel =

View File

@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql; package com.tencent.supersonic.headless.core.pojo;
import lombok.Builder; import lombok.Builder;
import lombok.Data; import lombok.Data;

View File

@@ -1,6 +1,9 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql; package com.tencent.supersonic.headless.core.pojo;
import com.tencent.supersonic.headless.core.pojo.Database; import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Materialization;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
import lombok.Data; import lombok.Data;
import java.util.ArrayList; import java.util.ArrayList;

View File

@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql; package com.tencent.supersonic.headless.core.pojo;
import com.google.common.collect.Sets; import com.google.common.collect.Sets;
import com.tencent.supersonic.common.pojo.ColumnOrder; import com.tencent.supersonic.common.pojo.ColumnOrder;
@@ -9,7 +9,7 @@ import java.util.List;
import java.util.Set; import java.util.Set;
@Data @Data
public class OntologyQueryParam { public class OntologyQuery {
private Set<String> metrics = Sets.newHashSet(); private Set<String> metrics = Sets.newHashSet();
private Set<String> dimensions = Sets.newHashSet(); private Set<String> dimensions = Sets.newHashSet();
private String where; private String where;

View File

@@ -1,8 +1,6 @@
package com.tencent.supersonic.headless.core.pojo; package com.tencent.supersonic.headless.core.pojo;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp; import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Ontology;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
import lombok.Data; import lombok.Data;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Triple; import org.apache.commons.lang3.tuple.Triple;
@@ -13,15 +11,15 @@ public class QueryStatement {
private Long dataSetId; private Long dataSetId;
private String sql; private String sql;
private String errMsg; private String errMsg;
private StructQueryParam structQueryParam; private StructQuery structQuery;
private SqlQueryParam sqlQueryParam; private SqlQuery sqlQuery;
private OntologyQueryParam ontologyQueryParam; private OntologyQuery ontologyQuery;
private Integer status = 0; private Integer status = 0;
private Boolean isS2SQL = false; private Boolean isS2SQL = false;
private Boolean enableOptimize = true; private Boolean enableOptimize = true;
private Triple<String, String, String> minMaxTime; private Triple<String, String, String> minMaxTime;
private Ontology ontology; private Ontology ontology;
private SemanticSchemaResp semanticSchemaResp; private SemanticSchemaResp semanticSchema;
private Integer limit = 1000; private Integer limit = 1000;
private Boolean isTranslated = false; private Boolean isTranslated = false;

View File

@@ -3,7 +3,7 @@ package com.tencent.supersonic.headless.core.pojo;
import lombok.Data; import lombok.Data;
@Data @Data
public class SqlQueryParam { public class SqlQuery {
private String sql; private String sql;
private String table; private String table;
private boolean supportWith = true; private boolean supportWith = true;

View File

@@ -12,7 +12,7 @@ import java.util.ArrayList;
import java.util.List; import java.util.List;
@Data @Data
public class StructQueryParam { public class StructQuery {
private List<String> groups = new ArrayList(); private List<String> groups = new ArrayList();
private List<Aggregator> aggregators = new ArrayList(); private List<Aggregator> aggregators = new ArrayList();
private List<Order> orders = new ArrayList(); private List<Order> orders = new ArrayList();

View File

@@ -3,10 +3,10 @@ package com.tencent.supersonic.headless.core.translator;
import com.tencent.supersonic.common.calcite.SqlMergeWithUtils; import com.tencent.supersonic.common.calcite.SqlMergeWithUtils;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQueryParam; import com.tencent.supersonic.headless.core.pojo.SqlQuery;
import com.tencent.supersonic.headless.core.translator.converter.QueryConverter;
import com.tencent.supersonic.headless.core.translator.optimizer.QueryOptimizer; import com.tencent.supersonic.headless.core.translator.optimizer.QueryOptimizer;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam; import com.tencent.supersonic.headless.core.translator.parser.QueryParser;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.utils.ComponentFactory; import com.tencent.supersonic.headless.core.utils.ComponentFactory;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
@@ -26,23 +26,30 @@ public class DefaultSemanticTranslator implements SemanticTranslator {
return; return;
} }
try { try {
for (QueryConverter converter : ComponentFactory.getQueryConverters()) { for (QueryParser parser : ComponentFactory.getQueryParser()) {
if (converter.accept(queryStatement)) { if (parser.accept(queryStatement)) {
log.debug("QueryConverter accept [{}]", converter.getClass().getName()); log.debug("QueryConverter accept [{}]", parser.getClass().getName());
converter.convert(queryStatement); parser.parse(queryStatement);
} }
} }
doOntologyParse(queryStatement); if (!queryStatement.isOk()) {
throw new Exception(String.format("parse ontology table [%s] error [%s]",
queryStatement.getSqlQuery().getTable(), queryStatement.getErrMsg()));
}
if (StringUtils.isNotBlank(queryStatement.getSqlQueryParam().getSimplifiedSql())) { mergeOntologyQuery(queryStatement);
queryStatement.setSql(queryStatement.getSqlQueryParam().getSimplifiedSql());
if (StringUtils.isNotBlank(queryStatement.getSqlQuery().getSimplifiedSql())) {
queryStatement.setSql(queryStatement.getSqlQuery().getSimplifiedSql());
} }
if (StringUtils.isBlank(queryStatement.getSql())) { if (StringUtils.isBlank(queryStatement.getSql())) {
throw new RuntimeException("parse exception: " + queryStatement.getErrMsg()); throw new RuntimeException("parse exception: " + queryStatement.getErrMsg());
} }
for (QueryOptimizer queryOptimizer : ComponentFactory.getQueryOptimizers()) { for (QueryOptimizer optimizer : ComponentFactory.getQueryOptimizers()) {
queryOptimizer.rewrite(queryStatement); if (optimizer.accept(queryStatement)) {
optimizer.rewrite(queryStatement);
}
} }
log.info("translated query SQL: [{}]", log.info("translated query SQL: [{}]",
StringUtils.normalizeSpace(queryStatement.getSql())); StringUtils.normalizeSpace(queryStatement.getSql()));
@@ -52,23 +59,18 @@ public class DefaultSemanticTranslator implements SemanticTranslator {
} }
} }
private void doOntologyParse(QueryStatement queryStatement) throws Exception { private void mergeOntologyQuery(QueryStatement queryStatement) throws Exception {
OntologyQueryParam ontologyQueryParam = queryStatement.getOntologyQueryParam(); OntologyQuery ontologyQuery = queryStatement.getOntologyQuery();
log.info("parse with ontology: [{}]", ontologyQueryParam); log.info("parse with ontology: [{}]", ontologyQuery);
ComponentFactory.getQueryParser().parse(queryStatement);
if (!queryStatement.isOk()) {
throw new Exception(String.format("parse ontology table [%s] error [%s]",
queryStatement.getSqlQueryParam().getTable(), queryStatement.getErrMsg()));
}
SqlQueryParam sqlQueryParam = queryStatement.getSqlQueryParam(); SqlQuery sqlQuery = queryStatement.getSqlQuery();
String ontologyQuerySql = sqlQueryParam.getSql(); String ontologyQuerySql = sqlQuery.getSql();
String ontologyInnerTable = sqlQueryParam.getTable(); String ontologyInnerTable = sqlQuery.getTable();
String ontologyInnerSql = queryStatement.getSql(); String ontologyInnerSql = queryStatement.getSql();
List<Pair<String, String>> tables = new ArrayList<>(); List<Pair<String, String>> tables = new ArrayList<>();
tables.add(Pair.of(ontologyInnerTable, ontologyInnerSql)); tables.add(Pair.of(ontologyInnerTable, ontologyInnerSql));
if (sqlQueryParam.isSupportWith()) { if (sqlQuery.isSupportWith()) {
EngineType engineType = queryStatement.getOntology().getDatabase().getType(); EngineType engineType = queryStatement.getOntology().getDatabase().getType();
if (!SqlMergeWithUtils.hasWith(engineType, ontologyQuerySql)) { if (!SqlMergeWithUtils.hasWith(engineType, ontologyQuerySql)) {
String withSql = "with " + tables.stream() String withSql = "with " + tables.stream()
@@ -86,9 +88,9 @@ public class DefaultSemanticTranslator implements SemanticTranslator {
} }
} else { } else {
for (Pair<String, String> tb : tables) { for (Pair<String, String> tb : tables) {
ontologyQuerySql = ontologyQuerySql = StringUtils.replace(ontologyQuerySql, tb.getLeft(),
StringUtils.replace(ontologyQuerySql, tb.getLeft(), "(" + tb.getRight() "(" + tb.getRight() + ") " + (sqlQuery.isWithAlias() ? "" : tb.getLeft()),
+ ") " + (sqlQueryParam.isWithAlias() ? "" : tb.getLeft()), -1); -1);
} }
queryStatement.setSql(ontologyQuerySql); queryStatement.setSql(ontologyQuerySql);
} }

View File

@@ -1,11 +0,0 @@
package com.tencent.supersonic.headless.core.translator.converter;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
/**
 * Translation step that supplements or rewrites the query request body
 * held in a {@link QueryStatement} before SQL generation.
 */
public interface QueryConverter {
/**
 * @param queryStatement the statement under translation
 * @return true if this converter should be applied to the statement
 */
boolean accept(QueryStatement queryStatement);
/**
 * Mutates the given statement in place (e.g. filling its SQL/ontology
 * query parameters).
 *
 * @param queryStatement the statement to convert
 * @throws Exception if the statement cannot be converted
 */
void convert(QueryStatement queryStatement) throws Exception;
}

View File

@@ -1,74 +0,0 @@
package com.tencent.supersonic.headless.core.translator.converter;
import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.core.pojo.Database;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQueryParam;
import com.tencent.supersonic.headless.core.pojo.StructQueryParam;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import java.util.Objects;
import java.util.stream.Collectors;
@Component("ParserDefaultConverter")
@Slf4j
public class StructQueryConverter implements QueryConverter {

/** Applies to struct-style queries that were not produced from S2SQL. */
@Override
public boolean accept(QueryStatement queryStatement) {
return Objects.nonNull(queryStatement.getStructQueryParam())
&& !queryStatement.getIsS2SQL();
}

/**
 * Builds the outer SQL for a struct query and derives the ontology query
 * parameters (dimensions, metrics, where/order/limit) from it.
 *
 * @param queryStatement statement carrying the struct query; mutated in place
 * @throws Exception if SQL generation fails
 */
@Override
public void convert(QueryStatement queryStatement) throws Exception {
SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class);
StructQueryParam structParam = queryStatement.getStructQueryParam();

// The ontology layer materializes under this fixed inner-table alias.
String innerTable = "t_1";
SqlQueryParam sqlParam = new SqlQueryParam();
sqlParam.setTable(innerTable);

Database database = queryStatement.getOntology().getDatabase();
String sql;
if (sqlGenerateUtils.isSupportWith(database.getType(), database.getVersion())) {
sql = String.format("select %s from %s %s %s %s",
sqlGenerateUtils.getSelect(structParam), innerTable,
sqlGenerateUtils.getGroupBy(structParam),
sqlGenerateUtils.getOrderBy(structParam),
sqlGenerateUtils.getLimit(structParam));
} else {
// Engines without WITH support need an explicit sub-select alias.
sqlParam.setSupportWith(false);
sql = String.format("select %s from %s t0 %s %s %s",
sqlGenerateUtils.getSelect(structParam), innerTable,
sqlGenerateUtils.getGroupBy(structParam),
sqlGenerateUtils.getOrderBy(structParam),
sqlGenerateUtils.getLimit(structParam));
}
sqlParam.setSql(sql);
queryStatement.setSqlQueryParam(sqlParam);

// Project the struct parameters onto the ontology-level query.
OntologyQueryParam ontologyParam = new OntologyQueryParam();
ontologyParam.getDimensions().addAll(structParam.getGroups());
ontologyParam.getMetrics().addAll(structParam.getAggregators().stream()
.map(Aggregator::getColumn).collect(Collectors.toList()));
ontologyParam.setWhere(sqlGenerateUtils.generateWhere(structParam, null));
// No metrics means a plain detail query; otherwise use default aggregation.
ontologyParam.setAggOption(
ontologyParam.getMetrics().isEmpty() ? AggOption.NATIVE : AggOption.DEFAULT);
ontologyParam.setNativeQuery(structParam.getQueryType().isNativeAggQuery());
ontologyParam.setOrder(structParam.getOrders().stream()
.map(order -> new ColumnOrder(order.getColumn(), order.getDirection()))
.collect(Collectors.toList()));
ontologyParam.setLimit(structParam.getLimit());
queryStatement.setOntologyQueryParam(ontologyParam);
log.info("parse structQuery [{}] ", queryStatement.getSqlQueryParam());
}
}

View File

@@ -14,9 +14,16 @@ import java.util.Objects;
@Component("DbDialectOptimizer") @Component("DbDialectOptimizer")
public class DbDialectOptimizer implements QueryOptimizer { public class DbDialectOptimizer implements QueryOptimizer {
@Override
public boolean accept(QueryStatement queryStatement) {
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchema();
DatabaseResp database = semanticSchemaResp.getDatabaseResp();
return Objects.nonNull(database) && Objects.nonNull(database.getType());
}
@Override @Override
public void rewrite(QueryStatement queryStatement) { public void rewrite(QueryStatement queryStatement) {
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp(); SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchema();
DatabaseResp database = semanticSchemaResp.getDatabaseResp(); DatabaseResp database = semanticSchemaResp.getDatabaseResp();
String sql = queryStatement.getSql(); String sql = queryStatement.getSql();
if (Objects.isNull(database) || Objects.isNull(database.getType())) { if (Objects.isNull(database) || Objects.isNull(database.getType())) {

View File

@@ -1,40 +0,0 @@
package com.tencent.supersonic.headless.core.translator.optimizer;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.StructQueryParam;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;
import java.util.Objects;
/** Remove the default metric added by the system when the query only has dimensions */
@Slf4j
@Component("DetailQueryOptimizer")
public class DetailQueryOptimizer implements QueryOptimizer {

/**
 * Validates the generated SQL; the no-metric stripping logic is currently
 * disabled, so this only performs the emptiness check and debug logging.
 *
 * @param queryStatement statement whose SQL would be rewritten
 */
@Override
public void rewrite(QueryStatement queryStatement) {
String trimmedSql = queryStatement.getSql().trim();
if (StringUtils.isEmpty(trimmedSql)) {
throw new RuntimeException("sql is empty or null");
}
log.debug("before handleNoMetric, sql:{}", trimmedSql);
// NOTE(review): the dimension-only projection rewrite was previously
// commented out here; the optimizer is intentionally a no-op beyond
// validation until it is reinstated.
log.debug("after handleNoMetric, sql:{}", queryStatement.getSql());
}

/** True when the struct query is a native (non-aggregated) detail query. */
public boolean isDetailQuery(StructQueryParam structQueryParam) {
return Objects.nonNull(structQueryParam)
&& structQueryParam.getQueryType().isNativeAggQuery();
}
}

View File

@@ -7,5 +7,9 @@ import com.tencent.supersonic.headless.core.pojo.QueryStatement;
* derive the most efficient query. * derive the most efficient query.
*/ */
public interface QueryOptimizer { public interface QueryOptimizer {
boolean accept(QueryStatement queryStatement);
void rewrite(QueryStatement queryStatement); void rewrite(QueryStatement queryStatement);
} }

View File

@@ -9,10 +9,13 @@ import org.springframework.stereotype.Component;
@Component("ResultLimitOptimizer") @Component("ResultLimitOptimizer")
public class ResultLimitOptimizer implements QueryOptimizer { public class ResultLimitOptimizer implements QueryOptimizer {
@Override
public boolean accept(QueryStatement queryStatement) {
return !SqlSelectHelper.hasLimit(queryStatement.getSql());
}
@Override @Override
public void rewrite(QueryStatement queryStatement) { public void rewrite(QueryStatement queryStatement) {
if (!SqlSelectHelper.hasLimit(queryStatement.getSql())) {
queryStatement.setSql(queryStatement.getSql() + " limit " + queryStatement.getLimit()); queryStatement.setSql(queryStatement.getSql() + " limit " + queryStatement.getLimit());
} }
}
} }

View File

@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.converter; package com.tencent.supersonic.headless.core.translator.parser;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.tencent.supersonic.common.jsqlparser.SqlAddHelper; import com.tencent.supersonic.common.jsqlparser.SqlAddHelper;
@@ -22,24 +22,24 @@ import java.util.Objects;
import java.util.stream.Collectors; import java.util.stream.Collectors;
@Slf4j @Slf4j
@Component("DefaultDimValueConverter") @Component("DefaultDimValueParser")
public class DefaultDimValueConverter implements QueryConverter { public class DefaultDimValueParser implements QueryParser {
@Override @Override
public boolean accept(QueryStatement queryStatement) { public boolean accept(QueryStatement queryStatement) {
return Objects.nonNull(queryStatement.getSqlQueryParam()) return Objects.nonNull(queryStatement.getSqlQuery())
&& StringUtils.isNotBlank(queryStatement.getSqlQueryParam().getSql()); && StringUtils.isNotBlank(queryStatement.getSqlQuery().getSql());
} }
@Override @Override
public void convert(QueryStatement queryStatement) { public void parse(QueryStatement queryStatement) {
List<Dimension> dimensions = queryStatement.getOntology().getDimensions().stream() List<Dimension> dimensions = queryStatement.getOntology().getDimensions().stream()
.filter(dimension -> !CollectionUtils.isEmpty(dimension.getDefaultValues())) .filter(dimension -> !CollectionUtils.isEmpty(dimension.getDefaultValues()))
.collect(Collectors.toList()); .collect(Collectors.toList());
if (CollectionUtils.isEmpty(dimensions)) { if (CollectionUtils.isEmpty(dimensions)) {
return; return;
} }
String sql = queryStatement.getSqlQueryParam().getSql(); String sql = queryStatement.getSqlQuery().getSql();
List<String> whereFields = SqlSelectHelper.getWhereFields(sql).stream() List<String> whereFields = SqlSelectHelper.getWhereFields(sql).stream()
.filter(field -> !TimeDimensionEnum.containsTimeDimension(field)) .filter(field -> !TimeDimensionEnum.containsTimeDimension(field))
.collect(Collectors.toList()); .collect(Collectors.toList());
@@ -56,11 +56,11 @@ public class DefaultDimValueConverter implements QueryConverter {
inExpression.setLeftExpression(new Column(dimension.getBizName())); inExpression.setLeftExpression(new Column(dimension.getBizName()));
inExpression.setRightExpression(expressionList); inExpression.setRightExpression(expressionList);
expressions.add(inExpression); expressions.add(inExpression);
if (Objects.nonNull(queryStatement.getSqlQueryParam().getTable())) { if (Objects.nonNull(queryStatement.getSqlQuery().getTable())) {
queryStatement.getOntologyQueryParam().getDimensions().add(dimension.getBizName()); queryStatement.getOntologyQuery().getDimensions().add(dimension.getBizName());
} }
} }
sql = SqlAddHelper.addWhere(sql, expressions); sql = SqlAddHelper.addWhere(sql, expressions);
queryStatement.getSqlQueryParam().setSql(sql); queryStatement.getSqlQuery().setSql(sql);
} }
} }

View File

@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.converter; package com.tencent.supersonic.headless.core.translator.parser;
import com.tencent.supersonic.common.pojo.Aggregator; import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.common.pojo.enums.AggOperatorEnum; import com.tencent.supersonic.common.pojo.enums.AggOperatorEnum;
@@ -9,9 +9,9 @@ import com.tencent.supersonic.common.util.DateModeUtils;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption; import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.core.pojo.Database; import com.tencent.supersonic.headless.core.pojo.Database;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQueryParam; import com.tencent.supersonic.headless.core.pojo.SqlQuery;
import com.tencent.supersonic.headless.core.pojo.StructQueryParam; import com.tencent.supersonic.headless.core.pojo.StructQuery;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam; import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils; import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
@@ -22,30 +22,29 @@ import java.util.List;
import java.util.Objects; import java.util.Objects;
import java.util.stream.Collectors; import java.util.stream.Collectors;
@Component("CalculateAggConverter") @Component("MetricRatioParser")
@Slf4j @Slf4j
public class MetricRatioConverter implements QueryConverter { public class MetricRatioParser implements QueryParser {
public interface EngineSql { public interface EngineSql {
String sql(StructQueryParam structQueryParam, boolean isOver, boolean asWith, String sql(StructQuery structQuery, boolean isOver, boolean asWith, String metricSql);
String metricSql);
} }
@Override @Override
public boolean accept(QueryStatement queryStatement) { public boolean accept(QueryStatement queryStatement) {
if (Objects.isNull(queryStatement.getStructQueryParam()) || queryStatement.getIsS2SQL() if (Objects.isNull(queryStatement.getStructQuery()) || queryStatement.getIsS2SQL()
|| !isRatioAccept(queryStatement.getStructQueryParam())) { || !isRatioAccept(queryStatement.getStructQuery())) {
return false; return false;
} }
StructQueryParam structQueryParam = queryStatement.getStructQueryParam(); StructQuery structQuery = queryStatement.getStructQuery();
if (structQueryParam.getQueryType().isNativeAggQuery() if (structQuery.getQueryType().isNativeAggQuery()
|| CollectionUtils.isEmpty(structQueryParam.getAggregators())) { || CollectionUtils.isEmpty(structQuery.getAggregators())) {
return false; return false;
} }
int nonSumFunction = 0; int nonSumFunction = 0;
for (Aggregator agg : structQueryParam.getAggregators()) { for (Aggregator agg : structQuery.getAggregators()) {
if (agg.getFunc() == null || "".equals(agg.getFunc())) { if (agg.getFunc() == null || "".equals(agg.getFunc())) {
return false; return false;
} }
@@ -60,14 +59,14 @@ public class MetricRatioConverter implements QueryConverter {
} }
@Override @Override
public void convert(QueryStatement queryStatement) throws Exception { public void parse(QueryStatement queryStatement) throws Exception {
Database database = queryStatement.getOntology().getDatabase(); Database database = queryStatement.getOntology().getDatabase();
generateRatioSql(queryStatement, database.getType(), database.getVersion()); generateRatioSql(queryStatement, database.getType(), database.getVersion());
} }
/** Ratio */ /** Ratio */
public boolean isRatioAccept(StructQueryParam structQueryParam) { public boolean isRatioAccept(StructQuery structQuery) {
Long ratioFuncNum = structQueryParam.getAggregators().stream() Long ratioFuncNum = structQuery.getAggregators().stream()
.filter(f -> (f.getFunc().equals(AggOperatorEnum.RATIO_ROLL) .filter(f -> (f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)
|| f.getFunc().equals(AggOperatorEnum.RATIO_OVER))) || f.getFunc().equals(AggOperatorEnum.RATIO_OVER)))
.count(); .count();
@@ -80,20 +79,20 @@ public class MetricRatioConverter implements QueryConverter {
public void generateRatioSql(QueryStatement queryStatement, EngineType engineTypeEnum, public void generateRatioSql(QueryStatement queryStatement, EngineType engineTypeEnum,
String version) throws Exception { String version) throws Exception {
SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class); SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class);
StructQueryParam structQueryParam = queryStatement.getStructQueryParam(); StructQuery structQuery = queryStatement.getStructQuery();
check(structQueryParam); check(structQuery);
queryStatement.setEnableOptimize(false); queryStatement.setEnableOptimize(false);
OntologyQueryParam ontologyQueryParam = queryStatement.getOntologyQueryParam(); OntologyQuery ontologyQuery = queryStatement.getOntologyQuery();
ontologyQueryParam.setAggOption(AggOption.AGGREGATION); ontologyQuery.setAggOption(AggOption.AGGREGATION);
String metricTableName = "v_metric_tb_tmp"; String metricTableName = "v_metric_tb_tmp";
boolean isOver = isOverRatio(structQueryParam); boolean isOver = isOverRatio(structQuery);
String sql = ""; String sql = "";
SqlQueryParam dsParam = queryStatement.getSqlQueryParam(); SqlQuery dsParam = queryStatement.getSqlQuery();
dsParam.setTable(metricTableName); dsParam.setTable(metricTableName);
switch (engineTypeEnum) { switch (engineTypeEnum) {
case H2: case H2:
sql = new H2EngineSql().sql(structQueryParam, isOver, true, metricTableName); sql = new H2EngineSql().sql(structQuery, isOver, true, metricTableName);
break; break;
case MYSQL: case MYSQL:
case DORIS: case DORIS:
@@ -102,10 +101,10 @@ public class MetricRatioConverter implements QueryConverter {
dsParam.setSupportWith(false); dsParam.setSupportWith(false);
} }
if (!engineTypeEnum.equals(engineTypeEnum.CLICKHOUSE)) { if (!engineTypeEnum.equals(engineTypeEnum.CLICKHOUSE)) {
sql = new MysqlEngineSql().sql(structQueryParam, isOver, sql = new MysqlEngineSql().sql(structQuery, isOver, dsParam.isSupportWith(),
dsParam.isSupportWith(), metricTableName); metricTableName);
} else { } else {
sql = new CkEngineSql().sql(structQueryParam, isOver, dsParam.isSupportWith(), sql = new CkEngineSql().sql(structQuery, isOver, dsParam.isSupportWith(),
metricTableName); metricTableName);
} }
break; break;
@@ -116,8 +115,8 @@ public class MetricRatioConverter implements QueryConverter {
public class H2EngineSql implements EngineSql { public class H2EngineSql implements EngineSql {
public String getOverSelect(StructQueryParam structQueryParam, boolean isOver) { public String getOverSelect(StructQuery structQuery, boolean isOver) {
String aggStr = structQueryParam.getAggregators().stream().map(f -> { String aggStr = structQuery.getAggregators().stream().map(f -> {
if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER) if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER)
|| f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) { || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
return String.format("( (%s-%s_roll)/cast(%s_roll as DOUBLE) ) as %s_%s,%s", return String.format("( (%s-%s_roll)/cast(%s_roll as DOUBLE) ) as %s_%s,%s",
@@ -127,44 +126,43 @@ public class MetricRatioConverter implements QueryConverter {
return f.getColumn(); return f.getColumn();
} }
}).collect(Collectors.joining(",")); }).collect(Collectors.joining(","));
return CollectionUtils.isEmpty(structQueryParam.getGroups()) ? aggStr return CollectionUtils.isEmpty(structQuery.getGroups()) ? aggStr
: String.join(",", structQueryParam.getGroups()) + "," + aggStr; : String.join(",", structQuery.getGroups()) + "," + aggStr;
} }
public String getTimeSpan(StructQueryParam structQueryParam, boolean isOver, public String getTimeSpan(StructQuery structQuery, boolean isOver, boolean isAdd) {
boolean isAdd) { if (Objects.nonNull(structQuery.getDateInfo())) {
if (Objects.nonNull(structQueryParam.getDateInfo())) {
String addStr = isAdd ? "" : "-"; String addStr = isAdd ? "" : "-";
if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.DAY)) { if (structQuery.getDateInfo().getPeriod().equals(DatePeriodEnum.DAY)) {
return "day," + (isOver ? addStr + "7" : addStr + "1"); return "day," + (isOver ? addStr + "7" : addStr + "1");
} }
if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) { if (structQuery.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) {
return isOver ? "month," + addStr + "1" : "day," + addStr + "7"; return isOver ? "month," + addStr + "1" : "day," + addStr + "7";
} }
if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH.MONTH)) { if (structQuery.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH.MONTH)) {
return isOver ? "year," + addStr + "1" : "month," + addStr + "1"; return isOver ? "year," + addStr + "1" : "month," + addStr + "1";
} }
} }
return ""; return "";
} }
public String getJoinOn(StructQueryParam structQueryParam, boolean isOver, String aliasLeft, public String getJoinOn(StructQuery structQuery, boolean isOver, String aliasLeft,
String aliasRight) { String aliasRight) {
String timeDim = getTimeDim(structQueryParam); String timeDim = getTimeDim(structQuery);
String timeSpan = getTimeSpan(structQueryParam, isOver, true); String timeSpan = getTimeSpan(structQuery, isOver, true);
String aggStr = structQueryParam.getAggregators().stream().map(f -> { String aggStr = structQuery.getAggregators().stream().map(f -> {
if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER) if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER)
|| f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) { || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) { if (structQuery.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) {
return String.format( return String.format(
"%s is not null and %s = FORMATDATETIME(DATEADD(%s,CONCAT(%s,'-01')),'yyyy-MM') ", "%s is not null and %s = FORMATDATETIME(DATEADD(%s,CONCAT(%s,'-01')),'yyyy-MM') ",
aliasRight + timeDim, aliasLeft + timeDim, timeSpan, aliasRight + timeDim, aliasLeft + timeDim, timeSpan,
aliasRight + timeDim); aliasRight + timeDim);
} }
if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK) if (structQuery.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK)
&& isOver) { && isOver) {
return String.format(" DATE_TRUNC('week',DATEADD(%s,%s) ) = %s ", return String.format(" DATE_TRUNC('week',DATEADD(%s,%s) ) = %s ",
getTimeSpan(structQueryParam, isOver, false), aliasLeft + timeDim, getTimeSpan(structQuery, isOver, false), aliasLeft + timeDim,
aliasRight + timeDim); aliasRight + timeDim);
} }
return String.format("%s = TIMESTAMPADD(%s,%s) ", aliasLeft + timeDim, timeSpan, return String.format("%s = TIMESTAMPADD(%s,%s) ", aliasLeft + timeDim, timeSpan,
@@ -174,7 +172,7 @@ public class MetricRatioConverter implements QueryConverter {
} }
}).collect(Collectors.joining(" and ")); }).collect(Collectors.joining(" and "));
List<String> groups = new ArrayList<>(); List<String> groups = new ArrayList<>();
for (String group : structQueryParam.getGroups()) { for (String group : structQuery.getGroups()) {
if (group.equalsIgnoreCase(timeDim)) { if (group.equalsIgnoreCase(timeDim)) {
continue; continue;
} }
@@ -185,36 +183,36 @@ public class MetricRatioConverter implements QueryConverter {
} }
@Override @Override
public String sql(StructQueryParam structQueryParam, boolean isOver, boolean asWith, public String sql(StructQuery structQuery, boolean isOver, boolean asWith,
String metricSql) { String metricSql) {
String sql = String.format( String sql = String.format(
"select %s from ( select %s , %s from %s t0 left join %s t1 on %s ) metric_tb_src %s %s ", "select %s from ( select %s , %s from %s t0 left join %s t1 on %s ) metric_tb_src %s %s ",
getOverSelect(structQueryParam, isOver), getAllSelect(structQueryParam, "t0."), getOverSelect(structQuery, isOver), getAllSelect(structQuery, "t0."),
getAllJoinSelect(structQueryParam, "t1."), metricSql, metricSql, getAllJoinSelect(structQuery, "t1."), metricSql, metricSql,
getJoinOn(structQueryParam, isOver, "t0.", "t1."), getOrderBy(structQueryParam), getJoinOn(structQuery, isOver, "t0.", "t1."), getOrderBy(structQuery),
getLimit(structQueryParam)); getLimit(structQuery));
return sql; return sql;
} }
} }
public class CkEngineSql extends MysqlEngineSql { public class CkEngineSql extends MysqlEngineSql {
public String getJoinOn(StructQueryParam structQueryParam, boolean isOver, String aliasLeft, public String getJoinOn(StructQuery structQuery, boolean isOver, String aliasLeft,
String aliasRight) { String aliasRight) {
String timeDim = getTimeDim(structQueryParam); String timeDim = getTimeDim(structQuery);
String timeSpan = "INTERVAL " + getTimeSpan(structQueryParam, isOver, true); String timeSpan = "INTERVAL " + getTimeSpan(structQuery, isOver, true);
String aggStr = structQueryParam.getAggregators().stream().map(f -> { String aggStr = structQuery.getAggregators().stream().map(f -> {
if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER) if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER)
|| f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) { || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) { if (structQuery.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) {
return String.format( return String.format(
"toDate(CONCAT(%s,'-01')) = date_add(toDate(CONCAT(%s,'-01')),%s) ", "toDate(CONCAT(%s,'-01')) = date_add(toDate(CONCAT(%s,'-01')),%s) ",
aliasLeft + timeDim, aliasRight + timeDim, timeSpan); aliasLeft + timeDim, aliasRight + timeDim, timeSpan);
} }
if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK) if (structQuery.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK)
&& isOver) { && isOver) {
return String.format("toMonday(date_add(%s ,INTERVAL %s) ) = %s", return String.format("toMonday(date_add(%s ,INTERVAL %s) ) = %s",
aliasLeft + timeDim, getTimeSpan(structQueryParam, isOver, false), aliasLeft + timeDim, getTimeSpan(structQuery, isOver, false),
aliasRight + timeDim); aliasRight + timeDim);
} }
return String.format("%s = date_add(%s,%s) ", aliasLeft + timeDim, return String.format("%s = date_add(%s,%s) ", aliasLeft + timeDim,
@@ -224,7 +222,7 @@ public class MetricRatioConverter implements QueryConverter {
} }
}).collect(Collectors.joining(" and ")); }).collect(Collectors.joining(" and "));
List<String> groups = new ArrayList<>(); List<String> groups = new ArrayList<>();
for (String group : structQueryParam.getGroups()) { for (String group : structQuery.getGroups()) {
if (group.equalsIgnoreCase(timeDim)) { if (group.equalsIgnoreCase(timeDim)) {
continue; continue;
} }
@@ -235,49 +233,46 @@ public class MetricRatioConverter implements QueryConverter {
} }
@Override @Override
public String sql(StructQueryParam structQueryParam, boolean isOver, boolean asWith, public String sql(StructQuery structQuery, boolean isOver, boolean asWith,
String metricSql) { String metricSql) {
if (!asWith) { if (!asWith) {
return String.format( return String.format(
"select %s from ( select %s , %s from %s t0 left join %s t1 on %s ) metric_tb_src %s %s ", "select %s from ( select %s , %s from %s t0 left join %s t1 on %s ) metric_tb_src %s %s ",
getOverSelect(structQueryParam, isOver), getOverSelect(structQuery, isOver), getAllSelect(structQuery, "t0."),
getAllSelect(structQueryParam, "t0."), getAllJoinSelect(structQuery, "t1."), metricSql, metricSql,
getAllJoinSelect(structQueryParam, "t1."), metricSql, metricSql, getJoinOn(structQuery, isOver, "t0.", "t1."), getOrderBy(structQuery),
getJoinOn(structQueryParam, isOver, "t0.", "t1."), getLimit(structQuery));
getOrderBy(structQueryParam), getLimit(structQueryParam));
} }
return String.format( return String.format(
",t0 as (select * from %s),t1 as (select * from %s) select %s from ( select %s , %s " ",t0 as (select * from %s),t1 as (select * from %s) select %s from ( select %s , %s "
+ "from t0 left join t1 on %s ) metric_tb_src %s %s ", + "from t0 left join t1 on %s ) metric_tb_src %s %s ",
metricSql, metricSql, getOverSelect(structQueryParam, isOver), metricSql, metricSql, getOverSelect(structQuery, isOver),
getAllSelect(structQueryParam, "t0."), getAllSelect(structQuery, "t0."), getAllJoinSelect(structQuery, "t1."),
getAllJoinSelect(structQueryParam, "t1."), getJoinOn(structQuery, isOver, "t0.", "t1."), getOrderBy(structQuery),
getJoinOn(structQueryParam, isOver, "t0.", "t1."), getOrderBy(structQueryParam), getLimit(structQuery));
getLimit(structQueryParam));
} }
} }
public class MysqlEngineSql implements EngineSql { public class MysqlEngineSql implements EngineSql {
public String getTimeSpan(StructQueryParam structQueryParam, boolean isOver, public String getTimeSpan(StructQuery structQuery, boolean isOver, boolean isAdd) {
boolean isAdd) { if (Objects.nonNull(structQuery.getDateInfo())) {
if (Objects.nonNull(structQueryParam.getDateInfo())) {
String addStr = isAdd ? "" : "-"; String addStr = isAdd ? "" : "-";
if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.DAY)) { if (structQuery.getDateInfo().getPeriod().equals(DatePeriodEnum.DAY)) {
return isOver ? addStr + "7 day" : addStr + "1 day"; return isOver ? addStr + "7 day" : addStr + "1 day";
} }
if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK)) { if (structQuery.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK)) {
return isOver ? addStr + "1 month" : addStr + "7 day"; return isOver ? addStr + "1 month" : addStr + "7 day";
} }
if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) { if (structQuery.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) {
return isOver ? addStr + "1 year" : addStr + "1 month"; return isOver ? addStr + "1 year" : addStr + "1 month";
} }
} }
return ""; return "";
} }
public String getOverSelect(StructQueryParam structQueryParam, boolean isOver) { public String getOverSelect(StructQuery structQuery, boolean isOver) {
String aggStr = structQueryParam.getAggregators().stream().map(f -> { String aggStr = structQuery.getAggregators().stream().map(f -> {
if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER) if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER)
|| f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) { || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
return String.format("if(%s_roll!=0, (%s-%s_roll)/%s_roll , 0) as %s_%s,%s", return String.format("if(%s_roll!=0, (%s-%s_roll)/%s_roll , 0) as %s_%s,%s",
@@ -287,26 +282,26 @@ public class MetricRatioConverter implements QueryConverter {
return f.getColumn(); return f.getColumn();
} }
}).collect(Collectors.joining(",")); }).collect(Collectors.joining(","));
return CollectionUtils.isEmpty(structQueryParam.getGroups()) ? aggStr return CollectionUtils.isEmpty(structQuery.getGroups()) ? aggStr
: String.join(",", structQueryParam.getGroups()) + "," + aggStr; : String.join(",", structQuery.getGroups()) + "," + aggStr;
} }
public String getJoinOn(StructQueryParam structQueryParam, boolean isOver, String aliasLeft, public String getJoinOn(StructQuery structQuery, boolean isOver, String aliasLeft,
String aliasRight) { String aliasRight) {
String timeDim = getTimeDim(structQueryParam); String timeDim = getTimeDim(structQuery);
String timeSpan = "INTERVAL " + getTimeSpan(structQueryParam, isOver, true); String timeSpan = "INTERVAL " + getTimeSpan(structQuery, isOver, true);
String aggStr = structQueryParam.getAggregators().stream().map(f -> { String aggStr = structQuery.getAggregators().stream().map(f -> {
if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER) if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER)
|| f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) { || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) { if (structQuery.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) {
return String.format( return String.format(
"%s = DATE_FORMAT(date_add(CONCAT(%s,'-01'), %s),'%%Y-%%m') ", "%s = DATE_FORMAT(date_add(CONCAT(%s,'-01'), %s),'%%Y-%%m') ",
aliasLeft + timeDim, aliasRight + timeDim, timeSpan); aliasLeft + timeDim, aliasRight + timeDim, timeSpan);
} }
if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK) if (structQuery.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK)
&& isOver) { && isOver) {
return String.format("to_monday(date_add(%s ,INTERVAL %s) ) = %s", return String.format("to_monday(date_add(%s ,INTERVAL %s) ) = %s",
aliasLeft + timeDim, getTimeSpan(structQueryParam, isOver, false), aliasLeft + timeDim, getTimeSpan(structQuery, isOver, false),
aliasRight + timeDim); aliasRight + timeDim);
} }
return String.format("%s = date_add(%s,%s) ", aliasLeft + timeDim, return String.format("%s = date_add(%s,%s) ", aliasLeft + timeDim,
@@ -316,7 +311,7 @@ public class MetricRatioConverter implements QueryConverter {
} }
}).collect(Collectors.joining(" and ")); }).collect(Collectors.joining(" and "));
List<String> groups = new ArrayList<>(); List<String> groups = new ArrayList<>();
for (String group : structQueryParam.getGroups()) { for (String group : structQuery.getGroups()) {
if (group.equalsIgnoreCase(timeDim)) { if (group.equalsIgnoreCase(timeDim)) {
continue; continue;
} }
@@ -327,53 +322,52 @@ public class MetricRatioConverter implements QueryConverter {
} }
@Override @Override
public String sql(StructQueryParam structQueryParam, boolean isOver, boolean asWith, public String sql(StructQuery structQuery, boolean isOver, boolean asWith,
String metricSql) { String metricSql) {
String sql = String.format( String sql = String.format(
"select %s from ( select %s , %s from %s t0 left join %s t1 on %s ) metric_tb_src %s %s ", "select %s from ( select %s , %s from %s t0 left join %s t1 on %s ) metric_tb_src %s %s ",
getOverSelect(structQueryParam, isOver), getAllSelect(structQueryParam, "t0."), getOverSelect(structQuery, isOver), getAllSelect(structQuery, "t0."),
getAllJoinSelect(structQueryParam, "t1."), metricSql, metricSql, getAllJoinSelect(structQuery, "t1."), metricSql, metricSql,
getJoinOn(structQueryParam, isOver, "t0.", "t1."), getOrderBy(structQueryParam), getJoinOn(structQuery, isOver, "t0.", "t1."), getOrderBy(structQuery),
getLimit(structQueryParam)); getLimit(structQuery));
return sql; return sql;
} }
} }
private String getAllJoinSelect(StructQueryParam structQueryParam, String alias) { private String getAllJoinSelect(StructQuery structQuery, String alias) {
String aggStr = structQueryParam.getAggregators().stream() String aggStr = structQuery.getAggregators().stream()
.map(f -> getSelectField(f, alias) + " as " + getSelectField(f, "") + "_roll") .map(f -> getSelectField(f, alias) + " as " + getSelectField(f, "") + "_roll")
.collect(Collectors.joining(",")); .collect(Collectors.joining(","));
List<String> groups = new ArrayList<>(); List<String> groups = new ArrayList<>();
for (String group : structQueryParam.getGroups()) { for (String group : structQuery.getGroups()) {
groups.add(alias + group + " as " + group + "_roll"); groups.add(alias + group + " as " + group + "_roll");
} }
return CollectionUtils.isEmpty(groups) ? aggStr : String.join(",", groups) + "," + aggStr; return CollectionUtils.isEmpty(groups) ? aggStr : String.join(",", groups) + "," + aggStr;
} }
private String getGroupDimWithOutTime(StructQueryParam structQueryParam) { private String getGroupDimWithOutTime(StructQuery structQuery) {
String timeDim = getTimeDim(structQueryParam); String timeDim = getTimeDim(structQuery);
return structQueryParam.getGroups().stream().filter(f -> !f.equalsIgnoreCase(timeDim)) return structQuery.getGroups().stream().filter(f -> !f.equalsIgnoreCase(timeDim))
.collect(Collectors.joining(",")); .collect(Collectors.joining(","));
} }
private static String getTimeDim(StructQueryParam structQueryParam) { private static String getTimeDim(StructQuery structQuery) {
DateModeUtils dateModeUtils = ContextUtils.getContext().getBean(DateModeUtils.class); DateModeUtils dateModeUtils = ContextUtils.getContext().getBean(DateModeUtils.class);
return dateModeUtils.getSysDateCol(structQueryParam.getDateInfo()); return dateModeUtils.getSysDateCol(structQuery.getDateInfo());
} }
private static String getLimit(StructQueryParam structQueryParam) { private static String getLimit(StructQuery structQuery) {
if (structQueryParam != null && structQueryParam.getLimit() != null if (structQuery != null && structQuery.getLimit() != null && structQuery.getLimit() > 0) {
&& structQueryParam.getLimit() > 0) { return " limit " + String.valueOf(structQuery.getLimit());
return " limit " + String.valueOf(structQueryParam.getLimit());
} }
return ""; return "";
} }
private String getAllSelect(StructQueryParam structQueryParam, String alias) { private String getAllSelect(StructQuery structQuery, String alias) {
String aggStr = structQueryParam.getAggregators().stream() String aggStr = structQuery.getAggregators().stream().map(f -> getSelectField(f, alias))
.map(f -> getSelectField(f, alias)).collect(Collectors.joining(",")); .collect(Collectors.joining(","));
return CollectionUtils.isEmpty(structQueryParam.getGroups()) ? aggStr return CollectionUtils.isEmpty(structQuery.getGroups()) ? aggStr
: alias + String.join("," + alias, structQueryParam.getGroups()) + "," + aggStr; : alias + String.join("," + alias, structQuery.getGroups()) + "," + aggStr;
} }
private String getSelectField(final Aggregator agg, String alias) { private String getSelectField(final Aggregator agg, String alias) {
@@ -385,32 +379,32 @@ public class MetricRatioConverter implements QueryConverter {
return sqlGenerateUtils.getSelectField(agg); return sqlGenerateUtils.getSelectField(agg);
} }
private String getGroupBy(StructQueryParam structQueryParam) { private String getGroupBy(StructQuery structQuery) {
if (CollectionUtils.isEmpty(structQueryParam.getGroups())) { if (CollectionUtils.isEmpty(structQuery.getGroups())) {
return ""; return "";
} }
return "group by " + String.join(",", structQueryParam.getGroups()); return "group by " + String.join(",", structQuery.getGroups());
} }
private static String getOrderBy(StructQueryParam structQueryParam) { private static String getOrderBy(StructQuery structQuery) {
return "order by " + getTimeDim(structQueryParam) + " desc"; return "order by " + getTimeDim(structQuery) + " desc";
} }
private boolean isOverRatio(StructQueryParam structQueryParam) { private boolean isOverRatio(StructQuery structQuery) {
Long overCt = structQueryParam.getAggregators().stream() Long overCt = structQuery.getAggregators().stream()
.filter(f -> f.getFunc().equals(AggOperatorEnum.RATIO_OVER)).count(); .filter(f -> f.getFunc().equals(AggOperatorEnum.RATIO_OVER)).count();
return overCt > 0; return overCt > 0;
} }
private void check(StructQueryParam structQueryParam) throws Exception { private void check(StructQuery structQuery) throws Exception {
Long ratioOverNum = structQueryParam.getAggregators().stream() Long ratioOverNum = structQuery.getAggregators().stream()
.filter(f -> f.getFunc().equals(AggOperatorEnum.RATIO_OVER)).count(); .filter(f -> f.getFunc().equals(AggOperatorEnum.RATIO_OVER)).count();
Long ratioRollNum = structQueryParam.getAggregators().stream() Long ratioRollNum = structQuery.getAggregators().stream()
.filter(f -> f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)).count(); .filter(f -> f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)).count();
if (ratioOverNum > 0 && ratioRollNum > 0) { if (ratioOverNum > 0 && ratioRollNum > 0) {
throw new Exception("not support over ratio and roll ratio together "); throw new Exception("not support over ratio and roll ratio together ");
} }
if (getTimeDim(structQueryParam).isEmpty()) { if (getTimeDim(structQuery).isEmpty()) {
throw new Exception("miss time filter"); throw new Exception("miss time filter");
} }
} }

View File

@@ -1,24 +1,28 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite; package com.tencent.supersonic.headless.core.translator.parser;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.parser.QueryParser; import com.tencent.supersonic.headless.core.translator.parser.calcite.RuntimeOptions;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Ontology; import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.calcite.SqlBuilder;
import com.tencent.supersonic.headless.core.pojo.Ontology;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
import java.util.Objects;
/** the calcite parse implements */ /** the calcite parse implements */
@Component("CalciteQueryParser") @Component("OntologyQueryParser")
@Slf4j @Slf4j
public class CalciteQueryParser implements QueryParser { public class OntologyQueryParser implements QueryParser {
@Override
public boolean accept(QueryStatement queryStatement) {
return Objects.nonNull(queryStatement.getOntologyQuery());
}
@Override @Override
public void parse(QueryStatement queryStatement) throws Exception { public void parse(QueryStatement queryStatement) throws Exception {
Ontology ontology = queryStatement.getOntology(); Ontology ontology = queryStatement.getOntology();
if (ontology == null) {
queryStatement.setErrMsg("No ontology could be found");
return;
}
S2CalciteSchema semanticSchema = S2CalciteSchema.builder() S2CalciteSchema semanticSchema = S2CalciteSchema.builder()
.schemaKey("DATASET_" + queryStatement.getDataSetId()).ontology(ontology) .schemaKey("DATASET_" + queryStatement.getDataSetId()).ontology(ontology)
.runtimeOptions(RuntimeOptions.builder().minMaxTime(queryStatement.getMinMaxTime()) .runtimeOptions(RuntimeOptions.builder().minMaxTime(queryStatement.getMinMaxTime())

View File

@@ -4,5 +4,8 @@ import com.tencent.supersonic.headless.core.pojo.QueryStatement;
/** A query parser generates physical SQL for the QueryStatement. */ /** A query parser generates physical SQL for the QueryStatement. */
public interface QueryParser { public interface QueryParser {
boolean accept(QueryStatement queryStatement);
void parse(QueryStatement queryStatement) throws Exception; void parse(QueryStatement queryStatement) throws Exception;
} }

View File

@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.converter; package com.tencent.supersonic.headless.core.translator.parser;
import com.tencent.supersonic.common.jsqlparser.SqlAsHelper; import com.tencent.supersonic.common.jsqlparser.SqlAsHelper;
import com.tencent.supersonic.common.jsqlparser.SqlReplaceHelper; import com.tencent.supersonic.common.jsqlparser.SqlReplaceHelper;
@@ -14,8 +14,8 @@ import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.api.pojo.enums.MetricType; import com.tencent.supersonic.headless.api.pojo.enums.MetricType;
import com.tencent.supersonic.headless.api.pojo.response.*; import com.tencent.supersonic.headless.api.pojo.response.*;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQueryParam; import com.tencent.supersonic.headless.core.pojo.SqlQuery;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam; import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils; import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
@@ -27,62 +27,62 @@ import java.util.*;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import java.util.stream.Stream; import java.util.stream.Stream;
@Component("SqlQueryConverter") @Component("SqlQueryParser")
@Slf4j @Slf4j
public class SqlQueryConverter implements QueryConverter { public class SqlQueryParser implements QueryParser {
@Override @Override
public boolean accept(QueryStatement queryStatement) { public boolean accept(QueryStatement queryStatement) {
return Objects.nonNull(queryStatement.getSqlQueryParam()) && queryStatement.getIsS2SQL(); return Objects.nonNull(queryStatement.getSqlQuery()) && queryStatement.getIsS2SQL();
} }
@Override @Override
public void convert(QueryStatement queryStatement) throws Exception { public void parse(QueryStatement queryStatement) throws Exception {
convertNameToBizName(queryStatement); convertNameToBizName(queryStatement);
rewriteOrderBy(queryStatement); rewriteOrderBy(queryStatement);
// fill sqlQuery // fill sqlQuery
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp(); SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchema();
SqlQueryParam sqlQueryParam = queryStatement.getSqlQueryParam(); SqlQuery sqlQuery = queryStatement.getSqlQuery();
String tableName = SqlSelectHelper.getTableName(sqlQueryParam.getSql()); String tableName = SqlSelectHelper.getTableName(sqlQuery.getSql());
if (StringUtils.isEmpty(tableName)) { if (StringUtils.isEmpty(tableName)) {
return; return;
} }
sqlQueryParam.setTable(tableName.toLowerCase()); sqlQuery.setTable(tableName.toLowerCase());
SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class); SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class);
if (!sqlGenerateUtils.isSupportWith( if (!sqlGenerateUtils.isSupportWith(
EngineType.fromString(semanticSchemaResp.getDatabaseResp().getType().toUpperCase()), EngineType.fromString(semanticSchemaResp.getDatabaseResp().getType().toUpperCase()),
semanticSchemaResp.getDatabaseResp().getVersion())) { semanticSchemaResp.getDatabaseResp().getVersion())) {
sqlQueryParam.setSupportWith(false); sqlQuery.setSupportWith(false);
sqlQueryParam.setWithAlias(false); sqlQuery.setWithAlias(false);
} }
// build ontologyQuery // build ontologyQuery
List<String> allFields = SqlSelectHelper.getAllSelectFields(sqlQueryParam.getSql()); List<String> allFields = SqlSelectHelper.getAllSelectFields(sqlQuery.getSql());
List<MetricSchemaResp> metricSchemas = getMetrics(semanticSchemaResp, allFields); List<MetricSchemaResp> metricSchemas = getMetrics(semanticSchemaResp, allFields);
List<String> metrics = List<String> metrics =
metricSchemas.stream().map(SchemaItem::getBizName).collect(Collectors.toList()); metricSchemas.stream().map(SchemaItem::getBizName).collect(Collectors.toList());
Set<String> dimensions = getDimensions(semanticSchemaResp, allFields); Set<String> dimensions = getDimensions(semanticSchemaResp, allFields);
OntologyQueryParam ontologyQueryParam = new OntologyQueryParam(); OntologyQuery ontologyQuery = new OntologyQuery();
ontologyQueryParam.getMetrics().addAll(metrics); ontologyQuery.getMetrics().addAll(metrics);
ontologyQueryParam.getDimensions().addAll(dimensions); ontologyQuery.getDimensions().addAll(dimensions);
AggOption sqlQueryAggOption = getAggOption(sqlQueryParam.getSql(), metricSchemas); AggOption sqlQueryAggOption = getAggOption(sqlQuery.getSql(), metricSchemas);
// if sql query itself has aggregation, ontology query just returns detail // if sql query itself has aggregation, ontology query just returns detail
if (sqlQueryAggOption.equals(AggOption.AGGREGATION)) { if (sqlQueryAggOption.equals(AggOption.AGGREGATION)) {
ontologyQueryParam.setAggOption(AggOption.NATIVE); ontologyQuery.setAggOption(AggOption.NATIVE);
} else if (sqlQueryAggOption.equals(AggOption.NATIVE) && !metrics.isEmpty()) { } else if (sqlQueryAggOption.equals(AggOption.NATIVE) && !metrics.isEmpty()) {
ontologyQueryParam.setAggOption(AggOption.DEFAULT); ontologyQuery.setAggOption(AggOption.DEFAULT);
} }
ontologyQueryParam.setNativeQuery(!AggOption.isAgg(ontologyQueryParam.getAggOption())); ontologyQuery.setNativeQuery(!AggOption.isAgg(ontologyQuery.getAggOption()));
queryStatement.setOntologyQueryParam(ontologyQueryParam); queryStatement.setOntologyQuery(ontologyQuery);
generateDerivedMetric(sqlGenerateUtils, queryStatement); generateDerivedMetric(sqlGenerateUtils, queryStatement);
queryStatement.setSql(sqlQueryParam.getSql()); queryStatement.setSql(sqlQuery.getSql());
// replace sql fields for db, must called after convertNameToBizName // replace sql fields for db, must called after convertNameToBizName
String sqlRewrite = replaceSqlFieldsForHanaDB(queryStatement, sqlQueryParam.getSql()); String sqlRewrite = replaceSqlFieldsForHanaDB(queryStatement, sqlQuery.getSql());
sqlQueryParam.setSql(sqlRewrite); sqlQuery.setSql(sqlRewrite);
log.info("parse sqlQuery [{}] ", sqlQueryParam); log.info("parse sqlQuery [{}] ", sqlQuery);
} }
private AggOption getAggOption(String sql, List<MetricSchemaResp> metricSchemas) { private AggOption getAggOption(String sql, List<MetricSchemaResp> metricSchemas) {
@@ -145,9 +145,9 @@ public class SqlQueryConverter implements QueryConverter {
private void generateDerivedMetric(SqlGenerateUtils sqlGenerateUtils, private void generateDerivedMetric(SqlGenerateUtils sqlGenerateUtils,
QueryStatement queryStatement) { QueryStatement queryStatement) {
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp(); SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchema();
SqlQueryParam sqlParam = queryStatement.getSqlQueryParam(); SqlQuery sqlParam = queryStatement.getSqlQuery();
OntologyQueryParam ontologyParam = queryStatement.getOntologyQueryParam(); OntologyQuery ontologyParam = queryStatement.getOntologyQuery();
String sql = sqlParam.getSql(); String sql = sqlParam.getSql();
Set<String> measures = new HashSet<>(); Set<String> measures = new HashSet<>();
@@ -229,22 +229,20 @@ public class SqlQueryConverter implements QueryConverter {
/** /**
* special process for hanaDB,the sap hana DB don't support the chinese name as * special process for hanaDB,the sap hana DB don't support the chinese name as the column name,
* the column name, * so we need to quote the column name after converting the convertNameToBizName called
* so we need to quote the column name after converting the convertNameToBizName
* called
* *
* sap hana DB will auto translate the colume to upper case letter if not * sap hana DB will auto translate the colume to upper case letter if not quoted. also we need
* quoted. * to quote the field name if it is a lower case letter.
* also we need to quote the field name if it is a lower case letter.
* *
* @param queryStatement * @param queryStatement
* @param sql * @param sql
* @return * @return
*/ */
private String replaceSqlFieldsForHanaDB(QueryStatement queryStatement, String sql) { private String replaceSqlFieldsForHanaDB(QueryStatement queryStatement, String sql) {
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp(); SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchema();
if (!semanticSchemaResp.getDatabaseResp().getType().equalsIgnoreCase(EngineType.HANADB.getName())) { if (!semanticSchemaResp.getDatabaseResp().getType()
.equalsIgnoreCase(EngineType.HANADB.getName())) {
return sql; return sql;
} }
Map<String, String> fieldNameToBizNameMap = getFieldNameToBizNameMap(semanticSchemaResp); Map<String, String> fieldNameToBizNameMap = getFieldNameToBizNameMap(semanticSchemaResp);
@@ -277,9 +275,9 @@ public class SqlQueryConverter implements QueryConverter {
} }
private void convertNameToBizName(QueryStatement queryStatement) { private void convertNameToBizName(QueryStatement queryStatement) {
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp(); SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchema();
Map<String, String> fieldNameToBizNameMap = getFieldNameToBizNameMap(semanticSchemaResp); Map<String, String> fieldNameToBizNameMap = getFieldNameToBizNameMap(semanticSchemaResp);
String sql = queryStatement.getSqlQueryParam().getSql(); String sql = queryStatement.getSqlQuery().getSql();
log.debug("dataSetId:{},convert name to bizName before:{}", queryStatement.getDataSetId(), log.debug("dataSetId:{},convert name to bizName before:{}", queryStatement.getDataSetId(),
sql); sql);
sql = SqlReplaceHelper.replaceFields(sql, fieldNameToBizNameMap, true); sql = SqlReplaceHelper.replaceFields(sql, fieldNameToBizNameMap, true);
@@ -288,15 +286,15 @@ public class SqlQueryConverter implements QueryConverter {
sql = SqlReplaceHelper.replaceTable(sql, sql = SqlReplaceHelper.replaceTable(sql,
Constants.TABLE_PREFIX + queryStatement.getDataSetId()); Constants.TABLE_PREFIX + queryStatement.getDataSetId());
log.debug("replaceTableName after:{}", sql); log.debug("replaceTableName after:{}", sql);
queryStatement.getSqlQueryParam().setSql(sql); queryStatement.getSqlQuery().setSql(sql);
} }
private void rewriteOrderBy(QueryStatement queryStatement) { private void rewriteOrderBy(QueryStatement queryStatement) {
// replace order by field with the select sequence number // replace order by field with the select sequence number
String sql = queryStatement.getSqlQueryParam().getSql(); String sql = queryStatement.getSqlQuery().getSql();
String newSql = SqlReplaceHelper.replaceAggAliasOrderbyField(sql); String newSql = SqlReplaceHelper.replaceAggAliasOrderbyField(sql);
log.debug("replaceOrderAggSameAlias {} -> {}", sql, newSql); log.debug("replaceOrderAggSameAlias {} -> {}", sql, newSql);
queryStatement.getSqlQueryParam().setSql(newSql); queryStatement.getSqlQuery().setSql(newSql);
} }
protected Map<String, String> getFieldNameToBizNameMap(SemanticSchemaResp semanticSchemaResp) { protected Map<String, String> getFieldNameToBizNameMap(SemanticSchemaResp semanticSchemaResp) {

View File

@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.converter; package com.tencent.supersonic.headless.core.translator.parser;
import com.tencent.supersonic.headless.api.pojo.enums.ModelDefineType; import com.tencent.supersonic.headless.api.pojo.enums.ModelDefineType;
import com.tencent.supersonic.headless.api.pojo.response.ModelResp; import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
@@ -14,18 +14,17 @@ import java.util.List;
import java.util.Objects; import java.util.Objects;
@Slf4j @Slf4j
@Component("SqlVariableConverter") @Component("SqlVariableParser")
public class SqlVariableConverter implements QueryConverter { public class SqlVariableParser implements QueryParser {
@Override @Override
public boolean accept(QueryStatement queryStatement) { public boolean accept(QueryStatement queryStatement) {
return Objects.nonNull(queryStatement.getStructQueryParam()) return Objects.nonNull(queryStatement.getStructQuery()) && !queryStatement.getIsS2SQL();
&& !queryStatement.getIsS2SQL();
} }
@Override @Override
public void convert(QueryStatement queryStatement) { public void parse(QueryStatement queryStatement) {
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp(); SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchema();
List<ModelResp> modelResps = semanticSchemaResp.getModelResps(); List<ModelResp> modelResps = semanticSchemaResp.getModelResps();
if (CollectionUtils.isEmpty(modelResps)) { if (CollectionUtils.isEmpty(modelResps)) {
return; return;
@@ -36,7 +35,7 @@ public class SqlVariableConverter implements QueryConverter {
String sqlParsed = String sqlParsed =
SqlVariableParseUtils.parse(modelResp.getModelDetail().getSqlQuery(), SqlVariableParseUtils.parse(modelResp.getModelDetail().getSqlQuery(),
modelResp.getModelDetail().getSqlVariables(), modelResp.getModelDetail().getSqlVariables(),
queryStatement.getStructQueryParam().getParams()); queryStatement.getStructQuery().getParams());
DataModel dataModel = DataModel dataModel =
queryStatement.getOntology().getDataModelMap().get(modelResp.getBizName()); queryStatement.getOntology().getDataModelMap().get(modelResp.getBizName());
dataModel.setSqlQuery(sqlParsed); dataModel.setSqlQuery(sqlParsed);

View File

@@ -0,0 +1,72 @@
package com.tencent.supersonic.headless.core.translator.parser;
import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.core.pojo.Database;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQuery;
import com.tencent.supersonic.headless.core.pojo.StructQuery;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import java.util.Objects;
import java.util.stream.Collectors;
@Component("StructQueryParser")
@Slf4j
public class StructQueryParser implements QueryParser {
@Override
public boolean accept(QueryStatement queryStatement) {
return Objects.nonNull(queryStatement.getStructQuery()) && !queryStatement.getIsS2SQL();
}
@Override
public void parse(QueryStatement queryStatement) throws Exception {
SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class);
StructQuery structQuery = queryStatement.getStructQuery();
String dsTable = "t_1";
SqlQuery sqlParam = new SqlQuery();
sqlParam.setTable(dsTable);
String sql = String.format("select %s from %s %s %s %s",
sqlGenerateUtils.getSelect(structQuery), dsTable,
sqlGenerateUtils.getGroupBy(structQuery), sqlGenerateUtils.getOrderBy(structQuery),
sqlGenerateUtils.getLimit(structQuery));
Database database = queryStatement.getOntology().getDatabase();
if (!sqlGenerateUtils.isSupportWith(database.getType(), database.getVersion())) {
sqlParam.setSupportWith(false);
sql = String.format("select %s from %s t0 %s %s %s",
sqlGenerateUtils.getSelect(structQuery), dsTable,
sqlGenerateUtils.getGroupBy(structQuery),
sqlGenerateUtils.getOrderBy(structQuery),
sqlGenerateUtils.getLimit(structQuery));
}
sqlParam.setSql(sql);
queryStatement.setSqlQuery(sqlParam);
OntologyQuery ontologyQuery = new OntologyQuery();
ontologyQuery.getDimensions().addAll(structQuery.getGroups());
ontologyQuery.getMetrics().addAll(structQuery.getAggregators().stream()
.map(Aggregator::getColumn).collect(Collectors.toList()));
String where = sqlGenerateUtils.generateWhere(structQuery, null);
ontologyQuery.setWhere(where);
if (ontologyQuery.getMetrics().isEmpty()) {
ontologyQuery.setAggOption(AggOption.NATIVE);
} else {
ontologyQuery.setAggOption(AggOption.DEFAULT);
}
ontologyQuery.setNativeQuery(structQuery.getQueryType().isNativeAggQuery());
ontologyQuery.setOrder(structQuery.getOrders().stream()
.map(order -> new ColumnOrder(order.getColumn(), order.getDirection()))
.collect(Collectors.toList()));
ontologyQuery.setLimit(structQuery.getLimit());
queryStatement.setOntologyQuery(ontologyQuery);
log.info("parse structQuery [{}] ", queryStatement.getSqlQuery());
}
}

View File

@@ -2,9 +2,9 @@ package com.tencent.supersonic.headless.core.translator.parser.calcite;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel; import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Dimension; import com.tencent.supersonic.headless.core.translator.parser.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.JoinRelation; import com.tencent.supersonic.headless.core.pojo.JoinRelation;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric; import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Ontology; import com.tencent.supersonic.headless.core.pojo.Ontology;
import lombok.Builder; import lombok.Builder;
import lombok.Data; import lombok.Data;
import org.apache.calcite.schema.Schema; import org.apache.calcite.schema.Schema;

View File

@@ -13,7 +13,7 @@ import com.tencent.supersonic.headless.core.translator.parser.calcite.render.Ren
import com.tencent.supersonic.headless.core.translator.parser.calcite.render.SourceRender; import com.tencent.supersonic.headless.core.translator.parser.calcite.render.SourceRender;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants; import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel; import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam; import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParser; import org.apache.calcite.sql.parser.SqlParser;
@@ -25,7 +25,7 @@ import java.util.*;
public class SqlBuilder { public class SqlBuilder {
private final S2CalciteSchema schema; private final S2CalciteSchema schema;
private OntologyQueryParam ontologyQueryParam; private OntologyQuery ontologyQuery;
private SqlValidatorScope scope; private SqlValidatorScope scope;
private SqlNode parserNode; private SqlNode parserNode;
private boolean isAgg = false; private boolean isAgg = false;
@@ -36,11 +36,11 @@ public class SqlBuilder {
} }
public String buildOntologySql(QueryStatement queryStatement) throws Exception { public String buildOntologySql(QueryStatement queryStatement) throws Exception {
this.ontologyQueryParam = queryStatement.getOntologyQueryParam(); this.ontologyQuery = queryStatement.getOntologyQuery();
if (ontologyQueryParam.getLimit() == null) { if (ontologyQuery.getLimit() == null) {
ontologyQueryParam.setLimit(0L); ontologyQuery.setLimit(0L);
} }
this.aggOption = ontologyQueryParam.getAggOption(); this.aggOption = ontologyQuery.getAggOption();
buildParseNode(); buildParseNode();
Database database = queryStatement.getOntology().getDatabase(); Database database = queryStatement.getOntology().getDatabase();
@@ -51,8 +51,7 @@ public class SqlBuilder {
private void buildParseNode() throws Exception { private void buildParseNode() throws Exception {
// find relevant data models // find relevant data models
scope = SchemaBuilder.getScope(schema); scope = SchemaBuilder.getScope(schema);
List<DataModel> dataModels = List<DataModel> dataModels = DataModelNode.getQueryDataModels(scope, schema, ontologyQuery);
DataModelNode.getQueryDataModels(scope, schema, ontologyQueryParam);
if (dataModels == null || dataModels.isEmpty()) { if (dataModels == null || dataModels.isEmpty()) {
throw new Exception("data model not found"); throw new Exception("data model not found");
} }
@@ -69,14 +68,14 @@ public class SqlBuilder {
while (it.hasNext()) { while (it.hasNext()) {
Renderer renderer = it.next(); Renderer renderer = it.next();
if (previous != null) { if (previous != null) {
previous.render(ontologyQueryParam, dataModels, scope, schema, !isAgg); previous.render(ontologyQuery, dataModels, scope, schema, !isAgg);
renderer.setTable(previous renderer.setTable(previous
.builderAs(DataModelNode.getNames(dataModels) + "_" + String.valueOf(i))); .builderAs(DataModelNode.getNames(dataModels) + "_" + String.valueOf(i)));
i++; i++;
} }
previous = renderer; previous = renderer;
} }
builders.getLast().render(ontologyQueryParam, dataModels, scope, schema, !isAgg); builders.getLast().render(ontologyQuery, dataModels, scope, schema, !isAgg);
parserNode = builders.getLast().builder(); parserNode = builders.getLast().builder();
} }
@@ -87,7 +86,7 @@ public class SqlBuilder {
// default by dataModel time aggregation // default by dataModel time aggregation
if (Objects.nonNull(dataModel.getAggTime()) && !dataModel.getAggTime() if (Objects.nonNull(dataModel.getAggTime()) && !dataModel.getAggTime()
.equalsIgnoreCase(Constants.DIMENSION_TYPE_TIME_GRANULARITY_NONE)) { .equalsIgnoreCase(Constants.DIMENSION_TYPE_TIME_GRANULARITY_NONE)) {
if (!ontologyQueryParam.isNativeQuery()) { if (!ontologyQuery.isNativeQuery()) {
return true; return true;
} }
} }

View File

@@ -4,6 +4,9 @@ import com.google.common.collect.Lists;
import com.tencent.supersonic.common.calcite.Configuration; import com.tencent.supersonic.common.calcite.Configuration;
import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper; import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.JoinRelation;
import com.tencent.supersonic.headless.core.pojo.Ontology;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema; import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.calcite.SchemaBuilder; import com.tencent.supersonic.headless.core.translator.parser.calcite.SchemaBuilder;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.*; import com.tencent.supersonic.headless.core.translator.parser.s2sql.*;
@@ -130,7 +133,7 @@ public class DataModelNode extends SemanticNode {
return dataModelList.stream().map(DataModel::getName).collect(Collectors.joining("_")); return dataModelList.stream().map(DataModel::getName).collect(Collectors.joining("_"));
} }
public static void getQueryDimensionMeasure(Ontology ontology, OntologyQueryParam queryParam, public static void getQueryDimensionMeasure(Ontology ontology, OntologyQuery queryParam,
Set<String> queryDimensions, Set<String> queryMeasures) { Set<String> queryDimensions, Set<String> queryMeasures) {
queryDimensions.addAll(queryParam.getDimensions().stream() queryDimensions.addAll(queryParam.getDimensions().stream()
.map(d -> d.contains(Constants.DIMENSION_IDENTIFY) .map(d -> d.contains(Constants.DIMENSION_IDENTIFY)
@@ -146,9 +149,9 @@ public class DataModelNode extends SemanticNode {
.forEach(queryMeasures::add); .forEach(queryMeasures::add);
} }
public static void mergeQueryFilterDimensionMeasure(Ontology ontology, public static void mergeQueryFilterDimensionMeasure(Ontology ontology, OntologyQuery queryParam,
OntologyQueryParam queryParam, Set<String> dimensions, Set<String> measures, Set<String> dimensions, Set<String> measures, SqlValidatorScope scope)
SqlValidatorScope scope) throws Exception { throws Exception {
EngineType engineType = ontology.getDatabase().getType(); EngineType engineType = ontology.getDatabase().getType();
if (Objects.nonNull(queryParam.getWhere()) && !queryParam.getWhere().isEmpty()) { if (Objects.nonNull(queryParam.getWhere()) && !queryParam.getWhere().isEmpty()) {
Set<String> filterConditions = new HashSet<>(); Set<String> filterConditions = new HashSet<>();
@@ -173,7 +176,7 @@ public class DataModelNode extends SemanticNode {
} }
public static List<DataModel> getQueryDataModels(SqlValidatorScope scope, public static List<DataModel> getQueryDataModels(SqlValidatorScope scope,
S2CalciteSchema schema, OntologyQueryParam queryParam) throws Exception { S2CalciteSchema schema, OntologyQuery queryParam) throws Exception {
Ontology ontology = schema.getOntology(); Ontology ontology = schema.getOntology();
// get query measures and dimensions // get query measures and dimensions
Set<String> queryMeasures = new HashSet<>(); Set<String> queryMeasures = new HashSet<>();
@@ -282,7 +285,7 @@ public class DataModelNode extends SemanticNode {
} }
private static List<DataModel> findRelatedModelsByRelation(Ontology ontology, private static List<DataModel> findRelatedModelsByRelation(Ontology ontology,
OntologyQueryParam queryParam, DataModel baseDataModel, Set<String> queryDimensions, OntologyQuery queryParam, DataModel baseDataModel, Set<String> queryDimensions,
Set<String> queryMeasures) { Set<String> queryMeasures) {
Set<String> joinDataModelNames = new HashSet<>(); Set<String> joinDataModelNames = new HashSet<>();
List<DataModel> joinDataModels = new ArrayList<>(); List<DataModel> joinDataModels = new ArrayList<>();

View File

@@ -9,7 +9,7 @@ import com.tencent.supersonic.headless.core.translator.parser.calcite.node.Seman
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants; import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel; import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric; import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam; import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import org.apache.calcite.sql.SqlIdentifier; import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParserPos; import org.apache.calcite.sql.parser.SqlParserPos;
@@ -26,7 +26,7 @@ import java.util.stream.Collectors;
public class FilterRender extends Renderer { public class FilterRender extends Renderer {
@Override @Override
public void render(OntologyQueryParam metricCommand, List<DataModel> dataModels, public void render(OntologyQuery metricCommand, List<DataModel> dataModels,
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception { SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
TableView tableView = super.tableView; TableView tableView = super.tableView;
SqlNode filterNode = null; SqlNode filterNode = null;

View File

@@ -13,10 +13,10 @@ import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel; import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Dimension; import com.tencent.supersonic.headless.core.translator.parser.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Identify; import com.tencent.supersonic.headless.core.translator.parser.s2sql.Identify;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.JoinRelation; import com.tencent.supersonic.headless.core.pojo.JoinRelation;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Materialization; import com.tencent.supersonic.headless.core.translator.parser.s2sql.Materialization;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric; import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam; import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.JoinConditionType; import org.apache.calcite.sql.JoinConditionType;
import org.apache.calcite.sql.SqlBasicCall; import org.apache.calcite.sql.SqlBasicCall;
@@ -47,7 +47,7 @@ import java.util.stream.Collectors;
public class JoinRender extends Renderer { public class JoinRender extends Renderer {
@Override @Override
public void render(OntologyQueryParam metricCommand, List<DataModel> dataModels, public void render(OntologyQuery metricCommand, List<DataModel> dataModels,
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception { SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
String queryWhere = metricCommand.getWhere(); String queryWhere = metricCommand.getWhere();
EngineType engineType = schema.getOntology().getDatabase().getType(); EngineType engineType = schema.getOntology().getDatabase().getType();

View File

@@ -7,7 +7,7 @@ import com.tencent.supersonic.headless.core.translator.parser.calcite.TableView;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.MetricNode; import com.tencent.supersonic.headless.core.translator.parser.calcite.node.MetricNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.SemanticNode; import com.tencent.supersonic.headless.core.translator.parser.calcite.node.SemanticNode;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel; import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam; import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlNodeList; import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.fun.SqlStdOperatorTable; import org.apache.calcite.sql.fun.SqlStdOperatorTable;
@@ -22,7 +22,7 @@ import java.util.List;
public class OutputRender extends Renderer { public class OutputRender extends Renderer {
@Override @Override
public void render(OntologyQueryParam metricCommand, List<DataModel> dataModels, public void render(OntologyQuery metricCommand, List<DataModel> dataModels,
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception { SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
TableView selectDataSet = super.tableView; TableView selectDataSet = super.tableView;
EngineType engineType = schema.getOntology().getDatabase().getType(); EngineType engineType = schema.getOntology().getDatabase().getType();

View File

@@ -11,7 +11,7 @@ import com.tencent.supersonic.headless.core.translator.parser.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Identify; import com.tencent.supersonic.headless.core.translator.parser.s2sql.Identify;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Measure; import com.tencent.supersonic.headless.core.translator.parser.s2sql.Measure;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric; import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam; import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import lombok.Data; import lombok.Data;
import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope; import org.apache.calcite.sql.validate.SqlValidatorScope;
@@ -114,6 +114,6 @@ public abstract class Renderer {
return SemanticNode.buildAs(alias, tableView.build()); return SemanticNode.buildAs(alias, tableView.build());
} }
public abstract void render(OntologyQueryParam metricCommand, List<DataModel> dataModels, public abstract void render(OntologyQuery metricCommand, List<DataModel> dataModels,
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception; SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception;
} }

View File

@@ -16,7 +16,7 @@ import com.tencent.supersonic.headless.core.translator.parser.s2sql.Identify;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Materialization; import com.tencent.supersonic.headless.core.translator.parser.s2sql.Materialization;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Measure; import com.tencent.supersonic.headless.core.translator.parser.s2sql.Measure;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric; import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam; import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope; import org.apache.calcite.sql.validate.SqlValidatorScope;
@@ -334,9 +334,9 @@ public class SourceRender extends Renderer {
} }
} }
public void render(OntologyQueryParam ontologyQueryParam, List<DataModel> dataModels, public void render(OntologyQuery ontologyQuery, List<DataModel> dataModels,
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception { SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
String queryWhere = ontologyQueryParam.getWhere(); String queryWhere = ontologyQuery.getWhere();
Set<String> whereFields = new HashSet<>(); Set<String> whereFields = new HashSet<>();
List<String> fieldWhere = new ArrayList<>(); List<String> fieldWhere = new ArrayList<>();
EngineType engineType = schema.getOntology().getDatabase().getType(); EngineType engineType = schema.getOntology().getDatabase().getType();
@@ -347,13 +347,13 @@ public class SourceRender extends Renderer {
} }
if (dataModels.size() == 1) { if (dataModels.size() == 1) {
DataModel dataModel = dataModels.get(0); DataModel dataModel = dataModels.get(0);
super.tableView = renderOne("", fieldWhere, ontologyQueryParam.getMetrics(), super.tableView = renderOne("", fieldWhere, ontologyQuery.getMetrics(),
ontologyQueryParam.getDimensions(), ontologyQueryParam.getWhere(), dataModel, ontologyQuery.getDimensions(), ontologyQuery.getWhere(), dataModel, scope,
scope, schema, nonAgg); schema, nonAgg);
return; return;
} }
JoinRender joinRender = new JoinRender(); JoinRender joinRender = new JoinRender();
joinRender.render(ontologyQueryParam, dataModels, scope, schema, nonAgg); joinRender.render(ontologyQuery, dataModels, scope, schema, nonAgg);
super.tableView = joinRender.getTableView(); super.tableView = joinRender.getTableView();
} }
} }

View File

@@ -4,7 +4,6 @@ import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.headless.core.cache.QueryCache; import com.tencent.supersonic.headless.core.cache.QueryCache;
import com.tencent.supersonic.headless.core.executor.QueryAccelerator; import com.tencent.supersonic.headless.core.executor.QueryAccelerator;
import com.tencent.supersonic.headless.core.executor.QueryExecutor; import com.tencent.supersonic.headless.core.executor.QueryExecutor;
import com.tencent.supersonic.headless.core.translator.converter.QueryConverter;
import com.tencent.supersonic.headless.core.translator.optimizer.QueryOptimizer; import com.tencent.supersonic.headless.core.translator.optimizer.QueryOptimizer;
import com.tencent.supersonic.headless.core.translator.parser.QueryParser; import com.tencent.supersonic.headless.core.translator.parser.QueryParser;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
@@ -20,29 +19,20 @@ import java.util.stream.Collectors;
@Slf4j @Slf4j
public class ComponentFactory { public class ComponentFactory {
private static List<QueryConverter> queryConverters = new ArrayList<>();
private static Map<String, QueryOptimizer> queryOptimizers = new HashMap<>(); private static Map<String, QueryOptimizer> queryOptimizers = new HashMap<>();
private static List<QueryExecutor> queryExecutors = new ArrayList<>(); private static List<QueryExecutor> queryExecutors = new ArrayList<>();
private static List<QueryAccelerator> queryAccelerators = new ArrayList<>(); private static List<QueryAccelerator> queryAccelerators = new ArrayList<>();
private static QueryParser queryParser; private static List<QueryParser> queryParsers = new ArrayList<>();
private static QueryCache queryCache; private static QueryCache queryCache;
static { static {
initQueryConverter();
initQueryOptimizer(); initQueryOptimizer();
initQueryExecutors(); initQueryExecutors();
initQueryAccelerators(); initQueryAccelerators();
initQueryParser(); initQueryParsers();
initQueryCache(); initQueryCache();
} }
public static List<QueryConverter> getQueryConverters() {
if (queryConverters.isEmpty()) {
initQueryConverter();
}
return queryConverters;
}
public static List<QueryOptimizer> getQueryOptimizers() { public static List<QueryOptimizer> getQueryOptimizers() {
if (queryOptimizers.isEmpty()) { if (queryOptimizers.isEmpty()) {
initQueryOptimizer(); initQueryOptimizer();
@@ -64,11 +54,11 @@ public class ComponentFactory {
return queryAccelerators; return queryAccelerators;
} }
public static QueryParser getQueryParser() { public static List<QueryParser> getQueryParser() {
if (queryParser == null) { if (queryParsers.isEmpty()) {
initQueryParser(); initQueryParsers();
} }
return queryParser; return queryParsers;
} }
public static QueryCache getQueryCache() { public static QueryCache getQueryCache() {
@@ -92,23 +82,15 @@ public class ComponentFactory {
} }
private static void initQueryExecutors() { private static void initQueryExecutors() {
// queryExecutors.add(ContextUtils.getContext().getBean("JdbcExecutor",
// JdbcExecutor.class));
init(QueryExecutor.class, queryExecutors); init(QueryExecutor.class, queryExecutors);
} }
private static void initQueryAccelerators() { private static void initQueryAccelerators() {
// queryExecutors.add(ContextUtils.getContext().getBean("JdbcExecutor",
// JdbcExecutor.class));
init(QueryAccelerator.class, queryAccelerators); init(QueryAccelerator.class, queryAccelerators);
} }
private static void initQueryConverter() { private static void initQueryParsers() {
init(QueryConverter.class, queryConverters); init(QueryParser.class, queryParsers);
}
private static void initQueryParser() {
queryParser = init(QueryParser.class);
} }
private static void initQueryCache() { private static void initQueryCache() {

View File

@@ -19,7 +19,7 @@ import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricResp; import com.tencent.supersonic.headless.api.pojo.response.MetricResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp; import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import com.tencent.supersonic.headless.core.config.ExecutorConfig; import com.tencent.supersonic.headless.core.config.ExecutorConfig;
import com.tencent.supersonic.headless.core.pojo.StructQueryParam; import com.tencent.supersonic.headless.core.pojo.StructQuery;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.commons.lang3.tuple.ImmutablePair;
@@ -85,26 +85,25 @@ public class SqlGenerateUtils {
return selectSql; return selectSql;
} }
public String getLimit(StructQueryParam structQueryParam) { public String getLimit(StructQuery structQuery) {
if (structQueryParam != null && structQueryParam.getLimit() != null if (structQuery != null && structQuery.getLimit() != null && structQuery.getLimit() > 0) {
&& structQueryParam.getLimit() > 0) { return " limit " + structQuery.getLimit();
return " limit " + structQueryParam.getLimit();
} }
return ""; return "";
} }
public String getSelect(StructQueryParam structQueryParam) { public String getSelect(StructQuery structQuery) {
String aggStr = structQueryParam.getAggregators().stream().map(this::getSelectField) String aggStr = structQuery.getAggregators().stream().map(this::getSelectField)
.collect(Collectors.joining(",")); .collect(Collectors.joining(","));
return CollectionUtils.isEmpty(structQueryParam.getGroups()) ? aggStr return CollectionUtils.isEmpty(structQuery.getGroups()) ? aggStr
: String.join(",", structQueryParam.getGroups()) + "," + aggStr; : String.join(",", structQuery.getGroups()) + "," + aggStr;
} }
public String getSelect(StructQueryParam structQueryParam, Map<String, String> deriveMetrics) { public String getSelect(StructQuery structQuery, Map<String, String> deriveMetrics) {
String aggStr = structQueryParam.getAggregators().stream() String aggStr = structQuery.getAggregators().stream()
.map(a -> getSelectField(a, deriveMetrics)).collect(Collectors.joining(",")); .map(a -> getSelectField(a, deriveMetrics)).collect(Collectors.joining(","));
return CollectionUtils.isEmpty(structQueryParam.getGroups()) ? aggStr return CollectionUtils.isEmpty(structQuery.getGroups()) ? aggStr
: String.join(",", structQueryParam.getGroups()) + "," + aggStr; : String.join(",", structQuery.getGroups()) + "," + aggStr;
} }
public String getSelectField(final Aggregator agg) { public String getSelectField(final Aggregator agg) {
@@ -129,46 +128,46 @@ public class SqlGenerateUtils {
return deriveMetrics.get(agg.getColumn()); return deriveMetrics.get(agg.getColumn());
} }
public String getGroupBy(StructQueryParam structQueryParam) { public String getGroupBy(StructQuery structQuery) {
if (CollectionUtils.isEmpty(structQueryParam.getGroups())) { if (CollectionUtils.isEmpty(structQuery.getGroups())) {
return ""; return "";
} }
return "group by " + String.join(",", structQueryParam.getGroups()); return "group by " + String.join(",", structQuery.getGroups());
} }
public String getOrderBy(StructQueryParam structQueryParam) { public String getOrderBy(StructQuery structQuery) {
if (CollectionUtils.isEmpty(structQueryParam.getOrders())) { if (CollectionUtils.isEmpty(structQuery.getOrders())) {
return ""; return "";
} }
return "order by " + structQueryParam.getOrders().stream() return "order by " + structQuery.getOrders().stream()
.map(order -> " " + order.getColumn() + " " + order.getDirection() + " ") .map(order -> " " + order.getColumn() + " " + order.getDirection() + " ")
.collect(Collectors.joining(",")); .collect(Collectors.joining(","));
} }
public String getOrderBy(StructQueryParam structQueryParam, Map<String, String> deriveMetrics) { public String getOrderBy(StructQuery structQuery, Map<String, String> deriveMetrics) {
if (CollectionUtils.isEmpty(structQueryParam.getOrders())) { if (CollectionUtils.isEmpty(structQuery.getOrders())) {
return ""; return "";
} }
if (!structQueryParam.getOrders().stream() if (!structQuery.getOrders().stream()
.anyMatch(o -> deriveMetrics.containsKey(o.getColumn()))) { .anyMatch(o -> deriveMetrics.containsKey(o.getColumn()))) {
return getOrderBy(structQueryParam); return getOrderBy(structQuery);
} }
return "order by " + structQueryParam.getOrders().stream() return "order by " + structQuery.getOrders().stream()
.map(order -> " " + (deriveMetrics.containsKey(order.getColumn()) .map(order -> " " + (deriveMetrics.containsKey(order.getColumn())
? deriveMetrics.get(order.getColumn()) ? deriveMetrics.get(order.getColumn())
: order.getColumn()) + " " + order.getDirection() + " ") : order.getColumn()) + " " + order.getDirection() + " ")
.collect(Collectors.joining(",")); .collect(Collectors.joining(","));
} }
public String generateWhere(StructQueryParam structQueryParam, ItemDateResp itemDateResp) { public String generateWhere(StructQuery structQuery, ItemDateResp itemDateResp) {
String whereClauseFromFilter = String whereClauseFromFilter =
sqlFilterUtils.getWhereClause(structQueryParam.getDimensionFilters()); sqlFilterUtils.getWhereClause(structQuery.getDimensionFilters());
String whereFromDate = getDateWhereClause(structQueryParam.getDateInfo(), itemDateResp); String whereFromDate = getDateWhereClause(structQuery.getDateInfo(), itemDateResp);
return mergeDateWhereClause(structQueryParam, whereClauseFromFilter, whereFromDate); return mergeDateWhereClause(structQuery, whereClauseFromFilter, whereFromDate);
} }
private String mergeDateWhereClause(StructQueryParam structQueryParam, private String mergeDateWhereClause(StructQuery structQuery, String whereClauseFromFilter,
String whereClauseFromFilter, String whereFromDate) { String whereFromDate) {
if (StringUtils.isNotEmpty(whereFromDate) if (StringUtils.isNotEmpty(whereFromDate)
&& StringUtils.isNotEmpty(whereClauseFromFilter)) { && StringUtils.isNotEmpty(whereClauseFromFilter)) {
return String.format("%s AND (%s)", whereFromDate, whereClauseFromFilter); return String.format("%s AND (%s)", whereFromDate, whereClauseFromFilter);
@@ -180,7 +179,7 @@ public class SqlGenerateUtils {
return whereFromDate; return whereFromDate;
} else if (Objects.isNull(whereFromDate) && StringUtils.isEmpty(whereClauseFromFilter)) { } else if (Objects.isNull(whereFromDate) && StringUtils.isEmpty(whereClauseFromFilter)) {
log.debug("the current date information is empty, enter the date initialization logic"); log.debug("the current date information is empty, enter the date initialization logic");
return dateModeUtils.defaultRecentDateInfo(structQueryParam.getDateInfo()); return dateModeUtils.defaultRecentDateInfo(structQuery.getDateInfo());
} }
return whereClauseFromFilter; return whereClauseFromFilter;
} }
@@ -204,12 +203,12 @@ public class SqlGenerateUtils {
return dateModeUtils.getDateWhereStr(dateInfo, dateDate); return dateModeUtils.getDateWhereStr(dateInfo, dateDate);
} }
public Triple<String, String, String> getBeginEndTime(StructQueryParam structQueryParam, public Triple<String, String, String> getBeginEndTime(StructQuery structQuery,
ItemDateResp dataDate) { ItemDateResp dataDate) {
if (Objects.isNull(structQueryParam.getDateInfo())) { if (Objects.isNull(structQuery.getDateInfo())) {
return Triple.of("", "", ""); return Triple.of("", "", "");
} }
DateConf dateConf = structQueryParam.getDateInfo(); DateConf dateConf = structQuery.getDateInfo();
String dateInfo = dateModeUtils.getSysDateCol(dateConf); String dateInfo = dateModeUtils.getSysDateCol(dateConf);
if (dateInfo.isEmpty()) { if (dateInfo.isEmpty()) {
return Triple.of("", "", ""); return Triple.of("", "", "");

View File

@@ -2,7 +2,7 @@ package com.tencent.supersonic.chat.core.parser.aggregate;
import com.alibaba.fastjson.JSON; import com.alibaba.fastjson.JSON;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.parser.calcite.CalciteQueryParser; import com.tencent.supersonic.headless.core.translator.parser.OntologyQueryParser;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.testng.Assert; import org.testng.Assert;
@@ -316,7 +316,7 @@ public class CalciteSqlParserTest {
+ " \"createdAt\": 1711367511146,\n" + " \"createdAt\": 1711367511146,\n"
+ " \"updatedAt\": 1711367511146\n" + " }\n" + " }\n" + "}"; + " \"updatedAt\": 1711367511146\n" + " }\n" + " }\n" + "}";
QueryStatement queryStatement = JSON.parseObject(json, QueryStatement.class); QueryStatement queryStatement = JSON.parseObject(json, QueryStatement.class);
CalciteQueryParser calciteSqlParser = new CalciteQueryParser(); OntologyQueryParser calciteSqlParser = new OntologyQueryParser();
calciteSqlParser.parse(queryStatement); calciteSqlParser.parse(queryStatement);
Assert.assertEquals(queryStatement.getSql().trim().replaceAll("\\s+", ""), Assert.assertEquals(queryStatement.getSql().trim().replaceAll("\\s+", ""),
"SELECT`imp_date`AS`sys_imp_date`,SUM(1)AS`pv`" + "FROM" + "`s2_pv_uv_statis`" "SELECT`imp_date`AS`sys_imp_date`,SUM(1)AS`pv`" + "FROM" + "`s2_pv_uv_statis`"

View File

@@ -19,8 +19,8 @@ import com.tencent.supersonic.headless.chat.knowledge.helper.NatureHelper;
import com.tencent.supersonic.headless.core.cache.QueryCache; import com.tencent.supersonic.headless.core.cache.QueryCache;
import com.tencent.supersonic.headless.core.executor.QueryExecutor; import com.tencent.supersonic.headless.core.executor.QueryExecutor;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQueryParam; import com.tencent.supersonic.headless.core.pojo.SqlQuery;
import com.tencent.supersonic.headless.core.pojo.StructQueryParam; import com.tencent.supersonic.headless.core.pojo.StructQuery;
import com.tencent.supersonic.headless.core.translator.SemanticTranslator; import com.tencent.supersonic.headless.core.translator.SemanticTranslator;
import com.tencent.supersonic.headless.core.utils.ComponentFactory; import com.tencent.supersonic.headless.core.utils.ComponentFactory;
import com.tencent.supersonic.headless.server.annotation.S2DataPermission; import com.tencent.supersonic.headless.server.annotation.S2DataPermission;
@@ -128,8 +128,7 @@ public class S2SemanticLayerService implements SemanticLayerService {
for (QueryExecutor queryExecutor : queryExecutors) { for (QueryExecutor queryExecutor : queryExecutors) {
if (queryExecutor.accept(queryStatement)) { if (queryExecutor.accept(queryStatement)) {
queryResp = queryExecutor.execute(queryStatement); queryResp = queryExecutor.execute(queryStatement);
queryUtils.populateQueryColumns(queryResp, queryUtils.populateQueryColumns(queryResp, queryStatement.getSemanticSchema());
queryStatement.getSemanticSchemaResp());
} }
} }
@@ -299,7 +298,7 @@ public class S2SemanticLayerService implements SemanticLayerService {
QueryStatement queryStatement = new QueryStatement(); QueryStatement queryStatement = new QueryStatement();
queryStatement.setEnableOptimize(queryUtils.enableOptimize()); queryStatement.setEnableOptimize(queryUtils.enableOptimize());
queryStatement.setDataSetId(queryReq.getDataSetId()); queryStatement.setDataSetId(queryReq.getDataSetId());
queryStatement.setSemanticSchemaResp(semanticSchemaResp); queryStatement.setSemanticSchema(semanticSchemaResp);
queryStatement.setOntology(semanticSchemaManager.buildOntology(semanticSchemaResp)); queryStatement.setOntology(semanticSchemaManager.buildOntology(semanticSchemaResp));
return queryStatement; return queryStatement;
} }
@@ -308,9 +307,9 @@ public class S2SemanticLayerService implements SemanticLayerService {
QueryStatement queryStatement = buildQueryStatement(querySqlReq); QueryStatement queryStatement = buildQueryStatement(querySqlReq);
queryStatement.setIsS2SQL(true); queryStatement.setIsS2SQL(true);
SqlQueryParam sqlQueryParam = new SqlQueryParam(); SqlQuery sqlQuery = new SqlQuery();
sqlQueryParam.setSql(querySqlReq.getSql()); sqlQuery.setSql(querySqlReq.getSql());
queryStatement.setSqlQueryParam(sqlQueryParam); queryStatement.setSqlQuery(sqlQuery);
// If dataSetId or DataSetName is empty, parse dataSetId from the SQL // If dataSetId or DataSetName is empty, parse dataSetId from the SQL
if (querySqlReq.needGetDataSetId()) { if (querySqlReq.needGetDataSetId()) {
@@ -322,9 +321,9 @@ public class S2SemanticLayerService implements SemanticLayerService {
private QueryStatement buildStructQueryStatement(SemanticQueryReq queryReq) { private QueryStatement buildStructQueryStatement(SemanticQueryReq queryReq) {
QueryStatement queryStatement = buildQueryStatement(queryReq); QueryStatement queryStatement = buildQueryStatement(queryReq);
StructQueryParam structQueryParam = new StructQueryParam(); StructQuery structQuery = new StructQuery();
BeanUtils.copyProperties(queryReq, structQueryParam); BeanUtils.copyProperties(queryReq, structQuery);
queryStatement.setStructQueryParam(structQueryParam); queryStatement.setStructQuery(structQuery);
queryStatement.setIsS2SQL(false); queryStatement.setIsS2SQL(false);
return queryStatement; return queryStatement;
} }

View File

@@ -4,6 +4,8 @@ import com.tencent.supersonic.common.pojo.ModelRela;
import com.tencent.supersonic.common.pojo.enums.FilterOperatorEnum; import com.tencent.supersonic.common.pojo.enums.FilterOperatorEnum;
import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp; import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp; import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.core.pojo.JoinRelation;
import com.tencent.supersonic.headless.core.pojo.Ontology;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema; import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.*; import com.tencent.supersonic.headless.core.translator.parser.s2sql.*;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Materialization.TimePartType; import com.tencent.supersonic.headless.core.translator.parser.s2sql.Materialization.TimePartType;

View File

@@ -31,7 +31,7 @@ public class MetricDrillDownChecker {
private MetricService metricService; private MetricService metricService;
public void checkQuery(QueryStatement queryStatement) { public void checkQuery(QueryStatement queryStatement) {
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp(); SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchema();
String sql = queryStatement.getSql(); String sql = queryStatement.getSql();
if (StringUtils.isBlank(sql)) { if (StringUtils.isBlank(sql)) {
return; return;

View File

@@ -6,7 +6,7 @@ import com.tencent.supersonic.headless.api.pojo.response.SqlParserResp;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema; import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.calcite.SqlBuilder; import com.tencent.supersonic.headless.core.translator.parser.calcite.SqlBuilder;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam; import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.server.manager.SemanticSchemaManager; import com.tencent.supersonic.headless.server.manager.SemanticSchemaManager;
import com.tencent.supersonic.headless.server.pojo.yaml.*; import com.tencent.supersonic.headless.server.pojo.yaml.*;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
@@ -19,8 +19,8 @@ import java.util.List;
@Slf4j @Slf4j
class HeadlessParserServiceTest { class HeadlessParserServiceTest {
public static SqlParserResp parser(S2CalciteSchema semanticSchema, public static SqlParserResp parser(S2CalciteSchema semanticSchema, OntologyQuery ontologyQuery,
OntologyQueryParam ontologyQueryParam, boolean isAgg) { boolean isAgg) {
SqlParserResp sqlParser = new SqlParserResp(); SqlParserResp sqlParser = new SqlParserResp();
try { try {
if (semanticSchema == null) { if (semanticSchema == null) {
@@ -29,14 +29,14 @@ class HeadlessParserServiceTest {
} }
SqlBuilder aggBuilder = new SqlBuilder(semanticSchema); SqlBuilder aggBuilder = new SqlBuilder(semanticSchema);
QueryStatement queryStatement = new QueryStatement(); QueryStatement queryStatement = new QueryStatement();
queryStatement.setOntologyQueryParam(ontologyQueryParam); queryStatement.setOntologyQuery(ontologyQuery);
String sql = aggBuilder.buildOntologySql(queryStatement); String sql = aggBuilder.buildOntologySql(queryStatement);
queryStatement.setSql(sql); queryStatement.setSql(sql);
EngineType engineType = semanticSchema.getOntology().getDatabase().getType(); EngineType engineType = semanticSchema.getOntology().getDatabase().getType();
sqlParser.setSql(aggBuilder.getSql(engineType)); sqlParser.setSql(aggBuilder.getSql(engineType));
} catch (Exception e) { } catch (Exception e) {
sqlParser.setErrMsg(e.getMessage()); sqlParser.setErrMsg(e.getMessage());
log.error("parser error metricQueryReq[{}] error [{}]", ontologyQueryParam, e); log.error("parser error metricQueryReq[{}] error [{}]", ontologyQuery, e);
} }
return sqlParser; return sqlParser;
} }
@@ -155,7 +155,7 @@ class HeadlessParserServiceTest {
// HeadlessSchemaManager.update(headlessSchema, HeadlessSchemaManager.getMetrics(metric)); // HeadlessSchemaManager.update(headlessSchema, HeadlessSchemaManager.getMetrics(metric));
OntologyQueryParam metricCommand = new OntologyQueryParam(); OntologyQuery metricCommand = new OntologyQuery();
metricCommand.setDimensions(new HashSet<>(Arrays.asList("sys_imp_date"))); metricCommand.setDimensions(new HashSet<>(Arrays.asList("sys_imp_date")));
metricCommand.setMetrics(new HashSet<>(Arrays.asList("pv"))); metricCommand.setMetrics(new HashSet<>(Arrays.asList("pv")));
metricCommand.setWhere( metricCommand.setWhere(
@@ -168,7 +168,7 @@ class HeadlessParserServiceTest {
addDepartment(semanticSchema); addDepartment(semanticSchema);
OntologyQueryParam metricCommand2 = new OntologyQueryParam(); OntologyQuery metricCommand2 = new OntologyQuery();
metricCommand2.setDimensions(new HashSet<>(Arrays.asList("sys_imp_date", metricCommand2.setDimensions(new HashSet<>(Arrays.asList("sys_imp_date",
"user_name__department", "user_name", "user_name__page"))); "user_name__department", "user_name", "user_name__page")));
metricCommand2.setMetrics(new HashSet<>(Arrays.asList("pv"))); metricCommand2.setMetrics(new HashSet<>(Arrays.asList("pv")));

View File

@@ -25,21 +25,18 @@ com.tencent.supersonic.headless.chat.parser.llm.DataSetResolver=\
### headless-core SPIs ### headless-core SPIs
com.tencent.supersonic.headless.core.translator.converter.QueryConverter=\ com.tencent.supersonic.headless.core.translator.parser.QueryParser=\
com.tencent.supersonic.headless.core.translator.converter.DefaultDimValueConverter,\ com.tencent.supersonic.headless.core.translator.parser.DefaultDimValueParser,\
com.tencent.supersonic.headless.core.translator.converter.SqlVariableConverter,\ com.tencent.supersonic.headless.core.translator.parser.SqlVariableParser,\
com.tencent.supersonic.headless.core.translator.converter.SqlQueryConverter,\ com.tencent.supersonic.headless.core.translator.parser.SqlQueryParser,\
com.tencent.supersonic.headless.core.translator.converter.StructQueryConverter,\ com.tencent.supersonic.headless.core.translator.parser.StructQueryParser,\
com.tencent.supersonic.headless.core.translator.converter.MetricRatioConverter com.tencent.supersonic.headless.core.translator.parser.MetricRatioParser,\
com.tencent.supersonic.headless.core.translator.parser.OntologyQueryParser
com.tencent.supersonic.headless.core.translator.optimizer.QueryOptimizer=\ com.tencent.supersonic.headless.core.translator.optimizer.QueryOptimizer=\
com.tencent.supersonic.headless.core.translator.optimizer.DetailQueryOptimizer,\
com.tencent.supersonic.headless.core.translator.optimizer.DbDialectOptimizer,\ com.tencent.supersonic.headless.core.translator.optimizer.DbDialectOptimizer,\
com.tencent.supersonic.headless.core.translator.optimizer.ResultLimitOptimizer com.tencent.supersonic.headless.core.translator.optimizer.ResultLimitOptimizer
com.tencent.supersonic.headless.core.translator.parser.QueryParser=\
com.tencent.supersonic.headless.core.translator.parser.calcite.CalciteQueryParser
com.tencent.supersonic.headless.core.executor.QueryExecutor=\ com.tencent.supersonic.headless.core.executor.QueryExecutor=\
com.tencent.supersonic.headless.core.executor.JdbcExecutor com.tencent.supersonic.headless.core.executor.JdbcExecutor

View File

@@ -25,21 +25,18 @@ com.tencent.supersonic.headless.chat.parser.llm.DataSetResolver=\
### headless-core SPIs ### headless-core SPIs
com.tencent.supersonic.headless.core.translator.converter.QueryConverter=\ com.tencent.supersonic.headless.core.translator.parser.QueryParser=\
com.tencent.supersonic.headless.core.translator.converter.DefaultDimValueConverter,\ com.tencent.supersonic.headless.core.translator.parser.DefaultDimValueParser,\
com.tencent.supersonic.headless.core.translator.converter.SqlVariableConverter,\ com.tencent.supersonic.headless.core.translator.parser.SqlVariableParser,\
com.tencent.supersonic.headless.core.translator.converter.SqlQueryConverter,\ com.tencent.supersonic.headless.core.translator.parser.SqlQueryParser,\
com.tencent.supersonic.headless.core.translator.converter.StructQueryConverter,\ com.tencent.supersonic.headless.core.translator.parser.StructQueryParser,\
com.tencent.supersonic.headless.core.translator.converter.MetricRatioConverter com.tencent.supersonic.headless.core.translator.parser.MetricRatioParser,\
com.tencent.supersonic.headless.core.translator.parser.OntologyQueryParser
com.tencent.supersonic.headless.core.translator.optimizer.QueryOptimizer=\ com.tencent.supersonic.headless.core.translator.optimizer.QueryOptimizer=\
com.tencent.supersonic.headless.core.translator.optimizer.DetailQueryOptimizer,\
com.tencent.supersonic.headless.core.translator.optimizer.DbDialectOptimizer,\ com.tencent.supersonic.headless.core.translator.optimizer.DbDialectOptimizer,\
com.tencent.supersonic.headless.core.translator.optimizer.ResultLimitOptimizer com.tencent.supersonic.headless.core.translator.optimizer.ResultLimitOptimizer
com.tencent.supersonic.headless.core.translator.parser.QueryParser=\
com.tencent.supersonic.headless.core.translator.parser.calcite.CalciteQueryParser
com.tencent.supersonic.headless.core.executor.QueryExecutor=\ com.tencent.supersonic.headless.core.executor.QueryExecutor=\
com.tencent.supersonic.headless.core.executor.JdbcExecutor com.tencent.supersonic.headless.core.executor.JdbcExecutor