(improvement)(Headless) Support optimizing and generating SQL based on the engine type. (#666)

This commit is contained in:
lexluo09
2024-01-19 22:32:28 +08:00
committed by GitHub
parent 20c8456705
commit 9c6bd7cf19
30 changed files with 309 additions and 277 deletions

View File

@@ -28,4 +28,14 @@ public enum EngineType {
public String getName() {
return name;
}
/**
 * Resolves an {@code EngineType} from its name, matching case-insensitively.
 *
 * @param value the engine name to look up (compared against the enum constant names)
 * @return the matching {@code EngineType}
 * @throws IllegalArgumentException if no enum constant matches {@code value}
 */
public static EngineType fromString(String value) {
for (EngineType engineType : EngineType.values()) {
if (engineType.name().equalsIgnoreCase(value)) {
return engineType;
}
}
throw new IllegalArgumentException("Invalid value: " + value);
}
}

View File

@@ -1,6 +1,7 @@
package com.tencent.supersonic.headless.core.parser.calcite;
import com.tencent.supersonic.headless.api.enums.AggOption;
import com.tencent.supersonic.headless.api.enums.EngineType;
import com.tencent.supersonic.headless.api.request.MetricQueryReq;
import com.tencent.supersonic.headless.core.parser.SqlParser;
import com.tencent.supersonic.headless.core.parser.calcite.planner.AggPlanner;
@@ -31,13 +32,15 @@ public class CalciteSqlParser implements SqlParser {
SemanticSchema semanticSchema = getSemanticSchema(semanticModel, queryStatement);
AggPlanner aggBuilder = new AggPlanner(semanticSchema);
aggBuilder.explain(queryStatement, isAgg);
queryStatement.setSql(aggBuilder.getSql());
EngineType engineType = EngineType.fromString(semanticSchema.getSemanticModel().getDatabase().getType());
queryStatement.setSql(aggBuilder.getSql(engineType));
queryStatement.setSourceId(aggBuilder.getSourceId());
if (Objects.nonNull(queryStatement.getEnableOptimize()) && queryStatement.getEnableOptimize()
&& Objects.nonNull(queryStatement.getViewAlias()) && !queryStatement.getViewAlias().isEmpty()) {
// simplify model sql with query sql
String simplifySql = aggBuilder.simplify(
getSqlByView(aggBuilder.getSql(), queryStatement.getViewSql(), queryStatement.getViewAlias()));
getSqlByView(aggBuilder.getSql(engineType), queryStatement.getViewSql(),
queryStatement.getViewAlias()), engineType);
if (Objects.nonNull(simplifySql) && !simplifySql.isEmpty()) {
log.info("simplifySql [{}]", simplifySql);
queryStatement.setViewSimplifySql(simplifySql);
@@ -48,6 +51,7 @@ public class CalciteSqlParser implements SqlParser {
private SemanticSchema getSemanticSchema(SemanticModel semanticModel, QueryStatement queryStatement) {
SemanticSchema semanticSchema = SemanticSchema.newBuilder(semanticModel.getRootPath()).build();
semanticSchema.setSemanticModel(semanticModel);
semanticSchema.setDatasource(semanticModel.getDatasourceMap());
semanticSchema.setDimension(semanticModel.getDimensionMap());
semanticSchema.setMetric(semanticModel.getMetrics());

View File

@@ -1,9 +1,11 @@
package com.tencent.supersonic.headless.core.parser.calcite;
import com.tencent.supersonic.headless.core.parser.calcite.schema.HeadlessSqlTypeFactoryImpl;
import com.tencent.supersonic.headless.api.enums.EngineType;
import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSqlTypeFactoryImpl;
import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSqlDialect;
import com.tencent.supersonic.headless.core.parser.calcite.schema.ViewExpanderImpl;
import com.tencent.supersonic.headless.core.utils.SqlDialectFactory;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
@@ -42,13 +44,17 @@ import org.apache.calcite.tools.Frameworks;
public class Configuration {
public static Properties configProperties = new Properties();
public static RelDataTypeFactory typeFactory = new HeadlessSqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
public static RelDataTypeFactory typeFactory = new SemanticSqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
public static SqlOperatorTable operatorTable = SqlStdOperatorTable.instance();
public static CalciteConnectionConfig config = new CalciteConnectionConfigImpl(configProperties);
public static SqlValidator.Config validatorConfig = SqlValidator.Config.DEFAULT
.withConformance(SemanticSqlDialect.DEFAULT.getConformance())
.withDefaultNullCollation(config.defaultNullCollation())
.withLenientOperatorLookup(true);
/**
 * Builds a validator configuration whose SQL conformance follows the dialect
 * of the given engine type (looked up via {@code SqlDialectFactory}).
 *
 * @param engineType the target engine whose dialect conformance should be used
 * @return a {@code SqlValidator.Config} with dialect-specific conformance,
 *         the default null collation, and lenient operator lookup enabled
 */
public static SqlValidator.Config getValidatorConfig(EngineType engineType) {
SemanticSqlDialect sqlDialect = SqlDialectFactory.getSqlDialect(engineType);
return SqlValidator.Config.DEFAULT
.withConformance(sqlDialect.getConformance())
.withDefaultNullCollation(config.defaultNullCollation())
.withLenientOperatorLookup(true);
}
static {
configProperties.put(CalciteConnectionProperty.CASE_SENSITIVE.camelName(), Boolean.TRUE.toString());
@@ -57,8 +63,10 @@ public class Configuration {
}
public static SqlParser.Config getParserConfig() {
public static SqlParser.Config getParserConfig(EngineType engineType) {
CalciteConnectionConfig config = new CalciteConnectionConfigImpl(configProperties);
SemanticSqlDialect sqlDialect = SqlDialectFactory.getSqlDialect(engineType);
SqlParser.ConfigBuilder parserConfig = SqlParser.configBuilder();
parserConfig.setCaseSensitive(config.caseSensitive());
parserConfig.setUnquotedCasing(config.unquotedCasing());
@@ -71,12 +79,12 @@ public class Configuration {
.setQuoting(Quoting.SINGLE_QUOTE)
.setQuotedCasing(Casing.TO_UPPER)
.setUnquotedCasing(Casing.TO_UPPER)
.setConformance(SemanticSqlDialect.DEFAULT.getConformance())
.setConformance(sqlDialect.getConformance())
.setLex(Lex.BIG_QUERY);
return parserConfig.build();
}
public static SqlValidator getSqlValidator(CalciteSchema rootSchema) {
public static SqlValidator getSqlValidator(CalciteSchema rootSchema, EngineType engineType) {
List<SqlOperatorTable> tables = new ArrayList<>();
tables.add(SqlStdOperatorTable.instance());
SqlOperatorTable operatorTable = new ChainedSqlOperatorTable(tables);
@@ -87,9 +95,8 @@ public class Configuration {
typeFactory,
config
);
SqlValidator validator = SqlValidatorUtil.newValidator(operatorTable, catalogReader, typeFactory,
validatorConfig);
return validator;
return SqlValidatorUtil.newValidator(operatorTable, catalogReader, typeFactory,
Configuration.getValidatorConfig(engineType));
}
public static SqlToRelConverter.Config getConverterConfig() {
@@ -102,11 +109,11 @@ public class Configuration {
}
public static SqlToRelConverter getSqlToRelConverter(SqlValidatorScope scope, SqlValidator sqlValidator,
RelOptPlanner relOptPlanner) {
RelOptPlanner relOptPlanner, EngineType engineType) {
RexBuilder rexBuilder = new RexBuilder(typeFactory);
RelOptCluster cluster = RelOptCluster.create(relOptPlanner, rexBuilder);
FrameworkConfig fromworkConfig = Frameworks.newConfigBuilder()
.parserConfig(getParserConfig())
.parserConfig(getParserConfig(engineType))
.defaultSchema(scope.getValidator().getCatalogReader().getRootSchema().plus())
.build();
return new SqlToRelConverter(new ViewExpanderImpl(),

View File

@@ -2,12 +2,13 @@ package com.tencent.supersonic.headless.core.parser.calcite.planner;
import com.tencent.supersonic.headless.api.enums.AggOption;
import com.tencent.supersonic.headless.api.enums.EngineType;
import com.tencent.supersonic.headless.api.request.MetricQueryReq;
import com.tencent.supersonic.headless.core.parser.calcite.Configuration;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.DataSource;
import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSchema;
import com.tencent.supersonic.headless.core.parser.calcite.schema.SchemaBuilder;
import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSchema;
import com.tencent.supersonic.headless.core.parser.calcite.sql.Renderer;
import com.tencent.supersonic.headless.core.parser.calcite.sql.TableView;
import com.tencent.supersonic.headless.core.parser.calcite.sql.node.DataSourceNode;
@@ -15,6 +16,7 @@ import com.tencent.supersonic.headless.core.parser.calcite.sql.node.SemanticNode
import com.tencent.supersonic.headless.core.parser.calcite.sql.render.FilterRender;
import com.tencent.supersonic.headless.core.parser.calcite.sql.render.OutputRender;
import com.tencent.supersonic.headless.core.parser.calcite.sql.render.SourceRender;
import com.tencent.supersonic.headless.core.pojo.Database;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import java.util.ArrayList;
import java.util.LinkedList;
@@ -113,12 +115,14 @@ public class AggPlanner implements Planner {
// build a parse Node
parse();
// optimizer
optimize();
Database database = queryStatement.getSemanticModel().getDatabase();
EngineType engineType = EngineType.fromString(database.getType());
optimize(engineType);
}
@Override
public String getSql() {
return SemanticNode.getSql(parserNode);
public String getSql(EngineType engineType) {
return SemanticNode.getSql(parserNode, engineType);
}
@Override
@@ -127,26 +131,26 @@ public class AggPlanner implements Planner {
}
@Override
public String simplify(String sql) {
return optimize(sql);
public String simplify(String sql, EngineType engineType) {
return optimize(sql, engineType);
}
public void optimize() {
public void optimize(EngineType engineType) {
if (Objects.isNull(schema.getRuntimeOptions()) || Objects.isNull(schema.getRuntimeOptions().getEnableOptimize())
|| !schema.getRuntimeOptions().getEnableOptimize()) {
return;
}
SqlNode optimizeNode = optimizeSql(SemanticNode.getSql(parserNode));
SqlNode optimizeNode = optimizeSql(SemanticNode.getSql(parserNode, engineType), engineType);
if (Objects.nonNull(optimizeNode)) {
parserNode = optimizeNode;
}
}
public String optimize(String sql) {
public String optimize(String sql, EngineType engineType) {
try {
SqlNode sqlNode = SqlParser.create(sql, Configuration.getParserConfig()).parseStmt();
SqlNode sqlNode = SqlParser.create(sql, Configuration.getParserConfig(engineType)).parseStmt();
if (Objects.nonNull(sqlNode)) {
return SemanticNode.getSql(SemanticNode.optimize(scope, schema, sqlNode));
return SemanticNode.getSql(SemanticNode.optimize(scope, schema, sqlNode, engineType), engineType);
}
} catch (Exception e) {
log.error("optimize error {}", e);
@@ -154,12 +158,12 @@ public class AggPlanner implements Planner {
return "";
}
private SqlNode optimizeSql(String sql) {
private SqlNode optimizeSql(String sql, EngineType engineType) {
try {
log.info("before optimize:[{}]", sql);
SqlNode sqlNode = SqlParser.create(sql, Configuration.getParserConfig()).parseStmt();
SqlNode sqlNode = SqlParser.create(sql, Configuration.getParserConfig(engineType)).parseStmt();
if (Objects.nonNull(sqlNode)) {
return SemanticNode.optimize(scope, schema, sqlNode);
return SemanticNode.optimize(scope, schema, sqlNode, engineType);
}
} catch (Exception e) {
log.error("optimize error {}", e);

View File

@@ -2,6 +2,7 @@ package com.tencent.supersonic.headless.core.parser.calcite.planner;
import com.tencent.supersonic.headless.api.enums.AggOption;
import com.tencent.supersonic.headless.api.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
/**
@@ -11,9 +12,9 @@ public interface Planner {
public void explain(QueryStatement queryStatement, AggOption aggOption) throws Exception;
public String getSql();
/** Returns the generated SQL for the parsed statement, rendered for the given engine type. */
public String getSql(EngineType engineType);
public String getSourceId();
public String simplify(String sql);
public String simplify(String sql, EngineType engineType);
}

View File

@@ -1,6 +1,7 @@
package com.tencent.supersonic.headless.core.parser.calcite.schema;
import com.tencent.supersonic.headless.api.enums.EngineType;
import com.tencent.supersonic.headless.core.parser.calcite.Configuration;
import com.tencent.supersonic.headless.core.parser.calcite.sql.S2SQLSqlValidatorImpl;
@@ -37,8 +38,9 @@ public class SchemaBuilder {
Configuration.typeFactory,
Configuration.config
);
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
S2SQLSqlValidatorImpl s2SQLSqlValidator = new S2SQLSqlValidatorImpl(Configuration.operatorTable, catalogReader,
Configuration.typeFactory, Configuration.validatorConfig);
Configuration.typeFactory, Configuration.getValidatorConfig(engineType));
return new ParameterScope(s2SQLSqlValidator, nameToTypeMap);
}

View File

@@ -7,7 +7,7 @@ import org.apache.calcite.sql.validate.SqlConformanceEnum;
/**
* customize the SqlConformance
*/
public class HeadlessSqlConformance implements SqlConformance {
public class SemanticSqlConformance implements SqlConformance {
@Override
public boolean isLiberal() {

View File

@@ -1,7 +1,6 @@
package com.tencent.supersonic.headless.core.parser.calcite.schema;
import com.google.common.base.Preconditions;
import org.apache.calcite.avatica.util.Casing;
import org.apache.calcite.sql.SqlDialect;
import org.apache.calcite.sql.SqlIntervalLiteral;
import org.apache.calcite.sql.SqlNode;
@@ -14,15 +13,7 @@ import org.checkerframework.checker.nullness.qual.Nullable;
*/
public class SemanticSqlDialect extends SqlDialect {
public static final Context DEFAULT_CONTEXT = SqlDialect.EMPTY_CONTEXT
.withDatabaseProduct(DatabaseProduct.BIG_QUERY)
.withLiteralQuoteString("'")
.withLiteralEscapedQuoteString("''")
.withUnquotedCasing(Casing.UNCHANGED)
.withQuotedCasing(Casing.UNCHANGED)
.withCaseSensitive(false);
public static final SqlDialect DEFAULT = new SemanticSqlDialect(DEFAULT_CONTEXT);
private static final SqlConformance tagTdwSqlConformance = new HeadlessSqlConformance();
private static final SqlConformance tagTdwSqlConformance = new SemanticSqlConformance();
public SemanticSqlDialect(Context context) {
super(context);

View File

@@ -7,9 +7,9 @@ import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
/**
* customize the SqlTypeFactoryImpl
*/
public class HeadlessSqlTypeFactoryImpl extends SqlTypeFactoryImpl {
public class SemanticSqlTypeFactoryImpl extends SqlTypeFactoryImpl {
public HeadlessSqlTypeFactoryImpl(RelDataTypeSystem typeSystem) {
public SemanticSqlTypeFactoryImpl(RelDataTypeSystem typeSystem) {
super(typeSystem);
}

View File

@@ -1,6 +1,7 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql;
import com.tencent.supersonic.headless.api.enums.EngineType;
import com.tencent.supersonic.headless.api.request.MetricQueryReq;
import com.tencent.supersonic.headless.core.parser.calcite.sql.node.MeasureNode;
import com.tencent.supersonic.headless.core.parser.calcite.sql.node.MetricNode;
@@ -47,27 +48,31 @@ public abstract class Renderer {
}
public static MetricNode buildMetricNode(String metric, DataSource datasource, SqlValidatorScope scope,
SemanticSchema schema, boolean nonAgg, String alias) throws Exception {
SemanticSchema schema, boolean nonAgg, String alias) throws Exception {
Optional<Metric> metricOpt = getMetricByName(metric, schema);
MetricNode metricNode = new MetricNode();
EngineType engineType = EngineType.fromString(datasource.getType());
if (metricOpt.isPresent()) {
metricNode.setMetric(metricOpt.get());
for (Measure m : metricOpt.get().getMetricTypeParams().getMeasures()) {
Optional<Measure> measure = getMeasureByName(m.getName(), datasource);
if (measure.isPresent()) {
metricNode.getNonAggNode()
.put(measure.get().getName(), MeasureNode.buildNonAgg(alias, measure.get(), scope));
.put(measure.get().getName(),
MeasureNode.buildNonAgg(alias, measure.get(), scope, engineType));
metricNode.getAggNode()
.put(measure.get().getName(), MeasureNode.buildAgg(measure.get(), nonAgg, scope));
.put(measure.get().getName(),
MeasureNode.buildAgg(measure.get(), nonAgg, scope, engineType));
metricNode.getAggFunction().put(measure.get().getName(), measure.get().getAgg());
} else {
metricNode.getNonAggNode().put(m.getName(), MeasureNode.buildNonAgg(alias, m, scope));
metricNode.getAggNode().put(m.getName(), MeasureNode.buildAgg(m, nonAgg, scope));
metricNode.getNonAggNode().put(m.getName(), MeasureNode.buildNonAgg(alias, m, scope, engineType));
metricNode.getAggNode().put(m.getName(), MeasureNode.buildAgg(m, nonAgg, scope, engineType));
metricNode.getAggFunction().put(m.getName(), m.getAgg());
}
if (m.getConstraint() != null && !m.getConstraint().isEmpty()) {
metricNode.getMeasureFilter().put(m.getName(), SemanticNode.parse(m.getConstraint(), scope));
metricNode.getMeasureFilter()
.put(m.getName(), SemanticNode.parse(m.getConstraint(), scope, engineType));
}
}
return metricNode;
@@ -75,13 +80,14 @@ public abstract class Renderer {
Optional<Measure> measure = getMeasureByName(metric, datasource);
if (measure.isPresent()) {
metricNode.getNonAggNode()
.put(measure.get().getName(), MeasureNode.buildNonAgg(alias, measure.get(), scope));
metricNode.getAggNode().put(measure.get().getName(), MeasureNode.buildAgg(measure.get(), nonAgg, scope));
.put(measure.get().getName(), MeasureNode.buildNonAgg(alias, measure.get(), scope, engineType));
metricNode.getAggNode()
.put(measure.get().getName(), MeasureNode.buildAgg(measure.get(), nonAgg, scope, engineType));
metricNode.getAggFunction().put(measure.get().getName(), measure.get().getAgg());
if (measure.get().getConstraint() != null && !measure.get().getConstraint().isEmpty()) {
metricNode.getMeasureFilter()
.put(measure.get().getName(), SemanticNode.parse(measure.get().getConstraint(), scope));
metricNode.getMeasureFilter().put(measure.get().getName(),
SemanticNode.parse(measure.get().getConstraint(), scope, engineType));
}
}
return metricNode;
@@ -105,5 +111,5 @@ public abstract class Renderer {
}
public abstract void render(MetricQueryReq metricCommand, List<DataSource> dataSources, SqlValidatorScope scope,
SemanticSchema schema, boolean nonAgg) throws Exception;
SemanticSchema schema, boolean nonAgg) throws Exception;
}

View File

@@ -1,19 +1,22 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql.node;
import com.tencent.supersonic.headless.api.enums.EngineType;
import java.util.Objects;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
public class AggFunctionNode extends SemanticNode {
public static SqlNode build(String agg, String name, SqlValidatorScope scope) throws Exception {
public static SqlNode build(String agg, String name, SqlValidatorScope scope, EngineType engineType)
throws Exception {
if (Objects.isNull(agg) || agg.isEmpty()) {
return parse(name, scope);
return parse(name, scope, engineType);
}
if (AggFunction.COUNT_DISTINCT.name().equalsIgnoreCase(agg)) {
return parse(AggFunction.COUNT.name() + " ( " + AggFunction.DISTINCT.name() + " " + name + " ) ", scope);
return parse(AggFunction.COUNT.name() + " ( " + AggFunction.DISTINCT.name() + " " + name + " ) ", scope,
engineType);
}
return parse(agg + " ( " + name + " ) ", scope);
return parse(agg + " ( " + name + " ) ", scope, engineType);
}
public static enum AggFunction {

View File

@@ -54,7 +54,7 @@ public class DataSourceNode extends SemanticNode {
if (sqlTable.isEmpty()) {
throw new Exception("DatasourceNode build error [tableSqlNode not found]");
}
SqlNode source = getTable(sqlTable, scope);
SqlNode source = getTable(sqlTable, scope, EngineType.fromString(datasource.getType()));
addSchema(scope, datasource, source);
return buildAs(datasource.getName(), source);
}
@@ -87,8 +87,9 @@ public class DataSourceNode extends SemanticNode {
Set<String> dateInfo = new HashSet<>();
Set<String> dimensions = new HashSet<>();
Set<String> metrics = new HashSet<>();
EngineType engineType = EngineType.fromString(datasource.getType());
for (Dimension d : datasource.getDimensions()) {
List<SqlNode> identifiers = expand(SemanticNode.parse(d.getExpr(), scope), scope);
List<SqlNode> identifiers = expand(SemanticNode.parse(d.getExpr(), scope, engineType), scope);
identifiers.stream().forEach(i -> dimensions.add(i.toString()));
dimensions.add(d.getName());
}
@@ -96,7 +97,7 @@ public class DataSourceNode extends SemanticNode {
dimensions.add(i.getName());
}
for (Measure m : datasource.getMeasures()) {
List<SqlNode> identifiers = expand(SemanticNode.parse(m.getExpr(), scope), scope);
List<SqlNode> identifiers = expand(SemanticNode.parse(m.getExpr(), scope, engineType), scope);
identifiers.stream().forEach(i -> {
if (!dimensions.contains(i.toString())) {
metrics.add(i.toString());
@@ -124,15 +125,17 @@ public class DataSourceNode extends SemanticNode {
if (CollectionUtils.isEmpty(exprList)) {
return build(datasource, scope);
}
EngineType engineType = EngineType.fromString(datasource.getType());
SqlNode view = new SqlBasicCall(new LateralViewExplodeNode(), Arrays.asList(build(datasource, scope),
new SqlNodeList(getExtendField(exprList, scope), SqlParserPos.ZERO)), SqlParserPos.ZERO);
new SqlNodeList(getExtendField(exprList, scope, engineType), SqlParserPos.ZERO)), SqlParserPos.ZERO);
return buildAs(datasource.getName() + Constants.DIMENSION_ARRAY_SINGLE_SUFFIX, view);
}
public static List<SqlNode> getExtendField(Set<String> exprList, SqlValidatorScope scope) throws Exception {
public static List<SqlNode> getExtendField(Set<String> exprList, SqlValidatorScope scope, EngineType engineType)
throws Exception {
List<SqlNode> sqlNodeList = new ArrayList<>();
for (String expr : exprList) {
sqlNodeList.add(parse(expr, scope));
sqlNodeList.add(parse(expr, scope, engineType));
sqlNodeList.add(new SqlDataTypeSpec(
new SqlUserDefinedTypeNameSpec(expr + Constants.DIMENSION_ARRAY_SINGLE_SUFFIX, SqlParserPos.ZERO),
SqlParserPos.ZERO));
@@ -140,8 +143,8 @@ public class DataSourceNode extends SemanticNode {
return sqlNodeList;
}
private static SqlNode getTable(String sqlQuery, SqlValidatorScope scope) throws Exception {
SqlParser sqlParser = SqlParser.create(sqlQuery, Configuration.getParserConfig());
private static SqlNode getTable(String sqlQuery, SqlValidatorScope scope, EngineType engineType) throws Exception {
SqlParser sqlParser = SqlParser.create(sqlQuery, Configuration.getParserConfig(engineType));
SqlNode sqlNode = sqlParser.parseQuery();
scope.validateExpr(sqlNode);
return sqlNode;
@@ -166,9 +169,10 @@ public class DataSourceNode extends SemanticNode {
public static void mergeQueryFilterDimensionMeasure(SemanticSchema schema, MetricQueryReq metricCommand,
Set<String> queryDimension, List<String> measures,
SqlValidatorScope scope) throws Exception {
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
if (Objects.nonNull(metricCommand.getWhere()) && !metricCommand.getWhere().isEmpty()) {
Set<String> filterConditions = new HashSet<>();
FilterNode.getFilterField(parse(metricCommand.getWhere(), scope), filterConditions);
FilterNode.getFilterField(parse(metricCommand.getWhere(), scope, engineType), filterConditions);
Set<String> queryMeasures = new HashSet<>(measures);
Set<String> schemaMetricName = schema.getMetrics().stream()
.map(m -> m.getName()).collect(Collectors.toSet());
@@ -223,8 +227,10 @@ public class DataSourceNode extends SemanticNode {
}
filterMeasure.addAll(sourceMeasure);
filterMeasure.addAll(dimension);
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
mergeQueryFilterDimensionMeasure(schema, metricCommand, queryDimension, measures, scope);
boolean isAllMatch = checkMatch(sourceMeasure, queryDimension, measures, dimension, metricCommand, scope);
boolean isAllMatch = checkMatch(sourceMeasure, queryDimension, measures, dimension, metricCommand, scope,
engineType);
if (isAllMatch) {
log.info("baseDataSource match all ");
return dataSources;
@@ -260,7 +266,8 @@ public class DataSourceNode extends SemanticNode {
List<String> measures,
Set<String> dimension,
MetricQueryReq metricCommand,
SqlValidatorScope scope) throws Exception {
SqlValidatorScope scope,
EngineType engineType) throws Exception {
boolean isAllMatch = true;
sourceMeasure.retainAll(measures);
if (sourceMeasure.size() < measures.size()) {
@@ -285,7 +292,7 @@ public class DataSourceNode extends SemanticNode {
if (metricCommand.getWhere() != null && !metricCommand.getWhere().isEmpty()) {
Set<String> whereFields = new HashSet<>();
SqlNode sqlNode = parse(metricCommand.getWhere(), scope);
SqlNode sqlNode = parse(metricCommand.getWhere(), scope, engineType);
FilterNode.getFilterField(sqlNode, whereFields);
}
return isAllMatch;

View File

@@ -1,9 +1,9 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql.node;
import com.tencent.supersonic.headless.api.enums.EngineType;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Dimension;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import org.apache.calcite.sql.SqlNode;
@@ -11,55 +11,47 @@ import org.apache.calcite.sql.validate.SqlValidatorScope;
public class DimensionNode extends SemanticNode {
public static SqlNode build(Dimension dimension, SqlValidatorScope scope) throws Exception {
SqlNode sqlNode = parse(dimension.getExpr(), scope);
public static SqlNode build(Dimension dimension, SqlValidatorScope scope, EngineType engineType) throws Exception {
SqlNode sqlNode = parse(dimension.getExpr(), scope, engineType);
return buildAs(dimension.getName(), sqlNode);
}
public static List<SqlNode> expand(Dimension dimension, SqlValidatorScope scope) throws Exception {
SqlNode sqlNode = parse(dimension.getExpr(), scope);
public static List<SqlNode> expand(Dimension dimension, SqlValidatorScope scope, EngineType engineType)
throws Exception {
SqlNode sqlNode = parse(dimension.getExpr(), scope, engineType);
return expand(sqlNode, scope);
}
public static SqlNode buildName(Dimension dimension, SqlValidatorScope scope) throws Exception {
return parse(dimension.getName(), scope);
public static SqlNode buildName(Dimension dimension, SqlValidatorScope scope, EngineType engineType)
throws Exception {
return parse(dimension.getName(), scope, engineType);
}
public static SqlNode buildExp(Dimension dimension, SqlValidatorScope scope) throws Exception {
return parse(dimension.getExpr(), scope);
public static SqlNode buildExp(Dimension dimension, SqlValidatorScope scope, EngineType engineType)
throws Exception {
return parse(dimension.getExpr(), scope, engineType);
}
public static SqlNode buildNameAs(String alias, Dimension dimension, SqlValidatorScope scope) throws Exception {
public static SqlNode buildNameAs(String alias, Dimension dimension, SqlValidatorScope scope, EngineType engineType)
throws Exception {
if ("".equals(alias)) {
return buildName(dimension, scope);
return buildName(dimension, scope, engineType);
}
SqlNode sqlNode = parse(dimension.getName(), scope);
SqlNode sqlNode = parse(dimension.getName(), scope, engineType);
return buildAs(alias, sqlNode);
}
public static SqlNode buildArray(Dimension dimension, SqlValidatorScope scope) throws Exception {
public static SqlNode buildArray(Dimension dimension, SqlValidatorScope scope, EngineType engineType)
throws Exception {
if (Objects.nonNull(dimension.getDataType()) && dimension.getDataType().isArray()) {
SqlNode sqlNode = parse(dimension.getExpr(), scope);
SqlNode sqlNode = parse(dimension.getExpr(), scope, engineType);
if (isIdentifier(sqlNode)) {
return buildAs(dimension.getName(),
parse(dimension.getExpr() + Constants.DIMENSION_ARRAY_SINGLE_SUFFIX, scope));
parse(dimension.getExpr() + Constants.DIMENSION_ARRAY_SINGLE_SUFFIX, scope, engineType));
}
throw new Exception("array dimension expr should only identify");
}
return build(dimension, scope);
}
public static List<SqlNode> expandArray(Dimension dimension, SqlValidatorScope scope)
throws Exception {
if (dimension.getDataType().isArray()) {
SqlNode sqlNode = parse(dimension.getExpr(), scope);
if (isIdentifier(sqlNode)) {
return Arrays.asList(buildAs(dimension.getName(),
parse(dimension.getExpr() + Constants.DIMENSION_ARRAY_SINGLE_SUFFIX, scope)));
}
throw new Exception("array dimension expr should only identify");
}
return expand(dimension, scope);
return build(dimension, scope, engineType);
}
}

View File

@@ -1,5 +1,6 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql.node;
import com.tencent.supersonic.headless.api.enums.EngineType;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Identify;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Identify.Type;
import java.util.List;
@@ -11,8 +12,8 @@ import org.apache.calcite.sql.validate.SqlValidatorScope;
public class IdentifyNode extends SemanticNode {
public static SqlNode build(Identify identify, SqlValidatorScope scope) throws Exception {
return parse(identify.getName(), scope);
public static SqlNode build(Identify identify, SqlValidatorScope scope, EngineType engineType) throws Exception {
return parse(identify.getName(), scope, engineType);
}
public static Set<String> getIdentifyNames(List<Identify> identifies, Identify.Type type) {

View File

@@ -1,50 +1,33 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql.node;
import com.tencent.supersonic.headless.api.enums.EngineType;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Measure;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
public class MeasureNode extends SemanticNode {
public static SqlNode build(Measure measure, boolean noAgg, SqlValidatorScope scope) throws Exception {
boolean addAgg = false;
if (!noAgg && measure.getAgg() != null && !measure.getAgg().isEmpty()) {
addAgg = true;
}
if (measure.getExpr() == null) {
if (addAgg) {
return parse(measure.getAgg() + " ( " + measure.getName() + " ) ", scope);
}
return parse(measure.getName(), scope);
} else {
if (addAgg) {
return buildAs(measure.getName(), parse(measure.getAgg() + " ( " + measure.getExpr() + " ) ", scope));
}
return buildAs(measure.getName(), parse(measure.getExpr(), scope));
}
// Builds "<expr> AS <measureName>" with no aggregation applied; the alias
// (may be empty) qualifies the underlying column reference when the measure
// has no explicit expression.
public static SqlNode buildNonAgg(String alias, Measure measure, SqlValidatorScope scope, EngineType engineType)
throws Exception {
return buildAs(measure.getName(), getExpr(measure, alias, scope, engineType));
}
public static SqlNode buildNonAgg(String alias, Measure measure, SqlValidatorScope scope) throws Exception {
return buildAs(measure.getName(), getExpr(measure, alias, scope));
}
public static SqlNode buildAgg(Measure measure, boolean noAgg, SqlValidatorScope scope) throws Exception {
public static SqlNode buildAgg(Measure measure, boolean noAgg, SqlValidatorScope scope, EngineType engineType)
throws Exception {
if ((measure.getAgg() == null || measure.getAgg().isEmpty()) || noAgg) {
return parse(measure.getName(), scope);
return parse(measure.getName(), scope, engineType);
}
return buildAs(measure.getName(), AggFunctionNode.build(measure.getAgg(), measure.getName(), scope));
return buildAs(measure.getName(),
AggFunctionNode.build(measure.getAgg(), measure.getName(), scope, engineType));
}
public static SqlNode buildAggAs(String aggFunc, String name, SqlValidatorScope scope) throws Exception {
return buildAs(name, AggFunctionNode.build(aggFunc, name, scope));
}
private static SqlNode getExpr(Measure measure, String alias, SqlValidatorScope scope) throws Exception {
/**
 * Builds the expression node for a measure: when no expression is configured,
 * parses the (optionally alias-qualified) measure name; otherwise parses the
 * configured expression. Fixes the misspelled {@code enginType} parameter name.
 */
private static SqlNode getExpr(Measure measure, String alias, SqlValidatorScope scope, EngineType engineType)
        throws Exception {
    if (measure.getExpr() == null) {
        return parse((alias.isEmpty() ? "" : alias + ".") + measure.getName(), scope, engineType);
    }
    return parse(measure.getExpr(), scope, engineType);
}
}

View File

@@ -1,6 +1,7 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql.node;
import com.tencent.supersonic.headless.api.enums.EngineType;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Metric;
import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSchema;
import java.util.HashMap;
@@ -19,12 +20,12 @@ public class MetricNode extends SemanticNode {
private Map<String, SqlNode> measureFilter = new HashMap<>();
private Map<String, String> aggFunction = new HashMap<>();
public static SqlNode build(Metric metric, SqlValidatorScope scope) throws Exception {
public static SqlNode build(Metric metric, SqlValidatorScope scope, EngineType engineType) throws Exception {
if (metric.getMetricTypeParams() == null || metric.getMetricTypeParams().getExpr() == null
|| metric.getMetricTypeParams().getExpr().isEmpty()) {
return parse(metric.getName(), scope);
return parse(metric.getName(), scope, engineType);
}
SqlNode sqlNode = parse(metric.getMetricTypeParams().getExpr(), scope);
SqlNode sqlNode = parse(metric.getMetricTypeParams().getExpr(), scope, engineType);
return buildAs(metric.getName(), sqlNode);
}

View File

@@ -1,11 +1,13 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql.node;
import com.tencent.supersonic.headless.api.enums.EngineType;
import com.tencent.supersonic.headless.core.parser.calcite.Configuration;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSchema;
import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSqlDialect;
import com.tencent.supersonic.headless.core.parser.calcite.sql.optimizer.FilterToGroupScanRule;
import com.tencent.supersonic.headless.core.utils.SqlDialectFactory;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
@@ -18,7 +20,6 @@ import java.util.function.UnaryOperator;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.jdbc.CalciteSchema;
import org.apache.calcite.plan.RelOptPlanner;
import org.apache.calcite.plan.hep.HepPlanner;
import org.apache.calcite.plan.hep.HepProgramBuilder;
@@ -40,7 +41,6 @@ import org.apache.calcite.sql.SqlSelect;
import org.apache.calcite.sql.SqlWith;
import org.apache.calcite.sql.SqlWriterConfig;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParseException;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.pretty.SqlPrettyWriter;
@@ -74,8 +74,8 @@ public abstract class SemanticNode {
AGGREGATION_FUNC.add("min");
}
public static SqlNode parse(String expression, SqlValidatorScope scope) throws Exception {
SqlParser sqlParser = SqlParser.create(expression, Configuration.getParserConfig());
public static SqlNode parse(String expression, SqlValidatorScope scope, EngineType engineType) throws Exception {
SqlParser sqlParser = SqlParser.create(expression, Configuration.getParserConfig(engineType));
SqlNode sqlNode = sqlParser.parseExpression();
scope.validateExpr(sqlNode);
return sqlNode;
@@ -88,8 +88,9 @@ public abstract class SemanticNode {
SqlParserPos.ZERO);
}
public static String getSql(SqlNode sqlNode) {
SqlWriterConfig config = SqlPrettyWriter.config().withDialect(SemanticSqlDialect.DEFAULT)
public static String getSql(SqlNode sqlNode, EngineType engineType) {
SemanticSqlDialect sqlDialect = SqlDialectFactory.getSqlDialect(engineType);
SqlWriterConfig config = SqlPrettyWriter.config().withDialect(sqlDialect)
.withKeywordsLowerCase(true).withClauseEndsLine(true).withAlwaysUseParentheses(false)
.withSelectListItemsOnSeparateLines(false).withUpdateSetListNewline(false).withIndentation(0);
@@ -390,20 +391,22 @@ public abstract class SemanticNode {
return parseInfo;
}
public static SqlNode optimize(SqlValidatorScope scope, SemanticSchema schema, SqlNode sqlNode) {
public static SqlNode optimize(SqlValidatorScope scope, SemanticSchema schema, SqlNode sqlNode,
EngineType engineType) {
try {
HepProgramBuilder hepProgramBuilder = new HepProgramBuilder();
SemanticSqlDialect sqlDialect = SqlDialectFactory.getSqlDialect(engineType);
hepProgramBuilder.addRuleInstance(new FilterToGroupScanRule(FilterToGroupScanRule.DEFAULT, schema));
RelOptPlanner relOptPlanner = new HepPlanner(hepProgramBuilder.build());
RelToSqlConverter converter = new RelToSqlConverter(SemanticSqlDialect.DEFAULT);
RelToSqlConverter converter = new RelToSqlConverter(sqlDialect);
SqlValidator sqlValidator = Configuration.getSqlValidator(
scope.getValidator().getCatalogReader().getRootSchema());
scope.getValidator().getCatalogReader().getRootSchema(), engineType);
SqlToRelConverter sqlToRelConverter = Configuration.getSqlToRelConverter(scope, sqlValidator,
relOptPlanner);
relOptPlanner, engineType);
RelNode sqlRel = sqlToRelConverter.convertQuery(
sqlValidator.validate(sqlNode), false, true).rel;
log.debug("RelNode optimize {}", SemanticNode.getSql(converter.visitRoot(sqlRel).asStatement()));
log.debug("RelNode optimize {}",
SemanticNode.getSql(converter.visitRoot(sqlRel).asStatement(), engineType));
relOptPlanner.setRoot(sqlRel);
RelNode relNode = relOptPlanner.findBestExp();
return converter.visitRoot(relNode).asStatement();
@@ -413,13 +416,6 @@ public abstract class SemanticNode {
return null;
}
public static RelNode getRelNode(CalciteSchema rootSchema, SqlToRelConverter sqlToRelConverter, String sql)
throws SqlParseException {
SqlValidator sqlValidator = Configuration.getSqlValidator(rootSchema);
return sqlToRelConverter.convertQuery(
sqlValidator.validate(SqlParser.create(sql, SqlParser.Config.DEFAULT).parseStmt()), false, true).rel;
}
public static SqlBinaryOperator getBinaryOperator(String val) {
if (val.equals("=")) {
return SqlStdOperatorTable.EQUALS;

View File

@@ -1,6 +1,7 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql.render;
import com.tencent.supersonic.headless.api.enums.EngineType;
import com.tencent.supersonic.headless.api.request.MetricQueryReq;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.DataSource;
@@ -34,8 +35,10 @@ public class FilterRender extends Renderer {
SqlNode filterNode = null;
List<String> queryMetrics = new ArrayList<>(metricCommand.getMetrics());
List<String> queryDimensions = new ArrayList<>(metricCommand.getDimensions());
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
if (metricCommand.getWhere() != null && !metricCommand.getWhere().isEmpty()) {
filterNode = SemanticNode.parse(metricCommand.getWhere(), scope);
filterNode = SemanticNode.parse(metricCommand.getWhere(), scope, engineType);
Set<String> whereFields = new HashSet<>();
FilterNode.getFilterField(filterNode, whereFields);
List<String> fieldWhere = whereFields.stream().collect(Collectors.toList());
@@ -49,7 +52,7 @@ public class FilterRender extends Renderer {
queryDimensions.addAll(dimensions);
}
for (String dimension : queryDimensions) {
tableView.getMeasure().add(SemanticNode.parse(dimension, scope));
tableView.getMeasure().add(SemanticNode.parse(dimension, scope, engineType));
}
for (String metric : queryMetrics) {
Optional<Metric> optionalMetric = Renderer.getMetricByName(metric, schema);
@@ -58,9 +61,9 @@ public class FilterRender extends Renderer {
continue;
}
if (optionalMetric.isPresent()) {
tableView.getMeasure().add(MetricNode.build(optionalMetric.get(), scope));
tableView.getMeasure().add(MetricNode.build(optionalMetric.get(), scope, engineType));
} else {
tableView.getMeasure().add(SemanticNode.parse(metric, scope));
tableView.getMeasure().add(SemanticNode.parse(metric, scope, engineType));
}
}
if (filterNode != null) {

View File

@@ -1,5 +1,6 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql.render;
import com.tencent.supersonic.headless.api.enums.EngineType;
import com.tencent.supersonic.headless.api.request.MetricQueryReq;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.DataSource;
@@ -49,13 +50,13 @@ public class JoinRender extends Renderer {
@Override
public void render(MetricQueryReq metricCommand, List<DataSource> dataSources, SqlValidatorScope scope,
SemanticSchema schema, boolean nonAgg) throws Exception {
SemanticSchema schema, boolean nonAgg) throws Exception {
String queryWhere = metricCommand.getWhere();
//dataSources = getOrderSource(dataSources);
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
Set<String> whereFields = new HashSet<>();
List<String> fieldWhere = new ArrayList<>();
if (queryWhere != null && !queryWhere.isEmpty()) {
SqlNode sqlNode = SemanticNode.parse(queryWhere, scope);
SqlNode sqlNode = SemanticNode.parse(queryWhere, scope, engineType);
FilterNode.getFilterField(sqlNode, whereFields);
fieldWhere = whereFields.stream().collect(Collectors.toList());
}
@@ -129,9 +130,9 @@ public class JoinRender extends Renderer {
if (!filterDimension.isEmpty()) {
for (String d : getQueryDimension(filterDimension, queryAllDimension, whereFields)) {
if (nonAgg) {
filterView.getMeasure().add(SemanticNode.parse(d, scope));
filterView.getMeasure().add(SemanticNode.parse(d, scope, engineType));
} else {
filterView.getDimension().add(SemanticNode.parse(d, scope));
filterView.getDimension().add(SemanticNode.parse(d, scope, engineType));
}
}
@@ -143,6 +144,7 @@ public class JoinRender extends Renderer {
List<String> reqMetrics, DataSource dataSource, Set<String> sourceMeasure, SqlValidatorScope scope,
SemanticSchema schema, boolean nonAgg) throws Exception {
String alias = Constants.JOIN_TABLE_PREFIX + dataSource.getName();
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
for (String m : reqMetrics) {
if (getMatchMetric(schema, sourceMeasure, m, queryMetrics)) {
MetricNode metricNode = buildMetricNode(m, dataSource, scope, schema, nonAgg, alias);
@@ -150,7 +152,8 @@ public class JoinRender extends Renderer {
if (!metricNode.getNonAggNode().isEmpty()) {
for (String measure : metricNode.getNonAggNode().keySet()) {
innerSelect.put(measure,
SemanticNode.buildAs(measure, SemanticNode.parse(alias + "." + measure, scope)));
SemanticNode.buildAs(measure,
SemanticNode.parse(alias + "." + measure, scope, engineType)));
}
}
@@ -159,10 +162,10 @@ public class JoinRender extends Renderer {
if (metricNode.getNonAggNode().containsKey(entry.getKey())) {
if (nonAgg) {
filterView.getMeasure().add(SemanticNode.buildAs(entry.getKey(),
SemanticNode.parse(entry.getKey(), scope)));
SemanticNode.parse(entry.getKey(), scope, engineType)));
} else {
filterView.getMeasure().add(SemanticNode.buildAs(entry.getKey(),
AggFunctionNode.build(entry.getValue(), entry.getKey(), scope)));
AggFunctionNode.build(entry.getValue(), entry.getKey(), scope, engineType)));
}
}
@@ -177,14 +180,16 @@ public class JoinRender extends Renderer {
List<String> reqDimensions, DataSource dataSource, Set<String> dimension, SqlValidatorScope scope,
SemanticSchema schema) throws Exception {
String alias = Constants.JOIN_TABLE_PREFIX + dataSource.getName();
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
for (String d : reqDimensions) {
if (getMatchDimension(schema, dimension, dataSource, d, queryDimension)) {
if (d.contains(Constants.DIMENSION_IDENTIFY)) {
String[] identifyDimension = d.split(Constants.DIMENSION_IDENTIFY);
innerSelect.put(d,
SemanticNode.buildAs(d, SemanticNode.parse(alias + "." + identifyDimension[1], scope)));
SemanticNode.buildAs(d,
SemanticNode.parse(alias + "." + identifyDimension[1], scope, engineType)));
} else {
innerSelect.put(d, SemanticNode.buildAs(d, SemanticNode.parse(alias + "." + d, scope)));
innerSelect.put(d, SemanticNode.buildAs(d, SemanticNode.parse(alias + "." + d, scope, engineType)));
}
filterDimension.add(d);
@@ -253,16 +258,16 @@ public class JoinRender extends Renderer {
}
private SqlNode buildJoin(SqlNode left, TableView leftTable, TableView tableView, Map<String, String> before,
DataSource dataSource,
SemanticSchema schema, SqlValidatorScope scope)
DataSource dataSource, SemanticSchema schema, SqlValidatorScope scope)
throws Exception {
SqlNode condition = getCondition(leftTable, tableView, dataSource, schema, scope);
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
SqlNode condition = getCondition(leftTable, tableView, dataSource, schema, scope, engineType);
SqlLiteral sqlLiteral = SemanticNode.getJoinSqlLiteral("");
JoinRelation matchJoinRelation = getMatchJoinRelation(before, tableView, schema);
SqlNode joinRelationCondition = null;
if (!CollectionUtils.isEmpty(matchJoinRelation.getJoinCondition())) {
sqlLiteral = SemanticNode.getJoinSqlLiteral(matchJoinRelation.getJoinType());
joinRelationCondition = getCondition(matchJoinRelation, scope);
joinRelationCondition = getCondition(matchJoinRelation, scope, engineType);
condition = joinRelationCondition;
}
if (Materialization.TimePartType.ZIPPER.equals(leftTable.getDataSource().getTimePartType())
@@ -307,13 +312,13 @@ public class JoinRender extends Renderer {
return matchJoinRelation;
}
private SqlNode getCondition(JoinRelation joinRelation,
SqlValidatorScope scope) throws Exception {
private SqlNode getCondition(JoinRelation joinRelation, SqlValidatorScope scope, EngineType engineType)
throws Exception {
SqlNode condition = null;
for (Triple<String, String, String> con : joinRelation.getJoinCondition()) {
List<SqlNode> ons = new ArrayList<>();
ons.add(SemanticNode.parse(con.getLeft(), scope));
ons.add(SemanticNode.parse(con.getRight(), scope));
ons.add(SemanticNode.parse(con.getLeft(), scope, engineType));
ons.add(SemanticNode.parse(con.getRight(), scope, engineType));
if (Objects.isNull(condition)) {
condition = new SqlBasicCall(
SemanticNode.getBinaryOperator(con.getMiddle()),
@@ -334,7 +339,7 @@ public class JoinRender extends Renderer {
}
private SqlNode getCondition(TableView left, TableView right, DataSource dataSource, SemanticSchema schema,
SqlValidatorScope scope) throws Exception {
SqlValidatorScope scope, EngineType engineType) throws Exception {
Set<String> selectLeft = SemanticNode.getSelect(left.getTable());
Set<String> selectRight = SemanticNode.getSelect(right.getTable());
@@ -355,8 +360,8 @@ public class JoinRender extends Renderer {
}
}
List<SqlNode> ons = new ArrayList<>();
ons.add(SemanticNode.parse(left.getAlias() + "." + on, scope));
ons.add(SemanticNode.parse(right.getAlias() + "." + on, scope));
ons.add(SemanticNode.parse(left.getAlias() + "." + on, scope, engineType));
ons.add(SemanticNode.parse(right.getAlias() + "." + on, scope, engineType));
if (condition == null) {
condition = new SqlBasicCall(
SqlStdOperatorTable.EQUALS,
@@ -454,18 +459,21 @@ public class JoinRender extends Renderer {
endTime = zipper.getAlias() + "." + endTimeOp.get().getName();
dateTime = partMetric.getAlias() + "." + partTime.get().getName();
}
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
ArrayList<SqlNode> operandList = new ArrayList<>(
Arrays.asList(SemanticNode.parse(endTime, scope, engineType),
SemanticNode.parse(dateTime, scope, engineType)));
condition =
new SqlBasicCall(
SqlStdOperatorTable.AND,
new ArrayList<SqlNode>(Arrays.asList(new SqlBasicCall(
SqlStdOperatorTable.LESS_THAN_OR_EQUAL,
new ArrayList<SqlNode>(Arrays.asList(SemanticNode.parse(startTime, scope),
SemanticNode.parse(dateTime, scope))),
new ArrayList<SqlNode>(
Arrays.asList(SemanticNode.parse(startTime, scope, engineType),
SemanticNode.parse(dateTime, scope, engineType))),
SqlParserPos.ZERO, null), new SqlBasicCall(
SqlStdOperatorTable.GREATER_THAN,
new ArrayList<SqlNode>(Arrays.asList(SemanticNode.parse(endTime, scope),
SemanticNode.parse(dateTime, scope))),
operandList,
SqlParserPos.ZERO, null))),
SqlParserPos.ZERO, null);

View File

@@ -1,8 +1,9 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql.render;
import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.headless.api.enums.EngineType;
import com.tencent.supersonic.headless.api.request.MetricQueryReq;
import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.DataSource;
import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSchema;
import com.tencent.supersonic.headless.core.parser.calcite.sql.Renderer;
@@ -27,19 +28,20 @@ public class OutputRender extends Renderer {
public void render(MetricQueryReq metricCommand, List<DataSource> dataSources, SqlValidatorScope scope,
SemanticSchema schema, boolean nonAgg) throws Exception {
TableView selectDataSet = super.tableView;
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
for (String dimension : metricCommand.getDimensions()) {
selectDataSet.getMeasure().add(SemanticNode.parse(dimension, scope));
selectDataSet.getMeasure().add(SemanticNode.parse(dimension, scope, engineType));
}
for (String metric : metricCommand.getMetrics()) {
if (MetricNode.isMetricField(metric, schema)) {
// metric from field ignore
continue;
}
selectDataSet.getMeasure().add(SemanticNode.parse(metric, scope));
selectDataSet.getMeasure().add(SemanticNode.parse(metric, scope, engineType));
}
if (metricCommand.getLimit() > 0) {
SqlNode offset = SemanticNode.parse(metricCommand.getLimit().toString(), scope);
SqlNode offset = SemanticNode.parse(metricCommand.getLimit().toString(), scope, engineType);
selectDataSet.setOffset(offset);
}
if (!CollectionUtils.isEmpty(metricCommand.getOrder())) {
@@ -47,9 +49,9 @@ public class OutputRender extends Renderer {
for (ColumnOrder columnOrder : metricCommand.getOrder()) {
if (SqlStdOperatorTable.DESC.getName().equalsIgnoreCase(columnOrder.getOrder())) {
orderList.add(SqlStdOperatorTable.DESC.createCall(SqlParserPos.ZERO,
new SqlNode[]{SemanticNode.parse(columnOrder.getCol(), scope)}));
new SqlNode[]{SemanticNode.parse(columnOrder.getCol(), scope, engineType)}));
} else {
orderList.add(SemanticNode.parse(columnOrder.getCol(), scope));
orderList.add(SemanticNode.parse(columnOrder.getCol(), scope, engineType));
}
}
selectDataSet.setOrder(new SqlNodeList(orderList, SqlParserPos.ZERO));

View File

@@ -1,6 +1,7 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql.render;
import com.tencent.supersonic.headless.api.enums.EngineType;
import com.tencent.supersonic.headless.api.request.MetricQueryReq;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.DataSource;
@@ -39,9 +40,9 @@ import java.util.stream.Collectors;
public class SourceRender extends Renderer {
public static TableView renderOne(String alias, List<String> fieldWheres,
List<String> reqMetrics, List<String> reqDimensions,
String queryWhere, DataSource datasource, SqlValidatorScope scope,
SemanticSchema schema, boolean nonAgg) throws Exception {
List<String> reqMetrics, List<String> reqDimensions,
String queryWhere, DataSource datasource, SqlValidatorScope scope,
SemanticSchema schema, boolean nonAgg) throws Exception {
TableView dataSet = new TableView();
TableView output = new TableView();
@@ -97,24 +98,24 @@ public class SourceRender extends Renderer {
boolean nonAgg, Set<String> extendFields, TableView dataSet, TableView output, SqlValidatorScope scope)
throws Exception {
List<Dimension> dimensionList = schema.getDimension().get(datasource.getName());
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
boolean isAdd = false;
if (!CollectionUtils.isEmpty(dimensionList)) {
for (Dimension dim : dimensionList) {
if (!dim.getName().equalsIgnoreCase(dimension)) {
continue;
}
dataSet.getMeasure().add(DimensionNode.build(dim, scope));
dataSet.getMeasure().add(DimensionNode.build(dim, scope, engineType));
if (nonAgg) {
//dataSet.getMeasure().addAll(DimensionNode.expand(dim, scope));
output.getMeasure().add(DimensionNode.buildName(dim, scope));
output.getMeasure().add(DimensionNode.buildName(dim, scope, engineType));
isAdd = true;
continue;
}
if ("".equals(alias)) {
output.getDimension().add(DimensionNode.buildName(dim, scope));
output.getDimension().add(DimensionNode.buildName(dim, scope, engineType));
} else {
output.getDimension().add(DimensionNode.buildNameAs(alias, dim, scope));
output.getDimension().add(DimensionNode.buildNameAs(alias, dim, scope, engineType));
}
isAdd = true;
break;
@@ -125,11 +126,11 @@ public class SourceRender extends Renderer {
.filter(i -> i.getName().equalsIgnoreCase(dimension)).findFirst();
if (identify.isPresent()) {
if (nonAgg) {
dataSet.getMeasure().add(SemanticNode.parse(identify.get().getName(), scope));
output.getMeasure().add(SemanticNode.parse(identify.get().getName(), scope));
dataSet.getMeasure().add(SemanticNode.parse(identify.get().getName(), scope, engineType));
output.getMeasure().add(SemanticNode.parse(identify.get().getName(), scope, engineType));
} else {
dataSet.getMeasure().add(SemanticNode.parse(identify.get().getName(), scope));
output.getDimension().add(SemanticNode.parse(identify.get().getName(), scope));
dataSet.getMeasure().add(SemanticNode.parse(identify.get().getName(), scope, engineType));
output.getDimension().add(SemanticNode.parse(identify.get().getName(), scope, engineType));
}
isAdd = true;
}
@@ -139,15 +140,15 @@ public class SourceRender extends Renderer {
}
Optional<Dimension> dimensionOptional = getDimensionByName(dimension, datasource);
if (dimensionOptional.isPresent()) {
dataSet.getMeasure().add(DimensionNode.buildArray(dimensionOptional.get(), scope));
dataSet.getMeasure().add(DimensionNode.buildArray(dimensionOptional.get(), scope, engineType));
if (dimensionOptional.get().getDataType().isArray()) {
extendFields.add(dimensionOptional.get().getExpr());
}
if (nonAgg) {
output.getMeasure().add(DimensionNode.buildName(dimensionOptional.get(), scope));
output.getMeasure().add(DimensionNode.buildName(dimensionOptional.get(), scope, engineType));
return;
}
output.getDimension().add(DimensionNode.buildName(dimensionOptional.get(), scope));
output.getDimension().add(DimensionNode.buildName(dimensionOptional.get(), scope, engineType));
}
}
@@ -161,10 +162,10 @@ public class SourceRender extends Renderer {
private static List<SqlNode> getWhereMeasure(List<String> fields, List<String> queryMetrics,
List<String> queryDimensions, Set<String> extendFields, DataSource datasource, SqlValidatorScope scope,
SemanticSchema schema,
boolean nonAgg) throws Exception {
SemanticSchema schema, boolean nonAgg) throws Exception {
Iterator<String> iterator = fields.iterator();
List<SqlNode> whereNode = new ArrayList<>();
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
while (iterator.hasNext()) {
String cur = iterator.next();
if (queryDimensions.contains(cur) || queryMetrics.contains(cur)) {
@@ -179,13 +180,13 @@ public class SourceRender extends Renderer {
if (!dim.getName().equalsIgnoreCase(where)) {
continue;
}
whereNode.addAll(DimensionNode.expand(dim, scope));
whereNode.addAll(DimensionNode.expand(dim, scope, engineType));
isAdd = true;
}
}
Optional<Identify> identify = getIdentifyByName(where, datasource);
if (identify.isPresent()) {
whereNode.add(IdentifyNode.build(identify.get(), scope));
whereNode.add(IdentifyNode.build(identify.get(), scope, engineType));
isAdd = true;
}
if (isAdd) {
@@ -193,7 +194,7 @@ public class SourceRender extends Renderer {
}
Optional<Dimension> dimensionOptional = getDimensionByName(where, datasource);
if (dimensionOptional.isPresent()) {
whereNode.add(DimensionNode.buildArray(dimensionOptional.get(), scope));
whereNode.add(DimensionNode.buildArray(dimensionOptional.get(), scope, engineType));
if (dimensionOptional.get().getDataType().isArray()) {
extendFields.add(dimensionOptional.get().getExpr());
}
@@ -317,12 +318,13 @@ public class SourceRender extends Renderer {
}
public void render(MetricQueryReq metricQueryReq, List<DataSource> dataSources, SqlValidatorScope scope,
SemanticSchema schema, boolean nonAgg) throws Exception {
SemanticSchema schema, boolean nonAgg) throws Exception {
String queryWhere = metricQueryReq.getWhere();
Set<String> whereFields = new HashSet<>();
List<String> fieldWhere = new ArrayList<>();
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
if (queryWhere != null && !queryWhere.isEmpty()) {
SqlNode sqlNode = SemanticNode.parse(queryWhere, scope);
SqlNode sqlNode = SemanticNode.parse(queryWhere, scope, engineType);
FilterNode.getFilterField(sqlNode, whereFields);
fieldWhere = whereFields.stream().collect(Collectors.toList());
}

View File

@@ -109,7 +109,7 @@ public class CalculateAggConverter implements HeadlessConverter {
ParseSqlReq sqlCommend = queryStatement.getParseSqlReq();
Database database = queryStatement.getSemanticModel().getDatabase();
ParseSqlReq parseSqlReq = generateSqlCommend(queryStatement,
EngineType.valueOf(database.getType().toUpperCase()), database.getVersion());
EngineType.fromString(database.getType().toUpperCase()), database.getVersion());
sqlCommend.setSql(parseSqlReq.getSql());
sqlCommend.setTables(parseSqlReq.getTables());
sqlCommend.setRootPath(parseSqlReq.getRootPath());

View File

@@ -0,0 +1,48 @@
package com.tencent.supersonic.headless.core.utils;
import com.tencent.supersonic.headless.api.enums.EngineType;
import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSqlDialect;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import org.apache.calcite.avatica.util.Casing;
import org.apache.calcite.sql.SqlDialect;
import org.apache.calcite.sql.SqlDialect.Context;
import org.apache.calcite.sql.SqlDialect.DatabaseProduct;
public class SqlDialectFactory {
public static final Context DEFAULT_CONTEXT = SqlDialect.EMPTY_CONTEXT
.withDatabaseProduct(DatabaseProduct.BIG_QUERY)
.withLiteralQuoteString("'")
.withLiteralEscapedQuoteString("''")
.withIdentifierQuoteString("`")
.withUnquotedCasing(Casing.UNCHANGED)
.withQuotedCasing(Casing.UNCHANGED)
.withCaseSensitive(false);
public static final Context POSTGRESQL_CONTEXT = SqlDialect.EMPTY_CONTEXT
.withDatabaseProduct(DatabaseProduct.BIG_QUERY)
.withLiteralQuoteString("'")
.withLiteralEscapedQuoteString("''")
.withUnquotedCasing(Casing.UNCHANGED)
.withQuotedCasing(Casing.UNCHANGED)
.withCaseSensitive(false);
private static Map<EngineType, SemanticSqlDialect> sqlDialectMap;
static {
sqlDialectMap = new HashMap<>();
sqlDialectMap.put(EngineType.CLICKHOUSE, new SemanticSqlDialect(DEFAULT_CONTEXT));
sqlDialectMap.put(EngineType.MYSQL, new SemanticSqlDialect(DEFAULT_CONTEXT));
sqlDialectMap.put(EngineType.H2, new SemanticSqlDialect(DEFAULT_CONTEXT));
sqlDialectMap.put(EngineType.POSTGRESQL, new SemanticSqlDialect(POSTGRESQL_CONTEXT));
}
public static SemanticSqlDialect getSqlDialect(EngineType engineType) {
SemanticSqlDialect semanticSqlDialect = sqlDialectMap.get(engineType);
if (Objects.isNull(semanticSqlDialect)) {
return new SemanticSqlDialect(DEFAULT_CONTEXT);
}
return semanticSqlDialect;
}
}

View File

@@ -14,17 +14,11 @@ public class PostgresqlParametersBuilder implements DbParametersBuilder {
public List<DatabaseParameter> build() {
List<DatabaseParameter> databaseParameters = new ArrayList<>();
DatabaseParameter host = new DatabaseParameter();
host.setComment("host");
host.setName("host");
host.setPlaceholder("请输入host");
host.setComment("链接");
host.setName("url");
host.setPlaceholder("请输入链接");
databaseParameters.add(host);
DatabaseParameter port = new DatabaseParameter();
port.setComment("port");
port.setName("port");
port.setPlaceholder("请输入端口号");
databaseParameters.add(port);
DatabaseParameter userName = new DatabaseParameter();
userName.setComment("用户名");
userName.setName("username");

View File

@@ -60,7 +60,6 @@ public class QueryController {
User user = UserHolder.findUser(request, response);
QuerySqlReq querySqlReq = queryStructReq.convert(queryStructReq);
return queryService.queryBySql(querySqlReq, user);
//return queryService.queryByStructWithAuth(queryStructReq, user);
}
@PostMapping("/queryMetricDataById")

View File

@@ -21,7 +21,7 @@ import java.util.List;
public interface QueryService {
Object queryBySql(QuerySqlReq querySqlCmd, User user) throws Exception;
SemanticQueryResp queryBySql(QuerySqlReq querySqlCmd, User user) throws Exception;
SemanticQueryResp queryByStruct(QueryStructReq queryStructCmd, User user) throws Exception;

View File

@@ -44,7 +44,6 @@ import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.server.annotation.S2SQLDataPermission;
import com.tencent.supersonic.headless.server.annotation.StructDataPermission;
import com.tencent.supersonic.headless.server.aspect.ApiHeaderCheckAspect;
import com.tencent.supersonic.headless.server.cache.CacheManager;
import com.tencent.supersonic.headless.server.cache.QueryCache;
import com.tencent.supersonic.headless.server.manager.SemanticSchemaManager;
import com.tencent.supersonic.headless.server.pojo.DimensionFilter;
@@ -67,7 +66,6 @@ import javax.servlet.http.HttpServletRequest;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections.CollectionUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
@@ -79,17 +77,11 @@ public class QueryServiceImpl implements QueryService {
CacheBuilder.newBuilder().expireAfterWrite(1, TimeUnit.DAYS).build();
private final StatUtils statUtils;
private final CacheManager cacheManager;
private final QueryUtils queryUtils;
private final QueryReqConverter queryReqConverter;
private final Catalog catalog;
private final AppService appService;
@Value("${query.cache.enable:true}")
private Boolean cacheEnable;
private final QueryCache queryCache;
private final SemanticSchemaManager semanticSchemaManager;
private final QueryParser queryParser;
@@ -98,7 +90,6 @@ public class QueryServiceImpl implements QueryService {
public QueryServiceImpl(
StatUtils statUtils,
CacheManager cacheManager,
QueryUtils queryUtils,
QueryReqConverter queryReqConverter,
Catalog catalog,
@@ -108,7 +99,6 @@ public class QueryServiceImpl implements QueryService {
QueryParser queryParser,
QueryPlanner queryPlanner) {
this.statUtils = statUtils;
this.cacheManager = cacheManager;
this.queryUtils = queryUtils;
this.queryReqConverter = queryReqConverter;
this.catalog = catalog;
@@ -122,10 +112,7 @@ public class QueryServiceImpl implements QueryService {
@Override
@S2SQLDataPermission
@SneakyThrows
public Object queryBySql(QuerySqlReq querySQLReq, User user) {
statUtils.initStatInfo(querySQLReq, user);
QueryStatement queryStatement = new QueryStatement();
SemanticQueryResp results = null;
public SemanticQueryResp queryBySql(QuerySqlReq querySQLReq, User user) {
TaskStatusEnum state = TaskStatusEnum.SUCCESS;
try {
//1.initStatInfo
@@ -137,23 +124,25 @@ public class QueryServiceImpl implements QueryService {
}
StatUtils.get().setUseResultCache(false);
//3 query from db
queryStatement = convertToQueryStatement(querySQLReq, user);
QueryStatement queryStatement = convertToQueryStatement(querySQLReq, user);
log.info("queryStatement:{}", queryStatement);
results = query(queryStatement);
SemanticQueryResp result = query(queryStatement);
//4 reset cache and set stateInfo
Boolean setCacheSuccess = queryCache.put(querySQLReq, results);
Boolean setCacheSuccess = queryCache.put(querySQLReq, result);
if (setCacheSuccess) {
// if semanticQueryResp is not null, update cache data
statUtils.updateResultCacheKey(queryCache.getCacheKey(querySQLReq));
}
if (Objects.isNull(results)) {
if (Objects.isNull(result)) {
state = TaskStatusEnum.ERROR;
}
return result;
} catch (Exception e) {
log.info("convertToQueryStatement has a exception:", e);
throw e;
} finally {
statUtils.statInfo2DbAsync(state);
}
statUtils.statInfo2DbAsync(state);
return results;
}
public SemanticQueryResp queryByQueryStatement(QueryStatement queryStatement) {
@@ -186,7 +175,6 @@ public class QueryServiceImpl implements QueryService {
@Override
public SemanticQueryResp queryByStruct(QueryStructReq queryStructReq, User user) throws Exception {
SemanticQueryResp semanticQueryResp = null;
TaskStatusEnum state = TaskStatusEnum.SUCCESS;
log.info("[queryStructReq:{}]", queryStructReq);
try {
@@ -200,17 +188,17 @@ public class QueryServiceImpl implements QueryService {
StatUtils.get().setUseResultCache(false);
//3 query
QueryStatement queryStatement = buildQueryStatement(queryStructReq);
semanticQueryResp = query(queryStatement);
SemanticQueryResp result = query(queryStatement);
//4 reset cache and set stateInfo
Boolean setCacheSuccess = queryCache.put(queryStructReq, semanticQueryResp);
Boolean setCacheSuccess = queryCache.put(queryStructReq, result);
if (setCacheSuccess) {
// if semanticQueryResp is not null, update cache data
// if result is not null, update cache data
statUtils.updateResultCacheKey(queryCache.getCacheKey(queryStructReq));
}
if (Objects.isNull(semanticQueryResp)) {
if (Objects.isNull(result)) {
state = TaskStatusEnum.ERROR;
}
return semanticQueryResp;
return result;
} catch (Exception e) {
log.error("exception in queryByStruct, e: ", e);
state = TaskStatusEnum.ERROR;
@@ -447,28 +435,6 @@ public class QueryServiceImpl implements QueryService {
return ExplainResp.builder().sql(sql).build();
}
/**
 * Decides whether the result cache may be consulted for a multi-struct query.
 *
 * <p>Caching is used only when it is globally enabled AND the first sub-request
 * explicitly carries a cache flag; in every other case the query goes to the source.
 *
 * @param queryStructReq the multi-struct query request being dispatched
 * @return {@code true} if the first sub-request opts into caching, {@code false} otherwise
 */
private boolean isCache(QueryMultiStructReq queryStructReq) {
    // Global switch takes precedence over any per-request preference.
    if (!cacheEnable) {
        return false;
    }
    // No sub-requests means nothing to key the cache on.
    if (CollectionUtils.isEmpty(queryStructReq.getQueryStructReqs())) {
        return false;
    }
    // Only the first sub-request's cache preference is honored — presumably all
    // sub-requests share one cache policy; TODO confirm with callers.
    if (queryStructReq.getQueryStructReqs().get(0).getCacheInfo() == null) {
        return false;
    }
    return queryStructReq.getQueryStructReqs().get(0).getCacheInfo().getCache();
}
/**
 * Looks up a previously cached query result.
 *
 * @param key      cache key derived from the incoming query request
 * @param queryCmd the original query command, used only for logging context
 * @return the cached {@link SemanticQueryResp}, or {@code null} on a cache miss
 */
private SemanticQueryResp queryByCache(String key, Object queryCmd) {
    Object resultObject = cacheManager.get(key);
    if (Objects.isNull(resultObject)) {
        // Cache miss: caller falls through to querying the source.
        return null;
    }
    // Pass queryCmd directly instead of queryCmd.toString(): SLF4J formats
    // arguments lazily, which avoids eager stringification when INFO is
    // disabled and a NullPointerException when queryCmd is null.
    log.info("queryByStructWithCache, key:{}, queryCmd:{}", key, queryCmd);
    statUtils.updateResultCacheKey(key);
    // NOTE(review): assumes only SemanticQueryResp instances are stored under
    // these keys; a foreign cached type would throw ClassCastException here.
    return (SemanticQueryResp) resultObject;
}
private QuerySqlReq buildQuerySqlReq(QueryDimValueReq queryDimValueReq) {
QuerySqlReq querySQLReq = new QuerySqlReq();
List<ModelResp> modelResps = catalog.getModelList(Lists.newArrayList(queryDimValueReq.getModelId()));

View File

@@ -115,7 +115,7 @@ public class QueryReqConverter {
result.setRootPath(querySQLReq.getModelIdStr());
result.setTables(tables);
DatabaseResp database = catalog.getDatabaseByModelId(querySQLReq.getModelIds().get(0));
if (!sqlGenerateUtils.isSupportWith(EngineType.valueOf(database.getType().toUpperCase()),
if (!sqlGenerateUtils.isSupportWith(EngineType.fromString(database.getType().toUpperCase()),
database.getVersion())) {
result.setSupportWith(false);
result.setWithAlias(false);

View File

@@ -2,6 +2,7 @@ package com.tencent.supersonic.headless.server.calcite;
import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.headless.api.enums.AggOption;
import com.tencent.supersonic.headless.api.enums.EngineType;
import com.tencent.supersonic.headless.api.request.MetricQueryReq;
import com.tencent.supersonic.headless.api.response.SqlParserResp;
import com.tencent.supersonic.headless.core.parser.calcite.planner.AggPlanner;
@@ -43,7 +44,8 @@ class HeadlessParserServiceTest {
QueryStatement queryStatement = new QueryStatement();
queryStatement.setMetricReq(metricCommand);
aggBuilder.explain(queryStatement, AggOption.getAggregation(!isAgg));
sqlParser.setSql(aggBuilder.getSql());
EngineType engineType = EngineType.fromString(semanticSchema.getSemanticModel().getDatabase().getType());
sqlParser.setSql(aggBuilder.getSql(engineType));
sqlParser.setSourceId(aggBuilder.getSourceId());
} catch (Exception e) {
sqlParser.setErrMsg(e.getMessage());