(improvement)(Headless) Support optimizing and generating SQL based on the engine type. (#666)

This commit is contained in:
lexluo09
2024-01-19 22:32:28 +08:00
committed by GitHub
parent 20c8456705
commit 9c6bd7cf19
30 changed files with 309 additions and 277 deletions

View File

@@ -67,7 +67,7 @@ public class LocalSemanticInterpreter extends BaseSemanticInterpreter {
@SneakyThrows @SneakyThrows
public SemanticQueryResp queryByS2SQL(QuerySqlReq querySQLReq, User user) { public SemanticQueryResp queryByS2SQL(QuerySqlReq querySQLReq, User user) {
queryService = ContextUtils.getBean(QueryService.class); queryService = ContextUtils.getBean(QueryService.class);
Object object = queryService.queryBySql(querySQLReq, user); SemanticQueryResp object = queryService.queryBySql(querySQLReq, user);
return JsonUtil.toObject(JsonUtil.toString(object), SemanticQueryResp.class); return JsonUtil.toObject(JsonUtil.toString(object), SemanticQueryResp.class);
} }

View File

@@ -28,4 +28,14 @@ public enum EngineType {
public String getName() { public String getName() {
return name; return name;
} }
public static EngineType fromString(String value) {
for (EngineType engineType : EngineType.values()) {
if (engineType.name().equalsIgnoreCase(value)) {
return engineType;
}
}
throw new IllegalArgumentException("Invalid value: " + value);
}
} }

View File

@@ -1,6 +1,7 @@
package com.tencent.supersonic.headless.core.parser.calcite; package com.tencent.supersonic.headless.core.parser.calcite;
import com.tencent.supersonic.headless.api.enums.AggOption; import com.tencent.supersonic.headless.api.enums.AggOption;
import com.tencent.supersonic.headless.api.enums.EngineType;
import com.tencent.supersonic.headless.api.request.MetricQueryReq; import com.tencent.supersonic.headless.api.request.MetricQueryReq;
import com.tencent.supersonic.headless.core.parser.SqlParser; import com.tencent.supersonic.headless.core.parser.SqlParser;
import com.tencent.supersonic.headless.core.parser.calcite.planner.AggPlanner; import com.tencent.supersonic.headless.core.parser.calcite.planner.AggPlanner;
@@ -31,13 +32,15 @@ public class CalciteSqlParser implements SqlParser {
SemanticSchema semanticSchema = getSemanticSchema(semanticModel, queryStatement); SemanticSchema semanticSchema = getSemanticSchema(semanticModel, queryStatement);
AggPlanner aggBuilder = new AggPlanner(semanticSchema); AggPlanner aggBuilder = new AggPlanner(semanticSchema);
aggBuilder.explain(queryStatement, isAgg); aggBuilder.explain(queryStatement, isAgg);
queryStatement.setSql(aggBuilder.getSql()); EngineType engineType = EngineType.fromString(semanticSchema.getSemanticModel().getDatabase().getType());
queryStatement.setSql(aggBuilder.getSql(engineType));
queryStatement.setSourceId(aggBuilder.getSourceId()); queryStatement.setSourceId(aggBuilder.getSourceId());
if (Objects.nonNull(queryStatement.getEnableOptimize()) && queryStatement.getEnableOptimize() if (Objects.nonNull(queryStatement.getEnableOptimize()) && queryStatement.getEnableOptimize()
&& Objects.nonNull(queryStatement.getViewAlias()) && !queryStatement.getViewAlias().isEmpty()) { && Objects.nonNull(queryStatement.getViewAlias()) && !queryStatement.getViewAlias().isEmpty()) {
// simplify model sql with query sql // simplify model sql with query sql
String simplifySql = aggBuilder.simplify( String simplifySql = aggBuilder.simplify(
getSqlByView(aggBuilder.getSql(), queryStatement.getViewSql(), queryStatement.getViewAlias())); getSqlByView(aggBuilder.getSql(engineType), queryStatement.getViewSql(),
queryStatement.getViewAlias()), engineType);
if (Objects.nonNull(simplifySql) && !simplifySql.isEmpty()) { if (Objects.nonNull(simplifySql) && !simplifySql.isEmpty()) {
log.info("simplifySql [{}]", simplifySql); log.info("simplifySql [{}]", simplifySql);
queryStatement.setViewSimplifySql(simplifySql); queryStatement.setViewSimplifySql(simplifySql);
@@ -48,6 +51,7 @@ public class CalciteSqlParser implements SqlParser {
private SemanticSchema getSemanticSchema(SemanticModel semanticModel, QueryStatement queryStatement) { private SemanticSchema getSemanticSchema(SemanticModel semanticModel, QueryStatement queryStatement) {
SemanticSchema semanticSchema = SemanticSchema.newBuilder(semanticModel.getRootPath()).build(); SemanticSchema semanticSchema = SemanticSchema.newBuilder(semanticModel.getRootPath()).build();
semanticSchema.setSemanticModel(semanticModel);
semanticSchema.setDatasource(semanticModel.getDatasourceMap()); semanticSchema.setDatasource(semanticModel.getDatasourceMap());
semanticSchema.setDimension(semanticModel.getDimensionMap()); semanticSchema.setDimension(semanticModel.getDimensionMap());
semanticSchema.setMetric(semanticModel.getMetrics()); semanticSchema.setMetric(semanticModel.getMetrics());

View File

@@ -1,9 +1,11 @@
package com.tencent.supersonic.headless.core.parser.calcite; package com.tencent.supersonic.headless.core.parser.calcite;
import com.tencent.supersonic.headless.core.parser.calcite.schema.HeadlessSqlTypeFactoryImpl; import com.tencent.supersonic.headless.api.enums.EngineType;
import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSqlTypeFactoryImpl;
import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSqlDialect; import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSqlDialect;
import com.tencent.supersonic.headless.core.parser.calcite.schema.ViewExpanderImpl; import com.tencent.supersonic.headless.core.parser.calcite.schema.ViewExpanderImpl;
import com.tencent.supersonic.headless.core.utils.SqlDialectFactory;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections; import java.util.Collections;
import java.util.List; import java.util.List;
@@ -42,13 +44,17 @@ import org.apache.calcite.tools.Frameworks;
public class Configuration { public class Configuration {
public static Properties configProperties = new Properties(); public static Properties configProperties = new Properties();
public static RelDataTypeFactory typeFactory = new HeadlessSqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT); public static RelDataTypeFactory typeFactory = new SemanticSqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
public static SqlOperatorTable operatorTable = SqlStdOperatorTable.instance(); public static SqlOperatorTable operatorTable = SqlStdOperatorTable.instance();
public static CalciteConnectionConfig config = new CalciteConnectionConfigImpl(configProperties); public static CalciteConnectionConfig config = new CalciteConnectionConfigImpl(configProperties);
public static SqlValidator.Config validatorConfig = SqlValidator.Config.DEFAULT
.withConformance(SemanticSqlDialect.DEFAULT.getConformance()) public static SqlValidator.Config getValidatorConfig(EngineType engineType) {
.withDefaultNullCollation(config.defaultNullCollation()) SemanticSqlDialect sqlDialect = SqlDialectFactory.getSqlDialect(engineType);
.withLenientOperatorLookup(true); return SqlValidator.Config.DEFAULT
.withConformance(sqlDialect.getConformance())
.withDefaultNullCollation(config.defaultNullCollation())
.withLenientOperatorLookup(true);
}
static { static {
configProperties.put(CalciteConnectionProperty.CASE_SENSITIVE.camelName(), Boolean.TRUE.toString()); configProperties.put(CalciteConnectionProperty.CASE_SENSITIVE.camelName(), Boolean.TRUE.toString());
@@ -57,8 +63,10 @@ public class Configuration {
} }
public static SqlParser.Config getParserConfig() { public static SqlParser.Config getParserConfig(EngineType engineType) {
CalciteConnectionConfig config = new CalciteConnectionConfigImpl(configProperties); CalciteConnectionConfig config = new CalciteConnectionConfigImpl(configProperties);
SemanticSqlDialect sqlDialect = SqlDialectFactory.getSqlDialect(engineType);
SqlParser.ConfigBuilder parserConfig = SqlParser.configBuilder(); SqlParser.ConfigBuilder parserConfig = SqlParser.configBuilder();
parserConfig.setCaseSensitive(config.caseSensitive()); parserConfig.setCaseSensitive(config.caseSensitive());
parserConfig.setUnquotedCasing(config.unquotedCasing()); parserConfig.setUnquotedCasing(config.unquotedCasing());
@@ -71,12 +79,12 @@ public class Configuration {
.setQuoting(Quoting.SINGLE_QUOTE) .setQuoting(Quoting.SINGLE_QUOTE)
.setQuotedCasing(Casing.TO_UPPER) .setQuotedCasing(Casing.TO_UPPER)
.setUnquotedCasing(Casing.TO_UPPER) .setUnquotedCasing(Casing.TO_UPPER)
.setConformance(SemanticSqlDialect.DEFAULT.getConformance()) .setConformance(sqlDialect.getConformance())
.setLex(Lex.BIG_QUERY); .setLex(Lex.BIG_QUERY);
return parserConfig.build(); return parserConfig.build();
} }
public static SqlValidator getSqlValidator(CalciteSchema rootSchema) { public static SqlValidator getSqlValidator(CalciteSchema rootSchema, EngineType engineType) {
List<SqlOperatorTable> tables = new ArrayList<>(); List<SqlOperatorTable> tables = new ArrayList<>();
tables.add(SqlStdOperatorTable.instance()); tables.add(SqlStdOperatorTable.instance());
SqlOperatorTable operatorTable = new ChainedSqlOperatorTable(tables); SqlOperatorTable operatorTable = new ChainedSqlOperatorTable(tables);
@@ -87,9 +95,8 @@ public class Configuration {
typeFactory, typeFactory,
config config
); );
SqlValidator validator = SqlValidatorUtil.newValidator(operatorTable, catalogReader, typeFactory, return SqlValidatorUtil.newValidator(operatorTable, catalogReader, typeFactory,
validatorConfig); Configuration.getValidatorConfig(engineType));
return validator;
} }
public static SqlToRelConverter.Config getConverterConfig() { public static SqlToRelConverter.Config getConverterConfig() {
@@ -102,11 +109,11 @@ public class Configuration {
} }
public static SqlToRelConverter getSqlToRelConverter(SqlValidatorScope scope, SqlValidator sqlValidator, public static SqlToRelConverter getSqlToRelConverter(SqlValidatorScope scope, SqlValidator sqlValidator,
RelOptPlanner relOptPlanner) { RelOptPlanner relOptPlanner, EngineType engineType) {
RexBuilder rexBuilder = new RexBuilder(typeFactory); RexBuilder rexBuilder = new RexBuilder(typeFactory);
RelOptCluster cluster = RelOptCluster.create(relOptPlanner, rexBuilder); RelOptCluster cluster = RelOptCluster.create(relOptPlanner, rexBuilder);
FrameworkConfig fromworkConfig = Frameworks.newConfigBuilder() FrameworkConfig fromworkConfig = Frameworks.newConfigBuilder()
.parserConfig(getParserConfig()) .parserConfig(getParserConfig(engineType))
.defaultSchema(scope.getValidator().getCatalogReader().getRootSchema().plus()) .defaultSchema(scope.getValidator().getCatalogReader().getRootSchema().plus())
.build(); .build();
return new SqlToRelConverter(new ViewExpanderImpl(), return new SqlToRelConverter(new ViewExpanderImpl(),

View File

@@ -2,12 +2,13 @@ package com.tencent.supersonic.headless.core.parser.calcite.planner;
import com.tencent.supersonic.headless.api.enums.AggOption; import com.tencent.supersonic.headless.api.enums.AggOption;
import com.tencent.supersonic.headless.api.enums.EngineType;
import com.tencent.supersonic.headless.api.request.MetricQueryReq; import com.tencent.supersonic.headless.api.request.MetricQueryReq;
import com.tencent.supersonic.headless.core.parser.calcite.Configuration; import com.tencent.supersonic.headless.core.parser.calcite.Configuration;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Constants; import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.DataSource; import com.tencent.supersonic.headless.core.parser.calcite.s2sql.DataSource;
import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSchema;
import com.tencent.supersonic.headless.core.parser.calcite.schema.SchemaBuilder; import com.tencent.supersonic.headless.core.parser.calcite.schema.SchemaBuilder;
import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSchema;
import com.tencent.supersonic.headless.core.parser.calcite.sql.Renderer; import com.tencent.supersonic.headless.core.parser.calcite.sql.Renderer;
import com.tencent.supersonic.headless.core.parser.calcite.sql.TableView; import com.tencent.supersonic.headless.core.parser.calcite.sql.TableView;
import com.tencent.supersonic.headless.core.parser.calcite.sql.node.DataSourceNode; import com.tencent.supersonic.headless.core.parser.calcite.sql.node.DataSourceNode;
@@ -15,6 +16,7 @@ import com.tencent.supersonic.headless.core.parser.calcite.sql.node.SemanticNode
import com.tencent.supersonic.headless.core.parser.calcite.sql.render.FilterRender; import com.tencent.supersonic.headless.core.parser.calcite.sql.render.FilterRender;
import com.tencent.supersonic.headless.core.parser.calcite.sql.render.OutputRender; import com.tencent.supersonic.headless.core.parser.calcite.sql.render.OutputRender;
import com.tencent.supersonic.headless.core.parser.calcite.sql.render.SourceRender; import com.tencent.supersonic.headless.core.parser.calcite.sql.render.SourceRender;
import com.tencent.supersonic.headless.core.pojo.Database;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.LinkedList; import java.util.LinkedList;
@@ -113,12 +115,14 @@ public class AggPlanner implements Planner {
// build a parse Node // build a parse Node
parse(); parse();
// optimizer // optimizer
optimize(); Database database = queryStatement.getSemanticModel().getDatabase();
EngineType engineType = EngineType.fromString(database.getType());
optimize(engineType);
} }
@Override @Override
public String getSql() { public String getSql(EngineType engineType) {
return SemanticNode.getSql(parserNode); return SemanticNode.getSql(parserNode, engineType);
} }
@Override @Override
@@ -127,26 +131,26 @@ public class AggPlanner implements Planner {
} }
@Override @Override
public String simplify(String sql) { public String simplify(String sql, EngineType engineType) {
return optimize(sql); return optimize(sql, engineType);
} }
public void optimize() { public void optimize(EngineType engineType) {
if (Objects.isNull(schema.getRuntimeOptions()) || Objects.isNull(schema.getRuntimeOptions().getEnableOptimize()) if (Objects.isNull(schema.getRuntimeOptions()) || Objects.isNull(schema.getRuntimeOptions().getEnableOptimize())
|| !schema.getRuntimeOptions().getEnableOptimize()) { || !schema.getRuntimeOptions().getEnableOptimize()) {
return; return;
} }
SqlNode optimizeNode = optimizeSql(SemanticNode.getSql(parserNode)); SqlNode optimizeNode = optimizeSql(SemanticNode.getSql(parserNode, engineType), engineType);
if (Objects.nonNull(optimizeNode)) { if (Objects.nonNull(optimizeNode)) {
parserNode = optimizeNode; parserNode = optimizeNode;
} }
} }
public String optimize(String sql) { public String optimize(String sql, EngineType engineType) {
try { try {
SqlNode sqlNode = SqlParser.create(sql, Configuration.getParserConfig()).parseStmt(); SqlNode sqlNode = SqlParser.create(sql, Configuration.getParserConfig(engineType)).parseStmt();
if (Objects.nonNull(sqlNode)) { if (Objects.nonNull(sqlNode)) {
return SemanticNode.getSql(SemanticNode.optimize(scope, schema, sqlNode)); return SemanticNode.getSql(SemanticNode.optimize(scope, schema, sqlNode, engineType), engineType);
} }
} catch (Exception e) { } catch (Exception e) {
log.error("optimize error {}", e); log.error("optimize error {}", e);
@@ -154,12 +158,12 @@ public class AggPlanner implements Planner {
return ""; return "";
} }
private SqlNode optimizeSql(String sql) { private SqlNode optimizeSql(String sql, EngineType engineType) {
try { try {
log.info("before optimize:[{}]", sql); log.info("before optimize:[{}]", sql);
SqlNode sqlNode = SqlParser.create(sql, Configuration.getParserConfig()).parseStmt(); SqlNode sqlNode = SqlParser.create(sql, Configuration.getParserConfig(engineType)).parseStmt();
if (Objects.nonNull(sqlNode)) { if (Objects.nonNull(sqlNode)) {
return SemanticNode.optimize(scope, schema, sqlNode); return SemanticNode.optimize(scope, schema, sqlNode, engineType);
} }
} catch (Exception e) { } catch (Exception e) {
log.error("optimize error {}", e); log.error("optimize error {}", e);

View File

@@ -2,6 +2,7 @@ package com.tencent.supersonic.headless.core.parser.calcite.planner;
import com.tencent.supersonic.headless.api.enums.AggOption; import com.tencent.supersonic.headless.api.enums.AggOption;
import com.tencent.supersonic.headless.api.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
/** /**
@@ -11,9 +12,9 @@ public interface Planner {
public void explain(QueryStatement queryStatement, AggOption aggOption) throws Exception; public void explain(QueryStatement queryStatement, AggOption aggOption) throws Exception;
public String getSql(); public String getSql(EngineType engineType);
public String getSourceId(); public String getSourceId();
public String simplify(String sql); public String simplify(String sql, EngineType engineType);
} }

View File

@@ -1,6 +1,7 @@
package com.tencent.supersonic.headless.core.parser.calcite.schema; package com.tencent.supersonic.headless.core.parser.calcite.schema;
import com.tencent.supersonic.headless.api.enums.EngineType;
import com.tencent.supersonic.headless.core.parser.calcite.Configuration; import com.tencent.supersonic.headless.core.parser.calcite.Configuration;
import com.tencent.supersonic.headless.core.parser.calcite.sql.S2SQLSqlValidatorImpl; import com.tencent.supersonic.headless.core.parser.calcite.sql.S2SQLSqlValidatorImpl;
@@ -37,8 +38,9 @@ public class SchemaBuilder {
Configuration.typeFactory, Configuration.typeFactory,
Configuration.config Configuration.config
); );
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
S2SQLSqlValidatorImpl s2SQLSqlValidator = new S2SQLSqlValidatorImpl(Configuration.operatorTable, catalogReader, S2SQLSqlValidatorImpl s2SQLSqlValidator = new S2SQLSqlValidatorImpl(Configuration.operatorTable, catalogReader,
Configuration.typeFactory, Configuration.validatorConfig); Configuration.typeFactory, Configuration.getValidatorConfig(engineType));
return new ParameterScope(s2SQLSqlValidator, nameToTypeMap); return new ParameterScope(s2SQLSqlValidator, nameToTypeMap);
} }

View File

@@ -7,7 +7,7 @@ import org.apache.calcite.sql.validate.SqlConformanceEnum;
/** /**
* customize the SqlConformance * customize the SqlConformance
*/ */
public class HeadlessSqlConformance implements SqlConformance { public class SemanticSqlConformance implements SqlConformance {
@Override @Override
public boolean isLiberal() { public boolean isLiberal() {

View File

@@ -1,7 +1,6 @@
package com.tencent.supersonic.headless.core.parser.calcite.schema; package com.tencent.supersonic.headless.core.parser.calcite.schema;
import com.google.common.base.Preconditions; import com.google.common.base.Preconditions;
import org.apache.calcite.avatica.util.Casing;
import org.apache.calcite.sql.SqlDialect; import org.apache.calcite.sql.SqlDialect;
import org.apache.calcite.sql.SqlIntervalLiteral; import org.apache.calcite.sql.SqlIntervalLiteral;
import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.SqlNode;
@@ -14,15 +13,7 @@ import org.checkerframework.checker.nullness.qual.Nullable;
*/ */
public class SemanticSqlDialect extends SqlDialect { public class SemanticSqlDialect extends SqlDialect {
public static final Context DEFAULT_CONTEXT = SqlDialect.EMPTY_CONTEXT private static final SqlConformance tagTdwSqlConformance = new SemanticSqlConformance();
.withDatabaseProduct(DatabaseProduct.BIG_QUERY)
.withLiteralQuoteString("'")
.withLiteralEscapedQuoteString("''")
.withUnquotedCasing(Casing.UNCHANGED)
.withQuotedCasing(Casing.UNCHANGED)
.withCaseSensitive(false);
public static final SqlDialect DEFAULT = new SemanticSqlDialect(DEFAULT_CONTEXT);
private static final SqlConformance tagTdwSqlConformance = new HeadlessSqlConformance();
public SemanticSqlDialect(Context context) { public SemanticSqlDialect(Context context) {
super(context); super(context);

View File

@@ -7,9 +7,9 @@ import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
/** /**
* customize the SqlTypeFactoryImpl * customize the SqlTypeFactoryImpl
*/ */
public class HeadlessSqlTypeFactoryImpl extends SqlTypeFactoryImpl { public class SemanticSqlTypeFactoryImpl extends SqlTypeFactoryImpl {
public HeadlessSqlTypeFactoryImpl(RelDataTypeSystem typeSystem) { public SemanticSqlTypeFactoryImpl(RelDataTypeSystem typeSystem) {
super(typeSystem); super(typeSystem);
} }

View File

@@ -1,6 +1,7 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql; package com.tencent.supersonic.headless.core.parser.calcite.sql;
import com.tencent.supersonic.headless.api.enums.EngineType;
import com.tencent.supersonic.headless.api.request.MetricQueryReq; import com.tencent.supersonic.headless.api.request.MetricQueryReq;
import com.tencent.supersonic.headless.core.parser.calcite.sql.node.MeasureNode; import com.tencent.supersonic.headless.core.parser.calcite.sql.node.MeasureNode;
import com.tencent.supersonic.headless.core.parser.calcite.sql.node.MetricNode; import com.tencent.supersonic.headless.core.parser.calcite.sql.node.MetricNode;
@@ -47,27 +48,31 @@ public abstract class Renderer {
} }
public static MetricNode buildMetricNode(String metric, DataSource datasource, SqlValidatorScope scope, public static MetricNode buildMetricNode(String metric, DataSource datasource, SqlValidatorScope scope,
SemanticSchema schema, boolean nonAgg, String alias) throws Exception { SemanticSchema schema, boolean nonAgg, String alias) throws Exception {
Optional<Metric> metricOpt = getMetricByName(metric, schema); Optional<Metric> metricOpt = getMetricByName(metric, schema);
MetricNode metricNode = new MetricNode(); MetricNode metricNode = new MetricNode();
EngineType engineType = EngineType.fromString(datasource.getType());
if (metricOpt.isPresent()) { if (metricOpt.isPresent()) {
metricNode.setMetric(metricOpt.get()); metricNode.setMetric(metricOpt.get());
for (Measure m : metricOpt.get().getMetricTypeParams().getMeasures()) { for (Measure m : metricOpt.get().getMetricTypeParams().getMeasures()) {
Optional<Measure> measure = getMeasureByName(m.getName(), datasource); Optional<Measure> measure = getMeasureByName(m.getName(), datasource);
if (measure.isPresent()) { if (measure.isPresent()) {
metricNode.getNonAggNode() metricNode.getNonAggNode()
.put(measure.get().getName(), MeasureNode.buildNonAgg(alias, measure.get(), scope)); .put(measure.get().getName(),
MeasureNode.buildNonAgg(alias, measure.get(), scope, engineType));
metricNode.getAggNode() metricNode.getAggNode()
.put(measure.get().getName(), MeasureNode.buildAgg(measure.get(), nonAgg, scope)); .put(measure.get().getName(),
MeasureNode.buildAgg(measure.get(), nonAgg, scope, engineType));
metricNode.getAggFunction().put(measure.get().getName(), measure.get().getAgg()); metricNode.getAggFunction().put(measure.get().getName(), measure.get().getAgg());
} else { } else {
metricNode.getNonAggNode().put(m.getName(), MeasureNode.buildNonAgg(alias, m, scope)); metricNode.getNonAggNode().put(m.getName(), MeasureNode.buildNonAgg(alias, m, scope, engineType));
metricNode.getAggNode().put(m.getName(), MeasureNode.buildAgg(m, nonAgg, scope)); metricNode.getAggNode().put(m.getName(), MeasureNode.buildAgg(m, nonAgg, scope, engineType));
metricNode.getAggFunction().put(m.getName(), m.getAgg()); metricNode.getAggFunction().put(m.getName(), m.getAgg());
} }
if (m.getConstraint() != null && !m.getConstraint().isEmpty()) { if (m.getConstraint() != null && !m.getConstraint().isEmpty()) {
metricNode.getMeasureFilter().put(m.getName(), SemanticNode.parse(m.getConstraint(), scope)); metricNode.getMeasureFilter()
.put(m.getName(), SemanticNode.parse(m.getConstraint(), scope, engineType));
} }
} }
return metricNode; return metricNode;
@@ -75,13 +80,14 @@ public abstract class Renderer {
Optional<Measure> measure = getMeasureByName(metric, datasource); Optional<Measure> measure = getMeasureByName(metric, datasource);
if (measure.isPresent()) { if (measure.isPresent()) {
metricNode.getNonAggNode() metricNode.getNonAggNode()
.put(measure.get().getName(), MeasureNode.buildNonAgg(alias, measure.get(), scope)); .put(measure.get().getName(), MeasureNode.buildNonAgg(alias, measure.get(), scope, engineType));
metricNode.getAggNode().put(measure.get().getName(), MeasureNode.buildAgg(measure.get(), nonAgg, scope)); metricNode.getAggNode()
.put(measure.get().getName(), MeasureNode.buildAgg(measure.get(), nonAgg, scope, engineType));
metricNode.getAggFunction().put(measure.get().getName(), measure.get().getAgg()); metricNode.getAggFunction().put(measure.get().getName(), measure.get().getAgg());
if (measure.get().getConstraint() != null && !measure.get().getConstraint().isEmpty()) { if (measure.get().getConstraint() != null && !measure.get().getConstraint().isEmpty()) {
metricNode.getMeasureFilter() metricNode.getMeasureFilter().put(measure.get().getName(),
.put(measure.get().getName(), SemanticNode.parse(measure.get().getConstraint(), scope)); SemanticNode.parse(measure.get().getConstraint(), scope, engineType));
} }
} }
return metricNode; return metricNode;
@@ -105,5 +111,5 @@ public abstract class Renderer {
} }
public abstract void render(MetricQueryReq metricCommand, List<DataSource> dataSources, SqlValidatorScope scope, public abstract void render(MetricQueryReq metricCommand, List<DataSource> dataSources, SqlValidatorScope scope,
SemanticSchema schema, boolean nonAgg) throws Exception; SemanticSchema schema, boolean nonAgg) throws Exception;
} }

View File

@@ -1,19 +1,22 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql.node; package com.tencent.supersonic.headless.core.parser.calcite.sql.node;
import com.tencent.supersonic.headless.api.enums.EngineType;
import java.util.Objects; import java.util.Objects;
import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope; import org.apache.calcite.sql.validate.SqlValidatorScope;
public class AggFunctionNode extends SemanticNode { public class AggFunctionNode extends SemanticNode {
public static SqlNode build(String agg, String name, SqlValidatorScope scope) throws Exception { public static SqlNode build(String agg, String name, SqlValidatorScope scope, EngineType engineType)
throws Exception {
if (Objects.isNull(agg) || agg.isEmpty()) { if (Objects.isNull(agg) || agg.isEmpty()) {
return parse(name, scope); return parse(name, scope, engineType);
} }
if (AggFunction.COUNT_DISTINCT.name().equalsIgnoreCase(agg)) { if (AggFunction.COUNT_DISTINCT.name().equalsIgnoreCase(agg)) {
return parse(AggFunction.COUNT.name() + " ( " + AggFunction.DISTINCT.name() + " " + name + " ) ", scope); return parse(AggFunction.COUNT.name() + " ( " + AggFunction.DISTINCT.name() + " " + name + " ) ", scope,
engineType);
} }
return parse(agg + " ( " + name + " ) ", scope); return parse(agg + " ( " + name + " ) ", scope, engineType);
} }
public static enum AggFunction { public static enum AggFunction {

View File

@@ -54,7 +54,7 @@ public class DataSourceNode extends SemanticNode {
if (sqlTable.isEmpty()) { if (sqlTable.isEmpty()) {
throw new Exception("DatasourceNode build error [tableSqlNode not found]"); throw new Exception("DatasourceNode build error [tableSqlNode not found]");
} }
SqlNode source = getTable(sqlTable, scope); SqlNode source = getTable(sqlTable, scope, EngineType.fromString(datasource.getType()));
addSchema(scope, datasource, source); addSchema(scope, datasource, source);
return buildAs(datasource.getName(), source); return buildAs(datasource.getName(), source);
} }
@@ -87,8 +87,9 @@ public class DataSourceNode extends SemanticNode {
Set<String> dateInfo = new HashSet<>(); Set<String> dateInfo = new HashSet<>();
Set<String> dimensions = new HashSet<>(); Set<String> dimensions = new HashSet<>();
Set<String> metrics = new HashSet<>(); Set<String> metrics = new HashSet<>();
EngineType engineType = EngineType.fromString(datasource.getType());
for (Dimension d : datasource.getDimensions()) { for (Dimension d : datasource.getDimensions()) {
List<SqlNode> identifiers = expand(SemanticNode.parse(d.getExpr(), scope), scope); List<SqlNode> identifiers = expand(SemanticNode.parse(d.getExpr(), scope, engineType), scope);
identifiers.stream().forEach(i -> dimensions.add(i.toString())); identifiers.stream().forEach(i -> dimensions.add(i.toString()));
dimensions.add(d.getName()); dimensions.add(d.getName());
} }
@@ -96,7 +97,7 @@ public class DataSourceNode extends SemanticNode {
dimensions.add(i.getName()); dimensions.add(i.getName());
} }
for (Measure m : datasource.getMeasures()) { for (Measure m : datasource.getMeasures()) {
List<SqlNode> identifiers = expand(SemanticNode.parse(m.getExpr(), scope), scope); List<SqlNode> identifiers = expand(SemanticNode.parse(m.getExpr(), scope, engineType), scope);
identifiers.stream().forEach(i -> { identifiers.stream().forEach(i -> {
if (!dimensions.contains(i.toString())) { if (!dimensions.contains(i.toString())) {
metrics.add(i.toString()); metrics.add(i.toString());
@@ -124,15 +125,17 @@ public class DataSourceNode extends SemanticNode {
if (CollectionUtils.isEmpty(exprList)) { if (CollectionUtils.isEmpty(exprList)) {
return build(datasource, scope); return build(datasource, scope);
} }
EngineType engineType = EngineType.fromString(datasource.getType());
SqlNode view = new SqlBasicCall(new LateralViewExplodeNode(), Arrays.asList(build(datasource, scope), SqlNode view = new SqlBasicCall(new LateralViewExplodeNode(), Arrays.asList(build(datasource, scope),
new SqlNodeList(getExtendField(exprList, scope), SqlParserPos.ZERO)), SqlParserPos.ZERO); new SqlNodeList(getExtendField(exprList, scope, engineType), SqlParserPos.ZERO)), SqlParserPos.ZERO);
return buildAs(datasource.getName() + Constants.DIMENSION_ARRAY_SINGLE_SUFFIX, view); return buildAs(datasource.getName() + Constants.DIMENSION_ARRAY_SINGLE_SUFFIX, view);
} }
public static List<SqlNode> getExtendField(Set<String> exprList, SqlValidatorScope scope) throws Exception { public static List<SqlNode> getExtendField(Set<String> exprList, SqlValidatorScope scope, EngineType engineType)
throws Exception {
List<SqlNode> sqlNodeList = new ArrayList<>(); List<SqlNode> sqlNodeList = new ArrayList<>();
for (String expr : exprList) { for (String expr : exprList) {
sqlNodeList.add(parse(expr, scope)); sqlNodeList.add(parse(expr, scope, engineType));
sqlNodeList.add(new SqlDataTypeSpec( sqlNodeList.add(new SqlDataTypeSpec(
new SqlUserDefinedTypeNameSpec(expr + Constants.DIMENSION_ARRAY_SINGLE_SUFFIX, SqlParserPos.ZERO), new SqlUserDefinedTypeNameSpec(expr + Constants.DIMENSION_ARRAY_SINGLE_SUFFIX, SqlParserPos.ZERO),
SqlParserPos.ZERO)); SqlParserPos.ZERO));
@@ -140,8 +143,8 @@ public class DataSourceNode extends SemanticNode {
return sqlNodeList; return sqlNodeList;
} }
private static SqlNode getTable(String sqlQuery, SqlValidatorScope scope) throws Exception { private static SqlNode getTable(String sqlQuery, SqlValidatorScope scope, EngineType engineType) throws Exception {
SqlParser sqlParser = SqlParser.create(sqlQuery, Configuration.getParserConfig()); SqlParser sqlParser = SqlParser.create(sqlQuery, Configuration.getParserConfig(engineType));
SqlNode sqlNode = sqlParser.parseQuery(); SqlNode sqlNode = sqlParser.parseQuery();
scope.validateExpr(sqlNode); scope.validateExpr(sqlNode);
return sqlNode; return sqlNode;
@@ -166,9 +169,10 @@ public class DataSourceNode extends SemanticNode {
public static void mergeQueryFilterDimensionMeasure(SemanticSchema schema, MetricQueryReq metricCommand, public static void mergeQueryFilterDimensionMeasure(SemanticSchema schema, MetricQueryReq metricCommand,
Set<String> queryDimension, List<String> measures, Set<String> queryDimension, List<String> measures,
SqlValidatorScope scope) throws Exception { SqlValidatorScope scope) throws Exception {
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
if (Objects.nonNull(metricCommand.getWhere()) && !metricCommand.getWhere().isEmpty()) { if (Objects.nonNull(metricCommand.getWhere()) && !metricCommand.getWhere().isEmpty()) {
Set<String> filterConditions = new HashSet<>(); Set<String> filterConditions = new HashSet<>();
FilterNode.getFilterField(parse(metricCommand.getWhere(), scope), filterConditions); FilterNode.getFilterField(parse(metricCommand.getWhere(), scope, engineType), filterConditions);
Set<String> queryMeasures = new HashSet<>(measures); Set<String> queryMeasures = new HashSet<>(measures);
Set<String> schemaMetricName = schema.getMetrics().stream() Set<String> schemaMetricName = schema.getMetrics().stream()
.map(m -> m.getName()).collect(Collectors.toSet()); .map(m -> m.getName()).collect(Collectors.toSet());
@@ -223,8 +227,10 @@ public class DataSourceNode extends SemanticNode {
} }
filterMeasure.addAll(sourceMeasure); filterMeasure.addAll(sourceMeasure);
filterMeasure.addAll(dimension); filterMeasure.addAll(dimension);
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
mergeQueryFilterDimensionMeasure(schema, metricCommand, queryDimension, measures, scope); mergeQueryFilterDimensionMeasure(schema, metricCommand, queryDimension, measures, scope);
boolean isAllMatch = checkMatch(sourceMeasure, queryDimension, measures, dimension, metricCommand, scope); boolean isAllMatch = checkMatch(sourceMeasure, queryDimension, measures, dimension, metricCommand, scope,
engineType);
if (isAllMatch) { if (isAllMatch) {
log.info("baseDataSource match all "); log.info("baseDataSource match all ");
return dataSources; return dataSources;
@@ -260,7 +266,8 @@ public class DataSourceNode extends SemanticNode {
List<String> measures, List<String> measures,
Set<String> dimension, Set<String> dimension,
MetricQueryReq metricCommand, MetricQueryReq metricCommand,
SqlValidatorScope scope) throws Exception { SqlValidatorScope scope,
EngineType engineType) throws Exception {
boolean isAllMatch = true; boolean isAllMatch = true;
sourceMeasure.retainAll(measures); sourceMeasure.retainAll(measures);
if (sourceMeasure.size() < measures.size()) { if (sourceMeasure.size() < measures.size()) {
@@ -285,7 +292,7 @@ public class DataSourceNode extends SemanticNode {
if (metricCommand.getWhere() != null && !metricCommand.getWhere().isEmpty()) { if (metricCommand.getWhere() != null && !metricCommand.getWhere().isEmpty()) {
Set<String> whereFields = new HashSet<>(); Set<String> whereFields = new HashSet<>();
SqlNode sqlNode = parse(metricCommand.getWhere(), scope); SqlNode sqlNode = parse(metricCommand.getWhere(), scope, engineType);
FilterNode.getFilterField(sqlNode, whereFields); FilterNode.getFilterField(sqlNode, whereFields);
} }
return isAllMatch; return isAllMatch;

View File

@@ -1,9 +1,9 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql.node; package com.tencent.supersonic.headless.core.parser.calcite.sql.node;
import com.tencent.supersonic.headless.api.enums.EngineType;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Constants; import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Dimension; import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Dimension;
import java.util.Arrays;
import java.util.List; import java.util.List;
import java.util.Objects; import java.util.Objects;
import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.SqlNode;
@@ -11,55 +11,47 @@ import org.apache.calcite.sql.validate.SqlValidatorScope;
public class DimensionNode extends SemanticNode { public class DimensionNode extends SemanticNode {
public static SqlNode build(Dimension dimension, SqlValidatorScope scope) throws Exception { public static SqlNode build(Dimension dimension, SqlValidatorScope scope, EngineType engineType) throws Exception {
SqlNode sqlNode = parse(dimension.getExpr(), scope); SqlNode sqlNode = parse(dimension.getExpr(), scope, engineType);
return buildAs(dimension.getName(), sqlNode); return buildAs(dimension.getName(), sqlNode);
} }
public static List<SqlNode> expand(Dimension dimension, SqlValidatorScope scope) throws Exception { public static List<SqlNode> expand(Dimension dimension, SqlValidatorScope scope, EngineType engineType)
SqlNode sqlNode = parse(dimension.getExpr(), scope); throws Exception {
SqlNode sqlNode = parse(dimension.getExpr(), scope, engineType);
return expand(sqlNode, scope); return expand(sqlNode, scope);
} }
public static SqlNode buildName(Dimension dimension, SqlValidatorScope scope) throws Exception { public static SqlNode buildName(Dimension dimension, SqlValidatorScope scope, EngineType engineType)
return parse(dimension.getName(), scope); throws Exception {
return parse(dimension.getName(), scope, engineType);
} }
public static SqlNode buildExp(Dimension dimension, SqlValidatorScope scope) throws Exception { public static SqlNode buildExp(Dimension dimension, SqlValidatorScope scope, EngineType engineType)
return parse(dimension.getExpr(), scope); throws Exception {
return parse(dimension.getExpr(), scope, engineType);
} }
public static SqlNode buildNameAs(String alias, Dimension dimension, SqlValidatorScope scope) throws Exception { public static SqlNode buildNameAs(String alias, Dimension dimension, SqlValidatorScope scope, EngineType engineType)
throws Exception {
if ("".equals(alias)) { if ("".equals(alias)) {
return buildName(dimension, scope); return buildName(dimension, scope, engineType);
} }
SqlNode sqlNode = parse(dimension.getName(), scope); SqlNode sqlNode = parse(dimension.getName(), scope, engineType);
return buildAs(alias, sqlNode); return buildAs(alias, sqlNode);
} }
public static SqlNode buildArray(Dimension dimension, SqlValidatorScope scope) throws Exception { public static SqlNode buildArray(Dimension dimension, SqlValidatorScope scope, EngineType engineType)
throws Exception {
if (Objects.nonNull(dimension.getDataType()) && dimension.getDataType().isArray()) { if (Objects.nonNull(dimension.getDataType()) && dimension.getDataType().isArray()) {
SqlNode sqlNode = parse(dimension.getExpr(), scope); SqlNode sqlNode = parse(dimension.getExpr(), scope, engineType);
if (isIdentifier(sqlNode)) { if (isIdentifier(sqlNode)) {
return buildAs(dimension.getName(), return buildAs(dimension.getName(),
parse(dimension.getExpr() + Constants.DIMENSION_ARRAY_SINGLE_SUFFIX, scope)); parse(dimension.getExpr() + Constants.DIMENSION_ARRAY_SINGLE_SUFFIX, scope, engineType));
} }
throw new Exception("array dimension expr should only identify"); throw new Exception("array dimension expr should only identify");
} }
return build(dimension, scope); return build(dimension, scope, engineType);
}
public static List<SqlNode> expandArray(Dimension dimension, SqlValidatorScope scope)
throws Exception {
if (dimension.getDataType().isArray()) {
SqlNode sqlNode = parse(dimension.getExpr(), scope);
if (isIdentifier(sqlNode)) {
return Arrays.asList(buildAs(dimension.getName(),
parse(dimension.getExpr() + Constants.DIMENSION_ARRAY_SINGLE_SUFFIX, scope)));
}
throw new Exception("array dimension expr should only identify");
}
return expand(dimension, scope);
} }
} }

View File

@@ -1,5 +1,6 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql.node; package com.tencent.supersonic.headless.core.parser.calcite.sql.node;
import com.tencent.supersonic.headless.api.enums.EngineType;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Identify; import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Identify;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Identify.Type; import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Identify.Type;
import java.util.List; import java.util.List;
@@ -11,8 +12,8 @@ import org.apache.calcite.sql.validate.SqlValidatorScope;
public class IdentifyNode extends SemanticNode { public class IdentifyNode extends SemanticNode {
public static SqlNode build(Identify identify, SqlValidatorScope scope) throws Exception { public static SqlNode build(Identify identify, SqlValidatorScope scope, EngineType engineType) throws Exception {
return parse(identify.getName(), scope); return parse(identify.getName(), scope, engineType);
} }
public static Set<String> getIdentifyNames(List<Identify> identifies, Identify.Type type) { public static Set<String> getIdentifyNames(List<Identify> identifies, Identify.Type type) {

View File

@@ -1,50 +1,33 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql.node; package com.tencent.supersonic.headless.core.parser.calcite.sql.node;
import com.tencent.supersonic.headless.api.enums.EngineType;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Measure; import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Measure;
import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope; import org.apache.calcite.sql.validate.SqlValidatorScope;
public class MeasureNode extends SemanticNode { public class MeasureNode extends SemanticNode {
public static SqlNode build(Measure measure, boolean noAgg, SqlValidatorScope scope) throws Exception { public static SqlNode buildNonAgg(String alias, Measure measure, SqlValidatorScope scope, EngineType engineType)
boolean addAgg = false; throws Exception {
if (!noAgg && measure.getAgg() != null && !measure.getAgg().isEmpty()) { return buildAs(measure.getName(), getExpr(measure, alias, scope, engineType));
addAgg = true;
}
if (measure.getExpr() == null) {
if (addAgg) {
return parse(measure.getAgg() + " ( " + measure.getName() + " ) ", scope);
}
return parse(measure.getName(), scope);
} else {
if (addAgg) {
return buildAs(measure.getName(), parse(measure.getAgg() + " ( " + measure.getExpr() + " ) ", scope));
}
return buildAs(measure.getName(), parse(measure.getExpr(), scope));
}
} }
public static SqlNode buildNonAgg(String alias, Measure measure, SqlValidatorScope scope) throws Exception { public static SqlNode buildAgg(Measure measure, boolean noAgg, SqlValidatorScope scope, EngineType engineType)
return buildAs(measure.getName(), getExpr(measure, alias, scope)); throws Exception {
}
public static SqlNode buildAgg(Measure measure, boolean noAgg, SqlValidatorScope scope) throws Exception {
if ((measure.getAgg() == null || measure.getAgg().isEmpty()) || noAgg) { if ((measure.getAgg() == null || measure.getAgg().isEmpty()) || noAgg) {
return parse(measure.getName(), scope); return parse(measure.getName(), scope, engineType);
} }
return buildAs(measure.getName(), AggFunctionNode.build(measure.getAgg(), measure.getName(), scope)); return buildAs(measure.getName(),
AggFunctionNode.build(measure.getAgg(), measure.getName(), scope, engineType));
} }
public static SqlNode buildAggAs(String aggFunc, String name, SqlValidatorScope scope) throws Exception { private static SqlNode getExpr(Measure measure, String alias, SqlValidatorScope scope, EngineType enginType)
return buildAs(name, AggFunctionNode.build(aggFunc, name, scope)); throws Exception {
}
private static SqlNode getExpr(Measure measure, String alias, SqlValidatorScope scope) throws Exception {
if (measure.getExpr() == null) { if (measure.getExpr() == null) {
return parse((alias.isEmpty() ? "" : alias + ".") + measure.getName(), scope); return parse((alias.isEmpty() ? "" : alias + ".") + measure.getName(), scope, enginType);
} }
return parse(measure.getExpr(), scope); return parse(measure.getExpr(), scope, enginType);
} }
} }

View File

@@ -1,6 +1,7 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql.node; package com.tencent.supersonic.headless.core.parser.calcite.sql.node;
import com.tencent.supersonic.headless.api.enums.EngineType;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Metric; import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Metric;
import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSchema; import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSchema;
import java.util.HashMap; import java.util.HashMap;
@@ -19,12 +20,12 @@ public class MetricNode extends SemanticNode {
private Map<String, SqlNode> measureFilter = new HashMap<>(); private Map<String, SqlNode> measureFilter = new HashMap<>();
private Map<String, String> aggFunction = new HashMap<>(); private Map<String, String> aggFunction = new HashMap<>();
public static SqlNode build(Metric metric, SqlValidatorScope scope) throws Exception { public static SqlNode build(Metric metric, SqlValidatorScope scope, EngineType engineType) throws Exception {
if (metric.getMetricTypeParams() == null || metric.getMetricTypeParams().getExpr() == null if (metric.getMetricTypeParams() == null || metric.getMetricTypeParams().getExpr() == null
|| metric.getMetricTypeParams().getExpr().isEmpty()) { || metric.getMetricTypeParams().getExpr().isEmpty()) {
return parse(metric.getName(), scope); return parse(metric.getName(), scope, engineType);
} }
SqlNode sqlNode = parse(metric.getMetricTypeParams().getExpr(), scope); SqlNode sqlNode = parse(metric.getMetricTypeParams().getExpr(), scope, engineType);
return buildAs(metric.getName(), sqlNode); return buildAs(metric.getName(), sqlNode);
} }

View File

@@ -1,11 +1,13 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql.node; package com.tencent.supersonic.headless.core.parser.calcite.sql.node;
import com.tencent.supersonic.headless.api.enums.EngineType;
import com.tencent.supersonic.headless.core.parser.calcite.Configuration; import com.tencent.supersonic.headless.core.parser.calcite.Configuration;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Constants; import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSchema; import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSchema;
import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSqlDialect; import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSqlDialect;
import com.tencent.supersonic.headless.core.parser.calcite.sql.optimizer.FilterToGroupScanRule; import com.tencent.supersonic.headless.core.parser.calcite.sql.optimizer.FilterToGroupScanRule;
import com.tencent.supersonic.headless.core.utils.SqlDialectFactory;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.HashMap; import java.util.HashMap;
@@ -18,7 +20,6 @@ import java.util.function.UnaryOperator;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.jdbc.CalciteSchema;
import org.apache.calcite.plan.RelOptPlanner; import org.apache.calcite.plan.RelOptPlanner;
import org.apache.calcite.plan.hep.HepPlanner; import org.apache.calcite.plan.hep.HepPlanner;
import org.apache.calcite.plan.hep.HepProgramBuilder; import org.apache.calcite.plan.hep.HepProgramBuilder;
@@ -40,7 +41,6 @@ import org.apache.calcite.sql.SqlSelect;
import org.apache.calcite.sql.SqlWith; import org.apache.calcite.sql.SqlWith;
import org.apache.calcite.sql.SqlWriterConfig; import org.apache.calcite.sql.SqlWriterConfig;
import org.apache.calcite.sql.fun.SqlStdOperatorTable; import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParseException;
import org.apache.calcite.sql.parser.SqlParser; import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.parser.SqlParserPos; import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.pretty.SqlPrettyWriter; import org.apache.calcite.sql.pretty.SqlPrettyWriter;
@@ -74,8 +74,8 @@ public abstract class SemanticNode {
AGGREGATION_FUNC.add("min"); AGGREGATION_FUNC.add("min");
} }
public static SqlNode parse(String expression, SqlValidatorScope scope) throws Exception { public static SqlNode parse(String expression, SqlValidatorScope scope, EngineType engineType) throws Exception {
SqlParser sqlParser = SqlParser.create(expression, Configuration.getParserConfig()); SqlParser sqlParser = SqlParser.create(expression, Configuration.getParserConfig(engineType));
SqlNode sqlNode = sqlParser.parseExpression(); SqlNode sqlNode = sqlParser.parseExpression();
scope.validateExpr(sqlNode); scope.validateExpr(sqlNode);
return sqlNode; return sqlNode;
@@ -88,8 +88,9 @@ public abstract class SemanticNode {
SqlParserPos.ZERO); SqlParserPos.ZERO);
} }
public static String getSql(SqlNode sqlNode) { public static String getSql(SqlNode sqlNode, EngineType engineType) {
SqlWriterConfig config = SqlPrettyWriter.config().withDialect(SemanticSqlDialect.DEFAULT) SemanticSqlDialect sqlDialect = SqlDialectFactory.getSqlDialect(engineType);
SqlWriterConfig config = SqlPrettyWriter.config().withDialect(sqlDialect)
.withKeywordsLowerCase(true).withClauseEndsLine(true).withAlwaysUseParentheses(false) .withKeywordsLowerCase(true).withClauseEndsLine(true).withAlwaysUseParentheses(false)
.withSelectListItemsOnSeparateLines(false).withUpdateSetListNewline(false).withIndentation(0); .withSelectListItemsOnSeparateLines(false).withUpdateSetListNewline(false).withIndentation(0);
@@ -390,20 +391,22 @@ public abstract class SemanticNode {
return parseInfo; return parseInfo;
} }
public static SqlNode optimize(SqlValidatorScope scope, SemanticSchema schema, SqlNode sqlNode) { public static SqlNode optimize(SqlValidatorScope scope, SemanticSchema schema, SqlNode sqlNode,
EngineType engineType) {
try { try {
HepProgramBuilder hepProgramBuilder = new HepProgramBuilder(); HepProgramBuilder hepProgramBuilder = new HepProgramBuilder();
SemanticSqlDialect sqlDialect = SqlDialectFactory.getSqlDialect(engineType);
hepProgramBuilder.addRuleInstance(new FilterToGroupScanRule(FilterToGroupScanRule.DEFAULT, schema)); hepProgramBuilder.addRuleInstance(new FilterToGroupScanRule(FilterToGroupScanRule.DEFAULT, schema));
RelOptPlanner relOptPlanner = new HepPlanner(hepProgramBuilder.build()); RelOptPlanner relOptPlanner = new HepPlanner(hepProgramBuilder.build());
RelToSqlConverter converter = new RelToSqlConverter(SemanticSqlDialect.DEFAULT); RelToSqlConverter converter = new RelToSqlConverter(sqlDialect);
SqlValidator sqlValidator = Configuration.getSqlValidator( SqlValidator sqlValidator = Configuration.getSqlValidator(
scope.getValidator().getCatalogReader().getRootSchema()); scope.getValidator().getCatalogReader().getRootSchema(), engineType);
SqlToRelConverter sqlToRelConverter = Configuration.getSqlToRelConverter(scope, sqlValidator, SqlToRelConverter sqlToRelConverter = Configuration.getSqlToRelConverter(scope, sqlValidator,
relOptPlanner); relOptPlanner, engineType);
RelNode sqlRel = sqlToRelConverter.convertQuery( RelNode sqlRel = sqlToRelConverter.convertQuery(
sqlValidator.validate(sqlNode), false, true).rel; sqlValidator.validate(sqlNode), false, true).rel;
log.debug("RelNode optimize {}", SemanticNode.getSql(converter.visitRoot(sqlRel).asStatement())); log.debug("RelNode optimize {}",
SemanticNode.getSql(converter.visitRoot(sqlRel).asStatement(), engineType));
relOptPlanner.setRoot(sqlRel); relOptPlanner.setRoot(sqlRel);
RelNode relNode = relOptPlanner.findBestExp(); RelNode relNode = relOptPlanner.findBestExp();
return converter.visitRoot(relNode).asStatement(); return converter.visitRoot(relNode).asStatement();
@@ -413,13 +416,6 @@ public abstract class SemanticNode {
return null; return null;
} }
public static RelNode getRelNode(CalciteSchema rootSchema, SqlToRelConverter sqlToRelConverter, String sql)
throws SqlParseException {
SqlValidator sqlValidator = Configuration.getSqlValidator(rootSchema);
return sqlToRelConverter.convertQuery(
sqlValidator.validate(SqlParser.create(sql, SqlParser.Config.DEFAULT).parseStmt()), false, true).rel;
}
public static SqlBinaryOperator getBinaryOperator(String val) { public static SqlBinaryOperator getBinaryOperator(String val) {
if (val.equals("=")) { if (val.equals("=")) {
return SqlStdOperatorTable.EQUALS; return SqlStdOperatorTable.EQUALS;

View File

@@ -1,6 +1,7 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql.render; package com.tencent.supersonic.headless.core.parser.calcite.sql.render;
import com.tencent.supersonic.headless.api.enums.EngineType;
import com.tencent.supersonic.headless.api.request.MetricQueryReq; import com.tencent.supersonic.headless.api.request.MetricQueryReq;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Constants; import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.DataSource; import com.tencent.supersonic.headless.core.parser.calcite.s2sql.DataSource;
@@ -34,8 +35,10 @@ public class FilterRender extends Renderer {
SqlNode filterNode = null; SqlNode filterNode = null;
List<String> queryMetrics = new ArrayList<>(metricCommand.getMetrics()); List<String> queryMetrics = new ArrayList<>(metricCommand.getMetrics());
List<String> queryDimensions = new ArrayList<>(metricCommand.getDimensions()); List<String> queryDimensions = new ArrayList<>(metricCommand.getDimensions());
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
if (metricCommand.getWhere() != null && !metricCommand.getWhere().isEmpty()) { if (metricCommand.getWhere() != null && !metricCommand.getWhere().isEmpty()) {
filterNode = SemanticNode.parse(metricCommand.getWhere(), scope); filterNode = SemanticNode.parse(metricCommand.getWhere(), scope, engineType);
Set<String> whereFields = new HashSet<>(); Set<String> whereFields = new HashSet<>();
FilterNode.getFilterField(filterNode, whereFields); FilterNode.getFilterField(filterNode, whereFields);
List<String> fieldWhere = whereFields.stream().collect(Collectors.toList()); List<String> fieldWhere = whereFields.stream().collect(Collectors.toList());
@@ -49,7 +52,7 @@ public class FilterRender extends Renderer {
queryDimensions.addAll(dimensions); queryDimensions.addAll(dimensions);
} }
for (String dimension : queryDimensions) { for (String dimension : queryDimensions) {
tableView.getMeasure().add(SemanticNode.parse(dimension, scope)); tableView.getMeasure().add(SemanticNode.parse(dimension, scope, engineType));
} }
for (String metric : queryMetrics) { for (String metric : queryMetrics) {
Optional<Metric> optionalMetric = Renderer.getMetricByName(metric, schema); Optional<Metric> optionalMetric = Renderer.getMetricByName(metric, schema);
@@ -58,9 +61,9 @@ public class FilterRender extends Renderer {
continue; continue;
} }
if (optionalMetric.isPresent()) { if (optionalMetric.isPresent()) {
tableView.getMeasure().add(MetricNode.build(optionalMetric.get(), scope)); tableView.getMeasure().add(MetricNode.build(optionalMetric.get(), scope, engineType));
} else { } else {
tableView.getMeasure().add(SemanticNode.parse(metric, scope)); tableView.getMeasure().add(SemanticNode.parse(metric, scope, engineType));
} }
} }
if (filterNode != null) { if (filterNode != null) {

View File

@@ -1,5 +1,6 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql.render; package com.tencent.supersonic.headless.core.parser.calcite.sql.render;
import com.tencent.supersonic.headless.api.enums.EngineType;
import com.tencent.supersonic.headless.api.request.MetricQueryReq; import com.tencent.supersonic.headless.api.request.MetricQueryReq;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Constants; import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.DataSource; import com.tencent.supersonic.headless.core.parser.calcite.s2sql.DataSource;
@@ -49,13 +50,13 @@ public class JoinRender extends Renderer {
@Override @Override
public void render(MetricQueryReq metricCommand, List<DataSource> dataSources, SqlValidatorScope scope, public void render(MetricQueryReq metricCommand, List<DataSource> dataSources, SqlValidatorScope scope,
SemanticSchema schema, boolean nonAgg) throws Exception { SemanticSchema schema, boolean nonAgg) throws Exception {
String queryWhere = metricCommand.getWhere(); String queryWhere = metricCommand.getWhere();
//dataSources = getOrderSource(dataSources); EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
Set<String> whereFields = new HashSet<>(); Set<String> whereFields = new HashSet<>();
List<String> fieldWhere = new ArrayList<>(); List<String> fieldWhere = new ArrayList<>();
if (queryWhere != null && !queryWhere.isEmpty()) { if (queryWhere != null && !queryWhere.isEmpty()) {
SqlNode sqlNode = SemanticNode.parse(queryWhere, scope); SqlNode sqlNode = SemanticNode.parse(queryWhere, scope, engineType);
FilterNode.getFilterField(sqlNode, whereFields); FilterNode.getFilterField(sqlNode, whereFields);
fieldWhere = whereFields.stream().collect(Collectors.toList()); fieldWhere = whereFields.stream().collect(Collectors.toList());
} }
@@ -129,9 +130,9 @@ public class JoinRender extends Renderer {
if (!filterDimension.isEmpty()) { if (!filterDimension.isEmpty()) {
for (String d : getQueryDimension(filterDimension, queryAllDimension, whereFields)) { for (String d : getQueryDimension(filterDimension, queryAllDimension, whereFields)) {
if (nonAgg) { if (nonAgg) {
filterView.getMeasure().add(SemanticNode.parse(d, scope)); filterView.getMeasure().add(SemanticNode.parse(d, scope, engineType));
} else { } else {
filterView.getDimension().add(SemanticNode.parse(d, scope)); filterView.getDimension().add(SemanticNode.parse(d, scope, engineType));
} }
} }
@@ -143,6 +144,7 @@ public class JoinRender extends Renderer {
List<String> reqMetrics, DataSource dataSource, Set<String> sourceMeasure, SqlValidatorScope scope, List<String> reqMetrics, DataSource dataSource, Set<String> sourceMeasure, SqlValidatorScope scope,
SemanticSchema schema, boolean nonAgg) throws Exception { SemanticSchema schema, boolean nonAgg) throws Exception {
String alias = Constants.JOIN_TABLE_PREFIX + dataSource.getName(); String alias = Constants.JOIN_TABLE_PREFIX + dataSource.getName();
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
for (String m : reqMetrics) { for (String m : reqMetrics) {
if (getMatchMetric(schema, sourceMeasure, m, queryMetrics)) { if (getMatchMetric(schema, sourceMeasure, m, queryMetrics)) {
MetricNode metricNode = buildMetricNode(m, dataSource, scope, schema, nonAgg, alias); MetricNode metricNode = buildMetricNode(m, dataSource, scope, schema, nonAgg, alias);
@@ -150,7 +152,8 @@ public class JoinRender extends Renderer {
if (!metricNode.getNonAggNode().isEmpty()) { if (!metricNode.getNonAggNode().isEmpty()) {
for (String measure : metricNode.getNonAggNode().keySet()) { for (String measure : metricNode.getNonAggNode().keySet()) {
innerSelect.put(measure, innerSelect.put(measure,
SemanticNode.buildAs(measure, SemanticNode.parse(alias + "." + measure, scope))); SemanticNode.buildAs(measure,
SemanticNode.parse(alias + "." + measure, scope, engineType)));
} }
} }
@@ -159,10 +162,10 @@ public class JoinRender extends Renderer {
if (metricNode.getNonAggNode().containsKey(entry.getKey())) { if (metricNode.getNonAggNode().containsKey(entry.getKey())) {
if (nonAgg) { if (nonAgg) {
filterView.getMeasure().add(SemanticNode.buildAs(entry.getKey(), filterView.getMeasure().add(SemanticNode.buildAs(entry.getKey(),
SemanticNode.parse(entry.getKey(), scope))); SemanticNode.parse(entry.getKey(), scope, engineType)));
} else { } else {
filterView.getMeasure().add(SemanticNode.buildAs(entry.getKey(), filterView.getMeasure().add(SemanticNode.buildAs(entry.getKey(),
AggFunctionNode.build(entry.getValue(), entry.getKey(), scope))); AggFunctionNode.build(entry.getValue(), entry.getKey(), scope, engineType)));
} }
} }
@@ -177,14 +180,16 @@ public class JoinRender extends Renderer {
List<String> reqDimensions, DataSource dataSource, Set<String> dimension, SqlValidatorScope scope, List<String> reqDimensions, DataSource dataSource, Set<String> dimension, SqlValidatorScope scope,
SemanticSchema schema) throws Exception { SemanticSchema schema) throws Exception {
String alias = Constants.JOIN_TABLE_PREFIX + dataSource.getName(); String alias = Constants.JOIN_TABLE_PREFIX + dataSource.getName();
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
for (String d : reqDimensions) { for (String d : reqDimensions) {
if (getMatchDimension(schema, dimension, dataSource, d, queryDimension)) { if (getMatchDimension(schema, dimension, dataSource, d, queryDimension)) {
if (d.contains(Constants.DIMENSION_IDENTIFY)) { if (d.contains(Constants.DIMENSION_IDENTIFY)) {
String[] identifyDimension = d.split(Constants.DIMENSION_IDENTIFY); String[] identifyDimension = d.split(Constants.DIMENSION_IDENTIFY);
innerSelect.put(d, innerSelect.put(d,
SemanticNode.buildAs(d, SemanticNode.parse(alias + "." + identifyDimension[1], scope))); SemanticNode.buildAs(d,
SemanticNode.parse(alias + "." + identifyDimension[1], scope, engineType)));
} else { } else {
innerSelect.put(d, SemanticNode.buildAs(d, SemanticNode.parse(alias + "." + d, scope))); innerSelect.put(d, SemanticNode.buildAs(d, SemanticNode.parse(alias + "." + d, scope, engineType)));
} }
filterDimension.add(d); filterDimension.add(d);
@@ -253,16 +258,16 @@ public class JoinRender extends Renderer {
} }
private SqlNode buildJoin(SqlNode left, TableView leftTable, TableView tableView, Map<String, String> before, private SqlNode buildJoin(SqlNode left, TableView leftTable, TableView tableView, Map<String, String> before,
DataSource dataSource, DataSource dataSource, SemanticSchema schema, SqlValidatorScope scope)
SemanticSchema schema, SqlValidatorScope scope)
throws Exception { throws Exception {
SqlNode condition = getCondition(leftTable, tableView, dataSource, schema, scope); EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
SqlNode condition = getCondition(leftTable, tableView, dataSource, schema, scope, engineType);
SqlLiteral sqlLiteral = SemanticNode.getJoinSqlLiteral(""); SqlLiteral sqlLiteral = SemanticNode.getJoinSqlLiteral("");
JoinRelation matchJoinRelation = getMatchJoinRelation(before, tableView, schema); JoinRelation matchJoinRelation = getMatchJoinRelation(before, tableView, schema);
SqlNode joinRelationCondition = null; SqlNode joinRelationCondition = null;
if (!CollectionUtils.isEmpty(matchJoinRelation.getJoinCondition())) { if (!CollectionUtils.isEmpty(matchJoinRelation.getJoinCondition())) {
sqlLiteral = SemanticNode.getJoinSqlLiteral(matchJoinRelation.getJoinType()); sqlLiteral = SemanticNode.getJoinSqlLiteral(matchJoinRelation.getJoinType());
joinRelationCondition = getCondition(matchJoinRelation, scope); joinRelationCondition = getCondition(matchJoinRelation, scope, engineType);
condition = joinRelationCondition; condition = joinRelationCondition;
} }
if (Materialization.TimePartType.ZIPPER.equals(leftTable.getDataSource().getTimePartType()) if (Materialization.TimePartType.ZIPPER.equals(leftTable.getDataSource().getTimePartType())
@@ -307,13 +312,13 @@ public class JoinRender extends Renderer {
return matchJoinRelation; return matchJoinRelation;
} }
private SqlNode getCondition(JoinRelation joinRelation, private SqlNode getCondition(JoinRelation joinRelation, SqlValidatorScope scope, EngineType engineType)
SqlValidatorScope scope) throws Exception { throws Exception {
SqlNode condition = null; SqlNode condition = null;
for (Triple<String, String, String> con : joinRelation.getJoinCondition()) { for (Triple<String, String, String> con : joinRelation.getJoinCondition()) {
List<SqlNode> ons = new ArrayList<>(); List<SqlNode> ons = new ArrayList<>();
ons.add(SemanticNode.parse(con.getLeft(), scope)); ons.add(SemanticNode.parse(con.getLeft(), scope, engineType));
ons.add(SemanticNode.parse(con.getRight(), scope)); ons.add(SemanticNode.parse(con.getRight(), scope, engineType));
if (Objects.isNull(condition)) { if (Objects.isNull(condition)) {
condition = new SqlBasicCall( condition = new SqlBasicCall(
SemanticNode.getBinaryOperator(con.getMiddle()), SemanticNode.getBinaryOperator(con.getMiddle()),
@@ -334,7 +339,7 @@ public class JoinRender extends Renderer {
} }
private SqlNode getCondition(TableView left, TableView right, DataSource dataSource, SemanticSchema schema, private SqlNode getCondition(TableView left, TableView right, DataSource dataSource, SemanticSchema schema,
SqlValidatorScope scope) throws Exception { SqlValidatorScope scope, EngineType engineType) throws Exception {
Set<String> selectLeft = SemanticNode.getSelect(left.getTable()); Set<String> selectLeft = SemanticNode.getSelect(left.getTable());
Set<String> selectRight = SemanticNode.getSelect(right.getTable()); Set<String> selectRight = SemanticNode.getSelect(right.getTable());
@@ -355,8 +360,8 @@ public class JoinRender extends Renderer {
} }
} }
List<SqlNode> ons = new ArrayList<>(); List<SqlNode> ons = new ArrayList<>();
ons.add(SemanticNode.parse(left.getAlias() + "." + on, scope)); ons.add(SemanticNode.parse(left.getAlias() + "." + on, scope, engineType));
ons.add(SemanticNode.parse(right.getAlias() + "." + on, scope)); ons.add(SemanticNode.parse(right.getAlias() + "." + on, scope, engineType));
if (condition == null) { if (condition == null) {
condition = new SqlBasicCall( condition = new SqlBasicCall(
SqlStdOperatorTable.EQUALS, SqlStdOperatorTable.EQUALS,
@@ -454,18 +459,21 @@ public class JoinRender extends Renderer {
endTime = zipper.getAlias() + "." + endTimeOp.get().getName(); endTime = zipper.getAlias() + "." + endTimeOp.get().getName();
dateTime = partMetric.getAlias() + "." + partTime.get().getName(); dateTime = partMetric.getAlias() + "." + partTime.get().getName();
} }
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
ArrayList<SqlNode> operandList = new ArrayList<>(
Arrays.asList(SemanticNode.parse(endTime, scope, engineType),
SemanticNode.parse(dateTime, scope, engineType)));
condition = condition =
new SqlBasicCall( new SqlBasicCall(
SqlStdOperatorTable.AND, SqlStdOperatorTable.AND,
new ArrayList<SqlNode>(Arrays.asList(new SqlBasicCall( new ArrayList<SqlNode>(Arrays.asList(new SqlBasicCall(
SqlStdOperatorTable.LESS_THAN_OR_EQUAL, SqlStdOperatorTable.LESS_THAN_OR_EQUAL,
new ArrayList<SqlNode>(Arrays.asList(SemanticNode.parse(startTime, scope), new ArrayList<SqlNode>(
SemanticNode.parse(dateTime, scope))), Arrays.asList(SemanticNode.parse(startTime, scope, engineType),
SemanticNode.parse(dateTime, scope, engineType))),
SqlParserPos.ZERO, null), new SqlBasicCall( SqlParserPos.ZERO, null), new SqlBasicCall(
SqlStdOperatorTable.GREATER_THAN, SqlStdOperatorTable.GREATER_THAN,
new ArrayList<SqlNode>(Arrays.asList(SemanticNode.parse(endTime, scope), operandList,
SemanticNode.parse(dateTime, scope))),
SqlParserPos.ZERO, null))), SqlParserPos.ZERO, null))),
SqlParserPos.ZERO, null); SqlParserPos.ZERO, null);

View File

@@ -1,8 +1,9 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql.render; package com.tencent.supersonic.headless.core.parser.calcite.sql.render;
import com.tencent.supersonic.common.pojo.ColumnOrder; import com.tencent.supersonic.headless.api.enums.EngineType;
import com.tencent.supersonic.headless.api.request.MetricQueryReq; import com.tencent.supersonic.headless.api.request.MetricQueryReq;
import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.DataSource; import com.tencent.supersonic.headless.core.parser.calcite.s2sql.DataSource;
import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSchema; import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSchema;
import com.tencent.supersonic.headless.core.parser.calcite.sql.Renderer; import com.tencent.supersonic.headless.core.parser.calcite.sql.Renderer;
@@ -27,19 +28,20 @@ public class OutputRender extends Renderer {
public void render(MetricQueryReq metricCommand, List<DataSource> dataSources, SqlValidatorScope scope, public void render(MetricQueryReq metricCommand, List<DataSource> dataSources, SqlValidatorScope scope,
SemanticSchema schema, boolean nonAgg) throws Exception { SemanticSchema schema, boolean nonAgg) throws Exception {
TableView selectDataSet = super.tableView; TableView selectDataSet = super.tableView;
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
for (String dimension : metricCommand.getDimensions()) { for (String dimension : metricCommand.getDimensions()) {
selectDataSet.getMeasure().add(SemanticNode.parse(dimension, scope)); selectDataSet.getMeasure().add(SemanticNode.parse(dimension, scope, engineType));
} }
for (String metric : metricCommand.getMetrics()) { for (String metric : metricCommand.getMetrics()) {
if (MetricNode.isMetricField(metric, schema)) { if (MetricNode.isMetricField(metric, schema)) {
// metric from field ignore // metric from field ignore
continue; continue;
} }
selectDataSet.getMeasure().add(SemanticNode.parse(metric, scope)); selectDataSet.getMeasure().add(SemanticNode.parse(metric, scope, engineType));
} }
if (metricCommand.getLimit() > 0) { if (metricCommand.getLimit() > 0) {
SqlNode offset = SemanticNode.parse(metricCommand.getLimit().toString(), scope); SqlNode offset = SemanticNode.parse(metricCommand.getLimit().toString(), scope, engineType);
selectDataSet.setOffset(offset); selectDataSet.setOffset(offset);
} }
if (!CollectionUtils.isEmpty(metricCommand.getOrder())) { if (!CollectionUtils.isEmpty(metricCommand.getOrder())) {
@@ -47,9 +49,9 @@ public class OutputRender extends Renderer {
for (ColumnOrder columnOrder : metricCommand.getOrder()) { for (ColumnOrder columnOrder : metricCommand.getOrder()) {
if (SqlStdOperatorTable.DESC.getName().equalsIgnoreCase(columnOrder.getOrder())) { if (SqlStdOperatorTable.DESC.getName().equalsIgnoreCase(columnOrder.getOrder())) {
orderList.add(SqlStdOperatorTable.DESC.createCall(SqlParserPos.ZERO, orderList.add(SqlStdOperatorTable.DESC.createCall(SqlParserPos.ZERO,
new SqlNode[]{SemanticNode.parse(columnOrder.getCol(), scope)})); new SqlNode[]{SemanticNode.parse(columnOrder.getCol(), scope, engineType)}));
} else { } else {
orderList.add(SemanticNode.parse(columnOrder.getCol(), scope)); orderList.add(SemanticNode.parse(columnOrder.getCol(), scope, engineType));
} }
} }
selectDataSet.setOrder(new SqlNodeList(orderList, SqlParserPos.ZERO)); selectDataSet.setOrder(new SqlNodeList(orderList, SqlParserPos.ZERO));

View File

@@ -1,6 +1,7 @@
package com.tencent.supersonic.headless.core.parser.calcite.sql.render; package com.tencent.supersonic.headless.core.parser.calcite.sql.render;
import com.tencent.supersonic.headless.api.enums.EngineType;
import com.tencent.supersonic.headless.api.request.MetricQueryReq; import com.tencent.supersonic.headless.api.request.MetricQueryReq;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Constants; import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.parser.calcite.s2sql.DataSource; import com.tencent.supersonic.headless.core.parser.calcite.s2sql.DataSource;
@@ -39,9 +40,9 @@ import java.util.stream.Collectors;
public class SourceRender extends Renderer { public class SourceRender extends Renderer {
public static TableView renderOne(String alias, List<String> fieldWheres, public static TableView renderOne(String alias, List<String> fieldWheres,
List<String> reqMetrics, List<String> reqDimensions, List<String> reqMetrics, List<String> reqDimensions,
String queryWhere, DataSource datasource, SqlValidatorScope scope, String queryWhere, DataSource datasource, SqlValidatorScope scope,
SemanticSchema schema, boolean nonAgg) throws Exception { SemanticSchema schema, boolean nonAgg) throws Exception {
TableView dataSet = new TableView(); TableView dataSet = new TableView();
TableView output = new TableView(); TableView output = new TableView();
@@ -97,24 +98,24 @@ public class SourceRender extends Renderer {
boolean nonAgg, Set<String> extendFields, TableView dataSet, TableView output, SqlValidatorScope scope) boolean nonAgg, Set<String> extendFields, TableView dataSet, TableView output, SqlValidatorScope scope)
throws Exception { throws Exception {
List<Dimension> dimensionList = schema.getDimension().get(datasource.getName()); List<Dimension> dimensionList = schema.getDimension().get(datasource.getName());
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
boolean isAdd = false; boolean isAdd = false;
if (!CollectionUtils.isEmpty(dimensionList)) { if (!CollectionUtils.isEmpty(dimensionList)) {
for (Dimension dim : dimensionList) { for (Dimension dim : dimensionList) {
if (!dim.getName().equalsIgnoreCase(dimension)) { if (!dim.getName().equalsIgnoreCase(dimension)) {
continue; continue;
} }
dataSet.getMeasure().add(DimensionNode.build(dim, scope)); dataSet.getMeasure().add(DimensionNode.build(dim, scope, engineType));
if (nonAgg) { if (nonAgg) {
//dataSet.getMeasure().addAll(DimensionNode.expand(dim, scope)); output.getMeasure().add(DimensionNode.buildName(dim, scope, engineType));
output.getMeasure().add(DimensionNode.buildName(dim, scope));
isAdd = true; isAdd = true;
continue; continue;
} }
if ("".equals(alias)) { if ("".equals(alias)) {
output.getDimension().add(DimensionNode.buildName(dim, scope)); output.getDimension().add(DimensionNode.buildName(dim, scope, engineType));
} else { } else {
output.getDimension().add(DimensionNode.buildNameAs(alias, dim, scope)); output.getDimension().add(DimensionNode.buildNameAs(alias, dim, scope, engineType));
} }
isAdd = true; isAdd = true;
break; break;
@@ -125,11 +126,11 @@ public class SourceRender extends Renderer {
.filter(i -> i.getName().equalsIgnoreCase(dimension)).findFirst(); .filter(i -> i.getName().equalsIgnoreCase(dimension)).findFirst();
if (identify.isPresent()) { if (identify.isPresent()) {
if (nonAgg) { if (nonAgg) {
dataSet.getMeasure().add(SemanticNode.parse(identify.get().getName(), scope)); dataSet.getMeasure().add(SemanticNode.parse(identify.get().getName(), scope, engineType));
output.getMeasure().add(SemanticNode.parse(identify.get().getName(), scope)); output.getMeasure().add(SemanticNode.parse(identify.get().getName(), scope, engineType));
} else { } else {
dataSet.getMeasure().add(SemanticNode.parse(identify.get().getName(), scope)); dataSet.getMeasure().add(SemanticNode.parse(identify.get().getName(), scope, engineType));
output.getDimension().add(SemanticNode.parse(identify.get().getName(), scope)); output.getDimension().add(SemanticNode.parse(identify.get().getName(), scope, engineType));
} }
isAdd = true; isAdd = true;
} }
@@ -139,15 +140,15 @@ public class SourceRender extends Renderer {
} }
Optional<Dimension> dimensionOptional = getDimensionByName(dimension, datasource); Optional<Dimension> dimensionOptional = getDimensionByName(dimension, datasource);
if (dimensionOptional.isPresent()) { if (dimensionOptional.isPresent()) {
dataSet.getMeasure().add(DimensionNode.buildArray(dimensionOptional.get(), scope)); dataSet.getMeasure().add(DimensionNode.buildArray(dimensionOptional.get(), scope, engineType));
if (dimensionOptional.get().getDataType().isArray()) { if (dimensionOptional.get().getDataType().isArray()) {
extendFields.add(dimensionOptional.get().getExpr()); extendFields.add(dimensionOptional.get().getExpr());
} }
if (nonAgg) { if (nonAgg) {
output.getMeasure().add(DimensionNode.buildName(dimensionOptional.get(), scope)); output.getMeasure().add(DimensionNode.buildName(dimensionOptional.get(), scope, engineType));
return; return;
} }
output.getDimension().add(DimensionNode.buildName(dimensionOptional.get(), scope)); output.getDimension().add(DimensionNode.buildName(dimensionOptional.get(), scope, engineType));
} }
} }
@@ -161,10 +162,10 @@ public class SourceRender extends Renderer {
private static List<SqlNode> getWhereMeasure(List<String> fields, List<String> queryMetrics, private static List<SqlNode> getWhereMeasure(List<String> fields, List<String> queryMetrics,
List<String> queryDimensions, Set<String> extendFields, DataSource datasource, SqlValidatorScope scope, List<String> queryDimensions, Set<String> extendFields, DataSource datasource, SqlValidatorScope scope,
SemanticSchema schema, SemanticSchema schema, boolean nonAgg) throws Exception {
boolean nonAgg) throws Exception {
Iterator<String> iterator = fields.iterator(); Iterator<String> iterator = fields.iterator();
List<SqlNode> whereNode = new ArrayList<>(); List<SqlNode> whereNode = new ArrayList<>();
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
while (iterator.hasNext()) { while (iterator.hasNext()) {
String cur = iterator.next(); String cur = iterator.next();
if (queryDimensions.contains(cur) || queryMetrics.contains(cur)) { if (queryDimensions.contains(cur) || queryMetrics.contains(cur)) {
@@ -179,13 +180,13 @@ public class SourceRender extends Renderer {
if (!dim.getName().equalsIgnoreCase(where)) { if (!dim.getName().equalsIgnoreCase(where)) {
continue; continue;
} }
whereNode.addAll(DimensionNode.expand(dim, scope)); whereNode.addAll(DimensionNode.expand(dim, scope, engineType));
isAdd = true; isAdd = true;
} }
} }
Optional<Identify> identify = getIdentifyByName(where, datasource); Optional<Identify> identify = getIdentifyByName(where, datasource);
if (identify.isPresent()) { if (identify.isPresent()) {
whereNode.add(IdentifyNode.build(identify.get(), scope)); whereNode.add(IdentifyNode.build(identify.get(), scope, engineType));
isAdd = true; isAdd = true;
} }
if (isAdd) { if (isAdd) {
@@ -193,7 +194,7 @@ public class SourceRender extends Renderer {
} }
Optional<Dimension> dimensionOptional = getDimensionByName(where, datasource); Optional<Dimension> dimensionOptional = getDimensionByName(where, datasource);
if (dimensionOptional.isPresent()) { if (dimensionOptional.isPresent()) {
whereNode.add(DimensionNode.buildArray(dimensionOptional.get(), scope)); whereNode.add(DimensionNode.buildArray(dimensionOptional.get(), scope, engineType));
if (dimensionOptional.get().getDataType().isArray()) { if (dimensionOptional.get().getDataType().isArray()) {
extendFields.add(dimensionOptional.get().getExpr()); extendFields.add(dimensionOptional.get().getExpr());
} }
@@ -317,12 +318,13 @@ public class SourceRender extends Renderer {
} }
public void render(MetricQueryReq metricQueryReq, List<DataSource> dataSources, SqlValidatorScope scope, public void render(MetricQueryReq metricQueryReq, List<DataSource> dataSources, SqlValidatorScope scope,
SemanticSchema schema, boolean nonAgg) throws Exception { SemanticSchema schema, boolean nonAgg) throws Exception {
String queryWhere = metricQueryReq.getWhere(); String queryWhere = metricQueryReq.getWhere();
Set<String> whereFields = new HashSet<>(); Set<String> whereFields = new HashSet<>();
List<String> fieldWhere = new ArrayList<>(); List<String> fieldWhere = new ArrayList<>();
EngineType engineType = EngineType.fromString(schema.getSemanticModel().getDatabase().getType());
if (queryWhere != null && !queryWhere.isEmpty()) { if (queryWhere != null && !queryWhere.isEmpty()) {
SqlNode sqlNode = SemanticNode.parse(queryWhere, scope); SqlNode sqlNode = SemanticNode.parse(queryWhere, scope, engineType);
FilterNode.getFilterField(sqlNode, whereFields); FilterNode.getFilterField(sqlNode, whereFields);
fieldWhere = whereFields.stream().collect(Collectors.toList()); fieldWhere = whereFields.stream().collect(Collectors.toList());
} }

View File

@@ -109,7 +109,7 @@ public class CalculateAggConverter implements HeadlessConverter {
ParseSqlReq sqlCommend = queryStatement.getParseSqlReq(); ParseSqlReq sqlCommend = queryStatement.getParseSqlReq();
Database database = queryStatement.getSemanticModel().getDatabase(); Database database = queryStatement.getSemanticModel().getDatabase();
ParseSqlReq parseSqlReq = generateSqlCommend(queryStatement, ParseSqlReq parseSqlReq = generateSqlCommend(queryStatement,
EngineType.valueOf(database.getType().toUpperCase()), database.getVersion()); EngineType.fromString(database.getType().toUpperCase()), database.getVersion());
sqlCommend.setSql(parseSqlReq.getSql()); sqlCommend.setSql(parseSqlReq.getSql());
sqlCommend.setTables(parseSqlReq.getTables()); sqlCommend.setTables(parseSqlReq.getTables());
sqlCommend.setRootPath(parseSqlReq.getRootPath()); sqlCommend.setRootPath(parseSqlReq.getRootPath());

View File

@@ -0,0 +1,48 @@
package com.tencent.supersonic.headless.core.utils;
import com.tencent.supersonic.headless.api.enums.EngineType;
import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSqlDialect;

import java.util.EnumMap;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

import org.apache.calcite.avatica.util.Casing;
import org.apache.calcite.sql.SqlDialect;
import org.apache.calcite.sql.SqlDialect.Context;
import org.apache.calcite.sql.SqlDialect.DatabaseProduct;
/**
 * Maps a storage {@link EngineType} to the {@link SemanticSqlDialect} used when unparsing
 * Calcite SQL for that engine. Engines without a dedicated entry fall back to a shared
 * default dialect built from {@link #DEFAULT_CONTEXT}.
 */
public class SqlDialectFactory {

    /**
     * Quoting/casing rules shared by most supported engines: single-quoted literals,
     * backtick-quoted identifiers, case-insensitive with casing preserved.
     */
    public static final Context DEFAULT_CONTEXT = SqlDialect.EMPTY_CONTEXT
            .withDatabaseProduct(DatabaseProduct.BIG_QUERY)
            .withLiteralQuoteString("'")
            .withLiteralEscapedQuoteString("''")
            .withIdentifierQuoteString("`")
            .withUnquotedCasing(Casing.UNCHANGED)
            .withQuotedCasing(Casing.UNCHANGED)
            .withCaseSensitive(false);

    /**
     * PostgreSQL context: identical to {@link #DEFAULT_CONTEXT} except it keeps Calcite's
     * default identifier quote string (backticks are not PostgreSQL identifier quotes).
     * NOTE(review): the database product is still {@code BIG_QUERY} here — presumably only
     * the quoting behavior matters; confirm this is intended for PostgreSQL.
     */
    public static final Context POSTGRESQL_CONTEXT = SqlDialect.EMPTY_CONTEXT
            .withDatabaseProduct(DatabaseProduct.BIG_QUERY)
            .withLiteralQuoteString("'")
            .withLiteralEscapedQuoteString("''")
            .withUnquotedCasing(Casing.UNCHANGED)
            .withQuotedCasing(Casing.UNCHANGED)
            .withCaseSensitive(false);

    // Calcite SqlDialect instances are immutable, so one shared fallback instance is enough
    // (avoids allocating a new dialect on every cache miss as the previous version did).
    private static final SemanticSqlDialect DEFAULT_DIALECT = new SemanticSqlDialect(DEFAULT_CONTEXT);

    // Per-engine registry; populated once at class load and read-only afterwards.
    // EnumMap is the idiomatic, allocation-light map for enum keys.
    private static final Map<EngineType, SemanticSqlDialect> SQL_DIALECT_MAP =
            new EnumMap<>(EngineType.class);

    static {
        SQL_DIALECT_MAP.put(EngineType.CLICKHOUSE, DEFAULT_DIALECT);
        SQL_DIALECT_MAP.put(EngineType.MYSQL, DEFAULT_DIALECT);
        SQL_DIALECT_MAP.put(EngineType.H2, DEFAULT_DIALECT);
        SQL_DIALECT_MAP.put(EngineType.POSTGRESQL, new SemanticSqlDialect(POSTGRESQL_CONTEXT));
    }

    private SqlDialectFactory() {
        // static utility class; no instances
    }

    /**
     * Returns the dialect registered for the given engine, or the shared default dialect
     * when the engine has no dedicated entry or {@code engineType} is {@code null}.
     *
     * @param engineType target storage engine; may be {@code null}
     * @return a non-null dialect for SQL generation
     */
    public static SemanticSqlDialect getSqlDialect(EngineType engineType) {
        if (Objects.isNull(engineType)) {
            return DEFAULT_DIALECT;
        }
        return SQL_DIALECT_MAP.getOrDefault(engineType, DEFAULT_DIALECT);
    }
}

View File

@@ -14,17 +14,11 @@ public class PostgresqlParametersBuilder implements DbParametersBuilder {
public List<DatabaseParameter> build() { public List<DatabaseParameter> build() {
List<DatabaseParameter> databaseParameters = new ArrayList<>(); List<DatabaseParameter> databaseParameters = new ArrayList<>();
DatabaseParameter host = new DatabaseParameter(); DatabaseParameter host = new DatabaseParameter();
host.setComment("host"); host.setComment("链接");
host.setName("host"); host.setName("url");
host.setPlaceholder("请输入host"); host.setPlaceholder("请输入链接");
databaseParameters.add(host); databaseParameters.add(host);
DatabaseParameter port = new DatabaseParameter();
port.setComment("port");
port.setName("port");
port.setPlaceholder("请输入端口号");
databaseParameters.add(port);
DatabaseParameter userName = new DatabaseParameter(); DatabaseParameter userName = new DatabaseParameter();
userName.setComment("用户名"); userName.setComment("用户名");
userName.setName("username"); userName.setName("username");

View File

@@ -60,7 +60,6 @@ public class QueryController {
User user = UserHolder.findUser(request, response); User user = UserHolder.findUser(request, response);
QuerySqlReq querySqlReq = queryStructReq.convert(queryStructReq); QuerySqlReq querySqlReq = queryStructReq.convert(queryStructReq);
return queryService.queryBySql(querySqlReq, user); return queryService.queryBySql(querySqlReq, user);
//return queryService.queryByStructWithAuth(queryStructReq, user);
} }
@PostMapping("/queryMetricDataById") @PostMapping("/queryMetricDataById")

View File

@@ -21,7 +21,7 @@ import java.util.List;
public interface QueryService { public interface QueryService {
Object queryBySql(QuerySqlReq querySqlCmd, User user) throws Exception; SemanticQueryResp queryBySql(QuerySqlReq querySqlCmd, User user) throws Exception;
SemanticQueryResp queryByStruct(QueryStructReq queryStructCmd, User user) throws Exception; SemanticQueryResp queryByStruct(QueryStructReq queryStructCmd, User user) throws Exception;

View File

@@ -44,7 +44,6 @@ import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.server.annotation.S2SQLDataPermission; import com.tencent.supersonic.headless.server.annotation.S2SQLDataPermission;
import com.tencent.supersonic.headless.server.annotation.StructDataPermission; import com.tencent.supersonic.headless.server.annotation.StructDataPermission;
import com.tencent.supersonic.headless.server.aspect.ApiHeaderCheckAspect; import com.tencent.supersonic.headless.server.aspect.ApiHeaderCheckAspect;
import com.tencent.supersonic.headless.server.cache.CacheManager;
import com.tencent.supersonic.headless.server.cache.QueryCache; import com.tencent.supersonic.headless.server.cache.QueryCache;
import com.tencent.supersonic.headless.server.manager.SemanticSchemaManager; import com.tencent.supersonic.headless.server.manager.SemanticSchemaManager;
import com.tencent.supersonic.headless.server.pojo.DimensionFilter; import com.tencent.supersonic.headless.server.pojo.DimensionFilter;
@@ -67,7 +66,6 @@ import javax.servlet.http.HttpServletRequest;
import lombok.SneakyThrows; import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.CollectionUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
@@ -79,17 +77,11 @@ public class QueryServiceImpl implements QueryService {
CacheBuilder.newBuilder().expireAfterWrite(1, TimeUnit.DAYS).build(); CacheBuilder.newBuilder().expireAfterWrite(1, TimeUnit.DAYS).build();
private final StatUtils statUtils; private final StatUtils statUtils;
private final CacheManager cacheManager;
private final QueryUtils queryUtils; private final QueryUtils queryUtils;
private final QueryReqConverter queryReqConverter; private final QueryReqConverter queryReqConverter;
private final Catalog catalog; private final Catalog catalog;
private final AppService appService; private final AppService appService;
@Value("${query.cache.enable:true}")
private Boolean cacheEnable;
private final QueryCache queryCache; private final QueryCache queryCache;
private final SemanticSchemaManager semanticSchemaManager; private final SemanticSchemaManager semanticSchemaManager;
private final QueryParser queryParser; private final QueryParser queryParser;
@@ -98,7 +90,6 @@ public class QueryServiceImpl implements QueryService {
public QueryServiceImpl( public QueryServiceImpl(
StatUtils statUtils, StatUtils statUtils,
CacheManager cacheManager,
QueryUtils queryUtils, QueryUtils queryUtils,
QueryReqConverter queryReqConverter, QueryReqConverter queryReqConverter,
Catalog catalog, Catalog catalog,
@@ -108,7 +99,6 @@ public class QueryServiceImpl implements QueryService {
QueryParser queryParser, QueryParser queryParser,
QueryPlanner queryPlanner) { QueryPlanner queryPlanner) {
this.statUtils = statUtils; this.statUtils = statUtils;
this.cacheManager = cacheManager;
this.queryUtils = queryUtils; this.queryUtils = queryUtils;
this.queryReqConverter = queryReqConverter; this.queryReqConverter = queryReqConverter;
this.catalog = catalog; this.catalog = catalog;
@@ -122,10 +112,7 @@ public class QueryServiceImpl implements QueryService {
@Override @Override
@S2SQLDataPermission @S2SQLDataPermission
@SneakyThrows @SneakyThrows
public Object queryBySql(QuerySqlReq querySQLReq, User user) { public SemanticQueryResp queryBySql(QuerySqlReq querySQLReq, User user) {
statUtils.initStatInfo(querySQLReq, user);
QueryStatement queryStatement = new QueryStatement();
SemanticQueryResp results = null;
TaskStatusEnum state = TaskStatusEnum.SUCCESS; TaskStatusEnum state = TaskStatusEnum.SUCCESS;
try { try {
//1.initStatInfo //1.initStatInfo
@@ -137,23 +124,25 @@ public class QueryServiceImpl implements QueryService {
} }
StatUtils.get().setUseResultCache(false); StatUtils.get().setUseResultCache(false);
//3 query from db //3 query from db
queryStatement = convertToQueryStatement(querySQLReq, user); QueryStatement queryStatement = convertToQueryStatement(querySQLReq, user);
log.info("queryStatement:{}", queryStatement); log.info("queryStatement:{}", queryStatement);
results = query(queryStatement); SemanticQueryResp result = query(queryStatement);
//4 reset cache and set stateInfo //4 reset cache and set stateInfo
Boolean setCacheSuccess = queryCache.put(querySQLReq, results); Boolean setCacheSuccess = queryCache.put(querySQLReq, result);
if (setCacheSuccess) { if (setCacheSuccess) {
// if semanticQueryResp is not null, update cache data // if semanticQueryResp is not null, update cache data
statUtils.updateResultCacheKey(queryCache.getCacheKey(querySQLReq)); statUtils.updateResultCacheKey(queryCache.getCacheKey(querySQLReq));
} }
if (Objects.isNull(results)) { if (Objects.isNull(result)) {
state = TaskStatusEnum.ERROR; state = TaskStatusEnum.ERROR;
} }
return result;
} catch (Exception e) { } catch (Exception e) {
log.info("convertToQueryStatement has a exception:", e); log.info("convertToQueryStatement has a exception:", e);
throw e;
} finally {
statUtils.statInfo2DbAsync(state);
} }
statUtils.statInfo2DbAsync(state);
return results;
} }
public SemanticQueryResp queryByQueryStatement(QueryStatement queryStatement) { public SemanticQueryResp queryByQueryStatement(QueryStatement queryStatement) {
@@ -186,7 +175,6 @@ public class QueryServiceImpl implements QueryService {
@Override @Override
public SemanticQueryResp queryByStruct(QueryStructReq queryStructReq, User user) throws Exception { public SemanticQueryResp queryByStruct(QueryStructReq queryStructReq, User user) throws Exception {
SemanticQueryResp semanticQueryResp = null;
TaskStatusEnum state = TaskStatusEnum.SUCCESS; TaskStatusEnum state = TaskStatusEnum.SUCCESS;
log.info("[queryStructReq:{}]", queryStructReq); log.info("[queryStructReq:{}]", queryStructReq);
try { try {
@@ -200,17 +188,17 @@ public class QueryServiceImpl implements QueryService {
StatUtils.get().setUseResultCache(false); StatUtils.get().setUseResultCache(false);
//3 query //3 query
QueryStatement queryStatement = buildQueryStatement(queryStructReq); QueryStatement queryStatement = buildQueryStatement(queryStructReq);
semanticQueryResp = query(queryStatement); SemanticQueryResp result = query(queryStatement);
//4 reset cache and set stateInfo //4 reset cache and set stateInfo
Boolean setCacheSuccess = queryCache.put(queryStructReq, semanticQueryResp); Boolean setCacheSuccess = queryCache.put(queryStructReq, result);
if (setCacheSuccess) { if (setCacheSuccess) {
// if semanticQueryResp is not null, update cache data // if result is not null, update cache data
statUtils.updateResultCacheKey(queryCache.getCacheKey(queryStructReq)); statUtils.updateResultCacheKey(queryCache.getCacheKey(queryStructReq));
} }
if (Objects.isNull(semanticQueryResp)) { if (Objects.isNull(result)) {
state = TaskStatusEnum.ERROR; state = TaskStatusEnum.ERROR;
} }
return semanticQueryResp; return result;
} catch (Exception e) { } catch (Exception e) {
log.error("exception in queryByStruct, e: ", e); log.error("exception in queryByStruct, e: ", e);
state = TaskStatusEnum.ERROR; state = TaskStatusEnum.ERROR;
@@ -447,28 +435,6 @@ public class QueryServiceImpl implements QueryService {
return ExplainResp.builder().sql(sql).build(); return ExplainResp.builder().sql(sql).build();
} }
private boolean isCache(QueryMultiStructReq queryStructReq) {
if (!cacheEnable) {
return false;
}
if (!CollectionUtils.isEmpty(queryStructReq.getQueryStructReqs())
&& queryStructReq.getQueryStructReqs().get(0).getCacheInfo() != null) {
return queryStructReq.getQueryStructReqs().get(0).getCacheInfo().getCache();
}
return false;
}
private SemanticQueryResp queryByCache(String key, Object queryCmd) {
Object resultObject = cacheManager.get(key);
if (Objects.nonNull(resultObject)) {
log.info("queryByStructWithCache, key:{}, queryCmd:{}", key, queryCmd.toString());
statUtils.updateResultCacheKey(key);
return (SemanticQueryResp) resultObject;
}
return null;
}
private QuerySqlReq buildQuerySqlReq(QueryDimValueReq queryDimValueReq) { private QuerySqlReq buildQuerySqlReq(QueryDimValueReq queryDimValueReq) {
QuerySqlReq querySQLReq = new QuerySqlReq(); QuerySqlReq querySQLReq = new QuerySqlReq();
List<ModelResp> modelResps = catalog.getModelList(Lists.newArrayList(queryDimValueReq.getModelId())); List<ModelResp> modelResps = catalog.getModelList(Lists.newArrayList(queryDimValueReq.getModelId()));

View File

@@ -115,7 +115,7 @@ public class QueryReqConverter {
result.setRootPath(querySQLReq.getModelIdStr()); result.setRootPath(querySQLReq.getModelIdStr());
result.setTables(tables); result.setTables(tables);
DatabaseResp database = catalog.getDatabaseByModelId(querySQLReq.getModelIds().get(0)); DatabaseResp database = catalog.getDatabaseByModelId(querySQLReq.getModelIds().get(0));
if (!sqlGenerateUtils.isSupportWith(EngineType.valueOf(database.getType().toUpperCase()), if (!sqlGenerateUtils.isSupportWith(EngineType.fromString(database.getType().toUpperCase()),
database.getVersion())) { database.getVersion())) {
result.setSupportWith(false); result.setSupportWith(false);
result.setWithAlias(false); result.setWithAlias(false);

View File

@@ -2,6 +2,7 @@ package com.tencent.supersonic.headless.server.calcite;
import com.tencent.supersonic.common.pojo.ColumnOrder; import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.headless.api.enums.AggOption; import com.tencent.supersonic.headless.api.enums.AggOption;
import com.tencent.supersonic.headless.api.enums.EngineType;
import com.tencent.supersonic.headless.api.request.MetricQueryReq; import com.tencent.supersonic.headless.api.request.MetricQueryReq;
import com.tencent.supersonic.headless.api.response.SqlParserResp; import com.tencent.supersonic.headless.api.response.SqlParserResp;
import com.tencent.supersonic.headless.core.parser.calcite.planner.AggPlanner; import com.tencent.supersonic.headless.core.parser.calcite.planner.AggPlanner;
@@ -43,7 +44,8 @@ class HeadlessParserServiceTest {
QueryStatement queryStatement = new QueryStatement(); QueryStatement queryStatement = new QueryStatement();
queryStatement.setMetricReq(metricCommand); queryStatement.setMetricReq(metricCommand);
aggBuilder.explain(queryStatement, AggOption.getAggregation(!isAgg)); aggBuilder.explain(queryStatement, AggOption.getAggregation(!isAgg));
sqlParser.setSql(aggBuilder.getSql()); EngineType engineType = EngineType.fromString(semanticSchema.getSemanticModel().getDatabase().getType());
sqlParser.setSql(aggBuilder.getSql(engineType));
sqlParser.setSourceId(aggBuilder.getSourceId()); sqlParser.setSourceId(aggBuilder.getSourceId());
} catch (Exception e) { } catch (Exception e) {
sqlParser.setErrMsg(e.getMessage()); sqlParser.setErrMsg(e.getMessage());