Mirror of https://github.com/tencentmusic/supersonic.git
(improvement)(headless) Refactor HeadlessModel-related code, remove modelSchemaResps. (#647)
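The hunks below rename the calcite-layer model classes (HeadlessModel -> SemanticModel, HeadlessSchema -> SemanticSchema) and drop the modelSchemaResps field from the model, moving dimension default values onto the Dimension objects themselves. As an orientation aid only, here is a minimal sketch of the resulting model POJO as implied by the hunks; it is not the actual source file, and fields not visible in the diff are omitted:

    import com.tencent.supersonic.common.pojo.ItemDateResp;
    import com.tencent.supersonic.headless.api.response.DatabaseResp;
    import java.util.ArrayList;
    import java.util.Collection;
    import java.util.List;
    import java.util.Map;
    import java.util.stream.Collectors;
    import lombok.Data;

    @Data
    public class SemanticModel {
        private String rootPath;
        private DatabaseResp databaseResp;
        private ItemDateResp dataDate;
        private List<Metric> metrics = new ArrayList<>();
        private Map<String, List<Dimension>> dimensionMap;   // dimensions keyed by datasource name
        private Map<String, DataSource> datasourceMap;
        private List<JoinRelation> joinRelations;
        // modelSchemaResps is removed; default values now live on Dimension itself

        public List<Dimension> getDimensions() {
            return dimensionMap.values().stream().flatMap(Collection::stream).collect(Collectors.toList());
        }
    }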
@@ -30,7 +30,7 @@ public class JdbcExecutor implements QueryExecutor {
 return null;
 }
 log.info("query SQL: {}", queryStatement.getSql());
-DatabaseResp databaseResp = queryStatement.getHeadlessModel().getDatabaseResp();
+DatabaseResp databaseResp = queryStatement.getSemanticModel().getDatabaseResp();
 log.info("database info:{}", databaseResp);
 QueryResultWithSchemaResp queryResultWithColumns = new QueryResultWithSchemaResp();
 SqlUtils sqlUtils = this.sqlUtils.init(databaseResp);

@@ -128,7 +128,7 @@ public class QueryParser {
 tableSql.setMinMaxTime(queryStatement.getMinMaxTime());
 tableSql.setEnableOptimize(queryStatement.getEnableOptimize());
 tableSql.setModelIds(queryStatement.getModelIds());
-tableSql.setHeadlessModel(queryStatement.getHeadlessModel());
+tableSql.setSemanticModel(queryStatement.getSemanticModel());
 if (isSingleMetricTable) {
 tableSql.setViewSql(parseSqlReq.getSql());
 tableSql.setViewAlias(metricTable.getAlias());

@@ -4,8 +4,8 @@ import com.tencent.supersonic.headless.api.enums.AggOption;
 import com.tencent.supersonic.headless.api.request.MetricQueryReq;
 import com.tencent.supersonic.headless.core.parser.SqlParser;
 import com.tencent.supersonic.headless.core.parser.calcite.planner.AggPlanner;
-import com.tencent.supersonic.headless.core.parser.calcite.s2sql.HeadlessModel;
-import com.tencent.supersonic.headless.core.parser.calcite.schema.HeadlessSchema;
+import com.tencent.supersonic.headless.core.parser.calcite.s2sql.SemanticModel;
+import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSchema;
 import com.tencent.supersonic.headless.core.parser.calcite.schema.RuntimeOptions;
 import com.tencent.supersonic.headless.core.pojo.QueryStatement;
 import java.util.Objects;

@@ -22,14 +22,14 @@ public class CalciteSqlParser implements SqlParser {
 @Override
 public QueryStatement explain(QueryStatement queryStatement, AggOption isAgg) throws Exception {
 MetricQueryReq metricReq = queryStatement.getMetricReq();
-HeadlessModel headlessModel = queryStatement.getHeadlessModel();
-if (headlessModel == null) {
+SemanticModel semanticModel = queryStatement.getSemanticModel();
+if (semanticModel == null) {
 queryStatement.setErrMsg("semanticSchema not found");
 return queryStatement;
 }
 queryStatement.setMetricReq(metricReq);
-HeadlessSchema headlessSchema = getSemanticSchema(headlessModel, queryStatement);
-AggPlanner aggBuilder = new AggPlanner(headlessSchema);
+SemanticSchema semanticSchema = getSemanticSchema(semanticModel, queryStatement);
+AggPlanner aggBuilder = new AggPlanner(semanticSchema);
 aggBuilder.explain(queryStatement, isAgg);
 queryStatement.setSql(aggBuilder.getSql());
 queryStatement.setSourceId(aggBuilder.getSourceId());

@@ -46,15 +46,15 @@ public class CalciteSqlParser implements SqlParser {
 return queryStatement;
 }

-private HeadlessSchema getSemanticSchema(HeadlessModel headlessModel, QueryStatement queryStatement) {
-HeadlessSchema headlessSchema = HeadlessSchema.newBuilder(headlessModel.getRootPath()).build();
-headlessSchema.setDatasource(headlessModel.getDatasourceMap());
-headlessSchema.setDimension(headlessModel.getDimensionMap());
-headlessSchema.setMetric(headlessModel.getMetrics());
-headlessSchema.setJoinRelations(headlessModel.getJoinRelations());
-headlessSchema.setRuntimeOptions(RuntimeOptions.builder().minMaxTime(queryStatement.getMinMaxTime())
+private SemanticSchema getSemanticSchema(SemanticModel semanticModel, QueryStatement queryStatement) {
+SemanticSchema semanticSchema = SemanticSchema.newBuilder(semanticModel.getRootPath()).build();
+semanticSchema.setDatasource(semanticModel.getDatasourceMap());
+semanticSchema.setDimension(semanticModel.getDimensionMap());
+semanticSchema.setMetric(semanticModel.getMetrics());
+semanticSchema.setJoinRelations(semanticModel.getJoinRelations());
+semanticSchema.setRuntimeOptions(RuntimeOptions.builder().minMaxTime(queryStatement.getMinMaxTime())
 .enableOptimize(queryStatement.getEnableOptimize()).build());
-return headlessSchema;
+return semanticSchema;
 }

 private String getSqlByView(String sql, String viewSql, String viewAlias) {

@@ -6,7 +6,7 @@ import com.tencent.supersonic.headless.api.request.MetricQueryReq;
 import com.tencent.supersonic.headless.core.parser.calcite.Configuration;
 import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Constants;
 import com.tencent.supersonic.headless.core.parser.calcite.s2sql.DataSource;
-import com.tencent.supersonic.headless.core.parser.calcite.schema.HeadlessSchema;
+import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSchema;
 import com.tencent.supersonic.headless.core.parser.calcite.schema.SchemaBuilder;
 import com.tencent.supersonic.headless.core.parser.calcite.sql.Renderer;
 import com.tencent.supersonic.headless.core.parser.calcite.sql.TableView;

@@ -34,7 +34,7 @@ import org.apache.calcite.sql.validate.SqlValidatorScope;
 public class AggPlanner implements Planner {

 private MetricQueryReq metricReq;
-private HeadlessSchema schema;
+private SemanticSchema schema;
 private SqlValidatorScope scope;
 private Stack<TableView> dataSets = new Stack<>();
 private SqlNode parserNode;

@@ -42,7 +42,7 @@ public class AggPlanner implements Planner {
 private boolean isAgg = false;
 private AggOption aggOption = AggOption.DEFAULT;

-public AggPlanner(HeadlessSchema schema) {
+public AggPlanner(SemanticSchema schema) {
 this.schema = schema;
 }

@@ -2,6 +2,7 @@ package com.tencent.supersonic.headless.core.parser.calcite.s2sql;


 import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticItem;
+import java.util.List;
 import lombok.Builder;
 import lombok.Data;

@@ -15,8 +16,9 @@ public class Dimension implements SemanticItem {
 private String type;
 private String expr;
 private DimensionTimeTypeParams dimensionTimeTypeParams;

 private DataType dataType = DataType.UNKNOWN;
 private String bizName;
+private List<String> defaultValues;

 @Override
 public String getName() {

@@ -2,18 +2,16 @@ package com.tencent.supersonic.headless.core.parser.calcite.s2sql;

 import com.tencent.supersonic.common.pojo.ItemDateResp;
 import com.tencent.supersonic.headless.api.response.DatabaseResp;
-import com.tencent.supersonic.headless.api.response.ModelSchemaResp;
-import lombok.Data;

 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.stream.Collectors;
+import lombok.Data;

 @Data
-public class HeadlessModel {
+public class SemanticModel {

 private String rootPath;
 private List<Metric> metrics = new ArrayList<>();

@@ -23,7 +21,6 @@ public class HeadlessModel {
 private List<JoinRelation> joinRelations;
 private ItemDateResp dataDate;
 private DatabaseResp databaseResp;
-private List<ModelSchemaResp> modelSchemaResps;

 public List<Dimension> getDimensions() {
 return dimensionMap.values().stream().flatMap(Collection::stream).collect(Collectors.toList());

@@ -27,7 +27,7 @@ public class SchemaBuilder {
 public static final String MATERIALIZATION_SYS_FIELD_DATE = "C1";
 public static final String MATERIALIZATION_SYS_FIELD_DATA = "C2";

-public static SqlValidatorScope getScope(HeadlessSchema schema) throws Exception {
+public static SqlValidatorScope getScope(SemanticSchema schema) throws Exception {
 Map<String, RelDataType> nameToTypeMap = new HashMap<>();
 CalciteSchema rootSchema = CalciteSchema.createRootSchema(true, false);
 rootSchema.add(schema.getRootPath(), schema);

@@ -6,7 +6,7 @@ import com.tencent.supersonic.headless.core.parser.calcite.s2sql.DataSource;
 import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Dimension;
 import com.tencent.supersonic.headless.core.parser.calcite.s2sql.JoinRelation;
 import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Metric;
-import com.tencent.supersonic.headless.core.parser.calcite.s2sql.HeadlessModel;
+import com.tencent.supersonic.headless.core.parser.calcite.s2sql.SemanticModel;
 import org.apache.calcite.schema.Schema;
 import org.apache.calcite.schema.SchemaVersion;
 import org.apache.calcite.schema.Table;

@@ -17,19 +17,19 @@ import java.util.List;
 import java.util.Map;


-public class HeadlessSchema extends AbstractSchema {
+public class SemanticSchema extends AbstractSchema {

 private final String rootPath;
 private final Map<String, Table> tableMap;

-private HeadlessModel headlessModel = new HeadlessModel();
+private SemanticModel semanticModel = new SemanticModel();

 private List<JoinRelation> joinRelations;

 private RuntimeOptions runtimeOptions;


-private HeadlessSchema(String rootPath, Map<String, Table> tableMap) {
+private SemanticSchema(String rootPath, Map<String, Table> tableMap) {
 this.rootPath = rootPath;
 this.tableMap = tableMap;
 }

@@ -42,12 +42,12 @@ public class HeadlessSchema extends AbstractSchema {
 return rootPath;
 }

-public void setSemanticModel(HeadlessModel headlessModel) {
-this.headlessModel = headlessModel;
+public void setSemanticModel(SemanticModel semanticModel) {
+this.semanticModel = semanticModel;
 }

-public HeadlessModel getSemanticModel() {
-return headlessModel;
+public SemanticModel getSemanticModel() {
+return semanticModel;
 }

 @Override

@@ -61,35 +61,35 @@ public class HeadlessSchema extends AbstractSchema {
 }

 public Map<String, DataSource> getDatasource() {
-return headlessModel.getDatasourceMap();
+return semanticModel.getDatasourceMap();
 }

 public void setDatasource(Map<String, DataSource> datasource) {
-headlessModel.setDatasourceMap(datasource);
+semanticModel.setDatasourceMap(datasource);
 }

 public Map<String, List<Dimension>> getDimension() {
-return headlessModel.getDimensionMap();
+return semanticModel.getDimensionMap();
 }

 public void setDimension(Map<String, List<Dimension>> dimensions) {
-headlessModel.setDimensionMap(dimensions);
+semanticModel.setDimensionMap(dimensions);
 }

 public List<Metric> getMetrics() {
-return headlessModel.getMetrics();
+return semanticModel.getMetrics();
 }

 public void setMetric(List<Metric> metric) {
-headlessModel.setMetrics(metric);
+semanticModel.setMetrics(metric);
 }

 public void setMaterializationList(List<Materialization> materializationList) {
-headlessModel.setMaterializationList(materializationList);
+semanticModel.setMaterializationList(materializationList);
 }

 public List<Materialization> getMaterializationList() {
-return headlessModel.getMaterializationList();
+return semanticModel.getMaterializationList();
 }

 public void setJoinRelations(List<JoinRelation> joinRelations) {

@@ -131,8 +131,8 @@ public class HeadlessSchema extends AbstractSchema {
 return this;
 }

-public HeadlessSchema build() {
-return new HeadlessSchema(rootPath, tableMap);
+public SemanticSchema build() {
+return new SemanticSchema(rootPath, tableMap);
 }
 }

@@ -1,10 +1,10 @@
 package com.tencent.supersonic.headless.core.parser.calcite.sql;


-import com.tencent.supersonic.headless.core.parser.calcite.schema.HeadlessSchema;
+import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSchema;


 public interface Optimization {

-public void visit(HeadlessSchema headlessSchema);
+public void visit(SemanticSchema semanticSchema);
 }

@@ -9,7 +9,7 @@ import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Dimension;
 import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Identify;
 import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Measure;
 import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Metric;
-import com.tencent.supersonic.headless.core.parser.calcite.schema.HeadlessSchema;
+import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSchema;
 import com.tencent.supersonic.headless.core.parser.calcite.s2sql.DataSource;
 import java.util.HashSet;
 import java.util.List;

@@ -36,7 +36,7 @@ public abstract class Renderer {
 return datasource.getMeasures().stream().filter(mm -> mm.getName().equalsIgnoreCase(name)).findFirst();
 }

-public static Optional<Metric> getMetricByName(String name, HeadlessSchema schema) {
+public static Optional<Metric> getMetricByName(String name, SemanticSchema schema) {
 Optional<Metric> metric = schema.getMetrics().stream().filter(m -> m.getName().equalsIgnoreCase(name))
 .findFirst();
 return metric;

@@ -47,7 +47,7 @@ public abstract class Renderer {
 }

 public static MetricNode buildMetricNode(String metric, DataSource datasource, SqlValidatorScope scope,
-HeadlessSchema schema, boolean nonAgg, String alias) throws Exception {
+SemanticSchema schema, boolean nonAgg, String alias) throws Exception {
 Optional<Metric> metricOpt = getMetricByName(metric, schema);
 MetricNode metricNode = new MetricNode();
 if (metricOpt.isPresent()) {

@@ -105,5 +105,5 @@ public abstract class Renderer {
 }

 public abstract void render(MetricQueryReq metricCommand, List<DataSource> dataSources, SqlValidatorScope scope,
-HeadlessSchema schema, boolean nonAgg) throws Exception;
+SemanticSchema schema, boolean nonAgg) throws Exception;
 }

@@ -11,7 +11,7 @@ import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Dimension;
 import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Identify;
 import com.tencent.supersonic.headless.core.parser.calcite.s2sql.JoinRelation;
 import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Measure;
-import com.tencent.supersonic.headless.core.parser.calcite.schema.HeadlessSchema;
+import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSchema;
 import com.tencent.supersonic.headless.core.parser.calcite.schema.SchemaBuilder;
 import com.tencent.supersonic.headless.core.parser.calcite.sql.node.extend.LateralViewExplodeNode;
 import java.util.ArrayList;

@@ -149,7 +149,7 @@ public class DataSourceNode extends SemanticNode {
 return dataSourceList.stream().map(d -> d.getName()).collect(Collectors.joining("_"));
 }

-public static void getQueryDimensionMeasure(HeadlessSchema schema, MetricQueryReq metricCommand,
+public static void getQueryDimensionMeasure(SemanticSchema schema, MetricQueryReq metricCommand,
 Set<String> queryDimension, List<String> measures) {
 queryDimension.addAll(metricCommand.getDimensions().stream()
 .map(d -> d.contains(Constants.DIMENSION_IDENTIFY) ? d.split(Constants.DIMENSION_IDENTIFY)[1] : d)

@@ -161,7 +161,7 @@ public class DataSourceNode extends SemanticNode {

 }

-public static void mergeQueryFilterDimensionMeasure(HeadlessSchema schema, MetricQueryReq metricCommand,
+public static void mergeQueryFilterDimensionMeasure(SemanticSchema schema, MetricQueryReq metricCommand,
 Set<String> queryDimension, List<String> measures,
 SqlValidatorScope scope) throws Exception {
 if (Objects.nonNull(metricCommand.getWhere()) && !metricCommand.getWhere().isEmpty()) {

@@ -184,7 +184,7 @@ public class DataSourceNode extends SemanticNode {
 }
 }

-public static List<DataSource> getMatchDataSources(SqlValidatorScope scope, HeadlessSchema schema,
+public static List<DataSource> getMatchDataSources(SqlValidatorScope scope, SemanticSchema schema,
 MetricQueryReq metricCommand) throws Exception {
 List<DataSource> dataSources = new ArrayList<>();

@@ -283,7 +283,7 @@ public class DataSourceNode extends SemanticNode {
 }

 private static List<DataSource> getLinkDataSourcesByJoinRelation(Set<String> queryDimension, List<String> measures,
-DataSource baseDataSource, HeadlessSchema schema) {
+DataSource baseDataSource, SemanticSchema schema) {
 Set<String> linkDataSourceName = new HashSet<>();
 List<DataSource> linkDataSources = new ArrayList<>();
 Set<String> before = new HashSet<>();

@@ -370,7 +370,7 @@ public class DataSourceNode extends SemanticNode {
 Set<String> queryDimension,
 List<String> measures,
 DataSource baseDataSource,
-HeadlessSchema schema) {
+SemanticSchema schema) {
 Set<String> linkDataSourceName = new HashSet<>();
 List<DataSource> linkDataSources = new ArrayList<>();
 for (Map.Entry<String, DataSource> entry : schema.getDatasource().entrySet()) {

@@ -3,7 +3,7 @@ package com.tencent.supersonic.headless.core.parser.calcite.sql.node;

 import com.tencent.supersonic.headless.core.parser.calcite.Configuration;
 import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Constants;
-import com.tencent.supersonic.headless.core.parser.calcite.schema.HeadlessSchema;
+import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSchema;
 import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSqlDialect;
 import com.tencent.supersonic.headless.core.parser.calcite.sql.optimizer.FilterToGroupScanRule;
 import java.util.ArrayList;

@@ -335,7 +335,7 @@ public abstract class SemanticNode {
 return parseInfo;
 }

-public static SqlNode optimize(SqlValidatorScope scope, HeadlessSchema schema, SqlNode sqlNode) {
+public static SqlNode optimize(SqlValidatorScope scope, SemanticSchema schema, SqlNode sqlNode) {
 try {
 HepProgramBuilder hepProgramBuilder = new HepProgramBuilder();

@@ -1,6 +1,6 @@
 package com.tencent.supersonic.headless.core.parser.calcite.sql.optimizer;

-import com.tencent.supersonic.headless.core.parser.calcite.schema.HeadlessSchema;
+import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSchema;

 import java.util.ArrayList;
 import java.util.Iterator;

@@ -44,23 +44,23 @@ public class FilterToGroupScanRule extends RelRule<Config>

 }).as(FilterTableScanRule.Config.class);

-private HeadlessSchema headlessSchema;
+private SemanticSchema semanticSchema;

-public FilterToGroupScanRule(FilterTableScanRule.Config config, HeadlessSchema headlessSchema) {
+public FilterToGroupScanRule(FilterTableScanRule.Config config, SemanticSchema semanticSchema) {
 super(config);
-this.headlessSchema = headlessSchema;
+this.semanticSchema = semanticSchema;
 }

 public void onMatch(RelOptRuleCall call) {
 if (call.rels.length != 4) {
 return;
 }
-if (Objects.isNull(headlessSchema.getRuntimeOptions()) || Objects.isNull(
-headlessSchema.getRuntimeOptions().getMinMaxTime()) || headlessSchema.getRuntimeOptions()
+if (Objects.isNull(semanticSchema.getRuntimeOptions()) || Objects.isNull(
+semanticSchema.getRuntimeOptions().getMinMaxTime()) || semanticSchema.getRuntimeOptions()
 .getMinMaxTime().getLeft().isEmpty()) {
 return;
 }
-Triple<String, String, String> minMax = headlessSchema.getRuntimeOptions().getMinMaxTime();
+Triple<String, String, String> minMax = semanticSchema.getRuntimeOptions().getMinMaxTime();
 Filter filter = (Filter) call.rel(0);
 Project project0 = (Project) call.rel(1);
 Project project1 = (Project) call.rel(3);

@@ -5,7 +5,7 @@ import com.tencent.supersonic.headless.api.request.MetricQueryReq;
 import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Constants;
 import com.tencent.supersonic.headless.core.parser.calcite.s2sql.DataSource;
 import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Metric;
-import com.tencent.supersonic.headless.core.parser.calcite.schema.HeadlessSchema;
+import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSchema;
 import com.tencent.supersonic.headless.core.parser.calcite.sql.Renderer;
 import com.tencent.supersonic.headless.core.parser.calcite.sql.TableView;
 import com.tencent.supersonic.headless.core.parser.calcite.sql.node.FilterNode;

@@ -30,7 +30,7 @@ public class FilterRender extends Renderer {

 @Override
 public void render(MetricQueryReq metricCommand, List<DataSource> dataSources, SqlValidatorScope scope,
-HeadlessSchema schema, boolean nonAgg) throws Exception {
+SemanticSchema schema, boolean nonAgg) throws Exception {
 TableView tableView = super.tableView;
 SqlNode filterNode = null;
 List<String> queryMetrics = new ArrayList<>(metricCommand.getMetrics());

@@ -8,7 +8,7 @@ import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Identify;
 import com.tencent.supersonic.headless.core.parser.calcite.s2sql.JoinRelation;
 import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Materialization;
 import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Metric;
-import com.tencent.supersonic.headless.core.parser.calcite.schema.HeadlessSchema;
+import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSchema;
 import com.tencent.supersonic.headless.core.parser.calcite.sql.Renderer;
 import com.tencent.supersonic.headless.core.parser.calcite.sql.TableView;
 import com.tencent.supersonic.headless.core.parser.calcite.sql.node.AggFunctionNode;

@@ -49,7 +49,7 @@ public class JoinRender extends Renderer {

 @Override
 public void render(MetricQueryReq metricCommand, List<DataSource> dataSources, SqlValidatorScope scope,
-HeadlessSchema schema, boolean nonAgg) throws Exception {
+SemanticSchema schema, boolean nonAgg) throws Exception {
 String queryWhere = metricCommand.getWhere();
 //dataSources = getOrderSource(dataSources);
 Set<String> whereFields = new HashSet<>();

@@ -141,7 +141,7 @@ public class JoinRender extends Renderer {

 private void doMetric(Map<String, SqlNode> innerSelect, TableView filterView, List<String> queryMetrics,
 List<String> reqMetrics, DataSource dataSource, Set<String> sourceMeasure, SqlValidatorScope scope,
-HeadlessSchema schema, boolean nonAgg) throws Exception {
+SemanticSchema schema, boolean nonAgg) throws Exception {
 String alias = Constants.JOIN_TABLE_PREFIX + dataSource.getName();
 for (String m : reqMetrics) {
 if (getMatchMetric(schema, sourceMeasure, m, queryMetrics)) {

@@ -175,7 +175,7 @@ public class JoinRender extends Renderer {

 private void doDimension(Map<String, SqlNode> innerSelect, Set<String> filterDimension, List<String> queryDimension,
 List<String> reqDimensions, DataSource dataSource, Set<String> dimension, SqlValidatorScope scope,
-HeadlessSchema schema) throws Exception {
+SemanticSchema schema) throws Exception {
 String alias = Constants.JOIN_TABLE_PREFIX + dataSource.getName();
 for (String d : reqDimensions) {
 if (getMatchDimension(schema, dimension, dataSource, d, queryDimension)) {

@@ -198,7 +198,7 @@ public class JoinRender extends Renderer {
 Collectors.toSet());
 }

-private boolean getMatchMetric(HeadlessSchema schema, Set<String> sourceMeasure, String m,
+private boolean getMatchMetric(SemanticSchema schema, Set<String> sourceMeasure, String m,
 List<String> queryMetrics) {
 Optional<Metric> metric = schema.getMetrics().stream().filter(mm -> mm.getName().equalsIgnoreCase(m))
 .findFirst();

@@ -219,7 +219,7 @@ public class JoinRender extends Renderer {
 return isAdd;
 }

-private boolean getMatchDimension(HeadlessSchema schema, Set<String> sourceDimension, DataSource dataSource,
+private boolean getMatchDimension(SemanticSchema schema, Set<String> sourceDimension, DataSource dataSource,
 String d, List<String> queryDimension) {
 String oriDimension = d;
 boolean isAdd = false;

@@ -254,7 +254,7 @@ public class JoinRender extends Renderer {

 private SqlNode buildJoin(SqlNode left, TableView leftTable, TableView tableView, Map<String, String> before,
 DataSource dataSource,
-HeadlessSchema schema, SqlValidatorScope scope)
+SemanticSchema schema, SqlValidatorScope scope)
 throws Exception {
 SqlNode condition = getCondition(leftTable, tableView, dataSource, schema, scope);
 SqlLiteral sqlLiteral = SemanticNode.getJoinSqlLiteral("");

@@ -289,7 +289,7 @@ public class JoinRender extends Renderer {
 );
 }

-private JoinRelation getMatchJoinRelation(Map<String, String> before, TableView tableView, HeadlessSchema schema) {
+private JoinRelation getMatchJoinRelation(Map<String, String> before, TableView tableView, SemanticSchema schema) {
 JoinRelation matchJoinRelation = JoinRelation.builder().build();
 if (!CollectionUtils.isEmpty(schema.getJoinRelations())) {
 for (JoinRelation joinRelation : schema.getJoinRelations()) {

@@ -333,7 +333,7 @@ public class JoinRender extends Renderer {
 return condition;
 }

-private SqlNode getCondition(TableView left, TableView right, DataSource dataSource, HeadlessSchema schema,
+private SqlNode getCondition(TableView left, TableView right, DataSource dataSource, SemanticSchema schema,
 SqlValidatorScope scope) throws Exception {

 Set<String> selectLeft = SemanticNode.getSelect(left.getTable());

@@ -413,7 +413,7 @@ public class JoinRender extends Renderer {
 }
 }

-private SqlNode getZipperCondition(TableView left, TableView right, DataSource dataSource, HeadlessSchema schema,
+private SqlNode getZipperCondition(TableView left, TableView right, DataSource dataSource, SemanticSchema schema,
 SqlValidatorScope scope) throws Exception {
 if (Materialization.TimePartType.ZIPPER.equals(left.getDataSource().getTimePartType())
 && Materialization.TimePartType.ZIPPER.equals(right.getDataSource().getTimePartType())) {

@@ -4,7 +4,7 @@ package com.tencent.supersonic.headless.core.parser.calcite.sql.render;
 import com.tencent.supersonic.headless.api.request.MetricQueryReq;
 import com.tencent.supersonic.common.pojo.ColumnOrder;
 import com.tencent.supersonic.headless.core.parser.calcite.s2sql.DataSource;
-import com.tencent.supersonic.headless.core.parser.calcite.schema.HeadlessSchema;
+import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSchema;
 import com.tencent.supersonic.headless.core.parser.calcite.sql.Renderer;
 import com.tencent.supersonic.headless.core.parser.calcite.sql.TableView;
 import com.tencent.supersonic.headless.core.parser.calcite.sql.node.SemanticNode;

@@ -25,7 +25,7 @@ public class OutputRender extends Renderer {

 @Override
 public void render(MetricQueryReq metricCommand, List<DataSource> dataSources, SqlValidatorScope scope,
-HeadlessSchema schema, boolean nonAgg) throws Exception {
+SemanticSchema schema, boolean nonAgg) throws Exception {
 TableView selectDataSet = super.tableView;
 for (String dimension : metricCommand.getDimensions()) {
 selectDataSet.getMeasure().add(SemanticNode.parse(dimension, scope));

@@ -9,7 +9,7 @@ import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Identify;
 import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Materialization;
 import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Measure;
 import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Metric;
-import com.tencent.supersonic.headless.core.parser.calcite.schema.HeadlessSchema;
+import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSchema;
 import com.tencent.supersonic.headless.core.parser.calcite.sql.Renderer;
 import com.tencent.supersonic.headless.core.parser.calcite.sql.TableView;
 import com.tencent.supersonic.headless.core.parser.calcite.sql.node.DataSourceNode;

@@ -41,7 +41,7 @@ public class SourceRender extends Renderer {
 public static TableView renderOne(String alias, List<String> fieldWheres,
 List<String> reqMetrics, List<String> reqDimensions,
 String queryWhere, DataSource datasource, SqlValidatorScope scope,
-HeadlessSchema schema, boolean nonAgg) throws Exception {
+SemanticSchema schema, boolean nonAgg) throws Exception {

 TableView dataSet = new TableView();
 TableView output = new TableView();

@@ -93,7 +93,7 @@ public class SourceRender extends Renderer {
 return output;
 }

-private static void buildDimension(String alias, String dimension, DataSource datasource, HeadlessSchema schema,
+private static void buildDimension(String alias, String dimension, DataSource datasource, SemanticSchema schema,
 boolean nonAgg, Set<String> extendFields, TableView dataSet, TableView output, SqlValidatorScope scope)
 throws Exception {
 List<Dimension> dimensionList = schema.getDimension().get(datasource.getName());

@@ -161,7 +161,7 @@ public class SourceRender extends Renderer {

 private static List<SqlNode> getWhereMeasure(List<String> fields, List<String> queryMetrics,
 List<String> queryDimensions, Set<String> extendFields, DataSource datasource, SqlValidatorScope scope,
-HeadlessSchema schema,
+SemanticSchema schema,
 boolean nonAgg) throws Exception {
 Iterator<String> iterator = fields.iterator();
 List<SqlNode> whereNode = new ArrayList<>();

@@ -205,7 +205,7 @@ public class SourceRender extends Renderer {
 private static void mergeWhere(List<String> fields, TableView dataSet, TableView outputSet,
 List<String> queryMetrics,
 List<String> queryDimensions, Set<String> extendFields, DataSource datasource, SqlValidatorScope scope,
-HeadlessSchema schema,
+SemanticSchema schema,
 boolean nonAgg) throws Exception {
 List<SqlNode> whereNode = getWhereMeasure(fields, queryMetrics, queryDimensions, extendFields, datasource,
 scope, schema,

@@ -215,7 +215,7 @@ public class SourceRender extends Renderer {
 }

 public static void whereDimMetric(List<String> fields, List<String> queryMetrics,
-List<String> queryDimensions, DataSource datasource, HeadlessSchema schema, Set<String> dimensions,
+List<String> queryDimensions, DataSource datasource, SemanticSchema schema, Set<String> dimensions,
 Set<String> metrics) {
 for (String field : fields) {
 if (queryDimensions.contains(field) || queryMetrics.contains(field)) {

@@ -229,7 +229,7 @@ public class SourceRender extends Renderer {
 }
 }

-private static void addField(String field, String oriField, DataSource datasource, HeadlessSchema schema,
+private static void addField(String field, String oriField, DataSource datasource, SemanticSchema schema,
 Set<String> dimensions,
 Set<String> metrics) {
 Optional<Dimension> dimension = datasource.getDimensions().stream()

@@ -272,7 +272,7 @@ public class SourceRender extends Renderer {
 }
 }

-public static boolean isDimension(String name, DataSource datasource, HeadlessSchema schema) {
+public static boolean isDimension(String name, DataSource datasource, SemanticSchema schema) {
 Optional<Dimension> dimension = datasource.getDimensions().stream()
 .filter(d -> d.getName().equalsIgnoreCase(name)).findFirst();
 if (dimension.isPresent()) {

@@ -317,7 +317,7 @@ public class SourceRender extends Renderer {
 }

 public void render(MetricQueryReq metricQueryReq, List<DataSource> dataSources, SqlValidatorScope scope,
-HeadlessSchema schema, boolean nonAgg) throws Exception {
+SemanticSchema schema, boolean nonAgg) throws Exception {
 String queryWhere = metricQueryReq.getWhere();
 Set<String> whereFields = new HashSet<>();
 List<String> fieldWhere = new ArrayList<>();

@@ -58,7 +58,7 @@ public class CalculateAggConverter implements HeadlessConverter {
 metricTable.setMetrics(queryStructReq.getMetrics());
 metricTable.setDimensions(queryStructReq.getGroups());
 String where = sqlGenerateUtils.generateWhere(queryStructReq,
-queryStatement.getHeadlessModel().getDataDate());
+queryStatement.getSemanticModel().getDataDate());
 log.info("in generateSqlCommand, complete where:{}", where);
 metricTable.setWhere(where);
 metricTable.setAggOption(AggOption.AGGREGATION);

@@ -109,7 +109,7 @@ public class CalculateAggConverter implements HeadlessConverter {
 @Override
 public void convert(QueryStatement queryStatement) throws Exception {
 ParseSqlReq sqlCommend = queryStatement.getParseSqlReq();
-DatabaseResp databaseResp = queryStatement.getHeadlessModel().getDatabaseResp();
+DatabaseResp databaseResp = queryStatement.getSemanticModel().getDatabaseResp();
 ParseSqlReq parseSqlReq = generateSqlCommend(queryStatement,
 EngineType.valueOf(databaseResp.getType().toUpperCase()), databaseResp.getVersion());
 sqlCommend.setSql(parseSqlReq.getSql());

@@ -145,7 +145,7 @@ public class CalculateAggConverter implements HeadlessConverter {
 metricTable.setAlias(metricTableName);
 metricTable.setMetrics(queryStructReq.getMetrics());
 metricTable.setDimensions(queryStructReq.getGroups());
-String where = sqlGenerateUtils.generateWhere(queryStructReq, queryStatement.getHeadlessModel().getDataDate());
+String where = sqlGenerateUtils.generateWhere(queryStructReq, queryStatement.getSemanticModel().getDataDate());
 log.info("in generateSqlCommend, complete where:{}", where);
 metricTable.setWhere(where);
 metricTable.setAggOption(AggOption.AGGREGATION);

@@ -3,8 +3,8 @@ package com.tencent.supersonic.headless.core.parser.converter;
 import com.tencent.supersonic.common.pojo.Filter;
 import com.tencent.supersonic.common.pojo.enums.FilterOperatorEnum;
 import com.tencent.supersonic.headless.api.request.QueryStructReq;
-import com.tencent.supersonic.headless.api.response.DimensionResp;
 import com.tencent.supersonic.headless.core.parser.HeadlessConverter;
+import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Dimension;
 import com.tencent.supersonic.headless.core.pojo.QueryStatement;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.stereotype.Component;

@@ -28,21 +28,20 @@ public class DefaultDimValueConverter implements HeadlessConverter {
 @Override
 public void convert(QueryStatement queryStatement) {
 QueryStructReq queryStructCmd = queryStatement.getQueryStructReq();
-List<DimensionResp> dimensionResps = queryStatement.getHeadlessModel().getModelSchemaResps()
-.stream().flatMap(modelSchemaResp -> modelSchemaResp.getDimensions().stream())
-.filter(dimSchemaResp -> !CollectionUtils.isEmpty(dimSchemaResp.getDefaultValues()))
+List<Dimension> dimensions = queryStatement.getSemanticModel().getDimensions().stream()
+.filter(dimension -> !CollectionUtils.isEmpty(dimension.getDefaultValues()))
 .collect(Collectors.toList());
-if (CollectionUtils.isEmpty(dimensionResps)) {
+if (CollectionUtils.isEmpty(dimensions)) {
 return;
 }
-log.info("dimension with default values:{}, queryStruct:{}", dimensionResps, queryStructCmd);
+log.info("dimension with default values:{}, queryStruct:{}", dimensions, queryStructCmd);
 //add dimension default value to filter
 List<String> dimensionFilterBizName = queryStructCmd.getDimensionFilters().stream()
 .map(Filter::getBizName).collect(Collectors.toList());
 if (!CollectionUtils.isEmpty(dimensionFilterBizName)) {
 return;
 }
-for (DimensionResp dimensionResp : dimensionResps) {
+for (Dimension dimensionResp : dimensions) {
 Filter filter = new Filter();
 filter.setBizName(dimensionResp.getBizName());
 filter.setValue(dimensionResp.getDefaultValues());

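For readability, here is the new DefaultDimValueConverter.convert() body from the hunk above, reassembled into one piece by hand; indentation and the code past the visible hunk are reconstructed, so treat it as a sketch rather than the exact file contents:

    @Override
    public void convert(QueryStatement queryStatement) {
        QueryStructReq queryStructCmd = queryStatement.getQueryStructReq();
        // default values are now read from the calcite-layer Dimension objects,
        // not from ModelSchemaResp, which no longer hangs off the model
        List<Dimension> dimensions = queryStatement.getSemanticModel().getDimensions().stream()
                .filter(dimension -> !CollectionUtils.isEmpty(dimension.getDefaultValues()))
                .collect(Collectors.toList());
        if (CollectionUtils.isEmpty(dimensions)) {
            return;
        }
        log.info("dimension with default values:{}, queryStruct:{}", dimensions, queryStructCmd);
        // add dimension default value to filter, unless the query already filters on dimensions
        List<String> dimensionFilterBizName = queryStructCmd.getDimensionFilters().stream()
                .map(Filter::getBizName).collect(Collectors.toList());
        if (!CollectionUtils.isEmpty(dimensionFilterBizName)) {
            return;
        }
        for (Dimension dimensionResp : dimensions) {
            Filter filter = new Filter();
            filter.setBizName(dimensionResp.getBizName());
            filter.setValue(dimensionResp.getDefaultValues());
            // ... the rest of the filter wiring lies outside the visible hunk
        }
    }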
@@ -48,7 +48,7 @@ public class ParserDefaultConverter implements HeadlessConverter {
 MetricQueryReq metricQueryReq = queryStatement.getMetricReq();
 MetricQueryReq metricReq = generateSqlCommand(queryStructReq, queryStatement);
 queryStatement.setMinMaxTime(sqlGenerateUtils.getBeginEndTime(queryStructReq,
-queryStatement.getHeadlessModel().getDataDate()));
+queryStatement.getSemanticModel().getDataDate()));
 BeanUtils.copyProperties(metricReq, metricQueryReq);
 }

@@ -57,7 +57,7 @@ public class ParserDefaultConverter implements HeadlessConverter {
 metricQueryReq.setMetrics(queryStructReq.getMetrics());
 metricQueryReq.setDimensions(queryStructReq.getGroups());
 String where = sqlGenerateUtils.generateWhere(queryStructReq,
-queryStatement.getHeadlessModel().getDataDate());
+queryStatement.getSemanticModel().getDataDate());
 log.info("in generateSqlCommend, complete where:{}", where);

 metricQueryReq.setWhere(where);

@@ -71,7 +71,7 @@ public class ParserDefaultConverter implements HeadlessConverter {

 // support detail query
 if (queryStructReq.getQueryType().isNativeAggQuery() && CollectionUtils.isEmpty(metricQueryReq.getMetrics())) {
-Map<Long, DataSource> dataSourceMap = queryStatement.getHeadlessModel().getModelMap();
+Map<Long, DataSource> dataSourceMap = queryStatement.getSemanticModel().getModelMap();
 for (Long modelId : queryStructReq.getModelIds()) {
 String modelBizName = dataSourceMap.get(modelId).getName();
 String internalMetricName = sqlGenerateUtils.generateInternalMetricName(modelBizName);

@@ -3,7 +3,7 @@ package com.tencent.supersonic.headless.core.pojo;
 import com.tencent.supersonic.headless.api.request.MetricQueryReq;
 import com.tencent.supersonic.headless.api.request.ParseSqlReq;
 import com.tencent.supersonic.headless.api.request.QueryStructReq;
-import com.tencent.supersonic.headless.core.parser.calcite.s2sql.HeadlessModel;
+import com.tencent.supersonic.headless.core.parser.calcite.s2sql.SemanticModel;
 import java.util.List;
 import lombok.Data;
 import org.apache.commons.lang3.tuple.ImmutablePair;

@@ -30,7 +30,7 @@ public class QueryStatement {
 private String viewSimplifySql = "";


-private HeadlessModel headlessModel;
+private SemanticModel semanticModel;

 public boolean isOk() {
 this.ok = "".equals(errMsg) && !"".equals(sql);

@@ -2,6 +2,7 @@ package com.tencent.supersonic.headless.core.pojo.yaml;


 import com.tencent.supersonic.common.pojo.enums.DataTypeEnums;
+import java.util.List;
 import lombok.Data;


@@ -10,6 +11,8 @@ public class DimensionYamlTpl {

 private String name;

+private String bizName;
+
 private String owners;

 private String type;

@@ -19,4 +22,6 @@ public class DimensionYamlTpl {
 private DimensionTimeTypeParamsTpl typeParams;

 private DataTypeEnums dataType;
+
+private List<String> defaultValues;
 }

@@ -11,14 +11,14 @@ import com.tencent.supersonic.headless.core.parser.calcite.s2sql.DataSource;
 import com.tencent.supersonic.headless.core.parser.calcite.s2sql.DataType;
 import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Dimension;
 import com.tencent.supersonic.headless.core.parser.calcite.s2sql.DimensionTimeTypeParams;
-import com.tencent.supersonic.headless.core.parser.calcite.s2sql.HeadlessModel;
+import com.tencent.supersonic.headless.core.parser.calcite.s2sql.SemanticModel;
 import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Identify;
 import com.tencent.supersonic.headless.core.parser.calcite.s2sql.JoinRelation;
 import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Materialization.TimePartType;
 import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Measure;
 import com.tencent.supersonic.headless.core.parser.calcite.s2sql.Metric;
 import com.tencent.supersonic.headless.core.parser.calcite.s2sql.MetricTypeParams;
-import com.tencent.supersonic.headless.core.parser.calcite.schema.HeadlessSchema;
+import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSchema;
 import com.tencent.supersonic.headless.core.pojo.yaml.DataModelYamlTpl;
 import com.tencent.supersonic.headless.core.pojo.yaml.DimensionTimeTypeParamsTpl;
 import com.tencent.supersonic.headless.core.pojo.yaml.DimensionYamlTpl;

@@ -55,21 +55,21 @@ import java.util.stream.Collectors;
 public class HeadlessSchemaManager {

 @Autowired
-private LoadingCache<String, HeadlessModel> loadingCache;
+private LoadingCache<String, SemanticModel> loadingCache;
 private final Catalog catalog;

 public HeadlessSchemaManager(Catalog catalog) {
 this.catalog = catalog;
 }

-public HeadlessModel reload(String rootPath) {
-HeadlessModel headlessModel = new HeadlessModel();
-headlessModel.setRootPath(rootPath);
+public SemanticModel reload(String rootPath) {
+SemanticModel semanticModel = new SemanticModel();
+semanticModel.setRootPath(rootPath);
 Set<Long> modelIds = Arrays.stream(rootPath.split(",")).map(s -> Long.parseLong(s.trim()))
 .collect(Collectors.toSet());
 if (modelIds.isEmpty()) {
 log.error("get modelIds empty {}", rootPath);
-return headlessModel;
+return semanticModel;
 }
 Map<String, List<DimensionYamlTpl>> dimensionYamlTpls = new HashMap<>();
 List<DataModelYamlTpl> dataModelYamlTpls = new ArrayList<>();

@@ -77,33 +77,33 @@ public class HeadlessSchemaManager {
 Map<Long, String> modelIdName = new HashMap<>();
 catalog.getModelYamlTplByModelIds(modelIds, dimensionYamlTpls, dataModelYamlTpls, metricYamlTpls, modelIdName);
 DatabaseResp databaseResp = catalog.getDatabaseByModelId(modelIds.iterator().next());
-headlessModel.setDatabaseResp(databaseResp);
+semanticModel.setDatabaseResp(databaseResp);
 List<ModelRela> modelRelas = catalog.getModelRela(new ArrayList<>(modelIds));
 if (!CollectionUtils.isEmpty(modelRelas)) {
-headlessModel.setJoinRelations(getJoinRelation(modelRelas, modelIdName));
+semanticModel.setJoinRelations(getJoinRelation(modelRelas, modelIdName));
 }
 if (!dataModelYamlTpls.isEmpty()) {
 Map<String, DataSource> dataSourceMap = dataModelYamlTpls.stream().map(HeadlessSchemaManager::getDatasource)
 .collect(Collectors.toMap(DataSource::getName, item -> item, (k1, k2) -> k1));
-headlessModel.setDatasourceMap(dataSourceMap);
+semanticModel.setDatasourceMap(dataSourceMap);
 }
 if (!dimensionYamlTpls.isEmpty()) {
 Map<String, List<Dimension>> dimensionMap = new HashMap<>();
 for (Map.Entry<String, List<DimensionYamlTpl>> entry : dimensionYamlTpls.entrySet()) {
 dimensionMap.put(entry.getKey(), getDimensions(entry.getValue()));
 }
-headlessModel.setDimensionMap(dimensionMap);
+semanticModel.setDimensionMap(dimensionMap);
 }
 if (!metricYamlTpls.isEmpty()) {
-headlessModel.setMetrics(getMetrics(metricYamlTpls));
+semanticModel.setMetrics(getMetrics(metricYamlTpls));
 }
-return headlessModel;
+return semanticModel;
 }

 //private Map<String, SemanticSchema> semanticSchemaMap = new HashMap<>();
-public HeadlessModel get(String rootPath) throws Exception {
+public SemanticModel get(String rootPath) throws Exception {
 rootPath = formatKey(rootPath);
-HeadlessModel schema = loadingCache.get(rootPath);
+SemanticModel schema = loadingCache.get(rootPath);
 if (schema == null) {
 return null;
 }

@@ -182,6 +182,8 @@ public class HeadlessSchemaManager {
 dimension.setExpr(dimensionYamlTpl.getExpr());
 dimension.setName(dimensionYamlTpl.getName());
 dimension.setOwners(dimensionYamlTpl.getOwners());
+dimension.setBizName(dimensionYamlTpl.getBizName());
+dimension.setDefaultValues(dimensionYamlTpl.getDefaultValues());
 if (Objects.nonNull(dimensionYamlTpl.getDataType())) {
 dimension.setDataType(DataType.of(dimensionYamlTpl.getDataType().getType()));
 }

@@ -235,13 +237,13 @@ public class HeadlessSchemaManager {
 return joinRelations;
 }

-public static void update(HeadlessSchema schema, List<Metric> metric) throws Exception {
+public static void update(SemanticSchema schema, List<Metric> metric) throws Exception {
 if (schema != null) {
 updateMetric(metric, schema.getMetrics());
 }
 }

-public static void update(HeadlessSchema schema, DataSource datasourceYamlTpl) throws Exception {
+public static void update(SemanticSchema schema, DataSource datasourceYamlTpl) throws Exception {
 if (schema != null) {
 String dataSourceName = datasourceYamlTpl.getName();
 Optional<Entry<String, DataSource>> datasourceYamlTplMap = schema.getDatasource().entrySet().stream()

@@ -254,7 +256,7 @@ public class HeadlessSchemaManager {
 }
 }

-public static void update(HeadlessSchema schema, String datasourceBizName, List<Dimension> dimensionYamlTpls)
+public static void update(SemanticSchema schema, String datasourceBizName, List<Dimension> dimensionYamlTpls)
 throws Exception {
 if (schema != null) {
 Optional<Map.Entry<String, List<Dimension>>> datasourceYamlTplMap = schema.getDimension().entrySet()

@@ -320,15 +322,15 @@ public class HeadlessSchemaManager {
 private Integer maximumSize = 1000;

 @Bean
-public LoadingCache<String, HeadlessModel> getCache() {
-LoadingCache<String, HeadlessModel> cache
+public LoadingCache<String, SemanticModel> getCache() {
+LoadingCache<String, SemanticModel> cache
 = CacheBuilder.newBuilder()
 .expireAfterWrite(saveMinutes, TimeUnit.MINUTES)
 .initialCapacity(10)
 .maximumSize(maximumSize).build(
-new CacheLoader<String, HeadlessModel>() {
+new CacheLoader<String, SemanticModel>() {
 @Override
-public HeadlessModel load(String key) {
+public SemanticModel load(String key) {
 log.info("load SemanticSchema [{}]", key);
 return HeadlessSchemaManager.this.reload(key);
 }

@@ -4,39 +4,34 @@ import com.tencent.supersonic.common.pojo.ItemDateResp;
 import com.tencent.supersonic.headless.api.request.MetricQueryReq;
 import com.tencent.supersonic.headless.api.request.ParseSqlReq;
 import com.tencent.supersonic.headless.api.request.QueryStructReq;
-import com.tencent.supersonic.headless.api.response.ModelSchemaResp;
 import com.tencent.supersonic.headless.api.response.QueryResultWithSchemaResp;
 import com.tencent.supersonic.headless.core.optimizer.QueryOptimizer;
 import com.tencent.supersonic.headless.core.parser.QueryParser;
-import com.tencent.supersonic.headless.core.parser.calcite.s2sql.HeadlessModel;
+import com.tencent.supersonic.headless.core.parser.calcite.s2sql.SemanticModel;
 import com.tencent.supersonic.headless.core.pojo.QueryStatement;
 import com.tencent.supersonic.headless.core.utils.ComponentFactory;
 import com.tencent.supersonic.headless.core.executor.QueryExecutor;
 import com.tencent.supersonic.headless.server.manager.HeadlessSchemaManager;
-import com.tencent.supersonic.headless.server.service.Catalog;
 import com.tencent.supersonic.headless.server.service.HeadlessQueryEngine;
 import com.tencent.supersonic.headless.server.utils.QueryStructUtils;
 import com.tencent.supersonic.headless.server.utils.QueryUtils;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.stereotype.Component;
 import org.springframework.util.CollectionUtils;
-import java.util.List;

 @Slf4j
 @Component
 public class HeadlessQueryEngineImpl implements HeadlessQueryEngine {

 private final QueryParser queryParser;
-private final Catalog catalog;
 private final QueryUtils queryUtils;
 private final QueryStructUtils queryStructUtils;
 private final HeadlessSchemaManager headlessSchemaManager;

-public HeadlessQueryEngineImpl(QueryParser queryParser, Catalog catalog,
+public HeadlessQueryEngineImpl(QueryParser queryParser,
 QueryUtils queryUtils, HeadlessSchemaManager headlessSchemaManager,
 QueryStructUtils queryStructUtils) {
 this.queryParser = queryParser;
-this.catalog = catalog;
 this.queryUtils = queryUtils;
 this.headlessSchemaManager = headlessSchemaManager;
 this.queryStructUtils = queryStructUtils;

@@ -57,7 +52,7 @@ public class HeadlessQueryEngineImpl implements HeadlessQueryEngine {

 public QueryStatement plan(QueryStatement queryStatement) throws Exception {
 queryStatement.setEnableOptimize(queryUtils.enableOptimize());
-queryStatement.setHeadlessModel(getHeadLessModel(queryStatement));
+queryStatement.setSemanticModel(getSemanticModel(queryStatement));
 queryStatement = queryParser.logicSql(queryStatement);
 queryUtils.checkSqlParse(queryStatement);
 queryStatement.setModelIds(queryStatement.getQueryStructReq().getModelIds());

@@ -82,31 +77,31 @@ public class HeadlessQueryEngineImpl implements HeadlessQueryEngine {
 }

 @Override
-public QueryStatement physicalSql(QueryStructReq queryStructCmd, ParseSqlReq sqlCommend) {
+public QueryStatement physicalSql(QueryStructReq queryStructCmd, ParseSqlReq sqlCommend) throws Exception {
 QueryStatement queryStatement = new QueryStatement();
 queryStatement.setQueryStructReq(queryStructCmd);
 queryStatement.setParseSqlReq(sqlCommend);
 queryStatement.setSql(sqlCommend.getSql());
 queryStatement.setIsS2SQL(true);
+queryStatement.setSemanticModel(getSemanticModel(queryStatement));
 return optimize(queryStructCmd, queryParser.parser(sqlCommend, queryStatement));
 }

-public QueryStatement physicalSql(QueryStructReq queryStructCmd, MetricQueryReq metricCommand) {
+public QueryStatement physicalSql(QueryStructReq queryStructCmd, MetricQueryReq metricCommand) throws Exception {
 QueryStatement queryStatement = new QueryStatement();
 queryStatement.setQueryStructReq(queryStructCmd);
 queryStatement.setMetricReq(metricCommand);
 queryStatement.setIsS2SQL(false);
+queryStatement.setSemanticModel(getSemanticModel(queryStatement));
 return queryParser.parser(queryStatement);
 }

-private HeadlessModel getHeadLessModel(QueryStatement queryStatement) throws Exception {
+private SemanticModel getSemanticModel(QueryStatement queryStatement) throws Exception {
 QueryStructReq queryStructReq = queryStatement.getQueryStructReq();
-HeadlessModel headlessModel = headlessSchemaManager.get(queryStructReq.getModelIdStr());
+SemanticModel semanticModel = headlessSchemaManager.get(queryStructReq.getModelIdStr());
 ItemDateResp itemDateResp = queryStructUtils.getItemDateResp(queryStructReq);
-headlessModel.setDataDate(itemDateResp);
-List<ModelSchemaResp> modelSchemaResps = catalog.getModelSchema(queryStructReq.getModelIds());
-headlessModel.setModelSchemaResps(modelSchemaResps);
-return headlessModel;
+semanticModel.setDataDate(itemDateResp);
+return semanticModel;
 }

 }

@@ -5,7 +5,7 @@ import com.tencent.supersonic.headless.api.enums.AggOption;
 import com.tencent.supersonic.headless.api.request.MetricQueryReq;
 import com.tencent.supersonic.headless.api.response.SqlParserResp;
 import com.tencent.supersonic.headless.core.parser.calcite.planner.AggPlanner;
-import com.tencent.supersonic.headless.core.parser.calcite.schema.HeadlessSchema;
+import com.tencent.supersonic.headless.core.parser.calcite.schema.SemanticSchema;
 import com.tencent.supersonic.headless.core.pojo.QueryStatement;
 import com.tencent.supersonic.headless.core.pojo.yaml.DataModelYamlTpl;
 import com.tencent.supersonic.headless.core.pojo.yaml.DimensionTimeTypeParamsTpl;

@@ -26,20 +26,20 @@ import java.util.Map;
 @Slf4j
 class HeadlessParserServiceTest {

-private static Map<String, HeadlessSchema> headlessSchemaMap = new HashMap<>();
+private static Map<String, SemanticSchema> headlessSchemaMap = new HashMap<>();

-public static SqlParserResp parser(HeadlessSchema headlessSchema, MetricQueryReq metricCommand, boolean isAgg) {
+public static SqlParserResp parser(SemanticSchema semanticSchema, MetricQueryReq metricCommand, boolean isAgg) {
 SqlParserResp sqlParser = new SqlParserResp();
 if (metricCommand.getRootPath().isEmpty()) {
 sqlParser.setErrMsg("rootPath empty");
 return sqlParser;
 }
 try {
-if (headlessSchema == null) {
+if (semanticSchema == null) {
 sqlParser.setErrMsg("headlessSchema not found");
 return sqlParser;
 }
-AggPlanner aggBuilder = new AggPlanner(headlessSchema);
+AggPlanner aggBuilder = new AggPlanner(semanticSchema);
 QueryStatement queryStatement = new QueryStatement();
 queryStatement.setMetricReq(metricCommand);
 aggBuilder.explain(queryStatement, AggOption.getAggregation(!isAgg));

@@ -122,9 +122,9 @@ class HeadlessParserServiceTest {
 identifies.add(identify);
 datasource.setIdentifiers(identifies);

-HeadlessSchema headlessSchema = HeadlessSchema.newBuilder("s2").build();
+SemanticSchema semanticSchema = SemanticSchema.newBuilder("s2").build();

-HeadlessSchemaManager.update(headlessSchema, HeadlessSchemaManager.getDatasource(datasource));
+HeadlessSchemaManager.update(semanticSchema, HeadlessSchemaManager.getDatasource(datasource));

 DimensionYamlTpl dimension1 = new DimensionYamlTpl();
 dimension1.setExpr("page");

@@ -133,7 +133,7 @@ class HeadlessParserServiceTest {
 List<DimensionYamlTpl> dimensionYamlTpls = new ArrayList<>();
 dimensionYamlTpls.add(dimension1);

-HeadlessSchemaManager.update(headlessSchema, "s2_pv_uv_statis",
+HeadlessSchemaManager.update(semanticSchema, "s2_pv_uv_statis",
 HeadlessSchemaManager.getDimensions(dimensionYamlTpls));

 MetricYamlTpl metric1 = new MetricYamlTpl();

@@ -174,9 +174,9 @@ class HeadlessParserServiceTest {
 List<ColumnOrder> orders = new ArrayList<>();
 orders.add(ColumnOrder.buildDesc("sys_imp_date"));
 metricCommand.setOrder(orders);
-System.out.println(parser(headlessSchema, metricCommand, true));
+System.out.println(parser(semanticSchema, metricCommand, true));

-addDepartment(headlessSchema);
+addDepartment(semanticSchema);

 MetricQueryReq metricCommand2 = new MetricQueryReq();
 metricCommand2.setRootPath("s2");

@@ -189,12 +189,12 @@ class HeadlessParserServiceTest {
 List<ColumnOrder> orders2 = new ArrayList<>();
 orders2.add(ColumnOrder.buildDesc("sys_imp_date"));
 metricCommand2.setOrder(orders2);
-System.out.println(parser(headlessSchema, metricCommand2, true));
+System.out.println(parser(semanticSchema, metricCommand2, true));


 }

-private static void addDepartment(HeadlessSchema headlessSchema) {
+private static void addDepartment(SemanticSchema semanticSchema) {
 DataModelYamlTpl datasource = new DataModelYamlTpl();
 datasource.setName("user_department");
 datasource.setSourceId(1L);

@@ -240,7 +240,7 @@ class HeadlessParserServiceTest {
 identifies.add(identify);
 datasource.setIdentifiers(identifies);

-headlessSchema.getDatasource().put("user_department", HeadlessSchemaManager.getDatasource(datasource));
+semanticSchema.getDatasource().put("user_department", HeadlessSchemaManager.getDatasource(datasource));

 DimensionYamlTpl dimension1 = new DimensionYamlTpl();
 dimension1.setExpr("department");

@@ -249,7 +249,7 @@ class HeadlessParserServiceTest {
 List<DimensionYamlTpl> dimensionYamlTpls = new ArrayList<>();
 dimensionYamlTpls.add(dimension1);

-headlessSchema.getDimension()
+semanticSchema.getDimension()
 .put("user_department", HeadlessSchemaManager.getDimensions(dimensionYamlTpls));
 }
 }