[improvement][headless] Clean up the code logic of the headless core.

This commit is contained in:
jerryjzhang
2024-11-19 20:13:49 +08:00
parent f4f0e58bfb
commit 62a4d60a0b
14 changed files with 96 additions and 101 deletions

View File

@@ -38,7 +38,7 @@ public class JdbcExecutor implements QueryExecutor {
SqlUtils sqlUtils = ContextUtils.getBean(SqlUtils.class);
String sql = StringUtils.normalizeSpace(queryStatement.getSql());
log.info("executing SQL: {}", sql);
Database database = queryStatement.getSemanticModel().getDatabase();
Database database = queryStatement.getOntology().getDatabase();
SemanticQueryResp queryResultWithColumns = new SemanticQueryResp();
try {
SqlUtils sqlUtil = sqlUtils.init(database);

View File

@@ -2,7 +2,7 @@ package com.tencent.supersonic.headless.core.pojo;
import com.tencent.supersonic.headless.api.pojo.QueryParam;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.SemanticModel;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology;
import lombok.Data;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.ImmutablePair;
@@ -29,7 +29,7 @@ public class QueryStatement {
private String dataSetAlias;
private String dataSetSimplifySql;
private Boolean enableLimitWrapper = false;
private SemanticModel semanticModel;
private Ontology ontology;
private SemanticSchemaResp semanticSchemaResp;
private Integer limit = 1000;
private Boolean isTranslated = false;

View File

@@ -10,7 +10,7 @@ import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.core.pojo.DataSetQueryParam;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.SemanticModel;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology;
import com.tencent.supersonic.headless.core.translator.converter.QueryConverter;
import com.tencent.supersonic.headless.core.utils.ComponentFactory;
import lombok.extern.slf4j.Slf4j;
@@ -81,8 +81,8 @@ public class DefaultSemanticTranslator implements SemanticTranslator {
private QueryStatement doParse(DataSetQueryParam dataSetQueryParam,
QueryStatement queryStatement) {
log.info("parse dataSetQuery [{}] ", dataSetQueryParam);
SemanticModel semanticModel = queryStatement.getSemanticModel();
EngineType engineType = EngineType.fromString(semanticModel.getDatabase().getType());
Ontology ontology = queryStatement.getOntology();
EngineType engineType = EngineType.fromString(ontology.getDatabase().getType());
try {
if (!CollectionUtils.isEmpty(dataSetQueryParam.getTables())) {
List<String[]> tables = new ArrayList<>();
@@ -158,7 +158,7 @@ public class DefaultSemanticTranslator implements SemanticTranslator {
tableSql.setMinMaxTime(queryStatement.getMinMaxTime());
tableSql.setEnableOptimize(queryStatement.getEnableOptimize());
tableSql.setDataSetId(queryStatement.getDataSetId());
tableSql.setSemanticModel(queryStatement.getSemanticModel());
tableSql.setOntology(queryStatement.getOntology());
if (isSingleMetricTable) {
tableSql.setDataSetSql(dataSetQueryParam.getSql());
tableSql.setDataSetAlias(metricTable.getAlias());

View File

@@ -7,7 +7,7 @@ import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.QueryParser;
import com.tencent.supersonic.headless.core.translator.calcite.planner.AggPlanner;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.SemanticModel;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology;
import com.tencent.supersonic.headless.core.translator.calcite.schema.RuntimeOptions;
import com.tencent.supersonic.headless.core.translator.calcite.schema.S2SemanticSchema;
import lombok.extern.slf4j.Slf4j;
@@ -25,16 +25,16 @@ public class CalciteQueryParser implements QueryParser {
@Override
public void parse(QueryStatement queryStatement, AggOption isAgg) throws Exception {
MetricQueryParam metricReq = queryStatement.getMetricQueryParam();
SemanticModel semanticModel = queryStatement.getSemanticModel();
if (semanticModel == null) {
Ontology ontology = queryStatement.getOntology();
if (ontology == null) {
queryStatement.setErrMsg("semanticSchema not found");
return;
}
queryStatement.setMetricQueryParam(metricReq);
S2SemanticSchema semanticSchema = getSemanticSchema(semanticModel, queryStatement);
S2SemanticSchema semanticSchema = getSemanticSchema(ontology, queryStatement);
AggPlanner aggPlanner = new AggPlanner(semanticSchema);
aggPlanner.plan(queryStatement, isAgg);
EngineType engineType = EngineType.fromString(semanticModel.getDatabase().getType());
EngineType engineType = EngineType.fromString(ontology.getDatabase().getType());
queryStatement.setSql(aggPlanner.getSql(engineType));
if (Objects.nonNull(queryStatement.getEnableOptimize())
&& queryStatement.getEnableOptimize()
@@ -52,15 +52,14 @@ public class CalciteQueryParser implements QueryParser {
}
}
private S2SemanticSchema getSemanticSchema(SemanticModel semanticModel,
QueryStatement queryStatement) {
private S2SemanticSchema getSemanticSchema(Ontology ontology, QueryStatement queryStatement) {
S2SemanticSchema semanticSchema =
S2SemanticSchema.newBuilder(semanticModel.getSchemaKey()).build();
semanticSchema.setSemanticModel(semanticModel);
semanticSchema.setDatasource(semanticModel.getDatasourceMap());
semanticSchema.setDimension(semanticModel.getDimensionMap());
semanticSchema.setMetric(semanticModel.getMetrics());
semanticSchema.setJoinRelations(semanticModel.getJoinRelations());
S2SemanticSchema.newBuilder(ontology.getSchemaKey()).build();
semanticSchema.setSemanticModel(ontology);
semanticSchema.setDatasource(ontology.getDatasourceMap());
semanticSchema.setDimension(ontology.getDimensionMap());
semanticSchema.setMetric(ontology.getMetrics());
semanticSchema.setJoinRelations(ontology.getJoinRelations());
semanticSchema.setRuntimeOptions(
RuntimeOptions.builder().minMaxTime(queryStatement.getMinMaxTime())
.enableOptimize(queryStatement.getEnableOptimize()).build());

View File

@@ -109,7 +109,7 @@ public class AggPlanner implements Planner {
// build a parse node
parse();
// optimizer
Database database = queryStatement.getSemanticModel().getDatabase();
Database database = queryStatement.getOntology().getDatabase();
EngineType engineType = EngineType.fromString(database.getType());
optimize(engineType);
}

View File

@@ -11,7 +11,7 @@ import java.util.Map;
import java.util.stream.Collectors;
@Data
public class SemanticModel {
public class Ontology {
private String schemaKey;
private List<Metric> metrics = new ArrayList<>();

View File

@@ -5,7 +5,7 @@ import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.JoinRelation;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Materialization;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.SemanticModel;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology;
import org.apache.calcite.schema.Schema;
import org.apache.calcite.schema.SchemaVersion;
import org.apache.calcite.schema.Table;
@@ -21,7 +21,7 @@ public class S2SemanticSchema extends AbstractSchema {
private final Map<String, Table> tableMap;
private SemanticModel semanticModel = new SemanticModel();
private Ontology ontology = new Ontology();
private List<JoinRelation> joinRelations;
@@ -40,12 +40,12 @@ public class S2SemanticSchema extends AbstractSchema {
return schemaKey;
}
public void setSemanticModel(SemanticModel semanticModel) {
this.semanticModel = semanticModel;
public void setSemanticModel(Ontology ontology) {
this.ontology = ontology;
}
public SemanticModel getSemanticModel() {
return semanticModel;
public Ontology getSemanticModel() {
return ontology;
}
@Override
@@ -59,35 +59,35 @@ public class S2SemanticSchema extends AbstractSchema {
}
public Map<String, DataModel> getDatasource() {
return semanticModel.getDatasourceMap();
return ontology.getDatasourceMap();
}
public void setDatasource(Map<String, DataModel> datasource) {
semanticModel.setDatasourceMap(datasource);
ontology.setDatasourceMap(datasource);
}
public Map<String, List<Dimension>> getDimension() {
return semanticModel.getDimensionMap();
return ontology.getDimensionMap();
}
public void setDimension(Map<String, List<Dimension>> dimensions) {
semanticModel.setDimensionMap(dimensions);
ontology.setDimensionMap(dimensions);
}
public List<Metric> getMetrics() {
return semanticModel.getMetrics();
return ontology.getMetrics();
}
public void setMetric(List<Metric> metric) {
semanticModel.setMetrics(metric);
ontology.setMetrics(metric);
}
public void setMaterializationList(List<Materialization> materializationList) {
semanticModel.setMaterializationList(materializationList);
ontology.setMaterializationList(materializationList);
}
public List<Materialization> getMaterializationList() {
return semanticModel.getMaterializationList();
return ontology.getMaterializationList();
}
public void setJoinRelations(List<JoinRelation> joinRelations) {

View File

@@ -97,7 +97,7 @@ public class CalculateAggConverter implements QueryConverter {
@Override
public void convert(QueryStatement queryStatement) throws Exception {
Database database = queryStatement.getSemanticModel().getDatabase();
Database database = queryStatement.getOntology().getDatabase();
DataSetQueryParam dataSetQueryParam = generateSqlCommend(queryStatement,
EngineType.fromString(database.getType().toUpperCase()), database.getVersion());
queryStatement.setDataSetQueryParam(dataSetQueryParam);

View File

@@ -34,7 +34,7 @@ public class DefaultDimValueConverter implements QueryConverter {
@Override
public void convert(QueryStatement queryStatement) {
List<Dimension> dimensions = queryStatement.getSemanticModel().getDimensions().stream()
List<Dimension> dimensions = queryStatement.getOntology().getDimensions().stream()
.filter(dimension -> !CollectionUtils.isEmpty(dimension.getDefaultValues()))
.collect(Collectors.toList());
if (CollectionUtils.isEmpty(dimensions)) {

View File

@@ -60,7 +60,7 @@ public class ParserDefaultConverter implements QueryConverter {
// support detail query
if (queryParam.getQueryType().isNativeAggQuery()
&& CollectionUtils.isEmpty(metricQueryParam.getMetrics())) {
Map<Long, DataModel> modelMap = queryStatement.getSemanticModel().getModelMap();
Map<Long, DataModel> modelMap = queryStatement.getOntology().getModelMap();
for (Long modelId : modelMap.keySet()) {
String modelBizName = modelMap.get(modelId).getName();
String internalMetricName =

View File

@@ -39,8 +39,8 @@ public class SqlVariableParseConverter implements QueryConverter {
SqlVariableParseUtils.parse(modelResp.getModelDetail().getSqlQuery(),
modelResp.getModelDetail().getSqlVariables(),
queryStatement.getQueryParam().getParams());
DataModel dataModel = queryStatement.getSemanticModel().getDatasourceMap()
.get(modelResp.getBizName());
DataModel dataModel =
queryStatement.getOntology().getDatasourceMap().get(modelResp.getBizName());
dataModel.setSqlQuery(sqlParsed);
}
}

View File

@@ -34,7 +34,7 @@ import com.tencent.supersonic.headless.core.cache.QueryCache;
import com.tencent.supersonic.headless.core.executor.QueryExecutor;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.SemanticTranslator;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.SemanticModel;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology;
import com.tencent.supersonic.headless.core.utils.ComponentFactory;
import com.tencent.supersonic.headless.server.annotation.S2DataPermission;
import com.tencent.supersonic.headless.server.facade.service.SemanticLayerService;
@@ -51,6 +51,7 @@ import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
@@ -271,23 +272,6 @@ public class S2SemanticLayerService implements SemanticLayerService {
return metricService.getMetrics(metaFilter);
}
private QueryStatement buildSqlQueryStatement(QuerySqlReq querySqlReq, User user)
throws Exception {
// If dataSetId or DataSetName is empty, parse dataSetId from the SQL
if (querySqlReq.needGetDataSetId()) {
Long dataSetId = dataSetService.getDataSetIdFromSql(querySqlReq.getSql(), user);
querySqlReq.setDataSetId(dataSetId);
}
SchemaFilterReq filter = buildSchemaFilterReq(querySqlReq);
SemanticSchemaResp semanticSchemaResp = schemaService.fetchSemanticSchema(filter);
QueryStatement queryStatement = queryReqConverter.convert(querySqlReq, semanticSchemaResp);
queryStatement.setModelIds(querySqlReq.getModelIds());
queryStatement.setEnableOptimize(queryUtils.enableOptimize());
queryStatement.setSemanticSchemaResp(semanticSchemaResp);
queryStatement.setSemanticModel(semanticSchemaManager.getSemanticModel(semanticSchemaResp));
return queryStatement;
}
private QueryStatement buildQueryStatement(SemanticQueryReq semanticQueryReq, User user)
throws Exception {
QueryStatement queryStatement = null;
@@ -310,18 +294,29 @@ public class S2SemanticLayerService implements SemanticLayerService {
return queryStatement;
}
private QueryStatement buildSqlQueryStatement(QuerySqlReq querySqlReq, User user) {
// If dataSetId or DataSetName is empty, parse dataSetId from the SQL
if (querySqlReq.needGetDataSetId()) {
Long dataSetId = dataSetService.getDataSetIdFromSql(querySqlReq.getSql(), user);
querySqlReq.setDataSetId(dataSetId);
}
SchemaFilterReq filter = buildSchemaFilterReq(querySqlReq);
SemanticSchemaResp semanticSchemaResp = schemaService.fetchSemanticSchema(filter);
return queryReqConverter.buildQueryStatement(querySqlReq, semanticSchemaResp);
}
private QueryStatement buildStructQueryStatement(QueryStructReq queryStructReq) {
SchemaFilterReq filter = buildSchemaFilterReq(queryStructReq);
SemanticSchemaResp semanticSchemaResp = schemaService.fetchSemanticSchema(filter);
QueryStatement queryStatement = new QueryStatement();
QueryParam queryParam = new QueryParam();
queryReqConverter.convert(queryStructReq, queryParam);
BeanUtils.copyProperties(queryStructReq, queryParam);
queryStatement.setQueryParam(queryParam);
queryStatement.setIsS2SQL(false);
queryStatement.setEnableOptimize(queryUtils.enableOptimize());
queryStatement.setDataSetId(queryStructReq.getDataSetId());
queryStatement.setSemanticSchemaResp(semanticSchemaResp);
queryStatement.setSemanticModel(semanticSchemaManager.getSemanticModel(semanticSchemaResp));
queryStatement.setOntology(semanticSchemaManager.buildOntology(semanticSchemaResp));
return queryStatement;
}
@@ -330,9 +325,9 @@ public class S2SemanticLayerService implements SemanticLayerService {
List<QueryStatement> sqlParsers = new ArrayList<>();
for (QueryStructReq queryStructReq : queryMultiStructReq.getQueryStructReqs()) {
QueryStatement queryStatement = buildQueryStatement(queryStructReq, user);
SemanticModel semanticModel = queryStatement.getSemanticModel();
Ontology ontology = queryStatement.getOntology();
queryStatement.setModelIds(queryStructReq.getModelIds());
queryStatement.setSemanticModel(semanticModel);
queryStatement.setOntology(ontology);
queryStatement.setEnableOptimize(queryUtils.enableOptimize());
semanticTranslator.translate(queryStatement);
sqlParsers.add(queryStatement);

View File

@@ -18,7 +18,7 @@ import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Materializa
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.MetricTypeParams;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.SemanticModel;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology;
import com.tencent.supersonic.headless.core.translator.calcite.schema.S2SemanticSchema;
import com.tencent.supersonic.headless.server.pojo.yaml.DataModelYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.DimensionTimeTypeParamsTpl;
@@ -57,9 +57,9 @@ public class SemanticSchemaManager {
this.schemaService = schemaService;
}
public SemanticModel getSemanticModel(SemanticSchemaResp semanticSchemaResp) {
SemanticModel semanticModel = new SemanticModel();
semanticModel.setSchemaKey(semanticSchemaResp.getSchemaKey());
public Ontology buildOntology(SemanticSchemaResp semanticSchemaResp) {
Ontology ontology = new Ontology();
ontology.setSchemaKey(semanticSchemaResp.getSchemaKey());
Map<String, List<DimensionYamlTpl>> dimensionYamlTpls = new HashMap<>();
List<DataModelYamlTpl> dataModelYamlTpls = new ArrayList<>();
List<MetricYamlTpl> metricYamlTpls = new ArrayList<>();
@@ -67,36 +67,35 @@ public class SemanticSchemaManager {
schemaService.getSchemaYamlTpl(semanticSchemaResp, dimensionYamlTpls, dataModelYamlTpls,
metricYamlTpls, modelIdName);
DatabaseResp databaseResp = semanticSchemaResp.getDatabaseResp();
semanticModel.setDatabase(DatabaseConverter.convert(databaseResp));
ontology.setDatabase(DatabaseConverter.convert(databaseResp));
if (!CollectionUtils.isEmpty(semanticSchemaResp.getModelRelas())) {
semanticModel.setJoinRelations(
ontology.setJoinRelations(
getJoinRelation(semanticSchemaResp.getModelRelas(), modelIdName));
}
if (!dataModelYamlTpls.isEmpty()) {
Map<String, DataModel> dataSourceMap =
dataModelYamlTpls.stream().map(SemanticSchemaManager::getDatasource).collect(
Collectors.toMap(DataModel::getName, item -> item, (k1, k2) -> k1));
semanticModel.setDatasourceMap(dataSourceMap);
ontology.setDatasourceMap(dataSourceMap);
}
if (!dimensionYamlTpls.isEmpty()) {
Map<String, List<Dimension>> dimensionMap = new HashMap<>();
for (Map.Entry<String, List<DimensionYamlTpl>> entry : dimensionYamlTpls.entrySet()) {
dimensionMap.put(entry.getKey(), getDimensions(entry.getValue()));
}
semanticModel.setDimensionMap(dimensionMap);
ontology.setDimensionMap(dimensionMap);
}
if (!metricYamlTpls.isEmpty()) {
semanticModel.setMetrics(getMetrics(metricYamlTpls));
ontology.setMetrics(getMetrics(metricYamlTpls));
}
return semanticModel;
return ontology;
}
public SemanticModel getTagSemanticModel(SemanticSchemaResp semanticSchemaResp)
throws Exception {
public Ontology getTagSemanticModel(SemanticSchemaResp semanticSchemaResp) throws Exception {
if (CollectionUtils.isEmpty(semanticSchemaResp.getTags())) {
throw new Exception("semanticSchemaResp tag is empty");
}
SemanticModel semanticModel = getSemanticModel(semanticSchemaResp);
Ontology ontology = buildOntology(semanticSchemaResp);
// Map<String, List<Dimension>> dimensions = new HashMap<>();
Map<Long, List<TagResp>> tagMap = new HashMap<>();
for (TagResp tagResp : semanticSchemaResp.getTags()) {
@@ -105,24 +104,24 @@ public class SemanticSchemaManager {
}
tagMap.get(tagResp.getModelId()).add(tagResp);
}
if (Objects.nonNull(semanticModel.getDatasourceMap())
&& !semanticModel.getDatasourceMap().isEmpty()) {
for (Map.Entry<String, DataModel> entry : semanticModel.getDatasourceMap().entrySet()) {
if (Objects.nonNull(ontology.getDatasourceMap())
&& !ontology.getDatasourceMap().isEmpty()) {
for (Map.Entry<String, DataModel> entry : ontology.getDatasourceMap().entrySet()) {
List<Dimension> modelDimensions = new ArrayList<>();
if (!semanticModel.getDimensionMap().containsKey(entry.getKey())) {
semanticModel.getDimensionMap().put(entry.getKey(), modelDimensions);
if (!ontology.getDimensionMap().containsKey(entry.getKey())) {
ontology.getDimensionMap().put(entry.getKey(), modelDimensions);
} else {
modelDimensions = semanticModel.getDimensionMap().get(entry.getKey());
modelDimensions = ontology.getDimensionMap().get(entry.getKey());
}
if (tagMap.containsKey(entry.getValue().getId())) {
for (TagResp tagResp : tagMap.get(entry.getValue().getId())) {
addTagModel(tagResp, modelDimensions, semanticModel.getMetrics());
addTagModel(tagResp, modelDimensions, ontology.getMetrics());
}
}
}
}
return semanticModel;
return ontology;
}
private void addTagModel(TagResp tagResp, List<Dimension> modelDimensions,

View File

@@ -29,6 +29,7 @@ import com.tencent.supersonic.headless.core.adaptor.db.DbAdaptorFactory;
import com.tencent.supersonic.headless.core.pojo.DataSetQueryParam;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils;
import com.tencent.supersonic.headless.server.manager.SemanticSchemaManager;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
@@ -58,8 +59,14 @@ public class QueryReqConverter {
@Autowired
private SqlGenerateUtils sqlGenerateUtils;
public QueryStatement convert(QuerySqlReq querySQLReq, SemanticSchemaResp semanticSchemaResp)
throws Exception {
@Autowired
private QueryUtils queryUtils;
@Autowired
private SemanticSchemaManager semanticSchemaManager;
public QueryStatement buildQueryStatement(QuerySqlReq querySQLReq,
SemanticSchemaResp semanticSchemaResp) {
if (semanticSchemaResp == null) {
return new QueryStatement();
@@ -87,17 +94,14 @@ public class QueryReqConverter {
List<String> metrics =
metricSchemas.stream().map(m -> m.getBizName()).collect(Collectors.toList());
QueryStructReq queryStructReq = new QueryStructReq();
MetricTable metricTable = new MetricTable();
metricTable.setMetrics(metrics);
metricTable.getMetrics().addAll(metrics);
Set<String> dimensions = getDimensions(semanticSchemaResp, allFields);
metricTable.setDimensions(new ArrayList<>(dimensions));
metricTable.getDimensions().addAll(dimensions);
metricTable.setAlias(tableName.toLowerCase());
// if metrics are empty, fill in the model's default metric
if (CollectionUtils.isEmpty(metricTable.getMetrics())) {
metricTable.setMetrics(new ArrayList<>());
metricTable.getMetrics().add(sqlGenerateUtils.generateInternalMetricName(
getDefaultModel(semanticSchemaResp, metricTable.getDimensions())));
} else {
@@ -122,14 +126,15 @@ public class QueryReqConverter {
}
// 7. do deriveMetric
generateDerivedMetric(semanticSchemaResp, aggOption, result);
// 8.physicalSql by ParseSqlReq
// 8.physicalSql by ParseSqlReq
queryStructReq.setDateInfo(queryStructUtils.getDateConfBySql(querySQLReq.getSql()));
queryStructReq.setDataSetId(querySQLReq.getDataSetId());
queryStructReq.setQueryType(getQueryType(aggOption));
log.debug("QueryReqConverter queryStructReq[{}]", queryStructReq);
QueryParam queryParam = new QueryParam();
convert(queryStructReq, queryParam);
BeanUtils.copyProperties(queryStructReq, queryParam);
QueryStatement queryStatement = new QueryStatement();
queryStatement.setQueryParam(queryParam);
queryStatement.setDataSetQueryParam(result);
@@ -137,17 +142,14 @@ public class QueryReqConverter {
queryStatement.setMinMaxTime(queryStructUtils.getBeginEndTime(queryStructReq));
queryStatement.setDataSetId(querySQLReq.getDataSetId());
queryStatement.setLimit(querySQLReq.getLimit());
queryStatement.setModelIds(querySQLReq.getModelIds());
queryStatement.setEnableOptimize(queryUtils.enableOptimize());
queryStatement.setSemanticSchemaResp(semanticSchemaResp);
queryStatement.setOntology(semanticSchemaManager.buildOntology(semanticSchemaResp));
return queryStatement;
}
public void convert(QueryStructReq queryStructReq, QueryParam queryParam) {
BeanUtils.copyProperties(queryStructReq, queryParam);
queryParam.setOrders(queryStructReq.getOrders());
queryParam.setMetrics(queryStructReq.getMetrics());
queryParam.setGroups(queryStructReq.getGroups());
}
private AggOption getAggOption(QuerySqlReq databaseReq, List<MetricSchemaResp> metricSchemas) {
String sql = databaseReq.getSql();
if (!SqlSelectFunctionHelper.hasAggregateFunction(sql) && !SqlSelectHelper.hasGroupBy(sql)