Mirror of https://github.com/tencentmusic/supersonic.git (synced 2025-12-13 04:57:28 +00:00)
[improvement][Chat] Support agent permission management (#1923)
* [improvement][Chat] Support agent permission management #1143
* [improvement][chat] Iterate LLM prompts of parsing and correction.
* [improvement][headless-fe] Added null-check conditions to the data formatting function.
* [improvement][headless] Clean code logic of headless translator.

---------

Co-authored-by: lxwcodemonkey <jolunoluo@tencent.com>
Co-authored-by: tristanliu <tristanliu@tencent.com>
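Before the hunks: the central permission change is that model/domain visibility is now resolved with AuthType.VIEWER instead of AuthType.VISIBLE. A minimal consolidated sketch of the updated check in S2DataPermissionAspect follows; the type and method names are taken from the diff, while the local variable names and the rejection branch are illustrative only, since the first hunk ends before the method body does.

public void checkModelVisible(User user, Set<Long> modelIds) {
    // Models the user may view under the new VIEWER auth type.
    List<Long> visibleModelIds = modelService.getModelListWithAuth(user, null, AuthType.VIEWER)
            .stream().map(ModelResp::getId).collect(Collectors.toList());
    // Whatever remains after removing the visible ids is not permitted for this user.
    List<Long> notVisible = new ArrayList<>(modelIds);
    notVisible.removeAll(visibleModelIds);
    if (!CollectionUtils.isEmpty(notVisible)) {
        // Assumed: reject the request; the concrete exception type is not shown in this hunk.
        throw new RuntimeException("models not visible to current user: " + notVisible);
    }
}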
@@ -260,9 +260,8 @@ public class S2DataPermissionAspect {
}

public void checkModelVisible(User user, Set<Long> modelIds) {
List<Long> modelListVisible =
modelService.getModelListWithAuth(user, null, AuthType.VISIBLE).stream()
.map(ModelResp::getId).collect(Collectors.toList());
List<Long> modelListVisible = modelService.getModelListWithAuth(user, null, AuthType.VIEWER)
.stream().map(ModelResp::getId).collect(Collectors.toList());
List<Long> modelIdCopied = new ArrayList<>(modelIds);
modelIdCopied.removeAll(modelListVisible);
if (!CollectionUtils.isEmpty(modelIdCopied)) {
@@ -34,7 +34,6 @@ import com.tencent.supersonic.headless.core.cache.QueryCache;
import com.tencent.supersonic.headless.core.executor.QueryExecutor;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.SemanticTranslator;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.SemanticModel;
import com.tencent.supersonic.headless.core.utils.ComponentFactory;
import com.tencent.supersonic.headless.server.annotation.S2DataPermission;
import com.tencent.supersonic.headless.server.facade.service.SemanticLayerService;
@@ -44,13 +43,13 @@ import com.tencent.supersonic.headless.server.service.DimensionService;
import com.tencent.supersonic.headless.server.service.MetricService;
import com.tencent.supersonic.headless.server.service.SchemaService;
import com.tencent.supersonic.headless.server.utils.MetricDrillDownChecker;
import com.tencent.supersonic.headless.server.utils.QueryReqConverter;
import com.tencent.supersonic.headless.server.utils.QueryUtils;
import com.tencent.supersonic.headless.server.utils.StatUtils;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;

import java.util.ArrayList;
@@ -67,7 +66,6 @@ public class S2SemanticLayerService implements SemanticLayerService {

private final StatUtils statUtils;
private final QueryUtils queryUtils;
private final QueryReqConverter queryReqConverter;
private final SemanticSchemaManager semanticSchemaManager;
private final DataSetService dataSetService;
private final SchemaService schemaService;
@@ -80,14 +78,13 @@ public class S2SemanticLayerService implements SemanticLayerService {
private final List<QueryExecutor> queryExecutors = ComponentFactory.getQueryExecutors();

public S2SemanticLayerService(StatUtils statUtils, QueryUtils queryUtils,
QueryReqConverter queryReqConverter, SemanticSchemaManager semanticSchemaManager,
DataSetService dataSetService, SchemaService schemaService,
SemanticTranslator semanticTranslator, MetricDrillDownChecker metricDrillDownChecker,
SemanticSchemaManager semanticSchemaManager, DataSetService dataSetService,
SchemaService schemaService, SemanticTranslator semanticTranslator,
MetricDrillDownChecker metricDrillDownChecker,
KnowledgeBaseService knowledgeBaseService, MetricService metricService,
DimensionService dimensionService) {
this.statUtils = statUtils;
this.queryUtils = queryUtils;
this.queryReqConverter = queryReqConverter;
this.semanticSchemaManager = semanticSchemaManager;
this.dataSetService = dataSetService;
this.schemaService = schemaService;
@@ -122,7 +119,6 @@ public class S2SemanticLayerService implements SemanticLayerService {
statUtils.initStatInfo(queryReq, user);

// 2.query from cache

String cacheKey = queryCache.getCacheKey(queryReq);
Object query = queryCache.query(queryReq, cacheKey);
if (Objects.nonNull(query)) {
@@ -136,16 +132,16 @@ public class S2SemanticLayerService implements SemanticLayerService {
}
StatUtils.get().setUseResultCache(false);

// 3 query
// 3 translate query
QueryStatement queryStatement = buildQueryStatement(queryReq, user);
semanticTranslator.translate(queryStatement);

// Check whether the dimensions of the metric drill-down are correct temporarily,
// add the abstraction of a validator later.
metricDrillDownChecker.checkQuery(queryStatement);

// 4.execute query
SemanticQueryResp queryResp = null;

// skip translation if already done.
if (!queryStatement.isTranslated()) {
semanticTranslator.translate(queryStatement);
}
queryPreCheck(queryStatement);

for (QueryExecutor queryExecutor : queryExecutors) {
if (queryExecutor.accept(queryStatement)) {
queryResp = queryExecutor.execute(queryStatement);
@@ -154,7 +150,7 @@ public class S2SemanticLayerService implements SemanticLayerService {
}
}

// 4 reset cache and set stateInfo
// 5.reset cache and set stateInfo
Boolean setCacheSuccess = queryCache.put(cacheKey, queryResp);
if (setCacheSuccess) {
// if result is not null, update cache data
@@ -185,7 +181,7 @@ public class S2SemanticLayerService implements SemanticLayerService {

List<String> dimensionValues = getDimensionValuesFromDict(dimensionValueReq, dataSetIds);

// If the search results are null, search dimensionValue from the database
// try to query dimensionValue from the database.
if (CollectionUtils.isEmpty(dimensionValues)) {
return getDimensionValuesFromDb(dimensionValueReq, user);
}
@@ -218,9 +214,29 @@ public class S2SemanticLayerService implements SemanticLayerService {
.map(MapResult::getName).collect(Collectors.toList());
}

private SemanticQueryResp getDimensionValuesFromDb(DimensionValueReq dimensionValueReq,
private SemanticQueryResp getDimensionValuesFromDb(DimensionValueReq queryDimValueReq,
User user) {
QuerySqlReq querySqlReq = buildQuerySqlReq(dimensionValueReq);
QuerySqlReq querySqlReq = new QuerySqlReq();
List<ModelResp> modelResps =
schemaService.getModelList(Lists.newArrayList(queryDimValueReq.getModelId()));
DimensionResp dimensionResp = schemaService.getDimension(queryDimValueReq.getBizName(),
queryDimValueReq.getModelId());
ModelResp modelResp = modelResps.get(0);
String sql = String.format("select distinct %s from %s where 1=1", dimensionResp.getName(),
modelResp.getName());
List<Dim> timeDims = modelResp.getTimeDimension();
if (CollectionUtils.isNotEmpty(timeDims)) {
sql = String.format("%s and %s >= '%s' and %s <= '%s'", sql,
TimeDimensionEnum.DAY.getName(), queryDimValueReq.getDateInfo().getStartDate(),
TimeDimensionEnum.DAY.getName(), queryDimValueReq.getDateInfo().getEndDate());
}
if (StringUtils.isNotBlank(queryDimValueReq.getValue())) {
sql += " AND " + queryDimValueReq.getBizName() + " LIKE '%"
+ queryDimValueReq.getValue() + "%'";
}
querySqlReq.setModelIds(Sets.newHashSet(queryDimValueReq.getModelId()));
querySqlReq.setSql(sql);

return queryByReq(querySqlReq, user);
}
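To make the string assembly in the hunk above concrete: with hypothetical inputs (model name user_visit, a dimension whose name and bizName are both city, filter value bei, a one-week date range, and assuming TimeDimensionEnum.DAY.getName() resolves to sys_imp_date), the method would send roughly this SQL through queryByReq:

select distinct city from user_visit where 1=1 and sys_imp_date >= '2024-01-01' and sys_imp_date <= '2024-01-07' AND city LIKE '%bei%'

Note that the SELECT and FROM parts use the dimension's and model's display names while the LIKE filter uses the dimension's bizName, so the two only coincide when name and bizName match.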
@@ -271,35 +287,16 @@ public class S2SemanticLayerService implements SemanticLayerService {
return metricService.getMetrics(metaFilter);
}

private QueryStatement buildSqlQueryStatement(QuerySqlReq querySqlReq, User user)
throws Exception {
// If dataSetId or DataSetName is empty, parse dataSetId from the SQL
if (querySqlReq.needGetDataSetId()) {
Long dataSetId = dataSetService.getDataSetIdFromSql(querySqlReq.getSql(), user);
querySqlReq.setDataSetId(dataSetId);
}
SchemaFilterReq filter = buildSchemaFilterReq(querySqlReq);
SemanticSchemaResp semanticSchemaResp = schemaService.fetchSemanticSchema(filter);
QueryStatement queryStatement = queryReqConverter.convert(querySqlReq, semanticSchemaResp);
queryStatement.setModelIds(querySqlReq.getModelIds());
queryStatement.setEnableOptimize(queryUtils.enableOptimize());
queryStatement.setSemanticSchemaResp(semanticSchemaResp);
queryStatement.setSemanticModel(semanticSchemaManager.getSemanticModel(semanticSchemaResp));
return queryStatement;
}

private QueryStatement buildQueryStatement(SemanticQueryReq semanticQueryReq, User user)
throws Exception {
private QueryStatement buildQueryStatement(SemanticQueryReq semanticQueryReq, User user) {
QueryStatement queryStatement = null;
if (semanticQueryReq instanceof QuerySqlReq) {
queryStatement = buildSqlQueryStatement((QuerySqlReq) semanticQueryReq, user);
}
if (semanticQueryReq instanceof QueryStructReq) {
queryStatement = buildStructQueryStatement((QueryStructReq) semanticQueryReq);
queryStatement = buildStructQueryStatement(semanticQueryReq);
}
if (semanticQueryReq instanceof QueryMultiStructReq) {
queryStatement =
buildMultiStructQueryStatement((QueryMultiStructReq) semanticQueryReq, user);
queryStatement = buildMultiStructQueryStatement((QueryMultiStructReq) semanticQueryReq);
}
if (Objects.nonNull(queryStatement) && Objects.nonNull(semanticQueryReq.getSqlInfo())
&& StringUtils.isNotBlank(semanticQueryReq.getSqlInfo().getQuerySQL())) {
@@ -310,72 +307,46 @@ public class S2SemanticLayerService implements SemanticLayerService {
return queryStatement;
}

private QueryStatement buildStructQueryStatement(QueryStructReq queryStructReq) {
SchemaFilterReq filter = buildSchemaFilterReq(queryStructReq);
SemanticSchemaResp semanticSchemaResp = schemaService.fetchSemanticSchema(filter);
QueryStatement queryStatement = new QueryStatement();
QueryParam queryParam = new QueryParam();
queryReqConverter.convert(queryStructReq, queryParam);
queryStatement.setQueryParam(queryParam);
queryStatement.setIsS2SQL(false);
queryStatement.setEnableOptimize(queryUtils.enableOptimize());
queryStatement.setDataSetId(queryStructReq.getDataSetId());
queryStatement.setSemanticSchemaResp(semanticSchemaResp);
queryStatement.setSemanticModel(semanticSchemaManager.getSemanticModel(semanticSchemaResp));
private QueryStatement buildSqlQueryStatement(QuerySqlReq querySqlReq, User user) {
// If dataSetId or DataSetName is empty, parse dataSetId from the SQL
if (querySqlReq.needGetDataSetId()) {
Long dataSetId = dataSetService.getDataSetIdFromSql(querySqlReq.getSql(), user);
querySqlReq.setDataSetId(dataSetId);
}

QueryStatement queryStatement = buildStructQueryStatement(querySqlReq);
queryStatement.setIsS2SQL(true);
queryStatement.setSql(querySqlReq.getSql());
return queryStatement;
}

private QueryStatement buildMultiStructQueryStatement(QueryMultiStructReq queryMultiStructReq,
User user) throws Exception {
List<QueryStatement> sqlParsers = new ArrayList<>();
for (QueryStructReq queryStructReq : queryMultiStructReq.getQueryStructReqs()) {
QueryStatement queryStatement = buildQueryStatement(queryStructReq, user);
SemanticModel semanticModel = queryStatement.getSemanticModel();
queryStatement.setModelIds(queryStructReq.getModelIds());
queryStatement.setSemanticModel(semanticModel);
queryStatement.setEnableOptimize(queryUtils.enableOptimize());
semanticTranslator.translate(queryStatement);
sqlParsers.add(queryStatement);
}
log.info("multi sqlParser:{}", sqlParsers);
return queryUtils.sqlParserUnion(queryMultiStructReq, sqlParsers);
}

private SchemaFilterReq buildSchemaFilterReq(SemanticQueryReq semanticQueryReq) {
private QueryStatement buildStructQueryStatement(SemanticQueryReq queryReq) {
SchemaFilterReq schemaFilterReq = new SchemaFilterReq();
schemaFilterReq.setDataSetId(semanticQueryReq.getDataSetId());
schemaFilterReq.setModelIds(semanticQueryReq.getModelIds());
return schemaFilterReq;
schemaFilterReq.setDataSetId(queryReq.getDataSetId());
schemaFilterReq.setModelIds(queryReq.getModelIds());
SemanticSchemaResp semanticSchemaResp = schemaService.fetchSemanticSchema(schemaFilterReq);

QueryStatement queryStatement = new QueryStatement();
QueryParam queryParam = new QueryParam();
BeanUtils.copyProperties(queryReq, queryParam);
queryStatement.setQueryParam(queryParam);
queryStatement.setModelIds(queryReq.getModelIds());
queryStatement.setEnableOptimize(queryUtils.enableOptimize());
queryStatement.setDataSetId(queryReq.getDataSetId());
queryStatement.setSemanticSchemaResp(semanticSchemaResp);
queryStatement.setOntology(semanticSchemaManager.buildOntology(semanticSchemaResp));
return queryStatement;
}

private QuerySqlReq buildQuerySqlReq(DimensionValueReq queryDimValueReq) {
QuerySqlReq querySqlReq = new QuerySqlReq();
List<ModelResp> modelResps =
schemaService.getModelList(Lists.newArrayList(queryDimValueReq.getModelId()));
DimensionResp dimensionResp = schemaService.getDimension(queryDimValueReq.getBizName(),
queryDimValueReq.getModelId());
ModelResp modelResp = modelResps.get(0);
String sql = String.format("select distinct %s from %s where 1=1", dimensionResp.getName(),
modelResp.getName());
List<Dim> timeDims = modelResp.getTimeDimension();
if (CollectionUtils.isNotEmpty(timeDims)) {
sql = String.format("%s and %s >= '%s' and %s <= '%s'", sql,
TimeDimensionEnum.DAY.getName(), queryDimValueReq.getDateInfo().getStartDate(),
TimeDimensionEnum.DAY.getName(), queryDimValueReq.getDateInfo().getEndDate());
private QueryStatement buildMultiStructQueryStatement(QueryMultiStructReq queryMultiStructReq) {
List<QueryStatement> queryStatements = new ArrayList<>();
for (QueryStructReq queryStructReq : queryMultiStructReq.getQueryStructReqs()) {
QueryStatement queryStatement = buildStructQueryStatement(queryStructReq);
semanticTranslator.translate(queryStatement);
queryStatements.add(queryStatement);
}
if (StringUtils.isNotBlank(queryDimValueReq.getValue())) {
sql += " AND " + queryDimValueReq.getBizName() + " LIKE '%"
+ queryDimValueReq.getValue() + "%'";
}
querySqlReq.setModelIds(Sets.newHashSet(queryDimValueReq.getModelId()));
querySqlReq.setSql(sql);
return querySqlReq;
}

private void queryPreCheck(QueryStatement queryStatement) {
// Check whether the dimensions of the metric drill-down are correct temporarily,
// add the abstraction of a validator later.
metricDrillDownChecker.checkQuery(queryStatement);
log.info("Union multiple query statements:{}", queryStatements);
return queryUtils.unionAll(queryMultiStructReq, queryStatements);
}

}
@@ -8,7 +8,7 @@ import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.TagResp;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataSource;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataType;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DimensionTimeTypeParams;
@@ -18,8 +18,8 @@ import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Materializa
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.MetricTypeParams;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.SemanticModel;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology;
import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema;
import com.tencent.supersonic.headless.server.pojo.yaml.DataModelYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.DimensionTimeTypeParamsTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.DimensionYamlTpl;
@@ -57,9 +57,8 @@ public class SemanticSchemaManager {
this.schemaService = schemaService;
}

public SemanticModel getSemanticModel(SemanticSchemaResp semanticSchemaResp) {
SemanticModel semanticModel = new SemanticModel();
semanticModel.setSchemaKey(semanticSchemaResp.getSchemaKey());
public Ontology buildOntology(SemanticSchemaResp semanticSchemaResp) {
Ontology ontology = new Ontology();
Map<String, List<DimensionYamlTpl>> dimensionYamlTpls = new HashMap<>();
List<DataModelYamlTpl> dataModelYamlTpls = new ArrayList<>();
List<MetricYamlTpl> metricYamlTpls = new ArrayList<>();
@@ -67,36 +66,35 @@ public class SemanticSchemaManager {
schemaService.getSchemaYamlTpl(semanticSchemaResp, dimensionYamlTpls, dataModelYamlTpls,
metricYamlTpls, modelIdName);
DatabaseResp databaseResp = semanticSchemaResp.getDatabaseResp();
semanticModel.setDatabase(DatabaseConverter.convert(databaseResp));
ontology.setDatabase(DatabaseConverter.convert(databaseResp));
if (!CollectionUtils.isEmpty(semanticSchemaResp.getModelRelas())) {
semanticModel.setJoinRelations(
ontology.setJoinRelations(
getJoinRelation(semanticSchemaResp.getModelRelas(), modelIdName));
}
if (!dataModelYamlTpls.isEmpty()) {
Map<String, DataSource> dataSourceMap =
dataModelYamlTpls.stream().map(SemanticSchemaManager::getDatasource).collect(
Collectors.toMap(DataSource::getName, item -> item, (k1, k2) -> k1));
semanticModel.setDatasourceMap(dataSourceMap);
Map<String, DataModel> dataModelMap =
dataModelYamlTpls.stream().map(SemanticSchemaManager::getDataModel).collect(
Collectors.toMap(DataModel::getName, item -> item, (k1, k2) -> k1));
ontology.setDataModelMap(dataModelMap);
}
if (!dimensionYamlTpls.isEmpty()) {
Map<String, List<Dimension>> dimensionMap = new HashMap<>();
for (Map.Entry<String, List<DimensionYamlTpl>> entry : dimensionYamlTpls.entrySet()) {
dimensionMap.put(entry.getKey(), getDimensions(entry.getValue()));
}
semanticModel.setDimensionMap(dimensionMap);
ontology.setDimensionMap(dimensionMap);
}
if (!metricYamlTpls.isEmpty()) {
semanticModel.setMetrics(getMetrics(metricYamlTpls));
ontology.setMetrics(getMetrics(metricYamlTpls));
}
return semanticModel;
return ontology;
}

public SemanticModel getTagSemanticModel(SemanticSchemaResp semanticSchemaResp)
throws Exception {
public Ontology getTagSemanticModel(SemanticSchemaResp semanticSchemaResp) throws Exception {
if (CollectionUtils.isEmpty(semanticSchemaResp.getTags())) {
throw new Exception("semanticSchemaResp tag is empty");
}
SemanticModel semanticModel = getSemanticModel(semanticSchemaResp);
Ontology ontology = buildOntology(semanticSchemaResp);
// Map<String, List<Dimension>> dimensions = new HashMap<>();
Map<Long, List<TagResp>> tagMap = new HashMap<>();
for (TagResp tagResp : semanticSchemaResp.getTags()) {
@@ -105,25 +103,23 @@ public class SemanticSchemaManager {
}
tagMap.get(tagResp.getModelId()).add(tagResp);
}
if (Objects.nonNull(semanticModel.getDatasourceMap())
&& !semanticModel.getDatasourceMap().isEmpty()) {
for (Map.Entry<String, DataSource> entry : semanticModel.getDatasourceMap()
.entrySet()) {
if (Objects.nonNull(ontology.getDataModelMap()) && !ontology.getDataModelMap().isEmpty()) {
for (Map.Entry<String, DataModel> entry : ontology.getDataModelMap().entrySet()) {
List<Dimension> modelDimensions = new ArrayList<>();
if (!semanticModel.getDimensionMap().containsKey(entry.getKey())) {
semanticModel.getDimensionMap().put(entry.getKey(), modelDimensions);
if (!ontology.getDimensionMap().containsKey(entry.getKey())) {
ontology.getDimensionMap().put(entry.getKey(), modelDimensions);
} else {
modelDimensions = semanticModel.getDimensionMap().get(entry.getKey());
modelDimensions = ontology.getDimensionMap().get(entry.getKey());
}
if (tagMap.containsKey(entry.getValue().getId())) {
for (TagResp tagResp : tagMap.get(entry.getValue().getId())) {
addTagModel(tagResp, modelDimensions, semanticModel.getMetrics());
addTagModel(tagResp, modelDimensions, ontology.getMetrics());
}
}
}
}

return semanticModel;
return ontology;
}

private void addTagModel(TagResp tagResp, List<Dimension> modelDimensions,
@@ -178,30 +174,30 @@ public class SemanticSchemaManager {
return getDimension(t);
}

public static DataSource getDatasource(final DataModelYamlTpl d) {
DataSource datasource = DataSource.builder().id(d.getId()).sourceId(d.getSourceId())
public static DataModel getDataModel(final DataModelYamlTpl d) {
DataModel dataModel = DataModel.builder().id(d.getId()).modelId(d.getSourceId())
.type(d.getType()).sqlQuery(d.getSqlQuery()).name(d.getName())
.tableQuery(d.getTableQuery()).identifiers(getIdentify(d.getIdentifiers()))
.measures(getMeasureParams(d.getMeasures()))
.dimensions(getDimensions(d.getDimensions())).build();
datasource.setAggTime(getDataSourceAggTime(datasource.getDimensions()));
dataModel.setAggTime(getDataModelAggTime(dataModel.getDimensions()));
if (Objects.nonNull(d.getModelSourceTypeEnum())) {
datasource.setTimePartType(TimePartType.of(d.getModelSourceTypeEnum().name()));
dataModel.setTimePartType(TimePartType.of(d.getModelSourceTypeEnum().name()));
}
if (Objects.nonNull(d.getFields()) && !CollectionUtils.isEmpty(d.getFields())) {
Set<String> measures = datasource.getMeasures().stream().map(mm -> mm.getName())
Set<String> measures = dataModel.getMeasures().stream().map(mm -> mm.getName())
.collect(Collectors.toSet());
for (Field f : d.getFields()) {
if (!measures.contains(f.getFieldName())) {
datasource.getMeasures().add(Measure.builder().expr(f.getFieldName())
dataModel.getMeasures().add(Measure.builder().expr(f.getFieldName())
.name(f.getFieldName()).agg("").build());
}
}
}
return datasource;
return dataModel;
}

private static String getDataSourceAggTime(List<Dimension> dimensions) {
private static String getDataModelAggTime(List<Dimension> dimensions) {
Optional<Dimension> timeDimension = dimensions.stream()
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
.findFirst();
@@ -356,39 +352,39 @@ public class SemanticSchemaManager {
return joinRelations;
}

public static void update(SemanticSchema schema, List<Metric> metric) throws Exception {
public static void update(S2CalciteSchema schema, List<Metric> metric) throws Exception {
if (schema != null) {
updateMetric(metric, schema.getMetrics());
}
}

public static void update(SemanticSchema schema, DataSource datasourceYamlTpl)
public static void update(S2CalciteSchema schema, DataModel datasourceYamlTpl)
throws Exception {
if (schema != null) {
String dataSourceName = datasourceYamlTpl.getName();
Optional<Entry<String, DataSource>> datasourceYamlTplMap =
schema.getDatasource().entrySet().stream()
Optional<Entry<String, DataModel>> datasourceYamlTplMap =
schema.getDataModels().entrySet().stream()
.filter(t -> t.getKey().equalsIgnoreCase(dataSourceName)).findFirst();
if (datasourceYamlTplMap.isPresent()) {
datasourceYamlTplMap.get().setValue(datasourceYamlTpl);
} else {
schema.getDatasource().put(dataSourceName, datasourceYamlTpl);
schema.getDataModels().put(dataSourceName, datasourceYamlTpl);
}
}
}

public static void update(SemanticSchema schema, String datasourceBizName,
public static void update(S2CalciteSchema schema, String datasourceBizName,
List<Dimension> dimensionYamlTpls) throws Exception {
if (schema != null) {
Optional<Map.Entry<String, List<Dimension>>> datasourceYamlTplMap = schema
.getDimension().entrySet().stream()
.getDimensions().entrySet().stream()
.filter(t -> t.getKey().equalsIgnoreCase(datasourceBizName)).findFirst();
if (datasourceYamlTplMap.isPresent()) {
updateDimension(dimensionYamlTpls, datasourceYamlTplMap.get().getValue());
} else {
List<Dimension> dimensions = new ArrayList<>();
updateDimension(dimensionYamlTpls, dimensions);
schema.getDimension().put(datasourceBizName, dimensions);
schema.getDimensions().put(datasourceBizName, dimensions);
}
}
}
@@ -126,7 +126,7 @@ public class DomainServiceImpl implements DomainService {
return domainWithAuth.stream().peek(domainResp -> domainResp.setHasEditPermission(true))
.collect(Collectors.toSet());
}
if (authTypeEnum.equals(AuthType.VISIBLE)) {
if (authTypeEnum.equals(AuthType.VIEWER)) {
domainWithAuth = domainResps.stream()
.filter(domainResp -> checkViewPermission(orgIds, user, domainResp))
.collect(Collectors.toSet());
@@ -428,7 +428,7 @@ public class ModelServiceImpl implements ModelService {
.filter(modelResp -> checkAdminPermission(orgIds, user, modelResp))
.collect(Collectors.toList());
}
if (authTypeEnum.equals(AuthType.VISIBLE)) {
if (authTypeEnum.equals(AuthType.VIEWER)) {
modelWithAuth = modelResps.stream()
.filter(domainResp -> checkDataSetPermission(orgIds, user, domainResp))
.collect(Collectors.toList());
@@ -32,7 +32,7 @@ public class MetricDrillDownChecker {

public void checkQuery(QueryStatement queryStatement) {
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp();
String sql = queryStatement.getDataSetQueryParam().getSql();
String sql = queryStatement.getSql();
if (StringUtils.isBlank(sql)) {
return;
}
@@ -1,376 +0,0 @@
package com.tencent.supersonic.headless.server.utils;

import com.tencent.supersonic.common.jsqlparser.SqlRemoveHelper;
import com.tencent.supersonic.common.jsqlparser.SqlReplaceHelper;
import com.tencent.supersonic.common.jsqlparser.SqlSelectFunctionHelper;
import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.common.pojo.enums.AggOperatorEnum;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.common.pojo.enums.QueryType;
import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
import com.tencent.supersonic.headless.api.pojo.Measure;
import com.tencent.supersonic.headless.api.pojo.MetricTable;
import com.tencent.supersonic.headless.api.pojo.QueryParam;
import com.tencent.supersonic.headless.api.pojo.SchemaItem;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.api.pojo.enums.MetricType;
import com.tencent.supersonic.headless.api.pojo.request.QuerySqlReq;
import com.tencent.supersonic.headless.api.pojo.request.QueryStructReq;
import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.core.adaptor.db.DbAdaptor;
import com.tencent.supersonic.headless.core.adaptor.db.DbAdaptorFactory;
import com.tencent.supersonic.headless.core.pojo.DataSetQueryParam;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;

import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;

@Component
@Slf4j
public class QueryReqConverter {

@Autowired
private QueryStructUtils queryStructUtils;

@Autowired
private SqlGenerateUtils sqlGenerateUtils;

public QueryStatement convert(QuerySqlReq querySQLReq, SemanticSchemaResp semanticSchemaResp)
throws Exception {

if (semanticSchemaResp == null) {
return new QueryStatement();
}
// 1.convert name to bizName
convertNameToBizName(querySQLReq, semanticSchemaResp);
// 2.functionName corrector
functionNameCorrector(querySQLReq, semanticSchemaResp);
// 3.correct tableName
correctTableName(querySQLReq);
// 4.remove Underscores
querySQLReq.setSql(SqlRemoveHelper.removeUnderscores(querySQLReq.getSql()));

String tableName = SqlSelectHelper.getTableName(querySQLReq.getSql());
if (StringUtils.isEmpty(tableName)) {
return new QueryStatement();
}
// correct order item is same as agg alias
String reqSql = querySQLReq.getSql();
querySQLReq.setSql(SqlReplaceHelper.replaceAggAliasOrderItem(querySQLReq.getSql()));
log.debug("replaceOrderAggSameAlias {} -> {}", reqSql, querySQLReq.getSql());
// 5.build MetricTables
List<String> allFields = SqlSelectHelper.getAllSelectFields(querySQLReq.getSql());
List<MetricSchemaResp> metricSchemas = getMetrics(semanticSchemaResp, allFields);
List<String> metrics =
metricSchemas.stream().map(m -> m.getBizName()).collect(Collectors.toList());
QueryStructReq queryStructReq = new QueryStructReq();
MetricTable metricTable = new MetricTable();
metricTable.setMetrics(metrics);

Set<String> dimensions = getDimensions(semanticSchemaResp, allFields);

metricTable.setDimensions(new ArrayList<>(dimensions));

metricTable.setAlias(tableName.toLowerCase());
// if metric empty , fill model default
if (CollectionUtils.isEmpty(metricTable.getMetrics())) {
metricTable.setMetrics(new ArrayList<>());
metricTable.getMetrics().add(sqlGenerateUtils.generateInternalMetricName(
getDefaultModel(semanticSchemaResp, metricTable.getDimensions())));
} else {
queryStructReq.setAggregators(metricTable.getMetrics().stream()
.map(m -> new Aggregator(m, AggOperatorEnum.UNKNOWN))
.collect(Collectors.toList()));
}
AggOption aggOption = getAggOption(querySQLReq, metricSchemas);
metricTable.setAggOption(aggOption);
List<MetricTable> tables = new ArrayList<>();
tables.add(metricTable);
// 6.build ParseSqlReq
DataSetQueryParam result = new DataSetQueryParam();
BeanUtils.copyProperties(querySQLReq, result);

result.setTables(tables);
DatabaseResp database = semanticSchemaResp.getDatabaseResp();
if (!sqlGenerateUtils.isSupportWith(EngineType.fromString(database.getType().toUpperCase()),
database.getVersion())) {
result.setSupportWith(false);
result.setWithAlias(false);
}
// 7. do deriveMetric
generateDerivedMetric(semanticSchemaResp, aggOption, result);
// 8.physicalSql by ParseSqlReq

queryStructReq.setDateInfo(queryStructUtils.getDateConfBySql(querySQLReq.getSql()));
queryStructReq.setDataSetId(querySQLReq.getDataSetId());
queryStructReq.setQueryType(getQueryType(aggOption));
log.debug("QueryReqConverter queryStructReq[{}]", queryStructReq);
QueryParam queryParam = new QueryParam();
convert(queryStructReq, queryParam);
QueryStatement queryStatement = new QueryStatement();
queryStatement.setQueryParam(queryParam);
queryStatement.setDataSetQueryParam(result);
queryStatement.setIsS2SQL(true);
queryStatement.setMinMaxTime(queryStructUtils.getBeginEndTime(queryStructReq));
queryStatement.setDataSetId(querySQLReq.getDataSetId());
queryStatement.setLimit(querySQLReq.getLimit());

return queryStatement;
}
public void convert(QueryStructReq queryStructReq, QueryParam queryParam) {
BeanUtils.copyProperties(queryStructReq, queryParam);
queryParam.setOrders(queryStructReq.getOrders());
queryParam.setMetrics(queryStructReq.getMetrics());
queryParam.setGroups(queryStructReq.getGroups());
}

private AggOption getAggOption(QuerySqlReq databaseReq, List<MetricSchemaResp> metricSchemas) {
String sql = databaseReq.getSql();
if (!SqlSelectFunctionHelper.hasAggregateFunction(sql) && !SqlSelectHelper.hasGroupBy(sql)
&& !SqlSelectHelper.hasWith(sql) && !SqlSelectHelper.hasSubSelect(sql)) {
log.debug("getAggOption simple sql set to DEFAULT");
return AggOption.DEFAULT;
}
// if there is no group by in S2SQL,set MetricTable's aggOption to "NATIVE"
// if there is count() in S2SQL,set MetricTable's aggOption to "NATIVE"
if (!SqlSelectFunctionHelper.hasAggregateFunction(sql)
|| SqlSelectFunctionHelper.hasFunction(sql, "count")
|| SqlSelectFunctionHelper.hasFunction(sql, "count_distinct")) {
return AggOption.OUTER;
}
if (databaseReq.isInnerLayerNative()) {
return AggOption.NATIVE;
}
if (SqlSelectHelper.hasSubSelect(sql) || SqlSelectHelper.hasWith(sql)
|| SqlSelectHelper.hasGroupBy(sql)) {
return AggOption.OUTER;
}
long defaultAggNullCnt = metricSchemas.stream().filter(
m -> Objects.isNull(m.getDefaultAgg()) || StringUtils.isBlank(m.getDefaultAgg()))
.count();
if (defaultAggNullCnt > 0) {
log.debug("getAggOption find null defaultAgg metric set to NATIVE");
return AggOption.OUTER;
}
return AggOption.DEFAULT;
}

private void convertNameToBizName(QuerySqlReq querySqlReq,
SemanticSchemaResp semanticSchemaResp) {
Map<String, String> fieldNameToBizNameMap = getFieldNameToBizNameMap(semanticSchemaResp);
String sql = querySqlReq.getSql();
log.debug("dataSetId:{},convert name to bizName before:{}", querySqlReq.getDataSetId(),
sql);
sql = SqlReplaceHelper.replaceSqlByPositions(sql);
log.debug("replaceSqlByPositions:{}", sql);
String replaceFields = SqlReplaceHelper.replaceFields(sql, fieldNameToBizNameMap, true);
log.debug("dataSetId:{},convert name to bizName after:{}", querySqlReq.getDataSetId(),
replaceFields);
querySqlReq.setSql(replaceFields);
}

private Set<String> getDimensions(SemanticSchemaResp semanticSchemaResp,
List<String> allFields) {
Map<String, String> dimensionLowerToNameMap = semanticSchemaResp.getDimensions().stream()
.collect(Collectors.toMap(entry -> entry.getBizName().toLowerCase(),
SchemaItem::getBizName, (k1, k2) -> k1));
Map<String, String> internalLowerToNameMap = QueryStructUtils.internalCols.stream()
.collect(Collectors.toMap(String::toLowerCase, a -> a));
dimensionLowerToNameMap.putAll(internalLowerToNameMap);
return allFields.stream()
.filter(entry -> dimensionLowerToNameMap.containsKey(entry.toLowerCase()))
.map(entry -> dimensionLowerToNameMap.get(entry.toLowerCase()))
.collect(Collectors.toSet());
}

private List<MetricSchemaResp> getMetrics(SemanticSchemaResp semanticSchemaResp,
List<String> allFields) {
Map<String, MetricSchemaResp> metricLowerToNameMap =
semanticSchemaResp.getMetrics().stream().collect(Collectors
.toMap(entry -> entry.getBizName().toLowerCase(), entry -> entry));
return allFields.stream()
.filter(entry -> metricLowerToNameMap.containsKey(entry.toLowerCase()))
.map(entry -> metricLowerToNameMap.get(entry.toLowerCase()))
.collect(Collectors.toList());
}

private void functionNameCorrector(QuerySqlReq databaseReq,
SemanticSchemaResp semanticSchemaResp) {
DatabaseResp database = semanticSchemaResp.getDatabaseResp();
if (Objects.isNull(database) || Objects.isNull(database.getType())) {
return;
}
String type = database.getType();
DbAdaptor engineAdaptor = DbAdaptorFactory.getEngineAdaptor(type.toLowerCase());
if (Objects.nonNull(engineAdaptor)) {
String functionNameCorrector =
engineAdaptor.functionNameCorrector(databaseReq.getSql());
databaseReq.setSql(functionNameCorrector);
}
}

protected Map<String, String> getFieldNameToBizNameMap(SemanticSchemaResp semanticSchemaResp) {
// support fieldName and field alias to bizName
Map<String, String> dimensionResults = semanticSchemaResp.getDimensions().stream().flatMap(
entry -> getPairStream(entry.getAlias(), entry.getName(), entry.getBizName()))
.collect(Collectors.toMap(Pair::getLeft, Pair::getRight, (k1, k2) -> k1));

Map<String, String> metricResults = semanticSchemaResp.getMetrics().stream().flatMap(
entry -> getPairStream(entry.getAlias(), entry.getName(), entry.getBizName()))
.collect(Collectors.toMap(Pair::getLeft, Pair::getRight, (k1, k2) -> k1));

dimensionResults.putAll(TimeDimensionEnum.getChNameToNameMap());
dimensionResults.putAll(TimeDimensionEnum.getNameToNameMap());
dimensionResults.putAll(metricResults);
return dimensionResults;
}

private Stream<Pair<String, String>> getPairStream(String aliasStr, String name,
String bizName) {
Set<Pair<String, String>> elements = new HashSet<>();
elements.add(Pair.of(name, bizName));
if (StringUtils.isNotBlank(aliasStr)) {
List<String> aliasList = SchemaItem.getAliasList(aliasStr);
for (String alias : aliasList) {
elements.add(Pair.of(alias, bizName));
}
}
return elements.stream();
}

public void correctTableName(QuerySqlReq querySqlReq) {
String sql = querySqlReq.getSql();
sql = SqlReplaceHelper.replaceTable(sql,
Constants.TABLE_PREFIX + querySqlReq.getDataSetId());
log.debug("correctTableName after:{}", sql);
querySqlReq.setSql(sql);
}

private QueryType getQueryType(AggOption aggOption) {
boolean isAgg = AggOption.isAgg(aggOption);
QueryType queryType = QueryType.DETAIL;
if (isAgg) {
queryType = QueryType.AGGREGATE;
}
return queryType;
}
private void generateDerivedMetric(SemanticSchemaResp semanticSchemaResp, AggOption aggOption,
DataSetQueryParam viewQueryParam) {
String sql = viewQueryParam.getSql();
for (MetricTable metricTable : viewQueryParam.getTables()) {
Set<String> measures = new HashSet<>();
Map<String, String> replaces = generateDerivedMetric(semanticSchemaResp, aggOption,
metricTable.getMetrics(), metricTable.getDimensions(), measures);

if (!CollectionUtils.isEmpty(replaces)) {
// metricTable sql use measures replace metric
sql = SqlReplaceHelper.replaceSqlByExpression(sql, replaces);
metricTable.setAggOption(AggOption.NATIVE);
// metricTable use measures replace metric
if (!CollectionUtils.isEmpty(measures)) {
metricTable.setMetrics(new ArrayList<>(measures));
} else {
// empty measure , fill default
metricTable.setMetrics(new ArrayList<>());
metricTable.getMetrics().add(sqlGenerateUtils.generateInternalMetricName(
getDefaultModel(semanticSchemaResp, metricTable.getDimensions())));
}
}
}
viewQueryParam.setSql(sql);
}

private Map<String, String> generateDerivedMetric(SemanticSchemaResp semanticSchemaResp,
AggOption aggOption, List<String> metrics, List<String> dimensions,
Set<String> measures) {
Map<String, String> result = new HashMap<>();
List<MetricSchemaResp> metricResps = semanticSchemaResp.getMetrics();
List<DimSchemaResp> dimensionResps = semanticSchemaResp.getDimensions();

// Check if any metric is derived
boolean hasDerivedMetrics =
metricResps.stream().anyMatch(m -> metrics.contains(m.getBizName()) && MetricType
.isDerived(m.getMetricDefineType(), m.getMetricDefineByMeasureParams()));
if (!hasDerivedMetrics) {
return result;
}

log.debug("begin to generateDerivedMetric {} [{}]", aggOption, metrics);

Set<String> allFields = new HashSet<>();
Map<String, Measure> allMeasures = new HashMap<>();
semanticSchemaResp.getModelResps().forEach(modelResp -> {
allFields.addAll(modelResp.getFieldList());
if (modelResp.getModelDetail().getMeasures() != null) {
modelResp.getModelDetail().getMeasures()
.forEach(measure -> allMeasures.put(measure.getBizName(), measure));
}
});

Set<String> derivedDimensions = new HashSet<>();
Set<String> derivedMetrics = new HashSet<>();
Map<String, String> visitedMetrics = new HashMap<>();

for (MetricResp metricResp : metricResps) {
if (metrics.contains(metricResp.getBizName())) {
boolean isDerived = MetricType.isDerived(metricResp.getMetricDefineType(),
metricResp.getMetricDefineByMeasureParams());
if (isDerived) {
String expr = sqlGenerateUtils.generateDerivedMetric(metricResps, allFields,
allMeasures, dimensionResps, sqlGenerateUtils.getExpr(metricResp),
metricResp.getMetricDefineType(), aggOption, visitedMetrics,
derivedMetrics, derivedDimensions);
result.put(metricResp.getBizName(), expr);
log.debug("derived metric {}->{}", metricResp.getBizName(), expr);
} else {
measures.add(metricResp.getBizName());
}
}
}

measures.addAll(derivedMetrics);
derivedDimensions.stream().filter(dimension -> !dimensions.contains(dimension))
.forEach(dimensions::add);

return result;
}

private String getDefaultModel(SemanticSchemaResp semanticSchemaResp, List<String> dimensions) {
if (!CollectionUtils.isEmpty(dimensions)) {
Map<String, Long> modelMatchCnt = new HashMap<>();
for (ModelResp modelResp : semanticSchemaResp.getModelResps()) {
modelMatchCnt.put(modelResp.getBizName(), modelResp.getModelDetail().getDimensions()
.stream().filter(d -> dimensions.contains(d.getBizName())).count());
}
return modelMatchCnt.entrySet().stream()
.sorted(Map.Entry.comparingByValue(Comparator.reverseOrder()))
.map(m -> m.getKey()).findFirst().orElse("");
}
return semanticSchemaResp.getModelResps().get(0).getBizName();
}
}
@@ -140,15 +140,15 @@ public class QueryUtils {
return null;
}

public QueryStatement sqlParserUnion(QueryMultiStructReq queryMultiStructCmd,
List<QueryStatement> sqlParsers) {
public QueryStatement unionAll(QueryMultiStructReq queryMultiStructCmd,
List<QueryStatement> queryStatements) {
QueryStatement sqlParser = new QueryStatement();
StringBuilder unionSqlBuilder = new StringBuilder();
for (int i = 0; i < sqlParsers.size(); i++) {
for (int i = 0; i < queryStatements.size(); i++) {
String selectStr = SqlGenerateUtils
.getUnionSelect(queryMultiStructCmd.getQueryStructReqs().get(i));
unionSqlBuilder.append(String.format("select %s from ( %s ) sub_sql_%s", selectStr,
sqlParsers.get(i).getSql(), i));
queryStatements.get(i).getSql(), i));
unionSqlBuilder.append(UNIONALL);
}
String unionSql = unionSqlBuilder.substring(0,
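For reference, the renamed unionAll keeps the same construction as sqlParserUnion: each translated sub-statement is wrapped as a derived table and the pieces are joined with the UNIONALL separator, whose trailing occurrence is trimmed by the substring call above. With two sub-statements, and assuming UNIONALL renders as a literal "union all", the generated SQL has roughly this shape (placeholder select lists come from getUnionSelect, placeholder inner SQL from each QueryStatement):

select <select_0> from ( <sub_sql_0> ) sub_sql_0 union all select <select_1> from ( <sub_sql_1> ) sub_sql_1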
@@ -6,8 +6,8 @@ import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.api.pojo.response.SqlParserResp;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.calcite.planner.AggPlanner;
import com.tencent.supersonic.headless.core.translator.calcite.schema.SemanticSchema;
import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.calcite.sql.SqlBuilder;
import com.tencent.supersonic.headless.server.manager.SemanticSchemaManager;
import com.tencent.supersonic.headless.server.pojo.yaml.DataModelYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.DimensionTimeTypeParamsTpl;
@@ -20,16 +20,12 @@ import lombok.extern.slf4j.Slf4j;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

@Slf4j
class HeadlessParserServiceTest {

private static Map<String, SemanticSchema> headlessSchemaMap = new HashMap<>();

public static SqlParserResp parser(SemanticSchema semanticSchema,
public static SqlParserResp parser(S2CalciteSchema semanticSchema,
MetricQueryParam metricQueryParam, boolean isAgg) {
SqlParserResp sqlParser = new SqlParserResp();
try {
@@ -37,14 +33,13 @@ class HeadlessParserServiceTest {
sqlParser.setErrMsg("headlessSchema not found");
return sqlParser;
}
AggPlanner aggBuilder = new AggPlanner(semanticSchema);
SqlBuilder aggBuilder = new SqlBuilder(semanticSchema);
QueryStatement queryStatement = new QueryStatement();
queryStatement.setMetricQueryParam(metricQueryParam);
aggBuilder.explain(queryStatement, AggOption.getAggregation(!isAgg));
EngineType engineType = EngineType
.fromString(semanticSchema.getSemanticModel().getDatabase().getType());
aggBuilder.build(queryStatement, AggOption.getAggregation(!isAgg));
EngineType engineType =
EngineType.fromString(semanticSchema.getOntology().getDatabase().getType());
sqlParser.setSql(aggBuilder.getSql(engineType));
sqlParser.setSourceId(aggBuilder.getSourceId());
} catch (Exception e) {
sqlParser.setErrMsg(e.getMessage());
log.error("parser error metricQueryReq[{}] error [{}]", metricQueryParam, e);
@@ -122,10 +117,10 @@ class HeadlessParserServiceTest {
identify.setType("primary");
identifies.add(identify);
datasource.setIdentifiers(identifies);
SemanticSchema semanticSchema = SemanticSchema.newBuilder("1").build();
S2CalciteSchema semanticSchema = S2CalciteSchema.builder().build();

SemanticSchemaManager.update(semanticSchema,
SemanticSchemaManager.getDatasource(datasource));
SemanticSchemaManager.getDataModel(datasource));

DimensionYamlTpl dimension1 = new DimensionYamlTpl();
dimension1.setExpr("page");
@@ -192,7 +187,7 @@ class HeadlessParserServiceTest {
System.out.println(parser(semanticSchema, metricCommand2, true));
}

private static void addDepartment(SemanticSchema semanticSchema) {
private static void addDepartment(S2CalciteSchema semanticSchema) {
DataModelYamlTpl datasource = new DataModelYamlTpl();
datasource.setName("user_department");
datasource.setSourceId(1L);
@@ -238,8 +233,8 @@ class HeadlessParserServiceTest {
identifies.add(identify);
datasource.setIdentifiers(identifies);

semanticSchema.getDatasource().put("user_department",
SemanticSchemaManager.getDatasource(datasource));
semanticSchema.getDataModels().put("user_department",
SemanticSchemaManager.getDataModel(datasource));

DimensionYamlTpl dimension1 = new DimensionYamlTpl();
dimension1.setExpr("department");
@@ -248,7 +243,7 @@ class HeadlessParserServiceTest {
List<DimensionYamlTpl> dimensionYamlTpls = new ArrayList<>();
dimensionYamlTpls.add(dimension1);

semanticSchema.getDimension().put("user_department",
semanticSchema.getDimensions().put("user_department",
SemanticSchemaManager.getDimensions(dimensionYamlTpls));
}
}