first commit

This commit is contained in:
jerryjzhang
2023-06-12 18:44:01 +08:00
commit dc4fc69b57
879 changed files with 573090 additions and 0 deletions

View File

@@ -0,0 +1,112 @@
package com.tencent.supersonic.semantic.query.application;
import com.tencent.supersonic.semantic.api.core.response.SqlParserResp;
import com.tencent.supersonic.semantic.api.query.pojo.MetricTable;
import com.tencent.supersonic.semantic.api.query.request.MetricReq;
import com.tencent.supersonic.semantic.api.query.request.ParseSqlReq;
import com.tencent.supersonic.semantic.query.domain.ParserService;
import com.tencent.supersonic.semantic.query.domain.parser.SemanticSchemaManager;
import com.tencent.supersonic.semantic.query.domain.parser.SemanticSqlService;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.SemanticModel;
import java.util.ArrayList;
import java.util.List;
import lombok.extern.slf4j.Slf4j;
import org.springframework.context.annotation.Primary;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
@Service
@Primary
@Slf4j
public class ParserServiceImpl implements ParserService {
private final SemanticSchemaManager semanticSchemaManager;
private final SemanticSqlService semanticSqlService;
public ParserServiceImpl(SemanticSchemaManager schemaManager,
SemanticSqlService semanticSqlService) {
this.semanticSchemaManager = schemaManager;
this.semanticSqlService = semanticSqlService;
}
@Override
public SqlParserResp physicalSql(ParseSqlReq sqlCommend) throws Exception {
return parser(sqlCommend);
}
@Override
public SqlParserResp physicalSql(MetricReq metricCommand) throws Exception {
return parser(metricCommand);
}
public SqlParserResp parser(ParseSqlReq sqlCommend) {
log.info("parser MetricReq [{}] ", sqlCommend);
SqlParserResp sqlParserInfo = new SqlParserResp();
try {
if (!CollectionUtils.isEmpty(sqlCommend.getTables())) {
List<String> tables = new ArrayList<>();
String sourceId = "";
for (MetricTable metricTable : sqlCommend.getTables()) {
MetricReq metricReq = new MetricReq();
metricReq.setMetrics(metricTable.getMetrics());
metricReq.setDimensions(metricTable.getDimensions());
metricReq.setWhere(formatWhere(metricTable.getWhere()));
metricReq.setRootPath(sqlCommend.getRootPath());
SqlParserResp tableSql = parser(metricReq, metricTable.isAgg());
if (!tableSql.isOk()) {
sqlParserInfo.setErrMsg(String.format("parser table [%s] error [%s]", metricTable.getAlias(),
tableSql.getErrMsg()));
return sqlParserInfo;
}
tables.add(String.format("%s as (%s)", metricTable.getAlias(), tableSql.getSql()));
sourceId = tableSql.getSourceId();
}
if (!tables.isEmpty()) {
String sql = "with " + String.join(",", tables) + "\n" + sqlCommend.getSql();
sqlParserInfo.setSql(sql);
sqlParserInfo.setSourceId(sourceId);
return sqlParserInfo;
}
}
} catch (Exception e) {
log.error("physicalSql error {}", e);
sqlParserInfo.setErrMsg(e.getMessage());
}
return sqlParserInfo;
}
public SqlParserResp parser(MetricReq metricCommand) {
return parser(metricCommand, true);
}
public SqlParserResp parser(MetricReq metricCommand, boolean isAgg) {
log.info("parser MetricReq [{}] isAgg [{}]", metricCommand, isAgg);
SqlParserResp sqlParser = new SqlParserResp();
if (metricCommand.getRootPath().isEmpty()) {
sqlParser.setErrMsg("rootPath empty");
return sqlParser;
}
try {
SemanticModel semanticModel = semanticSchemaManager.get(metricCommand.getRootPath());
if (semanticModel == null) {
sqlParser.setErrMsg("semanticSchema not found");
return sqlParser;
}
return semanticSqlService.explain(metricCommand, isAgg, semanticModel);
} catch (Exception e) {
sqlParser.setErrMsg(e.getMessage());
log.error("parser error MetricCommand[{}] error [{}]", metricCommand, e);
}
return sqlParser;
}
private String formatWhere(String where) {
return where.replace("\"", "\\\\\"");
}
}

View File

@@ -0,0 +1,132 @@
package com.tencent.supersonic.semantic.query.application;
import com.tencent.supersonic.auth.api.authentication.pojo.User;
import com.tencent.supersonic.semantic.api.core.pojo.QueryStat;
import com.tencent.supersonic.semantic.api.core.response.QueryResultWithSchemaResp;
import com.tencent.supersonic.semantic.api.core.response.SqlParserResp;
import com.tencent.supersonic.semantic.api.query.pojo.Cache;
import com.tencent.supersonic.semantic.api.query.request.ItemUseReq;
import com.tencent.supersonic.semantic.api.query.request.MetricReq;
import com.tencent.supersonic.semantic.api.query.request.QueryMultiStructReq;
import com.tencent.supersonic.semantic.api.query.request.QuerySqlReq;
import com.tencent.supersonic.semantic.api.query.request.QueryStructReq;
import com.tencent.supersonic.semantic.api.query.response.ItemUseResp;
import com.tencent.supersonic.common.enums.TaskStatusEnum;
import com.tencent.supersonic.common.util.cache.CacheUtils;
import com.tencent.supersonic.semantic.core.domain.DatabaseService;
import com.tencent.supersonic.semantic.query.domain.ParserService;
import com.tencent.supersonic.semantic.query.domain.QueryService;
import com.tencent.supersonic.semantic.query.domain.annotation.DataPermission;
import com.tencent.supersonic.semantic.query.domain.utils.QueryStructUtils;
import com.tencent.supersonic.semantic.query.domain.utils.StatUtils;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
@Service
@Slf4j
public class QueryServiceImpl implements QueryService {

    private final ParserService parserService;
    private final DatabaseService databaseService;
    private final QueryStructUtils queryStructUtils;
    private final StatUtils statUtils;
    private final CacheUtils cacheUtils;

    // Global switch for the struct-query cache; defaults to enabled.
    @Value("${query.cache.enable:true}")
    private Boolean cacheEnable;

    public QueryServiceImpl(ParserService parserService,
            DatabaseService databaseService,
            QueryStructUtils queryStructUtils,
            StatUtils statUtils,
            CacheUtils cacheUtils) {
        this.parserService = parserService;
        this.databaseService = databaseService;
        this.queryStructUtils = queryStructUtils;
        this.statUtils = statUtils;
        this.cacheUtils = cacheUtils;
    }

    /**
     * Executes a raw SQL query against the domain's database.
     * NOTE(review): the incoming command is not yet mapped onto the MetricReq — an
     * empty request is parsed instead (see TODO); only the domainId is consumed.
     */
    @Override
    public Object queryBySql(QuerySqlReq querySqlCmd) throws Exception {
        //TODO QuerySqlCmd---> SqlCommend
        MetricReq sqlCommend = new MetricReq();
        SqlParserResp sqlParser = parserService.physicalSql(sqlCommend);
        return databaseService.executeSql(sqlParser.getSql(), querySqlCmd.getDomainId());
    }

    /**
     * Executes a structured query, serving from cache when permitted, and records
     * query statistics (success/error) asynchronously. Rethrows any failure after
     * logging it.
     */
    @Override
    public QueryResultWithSchemaResp queryByStruct(QueryStructReq queryStructCmd, User user) throws Exception {
        QueryResultWithSchemaResp queryResultWithColumns;
        log.info("[queryStructCmd:{}]", queryStructCmd);
        try {
            statUtils.initStatInfo(queryStructCmd, user);
            // Cache key = domainId + MD5 of the command, so identical queries share entries.
            String cacheKey = cacheUtils.generateCacheKey(queryStructCmd.getDomainId().toString(),
                    queryStructCmd.generateCommandMd5());
            handleGlobalCacheDisable(queryStructCmd);
            if (queryStructUtils.queryCache(queryStructCmd.getCacheInfo())) {
                queryResultWithColumns = queryStructUtils.queryByStructByCache(queryStructCmd, cacheKey);
            } else {
                queryResultWithColumns = queryStructUtils.queryByStructWithoutCache(queryStructCmd, cacheKey);
            }
            statUtils.statInfo2DbAsync(TaskStatusEnum.SUCCESS);
        } catch (Exception e) {
            log.warn("exception in queryByStruct, e: ", e);
            statUtils.statInfo2DbAsync(TaskStatusEnum.ERROR);
            throw e;
        }
        return queryResultWithColumns;
    }

    /**
     * Same as {@link #queryByStruct(QueryStructReq, User)} but annotated with
     * {@code @DataPermission} so the permission aspect can inspect the request.
     */
    @Override
    @DataPermission
    public QueryResultWithSchemaResp queryByStruct(QueryStructReq queryStructCmd, User user, HttpServletRequest request)
            throws Exception {
        return queryByStruct(queryStructCmd, user);
    }

    /**
     * Executes several structured queries as one request; stats are initialized from
     * the first sub-command and the result is never served from cache.
     */
    @Override
    public QueryResultWithSchemaResp queryByMultiStruct(QueryMultiStructReq queryMultiStructCmd, User user)
            throws Exception {
        statUtils.initStatInfo(queryMultiStructCmd.getQueryStructCmds().get(0), user);
        String cacheKey = cacheUtils.generateCacheKey(
                queryMultiStructCmd.getQueryStructCmds().get(0).getDomainId().toString(),
                queryMultiStructCmd.generateCommandMd5());
        return queryStructUtils.queryByMultiStructWithoutCache(queryMultiStructCmd, cacheKey);
    }

    // Forces cache off on the request when the global switch disables caching.
    private void handleGlobalCacheDisable(QueryStructReq queryStructCmd) {
        if (!cacheEnable) {
            Cache cacheInfo = new Cache();
            cacheInfo.setCache(false);
            queryStructCmd.setCacheInfo(cacheInfo);
        }
    }

    /** Returns item-usage statistics for the given filter. */
    @Override
    public List<ItemUseResp> getStatInfo(ItemUseReq itemUseCommend) {
        // Minor cleanup: the intermediate local added nothing.
        return statUtils.getStatInfo(itemUseCommend);
    }

    /** Returns raw query stats, bypassing any stat cache. */
    @Override
    public List<QueryStat> getQueryStatInfoWithoutCache(ItemUseReq itemUseCommend) {
        return statUtils.getQueryStatInfoWithoutCache(itemUseCommend);
    }
}

View File

@@ -0,0 +1,123 @@
package com.tencent.supersonic.semantic.query.application;
import static com.tencent.supersonic.common.constant.Constants.AT_SYMBOL;
import com.github.pagehelper.PageInfo;
import com.tencent.supersonic.auth.api.authentication.pojo.User;
import com.tencent.supersonic.semantic.api.core.request.DomainSchemaFilterReq;
import com.tencent.supersonic.semantic.api.core.request.PageDimensionReq;
import com.tencent.supersonic.semantic.api.core.request.PageMetricReq;
import com.tencent.supersonic.semantic.api.core.response.DimSchemaResp;
import com.tencent.supersonic.semantic.api.core.response.DimensionResp;
import com.tencent.supersonic.semantic.api.core.response.DomainResp;
import com.tencent.supersonic.semantic.api.core.response.DomainSchemaResp;
import com.tencent.supersonic.semantic.api.core.response.MetricResp;
import com.tencent.supersonic.semantic.api.core.response.MetricSchemaResp;
import com.tencent.supersonic.semantic.api.query.request.ItemUseReq;
import com.tencent.supersonic.semantic.api.query.response.ItemUseResp;
import com.tencent.supersonic.common.enums.TypeEnums;
import com.tencent.supersonic.semantic.core.domain.DimensionService;
import com.tencent.supersonic.semantic.core.domain.DomainService;
import com.tencent.supersonic.semantic.core.domain.MetricService;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import com.tencent.supersonic.semantic.query.domain.SchemaService;
import com.tencent.supersonic.semantic.query.domain.QueryService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
@Slf4j
@Service
public class SchemaServiceImpl implements SchemaService {

    private final QueryService queryService;
    private final DomainService domainService;
    private final DimensionService dimensionService;
    private final MetricService metricService;

    public SchemaServiceImpl(QueryService queryService,
            DomainService domainService,
            DimensionService dimensionService,
            MetricService metricService) {
        this.queryService = queryService;
        this.domainService = domainService;
        this.dimensionService = dimensionService;
        this.metricService = metricService;
    }

    /**
     * Fetches the domain schemas visible to the user and enriches every dimension
     * and metric with its usage count from query statistics.
     */
    @Override
    public List<DomainSchemaResp> fetchDomainSchema(DomainSchemaFilterReq filter, User user) {
        List<DomainSchemaResp> domainSchemaDescList = domainService.fetchDomainSchema(filter, user);
        List<ItemUseResp> statInfos = queryService.getStatInfo(new ItemUseReq());
        log.info("statInfos:{}", statInfos);
        fillCnt(domainSchemaDescList, statInfos);
        return domainSchemaDescList;
    }

    // Indexes stat entries by "<type>@@<bizName>" (first entry wins on duplicates)
    // and copies useCnt onto each schema item.
    private void fillCnt(List<DomainSchemaResp> domainSchemaDescList, List<ItemUseResp> statInfos) {
        Map<String, ItemUseResp> typeIdAndStatPair = statInfos.stream()
                .collect(Collectors.toMap(
                        itemUseInfo -> itemUseInfo.getType() + AT_SYMBOL + AT_SYMBOL + itemUseInfo.getBizName(),
                        itemUseInfo -> itemUseInfo,
                        (item1, item2) -> item1));
        log.info("typeIdAndStatPair:{}", typeIdAndStatPair);
        for (DomainSchemaResp domainSchemaDesc : domainSchemaDescList) {
            fillDimCnt(domainSchemaDesc, typeIdAndStatPair);
            fillMetricCnt(domainSchemaDesc, typeIdAndStatPair);
        }
    }

    // BUGFIX: the original checked emptiness twice, looked the key up twice, and
    // re-set the same list reference back onto the response; metrics are mutated
    // in place, so all three redundancies are removed.
    private void fillMetricCnt(DomainSchemaResp domainSchemaDesc, Map<String, ItemUseResp> typeIdAndStatPair) {
        List<MetricSchemaResp> metrics = domainSchemaDesc.getMetrics();
        if (CollectionUtils.isEmpty(metrics)) {
            return;
        }
        for (MetricSchemaResp metric : metrics) {
            String key = TypeEnums.METRIC.getName() + AT_SYMBOL + AT_SYMBOL + metric.getBizName();
            ItemUseResp stat = typeIdAndStatPair.get(key);
            if (stat != null) {
                metric.setUseCnt(stat.getUseCnt());
            }
        }
    }

    // Same cleanup as fillMetricCnt, for dimensions.
    private void fillDimCnt(DomainSchemaResp domainSchemaDesc, Map<String, ItemUseResp> typeIdAndStatPair) {
        List<DimSchemaResp> dimensions = domainSchemaDesc.getDimensions();
        if (CollectionUtils.isEmpty(dimensions)) {
            return;
        }
        for (DimSchemaResp dim : dimensions) {
            String key = TypeEnums.DIMENSION.getName() + AT_SYMBOL + AT_SYMBOL + dim.getBizName();
            ItemUseResp stat = typeIdAndStatPair.get(key);
            if (stat != null) {
                dim.setUseCnt(stat.getUseCnt());
            }
        }
    }

    /** Lists domains the user can administer. */
    @Override
    public List<DomainResp> getDomainListForAdmin(User user) {
        return domainService.getDomainListForAdmin(user.getName());
    }

    /** Pages dimensions; the user argument is currently unused. */
    @Override
    public PageInfo<DimensionResp> queryDimension(PageDimensionReq pageDimensionCmd, User user) {
        return dimensionService.queryDimension(pageDimensionCmd);
    }

    /** Pages metrics; the user argument is currently unused. */
    @Override
    public PageInfo<MetricResp> queryMetric(PageMetricReq pageMetricCmd, User user) {
        return metricService.queryMetric(pageMetricCmd);
    }
}

View File

@@ -0,0 +1,305 @@
package com.tencent.supersonic.semantic.query.application;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.tencent.supersonic.semantic.api.core.pojo.yaml.DatasourceYamlTpl;
import com.tencent.supersonic.semantic.api.core.pojo.yaml.DimensionTimeTypeParamsTpl;
import com.tencent.supersonic.semantic.api.core.pojo.yaml.DimensionYamlTpl;
import com.tencent.supersonic.semantic.api.core.pojo.yaml.IdentifyYamlTpl;
import com.tencent.supersonic.semantic.api.core.pojo.yaml.MeasureYamlTpl;
import com.tencent.supersonic.semantic.api.core.pojo.yaml.MetricTypeParamsYamlTpl;
import com.tencent.supersonic.semantic.api.core.pojo.yaml.MetricYamlTpl;
import com.tencent.supersonic.semantic.core.domain.DatasourceService;
import com.tencent.supersonic.semantic.core.domain.DomainService;
import com.tencent.supersonic.semantic.query.domain.parser.SemanticSchemaManager;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.DataSource;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.Dimension;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.DimensionTimeTypeParams;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.Identify;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.Measure;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.Metric;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.MetricTypeParams;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.SemanticModel;
import com.tencent.supersonic.semantic.query.domain.parser.schema.SemanticSchema;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
@Slf4j
@Service("SemanticSchemaManager")
public class SemanticSchemaManagerImpl implements SemanticSchemaManager {

    // Cache of rootPath -> SemanticModel, defined by the nested GuavaCacheConfig below;
    // its loader calls back into reload() on a miss. NOTE(review): field-injecting a
    // bean produced inside this same class is fragile — confirm the context wires it.
    @Autowired
    private LoadingCache<String, SemanticModel> loadingCache;

    private final DatasourceService datasourceService;
    private final DomainService domainService;

    public SemanticSchemaManagerImpl(DatasourceService datasourceService,
            DomainService domainService) {
        this.datasourceService = datasourceService;
        this.domainService = domainService;
    }

    /**
     * Rebuilds the semantic model for every domain whose full path starts with
     * {@code rootPath}: loads datasource/dimension/metric YAML templates and converts
     * them into the parser DSL objects. Returns a model with only rootPath set when
     * no domain matches.
     */
    @Override
    public SemanticModel reload(String rootPath) {
        SemanticModel semanticModel = new SemanticModel();
        semanticModel.setRootPath(rootPath);
        Map<Long, String> domainFullPathMap = domainService.getDomainFullPath();
        log.info("domainFullPathMap {}", domainFullPathMap);
        // Select every domain whose full path is under rootPath (string prefix match).
        Set<Long> domainIds = domainFullPathMap.entrySet().stream().filter(e -> e.getValue().startsWith(rootPath))
                .map(e -> e.getKey()).collect(Collectors.toSet());
        if (domainIds.isEmpty()) {
            log.error("get domainId empty {}", rootPath);
            return semanticModel;
        }
        // Out-parameters filled by getModelYamlTplByDomainIds.
        Map<String, List<DimensionYamlTpl>> dimensionYamlTpls = new HashMap<>();
        List<DatasourceYamlTpl> datasourceYamlTpls = new ArrayList<>();
        List<MetricYamlTpl> metricYamlTpls = new ArrayList<>();
        datasourceService.getModelYamlTplByDomainIds(domainIds, dimensionYamlTpls, datasourceYamlTpls, metricYamlTpls);
        if (!datasourceYamlTpls.isEmpty()) {
            Map<String, DataSource> dataSourceMap = datasourceYamlTpls.stream().map(d -> getDatasource(d))
                    .collect(Collectors.toMap(DataSource::getName, item -> item));
            semanticModel.setDatasourceMap(dataSourceMap);
        }
        if (!dimensionYamlTpls.isEmpty()) {
            // Keyed by datasource biz name -> its dimensions.
            Map<String, List<Dimension>> dimensionMap = new HashMap<>();
            for (Map.Entry<String, List<DimensionYamlTpl>> entry : dimensionYamlTpls.entrySet()) {
                dimensionMap.put(entry.getKey(), getDimensions(entry.getValue()));
            }
            semanticModel.setDimensionMap(dimensionMap);
        }
        if (!metricYamlTpls.isEmpty()) {
            semanticModel.setMetrics(getMetrics(metricYamlTpls));
        }
        return semanticModel;
    }

    //private Map<String, SemanticSchema> semanticSchemaMap = new HashMap<>();

    /**
     * Returns the cached model for rootPath (key normalized via formatKey);
     * the cache loader invokes reload() on a miss.
     */
    @Override
    public SemanticModel get(String rootPath) throws Exception {
        rootPath = formatKey(rootPath);
        SemanticModel schema = loadingCache.get(rootPath);
        // Defensive check; LoadingCache.get normally throws rather than returning null.
        if (schema == null) {
            return null;
        }
        return schema;
    }

    /** Converts metric YAML templates into DSL Metric objects. */
    public static List<Metric> getMetrics(final List<MetricYamlTpl> t) {
        return getMetricsByMetricYamlTpl(t);
    }

    /** Converts dimension YAML templates into DSL Dimension objects. */
    public static List<Dimension> getDimensions(final List<DimensionYamlTpl> t) {
        return getDimension(t);
    }

    /** Converts one datasource YAML template into a DSL DataSource. */
    public static DataSource getDatasource(final DatasourceYamlTpl d) {
        DataSource datasource = new DataSource();
        datasource.setSqlQuery(d.getSqlQuery());
        datasource.setName(d.getName());
        datasource.setSourceId(d.getSourceId());
        datasource.setTableQuery(d.getTableQuery());
        datasource.setIdentifiers(getIdentify(d.getIdentifiers()));
        datasource.setDimensions(getDimensions(d.getDimensions()));
        datasource.setMeasures(getMeasures(d.getMeasures()));
        return datasource;
    }

    // Template -> DSL conversion for metrics, field by field.
    private static List<Metric> getMetricsByMetricYamlTpl(List<MetricYamlTpl> metricYamlTpls) {
        List<Metric> metrics = new ArrayList<>();
        for (MetricYamlTpl metricYamlTpl : metricYamlTpls) {
            Metric metric = new Metric();
            metric.setMetricTypeParams(getMetricTypeParams(metricYamlTpl.getTypeParams()));
            metric.setOwners(metricYamlTpl.getOwners());
            metric.setType(metricYamlTpl.getType());
            metric.setName(metricYamlTpl.getName());
            metrics.add(metric);
        }
        return metrics;
    }

    // Copies the metric expression and its measures.
    private static MetricTypeParams getMetricTypeParams(MetricTypeParamsYamlTpl metricTypeParamsYamlTpl) {
        MetricTypeParams metricTypeParams = new MetricTypeParams();
        metricTypeParams.setExpr(metricTypeParamsYamlTpl.getExpr());
        metricTypeParams.setMeasures(getMeasures(metricTypeParamsYamlTpl.getMeasures()));
        return metricTypeParams;
    }

    // Template -> DSL conversion for measures, field by field.
    private static List<Measure> getMeasures(List<MeasureYamlTpl> measureYamlTpls) {
        List<Measure> measures = new ArrayList<>();
        for (MeasureYamlTpl measureYamlTpl : measureYamlTpls) {
            Measure measure = new Measure();
            measure.setCreateMetric(measureYamlTpl.getCreateMetric());
            measure.setExpr(measureYamlTpl.getExpr());
            measure.setAgg(measureYamlTpl.getAgg());
            measure.setName(measureYamlTpl.getName());
            measure.setAlias(measureYamlTpl.getAlias());
            measure.setConstraint(measureYamlTpl.getConstraint());
            measures.add(measure);
        }
        return measures;
    }

    // Template -> DSL conversion for dimensions, field by field.
    private static List<Dimension> getDimension(List<DimensionYamlTpl> dimensionYamlTpls) {
        List<Dimension> dimensions = new ArrayList<>();
        for (DimensionYamlTpl dimensionYamlTpl : dimensionYamlTpls) {
            Dimension dimension = new Dimension();
            dimension.setType(dimensionYamlTpl.getType());
            dimension.setExpr(dimensionYamlTpl.getExpr());
            dimension.setName(dimensionYamlTpl.getName());
            dimension.setOwners(dimensionYamlTpl.getOwners());
            dimension.setDimensionTimeTypeParams(getDimensionTimeTypeParams(dimensionYamlTpl.getTypeParams()));
            dimensions.add(dimension);
        }
        return dimensions;
    }

    // Returns an empty params object (not null) when the template has no type params.
    private static DimensionTimeTypeParams getDimensionTimeTypeParams(
            DimensionTimeTypeParamsTpl dimensionTimeTypeParamsTpl) {
        DimensionTimeTypeParams dimensionTimeTypeParams = new DimensionTimeTypeParams();
        if (dimensionTimeTypeParamsTpl != null) {
            dimensionTimeTypeParams.setTimeGranularity(dimensionTimeTypeParamsTpl.getTimeGranularity());
            dimensionTimeTypeParams.setIsPrimary(dimensionTimeTypeParamsTpl.getIsPrimary());
        }
        return dimensionTimeTypeParams;
    }

    // Template -> DSL conversion for identifiers.
    private static List<Identify> getIdentify(List<IdentifyYamlTpl> identifyYamlTpls) {
        List<Identify> identifies = new ArrayList<>();
        for (IdentifyYamlTpl identifyYamlTpl : identifyYamlTpls) {
            Identify identify = new Identify();
            identify.setType(identifyYamlTpl.getType());
            identify.setName(identifyYamlTpl.getName());
            identifies.add(identify);
        }
        return identifies;
    }

    /** Merges the given metrics into the schema's metric list (replace-by-name). */
    public static void update(SemanticSchema schema, List<Metric> metric) throws Exception {
        if (schema != null) {
            updateMetric(metric, schema.getMetrics());
        }
    }

    /** Inserts or replaces a datasource in the schema (case-insensitive name match). */
    public static void update(SemanticSchema schema, DataSource datasourceYamlTpl) throws Exception {
        if (schema != null) {
            String dataSourceName = datasourceYamlTpl.getName();
            Optional<Map.Entry<String, DataSource>> datasourceYamlTplMap = schema.getDatasource().entrySet().stream()
                    .filter(t -> t.getKey().equalsIgnoreCase(dataSourceName)).findFirst();
            if (datasourceYamlTplMap.isPresent()) {
                datasourceYamlTplMap.get().setValue(datasourceYamlTpl);
            } else {
                schema.getDatasource().put(dataSourceName, datasourceYamlTpl);
            }
        }
    }

    /**
     * Merges dimensions into the schema under the given datasource biz name
     * (case-insensitive), creating the entry when absent.
     */
    public static void update(SemanticSchema schema, String datasourceBizName, List<Dimension> dimensionYamlTpls)
            throws Exception {
        if (schema != null) {
            Optional<Map.Entry<String, List<Dimension>>> datasourceYamlTplMap = schema.getDimension().entrySet()
                    .stream().filter(t -> t.getKey().equalsIgnoreCase(datasourceBizName)).findFirst();
            if (datasourceYamlTplMap.isPresent()) {
                updateDimension(dimensionYamlTpls, datasourceYamlTplMap.get().getValue());
            } else {
                List<Dimension> dimensions = new ArrayList<>();
                updateDimension(dimensionYamlTpls, dimensions);
                schema.getDimension().put(datasourceBizName, dimensions);
            }
        }
    }

    // Replace-by-name merge: drops existing dimensions whose names collide with the
    // incoming ones, then appends all incoming dimensions.
    private static void updateDimension(List<Dimension> dimensionYamlTpls, List<Dimension> dimensions) {
        if (CollectionUtils.isEmpty(dimensionYamlTpls)) {
            return;
        }
        Set<String> toAdd = dimensionYamlTpls.stream().map(m -> m.getName()).collect(Collectors.toSet());
        // Iterator.remove avoids ConcurrentModificationException while filtering in place.
        Iterator<Dimension> iterator = dimensions.iterator();
        while (iterator.hasNext()) {
            Dimension cur = iterator.next();
            if (toAdd.contains(cur.getName())) {
                iterator.remove();
            }
        }
        dimensions.addAll(dimensionYamlTpls);
    }

    // Same replace-by-name merge as updateDimension, for metrics.
    private static void updateMetric(List<Metric> metricYamlTpls, List<Metric> metrics) {
        if (CollectionUtils.isEmpty(metricYamlTpls)) {
            return;
        }
        Set<String> toAdd = metricYamlTpls.stream().map(m -> m.getName()).collect(Collectors.toSet());
        Iterator<Metric> iterator = metrics.iterator();
        while (iterator.hasNext()) {
            Metric cur = iterator.next();
            if (toAdd.contains(cur.getName())) {
                iterator.remove();
            }
        }
        metrics.addAll(metricYamlTpls);
    }

    /** Normalizes a cache key: trims whitespace and strips leading/trailing '/'. */
    public static String formatKey(String key) {
        key = key.trim();
        if (key.startsWith("/")) {
            key = key.substring(1);
        }
        if (key.endsWith("/")) {
            key = key.substring(0, key.length() - 1);
        }
        return key;
    }

    // Non-static inner configuration: the cache loader needs the enclosing instance
    // to call reload(). NOTE(review): an inner @Configuration is unusual for Spring —
    // verify it is picked up as intended.
    @Configuration
    @EnableCaching
    public class GuavaCacheConfig {

        // Entry time-to-live in minutes after write.
        @Value("${parser.cache.saveMinute:15}")
        private Integer saveMinutes = 15;

        // Maximum number of cached semantic models.
        @Value("${parser.cache.maximumSize:1000}")
        private Integer maximumSize = 1000;

        /** Builds the LoadingCache bean whose loader delegates to reload(). */
        @Bean
        public LoadingCache<String, SemanticModel> getCache() {
            LoadingCache<String, SemanticModel> cache
                    = CacheBuilder.newBuilder()
                    .expireAfterWrite(saveMinutes, TimeUnit.MINUTES)
                    .initialCapacity(10)
                    .maximumSize(maximumSize).build(
                            new CacheLoader<String, SemanticModel>() {
                                @Override
                                public SemanticModel load(String key) {
                                    log.info("load SemanticSchema [{}]", key);
                                    return SemanticSchemaManagerImpl.this.reload(key);
                                }
                            }
                    );
            return cache;
        }
    }
}

View File

@@ -0,0 +1,32 @@
package com.tencent.supersonic.semantic.query.application;
import com.tencent.supersonic.semantic.api.core.response.SqlParserResp;
import com.tencent.supersonic.semantic.api.query.request.MetricReq;
import com.tencent.supersonic.semantic.query.domain.parser.SemanticSqlService;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.planner.AggPlanner;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.SemanticModel;
import com.tencent.supersonic.semantic.query.domain.parser.schema.SemanticSchema;
import org.springframework.stereotype.Service;
@Service("SemanticSqlService")
public class SemanticSqlServiceImpl implements SemanticSqlService {

    /**
     * Plans a metric request against the semantic model and returns the generated
     * SQL together with its source id.
     */
    @Override
    public SqlParserResp explain(MetricReq metricReq, boolean isAgg, SemanticModel semanticModel) throws Exception {
        SemanticSchema schema = getSemanticSchema(semanticModel);
        AggPlanner planner = new AggPlanner(schema);
        planner.explain(metricReq, isAgg);

        SqlParserResp response = new SqlParserResp();
        response.setSql(planner.getSql());
        response.setSourceId(planner.getSourceId());
        return response;
    }

    // Adapts the flat SemanticModel into the schema structure the planner consumes.
    private SemanticSchema getSemanticSchema(SemanticModel semanticModel) {
        SemanticSchema schema = SemanticSchema.newBuilder(semanticModel.getRootPath()).build();
        schema.setDatasource(semanticModel.getDatasourceMap());
        schema.setDimension(semanticModel.getDimensionMap());
        schema.setMetric(semanticModel.getMetrics());
        return schema;
    }
}

View File

@@ -0,0 +1,13 @@
package com.tencent.supersonic.semantic.query.domain;
import com.tencent.supersonic.semantic.api.core.response.SqlParserResp;
import com.tencent.supersonic.semantic.api.query.request.MetricReq;
import com.tencent.supersonic.semantic.api.query.request.ParseSqlReq;
/**
 * Converts parsed semantic requests into physical (executable) SQL.
 */
public interface ParserService {

    /** Generates physical SQL for a semantic-SQL request with metric tables. */
    SqlParserResp physicalSql(ParseSqlReq sqlCommend) throws Exception;

    /** Generates physical SQL for a metric/dimension request. */
    SqlParserResp physicalSql(MetricReq metricCommand) throws Exception;
}

View File

@@ -0,0 +1,30 @@
package com.tencent.supersonic.semantic.query.domain;
import com.tencent.supersonic.auth.api.authentication.pojo.User;
import com.tencent.supersonic.semantic.api.core.pojo.QueryStat;
import com.tencent.supersonic.semantic.api.core.response.QueryResultWithSchemaResp;
import com.tencent.supersonic.semantic.api.query.request.ItemUseReq;
import com.tencent.supersonic.semantic.api.query.request.QueryMultiStructReq;
import com.tencent.supersonic.semantic.api.query.request.QuerySqlReq;
import com.tencent.supersonic.semantic.api.query.request.QueryStructReq;
import com.tencent.supersonic.semantic.api.query.response.ItemUseResp;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
/**
 * Entry point for executing semantic queries (raw SQL, structured, multi-structured)
 * and retrieving query usage statistics.
 */
public interface QueryService {

    /** Executes a raw SQL query and returns the database result. */
    Object queryBySql(QuerySqlReq querySqlCmd) throws Exception;

    /** Executes a structured query on behalf of the user. */
    QueryResultWithSchemaResp queryByStruct(QueryStructReq queryStructCmd, User user) throws Exception;

    /** Structured query variant that also receives the HTTP request (for permission checks). */
    QueryResultWithSchemaResp queryByStruct(QueryStructReq queryStructCmd, User user, HttpServletRequest request)
            throws Exception;

    /** Executes several structured queries as one request. */
    QueryResultWithSchemaResp queryByMultiStruct(QueryMultiStructReq queryMultiStructCmd, User user) throws Exception;

    /** Returns item-usage statistics for the given filter. */
    List<ItemUseResp> getStatInfo(ItemUseReq itemUseCommend);

    /** Returns raw query stats, bypassing any stat cache. */
    List<QueryStat> getQueryStatInfoWithoutCache(ItemUseReq itemUseCommend);
}

View File

@@ -0,0 +1,24 @@
package com.tencent.supersonic.semantic.query.domain;
import com.github.pagehelper.PageInfo;
import com.tencent.supersonic.auth.api.authentication.pojo.User;
import com.tencent.supersonic.semantic.api.core.request.DomainSchemaFilterReq;
import com.tencent.supersonic.semantic.api.core.request.PageDimensionReq;
import com.tencent.supersonic.semantic.api.core.request.PageMetricReq;
import com.tencent.supersonic.semantic.api.core.response.DimensionResp;
import com.tencent.supersonic.semantic.api.core.response.DomainResp;
import com.tencent.supersonic.semantic.api.core.response.DomainSchemaResp;
import com.tencent.supersonic.semantic.api.core.response.MetricResp;
import java.util.List;
/**
 * Read-side access to domain schemas, dimensions and metrics.
 */
public interface SchemaService {

    /** Fetches domain schemas matching the filter, visible to the user. */
    List<DomainSchemaResp> fetchDomainSchema(DomainSchemaFilterReq filter, User user);

    /** Lists domains the user can administer. */
    List<DomainResp> getDomainListForAdmin(User user);

    /** Pages dimensions matching the request. */
    PageInfo<DimensionResp> queryDimension(PageDimensionReq pageDimensionReq, User user);

    /** Pages metrics matching the request. */
    PageInfo<MetricResp> queryMetric(PageMetricReq pageMetricReq, User user);
}

View File

@@ -0,0 +1,12 @@
package com.tencent.supersonic.semantic.query.domain.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks a method or parameter as subject to data-permission checks. Retained at
 * runtime — presumably consumed by an AOP aspect; the aspect is not visible in
 * this file (TODO: confirm the interceptor location).
 */
@Target({ElementType.PARAMETER, ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
public @interface DataPermission {
}

View File

@@ -0,0 +1,12 @@
package com.tencent.supersonic.semantic.query.domain.parser;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.SemanticModel;
/**
 * Provides cached access to the semantic model of a domain tree.
 */
public interface SemanticSchemaManager {

    /**
     * Gets the model from cache; if absent it is refreshed via {@link #reload(String)}.
     */
    SemanticModel get(String rootPath) throws Exception;

    /**
     * Refreshes the cache entry; returns the model loaded from the database or
     * other storage.
     */
    SemanticModel reload(String rootPath);
}

View File

@@ -0,0 +1,10 @@
package com.tencent.supersonic.semantic.query.domain.parser;
import com.tencent.supersonic.semantic.api.core.response.SqlParserResp;
import com.tencent.supersonic.semantic.api.query.request.MetricReq;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.SemanticModel;
/**
 * Generates physical SQL for a metric request against a semantic model.
 */
public interface SemanticSqlService {

    /**
     * Plans the request against the model and returns the resulting SQL.
     *
     * @param isAgg whether aggregation should be applied
     */
    SqlParserResp explain(MetricReq metricReq, boolean isAgg, SemanticModel semanticModel) throws Exception;
}

View File

@@ -0,0 +1,62 @@
package com.tencent.supersonic.semantic.query.domain.parser.convertor;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.DSLSqlValidatorImpl;
import com.tencent.supersonic.semantic.query.domain.parser.schema.SemanticSqlDialect;
import java.util.Properties;
import org.apache.calcite.avatica.util.Casing;
import org.apache.calcite.avatica.util.Quoting;
import org.apache.calcite.config.CalciteConnectionConfig;
import org.apache.calcite.config.CalciteConnectionConfigImpl;
import org.apache.calcite.config.CalciteConnectionProperty;
import org.apache.calcite.config.Lex;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.sql.SqlOperatorTable;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.parser.impl.SqlParserImpl;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
import org.apache.calcite.sql.validate.SqlConformanceEnum;
import org.apache.calcite.sql.validate.SqlValidator;
/**
 * Static Calcite configuration shared by the semantic SQL parser/validator.
 */
public class Configuration {

    /** Connection properties driving case-sensitivity and identifier casing. */
    public static Properties configProperties = new Properties();
    public static DSLSqlValidatorImpl dslSqlValidator;
    public static RelDataTypeFactory typeFactory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
    public static SqlOperatorTable operatorTable = SqlStdOperatorTable.instance();
    public static CalciteConnectionConfig config = new CalciteConnectionConfigImpl(configProperties);

    static {
        configProperties.put(CalciteConnectionProperty.CASE_SENSITIVE.camelName(), Boolean.TRUE.toString());
        configProperties.put(CalciteConnectionProperty.UNQUOTED_CASING.camelName(), Casing.UNCHANGED.toString());
        configProperties.put(CalciteConnectionProperty.QUOTED_CASING.camelName(), Casing.TO_LOWER.toString());
    }

    /** Validator configuration using the custom semantic dialect's conformance. */
    public static SqlValidator.Config validatorConfig = SqlValidator.Config.DEFAULT
            .withLenientOperatorLookup(config.lenientOperatorLookup())
            .withSqlConformance(SemanticSqlDialect.DEFAULT.getConformance())
            .withDefaultNullCollation(config.defaultNullCollation())
            .withIdentifierExpansion(true);

    /**
     * Builds the SqlParser configuration.
     *
     * BUGFIX: the original set caseSensitive, casings, quoting, conformance and lex
     * twice with conflicting values; only the later assignment of each took effect.
     * The dead first group (derived from a local CalciteConnectionConfig, itself now
     * unused) and the overridden setQuoting(BACK_TICK) have been removed — the
     * effective settings are unchanged. Note the trailing setLex(BIG_QUERY) still
     * overrides quoting/casing per Calcite's Lex semantics, as before.
     */
    public static SqlParser.Config getParserConfig() {
        SqlParser.ConfigBuilder parserConfig = SqlParser.configBuilder();
        parserConfig.setParserFactory(SqlParserImpl.FACTORY).setCaseSensitive(false)
                .setIdentifierMaxLength(Integer.MAX_VALUE)
                .setQuoting(Quoting.SINGLE_QUOTE)
                .setQuotedCasing(Casing.TO_UPPER)
                .setUnquotedCasing(Casing.TO_UPPER)
                .setConformance(SqlConformanceEnum.MYSQL_5)
                .setLex(Lex.BIG_QUERY);
        return parserConfig.build();
    }
}

View File

@@ -0,0 +1,103 @@
package com.tencent.supersonic.semantic.query.domain.parser.convertor.planner;
import com.tencent.supersonic.semantic.api.query.request.MetricReq;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.Renderer;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.TableView;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.node.DataSourceNode;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.node.SemanticNode;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.render.FilterRender;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.render.OutputRender;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.render.SourceRender;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.DataSource;
import com.tencent.supersonic.semantic.query.domain.parser.schema.SchemaBuilder;
import com.tencent.supersonic.semantic.query.domain.parser.schema.SemanticSchema;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.Stack;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
// Planner implementation that matches a MetricReq to datasources and builds the
// query bottom-up through a chain of renderers (source -> filter -> output).
public class AggPlanner implements Planner {
    // The request being planned; assigned in explain() before parse() runs.
    private MetricReq metricCommand;
    private SemanticSchema schema;
    private SqlValidatorScope scope;
    // NOTE(review): never pushed/popped in this class — presumably reserved for
    // nested views; confirm before removing.
    private Stack<TableView> dataSets = new Stack<>();
    // Root of the generated parse tree (a SELECT), produced by parse().
    private SqlNode parserNode;
    // Id of the base (first matched) datasource, exposed via getSourceId().
    private String sourceId;
    private boolean isAgg = true;
    public AggPlanner(SemanticSchema schema) {
        this.schema = schema;
    }
    /**
     * Matches datasources for the request and builds the query level by level:
     * each renderer's view is aliased and fed in as the next renderer's table.
     *
     * @throws Exception when no datasource matches the requested metrics/dimensions
     */
    public void parse() throws Exception {
        // find the match Datasource
        scope = SchemaBuilder.getScope(schema);
        List<DataSource> datasource = getMatchDataSource(scope);
        if (datasource == null || datasource.isEmpty()) {
            throw new Exception("datasource not found");
        }
        sourceId = String.valueOf(datasource.get(0).getSourceId());
        // build level by level
        LinkedList<Renderer> builders = new LinkedList<>();
        builders.add(new SourceRender());
        builders.add(new FilterRender());
        builders.add(new OutputRender());
        ListIterator<Renderer> it = builders.listIterator();
        int i = 0;
        Renderer previous = null;
        while (it.hasNext()) {
            Renderer renderer = it.next();
            if (previous != null) {
                // Render the previous level, then feed its aliased view in as
                // this level's FROM table.
                previous.render(metricCommand, datasource, scope, schema, !isAgg);
                renderer.setTable(previous.builderAs(DataSourceNode.getNames(datasource) + "_" + String.valueOf(i)));
                i++;
            }
            previous = renderer;
        }
        // The last renderer is only rendered here; its view becomes the root node.
        builders.getLast().render(metricCommand, datasource, scope, schema, !isAgg);
        parserNode = builders.getLast().builder();
    }
    // Delegates datasource matching to DataSourceNode using the current scope/schema.
    private List<DataSource> getMatchDataSource(SqlValidatorScope scope) throws Exception {
        return DataSourceNode.getMatchDataSources(scope, schema, metricCommand);
    }
    /**
     * Normalizes the request (null metrics/dimensions become empty lists, null
     * limit becomes 0) and builds the parse tree.
     */
    @Override
    public void explain(MetricReq metricCommand, boolean isAgg) throws Exception {
        this.metricCommand = metricCommand;
        if (metricCommand.getMetrics() == null) {
            metricCommand.setMetrics(new ArrayList<>());
        }
        if (metricCommand.getDimensions() == null) {
            metricCommand.setDimensions(new ArrayList<>());
        }
        if (metricCommand.getLimit() == null) {
            metricCommand.setLimit(0L);
        }
        this.isAgg = isAgg;
        // build a parse Node
        parse();
        // optimizer
    }
    /** Renders the parse tree built by explain() to a SQL string. */
    @Override
    public String getSql() {
        return SemanticNode.getSql(parserNode);
    }
    /** Returns the id of the base datasource chosen during parsing. */
    @Override
    public String getSourceId() {
        return sourceId;
    }
}

View File

@@ -0,0 +1,13 @@
package com.tencent.supersonic.semantic.query.domain.parser.convertor.planner;
import com.tencent.supersonic.semantic.api.query.request.MetricReq;
/**
 * A query planner that turns a {@link MetricReq} into executable SQL
 * against a single physical data source.
 */
public interface Planner {

    /**
     * Builds the internal parse tree for the given metric request.
     *
     * @param metricCommand the metric/dimension query to plan
     * @param isAgg         whether aggregation should be applied
     * @throws Exception if no matching datasource is found or parsing fails
     */
    void explain(MetricReq metricCommand, boolean isAgg) throws Exception;

    /** Returns the generated SQL for the previously explained request. */
    String getSql();

    /** Returns the identifier of the datasource the generated SQL targets. */
    String getSourceId();
}

View File

@@ -0,0 +1,14 @@
package com.tencent.supersonic.semantic.query.domain.parser.convertor.sql;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.sql.SqlOperatorTable;
import org.apache.calcite.sql.validate.SqlValidatorCatalogReader;
import org.apache.calcite.sql.validate.SqlValidatorImpl;
// Thin concrete subclass of Calcite's SqlValidatorImpl — presumably exists to
// expose a public constructor (the superclass constructor is protected) so the
// project can instantiate a validator directly; adds no behavior of its own.
public class DSLSqlValidatorImpl extends SqlValidatorImpl {
    public DSLSqlValidatorImpl(SqlOperatorTable opTab, SqlValidatorCatalogReader catalogReader,
            RelDataTypeFactory typeFactory, Config config) {
        super(opTab, catalogReader, typeFactory, config);
    }
}

View File

@@ -0,0 +1,9 @@
package com.tencent.supersonic.semantic.query.domain.parser.convertor.sql;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.node.SemanticNode;
/**
 * Visitor hook for applying an optimization pass to a {@link SemanticNode}.
 */
public interface Optimization {

    /**
     * Visits a single semantic node; implementations may rewrite or annotate it.
     *
     * @param semanticNode the node to optimize
     */
    void visit(SemanticNode semanticNode);
}

View File

@@ -0,0 +1,108 @@
package com.tencent.supersonic.semantic.query.domain.parser.convertor.sql;
import com.tencent.supersonic.semantic.api.query.request.MetricReq;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.node.MeasureNode;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.node.MetricNode;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.node.SemanticNode;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.DataSource;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.Dimension;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.Identify;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.Measure;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.Metric;
import com.tencent.supersonic.semantic.query.domain.parser.schema.SemanticSchema;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import lombok.Data;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
@Data
public abstract class Renderer {

    /** The SELECT view being assembled by this renderer. */
    protected TableView tableView = new TableView();

    /** Sets the FROM source of the view under construction. */
    public void setTable(SqlNode table) {
        tableView.setTable(table);
    }

    /** Builds the current view into a SqlNode (a SELECT). */
    public SqlNode builder() {
        return tableView.build();
    }

    /** Builds the current view and wraps it as {@code (view) AS alias}. */
    public SqlNode builderAs(String alias) throws Exception {
        return SemanticNode.buildAs(alias, tableView.build());
    }

    /**
     * Renders this level of the query for the given request and datasources.
     *
     * @param nonAgg when true, aggregate functions are suppressed
     */
    public abstract void render(MetricReq metricCommand, List<DataSource> dataSources, SqlValidatorScope scope,
            SemanticSchema schema, boolean nonAgg) throws Exception;

    /** Finds a dimension on the datasource by case-insensitive name. */
    public static Optional<Dimension> getDimensionByName(String name, DataSource datasource) {
        return datasource.getDimensions().stream().filter(d -> d.getName().equalsIgnoreCase(name)).findFirst();
    }

    /** Finds a measure on the datasource by case-insensitive name. */
    public static Optional<Measure> getMeasureByName(String name, DataSource datasource) {
        return datasource.getMeasures().stream().filter(mm -> mm.getName().equalsIgnoreCase(name)).findFirst();
    }

    /** Finds a metric in the schema by case-insensitive name. */
    public static Optional<Metric> getMetricByName(String name, SemanticSchema schema) {
        return schema.getMetrics().stream().filter(m -> m.getName().equalsIgnoreCase(name)).findFirst();
    }

    /** Finds an identifier on the datasource by case-insensitive name. */
    public static Optional<Identify> getIdentifyByName(String name, DataSource datasource) {
        return datasource.getIdentifiers().stream().filter(i -> i.getName().equalsIgnoreCase(name)).findFirst();
    }

    /**
     * Builds a MetricNode for {@code metric}: for each measure backing the metric,
     * prepares aggregated and non-aggregated SqlNodes plus any constraint filter,
     * preferring the datasource's own measure definition over the metric's copy.
     * When no metric matches, {@code metric} is treated as a raw measure name.
     */
    public static MetricNode buildMetricNode(String metric, DataSource datasource, SqlValidatorScope scope,
            SemanticSchema schema, boolean nonAgg, String alias) throws Exception {
        Optional<Metric> metricOpt = getMetricByName(metric, schema);
        MetricNode metricNode = new MetricNode();
        if (metricOpt.isPresent()) {
            metricNode.setMetric(metricOpt.get());
            for (Measure m : metricOpt.get().getMetricTypeParams().getMeasures()) {
                // Prefer the datasource's definition of the measure when it exists.
                Optional<Measure> measure = getMeasureByName(m.getName(), datasource);
                if (measure.isPresent()) {
                    metricNode.getNonAggNode()
                            .put(measure.get().getName(), MeasureNode.buildNonAgg(alias, measure.get(), scope));
                    metricNode.getAggNode()
                            .put(measure.get().getName(), MeasureNode.buildAgg(measure.get(), nonAgg, scope));
                    metricNode.getAggFunction().put(measure.get().getName(), measure.get().getAgg());
                } else {
                    metricNode.getNonAggNode().put(m.getName(), MeasureNode.buildNonAgg(alias, m, scope));
                    metricNode.getAggNode().put(m.getName(), MeasureNode.buildAgg(m, nonAgg, scope));
                    metricNode.getAggFunction().put(m.getName(), m.getAgg());
                }
                if (m.getConstraint() != null && !m.getConstraint().isEmpty()) {
                    metricNode.getMeasureFilter().put(m.getName(), SemanticNode.parse(m.getConstraint(), scope));
                }
            }
            return metricNode;
        }
        // No metric with this name — fall back to a bare measure on the datasource.
        Optional<Measure> measure = getMeasureByName(metric, datasource);
        if (measure.isPresent()) {
            metricNode.getNonAggNode()
                    .put(measure.get().getName(), MeasureNode.buildNonAgg(alias, measure.get(), scope));
            metricNode.getAggNode().put(measure.get().getName(), MeasureNode.buildAgg(measure.get(), nonAgg, scope));
            metricNode.getAggFunction().put(measure.get().getName(), measure.get().getAgg());
            if (measure.get().getConstraint() != null && !measure.get().getConstraint().isEmpty()) {
                metricNode.getMeasureFilter()
                        .put(measure.get().getName(), SemanticNode.parse(measure.get().getConstraint(), scope));
            }
        }
        return metricNode;
    }

    /** De-duplicates the list; element order is not preserved (set-based, as before). */
    public static List<String> uniqList(List<String> list) {
        return new HashSet<>(list).stream().collect(Collectors.toList());
    }
}

View File

@@ -0,0 +1,49 @@
package com.tencent.supersonic.semantic.query.domain.parser.convertor.sql;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import lombok.Data;
import org.apache.calcite.sql.SqlBasicCall;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.SqlSelect;
import org.apache.calcite.sql.parser.SqlParserPos;
// Mutable builder for one SELECT level: accumulates select items, GROUP BY
// dimensions, WHERE conjuncts and pagination, then assembles a SqlSelect.
// Accessors are generated by Lombok's @Data.
@Data
public class TableView {
    // WHERE conjuncts
    private List<SqlNode> filter = new ArrayList<>();
    // GROUP BY items; also appended to the select list by build()
    private List<SqlNode> dimension = new ArrayList<>();
    // select-list items (measures/metrics)
    private List<SqlNode> measure = new ArrayList<>();
    private SqlNodeList order;
    private SqlNode fetch;
    private SqlNode offset;
    // FROM source
    private SqlNode table;
    private String alias;
    private List<String> primary;
    // Assembles the accumulated parts into a SqlSelect.
    // NOTE(review): appends `dimension` into `measure` in place, so build() is not
    // idempotent — calling it twice duplicates the dimension columns. Looks like
    // callers build each view exactly once; confirm before reuse.
    public SqlNode build() {
        measure.addAll(dimension);
        SqlNodeList dimensionNodeList = null;
        if (dimension.size() > 0) {
            // GROUP BY uses the unaliased form of each dimension (AS is stripped).
            dimensionNodeList = new SqlNodeList(getGroup(dimension), SqlParserPos.ZERO);
        }
        SqlNodeList filterNodeList = null;
        if (filter.size() > 0) {
            filterNodeList = new SqlNodeList(filter, SqlParserPos.ZERO);
        }
        return new SqlSelect(SqlParserPos.ZERO, null, new SqlNodeList(measure, SqlParserPos.ZERO), table,
                filterNodeList, dimensionNodeList, null, null, null, order, offset, fetch, null);
    }
    // Strips AS wrappers so GROUP BY references the underlying expression.
    private List<SqlNode> getGroup(List<SqlNode> sqlNodeList) {
        return sqlNodeList.stream()
                .map(s -> (s.getKind().equals(SqlKind.AS) ? ((SqlBasicCall) s).getOperandList().get(0) : s))
                .collect(
                        Collectors.toList());
    }
}

View File

@@ -0,0 +1,26 @@
package com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.node;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
public class AggFunctionNode extends SemanticNode {

    /** Aggregation functions recognized by the semantic layer. */
    public static enum AggFunction {
        AVG,
        COUNT_DISTINCT,
        MAX,
        MIN,
        SUM,
        COUNT,
        DISTINCT
    }

    /**
     * Builds the aggregation call {@code agg(name)} as a validated SqlNode;
     * COUNT_DISTINCT is expanded to {@code COUNT(DISTINCT name)}.
     */
    public static SqlNode build(String agg, String name, SqlValidatorScope scope) throws Exception {
        final String expression;
        if (AggFunction.COUNT_DISTINCT.name().equalsIgnoreCase(agg)) {
            expression = AggFunction.COUNT.name() + " ( " + AggFunction.DISTINCT.name() + " " + name + " ) ";
        } else {
            expression = agg + " ( " + name + " ) ";
        }
        return parse(expression, scope);
    }
}

View File

@@ -0,0 +1,202 @@
package com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.node;
import com.tencent.supersonic.semantic.api.query.request.MetricReq;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.Configuration;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.Constants;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.DataSource;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.Dimension;
import com.tencent.supersonic.semantic.query.domain.parser.schema.SemanticSchema;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.validate.SqlValidatorScope;
@Slf4j
public class DataSourceNode extends SemanticNode {

    /**
     * Builds the FROM item for a datasource: its SQL query (or
     * {@code select * from <table>}) parsed, validated, and aliased with the
     * datasource name.
     *
     * @throws Exception when the datasource declares neither sqlQuery nor tableQuery
     */
    public static SqlNode build(DataSource datasource, SqlValidatorScope scope) throws Exception {
        String sqlTable = "";
        if (datasource.getSqlQuery() != null && !datasource.getSqlQuery().isEmpty()) {
            sqlTable = datasource.getSqlQuery();
        } else if (datasource.getTableQuery() != null && !datasource.getTableQuery().isEmpty()) {
            sqlTable = "select * from " + datasource.getTableQuery();
        }
        if (sqlTable.isEmpty()) {
            throw new Exception("DatasourceNode build error [tableSqlNode not found]");
        }
        return SemanticNode.buildAs(datasource.getName(), getTable(sqlTable, scope));
    }

    // Parses a full query (not a scalar expression) and validates it in the scope.
    private static SqlNode getTable(String sqlQuery, SqlValidatorScope scope) throws Exception {
        SqlParser sqlParser = SqlParser.create(sqlQuery, Configuration.getParserConfig());
        SqlNode sqlNode = sqlParser.parseQuery();
        scope.validateExpr(sqlNode);
        return sqlNode;
    }

    /** Joins the datasource names with '_' (used for generated view aliases). */
    public static String getNames(List<DataSource> dataSourceList) {
        return dataSourceList.stream().map(DataSource::getName).collect(Collectors.joining("_"));
    }

    /**
     * Splits the request into queried dimension names and the measure names backing
     * the queried metrics. Requested metric names with no schema definition are kept
     * in {@code measures} as-is (treated as raw measures). Results are appended to
     * the caller-supplied collections.
     */
    public static void getQueryDimensionMeasure(SemanticSchema schema, MetricReq metricCommand,
            Set<String> queryDimension, List<String> measures) {
        // A dimension may carry a datasource qualifier (separated by
        // Constants.DIMENSION_IDENTIFY); keep only the trailing name part.
        queryDimension.addAll(metricCommand.getDimensions().stream()
                .map(d -> d.contains(Constants.DIMENSION_IDENTIFY) ? d.split(Constants.DIMENSION_IDENTIFY)[1] : d)
                .collect(Collectors.toSet()));
        Set<String> schemaMetricName = schema.getMetrics().stream().map(Metric::getName)
                .collect(Collectors.toSet());
        // Expand each schema-defined metric into its underlying measure names.
        schema.getMetrics().stream().filter(m -> metricCommand.getMetrics().contains(m.getName()))
                .forEach(m -> m.getMetricTypeParams().getMeasures().stream().forEach(mm -> measures.add(mm.getName())));
        // Metric names unknown to the schema pass through unchanged.
        metricCommand.getMetrics().stream().filter(m -> !schemaMetricName.contains(m)).forEach(measures::add);
    }

    /**
     * Selects the datasources needed to answer the request: the base datasource is
     * the one covering the most requested measures; if it does not cover every
     * requested dimension and measure, datasources sharing an identifier with it
     * (and contributing a missing dimension/measure) are linked in.
     *
     * @return the base datasource first, followed by any linked datasources
     * @throws Exception when the base datasource has no identifiers, or no
     *         combination of datasources covers the request
     */
    public static List<DataSource> getMatchDataSources(SqlValidatorScope scope, SemanticSchema schema,
            MetricReq metricCommand) throws Exception {
        List<DataSource> dataSources = new ArrayList<>();
        // check by metric
        List<String> measures = new ArrayList<>();
        Set<String> queryDimension = new HashSet<>();
        getQueryDimensionMeasure(schema, metricCommand, queryDimension, measures);
        DataSource baseDataSource = null;
        // one , match measure count
        Map<String, Integer> dataSourceMeasures = new HashMap<>();
        for (Map.Entry<String, DataSource> entry : schema.getDatasource().entrySet()) {
            Set<String> sourceMeasure = entry.getValue().getMeasures().stream().map(Measure::getName)
                    .collect(Collectors.toSet());
            sourceMeasure.retainAll(measures);
            dataSourceMeasures.put(entry.getKey(), sourceMeasure.size());
        }
        log.info("dataSourceMeasures [{}]", dataSourceMeasures);
        Optional<Map.Entry<String, Integer>> base = dataSourceMeasures.entrySet().stream()
                .sorted(Map.Entry.comparingByValue(Comparator.reverseOrder())).findFirst();
        if (base.isPresent()) {
            baseDataSource = schema.getDatasource().get(base.get().getKey());
            dataSources.add(baseDataSource);
        }
        // second , check match all dimension and metric
        if (baseDataSource != null) {
            Set<String> filterMeasure = new HashSet<>();
            Set<String> sourceMeasure = baseDataSource.getMeasures().stream().map(Measure::getName)
                    .collect(Collectors.toSet());
            // Identifiers and schema-level dimensions also count as queryable dimensions.
            Set<String> dimension = baseDataSource.getDimensions().stream().map(Dimension::getName)
                    .collect(Collectors.toSet());
            baseDataSource.getIdentifiers().stream().forEach(i -> dimension.add(i.getName()));
            if (schema.getDimension().containsKey(baseDataSource.getName())) {
                schema.getDimension().get(baseDataSource.getName()).stream().forEach(d -> dimension.add(d.getName()));
            }
            filterMeasure.addAll(sourceMeasure);
            filterMeasure.addAll(dimension);
            // checkMatch mutates its arguments: afterwards `measures` and
            // `queryDimension` hold only the still-unmatched names.
            boolean isAllMatch = checkMatch(sourceMeasure, queryDimension, measures, dimension, metricCommand, scope);
            if (isAllMatch) {
                log.info("baseDataSource match all ");
                return dataSources;
            }
            // find all dataSource has the same identifiers
            Set<String> baseIdentifiers = baseDataSource.getIdentifiers().stream().map(Identify::getName)
                    .collect(Collectors.toSet());
            if (baseIdentifiers.isEmpty()) {
                throw new Exception("datasource error : " + baseDataSource.getName() + " miss identifier");
            }
            List<DataSource> linkDataSources = getLinkDataSources(baseIdentifiers, queryDimension, measures,
                    baseDataSource, schema);
            if (linkDataSources.isEmpty()) {
                throw new Exception(
                        String.format("not find the match datasource : dimension[%s],measure[%s]", queryDimension,
                                measures));
            }
            log.info("linkDataSources {}", linkDataSources);
            dataSources.addAll(linkDataSources);
        }
        return dataSources;
    }

    /**
     * Checks whether the base datasource covers every requested measure and
     * dimension. MUTATES its collection arguments: {@code measures} and
     * {@code queryDimension} are reduced to the unmatched remainder, and
     * {@code sourceMeasure}/{@code dimension} to the matched intersection —
     * callers depend on this.
     */
    private static boolean checkMatch(Set<String> sourceMeasure, Set<String> queryDimension, List<String> measures,
            Set<String> dimension, MetricReq metricCommand, SqlValidatorScope scope) throws Exception {
        boolean isAllMatch = true;
        sourceMeasure.retainAll(measures);
        if (sourceMeasure.size() < measures.size()) {
            log.info("baseDataSource not match all measure");
            isAllMatch = false;
        }
        measures.removeAll(sourceMeasure);
        dimension.retainAll(queryDimension);
        if (dimension.size() < queryDimension.size()) {
            log.info("baseDataSource not match all dimension");
            isAllMatch = false;
        }
        queryDimension.removeAll(dimension);
        if (metricCommand.getWhere() != null && !metricCommand.getWhere().isEmpty()) {
            // NOTE(review): whereFields is computed but never used; kept because
            // SemanticNode.parse also validates the WHERE clause against the scope
            // (and may throw) — confirm whether the field check was meant to feed
            // into the match result.
            Set<String> whereFields = new HashSet<>();
            SqlNode sqlNode = SemanticNode.parse(metricCommand.getWhere(), scope);
            FilterNode.getFilterField(sqlNode, whereFields);
        }
        return isAllMatch;
    }

    /**
     * Finds datasources (other than the base) that share at least one identifier
     * with it and contribute at least one of the still-missing dimensions or
     * measures; also links datasources whose schema-level dimensions match.
     */
    private static List<DataSource> getLinkDataSources(Set<String> baseIdentifiers, Set<String> queryDimension,
            List<String> measures, DataSource baseDataSource, SemanticSchema schema) {
        Set<String> linkDataSourceName = new HashSet<>();
        List<DataSource> linkDataSources = new ArrayList<>();
        for (Map.Entry<String, DataSource> entry : schema.getDatasource().entrySet()) {
            if (entry.getKey().equalsIgnoreCase(baseDataSource.getName())) {
                continue;
            }
            Long identifierNum = entry.getValue().getIdentifiers().stream().map(Identify::getName)
                    .filter(baseIdentifiers::contains).count();
            if (identifierNum > 0) {
                boolean isMatch = false;
                if (!queryDimension.isEmpty()) {
                    Set<String> linkDimension = entry.getValue().getDimensions().stream().map(Dimension::getName)
                            .collect(Collectors.toSet());
                    entry.getValue().getIdentifiers().stream().forEach(i -> linkDimension.add(i.getName()));
                    linkDimension.retainAll(queryDimension);
                    if (!linkDimension.isEmpty()) {
                        isMatch = true;
                    }
                }
                if (!measures.isEmpty()) {
                    Set<String> linkMeasure = entry.getValue().getMeasures().stream().map(Measure::getName)
                            .collect(Collectors.toSet());
                    linkMeasure.retainAll(measures);
                    if (!linkMeasure.isEmpty()) {
                        isMatch = true;
                    }
                }
                if (isMatch) {
                    linkDataSourceName.add(entry.getKey());
                }
            }
        }
        // Schema-level dimension groups can also satisfy missing dimensions.
        for (Map.Entry<String, List<Dimension>> entry : schema.getDimension().entrySet()) {
            if (!queryDimension.isEmpty()) {
                Set<String> linkDimension = entry.getValue().stream().map(Dimension::getName)
                        .collect(Collectors.toSet());
                linkDimension.retainAll(queryDimension);
                if (!linkDimension.isEmpty()) {
                    linkDataSourceName.add(entry.getKey());
                }
            }
        }
        for (String linkName : linkDataSourceName) {
            linkDataSources.add(schema.getDatasource().get(linkName));
        }
        return linkDataSources;
    }
}

View File

@@ -0,0 +1,38 @@
package com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.node;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.Dimension;
import java.util.List;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
public class DimensionNode extends SemanticNode {

    /** Parses the dimension's expression and aliases it with the dimension name. */
    public static SqlNode build(Dimension dimension, SqlValidatorScope scope) throws Exception {
        return buildAs(dimension.getName(), parse(dimension.getExpr(), scope));
    }

    /** Parses the dimension's expression and flattens it into its identifier leaves. */
    public static List<SqlNode> expand(Dimension dimension, SqlValidatorScope scope) throws Exception {
        return expand(parse(dimension.getExpr(), scope), scope);
    }

    /** Parses the dimension's name as a bare, validated node. */
    public static SqlNode buildName(Dimension dimension, SqlValidatorScope scope) throws Exception {
        return parse(dimension.getName(), scope);
    }

    /** Parses the dimension's raw expression without aliasing. */
    public static SqlNode buildExp(Dimension dimension, SqlValidatorScope scope) throws Exception {
        return parse(dimension.getExpr(), scope);
    }

    /**
     * Parses the dimension's name and aliases it with {@code alias};
     * an empty alias yields the bare name node instead.
     */
    public static SqlNode buildNameAs(String alias, Dimension dimension, SqlValidatorScope scope) throws Exception {
        if ("".equals(alias)) {
            return buildName(dimension, scope);
        }
        return buildAs(alias, parse(dimension.getName(), scope));
    }
}

View File

@@ -0,0 +1,27 @@
package com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.node;
import java.util.Set;
import org.apache.calcite.sql.SqlBasicCall;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlNode;
public class FilterNode extends SemanticNode {

    /**
     * Recursively collects, into {@code fields}, the leading name (lower-cased)
     * of every SqlIdentifier reachable from the given node.
     */
    public static void getFilterField(SqlNode sqlNode, Set<String> fields) {
        if (sqlNode instanceof SqlIdentifier) {
            fields.add(((SqlIdentifier) sqlNode).names.get(0).toLowerCase());
        } else if (sqlNode instanceof SqlBasicCall) {
            for (SqlNode operand : ((SqlBasicCall) sqlNode).getOperandList()) {
                getFilterField(operand, fields);
            }
        }
    }

    /** Placeholder: currently always reports no match; the argument is unused. */
    public static boolean isMatchDataSource(Set<String> measures) {
        return false;
    }
}

View File

@@ -0,0 +1,12 @@
package com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.node;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.Identify;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
public class IdentifyNode extends SemanticNode {
    /** Parses the identifier's name into a validated SqlNode (no alias applied). */
    public static SqlNode build(Identify identify, SqlValidatorScope scope) throws Exception {
        return parse(identify.getName(), scope);
    }
}

View File

@@ -0,0 +1,13 @@
package com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.node;
import lombok.Data;
import org.apache.calcite.sql.SqlNode;
// Value holder for the pieces of a JOIN clause; no behavior of its own
// (accessors are generated by Lombok's @Data).
@Data
public class JoinNode extends SemanticNode {
    // the join node itself
    private SqlNode join;
    // the ON condition
    private SqlNode on;
    // left-hand input
    private SqlNode left;
    // right-hand input
    private SqlNode right;
}

View File

@@ -0,0 +1,50 @@
package com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.node;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.Measure;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
public class MeasureNode extends SemanticNode {

    /**
     * Builds a node for the measure. When {@code noAgg} is false and the measure
     * declares an agg function, the result is wrapped in that aggregation; a measure
     * with an explicit expression is additionally aliased with the measure name.
     */
    public static SqlNode build(Measure measure, boolean noAgg, SqlValidatorScope scope) throws Exception {
        boolean addAgg = false;
        if (!noAgg && measure.getAgg() != null && !measure.getAgg().isEmpty()) {
            addAgg = true;
        }
        if (measure.getExpr() == null) {
            if (addAgg) {
                return parse(measure.getAgg() + " ( " + measure.getName() + " ) ", scope);
            }
            return parse(measure.getName(), scope);
        } else {
            if (addAgg) {
                return buildAs(measure.getName(), parse(measure.getAgg() + " ( " + measure.getExpr() + " ) ", scope));
            }
            return buildAs(measure.getName(), parse(measure.getExpr(), scope));
        }
    }

    /** Non-aggregated form: the measure's expr (or alias-qualified name) aliased as the measure name. */
    public static SqlNode buildNonAgg(String alias, Measure measure, SqlValidatorScope scope) throws Exception {
        return buildAs(measure.getName(), getExpr(measure, alias, scope));
    }

    /** Aggregated form: {@code agg(name) AS name}, or the bare name when no agg applies. */
    public static SqlNode buildAgg(Measure measure, boolean noAgg, SqlValidatorScope scope) throws Exception {
        if ((measure.getAgg() == null || measure.getAgg().isEmpty()) || noAgg) {
            return parse(measure.getName(), scope);
        }
        return buildAs(measure.getName(), AggFunctionNode.build(measure.getAgg(), measure.getName(), scope));
    }

    /** Builds {@code aggFunc(name)} aliased back to {@code name}. */
    public static SqlNode buildAggAs(String aggFunc, String name, SqlValidatorScope scope) throws Exception {
        return buildAs(name, AggFunctionNode.build(aggFunc, name, scope));
    }

    // Resolves the measure's underlying expression: the explicit expr when set,
    // otherwise the measure name, optionally qualified with the given table alias.
    private static SqlNode getExpr(Measure measure, String alias, SqlValidatorScope scope) throws Exception {
        if (measure.getExpr() == null) {
            return parse((alias.isEmpty() ? "" : alias + ".") + measure.getName(), scope);
        }
        return parse(measure.getExpr(), scope);
    }
}

View File

@@ -0,0 +1,29 @@
package com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.node;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.Metric;
import java.util.HashMap;
import java.util.Map;
import lombok.Data;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
// Holds the per-measure SqlNodes (aggregated / non-aggregated / filter) that make
// up one metric, keyed by measure name; accessors generated by Lombok's @Data.
@Data
public class MetricNode extends SemanticNode {
    private Metric metric;
    // measure name -> aggregated node (agg(name) AS name)
    private Map<String, SqlNode> aggNode = new HashMap<>();
    // measure name -> plain (non-aggregated) node
    private Map<String, SqlNode> nonAggNode = new HashMap<>();
    // measure name -> parsed constraint expression filtering the measure
    private Map<String, SqlNode> measureFilter = new HashMap<>();
    // measure name -> aggregation function name
    private Map<String, String> aggFunction = new HashMap<>();

    /**
     * Builds a node for the metric: its type-params expression aliased with the
     * metric name, or just the metric name when no expression is defined.
     */
    public static SqlNode build(Metric metric, SqlValidatorScope scope) throws Exception {
        if (metric.getMetricTypeParams() == null || metric.getMetricTypeParams().getExpr() == null
                || metric.getMetricTypeParams().getExpr().isEmpty()) {
            return parse(metric.getName(), scope);
        }
        SqlNode sqlNode = parse(metric.getMetricTypeParams().getExpr(), scope);
        return buildAs(metric.getName(), sqlNode);
    }
}

View File

@@ -0,0 +1,121 @@
package com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.node;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.Configuration;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.Optimization;
import com.tencent.supersonic.semantic.query.domain.parser.schema.SemanticSqlDialect;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.calcite.sql.SqlAsOperator;
import org.apache.calcite.sql.SqlBasicCall;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlSelect;
import org.apache.calcite.sql.advise.SqlSimpleParser;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import org.apache.commons.lang3.StringUtils;
// Base class for the semantic AST helpers: parsing expressions, aliasing,
// flattening to identifiers, and rendering SqlNodes back to SQL text.
public abstract class SemanticNode {

    /** Visitor hook for optimization passes. */
    public void accept(Optimization optimization) {
        optimization.visit(this);
    }

    /** Parses a scalar expression and validates it against the given scope. */
    public static SqlNode parse(String expression, SqlValidatorScope scope) throws Exception {
        SqlParser sqlParser = SqlParser.create(expression, Configuration.getParserConfig());
        SqlNode sqlNode = sqlParser.parseExpression();
        scope.validateExpr(sqlNode);
        return sqlNode;
    }

    /** Wraps a node as {@code sqlNode AS asName}. */
    public static SqlNode buildAs(String asName, SqlNode sqlNode) throws Exception {
        SqlAsOperator sqlAsOperator = new SqlAsOperator();
        SqlIdentifier sqlIdentifier = new SqlIdentifier(asName, SqlParserPos.ZERO);
        return new SqlBasicCall(sqlAsOperator, new ArrayList<>(Arrays.asList(sqlNode, sqlIdentifier)),
                SqlParserPos.ZERO);
    }

    /** Renders the node to SQL in the project dialect, simplified via SqlSimpleParser. */
    public static String getSql(SqlNode sqlNode) {
        SqlSimpleParser sqlSimpleParser = new SqlSimpleParser("", Configuration.getParserConfig());
        return sqlSimpleParser.simplifySql(sqlNode.toSqlString(SemanticSqlDialect.DEFAULT).getSql());
    }

    /** Delegates to Commons Lang StringUtils.isNumeric (digits-only check). */
    public static boolean isNumeric(String expr) {
        return StringUtils.isNumeric(expr);
    }

    /**
     * Flattens a composite expression into its identifier leaves; a node that is
     * already an identifier is returned as a singleton list.
     */
    public static List<SqlNode> expand(SqlNode sqlNode, SqlValidatorScope scope) throws Exception {
        if (!isIdentifier(sqlNode)) {
            List<SqlNode> sqlNodeList = new ArrayList<>();
            expand(sqlNode, sqlNodeList);
            return sqlNodeList;
        }
        return new ArrayList<>(Arrays.asList(sqlNode));
    }

    // Recursive worker for expand(): collects SqlIdentifier leaves depth-first;
    // node kinds other than identifier/basic-call are silently skipped.
    public static void expand(SqlNode sqlNode, List<SqlNode> sqlNodeList) {
        if (sqlNode instanceof SqlIdentifier) {
            sqlNodeList.add(sqlNode);
            return;
        }
        if (sqlNode instanceof SqlBasicCall) {
            SqlBasicCall sqlBasicCall = (SqlBasicCall) sqlNode;
            for (SqlNode sqlNo : sqlBasicCall.getOperandList()) {
                expand(sqlNo, sqlNodeList);
            }
        }
    }

    public static boolean isIdentifier(SqlNode sqlNode) {
        return sqlNode instanceof SqlIdentifier;
    }

    /**
     * Returns the alias operand of an AS call, the node itself for a bare
     * identifier, and null otherwise.
     * NOTE(review): the scope parameter is unused.
     */
    public static SqlNode getAlias(SqlNode sqlNode, SqlValidatorScope scope) throws Exception {
        if (sqlNode instanceof SqlBasicCall) {
            SqlBasicCall sqlBasicCall = (SqlBasicCall) sqlNode;
            if (sqlBasicCall.getKind().equals(SqlKind.AS) && sqlBasicCall.getOperandList().size() > 1) {
                return sqlBasicCall.getOperandList().get(1);
            }
        }
        if (sqlNode instanceof SqlIdentifier) {
            return sqlNode;
        }
        return null;
    }

    /**
     * Collects the output column names of the underlying SELECT: bare identifier
     * names or AS aliases; any other select item contributes an empty string.
     */
    public static Set<String> getSelect(SqlNode sqlNode) {
        SqlNode table = getTable(sqlNode);
        if (table instanceof SqlSelect) {
            SqlSelect tableSelect = (SqlSelect) table;
            return tableSelect.getSelectList().stream()
                    .map(s -> (s instanceof SqlIdentifier) ? ((SqlIdentifier) s).names.get(0)
                            : (((s instanceof SqlBasicCall) && s.getKind().equals(SqlKind.AS))
                                    ? ((SqlBasicCall) s).getOperandList().get(1).toString() : ""))
                    .collect(Collectors.toSet());
        }
        return new HashSet<>();
    }

    /** Unwraps {@code (SELECT ...) AS alias} to the inner SqlSelect; otherwise returns the node unchanged. */
    public static SqlNode getTable(SqlNode sqlNode) {
        if (sqlNode instanceof SqlBasicCall) {
            SqlBasicCall sqlBasicCall = (SqlBasicCall) sqlNode;
            if (sqlBasicCall.getOperator().getKind().equals(SqlKind.AS)) {
                if (sqlBasicCall.getOperandList().get(0) instanceof SqlSelect) {
                    SqlSelect table = (SqlSelect) sqlBasicCall.getOperandList().get(0);
                    return table;
                }
            }
        }
        return sqlNode;
    }
}

View File

@@ -0,0 +1,68 @@
package com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.render;
import com.tencent.supersonic.semantic.api.query.request.MetricReq;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.Renderer;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.TableView;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.node.FilterNode;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.node.MetricNode;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.node.SemanticNode;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.Constants;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.DataSource;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.Metric;
import com.tencent.supersonic.semantic.query.domain.parser.schema.SemanticSchema;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.validate.SqlValidatorScope;
public class FilterRender extends Renderer {

    /**
     * Applies the request's WHERE clause on top of the view built by the previous
     * renderer. Dimensions/metrics referenced by the WHERE clause are appended to
     * this level's select list so the filter can see them; when a filter exists,
     * the assembled view is wrapped as {@code SELECT * FROM (inner) WHERE ...}.
     */
    @Override
    public void render(MetricReq metricCommand, List<DataSource> dataSources, SqlValidatorScope scope,
            SemanticSchema schema, boolean nonAgg) throws Exception {
        TableView tableView = super.tableView;
        SqlNode filterNode = null;
        List<String> queryMetrics = new ArrayList<>(metricCommand.getMetrics());
        List<String> queryDimensions = new ArrayList<>(metricCommand.getDimensions());
        if (metricCommand.getWhere() != null && !metricCommand.getWhere().isEmpty()) {
            filterNode = SemanticNode.parse(metricCommand.getWhere(), scope);
            // Classify each field referenced by the WHERE clause as dimension or
            // metric against every participating datasource.
            Set<String> whereFields = new HashSet<>();
            FilterNode.getFilterField(filterNode, whereFields);
            List<String> fieldWhere = whereFields.stream().collect(Collectors.toList());
            Set<String> dimensions = new HashSet<>();
            Set<String> metrics = new HashSet<>();
            for (DataSource dataSource : dataSources) {
                SourceRender.whereDimMetric(fieldWhere, metricCommand.getMetrics(), metricCommand.getDimensions(),
                        dataSource, schema, dimensions, metrics);
            }
            queryMetrics.addAll(metrics);
            queryDimensions.addAll(dimensions);
        }
        // Project all requested dimensions and metrics through this level.
        for (String dimension : queryDimensions) {
            tableView.getMeasure().add(SemanticNode.parse(dimension, scope));
        }
        for (String metric : queryMetrics) {
            Optional<Metric> optionalMetric = Renderer.getMetricByName(metric, schema);
            if (optionalMetric.isPresent()) {
                tableView.getMeasure().add(MetricNode.build(optionalMetric.get(), scope));
            } else {
                // Unknown to the schema — treat the name as a raw column.
                tableView.getMeasure().add(SemanticNode.parse(metric, scope));
            }
        }
        if (filterNode != null) {
            // Wrap the assembled view so the WHERE applies over its full select list.
            TableView filterView = new TableView();
            filterView.setTable(SemanticNode.buildAs(Constants.DATASOURCE_TABLE_FILTER_PREFIX, tableView.build()));
            filterView.getFilter().add(filterNode);
            filterView.getMeasure().add(SqlIdentifier.star(SqlParserPos.ZERO));
            super.tableView = filterView;
        }
    }
}

View File

@@ -0,0 +1,277 @@
package com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.render;
import com.tencent.supersonic.semantic.api.query.request.MetricReq;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.Renderer;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.TableView;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.node.AggFunctionNode;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.node.DataSourceNode;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.node.FilterNode;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.node.MetricNode;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.node.SemanticNode;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.Constants;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.DataSource;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.Dimension;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.Identify;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.Metric;
import com.tencent.supersonic.semantic.query.domain.parser.schema.SemanticSchema;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.JoinConditionType;
import org.apache.calcite.sql.JoinType;
import org.apache.calcite.sql.SqlBasicCall;
import org.apache.calcite.sql.SqlJoin;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlLiteral;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.validate.SqlValidatorScope;
@Slf4j
public class JoinRender extends Renderer {

    /**
     * Renders a query that spans multiple data sources by INNER-joining them on their
     * shared identifier/dimension columns. Each data source becomes one joined table
     * (rendered via {@link SourceRender#renderOne}); the join result is wrapped in an
     * inner view whose columns feed an outer filter/aggregation view that becomes
     * this renderer's {@code tableView}.
     */
    @Override
    public void render(MetricReq metricCommand, List<DataSource> dataSources, SqlValidatorScope scope,
            SemanticSchema schema, boolean nonAgg) throws Exception {
        // Fields referenced by the WHERE clause; they must be projected by every
        // joined sub-query that can supply them.
        String queryWhere = metricCommand.getWhere();
        Set<String> whereFields = new HashSet<>();
        List<String> fieldWhere = new ArrayList<>();
        if (queryWhere != null && !queryWhere.isEmpty()) {
            SqlNode sqlNode = SemanticNode.parse(queryWhere, scope);
            FilterNode.getFilterField(sqlNode, whereFields);
            fieldWhere = whereFields.stream().collect(Collectors.toList());
        }
        Set<String> queryAllDimension = new HashSet<>();
        List<String> measures = new ArrayList<>();
        DataSourceNode.getQueryDimensionMeasure(schema, metricCommand, queryAllDimension, measures);
        // `left` accumulates the join tree built so far; `leftTable` is the view most
        // recently added to it (used to derive the next ON condition).
        SqlNode left = null;
        TableView leftTable = null;
        TableView innerView = new TableView();
        TableView filterView = new TableView();
        // Keyed by output column name so a column supplied by several sources is
        // projected only once from the join.
        Map<String, SqlNode> innerSelect = new HashMap<>();
        Set<String> filterDimension = new HashSet<>();
        for (int i = 0; i < dataSources.size(); i++) {
            final DataSource dataSource = dataSources.get(i);
            final Set<String> filterDimensions = new HashSet<>();
            final Set<String> filterMetrics = new HashSet<>();
            final List<String> queryDimension = new ArrayList<>();
            final List<String> queryMetrics = new ArrayList<>();
            // Classify WHERE-only fields as dimensions/metrics of this data source.
            SourceRender.whereDimMetric(fieldWhere, queryMetrics, queryDimension, dataSource, schema, filterDimensions,
                    filterMetrics);
            // The per-source requirement = requested metrics/dimensions + those needed
            // by the filter, de-duplicated.
            List<String> reqMetric = new ArrayList<>(metricCommand.getMetrics());
            reqMetric.addAll(filterMetrics);
            reqMetric = uniqList(reqMetric);
            List<String> reqDimension = new ArrayList<>(metricCommand.getDimensions());
            reqDimension.addAll(filterDimensions);
            reqDimension = uniqList(reqDimension);
            Set<String> sourceMeasure = dataSource.getMeasures().stream().map(mm -> mm.getName())
                    .collect(Collectors.toSet());
            // Project the metrics this source can satisfy into innerSelect/filterView.
            doMetric(innerSelect, filterView, queryMetrics, reqMetric, dataSource, sourceMeasure, scope, schema,
                    nonAgg);
            Set<String> dimension = dataSource.getDimensions().stream().map(dd -> dd.getName())
                    .collect(Collectors.toSet());
            doDimension(innerSelect, filterDimension, queryDimension, reqDimension, dataSource, dimension, scope,
                    schema);
            // Identifiers are always projected so join conditions can reference them.
            List<String> primary = new ArrayList<>();
            for (Identify identify : dataSource.getIdentifiers()) {
                primary.add(identify.getName());
                if (!fieldWhere.contains(identify.getName())) {
                    fieldWhere.add(identify.getName());
                }
            }
            TableView tableView = SourceRender.renderOne(false, "", fieldWhere, queryMetrics, queryDimension,
                    metricCommand.getWhere(), dataSources.get(i), scope, schema, true);
            log.info("tableView {}", tableView.getTable().toString());
            String alias = Constants.JOIN_TABLE_PREFIX + dataSource.getName();
            tableView.setAlias(alias);
            tableView.setPrimary(primary);
            if (left == null) {
                // First table: becomes the left side of the first join.
                leftTable = tableView;
                left = SemanticNode.buildAs(tableView.getAlias(), getTable(tableView, scope));
                continue;
            }
            if (!left.getKind().equals(SqlKind.AS)) {
                left = SemanticNode.buildAs(Constants.JOIN_TABLE_LEFT_PREFIX + tableView.getAlias(),
                        getTable(tableView, scope));
            }
            left = new SqlJoin(
                    SqlParserPos.ZERO,
                    left,
                    SqlLiteral.createBoolean(false, SqlParserPos.ZERO),
                    SqlLiteral.createSymbol(JoinType.INNER, SqlParserPos.ZERO),
                    SemanticNode.buildAs(tableView.getAlias(), getTable(tableView, scope)),
                    SqlLiteral.createSymbol(JoinConditionType.ON, SqlParserPos.ZERO),
                    getCondition(leftTable, tableView, dataSource, schema, scope));
            leftTable = tableView;
        }
        // Inner view: SELECT <deduped columns> FROM <join tree>.
        for (Map.Entry<String, SqlNode> entry : innerSelect.entrySet()) {
            innerView.getMeasure().add(entry.getValue());
        }
        innerView.setTable(left);
        filterView.setTable(SemanticNode.buildAs(Constants.JOIN_TABLE_OUT_PREFIX, innerView.build()));
        if (!filterDimension.isEmpty()) {
            for (String d : filterDimension) {
                // In non-aggregate mode dimensions stay in the select list; otherwise
                // they become GROUP BY keys of the outer view.
                if (nonAgg) {
                    filterView.getMeasure().add(SemanticNode.parse(d, scope));
                } else {
                    filterView.getDimension().add(SemanticNode.parse(d, scope));
                }
            }
        }
        super.tableView = filterView;
    }

    /**
     * For each required metric this data source can satisfy, projects its underlying
     * measures out of the join (into {@code innerSelect}) and adds the aggregated
     * (or raw, when {@code nonAgg}) expression to the outer {@code filterView}.
     */
    private void doMetric(Map<String, SqlNode> innerSelect, TableView filterView, List<String> queryMetrics,
            List<String> reqMetrics, DataSource dataSource, Set<String> sourceMeasure, SqlValidatorScope scope,
            SemanticSchema schema, boolean nonAgg) throws Exception {
        String alias = Constants.JOIN_TABLE_PREFIX + dataSource.getName();
        for (String m : reqMetrics) {
            if (getMatchMetric(schema, sourceMeasure, m, queryMetrics)) {
                MetricNode metricNode = buildMetricNode(m, dataSource, scope, schema, nonAgg, alias);
                if (!metricNode.getNonAggNode().isEmpty()) {
                    for (String measure : metricNode.getNonAggNode().keySet()) {
                        // Qualify with the joined table's alias, re-expose under the
                        // bare measure name.
                        innerSelect.put(measure,
                                SemanticNode.buildAs(measure, SemanticNode.parse(alias + "." + measure, scope)));
                    }
                }
                if (metricNode.getAggFunction() != null && !metricNode.getAggFunction().isEmpty()) {
                    for (Map.Entry<String, String> entry : metricNode.getAggFunction().entrySet()) {
                        if (metricNode.getNonAggNode().containsKey(entry.getKey())) {
                            if (nonAgg) {
                                filterView.getMeasure().add(SemanticNode.buildAs(entry.getKey(),
                                        SemanticNode.parse(entry.getKey(), scope)));
                            } else {
                                filterView.getMeasure().add(SemanticNode.buildAs(entry.getKey(),
                                        AggFunctionNode.build(entry.getValue(), entry.getKey(), scope)));
                            }
                        }
                    }
                }
            }
        }
    }

    /**
     * Projects each required dimension this data source can satisfy out of the join
     * and records it in {@code filterDimension} for the outer view's select/group-by.
     * Dimensions written as {@code identifier__dim} are parsed back to the raw column.
     */
    private void doDimension(Map<String, SqlNode> innerSelect, Set<String> filterDimension, List<String> queryDimension,
            List<String> reqDimensions, DataSource dataSource, Set<String> dimension, SqlValidatorScope scope,
            SemanticSchema schema) throws Exception {
        String alias = Constants.JOIN_TABLE_PREFIX + dataSource.getName();
        for (String d : reqDimensions) {
            if (getMatchDimension(schema, dimension, dataSource, d, queryDimension)) {
                if (d.contains(Constants.DIMENSION_IDENTIFY)) {
                    // "prefix__column": select the physical column, alias it back to
                    // the full prefixed name.
                    String[] identifyDimension = d.split(Constants.DIMENSION_IDENTIFY);
                    innerSelect.put(d,
                            SemanticNode.buildAs(d, SemanticNode.parse(alias + "." + identifyDimension[1], scope)));
                } else {
                    innerSelect.put(d, SemanticNode.buildAs(d, SemanticNode.parse(alias + "." + d, scope)));
                }
                filterDimension.add(d);
            }
        }
    }

    /**
     * Returns true when the data source can supply metric {@code m} — either all of
     * the metric's underlying measures are present, or {@code m} itself is a measure.
     * Side effect: appends {@code m} to {@code queryMetrics} on a match.
     */
    private boolean getMatchMetric(SemanticSchema schema, Set<String> sourceMeasure, String m,
            List<String> queryMetrics) {
        Optional<Metric> metric = schema.getMetrics().stream().filter(mm -> mm.getName().equalsIgnoreCase(m))
                .findFirst();
        boolean isAdd = false;
        if (metric.isPresent()) {
            Set<String> metricMeasures = metric.get().getMetricTypeParams().getMeasures().stream()
                    .map(me -> me.getName()).collect(Collectors.toSet());
            if (sourceMeasure.containsAll(metricMeasures)) {
                isAdd = true;
            }
        }
        if (sourceMeasure.contains(m)) {
            isAdd = true;
        }
        if (isAdd && !queryMetrics.contains(m)) {
            queryMetrics.add(m);
        }
        return isAdd;
    }

    /**
     * Returns true when the data source can supply dimension {@code d}: as a declared
     * dimension, as an identifier, or via the schema's per-source dimension list.
     * Side effect: appends the resolved name to {@code queryDimension} on a match.
     */
    private boolean getMatchDimension(SemanticSchema schema, Set<String> sourceDimension, DataSource dataSource,
            String d, List<String> queryDimension) {
        String oriDimension = d;
        boolean isAdd = false;
        if (d.contains(Constants.DIMENSION_IDENTIFY)) {
            // Strip the "prefix__" part before matching against physical names.
            oriDimension = d.split(Constants.DIMENSION_IDENTIFY)[1];
        }
        if (sourceDimension.contains(oriDimension)) {
            isAdd = true;
        }
        for (Identify identify : dataSource.getIdentifiers()) {
            if (identify.getName().equalsIgnoreCase(oriDimension)) {
                isAdd = true;
                break;
            }
        }
        if (schema.getDimension().containsKey(dataSource.getName())) {
            for (Dimension dim : schema.getDimension().get(dataSource.getName())) {
                if (dim.getName().equalsIgnoreCase(oriDimension)) {
                    isAdd = true;
                }
            }
        }
        if (isAdd && !queryDimension.contains(oriDimension)) {
            queryDimension.add(oriDimension);
        }
        return isAdd;
    }

    // Thin indirection over SemanticNode.getTable; scope is currently unused here.
    private SqlNode getTable(TableView tableView, SqlValidatorScope scope) throws Exception {
        return SemanticNode.getTable(tableView.getTable());
    }

    /**
     * Builds the ON condition for joining {@code left} and {@code right}: equality on
     * every column both sub-selects expose, restricted to dimension/identifier
     * columns, ANDed together. Returns null when no shared column qualifies.
     */
    private SqlNode getCondition(TableView left, TableView right, DataSource dataSource, SemanticSchema schema,
            SqlValidatorScope scope) throws Exception {
        log.info(left.getClass().toString());
        // Shared output columns of both sides.
        Set<String> selectLeft = SemanticNode.getSelect(left.getTable());
        Set<String> selectRight = SemanticNode.getSelect(right.getTable());
        selectLeft.retainAll(selectRight);
        SqlNode condition = null;
        for (String on : selectLeft) {
            // Only join on dimensions/identifiers, never on measures.
            if (!SourceRender.isDimension(on, dataSource, schema)) {
                continue;
            }
            List<SqlNode> ons = new ArrayList<>();
            ons.add(SemanticNode.parse(left.getAlias() + "." + on, scope));
            ons.add(SemanticNode.parse(right.getAlias() + "." + on, scope));
            if (condition == null) {
                condition = new SqlBasicCall(
                        SqlStdOperatorTable.EQUALS,
                        ons,
                        SqlParserPos.ZERO, null);
                continue;
            }
            SqlNode addCondition = new SqlBasicCall(
                    SqlStdOperatorTable.EQUALS,
                    ons,
                    SqlParserPos.ZERO, null);
            condition = new SqlBasicCall(
                    SqlStdOperatorTable.AND,
                    new ArrayList<>(Arrays.asList(condition, addCondition)),
                    SqlParserPos.ZERO, null);
        }
        return condition;
    }
}

View File

@@ -0,0 +1,50 @@
package com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.render;
import com.tencent.supersonic.semantic.api.query.request.MetricReq;
import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.Renderer;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.TableView;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.node.SemanticNode;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.DataSource;
import com.tencent.supersonic.semantic.query.domain.parser.schema.SemanticSchema;
import java.util.ArrayList;
import java.util.List;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import org.springframework.util.CollectionUtils;
public class OutputRender extends Renderer {

    /**
     * Final rendering stage: projects the requested dimensions and metrics from the
     * table view assembled by the upstream renderers and applies the request's
     * LIMIT and ORDER BY. Mutates {@code super.tableView} in place.
     */
    @Override
    public void render(MetricReq metricCommand, List<DataSource> dataSources, SqlValidatorScope scope,
            SemanticSchema schema, boolean nonAgg) throws Exception {
        TableView selectDataSet = super.tableView;
        // Select each requested dimension and metric by its bare name; upstream
        // stages have already aliased the columns to these names.
        for (String dimension : metricCommand.getDimensions()) {
            selectDataSet.getMeasure().add(SemanticNode.parse(dimension, scope));
        }
        for (String metric : metricCommand.getMetrics()) {
            selectDataSet.getMeasure().add(SemanticNode.parse(metric, scope));
        }
        if (metricCommand.getLimit() > 0) {
            // NOTE(review): the request's LIMIT value is stored via setOffset();
            // confirm TableView emits this node as a row limit (FETCH), not an OFFSET.
            SqlNode offset = SemanticNode.parse(metricCommand.getLimit().toString(), scope);
            selectDataSet.setOffset(offset);
        }
        if (!CollectionUtils.isEmpty(metricCommand.getOrder())) {
            List<SqlNode> orderList = new ArrayList<>();
            for (ColumnOrder columnOrder : metricCommand.getOrder()) {
                // DESC needs an explicit DESC wrapper call; ascending is the default.
                if (SqlStdOperatorTable.DESC.getName().equalsIgnoreCase(columnOrder.getOrder())) {
                    orderList.add(SqlStdOperatorTable.DESC.createCall(SqlParserPos.ZERO,
                            new SqlNode[]{SemanticNode.parse(columnOrder.getCol(), scope)}));
                } else {
                    orderList.add(SemanticNode.parse(columnOrder.getCol(), scope));
                }
            }
            selectDataSet.setOrder(new SqlNodeList(orderList, SqlParserPos.ZERO));
        }
    }
}

View File

@@ -0,0 +1,307 @@
package com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.render;
import com.tencent.supersonic.semantic.api.query.request.MetricReq;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.Renderer;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.TableView;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.node.DataSourceNode;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.node.DimensionNode;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.node.FilterNode;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.node.IdentifyNode;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.node.MetricNode;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.node.SemanticNode;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.Constants;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.DataSource;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.Dimension;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.Identify;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.Measure;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.Metric;
import com.tencent.supersonic.semantic.query.domain.parser.schema.SemanticSchema;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import org.springframework.util.CollectionUtils;
@Slf4j
public class SourceRender extends Renderer {

    /**
     * Entry renderer: turns the requested metrics/dimensions into a table view over
     * the matched data sources. A single data source is rendered directly via
     * {@link #renderOne}; multiple sources are delegated to {@link JoinRender}.
     * NOTE(review): unlike the sibling renderers this method lacks @Override —
     * confirm it still matches the Renderer contract.
     */
    public void render(MetricReq metricCommand, List<DataSource> dataSources, SqlValidatorScope scope,
            SemanticSchema schema, boolean nonAgg) throws Exception {
        String queryWhere = metricCommand.getWhere();
        // Columns referenced by the WHERE clause; they must appear in the projection.
        Set<String> whereFields = new HashSet<>();
        List<String> fieldWhere = new ArrayList<>();
        if (queryWhere != null && !queryWhere.isEmpty()) {
            SqlNode sqlNode = SemanticNode.parse(queryWhere, scope);
            FilterNode.getFilterField(sqlNode, whereFields);
            fieldWhere = whereFields.stream().collect(Collectors.toList());
        }
        if (dataSources.size() == 1) {
            DataSource dataSource = dataSources.get(0);
            super.tableView = renderOne(false, "", fieldWhere, metricCommand.getMetrics(),
                    metricCommand.getDimensions(),
                    metricCommand.getWhere(), dataSource, scope, schema, nonAgg);
            return;
        }
        // Multiple sources: build a join tree and adopt its resulting view.
        JoinRender joinRender = new JoinRender();
        joinRender.render(metricCommand, dataSources, scope, schema, nonAgg);
        super.tableView = joinRender.getTableView();
    }

    /**
     * Renders one data source into a two-level view: an inner select (dataSet) over
     * the physical table carrying raw measures/dimensions, wrapped by an outer
     * select (output) carrying aggregations, group-by dimensions and, when
     * {@code addWhere} is set, the filter itself.
     *
     * @param addWhere  when true, attach the parsed WHERE clause to the outer view
     * @param alias     alias prefix for dimension columns in the outer view ("" = none)
     * @param fieldWhere columns referenced by the WHERE clause
     * @param nonAgg    when true, emit raw columns instead of aggregates/group-by
     * @return the outer view, with its table set to the aliased inner select
     */
    public static TableView renderOne(boolean addWhere, String alias, List<String> fieldWhere,
            List<String> reqMetrics, List<String> reqDimensions, String queryWhere, DataSource datasource,
            SqlValidatorScope scope, SemanticSchema schema, boolean nonAgg) throws Exception {
        TableView dataSet = new TableView();
        TableView output = new TableView();
        List<String> queryMetrics = new ArrayList<>(reqMetrics);
        List<String> queryDimensions = new ArrayList<>(reqDimensions);
        if (!fieldWhere.isEmpty()) {
            SqlNode sqlNode = SemanticNode.parse(queryWhere, scope);
            if (addWhere) {
                output.getFilter().add(sqlNode);
            }
            // Pull WHERE-only columns into the query lists so they get projected.
            Set<String> dimensions = new HashSet<>();
            Set<String> metrics = new HashSet<>();
            whereDimMetric(fieldWhere, queryMetrics, queryDimensions, datasource, schema, dimensions, metrics);
            queryMetrics.addAll(metrics);
            queryMetrics = uniqList(queryMetrics);
            queryDimensions.addAll(dimensions);
            queryDimensions = uniqList(queryDimensions);
            mergeWhere(fieldWhere, dataSet, output, queryMetrics, queryDimensions, datasource, scope, schema, nonAgg);
        }
        for (String metric : queryMetrics) {
            MetricNode metricNode = buildMetricNode(metric, datasource, scope, schema, nonAgg, alias);
            // Aggregated form goes to the outer view, raw measures to the inner one,
            // measure-level constraints to the inner filter.
            if (!metricNode.getAggNode().isEmpty()) {
                metricNode.getAggNode().entrySet().stream().forEach(m -> output.getMeasure().add(m.getValue()));
            }
            if (metricNode.getNonAggNode() != null) {
                metricNode.getNonAggNode().entrySet().stream().forEach(m -> dataSet.getMeasure().add(m.getValue()));
            }
            if (metricNode.getMeasureFilter() != null) {
                metricNode.getMeasureFilter().entrySet().stream().forEach(m -> dataSet.getFilter().add(m.getValue()));
            }
        }
        for (String dimension : queryDimensions) {
            // "prefix__column" dimensions pass the full name as alias and the bare
            // column as the dimension to resolve.
            buildDimension(dimension.contains(Constants.DIMENSION_IDENTIFY) ? dimension : "",
                    dimension.contains(Constants.DIMENSION_IDENTIFY) ? dimension.split(Constants.DIMENSION_IDENTIFY)[1]
                            : dimension, datasource, schema, nonAgg, dataSet, output, scope);
        }
        SqlNode tableNode = DataSourceNode.build(datasource, scope);
        dataSet.setTable(tableNode);
        // Alias the inner select with a per-render unique suffix (last 4 chars of a
        // random UUID) to avoid collisions when the same source is rendered twice.
        output.setTable(SemanticNode.buildAs(
                Constants.DATASOURCE_TABLE_OUT_PREFIX + datasource.getName() + "_" + UUID.randomUUID().toString()
                        .substring(32), dataSet.build()));
        return output;
    }

    /**
     * Resolves one dimension against, in order: the schema's per-source dimension
     * list, the data source's identifiers, and the data source's own dimensions;
     * projects it into the inner view and into the outer select (nonAgg) or
     * group-by (aggregating) as appropriate.
     */
    private static void buildDimension(String alias, String dimension, DataSource datasource, SemanticSchema schema,
            boolean nonAgg, TableView dataSet, TableView output, SqlValidatorScope scope) throws Exception {
        List<Dimension> dimensionList = schema.getDimension().get(datasource.getName());
        boolean isAdd = false;
        if (!CollectionUtils.isEmpty(dimensionList)) {
            for (Dimension dim : dimensionList) {
                if (!dim.getName().equalsIgnoreCase(dimension)) {
                    continue;
                }
                dataSet.getMeasure().add(DimensionNode.build(dim, scope));
                if (nonAgg) {
                    //dataSet.getMeasure().addAll(DimensionNode.expand(dim, scope));
                    output.getMeasure().add(DimensionNode.buildName(dim, scope));
                    isAdd = true;
                    continue;
                }
                if ("".equals(alias)) {
                    output.getDimension().add(DimensionNode.buildName(dim, scope));
                } else {
                    output.getDimension().add(DimensionNode.buildNameAs(alias, dim, scope));
                }
                isAdd = true;
                break;
            }
        }
        if (!isAdd) {
            // Not a schema dimension: try the source's identifier columns.
            Optional<Identify> identify = datasource.getIdentifiers().stream()
                    .filter(i -> i.getName().equalsIgnoreCase(dimension)).findFirst();
            if (identify.isPresent()) {
                if (nonAgg) {
                    dataSet.getMeasure().add(SemanticNode.parse(identify.get().getName(), scope));
                    output.getMeasure().add(SemanticNode.parse(identify.get().getName(), scope));
                } else {
                    dataSet.getMeasure().add(SemanticNode.parse(identify.get().getName(), scope));
                    output.getDimension().add(SemanticNode.parse(identify.get().getName(), scope));
                }
                isAdd = true;
            }
        }
        if (isAdd) {
            return;
        }
        // Last resort: the data source's locally declared dimensions.
        Optional<Dimension> dimensionOptional = getDimensionByName(dimension, datasource);
        if (dimensionOptional.isPresent()) {
            if (nonAgg) {
                dataSet.getMeasure().add(DimensionNode.build(dimensionOptional.get(), scope));
                output.getMeasure().add(DimensionNode.buildName(dimensionOptional.get(), scope));
                return;
            }
            dataSet.getMeasure().add(DimensionNode.build(dimensionOptional.get(), scope));
            output.getDimension().add(DimensionNode.buildName(dimensionOptional.get(), scope));
        }
    }

    /**
     * True when any WHERE field matches a measure of the data source.
     * NOTE(review): metricNum and measureNum run the identical stream over
     * getMeasures(); one of them likely should check metrics instead. Also assumes
     * {@code fields} entries are already lower-cased — confirm at the call site.
     */
    private static boolean isWhereHasMetric(List<String> fields, DataSource datasource) {
        Long metricNum = datasource.getMeasures().stream().filter(m -> fields.contains(m.getName().toLowerCase()))
                .count();
        Long measureNum = datasource.getMeasures().stream().filter(m -> fields.contains(m.getName().toLowerCase()))
                .count();
        return metricNum > 0 || measureNum > 0;
    }

    /**
     * Builds the SqlNodes that expose WHERE-only columns (those not already in the
     * metric/dimension query lists) inside the inner select. Mutates {@code fields}
     * by removing entries already queried.
     * NOTE(review): the nonAgg and aggregating branches below are currently
     * identical — flagging in case one of them was meant to differ.
     */
    private static List<SqlNode> getWhereMeasure(List<String> fields, List<String> queryMetrics,
            List<String> queryDimensions, DataSource datasource, SqlValidatorScope scope, SemanticSchema schema,
            boolean nonAgg) throws Exception {
        Iterator<String> iterator = fields.iterator();
        List<SqlNode> whereNode = new ArrayList<>();
        while (iterator.hasNext()) {
            String cur = iterator.next();
            if (queryDimensions.contains(cur) || queryMetrics.contains(cur)) {
                iterator.remove();
            }
        }
        for (String where : fields) {
            List<Dimension> dimensionList = schema.getDimension().get(datasource.getName());
            boolean isAdd = false;
            if (!CollectionUtils.isEmpty(dimensionList)) {
                for (Dimension dim : dimensionList) {
                    if (!dim.getName().equalsIgnoreCase(where)) {
                        continue;
                    }
                    if (nonAgg) {
                        whereNode.addAll(DimensionNode.expand(dim, scope));
                        isAdd = true;
                        continue;
                    }
                    whereNode.addAll(DimensionNode.expand(dim, scope));
                    isAdd = true;
                }
            }
            Optional<Identify> identify = getIdentifyByName(where, datasource);
            if (identify.isPresent()) {
                whereNode.add(IdentifyNode.build(identify.get(), scope));
                isAdd = true;
            }
            if (isAdd) {
                continue;
            }
            Optional<Dimension> dimensionOptional = getDimensionByName(where, datasource);
            if (dimensionOptional.isPresent()) {
                if (nonAgg) {
                    whereNode.add(DimensionNode.build(dimensionOptional.get(), scope));
                    continue;
                }
                whereNode.add(DimensionNode.build(dimensionOptional.get(), scope));
            }
        }
        return whereNode;
    }

    // Adds the projections required by WHERE-only columns to the inner select.
    private static void mergeWhere(List<String> fields, TableView dataSet, TableView outputSet,
            List<String> queryMetrics,
            List<String> queryDimensions, DataSource datasource, SqlValidatorScope scope, SemanticSchema schema,
            boolean nonAgg) throws Exception {
        List<SqlNode> whereNode = getWhereMeasure(fields, queryMetrics, queryDimensions, datasource, scope, schema,
                nonAgg);
        dataSet.getMeasure().addAll(whereNode);
        //getWhere(outputSet,fields,queryMetrics,queryDimensions,datasource,scope,schema);
    }

    /**
     * Classifies each WHERE field (not already queried) as a dimension or a metric
     * of the given data source, checking in order: source dimensions, identifiers,
     * schema per-source dimensions, source measures, schema metrics. Results are
     * accumulated into the {@code dimensions} / {@code metrics} out-params.
     */
    public static void whereDimMetric(List<String> fields, List<String> queryMetrics,
            List<String> queryDimensions, DataSource datasource, SemanticSchema schema, Set<String> dimensions,
            Set<String> metrics) {
        for (String field : fields) {
            if (queryDimensions.contains(field) || queryMetrics.contains(field)) {
                continue;
            }
            Optional<Dimension> dimension = datasource.getDimensions().stream()
                    .filter(d -> d.getName().equalsIgnoreCase(field)).findFirst();
            if (dimension.isPresent()) {
                dimensions.add(field);
                continue;
            }
            Optional<Identify> identify = datasource.getIdentifiers().stream()
                    .filter(i -> i.getName().equalsIgnoreCase(field)).findFirst();
            if (identify.isPresent()) {
                dimensions.add(field);
                continue;
            }
            if (schema.getDimension().containsKey(datasource.getName())) {
                Optional<Dimension> dataSourceDim = schema.getDimension().get(datasource.getName()).stream()
                        .filter(d -> d.getName().equalsIgnoreCase(field)).findFirst();
                if (dataSourceDim.isPresent()) {
                    dimensions.add(field);
                    continue;
                }
            }
            Optional<Measure> metric = datasource.getMeasures()
                    .stream().filter(m -> m.getName().equalsIgnoreCase(field)).findFirst();
            if (metric.isPresent()) {
                metrics.add(field);
                continue;
            }
            Optional<Metric> datasourceMetric = schema.getMetrics()
                    .stream().filter(m -> m.getName().equalsIgnoreCase(field)).findFirst();
            if (datasourceMetric.isPresent()) {
                metrics.add(field);
                continue;
            }
        }
    }

    /**
     * True when {@code name} is a dimension of the data source: a declared
     * dimension, an identifier, or a schema per-source dimension (case-insensitive).
     */
    public static boolean isDimension(String name, DataSource datasource, SemanticSchema schema) {
        Optional<Dimension> dimension = datasource.getDimensions().stream()
                .filter(d -> d.getName().equalsIgnoreCase(name)).findFirst();
        if (dimension.isPresent()) {
            return true;
        }
        Optional<Identify> identify = datasource.getIdentifiers().stream()
                .filter(i -> i.getName().equalsIgnoreCase(name)).findFirst();
        if (identify.isPresent()) {
            return true;
        }
        if (schema.getDimension().containsKey(datasource.getName())) {
            Optional<Dimension> dataSourceDim = schema.getDimension().get(datasource.getName()).stream()
                    .filter(d -> d.getName().equalsIgnoreCase(name)).findFirst();
            if (dataSourceDim.isPresent()) {
                return true;
            }
        }
        return false;
    }

    // NOTE(review): currently unreferenced inside this class — candidate for removal
    // if no external caller exists. Attaches the parsed WHERE clause to the view.
    private static void expandWhere(MetricReq metricCommand, TableView tableView, SqlValidatorScope scope)
            throws Exception {
        if (metricCommand.getWhere() != null && !metricCommand.getWhere().isEmpty()) {
            SqlNode sqlNode = SemanticNode.parse(metricCommand.getWhere(), scope);
            Set<String> fieldWhere = new HashSet<>();
            FilterNode.getFilterField(sqlNode, fieldWhere);
            //super.tableView.getFilter().add(sqlNode);
            tableView.getFilter().add(sqlNode);
        }
    }
}

View File

@@ -0,0 +1,13 @@
package com.tencent.supersonic.semantic.query.domain.parser.dsl;
/**
 * String constants shared by the SQL renderers: the separator used in prefixed
 * dimension names and the alias prefixes applied to generated sub-selects so
 * nested views never collide.
 */
public class Constants {

    /** Separator inside "identifier__dimension" style dimension names. */
    public static final String DIMENSION_IDENTIFY = "__";

    /** Alias prefix for a data source's inner select. */
    public static final String DATASOURCE_TABLE_PREFIX = "src0_";
    /** Alias prefix for the view wrapping a WHERE filter over a data source. */
    public static final String DATASOURCE_TABLE_FILTER_PREFIX = "src2_";
    /** Alias prefix for a data source's outer (aggregating) select. */
    public static final String DATASOURCE_TABLE_OUT_PREFIX = "src00_";

    /** Alias prefix for each table participating in a join. */
    public static final String JOIN_TABLE_PREFIX = "src1_";
    /** Alias prefix for the view wrapping the whole join tree. */
    public static final String JOIN_TABLE_OUT_PREFIX = "src11_";
    /** Alias prefix applied to a re-aliased left side of a join. */
    public static final String JOIN_TABLE_LEFT_PREFIX = "src12_";

    private Constants() {
        // Constants holder: not instantiable.
    }
}

View File

@@ -0,0 +1,23 @@
package com.tencent.supersonic.semantic.query.domain.parser.dsl;
import java.util.List;
import lombok.Data;
/**
 * A physical data source in the semantic model: either a raw SQL query or a table
 * reference, plus its identifiers, dimensions and measures. Accessors are
 * Lombok-generated via {@code @Data}.
 */
@Data
public class DataSource {

    // Logical name; used as the key in SemanticModel.datasourceMap.
    private String name;
    private Long sourceId;
    // Exactly one of the two is expected to be set: an inline SQL query ...
    private String sqlQuery;
    // ... or a physical table name to query directly.
    private String tableQuery;
    private List<Identify> identifiers;
    private List<Dimension> dimensions;
    private List<Measure> measures;
}

View File

@@ -0,0 +1,26 @@
package com.tencent.supersonic.semantic.query.domain.parser.dsl;
import com.tencent.supersonic.semantic.query.domain.parser.schema.SemanticItem;
import lombok.Data;
/**
 * A dimension declared on a data source. Lombok {@code @Data} generates the
 * remaining accessors; {@code getName()} is written out explicitly to satisfy
 * the {@link SemanticItem} interface.
 */
@Data
public class Dimension implements SemanticItem {

    // Dimension name; doubles as the SemanticItem identifier.
    String name;

    @Override
    public String getName() {
        return name;
    }

    private String owners;
    // Dimension type, e.g. categorical vs time — see DimensionTimeTypeParams.
    private String type;
    // SQL expression that produces the dimension value.
    private String expr;
    private DimensionTimeTypeParams dimensionTimeTypeParams;
}

View File

@@ -0,0 +1,12 @@
package com.tencent.supersonic.semantic.query.domain.parser.dsl;
import lombok.Data;
/**
 * Extra parameters for time-typed dimensions.
 */
@Data
public class DimensionTimeTypeParams {

    // Whether this is the primary time dimension; stored as a String flag
    // (presumably "true"/"false") — TODO confirm the expected values.
    private String isPrimary;
    // Granularity of the time dimension, e.g. day/week/month.
    private String timeGranularity;
}

View File

@@ -0,0 +1,17 @@
package com.tencent.supersonic.semantic.query.domain.parser.dsl;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * An identifier column of a data source — the column join conditions are built on.
 */
@Data
@AllArgsConstructor
@NoArgsConstructor
public class Identify {

    private String name;
    // Identifier role: "primary" or "foreign".
    private String type;
}

View File

@@ -0,0 +1,25 @@
package com.tencent.supersonic.semantic.query.domain.parser.dsl;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * A raw measure of a data source: an expression plus the aggregation applied to it.
 */
@Data
@AllArgsConstructor
@NoArgsConstructor
public class Measure {

    private String name;
    // Aggregation function: sum / max / min / avg / count / distinct.
    private String agg;
    // SQL expression producing the measure value.
    private String expr;
    // Optional row-level constraint applied before aggregation.
    private String constraint;
    private String alias;
    // Whether this measure is exposed as a metric; stored as a String flag —
    // TODO confirm the expected values.
    private String createMetric;
}

View File

@@ -0,0 +1,25 @@
package com.tencent.supersonic.semantic.query.domain.parser.dsl;
import com.tencent.supersonic.semantic.query.domain.parser.schema.SemanticItem;
import java.util.List;
import lombok.Data;
/**
 * A metric in the semantic model, defined over one or more measures (see
 * {@link MetricTypeParams}). Lombok {@code @Data} generates the remaining
 * accessors; {@code getName()} is explicit to satisfy {@link SemanticItem}.
 */
@Data
public class Metric implements SemanticItem {

    // Metric name; doubles as the SemanticItem identifier.
    private String name;

    @Override
    public String getName() {
        return name;
    }

    private List<String> owners;
    private String type;
    // The measures and expression this metric is computed from.
    private MetricTypeParams metricTypeParams;
}

View File

@@ -0,0 +1,13 @@
package com.tencent.supersonic.semantic.query.domain.parser.dsl;
import java.util.List;
import lombok.Data;
/**
 * Definition of how a metric is computed: the underlying measures and the
 * expression combining them.
 */
@Data
public class MetricTypeParams {

    // Measures referenced by the expression below.
    private List<Measure> measures;
    // Expression over the measure names that yields the metric value.
    private String expr;
}

View File

@@ -0,0 +1,15 @@
package com.tencent.supersonic.semantic.query.domain.parser.dsl;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import lombok.Data;
/**
 * In-memory semantic model for one domain: its metrics plus data sources and
 * dimensions, both keyed by data source name.
 */
@Data
public class SemanticModel {

    private String rootPath;
    private List<Metric> metrics = new ArrayList<>();
    // Data sources keyed by their logical name.
    private Map<String, DataSource> datasourceMap = new HashMap<>();
    // Per-data-source dimension lists, keyed by data source name.
    private Map<String, List<Dimension>> dimensionMap = new HashMap<>();
}

View File

@@ -0,0 +1,133 @@
package com.tencent.supersonic.semantic.query.domain.parser.schema;
import java.util.ArrayList;
import java.util.List;
import org.apache.calcite.DataContext;
import org.apache.calcite.linq4j.Enumerable;
import org.apache.calcite.plan.RelOptTable;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.hint.RelHint;
import org.apache.calcite.rel.logical.LogicalTableScan;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeField;
import org.apache.calcite.rel.type.RelDataTypeFieldImpl;
import org.apache.calcite.rel.type.RelRecordType;
import org.apache.calcite.rel.type.StructKind;
import org.apache.calcite.schema.ScannableTable;
import org.apache.calcite.schema.Statistic;
import org.apache.calcite.schema.Statistics;
import org.apache.calcite.schema.TranslatableTable;
import org.apache.calcite.schema.impl.AbstractTable;
import org.apache.calcite.sql.type.SqlTypeName;
/**
 * Calcite table describing a semantic-model datasource: an ordered list of
 * named, typed fields plus a row-count statistic for the planner.
 *
 * <p>This table participates in planning only: {@link #scan} always throws and
 * {@link #toRel} translates the table into a plain logical scan.
 */
public class DataSourceTable extends AbstractTable implements ScannableTable, TranslatableTable {

    private final String tableName;
    private final List<String> fieldNames;
    private final List<SqlTypeName> fieldTypes;
    private final Statistic statistic;

    // Built lazily on first getRowType() call; assumes single-threaded
    // planning — NOTE(review): confirm no concurrent access.
    private RelDataType rowType;

    private DataSourceTable(String tableName, List<String> fieldNames, List<SqlTypeName> fieldTypes,
            Statistic statistic) {
        this.tableName = tableName;
        this.fieldNames = fieldNames;
        this.fieldTypes = fieldTypes;
        this.statistic = statistic;
    }

    public String getTableName() {
        return tableName;
    }

    @Override
    public RelDataType getRowType(RelDataTypeFactory typeFactory) {
        if (rowType == null) {
            List<RelDataTypeField> fields = new ArrayList<>(fieldNames.size());
            for (int i = 0; i < fieldNames.size(); i++) {
                RelDataType fieldType = typeFactory.createSqlType(fieldTypes.get(i));
                RelDataTypeField field = new RelDataTypeFieldImpl(fieldNames.get(i), i, fieldType);
                fields.add(field);
            }
            // PEEK_FIELDS lets unqualified column references resolve into this record.
            rowType = new RelRecordType(StructKind.PEEK_FIELDS, fields, true);
        }
        return rowType;
    }

    @Override
    public Statistic getStatistic() {
        return statistic;
    }

    @Override
    public Enumerable<Object[]> scan(DataContext root) {
        // Planning-only table: row production is delegated to the physical engine.
        throw new UnsupportedOperationException("Not implemented");
    }

    public static Builder newBuilder(String tableName) {
        return new Builder(tableName);
    }

    @Override
    public RelNode toRel(RelOptTable.ToRelContext toRelContext, RelOptTable relOptTable) {
        List<RelHint> hint = new ArrayList<>();
        return new LogicalTableScan(toRelContext.getCluster(), toRelContext.getCluster().traitSet(), hint, relOptTable);
    }

    /** Fluent construction; requires at least one field and a positive row count. */
    public static final class Builder {

        private final String tableName;
        private final List<String> fieldNames = new ArrayList<>();
        private final List<SqlTypeName> fieldTypes = new ArrayList<>();
        private long rowCount;

        private Builder(String tableName) {
            if (tableName == null || tableName.isEmpty()) {
                throw new IllegalArgumentException("Table name cannot be null or empty");
            }
            this.tableName = tableName;
        }

        /**
         * Adds a column; names must be unique within the table.
         *
         * @throws IllegalArgumentException on empty or duplicate names
         */
        public Builder addField(String name, SqlTypeName typeName) {
            if (name == null || name.isEmpty()) {
                throw new IllegalArgumentException("Field name cannot be null or empty");
            }
            if (fieldNames.contains(name)) {
                throw new IllegalArgumentException("Field already defined: " + name);
            }
            fieldNames.add(name);
            fieldTypes.add(typeName);
            return this;
        }

        public Builder withRowCount(long rowCount) {
            this.rowCount = rowCount;
            return this;
        }

        public DataSourceTable build() {
            if (fieldNames.isEmpty()) {
                throw new IllegalStateException("Table must have at least one field");
            }
            // Bug fix: only rowCount == 0 was rejected before, so a negative
            // count slipped through despite the "positive" error message.
            if (rowCount <= 0L) {
                throw new IllegalStateException("Table must have positive row count");
            }
            return new DataSourceTable(tableName, fieldNames, fieldTypes, Statistics.of(rowCount, null));
        }
    }
}

View File

@@ -0,0 +1,32 @@
package com.tencent.supersonic.semantic.query.domain.parser.schema;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.Configuration;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.sql.DSLSqlValidatorImpl;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.apache.calcite.jdbc.CalciteSchema;
import org.apache.calcite.prepare.CalciteCatalogReader;
import org.apache.calcite.prepare.Prepare;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.sql.validate.ParameterScope;
import org.apache.calcite.sql.validate.SqlValidatorScope;
/**
 * Wires a {@link SemanticSchema} into a Calcite catalog and produces the
 * validator scope used when validating DSL SQL.
 */
public class SchemaBuilder {

    /**
     * Registers {@code schema} under its root path in a fresh root schema and
     * returns a parameter scope backed by a new DSL SQL validator.
     */
    public static SqlValidatorScope getScope(SemanticSchema schema) throws Exception {
        CalciteSchema calciteRoot = CalciteSchema.createRootSchema(true, false);
        calciteRoot.add(schema.getRootPath(), schema);
        Prepare.CatalogReader catalog = new CalciteCatalogReader(
                calciteRoot,
                Collections.singletonList(schema.getRootPath()),
                Configuration.typeFactory,
                Configuration.config
        );
        DSLSqlValidatorImpl validator = new DSLSqlValidatorImpl(Configuration.operatorTable, catalog,
                Configuration.typeFactory, Configuration.validatorConfig);
        // No named parameters are pre-registered; the scope starts empty.
        Map<String, RelDataType> paramTypes = new HashMap<>();
        return new ParameterScope(validator, paramTypes);
    }
}

View File

@@ -0,0 +1,6 @@
package com.tencent.supersonic.semantic.query.domain.parser.schema;
/**
 * Common abstraction for named elements of the semantic model
 * (e.g. metrics, dimensions, datasources).
 */
public interface SemanticItem {

    /** Returns the name identifying this semantic item. */
    // Idiom fix: interface members are implicitly public, so the redundant
    // modifier was dropped.
    String getName();
}

View File

@@ -0,0 +1,101 @@
package com.tencent.supersonic.semantic.query.domain.parser.schema;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.DataSource;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.Dimension;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.Metric;
import com.tencent.supersonic.semantic.query.domain.parser.dsl.SemanticModel;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.calcite.schema.Schema;
import org.apache.calcite.schema.SchemaVersion;
import org.apache.calcite.schema.Table;
import org.apache.calcite.schema.impl.AbstractSchema;
/**
 * Calcite schema that also carries the semantic model (datasources,
 * dimensions, metrics) consumed by the DSL parser.
 */
public class SemanticSchema extends AbstractSchema {

    private final String rootPath;
    private final Map<String, Table> tableMap;
    private SemanticModel semanticModel = new SemanticModel();

    private SemanticSchema(String rootPath, Map<String, Table> tableMap) {
        this.rootPath = rootPath;
        this.tableMap = tableMap;
    }

    public static Builder newBuilder(String rootPath) {
        return new Builder(rootPath);
    }

    public String getRootPath() {
        return rootPath;
    }

    @Override
    public Map<String, Table> getTableMap() {
        return tableMap;
    }

    @Override
    public Schema snapshot(SchemaVersion version) {
        // Tables are registered once at build time, so the live schema serves
        // as its own snapshot for every version.
        return this;
    }

    public Map<String, DataSource> getDatasource() {
        return semanticModel.getDatasourceMap();
    }

    public void setDatasource(Map<String, DataSource> datasource) {
        semanticModel.setDatasourceMap(datasource);
    }

    public Map<String, List<Dimension>> getDimension() {
        return semanticModel.getDimensionMap();
    }

    public void setDimension(Map<String, List<Dimension>> dimensions) {
        semanticModel.setDimensionMap(dimensions);
    }

    public List<Metric> getMetrics() {
        return semanticModel.getMetrics();
    }

    public void setMetric(List<Metric> metric) {
        semanticModel.setMetrics(metric);
    }

    /** Step-wise construction of a {@link SemanticSchema}; table names must be unique. */
    public static final class Builder {

        private final String rootPath;
        private final Map<String, Table> registeredTables = new HashMap<>();

        private Builder(String rootPath) {
            if (rootPath == null || rootPath.isEmpty()) {
                throw new IllegalArgumentException("Schema name cannot be null or empty");
            }
            this.rootPath = rootPath;
        }

        public Builder addTable(DataSourceTable table) {
            String name = table.getTableName();
            if (registeredTables.containsKey(name)) {
                throw new IllegalArgumentException("Table already defined: " + name);
            }
            registeredTables.put(name, table);
            return this;
        }

        public SemanticSchema build() {
            return new SemanticSchema(rootPath, registeredTables);
        }
    }
}

View File

@@ -0,0 +1,158 @@
package com.tencent.supersonic.semantic.query.domain.parser.schema;
import org.apache.calcite.sql.fun.SqlLibrary;
import org.apache.calcite.sql.validate.SqlConformance;
import org.apache.calcite.sql.validate.SqlConformanceEnum;
/**
 * SQL conformance for the semantic DSL: BigQuery behaviour by default, with a
 * few local overrides — no HAVING aliases, MySQL-style {@code LIMIT start,count},
 * no {@code OFFSET..LIMIT}, no {@code VALUE}, no string-to-array coercion.
 */
public class SemanticSqlConformance implements SqlConformance {

    /** Baseline that every non-overridden rule delegates to. */
    private static final SqlConformanceEnum DELEGATE = SqlConformanceEnum.BIG_QUERY;

    @Override
    public boolean isLiberal() {
        return DELEGATE.isLiberal();
    }

    @Override
    public boolean allowCharLiteralAlias() {
        return DELEGATE.allowCharLiteralAlias();
    }

    @Override
    public boolean isGroupByAlias() {
        return DELEGATE.isGroupByAlias();
    }

    @Override
    public boolean isGroupByOrdinal() {
        return DELEGATE.isGroupByOrdinal();
    }

    @Override
    public boolean isHavingAlias() {
        // Local override: aliases are not resolvable inside HAVING.
        return false;
    }

    @Override
    public boolean isSortByOrdinal() {
        return DELEGATE.isSortByOrdinal();
    }

    @Override
    public boolean isSortByAlias() {
        return DELEGATE.isSortByAlias();
    }

    @Override
    public boolean isSortByAliasObscures() {
        return DELEGATE.isSortByAliasObscures();
    }

    @Override
    public boolean isFromRequired() {
        return DELEGATE.isFromRequired();
    }

    @Override
    public boolean splitQuotedTableName() {
        return DELEGATE.splitQuotedTableName();
    }

    @Override
    public boolean allowHyphenInUnquotedTableName() {
        return DELEGATE.allowHyphenInUnquotedTableName();
    }

    @Override
    public boolean isBangEqualAllowed() {
        return DELEGATE.isBangEqualAllowed();
    }

    @Override
    public boolean isPercentRemainderAllowed() {
        return DELEGATE.isPercentRemainderAllowed();
    }

    @Override
    public boolean isMinusAllowed() {
        return DELEGATE.isMinusAllowed();
    }

    @Override
    public boolean isApplyAllowed() {
        return DELEGATE.isApplyAllowed();
    }

    @Override
    public boolean isInsertSubsetColumnsAllowed() {
        return DELEGATE.isInsertSubsetColumnsAllowed();
    }

    @Override
    public boolean allowAliasUnnestItems() {
        return DELEGATE.allowAliasUnnestItems();
    }

    @Override
    public boolean allowNiladicParentheses() {
        return DELEGATE.allowNiladicParentheses();
    }

    @Override
    public boolean allowExplicitRowValueConstructor() {
        return DELEGATE.allowExplicitRowValueConstructor();
    }

    @Override
    public boolean allowExtend() {
        return DELEGATE.allowExtend();
    }

    @Override
    public boolean isLimitStartCountAllowed() {
        // Local override: accept MySQL-style "LIMIT start, count".
        return true;
    }

    @Override
    public boolean isOffsetLimitAllowed() {
        // Local override: "OFFSET n LIMIT m" syntax is rejected.
        return false;
    }

    @Override
    public boolean allowGeometry() {
        return DELEGATE.allowGeometry();
    }

    @Override
    public boolean shouldConvertRaggedUnionTypesToVarying() {
        return DELEGATE.shouldConvertRaggedUnionTypesToVarying();
    }

    @Override
    public boolean allowExtendedTrim() {
        return DELEGATE.allowExtendedTrim();
    }

    @Override
    public boolean allowPluralTimeUnits() {
        return DELEGATE.allowPluralTimeUnits();
    }

    @Override
    public boolean allowQualifyingCommonColumn() {
        return DELEGATE.allowQualifyingCommonColumn();
    }

    @Override
    public boolean isValueAllowed() {
        // Local override: the VALUE keyword is not supported.
        return false;
    }

    @Override
    public SqlLibrary semantics() {
        return DELEGATE.semantics();
    }

    @Override
    public boolean allowCoercionStringToArray() {
        // Local override: no implicit string-to-array coercion.
        return false;
    }
}

View File

@@ -0,0 +1,97 @@
package com.tencent.supersonic.semantic.query.domain.parser.schema;
import com.google.common.base.Preconditions;
import org.apache.calcite.avatica.util.Casing;
import org.apache.calcite.config.NullCollation;
import org.apache.calcite.sql.SqlDialect;
import org.apache.calcite.sql.SqlIntervalLiteral;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlWriter;
import org.apache.calcite.sql.validate.SqlConformance;
import org.checkerframework.checker.nullness.qual.Nullable;
public class SemanticSqlDialect extends SqlDialect {
private static final SqlConformance tagTdwSqlConformance = new SemanticSqlConformance();
public static final Context DEFAULT_CONTEXT = SqlDialect.EMPTY_CONTEXT
.withDatabaseProduct(DatabaseProduct.BIG_QUERY)
.withLiteralQuoteString("'")
.withLiteralEscapedQuoteString("''")
.withIdentifierQuoteString("`")
.withNullCollation(NullCollation.LOW)
.withUnquotedCasing(Casing.UNCHANGED)
.withQuotedCasing(Casing.UNCHANGED)
.withCaseSensitive(false);
public static final SqlDialect DEFAULT = new SemanticSqlDialect(DEFAULT_CONTEXT);
public SemanticSqlDialect(Context context) {
super(context);
}
@Override
public void quoteStringLiteralUnicode(StringBuilder buf, String val) {
buf.append("'");
buf.append(val);
buf.append("'");
}
@Override
public void quoteStringLiteral(StringBuilder buf, String charsetName, String val) {
buf.append(literalQuoteString);
buf.append(val.replace(literalEndQuoteString, literalEscapedQuote));
buf.append(literalEndQuoteString);
}
@Override
public boolean supportsCharSet() {
return false;
}
@Override
public boolean requiresAliasForFromItems() {
return true;
}
@Override
public SqlConformance getConformance() {
// mysql_5
return tagTdwSqlConformance;
}
public boolean supportsGroupByWithCube() {
return true;
}
public void unparseSqlIntervalLiteral(SqlWriter writer, SqlIntervalLiteral literal, int leftPrec, int rightPrec) {
}
public void unparseOffsetFetch(SqlWriter writer, @Nullable SqlNode offset, @Nullable SqlNode fetch) {
unparseFetchUsingAnsi(writer, offset, fetch);
}
public static void unparseFetchUsingAnsi(SqlWriter writer, @Nullable SqlNode offset, @Nullable SqlNode fetch) {
Preconditions.checkArgument(fetch != null || offset != null);
SqlWriter.Frame fetchFrame;
writer.newlineAndIndent();
fetchFrame = writer.startList(SqlWriter.FrameTypeEnum.OFFSET);
writer.keyword("LIMIT");
if (offset != null) {
//writer.keyword("OFFSET");
offset.unparse(writer, -1, -1);
//writer.keyword("ROWS");
}
if (fetch != null) {
//writer.newlineAndIndent();
//fetchFrame = writer.startList(SqlWriter.FrameTypeEnum.FETCH);
writer.keyword(",");
//writer.keyword("NEXT");
fetch.unparse(writer, -1, -1);
}
writer.endList(fetchFrame);
}
}

View File

@@ -0,0 +1,32 @@
package com.tencent.supersonic.semantic.query.domain.pojo;
/**
 * Generic response envelope returned by the parser service: a status code,
 * a human-readable message and a typed payload.
 *
 * @param <T> payload type
 */
public class ParserSvrResponse<T> {

    private String code;
    private String msg;
    private T data;

    public String getCode() {
        return code;
    }

    public void setCode(String code) {
        this.code = code;
    }

    public String getMsg() {
        return msg;
    }

    public void setMsg(String msg) {
        this.msg = msg;
    }

    public T getData() {
        return data;
    }

    public void setData(T data) {
        this.data = data;
    }
}

View File

@@ -0,0 +1,15 @@
package com.tencent.supersonic.semantic.query.domain.repository;
import com.tencent.supersonic.semantic.api.core.pojo.QueryStat;
import com.tencent.supersonic.semantic.api.query.request.ItemUseReq;
import com.tencent.supersonic.semantic.api.query.response.ItemUseResp;
import java.util.List;
/**
 * Persistence port for query statistics: records each executed query and
 * serves item-usage aggregations.
 */
public interface StatRepository {

    /** Persists one query-statistics record; returns whether the write succeeded. */
    Boolean createRecord(QueryStat queryStat);

    /** Returns aggregated item-usage info for the given request. */
    List<ItemUseResp> getStatInfo(ItemUseReq itemUseReq);

    /** Returns raw query-stat rows for the given request, bypassing any cache. */
    List<QueryStat> getQueryStatInfoWithoutCache(ItemUseReq itemUseReq);
}

View File

@@ -0,0 +1,411 @@
package com.tencent.supersonic.semantic.query.domain.utils;
import static com.tencent.supersonic.common.constant.Constants.MINUS;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Strings;
import com.tencent.supersonic.auth.api.authentication.pojo.User;
import com.tencent.supersonic.auth.api.authorization.pojo.AuthRes;
import com.tencent.supersonic.auth.api.authorization.pojo.AuthResGrp;
import com.tencent.supersonic.auth.api.authorization.pojo.DimensionFilter;
import com.tencent.supersonic.auth.api.authorization.request.QueryAuthResReq;
import com.tencent.supersonic.auth.api.authorization.response.AuthorizedResourceResp;
import com.tencent.supersonic.auth.api.authorization.service.AuthService;
import com.tencent.supersonic.semantic.api.core.pojo.QueryAuthorization;
import com.tencent.supersonic.semantic.api.core.pojo.QueryColumn;
import com.tencent.supersonic.semantic.api.core.response.DimensionResp;
import com.tencent.supersonic.semantic.api.core.response.DomainResp;
import com.tencent.supersonic.semantic.api.core.response.MetricResp;
import com.tencent.supersonic.semantic.api.core.response.QueryResultWithSchemaResp;
import com.tencent.supersonic.semantic.api.query.enums.FilterOperatorEnum;
import com.tencent.supersonic.semantic.api.query.pojo.Filter;
import com.tencent.supersonic.semantic.api.query.request.QueryStructReq;
import com.tencent.supersonic.common.constant.Constants;
import com.tencent.supersonic.common.exception.InvalidArgumentException;
import com.tencent.supersonic.common.exception.InvalidPermissionException;
import com.tencent.supersonic.semantic.core.domain.DimensionService;
import com.tencent.supersonic.semantic.core.domain.DomainService;
import com.tencent.supersonic.semantic.core.domain.MetricService;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.StringJoiner;
import java.util.stream.Collectors;
import javax.servlet.http.HttpServletRequest;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
/**
 * Aspect enforcing data permissions around methods annotated with
 * {@code @DataPermission}. Expects the advised method's arguments to be
 * {@code [QueryStructReq, User, HttpServletRequest]} and its return value to be
 * a {@link QueryResultWithSchemaResp}.
 *
 * <p>Flow: domain visibility check → sensitive-column lookup → filter
 * permission check → row-permission pre-filter → proceed → desensitize
 * unauthorized sensitive columns in the result.
 */
@Component
@Aspect
@Slf4j
public class DataPermissionAOP {

    @Autowired
    private QueryStructUtils queryStructUtils;
    @Autowired
    private AuthService authService;
    @Autowired
    private DimensionService dimensionService;
    @Autowired
    private MetricService metricService;
    @Autowired
    private DomainService domainService;

    // Global kill-switch: when false the aspect becomes a pass-through.
    @Value("${permission.data.enable:true}")
    private Boolean permissionDataEnable;

    private static final ObjectMapper MAPPER = new ObjectMapper().setDateFormat(
            new SimpleDateFormat(Constants.DAY_FORMAT));

    @Pointcut("@annotation(com.tencent.supersonic.semantic.query.domain.annotation.DataPermission)")
    public void dataPermissionAOP() {
    }

    @Around(value = "dataPermissionAOP()")
    public Object around(ProceedingJoinPoint point) throws Throwable {
        Object[] args = point.getArgs();
        QueryStructReq queryStructCmd = (QueryStructReq) args[0];
        User user = (User) args[1];
        if (!permissionDataEnable) {
            log.info("permissionDataEnable is false");
            return point.proceed();
        }
        if (Objects.isNull(user) || Strings.isNullOrEmpty(user.getName())) {
            // Bug fix: the original message read "lease provide user information".
            throw new RuntimeException("Please provide user information");
        }
        // 1. determine whether the subject field is visible
        doDomainVisible(user, queryStructCmd);
        // 2. fetch data permission meta information
        Long domainId = queryStructCmd.getDomainId();
        Set<String> res4Privilege = queryStructUtils.getResNameEnExceptInternalCol(queryStructCmd);
        log.info("classId:{}, res4Privilege:{}", domainId, res4Privilege);
        Set<String> sensitiveResByDomain = getHighSensitiveColsByDomainId(domainId);
        Set<String> sensitiveResReq = res4Privilege.parallelStream()
                .filter(res -> sensitiveResByDomain.contains(res)).collect(Collectors.toSet());
        log.info("this query domainId:{}, sensitiveResReq:{}", domainId, sensitiveResReq);
        // query user privilege info
        HttpServletRequest request = (HttpServletRequest) args[2];
        AuthorizedResourceResp authorizedResource = getAuthorizedResource(user, request, domainId, sensitiveResReq);
        // get sensitiveRes that user has privilege
        Set<String> resAuthSet = getAuthResNameSet(authorizedResource, queryStructCmd.getDomainId());
        // 3.if sensitive fields without permission are involved in filter, thrown an exception
        doFilterCheckLogic(queryStructCmd, resAuthSet, sensitiveResReq);
        // 4.row permission pre-filter
        doRowPermission(queryStructCmd, authorizedResource);
        // 5.proceed
        QueryResultWithSchemaResp queryResultWithColumns = (QueryResultWithSchemaResp) point.proceed();
        if (CollectionUtils.isEmpty(sensitiveResReq) || allSensitiveResReqIsOk(sensitiveResReq, resAuthSet)) {
            // no sensitive columns involved, or the user holds them all
            log.info("sensitiveResReq is empty");
            return getQueryResultWithColumns(queryResultWithColumns, domainId, authorizedResource);
        }
        // 6.if the column has no permission, hit *
        Set<String> need2Apply = sensitiveResReq.stream().filter(req -> !resAuthSet.contains(req))
                .collect(Collectors.toSet());
        QueryResultWithSchemaResp queryResultAfterDesensitization = desensitizationData(queryResultWithColumns,
                need2Apply);
        addPromptInfoInfo(domainId, queryResultAfterDesensitization, authorizedResource);
        return queryResultAfterDesensitization;
    }

    /** Throws when the domain is not in the user's visible-domain list. */
    private void doDomainVisible(User user, QueryStructReq queryStructCmd) {
        Boolean visible = true;
        Long domainId = queryStructCmd.getDomainId();
        List<DomainResp> classListForViewer = domainService.getDomainListForViewer(user.getName());
        if (CollectionUtils.isEmpty(classListForViewer)) {
            visible = false;
        } else {
            Map<Long, List<DomainResp>> id2domainDesc = classListForViewer.stream()
                    .collect(Collectors.groupingBy(classInfo -> classInfo.getId()));
            if (!CollectionUtils.isEmpty(id2domainDesc) && !id2domainDesc.containsKey(domainId)) {
                visible = false;
            }
        }
        if (!visible) {
            List<Long> domainIds = new ArrayList<>();
            domainIds.add(domainId);
            List<DomainResp> classInfos = domainService.getDomainList(domainIds);
            if (CollectionUtils.isEmpty(classInfos)) {
                throw new InvalidArgumentException(
                        "invalid domainId:" + domainId + ", please contact admin for details");
            }
            String domainName = classInfos.get(0).getName();
            throw new InvalidPermissionException(
                    "You do not have domain:" + domainName + " permission, please contact admin for details");
        }
    }

    /** Attaches authorization prompt info before returning the (unmasked) result. */
    private QueryResultWithSchemaResp getQueryResultWithColumns(QueryResultWithSchemaResp resultWithColumns,
            Long domainId, AuthorizedResourceResp authResource) {
        addPromptInfoInfo(domainId, resultWithColumns, authResource);
        return resultWithColumns;
    }

    /** Adds a human-readable note describing which row filters constrained the result. */
    private void addPromptInfoInfo(Long domainId, QueryResultWithSchemaResp queryResultWithColumns,
            AuthorizedResourceResp authorizedResource) {
        List<DimensionFilter> filters = authorizedResource.getFilters();
        if (!CollectionUtils.isEmpty(filters)) {
            log.debug("dimensionFilters:{}", filters);
            List<Long> domainIds = new ArrayList<>();
            domainIds.add(domainId);
            List<DomainResp> classInfos = domainService.getDomainList(domainIds);
            String classNameCn = "";
            if (!CollectionUtils.isEmpty(classInfos)) {
                classNameCn = classInfos.get(0).getName();
            }
            List<String> exprList = new ArrayList<>();
            List<String> descList = new ArrayList<>();
            filters.stream().forEach(filter -> {
                descList.add(filter.getDescription());
                exprList.add(filter.getExpressions().toString());
            });
            String promptInfo = "the current data has been controlled by permissions,"
                    + " related information:%s, please contact admin for details";
            // Prefer human-readable descriptions; fall back to raw expressions.
            String message = String.format(promptInfo, CollectionUtils.isEmpty(descList) ? exprList : descList);
            queryResultWithColumns.setQueryAuthorization(
                    new QueryAuthorization(classNameCn, exprList, descList, message));
            log.info("queryResultWithColumns:{}", queryResultWithColumns);
        }
    }

    /**
     * Deep-copies a result so desensitization does not mutate the original:
     * columns are round-tripped through JSON, rows are copied map-by-map.
     */
    private QueryResultWithSchemaResp deepCopyResult(QueryResultWithSchemaResp raw) throws Exception {
        QueryResultWithSchemaResp queryResultWithColumns = new QueryResultWithSchemaResp();
        BeanUtils.copyProperties(raw, queryResultWithColumns);
        List<QueryColumn> columns = new ArrayList<>();
        if (!CollectionUtils.isEmpty(raw.getColumns())) {
            String columnsStr = MAPPER.writeValueAsString(raw.getColumns());
            columns = MAPPER.readValue(columnsStr, new TypeReference<List<QueryColumn>>() {
            });
        }
        // Fix: the original assigned columns twice (inside the if and after it).
        queryResultWithColumns.setColumns(columns);
        List<Map<String, Object>> resultData = new ArrayList<>();
        if (!CollectionUtils.isEmpty(raw.getResultList())) {
            for (Map<String, Object> line : raw.getResultList()) {
                Map<String, Object> newLine = new HashMap<>();
                newLine.putAll(line);
                resultData.add(newLine);
            }
        }
        queryResultWithColumns.setResultList(resultData);
        return queryResultWithColumns;
    }

    private boolean allSensitiveResReqIsOk(Set<String> sensitiveResReq, Set<String> resAuthSet) {
        if (resAuthSet.containsAll(sensitiveResReq)) {
            return true;
        }
        log.info("sensitiveResReq:{}, resAuthSet:{}", sensitiveResReq, resAuthSet);
        return false;
    }

    /** Collects the names of authorized resources belonging to the given domain. */
    private Set<String> getAuthResNameSet(AuthorizedResourceResp authorizedResource, Long domainId) {
        Set<String> resAuthName = new HashSet<>();
        List<AuthResGrp> authResGrpList = authorizedResource.getResources();
        authResGrpList.stream().forEach(authResGrp -> {
            List<AuthRes> cols = authResGrp.getGroup();
            if (!CollectionUtils.isEmpty(cols)) {
                cols.stream().filter(col -> domainId.equals(Long.parseLong(col.getDomainId())))
                        .forEach(col -> resAuthName.add(col.getName()));
            }
        });
        log.info("resAuthName:{}", resAuthName);
        return resAuthName;
    }

    /** Queries the auth service for the user's authorized resources and row filters. */
    private AuthorizedResourceResp getAuthorizedResource(User user, HttpServletRequest request, Long domainId,
            Set<String> sensitiveResReq) {
        List<AuthRes> resourceReqList = new ArrayList<>();
        sensitiveResReq.stream().forEach(res -> resourceReqList.add(new AuthRes(domainId.toString(), res)));
        QueryAuthResReq queryAuthResReq = new QueryAuthResReq();
        queryAuthResReq.setUser(user.getName());
        queryAuthResReq.setResources(resourceReqList);
        queryAuthResReq.setDomainId(domainId + "");
        AuthorizedResourceResp authorizedResource = fetchAuthRes(request, queryAuthResReq);
        log.info("user:{}, domainId:{}, after queryAuthorizedResources:{}", user.getName(), domainId,
                authorizedResource);
        return authorizedResource;
    }

    /** High-sensitivity dimension and metric bizNames for the domain. */
    private Set<String> getHighSensitiveColsByDomainId(Long domainId) {
        Set<String> highSensitiveCols = new HashSet<>();
        List<DimensionResp> highSensitiveDimensions = dimensionService.getHighSensitiveDimension(domainId);
        List<MetricResp> highSensitiveMetrics = metricService.getHighSensitiveMetric(domainId);
        if (!CollectionUtils.isEmpty(highSensitiveDimensions)) {
            highSensitiveDimensions.stream().forEach(dim -> highSensitiveCols.add(dim.getBizName()));
        }
        if (!CollectionUtils.isEmpty(highSensitiveMetrics)) {
            highSensitiveMetrics.stream().forEach(metric -> highSensitiveCols.add(metric.getBizName()));
        }
        return highSensitiveCols;
    }

    /** OR-joins authorized row filters and injects them as an SQL_PART filter. */
    private void doRowPermission(QueryStructReq queryStructCmd, AuthorizedResourceResp authorizedResource) {
        log.debug("start doRowPermission logic");
        StringJoiner joiner = new StringJoiner(" OR ");
        List<String> dimensionFilters = new ArrayList<>();
        if (!CollectionUtils.isEmpty(authorizedResource.getFilters())) {
            authorizedResource.getFilters().stream()
                    .forEach(filter -> dimensionFilters.addAll(filter.getExpressions()));
        }
        if (CollectionUtils.isEmpty(dimensionFilters)) {
            log.debug("dimensionFilters is empty");
            return;
        }
        dimensionFilters.stream().forEach(filter -> {
            if (StringUtils.isNotEmpty(filter) && StringUtils.isNotEmpty(filter.trim())) {
                joiner.add(" ( " + filter + " ) ");
            }
        });
        if (StringUtils.isNotEmpty(joiner.toString())) {
            log.info("before doRowPermission, queryStructCmd:{}", queryStructCmd);
            Filter filter = new Filter("", FilterOperatorEnum.SQL_PART, joiner.toString());
            List<Filter> filters = Objects.isNull(queryStructCmd.getOriginalFilter()) ? new ArrayList<>()
                    : queryStructCmd.getOriginalFilter();
            filters.add(filter);
            queryStructCmd.setDimensionFilters(filters);
            log.info("after doRowPermission, queryStructCmd:{}", queryStructCmd);
        }
    }

    /** Masks values of unauthorized sensitive columns with "****" on a deep copy. */
    private QueryResultWithSchemaResp desensitizationData(QueryResultWithSchemaResp raw, Set<String> need2Apply) {
        log.debug("start desensitizationData logic");
        if (CollectionUtils.isEmpty(need2Apply)) {
            log.info("user has all sensitiveRes");
            return raw;
        }
        List<QueryColumn> columns = raw.getColumns();
        boolean doDesensitization = false;
        for (QueryColumn queryColumn : columns) {
            if (need2Apply.contains(queryColumn.getNameEn())) {
                doDesensitization = true;
                break;
            }
        }
        if (!doDesensitization) {
            return raw;
        }
        QueryResultWithSchemaResp queryResultWithColumns = raw;
        try {
            queryResultWithColumns = deepCopyResult(raw);
        } catch (Exception e) {
            // Logging fix: pass the throwable as the last argument so the stack
            // trace is printed (the original "e:{}" placeholder was never filled).
            log.warn("deepCopyResult failed, masking in place", e);
        }
        addAuthorizedSchemaInfo(queryResultWithColumns.getColumns(), need2Apply);
        desensitizationInternal(queryResultWithColumns.getResultList(), need2Apply);
        return queryResultWithColumns;
    }

    /** Flags masked columns as unauthorized in the result schema. */
    private void addAuthorizedSchemaInfo(List<QueryColumn> columns, Set<String> need2Apply) {
        if (CollectionUtils.isEmpty(need2Apply)) {
            return;
        }
        columns.stream().forEach(col -> {
            if (need2Apply.contains(col.getNameEn())) {
                col.setAuthorized(false);
            }
        });
    }

    /** Replaces each row's unauthorized-column values with "****". */
    private void desensitizationInternal(List<Map<String, Object>> result, Set<String> need2Apply) {
        log.info("start desensitizationInternal logic");
        for (int i = 0; i < result.size(); i++) {
            Map<String, Object> row = result.get(i);
            Map<String, Object> newRow = new HashMap<>();
            for (String col : row.keySet()) {
                if (need2Apply.contains(col)) {
                    newRow.put(col, "****");
                } else {
                    newRow.put(col, row.get(col));
                }
            }
            result.set(i, newRow);
        }
    }

    /** Throws when a filter references a sensitive column the user cannot access. */
    private void doFilterCheckLogic(QueryStructReq queryStructCmd, Set<String> resAuthName,
            Set<String> sensitiveResReq) {
        Set<String> resFilterSet = queryStructUtils.getFilterResNameEnExceptInternalCol(queryStructCmd);
        Set<String> need2Apply = resFilterSet.stream()
                .filter(res -> !resAuthName.contains(res) && sensitiveResReq.contains(res)).collect(Collectors.toSet());
        Set<String> nameCnSet = new HashSet<>();
        List<Long> domainIds = new ArrayList<>();
        domainIds.add(queryStructCmd.getDomainId());
        List<DomainResp> classInfos = domainService.getDomainList(domainIds);
        String classNameCn = Constants.EMPTY;
        if (!CollectionUtils.isEmpty(classInfos)) {
            classNameCn = classInfos.get(0).getName();
        }
        List<DimensionResp> dimensionDescList = dimensionService.getDimensions(queryStructCmd.getDomainId());
        String finalDomainNameCn = classNameCn;
        dimensionDescList.stream().filter(dim -> need2Apply.contains(dim.getBizName()))
                .forEach(dim -> nameCnSet.add(finalDomainNameCn + MINUS + dim.getName()));
        if (!CollectionUtils.isEmpty(need2Apply)) {
            log.warn("in doFilterLogic, need2Apply:{}", need2Apply);
            throw new InvalidPermissionException(
                    "you do not have data permission:" + nameCnSet + ", please contact admin for details");
        }
    }

    private AuthorizedResourceResp fetchAuthRes(HttpServletRequest request, QueryAuthResReq queryAuthResReq) {
        log.info("Authorization:{}", request.getHeader("Authorization"));
        log.info("queryAuthResReq:{}", queryAuthResReq);
        return authService.queryAuthorizedResources(request, queryAuthResReq);
    }
}

View File

@@ -0,0 +1,221 @@
package com.tencent.supersonic.domain.semantic.query.domain.utils;
import static com.tencent.supersonic.common.constant.Constants.APOSTROPHE;
import static com.tencent.supersonic.common.constant.Constants.COMMA;
import static com.tencent.supersonic.common.constant.Constants.DAY;
import static com.tencent.supersonic.common.constant.Constants.DAY_FORMAT;
import static com.tencent.supersonic.common.constant.Constants.MONTH;
import com.google.common.base.Strings;
import com.tencent.supersonic.semantic.api.core.response.ItemDateResp;
import com.tencent.supersonic.common.pojo.DateConf;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.time.temporal.ChronoUnit;
import java.time.temporal.TemporalAdjusters;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.StringJoiner;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
@Slf4j
@Component
public class DateUtils {
@Value("${query.parameter.sys.date:sys_imp_date}")
private String sysDateCol;
/**
 * Whether the conf asks for "most recent N days": RECENT_UNITS mode with a
 * DAY period and an explicit unit.
 */
// Idiom fix: return the condition directly instead of if-true/return-false.
public Boolean recentMode(DateConf dateInfo) {
    return Objects.nonNull(dateInfo) && DateConf.DateMode.RECENT_UNITS == dateInfo.getDateMode()
            && DAY.equalsIgnoreCase(dateInfo.getPeriod()) && Objects.nonNull(dateInfo.getUnit());
}
/** Whether the conf asks to anchor the window to the available data (AVAILABLE_TIME mode). */
// Idiom fix: return the condition directly instead of if-true/return-false.
public boolean hasDataMode(DateConf dateInfo) {
    return Objects.nonNull(dateInfo) && DateConf.DateMode.AVAILABLE_TIME == dateInfo.getDateMode();
}
/**
 * dateMode = 4: advance the requested window until data is available.
 *
 * <p>When the item's own date metadata is unusable the requested window is
 * used as-is. Otherwise, if the request reaches past the last available date,
 * the window is shifted back so it ends on the last date that has data.
 *
 * @param dateDate item date-availability metadata (start/end/format)
 * @param dateInfo requested date window
 * @return SQL predicate on the system date column
 */
public String hasDataModeStr(ItemDateResp dateDate, DateConf dateInfo) {
    // Bug fix: the original tested getStartDate() twice, so a missing END
    // date slipped past this guard and failed at LocalDate.parse below.
    if (Objects.isNull(dateDate)
            || Strings.isNullOrEmpty(dateDate.getStartDate())
            || Strings.isNullOrEmpty(dateDate.getEndDate())
    ) {
        return String.format("(%s >= '%s' and %s <= '%s')", sysDateCol, dateInfo.getStartDate(), sysDateCol,
                dateInfo.getEndDate());
    } else {
        log.info("dateDate:{}", dateDate);
    }
    String dateFormatStr = dateDate.getDateFormat();
    if (Strings.isNullOrEmpty(dateFormatStr)) {
        dateFormatStr = DAY_FORMAT;
    }
    DateTimeFormatter formatter = DateTimeFormatter.ofPattern(dateFormatStr);
    LocalDate endData = LocalDate.parse(dateDate.getEndDate(), formatter);
    LocalDate endReq = LocalDate.parse(dateInfo.getEndDate(), formatter);
    if (endReq.isAfter(endData)) {
        if (DAY.equalsIgnoreCase(dateInfo.getPeriod())) {
            // Keep the requested window length, but end on the last day with data.
            Long unit = getInterval(dateInfo.getStartDate(), dateInfo.getEndDate(), dateFormatStr, ChronoUnit.DAYS);
            LocalDate dateMax = endData;
            LocalDate dateMin = dateMax.minusDays(unit - 1);
            return String.format("(%s >= '%s' and %s <= '%s')", sysDateCol, dateMin, sysDateCol, dateMax);
        }
        if (MONTH.equalsIgnoreCase(dateInfo.getPeriod())) {
            Long unit = getInterval(dateInfo.getStartDate(), dateInfo.getEndDate(), dateFormatStr,
                    ChronoUnit.MONTHS);
            return generateMonthSql(endData, unit, dateFormatStr);
        }
    }
    return String.format("(%s >= '%s' and %s <= '%s')", sysDateCol, dateInfo.getStartDate(), sysDateCol,
            dateInfo.getEndDate());
}
/**
 * Builds an IN-list predicate over the last {@code unit} months ending at
 * {@code endData}; returns an empty string when no month values are produced.
 */
public String generateMonthSql(LocalDate endData, Long unit, String dateFormatStr) {
    List<String> monthValues = generateMonthStr(endData, unit, dateFormatStr);
    if (CollectionUtils.isEmpty(monthValues)) {
        return "";
    }
    StringJoiner inList = new StringJoiner(",");
    for (String month : monthValues) {
        inList.add("'" + month + "'");
    }
    return String.format("(%s in (%s))", sysDateCol, inList.toString());
}
// First-day-of-month strings for the `unit` months ending at dateMax,
// oldest first, rendered with formatStr.
private List<String> generateMonthStr(LocalDate dateMax, Long unit, String formatStr) {
    DateTimeFormatter monthFormat = DateTimeFormatter.ofPattern(formatStr);
    List<String> months = new ArrayList<>();
    for (int offset = unit.intValue() - 1; offset >= 0; offset--) {
        months.add(dateMax.minusMonths(offset)
                .with(TemporalAdjusters.firstDayOfMonth())
                .format(monthFormat));
    }
    return months;
}
/**
 * Builds a day-granularity range clause for the most recent {@code dateInfo.getUnit()}
 * days, anchored at the datasource's latest available date rather than "today".
 */
public String recentDayStr(ItemDateResp dateDate, DateConf dateInfo) {
    String formatPattern = dateDate.getDateFormat();
    if (Strings.isNullOrEmpty(formatPattern)) {
        formatPattern = DAY_FORMAT;
    }
    DateTimeFormatter dayFormatter = DateTimeFormatter.ofPattern(formatPattern);
    LocalDate latestAvailable = LocalDate.parse(dateDate.getEndDate(), dayFormatter);
    // todo unavailableDateList logic
    int daysBack = dateInfo.getUnit() - 1;
    String start = latestAvailable.minusDays(daysBack).format(dayFormatter);
    return String.format("(%s >= '%s' and %s <= '%s')", sysDateCol, start, sysDateCol, dateDate.getEndDate());
}
/**
 * Counts the number of {@code chronoUnit} steps between the two dates, inclusive
 * of both endpoints (i.e. {@code start.until(end) + 1}).
 *
 * @param startDate  start date string in {@code dateFormat}
 * @param endDate    end date string in {@code dateFormat}
 * @param dateFormat pattern used to parse both dates
 * @param chronoUnit unit to count in (e.g. DAYS, MONTHS)
 * @return inclusive interval length, or -1 when either date fails to parse
 */
private Long getInterval(String startDate, String endDate, String dateFormat, ChronoUnit chronoUnit) {
    DateTimeFormatter formatter = DateTimeFormatter.ofPattern(dateFormat);
    try {
        LocalDate start = LocalDate.parse(startDate, formatter);
        LocalDate end = LocalDate.parse(endDate, formatter);
        return start.until(end, chronoUnit) + 1;
    } catch (Exception e) {
        // Pass the exception as the trailing argument (not via "{}") so SLF4J logs
        // the full stack trace instead of just e.toString().
        log.warn("getInterval parse failed, start:{}, end:{}, format:{}", startDate, endDate, dateFormat, e);
    }
    // Sentinel kept for callers that treat a negative interval as "unknown".
    return -1L;
}
/**
 * Dispatches "recent units" requests: only day-granularity periods are handled
 * here; any other period yields an empty clause.
 */
public String recentDateStr(ItemDateResp dateDate, DateConf dateInfo) {
    if (!DAY.equalsIgnoreCase(dateInfo.getPeriod())) {
        return "";
    }
    return recentDayStr(dateDate, dateInfo);
}
/**
 * dateMode = 1; between, continuous value.
 * Builds "col >= 'start' and col <= 'end'" (note: unparenthesized, unlike the
 * other builders in this class).
 *
 * @param dateInfo carries the start and end date strings
 * @return SQL range predicate over the system date column
 */
public String betweenDateStr(ItemDateResp dateDate, DateConf dateInfo) {
    String lowerBound = String.format("%s >= '%s'", sysDateCol, dateInfo.getStartDate());
    String upperBound = String.format("%s <= '%s'", sysDateCol, dateInfo.getEndDate());
    return lowerBound + " and " + upperBound;
}
/**
 * dateMode = 2; list discrete value.
 * Builds "(col in ('d1','d2',...))" from the explicit date list.
 *
 * @param dateInfo carries the discrete date values
 * @return SQL IN predicate over the system date column
 */
public String listDateStr(ItemDateResp dateDate, DateConf dateInfo) {
    StringJoiner inValues = new StringJoiner(COMMA);
    dateInfo.getDateList().forEach(date -> inValues.add(APOSTROPHE + date + APOSTROPHE));
    return String.format("(%s in (%s))", sysDateCol, inValues.toString());
}
/**
 * dateMode = 3; - recent time units.
 * Fallback window when no datasource date metadata is available: the last
 * {@code unit} days/months ending yesterday, or a two-day window for unknown periods.
 *
 * @param dateInfo requested period and unit count; null yields an empty clause
 * @return SQL date predicate over the system date column
 */
public String defaultRecentDateInfo(DateConf dateInfo) {
    if (Objects.isNull(dateInfo)) {
        return "";
    }
    Integer unit = dateInfo.getUnit();
    // Yesterday is treated as the newest complete day of data.
    LocalDate newestDay = LocalDate.now().minusDays(1);
    if (DAY.equalsIgnoreCase(dateInfo.getPeriod())) {
        LocalDate oldestDay = newestDay.minusDays(unit - 1);
        return String.format("(%s >= '%s' and %s <= '%s')", sysDateCol, oldestDay, sysDateCol, newestDay);
    }
    if (MONTH.equalsIgnoreCase(dateInfo.getPeriod())) {
        return generateMonthSql(newestDay, unit.longValue(), DAY_FORMAT);
    }
    // Unknown period: default to the two most recent complete days.
    return String.format("(%s >= '%s' and %s <= '%s')", sysDateCol, LocalDate.now().minusDays(2), sysDateCol,
            LocalDate.now().minusDays(1));
}
/**
 * Routes to the clause builder that matches {@code dateInfo.getDateMode()};
 * unrecognized modes produce an empty string.
 */
public String getDateWhereStr(DateConf dateInfo, ItemDateResp dateDate) {
    switch (dateInfo.getDateMode()) {
        case BETWEEN_CONTINUOUS:
            return betweenDateStr(dateDate, dateInfo);
        case LIST_DISCRETE:
            return listDateStr(dateDate, dateInfo);
        case RECENT_UNITS:
            return recentDateStr(dateDate, dateInfo);
        case AVAILABLE_TIME:
            return hasDataModeStr(dateDate, dateInfo);
        default:
            return "";
    }
}
}

View File

@@ -0,0 +1,134 @@
package com.tencent.supersonic.semantic.query.domain.utils;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.tencent.supersonic.semantic.api.core.pojo.Identify;
import com.tencent.supersonic.semantic.api.core.pojo.Measure;
import com.tencent.supersonic.semantic.api.core.response.DatasourceResp;
import com.tencent.supersonic.semantic.api.core.response.DimensionResp;
import com.tencent.supersonic.semantic.api.core.response.MetricResp;
import com.tencent.supersonic.semantic.api.query.pojo.Filter;
import com.tencent.supersonic.semantic.api.query.request.QueryStructReq;
import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.semantic.core.domain.DatasourceService;
import com.tencent.supersonic.semantic.core.domain.DimensionService;
import com.tencent.supersonic.semantic.core.domain.MetricService;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
@Slf4j
@Component
public class MultiSourceJoinUtils {

    private final DatasourceService datasourceService;
    private final DimensionService dimensionService;
    private final MetricService metricService;

    public MultiSourceJoinUtils(DatasourceService datasourceService, DimensionService dimensionService,
            MetricService metricService) {
        this.datasourceService = datasourceService;
        this.dimensionService = dimensionService;
        this.metricService = metricService;
    }

    /**
     * When the queried dimensions/filters and metric measures do not all resolve
     * to the same datasource, rewrites group names and filter bizNames to
     * "&lt;joinKey&gt;__&lt;bizName&gt;" so downstream SQL generation can tell which side of
     * the multi-source join each column belongs to.
     * Mutates {@code queryStructCmd} in place; no-op when groups or aggregators
     * are empty, or when everything lives in a single datasource.
     *
     * @param queryStructCmd struct query to decorate (modified in place)
     */
    public void buildJoinPrefix(QueryStructReq queryStructCmd) {
        List<String> groups = queryStructCmd.getGroups();
        List<Aggregator> aggregators = queryStructCmd.getAggregators();
        // Check the short-circuit first so no lookups are wasted on empty requests.
        if (CollectionUtils.isEmpty(groups) || CollectionUtils.isEmpty(aggregators)) {
            return;
        }
        // Guard against a null filter list; streaming it directly would NPE.
        List<Filter> filters = queryStructCmd.getOriginalFilter() == null
                ? Lists.newArrayList() : queryStructCmd.getOriginalFilter();
        List<String> fields = Lists.newArrayList();
        fields.addAll(groups);
        fields.addAll(filters.stream().map(Filter::getBizName).collect(Collectors.toList()));
        Long domainId = queryStructCmd.getDomainId();
        List<String> aggs = aggregators.stream().map(Aggregator::getColumn).collect(Collectors.toList());
        Map<String, DimensionResp> dimensionMap = dimensionService.getDimensions(domainId).stream()
                .filter(dimensionDesc -> fields.contains(dimensionDesc.getBizName()))
                .collect(Collectors.toMap(DimensionResp::getBizName, dimensionDesc -> dimensionDesc));
        List<MetricResp> metricDescList = metricService.getMetrics(domainId).stream()
                .filter(metricDesc -> aggs.contains(metricDesc.getBizName()))
                .collect(Collectors.toList());
        // check groups filters and aggs is in same datasource
        if (isInSameDatasource(new ArrayList<>(dimensionMap.values()), metricDescList)) {
            return;
        }
        Map<Long, DatasourceResp> datasourceMap = datasourceService.getDatasourceList(domainId)
                .stream().collect(Collectors.toMap(DatasourceResp::getId, datasource -> datasource));
        queryStructCmd.setGroups(buildPrefixedGroups(groups, dimensionMap, datasourceMap));
        queryStructCmd.setDimensionFilters(buildPrefixedFilters(filters, dimensionMap, datasourceMap));
    }

    /** Prefixes each group with its datasource join key, unless the group IS the join key. */
    private List<String> buildPrefixedGroups(List<String> groups, Map<String, DimensionResp> dimensionMap,
            Map<Long, DatasourceResp> datasourceMap) {
        List<String> groupsWithPrefix = Lists.newArrayList();
        for (String group : groups) {
            DimensionResp dimensionDesc = dimensionMap.get(group);
            if (dimensionDesc == null) {
                // Unknown dimension (e.g. internal column): leave it untouched.
                groupsWithPrefix.add(group);
                continue;
            }
            String joinKeyName = getJoinKey(datasourceMap, dimensionDesc.getDatasourceId());
            if (joinKeyName.equalsIgnoreCase(group)) {
                groupsWithPrefix.add(group);
            } else {
                groupsWithPrefix.add(String.format("%s__%s", joinKeyName, group));
            }
        }
        return groupsWithPrefix;
    }

    /** Renames each filter's bizName with its datasource join key prefix (mutates the Filter objects). */
    private List<Filter> buildPrefixedFilters(List<Filter> filters, Map<String, DimensionResp> dimensionMap,
            Map<Long, DatasourceResp> datasourceMap) {
        List<Filter> filtersWithPrefix = Lists.newArrayList();
        for (Filter filter : filters) {
            DimensionResp dimensionDesc = dimensionMap.get(filter.getBizName());
            if (dimensionDesc == null) {
                filtersWithPrefix.add(filter);
                continue;
            }
            String joinKeyName = getJoinKey(datasourceMap, dimensionDesc.getDatasourceId());
            if (!joinKeyName.equalsIgnoreCase(filter.getBizName())) {
                filter.setBizName(String.format("%s__%s", joinKeyName, filter.getBizName()));
            }
            filtersWithPrefix.add(filter);
        }
        return filtersWithPrefix;
    }

    /**
     * Returns the bizName of the datasource's "primary" identifier, or "" when the
     * datasource is not in the map or has no primary identify.
     */
    private String getJoinKey(Map<Long, DatasourceResp> datasourceMap, Long datasourceId) {
        DatasourceResp datasourceDesc = datasourceMap.get(datasourceId);
        if (datasourceDesc == null) {
            // Dimension references a datasource outside this domain's list; the old
            // code dereferenced null here.
            return "";
        }
        List<Identify> identifies = datasourceDesc.getDatasourceDetail().getIdentifiers();
        Optional<Identify> identifyOptional = identifies.stream()
                .filter(identify -> identify.getType().equalsIgnoreCase("primary")).findFirst();
        return identifyOptional.map(Identify::getBizName).orElse("");
    }

    /** True when all referenced dimensions and metric measures resolve to at most one datasource id. */
    private boolean isInSameDatasource(List<DimensionResp> dimensionDescs, List<MetricResp> metricDescs) {
        Set<Long> datasourceIdSet = Sets.newHashSet();
        datasourceIdSet.addAll(dimensionDescs.stream().map(DimensionResp::getDatasourceId).filter(Objects::nonNull)
                .collect(Collectors.toSet()));
        datasourceIdSet.addAll(
                metricDescs.stream().flatMap(metricDesc -> metricDesc.getTypeParams().getMeasures().stream())
                        .map(Measure::getDatasourceId).filter(Objects::nonNull).collect(Collectors.toList()));
        log.info("[multi source join] datasource id:{}", datasourceIdSet);
        return datasourceIdSet.size() <= 1;
    }
}

View File

@@ -0,0 +1,107 @@
package com.tencent.supersonic.semantic.query.domain.utils;
import static com.tencent.supersonic.common.constant.Constants.UNDERLINE;
import com.tencent.supersonic.semantic.api.core.response.DimensionResp;
import com.tencent.supersonic.semantic.api.core.response.SqlParserResp;
import com.tencent.supersonic.semantic.api.query.pojo.Param;
import com.tencent.supersonic.semantic.api.query.request.MetricReq;
import com.tencent.supersonic.semantic.api.query.request.QueryStructReq;
import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.semantic.core.domain.DimensionService;
import com.tencent.supersonic.semantic.core.domain.DomainService;
import com.tencent.supersonic.semantic.query.domain.ParserService;
import com.tencent.supersonic.semantic.query.domain.utils.calculate.CalculateConverter;
import com.tencent.supersonic.semantic.query.domain.utils.calculate.CalculateConverterAgg;
import java.util.LinkedList;
import java.util.List;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.apache.logging.log4j.util.Strings;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
@Slf4j
@Component
public class ParserCommandConverter {

    private final ParserService parserService;
    private final DomainService domainService;
    private final CalculateConverterAgg calculateConverterAgg;
    @Value("${internal.metric.cnt.suffix:internal_cnt}")
    private String internalMetricNameSuffix;
    private final DimensionService dimensionService;
    // Ordered chain of converters; the first one that accepts a command handles it.
    private final List<CalculateConverter> calculateConverters = new LinkedList<>();
    private final QueryStructUtils queryStructUtils;

    public ParserCommandConverter(ParserService parserService,
            DomainService domainService,
            CalculateConverterAgg calculateConverterAgg,
            DimensionService dimensionService,
            @Lazy QueryStructUtils queryStructUtils) {
        this.parserService = parserService;
        this.domainService = domainService;
        this.calculateConverterAgg = calculateConverterAgg;
        this.dimensionService = dimensionService;
        this.queryStructUtils = queryStructUtils;
        calculateConverters.add(calculateConverterAgg);
    }

    /**
     * Parses a struct query into SQL: the first registered calculate converter
     * that accepts the command handles it, otherwise the command is translated to
     * a {@link MetricReq} and sent to the generic parser.
     *
     * @param queryStructCmd struct query to parse
     * @return parsed SQL plus source id
     * @throws Exception propagated from the underlying parser
     */
    public SqlParserResp getSqlParser(QueryStructReq queryStructCmd) throws Exception {
        StatUtils.get().setUseSqlCache(false);
        for (CalculateConverter calculateConverter : calculateConverters) {
            if (calculateConverter.accept(queryStructCmd)) {
                log.info("getSqlParser {}", calculateConverter.getClass());
                return calculateConverter.getSqlParser(queryStructCmd);
            }
        }
        return parserService.physicalSql(generateSqlCommand(queryStructCmd));
    }

    /**
     * Maps a {@link QueryStructReq} onto the {@link MetricReq} consumed by the
     * parser: metrics, dimensions, where clause, ordering, variables, limit and
     * the domain root path. Detail (native) queries with no metrics get an
     * internal count metric injected so the generated SQL has a select expression.
     */
    public MetricReq generateSqlCommand(QueryStructReq queryStructCmd) {
        MetricReq sqlCommend = new MetricReq();
        sqlCommend.setMetrics(queryStructCmd.getMetrics());
        sqlCommend.setDimensions(queryStructCmd.getGroups());
        String where = queryStructUtils.generateWhere(queryStructCmd);
        log.info("in generateSqlCommend, complete where:{}", where);
        sqlCommend.setWhere(where);
        sqlCommend.setOrder(queryStructCmd.getOrders().stream()
                .map(order -> new ColumnOrder(order.getColumn(), order.getDirection())).collect(Collectors.toList()));
        // On duplicate variable names, the first value wins.
        sqlCommend.setVariables(queryStructCmd.getParams().stream()
                .collect(Collectors.toMap(Param::getName, Param::getValue, (k1, k2) -> k1)));
        sqlCommend.setLimit(queryStructCmd.getLimit());
        String rootPath = domainService.getDomainFullPath(queryStructCmd.getDomainId());
        sqlCommend.setRootPath(rootPath);
        // support detail query
        if (queryStructCmd.getNativeQuery() && CollectionUtils.isEmpty(sqlCommend.getMetrics())) {
            String internalMetricName = generateInternalMetricName(queryStructCmd);
            sqlCommend.getMetrics().add(internalMetricName);
        }
        return sqlCommend;
    }

    /**
     * Builds the internal count metric name "&lt;datasourceBizName&gt;_&lt;suffix&gt;". The
     * datasource is resolved from the first non-date group dimension; when groups
     * are empty or the dimension cannot be resolved, only the suffix is returned.
     */
    public String generateInternalMetricName(QueryStructReq queryStructCmd) {
        String internalMetricNamePrefix = "";
        List<String> groups = queryStructCmd.getGroups();
        if (CollectionUtils.isEmpty(groups)) {
            log.warn("group is empty!");
        } else {
            // Prefer the first non-date group. Fixed: the old code indexed get(1)
            // unconditionally and threw IndexOutOfBounds when sys_imp_date was the
            // only group.
            String group = groups.get(0);
            if (group.equalsIgnoreCase("sys_imp_date") && groups.size() > 1) {
                group = groups.get(1);
            }
            DimensionResp dimension = dimensionService.getDimension(group, queryStructCmd.getDomainId());
            // Fixed: guard against an unresolved dimension before dereferencing it.
            if (dimension != null && Strings.isNotEmpty(dimension.getDatasourceBizName())) {
                internalMetricNamePrefix = dimension.getDatasourceBizName() + UNDERLINE;
            }
        }
        return internalMetricNamePrefix + internalMetricNameSuffix;
    }
}

View File

@@ -0,0 +1,299 @@
package com.tencent.supersonic.semantic.query.domain.utils;
import static com.tencent.supersonic.common.constant.Constants.UNIONALL;
import com.tencent.supersonic.semantic.api.core.pojo.ItemDateFilter;
import com.tencent.supersonic.semantic.api.core.response.DimensionResp;
import com.tencent.supersonic.semantic.api.core.response.ItemDateResp;
import com.tencent.supersonic.semantic.api.core.response.MetricResp;
import com.tencent.supersonic.semantic.api.core.response.QueryResultWithSchemaResp;
import com.tencent.supersonic.semantic.api.core.response.SqlParserResp;
import com.tencent.supersonic.semantic.api.query.pojo.Cache;
import com.tencent.supersonic.semantic.api.query.request.QueryMultiStructReq;
import com.tencent.supersonic.semantic.api.query.request.QueryStructReq;
import com.tencent.supersonic.common.constant.Constants;
import com.tencent.supersonic.common.enums.TypeEnums;
import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.common.pojo.DateConf;
import com.tencent.supersonic.common.pojo.SchemaItem;
import com.tencent.supersonic.common.util.cache.CacheUtils;
import com.tencent.supersonic.semantic.core.domain.DatabaseService;
import com.tencent.supersonic.semantic.core.domain.DatasourceService;
import com.tencent.supersonic.semantic.core.domain.DimensionService;
import com.tencent.supersonic.semantic.core.domain.MetricService;
import com.tencent.supersonic.semantic.query.domain.ParserService;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.apache.logging.log4j.util.Strings;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
@Slf4j
@Component
public class QueryStructUtils {

    private final DatabaseService databaseService;
    private final QueryUtils queryUtils;
    private final ParserService parserService;
    private final SqlParserUtils sqlParserUtils;
    private final StatUtils statUtils;
    private final DimensionService dimensionService;
    private final MetricService metricService;
    private final DatasourceService datasourceService;
    private final com.tencent.supersonic.domain.semantic.query.domain.utils.DateUtils dateUtils;
    private final SqlFilterUtils sqlFilterUtils;
    private final CacheUtils cacheUtils;
    @Value("${query.cache.enable:true}")
    private Boolean cacheEnable;
    // Technical columns that must never be surfaced as user-facing fields.
    Set<String> internalCols = new HashSet<>(
            Arrays.asList("dayno", "plat_sys_var", "sys_imp_date", "sys_imp_week", "sys_imp_month"));

    public QueryStructUtils(DatabaseService databaseService,
            QueryUtils queryUtils,
            ParserService parserService,
            SqlParserUtils sqlParserUtils,
            StatUtils statUtils,
            DimensionService dimensionService,
            MetricService metricService,
            DatasourceService datasourceService,
            com.tencent.supersonic.domain.semantic.query.domain.utils.DateUtils dateUtils,
            SqlFilterUtils sqlFilterUtils,
            CacheUtils cacheUtils) {
        this.databaseService = databaseService;
        this.queryUtils = queryUtils;
        this.parserService = parserService;
        this.sqlParserUtils = sqlParserUtils;
        this.statUtils = statUtils;
        this.dimensionService = dimensionService;
        this.metricService = metricService;
        this.datasourceService = datasourceService;
        this.dateUtils = dateUtils;
        this.sqlFilterUtils = sqlFilterUtils;
        this.cacheUtils = cacheUtils;
    }

    /**
     * Serves a struct query from the result cache when a value exists under
     * {@code key}; otherwise falls through to the database path (which also
     * repopulates the cache).
     */
    public QueryResultWithSchemaResp queryByStructByCache(QueryStructReq queryStructCmd, String key) throws Exception {
        QueryResultWithSchemaResp queryResultWithColumns;
        Object resultObject = cacheUtils.get(key);
        if (Objects.nonNull(resultObject)) {
            log.info("queryByStructWithCache, key:{}, queryStructCmd:{}", key, queryStructCmd.toString());
            statUtils.updateResultCacheKey(key);
            return (QueryResultWithSchemaResp) resultObject;
        }
        // if cache data is null, query database
        queryResultWithColumns = queryByStructWithoutCache(queryStructCmd, key);
        return queryResultWithColumns;
    }

    /**
     * Whether the result cache should be consulted: true when no cache hint was
     * supplied, otherwise whatever the hint says.
     */
    public boolean queryCache(Cache cacheInfo) {
        // Fixed: the old body re-checked Objects.nonNull(cacheInfo) after the
        // isNull guard above; that second check could never be false.
        if (Objects.isNull(cacheInfo)) {
            return true;
        }
        return cacheInfo.getCache();
    }

    /**
     * Parses, executes and post-processes a struct query against the database,
     * then (asynchronously) caches the result under {@code key}.
     */
    public QueryResultWithSchemaResp queryByStructWithoutCache(QueryStructReq queryStructCmd, String key)
            throws Exception {
        log.info("stat queryByStructWithoutCache, queryStructCmd:{}", queryStructCmd);
        StatUtils.get().setUseResultCache(false);
        SqlParserResp sqlParser = getSqlParser(queryStructCmd);
        queryUtils.checkSqlParse(sqlParser);
        log.info("sqlParser:{}", sqlParser);
        // Strips aggregation artifacts for detail (native) queries.
        queryUtils.handleDetail(queryStructCmd, sqlParser);
        QueryResultWithSchemaResp queryResultWithColumns = databaseService.queryWithColumns(sqlParser);
        queryUtils.fillItemNameInfo(queryResultWithColumns, queryStructCmd.getDomainId());
        queryResultWithColumns.setSql(sqlParser.getSql());
        // if queryResultWithColumns is not null, update cache data
        cacheResultLogic(key, queryResultWithColumns);
        return queryResultWithColumns;
    }

    /** Writes a non-empty result to the cache asynchronously; failures are only logged. */
    private void cacheResultLogic(String key, QueryResultWithSchemaResp queryResultWithColumns) {
        if (cacheEnable && Objects.nonNull(queryResultWithColumns) && !CollectionUtils.isEmpty(
                queryResultWithColumns.getResultList())) {
            QueryResultWithSchemaResp finalQueryResultWithColumns = queryResultWithColumns;
            CompletableFuture.supplyAsync(() -> cacheUtils.put(key, finalQueryResultWithColumns))
                    .exceptionally(exception -> {
                        log.warn("exception:", exception);
                        return null;
                    });
            statUtils.updateResultCacheKey(key);
            log.info("add record to cache, key:{}", key);
        }
    }

    /**
     * Executes each sub-query of a multi-struct request, unions the parsed SQL
     * into a single statement, runs it once, and caches the combined result.
     */
    public QueryResultWithSchemaResp queryByMultiStructWithoutCache(QueryMultiStructReq queryMultiStructCmd, String key)
            throws Exception {
        log.info("stat queryByStructWithoutCache, queryMultiStructCmd:{}", queryMultiStructCmd);
        QueryResultWithSchemaResp queryResultWithColumns;
        List<SqlParserResp> sqlParsers = new ArrayList<>();
        for (QueryStructReq queryStructCmd : queryMultiStructCmd.getQueryStructCmds()) {
            SqlParserResp sqlParser = getSqlParser(queryStructCmd);
            queryUtils.checkSqlParse(sqlParser);
            queryUtils.handleDetail(queryStructCmd, sqlParser);
            sqlParsers.add(sqlParser);
        }
        log.info("multi sqlParser:{}", sqlParsers);
        SqlParserResp sqlParser = sqlParserUnion(queryMultiStructCmd, sqlParsers);
        queryResultWithColumns = databaseService.queryWithColumns(sqlParser);
        queryUtils.fillItemNameInfo(queryResultWithColumns, queryMultiStructCmd);
        queryResultWithColumns.setSql(sqlParser.getSql());
        cacheResultLogic(key, queryResultWithColumns);
        return queryResultWithColumns;
    }

    /**
     * Wraps each parsed sub-query in a select and concatenates them with UNION ALL.
     * NOTE(review): the source id is taken from the first sub-query — assumes all
     * sub-queries share one source; confirm upstream.
     */
    private SqlParserResp sqlParserUnion(QueryMultiStructReq queryMultiStructCmd, List<SqlParserResp> sqlParsers) {
        SqlParserResp sqlParser = new SqlParserResp();
        StringBuilder unionSqlBuilder = new StringBuilder();
        for (int i = 0; i < sqlParsers.size(); i++) {
            String selectStr = SqlGenerateUtils.getUnionSelect(queryMultiStructCmd.getQueryStructCmds().get(i));
            unionSqlBuilder.append(String.format("select %s from ( %s ) sub_sql_%s",
                    selectStr,
                    sqlParsers.get(i).getSql(), i));
            unionSqlBuilder.append(UNIONALL);
        }
        // Drop the trailing UNION ALL appended after the last sub-query.
        String unionSql = unionSqlBuilder.substring(0, unionSqlBuilder.length() - Constants.UNIONALL.length());
        sqlParser.setSql(unionSql);
        sqlParser.setSourceId(sqlParsers.get(0).getSourceId());
        log.info("union sql parser:{}", sqlParser);
        return sqlParser;
    }

    private SqlParserResp getSqlParser(QueryStructReq queryStructCmd) throws Exception {
        return sqlParserUtils.getSqlParserWithoutCache(queryStructCmd);
    }

    /** Resolves the ids of every dimension referenced by the query's groups and filters. */
    private List<Long> getDimensionIds(QueryStructReq queryStructCmd) {
        List<Long> dimensionIds = new ArrayList<>();
        List<DimensionResp> dimensions = dimensionService.getDimensions(queryStructCmd.getDomainId());
        Map<String, List<DimensionResp>> pair = dimensions.stream()
                .collect(Collectors.groupingBy(DimensionResp::getBizName));
        for (String group : queryStructCmd.getGroups()) {
            if (pair.containsKey(group)) {
                dimensionIds.add(pair.get(group).get(0).getId());
            }
        }
        List<String> filtersCols = sqlFilterUtils.getFiltersCol(queryStructCmd.getOriginalFilter());
        for (String col : filtersCols) {
            if (pair.containsKey(col)) {
                dimensionIds.add(pair.get(col).get(0).getId());
            }
        }
        return dimensionIds;
    }

    /** Resolves the ids of every metric referenced by the query's aggregators and filters. */
    private List<Long> getMetricIds(QueryStructReq queryStructCmd) {
        List<Long> metricIds = new ArrayList<>();
        List<MetricResp> metrics = metricService.getMetrics(queryStructCmd.getDomainId());
        Map<String, List<MetricResp>> pair = metrics.stream().collect(Collectors.groupingBy(SchemaItem::getBizName));
        for (Aggregator agg : queryStructCmd.getAggregators()) {
            if (pair.containsKey(agg.getColumn())) {
                metricIds.add(pair.get(agg.getColumn()).get(0).getId());
            }
        }
        List<String> filtersCols = sqlFilterUtils.getFiltersCol(queryStructCmd.getOriginalFilter());
        for (String col : filtersCols) {
            if (pair.containsKey(col)) {
                metricIds.add(pair.get(col).get(0).getId());
            }
        }
        return metricIds;
    }

    /**
     * Builds the date-range predicate of the where clause. Looks up the actual
     * available date range of the referenced items; when none is known, falls
     * back to a default recent window.
     */
    public String getDateWhereClause(QueryStructReq queryStructCmd) {
        DateConf dateInfo = queryStructCmd.getDateInfo();
        if (Objects.isNull(dateInfo) || Objects.isNull(dateInfo.getDateMode())) {
            return "";
        }
        List<Long> dimensionIds = getDimensionIds(queryStructCmd);
        List<Long> metricIds = getMetricIds(queryStructCmd);
        ItemDateResp dateDate = datasourceService.getDateDate(
                new ItemDateFilter(dimensionIds, TypeEnums.DIMENSION.getName()),
                new ItemDateFilter(metricIds, TypeEnums.METRIC.getName()));
        // Parentheses added for clarity only: && already bound tighter than ||.
        if (Objects.isNull(dateDate)
                || (Strings.isEmpty(dateDate.getStartDate())
                && Strings.isEmpty(dateDate.getEndDate()))) {
            if (dateUtils.hasDataMode(dateInfo)) {
                // NOTE(review): dateDate may be null on this path — confirm
                // hasDataModeStr tolerates a null ItemDateResp.
                return dateUtils.hasDataModeStr(dateDate, dateInfo);
            }
            return dateUtils.defaultRecentDateInfo(queryStructCmd.getDateInfo());
        }
        log.info("dateDate:{}", dateDate);
        return dateUtils.getDateWhereStr(dateInfo, dateDate);
    }

    /**
     * Combines the filter-derived and date-derived where fragments. When both are
     * empty, falls back to the default recent-date window.
     */
    public String generateWhere(QueryStructReq queryStructCmd) {
        String whereClauseFromFilter = sqlFilterUtils.getWhereClause(queryStructCmd.getOriginalFilter());
        String whereFromDate = getDateWhereClause(queryStructCmd);
        if (Strings.isNotEmpty(whereFromDate) && Strings.isNotEmpty(whereClauseFromFilter)) {
            return String.format("%s AND (%s)", whereFromDate, whereClauseFromFilter);
        }
        if (Strings.isNotEmpty(whereClauseFromFilter)) {
            return whereClauseFromFilter;
        }
        if (Strings.isNotEmpty(whereFromDate)) {
            return whereFromDate;
        }
        // Both fragments empty. Fixed: the old chain spelled out all four
        // combinations and left an unreachable trailing return.
        log.info("the current date information is empty, enter the date initialization logic");
        return dateUtils.defaultRecentDateInfo(queryStructCmd.getDateInfo());
    }

    /** Every bizName the query touches: aggregator columns, groups, order columns and filter columns. */
    public Set<String> getResNameEn(QueryStructReq queryStructCmd) {
        Set<String> resNameEnSet = new HashSet<>();
        queryStructCmd.getAggregators().stream().forEach(agg -> resNameEnSet.add(agg.getColumn()));
        resNameEnSet.addAll(queryStructCmd.getGroups());
        queryStructCmd.getOrders().stream().forEach(order -> resNameEnSet.add(order.getColumn()));
        sqlFilterUtils.getFiltersCol(queryStructCmd.getOriginalFilter()).stream().forEach(col -> resNameEnSet.add(col));
        return resNameEnSet;
    }

    /** Same as {@link #getResNameEn} minus the internal system columns. */
    public Set<String> getResNameEnExceptInternalCol(QueryStructReq queryStructCmd) {
        Set<String> resNameEnSet = getResNameEn(queryStructCmd);
        return resNameEnSet.stream().filter(res -> !internalCols.contains(res)).collect(Collectors.toSet());
    }

    /** Columns referenced only by the query's filters. */
    public Set<String> getFilterResNameEn(QueryStructReq queryStructCmd) {
        Set<String> resNameEnSet = new HashSet<>();
        sqlFilterUtils.getFiltersCol(queryStructCmd.getOriginalFilter()).stream().forEach(col -> resNameEnSet.add(col));
        return resNameEnSet;
    }

    /** Same as {@link #getFilterResNameEn} minus the internal system columns. */
    public Set<String> getFilterResNameEnExceptInternalCol(QueryStructReq queryStructCmd) {
        Set<String> resNameEnSet = getFilterResNameEn(queryStructCmd);
        return resNameEnSet.stream().filter(res -> !internalCols.contains(res)).collect(Collectors.toSet());
    }
}

View File

@@ -0,0 +1,233 @@
package com.tencent.supersonic.semantic.query.domain.utils;
import static com.tencent.supersonic.common.constant.Constants.END_SUBQUERY;
import static com.tencent.supersonic.common.constant.Constants.GROUP_UPPER;
import static com.tencent.supersonic.common.constant.Constants.JOIN_UNDERLINE;
import static com.tencent.supersonic.common.constant.Constants.LIMIT_UPPER;
import static com.tencent.supersonic.common.constant.Constants.ORDER_UPPER;
import static com.tencent.supersonic.common.constant.Constants.SPACE;
import com.google.common.base.Strings;
import com.tencent.supersonic.semantic.api.core.enums.TimeDimensionEnum;
import com.tencent.supersonic.semantic.api.core.pojo.QueryColumn;
import com.tencent.supersonic.semantic.api.core.response.DimensionResp;
import com.tencent.supersonic.semantic.api.core.response.MetricResp;
import com.tencent.supersonic.semantic.api.core.response.QueryResultWithSchemaResp;
import com.tencent.supersonic.semantic.api.core.response.SqlParserResp;
import com.tencent.supersonic.semantic.api.query.request.QueryMultiStructReq;
import com.tencent.supersonic.semantic.api.query.request.QueryStructReq;
import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.semantic.core.domain.DimensionService;
import com.tencent.supersonic.semantic.core.domain.MetricService;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import javax.annotation.PostConstruct;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
@Slf4j
@Component
public class QueryUtils {
private final Set<Pattern> patterns = new HashSet<>();
@PostConstruct
public void fillPattern() {
Set<String> aggFunctions = new HashSet<>(Arrays.asList("MAX", "MIN", "SUM", "AVG"));
String patternStr = "\\s*(%s\\((.*)\\)) AS";
for (String agg : aggFunctions) {
patterns.add(Pattern.compile(String.format(patternStr, agg)));
}
}
private final MetricService metricService;
private final DimensionService dimensionService;
private final ParserCommandConverter parserCommandConverter;
public QueryUtils(MetricService metricService,
DimensionService dimensionService,
@Lazy ParserCommandConverter parserCommandConverter) {
this.metricService = metricService;
this.dimensionService = dimensionService;
this.parserCommandConverter = parserCommandConverter;
}
public void checkSqlParse(SqlParserResp sqlParser) {
if (Strings.isNullOrEmpty(sqlParser.getSql()) || Strings.isNullOrEmpty(sqlParser.getSourceId())) {
throw new RuntimeException("parse Exception: " + sqlParser.getErrMsg());
}
}
public boolean isDetailQuery(QueryStructReq queryStructCmd) {
return Objects.nonNull(queryStructCmd) && queryStructCmd.getNativeQuery() && CollectionUtils.isEmpty(
queryStructCmd.getMetrics());
}
public SqlParserResp handleDetail(QueryStructReq queryStructCmd, SqlParserResp sqlParser) {
String sqlRaw = sqlParser.getSql().trim();
if (Strings.isNullOrEmpty(sqlRaw)) {
throw new RuntimeException("sql is empty or null");
}
log.info("before handleDetail, sql:{}", sqlRaw);
String sql = sqlRaw;
if (isDetailQuery(queryStructCmd)) {
String internalMetricName = parserCommandConverter.generateInternalMetricName(queryStructCmd);
// select handle
log.info("size:{}, metric:{}, contain:{}", queryStructCmd.getMetrics().size(), queryStructCmd.getMetrics(),
queryStructCmd.getMetrics().contains(internalMetricName));
if (queryStructCmd.getMetrics().size() == 0) {
Set<String> internalCntSet = new HashSet<>(
Arrays.asList(
String.format(", SUM(%s) AS %s", internalMetricName, internalMetricName),
String.format(", %s AS %s", internalMetricName, internalMetricName))
);
for (String target : internalCntSet) {
sql = sql.replace(target, SPACE);
}
} else {
// dimension + metric
for (Pattern pattern : patterns) {
Matcher matcher = pattern.matcher(sql);
while (matcher.find()) {
String target = matcher.group(1);
String replace = matcher.group(2);
sql = sql.replace(target, replace);
}
}
}
// group handle
String groupTarget = "";
if (sql.contains(GROUP_UPPER)) {
String afterLastGroup = sql.substring(sql.lastIndexOf(GROUP_UPPER));
log.info("afterLastGroup:{}", afterLastGroup);
if (!Strings.isNullOrEmpty(afterLastGroup)) {
int tmp = afterLastGroup.length();
if (afterLastGroup.contains(END_SUBQUERY)) {
tmp = afterLastGroup.indexOf(END_SUBQUERY);
} else if (afterLastGroup.contains(ORDER_UPPER)) {
tmp = afterLastGroup.indexOf(ORDER_UPPER);
} else if (afterLastGroup.contains(LIMIT_UPPER)) {
tmp = afterLastGroup.indexOf(LIMIT_UPPER);
}
groupTarget = afterLastGroup.substring(0, tmp);
}
if (!Strings.isNullOrEmpty(groupTarget)) {
sql = sql.replace(groupTarget, SPACE);
}
}
sqlParser.setSql(sql);
}
log.info("after handleDetail, sql:{}", sqlParser.getSql());
return sqlParser;
}
public void fillItemNameInfo(QueryResultWithSchemaResp queryResultWithColumns, Long domainId) {
List<MetricResp> metricDescList = metricService.getMetrics(domainId);
List<DimensionResp> dimensionDescList = dimensionService.getDimensions(domainId);
Map<String, String> namePair = new HashMap<>();
Map<String, String> nameTypePair = new HashMap<>();
addSysTimeDimension(namePair, nameTypePair);
metricDescList.forEach(metricDesc -> {
namePair.put(metricDesc.getBizName(), metricDesc.getName());
nameTypePair.put(metricDesc.getBizName(), "NUMBER");
});
dimensionDescList.forEach(dimensionDesc -> {
namePair.put(dimensionDesc.getBizName(), dimensionDesc.getName());
nameTypePair.put(dimensionDesc.getBizName(), dimensionDesc.getSemanticType());
});
List<QueryColumn> columns = queryResultWithColumns.getColumns();
columns.forEach(column -> {
String nameEn = column.getNameEn();
if (nameEn.contains(JOIN_UNDERLINE)) {
nameEn = nameEn.split(JOIN_UNDERLINE)[1];
}
if (namePair.containsKey(nameEn)) {
column.setName(namePair.get(nameEn));
}
if (nameTypePair.containsKey(nameEn)) {
column.setShowType(nameTypePair.get(nameEn));
}
});
}
public void fillItemNameInfo(QueryResultWithSchemaResp queryResultWithColumns,
QueryMultiStructReq queryMultiStructCmd) {
List<Aggregator> aggregators = queryMultiStructCmd.getQueryStructCmds().stream()
.flatMap(queryStructCmd -> queryStructCmd.getAggregators().stream())
.collect(Collectors.toList());
log.info("multi agg merge:{}", aggregators);
Map<String, String> metricNameFromAgg = getMetricNameFromAgg(aggregators);
log.info("metricNameFromAgg:{}", metricNameFromAgg);
Map<String, String> namePair = new HashMap<>();
Map<String, String> nameTypePair = new HashMap<>();
addSysTimeDimension(namePair, nameTypePair);
namePair.putAll(metricNameFromAgg);
List<QueryColumn> columns = queryResultWithColumns.getColumns();
columns.forEach(column -> {
String nameEn = column.getNameEn();
if (nameEn.contains(JOIN_UNDERLINE)) {
nameEn = nameEn.split(JOIN_UNDERLINE)[1];
}
if (namePair.containsKey(nameEn)) {
column.setName(namePair.get(nameEn));
} else {
if (nameEn.startsWith("name")) {
column.setName("名称");
} else if (nameEn.startsWith("value")) {
column.setName("指标值");
}
}
if (nameTypePair.containsKey(nameEn)) {
column.setShowType(nameTypePair.get(nameEn));
} else {
if (nameEn.startsWith("name")) {
column.setShowType("CATEGORY");
} else if (nameEn.startsWith("value")) {
column.setShowType("NUMBER");
}
}
});
}
private Map<String, String> getMetricNameFromAgg(List<Aggregator> aggregators) {
    // Maps the positional alias "value{i}" (1-based position in the aggregator
    // list) to the aggregator's Chinese display name; blanks are skipped but the
    // position still advances, matching the alias numbering used in the SQL.
    Map<String, String> aliasToName = new HashMap<>();
    if (CollectionUtils.isEmpty(aggregators)) {
        return aliasToName;
    }
    int position = 0;
    for (Aggregator aggregator : aggregators) {
        position++;
        if (StringUtils.isNotBlank(aggregator.getNameCh())) {
            aliasToName.put("value" + position, aggregator.getNameCh());
        }
    }
    return aliasToName;
}
private static void addSysTimeDimension(Map<String, String> namePair, Map<String, String> nameTypePair) {
    // Every system time dimension renders as a "date" column with show type DATE.
    for (TimeDimensionEnum timeDim : TimeDimensionEnum.values()) {
        String bizName = timeDim.getName();
        namePair.put(bizName, "date");
        nameTypePair.put(bizName, "DATE");
    }
}
}

View File

@@ -0,0 +1,244 @@
package com.tencent.supersonic.semantic.query.domain.utils;
import static com.tencent.supersonic.common.constant.Constants.PARENTHESES_END;
import static com.tencent.supersonic.common.constant.Constants.PARENTHESES_START;
import static com.tencent.supersonic.common.constant.Constants.SPACE;
import static com.tencent.supersonic.common.constant.Constants.SYS_VAR;
import com.tencent.supersonic.semantic.api.query.enums.FilterOperatorEnum;
import com.tencent.supersonic.semantic.api.query.pojo.Criterion;
import com.tencent.supersonic.semantic.api.query.pojo.Filter;
import com.tencent.supersonic.common.constant.Constants;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.StringJoiner;
import java.util.regex.Pattern;
import lombok.extern.slf4j.Slf4j;
import org.apache.logging.log4j.util.Strings;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
@Component
@Slf4j
public class SqlFilterUtils {

    // Matches a value already wrapped in single quotes, e.g. 'foo'. Compiled once:
    // the original used Pattern.matches(String, ...), which recompiles per call.
    private static final Pattern QUOTED_VALUE_PATTERN = Pattern.compile("^'.*?'$");

    /**
     * Collects the biz names of all leaf (FILTER-relation) nodes in the filter trees.
     *
     * @param filters filter tree roots; may be null or empty
     * @return biz names of every leaf filter, empty list when there are none
     */
    public List<String> getFiltersCol(List<Filter> filters) {
        List<String> filterCols = new ArrayList<>();
        if (CollectionUtils.isEmpty(filters)) {
            return filterCols;
        }
        for (Filter filter : filters) {
            filterCols.addAll(getFilterCol(filter));
        }
        return filterCols;
    }

    // Depth-first walk of one filter node; leaves with a non-empty biz name contribute it.
    private List<String> getFilterCol(Filter filter) {
        List<String> filterCols = new ArrayList<>();
        if (Filter.Relation.FILTER.equals(filter.getRelation()) && Strings.isNotEmpty(filter.getBizName())) {
            filterCols.add(filter.getBizName());
        }
        List<Filter> children = filter.getChildren();
        if (!CollectionUtils.isEmpty(children)) {
            for (Filter child : children) {
                filterCols.addAll(getFilterCol(child));
            }
        }
        return filterCols;
    }

    /**
     * Renders the filter list as the body of a SQL where clause, AND-joining each
     * rendered filter. Filters that render empty (e.g. system variables) are skipped.
     */
    public String getWhereClause(List<Filter> filters) {
        if (CollectionUtils.isEmpty(filters)) {
            return "";
        }
        StringJoiner joiner = new StringJoiner(Constants.AND_UPPER);
        for (Filter filter : filters) {
            // render once per filter (the original called dealFilter twice per filter)
            String filterSql = dealFilter(filter);
            if (Strings.isNotEmpty(filterSql)) {
                joiner.add(SPACE + filterSql + SPACE);
            }
        }
        log.info("getWhereClause, where sql : {}", joiner);
        return joiner.toString();
    }

    /**
     * Renders one filter node to SQL. Leaf nodes become a single comparison; composite
     * nodes become a parenthesized AND/OR combination of their children. System-variable
     * filters (biz name ending with the sys-var suffix) render empty.
     */
    public String dealFilter(Filter filter) {
        if (Objects.isNull(filter)) {
            return "";
        }
        if (Strings.isNotEmpty(filter.getBizName()) && filter.getBizName().endsWith(SYS_VAR)) {
            return "";
        }
        if (Filter.Relation.FILTER.equals(filter.getRelation())) {
            return dealSingleFilter(filter);
        }
        StringBuilder condition = new StringBuilder();
        List<Filter> children = filter.getChildren();
        condition.append(PARENTHESES_START);
        StringJoiner joiner = new StringJoiner(SPACE + filter.getRelation().name() + SPACE);
        for (Filter child : children) {
            joiner.add(dealFilter(child));
        }
        condition.append(joiner);
        condition.append(PARENTHESES_END);
        return condition.toString();
    }

    // todo deal metric filter
    // Leaf node: wrap the (column, operator, value) triple into a Criterion and render it.
    private String dealSingleFilter(Filter filter) {
        String name = filter.getBizName();
        Object value = filter.getValue();
        FilterOperatorEnum operator = filter.getOperator();
        String dataType = Criterion.StringDataType.STRING.name();
        Criterion criterion = new Criterion(name, operator, value, dataType);
        return generator(criterion);
    }

    // Dispatches on the operator to the matching SQL fragment generator.
    private String generator(Criterion criterion) {
        log.info("criterion :{}", criterion);
        String sqlPart;
        switch (criterion.getOperator()) {
            case SQL_PART:
                sqlPart = sqlPartLogic(criterion);
                break;
            case IS_NULL:
            case IS_NOT_NULL:
                sqlPart = judgeNullLogic(criterion);
                break;
            case EQUALS:
            case NOT_EQUALS:
            case GREATER_THAN:
            case GREATER_THAN_EQUALS:
            case MINOR_THAN:
            case MINOR_THAN_EQUALS:
                sqlPart = singleValueLogic(criterion);
                break;
            case BETWEEN:
                sqlPart = betweenLogic(criterion);
                break;
            case IN:
            case NOT_IN:
                sqlPart = inLogic(criterion);
                break;
            case LIKE:
                sqlPart = likeLogic(criterion);
                break;
            default:
                throw new IllegalStateException("Unexpected value: " + criterion.getOperator());
        }
        return sqlPart;
    }

    // LIKE renders a prefix match: col like 'value%'. If the value arrives already
    // quoted, its quotes are replaced with % wildcards (legacy behavior, kept as-is).
    private String likeLogic(Criterion criterion) {
        if (Objects.isNull(criterion) || Objects.isNull(criterion.getValue())) {
            throw new RuntimeException("criterion.getValue() can not be null");
        }
        StringBuilder whereClause = new StringBuilder();
        whereClause.append(criterion.getColumn() + SPACE + criterion.getOperator().getValue() + SPACE);
        String value = criterion.getValue().toString();
        if (criterion.isNeedApostrophe() && !QUOTED_VALUE_PATTERN.matcher(value).matches()) {
            // click => 'click%'
            whereClause.append(Constants.APOSTROPHE + value + Constants.PERCENT_SIGN + Constants.APOSTROPHE);
        } else {
            // 'click' => '%click%'
            whereClause.append(Constants.APOSTROPHE + value.replaceAll(Constants.APOSTROPHE, Constants.PERCENT_SIGN)
                    + Constants.APOSTROPHE);
        }
        return whereClause.toString();
    }

    // IN / NOT IN: col in (v1,v2,...), quoting each element when required.
    private String inLogic(Criterion criterion) {
        if (Objects.isNull(criterion) || Objects.isNull(criterion.getValue())) {
            throw new RuntimeException("criterion.getValue() can not be null");
        }
        StringBuilder whereClause = new StringBuilder();
        whereClause.append(criterion.getColumn() + SPACE + criterion.getOperator().getValue() + SPACE);
        List<?> values = (List<?>) criterion.getValue();
        whereClause.append(PARENTHESES_START);
        StringJoiner joiner = new StringJoiner(",");
        if (criterion.isNeedApostrophe()) {
            values.forEach(value -> joiner.add(valueApostropheLogic(value.toString())));
        } else {
            values.forEach(value -> joiner.add(value.toString()));
        }
        whereClause.append(joiner);
        whereClause.append(PARENTHESES_END);
        return whereClause.toString();
    }

    // BETWEEN renders as an inclusive range: (col >= lo and col <= hi).
    private String betweenLogic(Criterion criterion) {
        if (Objects.isNull(criterion) || Objects.isNull(criterion.getValue())) {
            throw new RuntimeException("criterion.getValue() can not be null");
        }
        List<?> values = (List<?>) criterion.getValue();
        if (values.size() != 2) {
            throw new RuntimeException("between value size should be 2");
        }
        if (criterion.isNeedApostrophe()) {
            return String.format("(%s >= %s and %s <= %s)", criterion.getColumn(),
                    valueApostropheLogic(values.get(0).toString()),
                    criterion.getColumn(), valueApostropheLogic(values.get(1).toString()));
        }
        return String.format("(%s >= %s and %s <= %s)", criterion.getColumn(), values.get(0).toString(),
                criterion.getColumn(), values.get(1).toString());
    }

    // Simple binary comparison: col <op> value, quoting the value when required.
    private String singleValueLogic(Criterion criterion) {
        if (Objects.isNull(criterion) || Objects.isNull(criterion.getValue())) {
            throw new RuntimeException("criterion.getValue() can not be null");
        }
        StringBuilder whereClause = new StringBuilder();
        whereClause.append(criterion.getColumn() + SPACE + criterion.getOperator().getValue() + SPACE);
        String value = criterion.getValue().toString();
        if (criterion.isNeedApostrophe()) {
            value = valueApostropheLogic(value);
        }
        whereClause.append(value);
        return whereClause.toString();
    }

    // Wraps a value in single quotes unless it is already quoted.
    private String valueApostropheLogic(String value) {
        if (QUOTED_VALUE_PATTERN.matcher(value).matches()) {
            return value;
        }
        return Constants.APOSTROPHE + value + Constants.APOSTROPHE;
    }

    // IS NULL / IS NOT NULL — the operator itself carries the full predicate text.
    private String judgeNullLogic(Criterion criterion) {
        if (Objects.isNull(criterion) || Objects.isNull(criterion.getColumn())) {
            throw new RuntimeException("criterion.getColumn() can not be null");
        }
        return String.format("( %s %s)", criterion.getColumn(), criterion.getOperator().getValue());
    }

    // Raw SQL fragment supplied by the caller, parenthesized verbatim.
    private String sqlPartLogic(Criterion criterion) {
        if (Objects.isNull(criterion) || Objects.isNull(criterion.getValue())) {
            throw new RuntimeException("criterion.getValue() can not be null");
        }
        return PARENTHESES_START + SPACE + criterion.getValue().toString() + SPACE + PARENTHESES_END;
    }
}

View File

@@ -0,0 +1,78 @@
package com.tencent.supersonic.semantic.query.domain.utils;
import com.tencent.supersonic.semantic.api.core.enums.TimeDimensionEnum;
import com.tencent.supersonic.semantic.api.query.request.QueryStructReq;
import com.tencent.supersonic.common.pojo.Aggregator;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
@Component
@Slf4j
public class SqlGenerateUtils {

    /** Returns " limit N" when a positive limit is set, otherwise an empty string. */
    public String getLimit(QueryStructReq queryStructCmd) {
        if (queryStructCmd.getLimit() > 0) {
            return " limit " + queryStructCmd.getLimit();
        }
        return "";
    }

    /**
     * Builds the select list for a union query: non-time group columns are aliased
     * name1..nameN, aggregated columns are aliased value1..valueN, time dimensions keep
     * their own name.
     *
     * @return comma-joined select list; empty string when there is nothing to select
     */
    public static String getUnionSelect(QueryStructReq queryStructCmd) {
        StringBuilder sb = new StringBuilder();
        int locate = 0;
        for (String group : queryStructCmd.getGroups()) {
            if (!TimeDimensionEnum.getNameList().contains(group)) {
                locate++;
                sb.append(group).append(" as ").append("name").append(locate).append(",");
            } else {
                sb.append(group).append(",");
            }
        }
        locate = 0;
        for (Aggregator agg : queryStructCmd.getAggregators()) {
            locate++;
            sb.append(agg.getColumn()).append(" as ").append("value").append(locate).append(",");
        }
        if (sb.length() == 0) {
            // BUGFIX: with no groups and no aggregators, substring(0, -1) would throw
            return "";
        }
        String selectSql = sb.substring(0, sb.length() - 1);
        log.info("union select sql {}", selectSql);
        return selectSql;
    }

    /** Select list: group columns (if any) followed by the rendered aggregations. */
    public String getSelect(QueryStructReq queryStructCmd) {
        String aggStr = queryStructCmd.getAggregators().stream().map(this::getSelectField)
                .collect(Collectors.joining(","));
        return CollectionUtils.isEmpty(queryStructCmd.getGroups()) ? aggStr
                : String.join(",", queryStructCmd.getGroups()) + "," + aggStr;
    }

    /**
     * Renders one aggregator as "func ( args ) AS column". Without explicit args the
     * column itself is the single argument; non-numeric args other than the column are
     * quoted as string literals.
     */
    public String getSelectField(final Aggregator agg) {
        if (CollectionUtils.isEmpty(agg.getArgs())) {
            return agg.getFunc() + " ( " + agg.getColumn() + " ) AS " + agg.getColumn() + " ";
        }
        return agg.getFunc() + " ( " + agg.getArgs().stream().map(arg ->
                arg.equals(agg.getColumn()) ? arg : (StringUtils.isNumeric(arg) ? arg : ("'" + arg + "'"))
        ).collect(Collectors.joining(",")) + " ) AS " + agg.getColumn() + " ";
    }

    /** "group by g1,g2,..." or empty when there are no group columns. */
    public String getGroupBy(QueryStructReq queryStructCmd) {
        if (CollectionUtils.isEmpty(queryStructCmd.getGroups())) {
            return "";
        }
        return "group by " + String.join(",", queryStructCmd.getGroups());
    }

    /** "order by c1 dir1, c2 dir2, ..." or empty when there are no order clauses. */
    public String getOrderBy(QueryStructReq queryStructCmd) {
        if (CollectionUtils.isEmpty(queryStructCmd.getOrders())) {
            return "";
        }
        return "order by " + queryStructCmd.getOrders().stream()
                .map(order -> " " + order.getColumn() + " " + order.getDirection() + " ")
                .collect(Collectors.joining(","));
    }
}

View File

@@ -0,0 +1,30 @@
package com.tencent.supersonic.semantic.query.domain.utils;
import com.tencent.supersonic.semantic.api.core.response.SqlParserResp;
import com.tencent.supersonic.semantic.api.query.request.QueryStructReq;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
@Slf4j
@Component
public class SqlParserUtils {

    private final ParserCommandConverter parserCommandConverter;
    private final MultiSourceJoinUtils multiSourceJoinUtils;

    public SqlParserUtils(ParserCommandConverter parserCommandConverter, MultiSourceJoinUtils multiSourceJoinUtils) {
        this.parserCommandConverter = parserCommandConverter;
        this.multiSourceJoinUtils = multiSourceJoinUtils;
    }

    /**
     * Builds join prefixes for the struct query command, then converts it into a
     * physical SQL parse result; no caching is consulted.
     */
    public SqlParserResp getSqlParserWithoutCache(QueryStructReq queryStructCmd) throws Exception {
        log.info("stat getSqlParser without cache");
        multiSourceJoinUtils.buildJoinPrefix(queryStructCmd);
        return parserCommandConverter.getSqlParser(queryStructCmd);
    }
}

View File

@@ -0,0 +1,116 @@
package com.tencent.supersonic.semantic.query.domain.utils;
import com.alibaba.ttl.TransmittableThreadLocal;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.tencent.supersonic.auth.api.authentication.pojo.User;
import com.tencent.supersonic.semantic.api.core.enums.QueryTypeBackEnum;
import com.tencent.supersonic.semantic.api.core.enums.QueryTypeEnum;
import com.tencent.supersonic.semantic.api.core.pojo.QueryStat;
import com.tencent.supersonic.semantic.api.query.request.ItemUseReq;
import com.tencent.supersonic.semantic.api.query.request.QueryStructReq;
import com.tencent.supersonic.semantic.api.query.response.ItemUseResp;
import com.tencent.supersonic.common.enums.TaskStatusEnum;
import com.tencent.supersonic.semantic.query.domain.repository.StatRepository;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.CompletableFuture;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.logging.log4j.util.Strings;
import org.springframework.stereotype.Component;
@Component
@Slf4j
public class StatUtils {
private final StatRepository statRepository;
private final SqlFilterUtils sqlFilterUtils;
private final ObjectMapper objectMapper = new ObjectMapper();
private static final TransmittableThreadLocal<QueryStat> STATS = new TransmittableThreadLocal<>();
public StatUtils(StatRepository statRepository,
SqlFilterUtils sqlFilterUtils) {
this.statRepository = statRepository;
this.sqlFilterUtils = sqlFilterUtils;
}
public static QueryStat get() {
return STATS.get();
}
public static void set(QueryStat queryStatInfo) {
STATS.set(queryStatInfo);
}
public static void remove() {
STATS.remove();
}
public void statInfo2DbAsync(TaskStatusEnum state) {
QueryStat queryStatInfo = get();
queryStatInfo.setElapsedMs(System.currentTimeMillis() - queryStatInfo.getStartTime());
queryStatInfo.setQueryState(state.getStatus());
log.info("queryStatInfo: {}", queryStatInfo);
CompletableFuture.runAsync(() -> {
statRepository.createRecord(queryStatInfo);
}).exceptionally(exception -> {
log.warn("queryStatInfo, exception:", exception);
return null;
});
remove();
}
public Boolean updateResultCacheKey(String key) {
STATS.get().setResultCacheKey(key);
return true;
}
public void initStatInfo(QueryStructReq queryStructCmd, User facadeUser) {
QueryStat queryStatInfo = new QueryStat();
String traceId = "";
List<String> dimensions = queryStructCmd.getGroups();
List<String> metrics = new ArrayList<>();
queryStructCmd.getAggregators().stream().forEach(aggregator -> metrics.add(aggregator.getColumn()));
String user = (Objects.nonNull(facadeUser) && Strings.isNotEmpty(facadeUser.getName())) ? facadeUser.getName()
: "Admin";
try {
queryStatInfo.setTraceId(traceId)
.setClassId(queryStructCmd.getDomainId())
.setUser(user)
.setQueryType(QueryTypeEnum.STRUCT.getValue())
.setQueryTypeBack(QueryTypeBackEnum.NORMAL.getState())
.setQueryStructCmd(queryStructCmd.toString())
.setQueryStructCmdMd5(DigestUtils.md5Hex(queryStructCmd.toString()))
.setStartTime(System.currentTimeMillis())
.setNativeQuery(queryStructCmd.getNativeQuery())
.setGroupByCols(objectMapper.writeValueAsString(queryStructCmd.getGroups()))
.setAggCols(objectMapper.writeValueAsString(queryStructCmd.getAggregators()))
.setOrderByCols(objectMapper.writeValueAsString(queryStructCmd.getOrders()))
.setFilterCols(objectMapper.writeValueAsString(
sqlFilterUtils.getFiltersCol(queryStructCmd.getOriginalFilter())))
.setUseResultCache(true)
.setUseSqlCache(true)
.setMetrics(objectMapper.writeValueAsString(metrics))
.setDimensions(objectMapper.writeValueAsString(dimensions));
} catch (JsonProcessingException e) {
e.printStackTrace();
}
StatUtils.set(queryStatInfo);
}
public List<ItemUseResp> getStatInfo(ItemUseReq itemUseCommend) {
return statRepository.getStatInfo(itemUseCommend);
}
public List<QueryStat> getQueryStatInfoWithoutCache(ItemUseReq itemUseCommend) {
return statRepository.getQueryStatInfoWithoutCache(itemUseCommend);
}
}

View File

@@ -0,0 +1,12 @@
package com.tencent.supersonic.semantic.query.domain.utils.calculate;
import com.tencent.supersonic.semantic.api.core.response.SqlParserResp;
import com.tencent.supersonic.semantic.api.query.request.QueryStructReq;
/**
 * Strategy interface for converters that turn a struct query command into a physical
 * SQL parse result, applying a particular metric-calculation shape (e.g. plain
 * aggregation, period-over-period ratio).
 */
public interface CalculateConverter {
    // Whether this converter can handle the given struct query command.
    boolean accept(QueryStructReq queryStructCmd);
    // Converts the command into a physical SQL parse result.
    SqlParserResp getSqlParser(QueryStructReq queryStructCmd) throws Exception;
}

View File

@@ -0,0 +1,108 @@
package com.tencent.supersonic.semantic.query.domain.utils.calculate;
import com.tencent.supersonic.semantic.api.core.response.SqlParserResp;
import com.tencent.supersonic.semantic.api.query.pojo.MetricTable;
import com.tencent.supersonic.semantic.api.query.request.ParseSqlReq;
import com.tencent.supersonic.semantic.api.query.request.QueryStructReq;
import com.tencent.supersonic.common.enums.AggOperatorEnum;
import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.semantic.core.domain.DomainService;
import com.tencent.supersonic.semantic.query.domain.ParserService;
import com.tencent.supersonic.semantic.query.domain.utils.QueryStructUtils;
import com.tencent.supersonic.semantic.query.domain.utils.SqlGenerateUtils;
import java.util.ArrayList;
import java.util.Collections;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
@Component
@Slf4j
public class CalculateConverterAgg implements CalculateConverter {

    // todo ck类型暂不拼with语句 — ClickHouse-backed domain that cannot take the
    // WITH-style agg SQL yet; extracted from an inline magic number, should move
    // into configuration.
    private static final Long CK_DOMAIN_ID = 34L;

    private final DomainService domainService;
    private final ParserService parserService;
    private final CalculateConverterRatio calculateCoverterRatio;
    private final QueryStructUtils queryStructUtils;
    private final SqlGenerateUtils sqlGenerateUtils;

    @Value("${metricParser.agg.default:sum}")
    private String metricAggDefault;

    public CalculateConverterAgg(DomainService domainService,
            ParserService parserService,
            CalculateConverterRatio calculateCoverterRatio,
            @Lazy QueryStructUtils queryStructUtils,
            SqlGenerateUtils sqlGenerateUtils) {
        this.domainService = domainService;
        this.parserService = parserService;
        this.calculateCoverterRatio = calculateCoverterRatio;
        this.queryStructUtils = queryStructUtils;
        this.sqlGenerateUtils = sqlGenerateUtils;
    }

    /**
     * Builds the ParseSqlReq for an aggregated struct query. Ratio (同环比) queries are
     * delegated to the ratio converter; everything else selects from a single logical
     * metric table with group-by / order-by / limit appended.
     */
    public ParseSqlReq generateSqlCommend(QueryStructReq queryStructCmd) throws Exception {
        // 同环比 (period-over-period ratio) uses a dedicated SQL shape
        if (calculateCoverterRatio.accept(queryStructCmd)) {
            return calculateCoverterRatio.generateSqlCommand(queryStructCmd);
        }
        ParseSqlReq sqlCommand = new ParseSqlReq();
        sqlCommand.setRootPath(domainService.getDomainFullPath(queryStructCmd.getDomainId()));
        String metricTableName = "metric_tb";
        MetricTable metricTable = new MetricTable();
        metricTable.setAlias(metricTableName);
        metricTable.setMetrics(queryStructCmd.getMetrics());
        metricTable.setDimensions(queryStructCmd.getGroups());
        String where = queryStructUtils.generateWhere(queryStructCmd);
        log.info("in generateSqlCommand, complete where:{}", where);
        metricTable.setWhere(where);
        metricTable.setAgg(true);
        sqlCommand.setTables(new ArrayList<>(Collections.singletonList(metricTable)));
        String sql = String.format("select %s from %s %s %s %s", sqlGenerateUtils.getSelect(queryStructCmd),
                metricTableName,
                sqlGenerateUtils.getGroupBy(queryStructCmd), sqlGenerateUtils.getOrderBy(queryStructCmd),
                sqlGenerateUtils.getLimit(queryStructCmd));
        sqlCommand.setSql(sql);
        return sqlCommand;
    }

    /**
     * Accepts aggregated (non-native) queries on non-ClickHouse domains where every
     * aggregator carries a known, non-empty function.
     */
    @Override
    public boolean accept(QueryStructReq queryStructCmd) {
        if (queryStructCmd.getNativeQuery()) {
            return false;
        }
        if (CollectionUtils.isEmpty(queryStructCmd.getAggregators())) {
            return false;
        }
        if (queryStructCmd.getDomainId().equals(CK_DOMAIN_ID)) {
            return false;
        }
        for (Aggregator agg : queryStructCmd.getAggregators()) {
            if (agg.getFunc() == null || "".equals(agg.getFunc())) {
                return false;
            }
            if (agg.getFunc().equals(AggOperatorEnum.UNKNOWN)) {
                return false;
            }
        }
        // The original counted aggregators with a non-null func here; after the checks
        // above that is every aggregator, and the list is non-empty, so the count was
        // always positive — simplified to an unconditional accept.
        return true;
    }

    @Override
    public SqlParserResp getSqlParser(QueryStructReq queryStructCmd) throws Exception {
        return parserService.physicalSql(generateSqlCommend(queryStructCmd));
    }
}

View File

@@ -0,0 +1,271 @@
package com.tencent.supersonic.semantic.query.domain.utils.calculate;
import com.tencent.supersonic.semantic.api.core.response.SqlParserResp;
import com.tencent.supersonic.semantic.api.query.enums.FilterOperatorEnum;
import com.tencent.supersonic.semantic.api.query.pojo.Filter;
import com.tencent.supersonic.semantic.api.query.pojo.MetricTable;
import com.tencent.supersonic.semantic.api.query.request.ParseSqlReq;
import com.tencent.supersonic.semantic.api.query.request.QueryStructReq;
import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.semantic.core.domain.DomainService;
import com.tencent.supersonic.semantic.query.domain.utils.QueryStructUtils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
/**
*
*/
@Slf4j
@Component
public class CalculateConverterRatio implements CalculateConverter {
private final DomainService domainService;
private final QueryStructUtils queryStructUtils;
@Value("${metricParser.agg.ratio_roll.name:ratio_roll}")
private String metricAggRatioRollName;
@Value("${metricParser.agg.ratio_over.week:ratio_over_week}") // week over
private String metricAggRatioOverWeek;
@Value("${metricParser.agg.ratio_over.month:ratio_over_month}")
private String metricAggRatioOverMonth;
@Value("${metricParser.agg.ratio_over.quarter:ratio_over_quarter}")
private String metricAggRatioOverQuarter;
@Value("${metricParser.agg.ratio_over.year:ratio_over_year}")
private String metricAggRatioOverYear;
private List<String> dateGrain = new ArrayList<>(Arrays.asList("day", "week", "month", "year", "quarter"));
private Set<String> aggFunctionsOver = new HashSet<>(
Arrays.asList(metricAggRatioOverWeek, metricAggRatioOverMonth, metricAggRatioOverQuarter,
metricAggRatioOverYear));
public CalculateConverterRatio(DomainService domainService,
@Lazy QueryStructUtils queryStructUtils) {
this.domainService = domainService;
this.queryStructUtils = queryStructUtils;
}
public boolean accept(QueryStructReq queryStructCmd) {
Long ratioFuncNum = queryStructCmd.getAggregators().stream()
.filter(f -> f.getArgs() != null && f.getArgs().get(0) != null
&& metricAggRatioRollName.equalsIgnoreCase(f.getArgs().get(0))).count();
if (ratioFuncNum > 0) {
return true;
}
return false;
}
public SqlParserResp getSqlParser(QueryStructReq queryStructCmd) throws Exception {
return null;
}
/**
* @param queryStructCmd
* @return
*/
public ParseSqlReq generateSqlCommand(QueryStructReq queryStructCmd) throws Exception {
check(queryStructCmd);
ParseSqlReq sqlCommand = new ParseSqlReq();
sqlCommand.setRootPath(domainService.getDomainFullPath(queryStructCmd.getDomainId()));
String metricTableName = "metric_tb";
MetricTable metricTable = new MetricTable();
metricTable.setAlias(metricTableName);
metricTable.setMetrics(queryStructCmd.getMetrics());
metricTable.setDimensions(queryStructCmd.getGroups());
String where = queryStructUtils.generateWhere(queryStructCmd);
log.info("in generateSqlCommend, complete where:{}", where);
// metricTable.setWhere(queryStructCmd.getWhereClause());
metricTable.setWhere(where);
metricTable.setAgg(false);
sqlCommand.setTables(new ArrayList<>(Collections.singletonList(metricTable)));
String sqlInner = String.format("select %s from %s %s ", getSelect(queryStructCmd), metricTableName,
getGroupBy(queryStructCmd));
String sql = String.format(
"select %s from ( select %s , %s from ( %s ) metric_tb_inner_1 ) metric_tb_src %s %s ",
getOverSelect(queryStructCmd), getSelect(queryStructCmd, true), getLeadSelect(queryStructCmd), sqlInner,
getOrderBy(queryStructCmd), getLimit(queryStructCmd));
sqlCommand.setSql(sql);
return sqlCommand;
}
private String getOverSelect(QueryStructReq queryStructCmd) {
String timeDim = getTimeDim(queryStructCmd);
String timeSpan = "INTERVAL " + getTimeSpan(queryStructCmd);
String aggStr = queryStructCmd.getAggregators().stream().map(f -> {
if (f.getArgs() != null && f.getArgs().size() > 0) {
return String.format("if(%s = date_add(%s_roll,%s) and %s_roll!=0, (%s-%s_roll)/%s_roll , 0) as %s",
timeDim, timeDim, timeSpan, f.getColumn(), f.getColumn(), f.getColumn(), f.getColumn(),
f.getColumn());
} else {
return f.getColumn();
}
}).collect(Collectors.joining(","));
return CollectionUtils.isEmpty(queryStructCmd.getGroups()) ? aggStr
: String.join(",", queryStructCmd.getGroups()) + "," + aggStr;
}
private String getLeadSelect(QueryStructReq queryStructCmd) {
String timeDim = getTimeDim(queryStructCmd);
String groupDimWithOutTime = getGroupDimWithOutTime(queryStructCmd);
String aggStr = queryStructCmd.getAggregators().stream().map(f -> {
if (f.getArgs() != null && f.getArgs().size() > 0 && f.getArgs().get(0)
.equalsIgnoreCase(metricAggRatioRollName)) {
return String.format("lead(%s ,1,0) over ( %s order by %s desc) as %s_roll", f.getColumn(),
!groupDimWithOutTime.isEmpty() ? " partition by " + groupDimWithOutTime : "", timeDim,
f.getColumn());
} else {
return "";
}
}).filter(f -> !f.isEmpty()).collect(Collectors.joining(","));
String timeDimLead = String.format("lead(cast(%s as string) ,1,'') over ( %s order by %s desc) as %s_roll",
timeDim, !groupDimWithOutTime.isEmpty() ? " partition by " + groupDimWithOutTime : "", timeDim,
timeDim);
return timeDimLead + " , " + aggStr;
}
private String getTimeSpan(QueryStructReq queryStructCmd) {
String timeGrain = getTimeDimGrain(queryStructCmd).toLowerCase();
if ("week".equalsIgnoreCase(timeGrain)) {
return "7 day";
}
if ("quarter".equalsIgnoreCase(timeGrain)) {
return "3 month";
}
return "1 " + timeGrain;
}
private String getTimeDimGrain(QueryStructReq queryStructCmd) {
String grain = queryStructCmd.getAggregators().stream().map(f -> {
if (f.getArgs() != null && f.getArgs().size() > 1 && f.getArgs().get(0)
.equalsIgnoreCase(metricAggRatioRollName)) {
return f.getArgs().get(1);
}
return "";
}).filter(f -> !f.isEmpty()).findFirst().orElse("");
return grain.isEmpty() ? "day" : grain;
}
private String getGroupDimWithOutTime(QueryStructReq queryStructCmd) {
String timeDim = getTimeDim(queryStructCmd);
return queryStructCmd.getGroups().stream().filter(f -> !f.equalsIgnoreCase(timeDim))
.collect(Collectors.joining(","));
}
private String getTimeDim(QueryStructReq queryStructCmd) {
String dsField = "";
String dsStart = "";
String dsEnd = "";
for (Filter filter : queryStructCmd.getOriginalFilter()) {
if (Filter.Relation.FILTER.equals(filter.getRelation())) {
// TODO get parameters from DateInfo
if ("DATE".equalsIgnoreCase(filter.getRelation().name())) {
if (FilterOperatorEnum.GREATER_THAN_EQUALS.getValue()
.equalsIgnoreCase(filter.getOperator().toString())
|| FilterOperatorEnum.GREATER_THAN.getValue()
.equalsIgnoreCase(filter.getOperator().toString())) {
dsField = filter.getBizName();
dsStart = filter.getValue().toString();
}
if (FilterOperatorEnum.MINOR_THAN_EQUALS.getValue()
.equalsIgnoreCase(filter.getOperator().toString())
|| FilterOperatorEnum.MINOR_THAN.getValue()
.equalsIgnoreCase(filter.getOperator().toString())) {
dsField = filter.getBizName();
dsEnd = filter.getValue().toString();
}
}
}
}
return dsField;
}
private String getLimit(QueryStructReq queryStructCmd) {
if (queryStructCmd.getLimit() > 0) {
return " limit " + String.valueOf(queryStructCmd.getLimit());
}
return "";
}
private String getSelect(QueryStructReq queryStructCmd) {
return getSelect(queryStructCmd, false);
}
private String getSelect(QueryStructReq queryStructCmd, boolean isRatio) {
String aggStr = queryStructCmd.getAggregators().stream().map(f -> getSelectField(f, isRatio))
.collect(Collectors.joining(","));
return CollectionUtils.isEmpty(queryStructCmd.getGroups()) ? aggStr
: String.join(",", queryStructCmd.getGroups()) + "," + aggStr;
}
private String getSelectField(final Aggregator agg, boolean isRatio) {
if (!CollectionUtils.isEmpty(agg.getArgs()) && agg.getArgs().size() > 0) {
if (agg.getArgs().get(0).equalsIgnoreCase(metricAggRatioRollName)) {
if (isRatio) {
return agg.getColumn();
}
return agg.getFunc().name().isEmpty() ? agg.getColumn()
: agg.getFunc() + "( " + agg.getColumn() + " ) AS " + agg.getColumn() + " ";
}
}
if (CollectionUtils.isEmpty(agg.getArgs())) {
return agg.getFunc() + "( " + agg.getColumn() + " ) AS " + agg.getColumn() + " ";
}
return agg.getFunc() + "( " + agg.getArgs().stream().map(arg ->
arg.equals(agg.getColumn()) ? arg : (StringUtils.isNumeric(arg) ? arg : ("'" + arg + "'"))
).collect(Collectors.joining(",")) + " ) AS " + agg.getColumn() + " ";
}
private String getGroupBy(QueryStructReq queryStructCmd) {
if (CollectionUtils.isEmpty(queryStructCmd.getGroups())) {
return "";
}
return "group by " + String.join(",", queryStructCmd.getGroups());
}
private String getOrderBy(QueryStructReq queryStructCmd) {
if (CollectionUtils.isEmpty(queryStructCmd.getOrders())) {
return "";
}
return "order by " + queryStructCmd.getOrders().stream()
.map(order -> " " + order.getColumn() + " " + order.getDirection() + " ")
.collect(Collectors.joining(","));
}
private void check(QueryStructReq queryStructCmd) throws Exception {
Set<String> aggFunctions = queryStructCmd.getAggregators().stream()
.filter(f -> f.getArgs() != null && f.getArgs().get(0) != null)
.map(agg -> agg.getArgs().get(0).toLowerCase()).collect(Collectors.toSet());
Long ratioOverNum = aggFunctions.stream().filter(aggFunctionsOver::contains).count();
if (ratioOverNum > 0) {
throw new Exception("not support over ratio");
}
if (aggFunctions.contains(metricAggRatioRollName)) {
if (ratioOverNum > 0) {
throw new Exception("not support over ratio and roll ratio together ");
}
}
if (getTimeDim(queryStructCmd).isEmpty()) {
throw new Exception("miss time filter");
}
String timeDimGrain = getTimeDimGrain(queryStructCmd).toLowerCase();
if (!dateGrain.contains(timeDimGrain)) {
throw new Exception("second arg must be [day week month year quarter] ");
}
}
}

View File

@@ -0,0 +1,15 @@
package com.tencent.supersonic.semantic.query.infrastructure.mapper;
import com.tencent.supersonic.semantic.api.core.pojo.QueryStat;
import com.tencent.supersonic.semantic.api.query.request.ItemUseReq;
import java.util.List;
import org.apache.ibatis.annotations.Mapper;
/**
 * MyBatis mapper for persisting and querying query-stat records
 * (SQL statements live in the corresponding mapper configuration).
 */
@Mapper
public interface StatMapper {
    // Inserts one query-stat record; returns whether the insert succeeded.
    Boolean createRecord(QueryStat queryStatInfo);
    // Loads raw query-stat rows matching the item-use request.
    List<QueryStat> getStatInfo(ItemUseReq itemUseCommend);
}

View File

@@ -0,0 +1,98 @@
package com.tencent.supersonic.semantic.query.infrastructure.repository;
import static com.tencent.supersonic.common.constant.Constants.AT_SYMBOL;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.tencent.supersonic.semantic.api.core.pojo.QueryStat;
import com.tencent.supersonic.semantic.api.query.request.ItemUseReq;
import com.tencent.supersonic.semantic.api.query.response.ItemUseResp;
import com.tencent.supersonic.common.enums.TypeEnums;
import com.tencent.supersonic.semantic.query.domain.repository.StatRepository;
import com.tencent.supersonic.semantic.query.infrastructure.mapper.StatMapper;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.apache.logging.log4j.util.Strings;
import org.springframework.stereotype.Repository;
@Slf4j
@Repository
public class StatRepositoryImpl implements StatRepository {

    // Composite key separator: domainId@@type@@bizName.
    private static final String KEY_SEPARATOR = AT_SYMBOL + AT_SYMBOL;

    private final StatMapper statMapper;
    private final ObjectMapper mapper = new ObjectMapper();

    public StatRepositoryImpl(StatMapper statMapper) {
        this.statMapper = statMapper;
    }

    @Override
    public Boolean createRecord(QueryStat queryStatInfo) {
        return statMapper.createRecord(queryStatInfo);
    }

    /**
     * Aggregates usage counts per (domain, type, item) from the raw query stats and
     * returns them sorted by count descending.
     */
    @Override
    public List<ItemUseResp> getStatInfo(ItemUseReq itemUseCommend) {
        List<QueryStat> statInfos = statMapper.getStatInfo(itemUseCommend);
        Map<String, Long> map = new ConcurrentHashMap<>();
        statInfos.forEach(stat -> {
            updateStatMapInfo(map, stat.getDimensions(), TypeEnums.DIMENSION.getName(), stat.getDomainId());
            updateStatMapInfo(map, stat.getMetrics(), TypeEnums.METRIC.getName(), stat.getDomainId());
        });
        List<ItemUseResp> result = new ArrayList<>();
        map.forEach((k, v) -> {
            // split the composite key once instead of three times
            String[] parts = k.split(KEY_SEPARATOR);
            result.add(new ItemUseResp(Long.parseLong(parts[0]), parts[1], parts[2], v));
        });
        return result.stream().sorted(Comparator.comparing(ItemUseResp::getUseCnt).reversed())
                .collect(Collectors.toList());
    }

    @Override
    public List<QueryStat> getQueryStatInfoWithoutCache(ItemUseReq itemUseCommend) {
        return statMapper.getStatInfo(itemUseCommend);
    }

    // Parses the JSON-encoded list of item biz names and bumps the usage counter for
    // each one under the domainId@@type@@item key. Malformed JSON is logged and skipped.
    private void updateStatMapInfo(Map<String, Long> map, String itemsJson, String type, Long domainId) {
        if (Strings.isNotEmpty(itemsJson)) {
            try {
                List<String> items = mapper.readValue(itemsJson, new TypeReference<List<String>>() {
                });
                items.forEach(item ->
                        map.merge(domainId + KEY_SEPARATOR + type + KEY_SEPARATOR + item, 1L, Long::sum));
            } catch (Exception e) {
                // BUGFIX: was log.warn("e:{}", e) — pass the throwable so the stack trace is kept
                log.warn("updateStatMapInfo failed to parse item list", e);
            }
        }
    }
}

View File

@@ -0,0 +1,65 @@
package com.tencent.supersonic.semantic.query.rest;
import com.tencent.supersonic.auth.api.authentication.pojo.User;
import com.tencent.supersonic.auth.api.authentication.utils.UserHolder;
import com.tencent.supersonic.semantic.api.query.request.ItemUseReq;
import com.tencent.supersonic.semantic.api.query.request.QueryMultiStructReq;
import com.tencent.supersonic.semantic.api.query.request.QuerySqlReq;
import com.tencent.supersonic.semantic.api.query.request.QueryStructReq;
import com.tencent.supersonic.semantic.api.query.response.ItemUseResp;
import com.tencent.supersonic.semantic.query.domain.QueryService;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
@RestController
@RequestMapping("/api/semantic/query")
public class QueryController {

    @Autowired
    private QueryService queryService;

    /**
     * Executes a SQL-form query against the semantic layer.
     *
     * @param querySqlReq the SQL request payload
     * @return the query result as produced by QueryService
     */
    @PostMapping("/sql")
    public Object queryBySql(@RequestBody QuerySqlReq querySqlReq) throws Exception {
        return queryService.queryBySql(querySqlReq);
    }

    /**
     * Executes a structured (struct) query, resolving the current user from the
     * request so permission scoping can be applied downstream.
     */
    @PostMapping("/struct")
    public Object queryByStruct(@RequestBody QueryStructReq queryStructReq,
            HttpServletRequest request,
            HttpServletResponse response) throws Exception {
        User user = UserHolder.findUser(request, response);
        return queryService.queryByStruct(queryStructReq, user, request);
    }

    /**
     * Executes several structured queries in one call, on behalf of the
     * resolved user.
     */
    @PostMapping("/multiStruct")
    public Object queryByMultiStruct(@RequestBody QueryMultiStructReq queryMultiStructReq,
            HttpServletRequest request,
            HttpServletResponse response) throws Exception {
        User user = UserHolder.findUser(request, response);
        return queryService.queryByMultiStruct(queryMultiStructReq, user);
    }

    /**
     * Returns usage-frequency statistics for metrics/dimensions matching the
     * given filter.
     *
     * @param itemUseReq filter conditions (domain, metric, start time)
     * @return per-item usage counts
     */
    @PostMapping("/stat")
    public List<ItemUseResp> getStatInfo(@RequestBody ItemUseReq itemUseReq) {
        return queryService.getStatInfo(itemUseReq);
    }
}

View File

@@ -0,0 +1,69 @@
package com.tencent.supersonic.semantic.query.rest;
import com.github.pagehelper.PageInfo;
import com.tencent.supersonic.auth.api.authentication.pojo.User;
import com.tencent.supersonic.auth.api.authentication.utils.UserHolder;
import com.tencent.supersonic.semantic.api.core.request.DomainSchemaFilterReq;
import com.tencent.supersonic.semantic.api.core.request.PageDimensionReq;
import com.tencent.supersonic.semantic.api.core.request.PageMetricReq;
import com.tencent.supersonic.semantic.api.core.response.DimensionResp;
import com.tencent.supersonic.semantic.api.core.response.DomainResp;
import com.tencent.supersonic.semantic.api.core.response.DomainSchemaResp;
import com.tencent.supersonic.semantic.api.core.response.MetricResp;
import com.tencent.supersonic.semantic.query.domain.SchemaService;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
@RestController
@RequestMapping("/api/semantic/schema")
public class SchemaController {

    @Autowired
    private SchemaService schemaService;

    /**
     * Fetches the schema of the domains matching the filter, scoped to the
     * requesting user (resolved from the HTTP request).
     */
    @PostMapping
    public List<DomainSchemaResp> fetchDomainSchema(@RequestBody DomainSchemaFilterReq filter,
            HttpServletRequest request,
            HttpServletResponse response) {
        User user = UserHolder.findUser(request, response);
        return schemaService.fetchDomainSchema(filter, user);
    }

    /**
     * Lists domains for the requesting user via the admin view.
     */
    @GetMapping("/domain/list")
    public List<DomainResp> getDomainList(HttpServletRequest request,
            HttpServletResponse response) {
        User user = UserHolder.findUser(request, response);
        return schemaService.getDomainListForAdmin(user);
    }

    /**
     * Pages through dimensions matching the request, scoped to the user.
     */
    @PostMapping("/dimension/page")
    public PageInfo<DimensionResp> queryDimension(@RequestBody PageDimensionReq pageDimensionCmd,
            HttpServletRequest request,
            HttpServletResponse response) {
        User user = UserHolder.findUser(request, response);
        return schemaService.queryDimension(pageDimensionCmd, user);
    }

    /**
     * Pages through metrics matching the request, scoped to the user.
     */
    @PostMapping("/metric/page")
    public PageInfo<MetricResp> queryMetric(@RequestBody PageMetricReq pageMetricCmd,
            HttpServletRequest request,
            HttpServletResponse response) {
        User user = UserHolder.findUser(request, response);
        return schemaService.queryMetric(pageMetricCmd, user);
    }
}

View File

@@ -0,0 +1,74 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
        "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<!-- MyBatis mapping for query-statistics persistence (StatMapper):
     one insert into s2_query_stat_info and one filtered select. -->
<mapper namespace="com.tencent.supersonic.semantic.query.infrastructure.mapper.StatMapper">
    <!-- Column-to-property mapping for QueryStat rows.
         NOTE(review): neither statement below references this resultMap
         (the select uses resultType); confirm whether it is still needed. -->
    <resultMap id="QueryStatDO"
            type="com.tencent.supersonic.semantic.api.core.pojo.QueryStat">
        <id column="id" property="id"/>
        <result column="trace_id" property="traceId"/>
        <result column="domain_id" property="domainId"/>
        <result column="user" property="user"/>
        <result column="created_at" property="createdAt"/>
        <result column="query_type" property="queryType"/>
        <result column="query_type_back" property="queryTypeBack"/>
        <result column="query_sql_cmd" property="querySqlCmd"/>
        <result column="sql_cmd_md5" property="querySqlCmdMd5"/>
        <result column="query_struct_cmd" property="queryStructCmd"/>
        <result column="struct_cmd_md5" property="queryStructCmdMd5"/>
        <result column="sql" property="sql"/>
        <result column="sql_md5" property="sqlMd5"/>
        <result column="query_engine" property="queryEngine"/>
        <result column="elapsed_ms" property="elapsedMs"/>
        <result column="query_state" property="queryState"/>
        <result column="native_query" property="nativeQuery"/>
        <result column="start_date" property="startDate"/>
        <result column="end_date" property="endDate"/>
        <result column="dimensions" property="dimensions"/>
        <result column="metrics" property="metrics"/>
        <result column="select_cols" property="selectCols"/>
        <result column="agg_cols" property="aggCols"/>
        <result column="filter_cols" property="filterCols"/>
        <result column="group_by_cols" property="groupByCols"/>
        <result column="order_by_cols" property="orderByCols"/>
        <result column="use_result_cache" property="useResultCache"/>
        <result column="use_sql_cache" property="useSqlCache"/>
        <result column="sql_cache_key" property="sqlCacheKey"/>
        <result column="result_cache_key" property="resultCacheKey"/>
    </resultMap>
    <!-- Inserts one statistics record; `user` and `sql` are backquoted
         because they are MySQL reserved/keyword identifiers. -->
    <insert id="createRecord">
        insert into s2_query_stat_info
        (
        trace_id, domain_id, `user`, query_type, query_type_back, query_sql_cmd, sql_cmd_md5, query_struct_cmd, struct_cmd_md5, `sql`, sql_md5, query_engine,
        elapsed_ms, query_state, native_query, start_date, end_date, dimensions, metrics, select_cols, agg_cols, filter_cols, group_by_cols,
        order_by_cols, use_result_cache, use_sql_cache, sql_cache_key, result_cache_key
        )
        values
        (
        #{traceId}, #{domainId}, #{user}, #{queryType}, #{queryTypeBack}, #{querySqlCmd}, #{querySqlCmdMd5}, #{queryStructCmd}, #{queryStructCmdMd5}, #{sql}, #{sqlMd5}, #{queryEngine},
        #{elapsedMs}, #{queryState}, #{nativeQuery}, #{startDate}, #{endDate}, #{dimensions}, #{metrics}, #{selectCols}, #{aggCols}, #{filterCols}, #{groupByCols},
        #{orderByCols}, #{useResultCache}, #{useSqlCache}, #{sqlCacheKey}, #{resultCacheKey}
        )
    </insert>
    <!-- Filters are optional and ANDed when present.
         NOTE(review): 'start_time' does not appear in the s2_query_stat_info
         DDL shipped in this commit (it defines created_at / start_date);
         confirm the intended column before running against a real schema. -->
    <select id="getStatInfo"
            resultType="com.tencent.supersonic.semantic.api.core.pojo.QueryStat">
        select *
        from s2_query_stat_info
        <where>
            <if test="startTime != null">
                and start_time >= #{startTime}
            </if>
            <if test="domainId != null">
                and domain_id = #{domainId}
            </if>
            <if test="metric != null">
                and metrics like concat('%',#{metric},'%')
            </if>
        </where>
    </select>
</mapper>

View File

@@ -0,0 +1,34 @@
-- Query statistics table: one row per executed semantic-layer query.
-- Read by StatMapper.getStatInfo for usage/frequency aggregation.
-- NOTE(review): StatMapper.xml filters on a `start_time` column that is not
-- defined below (only created_at / start_date exist) - confirm which is intended.
CREATE TABLE `s2_query_stat_info` (
  `id` bigint(20) unsigned NOT NULL AUTO_INCREMENT,
  `trace_id` varchar(200) DEFAULT NULL COMMENT '查询标识',
  `domain_id` bigint(20) DEFAULT NULL COMMENT '主题域ID',
  `user` varchar(200) DEFAULT NULL COMMENT '执行sql的用户',
  `created_at` datetime DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',
  `query_type` varchar(200) DEFAULT NULL COMMENT '查询对应的场景',
  `query_type_back` int(10) DEFAULT '0' COMMENT '查询类型, 0-正常查询, 1-预刷类型',
  `query_sql_cmd` mediumtext COMMENT '对应查询的struct',
  `sql_cmd_md5` varchar(200) DEFAULT NULL COMMENT 'sql md5值',
  `query_struct_cmd` mediumtext COMMENT '对应查询的struct',
  `struct_cmd_md5` varchar(200) DEFAULT NULL COMMENT 'sql md5值',
  `sql` mediumtext COMMENT '对应查询的sql',
  `sql_md5` varchar(200) DEFAULT NULL COMMENT 'sql md5值',
  `query_engine` varchar(20) DEFAULT NULL COMMENT '查询引擎',
  `elapsed_ms` bigint(10) DEFAULT NULL COMMENT '查询耗时',
  `query_state` varchar(20) DEFAULT NULL COMMENT '查询最终状态',
  `native_query` int(10) DEFAULT NULL COMMENT '1-明细查询,0-聚合查询',
  `start_date` varchar(50) DEFAULT NULL COMMENT 'sql开始日期',
  `end_date` varchar(50) DEFAULT NULL COMMENT 'sql结束日期',
  `dimensions` mediumtext COMMENT 'sql 涉及的维度',
  `metrics` mediumtext COMMENT 'sql 涉及的指标',
  `select_cols` mediumtext COMMENT 'sql select部分涉及的标签',
  `agg_cols` mediumtext COMMENT 'sql agg部分涉及的标签',
  `filter_cols` mediumtext COMMENT 'sql where部分涉及的标签',
  `group_by_cols` mediumtext COMMENT 'sql grouy by部分涉及的标签',
  `order_by_cols` mediumtext COMMENT 'sql order by部分涉及的标签',
  -- NOTE(review): the COMMENT text on use_result_cache repeats the sql-cache
  -- wording; presumably it should describe the RESULT cache - confirm.
  `use_result_cache` tinyint(1) DEFAULT '-1' COMMENT '是否命中sql缓存',
  `use_sql_cache` tinyint(1) DEFAULT '-1' COMMENT '是否命中sql缓存',
  `sql_cache_key` mediumtext COMMENT '缓存的key',
  `result_cache_key` mediumtext COMMENT '缓存的key',
  PRIMARY KEY (`id`),
  KEY `domain_index` (`domain_id`)
) COMMENT='查询统计信息表'

View File

@@ -0,0 +1,18 @@
-- Semantic-layer SQL parse log: one row per parsed query.
-- NOTE(review): the table name 's2_semantic_pasre_info' looks like a typo of
-- 'parse'; renaming would break any code referencing it, so it is left as-is.
-- NOTE(review): `limit` below has no space between COMMENT and the string
-- literal; MySQL tolerates this, but confirm against the target dialect.
CREATE TABLE IF NOT EXISTS `s2_semantic_pasre_info` (
  `id` bigint(20) unsigned NOT NULL AUTO_INCREMENT,
  `trace_id` varchar(200) NOT NULL COMMENT '查询标识' ,
  `domain_id` bigint(20) NOT NULL COMMENT '主题域ID',
  `dimensions` mediumtext COMMENT '查询相关的维度信息',
  `metrics` mediumtext COMMENT '查询相关的指标信息',
  `orders` mediumtext COMMENT '查询相关的排序信息',
  `filters` mediumtext COMMENT '查询相关的过滤信息',
  `date_info` mediumtext COMMENT '查询相关的日期信息',
  `limit` bigint(20) NOT NULL COMMENT'查询相关的limit信息',
  `native_query` tinyint(1) NOT NULL DEFAULT '0' COMMENT '1-明细查询,0-聚合查询',
  `sql` mediumtext COMMENT '解析后的sql',
  `created_at` datetime NOT NULL COMMENT '创建时间',
  `created_by` varchar(100) NOT NULL COMMENT '创建人',
  `status` int(10) NOT NULL COMMENT '运行状态',
  `elapsed_ms` bigint(10) DEFAULT NULL COMMENT 'sql解析耗时',
  PRIMARY KEY (`id`)
)COMMENT='语义层sql解析信息表'

View File

@@ -0,0 +1,253 @@
package com.tencent.supersonic.semantic.query.domain.parser;
import com.tencent.supersonic.semantic.api.core.pojo.yaml.DatasourceYamlTpl;
import com.tencent.supersonic.semantic.api.core.pojo.yaml.DimensionTimeTypeParamsTpl;
import com.tencent.supersonic.semantic.api.core.pojo.yaml.DimensionYamlTpl;
import com.tencent.supersonic.semantic.api.core.pojo.yaml.IdentifyYamlTpl;
import com.tencent.supersonic.semantic.api.core.pojo.yaml.MeasureYamlTpl;
import com.tencent.supersonic.semantic.api.core.pojo.yaml.MetricTypeParamsYamlTpl;
import com.tencent.supersonic.semantic.api.core.pojo.yaml.MetricYamlTpl;
import com.tencent.supersonic.semantic.api.core.response.SqlParserResp;
import com.tencent.supersonic.semantic.api.query.request.MetricReq;
import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.semantic.query.domain.parser.convertor.planner.AggPlanner;
import com.tencent.supersonic.semantic.query.domain.parser.schema.SemanticSchema;
import com.tencent.supersonic.semantic.query.application.SemanticSchemaManagerImpl;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import lombok.extern.slf4j.Slf4j;
@Slf4j
class SemanticParserServiceTest {

    // Declared but never populated or read in this class; presumably kept for
    // parity with the production schema manager - TODO confirm it is needed.
    private static Map<String, SemanticSchema> semanticSchemaMap = new HashMap<>();

    /**
     * Plans a MetricReq against the given schema and returns the generated SQL,
     * or a response carrying an error message on failure.
     *
     * @param semanticSchema schema to plan against; null yields an error response
     * @param metricCommand  the metric query; rootPath must be non-empty
     * @param isAgg          whether to plan an aggregated query
     * @return response carrying sql/sourceId on success, errMsg otherwise
     */
    public static SqlParserResp parser(SemanticSchema semanticSchema, MetricReq metricCommand, boolean isAgg) {
        SqlParserResp sqlParser = new SqlParserResp();
        if (metricCommand.getRootPath().isEmpty()) {
            sqlParser.setErrMsg("rootPath empty");
            return sqlParser;
        }
        try {
            if (semanticSchema == null) {
                sqlParser.setErrMsg("semanticSchema not found");
                return sqlParser;
            }
            AggPlanner aggBuilder = new AggPlanner(semanticSchema);
            aggBuilder.explain(metricCommand, isAgg);
            sqlParser.setSql(aggBuilder.getSql());
            sqlParser.setSourceId(aggBuilder.getSourceId());
        } catch (Exception e) {
            sqlParser.setErrMsg(e.getMessage());
            // NOTE(review): with two placeholders and args (metricCommand, e), the
            // exception fills the second placeholder and SLF4J logs only its
            // toString(); pass e as a trailing argument without a placeholder to
            // capture the stack trace.
            log.error("parser error MetricCommand[{}] error [{}]", metricCommand, e);
        }
        return sqlParser;
    }

    // NOTE(review): @Test is commented out, so this scenario never runs in the
    // build - confirm whether it should be re-enabled or deleted.
    //@Test
    public void test() throws Exception {
        // --- Datasource s2_pv_uv_statis: raw query plus pv/uv/count measures ---
        DatasourceYamlTpl datasource = new DatasourceYamlTpl();
        datasource.setName("s2_pv_uv_statis");
        datasource.setSourceId(1L);
        datasource.setSqlQuery("SELECT imp_date, user_name,page,1 as pv, user_name as uv FROM s2_pv_uv_statis");
        MeasureYamlTpl measure = new MeasureYamlTpl();
        measure.setAgg("sum");
        measure.setName("s2_pv_uv_statis_pv");
        measure.setExpr("pv");
        List<MeasureYamlTpl> measures = new ArrayList<>();
        measures.add(measure);
        MeasureYamlTpl measure2 = new MeasureYamlTpl();
        measure2.setAgg("count");
        measure2.setName("s2_pv_uv_statis_internal_cnt");
        measure2.setExpr("1");
        measure2.setCreateMetric("true");
        measures.add(measure2);
        MeasureYamlTpl measure3 = new MeasureYamlTpl();
        measure3.setAgg("count");
        measure3.setName("s2_pv_uv_statis_uv");
        measure3.setExpr("uv");
        measure3.setCreateMetric("true");
        measures.add(measure3);
        datasource.setMeasures(measures);
        // --- Time dimensions: imp_date, sys_imp_date (day) and sys_imp_week ---
        DimensionYamlTpl dimension = new DimensionYamlTpl();
        dimension.setName("imp_date");
        dimension.setExpr("imp_date");
        dimension.setType("time");
        DimensionTimeTypeParamsTpl dimensionTimeTypeParams = new DimensionTimeTypeParamsTpl();
        dimensionTimeTypeParams.setIsPrimary("true");
        dimensionTimeTypeParams.setTimeGranularity("day");
        dimension.setTypeParams(dimensionTimeTypeParams);
        List<DimensionYamlTpl> dimensions = new ArrayList<>();
        dimensions.add(dimension);
        DimensionYamlTpl dimension2 = new DimensionYamlTpl();
        dimension2.setName("sys_imp_date");
        dimension2.setExpr("imp_date");
        dimension2.setType("time");
        DimensionTimeTypeParamsTpl dimensionTimeTypeParams2 = new DimensionTimeTypeParamsTpl();
        dimensionTimeTypeParams2.setIsPrimary("true");
        dimensionTimeTypeParams2.setTimeGranularity("day");
        dimension2.setTypeParams(dimensionTimeTypeParams2);
        dimensions.add(dimension2);
        DimensionYamlTpl dimension3 = new DimensionYamlTpl();
        dimension3.setName("sys_imp_week");
        dimension3.setExpr("to_monday(from_unixtime(unix_timestamp(imp_date), 'yyyy-MM-dd'))");
        dimension3.setType("time");
        DimensionTimeTypeParamsTpl dimensionTimeTypeParams3 = new DimensionTimeTypeParamsTpl();
        dimensionTimeTypeParams3.setIsPrimary("true");
        // NOTE(review): this dimension is named sys_imp_week but its granularity
        // is "day" (cf. addDepartment, which uses "week") - confirm intended.
        dimensionTimeTypeParams3.setTimeGranularity("day");
        dimension3.setTypeParams(dimensionTimeTypeParams3);
        dimensions.add(dimension3);
        datasource.setDimensions(dimensions);
        // --- Primary identifier user_name, used for joins across datasources ---
        List<IdentifyYamlTpl> identifies = new ArrayList<>();
        IdentifyYamlTpl identify = new IdentifyYamlTpl();
        identify.setName("user_name");
        identify.setType("primary");
        identifies.add(identify);
        datasource.setIdentifiers(identifies);
        // Register the datasource, the categorical "page" dimension, and the
        // pv/uv metrics into a fresh schema rooted at "s2".
        SemanticSchema semanticSchema = SemanticSchema.newBuilder("s2").build();
        SemanticSchemaManagerImpl.update(semanticSchema, SemanticSchemaManagerImpl.getDatasource(datasource));
        DimensionYamlTpl dimension1 = new DimensionYamlTpl();
        dimension1.setExpr("page");
        dimension1.setName("page");
        dimension1.setType("categorical");
        List<DimensionYamlTpl> dimensionYamlTpls = new ArrayList<>();
        dimensionYamlTpls.add(dimension1);
        SemanticSchemaManagerImpl.update(semanticSchema, "s2_pv_uv_statis",
                SemanticSchemaManagerImpl.getDimensions(dimensionYamlTpls));
        MetricYamlTpl metric1 = new MetricYamlTpl();
        metric1.setName("pv");
        metric1.setType("expr");
        MetricTypeParamsYamlTpl metricTypeParams = new MetricTypeParamsYamlTpl();
        List<MeasureYamlTpl> measures1 = new ArrayList<>();
        MeasureYamlTpl measure1 = new MeasureYamlTpl();
        measure1.setName("s2_pv_uv_statis_pv");
        measures1.add(measure1);
        metricTypeParams.setMeasures(measures1);
        metricTypeParams.setExpr("s2_pv_uv_statis_pv");
        metric1.setTypeParams(metricTypeParams);
        List<MetricYamlTpl> metric = new ArrayList<>();
        metric.add(metric1);
        MetricYamlTpl metric2 = new MetricYamlTpl();
        metric2.setName("uv");
        metric2.setType("expr");
        MetricTypeParamsYamlTpl metricTypeParams1 = new MetricTypeParamsYamlTpl();
        List<MeasureYamlTpl> measures2 = new ArrayList<>();
        MeasureYamlTpl measure4 = new MeasureYamlTpl();
        measure4.setName("s2_pv_uv_statis_uv");
        measures2.add(measure4);
        metricTypeParams1.setMeasures(measures2);
        metricTypeParams1.setExpr("s2_pv_uv_statis_uv");
        metric2.setTypeParams(metricTypeParams1);
        metric.add(metric2);
        SemanticSchemaManagerImpl.update(semanticSchema, SemanticSchemaManagerImpl.getMetrics(metric));
        // --- Scenario 1: pv by day for one user over a date range ---
        MetricReq metricCommand = new MetricReq();
        metricCommand.setRootPath("s2");
        metricCommand.setDimensions(new ArrayList<>(Arrays.asList("sys_imp_date")));
        metricCommand.setMetrics(new ArrayList<>(Arrays.asList("pv")));
        metricCommand.setWhere("user_name = 'ab' and (sys_imp_date >= '2023-02-28' and sys_imp_date <= '2023-05-28') ");
        metricCommand.setLimit(1000L);
        List<ColumnOrder> orders = new ArrayList<>();
        orders.add(ColumnOrder.buildDesc("sys_imp_date"));
        metricCommand.setOrder(orders);
        // No assertions - result is only printed for manual inspection.
        System.out.println(parser(semanticSchema, metricCommand, true));
        // --- Scenario 2: same query plus joined department/page dimensions ---
        addDepartment(semanticSchema);
        MetricReq metricCommand2 = new MetricReq();
        metricCommand2.setRootPath("s2");
        metricCommand2.setDimensions(new ArrayList<>(
                Arrays.asList("sys_imp_date", "user_name__department", "user_name", "user_name__page")));
        metricCommand2.setMetrics(new ArrayList<>(Arrays.asList("pv")));
        metricCommand2.setWhere(
                "user_name = 'ab' and (sys_imp_date >= '2023-02-28' and sys_imp_date <= '2023-05-28') ");
        metricCommand2.setLimit(1000L);
        List<ColumnOrder> orders2 = new ArrayList<>();
        orders2.add(ColumnOrder.buildDesc("sys_imp_date"));
        metricCommand2.setOrder(orders2);
        System.out.println(parser(semanticSchema, metricCommand2, true));
    }

    /**
     * Registers a second datasource "user_department" (joined to the first via
     * the shared primary identifier user_name) plus its categorical
     * "department" dimension, directly into the schema's internal maps.
     */
    private static void addDepartment(SemanticSchema semanticSchema) {
        DatasourceYamlTpl datasource = new DatasourceYamlTpl();
        datasource.setName("user_department");
        datasource.setSourceId(1L);
        datasource.setSqlQuery("SELECT imp_date,user_name,department FROM s2_user_department");
        MeasureYamlTpl measure = new MeasureYamlTpl();
        measure.setAgg("count");
        measure.setName("user_department_internal_cnt");
        measure.setCreateMetric("true");
        measure.setExpr("1");
        List<MeasureYamlTpl> measures = new ArrayList<>();
        measures.add(measure);
        datasource.setMeasures(measures);
        DimensionYamlTpl dimension = new DimensionYamlTpl();
        dimension.setName("sys_imp_date");
        dimension.setExpr("imp_date");
        dimension.setType("time");
        DimensionTimeTypeParamsTpl dimensionTimeTypeParams = new DimensionTimeTypeParamsTpl();
        dimensionTimeTypeParams.setIsPrimary("true");
        dimensionTimeTypeParams.setTimeGranularity("day");
        dimension.setTypeParams(dimensionTimeTypeParams);
        List<DimensionYamlTpl> dimensions = new ArrayList<>();
        dimensions.add(dimension);
        DimensionYamlTpl dimension3 = new DimensionYamlTpl();
        dimension3.setName("sys_imp_week");
        dimension3.setExpr("to_monday(from_unixtime(unix_timestamp(imp_date), 'yyyy-MM-dd'))");
        dimension3.setType("time");
        DimensionTimeTypeParamsTpl dimensionTimeTypeParams3 = new DimensionTimeTypeParamsTpl();
        dimensionTimeTypeParams3.setIsPrimary("true");
        dimensionTimeTypeParams3.setTimeGranularity("week");
        dimension3.setTypeParams(dimensionTimeTypeParams3);
        dimensions.add(dimension3);
        datasource.setDimensions(dimensions);
        List<IdentifyYamlTpl> identifies = new ArrayList<>();
        IdentifyYamlTpl identify = new IdentifyYamlTpl();
        identify.setName("user_name");
        identify.setType("primary");
        identifies.add(identify);
        datasource.setIdentifiers(identifies);
        semanticSchema.getDatasource().put("user_department", SemanticSchemaManagerImpl.getDatasource(datasource));
        DimensionYamlTpl dimension1 = new DimensionYamlTpl();
        dimension1.setExpr("department");
        dimension1.setName("department");
        dimension1.setType("categorical");
        List<DimensionYamlTpl> dimensionYamlTpls = new ArrayList<>();
        dimensionYamlTpls.add(dimension1);
        semanticSchema.getDimension()
                .put("user_department", SemanticSchemaManagerImpl.getDimensions(dimensionYamlTpls));
    }
}