[improvement][project] Global refactor, code formatting, LLM support, fuzzy-detection support, query-filter support, and more.

This commit is contained in:
lexluo
2023-07-08 15:00:03 +08:00
parent 5ffd617431
commit 404163f391
329 changed files with 21050 additions and 5036 deletions

View File

@@ -12,6 +12,7 @@ import com.tencent.supersonic.semantic.query.domain.parser.dsl.SemanticModel;
import java.util.ArrayList;
import java.util.List;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.context.annotation.Primary;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
@@ -31,7 +32,6 @@ public class ParserServiceImpl implements ParserService {
}
@Override
public SqlParserResp physicalSql(ParseSqlReq sqlCommend) throws Exception {
return parser(sqlCommend);
@@ -107,6 +107,9 @@ public class ParserServiceImpl implements ParserService {
/**
 * Escapes embedded double quotes in a WHERE clause so the clause can be
 * safely embedded in a quoted context downstream. Empty/null input is
 * returned unchanged.
 */
private String formatWhere(String where) {
    return StringUtils.isEmpty(where) ? where : where.replace("\"", "\\\\\"");
}
}

View File

@@ -1,28 +1,31 @@
package com.tencent.supersonic.semantic.query.application;
import com.tencent.supersonic.auth.api.authentication.pojo.User;
import com.tencent.supersonic.common.enums.TaskStatusEnum;
import com.tencent.supersonic.common.util.cache.CacheUtils;
import com.tencent.supersonic.common.util.context.ContextUtils;
import com.tencent.supersonic.semantic.api.core.pojo.QueryStat;
import com.tencent.supersonic.semantic.api.core.request.DomainSchemaFilterReq;
import com.tencent.supersonic.semantic.api.core.response.DomainSchemaResp;
import com.tencent.supersonic.semantic.api.core.response.QueryResultWithSchemaResp;
import com.tencent.supersonic.semantic.api.core.response.SqlParserResp;
import com.tencent.supersonic.semantic.api.query.pojo.Cache;
import com.tencent.supersonic.semantic.api.query.request.ItemUseReq;
import com.tencent.supersonic.semantic.api.query.request.MetricReq;
import com.tencent.supersonic.semantic.api.query.request.QueryMultiStructReq;
import com.tencent.supersonic.semantic.api.query.request.QuerySqlReq;
import com.tencent.supersonic.semantic.api.query.request.QueryStructReq;
import com.tencent.supersonic.semantic.api.query.response.ItemUseResp;
import com.tencent.supersonic.common.enums.TaskStatusEnum;
import com.tencent.supersonic.common.util.cache.CacheUtils;
import com.tencent.supersonic.semantic.core.domain.DatabaseService;
import com.tencent.supersonic.semantic.query.domain.ParserService;
import com.tencent.supersonic.semantic.query.domain.QueryService;
import com.tencent.supersonic.semantic.query.domain.SchemaService;
import com.tencent.supersonic.semantic.query.domain.annotation.DataPermission;
import com.tencent.supersonic.semantic.query.domain.utils.QueryReqConverter;
import com.tencent.supersonic.semantic.query.domain.utils.QueryStructUtils;
import com.tencent.supersonic.semantic.query.domain.utils.StatUtils;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
@@ -37,28 +40,37 @@ public class QueryServiceImpl implements QueryService {
private final QueryStructUtils queryStructUtils;
private final StatUtils statUtils;
private final CacheUtils cacheUtils;
private final QueryReqConverter queryReqConverter;
@Value("${query.cache.enable:true}")
private Boolean cacheEnable;
public QueryServiceImpl(ParserService parserService,
DatabaseService databaseService,
QueryStructUtils queryStructUtils,
StatUtils statUtils,
CacheUtils cacheUtils) {
DatabaseService databaseService,
QueryStructUtils queryStructUtils,
StatUtils statUtils,
CacheUtils cacheUtils,
QueryReqConverter queryReqConverter) {
this.parserService = parserService;
this.databaseService = databaseService;
this.queryStructUtils = queryStructUtils;
this.statUtils = statUtils;
this.cacheUtils = cacheUtils;
this.queryReqConverter = queryReqConverter;
}
@Override
public Object queryBySql(QuerySqlReq querySqlCmd) throws Exception {
//TODO QuerySqlCmd---> SqlCommend
MetricReq sqlCommend = new MetricReq();
public Object queryBySql(QuerySqlReq querySqlCmd, User user) throws Exception {
DomainSchemaFilterReq filter = new DomainSchemaFilterReq();
List<Long> domainIds = new ArrayList<>();
domainIds.add(querySqlCmd.getDomainId());
filter.setDomainIds(domainIds);
SchemaService schemaService = ContextUtils.getBean(SchemaService.class);
List<DomainSchemaResp> domainSchemas = schemaService.fetchDomainSchema(filter, user);
SqlParserResp sqlParser = queryReqConverter.convert(querySqlCmd, domainSchemas);
SqlParserResp sqlParser = parserService.physicalSql(sqlCommend);
return databaseService.executeSql(sqlParser.getSql(), querySqlCmd.getDomainId());
}
@@ -128,5 +140,4 @@ public class QueryServiceImpl implements QueryService {
}
}

View File

@@ -38,9 +38,9 @@ public class SchemaServiceImpl implements SchemaService {
private final MetricService metricService;
public SchemaServiceImpl(QueryService queryService,
DomainService domainService,
DimensionService dimensionService,
MetricService metricService) {
DomainService domainService,
DimensionService dimensionService,
MetricService metricService) {
this.queryService = queryService;
this.domainService = domainService;
this.dimensionService = dimensionService;
@@ -52,7 +52,7 @@ public class SchemaServiceImpl implements SchemaService {
public List<DomainSchemaResp> fetchDomainSchema(DomainSchemaFilterReq filter, User user) {
List<DomainSchemaResp> domainSchemaDescList = domainService.fetchDomainSchema(filter, user);
List<ItemUseResp> statInfos = queryService.getStatInfo(new ItemUseReq());
log.info("statInfos:{}", statInfos);
log.debug("statInfos:{}", statInfos);
fillCnt(domainSchemaDescList, statInfos);
return domainSchemaDescList;
@@ -66,7 +66,7 @@ public class SchemaServiceImpl implements SchemaService {
itemUseInfo -> itemUseInfo.getType() + AT_SYMBOL + AT_SYMBOL + itemUseInfo.getBizName(),
itemUseInfo -> itemUseInfo,
(item1, item2) -> item1));
log.info("typeIdAndStatPair:{}", typeIdAndStatPair);
log.debug("typeIdAndStatPair:{}", typeIdAndStatPair);
for (DomainSchemaResp domainSchemaDesc : domainSchemaDescList) {
fillDimCnt(domainSchemaDesc, typeIdAndStatPair);
fillMetricCnt(domainSchemaDesc, typeIdAndStatPair);

View File

@@ -38,18 +38,19 @@ import org.springframework.beans.factory.annotation.Value;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
@Slf4j
@Primary
@Service("SemanticSchemaManager")
public class SemanticSchemaManagerImpl implements SemanticSchemaManager {
@Autowired
private LoadingCache<String, SemanticModel> loadingCache;
private final DatasourceService datasourceService;
private final DomainService domainService;
@Autowired
private LoadingCache<String, SemanticModel> loadingCache;
public SemanticSchemaManagerImpl(DatasourceService datasourceService,
@@ -58,62 +59,14 @@ public class SemanticSchemaManagerImpl implements SemanticSchemaManager {
this.domainService = domainService;
}
@Override
public SemanticModel reload(String rootPath) {
SemanticModel semanticModel = new SemanticModel();
semanticModel.setRootPath(rootPath);
Map<Long, String> domainFullPathMap = domainService.getDomainFullPath();
log.info("domainFullPathMap {}", domainFullPathMap);
Set<Long> domainIds = domainFullPathMap.entrySet().stream().filter(e -> e.getValue().startsWith(rootPath))
.map(e -> e.getKey()).collect(Collectors.toSet());
if (domainIds.isEmpty()) {
log.error("get domainId empty {}", rootPath);
return semanticModel;
}
Map<String, List<DimensionYamlTpl>> dimensionYamlTpls = new HashMap<>();
List<DatasourceYamlTpl> datasourceYamlTpls = new ArrayList<>();
List<MetricYamlTpl> metricYamlTpls = new ArrayList<>();
datasourceService.getModelYamlTplByDomainIds(domainIds, dimensionYamlTpls, datasourceYamlTpls, metricYamlTpls);
if (!datasourceYamlTpls.isEmpty()) {
Map<String, DataSource> dataSourceMap = datasourceYamlTpls.stream().map(d -> getDatasource(d))
.collect(Collectors.toMap(DataSource::getName, item -> item));
semanticModel.setDatasourceMap(dataSourceMap);
}
if (!dimensionYamlTpls.isEmpty()) {
Map<String, List<Dimension>> dimensionMap = new HashMap<>();
for (Map.Entry<String, List<DimensionYamlTpl>> entry : dimensionYamlTpls.entrySet()) {
dimensionMap.put(entry.getKey(), getDimensions(entry.getValue()));
}
semanticModel.setDimensionMap(dimensionMap);
}
if (!metricYamlTpls.isEmpty()) {
semanticModel.setMetrics(getMetrics(metricYamlTpls));
}
return semanticModel;
}
//private Map<String, SemanticSchema> semanticSchemaMap = new HashMap<>();
@Override
public SemanticModel get(String rootPath) throws Exception {
rootPath = formatKey(rootPath);
SemanticModel schema = loadingCache.get(rootPath);
if (schema == null) {
return null;
}
return schema;
}
// Thin static adapters: convert YAML template lists into runtime model objects
// by delegating to the corresponding converter helpers.
public static List<Metric> getMetrics(final List<MetricYamlTpl> t) {
return getMetricsByMetricYamlTpl(t);
}
public static List<Dimension> getDimensions(final List<DimensionYamlTpl> t) {
return getDimension(t);
}
public static DataSource getDatasource(final DatasourceYamlTpl d) {
DataSource datasource = new DataSource();
datasource.setSqlQuery(d.getSqlQuery());
@@ -197,7 +150,6 @@ public class SemanticSchemaManagerImpl implements SemanticSchemaManager {
return identifies;
}
public static void update(SemanticSchema schema, List<Metric> metric) throws Exception {
if (schema != null) {
updateMetric(metric, schema.getMetrics());
@@ -273,6 +225,51 @@ public class SemanticSchemaManagerImpl implements SemanticSchemaManager {
return key;
}
// Rebuilds the SemanticModel for every domain whose full path starts with the
// given rootPath. Returns an empty (but non-null) model when no domain matches.
@Override
public SemanticModel reload(String rootPath) {
SemanticModel semanticModel = new SemanticModel();
semanticModel.setRootPath(rootPath);
// Resolve the domain ids under this root via a prefix match on full paths.
Map<Long, String> domainFullPathMap = domainService.getDomainFullPath();
log.info("domainFullPathMap {}", domainFullPathMap);
Set<Long> domainIds = domainFullPathMap.entrySet().stream().filter(e -> e.getValue().startsWith(rootPath))
.map(e -> e.getKey()).collect(Collectors.toSet());
if (domainIds.isEmpty()) {
log.error("get domainId empty {}", rootPath);
return semanticModel;
}
// The service fills these three collections in place (out-parameters).
Map<String, List<DimensionYamlTpl>> dimensionYamlTpls = new HashMap<>();
List<DatasourceYamlTpl> datasourceYamlTpls = new ArrayList<>();
List<MetricYamlTpl> metricYamlTpls = new ArrayList<>();
datasourceService.getModelYamlTplByDomainIds(domainIds, dimensionYamlTpls, datasourceYamlTpls, metricYamlTpls);
// Convert YAML templates into runtime objects, keyed by datasource name.
// NOTE(review): Collectors.toMap throws on duplicate datasource names —
// confirm names are unique per root path.
if (!datasourceYamlTpls.isEmpty()) {
Map<String, DataSource> dataSourceMap = datasourceYamlTpls.stream().map(d -> getDatasource(d))
.collect(Collectors.toMap(DataSource::getName, item -> item));
semanticModel.setDatasourceMap(dataSourceMap);
}
if (!dimensionYamlTpls.isEmpty()) {
Map<String, List<Dimension>> dimensionMap = new HashMap<>();
for (Map.Entry<String, List<DimensionYamlTpl>> entry : dimensionYamlTpls.entrySet()) {
dimensionMap.put(entry.getKey(), getDimensions(entry.getValue()));
}
semanticModel.setDimensionMap(dimensionMap);
}
if (!metricYamlTpls.isEmpty()) {
semanticModel.setMetrics(getMetrics(metricYamlTpls));
}
return semanticModel;
}
//private Map<String, SemanticSchema> semanticSchemaMap = new HashMap<>();
/**
 * Returns the cached SemanticModel for the normalized root path, loading it
 * on a cache miss via the configured loader.
 */
@Override
public SemanticModel get(String rootPath) throws Exception {
    // Normalize the key first, then serve from the loading cache; a null
    // entry propagates to the caller as-is, matching the original contract.
    return loadingCache.get(formatKey(rootPath));
}
@Configuration
@EnableCaching
public class GuavaCacheConfig {

View File

@@ -14,7 +14,7 @@ import javax.servlet.http.HttpServletRequest;
public interface QueryService {
Object queryBySql(QuerySqlReq querySqlCmd) throws Exception;
Object queryBySql(QuerySqlReq querySqlCmd, User user) throws Exception;
QueryResultWithSchemaResp queryByStruct(QueryStructReq queryStructCmd, User user) throws Exception;

View File

@@ -27,6 +27,11 @@ public class Configuration {
public static RelDataTypeFactory typeFactory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
public static SqlOperatorTable operatorTable = SqlStdOperatorTable.instance();
public static CalciteConnectionConfig config = new CalciteConnectionConfigImpl(configProperties);
public static SqlValidator.Config validatorConfig = SqlValidator.Config.DEFAULT
.withLenientOperatorLookup(config.lenientOperatorLookup())
.withSqlConformance(SemanticSqlDialect.DEFAULT.getConformance())
.withDefaultNullCollation(config.defaultNullCollation())
.withIdentifierExpansion(true);
static {
configProperties.put(CalciteConnectionProperty.CASE_SENSITIVE.camelName(), Boolean.TRUE.toString());
@@ -34,12 +39,6 @@ public class Configuration {
configProperties.put(CalciteConnectionProperty.QUOTED_CASING.camelName(), Casing.TO_LOWER.toString());
}
public static SqlValidator.Config validatorConfig = SqlValidator.Config.DEFAULT
.withLenientOperatorLookup(config.lenientOperatorLookup())
.withSqlConformance(SemanticSqlDialect.DEFAULT.getConformance())
.withDefaultNullCollation(config.defaultNullCollation())
.withIdentifierExpansion(true);
public static SqlParser.Config getParserConfig() {
CalciteConnectionConfig config = new CalciteConnectionConfigImpl(configProperties);
SqlParser.ConfigBuilder parserConfig = SqlParser.configBuilder();

View File

@@ -25,21 +25,6 @@ public abstract class Renderer {
protected TableView tableView = new TableView();
public void setTable(SqlNode table) {
tableView.setTable(table);
}
public SqlNode builder() {
return tableView.build();
}
public SqlNode builderAs(String alias) throws Exception {
return SemanticNode.buildAs(alias, tableView.build());
}
public abstract void render(MetricReq metricCommand, List<DataSource> dataSources, SqlValidatorScope scope,
SemanticSchema schema, boolean nonAgg) throws Exception;
public static Optional<Dimension> getDimensionByName(String name, DataSource datasource) {
return datasource.getDimensions().stream().filter(d -> d.getName().equalsIgnoreCase(name)).findFirst();
}
@@ -58,7 +43,6 @@ public abstract class Renderer {
return datasource.getIdentifiers().stream().filter(i -> i.getName().equalsIgnoreCase(name)).findFirst();
}
public static MetricNode buildMetricNode(String metric, DataSource datasource, SqlValidatorScope scope,
SemanticSchema schema, boolean nonAgg, String alias) throws Exception {
Optional<Metric> metricOpt = getMetricByName(metric, schema);
@@ -105,4 +89,19 @@ public abstract class Renderer {
Set<String> tmp = new HashSet<>(list);
return tmp.stream().collect(Collectors.toList());
}
public void setTable(SqlNode table) {
tableView.setTable(table);
}
public SqlNode builder() {
return tableView.build();
}
public SqlNode builderAs(String alias) throws Exception {
return SemanticNode.buildAs(alias, tableView.build());
}
public abstract void render(MetricReq metricCommand, List<DataSource> dataSources, SqlValidatorScope scope,
SemanticSchema schema, boolean nonAgg) throws Exception;
}

View File

@@ -5,6 +5,13 @@ import org.apache.calcite.sql.validate.SqlValidatorScope;
public class AggFunctionNode extends SemanticNode {
/**
 * Renders an aggregate call (e.g. "SUM ( col )") as text and parses it into
 * a SqlNode. COUNT_DISTINCT is expanded to "COUNT ( DISTINCT col )".
 */
public static SqlNode build(String agg, String name, SqlValidatorScope scope) throws Exception {
    final boolean countDistinct = AggFunction.COUNT_DISTINCT.name().equalsIgnoreCase(agg);
    final String expression = countDistinct
            ? AggFunction.COUNT.name() + " ( " + AggFunction.DISTINCT.name() + " " + name + " ) "
            : agg + " ( " + name + " ) ";
    return parse(expression, scope);
}
public static enum AggFunction {
AVG,
COUNT_DISTINCT,
@@ -15,12 +22,5 @@ public class AggFunctionNode extends SemanticNode {
DISTINCT
}
public static SqlNode build(String agg, String name, SqlValidatorScope scope) throws Exception {
if (AggFunction.COUNT_DISTINCT.name().equalsIgnoreCase(agg)) {
return parse(AggFunction.COUNT.name() + " ( " + AggFunction.DISTINCT.name() + " " + name + " ) ", scope);
}
return parse(agg + " ( " + name + " ) ", scope);
}
}

View File

@@ -24,10 +24,6 @@ import org.apache.commons.lang3.StringUtils;
public abstract class SemanticNode {
public void accept(Optimization optimization) {
optimization.visit(this);
}
public static SqlNode parse(String expression, SqlValidatorScope scope) throws Exception {
SqlParser sqlParser = SqlParser.create(expression, Configuration.getParserConfig());
SqlNode sqlNode = sqlParser.parseExpression();
@@ -77,7 +73,6 @@ public abstract class SemanticNode {
return sqlNode instanceof SqlIdentifier;
}
public static SqlNode getAlias(SqlNode sqlNode, SqlValidatorScope scope) throws Exception {
if (sqlNode instanceof SqlBasicCall) {
SqlBasicCall sqlBasicCall = (SqlBasicCall) sqlNode;
@@ -117,5 +112,9 @@ public abstract class SemanticNode {
return sqlNode;
}
public void accept(Optimization optimization) {
optimization.visit(this);
}
}

View File

@@ -90,7 +90,7 @@ public class JoinRender extends Renderer {
fieldWhere.add(identify.getName());
}
}
TableView tableView = SourceRender.renderOne(false, "", fieldWhere, queryMetrics, queryDimension,
TableView tableView = SourceRender.renderOne("", fieldWhere, queryMetrics, queryDimension,
metricCommand.getWhere(), dataSources.get(i), scope, schema, true);
log.info("tableView {}", tableView.getTable().toString());
String alias = Constants.JOIN_TABLE_PREFIX + dataSource.getName();

View File

@@ -33,30 +33,7 @@ import org.springframework.util.CollectionUtils;
@Slf4j
public class SourceRender extends Renderer {
public void render(MetricReq metricCommand, List<DataSource> dataSources, SqlValidatorScope scope,
SemanticSchema schema, boolean nonAgg) throws Exception {
String queryWhere = metricCommand.getWhere();
Set<String> whereFields = new HashSet<>();
List<String> fieldWhere = new ArrayList<>();
if (queryWhere != null && !queryWhere.isEmpty()) {
SqlNode sqlNode = SemanticNode.parse(queryWhere, scope);
FilterNode.getFilterField(sqlNode, whereFields);
fieldWhere = whereFields.stream().collect(Collectors.toList());
}
if (dataSources.size() == 1) {
DataSource dataSource = dataSources.get(0);
super.tableView = renderOne(false, "", fieldWhere, metricCommand.getMetrics(),
metricCommand.getDimensions(),
metricCommand.getWhere(), dataSource, scope, schema, nonAgg);
return;
}
JoinRender joinRender = new JoinRender();
joinRender.render(metricCommand, dataSources, scope, schema, nonAgg);
super.tableView = joinRender.getTableView();
}
public static TableView renderOne(boolean addWhere, String alias, List<String> fieldWhere,
public static TableView renderOne(String alias, List<String> fieldWhere,
List<String> reqMetrics, List<String> reqDimensions, String queryWhere, DataSource datasource,
SqlValidatorScope scope, SemanticSchema schema, boolean nonAgg) throws Exception {
@@ -65,10 +42,10 @@ public class SourceRender extends Renderer {
List<String> queryMetrics = new ArrayList<>(reqMetrics);
List<String> queryDimensions = new ArrayList<>(reqDimensions);
if (!fieldWhere.isEmpty()) {
SqlNode sqlNode = SemanticNode.parse(queryWhere, scope);
if (addWhere) {
output.getFilter().add(sqlNode);
}
// SqlNode sqlNode = SemanticNode.parse(queryWhere, scope);
// if (addWhere) {
// output.getFilter().add(sqlNode);
// }
Set<String> dimensions = new HashSet<>();
Set<String> metrics = new HashSet<>();
whereDimMetric(fieldWhere, queryMetrics, queryDimensions, datasource, schema, dimensions, metrics);
@@ -104,7 +81,6 @@ public class SourceRender extends Renderer {
return output;
}
private static void buildDimension(String alias, String dimension, DataSource datasource, SemanticSchema schema,
boolean nonAgg, TableView dataSet, TableView output, SqlValidatorScope scope) throws Exception {
List<Dimension> dimensionList = schema.getDimension().get(datasource.getName());
@@ -160,7 +136,6 @@ public class SourceRender extends Renderer {
}
}
private static boolean isWhereHasMetric(List<String> fields, DataSource datasource) {
Long metricNum = datasource.getMeasures().stream().filter(m -> fields.contains(m.getName().toLowerCase()))
.count();
@@ -227,7 +202,6 @@ public class SourceRender extends Renderer {
//getWhere(outputSet,fields,queryMetrics,queryDimensions,datasource,scope,schema);
}
public static void whereDimMetric(List<String> fields, List<String> queryMetrics,
List<String> queryDimensions, DataSource datasource, SemanticSchema schema, Set<String> dimensions,
Set<String> metrics) {
@@ -291,7 +265,6 @@ public class SourceRender extends Renderer {
return false;
}
private static void expandWhere(MetricReq metricCommand, TableView tableView, SqlValidatorScope scope)
throws Exception {
if (metricCommand.getWhere() != null && !metricCommand.getWhere().isEmpty()) {
@@ -303,5 +276,27 @@ public class SourceRender extends Renderer {
}
}
// Renders a single-datasource query directly, or delegates to JoinRender when
// the query spans multiple datasources. The result is stored in the inherited
// tableView field (side effect; no return value).
public void render(MetricReq metricCommand, List<DataSource> dataSources, SqlValidatorScope scope,
SemanticSchema schema, boolean nonAgg) throws Exception {
String queryWhere = metricCommand.getWhere();
Set<String> whereFields = new HashSet<>();
List<String> fieldWhere = new ArrayList<>();
// Collect the field names referenced by the WHERE clause so renderOne can
// decide which columns must be projected.
if (queryWhere != null && !queryWhere.isEmpty()) {
SqlNode sqlNode = SemanticNode.parse(queryWhere, scope);
FilterNode.getFilterField(sqlNode, whereFields);
fieldWhere = whereFields.stream().collect(Collectors.toList());
}
// Single datasource: render it in place and stop.
if (dataSources.size() == 1) {
DataSource dataSource = dataSources.get(0);
super.tableView = renderOne("", fieldWhere, metricCommand.getMetrics(),
metricCommand.getDimensions(),
metricCommand.getWhere(), dataSource, scope, schema, nonAgg);
return;
}
// Multiple datasources: build the joined view and adopt its table view.
JoinRender joinRender = new JoinRender();
joinRender.render(metricCommand, dataSources, scope, schema, nonAgg);
super.tableView = joinRender.getTableView();
}
}

View File

@@ -9,18 +9,13 @@ import lombok.Data;
public class Dimension implements SemanticItem {
String name;
private String owners;
private String type;
private String expr;
private DimensionTimeTypeParams dimensionTimeTypeParams;
@Override
public String getName() {
return name;
}
private String owners;
private String type;
private String expr;
private DimensionTimeTypeParams dimensionTimeTypeParams;
}

View File

@@ -10,16 +10,12 @@ import lombok.Data;
public class Metric implements SemanticItem {
private String name;
private List<String> owners;
private String type;
private MetricTypeParams metricTypeParams;
@Override
public String getName() {
return name;
}
private List<String> owners;
private String type;
private MetricTypeParams metricTypeParams;
}

View File

@@ -8,6 +8,7 @@ import lombok.Data;
@Data
public class SemanticModel {
private String rootPath;
private List<Metric> metrics = new ArrayList<>();
private Map<String, DataSource> datasourceMap = new HashMap<>();

View File

@@ -39,11 +39,14 @@ public class DataSourceTable extends AbstractTable implements ScannableTable, Tr
this.statistic = statistic;
}
// Static factory for the fluent Builder used to assemble a DataSourceTable.
public static Builder newBuilder(String tableName) {
return new Builder(tableName);
}
// Name of the underlying table this Calcite table wraps.
public String getTableName() {
return tableName;
}
@Override
public RelDataType getRowType(RelDataTypeFactory typeFactory) {
if (rowType == null) {
@@ -71,11 +74,6 @@ public class DataSourceTable extends AbstractTable implements ScannableTable, Tr
throw new UnsupportedOperationException("Not implemented");
}
public static Builder newBuilder(String tableName) {
return new Builder(tableName);
}
public RelNode toRel(RelOptTable.ToRelContext toRelContext, RelOptTable relOptTable) {
List<RelHint> hint = new ArrayList<>();
return new LogicalTableScan(toRelContext.getCluster(), toRelContext.getCluster().traitSet(), hint, relOptTable);

View File

@@ -26,6 +26,10 @@ public class SemanticSchema extends AbstractSchema {
this.tableMap = tableMap;
}
public static Builder newBuilder(String rootPath) {
return new Builder(rootPath);
}
public String getRootPath() {
return rootPath;
}
@@ -40,27 +44,22 @@ public class SemanticSchema extends AbstractSchema {
return this;
}
public static Builder newBuilder(String rootPath) {
return new Builder(rootPath);
public Map<String, DataSource> getDatasource() {
return semanticModel.getDatasourceMap();
}
public void setDatasource(Map<String, DataSource> datasource) {
semanticModel.setDatasourceMap(datasource);
}
public void setDimension(Map<String, List<Dimension>> dimensions) {
semanticModel.setDimensionMap(dimensions);
}
public Map<String, DataSource> getDatasource() {
return semanticModel.getDatasourceMap();
}
public Map<String, List<Dimension>> getDimension() {
return semanticModel.getDimensionMap();
}
public void setDimension(Map<String, List<Dimension>> dimensions) {
semanticModel.setDimensionMap(dimensions);
}
public List<Metric> getMetrics() {
return semanticModel.getMetrics();
}

View File

@@ -12,7 +12,6 @@ import org.checkerframework.checker.nullness.qual.Nullable;
public class SemanticSqlDialect extends SqlDialect {
private static final SqlConformance tagTdwSqlConformance = new SemanticSqlConformance();
public static final Context DEFAULT_CONTEXT = SqlDialect.EMPTY_CONTEXT
.withDatabaseProduct(DatabaseProduct.BIG_QUERY)
.withLiteralQuoteString("'")
@@ -22,13 +21,37 @@ public class SemanticSqlDialect extends SqlDialect {
.withUnquotedCasing(Casing.UNCHANGED)
.withQuotedCasing(Casing.UNCHANGED)
.withCaseSensitive(false);
public static final SqlDialect DEFAULT = new SemanticSqlDialect(DEFAULT_CONTEXT);
private static final SqlConformance tagTdwSqlConformance = new SemanticSqlConformance();
public SemanticSqlDialect(Context context) {
super(context);
}
/**
 * Emits pagination as a MySQL-style {@code LIMIT [offset,] fetch} clause
 * instead of the ANSI {@code OFFSET ... FETCH ...} syntax.
 *
 * <p>FIX: the original emitted the comma whenever {@code fetch} was present,
 * producing invalid SQL ("LIMIT , n") when only a fetch (no offset) was
 * given. The comma is now emitted only between an offset and a fetch.
 *
 * <p>NOTE(review): an offset with no fetch still renders as
 * "LIMIT &lt;offset&gt;", which MySQL reads as a row count — confirm callers
 * always pass fetch alongside offset.
 *
 * @param writer target SQL writer
 * @param offset rows to skip, or null
 * @param fetch  maximum rows to return, or null; at least one must be non-null
 */
public static void unparseFetchUsingAnsi(SqlWriter writer, @Nullable SqlNode offset, @Nullable SqlNode fetch) {
    Preconditions.checkArgument(fetch != null || offset != null);
    writer.newlineAndIndent();
    final SqlWriter.Frame fetchFrame = writer.startList(SqlWriter.FrameTypeEnum.OFFSET);
    writer.keyword("LIMIT");
    if (offset != null) {
        offset.unparse(writer, -1, -1);
    }
    if (fetch != null) {
        if (offset != null) {
            writer.keyword(",");
        }
        fetch.unparse(writer, -1, -1);
    }
    writer.endList(fetchFrame);
}
@Override
public void quoteStringLiteralUnicode(StringBuilder buf, String val) {
buf.append("'");
@@ -36,7 +59,6 @@ public class SemanticSqlDialect extends SqlDialect {
buf.append("'");
}
@Override
public void quoteStringLiteral(StringBuilder buf, String charsetName, String val) {
buf.append(literalQuoteString);
@@ -70,28 +92,4 @@ public class SemanticSqlDialect extends SqlDialect {
public void unparseOffsetFetch(SqlWriter writer, @Nullable SqlNode offset, @Nullable SqlNode fetch) {
unparseFetchUsingAnsi(writer, offset, fetch);
}
public static void unparseFetchUsingAnsi(SqlWriter writer, @Nullable SqlNode offset, @Nullable SqlNode fetch) {
Preconditions.checkArgument(fetch != null || offset != null);
SqlWriter.Frame fetchFrame;
writer.newlineAndIndent();
fetchFrame = writer.startList(SqlWriter.FrameTypeEnum.OFFSET);
writer.keyword("LIMIT");
if (offset != null) {
//writer.keyword("OFFSET");
offset.unparse(writer, -1, -1);
//writer.keyword("ROWS");
}
if (fetch != null) {
//writer.newlineAndIndent();
//fetchFrame = writer.startList(SqlWriter.FrameTypeEnum.FETCH);
writer.keyword(",");
//writer.keyword("NEXT");
fetch.unparse(writer, -1, -1);
}
writer.endList(fetchFrame);
}
}

View File

@@ -10,22 +10,22 @@ public class ParserSvrResponse<T> {
return code;
}
public String getMsg() {
return msg;
}
public T getData() {
return data;
}
public void setCode(String code) {
this.code = code;
}
public String getMsg() {
return msg;
}
public void setMsg(String msg) {
this.msg = msg;
}
public T getData() {
return data;
}
public void setData(T data) {
this.data = data;
}

View File

@@ -57,26 +57,20 @@ import org.springframework.util.CollectionUtils;
@Slf4j
public class DataPermissionAOP {
@Autowired
private QueryStructUtils queryStructUtils;
@Autowired
private AuthService authService;
@Autowired
private DimensionService dimensionService;
@Autowired
private MetricService metricService;
@Autowired
private DomainService domainService;
@Value("${permission.data.enable:true}")
private Boolean permissionDataEnable;
private static final ObjectMapper MAPPER = new ObjectMapper().setDateFormat(
new SimpleDateFormat(Constants.DAY_FORMAT));
@Autowired
private QueryStructUtils queryStructUtils;
@Autowired
private AuthService authService;
@Autowired
private DimensionService dimensionService;
@Autowired
private MetricService metricService;
@Autowired
private DomainService domainService;
@Value("${permission.data.enable:true}")
private Boolean permissionDataEnable;
@Pointcut("@annotation(com.tencent.supersonic.semantic.query.domain.annotation.DataPermission)")
public void dataPermissionAOP() {
@@ -179,7 +173,7 @@ public class DataPermissionAOP {
}
private void addPromptInfoInfo(Long domainId, QueryResultWithSchemaResp queryResultWithColumns,
AuthorizedResourceResp authorizedResource) {
AuthorizedResourceResp authorizedResource) {
List<DimensionFilter> filters = authorizedResource.getFilters();
if (!CollectionUtils.isEmpty(filters)) {
log.debug("dimensionFilters:{}", filters);
@@ -258,7 +252,7 @@ public class DataPermissionAOP {
}
private AuthorizedResourceResp getAuthorizedResource(User user, HttpServletRequest request, Long domainId,
Set<String> sensitiveResReq) {
Set<String> sensitiveResReq) {
List<AuthRes> resourceReqList = new ArrayList<>();
sensitiveResReq.stream().forEach(res -> resourceReqList.add(new AuthRes(domainId.toString(), res)));
QueryAuthResReq queryAuthResReq = new QueryAuthResReq();
@@ -375,7 +369,7 @@ public class DataPermissionAOP {
}
private void doFilterCheckLogic(QueryStructReq queryStructCmd, Set<String> resAuthName,
Set<String> sensitiveResReq) {
Set<String> sensitiveResReq) {
Set<String> resFilterSet = queryStructUtils.getFilterResNameEnExceptInternalCol(queryStructCmd);
Set<String> need2Apply = resFilterSet.stream()
.filter(res -> !resAuthName.contains(res) && sensitiveResReq.contains(res)).collect(Collectors.toSet());
@@ -405,7 +399,7 @@ public class DataPermissionAOP {
private AuthorizedResourceResp fetchAuthRes(HttpServletRequest request, QueryAuthResReq queryAuthResReq) {
log.info("Authorization:{}", request.getHeader("Authorization"));
log.info("queryAuthResReq:{}", queryAuthResReq);
return authService.queryAuthorizedResources(request, queryAuthResReq);
return authService.queryAuthorizedResources(queryAuthResReq, request);
}
}

View File

@@ -1,14 +1,16 @@
package com.tencent.supersonic.domain.semantic.query.domain.utils;
package com.tencent.supersonic.semantic.query.domain.utils;
import static com.tencent.supersonic.common.constant.Constants.APOSTROPHE;
import static com.tencent.supersonic.common.constant.Constants.COMMA;
import static com.tencent.supersonic.common.constant.Constants.DAY;
import static com.tencent.supersonic.common.constant.Constants.DAY_FORMAT;
import static com.tencent.supersonic.common.constant.Constants.MONTH;
import static com.tencent.supersonic.common.constant.Constants.MONTH_FORMAT;
import static com.tencent.supersonic.common.constant.Constants.WEEK;
import com.google.common.base.Strings;
import com.tencent.supersonic.semantic.api.core.response.ItemDateResp;
import com.tencent.supersonic.common.pojo.DateConf;
import com.tencent.supersonic.semantic.api.core.response.ItemDateResp;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.time.temporal.ChronoUnit;
@@ -29,6 +31,10 @@ public class DateUtils {
@Value("${query.parameter.sys.date:sys_imp_date}")
private String sysDateCol;
@Value("${query.parameter.sys.month:sys_imp_month}")
private String sysDateMonthCol;
// FIX: the property key was "query.parameter.sys.month" — a copy-paste of the
// month field above — so the week column could never be configured on its own.
// The default value "sys_imp_week" is unchanged, so existing deployments that
// relied on the default are unaffected.
@Value("${query.parameter.sys.week:sys_imp_week}")
private String sysDateWeekCol;
public Boolean recentMode(DateConf dateInfo) {
if (Objects.nonNull(dateInfo) && DateConf.DateMode.RECENT_UNITS == dateInfo.getDateMode()
@@ -125,6 +131,29 @@ public class DateUtils {
return String.format("(%s >= '%s' and %s <= '%s')", sysDateCol, start, sysDateCol, dateDate.getEndDate());
}
/**
 * Builds a SQL predicate selecting the most recent {@code dateInfo.getUnit()} months
 * (inclusive of the end month) on the system month column.
 *
 * @param dateDate item date metadata supplying the end date, parsed with MONTH_FORMAT
 * @param dateInfo date configuration supplying the number of recent units
 * @return a predicate of the form {@code (col >= 'start' and col <= 'end')}
 */
public String recentMonthStr(ItemDateResp dateDate, DateConf dateInfo) {
    DateTimeFormatter formatter = DateTimeFormatter.ofPattern(MONTH_FORMAT);
    LocalDate end = LocalDate.parse(dateDate.getEndDate(), formatter);
    // Re-format to normalize the end date into canonical MONTH_FORMAT.
    String endStr = end.format(formatter);
    // Primitive int avoids needless autoboxing; subtract 1 because the range
    // is inclusive of the end month.
    int unit = dateInfo.getUnit() - 1;
    String start = end.minusMonths(unit).format(formatter);
    return String.format("(%s >= '%s' and %s <= '%s')", sysDateMonthCol, start, sysDateMonthCol, endStr);
}
/**
 * Builds a SQL predicate selecting the most recent {@code dateInfo.getUnit()} weeks
 * (inclusive of the end week) on the system week column.
 *
 * @param dateDate item date metadata supplying the end date and its format
 * @param dateInfo date configuration supplying the number of recent units
 * @return a predicate of the form {@code (col >= 'start' and col <= 'end')}
 */
public String recentWeekStr(ItemDateResp dateDate, DateConf dateInfo) {
    String dateFormatStr = dateDate.getDateFormat();
    if (Strings.isNullOrEmpty(dateFormatStr)) {
        // Fall back to the day format when the item does not declare one.
        dateFormatStr = DAY_FORMAT;
    }
    DateTimeFormatter formatter = DateTimeFormatter.ofPattern(dateFormatStr);
    LocalDate end = LocalDate.parse(dateDate.getEndDate(), formatter);
    // Primitive int avoids autoboxed arithmetic; the range includes the end week.
    int unit = dateInfo.getUnit() - 1;
    // minusWeeks(n) is exactly minusDays(7 * n) but states the intent directly.
    String start = end.minusWeeks(unit).format(formatter);
    return String.format("(%s >= '%s' and %s <= '%s')", sysDateWeekCol, start, sysDateWeekCol,
            dateDate.getEndDate());
}
private Long getInterval(String startDate, String endDate, String dateFormat, ChronoUnit chronoUnit) {
DateTimeFormatter formatter = DateTimeFormatter.ofPattern(dateFormat);
try {
@@ -142,6 +171,12 @@ public class DateUtils {
if (DAY.equalsIgnoreCase(dateInfo.getPeriod())) {
return recentDayStr(dateDate, dateInfo);
}
if (MONTH.equalsIgnoreCase(dateInfo.getPeriod())) {
return recentMonthStr(dateDate, dateInfo);
}
if (WEEK.equalsIgnoreCase(dateInfo.getPeriod())) {
return recentWeekStr(dateDate, dateInfo);
}
return "";
}

View File

@@ -30,12 +30,11 @@ public class ParserCommandConverter {
private final ParserService parserService;
private final DomainService domainService;
private final CalculateConverterAgg calculateCoverterAgg;
private final DimensionService dimensionService;
private final QueryStructUtils queryStructUtils;
@Value("${internal.metric.cnt.suffix:internal_cnt}")
private String internalMetricNameSuffix;
private final DimensionService dimensionService;
private List<CalculateConverter> calculateCoverters = new LinkedList<>();
private final QueryStructUtils queryStructUtils;
public ParserCommandConverter(ParserService parserService,
DomainService domainService,
@@ -78,11 +77,14 @@ public class ParserCommandConverter {
sqlCommend.setLimit(queryStructCmd.getLimit());
String rootPath = domainService.getDomainFullPath(queryStructCmd.getDomainId());
sqlCommend.setRootPath(rootPath);
// todo tmp delete
// support detail query
if (queryStructCmd.getNativeQuery() && CollectionUtils.isEmpty(sqlCommend.getMetrics())) {
String internalMetricName = generateInternalMetricName(queryStructCmd);
sqlCommend.getMetrics().add(internalMetricName);
}
return sqlCommend;
}

View File

@@ -0,0 +1,72 @@
package com.tencent.supersonic.semantic.query.domain.utils;
import com.tencent.supersonic.common.util.calcite.SqlParseUtils;
import com.tencent.supersonic.common.util.calcite.SqlParserInfo;
import com.tencent.supersonic.semantic.api.core.response.DomainSchemaResp;
import com.tencent.supersonic.semantic.api.core.response.SqlParserResp;
import com.tencent.supersonic.semantic.api.query.pojo.MetricTable;
import com.tencent.supersonic.semantic.api.query.request.ParseSqlReq;
import com.tencent.supersonic.semantic.api.query.request.QuerySqlReq;
import com.tencent.supersonic.semantic.core.domain.DomainService;
import com.tencent.supersonic.semantic.query.domain.ParserService;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
@Component
public class QueryReqConverter {

    @Autowired
    private DomainService domainService;

    @Autowired
    private ParserService parserService;

    /**
     * Converts a raw-SQL query request into a physical SQL parse result.
     * <p>
     * Fields referenced by the SQL are classified against the first domain schema:
     * matches of metric biz-names become the metric table's metrics, matches of
     * dimension biz-names (plus any internal system columns mentioned in the SQL
     * text) become its dimensions.
     *
     * @param databaseReq   the raw SQL request (sql, domainId, ...)
     * @param domainSchemas schemas to resolve fields against; only the first entry is used
     * @return the parsed physical SQL, or an empty response when no schema is available
     * @throws Exception if SQL parsing or physical SQL generation fails
     */
    public SqlParserResp convert(QuerySqlReq databaseReq, List<DomainSchemaResp> domainSchemas) throws Exception {
        if (CollectionUtils.isEmpty(domainSchemas)) {
            // Check moved before SQL parsing: without a schema there is nothing
            // to resolve against, so avoid the (possibly failing) parse entirely.
            return new SqlParserResp();
        }
        String sql = databaseReq.getSql();
        SqlParserInfo sqlParseInfo = SqlParseUtils.getSqlParseInfo(sql);
        List<String> allFields = sqlParseInfo.getAllFields();

        DomainSchemaResp schema = domainSchemas.get(0);
        // Known names are compared case-insensitively via lower-cased sets.
        Set<String> dimensions = schema.getDimensions().stream()
                .map(entry -> entry.getBizName().toLowerCase())
                .collect(Collectors.toSet());
        dimensions.addAll(QueryStructUtils.internalCols);
        Set<String> metrics = schema.getMetrics().stream()
                .map(entry -> entry.getBizName().toLowerCase())
                .collect(Collectors.toSet());

        MetricTable metricTable = new MetricTable();
        metricTable.setMetrics(allFields.stream()
                .filter(entry -> metrics.contains(entry.toLowerCase()))
                .map(String::toLowerCase)
                .collect(Collectors.toList()));
        Set<String> usedDimensions = allFields.stream()
                .filter(entry -> dimensions.contains(entry.toLowerCase()))
                .map(String::toLowerCase)
                .collect(Collectors.toSet());
        // Internal system columns may appear only in clauses the field extractor
        // misses, so additionally scan the raw SQL text for them.
        for (String internalCol : QueryStructUtils.internalCols) {
            if (sql.contains(internalCol)) {
                usedDimensions.add(internalCol);
            }
        }
        metricTable.setDimensions(new ArrayList<>(usedDimensions));
        metricTable.setAlias(sqlParseInfo.getTableName().toLowerCase());
        List<MetricTable> tables = new ArrayList<>();
        tables.add(metricTable);

        ParseSqlReq result = new ParseSqlReq();
        BeanUtils.copyProperties(databaseReq, result);
        result.setRootPath(domainService.getDomainFullPath(databaseReq.getDomainId()));
        result.setTables(tables);
        return parserService.physicalSql(result);
    }
}

View File

@@ -42,6 +42,8 @@ import org.springframework.util.CollectionUtils;
@Component
public class QueryStructUtils {
public static Set<String> internalCols = new HashSet<>(
Arrays.asList("dayno", "plat_sys_var", "sys_imp_date", "sys_imp_week", "sys_imp_month"));
private final DatabaseService databaseService;
private final QueryUtils queryUtils;
private final ParserService parserService;
@@ -50,16 +52,12 @@ public class QueryStructUtils {
private final DimensionService dimensionService;
private final MetricService metricService;
private final DatasourceService datasourceService;
private final com.tencent.supersonic.domain.semantic.query.domain.utils.DateUtils dateUtils;
private final DateUtils dateUtils;
private final SqlFilterUtils sqlFilterUtils;
private final CacheUtils cacheUtils;
@Value("${query.cache.enable:true}")
private Boolean cacheEnable;
Set<String> internalCols = new HashSet<>(
Arrays.asList("dayno", "plat_sys_var", "sys_imp_date", "sys_imp_week", "sys_imp_month"));
public QueryStructUtils(DatabaseService databaseService,
QueryUtils queryUtils,
ParserService parserService,
@@ -68,7 +66,7 @@ public class QueryStructUtils {
DimensionService dimensionService,
MetricService metricService,
DatasourceService datasourceService,
com.tencent.supersonic.domain.semantic.query.domain.utils.DateUtils dateUtils,
DateUtils dateUtils,
SqlFilterUtils sqlFilterUtils,
CacheUtils cacheUtils) {
this.databaseService = databaseService;
@@ -118,7 +116,10 @@ public class QueryStructUtils {
queryUtils.checkSqlParse(sqlParser);
log.info("sqlParser:{}", sqlParser);
queryUtils.handleDetail(queryStructCmd, sqlParser);
// todo tmp delete
//queryUtils.handleDetail(queryStructCmd, sqlParser);
queryUtils.handleNoMetric(queryStructCmd, sqlParser);
QueryResultWithSchemaResp queryResultWithColumns = databaseService.queryWithColumns(sqlParser);
queryUtils.fillItemNameInfo(queryResultWithColumns, queryStructCmd.getDomainId());

View File

@@ -42,6 +42,23 @@ import org.springframework.util.CollectionUtils;
public class QueryUtils {
private final Set<Pattern> patterns = new HashSet<>();
private final MetricService metricService;
private final DimensionService dimensionService;
private final ParserCommandConverter parserCommandConverter;
public QueryUtils(MetricService metricService,
DimensionService dimensionService,
@Lazy ParserCommandConverter parserCommandConverter) {
this.metricService = metricService;
this.dimensionService = dimensionService;
this.parserCommandConverter = parserCommandConverter;
}
// Registers every built-in time dimension (from TimeDimensionEnum) into the
// name and type lookup maps, mapping each to the display name "date" and the
// column type "DATE" so system time columns resolve like regular dimensions.
private static void addSysTimeDimension(Map<String, String> namePair, Map<String, String> nameTypePair) {
    for (TimeDimensionEnum timeDimensionEnum : TimeDimensionEnum.values()) {
        namePair.put(timeDimensionEnum.getName(), "date");
        nameTypePair.put(timeDimensionEnum.getName(), "DATE");
    }
}
@PostConstruct
public void fillPattern() {
@@ -52,19 +69,6 @@ public class QueryUtils {
}
}
private final MetricService metricService;
private final DimensionService dimensionService;
private final ParserCommandConverter parserCommandConverter;
public QueryUtils(MetricService metricService,
DimensionService dimensionService,
@Lazy ParserCommandConverter parserCommandConverter) {
this.metricService = metricService;
this.dimensionService = dimensionService;
this.parserCommandConverter = parserCommandConverter;
}
public void checkSqlParse(SqlParserResp sqlParser) {
if (Strings.isNullOrEmpty(sqlParser.getSql()) || Strings.isNullOrEmpty(sqlParser.getSourceId())) {
throw new RuntimeException("parse Exception: " + sqlParser.getErrMsg());
@@ -76,6 +80,23 @@ public class QueryUtils {
queryStructCmd.getMetrics());
}
/**
 * For detail (non-aggregated) queries that request no metrics, rewrites the
 * generated SQL to project only the grouped dimension columns.
 *
 * @param queryStructCmd the original struct query
 * @param sqlParser      parse result whose SQL may be rewritten in place
 * @return the (possibly rewritten) parse result
 * @throws RuntimeException if the generated SQL is empty
 */
public SqlParserResp handleNoMetric(QueryStructReq queryStructCmd, SqlParserResp sqlParser) {
    String sqlRaw = sqlParser.getSql().trim();
    if (Strings.isNullOrEmpty(sqlRaw)) {
        throw new RuntimeException("sql is empty or null");
    }
    log.info("before handleNoMetric, sql:{}", sqlRaw);
    // Guard on non-empty groups: without it an empty group list would generate
    // the invalid statement "select  from ( ... ) src_no_metric".
    if (isDetailQuery(queryStructCmd)
            && queryStructCmd.getMetrics().isEmpty()
            && !queryStructCmd.getGroups().isEmpty()) {
        String sql = String.format("select %s from ( %s ) src_no_metric",
                String.join(",", queryStructCmd.getGroups()), sqlRaw);
        sqlParser.setSql(sql);
    }
    log.info("after handleNoMetric, sql:{}", sqlParser.getSql());
    return sqlParser;
}
public SqlParserResp handleDetail(QueryStructReq queryStructCmd, SqlParserResp sqlParser) {
String sqlRaw = sqlParser.getSql().trim();
if (Strings.isNullOrEmpty(sqlRaw)) {
@@ -223,11 +244,4 @@ public class QueryUtils {
}
return map;
}
private static void addSysTimeDimension(Map<String, String> namePair, Map<String, String> nameTypePair) {
for (TimeDimensionEnum timeDimensionEnum : TimeDimensionEnum.values()) {
namePair.put(timeDimensionEnum.getName(), "date");
nameTypePair.put(timeDimensionEnum.getName(), "DATE");
}
}
}

View File

@@ -13,13 +13,6 @@ import org.springframework.util.CollectionUtils;
@Slf4j
public class SqlGenerateUtils {
public String getLimit(QueryStructReq queryStructCmd) {
if (queryStructCmd.getLimit() > 0) {
return " limit " + String.valueOf(queryStructCmd.getLimit());
}
return "";
}
public static String getUnionSelect(QueryStructReq queryStructCmd) {
StringBuilder sb = new StringBuilder();
int locate = 0;
@@ -41,6 +34,12 @@ public class SqlGenerateUtils {
return selectSql;
}
/**
 * Renders the LIMIT clause for the query.
 *
 * @param queryStructCmd the struct query carrying the requested row limit
 * @return {@code " limit N"} when a positive limit was requested, otherwise ""
 */
public String getLimit(QueryStructReq queryStructCmd) {
    if (queryStructCmd.getLimit() > 0) {
        // String concatenation already converts the number; String.valueOf was redundant.
        return " limit " + queryStructCmd.getLimit();
    }
    return "";
}
public String getSelect(QueryStructReq queryStructCmd) {
String aggStr = queryStructCmd.getAggregators().stream().map(this::getSelectField)

View File

@@ -22,7 +22,7 @@ public class SqlParserUtils {
/**
 * Builds the physical SQL parse result for a struct query, bypassing any cache.
 *
 * @param queryStructCmd the struct query to convert
 * @return the physical SQL parse result
 * @throws Exception if SQL generation fails
 */
public SqlParserResp getSqlParserWithoutCache(QueryStructReq queryStructCmd) throws Exception {
    log.info("stat getSqlParser without cache");
    // NOTE(review): the active call below is duplicated by the commented-out
    // line that follows — confirm whether buildJoinPrefix should still run here
    // or whether the commented (disabled) state was the intended one.
    multiSourceJoinUtils.buildJoinPrefix(queryStructCmd);
    //multiSourceJoinUtils.buildJoinPrefix(queryStructCmd);
    SqlParserResp sqlParser = parserCommandConverter.getSqlParser(queryStructCmd);
    return sqlParser;
}

View File

@@ -25,12 +25,11 @@ import org.springframework.stereotype.Component;
@Slf4j
public class StatUtils {
private static final TransmittableThreadLocal<QueryStat> STATS = new TransmittableThreadLocal<>();
private final StatRepository statRepository;
private final SqlFilterUtils sqlFilterUtils;
private final ObjectMapper objectMapper = new ObjectMapper();
private static final TransmittableThreadLocal<QueryStat> STATS = new TransmittableThreadLocal<>();
public StatUtils(StatRepository statRepository,
SqlFilterUtils sqlFilterUtils) {

View File

@@ -2,15 +2,15 @@ package com.tencent.supersonic.semantic.query.rest;
import com.tencent.supersonic.auth.api.authentication.pojo.User;
import com.tencent.supersonic.auth.api.authentication.utils.UserHolder;
import com.tencent.supersonic.semantic.api.query.request.ItemUseReq;
import com.tencent.supersonic.semantic.api.query.request.QueryMultiStructReq;
import com.tencent.supersonic.semantic.api.query.request.QuerySqlReq;
import com.tencent.supersonic.semantic.api.query.request.QueryStructReq;
import com.tencent.supersonic.semantic.api.core.response.SqlParserResp;
import com.tencent.supersonic.semantic.api.query.request.*;
import com.tencent.supersonic.semantic.api.query.response.ItemUseResp;
import com.tencent.supersonic.semantic.query.domain.ParserService;
import com.tencent.supersonic.semantic.query.domain.QueryService;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
@@ -20,17 +20,25 @@ import org.springframework.web.bind.annotation.RestController;
@RestController
@RequestMapping("/api/semantic/query")
@Slf4j
public class QueryController {
@Autowired
private QueryService queryService;
@Autowired
private ParserService parserService;
@PostMapping("/sql")
public Object queryBySql(@RequestBody QuerySqlReq querySqlReq) throws Exception {
return queryService.queryBySql(querySqlReq);
public Object queryBySql(@RequestBody QuerySqlReq querySqlReq,
HttpServletRequest request,
HttpServletResponse response) throws Exception {
User user = UserHolder.findUser(request, response);
Object queryBySql = queryService.queryBySql(querySqlReq, user);
log.info("queryBySql:{},queryBySql");
return queryBySql;
}
@PostMapping("/struct")
public Object queryByStruct(@RequestBody QueryStructReq queryStructReq,
HttpServletRequest request,
@@ -39,6 +47,14 @@ public class QueryController {
return queryService.queryByStruct(queryStructReq, user, request);
}
/**
 * Parses a struct query into physical SQL without executing it.
 *
 * @param parseSqlReq the parse request payload
 * @param request     HTTP request used to resolve the calling user
 * @param response    HTTP response used during user resolution
 * @return the physical SQL parse result
 * @throws Exception if parsing fails
 */
@PostMapping("/struct/parse")
public SqlParserResp parseByStruct(@RequestBody ParseSqlReq parseSqlReq,
        HttpServletRequest request,
        HttpServletResponse response) throws Exception {
    // The unused local binding was dropped; the lookup is kept for its
    // session/auth side effects. TODO: pass the user through once
    // ParserService supports per-user authorization.
    UserHolder.findUser(request, response);
    return parserService.physicalSql(parseSqlReq);
}
/**
* queryByMultiStruct
*/
@@ -61,5 +77,4 @@ public class QueryController {
return queryService.getStatInfo(itemUseReq);
}
}

View File

@@ -32,8 +32,8 @@ public class SchemaController {
@PostMapping
public List<DomainSchemaResp> fetchDomainSchema(@RequestBody DomainSchemaFilterReq filter,
HttpServletRequest request,
HttpServletResponse response) {
HttpServletRequest request,
HttpServletResponse response) {
User user = UserHolder.findUser(request, response);
return schemaService.fetchDomainSchema(filter, user);
}
@@ -45,23 +45,23 @@ public class SchemaController {
*/
@GetMapping("/domain/list")
public List<DomainResp> getDomainList(HttpServletRequest request,
HttpServletResponse response) {
HttpServletResponse response) {
User user = UserHolder.findUser(request, response);
return schemaService.getDomainListForAdmin(user);
}
@PostMapping("/dimension/page")
public PageInfo<DimensionResp> queryDimension(@RequestBody PageDimensionReq pageDimensionCmd,
HttpServletRequest request,
HttpServletResponse response) {
HttpServletRequest request,
HttpServletResponse response) {
User user = UserHolder.findUser(request, response);
return schemaService.queryDimension(pageDimensionCmd, user);
}
@PostMapping("/metric/page")
public PageInfo<MetricResp> queryMetric(@RequestBody PageMetricReq pageMetricCmd,
HttpServletRequest request,
HttpServletResponse response) {
HttpServletRequest request,
HttpServletResponse response) {
User user = UserHolder.findUser(request, response);
return schemaService.queryMetric(pageMetricCmd, user);
}

View File

@@ -47,6 +47,66 @@ class SemanticParserServiceTest {
return sqlParser;
}
// Test fixture: registers a "user_department" datasource on the given schema,
// complete with an internal count measure, day/week time dimensions, and a
// user_name identifier, plus a separate categorical "department" dimension.
private static void addDepartment(SemanticSchema semanticSchema) {
    DatasourceYamlTpl datasource = new DatasourceYamlTpl();
    datasource.setName("user_department");
    datasource.setSourceId(1L);
    datasource.setSqlQuery("SELECT imp_date,user_name,department FROM s2_user_department");
    // Internal count-of-rows measure (expr "1", agg count) auto-exposed as a metric.
    MeasureYamlTpl measure = new MeasureYamlTpl();
    measure.setAgg("count");
    measure.setName("user_department_internal_cnt");
    measure.setCreateMetric("true");
    measure.setExpr("1");
    List<MeasureYamlTpl> measures = new ArrayList<>();
    measures.add(measure);
    datasource.setMeasures(measures);
    // Primary day-granularity time dimension backed by the raw imp_date column.
    DimensionYamlTpl dimension = new DimensionYamlTpl();
    dimension.setName("sys_imp_date");
    dimension.setExpr("imp_date");
    dimension.setType("time");
    DimensionTimeTypeParamsTpl dimensionTimeTypeParams = new DimensionTimeTypeParamsTpl();
    dimensionTimeTypeParams.setIsPrimary("true");
    dimensionTimeTypeParams.setTimeGranularity("day");
    dimension.setTypeParams(dimensionTimeTypeParams);
    List<DimensionYamlTpl> dimensions = new ArrayList<>();
    dimensions.add(dimension);
    // Week-granularity time dimension derived by snapping imp_date to its Monday.
    DimensionYamlTpl dimension3 = new DimensionYamlTpl();
    dimension3.setName("sys_imp_week");
    dimension3.setExpr("to_monday(from_unixtime(unix_timestamp(imp_date), 'yyyy-MM-dd'))");
    dimension3.setType("time");
    DimensionTimeTypeParamsTpl dimensionTimeTypeParams3 = new DimensionTimeTypeParamsTpl();
    dimensionTimeTypeParams3.setIsPrimary("true");
    dimensionTimeTypeParams3.setTimeGranularity("week");
    dimension3.setTypeParams(dimensionTimeTypeParams3);
    dimensions.add(dimension3);
    datasource.setDimensions(dimensions);
    // user_name is the primary identifier used to join this source with others.
    List<IdentifyYamlTpl> identifies = new ArrayList<>();
    IdentifyYamlTpl identify = new IdentifyYamlTpl();
    identify.setName("user_name");
    identify.setType("primary");
    identifies.add(identify);
    datasource.setIdentifiers(identifies);
    semanticSchema.getDatasource().put("user_department", SemanticSchemaManagerImpl.getDatasource(datasource));
    // The categorical "department" dimension is registered separately on the schema.
    DimensionYamlTpl dimension1 = new DimensionYamlTpl();
    dimension1.setExpr("department");
    dimension1.setName("department");
    dimension1.setType("categorical");
    List<DimensionYamlTpl> dimensionYamlTpls = new ArrayList<>();
    dimensionYamlTpls.add(dimension1);
    semanticSchema.getDimension()
            .put("user_department", SemanticSchemaManagerImpl.getDimensions(dimensionYamlTpls));
}
//@Test
public void test() throws Exception {
@@ -189,65 +249,4 @@ class SemanticParserServiceTest {
}
private static void addDepartment(SemanticSchema semanticSchema) {
DatasourceYamlTpl datasource = new DatasourceYamlTpl();
datasource.setName("user_department");
datasource.setSourceId(1L);
datasource.setSqlQuery("SELECT imp_date,user_name,department FROM s2_user_department");
MeasureYamlTpl measure = new MeasureYamlTpl();
measure.setAgg("count");
measure.setName("user_department_internal_cnt");
measure.setCreateMetric("true");
measure.setExpr("1");
List<MeasureYamlTpl> measures = new ArrayList<>();
measures.add(measure);
datasource.setMeasures(measures);
DimensionYamlTpl dimension = new DimensionYamlTpl();
dimension.setName("sys_imp_date");
dimension.setExpr("imp_date");
dimension.setType("time");
DimensionTimeTypeParamsTpl dimensionTimeTypeParams = new DimensionTimeTypeParamsTpl();
dimensionTimeTypeParams.setIsPrimary("true");
dimensionTimeTypeParams.setTimeGranularity("day");
dimension.setTypeParams(dimensionTimeTypeParams);
List<DimensionYamlTpl> dimensions = new ArrayList<>();
dimensions.add(dimension);
DimensionYamlTpl dimension3 = new DimensionYamlTpl();
dimension3.setName("sys_imp_week");
dimension3.setExpr("to_monday(from_unixtime(unix_timestamp(imp_date), 'yyyy-MM-dd'))");
dimension3.setType("time");
DimensionTimeTypeParamsTpl dimensionTimeTypeParams3 = new DimensionTimeTypeParamsTpl();
dimensionTimeTypeParams3.setIsPrimary("true");
dimensionTimeTypeParams3.setTimeGranularity("week");
dimension3.setTypeParams(dimensionTimeTypeParams3);
dimensions.add(dimension3);
datasource.setDimensions(dimensions);
List<IdentifyYamlTpl> identifies = new ArrayList<>();
IdentifyYamlTpl identify = new IdentifyYamlTpl();
identify.setName("user_name");
identify.setType("primary");
identifies.add(identify);
datasource.setIdentifiers(identifies);
semanticSchema.getDatasource().put("user_department", SemanticSchemaManagerImpl.getDatasource(datasource));
DimensionYamlTpl dimension1 = new DimensionYamlTpl();
dimension1.setExpr("department");
dimension1.setName("department");
dimension1.setType("categorical");
List<DimensionYamlTpl> dimensionYamlTpls = new ArrayList<>();
dimensionYamlTpls.add(dimension1);
semanticSchema.getDimension()
.put("user_department", SemanticSchemaManagerImpl.getDimensions(dimensionYamlTpls));
}
}