[improvement][headless] Release a brand-new version of the Translator module.

Author: jerryjzhang
Date: 2025-01-05 00:00:18 +08:00
91 changed files with 1965 additions and 3158 deletions

View File

@@ -6,7 +6,6 @@ import com.tencent.supersonic.headless.core.pojo.Materialization;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteTable;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteTable.Builder;
import com.tencent.supersonic.headless.core.translator.parser.calcite.SchemaBuilder;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.TimeRange;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.adapter.enumerable.EnumerableRules;
import org.apache.calcite.config.CalciteConnectionConfigImpl;

View File

@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;
package com.tencent.supersonic.headless.core.executor;
import lombok.Builder;
import lombok.Data;

View File

@@ -2,26 +2,32 @@ package com.tencent.supersonic.headless.core.pojo;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Materialization;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
import lombok.Data;
import java.util.*;
import java.util.stream.Collectors;
/**
* An ontology comprises a group of data models that can be joined together in either a star schema
* or a snowflake schema.
*/
@Data
public class Ontology {
private List<Metric> metrics = new ArrayList<>();
private Map<String, DataModel> dataModelMap = new HashMap<>();
private Map<String, List<Dimension>> dimensionMap = new HashMap<>();
private List<Materialization> materializationList = new ArrayList<>();
private List<JoinRelation> joinRelations;
private DatabaseResp database;
private Map<String, ModelResp> modelMap = new HashMap<>();
private Map<String, List<MetricSchemaResp>> metricMap = new HashMap<>();
private Map<String, List<DimSchemaResp>> dimensionMap = new HashMap<>();
private List<JoinRelation> joinRelations;
public List<Dimension> getDimensions() {
public List<MetricSchemaResp> getMetrics() {
return metricMap.values().stream().flatMap(Collection::stream).collect(Collectors.toList());
}
public List<DimSchemaResp> getDimensions() {
return dimensionMap.values().stream().flatMap(Collection::stream)
.collect(Collectors.toList());
}
@@ -39,4 +45,5 @@ public class Ontology {
}
return null;
}
}
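
For illustration, a minimal, self-contained sketch of the flattening pattern used by getMetrics()/getDimensions() above. The model and metric names are hypothetical, and the map values are stubbed as plain strings where the real maps hold MetricSchemaResp/DimSchemaResp:

import java.util.*;
import java.util.stream.Collectors;

public class OntologyFlattenDemo {
    public static void main(String[] args) {
        // metricMap keyed by model bizName, as in Ontology (values stubbed as strings)
        Map<String, List<String>> metricMap = new HashMap<>();
        metricMap.put("order_model", List.of("order_cnt", "order_amount"));
        metricMap.put("user_model", List.of("active_users"));

        // Same flatMap pattern as Ontology#getMetrics
        List<String> metrics = metricMap.values().stream()
                .flatMap(Collection::stream).collect(Collectors.toList());
        System.out.println(metrics); // e.g. [order_cnt, order_amount, active_users]
    }
}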

View File

@@ -1,20 +1,60 @@
package com.tencent.supersonic.headless.core.pojo;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
import lombok.Data;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
/**
* An ontology query comprises the metrics/dimensions that are relevant to the semantic query. Note
* that metrics/dimensions in the ontology query must be a subset of the ontology.
*/
@Data
public class OntologyQuery {
private Set<String> metrics = Sets.newHashSet();
private Set<String> dimensions = Sets.newHashSet();
private String where;
private Map<String, ModelResp> modelMap = Maps.newHashMap();
private Map<String, Set<MetricSchemaResp>> metricMap = Maps.newHashMap();
private Map<String, Set<DimSchemaResp>> dimensionMap = Maps.newHashMap();
private Set<String> fields = Sets.newHashSet();
private Long limit;
private List<ColumnOrder> order;
private boolean nativeQuery = true;
private AggOption aggOption = AggOption.NATIVE;
public Set<ModelResp> getModels() {
return modelMap.values().stream().collect(Collectors.toSet());
}
public Set<DimSchemaResp> getDimensions() {
Set<DimSchemaResp> dimensions = Sets.newHashSet();
dimensionMap.entrySet().forEach(entry -> {
dimensions.addAll(entry.getValue());
});
return dimensions;
}
public Set<MetricSchemaResp> getMetrics() {
Set<MetricSchemaResp> metrics = Sets.newHashSet();
metricMap.entrySet().forEach(entry -> {
metrics.addAll(entry.getValue());
});
return metrics;
}
public Set<MetricSchemaResp> getMetricsByModel(String modelName) {
return metricMap.get(modelName);
}
public Set<DimSchemaResp> getDimensionsByModel(String modelName) {
return dimensionMap.get(modelName);
}
}
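
One behavioral detail worth noting: getMetricsByModel/getDimensionsByModel delegate straight to Map#get, so an unknown model name yields null rather than an empty set. A tiny sketch of that pitfall, with the map stubbed using plain strings:

import java.util.*;

public class OntologyQueryLookupDemo {
    public static void main(String[] args) {
        // metricMap keyed by model bizName, as in OntologyQuery
        Map<String, Set<String>> metricMap = new HashMap<>();
        metricMap.put("order_model", new HashSet<>(List.of("order_cnt")));

        System.out.println(metricMap.get("order_model"));   // [order_cnt]
        System.out.println(metricMap.get("unknown_model")); // null, not an empty set
    }
}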

View File

@@ -2,6 +2,7 @@ package com.tencent.supersonic.headless.core.translator;
import com.tencent.supersonic.common.calcite.SqlMergeWithUtils;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQuery;
import com.tencent.supersonic.headless.core.translator.optimizer.QueryOptimizer;
@@ -34,11 +35,6 @@ public class DefaultSemanticTranslator implements SemanticTranslator {
}
}
}
if (!queryStatement.isOk()) {
throw new Exception(String.format("parse ontology table [%s] error [%s]",
queryStatement.getSqlQuery().getTable(), queryStatement.getErrMsg()));
}
mergeOntologyQuery(queryStatement);
if (StringUtils.isNotBlank(queryStatement.getSqlQuery().getSimplifiedSql())) {
@@ -62,6 +58,14 @@ public class DefaultSemanticTranslator implements SemanticTranslator {
}
private void mergeOntologyQuery(QueryStatement queryStatement) throws Exception {
OntologyQuery ontologyQuery = queryStatement.getOntologyQuery();
log.info("parse with ontology: [{}]", ontologyQuery);
if (!queryStatement.isOk()) {
throw new Exception(String.format("parse ontology table [%s] error [%s]",
queryStatement.getSqlQuery().getTable(), queryStatement.getErrMsg()));
}
SqlQuery sqlQuery = queryStatement.getSqlQuery();
String ontologyQuerySql = sqlQuery.getSql();
String ontologyInnerTable = sqlQuery.getTable();
@@ -69,30 +73,29 @@ public class DefaultSemanticTranslator implements SemanticTranslator {
List<Pair<String, String>> tables = new ArrayList<>();
tables.add(Pair.of(ontologyInnerTable, ontologyInnerSql));
String finalSql = null;
if (sqlQuery.isSupportWith()) {
EngineType engineType = queryStatement.getOntology().getDatabaseType();
if (!SqlMergeWithUtils.hasWith(engineType, ontologyQuerySql)) {
String withSql = "with " + tables.stream()
finalSql = "with " + tables.stream()
.map(t -> String.format("%s as (%s)", t.getLeft(), t.getRight()))
.collect(Collectors.joining(",")) + "\n" + ontologyQuerySql;
queryStatement.setSql(withSql);
} else {
List<String> withTableList =
tables.stream().map(Pair::getLeft).collect(Collectors.toList());
List<String> withSqlList =
tables.stream().map(Pair::getRight).collect(Collectors.toList());
String mergeSql = SqlMergeWithUtils.mergeWith(engineType, ontologyQuerySql,
withSqlList, withTableList);
queryStatement.setSql(mergeSql);
finalSql = SqlMergeWithUtils.mergeWith(engineType, ontologyQuerySql, withSqlList,
withTableList);
}
} else {
for (Pair<String, String> tb : tables) {
ontologyQuerySql = StringUtils.replace(ontologyQuerySql, tb.getLeft(),
finalSql = StringUtils.replace(ontologyQuerySql, tb.getLeft(),
"(" + tb.getRight() + ") " + (sqlQuery.isWithAlias() ? "" : tb.getLeft()),
-1);
}
queryStatement.setSql(ontologyQuerySql);
}
queryStatement.setSql(finalSql);
}
}
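
The two merge strategies above can be illustrated with plain strings. Table and column names here are made up, and the real code rewrites parsed SQL rather than concatenating strings:

public class MergeSketch {
    public static void main(String[] args) {
        String innerTable = "t_1";
        String innerSql = "select dept, pv from s2_ds_1";
        String outerSql = "select dept, sum(pv) from t_1 group by dept";

        // Engines with CTE support: prepend a WITH clause
        String withSql = "with " + innerTable + " as (" + innerSql + ")\n" + outerSql;

        // Fallback: inline the inner query as a parenthesized sub-select
        String inlineSql = outerSql.replace(innerTable,
                "(" + innerSql + ") " + innerTable);

        System.out.println(withSql);
        System.out.println(inlineSql);
    }
}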

View File

@@ -3,8 +3,8 @@ package com.tencent.supersonic.headless.core.translator;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
/**
* SemanticTranslator converts semantic query statement into SQL statement that can be executed
* against physical data models.
* A semantic translator converts semantic query into SQL statement that can be executed against
* physical data models.
*/
public interface SemanticTranslator {

View File

@@ -26,11 +26,8 @@ public class DbDialectOptimizer implements QueryOptimizer {
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchema();
DatabaseResp database = semanticSchemaResp.getDatabaseResp();
String sql = queryStatement.getSql();
if (Objects.isNull(database) || Objects.isNull(database.getType())) {
return;
}
String type = database.getType();
DbAdaptor engineAdaptor = DbAdaptorFactory.getEngineAdaptor(type.toLowerCase());
DbAdaptor engineAdaptor =
DbAdaptorFactory.getEngineAdaptor(database.getType().toLowerCase());
if (Objects.nonNull(engineAdaptor)) {
String adaptedSql = engineAdaptor.rewriteSql(sql);
queryStatement.setSql(adaptedSql);

View File

@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;
package com.tencent.supersonic.headless.core.translator.parser;
public class Constants {

View File

@@ -3,8 +3,8 @@ package com.tencent.supersonic.headless.core.translator.parser;
import com.google.common.collect.Lists;
import com.tencent.supersonic.common.jsqlparser.SqlAddHelper;
import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Dimension;
import lombok.extern.slf4j.Slf4j;
import net.sf.jsqlparser.expression.Expression;
import net.sf.jsqlparser.expression.StringValue;
@@ -20,6 +20,10 @@ import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
/**
* This parser appends default dimension values (if configured) to the WHERE clause.
*/
@Slf4j
@Component("DefaultDimValueParser")
public class DefaultDimValueParser implements QueryParser {
@@ -27,12 +31,13 @@ public class DefaultDimValueParser implements QueryParser {
@Override
public boolean accept(QueryStatement queryStatement) {
return Objects.nonNull(queryStatement.getSqlQuery())
&& StringUtils.isNotBlank(queryStatement.getSqlQuery().getSql());
&& StringUtils.isNotBlank(queryStatement.getSqlQuery().getSql())
&& !CollectionUtils.isEmpty(queryStatement.getOntology().getDimensions());
}
@Override
public void parse(QueryStatement queryStatement) {
List<Dimension> dimensions = queryStatement.getOntology().getDimensions().stream()
List<DimSchemaResp> dimensions = queryStatement.getOntology().getDimensions().stream()
.filter(dimension -> !CollectionUtils.isEmpty(dimension.getDefaultValues()))
.collect(Collectors.toList());
if (CollectionUtils.isEmpty(dimensions)) {
@@ -45,7 +50,7 @@ public class DefaultDimValueParser implements QueryParser {
return;
}
List<Expression> expressions = Lists.newArrayList();
for (Dimension dimension : dimensions) {
for (DimSchemaResp dimension : dimensions) {
ExpressionList expressionList = new ExpressionList();
List<Expression> exprs = new ArrayList<>();
dimension.getDefaultValues().forEach(value -> exprs.add(new StringValue(value)));
@@ -55,7 +60,7 @@ public class DefaultDimValueParser implements QueryParser {
inExpression.setRightExpression(expressionList);
expressions.add(inExpression);
if (Objects.nonNull(queryStatement.getSqlQuery().getTable())) {
queryStatement.getOntologyQuery().getDimensions().add(dimension.getBizName());
queryStatement.getOntologyQuery().getDimensions().add(dimension);
}
}
sql = SqlAddHelper.addWhere(sql, expressions);
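
The net effect on the SQL can be sketched as follows. The dimension name and default values are hypothetical, and the real parser builds an InExpression with jsqlparser instead of concatenating strings:

import java.util.*;
import java.util.stream.Collectors;

public class DefaultDimValueSketch {
    public static void main(String[] args) {
        List<String> defaultValues = List.of("web", "app");

        // Build the IN predicate appended to the where clause
        String predicate = "channel in (" + defaultValues.stream()
                .map(v -> "'" + v + "'").collect(Collectors.joining(",")) + ")";

        String before = "select channel, sum(pv) from t_1 group by channel";
        String after = "select channel, sum(pv) from t_1 where " + predicate
                + " group by channel";
        System.out.println(before);
        System.out.println(after);
    }
}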

View File

@@ -0,0 +1,67 @@
package com.tencent.supersonic.headless.core.translator.parser;
import com.tencent.supersonic.common.jsqlparser.SqlReplaceHelper;
import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQuery;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
/**
* This parser replaces dimension bizNames in the S2SQL with their calculation expressions (if
* configured).
*/
@Component("DimExpressionParser")
@Slf4j
public class DimExpressionParser implements QueryParser {
@Override
public boolean accept(QueryStatement queryStatement) {
return Objects.nonNull(queryStatement.getSqlQuery())
&& Objects.nonNull(queryStatement.getOntologyQuery())
&& StringUtils.isNotBlank(queryStatement.getSqlQuery().getSql())
&& !CollectionUtils.isEmpty(queryStatement.getOntologyQuery().getDimensions());
}
@Override
public void parse(QueryStatement queryStatement) throws Exception {
SemanticSchemaResp semanticSchema = queryStatement.getSemanticSchema();
SqlQuery sqlQuery = queryStatement.getSqlQuery();
OntologyQuery ontologyQuery = queryStatement.getOntologyQuery();
Map<String, String> bizName2Expr = getDimensionExpressions(semanticSchema, ontologyQuery);
if (!CollectionUtils.isEmpty(bizName2Expr)) {
String sql = SqlReplaceHelper.replaceSqlByExpression(sqlQuery.getSql(), bizName2Expr);
sqlQuery.setSql(sql);
}
}
private Map<String, String> getDimensionExpressions(SemanticSchemaResp semanticSchema,
OntologyQuery ontologyQuery) {
Set<DimSchemaResp> queryDimensions = ontologyQuery.getDimensions();
Set<String> queryFields = ontologyQuery.getFields();
log.debug("begin to generateDerivedMetric {} [{}]", queryDimensions);
Map<String, String> dim2Expr = new HashMap<>();
for (DimSchemaResp queryDim : queryDimensions) {
queryDim.getFields().addAll(SqlSelectHelper.getFieldsFromExpr(queryDim.getExpr()));
queryFields.addAll(queryDim.getFields());
if (!queryDim.getBizName().equals(queryDim.getExpr())) {
dim2Expr.put(queryDim.getBizName(), queryDim.getExpr());
}
}
return dim2Expr;
}
}
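
A sketch of the replacement this parser performs. The dimension and expression are invented; SqlReplaceHelper.replaceSqlByExpression does the rewrite via a SQL parser, not naive string replacement:

import java.util.Map;

public class DimExprSketch {
    public static void main(String[] args) {
        // bizName -> configured calculation expression
        Map<String, String> bizName2Expr = Map.of("user_age", "year(now()) - birth_year");

        String sql = "select user_age, count(1) from t_1 group by user_age";
        for (Map.Entry<String, String> e : bizName2Expr.entrySet()) {
            sql = sql.replace(e.getKey(), e.getValue());
        }
        // select year(now()) - birth_year, count(1) from t_1 group by year(now()) - birth_year
        System.out.println(sql);
    }
}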

View File

@@ -0,0 +1,130 @@
package com.tencent.supersonic.headless.core.translator.parser;
import com.tencent.supersonic.common.jsqlparser.SqlReplaceHelper;
import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.headless.api.pojo.Measure;
import com.tencent.supersonic.headless.api.pojo.enums.MetricDefineType;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQuery;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.util.*;
/**
* This parser replaces metric bizNames in the S2SQL with their calculation expressions (if
* configured).
*/
@Component("MetricExpressionParser")
@Slf4j
public class MetricExpressionParser implements QueryParser {
@Override
public boolean accept(QueryStatement queryStatement) {
return Objects.nonNull(queryStatement.getSqlQuery())
&& Objects.nonNull(queryStatement.getOntologyQuery())
&& StringUtils.isNotBlank(queryStatement.getSqlQuery().getSql())
&& !CollectionUtils.isEmpty(queryStatement.getOntologyQuery().getMetrics());
}
@Override
public void parse(QueryStatement queryStatement) throws Exception {
SemanticSchemaResp semanticSchema = queryStatement.getSemanticSchema();
SqlQuery sqlQuery = queryStatement.getSqlQuery();
OntologyQuery ontologyQuery = queryStatement.getOntologyQuery();
Map<String, String> bizName2Expr = getMetricExpressions(semanticSchema, ontologyQuery);
if (!CollectionUtils.isEmpty(bizName2Expr)) {
String sql = SqlReplaceHelper.replaceSqlByExpression(sqlQuery.getSql(), bizName2Expr);
sqlQuery.setSql(sql);
}
}
private Map<String, String> getMetricExpressions(SemanticSchemaResp semanticSchema,
OntologyQuery ontologyQuery) {
List<MetricSchemaResp> allMetrics = semanticSchema.getMetrics();
Set<MetricSchemaResp> queryMetrics = ontologyQuery.getMetrics();
Set<String> queryFields = ontologyQuery.getFields();
log.debug("begin to generateDerivedMetric {} [{}]", queryMetrics);
Set<String> allFields = new HashSet<>();
Map<String, Measure> allMeasures = new HashMap<>();
semanticSchema.getModelResps().forEach(modelResp -> {
allFields.addAll(modelResp.getFieldList());
if (modelResp.getModelDetail().getMeasures() != null) {
modelResp.getModelDetail().getMeasures()
.forEach(measure -> allMeasures.put(measure.getBizName(), measure));
}
});
Map<String, String> visitedMetrics = new HashMap<>();
Map<String, String> metric2Expr = new HashMap<>();
for (MetricSchemaResp queryMetric : queryMetrics) {
String fieldExpr = buildFieldExpr(allMetrics, allMeasures, queryMetric.getExpr(),
queryMetric.getMetricDefineType(), visitedMetrics);
// add all fields referenced in the expression
queryMetric.getFields().addAll(SqlSelectHelper.getFieldsFromExpr(fieldExpr));
queryFields.addAll(queryMetric.getFields());
if (!queryMetric.getBizName().equals(fieldExpr)) {
metric2Expr.put(queryMetric.getBizName(), fieldExpr);
}
}
return metric2Expr;
}
private String buildFieldExpr(final List<MetricSchemaResp> metricResps,
final Map<String, Measure> allMeasures, final String metricExpr,
final MetricDefineType metricDefineType, Map<String, String> visitedMetric) {
Set<String> fields = SqlSelectHelper.getFieldsFromExpr(metricExpr);
if (!CollectionUtils.isEmpty(fields)) {
Map<String, String> replace = new HashMap<>();
for (String field : fields) {
switch (metricDefineType) {
case METRIC:
// if defineType=METRIC, field should be the bizName of its parent metric
Optional<MetricSchemaResp> metricItem = metricResps.stream()
.filter(m -> m.getBizName().equalsIgnoreCase(field)).findFirst();
if (metricItem.isPresent()) {
if (visitedMetric.containsKey(field)) {
replace.put(field, visitedMetric.get(field));
break;
}
replace.put(field,
buildFieldExpr(metricResps, allMeasures,
metricItem.get().getExpr(),
metricItem.get().getMetricDefineType(), visitedMetric));
visitedMetric.put(field, replace.get(field));
}
break;
case MEASURE:
// if defineType=MEASURE, field should be the bizName of its measure
if (allMeasures.containsKey(field)) {
Measure measure = allMeasures.get(field);
String expr = metricExpr;
if (Objects.nonNull(measure.getAgg())) {
expr = String.format("%s (%s)", measure.getAgg(), metricExpr);
}
replace.put(field, expr);
}
break;
case FIELD:
default:
break;
}
}
if (!CollectionUtils.isEmpty(replace)) {
String expr = SqlReplaceHelper.replaceExpression(metricExpr, replace);
log.debug("derived metric {}->{}", metricExpr, expr);
return expr;
}
}
return metricExpr;
}
}
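
A worked example of the recursive expansion in buildFieldExpr, under a hypothetical setup: metric ctr is METRIC-defined as click_cnt / pv_cnt, while click_cnt and pv_cnt are MEASURE-defined over measures click and pv with agg SUM. The "%s (%s)" format below mirrors the one used in the MEASURE branch above:

public class MetricExprSketch {
    public static void main(String[] args) {
        // MEASURE branch: wrap the expression in the measure's agg function
        String clickCnt = String.format("%s (%s)", "SUM", "click"); // SUM (click)
        String pvCnt = String.format("%s (%s)", "SUM", "pv");       // SUM (pv)

        // METRIC branch: substitute child metrics into the parent expression
        String ctr = clickCnt + " / " + pvCnt;
        System.out.println("ctr -> " + ctr); // ctr -> SUM (click) / SUM (pv)
    }
}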

View File

@@ -5,13 +5,8 @@ import com.tencent.supersonic.common.pojo.enums.AggOperatorEnum;
import com.tencent.supersonic.common.pojo.enums.DatePeriodEnum;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.common.util.DateModeUtils;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQuery;
import com.tencent.supersonic.headless.core.pojo.StructQuery;
import com.tencent.supersonic.headless.core.pojo.*;
import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
@@ -60,9 +55,8 @@ public class MetricRatioParser implements QueryParser {
@Override
public void parse(QueryStatement queryStatement) throws Exception {
DatabaseResp database = queryStatement.getOntology().getDatabase();
generateRatioSql(queryStatement, queryStatement.getOntology().getDatabaseType(),
database.getVersion());
Ontology ontology = queryStatement.getOntology();
generateRatioSql(queryStatement, ontology.getDatabaseType(), ontology.getDatabaseVersion());
}
/** Ratio */
@@ -89,8 +83,8 @@ public class MetricRatioParser implements QueryParser {
boolean isOver = isOverRatio(structQuery);
String sql = "";
SqlQuery dsParam = queryStatement.getSqlQuery();
dsParam.setTable(metricTableName);
SqlQuery sqlQuery = queryStatement.getSqlQuery();
sqlQuery.setTable(metricTableName);
switch (engineTypeEnum) {
case H2:
sql = new H2EngineSql().sql(structQuery, isOver, true, metricTableName);
@@ -99,19 +93,19 @@ public class MetricRatioParser implements QueryParser {
case DORIS:
case CLICKHOUSE:
if (!sqlGenerateUtils.isSupportWith(engineTypeEnum, version)) {
dsParam.setSupportWith(false);
sqlQuery.setSupportWith(false);
}
if (!engineTypeEnum.equals(EngineType.CLICKHOUSE)) {
sql = new MysqlEngineSql().sql(structQuery, isOver, dsParam.isSupportWith(),
sql = new MysqlEngineSql().sql(structQuery, isOver, sqlQuery.isSupportWith(),
metricTableName);
} else {
sql = new CkEngineSql().sql(structQuery, isOver, dsParam.isSupportWith(),
sql = new CkEngineSql().sql(structQuery, isOver, sqlQuery.isSupportWith(),
metricTableName);
}
break;
default:
}
dsParam.setSql(sql);
sqlQuery.setSql(sql);
}
public class H2EngineSql implements EngineSql {
@@ -346,15 +340,8 @@ public class MetricRatioParser implements QueryParser {
return CollectionUtils.isEmpty(groups) ? aggStr : String.join(",", groups) + "," + aggStr;
}
private String getGroupDimWithOutTime(StructQuery structQuery) {
String timeDim = getTimeDim(structQuery);
return structQuery.getGroups().stream().filter(f -> !f.equalsIgnoreCase(timeDim))
.collect(Collectors.joining(","));
}
private static String getTimeDim(StructQuery structQuery) {
DateModeUtils dateModeUtils = ContextUtils.getContext().getBean(DateModeUtils.class);
return dateModeUtils.getSysDateCol(structQuery.getDateInfo());
return structQuery.getDateInfo().getDateField();
}
private static String getLimit(StructQuery structQuery) {
@@ -380,13 +367,6 @@ public class MetricRatioParser implements QueryParser {
return sqlGenerateUtils.getSelectField(agg);
}
private String getGroupBy(StructQuery structQuery) {
if (CollectionUtils.isEmpty(structQuery.getGroups())) {
return "";
}
return "group by " + String.join(",", structQuery.getGroups());
}
private static String getOrderBy(StructQuery structQuery) {
return "order by " + getTimeDim(structQuery) + " desc";
}

View File

@@ -2,7 +2,6 @@ package com.tencent.supersonic.headless.core.translator.parser;
import com.tencent.supersonic.headless.core.pojo.Ontology;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.parser.calcite.RuntimeOptions;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.calcite.SqlBuilder;
import lombok.extern.slf4j.Slf4j;
@@ -10,7 +9,10 @@ import org.springframework.stereotype.Component;
import java.util.Objects;
/** the calcite parse implements */
/**
* This parser generates the inner SQL statement for the ontology query, which the parsed SQL
* query then selects from.
*/
@Component("OntologyQueryParser")
@Slf4j
public class OntologyQueryParser implements QueryParser {

View File

@@ -2,7 +2,9 @@ package com.tencent.supersonic.headless.core.translator.parser;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
/** A query parser generates physical SQL for the QueryStatement. */
/**
* A query parser generates physical SQL for the QueryStatement.
*/
public interface QueryParser {
boolean accept(QueryStatement queryStatement);
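
As an aside, a minimal no-op implementation of this contract. The class name is hypothetical; the accept/parse signatures follow the interface as shown in this diff:

import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import org.springframework.stereotype.Component;
import java.util.Objects;

@Component("NoopQueryParser")
public class NoopQueryParser implements QueryParser {

    @Override
    public boolean accept(QueryStatement queryStatement) {
        // opt in only when a SQL query has been attached
        return Objects.nonNull(queryStatement.getSqlQuery());
    }

    @Override
    public void parse(QueryStatement queryStatement) throws Exception {
        // real parsers rewrite queryStatement.getSqlQuery() here
    }
}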

View File

@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite;
package com.tencent.supersonic.headless.core.translator.parser;
import lombok.Builder;
import lombok.Data;

View File

@@ -1,18 +1,17 @@
package com.tencent.supersonic.headless.core.translator.parser;
import com.google.common.collect.Lists;
import com.tencent.supersonic.common.jsqlparser.SqlAsHelper;
import com.google.common.collect.Sets;
import com.tencent.supersonic.common.jsqlparser.SqlReplaceHelper;
import com.tencent.supersonic.common.jsqlparser.SqlSelectFunctionHelper;
import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.headless.api.pojo.Measure;
import com.tencent.supersonic.headless.api.pojo.SchemaItem;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.api.pojo.enums.MetricType;
import com.tencent.supersonic.headless.api.pojo.response.*;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.core.pojo.Ontology;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQuery;
@@ -21,12 +20,15 @@ import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* This parser rewrites the S2SQL, including conversion of metric/dimension names to bizNames, and
* builds the ontology query in preparation for generating the physical SQL.
*/
@Component("SqlQueryParser")
@Slf4j
public class SqlQueryParser implements QueryParser {
@@ -38,69 +40,46 @@ public class SqlQueryParser implements QueryParser {
@Override
public void parse(QueryStatement queryStatement) throws Exception {
// build ontologyQuery
SqlQuery sqlQuery = queryStatement.getSqlQuery();
List<String> queryFields = SqlSelectHelper.getAllSelectFields(sqlQuery.getSql());
Ontology ontology = queryStatement.getOntology();
OntologyQuery ontologyQuery = buildOntologyQuery(ontology, queryFields);
// check if there are fields not matched with any metric or dimension
if (queryFields.size() > ontologyQuery.getMetrics().size()
+ ontologyQuery.getDimensions().size()) {
queryStatement
.setErrMsg("There are fields in the SQL not matched with any semantic column.");
queryStatement.setStatus(1);
return;
}
queryStatement.setOntologyQuery(ontologyQuery);
AggOption sqlQueryAggOption = getAggOption(sqlQuery.getSql(), ontologyQuery.getMetrics());
ontologyQuery.setAggOption(sqlQueryAggOption);
convertNameToBizName(queryStatement);
rewriteOrderBy(queryStatement);
// fill sqlQuery
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchema();
SqlQuery sqlQuery = queryStatement.getSqlQuery();
String tableName = SqlSelectHelper.getTableName(sqlQuery.getSql());
if (StringUtils.isEmpty(tableName)) {
return;
}
sqlQuery.setTable(tableName.toLowerCase());
SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class);
SemanticSchemaResp semanticSchema = queryStatement.getSemanticSchema();
if (!sqlGenerateUtils.isSupportWith(
EngineType.fromString(semanticSchemaResp.getDatabaseResp().getType().toUpperCase()),
semanticSchemaResp.getDatabaseResp().getVersion())) {
EngineType.fromString(semanticSchema.getDatabaseResp().getType().toUpperCase()),
semanticSchema.getDatabaseResp().getVersion())) {
sqlQuery.setSupportWith(false);
sqlQuery.setWithAlias(false);
}
// build ontologyQuery
List<String> queryFields = SqlSelectHelper.getAllSelectFields(sqlQuery.getSql());
List<MetricSchemaResp> metricSchemas = getMetrics(semanticSchemaResp, queryFields);
List<String> metrics =
metricSchemas.stream().map(SchemaItem::getBizName).collect(Collectors.toList());
List<DimSchemaResp> dimensionSchemas = getDimensions(semanticSchemaResp, queryFields);
List<String> dimensions =
dimensionSchemas.stream().map(SchemaItem::getBizName).collect(Collectors.toList());
// check if there are fields not matched with any metric or dimension
if (queryFields.size() > metricSchemas.size() + dimensions.size()) {
List<String> semanticFields = Lists.newArrayList();
metricSchemas.forEach(m -> semanticFields.add(m.getBizName()));
dimensionSchemas.forEach(d -> semanticFields.add(d.getBizName()));
String errMsg =
String.format("Querying columns[%s] not matched with semantic fields[%s].",
queryFields, semanticFields);
queryStatement.setErrMsg(errMsg);
queryStatement.setStatus(1);
return;
}
OntologyQuery ontologyQuery = new OntologyQuery();
ontologyQuery.getMetrics().addAll(metrics);
ontologyQuery.getDimensions().addAll(dimensions);
AggOption sqlQueryAggOption = getAggOption(sqlQuery.getSql(), metricSchemas);
// if sql query itself has aggregation, ontology query just returns detail
if (sqlQueryAggOption.equals(AggOption.AGGREGATION)) {
ontologyQuery.setAggOption(AggOption.NATIVE);
} else if (sqlQueryAggOption.equals(AggOption.NATIVE) && !metrics.isEmpty()) {
ontologyQuery.setAggOption(AggOption.DEFAULT);
}
ontologyQuery.setNativeQuery(!AggOption.isAgg(ontologyQuery.getAggOption()));
queryStatement.setOntologyQuery(ontologyQuery);
generateDerivedMetric(sqlGenerateUtils, queryStatement);
queryStatement.setSql(sqlQuery.getSql());
// replace SQL fields for the db; must be called after convertNameToBizName
String sqlRewrite = replaceSqlFieldsForHanaDB(queryStatement, sqlQuery.getSql());
sqlQuery.setSql(sqlRewrite);
log.info("parse sqlQuery [{}] ", sqlQuery);
}
private AggOption getAggOption(String sql, List<MetricSchemaResp> metricSchemas) {
private AggOption getAggOption(String sql, Set<MetricSchemaResp> metricSchemas) {
if (SqlSelectFunctionHelper.hasAggregateFunction(sql)) {
return AggOption.AGGREGATION;
}
@@ -133,190 +112,13 @@ public class SqlQueryParser implements QueryParser {
return AggOption.DEFAULT;
}
private List<DimSchemaResp> getDimensions(SemanticSchemaResp semanticSchemaResp,
List<String> allFields) {
Map<String, DimSchemaResp> dimensionLowerToNameMap =
semanticSchemaResp.getDimensions().stream().collect(Collectors
.toMap(entry -> entry.getBizName().toLowerCase(), entry -> entry));
return allFields.stream()
.filter(entry -> dimensionLowerToNameMap.containsKey(entry.toLowerCase()))
.map(entry -> dimensionLowerToNameMap.get(entry.toLowerCase()))
.collect(Collectors.toList());
}
private List<MetricSchemaResp> getMetrics(SemanticSchemaResp semanticSchemaResp,
List<String> allFields) {
Map<String, MetricSchemaResp> metricLowerToNameMap =
semanticSchemaResp.getMetrics().stream().collect(Collectors
.toMap(entry -> entry.getBizName().toLowerCase(), entry -> entry));
return allFields.stream()
.filter(entry -> metricLowerToNameMap.containsKey(entry.toLowerCase()))
.map(entry -> metricLowerToNameMap.get(entry.toLowerCase()))
.collect(Collectors.toList());
}
private void generateDerivedMetric(SqlGenerateUtils sqlGenerateUtils,
QueryStatement queryStatement) {
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchema();
SqlQuery sqlParam = queryStatement.getSqlQuery();
OntologyQuery ontologyParam = queryStatement.getOntologyQuery();
String sql = sqlParam.getSql();
Set<String> measures = new HashSet<>();
Map<String, String> replaces = generateDerivedMetric(sqlGenerateUtils, semanticSchemaResp,
ontologyParam.getAggOption(), ontologyParam.getMetrics(),
ontologyParam.getDimensions(), measures);
if (!CollectionUtils.isEmpty(replaces)) {
// the metric table sql uses measures to replace metrics
sql = SqlReplaceHelper.replaceSqlByExpression(sql, replaces);
ontologyParam.setAggOption(AggOption.NATIVE);
// the metric table uses measures to replace metrics
if (!CollectionUtils.isEmpty(measures)) {
ontologyParam.getMetrics().addAll(measures);
} else {
// no measures matched, fill in a default
ontologyParam.getMetrics().add(sqlGenerateUtils.generateInternalMetricName(
getDefaultModel(semanticSchemaResp, ontologyParam.getDimensions())));
}
}
sqlParam.setSql(sql);
}
private Map<String, String> generateDerivedMetric(SqlGenerateUtils sqlGenerateUtils,
SemanticSchemaResp semanticSchemaResp, AggOption aggOption, Set<String> metrics,
Set<String> dimensions, Set<String> measures) {
Map<String, String> result = new HashMap<>();
List<MetricSchemaResp> metricResps = semanticSchemaResp.getMetrics();
List<DimSchemaResp> dimensionResps = semanticSchemaResp.getDimensions();
// Check if any metric is derived
boolean hasDerivedMetrics =
metricResps.stream().anyMatch(m -> metrics.contains(m.getBizName()) && MetricType
.isDerived(m.getMetricDefineType(), m.getMetricDefineByMeasureParams()));
if (!hasDerivedMetrics) {
return result;
}
log.debug("begin to generateDerivedMetric {} [{}]", aggOption, metrics);
Set<String> allFields = new HashSet<>();
Map<String, Measure> allMeasures = new HashMap<>();
semanticSchemaResp.getModelResps().forEach(modelResp -> {
allFields.addAll(modelResp.getFieldList());
if (modelResp.getModelDetail().getMeasures() != null) {
modelResp.getModelDetail().getMeasures()
.forEach(measure -> allMeasures.put(measure.getBizName(), measure));
}
});
Set<String> derivedDimensions = new HashSet<>();
Set<String> derivedMetrics = new HashSet<>();
Map<String, String> visitedMetrics = new HashMap<>();
for (MetricResp metricResp : metricResps) {
if (metrics.contains(metricResp.getBizName())) {
boolean isDerived = MetricType.isDerived(metricResp.getMetricDefineType(),
metricResp.getMetricDefineByMeasureParams());
if (isDerived) {
String expr = sqlGenerateUtils.generateDerivedMetric(metricResps, allFields,
allMeasures, dimensionResps, sqlGenerateUtils.getExpr(metricResp),
metricResp.getMetricDefineType(), aggOption, visitedMetrics,
derivedMetrics, derivedDimensions);
result.put(metricResp.getBizName(), expr);
log.debug("derived metric {}->{}", metricResp.getBizName(), expr);
} else {
measures.add(metricResp.getBizName());
}
}
}
measures.addAll(derivedMetrics);
derivedDimensions.stream().filter(dimension -> !dimensions.contains(dimension))
.forEach(dimensions::add);
return result;
}
/**
* Special processing for SAP HANA: HANA does not support Chinese column names, so column names
* must be quoted after convertNameToBizName has run.
*
* HANA also auto-converts unquoted column names to upper case, so field names containing lower
* case letters must be quoted as well.
*
* @param queryStatement
* @param sql
* @return
*/
private String replaceSqlFieldsForHanaDB(QueryStatement queryStatement, String sql) {
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchema();
if (!semanticSchemaResp.getDatabaseResp().getType()
.equalsIgnoreCase(EngineType.HANADB.getName())) {
return sql;
}
Map<String, String> fieldNameToBizNameMap = getFieldNameToBizNameMap(semanticSchemaResp);
Map<String, String> fieldNameToBizNameMapQuote = new HashMap<>();
fieldNameToBizNameMap.forEach((key, value) -> {
if (!fieldNameToBizNameMapQuote.containsKey(value) && !value.matches("\".*\"")
&& !value.matches("[A-Z0-9_].*?")) {
fieldNameToBizNameMapQuote.put(value, "\"" + value + "\"");
}
});
String sqlNew = sql;
if (fieldNameToBizNameMapQuote.size() > 0) {
sqlNew = SqlReplaceHelper.replaceFields(sql, fieldNameToBizNameMapQuote, true);
}
// replace alias field name
List<String> asFields = SqlAsHelper.getAsFields(sqlNew);
Map<String, String> fieldMapput = new HashMap<>();
for (String asField : asFields) {
String value = asField;
if (!value.matches("\".*?\"") && !value.matches("[A-Z0-9_].*?")) {
value = "\"" + asField + "\"";
fieldMapput.put(asField, value);
}
}
if (fieldMapput.size() > 0) {
sqlNew = SqlReplaceHelper.replaceAliasFieldName(sqlNew, fieldMapput);
}
return sqlNew;
}
private void convertNameToBizName(QueryStatement queryStatement) {
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchema();
Map<String, String> fieldNameToBizNameMap = getFieldNameToBizNameMap(semanticSchemaResp);
String sql = queryStatement.getSqlQuery().getSql();
log.debug("dataSetId:{},convert name to bizName before:{}", queryStatement.getDataSetId(),
sql);
sql = SqlReplaceHelper.replaceFields(sql, fieldNameToBizNameMap, true);
log.debug("dataSetId:{},convert name to bizName after:{}", queryStatement.getDataSetId(),
sql);
sql = SqlReplaceHelper.replaceTable(sql,
Constants.TABLE_PREFIX + queryStatement.getDataSetId());
log.debug("replaceTableName after:{}", sql);
queryStatement.getSqlQuery().setSql(sql);
}
private void rewriteOrderBy(QueryStatement queryStatement) {
// replace order by field with the select sequence number
String sql = queryStatement.getSqlQuery().getSql();
String newSql = SqlReplaceHelper.replaceAggAliasOrderbyField(sql);
log.debug("replaceOrderAggSameAlias {} -> {}", sql, newSql);
queryStatement.getSqlQuery().setSql(newSql);
}
protected Map<String, String> getFieldNameToBizNameMap(SemanticSchemaResp semanticSchemaResp) {
private Map<String, String> getNameToBizNameMap(OntologyQuery query) {
// support fieldName and field alias to bizName
Map<String, String> dimensionResults = semanticSchemaResp.getDimensions().stream().flatMap(
Map<String, String> dimensionResults = query.getDimensions().stream().flatMap(
entry -> getPairStream(entry.getAlias(), entry.getName(), entry.getBizName()))
.collect(Collectors.toMap(Pair::getLeft, Pair::getRight, (k1, k2) -> k1));
Map<String, String> metricResults = semanticSchemaResp.getMetrics().stream().flatMap(
Map<String, String> metricResults = query.getMetrics().stream().flatMap(
entry -> getPairStream(entry.getAlias(), entry.getName(), entry.getBizName()))
.collect(Collectors.toMap(Pair::getLeft, Pair::getRight, (k1, k2) -> k1));
@@ -337,18 +139,93 @@ public class SqlQueryParser implements QueryParser {
return elements.stream();
}
private String getDefaultModel(SemanticSchemaResp semanticSchemaResp, Set<String> dimensions) {
if (!CollectionUtils.isEmpty(dimensions)) {
Map<String, Long> modelMatchCnt = new HashMap<>();
for (ModelResp modelResp : semanticSchemaResp.getModelResps()) {
modelMatchCnt.put(modelResp.getBizName(), modelResp.getModelDetail().getDimensions()
.stream().filter(d -> dimensions.contains(d.getBizName())).count());
}
return modelMatchCnt.entrySet().stream()
.sorted(Map.Entry.comparingByValue(Comparator.reverseOrder()))
.map(Map.Entry::getKey).findFirst().orElse("");
private void convertNameToBizName(QueryStatement queryStatement) {
Map<String, String> fieldNameToBizNameMap =
getNameToBizNameMap(queryStatement.getOntologyQuery());
String sql = queryStatement.getSqlQuery().getSql();
log.debug("dataSetId:{},convert name to bizName before:{}", queryStatement.getDataSetId(),
sql);
sql = SqlReplaceHelper.replaceFields(sql, fieldNameToBizNameMap, true);
log.debug("dataSetId:{},convert name to bizName after:{}", queryStatement.getDataSetId(),
sql);
sql = SqlReplaceHelper.replaceTable(sql,
Constants.TABLE_PREFIX + queryStatement.getDataSetId());
log.debug("replaceTableName after:{}", sql);
queryStatement.getSqlQuery().setSql(sql);
}
private void rewriteOrderBy(QueryStatement queryStatement) {
// replace order by field with the select sequence number
String sql = queryStatement.getSqlQuery().getSql();
String newSql = SqlReplaceHelper.replaceAggAliasOrderbyField(sql);
log.debug("replaceOrderAggSameAlias {} -> {}", sql, newSql);
queryStatement.getSqlQuery().setSql(newSql);
}
private OntologyQuery buildOntologyQuery(Ontology ontology, List<String> queryFields) {
OntologyQuery ontologyQuery = new OntologyQuery();
Set<String> fields = Sets.newHashSet(queryFields);
// find the owning model for every queried metric
ontology.getMetricMap().entrySet().forEach(entry -> {
String modelName = entry.getKey();
entry.getValue().forEach(m -> {
if (fields.contains(m.getName()) || fields.contains(m.getBizName())) {
if (!ontologyQuery.getMetricMap().containsKey(modelName)) {
ontologyQuery.getMetricMap().put(modelName, Sets.newHashSet());
}
ontologyQuery.getModelMap().put(modelName,
ontology.getModelMap().get(modelName));
ontologyQuery.getMetricMap().get(modelName).add(m);
fields.remove(m.getName());
fields.remove(m.getBizName());
}
});
});
// first, try to find all queried dimensions in the models that own queried metrics.
ontology.getDimensionMap().entrySet().stream()
.filter(entry -> ontologyQuery.getMetricMap().containsKey(entry.getKey()))
.forEach(entry -> {
String modelName = entry.getKey();
entry.getValue().forEach(d -> {
if (fields.contains(d.getName()) || fields.contains(d.getBizName())) {
if (!ontologyQuery.getDimensionMap().containsKey(entry.getKey())) {
ontologyQuery.getDimensionMap().put(entry.getKey(),
Sets.newHashSet());
}
ontologyQuery.getModelMap().put(modelName,
ontology.getModelMap().get(modelName));
ontologyQuery.getDimensionMap().get(entry.getKey()).add(d);
fields.remove(d.getName());
fields.remove(d.getBizName());
}
});
});
// if some fields still have no owning model, try to find them in the models without
// queried metrics.
if (!fields.isEmpty()) {
ontology.getDimensionMap().entrySet().forEach(entry -> {
String modelName = entry.getKey();
if (!ontologyQuery.getDimensionMap().containsKey(modelName)) {
entry.getValue().forEach(d -> {
if (fields.contains(d.getName()) || fields.contains(d.getBizName())) {
if (!ontologyQuery.getDimensionMap().containsKey(modelName)) {
ontologyQuery.getDimensionMap().put(modelName, Sets.newHashSet());
}
ontologyQuery.getModelMap().put(modelName,
ontology.getModelMap().get(modelName));
ontologyQuery.getDimensionMap().get(modelName).add(d);
fields.remove(d.getName());
fields.remove(d.getBizName());
}
});
}
});
}
return semanticSchemaResp.getModelResps().get(0).getBizName();
return ontologyQuery;
}
}
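
The three-pass model resolution in buildOntologyQuery can be illustrated with plain maps. Model and field names here are invented:

import java.util.*;

public class FieldResolutionSketch {
    public static void main(String[] args) {
        Set<String> fields = new HashSet<>(List.of("order_cnt", "province"));

        // per-model bizNames, as in Ontology's metricMap/dimensionMap
        Map<String, Set<String>> metricsByModel = Map.of("order_model", Set.of("order_cnt"));
        Map<String, Set<String>> dimsByModel = Map.of(
                "order_model", Set.of("order_date"),
                "user_model", Set.of("province"));

        // pass 1: metrics claim their owning models
        fields.removeAll(metricsByModel.get("order_model")); // resolves order_cnt
        // pass 2: prefer dimensions from models that already own queried metrics
        fields.removeAll(dimsByModel.get("order_model"));    // no match
        // pass 3: fall back to the remaining models for leftover fields
        fields.removeAll(dimsByModel.get("user_model"));     // resolves province
        System.out.println(fields.isEmpty()); // true: every field matched a model
    }
}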

View File

@@ -4,7 +4,6 @@ import com.tencent.supersonic.headless.api.pojo.enums.ModelDefineType;
import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.utils.SqlVariableParseUtils;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
@@ -36,9 +35,9 @@ public class SqlVariableParser implements QueryParser {
SqlVariableParseUtils.parse(modelResp.getModelDetail().getSqlQuery(),
modelResp.getModelDetail().getSqlVariables(),
queryStatement.getStructQuery().getParams());
DataModel dataModel =
queryStatement.getOntology().getDataModelMap().get(modelResp.getBizName());
dataModel.setSqlQuery(sqlParsed);
ModelResp dataModel =
queryStatement.getOntology().getModelMap().get(modelResp.getBizName());
dataModel.getModelDetail().setSqlQuery(sqlParsed);
}
}
}

View File

@@ -1,10 +1,6 @@
package com.tencent.supersonic.headless.core.translator.parser;
import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQuery;
import com.tencent.supersonic.headless.core.pojo.StructQuery;
@@ -13,8 +9,11 @@ import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import java.util.Objects;
import java.util.stream.Collectors;
/**
* This parser converts struct semantic query into sql query by generating S2SQL based on structured
* semantic information.
*/
@Component("StructQueryParser")
@Slf4j
public class StructQueryParser implements QueryParser {
@@ -29,42 +28,30 @@ public class StructQueryParser implements QueryParser {
SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class);
StructQuery structQuery = queryStatement.getStructQuery();
String dsTable = "t_1";
SqlQuery sqlParam = new SqlQuery();
sqlParam.setTable(dsTable);
String sql = String.format("select %s from %s %s %s %s",
String dsTable = queryStatement.getDataSetName();
if (Objects.isNull(dsTable)) {
dsTable = "t_ds_temp";
}
SqlQuery sqlQuery = new SqlQuery();
sqlQuery.setTable(dsTable);
String sql = String.format("select %s from %s %s %s %s %s",
sqlGenerateUtils.getSelect(structQuery), dsTable,
sqlGenerateUtils.generateWhere(structQuery, null),
sqlGenerateUtils.getGroupBy(structQuery), sqlGenerateUtils.getOrderBy(structQuery),
sqlGenerateUtils.getLimit(structQuery));
if (!sqlGenerateUtils.isSupportWith(queryStatement.getOntology().getDatabaseType(),
queryStatement.getOntology().getDatabaseVersion())) {
sqlParam.setSupportWith(false);
sqlQuery.setSupportWith(false);
sql = String.format("select %s from %s t0 %s %s %s",
sqlGenerateUtils.getSelect(structQuery), dsTable,
sqlGenerateUtils.getGroupBy(structQuery),
sqlGenerateUtils.getOrderBy(structQuery),
sqlGenerateUtils.getLimit(structQuery));
}
sqlParam.setSql(sql);
queryStatement.setSqlQuery(sqlParam);
sqlQuery.setSql(sql);
queryStatement.setSqlQuery(sqlQuery);
queryStatement.setIsS2SQL(true);
OntologyQuery ontologyQuery = new OntologyQuery();
ontologyQuery.getDimensions().addAll(structQuery.getGroups());
ontologyQuery.getMetrics().addAll(structQuery.getAggregators().stream()
.map(Aggregator::getColumn).collect(Collectors.toList()));
String where = sqlGenerateUtils.generateWhere(structQuery, null);
ontologyQuery.setWhere(where);
if (ontologyQuery.getMetrics().isEmpty()) {
ontologyQuery.setAggOption(AggOption.NATIVE);
} else {
ontologyQuery.setAggOption(AggOption.DEFAULT);
}
ontologyQuery.setNativeQuery(structQuery.getQueryType().isNativeAggQuery());
ontologyQuery.setOrder(structQuery.getOrders().stream()
.map(order -> new ColumnOrder(order.getColumn(), order.getDirection()))
.collect(Collectors.toList()));
ontologyQuery.setLimit(structQuery.getLimit());
queryStatement.setOntologyQuery(ontologyQuery);
log.info("parse structQuery [{}] ", queryStatement.getSqlQuery());
}
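
A sketch of the SQL assembled by this parser for a hypothetical struct query (group by dept, aggregate sum(pv), limit 10); the six-part format string mirrors the one above:

public class StructToSqlSketch {
    public static void main(String[] args) {
        String select = "dept, sum(pv)";
        String table = "t_ds_temp"; // fallback table name used above
        String where = "where sys_imp_date >= '2025-01-01'";
        String groupBy = "group by dept";
        String orderBy = "order by sum(pv) desc";
        String limit = "limit 10";

        String sql = String.format("select %s from %s %s %s %s %s",
                select, table, where, groupBy, orderBy, limit);
        System.out.println(sql);
    }
}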

View File

@@ -1,17 +1,23 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
package com.tencent.supersonic.headless.core.translator.parser.calcite;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.tencent.supersonic.common.calcite.Configuration;
import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.api.pojo.Dimension;
import com.tencent.supersonic.headless.api.pojo.Identify;
import com.tencent.supersonic.headless.api.pojo.Measure;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
import com.tencent.supersonic.headless.core.pojo.JoinRelation;
import com.tencent.supersonic.headless.core.pojo.Ontology;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.calcite.SchemaBuilder;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.*;
import com.tencent.supersonic.headless.core.translator.parser.Constants;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.*;
import org.apache.calcite.sql.SqlDataTypeSpec;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlUserDefinedTypeNameSpec;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.validate.SqlValidatorScope;
@@ -23,28 +29,32 @@ import java.util.stream.Collectors;
@Slf4j
public class DataModelNode extends SemanticNode {
public static SqlNode build(DataModel dataModel, SqlValidatorScope scope) throws Exception {
public static SqlNode build(ModelResp dataModel, SqlValidatorScope scope) throws Exception {
String sqlTable = "";
if (dataModel.getSqlQuery() != null && !dataModel.getSqlQuery().isEmpty()) {
sqlTable = dataModel.getSqlQuery();
} else if (dataModel.getTableQuery() != null && !dataModel.getTableQuery().isEmpty()) {
if (dataModel.getType().equalsIgnoreCase(EngineType.POSTGRESQL.getName())) {
String fullTableName =
String.join(".public.", dataModel.getTableQuery().split("\\."));
if (dataModel.getModelDetail().getSqlQuery() != null
&& !dataModel.getModelDetail().getSqlQuery().isEmpty()) {
sqlTable = dataModel.getModelDetail().getSqlQuery();
} else if (dataModel.getModelDetail().getTableQuery() != null
&& !dataModel.getModelDetail().getTableQuery().isEmpty()) {
if (dataModel.getModelDetail().getDbType()
.equalsIgnoreCase(EngineType.POSTGRESQL.getName())) {
String fullTableName = String.join(".public.",
dataModel.getModelDetail().getTableQuery().split("\\."));
sqlTable = "select * from " + fullTableName;
} else {
sqlTable = "select * from " + dataModel.getTableQuery();
sqlTable = "select * from " + dataModel.getModelDetail().getTableQuery();
}
}
if (sqlTable.isEmpty()) {
throw new Exception("DataModelNode build error [tableSqlNode not found]");
}
SqlNode source = getTable(sqlTable, scope, EngineType.fromString(dataModel.getType()));
SqlNode source = getTable(sqlTable, scope,
EngineType.fromString(dataModel.getModelDetail().getDbType()));
addSchema(scope, dataModel, sqlTable);
return buildAs(dataModel.getName(), source);
}
private static void addSchema(SqlValidatorScope scope, DataModel datasource, String table)
private static void addSchema(SqlValidatorScope scope, ModelResp datasource, String table)
throws Exception {
Map<String, Set<String>> sqlTable = SqlSelectHelper.getFieldsWithSubQuery(table);
for (Map.Entry<String, Set<String>> entry : sqlTable.entrySet()) {
@@ -58,22 +68,22 @@ public class DataModelNode extends SemanticNode {
}
}
private static void addSchemaTable(SqlValidatorScope scope, DataModel datasource, String db,
private static void addSchemaTable(SqlValidatorScope scope, ModelResp dataModel, String db,
String tb, Set<String> fields) throws Exception {
Set<String> dateInfo = new HashSet<>();
Set<String> dimensions = new HashSet<>();
Set<String> metrics = new HashSet<>();
EngineType engineType = EngineType.fromString(datasource.getType());
for (Dimension d : datasource.getDimensions()) {
EngineType engineType = EngineType.fromString(dataModel.getModelDetail().getDbType());
for (Dimension d : dataModel.getModelDetail().getDimensions()) {
List<SqlNode> identifiers =
expand(SemanticNode.parse(d.getExpr(), scope, engineType), scope);
identifiers.forEach(i -> dimensions.add(i.toString()));
dimensions.add(d.getName());
}
for (Identify i : datasource.getIdentifiers()) {
for (Identify i : dataModel.getIdentifiers()) {
dimensions.add(i.getName());
}
for (Measure m : datasource.getMeasures()) {
for (Measure m : dataModel.getMeasures()) {
List<SqlNode> identifiers =
expand(SemanticNode.parse(m.getExpr(), scope, engineType), scope);
identifiers.forEach(i -> {
@@ -88,26 +98,13 @@ public class DataModelNode extends SemanticNode {
for (String field : fields) {
if (!metrics.contains(field) && !dimensions.contains(field)) {
dimensions.add(field);
log.info("add column {} {}", datasource.getName(), field);
log.info("add column {} {}", dataModel.getName(), field);
}
}
SchemaBuilder.addSourceView(scope.getValidator().getCatalogReader().getRootSchema(), db, tb,
dateInfo, dimensions, metrics);
}
public static SqlNode buildExtend(DataModel datasource, Map<String, String> exprList,
SqlValidatorScope scope) throws Exception {
if (CollectionUtils.isEmpty(exprList)) {
return build(datasource, scope);
}
EngineType engineType = EngineType.fromString(datasource.getType());
SqlNode dataSet = new SqlBasicCall(new LateralViewExplodeNode(exprList),
Arrays.asList(build(datasource, scope), new SqlNodeList(
getExtendField(exprList, scope, engineType), SqlParserPos.ZERO)),
SqlParserPos.ZERO);
return buildAs(datasource.getName() + Constants.DIMENSION_ARRAY_SINGLE_SUFFIX, dataSet);
}
public static List<SqlNode> getExtendField(Map<String, String> exprList,
SqlValidatorScope scope, EngineType engineType) throws Exception {
List<SqlNode> sqlNodeList = new ArrayList<>();
@@ -129,64 +126,15 @@ public class DataModelNode extends SemanticNode {
return sqlNode;
}
public static String getNames(List<DataModel> dataModelList) {
return dataModelList.stream().map(DataModel::getName).collect(Collectors.joining("_"));
}
public static void getQueryDimensionMeasure(Ontology ontology, OntologyQuery queryParam,
Set<String> queryDimensions, Set<String> queryMeasures) {
queryDimensions.addAll(queryParam.getDimensions().stream()
.map(d -> d.contains(Constants.DIMENSION_IDENTIFY)
? d.split(Constants.DIMENSION_IDENTIFY)[1]
: d)
.collect(Collectors.toSet()));
Set<String> schemaMetricName =
ontology.getMetrics().stream().map(Metric::getName).collect(Collectors.toSet());
ontology.getMetrics().stream().filter(m -> queryParam.getMetrics().contains(m.getName()))
.forEach(m -> m.getMetricTypeParams().getMeasures()
.forEach(mm -> queryMeasures.add(mm.getName())));
queryParam.getMetrics().stream().filter(m -> !schemaMetricName.contains(m))
.forEach(queryMeasures::add);
}
public static void mergeQueryFilterDimensionMeasure(Ontology ontology, OntologyQuery queryParam,
Set<String> dimensions, Set<String> measures, SqlValidatorScope scope)
throws Exception {
EngineType engineType = ontology.getDatabaseType();
if (Objects.nonNull(queryParam.getWhere()) && !queryParam.getWhere().isEmpty()) {
Set<String> filterConditions = new HashSet<>();
FilterNode.getFilterField(parse(queryParam.getWhere(), scope, engineType),
filterConditions);
Set<String> queryMeasures = new HashSet<>(measures);
Set<String> schemaMetricName =
ontology.getMetrics().stream().map(Metric::getName).collect(Collectors.toSet());
for (String filterCondition : filterConditions) {
if (schemaMetricName.contains(filterCondition)) {
ontology.getMetrics().stream()
.filter(m -> m.getName().equalsIgnoreCase(filterCondition))
.forEach(m -> m.getMetricTypeParams().getMeasures()
.forEach(mm -> queryMeasures.add(mm.getName())));
continue;
}
dimensions.add(filterCondition);
}
measures.clear();
measures.addAll(queryMeasures);
}
}
public static List<DataModel> getQueryDataModels(SqlValidatorScope scope,
S2CalciteSchema schema, OntologyQuery queryParam) throws Exception {
Ontology ontology = schema.getOntology();
public static List<ModelResp> getQueryDataModels(Ontology ontology,
OntologyQuery ontologyQuery) {
// get query measures and dimensions
Set<String> queryMeasures = new HashSet<>();
Set<String> queryDimensions = new HashSet<>();
getQueryDimensionMeasure(ontology, queryParam, queryDimensions, queryMeasures);
mergeQueryFilterDimensionMeasure(ontology, queryParam, queryDimensions, queryMeasures,
scope);
getQueryDimensionMeasure(ontology, ontologyQuery, queryDimensions, queryMeasures);
// first, find the base model
DataModel baseDataModel = findBaseModel(ontology, queryMeasures, queryDimensions);
ModelResp baseDataModel = findBaseModel(ontology, ontologyQuery);
if (Objects.isNull(baseDataModel)) {
throw new RuntimeException(
String.format("could not find matching dataModel, dimensions:%s, measures:%s",
@@ -199,7 +147,7 @@ public class DataModelNode extends SemanticNode {
}
// second, traverse the ontology to find other related dataModels
List<DataModel> relatedDataModels = findRelatedModelsByRelation(ontology, queryParam,
List<ModelResp> relatedDataModels = findRelatedModelsByRelation(ontology, ontologyQuery,
baseDataModel, queryDimensions, queryMeasures);
if (CollectionUtils.isEmpty(relatedDataModels)) {
relatedDataModels = findRelatedModelsByIdentifier(ontology, baseDataModel,
@@ -213,51 +161,66 @@ public class DataModelNode extends SemanticNode {
return relatedDataModels;
}
private static DataModel findBaseModel(Ontology ontology, Set<String> queryMeasures,
Set<String> queryDimensions) {
DataModel dataModel = null;
// first, try to find the model with the most matching measures
Map<String, Integer> dataModelMeasuresCount = new HashMap<>();
for (Map.Entry<String, DataModel> entry : ontology.getDataModelMap().entrySet()) {
Set<String> sourceMeasure = entry.getValue().getMeasures().stream()
.map(Measure::getName).collect(Collectors.toSet());
sourceMeasure.retainAll(queryMeasures);
dataModelMeasuresCount.put(entry.getKey(), sourceMeasure.size());
}
log.info("dataModelMeasureCount: [{}]", dataModelMeasuresCount);
Optional<Map.Entry<String, Integer>> base =
dataModelMeasuresCount.entrySet().stream().filter(e -> e.getValue() > 0)
.sorted(Map.Entry.comparingByValue(Comparator.reverseOrder())).findFirst();
if (base.isPresent()) {
dataModel = ontology.getDataModelMap().get(base.get().getKey());
} else {
// second, try to find the model with the most matching dimensions
Map<String, Integer> dataModelDimCount = new HashMap<>();
for (Map.Entry<String, List<Dimension>> entry : ontology.getDimensionMap().entrySet()) {
Set<String> modelDimensions = entry.getValue().stream().map(Dimension::getName)
.collect(Collectors.toSet());
modelDimensions.retainAll(queryDimensions);
dataModelDimCount.put(entry.getKey(), modelDimensions.size());
public static void getQueryDimensionMeasure(Ontology ontology, OntologyQuery ontologyQuery,
Set<String> queryDimensions, Set<String> queryMeasures) {
ontologyQuery.getMetrics().forEach(m -> {
if (Objects.nonNull(m.getMetricDefineByMeasureParams())) {
m.getMetricDefineByMeasureParams().getMeasures()
.forEach(mm -> queryMeasures.add(mm.getName()));
}
log.info("dataModelDimCount: [{}]", dataModelDimCount);
base = dataModelDimCount.entrySet().stream().filter(e -> e.getValue() > 0)
.sorted(Map.Entry.comparingByValue(Comparator.reverseOrder())).findFirst();
if (base.isPresent()) {
dataModel = ontology.getDataModelMap().get(base.get().getKey());
if (Objects.nonNull(m.getMetricDefineByFieldParams())) {
m.getMetricDefineByFieldParams().getFields()
.forEach(mm -> queryMeasures.add(mm.getFieldName()));
}
});
}
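    // Illustration (hypothetical metric names): a measure-defined metric "pv"
    // over measure s2_pv_cnt adds "s2_pv_cnt" to queryMeasures, while a
    // field-defined metric "uv_rate" over fields [uv, pv] adds the raw field
    // names "uv" and "pv".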
private static ModelResp findBaseModel(Ontology ontology, OntologyQuery query) {
ModelResp dataModel = null;
// first, try to find the model with the most query metrics
Map<String, Integer> modelMetricCount = Maps.newHashMap();
        query.getMetrics()
                .forEach(m -> modelMetricCount.merge(m.getModelBizName(), 1, Integer::sum));
        Optional<String> baseModelName = modelMetricCount.entrySet().stream()
                .sorted(Map.Entry.comparingByValue(Comparator.reverseOrder()))
                .map(Map.Entry::getKey).findFirst();
if (baseModelName.isPresent()) {
dataModel = ontology.getModelMap().get(baseModelName.get());
} else {
// second, try to find the model with the most query dimensions
            Map<String, Integer> modelDimCount = Maps.newHashMap();
            query.getDimensions()
                    .forEach(d -> modelDimCount.merge(d.getModelBizName(), 1, Integer::sum));
            baseModelName = modelDimCount.entrySet().stream()
                    .sorted(Map.Entry.comparingByValue(Comparator.reverseOrder()))
                    .map(Map.Entry::getKey).findFirst();
if (baseModelName.isPresent()) {
dataModel = ontology.getModelMap().get(baseModelName.get());
}
}
return dataModel;
}
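    // Illustration (hypothetical query): metrics {pv, uv} from model "s2_log"
    // plus dimension {department} from "s2_user" select "s2_log" as the base
    // model (highest metric count); a metric-less query falls back to the
    // model owning the most queried dimensions.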
private static boolean checkMatch(DataModel baseDataModel, Set<String> queryMeasures,
private static boolean checkMatch(ModelResp baseDataModel, Set<String> queryMeasures,
Set<String> queryDimension) {
boolean isAllMatch = true;
Set<String> baseMeasures = baseDataModel.getMeasures().stream().map(Measure::getName)
.collect(Collectors.toSet());
Set<String> baseDimensions = baseDataModel.getDimensions().stream().map(Dimension::getName)
.collect(Collectors.toSet());
Set<String> baseDimensions = baseDataModel.getModelDetail().getDimensions().stream()
.map(Dimension::getName).collect(Collectors.toSet());
baseDataModel.getIdentifiers().forEach(i -> baseDimensions.add(i.getName()));
baseMeasures.retainAll(queryMeasures);
@@ -284,11 +247,11 @@ public class DataModelNode extends SemanticNode {
return isAllMatch;
}
private static List<DataModel> findRelatedModelsByRelation(Ontology ontology,
OntologyQuery queryParam, DataModel baseDataModel, Set<String> queryDimensions,
private static List<ModelResp> findRelatedModelsByRelation(Ontology ontology,
OntologyQuery ontologyQuery, ModelResp baseDataModel, Set<String> queryDimensions,
Set<String> queryMeasures) {
Set<String> joinDataModelNames = new HashSet<>();
List<DataModel> joinDataModels = new ArrayList<>();
List<ModelResp> joinDataModels = new ArrayList<>();
Set<String> before = new HashSet<>();
before.add(baseDataModel.getName());
@@ -307,30 +270,31 @@ public class DataModelNode extends SemanticNode {
}
boolean isMatch = false;
boolean isRight = before.contains(joinRelation.getLeft());
DataModel other = isRight ? ontology.getDataModelMap().get(joinRelation.getRight())
: ontology.getDataModelMap().get(joinRelation.getLeft());
ModelResp other = isRight ? ontology.getModelMap().get(joinRelation.getRight())
: ontology.getModelMap().get(joinRelation.getLeft());
String joinDimName = isRight ? joinRelation.getJoinCondition().get(0).getRight()
: joinRelation.getJoinCondition().get(0).getLeft();
if (!queryDimensions.isEmpty()) {
Set<String> linkDimension = other.getDimensions().stream()
Set<String> linkDimension = other.getModelDetail().getDimensions().stream()
.map(Dimension::getName).collect(Collectors.toSet());
other.getIdentifiers().forEach(i -> linkDimension.add(i.getName()));
other.getModelDetail().getIdentifiers()
.forEach(i -> linkDimension.add(i.getName()));
linkDimension.retainAll(queryDimensions);
if (!linkDimension.isEmpty()) {
isMatch = true;
// joinDim should be added to the query dimension
queryParam.getDimensions().add(joinDimName);
// ontologyQuery.getDimensions().add(joinDimName);
}
}
Set<String> linkMeasure = other.getMeasures().stream().map(Measure::getName)
.collect(Collectors.toSet());
Set<String> linkMeasure = other.getModelDetail().getMeasures().stream()
.map(Measure::getName).collect(Collectors.toSet());
linkMeasure.retainAll(queryMeasures);
if (!linkMeasure.isEmpty()) {
isMatch = true;
}
if (!isMatch && ontology.getDimensionMap().containsKey(other.getName())) {
Set<String> linkDimension = ontology.getDimensionMap().get(other.getName())
.stream().map(Dimension::getName).collect(Collectors.toSet());
.stream().map(DimSchemaResp::getName).collect(Collectors.toSet());
linkDimension.retainAll(queryDimensions);
if (!linkDimension.isEmpty()) {
isMatch = true;
@@ -362,7 +326,7 @@ public class DataModelNode extends SemanticNode {
orders.entrySet().stream()
                    .sorted((entry1, entry2) -> entry2.getValue().compareTo(entry1.getValue())) // sort in descending order
.forEach(d -> {
joinDataModels.add(ontology.getDataModelMap().get(d.getKey()));
joinDataModels.add(ontology.getModelMap().get(d.getKey()));
});
}
return joinDataModels;
@@ -383,36 +347,37 @@ public class DataModelNode extends SemanticNode {
}
}
private static List<DataModel> findRelatedModelsByIdentifier(Ontology ontology,
DataModel baseDataModel, Set<String> queryDimension, Set<String> measures) {
Set<String> baseIdentifiers = baseDataModel.getIdentifiers().stream().map(Identify::getName)
.collect(Collectors.toSet());
private static List<ModelResp> findRelatedModelsByIdentifier(Ontology ontology,
ModelResp baseDataModel, Set<String> queryDimension, Set<String> measures) {
Set<String> baseIdentifiers = baseDataModel.getModelDetail().getIdentifiers().stream()
.map(Identify::getName).collect(Collectors.toSet());
if (baseIdentifiers.isEmpty()) {
            return Collections.emptyList();
}
Set<String> linkDataSourceName = new HashSet<>();
List<DataModel> linkDataModels = new ArrayList<>();
for (Map.Entry<String, DataModel> entry : ontology.getDataModelMap().entrySet()) {
List<ModelResp> linkDataModels = new ArrayList<>();
for (Map.Entry<String, ModelResp> entry : ontology.getModelMap().entrySet()) {
if (entry.getKey().equalsIgnoreCase(baseDataModel.getName())) {
continue;
}
long identifierNum = entry.getValue().getIdentifiers().stream().map(Identify::getName)
.filter(baseIdentifiers::contains).count();
long identifierNum = entry.getValue().getModelDetail().getIdentifiers().stream()
.map(Identify::getName).filter(baseIdentifiers::contains).count();
if (identifierNum > 0) {
boolean isMatch = false;
if (!queryDimension.isEmpty()) {
Set<String> linkDimension = entry.getValue().getDimensions().stream()
.map(Dimension::getName).collect(Collectors.toSet());
entry.getValue().getIdentifiers().forEach(i -> linkDimension.add(i.getName()));
Set<String> linkDimension = entry.getValue().getModelDetail().getDimensions()
.stream().map(Dimension::getName).collect(Collectors.toSet());
entry.getValue().getModelDetail().getIdentifiers()
.forEach(i -> linkDimension.add(i.getName()));
linkDimension.retainAll(queryDimension);
if (!linkDimension.isEmpty()) {
isMatch = true;
}
}
if (!measures.isEmpty()) {
Set<String> linkMeasure = entry.getValue().getMeasures().stream()
.map(Measure::getName).collect(Collectors.toSet());
Set<String> linkMeasure = entry.getValue().getModelDetail().getMeasures()
.stream().map(Measure::getName).collect(Collectors.toSet());
linkMeasure.retainAll(measures);
if (!linkMeasure.isEmpty()) {
isMatch = true;
@@ -423,9 +388,9 @@ public class DataModelNode extends SemanticNode {
}
}
}
for (Map.Entry<String, List<Dimension>> entry : ontology.getDimensionMap().entrySet()) {
for (Map.Entry<String, List<DimSchemaResp>> entry : ontology.getDimensionMap().entrySet()) {
if (!queryDimension.isEmpty()) {
Set<String> linkDimension = entry.getValue().stream().map(Dimension::getName)
Set<String> linkDimension = entry.getValue().stream().map(DimSchemaResp::getName)
.collect(Collectors.toSet());
linkDimension.retainAll(queryDimension);
if (!linkDimension.isEmpty()) {
@@ -434,10 +399,10 @@ public class DataModelNode extends SemanticNode {
}
}
for (String linkName : linkDataSourceName) {
linkDataModels.add(ontology.getDataModelMap().get(linkName));
linkDataModels.add(ontology.getModelMap().get(linkName));
}
if (!CollectionUtils.isEmpty(linkDataModels)) {
List<DataModel> all = new ArrayList<>();
List<ModelResp> all = new ArrayList<>();
all.add(baseDataModel);
all.addAll(linkDataModels);
return all;

View File

@@ -25,7 +25,9 @@ import java.util.List;
import java.util.Objects;
import java.util.Optional;
/** push down the time filter into group using the RuntimeOptions defined minMaxTime */
/**
* push down the time filter into group using the RuntimeOptions defined minMaxTime
*/
public class FilterToGroupScanRule extends RelRule<Config> implements TransformationRule {
public static FilterTableScanRule.Config DEFAULT =

View File

@@ -1,10 +1,11 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
import com.tencent.supersonic.headless.core.pojo.JoinRelation;
import com.tencent.supersonic.headless.core.pojo.Ontology;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.parser.RuntimeOptions;
import lombok.Builder;
import lombok.Data;
import org.apache.calcite.schema.Schema;
@@ -29,15 +30,15 @@ public class S2CalciteSchema extends AbstractSchema {
return this;
}
public Map<String, DataModel> getDataModels() {
return ontology.getDataModelMap();
public Map<String, ModelResp> getDataModels() {
return ontology.getModelMap();
}
public List<Metric> getMetrics() {
public List<MetricSchemaResp> getMetrics() {
return ontology.getMetrics();
}
public Map<String, List<Dimension>> getDimensions() {
public Map<String, List<DimSchemaResp>> getDimensions() {
return ontology.getDimensionMap();
}

View File

@@ -26,14 +26,14 @@ public class SchemaBuilder {
public static final String MATERIALIZATION_SYS_FIELD_DATE = "C1";
public static final String MATERIALIZATION_SYS_FIELD_DATA = "C2";
public static SqlValidatorScope getScope(S2CalciteSchema schema) throws Exception {
public static SqlValidatorScope getScope(S2CalciteSchema schema) {
Map<String, RelDataType> nameToTypeMap = new HashMap<>();
CalciteSchema rootSchema = CalciteSchema.createRootSchema(true, false);
rootSchema.add(schema.getSchemaKey(), schema);
Prepare.CatalogReader catalogReader = new CalciteCatalogReader(rootSchema,
Collections.singletonList(schema.getSchemaKey()), Configuration.typeFactory,
Configuration.config);
EngineType engineType = schema.getOntology().getDatabaseType();
EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
S2SQLSqlValidatorImpl s2SQLSqlValidator =
new S2SQLSqlValidatorImpl(Configuration.operatorTable, catalogReader,
Configuration.typeFactory, Configuration.getValidatorConfig(engineType));

View File

@@ -1,12 +1,10 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
package com.tencent.supersonic.headless.core.translator.parser.calcite;
import com.tencent.supersonic.common.calcite.Configuration;
import com.tencent.supersonic.common.calcite.SemanticSqlDialect;
import com.tencent.supersonic.common.calcite.SqlDialectFactory;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.translator.parser.calcite.FilterToGroupScanRule;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.parser.Constants;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.plan.RelOptPlanner;
import org.apache.calcite.plan.hep.HepPlanner;

View File

@@ -2,107 +2,63 @@ package com.tencent.supersonic.headless.core.translator.parser.calcite;
import com.tencent.supersonic.common.calcite.Configuration;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.DataModelNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.SemanticNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.render.FilterRender;
import com.tencent.supersonic.headless.core.translator.parser.calcite.render.OutputRender;
import com.tencent.supersonic.headless.core.translator.parser.calcite.render.Renderer;
import com.tencent.supersonic.headless.core.translator.parser.calcite.render.SourceRender;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.api.pojo.Dimension;
import com.tencent.supersonic.headless.api.pojo.Identify;
import com.tencent.supersonic.headless.api.pojo.enums.IdentifyType;
import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
import com.tencent.supersonic.headless.core.pojo.*;
import com.tencent.supersonic.headless.core.translator.parser.Constants;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.*;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Triple;
import org.springframework.util.CollectionUtils;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.Objects;
import java.util.*;
import java.util.stream.Collectors;
@Slf4j
public class SqlBuilder {
private final S2CalciteSchema schema;
private OntologyQuery ontologyQuery;
private SqlValidatorScope scope;
private SqlNode parserNode;
private boolean isAgg = false;
private AggOption aggOption = AggOption.DEFAULT;
private final SqlValidatorScope scope;
public SqlBuilder(S2CalciteSchema schema) {
this.schema = schema;
this.scope = SchemaBuilder.getScope(schema);
}
public String buildOntologySql(QueryStatement queryStatement) throws Exception {
this.ontologyQuery = queryStatement.getOntologyQuery();
OntologyQuery ontologyQuery = queryStatement.getOntologyQuery();
if (ontologyQuery.getLimit() == null) {
ontologyQuery.setLimit(0L);
}
this.aggOption = ontologyQuery.getAggOption();
buildParseNode();
optimizeParseNode(queryStatement.getOntology().getDatabaseType());
return getSql(queryStatement.getOntology().getDatabaseType());
}
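    // Usage sketch (caller names are assumptions, not from this diff): a
    // translator parser would typically do
    //   SqlBuilder sqlBuilder = new SqlBuilder(schema);
    //   String sql = sqlBuilder.buildOntologySql(queryStatement);
    // where the QueryStatement already carries its Ontology and OntologyQuery.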
private void buildParseNode() throws Exception {
// find relevant data models
scope = SchemaBuilder.getScope(schema);
List<DataModel> dataModels = DataModelNode.getQueryDataModels(scope, schema, ontologyQuery);
Set<ModelResp> dataModels = ontologyQuery.getModels();
if (dataModels == null || dataModels.isEmpty()) {
throw new Exception("data model not found");
}
isAgg = getAgg(dataModels.get(0));
// build level by level
LinkedList<Renderer> builders = new LinkedList<>();
builders.add(new SourceRender());
builders.add(new FilterRender());
builders.add(new OutputRender());
ListIterator<Renderer> it = builders.listIterator();
int i = 0;
Renderer previous = null;
while (it.hasNext()) {
Renderer renderer = it.next();
if (previous != null) {
previous.render(ontologyQuery, dataModels, scope, schema, !isAgg);
renderer.setTable(previous
.builderAs(DataModelNode.getNames(dataModels) + "_" + String.valueOf(i)));
i++;
}
previous = renderer;
}
builders.getLast().render(ontologyQuery, dataModels, scope, schema, !isAgg);
parserNode = builders.getLast().builder();
}
private boolean getAgg(DataModel dataModel) {
if (!AggOption.DEFAULT.equals(aggOption)) {
return AggOption.isAgg(aggOption);
}
// default by dataModel time aggregation
if (Objects.nonNull(dataModel.getAggTime()) && !dataModel.getAggTime()
.equalsIgnoreCase(Constants.DIMENSION_TYPE_TIME_GRANULARITY_NONE)) {
if (!ontologyQuery.isNativeQuery()) {
return true;
}
}
return isAgg;
}
public String getSql(EngineType engineType) {
TableView tableView = render(ontologyQuery, new ArrayList<>(dataModels), scope, schema);
SqlNode parserNode = tableView.build();
DatabaseResp database = queryStatement.getOntology().getDatabase();
EngineType engineType = EngineType.fromString(database.getType());
parserNode = optimizeParseNode(parserNode, engineType);
return SemanticNode.getSql(parserNode, engineType);
}
private void optimizeParseNode(EngineType engineType) {
private SqlNode optimizeParseNode(SqlNode parserNode, EngineType engineType) {
if (Objects.isNull(schema.getRuntimeOptions())
|| Objects.isNull(schema.getRuntimeOptions().getEnableOptimize())
|| !schema.getRuntimeOptions().getEnableOptimize()) {
return;
return parserNode;
}
SqlNode optimizeNode = null;
@@ -117,8 +73,237 @@ public class SqlBuilder {
}
if (Objects.nonNull(optimizeNode)) {
parserNode = optimizeNode;
return optimizeNode;
}
return parserNode;
}
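    // Optimization is opt-in: unless RuntimeOptions.enableOptimize is set, the
    // parsed node passes through untouched; otherwise the rewritten node (for
    // instance the output of FilterToGroupScanRule) replaces it.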
private TableView render(OntologyQuery ontologyQuery, List<ModelResp> dataModels,
SqlValidatorScope scope, S2CalciteSchema schema) throws Exception {
SqlNode left = null;
TableView leftTable = null;
TableView outerTable = new TableView();
Map<String, SqlNode> outerSelect = new HashMap<>();
Map<String, String> beforeModels = new HashMap<>();
EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
for (int i = 0; i < dataModels.size(); i++) {
final ModelResp dataModel = dataModels.get(i);
final Set<DimSchemaResp> queryDimensions =
ontologyQuery.getDimensionsByModel(dataModel.getName());
final Set<MetricSchemaResp> queryMetrics =
ontologyQuery.getMetricsByModel(dataModel.getName());
List<String> primary = new ArrayList<>();
for (Identify identify : dataModel.getIdentifiers()) {
primary.add(identify.getName());
}
TableView tableView =
renderOne(queryMetrics, queryDimensions, dataModel, scope, schema);
log.info("tableView {}", StringUtils.normalizeSpace(tableView.getTable().toString()));
String alias = Constants.JOIN_TABLE_PREFIX + dataModel.getName();
tableView.setAlias(alias);
tableView.setPrimary(primary);
tableView.setDataModel(dataModel);
for (String field : tableView.getFields()) {
outerSelect.put(field, SemanticNode.parse(alias + "." + field, scope, engineType));
}
if (left == null) {
left = SemanticNode.buildAs(tableView.getAlias(), getTable(tableView));
} else {
left = buildJoin(left, leftTable, tableView, beforeModels, dataModel, schema,
scope);
}
leftTable = tableView;
beforeModels.put(dataModel.getName(), leftTable.getAlias());
}
for (Map.Entry<String, SqlNode> entry : outerSelect.entrySet()) {
outerTable.getSelect().add(entry.getValue());
}
outerTable.setTable(left);
return outerTable;
}
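    // Walkthrough: each data model is rendered into its own TableView aliased
    // as Constants.JOIN_TABLE_PREFIX + modelName; the first view becomes the
    // left side, every subsequent view is joined on via buildJoin, and the
    // outer TableView selects each queried field through its owning alias.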
private SqlNode getTable(TableView tableView) {
return SemanticNode.getTable(tableView.getTable());
}
private SqlNode buildJoin(SqlNode leftNode, TableView leftTable, TableView rightTable,
Map<String, String> before, ModelResp dataModel, S2CalciteSchema schema,
SqlValidatorScope scope) throws Exception {
EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
SqlNode condition =
getCondition(leftTable, rightTable, dataModel, schema, scope, engineType);
SqlLiteral sqlLiteral = SemanticNode.getJoinSqlLiteral("");
        JoinRelation matchJoinRelation = getMatchJoinRelation(before, rightTable, schema);
        if (!CollectionUtils.isEmpty(matchJoinRelation.getJoinCondition())) {
            sqlLiteral = SemanticNode.getJoinSqlLiteral(matchJoinRelation.getJoinType());
            condition = getCondition(matchJoinRelation, scope, engineType);
        }
return new SqlJoin(SqlParserPos.ZERO, leftNode,
SqlLiteral.createBoolean(false, SqlParserPos.ZERO), sqlLiteral,
SemanticNode.buildAs(rightTable.getAlias(), getTable(rightTable)),
SqlLiteral.createSymbol(JoinConditionType.ON, SqlParserPos.ZERO), condition);
}
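    // Note: when a configured JoinRelation matches, its join type and ON
    // condition take precedence over the shared-field equality inferred by
    // getCondition(leftTable, rightTable, ...).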
private JoinRelation getMatchJoinRelation(Map<String, String> before, TableView tableView,
S2CalciteSchema schema) {
JoinRelation matchJoinRelation = JoinRelation.builder().build();
if (!CollectionUtils.isEmpty(schema.getJoinRelations())) {
for (JoinRelation joinRelation : schema.getJoinRelations()) {
if (joinRelation.getRight().equalsIgnoreCase(tableView.getDataModel().getName())
&& before.containsKey(joinRelation.getLeft())) {
matchJoinRelation.setJoinCondition(joinRelation.getJoinCondition().stream()
.map(r -> Triple.of(
before.get(joinRelation.getLeft()) + "." + r.getLeft(),
r.getMiddle(), tableView.getAlias() + "." + r.getRight()))
.collect(Collectors.toList()));
matchJoinRelation.setJoinType(joinRelation.getJoinType());
// Added join condition judgment to solve the problem of join condition order
} else if (joinRelation.getLeft()
.equalsIgnoreCase(tableView.getDataModel().getName())
&& before.containsKey(joinRelation.getRight())) {
matchJoinRelation.setJoinCondition(joinRelation.getJoinCondition().stream()
.map(r -> Triple.of(
before.get(joinRelation.getRight()) + "." + r.getRight(),
r.getMiddle(), tableView.getAlias() + "." + r.getLeft()))
.collect(Collectors.toList()));
matchJoinRelation.setJoinType(joinRelation.getJoinType());
}
}
}
return matchJoinRelation;
}
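    // Illustration (hypothetical models and fields): given a relation
    // left=s2_user, right=s2_log with condition (user_id, =, user_id), joining
    // s2_log after s2_user yields <userAlias>.user_id = <logAlias>.user_id,
    // with aliases built from JOIN_TABLE_PREFIX; the second branch covers the
    // case where the two models arrive in the opposite order.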
private SqlNode getCondition(JoinRelation joinRelation, SqlValidatorScope scope,
EngineType engineType) throws Exception {
SqlNode condition = null;
for (Triple<String, String, String> con : joinRelation.getJoinCondition()) {
List<SqlNode> ons = new ArrayList<>();
ons.add(SemanticNode.parse(con.getLeft(), scope, engineType));
ons.add(SemanticNode.parse(con.getRight(), scope, engineType));
if (Objects.isNull(condition)) {
condition = new SqlBasicCall(SemanticNode.getBinaryOperator(con.getMiddle()), ons,
SqlParserPos.ZERO, null);
continue;
}
SqlNode addCondition = new SqlBasicCall(SemanticNode.getBinaryOperator(con.getMiddle()),
ons, SqlParserPos.ZERO, null);
condition = new SqlBasicCall(SqlStdOperatorTable.AND,
new ArrayList<>(Arrays.asList(condition, addCondition)), SqlParserPos.ZERO,
null);
}
return condition;
}
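    // Example (hypothetical triples): conditions [(l.id, =, r.id),
    // (l.dt, >=, r.start_dt)] fold left-to-right into
    // l.id = r.id AND l.dt >= r.start_dt.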
private SqlNode getCondition(TableView left, TableView right, ModelResp dataModel,
S2CalciteSchema schema, SqlValidatorScope scope, EngineType engineType)
throws Exception {
Set<String> selectLeft = SemanticNode.getSelect(left.getTable());
Set<String> selectRight = SemanticNode.getSelect(right.getTable());
selectLeft.retainAll(selectRight);
SqlNode condition = null;
for (String on : selectLeft) {
if (!isDimension(on, dataModel, schema)) {
continue;
}
if (isForeign(on, left.getDataModel().getIdentifiers())) {
if (!isPrimary(on, right.getDataModel().getIdentifiers())) {
continue;
}
}
if (isForeign(on, right.getDataModel().getIdentifiers())) {
if (!isPrimary(on, left.getDataModel().getIdentifiers())) {
continue;
}
}
List<SqlNode> ons = new ArrayList<>();
ons.add(SemanticNode.parse(left.getAlias() + "." + on, scope, engineType));
ons.add(SemanticNode.parse(right.getAlias() + "." + on, scope, engineType));
if (condition == null) {
condition =
new SqlBasicCall(SqlStdOperatorTable.EQUALS, ons, SqlParserPos.ZERO, null);
continue;
}
SqlNode addCondition =
new SqlBasicCall(SqlStdOperatorTable.EQUALS, ons, SqlParserPos.ZERO, null);
condition = new SqlBasicCall(SqlStdOperatorTable.AND,
new ArrayList<>(Arrays.asList(condition, addCondition)), SqlParserPos.ZERO,
null);
}
return condition;
}
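    // Fallback join keys: any select field common to both sides that is a
    // dimension or identifier of the model qualifies, except a foreign
    // identifier on one side whose counterpart is not the primary identifier
    // on the other.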
public static TableView renderOne(Set<MetricSchemaResp> queryMetrics,
Set<DimSchemaResp> queryDimensions, ModelResp dataModel, SqlValidatorScope scope,
S2CalciteSchema schema) {
TableView tableView = new TableView();
EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
Set<String> queryFields = tableView.getFields();
        if (Objects.nonNull(queryMetrics)) {
            queryMetrics.forEach(m -> queryFields.addAll(m.getFields()));
        }
        if (Objects.nonNull(queryDimensions)) {
            queryDimensions.forEach(d -> queryFields.addAll(d.getFields()));
        }
try {
for (String field : queryFields) {
tableView.getSelect().add(SemanticNode.parse(field, scope, engineType));
}
tableView.setTable(DataModelNode.build(dataModel, scope));
        } catch (Exception e) {
            log.error("Failed to create sqlNode for data model {}", dataModel, e);
        }
return tableView;
}
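    // Illustration (hypothetical schema): a metric whose fields are [pv_cnt]
    // plus a dimension whose fields are [department] produce
    // SELECT pv_cnt, department over the model's table node.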
private static boolean isDimension(String name, ModelResp dataModel, S2CalciteSchema schema) {
Optional<Dimension> dimension = dataModel.getModelDetail().getDimensions().stream()
.filter(d -> d.getName().equalsIgnoreCase(name)).findFirst();
if (dimension.isPresent()) {
return true;
}
Optional<Identify> identify = dataModel.getIdentifiers().stream()
.filter(i -> i.getName().equalsIgnoreCase(name)).findFirst();
if (identify.isPresent()) {
return true;
}
if (schema.getDimensions().containsKey(dataModel.getName())) {
Optional<DimSchemaResp> dataSourceDim = schema.getDimensions().get(dataModel.getName())
.stream().filter(d -> d.getName().equalsIgnoreCase(name)).findFirst();
if (dataSourceDim.isPresent()) {
return true;
}
}
return false;
}
private static boolean isForeign(String name, List<Identify> identifies) {
Optional<Identify> identify =
identifies.stream().filter(i -> i.getName().equalsIgnoreCase(name)).findFirst();
if (identify.isPresent()) {
return IdentifyType.foreign.equals(identify.get().getType());
}
return false;
}
private static boolean isPrimary(String name, List<Identify> identifies) {
Optional<Identify> identify =
identifies.stream().filter(i -> i.getName().equalsIgnoreCase(name)).findFirst();
if (identify.isPresent()) {
return IdentifyType.primary.equals(identify.get().getType());
}
return false;
}
}

View File

@@ -1,9 +1,9 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
import lombok.Data;
import org.apache.calcite.sql.SqlBasicCall;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.SqlSelect;
@@ -11,44 +11,33 @@ import org.apache.calcite.sql.parser.SqlParserPos;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import java.util.Set;
/** Basic query projection over a single table view. */
@Data
public class TableView {
private List<SqlNode> filter = new ArrayList<>();
private List<SqlNode> dimension = new ArrayList<>();
private List<SqlNode> measure = new ArrayList<>();
private Set<String> fields = Sets.newHashSet();
private List<SqlNode> select = Lists.newArrayList();
private SqlNodeList order;
private SqlNode fetch;
private SqlNode offset;
private SqlNode table;
private String alias;
private List<String> primary;
private DataModel dataModel;
private ModelResp dataModel;
public SqlNode build() {
measure.addAll(dimension);
SqlNodeList dimensionNodeList = null;
if (dimension.size() > 0) {
dimensionNodeList = new SqlNodeList(getGroup(dimension), SqlParserPos.ZERO);
List<SqlNode> selectNodeList = new ArrayList<>();
if (select.isEmpty()) {
return new SqlSelect(SqlParserPos.ZERO, null,
new SqlNodeList(SqlNodeList.SINGLETON_STAR, SqlParserPos.ZERO), table, null,
null, null, null, null, order, offset, fetch, null);
} else {
selectNodeList.addAll(select);
return new SqlSelect(SqlParserPos.ZERO, null,
new SqlNodeList(selectNodeList, SqlParserPos.ZERO), table, null, null, null,
null, null, order, offset, fetch, null);
}
SqlNodeList filterNodeList = null;
if (filter.size() > 0) {
filterNodeList = new SqlNodeList(filter, SqlParserPos.ZERO);
}
return new SqlSelect(SqlParserPos.ZERO, null, new SqlNodeList(measure, SqlParserPos.ZERO),
table, filterNodeList, dimensionNodeList, null, null, null, order, offset, fetch,
null);
}
private List<SqlNode> getGroup(List<SqlNode> sqlNodeList) {
return sqlNodeList.stream()
.map(s -> (s.getKind().equals(SqlKind.AS)
? ((SqlBasicCall) s).getOperandList().get(0)
: s))
.collect(Collectors.toList());
}
}

View File

@@ -1,26 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import java.util.Objects;
public class AggFunctionNode extends SemanticNode {
public static SqlNode build(String agg, String name, SqlValidatorScope scope,
EngineType engineType) throws Exception {
if (Objects.isNull(agg) || agg.isEmpty()) {
return parse(name, scope, engineType);
}
if (AggFunction.COUNT_DISTINCT.name().equalsIgnoreCase(agg)) {
return parse(AggFunction.COUNT.name() + " ( " + AggFunction.DISTINCT.name() + " " + name
+ " ) ", scope, engineType);
}
return parse(agg + " ( " + name + " ) ", scope, engineType);
}
public static enum AggFunction {
AVG, COUNT_DISTINCT, MAX, MIN, SUM, COUNT, DISTINCT
}
}

View File

@@ -1,61 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Dimension;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import java.util.List;
import java.util.Objects;
public class DimensionNode extends SemanticNode {
public static SqlNode build(Dimension dimension, SqlValidatorScope scope, EngineType engineType)
throws Exception {
SqlNode sqlNode = parse(dimension.getExpr(), scope, engineType);
if (!dimension.getName().equals(dimension.getExpr())) {
sqlNode = buildAs(dimension.getName(), sqlNode);
}
return sqlNode;
}
public static List<SqlNode> expand(Dimension dimension, SqlValidatorScope scope,
EngineType engineType) throws Exception {
SqlNode sqlNode = parse(dimension.getExpr(), scope, engineType);
return expand(sqlNode, scope);
}
public static SqlNode buildName(Dimension dimension, SqlValidatorScope scope,
EngineType engineType) throws Exception {
return parse(dimension.getName(), scope, engineType);
}
public static SqlNode buildExp(Dimension dimension, SqlValidatorScope scope,
EngineType engineType) throws Exception {
return parse(dimension.getExpr(), scope, engineType);
}
public static SqlNode buildNameAs(String alias, Dimension dimension, SqlValidatorScope scope,
EngineType engineType) throws Exception {
if ("".equals(alias)) {
return buildName(dimension, scope, engineType);
}
SqlNode sqlNode = parse(dimension.getName(), scope, engineType);
return buildAs(alias, sqlNode);
}
public static SqlNode buildArray(Dimension dimension, SqlValidatorScope scope,
EngineType engineType) throws Exception {
if (Objects.nonNull(dimension.getDataType()) && dimension.getDataType().isArray()) {
SqlNode sqlNode = parse(dimension.getExpr(), scope, engineType);
if (isIdentifier(sqlNode)) {
return buildAs(dimension.getName(),
parse(dimension.getExpr() + Constants.DIMENSION_ARRAY_SINGLE_SUFFIX, scope,
engineType));
}
throw new Exception("array dimension expr should only identify");
}
return build(dimension, scope, engineType);
}
}

View File

@@ -1,35 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
import org.apache.calcite.sql.SqlCall;
import org.apache.calcite.sql.SqlInternalOperator;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.SqlWriter;
import org.apache.calcite.sql.SqlWriter.Frame;
import org.apache.calcite.sql.SqlWriter.FrameTypeEnum;
public class ExtendNode extends SqlInternalOperator {
public ExtendNode() {
super(SqlKind.EXTEND.lowerName, SqlKind.EXTEND);
}
public void unparse(SqlWriter writer, SqlCall call, int leftPrec, int rightPrec) {
SqlOperator operator = call.getOperator();
Frame frame = writer.startList(FrameTypeEnum.SIMPLE);
call.operand(0).unparse(writer, leftPrec, operator.getLeftPrec());
writer.setNeedWhitespace(true);
writer.sep(operator.getName());
SqlNodeList list = (SqlNodeList) call.operand(1);
Frame frameArgs = writer.startList("(", ")");
for (int i = 0; i < list.size(); i++) {
list.get(i).unparse(writer, 0, 0);
if (i < list.size() - 1) {
writer.sep(",");
}
}
writer.endList(frameArgs);
writer.endList(frame);
}
}

View File

@@ -1,28 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
import org.apache.calcite.sql.SqlBasicCall;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlNode;
import java.util.Set;
public class FilterNode extends SemanticNode {
public static void getFilterField(SqlNode sqlNode, Set<String> fields) {
if (sqlNode instanceof SqlIdentifier) {
SqlIdentifier sqlIdentifier = (SqlIdentifier) sqlNode;
fields.add(sqlIdentifier.names.get(0).toLowerCase());
return;
}
if (sqlNode instanceof SqlBasicCall) {
SqlBasicCall sqlBasicCall = (SqlBasicCall) sqlNode;
for (SqlNode operand : sqlBasicCall.getOperandList()) {
getFilterField(operand, fields);
}
}
}
public static boolean isMatchDataSource(Set<String> measures) {
return false;
}
}

View File

@@ -1,42 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Identify;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
public class IdentifyNode extends SemanticNode {
public static SqlNode build(Identify identify, SqlValidatorScope scope, EngineType engineType)
throws Exception {
return parse(identify.getName(), scope, engineType);
}
public static Set<String> getIdentifyNames(List<Identify> identifies, Identify.Type type) {
return identifies.stream().filter(i -> type.name().equalsIgnoreCase(i.getType()))
.map(i -> i.getName()).collect(Collectors.toSet());
}
public static boolean isForeign(String name, List<Identify> identifies) {
Optional<Identify> identify =
identifies.stream().filter(i -> i.getName().equalsIgnoreCase(name)).findFirst();
if (identify.isPresent()) {
return Identify.Type.FOREIGN.name().equalsIgnoreCase(identify.get().getType());
}
return false;
}
public static boolean isPrimary(String name, List<Identify> identifies) {
Optional<Identify> identify =
identifies.stream().filter(i -> i.getName().equalsIgnoreCase(name)).findFirst();
if (identify.isPresent()) {
return Identify.Type.PRIMARY.name().equalsIgnoreCase(identify.get().getType());
}
return false;
}
}

View File

@@ -1,79 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
import org.apache.calcite.linq4j.Ord;
import org.apache.calcite.sql.SqlCall;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.SqlWriter;
import java.util.Iterator;
import java.util.Map;
import java.util.Objects;
/** extend node to handle lateral explode dataSet */
public class LateralViewExplodeNode extends ExtendNode {
public final String sqlNameView = "view";
public final String sqlNameExplode = "explode";
public final String sqlNameExplodeSplit = "explode_split";
private Map<String, String> delimiterMap;
public LateralViewExplodeNode(Map<String, String> delimiterMap) {
super();
this.delimiterMap = delimiterMap;
}
public void unparse(SqlWriter writer, SqlCall call, int leftPrec, int rightPrec) {
SqlOperator operator = call.getOperator();
writer.setNeedWhitespace(true);
assert call.operandCount() == 2;
writer.sep(SqlKind.SELECT.lowerName);
writer.sep(SqlIdentifier.STAR.toString());
writer.sep("from");
SqlWriter.Frame frame = writer.startList(SqlWriter.FrameTypeEnum.SIMPLE);
call.operand(0).unparse(writer, leftPrec, operator.getLeftPrec());
writer.setNeedWhitespace(true);
writer.sep(SqlKind.LATERAL.lowerName);
writer.sep(sqlNameView);
SqlNodeList list = (SqlNodeList) call.operand(1);
Ord node;
Iterator var = Ord.zip(list).iterator();
while (var.hasNext()) {
node = (Ord) var.next();
if (node.i > 0 && node.i % 2 > 0) {
writer.sep(SqlKind.AS.lowerName);
((SqlNode) node.e).unparse(writer, 0, 0);
continue;
}
if (node.i > 0 && node.i % 2 == 0) {
writer.sep(SqlKind.LATERAL.lowerName);
writer.sep(sqlNameView);
}
explode(writer, (SqlNode) node.e);
}
writer.endList(frame);
}
public void explode(SqlWriter writer, SqlNode sqlNode) {
String delimiter =
Objects.nonNull(delimiterMap) && delimiterMap.containsKey(sqlNode.toString())
? delimiterMap.get(sqlNode.toString())
: "";
if (delimiter.isEmpty()) {
writer.sep(sqlNameExplode);
} else {
writer.sep(sqlNameExplodeSplit);
}
SqlWriter.Frame frame = writer.startList("(", ")");
sqlNode.unparse(writer, 0, 0);
if (!delimiter.isEmpty()) {
writer.sep(",");
writer.sep(String.format("'%s'", delimiter));
}
writer.endList(frame);
writer.sep("tmp_sgl_" + sqlNode.toString());
}
}

View File

@@ -1,36 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Measure;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
public class MeasureNode extends SemanticNode {
public static SqlNode buildNonAgg(String alias, Measure measure, SqlValidatorScope scope,
EngineType engineType) throws Exception {
if (measure.getExpr() == null) {
return getExpr(measure, alias, scope, engineType);
} else {
return buildAs(measure.getName(), getExpr(measure, alias, scope, engineType));
}
}
public static SqlNode buildAgg(Measure measure, boolean noAgg, SqlValidatorScope scope,
EngineType engineType) throws Exception {
if ((measure.getAgg() == null || measure.getAgg().isEmpty()) || noAgg) {
return parse(measure.getName(), scope, engineType);
}
return buildAs(measure.getName(),
AggFunctionNode.build(measure.getAgg(), measure.getName(), scope, engineType));
}
    private static SqlNode getExpr(Measure measure, String alias, SqlValidatorScope scope,
            EngineType engineType) throws Exception {
        if (measure.getExpr() == null) {
            return parse((alias.isEmpty() ? "" : alias + ".") + measure.getName(), scope,
                    engineType);
        }
        return parse(measure.getExpr(), scope, engineType);
    }
}

View File

@@ -1,42 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
import lombok.Data;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
@Data
public class MetricNode extends SemanticNode {
private Metric metric;
private Map<String, SqlNode> aggNode = new HashMap<>();
private Map<String, SqlNode> nonAggNode = new HashMap<>();
private Map<String, SqlNode> measureFilter = new HashMap<>();
private Map<String, String> aggFunction = new HashMap<>();
public static SqlNode build(Metric metric, SqlValidatorScope scope, EngineType engineType)
throws Exception {
if (metric.getMetricTypeParams() == null || metric.getMetricTypeParams().getExpr() == null
|| metric.getMetricTypeParams().getExpr().isEmpty()) {
return parse(metric.getName(), scope, engineType);
}
SqlNode sqlNode = parse(metric.getMetricTypeParams().getExpr(), scope, engineType);
return buildAs(metric.getName(), sqlNode);
}
public static Boolean isMetricField(String name, S2CalciteSchema schema) {
Optional<Metric> metric = schema.getMetrics().stream()
.filter(m -> m.getName().equalsIgnoreCase(name)).findFirst();
return metric.isPresent() && metric.get().getMetricTypeParams().isFieldMetric();
}
public static Boolean isMetricField(Metric metric) {
return metric.getMetricTypeParams().isFieldMetric();
}
}

View File

@@ -1,78 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.render;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.calcite.TableView;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.FilterNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.MetricNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.SemanticNode;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
/** process query specified filtering information */
public class FilterRender extends Renderer {
@Override
public void render(OntologyQuery metricCommand, List<DataModel> dataModels,
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
TableView tableView = super.tableView;
SqlNode filterNode = null;
List<String> queryMetrics = new ArrayList<>(metricCommand.getMetrics());
List<String> queryDimensions = new ArrayList<>(metricCommand.getDimensions());
EngineType engineType = schema.getOntology().getDatabaseType();
if (metricCommand.getWhere() != null && !metricCommand.getWhere().isEmpty()) {
filterNode = SemanticNode.parse(metricCommand.getWhere(), scope, engineType);
Set<String> whereFields = new HashSet<>();
FilterNode.getFilterField(filterNode, whereFields);
List<String> fieldWhere = whereFields.stream().collect(Collectors.toList());
Set<String> dimensions = new HashSet<>();
Set<String> metrics = new HashSet<>();
for (DataModel dataModel : dataModels) {
SourceRender.whereDimMetric(fieldWhere, metricCommand.getMetrics(),
metricCommand.getDimensions(), dataModel, schema, dimensions, metrics);
}
queryMetrics.addAll(metrics);
queryDimensions.addAll(dimensions);
}
for (String dimension : queryDimensions) {
tableView.getMeasure().add(SemanticNode.parse(dimension, scope, engineType));
}
for (String metric : queryMetrics) {
Optional<Metric> optionalMetric = Renderer.getMetricByName(metric, schema);
if (optionalMetric.isPresent() && MetricNode.isMetricField(optionalMetric.get())) {
// metric from field ignore
continue;
}
if (optionalMetric.isPresent()) {
tableView.getMeasure()
.add(MetricNode.build(optionalMetric.get(), scope, engineType));
} else {
tableView.getMeasure().add(SemanticNode.parse(metric, scope, engineType));
}
}
tableView.setMeasure(SemanticNode.deduplicateNode(tableView.getMeasure()));
tableView.setDimension(SemanticNode.deduplicateNode(tableView.getDimension()));
if (filterNode != null) {
TableView filterView = new TableView();
filterView.setTable(SemanticNode.buildAs(Constants.DATASOURCE_TABLE_FILTER_PREFIX,
tableView.build()));
filterView.getFilter().add(filterNode);
filterView.getMeasure().add(SqlIdentifier.star(SqlParserPos.ZERO));
super.tableView = filterView;
}
}
}

View File

@@ -1,485 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.render;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.JoinRelation;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.calcite.TableView;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.AggFunctionNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.DataModelNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.FilterNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.IdentifyNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.MetricNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.SemanticNode;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Identify;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Materialization;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.JoinConditionType;
import org.apache.calcite.sql.SqlBasicCall;
import org.apache.calcite.sql.SqlJoin;
import org.apache.calcite.sql.SqlLiteral;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Triple;
import org.springframework.util.CollectionUtils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Queue;
import java.util.Set;
import java.util.stream.Collectors;
/** process the join conditions when the source number is greater than 1 */
@Slf4j
public class JoinRender extends Renderer {
@Override
public void render(OntologyQuery metricCommand, List<DataModel> dataModels,
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
String queryWhere = metricCommand.getWhere();
EngineType engineType = schema.getOntology().getDatabaseType();
Set<String> whereFields = new HashSet<>();
List<String> fieldWhere = new ArrayList<>();
if (queryWhere != null && !queryWhere.isEmpty()) {
SqlNode sqlNode = SemanticNode.parse(queryWhere, scope, engineType);
FilterNode.getFilterField(sqlNode, whereFields);
fieldWhere = whereFields.stream().collect(Collectors.toList());
}
Set<String> queryAllDimension = new HashSet<>();
Set<String> measures = new HashSet<>();
DataModelNode.getQueryDimensionMeasure(schema.getOntology(), metricCommand,
queryAllDimension, measures);
SqlNode left = null;
TableView leftTable = null;
TableView innerView = new TableView();
TableView filterView = new TableView();
Map<String, SqlNode> innerSelect = new HashMap<>();
Set<String> filterDimension = new HashSet<>();
Map<String, String> beforeSources = new HashMap<>();
for (int i = 0; i < dataModels.size(); i++) {
final DataModel dataModel = dataModels.get(i);
final Set<String> filterDimensions = new HashSet<>();
final Set<String> filterMetrics = new HashSet<>();
final Set<String> queryDimension = new HashSet<>();
final Set<String> queryMetrics = new HashSet<>();
SourceRender.whereDimMetric(fieldWhere, queryMetrics, queryDimension, dataModel, schema,
filterDimensions, filterMetrics);
List<String> reqMetric = new ArrayList<>(metricCommand.getMetrics());
reqMetric.addAll(filterMetrics);
reqMetric = uniqList(reqMetric);
List<String> reqDimension = new ArrayList<>(metricCommand.getDimensions());
reqDimension.addAll(filterDimensions);
reqDimension = uniqList(reqDimension);
Set<String> sourceMeasure = dataModel.getMeasures().stream().map(mm -> mm.getName())
.collect(Collectors.toSet());
doMetric(innerSelect, filterView, queryMetrics, reqMetric, dataModel, sourceMeasure,
scope, schema, nonAgg);
Set<String> dimension = dataModel.getDimensions().stream().map(dd -> dd.getName())
.collect(Collectors.toSet());
doDimension(innerSelect, filterDimension, queryDimension, reqDimension, dataModel,
dimension, scope, schema);
List<String> primary = new ArrayList<>();
for (Identify identify : dataModel.getIdentifiers()) {
primary.add(identify.getName());
if (!fieldWhere.contains(identify.getName())) {
fieldWhere.add(identify.getName());
}
}
List<String> dataSourceWhere = new ArrayList<>(fieldWhere);
addZipperField(dataModel, dataSourceWhere);
TableView tableView =
SourceRender.renderOne("", dataSourceWhere, queryMetrics, queryDimension,
metricCommand.getWhere(), dataModels.get(i), scope, schema, true);
log.info("tableView {}", StringUtils.normalizeSpace(tableView.getTable().toString()));
String alias = Constants.JOIN_TABLE_PREFIX + dataModel.getName();
tableView.setAlias(alias);
tableView.setPrimary(primary);
tableView.setDataModel(dataModel);
if (left == null) {
leftTable = tableView;
left = SemanticNode.buildAs(tableView.getAlias(), getTable(tableView, scope));
beforeSources.put(dataModel.getName(), leftTable.getAlias());
continue;
}
left = buildJoin(left, leftTable, tableView, beforeSources, dataModel, schema, scope);
leftTable = tableView;
beforeSources.put(dataModel.getName(), tableView.getAlias());
}
for (Map.Entry<String, SqlNode> entry : innerSelect.entrySet()) {
innerView.getMeasure().add(entry.getValue());
}
innerView.setTable(left);
filterView
.setTable(SemanticNode.buildAs(Constants.JOIN_TABLE_OUT_PREFIX, innerView.build()));
if (!filterDimension.isEmpty()) {
for (String d : getQueryDimension(filterDimension, queryAllDimension, whereFields)) {
if (nonAgg) {
filterView.getMeasure().add(SemanticNode.parse(d, scope, engineType));
} else {
filterView.getDimension().add(SemanticNode.parse(d, scope, engineType));
}
}
}
filterView.setMeasure(SemanticNode.deduplicateNode(filterView.getMeasure()));
filterView.setDimension(SemanticNode.deduplicateNode(filterView.getDimension()));
super.tableView = filterView;
}
private void doMetric(Map<String, SqlNode> innerSelect, TableView filterView,
Set<String> queryMetrics, List<String> reqMetrics, DataModel dataModel,
Set<String> sourceMeasure, SqlValidatorScope scope, S2CalciteSchema schema,
boolean nonAgg) throws Exception {
String alias = Constants.JOIN_TABLE_PREFIX + dataModel.getName();
EngineType engineType = schema.getOntology().getDatabaseType();
for (String m : reqMetrics) {
if (getMatchMetric(schema, sourceMeasure, m, queryMetrics)) {
MetricNode metricNode = buildMetricNode(m, dataModel, scope, schema, nonAgg, alias);
if (!metricNode.getNonAggNode().isEmpty()) {
for (String measure : metricNode.getNonAggNode().keySet()) {
innerSelect.put(measure, SemanticNode.buildAs(measure,
SemanticNode.parse(alias + "." + measure, scope, engineType)));
}
}
if (metricNode.getAggFunction() != null && !metricNode.getAggFunction().isEmpty()) {
for (Map.Entry<String, String> entry : metricNode.getAggFunction().entrySet()) {
if (metricNode.getNonAggNode().containsKey(entry.getKey())) {
if (nonAgg) {
filterView.getMeasure().add(SemanticNode.buildAs(entry.getKey(),
SemanticNode.parse(entry.getKey(), scope, engineType)));
} else {
filterView.getMeasure()
.add(SemanticNode.buildAs(entry.getKey(),
AggFunctionNode.build(entry.getValue(),
entry.getKey(), scope, engineType)));
}
}
}
}
}
}
}
private void doDimension(Map<String, SqlNode> innerSelect, Set<String> filterDimension,
Set<String> queryDimension, List<String> reqDimensions, DataModel dataModel,
Set<String> dimension, SqlValidatorScope scope, S2CalciteSchema schema)
throws Exception {
String alias = Constants.JOIN_TABLE_PREFIX + dataModel.getName();
EngineType engineType = schema.getOntology().getDatabaseType();
for (String d : reqDimensions) {
if (getMatchDimension(schema, dimension, dataModel, d, queryDimension)) {
if (d.contains(Constants.DIMENSION_IDENTIFY)) {
String[] identifyDimension = d.split(Constants.DIMENSION_IDENTIFY);
innerSelect.put(d, SemanticNode.buildAs(d, SemanticNode
.parse(alias + "." + identifyDimension[1], scope, engineType)));
} else {
innerSelect.put(d, SemanticNode.buildAs(d,
SemanticNode.parse(alias + "." + d, scope, engineType)));
}
filterDimension.add(d);
}
}
}
private Set<String> getQueryDimension(Set<String> filterDimension,
Set<String> queryAllDimension, Set<String> whereFields) {
return filterDimension.stream()
.filter(d -> queryAllDimension.contains(d) || whereFields.contains(d))
.collect(Collectors.toSet());
}
private boolean getMatchMetric(S2CalciteSchema schema, Set<String> sourceMeasure, String m,
Set<String> queryMetrics) {
Optional<Metric> metric = schema.getMetrics().stream()
.filter(mm -> mm.getName().equalsIgnoreCase(m)).findFirst();
boolean isAdd = false;
if (metric.isPresent()) {
Set<String> metricMeasures = metric.get().getMetricTypeParams().getMeasures().stream()
.map(me -> me.getName()).collect(Collectors.toSet());
if (sourceMeasure.containsAll(metricMeasures)) {
isAdd = true;
}
}
if (sourceMeasure.contains(m)) {
isAdd = true;
}
if (isAdd && !queryMetrics.contains(m)) {
queryMetrics.add(m);
}
return isAdd;
}
private boolean getMatchDimension(S2CalciteSchema schema, Set<String> sourceDimension,
DataModel dataModel, String d, Set<String> queryDimension) {
String oriDimension = d;
boolean isAdd = false;
if (d.contains(Constants.DIMENSION_IDENTIFY)) {
oriDimension = d.split(Constants.DIMENSION_IDENTIFY)[1];
}
if (sourceDimension.contains(oriDimension)) {
isAdd = true;
}
for (Identify identify : dataModel.getIdentifiers()) {
if (identify.getName().equalsIgnoreCase(oriDimension)) {
isAdd = true;
break;
}
}
if (schema.getDimensions().containsKey(dataModel.getName())) {
for (Dimension dim : schema.getDimensions().get(dataModel.getName())) {
if (dim.getName().equalsIgnoreCase(oriDimension)) {
isAdd = true;
}
}
}
if (isAdd && !queryDimension.contains(oriDimension)) {
queryDimension.add(oriDimension);
}
return isAdd;
}
private SqlNode getTable(TableView tableView, SqlValidatorScope scope) throws Exception {
return SemanticNode.getTable(tableView.getTable());
}
private SqlNode buildJoin(SqlNode left, TableView leftTable, TableView tableView,
Map<String, String> before, DataModel dataModel, S2CalciteSchema schema,
SqlValidatorScope scope) throws Exception {
EngineType engineType = schema.getOntology().getDatabaseType();
SqlNode condition =
getCondition(leftTable, tableView, dataModel, schema, scope, engineType);
SqlLiteral sqlLiteral = SemanticNode.getJoinSqlLiteral("");
JoinRelation matchJoinRelation = getMatchJoinRelation(before, tableView, schema);
SqlNode joinRelationCondition = null;
if (!CollectionUtils.isEmpty(matchJoinRelation.getJoinCondition())) {
sqlLiteral = SemanticNode.getJoinSqlLiteral(matchJoinRelation.getJoinType());
joinRelationCondition = getCondition(matchJoinRelation, scope, engineType);
condition = joinRelationCondition;
}
if (Materialization.TimePartType.ZIPPER.equals(leftTable.getDataModel().getTimePartType())
|| Materialization.TimePartType.ZIPPER
.equals(tableView.getDataModel().getTimePartType())) {
SqlNode zipperCondition =
getZipperCondition(leftTable, tableView, dataModel, schema, scope);
if (Objects.nonNull(joinRelationCondition)) {
condition = new SqlBasicCall(SqlStdOperatorTable.AND,
new ArrayList<>(Arrays.asList(zipperCondition, joinRelationCondition)),
SqlParserPos.ZERO, null);
} else {
condition = zipperCondition;
}
}
return new SqlJoin(SqlParserPos.ZERO, left,
SqlLiteral.createBoolean(false, SqlParserPos.ZERO), sqlLiteral,
SemanticNode.buildAs(tableView.getAlias(), getTable(tableView, scope)),
SqlLiteral.createSymbol(JoinConditionType.ON, SqlParserPos.ZERO), condition);
}
private JoinRelation getMatchJoinRelation(Map<String, String> before, TableView tableView,
S2CalciteSchema schema) {
JoinRelation matchJoinRelation = JoinRelation.builder().build();
if (!CollectionUtils.isEmpty(schema.getJoinRelations())) {
for (JoinRelation joinRelation : schema.getJoinRelations()) {
if (joinRelation.getRight().equalsIgnoreCase(tableView.getDataModel().getName())
&& before.containsKey(joinRelation.getLeft())) {
matchJoinRelation.setJoinCondition(joinRelation.getJoinCondition().stream()
.map(r -> Triple.of(
before.get(joinRelation.getLeft()) + "." + r.getLeft(),
r.getMiddle(), tableView.getAlias() + "." + r.getRight()))
.collect(Collectors.toList()));
matchJoinRelation.setJoinType(joinRelation.getJoinType());
                    // Also match when the relation direction is reversed, so the declared
                    // order of a join condition does not affect matching
} else if (joinRelation.getLeft()
.equalsIgnoreCase(tableView.getDataModel().getName())
&& before.containsKey(joinRelation.getRight())) {
matchJoinRelation.setJoinCondition(joinRelation.getJoinCondition().stream()
.map(r -> Triple.of(
before.get(joinRelation.getRight()) + "." + r.getRight(),
r.getMiddle(), tableView.getAlias() + "." + r.getLeft()))
.collect(Collectors.toList()));
matchJoinRelation.setJoinType(joinRelation.getJoinType());
}
}
}
return matchJoinRelation;
}
private SqlNode getCondition(JoinRelation joinRelation, SqlValidatorScope scope,
EngineType engineType) throws Exception {
SqlNode condition = null;
for (Triple<String, String, String> con : joinRelation.getJoinCondition()) {
List<SqlNode> ons = new ArrayList<>();
ons.add(SemanticNode.parse(con.getLeft(), scope, engineType));
ons.add(SemanticNode.parse(con.getRight(), scope, engineType));
if (Objects.isNull(condition)) {
condition = new SqlBasicCall(SemanticNode.getBinaryOperator(con.getMiddle()), ons,
SqlParserPos.ZERO, null);
continue;
}
SqlNode addCondition = new SqlBasicCall(SemanticNode.getBinaryOperator(con.getMiddle()),
ons, SqlParserPos.ZERO, null);
condition = new SqlBasicCall(SqlStdOperatorTable.AND,
new ArrayList<>(Arrays.asList(condition, addCondition)), SqlParserPos.ZERO,
null);
}
return condition;
}
private SqlNode getCondition(TableView left, TableView right, DataModel dataModel,
S2CalciteSchema schema, SqlValidatorScope scope, EngineType engineType)
throws Exception {
Set<String> selectLeft = SemanticNode.getSelect(left.getTable());
Set<String> selectRight = SemanticNode.getSelect(right.getTable());
selectLeft.retainAll(selectRight);
SqlNode condition = null;
for (String on : selectLeft) {
if (!SourceRender.isDimension(on, dataModel, schema)) {
continue;
}
if (IdentifyNode.isForeign(on, left.getDataModel().getIdentifiers())) {
if (!IdentifyNode.isPrimary(on, right.getDataModel().getIdentifiers())) {
continue;
}
}
if (IdentifyNode.isForeign(on, right.getDataModel().getIdentifiers())) {
if (!IdentifyNode.isPrimary(on, left.getDataModel().getIdentifiers())) {
continue;
}
}
List<SqlNode> ons = new ArrayList<>();
ons.add(SemanticNode.parse(left.getAlias() + "." + on, scope, engineType));
ons.add(SemanticNode.parse(right.getAlias() + "." + on, scope, engineType));
if (condition == null) {
condition =
new SqlBasicCall(SqlStdOperatorTable.EQUALS, ons, SqlParserPos.ZERO, null);
continue;
}
SqlNode addCondition =
new SqlBasicCall(SqlStdOperatorTable.EQUALS, ons, SqlParserPos.ZERO, null);
condition = new SqlBasicCall(SqlStdOperatorTable.AND,
new ArrayList<>(Arrays.asList(condition, addCondition)), SqlParserPos.ZERO,
null);
}
return condition;
}
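    // Backtracking DFS over the join graph: visits data models, accumulating a join order
    // in `orders`, and backtracks when the current branch cannot cover all `cnt` models.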
private static void joinOrder(int cnt, String id, Map<String, Set<String>> next,
Queue<String> orders, Map<String, Boolean> visited) {
visited.put(id, true);
orders.add(id);
if (orders.size() >= cnt) {
return;
}
for (String nextId : next.get(id)) {
if (!visited.get(nextId)) {
joinOrder(cnt, nextId, next, orders, visited);
if (orders.size() >= cnt) {
return;
}
}
}
orders.poll();
visited.put(id, false);
}
private void addZipperField(DataModel dataModel, List<String> fields) {
if (Materialization.TimePartType.ZIPPER.equals(dataModel.getTimePartType())) {
dataModel.getDimensions().stream()
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
.forEach(t -> {
if (t.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_END)
&& !fields.contains(t.getName())) {
fields.add(t.getName());
}
if (t.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_START)
&& !fields.contains(t.getName())) {
fields.add(t.getName());
}
});
}
}
private SqlNode getZipperCondition(TableView left, TableView right, DataModel dataModel,
S2CalciteSchema schema, SqlValidatorScope scope) throws Exception {
if (Materialization.TimePartType.ZIPPER.equals(left.getDataModel().getTimePartType())
&& Materialization.TimePartType.ZIPPER
.equals(right.getDataModel().getTimePartType())) {
            throw new Exception("joining two zipper tables is not supported");
}
SqlNode condition = null;
Optional<Dimension> leftTime = left.getDataModel().getDimensions().stream()
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
.findFirst();
Optional<Dimension> rightTime = right.getDataModel().getDimensions().stream()
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
.findFirst();
if (leftTime.isPresent() && rightTime.isPresent()) {
String startTime = "";
String endTime = "";
String dateTime = "";
Optional<Dimension> startTimeOp = (Materialization.TimePartType.ZIPPER
.equals(left.getDataModel().getTimePartType()) ? left : right).getDataModel()
.getDimensions().stream()
.filter(d -> Constants.DIMENSION_TYPE_TIME
.equalsIgnoreCase(d.getType()))
.filter(d -> d.getName()
.startsWith(Constants.MATERIALIZATION_ZIPPER_START))
.findFirst();
Optional<Dimension> endTimeOp = (Materialization.TimePartType.ZIPPER
.equals(left.getDataModel().getTimePartType()) ? left : right).getDataModel()
.getDimensions().stream()
.filter(d -> Constants.DIMENSION_TYPE_TIME
.equalsIgnoreCase(d.getType()))
.filter(d -> d.getName()
.startsWith(Constants.MATERIALIZATION_ZIPPER_END))
.findFirst();
if (startTimeOp.isPresent() && endTimeOp.isPresent()) {
TableView zipper = Materialization.TimePartType.ZIPPER
.equals(left.getDataModel().getTimePartType()) ? left : right;
TableView partMetric = Materialization.TimePartType.ZIPPER
.equals(left.getDataModel().getTimePartType()) ? right : left;
Optional<Dimension> partTime = Materialization.TimePartType.ZIPPER
.equals(left.getDataModel().getTimePartType()) ? rightTime : leftTime;
startTime = zipper.getAlias() + "." + startTimeOp.get().getName();
endTime = zipper.getAlias() + "." + endTimeOp.get().getName();
dateTime = partMetric.getAlias() + "." + partTime.get().getName();
}
EngineType engineType = schema.getOntology().getDatabaseType();
ArrayList<SqlNode> operandList =
new ArrayList<>(Arrays.asList(SemanticNode.parse(endTime, scope, engineType),
SemanticNode.parse(dateTime, scope, engineType)));
condition = new SqlBasicCall(SqlStdOperatorTable.AND,
new ArrayList<SqlNode>(Arrays.asList(
new SqlBasicCall(SqlStdOperatorTable.LESS_THAN_OR_EQUAL,
new ArrayList<SqlNode>(Arrays.asList(
SemanticNode.parse(startTime, scope, engineType),
SemanticNode.parse(dateTime, scope, engineType))),
SqlParserPos.ZERO, null),
new SqlBasicCall(SqlStdOperatorTable.GREATER_THAN, operandList,
SqlParserPos.ZERO, null))),
SqlParserPos.ZERO, null);
}
return condition;
}
}
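// Illustrative sketch (not part of the original change): the zipper join produced by
// getZipperCondition() pairs a zipper-type model with a partitioned model on a date-range
// predicate instead of an equality. Aliases and column names below are assumed.
class ZipperConditionExample {
    public static void main(String[] args) {
        String zipper = "_t_user_zipper"; // zipper-table alias (assumed)
        String fact = "_t_visit_fact"; // partitioned-table alias (assumed)
        // start_date <= sys_imp_date AND end_date > sys_imp_date
        String condition = zipper + ".start_date <= " + fact + ".sys_imp_date"
                + " AND " + zipper + ".end_date > " + fact + ".sys_imp_date";
        System.out.println(condition);
    }
}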

View File

@@ -1,59 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.render;
import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.calcite.TableView;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.MetricNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.SemanticNode;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import org.springframework.util.CollectionUtils;
import java.util.ArrayList;
import java.util.List;
/** Renders the output items (dimensions, metrics, limit and ordering) of the query request. */
public class OutputRender extends Renderer {
@Override
public void render(OntologyQuery metricCommand, List<DataModel> dataModels,
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
TableView selectDataSet = super.tableView;
EngineType engineType = schema.getOntology().getDatabaseType();
for (String dimension : metricCommand.getDimensions()) {
selectDataSet.getMeasure().add(SemanticNode.parse(dimension, scope, engineType));
}
for (String metric : metricCommand.getMetrics()) {
if (MetricNode.isMetricField(metric, schema)) {
// metric from field ignore
continue;
}
selectDataSet.getMeasure().add(SemanticNode.parse(metric, scope, engineType));
}
if (metricCommand.getLimit() > 0) {
SqlNode offset =
SemanticNode.parse(metricCommand.getLimit().toString(), scope, engineType);
selectDataSet.setOffset(offset);
}
if (!CollectionUtils.isEmpty(metricCommand.getOrder())) {
List<SqlNode> orderList = new ArrayList<>();
for (ColumnOrder columnOrder : metricCommand.getOrder()) {
if (SqlStdOperatorTable.DESC.getName().equalsIgnoreCase(columnOrder.getOrder())) {
orderList.add(SqlStdOperatorTable.DESC.createCall(SqlParserPos.ZERO,
new SqlNode[] {SemanticNode.parse(columnOrder.getCol(), scope,
engineType)}));
} else {
orderList.add(SemanticNode.parse(columnOrder.getCol(), scope, engineType));
}
}
selectDataSet.setOrder(new SqlNodeList(orderList, SqlParserPos.ZERO));
}
}
}
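// Minimal sketch of the descending-order handling above, using Calcite's postfix DESC
// operator; the column name is assumed and fully-qualified names stand in for imports.
class DescOrderExample {
    public static void main(String[] args) {
        org.apache.calcite.sql.SqlNode col = new org.apache.calcite.sql.SqlIdentifier("pv",
                org.apache.calcite.sql.parser.SqlParserPos.ZERO);
        org.apache.calcite.sql.SqlNode desc = org.apache.calcite.sql.fun.SqlStdOperatorTable.DESC
                .createCall(org.apache.calcite.sql.parser.SqlParserPos.ZERO, col);
        System.out.println(desc); // prints something like: `pv` DESC
    }
}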

View File

@@ -1,119 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.render;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.calcite.TableView;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.MeasureNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.MetricNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.SemanticNode;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Identify;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Measure;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
import lombok.Data;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
/** Base renderer that assembles a TableView for the ontology query. */
@Data
public abstract class Renderer {
protected TableView tableView = new TableView();
public static Optional<Dimension> getDimensionByName(String name, DataModel datasource) {
return datasource.getDimensions().stream().filter(d -> d.getName().equalsIgnoreCase(name))
.findFirst();
}
public static Optional<Measure> getMeasureByName(String name, DataModel datasource) {
return datasource.getMeasures().stream().filter(mm -> mm.getName().equalsIgnoreCase(name))
.findFirst();
}
public static Optional<Metric> getMetricByName(String name, S2CalciteSchema schema) {
Optional<Metric> metric = schema.getMetrics().stream()
.filter(m -> m.getName().equalsIgnoreCase(name)).findFirst();
return metric;
}
public static Optional<Identify> getIdentifyByName(String name, DataModel datasource) {
return datasource.getIdentifiers().stream().filter(i -> i.getName().equalsIgnoreCase(name))
.findFirst();
}
public static MetricNode buildMetricNode(String metric, DataModel datasource,
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg, String alias)
throws Exception {
Optional<Metric> metricOpt = getMetricByName(metric, schema);
MetricNode metricNode = new MetricNode();
EngineType engineType = EngineType.fromString(datasource.getType());
if (metricOpt.isPresent()) {
metricNode.setMetric(metricOpt.get());
for (Measure m : metricOpt.get().getMetricTypeParams().getMeasures()) {
Optional<Measure> measure = getMeasureByName(m.getName(), datasource);
if (measure.isPresent()) {
metricNode.getNonAggNode().put(measure.get().getName(),
MeasureNode.buildNonAgg(alias, measure.get(), scope, engineType));
metricNode.getAggNode().put(measure.get().getName(),
MeasureNode.buildAgg(measure.get(), nonAgg, scope, engineType));
metricNode.getAggFunction().put(measure.get().getName(),
measure.get().getAgg());
} else {
metricNode.getNonAggNode().put(m.getName(),
MeasureNode.buildNonAgg(alias, m, scope, engineType));
metricNode.getAggNode().put(m.getName(),
MeasureNode.buildAgg(m, nonAgg, scope, engineType));
metricNode.getAggFunction().put(m.getName(), m.getAgg());
}
if (m.getConstraint() != null && !m.getConstraint().isEmpty()) {
metricNode.getMeasureFilter().put(m.getName(),
SemanticNode.parse(m.getConstraint(), scope, engineType));
}
}
return metricNode;
}
Optional<Measure> measure = getMeasureByName(metric, datasource);
if (measure.isPresent()) {
metricNode.getNonAggNode().put(measure.get().getName(),
MeasureNode.buildNonAgg(alias, measure.get(), scope, engineType));
metricNode.getAggNode().put(measure.get().getName(),
MeasureNode.buildAgg(measure.get(), nonAgg, scope, engineType));
metricNode.getAggFunction().put(measure.get().getName(), measure.get().getAgg());
if (measure.get().getConstraint() != null && !measure.get().getConstraint().isEmpty()) {
metricNode.getMeasureFilter().put(measure.get().getName(),
SemanticNode.parse(measure.get().getConstraint(), scope, engineType));
}
}
return metricNode;
}
public static List<String> uniqList(List<String> list) {
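        // de-duplicates via HashSet; note that the original ordering is not preserved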
Set<String> tmp = new HashSet<>(list);
return tmp.stream().collect(Collectors.toList());
}
public void setTable(SqlNode table) {
tableView.setTable(table);
}
public SqlNode builder() {
return tableView.build();
}
public SqlNode builderAs(String alias) throws Exception {
return SemanticNode.buildAs(alias, tableView.build());
}
public abstract void render(OntologyQuery metricCommand, List<DataModel> dataModels,
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception;
}

View File

@@ -1,359 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.render;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.calcite.TableView;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.DataModelNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.DimensionNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.FilterNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.IdentifyNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.MetricNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.SemanticNode;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Identify;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Materialization;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Measure;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import org.springframework.util.CollectionUtils;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
import static com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants.DIMENSION_DELIMITER;
/** Renders the table dataset for each data model defined in the schema. */
@Slf4j
public class SourceRender extends Renderer {
public static TableView renderOne(String alias, List<String> fieldWheres,
Set<String> reqMetrics, Set<String> reqDimensions, String queryWhere,
DataModel datasource, SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg)
throws Exception {
TableView dataSet = new TableView();
TableView output = new TableView();
Set<String> queryMetrics = new HashSet<>(reqMetrics);
Set<String> queryDimensions = new HashSet<>(reqDimensions);
List<String> fieldWhere = new ArrayList<>(fieldWheres);
Map<String, String> extendFields = new HashMap<>();
if (!fieldWhere.isEmpty()) {
Set<String> dimensions = new HashSet<>();
Set<String> metrics = new HashSet<>();
whereDimMetric(fieldWhere, queryMetrics, queryDimensions, datasource, schema,
dimensions, metrics);
queryMetrics.addAll(metrics);
queryDimensions.addAll(dimensions);
mergeWhere(fieldWhere, dataSet, output, queryMetrics, queryDimensions, extendFields,
datasource, scope, schema, nonAgg);
}
addTimeDimension(datasource, queryDimensions);
for (String metric : queryMetrics) {
MetricNode metricNode =
buildMetricNode(metric, datasource, scope, schema, nonAgg, alias);
if (!metricNode.getAggNode().isEmpty()) {
metricNode.getAggNode().entrySet().stream()
.forEach(m -> output.getMeasure().add(m.getValue()));
}
if (metricNode.getNonAggNode() != null) {
metricNode.getNonAggNode().entrySet().stream()
.forEach(m -> dataSet.getMeasure().add(m.getValue()));
}
if (metricNode.getMeasureFilter() != null) {
metricNode.getMeasureFilter().entrySet().stream()
.forEach(m -> dataSet.getFilter().add(m.getValue()));
}
}
for (String dimension : queryDimensions) {
if (dimension.contains(Constants.DIMENSION_IDENTIFY)
&& queryDimensions.contains(dimension.split(Constants.DIMENSION_IDENTIFY)[1])) {
continue;
}
buildDimension(dimension.contains(Constants.DIMENSION_IDENTIFY) ? dimension : "",
dimension.contains(Constants.DIMENSION_IDENTIFY)
? dimension.split(Constants.DIMENSION_IDENTIFY)[1]
: dimension,
datasource, schema, nonAgg, extendFields, dataSet, output, scope);
}
output.setMeasure(SemanticNode.deduplicateNode(output.getMeasure()));
dataSet.setMeasure(SemanticNode.deduplicateNode(dataSet.getMeasure()));
SqlNode tableNode = DataModelNode.buildExtend(datasource, extendFields, scope);
dataSet.setTable(tableNode);
output.setTable(
SemanticNode.buildAs(Constants.DATASOURCE_TABLE_OUT_PREFIX + datasource.getName()
+ "_" + UUID.randomUUID().toString().substring(32), dataSet.build()));
return output;
}
private static void buildDimension(String alias, String dimension, DataModel datasource,
S2CalciteSchema schema, boolean nonAgg, Map<String, String> extendFields,
TableView dataSet, TableView output, SqlValidatorScope scope) throws Exception {
List<Dimension> dimensionList = schema.getDimensions().get(datasource.getName());
EngineType engineType = schema.getOntology().getDatabaseType();
boolean isAdd = false;
if (!CollectionUtils.isEmpty(dimensionList)) {
for (Dimension dim : dimensionList) {
if (!dim.getName().equalsIgnoreCase(dimension)) {
continue;
}
dataSet.getMeasure().add(DimensionNode.buildArray(dim, scope, engineType));
addExtendFields(dim, extendFields);
if (nonAgg) {
output.getMeasure().add(DimensionNode.buildName(dim, scope, engineType));
isAdd = true;
continue;
}
if ("".equals(alias)) {
output.getDimension().add(DimensionNode.buildName(dim, scope, engineType));
} else {
output.getDimension()
.add(DimensionNode.buildNameAs(alias, dim, scope, engineType));
}
isAdd = true;
break;
}
}
if (!isAdd) {
Optional<Identify> identify = datasource.getIdentifiers().stream()
.filter(i -> i.getName().equalsIgnoreCase(dimension)).findFirst();
if (identify.isPresent()) {
if (nonAgg) {
dataSet.getMeasure()
.add(SemanticNode.parse(identify.get().getName(), scope, engineType));
output.getMeasure()
.add(SemanticNode.parse(identify.get().getName(), scope, engineType));
} else {
dataSet.getMeasure()
.add(SemanticNode.parse(identify.get().getName(), scope, engineType));
output.getDimension()
.add(SemanticNode.parse(identify.get().getName(), scope, engineType));
}
isAdd = true;
}
}
if (isAdd) {
return;
}
Optional<Dimension> dimensionOptional = getDimensionByName(dimension, datasource);
if (dimensionOptional.isPresent()) {
dataSet.getMeasure()
.add(DimensionNode.buildArray(dimensionOptional.get(), scope, engineType));
addExtendFields(dimensionOptional.get(), extendFields);
if (nonAgg) {
output.getMeasure()
.add(DimensionNode.buildName(dimensionOptional.get(), scope, engineType));
return;
}
output.getDimension()
.add(DimensionNode.buildName(dimensionOptional.get(), scope, engineType));
}
}
private static void addExtendFields(Dimension dimension, Map<String, String> extendFields) {
if (dimension.getDataType().isArray()) {
if (Objects.nonNull(dimension.getExt())
&& dimension.getExt().containsKey(DIMENSION_DELIMITER)) {
extendFields.put(dimension.getExpr(),
(String) dimension.getExt().get(DIMENSION_DELIMITER));
} else {
extendFields.put(dimension.getExpr(), "");
}
}
}
private static List<SqlNode> getWhereMeasure(List<String> fields, Set<String> queryMetrics,
Set<String> queryDimensions, Map<String, String> extendFields, DataModel datasource,
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
Iterator<String> iterator = fields.iterator();
List<SqlNode> whereNode = new ArrayList<>();
EngineType engineType = schema.getOntology().getDatabaseType();
while (iterator.hasNext()) {
String cur = iterator.next();
if (queryDimensions.contains(cur) || queryMetrics.contains(cur)) {
iterator.remove();
}
}
for (String where : fields) {
List<Dimension> dimensionList = schema.getDimensions().get(datasource.getName());
boolean isAdd = false;
if (!CollectionUtils.isEmpty(dimensionList)) {
for (Dimension dim : dimensionList) {
if (!dim.getName().equalsIgnoreCase(where)) {
continue;
}
whereNode.addAll(DimensionNode.expand(dim, scope, engineType));
isAdd = true;
}
}
Optional<Identify> identify = getIdentifyByName(where, datasource);
if (identify.isPresent()) {
whereNode.add(IdentifyNode.build(identify.get(), scope, engineType));
isAdd = true;
}
if (isAdd) {
continue;
}
Optional<Dimension> dimensionOptional = getDimensionByName(where, datasource);
if (dimensionOptional.isPresent()) {
whereNode.add(DimensionNode.buildArray(dimensionOptional.get(), scope, engineType));
addExtendFields(dimensionOptional.get(), extendFields);
}
}
return whereNode;
}
private static void mergeWhere(List<String> fields, TableView dataSet, TableView outputSet,
Set<String> queryMetrics, Set<String> queryDimensions, Map<String, String> extendFields,
DataModel datasource, SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg)
throws Exception {
List<SqlNode> whereNode = getWhereMeasure(fields, queryMetrics, queryDimensions,
extendFields, datasource, scope, schema, nonAgg);
dataSet.getMeasure().addAll(whereNode);
// getWhere(outputSet,fields,queryMetrics,queryDimensions,datasource,scope,schema);
}
public static void whereDimMetric(List<String> fields, Set<String> queryMetrics,
Set<String> queryDimensions, DataModel datasource, S2CalciteSchema schema,
Set<String> dimensions, Set<String> metrics) {
for (String field : fields) {
if (queryDimensions.contains(field) || queryMetrics.contains(field)) {
continue;
}
String filterField = field;
if (field.contains(Constants.DIMENSION_IDENTIFY)) {
filterField = field.split(Constants.DIMENSION_IDENTIFY)[1];
}
addField(filterField, field, datasource, schema, dimensions, metrics);
}
}
private static void addField(String field, String oriField, DataModel datasource,
S2CalciteSchema schema, Set<String> dimensions, Set<String> metrics) {
Optional<Dimension> dimension = datasource.getDimensions().stream()
.filter(d -> d.getName().equalsIgnoreCase(field)).findFirst();
if (dimension.isPresent()) {
dimensions.add(oriField);
return;
}
Optional<Identify> identify = datasource.getIdentifiers().stream()
.filter(i -> i.getName().equalsIgnoreCase(field)).findFirst();
if (identify.isPresent()) {
dimensions.add(oriField);
return;
}
if (schema.getDimensions().containsKey(datasource.getName())) {
Optional<Dimension> dataSourceDim = schema.getDimensions().get(datasource.getName())
.stream().filter(d -> d.getName().equalsIgnoreCase(field)).findFirst();
if (dataSourceDim.isPresent()) {
dimensions.add(oriField);
return;
}
}
Optional<Measure> metric = datasource.getMeasures().stream()
.filter(m -> m.getName().equalsIgnoreCase(field)).findFirst();
if (metric.isPresent()) {
metrics.add(oriField);
return;
}
Optional<Metric> datasourceMetric = schema.getMetrics().stream()
.filter(m -> m.getName().equalsIgnoreCase(field)).findFirst();
if (datasourceMetric.isPresent()) {
Set<String> measures = datasourceMetric.get().getMetricTypeParams().getMeasures()
.stream().map(m -> m.getName()).collect(Collectors.toSet());
if (datasource.getMeasures().stream().map(m -> m.getName()).collect(Collectors.toSet())
.containsAll(measures)) {
metrics.add(oriField);
return;
}
}
}
public static boolean isDimension(String name, DataModel datasource, S2CalciteSchema schema) {
Optional<Dimension> dimension = datasource.getDimensions().stream()
.filter(d -> d.getName().equalsIgnoreCase(name)).findFirst();
if (dimension.isPresent()) {
return true;
}
Optional<Identify> identify = datasource.getIdentifiers().stream()
.filter(i -> i.getName().equalsIgnoreCase(name)).findFirst();
if (identify.isPresent()) {
return true;
}
if (schema.getDimensions().containsKey(datasource.getName())) {
Optional<Dimension> dataSourceDim = schema.getDimensions().get(datasource.getName())
.stream().filter(d -> d.getName().equalsIgnoreCase(name)).findFirst();
if (dataSourceDim.isPresent()) {
return true;
}
}
return false;
}
private static void addTimeDimension(DataModel dataModel, Set<String> queryDimension) {
if (Materialization.TimePartType.ZIPPER.equals(dataModel.getTimePartType())) {
Optional<Dimension> startTimeOp = dataModel.getDimensions().stream()
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
.filter(d -> d.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_START))
.findFirst();
Optional<Dimension> endTimeOp = dataModel.getDimensions().stream()
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
.filter(d -> d.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_END))
.findFirst();
if (startTimeOp.isPresent() && !queryDimension.contains(startTimeOp.get().getName())) {
queryDimension.add(startTimeOp.get().getName());
}
if (endTimeOp.isPresent() && !queryDimension.contains(endTimeOp.get().getName())) {
queryDimension.add(endTimeOp.get().getName());
}
} else {
Optional<Dimension> timeOp = dataModel.getDimensions().stream()
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
.findFirst();
if (timeOp.isPresent() && !queryDimension.contains(timeOp.get().getName())) {
queryDimension.add(timeOp.get().getName());
}
}
}
public void render(OntologyQuery ontologyQuery, List<DataModel> dataModels,
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
String queryWhere = ontologyQuery.getWhere();
Set<String> whereFields = new HashSet<>();
List<String> fieldWhere = new ArrayList<>();
EngineType engineType = schema.getOntology().getDatabaseType();
if (queryWhere != null && !queryWhere.isEmpty()) {
SqlNode sqlNode = SemanticNode.parse(queryWhere, scope, engineType);
FilterNode.getFilterField(sqlNode, whereFields);
fieldWhere = whereFields.stream().collect(Collectors.toList());
}
if (dataModels.size() == 1) {
DataModel dataModel = dataModels.get(0);
super.tableView = renderOne("", fieldWhere, ontologyQuery.getMetrics(),
ontologyQuery.getDimensions(), ontologyQuery.getWhere(), dataModel, scope,
schema, nonAgg);
return;
}
JoinRender joinRender = new JoinRender();
joinRender.render(ontologyQuery, dataModels, scope, schema, nonAgg);
super.tableView = joinRender.getTableView();
}
}

View File

@@ -1,33 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;
import lombok.Builder;
import lombok.Data;
import java.util.List;
@Data
@Builder
public class DataModel {
private Long id;
private String name;
private Long modelId;
private String type;
private String sqlQuery;
private String tableQuery;
private List<Identify> identifiers;
private List<Dimension> dimensions;
private List<Measure> measures;
private String aggTime;
private Materialization.TimePartType timePartType = Materialization.TimePartType.None;
}

View File

@@ -1,54 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;
import java.util.Arrays;
public enum DataType {
ARRAY("ARRAY"),
MAP("MAP"),
JSON("JSON"),
VARCHAR("VARCHAR"),
DATE("DATE"),
BIGINT("BIGINT"),
INT("INT"),
DOUBLE("DOUBLE"),
FLOAT("FLOAT"),
DECIMAL("DECIMAL"),
UNKNOWN("unknown");
private String type;
DataType(String type) {
this.type = type;
}
public String getType() {
return type;
}
public static DataType of(String type) {
for (DataType typeEnum : DataType.values()) {
if (typeEnum.getType().equalsIgnoreCase(type)) {
return typeEnum;
}
}
return DataType.UNKNOWN;
}
public boolean isObject() {
return Arrays.asList(ARRAY, MAP, JSON).contains(this);
}
public boolean isArray() {
return ARRAY.equals(this);
}
}
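// Usage sketch for the enum above: of() matches case-insensitively and falls back to
// UNKNOWN for unrecognized names.
class DataTypeExample {
    public static void main(String[] args) {
        System.out.println(DataType.of("array")); // ARRAY
        System.out.println(DataType.of("array").isArray()); // true
        System.out.println(DataType.of("json").isObject()); // true
        System.out.println(DataType.of("geometry")); // UNKNOWN
    }
}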

View File

@@ -1,28 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;
import com.tencent.supersonic.headless.api.pojo.DimensionTimeTypeParams;
import lombok.Builder;
import lombok.Data;
import java.util.List;
import java.util.Map;
@Data
@Builder
public class Dimension implements SemanticItem {
String name;
private String owners;
private String type;
private String expr;
private DimensionTimeTypeParams dimensionTimeTypeParams;
private DataType dataType = DataType.UNKNOWN;
private String bizName;
private List<String> defaultValues;
private Map<String, Object> ext;
@Override
public String getName() {
return name;
}
}

View File

@@ -1,20 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@AllArgsConstructor
@NoArgsConstructor
public class Identify {
public enum Type {
PRIMARY, FOREIGN
}
private String name;
// primary or foreign
private String type;
}

View File

@@ -1,46 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;
import lombok.Builder;
import lombok.Data;
import java.util.ArrayList;
import java.util.List;
@Data
@Builder
public class Materialization {
public enum TimePartType {
    /**
     * Partition time type: FULL - no partition is used; PARTITION - a time list is used;
     * ZIPPER - a [startDate, endDate] time range is used.
     */
FULL("FULL"), PARTITION("PARTITION"), ZIPPER("ZIPPER"), None("");
private String name;
TimePartType(String name) {
this.name = name;
}
public static TimePartType of(String name) {
for (TimePartType typeEnum : TimePartType.values()) {
if (typeEnum.name.equalsIgnoreCase(name)) {
return typeEnum;
}
}
return TimePartType.None;
}
}
private TimePartType timePartType;
private String destinationTable;
private String dateInfo;
private String entities;
private Long modelId;
private Long dataBase;
private Long materializationId;
private Integer level;
private List<MaterializationElement> dimensions = new ArrayList<>();
private List<MaterializationElement> metrics = new ArrayList<>();
}
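// Usage sketch: TimePartType.of() matches case-insensitively and falls back to None for
// unrecognized names.
class TimePartTypeExample {
    public static void main(String[] args) {
        System.out.println(Materialization.TimePartType.of("zipper")); // ZIPPER
        System.out.println(Materialization.TimePartType.of("daily")); // None
    }
}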

View File

@@ -1,13 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;
import lombok.Builder;
import lombok.Data;
import java.util.List;
@Data
@Builder
public class MaterializationElement {
private List<TimeRange> timeRangeList;
private String name;
}

View File

@@ -1,26 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@AllArgsConstructor
@NoArgsConstructor
@Builder
public class Measure {
private String name;
    // aggregation function: sum, max, min, avg, count or count_distinct
private String agg;
private String expr;
private String constraint;
private String alias;
private String createMetric;
}

View File

@@ -1,19 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;
import lombok.Data;
import java.util.List;
@Data
public class Metric implements SemanticItem {
private String name;
private List<String> owners;
private String type;
private MetricTypeParams metricTypeParams;
@Override
public String getName() {
return name;
}
}

View File

@@ -1,15 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;
import lombok.Data;
import java.util.List;
@Data
public class MetricTypeParams {
private List<Measure> measures;
private List<Measure> metrics;
private List<Measure> fields;
private boolean isFieldMetric = false;
private String expr;
}

View File

@@ -1,7 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;
public interface SemanticItem {
String getName();
String getType();
}

View File

@@ -29,7 +29,7 @@ public class ComponentFactory {
initQueryOptimizer();
initQueryExecutors();
initQueryAccelerators();
initQueryParsers();
initQueryParser();
initQueryCache();
}
@@ -55,8 +55,8 @@ public class ComponentFactory {
}
public static List<QueryParser> getQueryParsers() {
if (queryParsers.isEmpty()) {
initQueryParsers();
if (queryParsers == null) {
initQueryParser();
}
return queryParsers;
}
@@ -82,14 +82,18 @@ public class ComponentFactory {
}
private static void initQueryExecutors() {
// queryExecutors.add(ContextUtils.getContext().getBean("JdbcExecutor",
// JdbcExecutor.class));
init(QueryExecutor.class, queryExecutors);
}
private static void initQueryAccelerators() {
// queryExecutors.add(ContextUtils.getContext().getBean("JdbcExecutor",
// JdbcExecutor.class));
init(QueryAccelerator.class, queryAccelerators);
}
private static void initQueryParsers() {
private static void initQueryParser() {
init(QueryParser.class, queryParsers);
}

View File

@@ -1,7 +1,5 @@
package com.tencent.supersonic.headless.core.utils;
import com.tencent.supersonic.common.jsqlparser.SqlReplaceHelper;
import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.common.pojo.DateConf;
import com.tencent.supersonic.common.pojo.ItemDateResp;
@@ -10,13 +8,7 @@ import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.common.util.DateModeUtils;
import com.tencent.supersonic.common.util.SqlFilterUtils;
import com.tencent.supersonic.common.util.StringUtil;
import com.tencent.supersonic.headless.api.pojo.Measure;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.api.pojo.enums.MetricDefineType;
import com.tencent.supersonic.headless.api.pojo.request.QueryStructReq;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import com.tencent.supersonic.headless.core.config.ExecutorConfig;
import com.tencent.supersonic.headless.core.pojo.StructQuery;
import lombok.extern.slf4j.Slf4j;
@@ -31,7 +23,8 @@ import java.time.format.DateTimeFormatter;
import java.util.*;
import java.util.stream.Collectors;
import static com.tencent.supersonic.common.pojo.Constants.*;
import static com.tencent.supersonic.common.pojo.Constants.DAY_FORMAT;
import static com.tencent.supersonic.common.pojo.Constants.JOIN_UNDERLINE;
/** Utility functions for analyzing a QueryStructReq and generating SQL fragments from it. */
@Component
@@ -148,7 +141,12 @@ public class SqlGenerateUtils {
String whereClauseFromFilter =
sqlFilterUtils.getWhereClause(structQuery.getDimensionFilters());
String whereFromDate = getDateWhereClause(structQuery.getDateInfo(), itemDateResp);
return mergeDateWhereClause(structQuery, whereClauseFromFilter, whereFromDate);
String mergedWhere =
mergeDateWhereClause(structQuery, whereClauseFromFilter, whereFromDate);
if (StringUtils.isNotBlank(mergedWhere)) {
mergedWhere = "where " + mergedWhere;
}
return mergedWhere;
}
private String mergeDateWhereClause(StructQuery structQuery, String whereClauseFromFilter,
@@ -255,87 +253,4 @@ public class SqlGenerateUtils {
return true;
}
public String generateInternalMetricName(String modelBizName) {
return modelBizName + UNDERLINE + executorConfig.getInternalMetricNameSuffix();
}
public String generateDerivedMetric(final List<MetricSchemaResp> metricResps,
final Set<String> allFields, final Map<String, Measure> allMeasures,
final List<DimSchemaResp> dimensionResps, final String expression,
final MetricDefineType metricDefineType, AggOption aggOption,
Map<String, String> visitedMetric, Set<String> measures, Set<String> dimensions) {
Set<String> fields = SqlSelectHelper.getColumnFromExpr(expression);
if (!CollectionUtils.isEmpty(fields)) {
Map<String, String> replace = new HashMap<>();
for (String field : fields) {
switch (metricDefineType) {
case METRIC:
Optional<MetricSchemaResp> metricItem = metricResps.stream()
.filter(m -> m.getBizName().equalsIgnoreCase(field)).findFirst();
if (metricItem.isPresent()) {
if (visitedMetric.keySet().contains(field)) {
replace.put(field, visitedMetric.get(field));
break;
}
replace.put(field,
generateDerivedMetric(metricResps, allFields, allMeasures,
dimensionResps, getExpr(metricItem.get()),
metricItem.get().getMetricDefineType(), aggOption,
visitedMetric, measures, dimensions));
visitedMetric.put(field, replace.get(field));
}
break;
case MEASURE:
if (allMeasures.containsKey(field)) {
measures.add(field);
replace.put(field, getExpr(allMeasures.get(field), aggOption));
}
break;
case FIELD:
if (allFields.contains(field)) {
Optional<DimSchemaResp> dimensionItem = dimensionResps.stream()
.filter(d -> d.getBizName().equals(field)).findFirst();
if (dimensionItem.isPresent()) {
dimensions.add(field);
} else {
measures.add(field);
}
}
break;
default:
break;
}
}
if (!CollectionUtils.isEmpty(replace)) {
String expr = SqlReplaceHelper.replaceExpression(expression, replace);
log.debug("derived measure {}->{}", expression, expr);
return expr;
}
}
return expression;
}
public String getExpr(Measure measure, AggOption aggOption) {
if (AggOperatorEnum.COUNT_DISTINCT.getOperator().equalsIgnoreCase(measure.getAgg())) {
return AggOption.NATIVE.equals(aggOption) ? measure.getExpr()
: AggOperatorEnum.COUNT.getOperator() + " ( " + AggOperatorEnum.DISTINCT + " "
+ measure.getExpr() + " ) ";
}
return AggOption.NATIVE.equals(aggOption) ? measure.getExpr()
: measure.getAgg() + " ( " + measure.getExpr() + " ) ";
}
public String getExpr(MetricResp metricResp) {
if (Objects.isNull(metricResp.getMetricDefineType())) {
return metricResp.getMetricDefineByMeasureParams().getExpr();
}
if (metricResp.getMetricDefineType().equals(MetricDefineType.METRIC)) {
return metricResp.getMetricDefineByMetricParams().getExpr();
}
if (metricResp.getMetricDefineType().equals(MetricDefineType.FIELD)) {
return metricResp.getMetricDefineByFieldParams().getExpr();
}
// measure add agg function
return metricResp.getMetricDefineByMeasureParams().getExpr();
}
}
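// Behavior sketch mirroring the removed getExpr(Measure, AggOption) helper (simplified,
// names assumed): NATIVE keeps the raw expression, any other option wraps it in its
// aggregation, with count_distinct expanding to count(distinct ...).
class GetExprSketch {
    static String getExpr(String agg, String expr, boolean nativeOption) {
        if (nativeOption) {
            return expr;
        }
        if ("count_distinct".equalsIgnoreCase(agg)) {
            return "count ( distinct " + expr + " ) ";
        }
        return agg + " ( " + expr + " ) ";
    }

    public static void main(String[] args) {
        System.out.println(getExpr("sum", "page_views", false)); // sum ( page_views )
        System.out.println(getExpr("count_distinct", "user_id", false)); // count ( distinct user_id )
        System.out.println(getExpr("sum", "page_views", true)); // page_views
    }
}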

View File

@@ -220,8 +220,9 @@ public class SqlUtils {
}
public SqlUtils build() {
DatabaseResp database = DatabaseResp.builder().name(this.name).type(this.type)
.url(this.jdbcUrl).username(this.username).password(this.password).build();
DatabaseResp database = DatabaseResp.builder().name(this.name)
.type(this.type.toUpperCase()).url(this.jdbcUrl).username(this.username)
.password(this.password).build();
SqlUtils sqlUtils = new SqlUtils(database);
sqlUtils.jdbcDataSource = this.jdbcDataSource;