[improvement][headless] Refactor translator module to make code logic cleaner and more readable.

This commit is contained in:
jerryjzhang
2024-12-14 21:42:24 +08:00
parent 9f8793bfe2
commit f9cc395e49
71 changed files with 1513 additions and 2337 deletions

View File

@@ -138,7 +138,7 @@ public class MetricRatioCalcProcessor implements ExecuteResultProcessor {
return new HashSet<>(); return new HashSet<>();
} }
return queryResult.getQueryColumns().stream() return queryResult.getQueryColumns().stream()
.flatMap(c -> SqlSelectHelper.getColumnFromExpr(c.getNameEn()).stream()) .flatMap(c -> SqlSelectHelper.getFieldsFromExpr(c.getNameEn()).stream())
.collect(Collectors.toSet()); .collect(Collectors.toSet());
} }

View File

@@ -679,61 +679,61 @@ public class SqlSelectHelper {
return table.getFullyQualifiedName(); return table.getFullyQualifiedName();
} }
public static Set<String> getColumnFromExpr(String expr) { public static Set<String> getFieldsFromExpr(String expr) {
Expression expression = QueryExpressionReplaceVisitor.getExpression(expr); Expression expression = QueryExpressionReplaceVisitor.getExpression(expr);
Set<String> columns = new HashSet<>(); Set<String> columns = new HashSet<>();
if (Objects.nonNull(expression)) { if (Objects.nonNull(expression)) {
getColumnFromExpr(expression, columns); getFieldsFromExpr(expression, columns);
} }
return columns; return columns;
} }
public static void getColumnFromExpr(Expression expression, Set<String> columns) { public static void getFieldsFromExpr(Expression expression, Set<String> columns) {
if (expression instanceof Column) { if (expression instanceof Column) {
columns.add(((Column) expression).getColumnName()); columns.add(((Column) expression).getColumnName());
} }
if (expression instanceof Function) { if (expression instanceof Function) {
ExpressionList<?> expressionList = ((Function) expression).getParameters(); ExpressionList<?> expressionList = ((Function) expression).getParameters();
for (Expression expr : expressionList) { for (Expression expr : expressionList) {
getColumnFromExpr(expr, columns); getFieldsFromExpr(expr, columns);
} }
} }
if (expression instanceof CaseExpression) { if (expression instanceof CaseExpression) {
CaseExpression expr = (CaseExpression) expression; CaseExpression expr = (CaseExpression) expression;
if (Objects.nonNull(expr.getWhenClauses())) { if (Objects.nonNull(expr.getWhenClauses())) {
for (WhenClause whenClause : expr.getWhenClauses()) { for (WhenClause whenClause : expr.getWhenClauses()) {
getColumnFromExpr(whenClause.getWhenExpression(), columns); getFieldsFromExpr(whenClause.getWhenExpression(), columns);
getColumnFromExpr(whenClause.getThenExpression(), columns); getFieldsFromExpr(whenClause.getThenExpression(), columns);
} }
} }
if (Objects.nonNull(expr.getElseExpression())) { if (Objects.nonNull(expr.getElseExpression())) {
getColumnFromExpr(expr.getElseExpression(), columns); getFieldsFromExpr(expr.getElseExpression(), columns);
} }
} }
if (expression instanceof BinaryExpression) { if (expression instanceof BinaryExpression) {
BinaryExpression expr = (BinaryExpression) expression; BinaryExpression expr = (BinaryExpression) expression;
getColumnFromExpr(expr.getLeftExpression(), columns); getFieldsFromExpr(expr.getLeftExpression(), columns);
getColumnFromExpr(expr.getRightExpression(), columns); getFieldsFromExpr(expr.getRightExpression(), columns);
} }
if (expression instanceof InExpression) { if (expression instanceof InExpression) {
InExpression inExpression = (InExpression) expression; InExpression inExpression = (InExpression) expression;
getColumnFromExpr(inExpression.getLeftExpression(), columns); getFieldsFromExpr(inExpression.getLeftExpression(), columns);
} }
if (expression instanceof Between) { if (expression instanceof Between) {
Between between = (Between) expression; Between between = (Between) expression;
getColumnFromExpr(between.getLeftExpression(), columns); getFieldsFromExpr(between.getLeftExpression(), columns);
} }
if (expression instanceof IsBooleanExpression) { if (expression instanceof IsBooleanExpression) {
IsBooleanExpression isBooleanExpression = (IsBooleanExpression) expression; IsBooleanExpression isBooleanExpression = (IsBooleanExpression) expression;
getColumnFromExpr(isBooleanExpression.getLeftExpression(), columns); getFieldsFromExpr(isBooleanExpression.getLeftExpression(), columns);
} }
if (expression instanceof IsNullExpression) { if (expression instanceof IsNullExpression) {
IsNullExpression isNullExpression = (IsNullExpression) expression; IsNullExpression isNullExpression = (IsNullExpression) expression;
getColumnFromExpr(isNullExpression.getLeftExpression(), columns); getFieldsFromExpr(isNullExpression.getLeftExpression(), columns);
} }
if (expression instanceof Parenthesis) { if (expression instanceof Parenthesis) {
Parenthesis expr = (Parenthesis) expression; Parenthesis expr = (Parenthesis) expression;
getColumnFromExpr(expr.getExpression(), columns); getFieldsFromExpr(expr.getExpression(), columns);
} }
} }

View File

@@ -8,5 +8,5 @@ import java.util.List;
@Data @Data
public class MetricDefineByMeasureParams extends MetricDefineParams { public class MetricDefineByMeasureParams extends MetricDefineParams {
private List<MeasureParam> measures = Lists.newArrayList(); private List<Measure> measures = Lists.newArrayList();
} }

View File

@@ -1,6 +1,6 @@
package com.tencent.supersonic.headless.api.pojo.enums; package com.tencent.supersonic.headless.api.pojo.enums;
import com.tencent.supersonic.headless.api.pojo.MeasureParam; import com.tencent.supersonic.headless.api.pojo.Measure;
import com.tencent.supersonic.headless.api.pojo.MetricDefineByMeasureParams; import com.tencent.supersonic.headless.api.pojo.MetricDefineByMeasureParams;
import java.util.List; import java.util.List;
@@ -18,11 +18,6 @@ public enum MetricType {
return null; return null;
} }
public static Boolean isDerived(String src) {
MetricType metricType = of(src);
return Objects.nonNull(metricType) && metricType.equals(DERIVED);
}
public static Boolean isDerived(MetricDefineType metricDefineType, public static Boolean isDerived(MetricDefineType metricDefineType,
MetricDefineByMeasureParams typeParams) { MetricDefineByMeasureParams typeParams) {
if (MetricDefineType.METRIC.equals(metricDefineType)) { if (MetricDefineType.METRIC.equals(metricDefineType)) {
@@ -32,7 +27,7 @@ public enum MetricType {
return true; return true;
} }
if (MetricDefineType.MEASURE.equals(metricDefineType)) { if (MetricDefineType.MEASURE.equals(metricDefineType)) {
List<MeasureParam> measures = typeParams.getMeasures(); List<Measure> measures = typeParams.getMeasures();
if (measures.size() > 1) { if (measures.size() > 1) {
return true; return true;
} }

View File

@@ -3,12 +3,19 @@ package com.tencent.supersonic.headless.api.pojo.response;
import lombok.Data; import lombok.Data;
import lombok.ToString; import lombok.ToString;
import java.util.List;
@Data @Data
@ToString(callSuper = true) @ToString(callSuper = true)
public class DimSchemaResp extends DimensionResp { public class DimSchemaResp extends DimensionResp {
private Long useCnt = 0L; private Long useCnt = 0L;
@Override
public boolean equals(Object o) {
return super.equals(o);
}
@Override
public int hashCode() {
return super.hashCode();
}
} }

View File

@@ -2,13 +2,9 @@ package com.tencent.supersonic.headless.api.pojo.response;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.tencent.supersonic.common.pojo.DataFormat; import com.tencent.supersonic.common.pojo.DataFormat;
import com.tencent.supersonic.headless.api.pojo.DrillDownDimension; import com.tencent.supersonic.headless.api.pojo.*;
import com.tencent.supersonic.headless.api.pojo.MetricDefineByFieldParams;
import com.tencent.supersonic.headless.api.pojo.MetricDefineByMeasureParams;
import com.tencent.supersonic.headless.api.pojo.MetricDefineByMetricParams;
import com.tencent.supersonic.headless.api.pojo.RelateDimension;
import com.tencent.supersonic.headless.api.pojo.SchemaItem;
import com.tencent.supersonic.headless.api.pojo.enums.MetricDefineType; import com.tencent.supersonic.headless.api.pojo.enums.MetricDefineType;
import com.tencent.supersonic.headless.api.pojo.enums.MetricType;
import lombok.Data; import lombok.Data;
import lombok.ToString; import lombok.ToString;
import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.CollectionUtils;
@@ -69,6 +65,19 @@ public class MetricResp extends SchemaItem {
private boolean containsPartitionDimensions; private boolean containsPartitionDimensions;
public void setMetricDefinition(MetricDefineType type, MetricDefineParams params) {
if (MetricDefineType.MEASURE.equals(type)) {
assert params instanceof MetricDefineByMeasureParams;
metricDefineByMeasureParams = (MetricDefineByMeasureParams) params;
} else if (MetricDefineType.FIELD.equals(type)) {
assert params instanceof MetricDefineByFieldParams;
metricDefineByFieldParams = (MetricDefineByFieldParams) params;
} else if (MetricDefineType.METRIC.equals(type)) {
assert params instanceof MetricDefineByMetricParams;
metricDefineByMetricParams = (MetricDefineByMetricParams) params;
}
}
public void setClassifications(String tag) { public void setClassifications(String tag) {
if (StringUtils.isBlank(tag)) { if (StringUtils.isBlank(tag)) {
classifications = Lists.newArrayList(); classifications = Lists.newArrayList();
@@ -105,4 +114,8 @@ public class MetricResp extends SchemaItem {
} }
return ""; return "";
} }
public boolean isDerived() {
return MetricType.isDerived(metricDefineType, metricDefineByMeasureParams);
}
} }

View File

@@ -1,11 +1,26 @@
package com.tencent.supersonic.headless.api.pojo.response; package com.tencent.supersonic.headless.api.pojo.response;
import com.google.common.collect.Sets;
import lombok.Data; import lombok.Data;
import lombok.ToString; import lombok.ToString;
import java.util.Set;
@Data @Data
@ToString(callSuper = true) @ToString(callSuper = true)
public class MetricSchemaResp extends MetricResp { public class MetricSchemaResp extends MetricResp {
private Long useCnt = 0L; private Long useCnt = 0L;
private Set<String> fields = Sets.newHashSet();
@Override
public boolean equals(Object o) {
return super.equals(o);
}
@Override
public int hashCode() {
return super.hashCode();
}
} }

View File

@@ -3,14 +3,20 @@ package com.tencent.supersonic.headless.api.pojo.response;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.tencent.supersonic.common.pojo.ModelRela; import com.tencent.supersonic.common.pojo.ModelRela;
import com.tencent.supersonic.common.pojo.enums.QueryType; import com.tencent.supersonic.common.pojo.enums.QueryType;
import com.tencent.supersonic.headless.api.pojo.SchemaItem;
import com.tencent.supersonic.headless.api.pojo.enums.SchemaType; import com.tencent.supersonic.headless.api.pojo.enums.SchemaType;
import lombok.AllArgsConstructor; import lombok.AllArgsConstructor;
import lombok.Data; import lombok.Data;
import lombok.NoArgsConstructor; import lombok.NoArgsConstructor;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.Set; import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
@Data @Data
@AllArgsConstructor @AllArgsConstructor
@@ -39,6 +45,14 @@ public class SemanticSchemaResp {
.orElse(null); .orElse(null);
} }
public List<MetricSchemaResp> getMetrics(List<String> bizNames) {
Map<String, MetricSchemaResp> metricLowerToNameMap = metrics.stream().collect(
Collectors.toMap(entry -> entry.getBizName().toLowerCase(), entry -> entry));
return bizNames.stream().map(String::toLowerCase)
.filter(entry -> metricLowerToNameMap.containsKey(entry))
.map(entry -> metricLowerToNameMap.get(entry)).collect(Collectors.toList());
}
public DimSchemaResp getDimension(String bizName) { public DimSchemaResp getDimension(String bizName) {
return dimensions.stream() return dimensions.stream()
.filter(dimension -> bizName.equalsIgnoreCase(dimension.getBizName())).findFirst() .filter(dimension -> bizName.equalsIgnoreCase(dimension.getBizName())).findFirst()
@@ -50,6 +64,14 @@ public class SemanticSchemaResp {
.orElse(null); .orElse(null);
} }
public List<DimSchemaResp> getDimensions(List<String> bizNames) {
Map<String, DimSchemaResp> dimLowerToNameMap = dimensions.stream().collect(
Collectors.toMap(entry -> entry.getBizName().toLowerCase(), entry -> entry));
return bizNames.stream().map(String::toLowerCase)
.filter(entry -> dimLowerToNameMap.containsKey(entry))
.map(entry -> dimLowerToNameMap.get(entry)).collect(Collectors.toList());
}
public Set<String> getNameFromBizNames(Set<String> bizNames) { public Set<String> getNameFromBizNames(Set<String> bizNames) {
Set<String> names = new HashSet<>(); Set<String> names = new HashSet<>();
for (String bizName : bizNames) { for (String bizName : bizNames) {
@@ -65,4 +87,32 @@ public class SemanticSchemaResp {
} }
return names; return names;
} }
public Map<String, String> getNameToBizNameMap() {
// support fieldName and field alias to bizName
Map<String, String> dimensionResults = dimensions.stream().flatMap(
entry -> getPairStream(entry.getAlias(), entry.getName(), entry.getBizName()))
.collect(Collectors.toMap(Pair::getLeft, Pair::getRight, (k1, k2) -> k1));
Map<String, String> metricResults = metrics.stream().flatMap(
entry -> getPairStream(entry.getAlias(), entry.getName(), entry.getBizName()))
.collect(Collectors.toMap(Pair::getLeft, Pair::getRight, (k1, k2) -> k1));
dimensionResults.putAll(metricResults);
return dimensionResults;
}
private Stream<Pair<String, String>> getPairStream(String aliasStr, String name,
String bizName) {
Set<Pair<String, String>> elements = new HashSet<>();
elements.add(Pair.of(name, bizName));
if (StringUtils.isNotBlank(aliasStr)) {
List<String> aliasList = SchemaItem.getAliasList(aliasStr);
for (String alias : aliasList) {
elements.add(Pair.of(alias, bizName));
}
}
return elements.stream();
}
} }

View File

@@ -0,0 +1,38 @@
package com.tencent.supersonic.headless.core.pojo;
import com.tencent.supersonic.headless.api.pojo.Identify;
import com.tencent.supersonic.headless.api.pojo.Measure;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Materialization;
import lombok.Builder;
import lombok.Data;
import java.util.List;
@Data
@Builder
public class DataModel {
private Long id;
private String name;
private Long modelId;
private String type;
private String sqlQuery;
private String tableQuery;
private List<Identify> identifiers;
private List<DimSchemaResp> dimensions;
private List<Measure> measures;
private String aggTime;
private com.tencent.supersonic.headless.core.translator.parser.s2sql.Materialization.TimePartType timePartType =
Materialization.TimePartType.None;
}

View File

@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql; package com.tencent.supersonic.headless.core.pojo;
import lombok.Builder; import lombok.Builder;
import lombok.Data; import lombok.Data;

View File

@@ -0,0 +1,28 @@
package com.tencent.supersonic.headless.core.pojo;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import lombok.Data;
import java.util.*;
import java.util.stream.Collectors;
/**
 * An ontology is a group of data models that can be joined together, arranged either
 * as a star schema or as a snowflake schema.
 */
@Data
public class Ontology {
    private Database database;
    private Map<String, DataModel> dataModelMap = new HashMap<>();
    private List<MetricSchemaResp> metrics = new ArrayList<>();
    private Map<String, List<DimSchemaResp>> dimensionMap = new HashMap<>();
    private List<JoinRelation> joinRelations;

    /**
     * Flattens the per-model dimension lists into one combined list.
     *
     * @return a new mutable list holding every dimension of every data model
     */
    public List<DimSchemaResp> getDimensions() {
        List<DimSchemaResp> allDimensions = new ArrayList<>();
        for (List<DimSchemaResp> modelDimensions : dimensionMap.values()) {
            allDimensions.addAll(modelDimensions);
        }
        return allDimensions;
    }
}

View File

@@ -0,0 +1,40 @@
package com.tencent.supersonic.headless.core.pojo;
import com.google.common.collect.Sets;
import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import lombok.Data;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
/**
 * A query expressed against the ontology: the metrics, dimensions and raw fields to
 * fetch, plus filtering, ordering, limit and aggregation options.
 */
@Data
public class OntologyQuery {
    private Set<MetricSchemaResp> metrics = Sets.newHashSet();
    private Set<DimSchemaResp> dimensions = Sets.newHashSet();
    private Set<String> fields = Sets.newHashSet();
    private String where;
    private Long limit;
    private List<ColumnOrder> order;
    private boolean nativeQuery = true;
    private AggOption aggOption = AggOption.NATIVE;

    /**
     * @return true when at least one queried metric is a derived metric
     */
    public boolean hasDerivedMetric() {
        for (MetricSchemaResp metric : metrics) {
            if (metric.isDerived()) {
                return true;
            }
        }
        return false;
    }

    /**
     * @return the queried metrics that belong to the given model
     */
    public Set<MetricSchemaResp> getMetricsByModel(Long modelId) {
        return metrics.stream().filter(metric -> metric.getModelId().equals(modelId))
                .collect(Collectors.toSet());
    }

    /**
     * @return the queried dimensions that belong to the given model
     */
    public Set<DimSchemaResp> getDimensionsByModel(Long modelId) {
        return dimensions.stream().filter(dim -> dim.getModelId().equals(modelId))
                .collect(Collectors.toSet());
    }
}

View File

@@ -1,8 +1,6 @@
package com.tencent.supersonic.headless.core.pojo; package com.tencent.supersonic.headless.core.pojo;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp; import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Ontology;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
import lombok.Data; import lombok.Data;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Triple; import org.apache.commons.lang3.tuple.Triple;
@@ -11,17 +9,18 @@ import org.apache.commons.lang3.tuple.Triple;
public class QueryStatement { public class QueryStatement {
private Long dataSetId; private Long dataSetId;
private String dataSetName;
private String sql; private String sql;
private String errMsg; private String errMsg;
private StructQueryParam structQueryParam; private StructQuery structQuery;
private SqlQueryParam sqlQueryParam; private SqlQuery sqlQuery;
private OntologyQueryParam ontologyQueryParam; private OntologyQuery ontologyQuery;
private Integer status = 0; private Integer status = 0;
private Boolean isS2SQL = false; private Boolean isS2SQL = false;
private Boolean enableOptimize = true; private Boolean enableOptimize = true;
private Triple<String, String, String> minMaxTime; private Triple<String, String, String> minMaxTime;
private Ontology ontology; private Ontology ontology;
private SemanticSchemaResp semanticSchemaResp; private SemanticSchemaResp semanticSchema;
private Integer limit = 1000; private Integer limit = 1000;
private Boolean isTranslated = false; private Boolean isTranslated = false;

View File

@@ -3,7 +3,7 @@ package com.tencent.supersonic.headless.core.pojo;
import lombok.Data; import lombok.Data;
@Data @Data
public class SqlQueryParam { public class SqlQuery {
private String sql; private String sql;
private String table; private String table;
private boolean supportWith = true; private boolean supportWith = true;

View File

@@ -12,7 +12,7 @@ import java.util.ArrayList;
import java.util.List; import java.util.List;
@Data @Data
public class StructQueryParam { public class StructQuery {
private List<String> groups = new ArrayList(); private List<String> groups = new ArrayList();
private List<Aggregator> aggregators = new ArrayList(); private List<Aggregator> aggregators = new ArrayList();
private List<Order> orders = new ArrayList(); private List<Order> orders = new ArrayList();

View File

@@ -2,11 +2,11 @@ package com.tencent.supersonic.headless.core.translator;
import com.tencent.supersonic.common.calcite.SqlMergeWithUtils; import com.tencent.supersonic.common.calcite.SqlMergeWithUtils;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQueryParam; import com.tencent.supersonic.headless.core.pojo.SqlQuery;
import com.tencent.supersonic.headless.core.translator.converter.QueryConverter; import com.tencent.supersonic.headless.core.translator.converter.QueryConverter;
import com.tencent.supersonic.headless.core.translator.optimizer.QueryOptimizer; import com.tencent.supersonic.headless.core.translator.optimizer.QueryOptimizer;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
import com.tencent.supersonic.headless.core.utils.ComponentFactory; import com.tencent.supersonic.headless.core.utils.ComponentFactory;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
@@ -34,8 +34,8 @@ public class DefaultSemanticTranslator implements SemanticTranslator {
} }
doOntologyParse(queryStatement); doOntologyParse(queryStatement);
if (StringUtils.isNotBlank(queryStatement.getSqlQueryParam().getSimplifiedSql())) { if (StringUtils.isNotBlank(queryStatement.getSqlQuery().getSimplifiedSql())) {
queryStatement.setSql(queryStatement.getSqlQueryParam().getSimplifiedSql()); queryStatement.setSql(queryStatement.getSqlQuery().getSimplifiedSql());
} }
if (StringUtils.isBlank(queryStatement.getSql())) { if (StringUtils.isBlank(queryStatement.getSql())) {
throw new RuntimeException("parse exception: " + queryStatement.getErrMsg()); throw new RuntimeException("parse exception: " + queryStatement.getErrMsg());
@@ -51,22 +51,22 @@ public class DefaultSemanticTranslator implements SemanticTranslator {
} }
private void doOntologyParse(QueryStatement queryStatement) throws Exception { private void doOntologyParse(QueryStatement queryStatement) throws Exception {
OntologyQueryParam ontologyQueryParam = queryStatement.getOntologyQueryParam(); OntologyQuery ontologyQuery = queryStatement.getOntologyQuery();
log.info("parse with ontology: [{}]", ontologyQueryParam); log.info("parse with ontology: [{}]", ontologyQuery);
ComponentFactory.getQueryParser().parse(queryStatement); ComponentFactory.getQueryParser().parse(queryStatement);
if (!queryStatement.isOk()) { if (!queryStatement.isOk()) {
throw new Exception(String.format("parse ontology table [%s] error [%s]", throw new Exception(String.format("parse ontology table [%s] error [%s]",
queryStatement.getSqlQueryParam().getTable(), queryStatement.getErrMsg())); queryStatement.getSqlQuery().getTable(), queryStatement.getErrMsg()));
} }
SqlQueryParam sqlQueryParam = queryStatement.getSqlQueryParam(); SqlQuery sqlQuery = queryStatement.getSqlQuery();
String ontologyQuerySql = sqlQueryParam.getSql(); String ontologyQuerySql = sqlQuery.getSql();
String ontologyInnerTable = sqlQueryParam.getTable(); String ontologyInnerTable = sqlQuery.getTable();
String ontologyInnerSql = queryStatement.getSql(); String ontologyInnerSql = queryStatement.getSql();
List<Pair<String, String>> tables = new ArrayList<>(); List<Pair<String, String>> tables = new ArrayList<>();
tables.add(Pair.of(ontologyInnerTable, ontologyInnerSql)); tables.add(Pair.of(ontologyInnerTable, ontologyInnerSql));
if (sqlQueryParam.isSupportWith()) { if (sqlQuery.isSupportWith()) {
EngineType engineType = queryStatement.getOntology().getDatabase().getType(); EngineType engineType = queryStatement.getOntology().getDatabase().getType();
if (!SqlMergeWithUtils.hasWith(engineType, ontologyQuerySql)) { if (!SqlMergeWithUtils.hasWith(engineType, ontologyQuerySql)) {
String withSql = "with " + tables.stream() String withSql = "with " + tables.stream()
@@ -84,9 +84,9 @@ public class DefaultSemanticTranslator implements SemanticTranslator {
} }
} else { } else {
for (Pair<String, String> tb : tables) { for (Pair<String, String> tb : tables) {
ontologyQuerySql = ontologyQuerySql = StringUtils.replace(ontologyQuerySql, tb.getLeft(),
StringUtils.replace(ontologyQuerySql, tb.getLeft(), "(" + tb.getRight() "(" + tb.getRight() + ") " + (sqlQuery.isWithAlias() ? "" : tb.getLeft()),
+ ") " + (sqlQueryParam.isWithAlias() ? "" : tb.getLeft()), -1); -1);
} }
queryStatement.setSql(ontologyQuerySql); queryStatement.setSql(ontologyQuerySql);
} }

View File

@@ -3,8 +3,8 @@ package com.tencent.supersonic.headless.core.translator;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
/** /**
* SemanticTranslator converts semantic query statement into SQL statement that can be executed * A semantic translator converts semantic query into SQL statement that can be executed against
* against physical data models. * physical data models.
*/ */
public interface SemanticTranslator { public interface SemanticTranslator {

View File

@@ -3,9 +3,8 @@ package com.tencent.supersonic.headless.core.translator.converter;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.tencent.supersonic.common.jsqlparser.SqlAddHelper; import com.tencent.supersonic.common.jsqlparser.SqlAddHelper;
import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper; import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum; import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Dimension;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import net.sf.jsqlparser.expression.Expression; import net.sf.jsqlparser.expression.Expression;
import net.sf.jsqlparser.expression.StringValue; import net.sf.jsqlparser.expression.StringValue;
@@ -21,32 +20,36 @@ import java.util.List;
import java.util.Objects; import java.util.Objects;
import java.util.stream.Collectors; import java.util.stream.Collectors;
/**
* This converter appends default dimension values (if configured) to the where statement.
*/
@Slf4j @Slf4j
@Component("DefaultDimValueConverter") @Component("DefaultDimValueConverter")
public class DefaultDimValueConverter implements QueryConverter { public class DefaultDimValueConverter implements QueryConverter {
@Override @Override
public boolean accept(QueryStatement queryStatement) { public boolean accept(QueryStatement queryStatement) {
return Objects.nonNull(queryStatement.getSqlQueryParam()) return Objects.nonNull(queryStatement.getSqlQuery())
&& StringUtils.isNotBlank(queryStatement.getSqlQueryParam().getSql()); && StringUtils.isNotBlank(queryStatement.getSqlQuery().getSql());
} }
@Override @Override
public void convert(QueryStatement queryStatement) { public void convert(QueryStatement queryStatement) {
List<Dimension> dimensions = queryStatement.getOntology().getDimensions().stream() List<DimSchemaResp> dimensions = queryStatement.getOntology().getDimensions().stream()
.filter(dimension -> !CollectionUtils.isEmpty(dimension.getDefaultValues())) .filter(dimension -> !CollectionUtils.isEmpty(dimension.getDefaultValues()))
.collect(Collectors.toList()); .collect(Collectors.toList());
if (CollectionUtils.isEmpty(dimensions)) { if (CollectionUtils.isEmpty(dimensions)) {
return; return;
} }
String sql = queryStatement.getSqlQueryParam().getSql(); String sql = queryStatement.getSqlQuery().getSql();
List<String> whereFields = List<String> whereFields =
SqlSelectHelper.getWhereFields(sql).stream().collect(Collectors.toList()); SqlSelectHelper.getWhereFields(sql).stream().collect(Collectors.toList());
if (!CollectionUtils.isEmpty(whereFields)) { if (!CollectionUtils.isEmpty(whereFields)) {
return; return;
} }
List<Expression> expressions = Lists.newArrayList(); List<Expression> expressions = Lists.newArrayList();
for (Dimension dimension : dimensions) { for (DimSchemaResp dimension : dimensions) {
ExpressionList expressionList = new ExpressionList(); ExpressionList expressionList = new ExpressionList();
List<Expression> exprs = new ArrayList<>(); List<Expression> exprs = new ArrayList<>();
dimension.getDefaultValues().forEach(value -> exprs.add(new StringValue(value))); dimension.getDefaultValues().forEach(value -> exprs.add(new StringValue(value)));
@@ -55,11 +58,11 @@ public class DefaultDimValueConverter implements QueryConverter {
inExpression.setLeftExpression(new Column(dimension.getBizName())); inExpression.setLeftExpression(new Column(dimension.getBizName()));
inExpression.setRightExpression(expressionList); inExpression.setRightExpression(expressionList);
expressions.add(inExpression); expressions.add(inExpression);
if (Objects.nonNull(queryStatement.getSqlQueryParam().getTable())) { if (Objects.nonNull(queryStatement.getSqlQuery().getTable())) {
queryStatement.getOntologyQueryParam().getDimensions().add(dimension.getBizName()); queryStatement.getOntologyQuery().getDimensions().add(dimension);
} }
} }
sql = SqlAddHelper.addWhere(sql, expressions); sql = SqlAddHelper.addWhere(sql, expressions);
queryStatement.getSqlQueryParam().setSql(sql); queryStatement.getSqlQuery().setSql(sql);
} }
} }

View File

@@ -1,115 +0,0 @@
package com.tencent.supersonic.headless.core.translator.converter;
import com.tencent.supersonic.common.jsqlparser.SqlReplaceHelper;
import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.headless.api.pojo.Measure;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.api.pojo.enums.MetricType;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQueryParam;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.util.*;
@Component("DerivedMetricConverter")
@Slf4j
public class DerivedMetricConverter implements QueryConverter {

    /** Applies only when the statement carries an SQL query param with non-blank SQL text. */
    @Override
    public boolean accept(QueryStatement queryStatement) {
        SqlQueryParam sqlParam = queryStatement.getSqlQueryParam();
        return Objects.nonNull(sqlParam) && StringUtils.isNotBlank(sqlParam.getSql());
    }

    /**
     * Expands derived metrics in the SQL into their underlying expressions.
     * When any expansion happens, the aggregation option is switched to NATIVE
     * and the collected base measures are appended to the queried metrics.
     */
    @Override
    public void convert(QueryStatement queryStatement) throws Exception {
        SqlQueryParam sqlParam = queryStatement.getSqlQueryParam();
        OntologyQueryParam ontologyParam = queryStatement.getOntologyQueryParam();
        Set<String> collectedMeasures = new HashSet<>();
        Map<String, String> metricToExpr =
                generateDerivedMetric(queryStatement.getSemanticSchemaResp(),
                        ontologyParam.getAggOption(), ontologyParam.getMetrics(),
                        ontologyParam.getDimensions(), collectedMeasures);
        String rewrittenSql = sqlParam.getSql();
        if (!CollectionUtils.isEmpty(metricToExpr)) {
            // metricTable sql use measures replace metric
            rewrittenSql = SqlReplaceHelper.replaceSqlByExpression(rewrittenSql, metricToExpr);
            ontologyParam.setAggOption(AggOption.NATIVE);
            // metricTable use measures replace metric
            if (!CollectionUtils.isEmpty(collectedMeasures)) {
                ontologyParam.getMetrics().addAll(collectedMeasures);
            }
        }
        sqlParam.setSql(rewrittenSql);
        queryStatement.setSql(queryStatement.getSqlQueryParam().getSql());
    }

    /**
     * Builds a map from derived-metric bizName to its expanded expression.
     * Returns an empty map when none of the queried metrics is derived.
     * Side effects: non-derived queried metrics and measures referenced by
     * expansions are added to {@code measures}; dimensions referenced by
     * expansions are added to {@code dimensions}.
     */
    private Map<String, String> generateDerivedMetric(SemanticSchemaResp semanticSchemaResp,
            AggOption aggOption, Set<String> metrics, Set<String> dimensions,
            Set<String> measures) {
        SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class);
        Map<String, String> metricToExpr = new HashMap<>();
        List<MetricSchemaResp> metricResps = semanticSchemaResp.getMetrics();
        List<DimSchemaResp> dimensionResps = semanticSchemaResp.getDimensions();
        // Bail out early when no queried metric is a derived one.
        boolean hasDerivedMetrics = metricResps.stream()
                .anyMatch(m -> metrics.contains(m.getBizName()) && MetricType
                        .isDerived(m.getMetricDefineType(), m.getMetricDefineByMeasureParams()));
        if (!hasDerivedMetrics) {
            return metricToExpr;
        }
        log.debug("begin to generateDerivedMetric {} [{}]", aggOption, metrics);
        // Collect every model field and measure once, keyed by bizName.
        Set<String> allFields = new HashSet<>();
        Map<String, Measure> allMeasures = new HashMap<>();
        semanticSchemaResp.getModelResps().forEach(modelResp -> {
            allFields.addAll(modelResp.getFieldList());
            if (modelResp.getModelDetail().getMeasures() != null) {
                modelResp.getModelDetail().getMeasures()
                        .forEach(measure -> allMeasures.put(measure.getBizName(), measure));
            }
        });
        Set<String> derivedDimensions = new HashSet<>();
        Set<String> derivedMetrics = new HashSet<>();
        // Cache of already expanded metrics, shared across recursive expansions.
        Map<String, String> visitedMetrics = new HashMap<>();
        for (MetricResp metricResp : metricResps) {
            if (!metrics.contains(metricResp.getBizName())) {
                continue;
            }
            boolean isDerived = MetricType.isDerived(metricResp.getMetricDefineType(),
                    metricResp.getMetricDefineByMeasureParams());
            if (isDerived) {
                String expr = sqlGenerateUtils.generateDerivedMetric(metricResps, allFields,
                        allMeasures, dimensionResps, sqlGenerateUtils.getExpr(metricResp),
                        metricResp.getMetricDefineType(), aggOption, visitedMetrics,
                        derivedMetrics, derivedDimensions);
                metricToExpr.put(metricResp.getBizName(), expr);
                log.debug("derived metric {}->{}", metricResp.getBizName(), expr);
            } else {
                measures.add(metricResp.getBizName());
            }
        }
        measures.addAll(derivedMetrics);
        for (String derivedDimension : derivedDimensions) {
            if (!dimensions.contains(derivedDimension)) {
                dimensions.add(derivedDimension);
            }
        }
        return metricToExpr;
    }
}

View File

@@ -0,0 +1,156 @@
package com.tencent.supersonic.headless.core.translator.converter;
import com.tencent.supersonic.common.jsqlparser.SqlReplaceHelper;
import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.common.pojo.enums.AggOperatorEnum;
import com.tencent.supersonic.headless.api.pojo.Measure;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.api.pojo.enums.MetricDefineType;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQuery;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.util.*;
/**
 * This converter replaces metric fields in the S2SQL with calculation expressions (if configured).
 */
@Component("MetricExpressionConverter")
@Slf4j
public class MetricExpressionConverter implements QueryConverter {

    /** Applies only when the statement carries an SQL query with non-blank SQL text. */
    @Override
    public boolean accept(QueryStatement queryStatement) {
        return Objects.nonNull(queryStatement.getSqlQuery())
                && StringUtils.isNotBlank(queryStatement.getSqlQuery().getSql());
    }

    /**
     * Rewrites the query SQL by substituting each derived metric with its expanded
     * expression. When any substitution occurs, the aggregation option is switched
     * to NATIVE because the expanded expressions already carry the aggregation.
     */
    @Override
    public void convert(QueryStatement queryStatement) throws Exception {
        SemanticSchemaResp semanticSchema = queryStatement.getSemanticSchema();
        SqlQuery sqlQuery = queryStatement.getSqlQuery();
        OntologyQuery ontologyQuery = queryStatement.getOntologyQuery();
        Map<String, String> metric2Expr = getMetricExpressions(semanticSchema, ontologyQuery);
        if (!CollectionUtils.isEmpty(metric2Expr)) {
            String sql = SqlReplaceHelper.replaceSqlByExpression(sqlQuery.getSql(), metric2Expr);
            sqlQuery.setSql(sql);
            ontologyQuery.setAggOption(AggOption.NATIVE);
        }
    }

    /**
     * Builds a map from derived-metric bizName to its fully expanded expression.
     * Returns an empty map when the ontology query has no derived metric.
     * Side effects: every field referenced by an expansion is recorded on the
     * metric itself and (via buildFieldExpr) on the ontology query's field set.
     */
    private Map<String, String> getMetricExpressions(SemanticSchemaResp semanticSchema,
            OntologyQuery ontologyQuery) {
        if (!ontologyQuery.hasDerivedMetric()) {
            return Collections.emptyMap();
        }
        List<MetricSchemaResp> allMetrics = semanticSchema.getMetrics();
        List<DimSchemaResp> allDimensions = semanticSchema.getDimensions();
        AggOption aggOption = ontologyQuery.getAggOption();
        Set<MetricSchemaResp> queryMetrics = ontologyQuery.getMetrics();
        Set<DimSchemaResp> queryDimensions = ontologyQuery.getDimensions();
        Set<String> queryFields = ontologyQuery.getFields();
        log.debug("begin to generateDerivedMetric {} [{}]", aggOption, queryMetrics);
        // Collect every model field and measure once, keyed by bizName.
        Set<String> allFields = new HashSet<>();
        Map<String, Measure> allMeasures = new HashMap<>();
        semanticSchema.getModelResps().forEach(modelResp -> {
            allFields.addAll(modelResp.getFieldList());
            if (modelResp.getModelDetail().getMeasures() != null) {
                modelResp.getModelDetail().getMeasures()
                        .forEach(measure -> allMeasures.put(measure.getBizName(), measure));
            }
        });
        // Cache of already expanded metrics, shared across recursive expansions.
        Map<String, String> visitedMetrics = new HashMap<>();
        Map<String, String> metric2Expr = new HashMap<>();
        for (MetricSchemaResp queryMetric : queryMetrics) {
            if (queryMetric.isDerived()) {
                String fieldExpr = buildFieldExpr(allMetrics, allFields, allMeasures, allDimensions,
                        queryMetric.getExpr(), queryMetric.getMetricDefineType(), aggOption,
                        visitedMetrics, queryDimensions, queryFields);
                metric2Expr.put(queryMetric.getBizName(), fieldExpr);
                // add all fields referenced in the expression
                queryMetric.getFields().addAll(SqlSelectHelper.getFieldsFromExpr(fieldExpr));
                log.debug("derived metric {}->{}", queryMetric.getBizName(), fieldExpr);
            }
        }
        return metric2Expr;
    }

    /**
     * Recursively expands {@code expression} according to its define type:
     * METRIC references are expanded through their own definitions (memoized in
     * {@code visitedMetric}), MEASURE references are wrapped with their
     * aggregation function (unless aggOption is NATIVE), and FIELD references
     * pull their matching dimension into {@code queryDimensions}.
     * Every referenced field is added to {@code queryFields}.
     */
    private String buildFieldExpr(final List<MetricSchemaResp> metricResps,
            final Set<String> allFields, final Map<String, Measure> allMeasures,
            final List<DimSchemaResp> dimensionResps, final String expression,
            final MetricDefineType metricDefineType, AggOption aggOption,
            Map<String, String> visitedMetric, Set<DimSchemaResp> queryDimensions,
            Set<String> queryFields) {
        Set<String> fields = SqlSelectHelper.getFieldsFromExpr(expression);
        if (!CollectionUtils.isEmpty(fields)) {
            Map<String, String> replace = new HashMap<>();
            for (String field : fields) {
                queryFields.add(field);
                switch (metricDefineType) {
                    case METRIC:
                        Optional<MetricSchemaResp> metricItem = metricResps.stream()
                                .filter(m -> m.getBizName().equalsIgnoreCase(field)).findFirst();
                        if (metricItem.isPresent()) {
                            // Reuse the cached expansion of a metric already expanded.
                            if (visitedMetric.containsKey(field)) {
                                replace.put(field, visitedMetric.get(field));
                                break;
                            }
                            String nestedExpr = buildFieldExpr(metricResps, allFields, allMeasures,
                                    dimensionResps, metricItem.get().getExpr(),
                                    metricItem.get().getMetricDefineType(), aggOption,
                                    visitedMetric, queryDimensions, queryFields);
                            replace.put(field, nestedExpr);
                            visitedMetric.put(field, nestedExpr);
                        }
                        break;
                    case MEASURE:
                        if (allMeasures.containsKey(field)) {
                            Measure measure = allMeasures.get(field);
                            // NOTE(review): COUNT_DISTINCT returns immediately, discarding any
                            // replacements built for other fields of this expression —
                            // preserved from the original; confirm this is intended.
                            if (AggOperatorEnum.COUNT_DISTINCT.getOperator()
                                    .equalsIgnoreCase(measure.getAgg())) {
                                return AggOption.NATIVE.equals(aggOption) ? measure.getBizName()
                                        : AggOperatorEnum.COUNT.getOperator() + " ( "
                                                + AggOperatorEnum.DISTINCT + " "
                                                + measure.getBizName() + " ) ";
                            }
                            String expr = AggOption.NATIVE.equals(aggOption) ? measure.getBizName()
                                    : measure.getAgg() + " ( " + measure.getBizName() + " ) ";
                            replace.put(field, expr);
                        }
                        break;
                    case FIELD:
                        if (allFields.contains(field)) {
                            Optional<DimSchemaResp> dimensionItem = dimensionResps.stream()
                                    .filter(d -> d.getBizName().equals(field)).findFirst();
                            if (dimensionItem.isPresent()) {
                                queryDimensions.add(dimensionItem.get());
                            }
                        }
                        break;
                    default:
                        break;
                }
            }
            if (!CollectionUtils.isEmpty(replace)) {
                String expr = SqlReplaceHelper.replaceExpression(expression, replace);
                log.debug("derived measure {}->{}", expression, expr);
                return expr;
            }
        }
        return expression;
    }
}

View File

@@ -7,10 +7,10 @@ import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.common.util.ContextUtils; import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption; import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.core.pojo.Database; import com.tencent.supersonic.headless.core.pojo.Database;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQueryParam; import com.tencent.supersonic.headless.core.pojo.SqlQuery;
import com.tencent.supersonic.headless.core.pojo.StructQueryParam; import com.tencent.supersonic.headless.core.pojo.StructQuery;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils; import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
@@ -21,30 +21,29 @@ import java.util.List;
import java.util.Objects; import java.util.Objects;
import java.util.stream.Collectors; import java.util.stream.Collectors;
@Component("CalculateAggConverter") @Component("MetricRatioConverter")
@Slf4j @Slf4j
public class MetricRatioConverter implements QueryConverter { public class MetricRatioConverter implements QueryConverter {
public interface EngineSql { public interface EngineSql {
String sql(StructQueryParam structQueryParam, boolean isOver, boolean asWith, String sql(StructQuery structQuery, boolean isOver, boolean asWith, String metricSql);
String metricSql);
} }
@Override @Override
public boolean accept(QueryStatement queryStatement) { public boolean accept(QueryStatement queryStatement) {
if (Objects.isNull(queryStatement.getStructQueryParam()) || queryStatement.getIsS2SQL() if (Objects.isNull(queryStatement.getStructQuery()) || queryStatement.getIsS2SQL()
|| !isRatioAccept(queryStatement.getStructQueryParam())) { || !isRatioAccept(queryStatement.getStructQuery())) {
return false; return false;
} }
StructQueryParam structQueryParam = queryStatement.getStructQueryParam(); StructQuery structQuery = queryStatement.getStructQuery();
if (structQueryParam.getQueryType().isNativeAggQuery() if (structQuery.getQueryType().isNativeAggQuery()
|| CollectionUtils.isEmpty(structQueryParam.getAggregators())) { || CollectionUtils.isEmpty(structQuery.getAggregators())) {
return false; return false;
} }
int nonSumFunction = 0; int nonSumFunction = 0;
for (Aggregator agg : structQueryParam.getAggregators()) { for (Aggregator agg : structQuery.getAggregators()) {
if (agg.getFunc() == null || "".equals(agg.getFunc())) { if (agg.getFunc() == null || "".equals(agg.getFunc())) {
return false; return false;
} }
@@ -65,8 +64,8 @@ public class MetricRatioConverter implements QueryConverter {
} }
/** Ratio */ /** Ratio */
public boolean isRatioAccept(StructQueryParam structQueryParam) { public boolean isRatioAccept(StructQuery structQuery) {
Long ratioFuncNum = structQueryParam.getAggregators().stream() Long ratioFuncNum = structQuery.getAggregators().stream()
.filter(f -> (f.getFunc().equals(AggOperatorEnum.RATIO_ROLL) .filter(f -> (f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)
|| f.getFunc().equals(AggOperatorEnum.RATIO_OVER))) || f.getFunc().equals(AggOperatorEnum.RATIO_OVER)))
.count(); .count();
@@ -79,44 +78,44 @@ public class MetricRatioConverter implements QueryConverter {
public void generateRatioSql(QueryStatement queryStatement, EngineType engineTypeEnum, public void generateRatioSql(QueryStatement queryStatement, EngineType engineTypeEnum,
String version) throws Exception { String version) throws Exception {
SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class); SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class);
StructQueryParam structQueryParam = queryStatement.getStructQueryParam(); StructQuery structQuery = queryStatement.getStructQuery();
check(structQueryParam); check(structQuery);
queryStatement.setEnableOptimize(false); queryStatement.setEnableOptimize(false);
OntologyQueryParam ontologyQueryParam = queryStatement.getOntologyQueryParam(); OntologyQuery ontologyQuery = queryStatement.getOntologyQuery();
ontologyQueryParam.setAggOption(AggOption.AGGREGATION); ontologyQuery.setAggOption(AggOption.AGGREGATION);
String metricTableName = "v_metric_tb_tmp"; String metricTableName = "v_metric_tb_tmp";
boolean isOver = isOverRatio(structQueryParam); boolean isOver = isOverRatio(structQuery);
String sql = ""; String sql = "";
SqlQueryParam dsParam = queryStatement.getSqlQueryParam(); SqlQuery sqlQuery = queryStatement.getSqlQuery();
dsParam.setTable(metricTableName); sqlQuery.setTable(metricTableName);
switch (engineTypeEnum) { switch (engineTypeEnum) {
case H2: case H2:
sql = new H2EngineSql().sql(structQueryParam, isOver, true, metricTableName); sql = new H2EngineSql().sql(structQuery, isOver, true, metricTableName);
break; break;
case MYSQL: case MYSQL:
case DORIS: case DORIS:
case CLICKHOUSE: case CLICKHOUSE:
if (!sqlGenerateUtils.isSupportWith(engineTypeEnum, version)) { if (!sqlGenerateUtils.isSupportWith(engineTypeEnum, version)) {
dsParam.setSupportWith(false); sqlQuery.setSupportWith(false);
} }
if (!engineTypeEnum.equals(engineTypeEnum.CLICKHOUSE)) { if (!engineTypeEnum.equals(engineTypeEnum.CLICKHOUSE)) {
sql = new MysqlEngineSql().sql(structQueryParam, isOver, sql = new MysqlEngineSql().sql(structQuery, isOver, sqlQuery.isSupportWith(),
dsParam.isSupportWith(), metricTableName); metricTableName);
} else { } else {
sql = new CkEngineSql().sql(structQueryParam, isOver, dsParam.isSupportWith(), sql = new CkEngineSql().sql(structQuery, isOver, sqlQuery.isSupportWith(),
metricTableName); metricTableName);
} }
break; break;
default: default:
} }
dsParam.setSql(sql); sqlQuery.setSql(sql);
} }
public class H2EngineSql implements EngineSql { public class H2EngineSql implements EngineSql {
public String getOverSelect(StructQueryParam structQueryParam, boolean isOver) { public String getOverSelect(StructQuery structQuery, boolean isOver) {
String aggStr = structQueryParam.getAggregators().stream().map(f -> { String aggStr = structQuery.getAggregators().stream().map(f -> {
if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER) if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER)
|| f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) { || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
return String.format("( (%s-%s_roll)/cast(%s_roll as DOUBLE) ) as %s_%s,%s", return String.format("( (%s-%s_roll)/cast(%s_roll as DOUBLE) ) as %s_%s,%s",
@@ -126,44 +125,43 @@ public class MetricRatioConverter implements QueryConverter {
return f.getColumn(); return f.getColumn();
} }
}).collect(Collectors.joining(",")); }).collect(Collectors.joining(","));
return CollectionUtils.isEmpty(structQueryParam.getGroups()) ? aggStr return CollectionUtils.isEmpty(structQuery.getGroups()) ? aggStr
: String.join(",", structQueryParam.getGroups()) + "," + aggStr; : String.join(",", structQuery.getGroups()) + "," + aggStr;
} }
public String getTimeSpan(StructQueryParam structQueryParam, boolean isOver, public String getTimeSpan(StructQuery structQuery, boolean isOver, boolean isAdd) {
boolean isAdd) { if (Objects.nonNull(structQuery.getDateInfo())) {
if (Objects.nonNull(structQueryParam.getDateInfo())) {
String addStr = isAdd ? "" : "-"; String addStr = isAdd ? "" : "-";
if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.DAY)) { if (structQuery.getDateInfo().getPeriod().equals(DatePeriodEnum.DAY)) {
return "day," + (isOver ? addStr + "7" : addStr + "1"); return "day," + (isOver ? addStr + "7" : addStr + "1");
} }
if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) { if (structQuery.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) {
return isOver ? "month," + addStr + "1" : "day," + addStr + "7"; return isOver ? "month," + addStr + "1" : "day," + addStr + "7";
} }
if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH.MONTH)) { if (structQuery.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH.MONTH)) {
return isOver ? "year," + addStr + "1" : "month," + addStr + "1"; return isOver ? "year," + addStr + "1" : "month," + addStr + "1";
} }
} }
return ""; return "";
} }
public String getJoinOn(StructQueryParam structQueryParam, boolean isOver, String aliasLeft, public String getJoinOn(StructQuery structQuery, boolean isOver, String aliasLeft,
String aliasRight) { String aliasRight) {
String timeDim = getTimeDim(structQueryParam); String timeDim = getTimeDim(structQuery);
String timeSpan = getTimeSpan(structQueryParam, isOver, true); String timeSpan = getTimeSpan(structQuery, isOver, true);
String aggStr = structQueryParam.getAggregators().stream().map(f -> { String aggStr = structQuery.getAggregators().stream().map(f -> {
if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER) if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER)
|| f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) { || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) { if (structQuery.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) {
return String.format( return String.format(
"%s is not null and %s = FORMATDATETIME(DATEADD(%s,CONCAT(%s,'-01')),'yyyy-MM') ", "%s is not null and %s = FORMATDATETIME(DATEADD(%s,CONCAT(%s,'-01')),'yyyy-MM') ",
aliasRight + timeDim, aliasLeft + timeDim, timeSpan, aliasRight + timeDim, aliasLeft + timeDim, timeSpan,
aliasRight + timeDim); aliasRight + timeDim);
} }
if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK) if (structQuery.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK)
&& isOver) { && isOver) {
return String.format(" DATE_TRUNC('week',DATEADD(%s,%s) ) = %s ", return String.format(" DATE_TRUNC('week',DATEADD(%s,%s) ) = %s ",
getTimeSpan(structQueryParam, isOver, false), aliasLeft + timeDim, getTimeSpan(structQuery, isOver, false), aliasLeft + timeDim,
aliasRight + timeDim); aliasRight + timeDim);
} }
return String.format("%s = TIMESTAMPADD(%s,%s) ", aliasLeft + timeDim, timeSpan, return String.format("%s = TIMESTAMPADD(%s,%s) ", aliasLeft + timeDim, timeSpan,
@@ -173,7 +171,7 @@ public class MetricRatioConverter implements QueryConverter {
} }
}).collect(Collectors.joining(" and ")); }).collect(Collectors.joining(" and "));
List<String> groups = new ArrayList<>(); List<String> groups = new ArrayList<>();
for (String group : structQueryParam.getGroups()) { for (String group : structQuery.getGroups()) {
if (group.equalsIgnoreCase(timeDim)) { if (group.equalsIgnoreCase(timeDim)) {
continue; continue;
} }
@@ -184,36 +182,36 @@ public class MetricRatioConverter implements QueryConverter {
} }
@Override @Override
public String sql(StructQueryParam structQueryParam, boolean isOver, boolean asWith, public String sql(StructQuery structQuery, boolean isOver, boolean asWith,
String metricSql) { String metricSql) {
String sql = String.format( String sql = String.format(
"select %s from ( select %s , %s from %s t0 left join %s t1 on %s ) metric_tb_src %s %s ", "select %s from ( select %s , %s from %s t0 left join %s t1 on %s ) metric_tb_src %s %s ",
getOverSelect(structQueryParam, isOver), getAllSelect(structQueryParam, "t0."), getOverSelect(structQuery, isOver), getAllSelect(structQuery, "t0."),
getAllJoinSelect(structQueryParam, "t1."), metricSql, metricSql, getAllJoinSelect(structQuery, "t1."), metricSql, metricSql,
getJoinOn(structQueryParam, isOver, "t0.", "t1."), getOrderBy(structQueryParam), getJoinOn(structQuery, isOver, "t0.", "t1."), getOrderBy(structQuery),
getLimit(structQueryParam)); getLimit(structQuery));
return sql; return sql;
} }
} }
public class CkEngineSql extends MysqlEngineSql { public class CkEngineSql extends MysqlEngineSql {
public String getJoinOn(StructQueryParam structQueryParam, boolean isOver, String aliasLeft, public String getJoinOn(StructQuery structQuery, boolean isOver, String aliasLeft,
String aliasRight) { String aliasRight) {
String timeDim = getTimeDim(structQueryParam); String timeDim = getTimeDim(structQuery);
String timeSpan = "INTERVAL " + getTimeSpan(structQueryParam, isOver, true); String timeSpan = "INTERVAL " + getTimeSpan(structQuery, isOver, true);
String aggStr = structQueryParam.getAggregators().stream().map(f -> { String aggStr = structQuery.getAggregators().stream().map(f -> {
if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER) if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER)
|| f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) { || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) { if (structQuery.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) {
return String.format( return String.format(
"toDate(CONCAT(%s,'-01')) = date_add(toDate(CONCAT(%s,'-01')),%s) ", "toDate(CONCAT(%s,'-01')) = date_add(toDate(CONCAT(%s,'-01')),%s) ",
aliasLeft + timeDim, aliasRight + timeDim, timeSpan); aliasLeft + timeDim, aliasRight + timeDim, timeSpan);
} }
if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK) if (structQuery.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK)
&& isOver) { && isOver) {
return String.format("toMonday(date_add(%s ,INTERVAL %s) ) = %s", return String.format("toMonday(date_add(%s ,INTERVAL %s) ) = %s",
aliasLeft + timeDim, getTimeSpan(structQueryParam, isOver, false), aliasLeft + timeDim, getTimeSpan(structQuery, isOver, false),
aliasRight + timeDim); aliasRight + timeDim);
} }
return String.format("%s = date_add(%s,%s) ", aliasLeft + timeDim, return String.format("%s = date_add(%s,%s) ", aliasLeft + timeDim,
@@ -223,7 +221,7 @@ public class MetricRatioConverter implements QueryConverter {
} }
}).collect(Collectors.joining(" and ")); }).collect(Collectors.joining(" and "));
List<String> groups = new ArrayList<>(); List<String> groups = new ArrayList<>();
for (String group : structQueryParam.getGroups()) { for (String group : structQuery.getGroups()) {
if (group.equalsIgnoreCase(timeDim)) { if (group.equalsIgnoreCase(timeDim)) {
continue; continue;
} }
@@ -234,49 +232,46 @@ public class MetricRatioConverter implements QueryConverter {
} }
@Override @Override
public String sql(StructQueryParam structQueryParam, boolean isOver, boolean asWith, public String sql(StructQuery structQuery, boolean isOver, boolean asWith,
String metricSql) { String metricSql) {
if (!asWith) { if (!asWith) {
return String.format( return String.format(
"select %s from ( select %s , %s from %s t0 left join %s t1 on %s ) metric_tb_src %s %s ", "select %s from ( select %s , %s from %s t0 left join %s t1 on %s ) metric_tb_src %s %s ",
getOverSelect(structQueryParam, isOver), getOverSelect(structQuery, isOver), getAllSelect(structQuery, "t0."),
getAllSelect(structQueryParam, "t0."), getAllJoinSelect(structQuery, "t1."), metricSql, metricSql,
getAllJoinSelect(structQueryParam, "t1."), metricSql, metricSql, getJoinOn(structQuery, isOver, "t0.", "t1."), getOrderBy(structQuery),
getJoinOn(structQueryParam, isOver, "t0.", "t1."), getLimit(structQuery));
getOrderBy(structQueryParam), getLimit(structQueryParam));
} }
return String.format( return String.format(
",t0 as (select * from %s),t1 as (select * from %s) select %s from ( select %s , %s " ",t0 as (select * from %s),t1 as (select * from %s) select %s from ( select %s , %s "
+ "from t0 left join t1 on %s ) metric_tb_src %s %s ", + "from t0 left join t1 on %s ) metric_tb_src %s %s ",
metricSql, metricSql, getOverSelect(structQueryParam, isOver), metricSql, metricSql, getOverSelect(structQuery, isOver),
getAllSelect(structQueryParam, "t0."), getAllSelect(structQuery, "t0."), getAllJoinSelect(structQuery, "t1."),
getAllJoinSelect(structQueryParam, "t1."), getJoinOn(structQuery, isOver, "t0.", "t1."), getOrderBy(structQuery),
getJoinOn(structQueryParam, isOver, "t0.", "t1."), getOrderBy(structQueryParam), getLimit(structQuery));
getLimit(structQueryParam));
} }
} }
public class MysqlEngineSql implements EngineSql { public class MysqlEngineSql implements EngineSql {
public String getTimeSpan(StructQueryParam structQueryParam, boolean isOver, public String getTimeSpan(StructQuery structQuery, boolean isOver, boolean isAdd) {
boolean isAdd) { if (Objects.nonNull(structQuery.getDateInfo())) {
if (Objects.nonNull(structQueryParam.getDateInfo())) {
String addStr = isAdd ? "" : "-"; String addStr = isAdd ? "" : "-";
if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.DAY)) { if (structQuery.getDateInfo().getPeriod().equals(DatePeriodEnum.DAY)) {
return isOver ? addStr + "7 day" : addStr + "1 day"; return isOver ? addStr + "7 day" : addStr + "1 day";
} }
if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK)) { if (structQuery.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK)) {
return isOver ? addStr + "1 month" : addStr + "7 day"; return isOver ? addStr + "1 month" : addStr + "7 day";
} }
if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) { if (structQuery.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) {
return isOver ? addStr + "1 year" : addStr + "1 month"; return isOver ? addStr + "1 year" : addStr + "1 month";
} }
} }
return ""; return "";
} }
public String getOverSelect(StructQueryParam structQueryParam, boolean isOver) { public String getOverSelect(StructQuery structQuery, boolean isOver) {
String aggStr = structQueryParam.getAggregators().stream().map(f -> { String aggStr = structQuery.getAggregators().stream().map(f -> {
if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER) if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER)
|| f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) { || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
return String.format("if(%s_roll!=0, (%s-%s_roll)/%s_roll , 0) as %s_%s,%s", return String.format("if(%s_roll!=0, (%s-%s_roll)/%s_roll , 0) as %s_%s,%s",
@@ -286,26 +281,26 @@ public class MetricRatioConverter implements QueryConverter {
return f.getColumn(); return f.getColumn();
} }
}).collect(Collectors.joining(",")); }).collect(Collectors.joining(","));
return CollectionUtils.isEmpty(structQueryParam.getGroups()) ? aggStr return CollectionUtils.isEmpty(structQuery.getGroups()) ? aggStr
: String.join(",", structQueryParam.getGroups()) + "," + aggStr; : String.join(",", structQuery.getGroups()) + "," + aggStr;
} }
public String getJoinOn(StructQueryParam structQueryParam, boolean isOver, String aliasLeft, public String getJoinOn(StructQuery structQuery, boolean isOver, String aliasLeft,
String aliasRight) { String aliasRight) {
String timeDim = getTimeDim(structQueryParam); String timeDim = getTimeDim(structQuery);
String timeSpan = "INTERVAL " + getTimeSpan(structQueryParam, isOver, true); String timeSpan = "INTERVAL " + getTimeSpan(structQuery, isOver, true);
String aggStr = structQueryParam.getAggregators().stream().map(f -> { String aggStr = structQuery.getAggregators().stream().map(f -> {
if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER) if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER)
|| f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) { || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) { if (structQuery.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) {
return String.format( return String.format(
"%s = DATE_FORMAT(date_add(CONCAT(%s,'-01'), %s),'%%Y-%%m') ", "%s = DATE_FORMAT(date_add(CONCAT(%s,'-01'), %s),'%%Y-%%m') ",
aliasLeft + timeDim, aliasRight + timeDim, timeSpan); aliasLeft + timeDim, aliasRight + timeDim, timeSpan);
} }
if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK) if (structQuery.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK)
&& isOver) { && isOver) {
return String.format("to_monday(date_add(%s ,INTERVAL %s) ) = %s", return String.format("to_monday(date_add(%s ,INTERVAL %s) ) = %s",
aliasLeft + timeDim, getTimeSpan(structQueryParam, isOver, false), aliasLeft + timeDim, getTimeSpan(structQuery, isOver, false),
aliasRight + timeDim); aliasRight + timeDim);
} }
return String.format("%s = date_add(%s,%s) ", aliasLeft + timeDim, return String.format("%s = date_add(%s,%s) ", aliasLeft + timeDim,
@@ -315,7 +310,7 @@ public class MetricRatioConverter implements QueryConverter {
} }
}).collect(Collectors.joining(" and ")); }).collect(Collectors.joining(" and "));
List<String> groups = new ArrayList<>(); List<String> groups = new ArrayList<>();
for (String group : structQueryParam.getGroups()) { for (String group : structQuery.getGroups()) {
if (group.equalsIgnoreCase(timeDim)) { if (group.equalsIgnoreCase(timeDim)) {
continue; continue;
} }
@@ -326,46 +321,45 @@ public class MetricRatioConverter implements QueryConverter {
} }
@Override @Override
public String sql(StructQueryParam structQueryParam, boolean isOver, boolean asWith, public String sql(StructQuery structQuery, boolean isOver, boolean asWith,
String metricSql) { String metricSql) {
String sql = String.format( String sql = String.format(
"select %s from ( select %s , %s from %s t0 left join %s t1 on %s ) metric_tb_src %s %s ", "select %s from ( select %s , %s from %s t0 left join %s t1 on %s ) metric_tb_src %s %s ",
getOverSelect(structQueryParam, isOver), getAllSelect(structQueryParam, "t0."), getOverSelect(structQuery, isOver), getAllSelect(structQuery, "t0."),
getAllJoinSelect(structQueryParam, "t1."), metricSql, metricSql, getAllJoinSelect(structQuery, "t1."), metricSql, metricSql,
getJoinOn(structQueryParam, isOver, "t0.", "t1."), getOrderBy(structQueryParam), getJoinOn(structQuery, isOver, "t0.", "t1."), getOrderBy(structQuery),
getLimit(structQueryParam)); getLimit(structQuery));
return sql; return sql;
} }
} }
private String getAllJoinSelect(StructQueryParam structQueryParam, String alias) { private String getAllJoinSelect(StructQuery structQuery, String alias) {
String aggStr = structQueryParam.getAggregators().stream() String aggStr = structQuery.getAggregators().stream()
.map(f -> getSelectField(f, alias) + " as " + getSelectField(f, "") + "_roll") .map(f -> getSelectField(f, alias) + " as " + getSelectField(f, "") + "_roll")
.collect(Collectors.joining(",")); .collect(Collectors.joining(","));
List<String> groups = new ArrayList<>(); List<String> groups = new ArrayList<>();
for (String group : structQueryParam.getGroups()) { for (String group : structQuery.getGroups()) {
groups.add(alias + group + " as " + group + "_roll"); groups.add(alias + group + " as " + group + "_roll");
} }
return CollectionUtils.isEmpty(groups) ? aggStr : String.join(",", groups) + "," + aggStr; return CollectionUtils.isEmpty(groups) ? aggStr : String.join(",", groups) + "," + aggStr;
} }
private static String getTimeDim(StructQueryParam structQueryParam) { private static String getTimeDim(StructQuery structQuery) {
return structQueryParam.getDateInfo().getDateField(); return structQuery.getDateInfo().getDateField();
} }
private static String getLimit(StructQueryParam structQueryParam) { private static String getLimit(StructQuery structQuery) {
if (structQueryParam != null && structQueryParam.getLimit() != null if (structQuery != null && structQuery.getLimit() != null && structQuery.getLimit() > 0) {
&& structQueryParam.getLimit() > 0) { return " limit " + String.valueOf(structQuery.getLimit());
return " limit " + String.valueOf(structQueryParam.getLimit());
} }
return ""; return "";
} }
private String getAllSelect(StructQueryParam structQueryParam, String alias) { private String getAllSelect(StructQuery structQuery, String alias) {
String aggStr = structQueryParam.getAggregators().stream() String aggStr = structQuery.getAggregators().stream().map(f -> getSelectField(f, alias))
.map(f -> getSelectField(f, alias)).collect(Collectors.joining(",")); .collect(Collectors.joining(","));
return CollectionUtils.isEmpty(structQueryParam.getGroups()) ? aggStr return CollectionUtils.isEmpty(structQuery.getGroups()) ? aggStr
: alias + String.join("," + alias, structQueryParam.getGroups()) + "," + aggStr; : alias + String.join("," + alias, structQuery.getGroups()) + "," + aggStr;
} }
private String getSelectField(final Aggregator agg, String alias) { private String getSelectField(final Aggregator agg, String alias) {
@@ -377,25 +371,25 @@ public class MetricRatioConverter implements QueryConverter {
return sqlGenerateUtils.getSelectField(agg); return sqlGenerateUtils.getSelectField(agg);
} }
private static String getOrderBy(StructQueryParam structQueryParam) { private static String getOrderBy(StructQuery structQuery) {
return "order by " + getTimeDim(structQueryParam) + " desc"; return "order by " + getTimeDim(structQuery) + " desc";
} }
private boolean isOverRatio(StructQueryParam structQueryParam) { private boolean isOverRatio(StructQuery structQuery) {
Long overCt = structQueryParam.getAggregators().stream() Long overCt = structQuery.getAggregators().stream()
.filter(f -> f.getFunc().equals(AggOperatorEnum.RATIO_OVER)).count(); .filter(f -> f.getFunc().equals(AggOperatorEnum.RATIO_OVER)).count();
return overCt > 0; return overCt > 0;
} }
private void check(StructQueryParam structQueryParam) throws Exception { private void check(StructQuery structQuery) throws Exception {
Long ratioOverNum = structQueryParam.getAggregators().stream() Long ratioOverNum = structQuery.getAggregators().stream()
.filter(f -> f.getFunc().equals(AggOperatorEnum.RATIO_OVER)).count(); .filter(f -> f.getFunc().equals(AggOperatorEnum.RATIO_OVER)).count();
Long ratioRollNum = structQueryParam.getAggregators().stream() Long ratioRollNum = structQuery.getAggregators().stream()
.filter(f -> f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)).count(); .filter(f -> f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)).count();
if (ratioOverNum > 0 && ratioRollNum > 0) { if (ratioOverNum > 0 && ratioRollNum > 0) {
throw new Exception("not support over ratio and roll ratio together "); throw new Exception("not support over ratio and roll ratio together ");
} }
if (getTimeDim(structQueryParam).isEmpty()) { if (getTimeDim(structQuery).isEmpty()) {
throw new Exception("miss time filter"); throw new Exception("miss time filter");
} }
} }

View File

@@ -2,7 +2,9 @@ package com.tencent.supersonic.headless.core.translator.converter;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
/** to supplement,translate the request Body */ /**
* A query converter performs preprocessing work to the QueryStatement before parsing.
*/
public interface QueryConverter { public interface QueryConverter {
boolean accept(QueryStatement queryStatement); boolean accept(QueryStatement queryStatement);

View File

@@ -6,30 +6,33 @@ import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.common.pojo.Constants; import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.common.util.ContextUtils; import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.headless.api.pojo.SchemaItem;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption; import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp; import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp; import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQueryParam; import com.tencent.supersonic.headless.core.pojo.SqlQuery;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils; import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
import java.util.*; import java.util.List;
import java.util.stream.Collectors; import java.util.Map;
import java.util.stream.Stream; import java.util.Objects;
/**
* This converter rewrites S2SQL including conversion from metric/dimension name to bizName and
* build ontology query in preparation for generation of physical SQL.
*/
@Component("SqlQueryConverter") @Component("SqlQueryConverter")
@Slf4j @Slf4j
public class SqlQueryConverter implements QueryConverter { public class SqlQueryConverter implements QueryConverter {
@Override @Override
public boolean accept(QueryStatement queryStatement) { public boolean accept(QueryStatement queryStatement) {
return Objects.nonNull(queryStatement.getSqlQueryParam()) && queryStatement.getIsS2SQL(); return Objects.nonNull(queryStatement.getSqlQuery()) && queryStatement.getIsS2SQL();
} }
@Override @Override
@@ -38,42 +41,40 @@ public class SqlQueryConverter implements QueryConverter {
rewriteOrderBy(queryStatement); rewriteOrderBy(queryStatement);
// fill sqlQuery // fill sqlQuery
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp(); SqlQuery sqlQuery = queryStatement.getSqlQuery();
SqlQueryParam sqlQueryParam = queryStatement.getSqlQueryParam(); String tableName = SqlSelectHelper.getTableName(sqlQuery.getSql());
String tableName = SqlSelectHelper.getTableName(sqlQueryParam.getSql());
if (StringUtils.isEmpty(tableName)) { if (StringUtils.isEmpty(tableName)) {
return; return;
} }
sqlQueryParam.setTable(tableName.toLowerCase()); sqlQuery.setTable(tableName.toLowerCase());
SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class); SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class);
SemanticSchemaResp semanticSchema = queryStatement.getSemanticSchema();
if (!sqlGenerateUtils.isSupportWith( if (!sqlGenerateUtils.isSupportWith(
EngineType.fromString(semanticSchemaResp.getDatabaseResp().getType().toUpperCase()), EngineType.fromString(semanticSchema.getDatabaseResp().getType().toUpperCase()),
semanticSchemaResp.getDatabaseResp().getVersion())) { semanticSchema.getDatabaseResp().getVersion())) {
sqlQueryParam.setSupportWith(false); sqlQuery.setSupportWith(false);
sqlQueryParam.setWithAlias(false); sqlQuery.setWithAlias(false);
} }
// build ontologyQuery // build ontologyQuery
List<String> allFields = SqlSelectHelper.getAllSelectFields(sqlQueryParam.getSql()); List<String> allQueryFields = SqlSelectHelper.getAllSelectFields(sqlQuery.getSql());
List<MetricSchemaResp> metricSchemas = getMetrics(semanticSchemaResp, allFields); List<MetricSchemaResp> queryMetrics = semanticSchema.getMetrics(allQueryFields);
List<String> metrics = List<DimSchemaResp> queryDimensions = semanticSchema.getDimensions(allQueryFields);
metricSchemas.stream().map(SchemaItem::getBizName).collect(Collectors.toList()); OntologyQuery ontologyQuery = new OntologyQuery();
Set<String> dimensions = getDimensions(semanticSchemaResp, allFields); ontologyQuery.getMetrics().addAll(queryMetrics);
OntologyQueryParam ontologyQueryParam = new OntologyQueryParam(); ontologyQuery.getDimensions().addAll(queryDimensions);
ontologyQueryParam.getMetrics().addAll(metrics);
ontologyQueryParam.getDimensions().addAll(dimensions);
AggOption sqlQueryAggOption = getAggOption(sqlQueryParam.getSql(), metricSchemas);
AggOption sqlQueryAggOption = getAggOption(sqlQuery.getSql(), queryMetrics);
// if sql query itself has aggregation, ontology query just returns detail // if sql query itself has aggregation, ontology query just returns detail
if (sqlQueryAggOption.equals(AggOption.AGGREGATION)) { if (sqlQueryAggOption.equals(AggOption.AGGREGATION)) {
ontologyQueryParam.setAggOption(AggOption.NATIVE); ontologyQuery.setAggOption(AggOption.NATIVE);
} else if (sqlQueryAggOption.equals(AggOption.NATIVE) && !metrics.isEmpty()) { } else if (sqlQueryAggOption.equals(AggOption.NATIVE) && !queryMetrics.isEmpty()) {
ontologyQueryParam.setAggOption(AggOption.DEFAULT); ontologyQuery.setAggOption(AggOption.DEFAULT);
} }
ontologyQueryParam.setNativeQuery(!AggOption.isAgg(ontologyQueryParam.getAggOption())); ontologyQuery.setNativeQuery(!AggOption.isAgg(ontologyQuery.getAggOption()));
queryStatement.setOntologyQueryParam(ontologyQueryParam);
queryStatement.setSql(sqlQueryParam.getSql()); queryStatement.setOntologyQuery(ontologyQuery);
log.info("parse sqlQuery [{}] ", sqlQueryParam); log.info("parse sqlQuery [{}] ", sqlQuery);
} }
private AggOption getAggOption(String sql, List<MetricSchemaResp> metricSchemas) { private AggOption getAggOption(String sql, List<MetricSchemaResp> metricSchemas) {
@@ -109,34 +110,10 @@ public class SqlQueryConverter implements QueryConverter {
return AggOption.DEFAULT; return AggOption.DEFAULT;
} }
private Set<String> getDimensions(SemanticSchemaResp semanticSchemaResp,
List<String> allFields) {
Map<String, String> dimensionLowerToNameMap = semanticSchemaResp.getDimensions().stream()
.collect(Collectors.toMap(entry -> entry.getBizName().toLowerCase(),
SchemaItem::getBizName, (k1, k2) -> k1));
// dimensionLowerToNameMap.put(TimeDimensionEnum.DAY.getName(),
// TimeDimensionEnum.DAY.getName());
return allFields.stream()
.filter(entry -> dimensionLowerToNameMap.containsKey(entry.toLowerCase()))
.map(entry -> dimensionLowerToNameMap.get(entry.toLowerCase()))
.collect(Collectors.toSet());
}
private List<MetricSchemaResp> getMetrics(SemanticSchemaResp semanticSchemaResp,
List<String> allFields) {
Map<String, MetricSchemaResp> metricLowerToNameMap =
semanticSchemaResp.getMetrics().stream().collect(Collectors
.toMap(entry -> entry.getBizName().toLowerCase(), entry -> entry));
return allFields.stream()
.filter(entry -> metricLowerToNameMap.containsKey(entry.toLowerCase()))
.map(entry -> metricLowerToNameMap.get(entry.toLowerCase()))
.collect(Collectors.toList());
}
private void convertNameToBizName(QueryStatement queryStatement) { private void convertNameToBizName(QueryStatement queryStatement) {
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp(); SemanticSchemaResp semanticSchema = queryStatement.getSemanticSchema();
Map<String, String> fieldNameToBizNameMap = getFieldNameToBizNameMap(semanticSchemaResp); Map<String, String> fieldNameToBizNameMap = semanticSchema.getNameToBizNameMap();
String sql = queryStatement.getSqlQueryParam().getSql(); String sql = queryStatement.getSqlQuery().getSql();
log.debug("dataSetId:{},convert name to bizName before:{}", queryStatement.getDataSetId(), log.debug("dataSetId:{},convert name to bizName before:{}", queryStatement.getDataSetId(),
sql); sql);
sql = SqlReplaceHelper.replaceFields(sql, fieldNameToBizNameMap, true); sql = SqlReplaceHelper.replaceFields(sql, fieldNameToBizNameMap, true);
@@ -145,42 +122,15 @@ public class SqlQueryConverter implements QueryConverter {
sql = SqlReplaceHelper.replaceTable(sql, sql = SqlReplaceHelper.replaceTable(sql,
Constants.TABLE_PREFIX + queryStatement.getDataSetId()); Constants.TABLE_PREFIX + queryStatement.getDataSetId());
log.debug("replaceTableName after:{}", sql); log.debug("replaceTableName after:{}", sql);
queryStatement.getSqlQueryParam().setSql(sql); queryStatement.getSqlQuery().setSql(sql);
} }
private void rewriteOrderBy(QueryStatement queryStatement) { private void rewriteOrderBy(QueryStatement queryStatement) {
// replace order by field with the select sequence number // replace order by field with the select sequence number
String sql = queryStatement.getSqlQueryParam().getSql(); String sql = queryStatement.getSqlQuery().getSql();
String newSql = SqlReplaceHelper.replaceAggAliasOrderbyField(sql); String newSql = SqlReplaceHelper.replaceAggAliasOrderbyField(sql);
log.debug("replaceOrderAggSameAlias {} -> {}", sql, newSql); log.debug("replaceOrderAggSameAlias {} -> {}", sql, newSql);
queryStatement.getSqlQueryParam().setSql(newSql); queryStatement.getSqlQuery().setSql(newSql);
}
protected Map<String, String> getFieldNameToBizNameMap(SemanticSchemaResp semanticSchemaResp) {
// support fieldName and field alias to bizName
Map<String, String> dimensionResults = semanticSchemaResp.getDimensions().stream().flatMap(
entry -> getPairStream(entry.getAlias(), entry.getName(), entry.getBizName()))
.collect(Collectors.toMap(Pair::getLeft, Pair::getRight, (k1, k2) -> k1));
Map<String, String> metricResults = semanticSchemaResp.getMetrics().stream().flatMap(
entry -> getPairStream(entry.getAlias(), entry.getName(), entry.getBizName()))
.collect(Collectors.toMap(Pair::getLeft, Pair::getRight, (k1, k2) -> k1));
dimensionResults.putAll(metricResults);
return dimensionResults;
}
private Stream<Pair<String, String>> getPairStream(String aliasStr, String name,
String bizName) {
Set<Pair<String, String>> elements = new HashSet<>();
elements.add(Pair.of(name, bizName));
if (StringUtils.isNotBlank(aliasStr)) {
List<String> aliasList = SchemaItem.getAliasList(aliasStr);
for (String alias : aliasList) {
elements.add(Pair.of(alias, bizName));
}
}
return elements.stream();
} }
} }

View File

@@ -3,8 +3,8 @@ package com.tencent.supersonic.headless.core.translator.converter;
import com.tencent.supersonic.headless.api.pojo.enums.ModelDefineType; import com.tencent.supersonic.headless.api.pojo.enums.ModelDefineType;
import com.tencent.supersonic.headless.api.pojo.response.ModelResp; import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp; import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.core.pojo.DataModel;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.utils.SqlVariableParseUtils; import com.tencent.supersonic.headless.core.utils.SqlVariableParseUtils;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
@@ -19,13 +19,12 @@ public class SqlVariableConverter implements QueryConverter {
@Override @Override
public boolean accept(QueryStatement queryStatement) { public boolean accept(QueryStatement queryStatement) {
return Objects.nonNull(queryStatement.getStructQueryParam()) return Objects.nonNull(queryStatement.getStructQuery()) && !queryStatement.getIsS2SQL();
&& !queryStatement.getIsS2SQL();
} }
@Override @Override
public void convert(QueryStatement queryStatement) { public void convert(QueryStatement queryStatement) {
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp(); SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchema();
List<ModelResp> modelResps = semanticSchemaResp.getModelResps(); List<ModelResp> modelResps = semanticSchemaResp.getModelResps();
if (CollectionUtils.isEmpty(modelResps)) { if (CollectionUtils.isEmpty(modelResps)) {
return; return;
@@ -36,7 +35,7 @@ public class SqlVariableConverter implements QueryConverter {
String sqlParsed = String sqlParsed =
SqlVariableParseUtils.parse(modelResp.getModelDetail().getSqlQuery(), SqlVariableParseUtils.parse(modelResp.getModelDetail().getSqlQuery(),
modelResp.getModelDetail().getSqlVariables(), modelResp.getModelDetail().getSqlVariables(),
queryStatement.getStructQueryParam().getParams()); queryStatement.getStructQuery().getParams());
DataModel dataModel = DataModel dataModel =
queryStatement.getOntology().getDataModelMap().get(modelResp.getBizName()); queryStatement.getOntology().getDataModelMap().get(modelResp.getBizName());
dataModel.setSqlQuery(sqlParsed); dataModel.setSqlQuery(sqlParsed);

View File

@@ -1,74 +1,58 @@
package com.tencent.supersonic.headless.core.translator.converter; package com.tencent.supersonic.headless.core.translator.converter;
import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.common.util.ContextUtils; import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.core.pojo.Database; import com.tencent.supersonic.headless.core.pojo.Database;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQueryParam; import com.tencent.supersonic.headless.core.pojo.SqlQuery;
import com.tencent.supersonic.headless.core.pojo.StructQueryParam; import com.tencent.supersonic.headless.core.pojo.StructQuery;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils; import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
import java.util.Objects; import java.util.Objects;
import java.util.stream.Collectors;
@Component("ParserDefaultConverter") /**
* This converter converts struct semantic query into sql query by generating S2SQL based on
* structured semantic information.
*/
@Component("StructQueryConverter")
@Slf4j @Slf4j
public class StructQueryConverter implements QueryConverter { public class StructQueryConverter implements QueryConverter {
@Override @Override
public boolean accept(QueryStatement queryStatement) { public boolean accept(QueryStatement queryStatement) {
return Objects.nonNull(queryStatement.getStructQueryParam()) return Objects.nonNull(queryStatement.getStructQuery()) && !queryStatement.getIsS2SQL();
&& !queryStatement.getIsS2SQL();
} }
@Override @Override
public void convert(QueryStatement queryStatement) throws Exception { public void convert(QueryStatement queryStatement) throws Exception {
SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class); SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class);
StructQueryParam structQueryParam = queryStatement.getStructQueryParam(); StructQuery structQuery = queryStatement.getStructQuery();
String dsTable = "t_1"; String dsTable = queryStatement.getDataSetName();
SqlQueryParam sqlParam = new SqlQueryParam(); if (Objects.isNull(dsTable)) {
sqlParam.setTable(dsTable); dsTable = "t_ds_temp";
}
SqlQuery sqlQuery = new SqlQuery();
sqlQuery.setTable(dsTable);
String sql = String.format("select %s from %s %s %s %s", String sql = String.format("select %s from %s %s %s %s",
sqlGenerateUtils.getSelect(structQueryParam), dsTable, sqlGenerateUtils.getSelect(structQuery), dsTable,
sqlGenerateUtils.getGroupBy(structQueryParam), sqlGenerateUtils.getGroupBy(structQuery), sqlGenerateUtils.getOrderBy(structQuery),
sqlGenerateUtils.getOrderBy(structQueryParam), sqlGenerateUtils.getLimit(structQuery));
sqlGenerateUtils.getLimit(structQueryParam));
Database database = queryStatement.getOntology().getDatabase(); Database database = queryStatement.getOntology().getDatabase();
if (!sqlGenerateUtils.isSupportWith(database.getType(), database.getVersion())) { if (!sqlGenerateUtils.isSupportWith(database.getType(), database.getVersion())) {
sqlParam.setSupportWith(false); sqlQuery.setSupportWith(false);
sql = String.format("select %s from %s t0 %s %s %s", sql = String.format("select %s from %s t0 %s %s %s",
sqlGenerateUtils.getSelect(structQueryParam), dsTable, sqlGenerateUtils.getSelect(structQuery), dsTable,
sqlGenerateUtils.getGroupBy(structQueryParam), sqlGenerateUtils.getGroupBy(structQuery),
sqlGenerateUtils.getOrderBy(structQueryParam), sqlGenerateUtils.getOrderBy(structQuery),
sqlGenerateUtils.getLimit(structQueryParam)); sqlGenerateUtils.getLimit(structQuery));
} }
sqlParam.setSql(sql); sqlQuery.setSql(sql);
queryStatement.setSqlQueryParam(sqlParam); queryStatement.setSqlQuery(sqlQuery);
queryStatement.setIsS2SQL(true);
OntologyQueryParam ontologyQueryParam = new OntologyQueryParam(); log.info("parse structQuery [{}] ", queryStatement.getSqlQuery());
ontologyQueryParam.getDimensions().addAll(structQueryParam.getGroups());
ontologyQueryParam.getMetrics().addAll(structQueryParam.getAggregators().stream()
.map(Aggregator::getColumn).collect(Collectors.toList()));
String where = sqlGenerateUtils.generateWhere(structQueryParam, null);
ontologyQueryParam.setWhere(where);
if (ontologyQueryParam.getMetrics().isEmpty()) {
ontologyQueryParam.setAggOption(AggOption.NATIVE);
} else {
ontologyQueryParam.setAggOption(AggOption.DEFAULT);
}
ontologyQueryParam.setNativeQuery(structQueryParam.getQueryType().isNativeAggQuery());
ontologyQueryParam.setOrder(structQueryParam.getOrders().stream()
.map(order -> new ColumnOrder(order.getColumn(), order.getDirection()))
.collect(Collectors.toList()));
ontologyQueryParam.setLimit(structQueryParam.getLimit());
queryStatement.setOntologyQueryParam(ontologyQueryParam);
log.info("parse structQuery [{}] ", queryStatement.getSqlQueryParam());
} }
} }

View File

@@ -16,7 +16,7 @@ public class DbDialectOptimizer implements QueryOptimizer {
@Override @Override
public void rewrite(QueryStatement queryStatement) { public void rewrite(QueryStatement queryStatement) {
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp(); SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchema();
DatabaseResp database = semanticSchemaResp.getDatabaseResp(); DatabaseResp database = semanticSchemaResp.getDatabaseResp();
String sql = queryStatement.getSql(); String sql = queryStatement.getSql();
if (Objects.isNull(database) || Objects.isNull(database.getType())) { if (Objects.isNull(database) || Objects.isNull(database.getType())) {

View File

@@ -1,7 +1,7 @@
package com.tencent.supersonic.headless.core.translator.optimizer; package com.tencent.supersonic.headless.core.translator.optimizer;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.StructQueryParam; import com.tencent.supersonic.headless.core.pojo.StructQuery;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
@@ -15,7 +15,7 @@ public class DetailQueryOptimizer implements QueryOptimizer {
@Override @Override
public void rewrite(QueryStatement queryStatement) { public void rewrite(QueryStatement queryStatement) {
StructQueryParam structQueryParam = queryStatement.getStructQueryParam(); StructQuery structQuery = queryStatement.getStructQuery();
String sqlRaw = queryStatement.getSql().trim(); String sqlRaw = queryStatement.getSql().trim();
if (StringUtils.isEmpty(sqlRaw)) { if (StringUtils.isEmpty(sqlRaw)) {
throw new RuntimeException("sql is empty or null"); throw new RuntimeException("sql is empty or null");
@@ -33,8 +33,7 @@ public class DetailQueryOptimizer implements QueryOptimizer {
log.debug("after handleNoMetric, sql:{}", queryStatement.getSql()); log.debug("after handleNoMetric, sql:{}", queryStatement.getSql());
} }
public boolean isDetailQuery(StructQueryParam structQueryParam) { public boolean isDetailQuery(StructQuery structQuery) {
return Objects.nonNull(structQueryParam) return Objects.nonNull(structQuery) && structQuery.getQueryType().isNativeAggQuery();
&& structQueryParam.getQueryType().isNativeAggQuery();
} }
} }

View File

@@ -2,7 +2,9 @@ package com.tencent.supersonic.headless.core.translator.parser;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
/** A query parser generates physical SQL for the QueryStatement. */ /**
* A query parser generates physical SQL for the QueryStatement.
*/
public interface QueryParser { public interface QueryParser {
void parse(QueryStatement queryStatement) throws Exception; void parse(QueryStatement queryStatement) throws Exception;
} }

View File

@@ -1,8 +1,8 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite; package com.tencent.supersonic.headless.core.translator.parser.calcite;
import com.tencent.supersonic.headless.core.pojo.Ontology;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.parser.QueryParser; import com.tencent.supersonic.headless.core.translator.parser.QueryParser;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Ontology;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;

View File

@@ -1,10 +1,10 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite; package com.tencent.supersonic.headless.core.translator.parser.calcite;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel; import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Dimension; import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.JoinRelation; import com.tencent.supersonic.headless.core.pojo.DataModel;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric; import com.tencent.supersonic.headless.core.pojo.JoinRelation;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Ontology; import com.tencent.supersonic.headless.core.pojo.Ontology;
import lombok.Builder; import lombok.Builder;
import lombok.Data; import lombok.Data;
import org.apache.calcite.schema.Schema; import org.apache.calcite.schema.Schema;
@@ -33,11 +33,11 @@ public class S2CalciteSchema extends AbstractSchema {
return ontology.getDataModelMap(); return ontology.getDataModelMap();
} }
public List<Metric> getMetrics() { public List<MetricSchemaResp> getMetrics() {
return ontology.getMetrics(); return ontology.getMetrics();
} }
public Map<String, List<Dimension>> getDimensions() { public Map<String, List<DimSchemaResp>> getDimensions() {
return ontology.getDimensionMap(); return ontology.getDimensionMap();
} }

View File

@@ -3,29 +3,30 @@ package com.tencent.supersonic.headless.core.translator.parser.calcite;
import com.tencent.supersonic.common.calcite.Configuration; import com.tencent.supersonic.common.calcite.Configuration;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption; import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.core.pojo.DataModel;
import com.tencent.supersonic.headless.core.pojo.Database; import com.tencent.supersonic.headless.core.pojo.Database;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.DataModelNode; import com.tencent.supersonic.headless.core.translator.parser.calcite.node.DataModelNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.SemanticNode; import com.tencent.supersonic.headless.core.translator.parser.calcite.node.SemanticNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.render.FilterRender;
import com.tencent.supersonic.headless.core.translator.parser.calcite.render.OutputRender;
import com.tencent.supersonic.headless.core.translator.parser.calcite.render.Renderer; import com.tencent.supersonic.headless.core.translator.parser.calcite.render.Renderer;
import com.tencent.supersonic.headless.core.translator.parser.calcite.render.SourceRender; import com.tencent.supersonic.headless.core.translator.parser.calcite.render.SourceRender;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants; import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParser; import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.validate.SqlValidatorScope; import org.apache.calcite.sql.validate.SqlValidatorScope;
import java.util.*; import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.Objects;
@Slf4j @Slf4j
public class SqlBuilder { public class SqlBuilder {
private final S2CalciteSchema schema; private final S2CalciteSchema schema;
private OntologyQueryParam ontologyQueryParam; private OntologyQuery ontologyQuery;
private SqlValidatorScope scope; private SqlValidatorScope scope;
private SqlNode parserNode; private SqlNode parserNode;
private boolean isAgg = false; private boolean isAgg = false;
@@ -36,11 +37,11 @@ public class SqlBuilder {
} }
public String buildOntologySql(QueryStatement queryStatement) throws Exception { public String buildOntologySql(QueryStatement queryStatement) throws Exception {
this.ontologyQueryParam = queryStatement.getOntologyQueryParam(); this.ontologyQuery = queryStatement.getOntologyQuery();
if (ontologyQueryParam.getLimit() == null) { if (ontologyQuery.getLimit() == null) {
ontologyQueryParam.setLimit(0L); ontologyQuery.setLimit(0L);
} }
this.aggOption = ontologyQueryParam.getAggOption(); this.aggOption = ontologyQuery.getAggOption();
buildParseNode(); buildParseNode();
Database database = queryStatement.getOntology().getDatabase(); Database database = queryStatement.getOntology().getDatabase();
@@ -52,7 +53,7 @@ public class SqlBuilder {
// find relevant data models // find relevant data models
scope = SchemaBuilder.getScope(schema); scope = SchemaBuilder.getScope(schema);
List<DataModel> dataModels = List<DataModel> dataModels =
DataModelNode.getQueryDataModels(scope, schema, ontologyQueryParam); DataModelNode.getQueryDataModelsV2(schema.getOntology(), ontologyQuery);
if (dataModels == null || dataModels.isEmpty()) { if (dataModels == null || dataModels.isEmpty()) {
throw new Exception("data model not found"); throw new Exception("data model not found");
} }
@@ -61,23 +62,20 @@ public class SqlBuilder {
// build level by level // build level by level
LinkedList<Renderer> builders = new LinkedList<>(); LinkedList<Renderer> builders = new LinkedList<>();
builders.add(new SourceRender()); builders.add(new SourceRender());
builders.add(new FilterRender());
builders.add(new OutputRender());
ListIterator<Renderer> it = builders.listIterator(); ListIterator<Renderer> it = builders.listIterator();
int i = 0; int i = 0;
Renderer previous = null; Renderer previous = null;
while (it.hasNext()) { while (it.hasNext()) {
Renderer renderer = it.next(); Renderer renderer = it.next();
if (previous != null) { if (previous != null) {
previous.render(ontologyQueryParam, dataModels, scope, schema, !isAgg); previous.render(ontologyQuery, dataModels, scope, schema, !isAgg);
renderer.setTable(previous renderer.setTable(previous.builderAs(DataModelNode.getNames(dataModels) + "_" + i));
.builderAs(DataModelNode.getNames(dataModels) + "_" + String.valueOf(i)));
i++; i++;
} }
previous = renderer; previous = renderer;
} }
builders.getLast().render(ontologyQueryParam, dataModels, scope, schema, !isAgg); builders.getLast().render(ontologyQuery, dataModels, scope, schema, !isAgg);
parserNode = builders.getLast().builder(); parserNode = builders.getLast().build();
} }
private boolean getAgg(DataModel dataModel) { private boolean getAgg(DataModel dataModel) {
@@ -87,7 +85,7 @@ public class SqlBuilder {
// default by dataModel time aggregation // default by dataModel time aggregation
if (Objects.nonNull(dataModel.getAggTime()) && !dataModel.getAggTime() if (Objects.nonNull(dataModel.getAggTime()) && !dataModel.getAggTime()
.equalsIgnoreCase(Constants.DIMENSION_TYPE_TIME_GRANULARITY_NONE)) { .equalsIgnoreCase(Constants.DIMENSION_TYPE_TIME_GRANULARITY_NONE)) {
if (!ontologyQueryParam.isNativeQuery()) { if (!ontologyQuery.isNativeQuery()) {
return true; return true;
} }
} }

View File

@@ -1,54 +1,42 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite; package com.tencent.supersonic.headless.core.translator.parser.calcite;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel; import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.tencent.supersonic.headless.core.pojo.DataModel;
import lombok.Data; import lombok.Data;
import org.apache.calcite.sql.SqlBasicCall; import org.apache.calcite.sql.*;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.SqlSelect;
import org.apache.calcite.sql.parser.SqlParserPos; import org.apache.calcite.sql.parser.SqlParserPos;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Set;
import java.util.stream.Collectors; import java.util.stream.Collectors;
/** basic query project */ /** basic query project */
@Data @Data
public class TableView { public class TableView {
private List<SqlNode> filter = new ArrayList<>(); private Set<String> fields = Sets.newHashSet();
private List<SqlNode> dimension = new ArrayList<>(); private List<SqlNode> dimension = new ArrayList<>();
private List<SqlNode> measure = new ArrayList<>(); private List<SqlNode> metric = new ArrayList<>();
private SqlNodeList order; private SqlNodeList order;
private SqlNode fetch; private SqlNode fetch;
private SqlNode offset; private SqlNode offset;
private SqlNode table; private SqlNode table;
private List<SqlNode> select = Lists.newArrayList();
private String alias; private String alias;
private List<String> primary; private List<String> primary;
private DataModel dataModel; private DataModel dataModel;
public SqlNode build() { public SqlNode build() {
measure.addAll(dimension); List<SqlNode> selectNodeList = new ArrayList<>();
SqlNodeList dimensionNodeList = null; selectNodeList.addAll(metric);
if (dimension.size() > 0) { selectNodeList.addAll(dimension);
dimensionNodeList = new SqlNodeList(getGroup(dimension), SqlParserPos.ZERO); selectNodeList.addAll(select);
} return new SqlSelect(SqlParserPos.ZERO, null,
SqlNodeList filterNodeList = null; new SqlNodeList(selectNodeList, SqlParserPos.ZERO), table, null, null, null, null,
if (filter.size() > 0) { null, order, offset, fetch, null);
filterNodeList = new SqlNodeList(filter, SqlParserPos.ZERO);
}
return new SqlSelect(SqlParserPos.ZERO, null, new SqlNodeList(measure, SqlParserPos.ZERO),
table, filterNodeList, dimensionNodeList, null, null, null, order, offset, fetch,
null);
} }
private List<SqlNode> getGroup(List<SqlNode> sqlNodeList) {
return sqlNodeList.stream()
.map(s -> (s.getKind().equals(SqlKind.AS)
? ((SqlBasicCall) s).getOperandList().get(0)
: s))
.collect(Collectors.toList());
}
} }

View File

@@ -1,12 +1,21 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.node; package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.tencent.supersonic.common.calcite.Configuration; import com.tencent.supersonic.common.calcite.Configuration;
import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper; import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema; import com.tencent.supersonic.headless.api.pojo.Identify;
import com.tencent.supersonic.headless.api.pojo.Measure;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import com.tencent.supersonic.headless.core.pojo.DataModel;
import com.tencent.supersonic.headless.core.pojo.JoinRelation;
import com.tencent.supersonic.headless.core.pojo.Ontology;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.translator.parser.calcite.SchemaBuilder; import com.tencent.supersonic.headless.core.translator.parser.calcite.SchemaBuilder;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.*; import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.*; import org.apache.calcite.sql.*;
import org.apache.calcite.sql.parser.SqlParser; import org.apache.calcite.sql.parser.SqlParser;
@@ -61,7 +70,7 @@ public class DataModelNode extends SemanticNode {
Set<String> dimensions = new HashSet<>(); Set<String> dimensions = new HashSet<>();
Set<String> metrics = new HashSet<>(); Set<String> metrics = new HashSet<>();
EngineType engineType = EngineType.fromString(datasource.getType()); EngineType engineType = EngineType.fromString(datasource.getType());
for (Dimension d : datasource.getDimensions()) { for (DimSchemaResp d : datasource.getDimensions()) {
List<SqlNode> identifiers = List<SqlNode> identifiers =
expand(SemanticNode.parse(d.getExpr(), scope, engineType), scope); expand(SemanticNode.parse(d.getExpr(), scope, engineType), scope);
identifiers.forEach(i -> dimensions.add(i.toString())); identifiers.forEach(i -> dimensions.add(i.toString()));
@@ -130,46 +139,36 @@ public class DataModelNode extends SemanticNode {
return dataModelList.stream().map(DataModel::getName).collect(Collectors.joining("_")); return dataModelList.stream().map(DataModel::getName).collect(Collectors.joining("_"));
} }
public static void getQueryDimensionMeasure(Ontology ontology, OntologyQueryParam queryParam, public static void getQueryDimensionMeasure(Ontology ontology, OntologyQuery ontologyQuery,
Set<String> queryDimensions, Set<String> queryMeasures) { Set<String> queryDimensions, Set<String> queryMeasures) {
queryDimensions.addAll(queryParam.getDimensions().stream() ontologyQuery.getMetrics().forEach(m -> {
.map(d -> d.contains(Constants.DIMENSION_IDENTIFY) if (Objects.nonNull(m.getMetricDefineByMeasureParams())) {
? d.split(Constants.DIMENSION_IDENTIFY)[1] m.getMetricDefineByMeasureParams().getMeasures()
: d) .forEach(mm -> queryMeasures.add(mm.getName()));
.collect(Collectors.toSet())); }
Set<String> schemaMetricName = if (Objects.nonNull(m.getMetricDefineByFieldParams())) {
ontology.getMetrics().stream().map(Metric::getName).collect(Collectors.toSet()); m.getMetricDefineByFieldParams().getFields()
ontology.getMetrics().stream().filter(m -> queryParam.getMetrics().contains(m.getName())) .forEach(mm -> queryMeasures.add(mm.getFieldName()));
.forEach(m -> { }
if (!CollectionUtils.isEmpty(m.getMetricTypeParams().getMeasures())) { });
m.getMetricTypeParams().getMeasures()
.forEach(mm -> queryMeasures.add(mm.getName()));
}
if (!CollectionUtils.isEmpty(m.getMetricTypeParams().getFields())) {
m.getMetricTypeParams().getFields()
.forEach(mm -> queryMeasures.add(mm.getName()));
}
});
queryParam.getMetrics().stream().filter(m -> !schemaMetricName.contains(m))
.forEach(queryMeasures::add);
} }
public static void mergeQueryFilterDimensionMeasure(Ontology ontology, public static void mergeQueryFilterDimensionMeasure(Ontology ontology,
OntologyQueryParam queryParam, Set<String> dimensions, Set<String> measures, OntologyQuery ontologyQuery, Set<String> dimensions, Set<String> measures,
SqlValidatorScope scope) throws Exception { SqlValidatorScope scope) throws Exception {
EngineType engineType = ontology.getDatabase().getType(); EngineType engineType = ontology.getDatabase().getType();
if (Objects.nonNull(queryParam.getWhere()) && !queryParam.getWhere().isEmpty()) { if (Objects.nonNull(ontologyQuery.getWhere()) && !ontologyQuery.getWhere().isEmpty()) {
Set<String> filterConditions = new HashSet<>(); Set<String> filterConditions = new HashSet<>();
FilterNode.getFilterField(parse(queryParam.getWhere(), scope, engineType), FilterNode.getFilterField(parse(ontologyQuery.getWhere(), scope, engineType),
filterConditions); filterConditions);
Set<String> queryMeasures = new HashSet<>(measures); Set<String> queryMeasures = new HashSet<>(measures);
Set<String> schemaMetricName = Set<String> schemaMetricName = ontology.getMetrics().stream()
ontology.getMetrics().stream().map(Metric::getName).collect(Collectors.toSet()); .map(MetricSchemaResp::getName).collect(Collectors.toSet());
for (String filterCondition : filterConditions) { for (String filterCondition : filterConditions) {
if (schemaMetricName.contains(filterCondition)) { if (schemaMetricName.contains(filterCondition)) {
ontology.getMetrics().stream() ontology.getMetrics().stream()
.filter(m -> m.getName().equalsIgnoreCase(filterCondition)) .filter(m -> m.getName().equalsIgnoreCase(filterCondition))
.forEach(m -> m.getMetricTypeParams().getMeasures() .forEach(m -> m.getMetricDefineByMeasureParams().getMeasures()
.forEach(mm -> queryMeasures.add(mm.getName()))); .forEach(mm -> queryMeasures.add(mm.getName())));
continue; continue;
} }
@@ -180,18 +179,51 @@ public class DataModelNode extends SemanticNode {
} }
} }
public static List<DataModel> getQueryDataModels(SqlValidatorScope scope, public static List<DataModel> getQueryDataModelsV2(Ontology ontology, OntologyQuery query) {
S2CalciteSchema schema, OntologyQueryParam queryParam) throws Exception { // first, sort models based on the number of query metrics
Ontology ontology = schema.getOntology(); Map<String, Integer> modelMetricCount = Maps.newHashMap();
query.getMetrics().forEach(m -> {
if (!modelMetricCount.containsKey(m.getModelBizName())) {
modelMetricCount.put(m.getModelBizName(), 1);
} else {
int count = modelMetricCount.get(m.getModelBizName());
modelMetricCount.put(m.getModelBizName(), count + 1);
}
});
List<String> metricsDataModels = modelMetricCount.entrySet().stream()
.sorted(Map.Entry.comparingByValue(Comparator.reverseOrder())).map(e -> e.getKey())
.collect(Collectors.toList());
// first, sort models based on the number of query dimensions
Map<String, Integer> modelDimCount = Maps.newHashMap();
query.getDimensions().forEach(m -> {
if (!modelDimCount.containsKey(m.getModelBizName())) {
modelDimCount.put(m.getModelBizName(), 1);
} else {
int count = modelDimCount.get(m.getModelBizName());
modelDimCount.put(m.getModelBizName(), count + 1);
}
});
List<String> dimDataModels = modelDimCount.entrySet().stream()
.sorted(Map.Entry.comparingByValue(Comparator.reverseOrder())).map(e -> e.getKey())
.collect(Collectors.toList());
Set<String> dataModelNames = Sets.newLinkedHashSet();
dataModelNames.addAll(metricsDataModels);
dataModelNames.addAll(dimDataModels);
return dataModelNames.stream().map(bizName -> ontology.getDataModelMap().get(bizName))
.collect(Collectors.toList());
}
public static List<DataModel> getQueryDataModels(Ontology ontology,
OntologyQuery ontologyQuery) {
// get query measures and dimensions // get query measures and dimensions
Set<String> queryMeasures = new HashSet<>(); Set<String> queryMeasures = new HashSet<>();
Set<String> queryDimensions = new HashSet<>(); Set<String> queryDimensions = new HashSet<>();
getQueryDimensionMeasure(ontology, queryParam, queryDimensions, queryMeasures); getQueryDimensionMeasure(ontology, ontologyQuery, queryDimensions, queryMeasures);
mergeQueryFilterDimensionMeasure(ontology, queryParam, queryDimensions, queryMeasures,
scope);
// first, find the base model // first, find the base model
DataModel baseDataModel = findBaseModel(ontology, queryMeasures, queryDimensions); DataModel baseDataModel = findBaseModel(ontology, ontologyQuery);
if (Objects.isNull(baseDataModel)) { if (Objects.isNull(baseDataModel)) {
throw new RuntimeException( throw new RuntimeException(
String.format("could not find matching dataModel, dimensions:%s, measures:%s", String.format("could not find matching dataModel, dimensions:%s, measures:%s",
@@ -204,7 +236,7 @@ public class DataModelNode extends SemanticNode {
} }
// second, traverse the ontology to find other related dataModels // second, traverse the ontology to find other related dataModels
List<DataModel> relatedDataModels = findRelatedModelsByRelation(ontology, queryParam, List<DataModel> relatedDataModels = findRelatedModelsByRelation(ontology, ontologyQuery,
baseDataModel, queryDimensions, queryMeasures); baseDataModel, queryDimensions, queryMeasures);
if (CollectionUtils.isEmpty(relatedDataModels)) { if (CollectionUtils.isEmpty(relatedDataModels)) {
relatedDataModels = findRelatedModelsByIdentifier(ontology, baseDataModel, relatedDataModels = findRelatedModelsByIdentifier(ontology, baseDataModel,
@@ -218,6 +250,45 @@ public class DataModelNode extends SemanticNode {
return relatedDataModels; return relatedDataModels;
} }
private static DataModel findBaseModel(Ontology ontology, OntologyQuery query) {
DataModel dataModel = null;
// first, try to find the model with the most query metrics
Map<String, Integer> modelMetricCount = Maps.newHashMap();
query.getMetrics().forEach(m -> {
if (!modelMetricCount.containsKey(m.getModelBizName())) {
modelMetricCount.put(m.getModelBizName(), 1);
} else {
int count = modelMetricCount.get(m.getModelBizName());
modelMetricCount.put(m.getModelBizName(), count + 1);
}
});
Optional<String> baseModelName = modelMetricCount.entrySet().stream()
.sorted(Map.Entry.comparingByValue(Comparator.reverseOrder())).map(e -> e.getKey())
.findFirst();
if (baseModelName.isPresent()) {
dataModel = ontology.getDataModelMap().get(baseModelName.get());
} else {
// second, try to find the model with the most query dimensions
Map<String, Integer> modelDimCount = Maps.newHashMap();
query.getDimensions().forEach(m -> {
if (!modelDimCount.containsKey(m.getModelBizName())) {
modelDimCount.put(m.getModelBizName(), 1);
} else {
int count = modelDimCount.get(m.getModelBizName());
modelDimCount.put(m.getModelBizName(), count + 1);
}
});
baseModelName = modelMetricCount.entrySet().stream()
.sorted(Map.Entry.comparingByValue(Comparator.reverseOrder()))
.map(e -> e.getKey()).findFirst();
if (baseModelName.isPresent()) {
dataModel = ontology.getDataModelMap().get(baseModelName.get());
}
}
return dataModel;
}
private static DataModel findBaseModel(Ontology ontology, Set<String> queryMeasures, private static DataModel findBaseModel(Ontology ontology, Set<String> queryMeasures,
Set<String> queryDimensions) { Set<String> queryDimensions) {
DataModel dataModel = null; DataModel dataModel = null;
@@ -239,8 +310,9 @@ public class DataModelNode extends SemanticNode {
} else { } else {
// second, try to find the model with the most matching dimensions // second, try to find the model with the most matching dimensions
Map<String, Integer> dataModelDimCount = new HashMap<>(); Map<String, Integer> dataModelDimCount = new HashMap<>();
for (Map.Entry<String, List<Dimension>> entry : ontology.getDimensionMap().entrySet()) { for (Map.Entry<String, List<DimSchemaResp>> entry : ontology.getDimensionMap()
Set<String> modelDimensions = entry.getValue().stream().map(Dimension::getName) .entrySet()) {
Set<String> modelDimensions = entry.getValue().stream().map(DimSchemaResp::getName)
.collect(Collectors.toSet()); .collect(Collectors.toSet());
modelDimensions.retainAll(queryDimensions); modelDimensions.retainAll(queryDimensions);
dataModelDimCount.put(entry.getKey(), modelDimensions.size()); dataModelDimCount.put(entry.getKey(), modelDimensions.size());
@@ -261,8 +333,8 @@ public class DataModelNode extends SemanticNode {
boolean isAllMatch = true; boolean isAllMatch = true;
Set<String> baseMeasures = baseDataModel.getMeasures().stream().map(Measure::getName) Set<String> baseMeasures = baseDataModel.getMeasures().stream().map(Measure::getName)
.collect(Collectors.toSet()); .collect(Collectors.toSet());
Set<String> baseDimensions = baseDataModel.getDimensions().stream().map(Dimension::getName) Set<String> baseDimensions = baseDataModel.getDimensions().stream()
.collect(Collectors.toSet()); .map(DimSchemaResp::getName).collect(Collectors.toSet());
baseDataModel.getIdentifiers().forEach(i -> baseDimensions.add(i.getName())); baseDataModel.getIdentifiers().forEach(i -> baseDimensions.add(i.getName()));
baseMeasures.retainAll(queryMeasures); baseMeasures.retainAll(queryMeasures);
@@ -290,7 +362,7 @@ public class DataModelNode extends SemanticNode {
} }
private static List<DataModel> findRelatedModelsByRelation(Ontology ontology, private static List<DataModel> findRelatedModelsByRelation(Ontology ontology,
OntologyQueryParam queryParam, DataModel baseDataModel, Set<String> queryDimensions, OntologyQuery ontologyQuery, DataModel baseDataModel, Set<String> queryDimensions,
Set<String> queryMeasures) { Set<String> queryMeasures) {
Set<String> joinDataModelNames = new HashSet<>(); Set<String> joinDataModelNames = new HashSet<>();
List<DataModel> joinDataModels = new ArrayList<>(); List<DataModel> joinDataModels = new ArrayList<>();
@@ -318,13 +390,13 @@ public class DataModelNode extends SemanticNode {
: joinRelation.getJoinCondition().get(0).getLeft(); : joinRelation.getJoinCondition().get(0).getLeft();
if (!queryDimensions.isEmpty()) { if (!queryDimensions.isEmpty()) {
Set<String> linkDimension = other.getDimensions().stream() Set<String> linkDimension = other.getDimensions().stream()
.map(Dimension::getName).collect(Collectors.toSet()); .map(DimSchemaResp::getName).collect(Collectors.toSet());
other.getIdentifiers().forEach(i -> linkDimension.add(i.getName())); other.getIdentifiers().forEach(i -> linkDimension.add(i.getName()));
linkDimension.retainAll(queryDimensions); linkDimension.retainAll(queryDimensions);
if (!linkDimension.isEmpty()) { if (!linkDimension.isEmpty()) {
isMatch = true; isMatch = true;
// joinDim should be added to the query dimension // joinDim should be added to the query dimension
queryParam.getDimensions().add(joinDimName); // ontologyQuery.getDimensions().add(joinDimName);
} }
} }
Set<String> linkMeasure = other.getMeasures().stream().map(Measure::getName) Set<String> linkMeasure = other.getMeasures().stream().map(Measure::getName)
@@ -335,7 +407,7 @@ public class DataModelNode extends SemanticNode {
} }
if (!isMatch && ontology.getDimensionMap().containsKey(other.getName())) { if (!isMatch && ontology.getDimensionMap().containsKey(other.getName())) {
Set<String> linkDimension = ontology.getDimensionMap().get(other.getName()) Set<String> linkDimension = ontology.getDimensionMap().get(other.getName())
.stream().map(Dimension::getName).collect(Collectors.toSet()); .stream().map(DimSchemaResp::getName).collect(Collectors.toSet());
linkDimension.retainAll(queryDimensions); linkDimension.retainAll(queryDimensions);
if (!linkDimension.isEmpty()) { if (!linkDimension.isEmpty()) {
isMatch = true; isMatch = true;
@@ -408,7 +480,7 @@ public class DataModelNode extends SemanticNode {
boolean isMatch = false; boolean isMatch = false;
if (!queryDimension.isEmpty()) { if (!queryDimension.isEmpty()) {
Set<String> linkDimension = entry.getValue().getDimensions().stream() Set<String> linkDimension = entry.getValue().getDimensions().stream()
.map(Dimension::getName).collect(Collectors.toSet()); .map(DimSchemaResp::getName).collect(Collectors.toSet());
entry.getValue().getIdentifiers().forEach(i -> linkDimension.add(i.getName())); entry.getValue().getIdentifiers().forEach(i -> linkDimension.add(i.getName()));
linkDimension.retainAll(queryDimension); linkDimension.retainAll(queryDimension);
if (!linkDimension.isEmpty()) { if (!linkDimension.isEmpty()) {
@@ -428,9 +500,9 @@ public class DataModelNode extends SemanticNode {
} }
} }
} }
for (Map.Entry<String, List<Dimension>> entry : ontology.getDimensionMap().entrySet()) { for (Map.Entry<String, List<DimSchemaResp>> entry : ontology.getDimensionMap().entrySet()) {
if (!queryDimension.isEmpty()) { if (!queryDimension.isEmpty()) {
Set<String> linkDimension = entry.getValue().stream().map(Dimension::getName) Set<String> linkDimension = entry.getValue().stream().map(DimSchemaResp::getName)
.collect(Collectors.toSet()); .collect(Collectors.toSet());
linkDimension.retainAll(queryDimension); linkDimension.retainAll(queryDimension);
if (!linkDimension.isEmpty()) { if (!linkDimension.isEmpty()) {

View File

@@ -1,8 +1,9 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.node; package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
import com.tencent.supersonic.common.pojo.enums.DataTypeEnums;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants; import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Dimension;
import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope; import org.apache.calcite.sql.validate.SqlValidatorScope;
@@ -11,40 +12,35 @@ import java.util.Objects;
public class DimensionNode extends SemanticNode { public class DimensionNode extends SemanticNode {
public static SqlNode build(Dimension dimension, SqlValidatorScope scope, EngineType engineType) public static SqlNode build(DimSchemaResp dimension, SqlValidatorScope scope,
throws Exception { EngineType engineType) throws Exception {
SqlNode sqlNode = parse(dimension.getExpr(), scope, engineType); return parse(dimension.getExpr(), scope, engineType);
return buildAs(dimension.getName(), sqlNode);
} }
public static List<SqlNode> expand(Dimension dimension, SqlValidatorScope scope, public static List<SqlNode> expand(DimSchemaResp dimension, SqlValidatorScope scope,
EngineType engineType) throws Exception { EngineType engineType) throws Exception {
SqlNode sqlNode = parse(dimension.getExpr(), scope, engineType); SqlNode sqlNode = parse(dimension.getExpr(), scope, engineType);
return expand(sqlNode, scope); return expand(sqlNode, scope);
} }
public static SqlNode buildName(Dimension dimension, SqlValidatorScope scope, public static SqlNode buildName(DimSchemaResp dimension, SqlValidatorScope scope,
EngineType engineType) throws Exception {
return parse(dimension.getName(), scope, engineType);
}
public static SqlNode buildExp(Dimension dimension, SqlValidatorScope scope,
EngineType engineType) throws Exception { EngineType engineType) throws Exception {
return parse(dimension.getExpr(), scope, engineType); return parse(dimension.getExpr(), scope, engineType);
} }
public static SqlNode buildNameAs(String alias, Dimension dimension, SqlValidatorScope scope, public static SqlNode buildNameAs(String alias, DimSchemaResp dimension,
EngineType engineType) throws Exception { SqlValidatorScope scope, EngineType engineType) throws Exception {
if ("".equals(alias)) { if ("".equals(alias)) {
return buildName(dimension, scope, engineType); return buildName(dimension, scope, engineType);
} }
SqlNode sqlNode = parse(dimension.getName(), scope, engineType); SqlNode sqlNode = parse(dimension.getExpr(), scope, engineType);
return buildAs(alias, sqlNode); return buildAs(alias, sqlNode);
} }
public static SqlNode buildArray(Dimension dimension, SqlValidatorScope scope, public static SqlNode buildArray(DimSchemaResp dimension, SqlValidatorScope scope,
EngineType engineType) throws Exception { EngineType engineType) throws Exception {
if (Objects.nonNull(dimension.getDataType()) && dimension.getDataType().isArray()) { if (Objects.nonNull(dimension.getDataType())
&& dimension.getDataType().equals(DataTypeEnums.ARRAY)) {
SqlNode sqlNode = parse(dimension.getExpr(), scope, engineType); SqlNode sqlNode = parse(dimension.getExpr(), scope, engineType);
if (isIdentifier(sqlNode)) { if (isIdentifier(sqlNode)) {
return buildAs(dimension.getName(), return buildAs(dimension.getName(),

View File

@@ -22,7 +22,4 @@ public class FilterNode extends SemanticNode {
} }
} }
public static boolean isMatchDataSource(Set<String> measures) {
return false;
}
} }

View File

@@ -1,14 +1,13 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.node; package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Identify; import com.tencent.supersonic.headless.api.pojo.Identify;
import com.tencent.supersonic.headless.api.pojo.enums.IdentifyType;
import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope; import org.apache.calcite.sql.validate.SqlValidatorScope;
import java.util.List; import java.util.List;
import java.util.Optional; import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
public class IdentifyNode extends SemanticNode { public class IdentifyNode extends SemanticNode {
@@ -17,16 +16,11 @@ public class IdentifyNode extends SemanticNode {
return parse(identify.getName(), scope, engineType); return parse(identify.getName(), scope, engineType);
} }
public static Set<String> getIdentifyNames(List<Identify> identifies, Identify.Type type) {
return identifies.stream().filter(i -> type.name().equalsIgnoreCase(i.getType()))
.map(i -> i.getName()).collect(Collectors.toSet());
}
public static boolean isForeign(String name, List<Identify> identifies) { public static boolean isForeign(String name, List<Identify> identifies) {
Optional<Identify> identify = Optional<Identify> identify =
identifies.stream().filter(i -> i.getName().equalsIgnoreCase(name)).findFirst(); identifies.stream().filter(i -> i.getName().equalsIgnoreCase(name)).findFirst();
if (identify.isPresent()) { if (identify.isPresent()) {
return Identify.Type.FOREIGN.name().equalsIgnoreCase(identify.get().getType()); return IdentifyType.foreign.equals(identify.get().getType());
} }
return false; return false;
} }
@@ -35,7 +29,7 @@ public class IdentifyNode extends SemanticNode {
Optional<Identify> identify = Optional<Identify> identify =
identifies.stream().filter(i -> i.getName().equalsIgnoreCase(name)).findFirst(); identifies.stream().filter(i -> i.getName().equalsIgnoreCase(name)).findFirst();
if (identify.isPresent()) { if (identify.isPresent()) {
return Identify.Type.PRIMARY.name().equalsIgnoreCase(identify.get().getType()); return IdentifyType.primary.equals(identify.get().getType());
} }
return false; return false;
} }

View File

@@ -1,7 +1,7 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.node; package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Measure; import com.tencent.supersonic.headless.api.pojo.Measure;
import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope; import org.apache.calcite.sql.validate.SqlValidatorScope;
@@ -9,16 +9,7 @@ public class MeasureNode extends SemanticNode {
public static SqlNode buildNonAgg(String alias, Measure measure, SqlValidatorScope scope, public static SqlNode buildNonAgg(String alias, Measure measure, SqlValidatorScope scope,
EngineType engineType) throws Exception { EngineType engineType) throws Exception {
return buildAs(measure.getName(), getExpr(measure, alias, scope, engineType)); return getExpr(measure, alias, scope, engineType);
}
public static SqlNode buildAgg(Measure measure, boolean noAgg, SqlValidatorScope scope,
EngineType engineType) throws Exception {
if ((measure.getAgg() == null || measure.getAgg().isEmpty()) || noAgg) {
return parse(measure.getName(), scope, engineType);
}
return buildAs(measure.getName(),
AggFunctionNode.build(measure.getAgg(), measure.getName(), scope, engineType));
} }
private static SqlNode getExpr(Measure measure, String alias, SqlValidatorScope scope, private static SqlNode getExpr(Measure measure, String alias, SqlValidatorScope scope,

View File

@@ -1,8 +1,9 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.node; package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.api.pojo.enums.MetricDefineType;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema; import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
import lombok.Data; import lombok.Data;
import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope; import org.apache.calcite.sql.validate.SqlValidatorScope;
@@ -14,29 +15,22 @@ import java.util.Optional;
@Data @Data
public class MetricNode extends SemanticNode { public class MetricNode extends SemanticNode {
private Metric metric; private MetricSchemaResp metric;
private Map<String, SqlNode> aggNode = new HashMap<>(); private Map<String, SqlNode> aggNode = new HashMap<>();
private Map<String, SqlNode> nonAggNode = new HashMap<>(); private Map<String, SqlNode> nonAggNode = new HashMap<>();
private Map<String, SqlNode> measureFilter = new HashMap<>(); private Map<String, SqlNode> measureFilter = new HashMap<>();
private Map<String, String> aggFunction = new HashMap<>(); private Map<String, String> aggFunction = new HashMap<>();
public static SqlNode build(Metric metric, SqlValidatorScope scope, EngineType engineType) public static SqlNode build(MetricSchemaResp metric, SqlValidatorScope scope,
throws Exception { EngineType engineType) throws Exception {
if (metric.getMetricTypeParams() == null || metric.getMetricTypeParams().getExpr() == null return parse(metric.getExpr(), scope, engineType);
|| metric.getMetricTypeParams().getExpr().isEmpty()) {
return parse(metric.getName(), scope, engineType);
}
SqlNode sqlNode = parse(metric.getMetricTypeParams().getExpr(), scope, engineType);
return buildAs(metric.getName(), sqlNode);
} }
public static Boolean isMetricField(String name, S2CalciteSchema schema) { public static Boolean isMetricField(String name, S2CalciteSchema schema) {
Optional<Metric> metric = schema.getMetrics().stream() Optional<MetricSchemaResp> metric = schema.getMetrics().stream()
.filter(m -> m.getName().equalsIgnoreCase(name)).findFirst(); .filter(m -> m.getName().equalsIgnoreCase(name)).findFirst();
return metric.isPresent() && metric.get().getMetricTypeParams().isFieldMetric(); return metric.isPresent()
&& metric.get().getMetricDefineType().equals(MetricDefineType.FIELD);
} }
public static Boolean isMetricField(Metric metric) {
return metric.getMetricTypeParams().isFieldMetric();
}
} }

View File

@@ -1,78 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.render;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.calcite.TableView;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.FilterNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.MetricNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.SemanticNode;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
/**
 * Renders query-specified filter (WHERE) information onto the current table view.
 *
 * <p>Fields referenced only by the WHERE clause are pulled into the inner select so the
 * filter can be applied in an outer wrapping view. NOTE(review): relies on the inherited
 * {@code super.tableView} being populated by an earlier renderer — confirm render order.
 */
public class FilterRender extends Renderer {

    @Override
    public void render(OntologyQueryParam metricCommand, List<DataModel> dataModels,
            SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
        TableView tableView = super.tableView;
        SqlNode filterNode = null;
        // Start from the explicitly requested metrics/dimensions; WHERE-clause fields
        // discovered below may be appended to these lists.
        List<String> queryMetrics = new ArrayList<>(metricCommand.getMetrics());
        List<String> queryDimensions = new ArrayList<>(metricCommand.getDimensions());
        EngineType engineType = schema.getOntology().getDatabase().getType();
        if (metricCommand.getWhere() != null && !metricCommand.getWhere().isEmpty()) {
            // Parse the WHERE clause once and collect every column it references.
            filterNode = SemanticNode.parse(metricCommand.getWhere(), scope, engineType);
            Set<String> whereFields = new HashSet<>();
            FilterNode.getFilterField(filterNode, whereFields);
            List<String> fieldWhere = whereFields.stream().collect(Collectors.toList());
            Set<String> dimensions = new HashSet<>();
            Set<String> metrics = new HashSet<>();
            // Classify each WHERE field as a dimension or a metric, per data model.
            for (DataModel dataModel : dataModels) {
                SourceRender.whereDimMetric(fieldWhere, metricCommand.getMetrics(),
                        metricCommand.getDimensions(), dataModel, schema, dimensions, metrics);
            }
            queryMetrics.addAll(metrics);
            queryDimensions.addAll(dimensions);
        }
        // Project every requested dimension into the view's measure list.
        for (String dimension : queryDimensions) {
            tableView.getMeasure().add(SemanticNode.parse(dimension, scope, engineType));
        }
        for (String metric : queryMetrics) {
            Optional<Metric> optionalMetric = Renderer.getMetricByName(metric, schema);
            if (optionalMetric.isPresent() && MetricNode.isMetricField(optionalMetric.get())) {
                // metric from field ignore
                continue;
            }
            if (optionalMetric.isPresent()) {
                // Known metric: expand it through its schema definition.
                tableView.getMeasure()
                        .add(MetricNode.build(optionalMetric.get(), scope, engineType));
            } else {
                // Not declared in the schema: treat the name as a raw expression.
                tableView.getMeasure().add(SemanticNode.parse(metric, scope, engineType));
            }
        }
        // Drop duplicate select items introduced by the merges above.
        tableView.setMeasure(SemanticNode.deduplicateNode(tableView.getMeasure()));
        tableView.setDimension(SemanticNode.deduplicateNode(tableView.getDimension()));
        if (filterNode != null) {
            // Wrap the current view in an outer SELECT * that carries the WHERE clause,
            // and make that wrapper the renderer's resulting view.
            TableView filterView = new TableView();
            filterView.setTable(SemanticNode.buildAs(Constants.DATASOURCE_TABLE_FILTER_PREFIX,
                    tableView.build()));
            filterView.getFilter().add(filterNode);
            filterView.getMeasure().add(SqlIdentifier.star(SqlParserPos.ZERO));
            super.tableView = filterView;
        }
    }
}

View File

@@ -1,28 +1,20 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.render; package com.tencent.supersonic.headless.core.translator.parser.calcite.render;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.api.pojo.Identify;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import com.tencent.supersonic.headless.core.pojo.DataModel;
import com.tencent.supersonic.headless.core.pojo.JoinRelation;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema; import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.calcite.TableView; import com.tencent.supersonic.headless.core.translator.parser.calcite.TableView;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.AggFunctionNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.DataModelNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.FilterNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.IdentifyNode; import com.tencent.supersonic.headless.core.translator.parser.calcite.node.IdentifyNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.MetricNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.SemanticNode; import com.tencent.supersonic.headless.core.translator.parser.calcite.node.SemanticNode;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants; import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Identify;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.JoinRelation;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Materialization; import com.tencent.supersonic.headless.core.translator.parser.s2sql.Materialization;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.JoinConditionType; import org.apache.calcite.sql.*;
import org.apache.calcite.sql.SqlBasicCall;
import org.apache.calcite.sql.SqlJoin;
import org.apache.calcite.sql.SqlLiteral;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.fun.SqlStdOperatorTable; import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParserPos; import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.validate.SqlValidatorScope; import org.apache.calcite.sql.validate.SqlValidatorScope;
@@ -30,16 +22,7 @@ import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Triple; import org.apache.commons.lang3.tuple.Triple;
import org.springframework.util.CollectionUtils; import org.springframework.util.CollectionUtils;
import java.util.ArrayList; import java.util.*;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Queue;
import java.util.Set;
import java.util.stream.Collectors; import java.util.stream.Collectors;
/** process the join conditions when the source number is greater than 1 */ /** process the join conditions when the source number is greater than 1 */
@@ -47,226 +30,63 @@ import java.util.stream.Collectors;
public class JoinRender extends Renderer { public class JoinRender extends Renderer {
@Override @Override
public void render(OntologyQueryParam metricCommand, List<DataModel> dataModels, public void render(OntologyQuery ontologyQuery, List<DataModel> dataModels,
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception { SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
String queryWhere = metricCommand.getWhere();
EngineType engineType = schema.getOntology().getDatabase().getType();
Set<String> whereFields = new HashSet<>();
List<String> fieldWhere = new ArrayList<>();
if (queryWhere != null && !queryWhere.isEmpty()) {
SqlNode sqlNode = SemanticNode.parse(queryWhere, scope, engineType);
FilterNode.getFilterField(sqlNode, whereFields);
fieldWhere = whereFields.stream().collect(Collectors.toList());
}
Set<String> queryAllDimension = new HashSet<>();
Set<String> measures = new HashSet<>();
DataModelNode.getQueryDimensionMeasure(schema.getOntology(), metricCommand,
queryAllDimension, measures);
SqlNode left = null; SqlNode left = null;
TableView leftTable = null; TableView leftTable = null;
TableView innerView = new TableView(); Map<String, SqlNode> outerSelect = new HashMap<>();
TableView filterView = new TableView(); Map<String, String> beforeModels = new HashMap<>();
Map<String, SqlNode> innerSelect = new HashMap<>(); EngineType engineType = schema.getOntology().getDatabase().getType();
Set<String> filterDimension = new HashSet<>();
Map<String, String> beforeSources = new HashMap<>();
for (int i = 0; i < dataModels.size(); i++) { for (int i = 0; i < dataModels.size(); i++) {
final DataModel dataModel = dataModels.get(i); final DataModel dataModel = dataModels.get(i);
final Set<String> filterDimensions = new HashSet<>(); final Set<DimSchemaResp> queryDimensions =
final Set<String> filterMetrics = new HashSet<>(); ontologyQuery.getDimensionsByModel(dataModel.getId());
final Set<String> queryDimension = new HashSet<>(); final Set<MetricSchemaResp> queryMetrics =
final Set<String> queryMetrics = new HashSet<>(); ontologyQuery.getMetricsByModel(dataModel.getId());
SourceRender.whereDimMetric(fieldWhere, queryMetrics, queryDimension, dataModel, schema,
filterDimensions, filterMetrics);
List<String> reqMetric = new ArrayList<>(metricCommand.getMetrics());
reqMetric.addAll(filterMetrics);
reqMetric = uniqList(reqMetric);
List<String> reqDimension = new ArrayList<>(metricCommand.getDimensions());
reqDimension.addAll(filterDimensions);
reqDimension = uniqList(reqDimension);
Set<String> sourceMeasure = dataModel.getMeasures().stream().map(mm -> mm.getName())
.collect(Collectors.toSet());
doMetric(innerSelect, filterView, queryMetrics, reqMetric, dataModel, sourceMeasure,
scope, schema, nonAgg);
Set<String> dimension = dataModel.getDimensions().stream().map(dd -> dd.getName())
.collect(Collectors.toSet());
doDimension(innerSelect, filterDimension, queryDimension, reqDimension, dataModel,
dimension, scope, schema);
List<String> primary = new ArrayList<>(); List<String> primary = new ArrayList<>();
for (Identify identify : dataModel.getIdentifiers()) { for (Identify identify : dataModel.getIdentifiers()) {
primary.add(identify.getName()); primary.add(identify.getName());
if (!fieldWhere.contains(identify.getName())) {
fieldWhere.add(identify.getName());
}
} }
List<String> dataSourceWhere = new ArrayList<>(fieldWhere);
addZipperField(dataModel, dataSourceWhere); TableView tableView = SourceRender.renderOne(queryMetrics, queryDimensions, dataModel, scope, schema);
TableView tableView =
SourceRender.renderOne("", dataSourceWhere, queryMetrics, queryDimension,
metricCommand.getWhere(), dataModels.get(i), scope, schema, true);
log.info("tableView {}", StringUtils.normalizeSpace(tableView.getTable().toString())); log.info("tableView {}", StringUtils.normalizeSpace(tableView.getTable().toString()));
String alias = Constants.JOIN_TABLE_PREFIX + dataModel.getName(); String alias = Constants.JOIN_TABLE_PREFIX + dataModel.getName();
tableView.setAlias(alias); tableView.setAlias(alias);
tableView.setPrimary(primary); tableView.setPrimary(primary);
tableView.setDataModel(dataModel); tableView.setDataModel(dataModel);
for (String field : tableView.getFields()) {
outerSelect.put(field, SemanticNode.parse(alias + "." + field, scope, engineType));
}
if (left == null) { if (left == null) {
leftTable = tableView; leftTable = tableView;
left = SemanticNode.buildAs(tableView.getAlias(), getTable(tableView, scope)); left = SemanticNode.buildAs(tableView.getAlias(), getTable(tableView));
beforeSources.put(dataModel.getName(), leftTable.getAlias()); beforeModels.put(dataModel.getName(), leftTable.getAlias());
continue; continue;
} }
left = buildJoin(left, leftTable, tableView, beforeSources, dataModel, schema, scope); left = buildJoin(left, leftTable, tableView, beforeModels, dataModel, schema, scope);
leftTable = tableView; leftTable = tableView;
beforeSources.put(dataModel.getName(), tableView.getAlias()); beforeModels.put(dataModel.getName(), tableView.getAlias());
} }
for (Map.Entry<String, SqlNode> entry : innerSelect.entrySet()) { for (Map.Entry<String, SqlNode> entry : outerSelect.entrySet()) {
innerView.getMeasure().add(entry.getValue()); tableView.getSelect().add(entry.getValue());
} }
innerView.setTable(left); tableView.setTable(left);
filterView
.setTable(SemanticNode.buildAs(Constants.JOIN_TABLE_OUT_PREFIX, innerView.build()));
if (!filterDimension.isEmpty()) {
for (String d : getQueryDimension(filterDimension, queryAllDimension, whereFields)) {
if (nonAgg) {
filterView.getMeasure().add(SemanticNode.parse(d, scope, engineType));
} else {
filterView.getDimension().add(SemanticNode.parse(d, scope, engineType));
}
}
}
filterView.setMeasure(SemanticNode.deduplicateNode(filterView.getMeasure()));
filterView.setDimension(SemanticNode.deduplicateNode(filterView.getDimension()));
super.tableView = filterView;
} }
private void doMetric(Map<String, SqlNode> innerSelect, TableView filterView, private SqlNode getTable(TableView tableView) {
Set<String> queryMetrics, List<String> reqMetrics, DataModel dataModel,
Set<String> sourceMeasure, SqlValidatorScope scope, S2CalciteSchema schema,
boolean nonAgg) throws Exception {
String alias = Constants.JOIN_TABLE_PREFIX + dataModel.getName();
EngineType engineType = schema.getOntology().getDatabase().getType();
for (String m : reqMetrics) {
if (getMatchMetric(schema, sourceMeasure, m, queryMetrics)) {
MetricNode metricNode = buildMetricNode(m, dataModel, scope, schema, nonAgg, alias);
if (!metricNode.getNonAggNode().isEmpty()) {
for (String measure : metricNode.getNonAggNode().keySet()) {
innerSelect.put(measure, SemanticNode.buildAs(measure,
SemanticNode.parse(alias + "." + measure, scope, engineType)));
}
}
if (metricNode.getAggFunction() != null && !metricNode.getAggFunction().isEmpty()) {
for (Map.Entry<String, String> entry : metricNode.getAggFunction().entrySet()) {
if (metricNode.getNonAggNode().containsKey(entry.getKey())) {
if (nonAgg) {
filterView.getMeasure().add(SemanticNode.buildAs(entry.getKey(),
SemanticNode.parse(entry.getKey(), scope, engineType)));
} else {
filterView.getMeasure()
.add(SemanticNode.buildAs(entry.getKey(),
AggFunctionNode.build(entry.getValue(),
entry.getKey(), scope, engineType)));
}
}
}
}
}
}
}
private void doDimension(Map<String, SqlNode> innerSelect, Set<String> filterDimension,
Set<String> queryDimension, List<String> reqDimensions, DataModel dataModel,
Set<String> dimension, SqlValidatorScope scope, S2CalciteSchema schema)
throws Exception {
String alias = Constants.JOIN_TABLE_PREFIX + dataModel.getName();
EngineType engineType = schema.getOntology().getDatabase().getType();
for (String d : reqDimensions) {
if (getMatchDimension(schema, dimension, dataModel, d, queryDimension)) {
if (d.contains(Constants.DIMENSION_IDENTIFY)) {
String[] identifyDimension = d.split(Constants.DIMENSION_IDENTIFY);
innerSelect.put(d, SemanticNode.buildAs(d, SemanticNode
.parse(alias + "." + identifyDimension[1], scope, engineType)));
} else {
innerSelect.put(d, SemanticNode.buildAs(d,
SemanticNode.parse(alias + "." + d, scope, engineType)));
}
filterDimension.add(d);
}
}
}
private Set<String> getQueryDimension(Set<String> filterDimension,
Set<String> queryAllDimension, Set<String> whereFields) {
return filterDimension.stream()
.filter(d -> queryAllDimension.contains(d) || whereFields.contains(d))
.collect(Collectors.toSet());
}
private boolean getMatchMetric(S2CalciteSchema schema, Set<String> sourceMeasure, String m,
Set<String> queryMetrics) {
Optional<Metric> metric = schema.getMetrics().stream()
.filter(mm -> mm.getName().equalsIgnoreCase(m)).findFirst();
boolean isAdd = false;
if (metric.isPresent()) {
Set<String> metricMeasures = metric.get().getMetricTypeParams().getMeasures().stream()
.map(me -> me.getName()).collect(Collectors.toSet());
if (sourceMeasure.containsAll(metricMeasures)) {
isAdd = true;
}
}
if (sourceMeasure.contains(m)) {
isAdd = true;
}
if (isAdd && !queryMetrics.contains(m)) {
queryMetrics.add(m);
}
return isAdd;
}
private boolean getMatchDimension(S2CalciteSchema schema, Set<String> sourceDimension,
DataModel dataModel, String d, Set<String> queryDimension) {
String oriDimension = d;
boolean isAdd = false;
if (d.contains(Constants.DIMENSION_IDENTIFY)) {
oriDimension = d.split(Constants.DIMENSION_IDENTIFY)[1];
}
if (sourceDimension.contains(oriDimension)) {
isAdd = true;
}
for (Identify identify : dataModel.getIdentifiers()) {
if (identify.getName().equalsIgnoreCase(oriDimension)) {
isAdd = true;
break;
}
}
if (schema.getDimensions().containsKey(dataModel.getName())) {
for (Dimension dim : schema.getDimensions().get(dataModel.getName())) {
if (dim.getName().equalsIgnoreCase(oriDimension)) {
isAdd = true;
}
}
}
if (isAdd && !queryDimension.contains(oriDimension)) {
queryDimension.add(oriDimension);
}
return isAdd;
}
private SqlNode getTable(TableView tableView, SqlValidatorScope scope) throws Exception {
return SemanticNode.getTable(tableView.getTable()); return SemanticNode.getTable(tableView.getTable());
} }
private SqlNode buildJoin(SqlNode left, TableView leftTable, TableView tableView, private SqlNode buildJoin(SqlNode leftNode, TableView leftTable, TableView rightTable,
Map<String, String> before, DataModel dataModel, S2CalciteSchema schema, Map<String, String> before, DataModel dataModel, S2CalciteSchema schema,
SqlValidatorScope scope) throws Exception { SqlValidatorScope scope) throws Exception {
EngineType engineType = schema.getOntology().getDatabase().getType(); EngineType engineType = schema.getOntology().getDatabase().getType();
SqlNode condition = SqlNode condition = getCondition(leftTable, rightTable, dataModel, schema, scope, engineType);
getCondition(leftTable, tableView, dataModel, schema, scope, engineType);
SqlLiteral sqlLiteral = SemanticNode.getJoinSqlLiteral(""); SqlLiteral sqlLiteral = SemanticNode.getJoinSqlLiteral("");
JoinRelation matchJoinRelation = getMatchJoinRelation(before, tableView, schema); JoinRelation matchJoinRelation = getMatchJoinRelation(before, rightTable, schema);
SqlNode joinRelationCondition = null; SqlNode joinRelationCondition = null;
if (!CollectionUtils.isEmpty(matchJoinRelation.getJoinCondition())) { if (!CollectionUtils.isEmpty(matchJoinRelation.getJoinCondition())) {
sqlLiteral = SemanticNode.getJoinSqlLiteral(matchJoinRelation.getJoinType()); sqlLiteral = SemanticNode.getJoinSqlLiteral(matchJoinRelation.getJoinType());
@@ -275,9 +95,9 @@ public class JoinRender extends Renderer {
} }
if (Materialization.TimePartType.ZIPPER.equals(leftTable.getDataModel().getTimePartType()) if (Materialization.TimePartType.ZIPPER.equals(leftTable.getDataModel().getTimePartType())
|| Materialization.TimePartType.ZIPPER || Materialization.TimePartType.ZIPPER
.equals(tableView.getDataModel().getTimePartType())) { .equals(rightTable.getDataModel().getTimePartType())) {
SqlNode zipperCondition = SqlNode zipperCondition =
getZipperCondition(leftTable, tableView, dataModel, schema, scope); getZipperCondition(leftTable, rightTable, dataModel, schema, scope);
if (Objects.nonNull(joinRelationCondition)) { if (Objects.nonNull(joinRelationCondition)) {
condition = new SqlBasicCall(SqlStdOperatorTable.AND, condition = new SqlBasicCall(SqlStdOperatorTable.AND,
new ArrayList<>(Arrays.asList(zipperCondition, joinRelationCondition)), new ArrayList<>(Arrays.asList(zipperCondition, joinRelationCondition)),
@@ -287,9 +107,9 @@ public class JoinRender extends Renderer {
} }
} }
return new SqlJoin(SqlParserPos.ZERO, left, return new SqlJoin(SqlParserPos.ZERO, leftNode,
SqlLiteral.createBoolean(false, SqlParserPos.ZERO), sqlLiteral, SqlLiteral.createBoolean(false, SqlParserPos.ZERO), sqlLiteral,
SemanticNode.buildAs(tableView.getAlias(), getTable(tableView, scope)), SemanticNode.buildAs(rightTable.getAlias(), getTable(rightTable)),
SqlLiteral.createSymbol(JoinConditionType.ON, SqlParserPos.ZERO), condition); SqlLiteral.createSymbol(JoinConditionType.ON, SqlParserPos.ZERO), condition);
} }
@@ -402,84 +222,84 @@ public class JoinRender extends Renderer {
} }
private void addZipperField(DataModel dataModel, List<String> fields) { private void addZipperField(DataModel dataModel, List<String> fields) {
if (Materialization.TimePartType.ZIPPER.equals(dataModel.getTimePartType())) { // if (Materialization.TimePartType.ZIPPER.equals(dataModel.getTimePartType())) {
dataModel.getDimensions().stream() // dataModel.getDimensions().stream()
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType())) // .filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
.forEach(t -> { // .forEach(t -> {
if (t.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_END) // if (t.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_END)
&& !fields.contains(t.getName())) { // && !fields.contains(t.getName())) {
fields.add(t.getName()); // fields.add(t.getName());
} // }
if (t.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_START) // if (t.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_START)
&& !fields.contains(t.getName())) { // && !fields.contains(t.getName())) {
fields.add(t.getName()); // fields.add(t.getName());
} // }
}); // });
} // }
} }
private SqlNode getZipperCondition(TableView left, TableView right, DataModel dataModel, private SqlNode getZipperCondition(TableView left, TableView right, DataModel dataModel,
S2CalciteSchema schema, SqlValidatorScope scope) throws Exception { S2CalciteSchema schema, SqlValidatorScope scope) throws Exception {
if (Materialization.TimePartType.ZIPPER.equals(left.getDataModel().getTimePartType()) // if (Materialization.TimePartType.ZIPPER.equals(left.getDataModel().getTimePartType())
&& Materialization.TimePartType.ZIPPER // && Materialization.TimePartType.ZIPPER
.equals(right.getDataModel().getTimePartType())) { // .equals(right.getDataModel().getTimePartType())) {
throw new Exception("not support two zipper table"); // throw new Exception("not support two zipper table");
} // }
SqlNode condition = null; SqlNode condition = null;
Optional<Dimension> leftTime = left.getDataModel().getDimensions().stream() // Optional<Dimension> leftTime = left.getDataModel().getDimensions().stream()
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType())) // .filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
.findFirst(); // .findFirst();
Optional<Dimension> rightTime = right.getDataModel().getDimensions().stream() // Optional<Dimension> rightTime = right.getDataModel().getDimensions().stream()
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType())) // .filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
.findFirst(); // .findFirst();
if (leftTime.isPresent() && rightTime.isPresent()) { // if (leftTime.isPresent() && rightTime.isPresent()) {
//
String startTime = ""; // String startTime = "";
String endTime = ""; // String endTime = "";
String dateTime = ""; // String dateTime = "";
//
Optional<Dimension> startTimeOp = (Materialization.TimePartType.ZIPPER // Optional<Dimension> startTimeOp = (Materialization.TimePartType.ZIPPER
.equals(left.getDataModel().getTimePartType()) ? left : right).getDataModel() // .equals(left.getDataModel().getTimePartType()) ? left : right).getDataModel()
.getDimensions().stream() // .getDimensions().stream()
.filter(d -> Constants.DIMENSION_TYPE_TIME // .filter(d -> Constants.DIMENSION_TYPE_TIME
.equalsIgnoreCase(d.getType())) // .equalsIgnoreCase(d.getType()))
.filter(d -> d.getName() // .filter(d -> d.getName()
.startsWith(Constants.MATERIALIZATION_ZIPPER_START)) // .startsWith(Constants.MATERIALIZATION_ZIPPER_START))
.findFirst(); // .findFirst();
Optional<Dimension> endTimeOp = (Materialization.TimePartType.ZIPPER // Optional<Dimension> endTimeOp = (Materialization.TimePartType.ZIPPER
.equals(left.getDataModel().getTimePartType()) ? left : right).getDataModel() // .equals(left.getDataModel().getTimePartType()) ? left : right).getDataModel()
.getDimensions().stream() // .getDimensions().stream()
.filter(d -> Constants.DIMENSION_TYPE_TIME // .filter(d -> Constants.DIMENSION_TYPE_TIME
.equalsIgnoreCase(d.getType())) // .equalsIgnoreCase(d.getType()))
.filter(d -> d.getName() // .filter(d -> d.getName()
.startsWith(Constants.MATERIALIZATION_ZIPPER_END)) // .startsWith(Constants.MATERIALIZATION_ZIPPER_END))
.findFirst(); // .findFirst();
if (startTimeOp.isPresent() && endTimeOp.isPresent()) { // if (startTimeOp.isPresent() && endTimeOp.isPresent()) {
TableView zipper = Materialization.TimePartType.ZIPPER // TableView zipper = Materialization.TimePartType.ZIPPER
.equals(left.getDataModel().getTimePartType()) ? left : right; // .equals(left.getDataModel().getTimePartType()) ? left : right;
TableView partMetric = Materialization.TimePartType.ZIPPER // TableView partMetric = Materialization.TimePartType.ZIPPER
.equals(left.getDataModel().getTimePartType()) ? right : left; // .equals(left.getDataModel().getTimePartType()) ? right : left;
Optional<Dimension> partTime = Materialization.TimePartType.ZIPPER // Optional<Dimension> partTime = Materialization.TimePartType.ZIPPER
.equals(left.getDataModel().getTimePartType()) ? rightTime : leftTime; // .equals(left.getDataModel().getTimePartType()) ? rightTime : leftTime;
startTime = zipper.getAlias() + "." + startTimeOp.get().getName(); // startTime = zipper.getAlias() + "." + startTimeOp.get().getName();
endTime = zipper.getAlias() + "." + endTimeOp.get().getName(); // endTime = zipper.getAlias() + "." + endTimeOp.get().getName();
dateTime = partMetric.getAlias() + "." + partTime.get().getName(); // dateTime = partMetric.getAlias() + "." + partTime.get().getName();
} // }
EngineType engineType = schema.getOntology().getDatabase().getType(); // EngineType engineType = schema.getOntology().getDatabase().getType();
ArrayList<SqlNode> operandList = // ArrayList<SqlNode> operandList =
new ArrayList<>(Arrays.asList(SemanticNode.parse(endTime, scope, engineType), // new ArrayList<>(Arrays.asList(SemanticNode.parse(endTime, scope, engineType),
SemanticNode.parse(dateTime, scope, engineType))); // SemanticNode.parse(dateTime, scope, engineType)));
condition = new SqlBasicCall(SqlStdOperatorTable.AND, // condition = new SqlBasicCall(SqlStdOperatorTable.AND,
new ArrayList<SqlNode>(Arrays.asList( // new ArrayList<SqlNode>(Arrays.asList(
new SqlBasicCall(SqlStdOperatorTable.LESS_THAN_OR_EQUAL, // new SqlBasicCall(SqlStdOperatorTable.LESS_THAN_OR_EQUAL,
new ArrayList<SqlNode>(Arrays.asList( // new ArrayList<SqlNode>(Arrays.asList(
SemanticNode.parse(startTime, scope, engineType), // SemanticNode.parse(startTime, scope, engineType),
SemanticNode.parse(dateTime, scope, engineType))), // SemanticNode.parse(dateTime, scope, engineType))),
SqlParserPos.ZERO, null), // SqlParserPos.ZERO, null),
new SqlBasicCall(SqlStdOperatorTable.GREATER_THAN, operandList, // new SqlBasicCall(SqlStdOperatorTable.GREATER_THAN, operandList,
SqlParserPos.ZERO, null))), // SqlParserPos.ZERO, null))),
SqlParserPos.ZERO, null); // SqlParserPos.ZERO, null);
} // }
return condition; return condition;
} }
} }

View File

@@ -2,12 +2,13 @@ package com.tencent.supersonic.headless.core.translator.parser.calcite.render;
import com.tencent.supersonic.common.pojo.ColumnOrder; import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import com.tencent.supersonic.headless.core.pojo.DataModel;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema; import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.calcite.TableView;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.MetricNode; import com.tencent.supersonic.headless.core.translator.parser.calcite.node.MetricNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.SemanticNode; import com.tencent.supersonic.headless.core.translator.parser.calcite.node.SemanticNode;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlNodeList; import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.fun.SqlStdOperatorTable; import org.apache.calcite.sql.fun.SqlStdOperatorTable;
@@ -22,29 +23,28 @@ import java.util.List;
public class OutputRender extends Renderer { public class OutputRender extends Renderer {
@Override @Override
public void render(OntologyQueryParam metricCommand, List<DataModel> dataModels, public void render(OntologyQuery ontologyQuery, List<DataModel> dataModels,
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception { SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
TableView selectDataSet = super.tableView;
EngineType engineType = schema.getOntology().getDatabase().getType(); EngineType engineType = schema.getOntology().getDatabase().getType();
for (String dimension : metricCommand.getDimensions()) { for (DimSchemaResp dimension : ontologyQuery.getDimensions()) {
selectDataSet.getMeasure().add(SemanticNode.parse(dimension, scope, engineType)); tableView.getMetric().add(SemanticNode.parse(dimension.getExpr(), scope, engineType));
} }
for (String metric : metricCommand.getMetrics()) { for (MetricSchemaResp metric : ontologyQuery.getMetrics()) {
if (MetricNode.isMetricField(metric, schema)) { if (MetricNode.isMetricField(metric.getName(), schema)) {
// metric from field ignore // metric from field ignore
continue; continue;
} }
selectDataSet.getMeasure().add(SemanticNode.parse(metric, scope, engineType)); tableView.getMetric().add(SemanticNode.parse(metric.getName(), scope, engineType));
} }
if (metricCommand.getLimit() > 0) { if (ontologyQuery.getLimit() > 0) {
SqlNode offset = SqlNode offset =
SemanticNode.parse(metricCommand.getLimit().toString(), scope, engineType); SemanticNode.parse(ontologyQuery.getLimit().toString(), scope, engineType);
selectDataSet.setOffset(offset); tableView.setOffset(offset);
} }
if (!CollectionUtils.isEmpty(metricCommand.getOrder())) { if (!CollectionUtils.isEmpty(ontologyQuery.getOrder())) {
List<SqlNode> orderList = new ArrayList<>(); List<SqlNode> orderList = new ArrayList<>();
for (ColumnOrder columnOrder : metricCommand.getOrder()) { for (ColumnOrder columnOrder : ontologyQuery.getOrder()) {
if (SqlStdOperatorTable.DESC.getName().equalsIgnoreCase(columnOrder.getOrder())) { if (SqlStdOperatorTable.DESC.getName().equalsIgnoreCase(columnOrder.getOrder())) {
orderList.add(SqlStdOperatorTable.DESC.createCall(SqlParserPos.ZERO, orderList.add(SqlStdOperatorTable.DESC.createCall(SqlParserPos.ZERO,
new SqlNode[] {SemanticNode.parse(columnOrder.getCol(), scope, new SqlNode[] {SemanticNode.parse(columnOrder.getCol(), scope,
@@ -53,7 +53,7 @@ public class OutputRender extends Renderer {
orderList.add(SemanticNode.parse(columnOrder.getCol(), scope, engineType)); orderList.add(SemanticNode.parse(columnOrder.getCol(), scope, engineType));
} }
} }
selectDataSet.setOrder(new SqlNodeList(orderList, SqlParserPos.ZERO)); tableView.setOrder(new SqlNodeList(orderList, SqlParserPos.ZERO));
} }
} }
} }

View File

@@ -1,26 +1,15 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.render; package com.tencent.supersonic.headless.core.translator.parser.calcite.render;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.headless.core.pojo.DataModel;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema; import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.calcite.TableView; import com.tencent.supersonic.headless.core.translator.parser.calcite.TableView;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.MeasureNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.MetricNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.SemanticNode; import com.tencent.supersonic.headless.core.translator.parser.calcite.node.SemanticNode;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Identify;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Measure;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
import lombok.Data; import lombok.Data;
import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope; import org.apache.calcite.sql.validate.SqlValidatorScope;
import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
/** process TableView */ /** process TableView */
@Data @Data
@@ -28,85 +17,11 @@ public abstract class Renderer {
protected TableView tableView = new TableView(); protected TableView tableView = new TableView();
public static Optional<Dimension> getDimensionByName(String name, DataModel datasource) {
return datasource.getDimensions().stream().filter(d -> d.getName().equalsIgnoreCase(name))
.findFirst();
}
public static Optional<Measure> getMeasureByName(String name, DataModel datasource) {
return datasource.getMeasures().stream().filter(mm -> mm.getName().equalsIgnoreCase(name))
.findFirst();
}
public static Optional<Metric> getMetricByName(String name, S2CalciteSchema schema) {
Optional<Metric> metric = schema.getMetrics().stream()
.filter(m -> m.getName().equalsIgnoreCase(name)).findFirst();
return metric;
}
public static Optional<Identify> getIdentifyByName(String name, DataModel datasource) {
return datasource.getIdentifiers().stream().filter(i -> i.getName().equalsIgnoreCase(name))
.findFirst();
}
public static MetricNode buildMetricNode(String metric, DataModel datasource,
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg, String alias)
throws Exception {
Optional<Metric> metricOpt = getMetricByName(metric, schema);
MetricNode metricNode = new MetricNode();
EngineType engineType = EngineType.fromString(datasource.getType());
if (metricOpt.isPresent()) {
metricNode.setMetric(metricOpt.get());
for (Measure m : metricOpt.get().getMetricTypeParams().getMeasures()) {
Optional<Measure> measure = getMeasureByName(m.getName(), datasource);
if (measure.isPresent()) {
metricNode.getNonAggNode().put(measure.get().getName(),
MeasureNode.buildNonAgg(alias, measure.get(), scope, engineType));
metricNode.getAggNode().put(measure.get().getName(),
MeasureNode.buildAgg(measure.get(), nonAgg, scope, engineType));
metricNode.getAggFunction().put(measure.get().getName(),
measure.get().getAgg());
} else {
metricNode.getNonAggNode().put(m.getName(),
MeasureNode.buildNonAgg(alias, m, scope, engineType));
metricNode.getAggNode().put(m.getName(),
MeasureNode.buildAgg(m, nonAgg, scope, engineType));
metricNode.getAggFunction().put(m.getName(), m.getAgg());
}
if (m.getConstraint() != null && !m.getConstraint().isEmpty()) {
metricNode.getMeasureFilter().put(m.getName(),
SemanticNode.parse(m.getConstraint(), scope, engineType));
}
}
return metricNode;
}
Optional<Measure> measure = getMeasureByName(metric, datasource);
if (measure.isPresent()) {
metricNode.getNonAggNode().put(measure.get().getName(),
MeasureNode.buildNonAgg(alias, measure.get(), scope, engineType));
metricNode.getAggNode().put(measure.get().getName(),
MeasureNode.buildAgg(measure.get(), nonAgg, scope, engineType));
metricNode.getAggFunction().put(measure.get().getName(), measure.get().getAgg());
if (measure.get().getConstraint() != null && !measure.get().getConstraint().isEmpty()) {
metricNode.getMeasureFilter().put(measure.get().getName(),
SemanticNode.parse(measure.get().getConstraint(), scope, engineType));
}
}
return metricNode;
}
public static List<String> uniqList(List<String> list) {
Set<String> tmp = new HashSet<>(list);
return tmp.stream().collect(Collectors.toList());
}
public void setTable(SqlNode table) { public void setTable(SqlNode table) {
tableView.setTable(table); tableView.setTable(table);
} }
public SqlNode builder() { public SqlNode build() {
return tableView.build(); return tableView.build();
} }
@@ -114,6 +29,6 @@ public abstract class Renderer {
return SemanticNode.buildAs(alias, tableView.build()); return SemanticNode.buildAs(alias, tableView.build());
} }
public abstract void render(OntologyQueryParam metricCommand, List<DataModel> dataModels, public abstract void render(OntologyQuery ontologyQuery, List<DataModel> dataModels,
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception; SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception;
} }

View File

@@ -1,305 +1,60 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.render; package com.tencent.supersonic.headless.core.translator.parser.calcite.render;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.api.pojo.Identify;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import com.tencent.supersonic.headless.core.pojo.DataModel;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema; import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.calcite.TableView; import com.tencent.supersonic.headless.core.translator.parser.calcite.TableView;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.DataModelNode; import com.tencent.supersonic.headless.core.translator.parser.calcite.node.DataModelNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.DimensionNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.FilterNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.IdentifyNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.MetricNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.SemanticNode; import com.tencent.supersonic.headless.core.translator.parser.calcite.node.SemanticNode;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Identify;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Materialization;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Measure;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope; import org.apache.calcite.sql.validate.SqlValidatorScope;
import org.springframework.util.CollectionUtils;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional; import java.util.Optional;
import java.util.Set; import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
import static com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants.DIMENSION_DELIMITER;
/** process the table dataSet from the defined data model schema */ /** process the table dataSet from the defined data model schema */
@Slf4j @Slf4j
public class SourceRender extends Renderer { public class SourceRender extends Renderer {
public static TableView renderOne(String alias, List<String> fieldWheres, public static TableView renderOne(Set<MetricSchemaResp> queryMetrics,
Set<String> reqMetrics, Set<String> reqDimensions, String queryWhere, Set<DimSchemaResp> queryDimensions, DataModel dataModel, SqlValidatorScope scope,
DataModel datasource, SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) S2CalciteSchema schema) {
throws Exception { TableView tableView = new TableView();
TableView dataSet = new TableView();
TableView output = new TableView();
Set<String> queryMetrics = new HashSet<>(reqMetrics);
Set<String> queryDimensions = new HashSet<>(reqDimensions);
List<String> fieldWhere = new ArrayList<>(fieldWheres);
Map<String, String> extendFields = new HashMap<>();
if (!fieldWhere.isEmpty()) {
Set<String> dimensions = new HashSet<>();
Set<String> metrics = new HashSet<>();
whereDimMetric(fieldWhere, queryMetrics, queryDimensions, datasource, schema,
dimensions, metrics);
queryMetrics.addAll(metrics);
queryDimensions.addAll(dimensions);
mergeWhere(fieldWhere, dataSet, output, queryMetrics, queryDimensions, extendFields,
datasource, scope, schema, nonAgg);
}
addTimeDimension(datasource, queryDimensions);
for (String metric : queryMetrics) {
MetricNode metricNode =
buildMetricNode(metric, datasource, scope, schema, nonAgg, alias);
if (!metricNode.getAggNode().isEmpty()) {
metricNode.getAggNode().entrySet().stream()
.forEach(m -> output.getMeasure().add(m.getValue()));
}
if (metricNode.getNonAggNode() != null) {
metricNode.getNonAggNode().entrySet().stream()
.forEach(m -> dataSet.getMeasure().add(m.getValue()));
}
if (metricNode.getMeasureFilter() != null) {
metricNode.getMeasureFilter().entrySet().stream()
.forEach(m -> dataSet.getFilter().add(m.getValue()));
}
}
for (String dimension : queryDimensions) {
if (dimension.contains(Constants.DIMENSION_IDENTIFY)
&& queryDimensions.contains(dimension.split(Constants.DIMENSION_IDENTIFY)[1])) {
continue;
}
buildDimension(dimension.contains(Constants.DIMENSION_IDENTIFY) ? dimension : "",
dimension.contains(Constants.DIMENSION_IDENTIFY)
? dimension.split(Constants.DIMENSION_IDENTIFY)[1]
: dimension,
datasource, schema, nonAgg, extendFields, dataSet, output, scope);
}
output.setMeasure(SemanticNode.deduplicateNode(output.getMeasure()));
dataSet.setMeasure(SemanticNode.deduplicateNode(dataSet.getMeasure()));
SqlNode tableNode = DataModelNode.buildExtend(datasource, extendFields, scope);
dataSet.setTable(tableNode);
output.setTable(
SemanticNode.buildAs(Constants.DATASOURCE_TABLE_OUT_PREFIX + datasource.getName()
+ "_" + UUID.randomUUID().toString().substring(32), dataSet.build()));
return output;
}
private static void buildDimension(String alias, String dimension, DataModel datasource,
S2CalciteSchema schema, boolean nonAgg, Map<String, String> extendFields,
TableView dataSet, TableView output, SqlValidatorScope scope) throws Exception {
List<Dimension> dimensionList = schema.getDimensions().get(datasource.getName());
EngineType engineType = schema.getOntology().getDatabase().getType(); EngineType engineType = schema.getOntology().getDatabase().getType();
boolean isAdd = false; Set<String> queryFields = tableView.getFields();
if (!CollectionUtils.isEmpty(dimensionList)) { queryMetrics.stream().forEach(m -> queryFields.addAll(m.getFields()));
for (Dimension dim : dimensionList) { queryDimensions.stream().forEach(m -> queryFields.add(m.getBizName()));
if (!dim.getName().equalsIgnoreCase(dimension)) {
continue;
}
dataSet.getMeasure().add(DimensionNode.buildArray(dim, scope, engineType));
addExtendFields(dim, extendFields);
if (nonAgg) {
output.getMeasure().add(DimensionNode.buildName(dim, scope, engineType));
isAdd = true;
continue;
}
if ("".equals(alias)) { try {
output.getDimension().add(DimensionNode.buildName(dim, scope, engineType)); for (String field : queryFields) {
} else { tableView.getSelect().add(SemanticNode.parse(field, scope, engineType));
output.getDimension()
.add(DimensionNode.buildNameAs(alias, dim, scope, engineType));
}
isAdd = true;
break;
} }
tableView.setTable(DataModelNode.build(dataModel, scope));
} catch (Exception e) {
log.error("Failed to create sqlNode for data model {}", dataModel);
} }
if (!isAdd) {
Optional<Identify> identify = datasource.getIdentifiers().stream() return tableView;
.filter(i -> i.getName().equalsIgnoreCase(dimension)).findFirst();
if (identify.isPresent()) {
if (nonAgg) {
dataSet.getMeasure()
.add(SemanticNode.parse(identify.get().getName(), scope, engineType));
output.getMeasure()
.add(SemanticNode.parse(identify.get().getName(), scope, engineType));
} else {
dataSet.getMeasure()
.add(SemanticNode.parse(identify.get().getName(), scope, engineType));
output.getDimension()
.add(SemanticNode.parse(identify.get().getName(), scope, engineType));
}
isAdd = true;
}
}
if (isAdd) {
return;
}
Optional<Dimension> dimensionOptional = getDimensionByName(dimension, datasource);
if (dimensionOptional.isPresent()) {
dataSet.getMeasure()
.add(DimensionNode.buildArray(dimensionOptional.get(), scope, engineType));
addExtendFields(dimensionOptional.get(), extendFields);
if (nonAgg) {
output.getMeasure()
.add(DimensionNode.buildName(dimensionOptional.get(), scope, engineType));
return;
}
output.getDimension()
.add(DimensionNode.buildName(dimensionOptional.get(), scope, engineType));
}
} }
private static void addExtendFields(Dimension dimension, Map<String, String> extendFields) { public static boolean isDimension(String name, DataModel dataModel, S2CalciteSchema schema) {
if (dimension.getDataType().isArray()) { Optional<DimSchemaResp> dimension = dataModel.getDimensions().stream()
if (Objects.nonNull(dimension.getExt())
&& dimension.getExt().containsKey(DIMENSION_DELIMITER)) {
extendFields.put(dimension.getExpr(),
(String) dimension.getExt().get(DIMENSION_DELIMITER));
} else {
extendFields.put(dimension.getExpr(), "");
}
}
}
private static List<SqlNode> getWhereMeasure(List<String> fields, Set<String> queryMetrics,
Set<String> queryDimensions, Map<String, String> extendFields, DataModel datasource,
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
Iterator<String> iterator = fields.iterator();
List<SqlNode> whereNode = new ArrayList<>();
EngineType engineType = schema.getOntology().getDatabase().getType();
while (iterator.hasNext()) {
String cur = iterator.next();
if (queryDimensions.contains(cur) || queryMetrics.contains(cur)) {
iterator.remove();
}
}
for (String where : fields) {
List<Dimension> dimensionList = schema.getDimensions().get(datasource.getName());
boolean isAdd = false;
if (!CollectionUtils.isEmpty(dimensionList)) {
for (Dimension dim : dimensionList) {
if (!dim.getName().equalsIgnoreCase(where)) {
continue;
}
whereNode.addAll(DimensionNode.expand(dim, scope, engineType));
isAdd = true;
}
}
Optional<Identify> identify = getIdentifyByName(where, datasource);
if (identify.isPresent()) {
whereNode.add(IdentifyNode.build(identify.get(), scope, engineType));
isAdd = true;
}
if (isAdd) {
continue;
}
Optional<Dimension> dimensionOptional = getDimensionByName(where, datasource);
if (dimensionOptional.isPresent()) {
whereNode.add(DimensionNode.buildArray(dimensionOptional.get(), scope, engineType));
addExtendFields(dimensionOptional.get(), extendFields);
}
}
return whereNode;
}
private static void mergeWhere(List<String> fields, TableView dataSet, TableView outputSet,
Set<String> queryMetrics, Set<String> queryDimensions, Map<String, String> extendFields,
DataModel datasource, SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg)
throws Exception {
List<SqlNode> whereNode = getWhereMeasure(fields, queryMetrics, queryDimensions,
extendFields, datasource, scope, schema, nonAgg);
dataSet.getMeasure().addAll(whereNode);
// getWhere(outputSet,fields,queryMetrics,queryDimensions,datasource,scope,schema);
}
public static void whereDimMetric(List<String> fields, Set<String> queryMetrics,
Set<String> queryDimensions, DataModel datasource, S2CalciteSchema schema,
Set<String> dimensions, Set<String> metrics) {
for (String field : fields) {
if (queryDimensions.contains(field) || queryMetrics.contains(field)) {
continue;
}
String filterField = field;
if (field.contains(Constants.DIMENSION_IDENTIFY)) {
filterField = field.split(Constants.DIMENSION_IDENTIFY)[1];
}
addField(filterField, field, datasource, schema, dimensions, metrics);
}
}
private static void addField(String field, String oriField, DataModel datasource,
S2CalciteSchema schema, Set<String> dimensions, Set<String> metrics) {
Optional<Dimension> dimension = datasource.getDimensions().stream()
.filter(d -> d.getName().equalsIgnoreCase(field)).findFirst();
if (dimension.isPresent()) {
dimensions.add(oriField);
return;
}
Optional<Identify> identify = datasource.getIdentifiers().stream()
.filter(i -> i.getName().equalsIgnoreCase(field)).findFirst();
if (identify.isPresent()) {
dimensions.add(oriField);
return;
}
if (schema.getDimensions().containsKey(datasource.getName())) {
Optional<Dimension> dataSourceDim = schema.getDimensions().get(datasource.getName())
.stream().filter(d -> d.getName().equalsIgnoreCase(field)).findFirst();
if (dataSourceDim.isPresent()) {
dimensions.add(oriField);
return;
}
}
Optional<Measure> metric = datasource.getMeasures().stream()
.filter(m -> m.getName().equalsIgnoreCase(field)).findFirst();
if (metric.isPresent()) {
metrics.add(oriField);
return;
}
Optional<Metric> datasourceMetric = schema.getMetrics().stream()
.filter(m -> m.getName().equalsIgnoreCase(field)).findFirst();
if (datasourceMetric.isPresent()) {
Set<String> measures = datasourceMetric.get().getMetricTypeParams().getMeasures()
.stream().map(m -> m.getName()).collect(Collectors.toSet());
if (datasource.getMeasures().stream().map(m -> m.getName()).collect(Collectors.toSet())
.containsAll(measures)) {
metrics.add(oriField);
return;
}
}
}
public static boolean isDimension(String name, DataModel datasource, S2CalciteSchema schema) {
Optional<Dimension> dimension = datasource.getDimensions().stream()
.filter(d -> d.getName().equalsIgnoreCase(name)).findFirst(); .filter(d -> d.getName().equalsIgnoreCase(name)).findFirst();
if (dimension.isPresent()) { if (dimension.isPresent()) {
return true; return true;
} }
Optional<Identify> identify = datasource.getIdentifiers().stream() Optional<Identify> identify = dataModel.getIdentifiers().stream()
.filter(i -> i.getName().equalsIgnoreCase(name)).findFirst(); .filter(i -> i.getName().equalsIgnoreCase(name)).findFirst();
if (identify.isPresent()) { if (identify.isPresent()) {
return true; return true;
} }
if (schema.getDimensions().containsKey(datasource.getName())) { if (schema.getDimensions().containsKey(dataModel.getName())) {
Optional<Dimension> dataSourceDim = schema.getDimensions().get(datasource.getName()) Optional<DimSchemaResp> dataSourceDim = schema.getDimensions().get(dataModel.getName())
.stream().filter(d -> d.getName().equalsIgnoreCase(name)).findFirst(); .stream().filter(d -> d.getName().equalsIgnoreCase(name)).findFirst();
if (dataSourceDim.isPresent()) { if (dataSourceDim.isPresent()) {
return true; return true;
@@ -308,52 +63,18 @@ public class SourceRender extends Renderer {
return false; return false;
} }
private static void addTimeDimension(DataModel dataModel, Set<String> queryDimension) {
if (Materialization.TimePartType.ZIPPER.equals(dataModel.getTimePartType())) { public void render(OntologyQuery ontologyQuery, List<DataModel> dataModels,
Optional<Dimension> startTimeOp = dataModel.getDimensions().stream() SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType())) if (dataModels.size() == 1) {
.filter(d -> d.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_START)) DataModel dataModel = dataModels.get(0);
.findFirst(); tableView = renderOne(ontologyQuery.getMetrics(), ontologyQuery.getDimensions(),
Optional<Dimension> endTimeOp = dataModel.getDimensions().stream() dataModel, scope, schema);
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
.filter(d -> d.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_END))
.findFirst();
if (startTimeOp.isPresent() && !queryDimension.contains(startTimeOp.get().getName())) {
queryDimension.add(startTimeOp.get().getName());
}
if (endTimeOp.isPresent() && !queryDimension.contains(endTimeOp.get().getName())) {
queryDimension.add(endTimeOp.get().getName());
}
} else { } else {
Optional<Dimension> timeOp = dataModel.getDimensions().stream() JoinRender joinRender = new JoinRender();
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType())) joinRender.render(ontologyQuery, dataModels, scope, schema, nonAgg);
.findFirst(); tableView = joinRender.getTableView();
if (timeOp.isPresent() && !queryDimension.contains(timeOp.get().getName())) {
queryDimension.add(timeOp.get().getName());
}
} }
} }
public void render(OntologyQueryParam ontologyQueryParam, List<DataModel> dataModels,
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
String queryWhere = ontologyQueryParam.getWhere();
Set<String> whereFields = new HashSet<>();
List<String> fieldWhere = new ArrayList<>();
EngineType engineType = schema.getOntology().getDatabase().getType();
if (queryWhere != null && !queryWhere.isEmpty()) {
SqlNode sqlNode = SemanticNode.parse(queryWhere, scope, engineType);
FilterNode.getFilterField(sqlNode, whereFields);
fieldWhere = whereFields.stream().collect(Collectors.toList());
}
if (dataModels.size() == 1) {
DataModel dataModel = dataModels.get(0);
super.tableView = renderOne("", fieldWhere, ontologyQueryParam.getMetrics(),
ontologyQueryParam.getDimensions(), ontologyQueryParam.getWhere(), dataModel,
scope, schema, nonAgg);
return;
}
JoinRender joinRender = new JoinRender();
joinRender.render(ontologyQueryParam, dataModels, scope, schema, nonAgg);
super.tableView = joinRender.getTableView();
}
} }

View File

@@ -1,33 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;
import lombok.Builder;
import lombok.Data;
import java.util.List;
@Data
@Builder
public class DataModel {
private Long id;
private String name;
private Long modelId;
private String type;
private String sqlQuery;
private String tableQuery;
private List<Identify> identifiers;
private List<Dimension> dimensions;
private List<Measure> measures;
private String aggTime;
private Materialization.TimePartType timePartType = Materialization.TimePartType.None;
}

View File

@@ -1,54 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;
import java.util.Arrays;
public enum DataType {
ARRAY("ARRAY"),
MAP("MAP"),
JSON("JSON"),
VARCHAR("VARCHAR"),
DATE("DATE"),
BIGINT("BIGINT"),
INT("INT"),
DOUBLE("DOUBLE"),
FLOAT("FLOAT"),
DECIMAL("DECIMAL"),
UNKNOWN("unknown");
private String type;
DataType(String type) {
this.type = type;
}
public String getType() {
return type;
}
public static DataType of(String type) {
for (DataType typeEnum : DataType.values()) {
if (typeEnum.getType().equalsIgnoreCase(type)) {
return typeEnum;
}
}
return DataType.UNKNOWN;
}
public boolean isObject() {
return Arrays.asList(ARRAY, MAP, JSON).contains(this);
}
public boolean isArray() {
return ARRAY.equals(this);
}
}

View File

@@ -1,27 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;
import lombok.Builder;
import lombok.Data;
import java.util.List;
import java.util.Map;
@Data
@Builder
public class Dimension implements SemanticItem {
String name;
private String owners;
private String type;
private String expr;
private DimensionTimeTypeParams dimensionTimeTypeParams;
private DataType dataType = DataType.UNKNOWN;
private String bizName;
private List<String> defaultValues;
private Map<String, Object> ext;
@Override
public String getName() {
return name;
}
}

View File

@@ -1,11 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;
import lombok.Data;
@Data
public class DimensionTimeTypeParams {
private String isPrimary;
private String timeGranularity;
}

View File

@@ -1,20 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@AllArgsConstructor
@NoArgsConstructor
public class Identify {
public enum Type {
PRIMARY, FOREIGN
}
private String name;
// primary or foreign
private String type;
}

View File

@@ -1,26 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@AllArgsConstructor
@NoArgsConstructor
@Builder
public class Measure {
private String name;
// sum max min avg count distinct
private String agg;
private String expr;
private String constraint;
private String alias;
private String createMetric;
}

View File

@@ -1,19 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;
import lombok.Data;
import java.util.List;
@Data
public class Metric implements SemanticItem {
private String name;
private List<String> owners;
private String type;
private MetricTypeParams metricTypeParams;
@Override
public String getName() {
return name;
}
}

View File

@@ -1,15 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;
import lombok.Data;
import java.util.List;
@Data
public class MetricTypeParams {
private List<Measure> measures;
private List<Measure> metrics;
private List<Measure> fields;
private boolean isFieldMetric = false;
private String expr;
}

View File

@@ -1,28 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;
import com.tencent.supersonic.headless.core.pojo.Database;
import lombok.Data;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
@Data
public class Ontology {
private List<Metric> metrics = new ArrayList<>();
private Map<String, DataModel> dataModelMap = new HashMap<>();
private Map<String, List<Dimension>> dimensionMap = new HashMap<>();
private List<Materialization> materializationList = new ArrayList<>();
private List<JoinRelation> joinRelations;
private Database database;
public List<Dimension> getDimensions() {
return dimensionMap.values().stream().flatMap(Collection::stream)
.collect(Collectors.toList());
}
}

View File

@@ -1,20 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;
import com.google.common.collect.Sets;
import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import lombok.Data;
import java.util.List;
import java.util.Set;
@Data
public class OntologyQueryParam {
private Set<String> metrics = Sets.newHashSet();
private Set<String> dimensions = Sets.newHashSet();
private String where;
private Long limit;
private List<ColumnOrder> order;
private boolean nativeQuery = true;
private AggOption aggOption = AggOption.NATIVE;
}

View File

@@ -1,7 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;
public interface SemanticItem {
String getName();
String getType();
}

View File

@@ -1,7 +1,5 @@
package com.tencent.supersonic.headless.core.utils; package com.tencent.supersonic.headless.core.utils;
import com.tencent.supersonic.common.jsqlparser.SqlReplaceHelper;
import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.common.pojo.Aggregator; import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.common.pojo.DateConf; import com.tencent.supersonic.common.pojo.DateConf;
import com.tencent.supersonic.common.pojo.ItemDateResp; import com.tencent.supersonic.common.pojo.ItemDateResp;
@@ -10,15 +8,9 @@ import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.common.util.DateModeUtils; import com.tencent.supersonic.common.util.DateModeUtils;
import com.tencent.supersonic.common.util.SqlFilterUtils; import com.tencent.supersonic.common.util.SqlFilterUtils;
import com.tencent.supersonic.common.util.StringUtil; import com.tencent.supersonic.common.util.StringUtil;
import com.tencent.supersonic.headless.api.pojo.Measure;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.api.pojo.enums.MetricDefineType;
import com.tencent.supersonic.headless.api.pojo.request.QueryStructReq; import com.tencent.supersonic.headless.api.pojo.request.QueryStructReq;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import com.tencent.supersonic.headless.core.config.ExecutorConfig; import com.tencent.supersonic.headless.core.config.ExecutorConfig;
import com.tencent.supersonic.headless.core.pojo.StructQueryParam; import com.tencent.supersonic.headless.core.pojo.StructQuery;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.commons.lang3.tuple.ImmutablePair;
@@ -71,26 +63,25 @@ public class SqlGenerateUtils {
return selectSql; return selectSql;
} }
public String getLimit(StructQueryParam structQueryParam) { public String getLimit(StructQuery structQuery) {
if (structQueryParam != null && structQueryParam.getLimit() != null if (structQuery != null && structQuery.getLimit() != null && structQuery.getLimit() > 0) {
&& structQueryParam.getLimit() > 0) { return " limit " + structQuery.getLimit();
return " limit " + structQueryParam.getLimit();
} }
return ""; return "";
} }
public String getSelect(StructQueryParam structQueryParam) { public String getSelect(StructQuery structQuery) {
String aggStr = structQueryParam.getAggregators().stream().map(this::getSelectField) String aggStr = structQuery.getAggregators().stream().map(this::getSelectField)
.collect(Collectors.joining(",")); .collect(Collectors.joining(","));
return CollectionUtils.isEmpty(structQueryParam.getGroups()) ? aggStr return CollectionUtils.isEmpty(structQuery.getGroups()) ? aggStr
: String.join(",", structQueryParam.getGroups()) + "," + aggStr; : String.join(",", structQuery.getGroups()) + "," + aggStr;
} }
public String getSelect(StructQueryParam structQueryParam, Map<String, String> deriveMetrics) { public String getSelect(StructQuery structQuery, Map<String, String> deriveMetrics) {
String aggStr = structQueryParam.getAggregators().stream() String aggStr = structQuery.getAggregators().stream()
.map(a -> getSelectField(a, deriveMetrics)).collect(Collectors.joining(",")); .map(a -> getSelectField(a, deriveMetrics)).collect(Collectors.joining(","));
return CollectionUtils.isEmpty(structQueryParam.getGroups()) ? aggStr return CollectionUtils.isEmpty(structQuery.getGroups()) ? aggStr
: String.join(",", structQueryParam.getGroups()) + "," + aggStr; : String.join(",", structQuery.getGroups()) + "," + aggStr;
} }
public String getSelectField(final Aggregator agg) { public String getSelectField(final Aggregator agg) {
@@ -115,46 +106,46 @@ public class SqlGenerateUtils {
return deriveMetrics.get(agg.getColumn()); return deriveMetrics.get(agg.getColumn());
} }
public String getGroupBy(StructQueryParam structQueryParam) { public String getGroupBy(StructQuery structQuery) {
if (CollectionUtils.isEmpty(structQueryParam.getGroups())) { if (CollectionUtils.isEmpty(structQuery.getGroups())) {
return ""; return "";
} }
return "group by " + String.join(",", structQueryParam.getGroups()); return "group by " + String.join(",", structQuery.getGroups());
} }
public String getOrderBy(StructQueryParam structQueryParam) { public String getOrderBy(StructQuery structQuery) {
if (CollectionUtils.isEmpty(structQueryParam.getOrders())) { if (CollectionUtils.isEmpty(structQuery.getOrders())) {
return ""; return "";
} }
return "order by " + structQueryParam.getOrders().stream() return "order by " + structQuery.getOrders().stream()
.map(order -> " " + order.getColumn() + " " + order.getDirection() + " ") .map(order -> " " + order.getColumn() + " " + order.getDirection() + " ")
.collect(Collectors.joining(",")); .collect(Collectors.joining(","));
} }
public String getOrderBy(StructQueryParam structQueryParam, Map<String, String> deriveMetrics) { public String getOrderBy(StructQuery structQuery, Map<String, String> deriveMetrics) {
if (CollectionUtils.isEmpty(structQueryParam.getOrders())) { if (CollectionUtils.isEmpty(structQuery.getOrders())) {
return ""; return "";
} }
if (!structQueryParam.getOrders().stream() if (!structQuery.getOrders().stream()
.anyMatch(o -> deriveMetrics.containsKey(o.getColumn()))) { .anyMatch(o -> deriveMetrics.containsKey(o.getColumn()))) {
return getOrderBy(structQueryParam); return getOrderBy(structQuery);
} }
return "order by " + structQueryParam.getOrders().stream() return "order by " + structQuery.getOrders().stream()
.map(order -> " " + (deriveMetrics.containsKey(order.getColumn()) .map(order -> " " + (deriveMetrics.containsKey(order.getColumn())
? deriveMetrics.get(order.getColumn()) ? deriveMetrics.get(order.getColumn())
: order.getColumn()) + " " + order.getDirection() + " ") : order.getColumn()) + " " + order.getDirection() + " ")
.collect(Collectors.joining(",")); .collect(Collectors.joining(","));
} }
public String generateWhere(StructQueryParam structQueryParam, ItemDateResp itemDateResp) { public String generateWhere(StructQuery structQuery, ItemDateResp itemDateResp) {
String whereClauseFromFilter = String whereClauseFromFilter =
sqlFilterUtils.getWhereClause(structQueryParam.getDimensionFilters()); sqlFilterUtils.getWhereClause(structQuery.getDimensionFilters());
String whereFromDate = getDateWhereClause(structQueryParam.getDateInfo(), itemDateResp); String whereFromDate = getDateWhereClause(structQuery.getDateInfo(), itemDateResp);
return mergeDateWhereClause(structQueryParam, whereClauseFromFilter, whereFromDate); return mergeDateWhereClause(structQuery, whereClauseFromFilter, whereFromDate);
} }
private String mergeDateWhereClause(StructQueryParam structQueryParam, private String mergeDateWhereClause(StructQuery structQuery, String whereClauseFromFilter,
String whereClauseFromFilter, String whereFromDate) { String whereFromDate) {
if (StringUtils.isNotEmpty(whereFromDate) if (StringUtils.isNotEmpty(whereFromDate)
&& StringUtils.isNotEmpty(whereClauseFromFilter)) { && StringUtils.isNotEmpty(whereClauseFromFilter)) {
return String.format("%s AND (%s)", whereFromDate, whereClauseFromFilter); return String.format("%s AND (%s)", whereFromDate, whereClauseFromFilter);
@@ -166,7 +157,7 @@ public class SqlGenerateUtils {
return whereFromDate; return whereFromDate;
} else if (Objects.isNull(whereFromDate) && StringUtils.isEmpty(whereClauseFromFilter)) { } else if (Objects.isNull(whereFromDate) && StringUtils.isEmpty(whereClauseFromFilter)) {
log.debug("the current date information is empty, enter the date initialization logic"); log.debug("the current date information is empty, enter the date initialization logic");
return dateModeUtils.defaultRecentDateInfo(structQueryParam.getDateInfo()); return dateModeUtils.defaultRecentDateInfo(structQuery.getDateInfo());
} }
return whereClauseFromFilter; return whereClauseFromFilter;
} }
@@ -190,12 +181,12 @@ public class SqlGenerateUtils {
return dateModeUtils.getDateWhereStr(dateInfo, dateDate); return dateModeUtils.getDateWhereStr(dateInfo, dateDate);
} }
public Triple<String, String, String> getBeginEndTime(StructQueryParam structQueryParam, public Triple<String, String, String> getBeginEndTime(StructQuery structQuery,
ItemDateResp dataDate) { ItemDateResp dataDate) {
if (Objects.isNull(structQueryParam.getDateInfo())) { if (Objects.isNull(structQuery.getDateInfo())) {
return Triple.of("", "", ""); return Triple.of("", "", "");
} }
DateConf dateConf = structQueryParam.getDateInfo(); DateConf dateConf = structQuery.getDateInfo();
String dateInfo = dateModeUtils.getSysDateCol(dateConf); String dateInfo = dateModeUtils.getSysDateCol(dateConf);
if (dateInfo.isEmpty()) { if (dateInfo.isEmpty()) {
return Triple.of("", "", ""); return Triple.of("", "", "");
@@ -257,83 +248,4 @@ public class SqlGenerateUtils {
return true; return true;
} }
public String generateDerivedMetric(final List<MetricSchemaResp> metricResps,
final Set<String> allFields, final Map<String, Measure> allMeasures,
final List<DimSchemaResp> dimensionResps, final String expression,
final MetricDefineType metricDefineType, AggOption aggOption,
Map<String, String> visitedMetric, Set<String> measures, Set<String> dimensions) {
Set<String> fields = SqlSelectHelper.getColumnFromExpr(expression);
if (!CollectionUtils.isEmpty(fields)) {
Map<String, String> replace = new HashMap<>();
for (String field : fields) {
switch (metricDefineType) {
case METRIC:
Optional<MetricSchemaResp> metricItem = metricResps.stream()
.filter(m -> m.getBizName().equalsIgnoreCase(field)).findFirst();
if (metricItem.isPresent()) {
if (visitedMetric.keySet().contains(field)) {
replace.put(field, visitedMetric.get(field));
break;
}
replace.put(field,
generateDerivedMetric(metricResps, allFields, allMeasures,
dimensionResps, getExpr(metricItem.get()),
metricItem.get().getMetricDefineType(), aggOption,
visitedMetric, measures, dimensions));
visitedMetric.put(field, replace.get(field));
}
break;
case MEASURE:
if (allMeasures.containsKey(field)) {
measures.add(field);
replace.put(field, getExpr(allMeasures.get(field), aggOption));
}
break;
case FIELD:
if (allFields.contains(field)) {
Optional<DimSchemaResp> dimensionItem = dimensionResps.stream()
.filter(d -> d.getBizName().equals(field)).findFirst();
if (dimensionItem.isPresent()) {
dimensions.add(field);
} else {
measures.add(field);
}
}
break;
default:
break;
}
}
if (!CollectionUtils.isEmpty(replace)) {
String expr = SqlReplaceHelper.replaceExpression(expression, replace);
log.debug("derived measure {}->{}", expression, expr);
return expr;
}
}
return expression;
}
public String getExpr(Measure measure, AggOption aggOption) {
if (AggOperatorEnum.COUNT_DISTINCT.getOperator().equalsIgnoreCase(measure.getAgg())) {
return AggOption.NATIVE.equals(aggOption) ? measure.getBizName()
: AggOperatorEnum.COUNT.getOperator() + " ( " + AggOperatorEnum.DISTINCT + " "
+ measure.getBizName() + " ) ";
}
return AggOption.NATIVE.equals(aggOption) ? measure.getBizName()
: measure.getAgg() + " ( " + measure.getBizName() + " ) ";
}
public String getExpr(MetricResp metricResp) {
if (Objects.isNull(metricResp.getMetricDefineType())) {
return metricResp.getMetricDefineByMeasureParams().getExpr();
}
if (metricResp.getMetricDefineType().equals(MetricDefineType.METRIC)) {
return metricResp.getMetricDefineByMetricParams().getExpr();
}
if (metricResp.getMetricDefineType().equals(MetricDefineType.FIELD)) {
return metricResp.getMetricDefineByFieldParams().getExpr();
}
// measure add agg function
return metricResp.getMetricDefineByMeasureParams().getExpr();
}
} }

View File

@@ -20,8 +20,8 @@ import com.tencent.supersonic.headless.chat.knowledge.helper.NatureHelper;
import com.tencent.supersonic.headless.core.cache.QueryCache; import com.tencent.supersonic.headless.core.cache.QueryCache;
import com.tencent.supersonic.headless.core.executor.QueryExecutor; import com.tencent.supersonic.headless.core.executor.QueryExecutor;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQueryParam; import com.tencent.supersonic.headless.core.pojo.SqlQuery;
import com.tencent.supersonic.headless.core.pojo.StructQueryParam; import com.tencent.supersonic.headless.core.pojo.StructQuery;
import com.tencent.supersonic.headless.core.translator.SemanticTranslator; import com.tencent.supersonic.headless.core.translator.SemanticTranslator;
import com.tencent.supersonic.headless.core.utils.ComponentFactory; import com.tencent.supersonic.headless.core.utils.ComponentFactory;
import com.tencent.supersonic.headless.server.annotation.S2DataPermission; import com.tencent.supersonic.headless.server.annotation.S2DataPermission;
@@ -129,8 +129,7 @@ public class S2SemanticLayerService implements SemanticLayerService {
for (QueryExecutor queryExecutor : queryExecutors) { for (QueryExecutor queryExecutor : queryExecutors) {
if (queryExecutor.accept(queryStatement)) { if (queryExecutor.accept(queryStatement)) {
queryResp = queryExecutor.execute(queryStatement); queryResp = queryExecutor.execute(queryStatement);
queryUtils.populateQueryColumns(queryResp, queryUtils.populateQueryColumns(queryResp, queryStatement.getSemanticSchema());
queryStatement.getSemanticSchemaResp());
} }
} }
@@ -287,9 +286,10 @@ public class S2SemanticLayerService implements SemanticLayerService {
if (Objects.nonNull(queryStatement) && Objects.nonNull(semanticQueryReq.getSqlInfo()) if (Objects.nonNull(queryStatement) && Objects.nonNull(semanticQueryReq.getSqlInfo())
&& StringUtils.isNotBlank(semanticQueryReq.getSqlInfo().getQuerySQL())) { && StringUtils.isNotBlank(semanticQueryReq.getSqlInfo().getQuerySQL())) {
queryStatement.setSql(semanticQueryReq.getSqlInfo().getQuerySQL()); queryStatement.setSql(semanticQueryReq.getSqlInfo().getQuerySQL());
queryStatement.setDataSetId(semanticQueryReq.getDataSetId());
queryStatement.setIsTranslated(true); queryStatement.setIsTranslated(true);
} }
queryStatement.setDataSetId(semanticQueryReq.getDataSetId());
queryStatement.setDataSetName(semanticQueryReq.getDataSetName());
return queryStatement; return queryStatement;
} }
@@ -302,7 +302,8 @@ public class S2SemanticLayerService implements SemanticLayerService {
QueryStatement queryStatement = new QueryStatement(); QueryStatement queryStatement = new QueryStatement();
queryStatement.setEnableOptimize(queryUtils.enableOptimize()); queryStatement.setEnableOptimize(queryUtils.enableOptimize());
queryStatement.setDataSetId(queryReq.getDataSetId()); queryStatement.setDataSetId(queryReq.getDataSetId());
queryStatement.setSemanticSchemaResp(semanticSchemaResp); queryStatement.setDataSetName(queryReq.getDataSetName());
queryStatement.setSemanticSchema(semanticSchemaResp);
queryStatement.setOntology(semanticSchemaManager.buildOntology(semanticSchemaResp)); queryStatement.setOntology(semanticSchemaManager.buildOntology(semanticSchemaResp));
return queryStatement; return queryStatement;
} }
@@ -311,9 +312,9 @@ public class S2SemanticLayerService implements SemanticLayerService {
QueryStatement queryStatement = buildQueryStatement(querySqlReq); QueryStatement queryStatement = buildQueryStatement(querySqlReq);
queryStatement.setIsS2SQL(true); queryStatement.setIsS2SQL(true);
SqlQueryParam sqlQueryParam = new SqlQueryParam(); SqlQuery sqlQuery = new SqlQuery();
sqlQueryParam.setSql(querySqlReq.getSql()); sqlQuery.setSql(querySqlReq.getSql());
queryStatement.setSqlQueryParam(sqlQueryParam); queryStatement.setSqlQuery(sqlQuery);
// If dataSetId or DataSetName is empty, parse dataSetId from the SQL // If dataSetId or DataSetName is empty, parse dataSetId from the SQL
if (querySqlReq.needGetDataSetId()) { if (querySqlReq.needGetDataSetId()) {
@@ -325,9 +326,9 @@ public class S2SemanticLayerService implements SemanticLayerService {
private QueryStatement buildStructQueryStatement(SemanticQueryReq queryReq) { private QueryStatement buildStructQueryStatement(SemanticQueryReq queryReq) {
QueryStatement queryStatement = buildQueryStatement(queryReq); QueryStatement queryStatement = buildQueryStatement(queryReq);
StructQueryParam structQueryParam = new StructQueryParam(); StructQuery structQuery = new StructQuery();
BeanUtils.copyProperties(queryReq, structQueryParam); BeanUtils.copyProperties(queryReq, structQuery);
queryStatement.setStructQueryParam(structQueryParam); queryStatement.setStructQuery(structQuery);
queryStatement.setIsS2SQL(false); queryStatement.setIsS2SQL(false);
return queryStatement; return queryStatement;
} }

View File

@@ -1,19 +1,10 @@
package com.tencent.supersonic.headless.server.manager; package com.tencent.supersonic.headless.server.manager;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.tencent.supersonic.headless.api.pojo.FieldParam; import com.tencent.supersonic.headless.api.pojo.*;
import com.tencent.supersonic.headless.api.pojo.MeasureParam;
import com.tencent.supersonic.headless.api.pojo.MetricDefineByFieldParams;
import com.tencent.supersonic.headless.api.pojo.MetricDefineByMeasureParams;
import com.tencent.supersonic.headless.api.pojo.MetricDefineByMetricParams;
import com.tencent.supersonic.headless.api.pojo.MetricParam;
import com.tencent.supersonic.headless.api.pojo.enums.MetricDefineType; import com.tencent.supersonic.headless.api.pojo.enums.MetricDefineType;
import com.tencent.supersonic.headless.api.pojo.response.MetricResp; import com.tencent.supersonic.headless.api.pojo.response.MetricResp;
import com.tencent.supersonic.headless.server.pojo.yaml.FieldParamYamlTpl; import com.tencent.supersonic.headless.server.pojo.yaml.*;
import com.tencent.supersonic.headless.server.pojo.yaml.MeasureYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.MetricParamYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.MetricTypeParamsYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.MetricYamlTpl;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.BeanUtils; import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
@@ -47,7 +38,7 @@ public class MetricYamlManager {
MetricDefineByMeasureParams metricDefineParams = MetricDefineByMeasureParams metricDefineParams =
metric.getMetricDefineByMeasureParams(); metric.getMetricDefineByMeasureParams();
metricTypeParamsYamlTpl.setExpr(metricDefineParams.getExpr()); metricTypeParamsYamlTpl.setExpr(metricDefineParams.getExpr());
List<MeasureParam> measures = metricDefineParams.getMeasures(); List<Measure> measures = metricDefineParams.getMeasures();
metricTypeParamsYamlTpl.setMeasures( metricTypeParamsYamlTpl.setMeasures(
measures.stream().map(MetricYamlManager::convert).collect(Collectors.toList())); measures.stream().map(MetricYamlManager::convert).collect(Collectors.toList()));
} else if (MetricDefineType.FIELD.equals(metric.getMetricDefineType())) { } else if (MetricDefineType.FIELD.equals(metric.getMetricDefineType())) {
@@ -68,7 +59,7 @@ public class MetricYamlManager {
return metricYamlTpl; return metricYamlTpl;
} }
public static MeasureYamlTpl convert(MeasureParam measure) { public static MeasureYamlTpl convert(Measure measure) {
MeasureYamlTpl measureYamlTpl = new MeasureYamlTpl(); MeasureYamlTpl measureYamlTpl = new MeasureYamlTpl();
measureYamlTpl.setName(measure.getBizName()); measureYamlTpl.setName(measure.getBizName());
measureYamlTpl.setConstraint(measure.getConstraint()); measureYamlTpl.setConstraint(measure.getConstraint());

View File

@@ -1,9 +1,6 @@
package com.tencent.supersonic.headless.server.manager; package com.tencent.supersonic.headless.server.manager;
import com.tencent.supersonic.headless.api.pojo.Dimension; import com.tencent.supersonic.headless.api.pojo.*;
import com.tencent.supersonic.headless.api.pojo.Identify;
import com.tencent.supersonic.headless.api.pojo.Measure;
import com.tencent.supersonic.headless.api.pojo.ModelDetail;
import com.tencent.supersonic.headless.api.pojo.enums.ModelDefineType; import com.tencent.supersonic.headless.api.pojo.enums.ModelDefineType;
import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp; import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
import com.tencent.supersonic.headless.api.pojo.response.ModelResp; import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
@@ -52,8 +49,7 @@ public class ModelYamlManager {
dimensionYamlTpl.setExpr(dim.getBizName()); dimensionYamlTpl.setExpr(dim.getBizName());
} }
if (dim.getTypeParams() != null) { if (dim.getTypeParams() != null) {
DimensionTimeTypeParamsTpl dimensionTimeTypeParamsTpl = DimensionTimeTypeParams dimensionTimeTypeParamsTpl = new DimensionTimeTypeParams();
new DimensionTimeTypeParamsTpl();
dimensionTimeTypeParamsTpl.setIsPrimary(dim.getTypeParams().getIsPrimary()); dimensionTimeTypeParamsTpl.setIsPrimary(dim.getTypeParams().getIsPrimary());
dimensionTimeTypeParamsTpl.setTimeGranularity(dim.getTypeParams().getTimeGranularity()); dimensionTimeTypeParamsTpl.setTimeGranularity(dim.getTypeParams().getTimeGranularity());
dimensionYamlTpl.setTypeParams(dimensionTimeTypeParamsTpl); dimensionYamlTpl.setTypeParams(dimensionTimeTypeParamsTpl);

View File

@@ -1,9 +1,19 @@
package com.tencent.supersonic.headless.server.manager; package com.tencent.supersonic.headless.server.manager;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.tencent.supersonic.common.pojo.ModelRela; import com.tencent.supersonic.common.pojo.ModelRela;
import com.tencent.supersonic.common.pojo.enums.DataTypeEnums;
import com.tencent.supersonic.common.pojo.enums.FilterOperatorEnum; import com.tencent.supersonic.common.pojo.enums.FilterOperatorEnum;
import com.tencent.supersonic.headless.api.pojo.*;
import com.tencent.supersonic.headless.api.pojo.enums.MetricDefineType;
import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp; import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp; import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.core.pojo.DataModel;
import com.tencent.supersonic.headless.core.pojo.JoinRelation;
import com.tencent.supersonic.headless.core.pojo.Ontology;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema; import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.*; import com.tencent.supersonic.headless.core.translator.parser.s2sql.*;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Materialization.TimePartType; import com.tencent.supersonic.headless.core.translator.parser.s2sql.Materialization.TimePartType;
@@ -31,6 +41,16 @@ public class SemanticSchemaManager {
public Ontology buildOntology(SemanticSchemaResp semanticSchemaResp) { public Ontology buildOntology(SemanticSchemaResp semanticSchemaResp) {
Ontology ontology = new Ontology(); Ontology ontology = new Ontology();
ontology.setMetrics(semanticSchemaResp.getMetrics());
Map<String, List<DimSchemaResp>> model2Dimensions = Maps.newHashMap();
semanticSchemaResp.getDimensions().forEach(dim -> {
if (!model2Dimensions.containsKey(dim.getModelBizName())) {
model2Dimensions.put(dim.getModelBizName(), Lists.newArrayList());
}
model2Dimensions.get(dim.getModelBizName()).add(dim);
});
ontology.setDimensionMap(model2Dimensions);
Map<String, List<DimensionYamlTpl>> dimensionYamlTpls = new HashMap<>(); Map<String, List<DimensionYamlTpl>> dimensionYamlTpls = new HashMap<>();
List<DataModelYamlTpl> dataModelYamlTpls = new ArrayList<>(); List<DataModelYamlTpl> dataModelYamlTpls = new ArrayList<>();
List<MetricYamlTpl> metricYamlTpls = new ArrayList<>(); List<MetricYamlTpl> metricYamlTpls = new ArrayList<>();
@@ -49,24 +69,15 @@ public class SemanticSchemaManager {
Collectors.toMap(DataModel::getName, item -> item, (k1, k2) -> k1)); Collectors.toMap(DataModel::getName, item -> item, (k1, k2) -> k1));
ontology.setDataModelMap(dataModelMap); ontology.setDataModelMap(dataModelMap);
} }
if (!dimensionYamlTpls.isEmpty()) {
Map<String, List<Dimension>> dimensionMap = new HashMap<>();
for (Map.Entry<String, List<DimensionYamlTpl>> entry : dimensionYamlTpls.entrySet()) {
dimensionMap.put(entry.getKey(), getDimensions(entry.getValue()));
}
ontology.setDimensionMap(dimensionMap);
}
if (!metricYamlTpls.isEmpty()) {
ontology.setMetrics(getMetrics(metricYamlTpls));
}
return ontology; return ontology;
} }
public static List<Metric> getMetrics(final List<MetricYamlTpl> t) { public static List<MetricSchemaResp> getMetrics(final List<MetricYamlTpl> t) {
return getMetricsByMetricYamlTpl(t); return getMetricsByMetricYamlTpl(t);
} }
public static List<Dimension> getDimensions(final List<DimensionYamlTpl> t) { public static List<DimSchemaResp> getDimensions(final List<DimensionYamlTpl> t) {
return getDimension(t); return getDimension(t);
} }
@@ -83,23 +94,21 @@ public class SemanticSchemaManager {
return dataModel; return dataModel;
} }
private static String getDataModelAggTime(List<Dimension> dimensions) { private static String getDataModelAggTime(List<DimSchemaResp> dimensions) {
Optional<Dimension> timeDimension = dimensions.stream() Optional<DimSchemaResp> timeDimension =
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType())) dimensions.stream().filter(DimSchemaResp::isTimeDimension).findFirst();
.findFirst(); if (timeDimension.isPresent() && Objects.nonNull(timeDimension.get().getTypeParams())) {
if (timeDimension.isPresent() return timeDimension.get().getTypeParams().getTimeGranularity();
&& Objects.nonNull(timeDimension.get().getDimensionTimeTypeParams())) {
return timeDimension.get().getDimensionTimeTypeParams().getTimeGranularity();
} }
return Constants.DIMENSION_TYPE_TIME_GRANULARITY_NONE; return Constants.DIMENSION_TYPE_TIME_GRANULARITY_NONE;
} }
private static List<Metric> getMetricsByMetricYamlTpl(List<MetricYamlTpl> metricYamlTpls) { private static List<MetricSchemaResp> getMetricsByMetricYamlTpl(
List<Metric> metrics = new ArrayList<>(); List<MetricYamlTpl> metricYamlTpls) {
List<MetricSchemaResp> metrics = new ArrayList<>();
for (MetricYamlTpl metricYamlTpl : metricYamlTpls) { for (MetricYamlTpl metricYamlTpl : metricYamlTpls) {
Metric metric = new Metric(); MetricSchemaResp metric = new MetricSchemaResp();
metric.setMetricTypeParams(getMetricTypeParams(metricYamlTpl.getTypeParams())); fillMetricTypeParams(metric, metricYamlTpl.getTypeParams());
metric.setOwners(metricYamlTpl.getOwners());
metric.setType(metricYamlTpl.getType()); metric.setType(metricYamlTpl.getType());
metric.setName(metricYamlTpl.getName()); metric.setName(metricYamlTpl.getName());
metrics.add(metric); metrics.add(metric);
@@ -107,55 +116,50 @@ public class SemanticSchemaManager {
return metrics; return metrics;
} }
private static MetricTypeParams getMetricTypeParams( private static void fillMetricTypeParams(MetricSchemaResp metric,
MetricTypeParamsYamlTpl metricTypeParamsYamlTpl) { MetricTypeParamsYamlTpl metricTypeParamsYamlTpl) {
MetricTypeParams metricTypeParams = new MetricTypeParams();
metricTypeParams.setExpr(metricTypeParamsYamlTpl.getExpr());
metricTypeParams.setFieldMetric(false);
if (!CollectionUtils.isEmpty(metricTypeParamsYamlTpl.getMeasures())) { if (!CollectionUtils.isEmpty(metricTypeParamsYamlTpl.getMeasures())) {
metricTypeParams.setMeasures(getMeasureParams(metricTypeParamsYamlTpl.getMeasures())); MetricDefineByMeasureParams params = new MetricDefineByMeasureParams();
params.setMeasures(getMeasureParams(metricTypeParamsYamlTpl.getMeasures()));
metric.setMetricDefinition(MetricDefineType.MEASURE, params);
} else if (!CollectionUtils.isEmpty(metricTypeParamsYamlTpl.getMetrics())) {
MetricDefineByMetricParams params = new MetricDefineByMetricParams();
params.setMetrics(getMetricParams(metricTypeParamsYamlTpl.getMetrics()));
params.setExpr(metricTypeParamsYamlTpl.getExpr());
metric.setMetricDefinition(MetricDefineType.METRIC, params);
} else if (!CollectionUtils.isEmpty(metricTypeParamsYamlTpl.getFields())) {
MetricDefineByFieldParams params = new MetricDefineByFieldParams();
params.setExpr(metricTypeParamsYamlTpl.getExpr());
params.setFields(getFieldParams(metricTypeParamsYamlTpl.getFields()));
metric.setMetricDefinition(MetricDefineType.FIELD, params);
} }
if (!CollectionUtils.isEmpty(metricTypeParamsYamlTpl.getMetrics())) {
metricTypeParams.setMeasures(getMetricParams(metricTypeParamsYamlTpl.getMetrics()));
metricTypeParams.setExpr(metricTypeParams.getMeasures().get(0).getExpr());
metricTypeParams.setFieldMetric(true);
}
if (!CollectionUtils.isEmpty(metricTypeParamsYamlTpl.getFields())) {
metricTypeParams.setMeasures(getFieldParams(metricTypeParamsYamlTpl.getFields()));
metricTypeParams.setExpr(metricTypeParams.getMeasures().get(0).getExpr());
metricTypeParams.setFieldMetric(true);
}
return metricTypeParams;
} }
private static List<Measure> getFieldParams(List<FieldParamYamlTpl> fieldParamYamlTpls) { private static List<FieldParam> getFieldParams(List<FieldParamYamlTpl> fieldParamYamlTpls) {
List<Measure> measures = new ArrayList<>(); List<FieldParam> fields = new ArrayList<>();
for (FieldParamYamlTpl fieldParamYamlTpl : fieldParamYamlTpls) { for (FieldParamYamlTpl fieldParamYamlTpl : fieldParamYamlTpls) {
Measure measure = new Measure(); FieldParam field = new FieldParam();
measure.setName(fieldParamYamlTpl.getFieldName()); field.setFieldName(fieldParamYamlTpl.getFieldName());
measure.setExpr(fieldParamYamlTpl.getFieldName()); fields.add(field);
measures.add(measure);
} }
return measures; return fields;
} }
private static List<Measure> getMetricParams(List<MetricParamYamlTpl> metricParamYamlTpls) { private static List<MetricParam> getMetricParams(List<MetricParamYamlTpl> metricParamYamlTpls) {
List<Measure> measures = new ArrayList<>(); List<MetricParam> metrics = new ArrayList<>();
for (MetricParamYamlTpl metricParamYamlTpl : metricParamYamlTpls) { for (MetricParamYamlTpl metricParamYamlTpl : metricParamYamlTpls) {
Measure measure = new Measure(); MetricParam metric = new MetricParam();
measure.setName(metricParamYamlTpl.getBizName()); metric.setBizName(metricParamYamlTpl.getBizName());
measure.setExpr(metricParamYamlTpl.getBizName()); metric.setId(metricParamYamlTpl.getId());
measures.add(measure); metrics.add(metric);
} }
return measures; return metrics;
} }
private static List<Measure> getMeasureParams(List<MeasureYamlTpl> measureYamlTpls) { private static List<Measure> getMeasureParams(List<MeasureYamlTpl> measureYamlTpls) {
List<Measure> measures = new ArrayList<>(); List<Measure> measures = new ArrayList<>();
for (MeasureYamlTpl measureYamlTpl : measureYamlTpls) { for (MeasureYamlTpl measureYamlTpl : measureYamlTpls) {
Measure measure = new Measure(); Measure measure = new Measure();
measure.setCreateMetric(measureYamlTpl.getCreateMetric());
measure.setExpr(measureYamlTpl.getExpr()); measure.setExpr(measureYamlTpl.getExpr());
measure.setAgg(measureYamlTpl.getAgg()); measure.setAgg(measureYamlTpl.getAgg());
measure.setName(measureYamlTpl.getName()); measure.setName(measureYamlTpl.getName());
@@ -166,34 +170,32 @@ public class SemanticSchemaManager {
return measures; return measures;
} }
private static List<Dimension> getDimension(List<DimensionYamlTpl> dimensionYamlTpls) { private static List<DimSchemaResp> getDimension(List<DimensionYamlTpl> dimensionYamlTpls) {
List<Dimension> dimensions = new ArrayList<>(); List<DimSchemaResp> dimensions = new ArrayList<>();
for (DimensionYamlTpl dimensionYamlTpl : dimensionYamlTpls) { for (DimensionYamlTpl dimensionYamlTpl : dimensionYamlTpls) {
Dimension dimension = Dimension.builder().build(); DimSchemaResp dimension = new DimSchemaResp();
dimension.setType(dimensionYamlTpl.getType()); // dimension.setType(dimensionYamlTpl.getType());
dimension.setExpr(dimensionYamlTpl.getExpr()); dimension.setExpr(dimensionYamlTpl.getExpr());
dimension.setName(dimensionYamlTpl.getName()); dimension.setName(dimensionYamlTpl.getName());
dimension.setOwners(dimensionYamlTpl.getOwners());
dimension.setBizName(dimensionYamlTpl.getBizName()); dimension.setBizName(dimensionYamlTpl.getBizName());
dimension.setDefaultValues(dimensionYamlTpl.getDefaultValues()); dimension.setDefaultValues(dimensionYamlTpl.getDefaultValues());
if (Objects.nonNull(dimensionYamlTpl.getDataType())) { if (Objects.nonNull(dimensionYamlTpl.getDataType())) {
dimension.setDataType(DataType.of(dimensionYamlTpl.getDataType().getType())); dimension.setDataType(dimensionYamlTpl.getDataType());
} }
if (Objects.isNull(dimension.getDataType())) { if (Objects.isNull(dimension.getDataType())) {
dimension.setDataType(DataType.UNKNOWN); dimension.setDataType(DataTypeEnums.UNKNOWN);
} }
if (Objects.nonNull(dimensionYamlTpl.getExt())) { if (Objects.nonNull(dimensionYamlTpl.getExt())) {
dimension.setExt(dimensionYamlTpl.getExt()); dimension.setExt(dimensionYamlTpl.getExt());
} }
dimension.setDimensionTimeTypeParams( dimension.setTypeParams(dimensionYamlTpl.getTypeParams());
getDimensionTimeTypeParams(dimensionYamlTpl.getTypeParams()));
dimensions.add(dimension); dimensions.add(dimension);
} }
return dimensions; return dimensions;
} }
private static DimensionTimeTypeParams getDimensionTimeTypeParams( private static DimensionTimeTypeParams getDimensionTimeTypeParams(
DimensionTimeTypeParamsTpl dimensionTimeTypeParamsTpl) { DimensionTimeTypeParams dimensionTimeTypeParamsTpl) {
DimensionTimeTypeParams dimensionTimeTypeParams = new DimensionTimeTypeParams(); DimensionTimeTypeParams dimensionTimeTypeParams = new DimensionTimeTypeParams();
if (dimensionTimeTypeParamsTpl != null) { if (dimensionTimeTypeParamsTpl != null) {
dimensionTimeTypeParams dimensionTimeTypeParams
@@ -238,7 +240,8 @@ public class SemanticSchemaManager {
return joinRelations; return joinRelations;
} }
public static void update(S2CalciteSchema schema, List<Metric> metric) throws Exception { public static void update(S2CalciteSchema schema, List<MetricSchemaResp> metric)
throws Exception {
if (schema != null) { if (schema != null) {
updateMetric(metric, schema.getMetrics()); updateMetric(metric, schema.getMetrics());
} }
@@ -260,31 +263,31 @@ public class SemanticSchemaManager {
} }
public static void update(S2CalciteSchema schema, String datasourceBizName, public static void update(S2CalciteSchema schema, String datasourceBizName,
List<Dimension> dimensionYamlTpls) throws Exception { List<DimSchemaResp> dimensionYamlTpls) throws Exception {
if (schema != null) { if (schema != null) {
Optional<Map.Entry<String, List<Dimension>>> datasourceYamlTplMap = schema Optional<Map.Entry<String, List<DimSchemaResp>>> datasourceYamlTplMap = schema
.getDimensions().entrySet().stream() .getDimensions().entrySet().stream()
.filter(t -> t.getKey().equalsIgnoreCase(datasourceBizName)).findFirst(); .filter(t -> t.getKey().equalsIgnoreCase(datasourceBizName)).findFirst();
if (datasourceYamlTplMap.isPresent()) { if (datasourceYamlTplMap.isPresent()) {
updateDimension(dimensionYamlTpls, datasourceYamlTplMap.get().getValue()); updateDimension(dimensionYamlTpls, datasourceYamlTplMap.get().getValue());
} else { } else {
List<Dimension> dimensions = new ArrayList<>(); List<DimSchemaResp> dimensions = new ArrayList<>();
updateDimension(dimensionYamlTpls, dimensions); updateDimension(dimensionYamlTpls, dimensions);
schema.getDimensions().put(datasourceBizName, dimensions); schema.getDimensions().put(datasourceBizName, dimensions);
} }
} }
} }
private static void updateDimension(List<Dimension> dimensionYamlTpls, private static void updateDimension(List<DimSchemaResp> dimensionYamlTpls,
List<Dimension> dimensions) { List<DimSchemaResp> dimensions) {
if (CollectionUtils.isEmpty(dimensionYamlTpls)) { if (CollectionUtils.isEmpty(dimensionYamlTpls)) {
return; return;
} }
Set<String> toAdd = Set<String> toAdd =
dimensionYamlTpls.stream().map(m -> m.getName()).collect(Collectors.toSet()); dimensionYamlTpls.stream().map(m -> m.getName()).collect(Collectors.toSet());
Iterator<Dimension> iterator = dimensions.iterator(); Iterator<DimSchemaResp> iterator = dimensions.iterator();
while (iterator.hasNext()) { while (iterator.hasNext()) {
Dimension cur = iterator.next(); DimSchemaResp cur = iterator.next();
if (toAdd.contains(cur.getName())) { if (toAdd.contains(cur.getName())) {
iterator.remove(); iterator.remove();
} }
@@ -292,15 +295,16 @@ public class SemanticSchemaManager {
dimensions.addAll(dimensionYamlTpls); dimensions.addAll(dimensionYamlTpls);
} }
private static void updateMetric(List<Metric> metricYamlTpls, List<Metric> metrics) { private static void updateMetric(List<MetricSchemaResp> metricYamlTpls,
List<MetricSchemaResp> metrics) {
if (CollectionUtils.isEmpty(metricYamlTpls)) { if (CollectionUtils.isEmpty(metricYamlTpls)) {
return; return;
} }
Set<String> toAdd = Set<String> toAdd =
metricYamlTpls.stream().map(m -> m.getName()).collect(Collectors.toSet()); metricYamlTpls.stream().map(m -> m.getName()).collect(Collectors.toSet());
Iterator<Metric> iterator = metrics.iterator(); Iterator<MetricSchemaResp> iterator = metrics.iterator();
while (iterator.hasNext()) { while (iterator.hasNext()) {
Metric cur = iterator.next(); MetricSchemaResp cur = iterator.next();
if (toAdd.contains(cur.getName())) { if (toAdd.contains(cur.getName())) {
iterator.remove(); iterator.remove();
} }

View File

@@ -1,11 +0,0 @@
package com.tencent.supersonic.headless.server.pojo.yaml;
import lombok.Data;
@Data
public class DimensionTimeTypeParamsTpl {
private String isPrimary;
private String timeGranularity;
}

View File

@@ -1,6 +1,7 @@
package com.tencent.supersonic.headless.server.pojo.yaml; package com.tencent.supersonic.headless.server.pojo.yaml;
import com.tencent.supersonic.common.pojo.enums.DataTypeEnums; import com.tencent.supersonic.common.pojo.enums.DataTypeEnums;
import com.tencent.supersonic.headless.api.pojo.DimensionTimeTypeParams;
import lombok.Data; import lombok.Data;
import java.util.List; import java.util.List;
@@ -19,7 +20,7 @@ public class DimensionYamlTpl {
private String expr; private String expr;
private DimensionTimeTypeParamsTpl typeParams; private DimensionTimeTypeParams typeParams;
private DataTypeEnums dataType; private DataTypeEnums dataType;

View File

@@ -8,58 +8,22 @@ import com.github.pagehelper.PageInfo;
import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.google.common.collect.Sets; import com.google.common.collect.Sets;
import com.tencent.supersonic.common.pojo.Aggregator; import com.tencent.supersonic.common.pojo.*;
import com.tencent.supersonic.common.pojo.DataEvent; import com.tencent.supersonic.common.pojo.enums.*;
import com.tencent.supersonic.common.pojo.DataItem;
import com.tencent.supersonic.common.pojo.DateConf;
import com.tencent.supersonic.common.pojo.Filter;
import com.tencent.supersonic.common.pojo.User;
import com.tencent.supersonic.common.pojo.enums.AuthType;
import com.tencent.supersonic.common.pojo.enums.EventType;
import com.tencent.supersonic.common.pojo.enums.QueryType;
import com.tencent.supersonic.common.pojo.enums.StatusEnum;
import com.tencent.supersonic.common.pojo.enums.TypeEnums;
import com.tencent.supersonic.common.util.BeanMapper; import com.tencent.supersonic.common.util.BeanMapper;
import com.tencent.supersonic.headless.api.pojo.DrillDownDimension; import com.tencent.supersonic.headless.api.pojo.*;
import com.tencent.supersonic.headless.api.pojo.Measure;
import com.tencent.supersonic.headless.api.pojo.MeasureParam;
import com.tencent.supersonic.headless.api.pojo.MetaFilter;
import com.tencent.supersonic.headless.api.pojo.MetricParam;
import com.tencent.supersonic.headless.api.pojo.MetricQueryDefaultConfig;
import com.tencent.supersonic.headless.api.pojo.SchemaElementMatch;
import com.tencent.supersonic.headless.api.pojo.SchemaElementType;
import com.tencent.supersonic.headless.api.pojo.SchemaItem;
import com.tencent.supersonic.headless.api.pojo.enums.MapModeEnum; import com.tencent.supersonic.headless.api.pojo.enums.MapModeEnum;
import com.tencent.supersonic.headless.api.pojo.enums.MetricDefineType; import com.tencent.supersonic.headless.api.pojo.enums.MetricDefineType;
import com.tencent.supersonic.headless.api.pojo.request.MetaBatchReq; import com.tencent.supersonic.headless.api.pojo.request.*;
import com.tencent.supersonic.headless.api.pojo.request.MetricBaseReq; import com.tencent.supersonic.headless.api.pojo.response.*;
import com.tencent.supersonic.headless.api.pojo.request.MetricReq;
import com.tencent.supersonic.headless.api.pojo.request.PageMetricReq;
import com.tencent.supersonic.headless.api.pojo.request.QueryMapReq;
import com.tencent.supersonic.headless.api.pojo.request.QueryMetricReq;
import com.tencent.supersonic.headless.api.pojo.request.QueryStructReq;
import com.tencent.supersonic.headless.api.pojo.response.DataSetMapInfo;
import com.tencent.supersonic.headless.api.pojo.response.DataSetResp;
import com.tencent.supersonic.headless.api.pojo.response.DimensionResp;
import com.tencent.supersonic.headless.api.pojo.response.MapInfoResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricResp;
import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
import com.tencent.supersonic.headless.server.facade.service.ChatLayerService; import com.tencent.supersonic.headless.server.facade.service.ChatLayerService;
import com.tencent.supersonic.headless.server.persistence.dataobject.CollectDO; import com.tencent.supersonic.headless.server.persistence.dataobject.CollectDO;
import com.tencent.supersonic.headless.server.persistence.dataobject.MetricDO; import com.tencent.supersonic.headless.server.persistence.dataobject.MetricDO;
import com.tencent.supersonic.headless.server.persistence.dataobject.MetricQueryDefaultConfigDO; import com.tencent.supersonic.headless.server.persistence.dataobject.MetricQueryDefaultConfigDO;
import com.tencent.supersonic.headless.server.persistence.mapper.MetricDOMapper; import com.tencent.supersonic.headless.server.persistence.mapper.MetricDOMapper;
import com.tencent.supersonic.headless.server.persistence.repository.MetricRepository; import com.tencent.supersonic.headless.server.persistence.repository.MetricRepository;
import com.tencent.supersonic.headless.server.pojo.DimensionsFilter; import com.tencent.supersonic.headless.server.pojo.*;
import com.tencent.supersonic.headless.server.pojo.MetricFilter; import com.tencent.supersonic.headless.server.service.*;
import com.tencent.supersonic.headless.server.pojo.MetricsFilter;
import com.tencent.supersonic.headless.server.pojo.ModelCluster;
import com.tencent.supersonic.headless.server.pojo.ModelFilter;
import com.tencent.supersonic.headless.server.service.CollectService;
import com.tencent.supersonic.headless.server.service.DataSetService;
import com.tencent.supersonic.headless.server.service.DimensionService;
import com.tencent.supersonic.headless.server.service.MetricService;
import com.tencent.supersonic.headless.server.service.ModelService;
import com.tencent.supersonic.headless.server.utils.AliasGenerateHelper; import com.tencent.supersonic.headless.server.utils.AliasGenerateHelper;
import com.tencent.supersonic.headless.server.utils.MetricCheckUtils; import com.tencent.supersonic.headless.server.utils.MetricCheckUtils;
import com.tencent.supersonic.headless.server.utils.MetricConverter; import com.tencent.supersonic.headless.server.utils.MetricConverter;
@@ -72,18 +36,7 @@ import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils; import org.springframework.util.CollectionUtils;
import java.util.ArrayList; import java.util.*;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors; import java.util.stream.Collectors;
@Service @Service
@@ -427,8 +380,8 @@ public class MetricServiceImpl extends ServiceImpl<MetricDOMapper, MetricDO>
return true; return true;
} }
} else if (MetricDefineType.MEASURE.equals(metricResp.getMetricDefineType())) { } else if (MetricDefineType.MEASURE.equals(metricResp.getMetricDefineType())) {
List<MeasureParam> measures = metricResp.getMetricDefineByMeasureParams().getMeasures(); List<Measure> measures = metricResp.getMetricDefineByMeasureParams().getMeasures();
List<String> fieldNameDepended = measures.stream().map(MeasureParam::getBizName) List<String> fieldNameDepended = measures.stream().map(Measure::getName)
// measure bizName = model bizName_fieldName // measure bizName = model bizName_fieldName
.map(name -> name.replaceFirst(metricResp.getModelBizName() + "_", "")) .map(name -> name.replaceFirst(metricResp.getModelBizName() + "_", ""))
.collect(Collectors.toList()); .collect(Collectors.toList());
@@ -705,12 +658,11 @@ public class MetricServiceImpl extends ServiceImpl<MetricDOMapper, MetricDO>
// Measure define will get from first measure // Measure define will get from first measure
if (MetricDefineType.MEASURE.equals(metricResp.getMetricDefineType())) { if (MetricDefineType.MEASURE.equals(metricResp.getMetricDefineType())) {
List<Measure> measures = modelResp.getModelDetail().getMeasures(); List<Measure> measures = modelResp.getModelDetail().getMeasures();
List<MeasureParam> measureParams = List<Measure> measureParams = metricResp.getMetricDefineByMeasureParams().getMeasures();
metricResp.getMetricDefineByMeasureParams().getMeasures();
if (CollectionUtils.isEmpty(measureParams)) { if (CollectionUtils.isEmpty(measureParams)) {
return null; return null;
} }
MeasureParam firstMeasure = measureParams.get(0); Measure firstMeasure = measureParams.get(0);
for (Measure measure : measures) { for (Measure measure : measures) {
if (measure.getBizName().equalsIgnoreCase(firstMeasure.getBizName())) { if (measure.getBizName().equalsIgnoreCase(firstMeasure.getBizName())) {

View File

@@ -26,7 +26,7 @@ public class MetricDrillDownChecker {
private MetricService metricService; private MetricService metricService;
public void checkQuery(QueryStatement queryStatement) { public void checkQuery(QueryStatement queryStatement) {
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp(); SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchema();
String sql = queryStatement.getSql(); String sql = queryStatement.getSql();
if (StringUtils.isBlank(sql)) { if (StringUtils.isBlank(sql)) {
return; return;

View File

@@ -7,21 +7,8 @@ import com.tencent.supersonic.common.pojo.User;
import com.tencent.supersonic.common.pojo.enums.StatusEnum; import com.tencent.supersonic.common.pojo.enums.StatusEnum;
import com.tencent.supersonic.common.util.BeanMapper; import com.tencent.supersonic.common.util.BeanMapper;
import com.tencent.supersonic.common.util.JsonUtil; import com.tencent.supersonic.common.util.JsonUtil;
import com.tencent.supersonic.headless.api.pojo.ColumnSchema; import com.tencent.supersonic.headless.api.pojo.*;
import com.tencent.supersonic.headless.api.pojo.Dimension; import com.tencent.supersonic.headless.api.pojo.enums.*;
import com.tencent.supersonic.headless.api.pojo.DrillDownDimension;
import com.tencent.supersonic.headless.api.pojo.Identify;
import com.tencent.supersonic.headless.api.pojo.Measure;
import com.tencent.supersonic.headless.api.pojo.MeasureParam;
import com.tencent.supersonic.headless.api.pojo.MetricDefineByMeasureParams;
import com.tencent.supersonic.headless.api.pojo.ModelDetail;
import com.tencent.supersonic.headless.api.pojo.ModelSchema;
import com.tencent.supersonic.headless.api.pojo.enums.DimensionType;
import com.tencent.supersonic.headless.api.pojo.enums.FieldType;
import com.tencent.supersonic.headless.api.pojo.enums.IdentifyType;
import com.tencent.supersonic.headless.api.pojo.enums.MetricDefineType;
import com.tencent.supersonic.headless.api.pojo.enums.ModelDefineType;
import com.tencent.supersonic.headless.api.pojo.enums.SemanticType;
import com.tencent.supersonic.headless.api.pojo.request.DimensionReq; import com.tencent.supersonic.headless.api.pojo.request.DimensionReq;
import com.tencent.supersonic.headless.api.pojo.request.MetricReq; import com.tencent.supersonic.headless.api.pojo.request.MetricReq;
import com.tencent.supersonic.headless.api.pojo.request.ModelBuildReq; import com.tencent.supersonic.headless.api.pojo.request.ModelBuildReq;
@@ -34,12 +21,7 @@ import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.BeanUtils; import org.springframework.beans.BeanUtils;
import org.springframework.util.CollectionUtils; import org.springframework.util.CollectionUtils;
import java.util.Arrays; import java.util.*;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors; import java.util.stream.Collectors;
public class ModelConverter { public class ModelConverter {
@@ -141,9 +123,7 @@ public class ModelConverter {
metricReq.setModelId(modelDO.getId()); metricReq.setModelId(modelDO.getId());
MetricDefineByMeasureParams exprTypeParams = new MetricDefineByMeasureParams(); MetricDefineByMeasureParams exprTypeParams = new MetricDefineByMeasureParams();
exprTypeParams.setExpr(measure.getBizName()); exprTypeParams.setExpr(measure.getBizName());
MeasureParam measureParam = new MeasureParam(); exprTypeParams.setMeasures(Lists.newArrayList(measure));
BeanMapper.mapper(measure, measureParam);
exprTypeParams.setMeasures(Lists.newArrayList(measureParam));
metricReq.setMetricDefineByMeasureParams(exprTypeParams); metricReq.setMetricDefineByMeasureParams(exprTypeParams);
metricReq.setMetricDefineType(MetricDefineType.MEASURE); metricReq.setMetricDefineType(MetricDefineType.MEASURE);
return metricReq; return metricReq;

View File

@@ -1,243 +1,230 @@
package com.tencent.supersonic.headless.server.calcite; package com.tencent.supersonic.headless.server.calcite;
import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.api.pojo.response.SqlParserResp;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.calcite.SqlBuilder;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
import com.tencent.supersonic.headless.server.manager.SemanticSchemaManager;
import com.tencent.supersonic.headless.server.pojo.yaml.*;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
@Slf4j @Slf4j
class HeadlessParserServiceTest { class HeadlessParserServiceTest {
//
public static SqlParserResp parser(S2CalciteSchema semanticSchema, // public static SqlParserResp parser(S2CalciteSchema semanticSchema, OntologyQuery
OntologyQueryParam ontologyQueryParam, boolean isAgg) { // ontologyQuery,
SqlParserResp sqlParser = new SqlParserResp(); // boolean isAgg) {
try { // SqlParserResp sqlParser = new SqlParserResp();
if (semanticSchema == null) { // try {
sqlParser.setErrMsg("headlessSchema not found"); // if (semanticSchema == null) {
return sqlParser; // sqlParser.setErrMsg("headlessSchema not found");
} // return sqlParser;
SqlBuilder aggBuilder = new SqlBuilder(semanticSchema); // }
QueryStatement queryStatement = new QueryStatement(); // SqlBuilder aggBuilder = new SqlBuilder(semanticSchema);
queryStatement.setOntologyQueryParam(ontologyQueryParam); // QueryStatement queryStatement = new QueryStatement();
String sql = aggBuilder.buildOntologySql(queryStatement); // queryStatement.setOntologyQuery(ontologyQuery);
queryStatement.setSql(sql); // String sql = aggBuilder.buildOntologySql(queryStatement);
EngineType engineType = semanticSchema.getOntology().getDatabase().getType(); // queryStatement.setSql(sql);
sqlParser.setSql(aggBuilder.getSql(engineType)); // EngineType engineType = semanticSchema.getOntology().getDatabase().getType();
} catch (Exception e) { // sqlParser.setSql(aggBuilder.getSql(engineType));
sqlParser.setErrMsg(e.getMessage()); // } catch (Exception e) {
log.error("parser error metricQueryReq[{}] error [{}]", ontologyQueryParam, e); // sqlParser.setErrMsg(e.getMessage());
} // log.error("parser error metricQueryReq[{}] error [{}]", ontologyQuery, e);
return sqlParser; // }
} // return sqlParser;
// }
public void test() throws Exception { //
// public void test() throws Exception {
DataModelYamlTpl datasource = new DataModelYamlTpl(); //
datasource.setName("s2_pv_uv_statis"); // DataModelYamlTpl datasource = new DataModelYamlTpl();
datasource.setSourceId(1L); // datasource.setName("s2_pv_uv_statis");
datasource.setSqlQuery( // datasource.setSourceId(1L);
"SELECT imp_date, user_name,page,1 as pv, user_name as uv FROM s2_pv_uv_statis"); // datasource.setSqlQuery(
// "SELECT imp_date, user_name,page,1 as pv, user_name as uv FROM s2_pv_uv_statis");
MeasureYamlTpl measure = new MeasureYamlTpl(); //
measure.setAgg("sum"); // MeasureYamlTpl measure = new MeasureYamlTpl();
measure.setName("s2_pv_uv_statis_pv"); // measure.setAgg("sum");
measure.setExpr("pv"); // measure.setName("s2_pv_uv_statis_pv");
List<MeasureYamlTpl> measures = new ArrayList<>(); // measure.setExpr("pv");
measures.add(measure); // List<MeasureYamlTpl> measures = new ArrayList<>();
// measures.add(measure);
MeasureYamlTpl measure2 = new MeasureYamlTpl(); //
measure2.setAgg("count"); // MeasureYamlTpl measure2 = new MeasureYamlTpl();
measure2.setName("s2_pv_uv_statis_internal_cnt"); // measure2.setAgg("count");
measure2.setExpr("1"); // measure2.setName("s2_pv_uv_statis_internal_cnt");
measure2.setCreateMetric("true"); // measure2.setExpr("1");
measures.add(measure2); // measure2.setCreateMetric("true");
// measures.add(measure2);
MeasureYamlTpl measure3 = new MeasureYamlTpl(); //
measure3.setAgg("count"); // MeasureYamlTpl measure3 = new MeasureYamlTpl();
measure3.setName("s2_pv_uv_statis_uv"); // measure3.setAgg("count");
measure3.setExpr("uv"); // measure3.setName("s2_pv_uv_statis_uv");
measure3.setCreateMetric("true"); // measure3.setExpr("uv");
measures.add(measure3); // measure3.setCreateMetric("true");
// measures.add(measure3);
datasource.setMeasures(measures); //
// datasource.setMeasures(measures);
DimensionYamlTpl dimension = new DimensionYamlTpl(); //
dimension.setName("imp_date"); // DimensionYamlTpl dimension = new DimensionYamlTpl();
dimension.setExpr("imp_date"); // dimension.setName("imp_date");
dimension.setType("time"); // dimension.setExpr("imp_date");
DimensionTimeTypeParamsTpl dimensionTimeTypeParams = new DimensionTimeTypeParamsTpl(); // dimension.setType("time");
dimensionTimeTypeParams.setIsPrimary("true"); // DimensionTimeTypeParams dimensionTimeTypeParams = new DimensionTimeTypeParams();
dimensionTimeTypeParams.setTimeGranularity("day"); // dimensionTimeTypeParams.setIsPrimary("true");
dimension.setTypeParams(dimensionTimeTypeParams); // dimensionTimeTypeParams.setTimeGranularity("day");
List<DimensionYamlTpl> dimensions = new ArrayList<>(); // dimension.setTypeParams(dimensionTimeTypeParams);
dimensions.add(dimension); // List<DimensionYamlTpl> dimensions = new ArrayList<>();
// dimensions.add(dimension);
DimensionYamlTpl dimension2 = new DimensionYamlTpl(); //
dimension2.setName("sys_imp_date"); // DimensionYamlTpl dimension2 = new DimensionYamlTpl();
dimension2.setExpr("imp_date"); // dimension2.setName("sys_imp_date");
dimension2.setType("time"); // dimension2.setExpr("imp_date");
DimensionTimeTypeParamsTpl dimensionTimeTypeParams2 = new DimensionTimeTypeParamsTpl(); // dimension2.setType("time");
dimensionTimeTypeParams2.setIsPrimary("true"); // DimensionTimeTypeParams dimensionTimeTypeParams2 = new DimensionTimeTypeParams();
dimensionTimeTypeParams2.setTimeGranularity("day"); // dimensionTimeTypeParams2.setIsPrimary("true");
dimension2.setTypeParams(dimensionTimeTypeParams2); // dimensionTimeTypeParams2.setTimeGranularity("day");
dimensions.add(dimension2); // dimension2.setTypeParams(dimensionTimeTypeParams2);
// dimensions.add(dimension2);
DimensionYamlTpl dimension3 = new DimensionYamlTpl(); //
dimension3.setName("sys_imp_week"); // DimensionYamlTpl dimension3 = new DimensionYamlTpl();
dimension3.setExpr("to_monday(from_unixtime(unix_timestamp(imp_date), 'yyyy-MM-dd'))"); // dimension3.setName("sys_imp_week");
dimension3.setType("time"); // dimension3.setExpr("to_monday(from_unixtime(unix_timestamp(imp_date), 'yyyy-MM-dd'))");
DimensionTimeTypeParamsTpl dimensionTimeTypeParams3 = new DimensionTimeTypeParamsTpl(); // dimension3.setType("time");
dimensionTimeTypeParams3.setIsPrimary("true"); // DimensionTimeTypeParams dimensionTimeTypeParams3 = new DimensionTimeTypeParams();
dimensionTimeTypeParams3.setTimeGranularity("day"); // dimensionTimeTypeParams3.setIsPrimary("true");
dimension3.setTypeParams(dimensionTimeTypeParams3); // dimensionTimeTypeParams3.setTimeGranularity("day");
dimensions.add(dimension3); // dimension3.setTypeParams(dimensionTimeTypeParams3);
// dimensions.add(dimension3);
datasource.setDimensions(dimensions); //
// datasource.setDimensions(dimensions);
List<IdentifyYamlTpl> identifies = new ArrayList<>(); //
IdentifyYamlTpl identify = new IdentifyYamlTpl(); // List<IdentifyYamlTpl> identifies = new ArrayList<>();
identify.setName("user_name"); // IdentifyYamlTpl identify = new IdentifyYamlTpl();
identify.setType("primary"); // identify.setName("user_name");
identifies.add(identify); // identify.setType("primary");
datasource.setIdentifiers(identifies); // identifies.add(identify);
S2CalciteSchema semanticSchema = S2CalciteSchema.builder().build(); // datasource.setIdentifiers(identifies);
// S2CalciteSchema semanticSchema = S2CalciteSchema.builder().build();
SemanticSchemaManager.update(semanticSchema, //
SemanticSchemaManager.getDataModel(datasource)); // SemanticSchemaManager.update(semanticSchema,
// SemanticSchemaManager.getDataModel(datasource));
DimensionYamlTpl dimension1 = new DimensionYamlTpl(); //
dimension1.setExpr("page"); // DimensionYamlTpl dimension1 = new DimensionYamlTpl();
dimension1.setName("page"); // dimension1.setExpr("page");
dimension1.setType("categorical"); // dimension1.setName("page");
List<DimensionYamlTpl> dimensionYamlTpls = new ArrayList<>(); // dimension1.setType("categorical");
dimensionYamlTpls.add(dimension1); // List<DimensionYamlTpl> dimensionYamlTpls = new ArrayList<>();
// dimensionYamlTpls.add(dimension1);
SemanticSchemaManager.update(semanticSchema, "s2_pv_uv_statis", //
SemanticSchemaManager.getDimensions(dimensionYamlTpls)); // SemanticSchemaManager.update(semanticSchema, "s2_pv_uv_statis",
// SemanticSchemaManager.getDimensions(dimensionYamlTpls));
MetricYamlTpl metric1 = new MetricYamlTpl(); //
metric1.setName("pv"); // MetricYamlTpl metric1 = new MetricYamlTpl();
metric1.setType("expr"); // metric1.setName("pv");
MetricTypeParamsYamlTpl metricTypeParams = new MetricTypeParamsYamlTpl(); // metric1.setType("expr");
List<MeasureYamlTpl> measures1 = new ArrayList<>(); // MetricTypeParamsYamlTpl metricTypeParams = new MetricTypeParamsYamlTpl();
MeasureYamlTpl measure1 = new MeasureYamlTpl(); // List<MeasureYamlTpl> measures1 = new ArrayList<>();
measure1.setName("s2_pv_uv_statis_pv"); // MeasureYamlTpl measure1 = new MeasureYamlTpl();
measures1.add(measure1); // measure1.setName("s2_pv_uv_statis_pv");
metricTypeParams.setMeasures(measures1); // measures1.add(measure1);
metricTypeParams.setExpr("s2_pv_uv_statis_pv"); // metricTypeParams.setMeasures(measures1);
metric1.setTypeParams(metricTypeParams); // metricTypeParams.setExpr("s2_pv_uv_statis_pv");
List<MetricYamlTpl> metric = new ArrayList<>(); // metric1.setTypeParams(metricTypeParams);
metric.add(metric1); // List<MetricYamlTpl> metric = new ArrayList<>();
// metric.add(metric1);
MetricYamlTpl metric2 = new MetricYamlTpl(); //
metric2.setName("uv"); // MetricYamlTpl metric2 = new MetricYamlTpl();
metric2.setType("expr"); // metric2.setName("uv");
MetricTypeParamsYamlTpl metricTypeParams1 = new MetricTypeParamsYamlTpl(); // metric2.setType("expr");
List<MeasureYamlTpl> measures2 = new ArrayList<>(); // MetricTypeParamsYamlTpl metricTypeParams1 = new MetricTypeParamsYamlTpl();
MeasureYamlTpl measure4 = new MeasureYamlTpl(); // List<MeasureYamlTpl> measures2 = new ArrayList<>();
measure4.setName("s2_pv_uv_statis_uv"); // MeasureYamlTpl measure4 = new MeasureYamlTpl();
measures2.add(measure4); // measure4.setName("s2_pv_uv_statis_uv");
metricTypeParams1.setMeasures(measures2); // measures2.add(measure4);
metricTypeParams1.setExpr("s2_pv_uv_statis_uv"); // metricTypeParams1.setMeasures(measures2);
metric2.setTypeParams(metricTypeParams1); // metricTypeParams1.setExpr("s2_pv_uv_statis_uv");
metric.add(metric2); // metric2.setTypeParams(metricTypeParams1);
// metric.add(metric2);
// HeadlessSchemaManager.update(headlessSchema, HeadlessSchemaManager.getMetrics(metric)); //
// // HeadlessSchemaManager.update(headlessSchema, HeadlessSchemaManager.getMetrics(metric));
OntologyQueryParam metricCommand = new OntologyQueryParam(); //
metricCommand.setDimensions(new HashSet<>(Arrays.asList("sys_imp_date"))); // OntologyQuery metricCommand = new OntologyQuery();
metricCommand.setMetrics(new HashSet<>(Arrays.asList("pv"))); // metricCommand.setDimensions(new HashSet<>(Arrays.asList("sys_imp_date")));
metricCommand.setWhere( // metricCommand.setMetrics(new HashSet<>(Arrays.asList("pv")));
"user_name = 'ab' and (sys_imp_date >= '2023-02-28' and sys_imp_date <= '2023-05-28') "); // metricCommand.setWhere(
metricCommand.setLimit(1000L); // "user_name = 'ab' and (sys_imp_date >= '2023-02-28' and sys_imp_date <= '2023-05-28') ");
List<ColumnOrder> orders = new ArrayList<>(); // metricCommand.setLimit(1000L);
orders.add(ColumnOrder.buildDesc("sys_imp_date")); // List<ColumnOrder> orders = new ArrayList<>();
metricCommand.setOrder(orders); // orders.add(ColumnOrder.buildDesc("sys_imp_date"));
System.out.println(parser(semanticSchema, metricCommand, true)); // metricCommand.setOrder(orders);
// System.out.println(parser(semanticSchema, metricCommand, true));
addDepartment(semanticSchema); //
// addDepartment(semanticSchema);
OntologyQueryParam metricCommand2 = new OntologyQueryParam(); //
metricCommand2.setDimensions(new HashSet<>(Arrays.asList("sys_imp_date", // OntologyQuery metricCommand2 = new OntologyQuery();
"user_name__department", "user_name", "user_name__page"))); // metricCommand2.setDimensions(new HashSet<>(Arrays.asList("sys_imp_date",
metricCommand2.setMetrics(new HashSet<>(Arrays.asList("pv"))); // "user_name__department", "user_name", "user_name__page")));
metricCommand2.setWhere( // metricCommand2.setMetrics(new HashSet<>(Arrays.asList("pv")));
"user_name = 'ab' and (sys_imp_date >= '2023-02-28' and sys_imp_date <= '2023-05-28') "); // metricCommand2.setWhere(
metricCommand2.setLimit(1000L); // "user_name = 'ab' and (sys_imp_date >= '2023-02-28' and sys_imp_date <= '2023-05-28') ");
List<ColumnOrder> orders2 = new ArrayList<>(); // metricCommand2.setLimit(1000L);
orders2.add(ColumnOrder.buildDesc("sys_imp_date")); // List<ColumnOrder> orders2 = new ArrayList<>();
metricCommand2.setOrder(orders2); // orders2.add(ColumnOrder.buildDesc("sys_imp_date"));
System.out.println(parser(semanticSchema, metricCommand2, true)); // metricCommand2.setOrder(orders2);
} // System.out.println(parser(semanticSchema, metricCommand2, true));
// }
private static void addDepartment(S2CalciteSchema semanticSchema) { //
DataModelYamlTpl datasource = new DataModelYamlTpl(); // private static void addDepartment(S2CalciteSchema semanticSchema) {
datasource.setName("user_department"); // DataModelYamlTpl datasource = new DataModelYamlTpl();
datasource.setSourceId(1L); // datasource.setName("user_department");
datasource.setSqlQuery("SELECT imp_date,user_name,department FROM s2_user_department"); // datasource.setSourceId(1L);
// datasource.setSqlQuery("SELECT imp_date,user_name,department FROM s2_user_department");
MeasureYamlTpl measure = new MeasureYamlTpl(); //
measure.setAgg("count"); // MeasureYamlTpl measure = new MeasureYamlTpl();
measure.setName("user_department_internal_cnt"); // measure.setAgg("count");
measure.setCreateMetric("true"); // measure.setName("user_department_internal_cnt");
measure.setExpr("1"); // measure.setCreateMetric("true");
List<MeasureYamlTpl> measures = new ArrayList<>(); // measure.setExpr("1");
measures.add(measure); // List<MeasureYamlTpl> measures = new ArrayList<>();
// measures.add(measure);
datasource.setMeasures(measures); //
// datasource.setMeasures(measures);
DimensionYamlTpl dimension = new DimensionYamlTpl(); //
dimension.setName("sys_imp_date"); // DimensionYamlTpl dimension = new DimensionYamlTpl();
dimension.setExpr("imp_date"); // dimension.setName("sys_imp_date");
dimension.setType("time"); // dimension.setExpr("imp_date");
DimensionTimeTypeParamsTpl dimensionTimeTypeParams = new DimensionTimeTypeParamsTpl(); // dimension.setType("time");
dimensionTimeTypeParams.setIsPrimary("true"); // DimensionTimeTypeParamsTpl dimensionTimeTypeParams = new DimensionTimeTypeParamsTpl();
dimensionTimeTypeParams.setTimeGranularity("day"); // dimensionTimeTypeParams.setIsPrimary("true");
dimension.setTypeParams(dimensionTimeTypeParams); // dimensionTimeTypeParams.setTimeGranularity("day");
List<DimensionYamlTpl> dimensions = new ArrayList<>(); // dimension.setTypeParams(dimensionTimeTypeParams);
dimensions.add(dimension); // List<DimensionYamlTpl> dimensions = new ArrayList<>();
// dimensions.add(dimension);
DimensionYamlTpl dimension3 = new DimensionYamlTpl(); //
dimension3.setName("sys_imp_week"); // DimensionYamlTpl dimension3 = new DimensionYamlTpl();
dimension3.setExpr("to_monday(from_unixtime(unix_timestamp(imp_date), 'yyyy-MM-dd'))"); // dimension3.setName("sys_imp_week");
dimension3.setType("time"); // dimension3.setExpr("to_monday(from_unixtime(unix_timestamp(imp_date), 'yyyy-MM-dd'))");
DimensionTimeTypeParamsTpl dimensionTimeTypeParams3 = new DimensionTimeTypeParamsTpl(); // dimension3.setType("time");
dimensionTimeTypeParams3.setIsPrimary("true"); // DimensionTimeTypeParamsTpl dimensionTimeTypeParams3 = new DimensionTimeTypeParamsTpl();
dimensionTimeTypeParams3.setTimeGranularity("week"); // dimensionTimeTypeParams3.setIsPrimary("true");
dimension3.setTypeParams(dimensionTimeTypeParams3); // dimensionTimeTypeParams3.setTimeGranularity("week");
dimensions.add(dimension3); // dimension3.setTypeParams(dimensionTimeTypeParams3);
// dimensions.add(dimension3);
datasource.setDimensions(dimensions); //
// datasource.setDimensions(dimensions);
List<IdentifyYamlTpl> identifies = new ArrayList<>(); //
IdentifyYamlTpl identify = new IdentifyYamlTpl(); // List<IdentifyYamlTpl> identifies = new ArrayList<>();
identify.setName("user_name"); // IdentifyYamlTpl identify = new IdentifyYamlTpl();
identify.setType("primary"); // identify.setName("user_name");
identifies.add(identify); // identify.setType("primary");
datasource.setIdentifiers(identifies); // identifies.add(identify);
// datasource.setIdentifiers(identifies);
semanticSchema.getDataModels().put("user_department", //
SemanticSchemaManager.getDataModel(datasource)); // semanticSchema.getDataModels().put("user_department",
// SemanticSchemaManager.getDataModel(datasource));
DimensionYamlTpl dimension1 = new DimensionYamlTpl(); //
dimension1.setExpr("department"); // DimensionYamlTpl dimension1 = new DimensionYamlTpl();
dimension1.setName("department"); // dimension1.setExpr("department");
dimension1.setType("categorical"); // dimension1.setName("department");
List<DimensionYamlTpl> dimensionYamlTpls = new ArrayList<>(); // dimension1.setType("categorical");
dimensionYamlTpls.add(dimension1); // List<DimensionYamlTpl> dimensionYamlTpls = new ArrayList<>();
// dimensionYamlTpls.add(dimension1);
semanticSchema.getDimensions().put("user_department", //
SemanticSchemaManager.getDimensions(dimensionYamlTpls)); // semanticSchema.getDimensions().put("user_department",
} // SemanticSchemaManager.getDimensions(dimensionYamlTpls));
// }
} }

View File

@@ -34,125 +34,125 @@ import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.when; import static org.mockito.Mockito.when;
public class MetricServiceImplTest { public class MetricServiceImplTest {
//
@Test // @Test
void createMetric() throws Exception { // void createMetric() throws Exception {
MetricRepository metricRepository = Mockito.mock(MetricRepository.class); // MetricRepository metricRepository = Mockito.mock(MetricRepository.class);
ModelService modelService = Mockito.mock(ModelService.class); // ModelService modelService = Mockito.mock(ModelService.class);
MetricService metricService = mockMetricService(metricRepository, modelService); // MetricService metricService = mockMetricService(metricRepository, modelService);
MetricReq metricReq = buildMetricReq(); // MetricReq metricReq = buildMetricReq();
when(modelService.getModel(metricReq.getModelId())).thenReturn(mockModelResp()); // when(modelService.getModel(metricReq.getModelId())).thenReturn(mockModelResp());
when(modelService.getModelByDomainIds(any())).thenReturn(Lists.newArrayList()); // when(modelService.getModelByDomainIds(any())).thenReturn(Lists.newArrayList());
MetricResp actualMetricResp = metricService.createMetric(metricReq, User.getDefaultUser()); // MetricResp actualMetricResp = metricService.createMetric(metricReq, User.getDefaultUser());
MetricResp expectedMetricResp = buildExpectedMetricResp(); // MetricResp expectedMetricResp = buildExpectedMetricResp();
Assertions.assertEquals(expectedMetricResp, actualMetricResp); // Assertions.assertEquals(expectedMetricResp, actualMetricResp);
} // }
//
@Test // @Test
void updateMetric() throws Exception { // void updateMetric() throws Exception {
MetricRepository metricRepository = Mockito.mock(MetricRepository.class); // MetricRepository metricRepository = Mockito.mock(MetricRepository.class);
ModelService modelService = Mockito.mock(ModelService.class); // ModelService modelService = Mockito.mock(ModelService.class);
MetricService metricService = mockMetricService(metricRepository, modelService); // MetricService metricService = mockMetricService(metricRepository, modelService);
MetricReq metricReq = buildMetricUpdateReq(); // MetricReq metricReq = buildMetricUpdateReq();
when(modelService.getModel(metricReq.getModelId())).thenReturn(mockModelResp()); // when(modelService.getModel(metricReq.getModelId())).thenReturn(mockModelResp());
when(modelService.getModelByDomainIds(any())).thenReturn(Lists.newArrayList()); // when(modelService.getModelByDomainIds(any())).thenReturn(Lists.newArrayList());
MetricDO metricDO = MetricConverter.convert2MetricDO(buildMetricReq()); // MetricDO metricDO = MetricConverter.convert2MetricDO(buildMetricReq());
when(metricRepository.getMetricById(metricDO.getId())).thenReturn(metricDO); // when(metricRepository.getMetricById(metricDO.getId())).thenReturn(metricDO);
MetricResp actualMetricResp = metricService.updateMetric(metricReq, User.getDefaultUser()); // MetricResp actualMetricResp = metricService.updateMetric(metricReq, User.getDefaultUser());
MetricResp expectedMetricResp = buildExpectedMetricResp(); // MetricResp expectedMetricResp = buildExpectedMetricResp();
Assertions.assertEquals(expectedMetricResp, actualMetricResp); // Assertions.assertEquals(expectedMetricResp, actualMetricResp);
} // }
//
private MetricService mockMetricService(MetricRepository metricRepository, // private MetricService mockMetricService(MetricRepository metricRepository,
ModelService modelService) { // ModelService modelService) {
AliasGenerateHelper aliasGenerateHelper = Mockito.mock(AliasGenerateHelper.class); // AliasGenerateHelper aliasGenerateHelper = Mockito.mock(AliasGenerateHelper.class);
CollectService collectService = Mockito.mock(CollectService.class); // CollectService collectService = Mockito.mock(CollectService.class);
ApplicationEventPublisher eventPublisher = Mockito.mock(ApplicationEventPublisher.class); // ApplicationEventPublisher eventPublisher = Mockito.mock(ApplicationEventPublisher.class);
DataSetService dataSetService = Mockito.mock(DataSetServiceImpl.class); // DataSetService dataSetService = Mockito.mock(DataSetServiceImpl.class);
DimensionService dimensionService = Mockito.mock(DimensionService.class); // DimensionService dimensionService = Mockito.mock(DimensionService.class);
ChatLayerService chatLayerService = Mockito.mock(ChatLayerService.class); // ChatLayerService chatLayerService = Mockito.mock(ChatLayerService.class);
return new MetricServiceImpl(metricRepository, modelService, aliasGenerateHelper, // return new MetricServiceImpl(metricRepository, modelService, aliasGenerateHelper,
collectService, dataSetService, eventPublisher, dimensionService, chatLayerService); // collectService, dataSetService, eventPublisher, dimensionService, chatLayerService);
} // }
//
private MetricReq buildMetricReq() { // private MetricReq buildMetricReq() {
MetricReq metricReq = new MetricReq(); // MetricReq metricReq = new MetricReq();
metricReq.setId(1L); // metricReq.setId(1L);
metricReq.setName("hr部门的访问次数"); // metricReq.setName("hr部门的访问次数");
metricReq.setBizName("pv"); // metricReq.setBizName("pv");
metricReq.setDescription("SuperSonic的访问情况"); // metricReq.setDescription("SuperSonic的访问情况");
metricReq.setAlias("pv"); // metricReq.setAlias("pv");
metricReq.setMetricDefineType(MetricDefineType.MEASURE); // metricReq.setMetricDefineType(MetricDefineType.MEASURE);
metricReq.setModelId(2L); // metricReq.setModelId(2L);
metricReq.setDataFormatType(DataFormatTypeEnum.PERCENT.getName()); // metricReq.setDataFormatType(DataFormatTypeEnum.PERCENT.getName());
DataFormat dataFormat = new DataFormat(); // DataFormat dataFormat = new DataFormat();
dataFormat.setDecimalPlaces(3); // dataFormat.setDecimalPlaces(3);
dataFormat.setNeedMultiply100(false); // dataFormat.setNeedMultiply100(false);
metricReq.setDataFormat(dataFormat); // metricReq.setDataFormat(dataFormat);
MetricDefineByMeasureParams typeParams = new MetricDefineByMeasureParams(); // MetricDefineByMeasureParams typeParams = new MetricDefineByMeasureParams();
typeParams.setMeasures(Lists.newArrayList(new MeasureParam("s2_pv", "department='hr'"), // typeParams.setMeasures(Lists.newArrayList(new MeasureParam("s2_pv", "department='hr'"),
new MeasureParam("s2_uv", "department='hr'"))); // new MeasureParam("s2_uv", "department='hr'")));
typeParams.setExpr("s2_pv/s2_uv"); // typeParams.setExpr("s2_pv/s2_uv");
metricReq.setMetricDefineByMeasureParams(typeParams); // metricReq.setMetricDefineByMeasureParams(typeParams);
metricReq.setClassifications(Lists.newArrayList("核心指标")); // metricReq.setClassifications(Lists.newArrayList("核心指标"));
metricReq.setRelateDimension(RelateDimension.builder().drillDownDimensions( // metricReq.setRelateDimension(RelateDimension.builder().drillDownDimensions(
Lists.newArrayList(new DrillDownDimension(1L), new DrillDownDimension(1L, false))) // Lists.newArrayList(new DrillDownDimension(1L), new DrillDownDimension(1L, false)))
.build()); // .build());
metricReq.setSensitiveLevel(SensitiveLevelEnum.LOW.getCode()); // metricReq.setSensitiveLevel(SensitiveLevelEnum.LOW.getCode());
metricReq.setExt(new HashMap<>()); // metricReq.setExt(new HashMap<>());
return metricReq; // return metricReq;
} // }
//
private MetricResp buildExpectedMetricResp() { // private MetricResp buildExpectedMetricResp() {
MetricResp metricResp = new MetricResp(); // MetricResp metricResp = new MetricResp();
metricResp.setId(1L); // metricResp.setId(1L);
metricResp.setName("hr部门的访问次数"); // metricResp.setName("hr部门的访问次数");
metricResp.setBizName("pv"); // metricResp.setBizName("pv");
metricResp.setDescription("SuperSonic的访问情况"); // metricResp.setDescription("SuperSonic的访问情况");
metricResp.setAlias("pv"); // metricResp.setAlias("pv");
metricResp.setMetricDefineType(MetricDefineType.MEASURE); // metricResp.setMetricDefineType(MetricDefineType.MEASURE);
metricResp.setModelId(2L); // metricResp.setModelId(2L);
metricResp.setDataFormatType(DataFormatTypeEnum.PERCENT.getName()); // metricResp.setDataFormatType(DataFormatTypeEnum.PERCENT.getName());
DataFormat dataFormat = new DataFormat(); // DataFormat dataFormat = new DataFormat();
dataFormat.setDecimalPlaces(3); // dataFormat.setDecimalPlaces(3);
dataFormat.setNeedMultiply100(false); // dataFormat.setNeedMultiply100(false);
metricResp.setDataFormat(dataFormat); // metricResp.setDataFormat(dataFormat);
MetricDefineByMeasureParams typeParams = new MetricDefineByMeasureParams(); // MetricDefineByMeasureParams typeParams = new MetricDefineByMeasureParams();
typeParams.setMeasures(Lists.newArrayList(new MeasureParam("s2_pv", "department='hr'"), // typeParams.setMeasures(Lists.newArrayList(new MeasureParam("s2_pv", "department='hr'"),
new MeasureParam("s2_uv", "department='hr'"))); // new MeasureParam("s2_uv", "department='hr'")));
typeParams.setExpr("s2_pv/s2_uv"); // typeParams.setExpr("s2_pv/s2_uv");
metricResp.setMetricDefineByMeasureParams(typeParams); // metricResp.setMetricDefineByMeasureParams(typeParams);
metricResp.setClassifications("核心指标"); // metricResp.setClassifications("核心指标");
metricResp.setRelateDimension(RelateDimension.builder().drillDownDimensions( // metricResp.setRelateDimension(RelateDimension.builder().drillDownDimensions(
Lists.newArrayList(new DrillDownDimension(1L), new DrillDownDimension(1L, false))) // Lists.newArrayList(new DrillDownDimension(1L), new DrillDownDimension(1L, false)))
.build()); // .build());
metricResp.setSensitiveLevel(SensitiveLevelEnum.LOW.getCode()); // metricResp.setSensitiveLevel(SensitiveLevelEnum.LOW.getCode());
metricResp.setExt(new HashMap<>()); // metricResp.setExt(new HashMap<>());
metricResp.setTypeEnum(TypeEnums.METRIC); // metricResp.setTypeEnum(TypeEnums.METRIC);
metricResp.setIsCollect(false); // metricResp.setIsCollect(false);
metricResp.setType(MetricType.DERIVED.name()); // metricResp.setType(MetricType.DERIVED.name());
metricResp.setStatus(StatusEnum.ONLINE.getCode()); // metricResp.setStatus(StatusEnum.ONLINE.getCode());
return metricResp; // return metricResp;
} // }
//
private MetricReq buildMetricUpdateReq() { // private MetricReq buildMetricUpdateReq() {
MetricReq metricReq = new MetricReq(); // MetricReq metricReq = new MetricReq();
metricReq.setId(1L); // metricReq.setId(1L);
metricReq.setName("hr部门的访问次数"); // metricReq.setName("hr部门的访问次数");
metricReq.setBizName("pv"); // metricReq.setBizName("pv");
metricReq.setMetricDefineType(MetricDefineType.MEASURE); // metricReq.setMetricDefineType(MetricDefineType.MEASURE);
MetricDefineByMeasureParams typeParams = new MetricDefineByMeasureParams(); // MetricDefineByMeasureParams typeParams = new MetricDefineByMeasureParams();
typeParams.setMeasures(Lists.newArrayList(new MeasureParam("s2_pv", "department='hr'"), // typeParams.setMeasures(Lists.newArrayList(new MeasureParam("s2_pv", "department='hr'"),
new MeasureParam("s2_uv", "department='hr'"))); // new MeasureParam("s2_uv", "department='hr'")));
typeParams.setExpr("s2_pv/s2_uv"); // typeParams.setExpr("s2_pv/s2_uv");
metricReq.setMetricDefineByMeasureParams(typeParams); // metricReq.setMetricDefineByMeasureParams(typeParams);
return metricReq; // return metricReq;
} // }
//
private ModelResp mockModelResp() { // private ModelResp mockModelResp() {
ModelResp modelResp = new ModelResp(); // ModelResp modelResp = new ModelResp();
modelResp.setId(2L); // modelResp.setId(2L);
modelResp.setDomainId(1L); // modelResp.setDomainId(1L);
return modelResp; // return modelResp;
} // }
} }

View File

@@ -30,7 +30,7 @@ com.tencent.supersonic.headless.core.translator.converter.QueryConverter=\
com.tencent.supersonic.headless.core.translator.converter.SqlVariableConverter,\ com.tencent.supersonic.headless.core.translator.converter.SqlVariableConverter,\
com.tencent.supersonic.headless.core.translator.converter.SqlQueryConverter,\ com.tencent.supersonic.headless.core.translator.converter.SqlQueryConverter,\
com.tencent.supersonic.headless.core.translator.converter.StructQueryConverter,\ com.tencent.supersonic.headless.core.translator.converter.StructQueryConverter,\
com.tencent.supersonic.headless.core.translator.converter.DerivedMetricConverter,\ com.tencent.supersonic.headless.core.translator.converter.MetricExpressionConverter,\
com.tencent.supersonic.headless.core.translator.converter.MetricRatioConverter com.tencent.supersonic.headless.core.translator.converter.MetricRatioConverter
com.tencent.supersonic.headless.core.translator.optimizer.QueryOptimizer=\ com.tencent.supersonic.headless.core.translator.optimizer.QueryOptimizer=\

View File

@@ -55,7 +55,7 @@ public class S2VisitsDemo extends S2BaseDemo {
DimensionResp departmentDimension = getDimension("department", userModel); DimensionResp departmentDimension = getDimension("department", userModel);
MetricResp metricUv = addMetric_uv(pvUvModel, departmentDimension); MetricResp metricUv = addMetric_uv(pvUvModel, departmentDimension);
DimensionResp pageDimension = getDimension("page", stayTimeModel); DimensionResp pageDimension = getDimension("visits_page", stayTimeModel);
updateDimension(stayTimeModel, pageDimension); updateDimension(stayTimeModel, pageDimension);
DimensionResp userDimension = getDimension("user_name", userModel); DimensionResp userDimension = getDimension("user_name", userModel);
MetricResp metricPv = addMetric_pv(pvUvModel, departmentDimension, userDimension); MetricResp metricPv = addMetric_pv(pvUvModel, departmentDimension, userDimension);
@@ -196,7 +196,7 @@ public class S2VisitsDemo extends S2BaseDemo {
modelDetail.setIdentifiers(identifiers); modelDetail.setIdentifiers(identifiers);
List<Dimension> dimensions = new ArrayList<>(); List<Dimension> dimensions = new ArrayList<>();
Dimension dimension1 = new Dimension("", "imp_date", DimensionType.partition_time, 0); Dimension dimension1 = new Dimension("数据日期", "imp_date", DimensionType.partition_time, 1);
dimension1.setTypeParams(new DimensionTimeTypeParams()); dimension1.setTypeParams(new DimensionTimeTypeParams());
dimensions.add(dimension1); dimensions.add(dimension1);
Dimension dimension2 = new Dimension("", "page", DimensionType.categorical, 0); Dimension dimension2 = new Dimension("", "page", DimensionType.categorical, 0);
@@ -231,10 +231,10 @@ public class S2VisitsDemo extends S2BaseDemo {
modelDetail.setIdentifiers(identifiers); modelDetail.setIdentifiers(identifiers);
List<Dimension> dimensions = new ArrayList<>(); List<Dimension> dimensions = new ArrayList<>();
Dimension dimension1 = new Dimension("数据日期", "imp_date", DimensionType.partition_time, 1); Dimension dimension1 = new Dimension("数据日期", "imp_date", DimensionType.partition_time, 0);
dimension1.setTypeParams(new DimensionTimeTypeParams()); dimension1.setTypeParams(new DimensionTimeTypeParams());
dimensions.add(dimension1); dimensions.add(dimension1);
Dimension dimension2 = new Dimension("页面", "page", DimensionType.categorical, 1); Dimension dimension2 = new Dimension("页面", "visits_page", DimensionType.categorical, 1);
dimension2.setExpr("page"); dimension2.setExpr("page");
dimensions.add(dimension2); dimensions.add(dimension2);
modelDetail.setDimensions(dimensions); modelDetail.setDimensions(dimensions);

View File

@@ -26,11 +26,11 @@ com.tencent.supersonic.headless.chat.parser.llm.DataSetResolver=\
### headless-core SPIs ### headless-core SPIs
com.tencent.supersonic.headless.core.translator.converter.QueryConverter=\ com.tencent.supersonic.headless.core.translator.converter.QueryConverter=\
com.tencent.supersonic.headless.core.translator.converter.DefaultDimValueConverter,\
com.tencent.supersonic.headless.core.translator.converter.SqlVariableConverter,\ com.tencent.supersonic.headless.core.translator.converter.SqlVariableConverter,\
com.tencent.supersonic.headless.core.translator.converter.SqlQueryConverter,\
com.tencent.supersonic.headless.core.translator.converter.StructQueryConverter,\ com.tencent.supersonic.headless.core.translator.converter.StructQueryConverter,\
com.tencent.supersonic.headless.core.translator.converter.DerivedMetricConverter,\ com.tencent.supersonic.headless.core.translator.converter.SqlQueryConverter,\
com.tencent.supersonic.headless.core.translator.converter.DefaultDimValueConverter,\
com.tencent.supersonic.headless.core.translator.converter.MetricExpressionConverter,\
com.tencent.supersonic.headless.core.translator.converter.MetricRatioConverter com.tencent.supersonic.headless.core.translator.converter.MetricRatioConverter
com.tencent.supersonic.headless.core.translator.optimizer.QueryOptimizer=\ com.tencent.supersonic.headless.core.translator.optimizer.QueryOptimizer=\

View File

@@ -41,7 +41,7 @@ public class MetricTest extends BaseTest {
@Test @Test
@SetSystemProperty(key = "s2.test", value = "true") @SetSystemProperty(key = "s2.test", value = "true")
public void testMetricModel() throws Exception { public void testMetricModel() throws Exception {
QueryResult actualResult = submitNewChat("超音数 访问次数", agent.getId()); QueryResult actualResult = submitNewChat("超音数访问次数", agent.getId());
QueryResult expectedResult = new QueryResult(); QueryResult expectedResult = new QueryResult();
SemanticParseInfo expectedParseInfo = new SemanticParseInfo(); SemanticParseInfo expectedParseInfo = new SemanticParseInfo();
@@ -79,6 +79,7 @@ public class MetricTest extends BaseTest {
assertQueryResult(expectedResult, actualResult); assertQueryResult(expectedResult, actualResult);
assert actualResult.getQueryResults().size() == 1; assert actualResult.getQueryResults().size() == 1;
assert actualResult.getQuerySql().contains("s2_pv_uv_statis");
} }
@Test @Test
@@ -104,11 +105,61 @@ public class MetricTest extends BaseTest {
assertQueryResult(expectedResult, actualResult); assertQueryResult(expectedResult, actualResult);
assert actualResult.getQueryResults().size() == 1; assert actualResult.getQueryResults().size() == 1;
assert actualResult.getQuerySql().contains("s2_pv_uv_statis");
} }
@Test @Test
@SetSystemProperty(key = "s2.test", value = "true") @SetSystemProperty(key = "s2.test", value = "true")
public void testMetricGroupBy() throws Exception { public void testMetricGroupBy() throws Exception {
QueryResult actualResult = submitNewChat("近7天超音数各用户的访问次数", agent.getId());
QueryResult expectedResult = new QueryResult();
SemanticParseInfo expectedParseInfo = new SemanticParseInfo();
expectedResult.setChatContext(expectedParseInfo);
expectedResult.setQueryMode(MetricGroupByQuery.QUERY_MODE);
expectedParseInfo.setAggType(NONE);
expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("访问次数"));
expectedParseInfo.getDimensions().add(DataUtils.getSchemaElement("用户名"));
expectedParseInfo.setDateInfo(DataUtils.getDateConf(DateConf.DateMode.BETWEEN, 7,
DatePeriodEnum.DAY, startDay, endDay));
expectedParseInfo.setQueryType(QueryType.AGGREGATE);
assertQueryResult(expectedResult, actualResult);
assert actualResult.getQueryResults().size() == 6;
assert actualResult.getQuerySql().contains("s2_pv_uv_statis");
assert actualResult.getQuerySql().contains("s2_user_department");
}
@Test
@SetSystemProperty(key = "s2.test", value = "true")
public void testMetricGroupByAndJoin() throws Exception {
QueryResult actualResult = submitNewChat("近7天超音数各部门的访问次数", agent.getId());
QueryResult expectedResult = new QueryResult();
SemanticParseInfo expectedParseInfo = new SemanticParseInfo();
expectedResult.setChatContext(expectedParseInfo);
expectedResult.setQueryMode(MetricGroupByQuery.QUERY_MODE);
expectedParseInfo.setAggType(NONE);
expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("访问次数"));
expectedParseInfo.getDimensions().add(DataUtils.getSchemaElement("部门"));
expectedParseInfo.setDateInfo(DataUtils.getDateConf(DateConf.DateMode.BETWEEN, 7,
DatePeriodEnum.DAY, startDay, endDay));
expectedParseInfo.setQueryType(QueryType.AGGREGATE);
assertQueryResult(expectedResult, actualResult);
assert actualResult.getQueryResults().size() == 4;
assert actualResult.getQuerySql().contains("s2_pv_uv_statis");
}
@Test
@SetSystemProperty(key = "s2.test", value = "true")
public void testMetricGroupByAndMultiJoin() throws Exception {
QueryResult actualResult = submitNewChat("近7天超音数各部门的访问次数和停留时长", agent.getId()); QueryResult actualResult = submitNewChat("近7天超音数各部门的访问次数和停留时长", agent.getId());
QueryResult expectedResult = new QueryResult(); QueryResult expectedResult = new QueryResult();
@@ -128,6 +179,9 @@ public class MetricTest extends BaseTest {
assertQueryResult(expectedResult, actualResult); assertQueryResult(expectedResult, actualResult);
assert actualResult.getQueryResults().size() == 4; assert actualResult.getQueryResults().size() == 4;
assert actualResult.getQuerySql().contains("s2_pv_uv_statis");
assert actualResult.getQuerySql().contains("s2_user_department");
assert actualResult.getQuerySql().contains("s2_stay_time_statis");
} }
@Test @Test

View File

@@ -7,6 +7,7 @@ import com.tencent.supersonic.headless.api.pojo.response.SemanticTranslateResp;
import com.tencent.supersonic.headless.chat.utils.QueryReqBuilder; import com.tencent.supersonic.headless.chat.utils.QueryReqBuilder;
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.junitpioneer.jupiter.SetSystemProperty;
import java.util.Collections; import java.util.Collections;
import java.util.Optional; import java.util.Optional;
@@ -38,6 +39,7 @@ public class TranslateTest extends BaseTest {
} }
@Test @Test
@SetSystemProperty(key = "s2.test", value = "true")
public void testStructExplain() throws Exception { public void testStructExplain() throws Exception {
QueryStructReq queryStructReq = QueryStructReq queryStructReq =
buildQueryStructReq(Collections.singletonList("department")); buildQueryStructReq(Collections.singletonList("department"));