[improvement][headless]Release brand new version of Translator module.
Some checks are pending
supersonic CentOS CI / build (21) (push) Waiting to run
supersonic mac CI / build (21) (push) Waiting to run
supersonic ubuntu CI / build (21) (push) Waiting to run
supersonic windows CI / build (21) (push) Waiting to run

This commit is contained in:
jerryjzhang
2025-01-05 00:00:18 +08:00
91 changed files with 1965 additions and 3158 deletions

View File

@@ -142,7 +142,7 @@ public class MetricRatioCalcProcessor implements ExecuteResultProcessor {
return new HashSet<>(); return new HashSet<>();
} }
return queryResult.getQueryColumns().stream() return queryResult.getQueryColumns().stream()
.flatMap(c -> SqlSelectHelper.getColumnFromExpr(c.getNameEn()).stream()) .flatMap(c -> SqlSelectHelper.getFieldsFromExpr(c.getNameEn()).stream())
.collect(Collectors.toSet()); .collect(Collectors.toSet());
} }

View File

@@ -1,13 +1,10 @@
package com.tencent.supersonic.common.jsqlparser; package com.tencent.supersonic.common.jsqlparser;
import net.sf.jsqlparser.expression.Alias; import net.sf.jsqlparser.expression.*;
import net.sf.jsqlparser.expression.BinaryExpression;
import net.sf.jsqlparser.expression.Expression;
import net.sf.jsqlparser.expression.ExpressionVisitorAdapter;
import net.sf.jsqlparser.expression.Function;
import net.sf.jsqlparser.parser.CCJSqlParserUtil; import net.sf.jsqlparser.parser.CCJSqlParserUtil;
import net.sf.jsqlparser.schema.Column; import net.sf.jsqlparser.schema.Column;
import net.sf.jsqlparser.statement.select.SelectItem; import net.sf.jsqlparser.statement.select.SelectItem;
import org.apache.commons.lang3.StringUtils;
import java.util.Map; import java.util.Map;
import java.util.Objects; import java.util.Objects;
@@ -50,7 +47,8 @@ public class QueryExpressionReplaceVisitor extends ExpressionVisitorAdapter {
String columnName = ""; String columnName = "";
if (expression instanceof Function) { if (expression instanceof Function) {
Function leftFunc = (Function) expression; Function leftFunc = (Function) expression;
if (leftFunc.getParameters().getExpressions().get(0) instanceof Column) { if (Objects.nonNull(leftFunc.getParameters())
&& leftFunc.getParameters().getExpressions().get(0) instanceof Column) {
Column column = (Column) leftFunc.getParameters().getExpressions().get(0); Column column = (Column) leftFunc.getParameters().getExpressions().get(0);
columnName = column.getColumnName(); columnName = column.getColumnName();
toReplace = getReplaceExpr(leftFunc, fieldExprMap); toReplace = getReplaceExpr(leftFunc, fieldExprMap);
@@ -75,8 +73,11 @@ public class QueryExpressionReplaceVisitor extends ExpressionVisitorAdapter {
public static Expression replace(Expression expression, Map<String, String> fieldExprMap) { public static Expression replace(Expression expression, Map<String, String> fieldExprMap) {
String toReplace = ""; String toReplace = "";
if (expression instanceof Function) { if (expression instanceof Function) {
Function function = (Function) expression;
if (function.getParameters().getExpressions().get(0) instanceof Column) {
toReplace = getReplaceExpr((Function) expression, fieldExprMap); toReplace = getReplaceExpr((Function) expression, fieldExprMap);
} }
}
if (expression instanceof Column) { if (expression instanceof Column) {
toReplace = getReplaceExpr((Column) expression, fieldExprMap); toReplace = getReplaceExpr((Column) expression, fieldExprMap);
} }
@@ -109,6 +110,16 @@ public class QueryExpressionReplaceVisitor extends ExpressionVisitorAdapter {
public static String getReplaceExpr(Function function, Map<String, String> fieldExprMap) { public static String getReplaceExpr(Function function, Map<String, String> fieldExprMap) {
Column column = (Column) function.getParameters().getExpressions().get(0); Column column = (Column) function.getParameters().getExpressions().get(0);
return getReplaceExpr(column, fieldExprMap); String expr = getReplaceExpr(column, fieldExprMap);
// if metric expr itself has agg function then replace original function in the SQL
if (StringUtils.isBlank(expr)) {
return expr;
} else if (!SqlSelectFunctionHelper.getAggregateFunctions(expr).isEmpty()) {
return expr;
} else {
String col = getReplaceExpr(column, fieldExprMap);
column.setColumnName(col);
return function.toString();
}
} }
} }

View File

@@ -229,6 +229,26 @@ public class SqlReplaceHelper {
orderByElement.accept(new OrderByReplaceVisitor(fieldNameMap, exactReplace)); orderByElement.accept(new OrderByReplaceVisitor(fieldNameMap, exactReplace));
} }
} }
List<Select> selects = operationList.getSelects();
if (!CollectionUtils.isEmpty(selects)) {
for (Select select : selects) {
if (select instanceof PlainSelect) {
replaceFieldsInPlainOneSelect(fieldNameMap, exactReplace, (PlainSelect) select);
}
}
}
List<WithItem> withItems = operationList.getWithItemsList();
if (!CollectionUtils.isEmpty(withItems)) {
for (WithItem withItem : withItems) {
Select select = withItem.getSelect();
if (select instanceof PlainSelect) {
replaceFieldsInPlainOneSelect(fieldNameMap, exactReplace, (PlainSelect) select);
} else if (select instanceof ParenthesedSelect) {
replaceFieldsInPlainOneSelect(fieldNameMap, exactReplace,
select.getPlainSelect());
}
}
}
} }
public static String replaceFunction(String sql, Map<String, String> functionMap) { public static String replaceFunction(String sql, Map<String, String> functionMap) {
@@ -611,7 +631,14 @@ public class SqlReplaceHelper {
Select selectStatement = SqlSelectHelper.getSelect(sql); Select selectStatement = SqlSelectHelper.getSelect(sql);
List<PlainSelect> plainSelectList = new ArrayList<>(); List<PlainSelect> plainSelectList = new ArrayList<>();
if (selectStatement instanceof PlainSelect) { if (selectStatement instanceof PlainSelect) {
// if with statement exists, replace expression in the with statement.
if (!CollectionUtils.isEmpty(selectStatement.getWithItemsList())) {
selectStatement.getWithItemsList().forEach(withItem -> {
plainSelectList.add(withItem.getSelect().getPlainSelect());
});
} else {
plainSelectList.add((PlainSelect) selectStatement); plainSelectList.add((PlainSelect) selectStatement);
}
} else if (selectStatement instanceof SetOperationList) { } else if (selectStatement instanceof SetOperationList) {
SetOperationList setOperationList = (SetOperationList) selectStatement; SetOperationList setOperationList = (SetOperationList) selectStatement;
if (!CollectionUtils.isEmpty(setOperationList.getSelects())) { if (!CollectionUtils.isEmpty(setOperationList.getSelects())) {
@@ -620,12 +647,35 @@ public class SqlReplaceHelper {
plainSelectList.add(subPlainSelect); plainSelectList.add(subPlainSelect);
}); });
} }
List<Select> selects = setOperationList.getSelects();
if (!CollectionUtils.isEmpty(selects)) {
for (Select select : selects) {
if (select instanceof PlainSelect) {
plainSelectList.add((PlainSelect) select);
}
}
}
List<WithItem> withItems = setOperationList.getWithItemsList();
if (!CollectionUtils.isEmpty(withItems)) {
for (WithItem withItem : withItems) {
Select select = withItem.getSelect();
if (select instanceof PlainSelect) {
plainSelectList.add((PlainSelect) select);
} else if (select instanceof ParenthesedSelect) {
plainSelectList.add(select.getPlainSelect());
}
}
}
} else { } else {
return sql; return sql;
} }
List<PlainSelect> plainSelects = SqlSelectHelper.getPlainSelects(plainSelectList); List<PlainSelect> plainSelects = SqlSelectHelper.getPlainSelects(plainSelectList);
for (PlainSelect plainSelect : plainSelects) { for (PlainSelect plainSelect : plainSelects) {
replacePlainSelectByExpr(plainSelect, replace); replacePlainSelectByExpr(plainSelect, replace);
if (SqlSelectHelper.hasAggregateFunction(plainSelect)) {
SqlSelectHelper.addMissingGroupby(plainSelect);
}
} }
return selectStatement.toString(); return selectStatement.toString();
} }

View File

@@ -714,61 +714,61 @@ public class SqlSelectHelper {
return table.getFullyQualifiedName(); return table.getFullyQualifiedName();
} }
public static Set<String> getColumnFromExpr(String expr) { public static Set<String> getFieldsFromExpr(String expr) {
Expression expression = QueryExpressionReplaceVisitor.getExpression(expr); Expression expression = QueryExpressionReplaceVisitor.getExpression(expr);
Set<String> columns = new HashSet<>(); Set<String> columns = new HashSet<>();
if (Objects.nonNull(expression)) { if (Objects.nonNull(expression)) {
getColumnFromExpr(expression, columns); getFieldsFromExpr(expression, columns);
} }
return columns; return columns;
} }
public static void getColumnFromExpr(Expression expression, Set<String> columns) { public static void getFieldsFromExpr(Expression expression, Set<String> columns) {
if (expression instanceof Column) { if (expression instanceof Column) {
columns.add(((Column) expression).getColumnName()); columns.add(((Column) expression).getColumnName());
} }
if (expression instanceof Function) { if (expression instanceof Function) {
ExpressionList<?> expressionList = ((Function) expression).getParameters(); ExpressionList<?> expressionList = ((Function) expression).getParameters();
for (Expression expr : expressionList) { for (Expression expr : expressionList) {
getColumnFromExpr(expr, columns); getFieldsFromExpr(expr, columns);
} }
} }
if (expression instanceof CaseExpression) { if (expression instanceof CaseExpression) {
CaseExpression expr = (CaseExpression) expression; CaseExpression expr = (CaseExpression) expression;
if (Objects.nonNull(expr.getWhenClauses())) { if (Objects.nonNull(expr.getWhenClauses())) {
for (WhenClause whenClause : expr.getWhenClauses()) { for (WhenClause whenClause : expr.getWhenClauses()) {
getColumnFromExpr(whenClause.getWhenExpression(), columns); getFieldsFromExpr(whenClause.getWhenExpression(), columns);
getColumnFromExpr(whenClause.getThenExpression(), columns); getFieldsFromExpr(whenClause.getThenExpression(), columns);
} }
} }
if (Objects.nonNull(expr.getElseExpression())) { if (Objects.nonNull(expr.getElseExpression())) {
getColumnFromExpr(expr.getElseExpression(), columns); getFieldsFromExpr(expr.getElseExpression(), columns);
} }
} }
if (expression instanceof BinaryExpression) { if (expression instanceof BinaryExpression) {
BinaryExpression expr = (BinaryExpression) expression; BinaryExpression expr = (BinaryExpression) expression;
getColumnFromExpr(expr.getLeftExpression(), columns); getFieldsFromExpr(expr.getLeftExpression(), columns);
getColumnFromExpr(expr.getRightExpression(), columns); getFieldsFromExpr(expr.getRightExpression(), columns);
} }
if (expression instanceof InExpression) { if (expression instanceof InExpression) {
InExpression inExpression = (InExpression) expression; InExpression inExpression = (InExpression) expression;
getColumnFromExpr(inExpression.getLeftExpression(), columns); getFieldsFromExpr(inExpression.getLeftExpression(), columns);
} }
if (expression instanceof Between) { if (expression instanceof Between) {
Between between = (Between) expression; Between between = (Between) expression;
getColumnFromExpr(between.getLeftExpression(), columns); getFieldsFromExpr(between.getLeftExpression(), columns);
} }
if (expression instanceof IsBooleanExpression) { if (expression instanceof IsBooleanExpression) {
IsBooleanExpression isBooleanExpression = (IsBooleanExpression) expression; IsBooleanExpression isBooleanExpression = (IsBooleanExpression) expression;
getColumnFromExpr(isBooleanExpression.getLeftExpression(), columns); getFieldsFromExpr(isBooleanExpression.getLeftExpression(), columns);
} }
if (expression instanceof IsNullExpression) { if (expression instanceof IsNullExpression) {
IsNullExpression isNullExpression = (IsNullExpression) expression; IsNullExpression isNullExpression = (IsNullExpression) expression;
getColumnFromExpr(isNullExpression.getLeftExpression(), columns); getFieldsFromExpr(isNullExpression.getLeftExpression(), columns);
} }
if (expression instanceof Parenthesis) { if (expression instanceof Parenthesis) {
Parenthesis expr = (Parenthesis) expression; Parenthesis expr = (Parenthesis) expression;
getColumnFromExpr(expr.getExpression(), columns); getFieldsFromExpr(expr.getExpression(), columns);
} }
} }
@@ -949,4 +949,31 @@ public class SqlSelectHelper {
} }
}); });
} }
public static void addMissingGroupby(PlainSelect plainSelect) {
if (Objects.nonNull(plainSelect.getGroupBy())
&& !plainSelect.getGroupBy().getGroupByExpressionList().isEmpty()) {
return;
}
GroupByElement groupBy = new GroupByElement();
for (SelectItem selectItem : plainSelect.getSelectItems()) {
Expression expression = selectItem.getExpression();
if (expression instanceof Column) {
groupBy.addGroupByExpression(expression);
}
}
if (!groupBy.getGroupByExpressionList().isEmpty()) {
plainSelect.setGroupByElement(groupBy);
}
}
public static boolean hasAggregateFunction(PlainSelect plainSelect) {
List<SelectItem<?>> selectItems = plainSelect.getSelectItems();
FunctionVisitor visitor = new FunctionVisitor();
for (SelectItem selectItem : selectItems) {
selectItem.accept(visitor);
}
return !visitor.getFunctionNames().isEmpty();
}
} }

View File

@@ -2,8 +2,10 @@ package com.tencent.supersonic.common.jsqlparser;
import com.tencent.supersonic.common.jsqlparser.DateVisitor.DateBoundInfo; import com.tencent.supersonic.common.jsqlparser.DateVisitor.DateBoundInfo;
import org.junit.Assert; import org.junit.Assert;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
@Disabled
class SqlDateSelectHelperTest { class SqlDateSelectHelperTest {
@Test @Test

View File

@@ -89,12 +89,7 @@ public class DataSetSchema implements Serializable {
} }
public SchemaElement getPartitionDimension() { public SchemaElement getPartitionDimension() {
for (SchemaElement dimension : dimensions) { return dimensions.stream().filter(SchemaElement::isPartitionTime).findFirst().orElse(null);
if (dimension.isPartitionTime()) {
return dimension;
}
}
return null;
} }
public SchemaElement getPrimaryKey() { public SchemaElement getPrimaryKey() {

View File

@@ -4,8 +4,7 @@ import com.google.common.base.Objects;
import com.tencent.supersonic.common.pojo.RecordInfo; import com.tencent.supersonic.common.pojo.RecordInfo;
import com.tencent.supersonic.common.pojo.enums.SensitiveLevelEnum; import com.tencent.supersonic.common.pojo.enums.SensitiveLevelEnum;
import com.tencent.supersonic.common.pojo.enums.TypeEnums; import com.tencent.supersonic.common.pojo.enums.TypeEnums;
import lombok.Data; import lombok.*;
import lombok.ToString;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import java.util.ArrayList; import java.util.ArrayList;
@@ -17,19 +16,20 @@ import java.util.List;
public class SchemaItem extends RecordInfo { public class SchemaItem extends RecordInfo {
private static String aliasSplit = ","; private static String aliasSplit = ",";
private Long id;
private String name; protected Long id;
private String bizName; protected String name;
private String description; protected String bizName;
private Integer status; protected String description;
private TypeEnums typeEnum; protected Integer status;
private Integer sensitiveLevel = SensitiveLevelEnum.LOW.getCode(); protected TypeEnums typeEnum;
protected Integer sensitiveLevel = SensitiveLevelEnum.LOW.getCode();
@Override @Override
public boolean equals(Object o) { public boolean equals(Object o) {

View File

@@ -18,11 +18,6 @@ public enum MetricType {
return null; return null;
} }
public static Boolean isDerived(String src) {
MetricType metricType = of(src);
return Objects.nonNull(metricType) && metricType.equals(DERIVED);
}
public static Boolean isDerived(MetricDefineType metricDefineType, public static Boolean isDerived(MetricDefineType metricDefineType,
MetricDefineByMeasureParams typeParams) { MetricDefineByMeasureParams typeParams) {
if (MetricDefineType.METRIC.equals(metricDefineType)) { if (MetricDefineType.METRIC.equals(metricDefineType)) {

View File

@@ -22,5 +22,4 @@ public class DimSchemaResp extends DimensionResp {
public int hashCode() { public int hashCode() {
return super.hashCode(); return super.hashCode();
} }
} }

View File

@@ -65,4 +65,5 @@ public class SemanticSchemaResp {
} }
return names; return names;
} }
} }

View File

@@ -41,6 +41,7 @@ public abstract class BaseSemanticCorrector implements SemanticCorrector {
protected Map<String, String> getFieldNameMap(ChatQueryContext chatQueryContext, protected Map<String, String> getFieldNameMap(ChatQueryContext chatQueryContext,
Long dataSetId) { Long dataSetId) {
return getFieldNameMapFromDB(chatQueryContext, dataSetId); return getFieldNameMapFromDB(chatQueryContext, dataSetId);
} }

View File

@@ -100,6 +100,8 @@ public class PromptHelper {
} }
if (StringUtils.isNotEmpty(metric.getDefaultAgg())) { if (StringUtils.isNotEmpty(metric.getDefaultAgg())) {
metricStr.append(" AGGREGATE '" + metric.getDefaultAgg().toUpperCase() + "'"); metricStr.append(" AGGREGATE '" + metric.getDefaultAgg().toUpperCase() + "'");
} else {
metricStr.append(" AGGREGATE 'NONE'");
} }
metricStr.append(">"); metricStr.append(">");
metrics.add(metricStr.toString()); metrics.add(metricStr.toString());

View File

@@ -6,7 +6,6 @@ import com.tencent.supersonic.headless.core.pojo.Materialization;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteTable; import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteTable;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteTable.Builder; import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteTable.Builder;
import com.tencent.supersonic.headless.core.translator.parser.calcite.SchemaBuilder; import com.tencent.supersonic.headless.core.translator.parser.calcite.SchemaBuilder;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.TimeRange;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.adapter.enumerable.EnumerableRules; import org.apache.calcite.adapter.enumerable.EnumerableRules;
import org.apache.calcite.config.CalciteConnectionConfigImpl; import org.apache.calcite.config.CalciteConnectionConfigImpl;

View File

@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql; package com.tencent.supersonic.headless.core.executor;
import lombok.Builder; import lombok.Builder;
import lombok.Data; import lombok.Data;

View File

@@ -2,26 +2,32 @@ package com.tencent.supersonic.headless.core.pojo;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp; import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel; import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Dimension; import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Materialization; import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
import lombok.Data; import lombok.Data;
import java.util.*; import java.util.*;
import java.util.stream.Collectors; import java.util.stream.Collectors;
/**
* An ontology comprises a group of data models that can be joined together either in star schema or
* snowflake schema.
*/
@Data @Data
public class Ontology { public class Ontology {
private List<Metric> metrics = new ArrayList<>();
private Map<String, DataModel> dataModelMap = new HashMap<>();
private Map<String, List<Dimension>> dimensionMap = new HashMap<>();
private List<Materialization> materializationList = new ArrayList<>();
private List<JoinRelation> joinRelations;
private DatabaseResp database; private DatabaseResp database;
private Map<String, ModelResp> modelMap = new HashMap<>();
private Map<String, List<MetricSchemaResp>> metricMap = new HashMap<>();
private Map<String, List<DimSchemaResp>> dimensionMap = new HashMap<>();
private List<JoinRelation> joinRelations;
public List<Dimension> getDimensions() { public List<MetricSchemaResp> getMetrics() {
return metricMap.values().stream().flatMap(Collection::stream).collect(Collectors.toList());
}
public List<DimSchemaResp> getDimensions() {
return dimensionMap.values().stream().flatMap(Collection::stream) return dimensionMap.values().stream().flatMap(Collection::stream)
.collect(Collectors.toList()); .collect(Collectors.toList());
} }
@@ -39,4 +45,5 @@ public class Ontology {
} }
return null; return null;
} }
} }

View File

@@ -1,20 +1,60 @@
package com.tencent.supersonic.headless.core.pojo; package com.tencent.supersonic.headless.core.pojo;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets; import com.google.common.collect.Sets;
import com.tencent.supersonic.common.pojo.ColumnOrder; import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption; import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
import lombok.Data; import lombok.Data;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.Set; import java.util.Set;
import java.util.stream.Collectors;
/**
* An ontology query comprises metrics/dimensions that are relevant to the semantic query. Note that
* metrics/dimensions in the ontology query must be a subset of an ontology.
*/
@Data @Data
public class OntologyQuery { public class OntologyQuery {
private Set<String> metrics = Sets.newHashSet();
private Set<String> dimensions = Sets.newHashSet(); private Map<String, ModelResp> modelMap = Maps.newHashMap();
private String where; private Map<String, Set<MetricSchemaResp>> metricMap = Maps.newHashMap();
private Map<String, Set<DimSchemaResp>> dimensionMap = Maps.newHashMap();
private Set<String> fields = Sets.newHashSet();
private Long limit; private Long limit;
private List<ColumnOrder> order; private List<ColumnOrder> order;
private boolean nativeQuery = true; private boolean nativeQuery = true;
private AggOption aggOption = AggOption.NATIVE; private AggOption aggOption = AggOption.NATIVE;
public Set<ModelResp> getModels() {
return modelMap.values().stream().collect(Collectors.toSet());
}
public Set<DimSchemaResp> getDimensions() {
Set<DimSchemaResp> dimensions = Sets.newHashSet();
dimensionMap.entrySet().forEach(entry -> {
dimensions.addAll(entry.getValue());
});
return dimensions;
}
public Set<MetricSchemaResp> getMetrics() {
Set<MetricSchemaResp> metrics = Sets.newHashSet();
metricMap.entrySet().forEach(entry -> {
metrics.addAll(entry.getValue());
});
return metrics;
}
public Set<MetricSchemaResp> getMetricsByModel(String modelName) {
return metricMap.get(modelName);
}
public Set<DimSchemaResp> getDimensionsByModel(String modelName) {
return dimensionMap.get(modelName);
}
} }

View File

@@ -2,6 +2,7 @@ package com.tencent.supersonic.headless.core.translator;
import com.tencent.supersonic.common.calcite.SqlMergeWithUtils; import com.tencent.supersonic.common.calcite.SqlMergeWithUtils;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQuery; import com.tencent.supersonic.headless.core.pojo.SqlQuery;
import com.tencent.supersonic.headless.core.translator.optimizer.QueryOptimizer; import com.tencent.supersonic.headless.core.translator.optimizer.QueryOptimizer;
@@ -34,11 +35,6 @@ public class DefaultSemanticTranslator implements SemanticTranslator {
} }
} }
} }
if (!queryStatement.isOk()) {
throw new Exception(String.format("parse ontology table [%s] error [%s]",
queryStatement.getSqlQuery().getTable(), queryStatement.getErrMsg()));
}
mergeOntologyQuery(queryStatement); mergeOntologyQuery(queryStatement);
if (StringUtils.isNotBlank(queryStatement.getSqlQuery().getSimplifiedSql())) { if (StringUtils.isNotBlank(queryStatement.getSqlQuery().getSimplifiedSql())) {
@@ -62,6 +58,14 @@ public class DefaultSemanticTranslator implements SemanticTranslator {
} }
private void mergeOntologyQuery(QueryStatement queryStatement) throws Exception { private void mergeOntologyQuery(QueryStatement queryStatement) throws Exception {
OntologyQuery ontologyQuery = queryStatement.getOntologyQuery();
log.info("parse with ontology: [{}]", ontologyQuery);
if (!queryStatement.isOk()) {
throw new Exception(String.format("parse ontology table [%s] error [%s]",
queryStatement.getSqlQuery().getTable(), queryStatement.getErrMsg()));
}
SqlQuery sqlQuery = queryStatement.getSqlQuery(); SqlQuery sqlQuery = queryStatement.getSqlQuery();
String ontologyQuerySql = sqlQuery.getSql(); String ontologyQuerySql = sqlQuery.getSql();
String ontologyInnerTable = sqlQuery.getTable(); String ontologyInnerTable = sqlQuery.getTable();
@@ -69,30 +73,29 @@ public class DefaultSemanticTranslator implements SemanticTranslator {
List<Pair<String, String>> tables = new ArrayList<>(); List<Pair<String, String>> tables = new ArrayList<>();
tables.add(Pair.of(ontologyInnerTable, ontologyInnerSql)); tables.add(Pair.of(ontologyInnerTable, ontologyInnerSql));
String finalSql = null;
if (sqlQuery.isSupportWith()) { if (sqlQuery.isSupportWith()) {
EngineType engineType = queryStatement.getOntology().getDatabaseType(); EngineType engineType = queryStatement.getOntology().getDatabaseType();
if (!SqlMergeWithUtils.hasWith(engineType, ontologyQuerySql)) { if (!SqlMergeWithUtils.hasWith(engineType, ontologyQuerySql)) {
String withSql = "with " + tables.stream() finalSql = "with " + tables.stream()
.map(t -> String.format("%s as (%s)", t.getLeft(), t.getRight())) .map(t -> String.format("%s as (%s)", t.getLeft(), t.getRight()))
.collect(Collectors.joining(",")) + "\n" + ontologyQuerySql; .collect(Collectors.joining(",")) + "\n" + ontologyQuerySql;
queryStatement.setSql(withSql);
} else { } else {
List<String> withTableList = List<String> withTableList =
tables.stream().map(Pair::getLeft).collect(Collectors.toList()); tables.stream().map(Pair::getLeft).collect(Collectors.toList());
List<String> withSqlList = List<String> withSqlList =
tables.stream().map(Pair::getRight).collect(Collectors.toList()); tables.stream().map(Pair::getRight).collect(Collectors.toList());
String mergeSql = SqlMergeWithUtils.mergeWith(engineType, ontologyQuerySql, finalSql = SqlMergeWithUtils.mergeWith(engineType, ontologyQuerySql, withSqlList,
withSqlList, withTableList); withTableList);
queryStatement.setSql(mergeSql);
} }
} else { } else {
for (Pair<String, String> tb : tables) { for (Pair<String, String> tb : tables) {
ontologyQuerySql = StringUtils.replace(ontologyQuerySql, tb.getLeft(), finalSql = StringUtils.replace(ontologyQuerySql, tb.getLeft(),
"(" + tb.getRight() + ") " + (sqlQuery.isWithAlias() ? "" : tb.getLeft()), "(" + tb.getRight() + ") " + (sqlQuery.isWithAlias() ? "" : tb.getLeft()),
-1); -1);
} }
queryStatement.setSql(ontologyQuerySql);
} }
queryStatement.setSql(finalSql);
} }
} }

View File

@@ -3,8 +3,8 @@ package com.tencent.supersonic.headless.core.translator;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
/** /**
* SemanticTranslator converts semantic query statement into SQL statement that can be executed * A semantic translator converts semantic query into SQL statement that can be executed against
* against physical data models. * physical data models.
*/ */
public interface SemanticTranslator { public interface SemanticTranslator {

View File

@@ -26,11 +26,8 @@ public class DbDialectOptimizer implements QueryOptimizer {
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchema(); SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchema();
DatabaseResp database = semanticSchemaResp.getDatabaseResp(); DatabaseResp database = semanticSchemaResp.getDatabaseResp();
String sql = queryStatement.getSql(); String sql = queryStatement.getSql();
if (Objects.isNull(database) || Objects.isNull(database.getType())) { DbAdaptor engineAdaptor =
return; DbAdaptorFactory.getEngineAdaptor(database.getType().toLowerCase());
}
String type = database.getType();
DbAdaptor engineAdaptor = DbAdaptorFactory.getEngineAdaptor(type.toLowerCase());
if (Objects.nonNull(engineAdaptor)) { if (Objects.nonNull(engineAdaptor)) {
String adaptedSql = engineAdaptor.rewriteSql(sql); String adaptedSql = engineAdaptor.rewriteSql(sql);
queryStatement.setSql(adaptedSql); queryStatement.setSql(adaptedSql);

View File

@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql; package com.tencent.supersonic.headless.core.translator.parser;
public class Constants { public class Constants {

View File

@@ -3,8 +3,8 @@ package com.tencent.supersonic.headless.core.translator.parser;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.tencent.supersonic.common.jsqlparser.SqlAddHelper; import com.tencent.supersonic.common.jsqlparser.SqlAddHelper;
import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper; import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Dimension;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import net.sf.jsqlparser.expression.Expression; import net.sf.jsqlparser.expression.Expression;
import net.sf.jsqlparser.expression.StringValue; import net.sf.jsqlparser.expression.StringValue;
@@ -20,6 +20,10 @@ import java.util.List;
import java.util.Objects; import java.util.Objects;
import java.util.stream.Collectors; import java.util.stream.Collectors;
/**
* This parser appends default dimension values (if configured) to the where statement.
*/
@Slf4j @Slf4j
@Component("DefaultDimValueParser") @Component("DefaultDimValueParser")
public class DefaultDimValueParser implements QueryParser { public class DefaultDimValueParser implements QueryParser {
@@ -27,12 +31,13 @@ public class DefaultDimValueParser implements QueryParser {
@Override @Override
public boolean accept(QueryStatement queryStatement) { public boolean accept(QueryStatement queryStatement) {
return Objects.nonNull(queryStatement.getSqlQuery()) return Objects.nonNull(queryStatement.getSqlQuery())
&& StringUtils.isNotBlank(queryStatement.getSqlQuery().getSql()); && StringUtils.isNotBlank(queryStatement.getSqlQuery().getSql())
&& !CollectionUtils.isEmpty(queryStatement.getOntology().getDimensions());
} }
@Override @Override
public void parse(QueryStatement queryStatement) { public void parse(QueryStatement queryStatement) {
List<Dimension> dimensions = queryStatement.getOntology().getDimensions().stream() List<DimSchemaResp> dimensions = queryStatement.getOntology().getDimensions().stream()
.filter(dimension -> !CollectionUtils.isEmpty(dimension.getDefaultValues())) .filter(dimension -> !CollectionUtils.isEmpty(dimension.getDefaultValues()))
.collect(Collectors.toList()); .collect(Collectors.toList());
if (CollectionUtils.isEmpty(dimensions)) { if (CollectionUtils.isEmpty(dimensions)) {
@@ -45,7 +50,7 @@ public class DefaultDimValueParser implements QueryParser {
return; return;
} }
List<Expression> expressions = Lists.newArrayList(); List<Expression> expressions = Lists.newArrayList();
for (Dimension dimension : dimensions) { for (DimSchemaResp dimension : dimensions) {
ExpressionList expressionList = new ExpressionList(); ExpressionList expressionList = new ExpressionList();
List<Expression> exprs = new ArrayList<>(); List<Expression> exprs = new ArrayList<>();
dimension.getDefaultValues().forEach(value -> exprs.add(new StringValue(value))); dimension.getDefaultValues().forEach(value -> exprs.add(new StringValue(value)));
@@ -55,7 +60,7 @@ public class DefaultDimValueParser implements QueryParser {
inExpression.setRightExpression(expressionList); inExpression.setRightExpression(expressionList);
expressions.add(inExpression); expressions.add(inExpression);
if (Objects.nonNull(queryStatement.getSqlQuery().getTable())) { if (Objects.nonNull(queryStatement.getSqlQuery().getTable())) {
queryStatement.getOntologyQuery().getDimensions().add(dimension.getBizName()); queryStatement.getOntologyQuery().getDimensions().add(dimension);
} }
} }
sql = SqlAddHelper.addWhere(sql, expressions); sql = SqlAddHelper.addWhere(sql, expressions);

View File

@@ -0,0 +1,67 @@
package com.tencent.supersonic.headless.core.translator.parser;
import com.tencent.supersonic.common.jsqlparser.SqlReplaceHelper;
import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQuery;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
/**
 * This parser replaces dimension bizName in the S2SQL with calculation expression (if configured).
 */
@Component("DimExpressionParser")
@Slf4j
public class DimExpressionParser implements QueryParser {

    /**
     * Accepts only statements that carry a non-blank SQL query and at least one dimension in the
     * ontology query; otherwise there is nothing to replace.
     */
    @Override
    public boolean accept(QueryStatement queryStatement) {
        return Objects.nonNull(queryStatement.getSqlQuery())
                && Objects.nonNull(queryStatement.getOntologyQuery())
                && StringUtils.isNotBlank(queryStatement.getSqlQuery().getSql())
                && !CollectionUtils.isEmpty(queryStatement.getOntologyQuery().getDimensions());
    }

    /**
     * Rewrites the SQL of the statement, substituting each dimension bizName whose expression
     * differs from it by that expression.
     */
    @Override
    public void parse(QueryStatement queryStatement) throws Exception {
        SemanticSchemaResp semanticSchema = queryStatement.getSemanticSchema();
        SqlQuery sqlQuery = queryStatement.getSqlQuery();
        OntologyQuery ontologyQuery = queryStatement.getOntologyQuery();
        Map<String, String> bizName2Expr = getDimensionExpressions(semanticSchema, ontologyQuery);
        if (!CollectionUtils.isEmpty(bizName2Expr)) {
            String sql = SqlReplaceHelper.replaceSqlByExpression(sqlQuery.getSql(), bizName2Expr);
            sqlQuery.setSql(sql);
        }
    }

    /**
     * Builds a bizName -> expression map for every queried dimension whose expression is not
     * simply its own bizName. As a side effect, registers the fields referenced by each
     * dimension expression on both the dimension schema and the ontology query's field set.
     *
     * @param semanticSchema currently unused; kept for signature symmetry with
     *        MetricExpressionParser and possible future lookups
     * @param ontologyQuery the ontology query whose dimensions are inspected (mutated: fields
     *        are added)
     * @return map from dimension bizName to its calculation expression; empty if none differ
     */
    private Map<String, String> getDimensionExpressions(SemanticSchemaResp semanticSchema,
            OntologyQuery ontologyQuery) {
        Set<DimSchemaResp> queryDimensions = ontologyQuery.getDimensions();
        Set<String> queryFields = ontologyQuery.getFields();
        // Fixed SLF4J call: the original had two placeholders but only one argument,
        // and a message copy-pasted from the metric parser.
        log.debug("begin to getDimensionExpressions [{}]", queryDimensions);
        Map<String, String> dim2Expr = new HashMap<>();
        for (DimSchemaResp queryDim : queryDimensions) {
            // Record every physical field the dimension expression references so that
            // downstream ontology SQL generation selects them.
            queryDim.getFields().addAll(SqlSelectHelper.getFieldsFromExpr(queryDim.getExpr()));
            queryFields.addAll(queryDim.getFields());
            if (!queryDim.getBizName().equals(queryDim.getExpr())) {
                dim2Expr.put(queryDim.getBizName(), queryDim.getExpr());
            }
        }
        return dim2Expr;
    }
}

View File

@@ -0,0 +1,130 @@
package com.tencent.supersonic.headless.core.translator.parser;
import com.tencent.supersonic.common.jsqlparser.SqlReplaceHelper;
import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.headless.api.pojo.Measure;
import com.tencent.supersonic.headless.api.pojo.enums.MetricDefineType;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQuery;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.util.*;
/**
 * This parser replaces metric bizName in the S2SQL with calculation expression (if configured).
 */
@Component("MetricExpressionParser")
@Slf4j
public class MetricExpressionParser implements QueryParser {

    /**
     * Accepts only statements that carry a non-blank SQL query and at least one metric in the
     * ontology query; otherwise there is nothing to replace.
     */
    @Override
    public boolean accept(QueryStatement queryStatement) {
        return Objects.nonNull(queryStatement.getSqlQuery())
                && Objects.nonNull(queryStatement.getOntologyQuery())
                && StringUtils.isNotBlank(queryStatement.getSqlQuery().getSql())
                && !CollectionUtils.isEmpty(queryStatement.getOntologyQuery().getMetrics());
    }

    /**
     * Rewrites the SQL of the statement, substituting each metric bizName by its fully expanded
     * calculation expression (recursively resolving metric-of-metric definitions).
     */
    @Override
    public void parse(QueryStatement queryStatement) throws Exception {
        SemanticSchemaResp semanticSchema = queryStatement.getSemanticSchema();
        SqlQuery sqlQuery = queryStatement.getSqlQuery();
        OntologyQuery ontologyQuery = queryStatement.getOntologyQuery();
        Map<String, String> bizName2Expr = getMetricExpressions(semanticSchema, ontologyQuery);
        if (!CollectionUtils.isEmpty(bizName2Expr)) {
            String sql = SqlReplaceHelper.replaceSqlByExpression(sqlQuery.getSql(), bizName2Expr);
            sqlQuery.setSql(sql);
        }
    }

    /**
     * Builds a bizName -> expanded-expression map for every queried metric whose expansion is not
     * simply its own bizName. As a side effect, registers the fields referenced by each expanded
     * expression on both the metric schema and the ontology query's field set.
     *
     * @param semanticSchema provides the full metric list and the model measures used as the
     *        leaves of the expansion
     * @param ontologyQuery the ontology query whose metrics are inspected (mutated: fields are
     *        added)
     * @return map from metric bizName to its expanded expression; empty if none differ
     */
    private Map<String, String> getMetricExpressions(SemanticSchemaResp semanticSchema,
            OntologyQuery ontologyQuery) {
        List<MetricSchemaResp> allMetrics = semanticSchema.getMetrics();
        Set<MetricSchemaResp> queryMetrics = ontologyQuery.getMetrics();
        Set<String> queryFields = ontologyQuery.getFields();
        // Fixed SLF4J call: the original had two placeholders but only one argument.
        log.debug("begin to getMetricExpressions [{}]", queryMetrics);
        // Index all measures declared by the models; they are the leaves of
        // metric-definition expansion. (The original also accumulated every model's
        // field list into an unused local; that dead work is removed.)
        Map<String, Measure> allMeasures = new HashMap<>();
        semanticSchema.getModelResps().forEach(modelResp -> {
            if (modelResp.getModelDetail().getMeasures() != null) {
                modelResp.getModelDetail().getMeasures()
                        .forEach(measure -> allMeasures.put(measure.getBizName(), measure));
            }
        });

        // Memoizes already-expanded metrics so shared sub-metrics are resolved once
        // and cyclic definitions cannot recurse forever.
        Map<String, String> visitedMetrics = new HashMap<>();
        Map<String, String> metric2Expr = new HashMap<>();
        for (MetricSchemaResp queryMetric : queryMetrics) {
            String fieldExpr = buildFieldExpr(allMetrics, allMeasures, queryMetric.getExpr(),
                    queryMetric.getMetricDefineType(), visitedMetrics);
            // add all fields referenced in the expression
            queryMetric.getFields().addAll(SqlSelectHelper.getFieldsFromExpr(fieldExpr));
            queryFields.addAll(queryMetric.getFields());
            if (!queryMetric.getBizName().equals(fieldExpr)) {
                metric2Expr.put(queryMetric.getBizName(), fieldExpr);
            }
        }
        return metric2Expr;
    }

    /**
     * Recursively expands a metric expression: METRIC-typed fields are replaced by their parent
     * metric's expansion, MEASURE-typed fields by the (optionally aggregated) measure expression,
     * and FIELD-typed expressions are returned unchanged.
     *
     * @param metricResps all known metrics, searched by bizName for METRIC expansion
     * @param allMeasures all known measures keyed by bizName, used for MEASURE expansion
     * @param metricExpr the expression to expand
     * @param metricDefineType how the fields inside {@code metricExpr} should be interpreted
     * @param visitedMetric memo of already-expanded metric bizNames (mutated)
     * @return the expanded expression, or {@code metricExpr} unchanged if nothing was replaced
     */
    private String buildFieldExpr(final List<MetricSchemaResp> metricResps,
            final Map<String, Measure> allMeasures, final String metricExpr,
            final MetricDefineType metricDefineType, Map<String, String> visitedMetric) {
        Set<String> fields = SqlSelectHelper.getFieldsFromExpr(metricExpr);
        if (!CollectionUtils.isEmpty(fields)) {
            Map<String, String> replace = new HashMap<>();
            for (String field : fields) {
                switch (metricDefineType) {
                    case METRIC:
                        // if defineType=METRIC, field should be the bizName of its parent metric
                        Optional<MetricSchemaResp> metricItem = metricResps.stream()
                                .filter(m -> m.getBizName().equalsIgnoreCase(field)).findFirst();
                        if (metricItem.isPresent()) {
                            // containsKey instead of keySet().contains() — same semantics,
                            // direct map lookup.
                            if (visitedMetric.containsKey(field)) {
                                replace.put(field, visitedMetric.get(field));
                                break;
                            }
                            replace.put(field,
                                    buildFieldExpr(metricResps, allMeasures,
                                            metricItem.get().getExpr(),
                                            metricItem.get().getMetricDefineType(), visitedMetric));
                            visitedMetric.put(field, replace.get(field));
                        }
                        break;
                    case MEASURE:
                        // if defineType=MEASURE, field should be the bizName of its measure
                        if (allMeasures.containsKey(field)) {
                            Measure measure = allMeasures.get(field);
                            String expr = metricExpr;
                            if (Objects.nonNull(measure.getAgg())) {
                                // NOTE(review): this wraps the WHOLE metricExpr in the measure's
                                // aggregation, not just `field`. With multiple measures in one
                                // expression each replacement would aggregate the full expression
                                // — confirm this is intended before changing it.
                                expr = String.format("%s (%s)", measure.getAgg(), metricExpr);
                            }
                            replace.put(field, expr);
                        }
                        break;
                    case FIELD:
                    default:
                        // FIELD expressions reference physical columns directly; no rewrite.
                        break;
                }
            }
            if (!CollectionUtils.isEmpty(replace)) {
                String expr = SqlReplaceHelper.replaceExpression(metricExpr, replace);
                log.debug("derived metric {}->{}", metricExpr, expr);
                return expr;
            }
        }
        return metricExpr;
    }
}

View File

@@ -5,13 +5,8 @@ import com.tencent.supersonic.common.pojo.enums.AggOperatorEnum;
import com.tencent.supersonic.common.pojo.enums.DatePeriodEnum; import com.tencent.supersonic.common.pojo.enums.DatePeriodEnum;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.common.util.ContextUtils; import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.common.util.DateModeUtils;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption; import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp; import com.tencent.supersonic.headless.core.pojo.*;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQuery;
import com.tencent.supersonic.headless.core.pojo.StructQuery;
import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils; import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
@@ -60,9 +55,8 @@ public class MetricRatioParser implements QueryParser {
@Override @Override
public void parse(QueryStatement queryStatement) throws Exception { public void parse(QueryStatement queryStatement) throws Exception {
DatabaseResp database = queryStatement.getOntology().getDatabase(); Ontology ontology = queryStatement.getOntology();
generateRatioSql(queryStatement, queryStatement.getOntology().getDatabaseType(), generateRatioSql(queryStatement, ontology.getDatabaseType(), ontology.getDatabaseVersion());
database.getVersion());
} }
/** Ratio */ /** Ratio */
@@ -89,8 +83,8 @@ public class MetricRatioParser implements QueryParser {
boolean isOver = isOverRatio(structQuery); boolean isOver = isOverRatio(structQuery);
String sql = ""; String sql = "";
SqlQuery dsParam = queryStatement.getSqlQuery(); SqlQuery sqlQuery = queryStatement.getSqlQuery();
dsParam.setTable(metricTableName); sqlQuery.setTable(metricTableName);
switch (engineTypeEnum) { switch (engineTypeEnum) {
case H2: case H2:
sql = new H2EngineSql().sql(structQuery, isOver, true, metricTableName); sql = new H2EngineSql().sql(structQuery, isOver, true, metricTableName);
@@ -99,19 +93,19 @@ public class MetricRatioParser implements QueryParser {
case DORIS: case DORIS:
case CLICKHOUSE: case CLICKHOUSE:
if (!sqlGenerateUtils.isSupportWith(engineTypeEnum, version)) { if (!sqlGenerateUtils.isSupportWith(engineTypeEnum, version)) {
dsParam.setSupportWith(false); sqlQuery.setSupportWith(false);
} }
if (!engineTypeEnum.equals(engineTypeEnum.CLICKHOUSE)) { if (!engineTypeEnum.equals(engineTypeEnum.CLICKHOUSE)) {
sql = new MysqlEngineSql().sql(structQuery, isOver, dsParam.isSupportWith(), sql = new MysqlEngineSql().sql(structQuery, isOver, sqlQuery.isSupportWith(),
metricTableName); metricTableName);
} else { } else {
sql = new CkEngineSql().sql(structQuery, isOver, dsParam.isSupportWith(), sql = new CkEngineSql().sql(structQuery, isOver, sqlQuery.isSupportWith(),
metricTableName); metricTableName);
} }
break; break;
default: default:
} }
dsParam.setSql(sql); sqlQuery.setSql(sql);
} }
public class H2EngineSql implements EngineSql { public class H2EngineSql implements EngineSql {
@@ -346,15 +340,8 @@ public class MetricRatioParser implements QueryParser {
return CollectionUtils.isEmpty(groups) ? aggStr : String.join(",", groups) + "," + aggStr; return CollectionUtils.isEmpty(groups) ? aggStr : String.join(",", groups) + "," + aggStr;
} }
private String getGroupDimWithOutTime(StructQuery structQuery) {
String timeDim = getTimeDim(structQuery);
return structQuery.getGroups().stream().filter(f -> !f.equalsIgnoreCase(timeDim))
.collect(Collectors.joining(","));
}
private static String getTimeDim(StructQuery structQuery) { private static String getTimeDim(StructQuery structQuery) {
DateModeUtils dateModeUtils = ContextUtils.getContext().getBean(DateModeUtils.class); return structQuery.getDateInfo().getDateField();
return dateModeUtils.getSysDateCol(structQuery.getDateInfo());
} }
private static String getLimit(StructQuery structQuery) { private static String getLimit(StructQuery structQuery) {
@@ -380,13 +367,6 @@ public class MetricRatioParser implements QueryParser {
return sqlGenerateUtils.getSelectField(agg); return sqlGenerateUtils.getSelectField(agg);
} }
private String getGroupBy(StructQuery structQuery) {
if (CollectionUtils.isEmpty(structQuery.getGroups())) {
return "";
}
return "group by " + String.join(",", structQuery.getGroups());
}
private static String getOrderBy(StructQuery structQuery) { private static String getOrderBy(StructQuery structQuery) {
return "order by " + getTimeDim(structQuery) + " desc"; return "order by " + getTimeDim(structQuery) + " desc";
} }

View File

@@ -2,7 +2,6 @@ package com.tencent.supersonic.headless.core.translator.parser;
import com.tencent.supersonic.headless.core.pojo.Ontology; import com.tencent.supersonic.headless.core.pojo.Ontology;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.parser.calcite.RuntimeOptions;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema; import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.calcite.SqlBuilder; import com.tencent.supersonic.headless.core.translator.parser.calcite.SqlBuilder;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
@@ -10,7 +9,10 @@ import org.springframework.stereotype.Component;
import java.util.Objects; import java.util.Objects;
/** the calcite parse implements */ /**
* This parser generates inner sql statement for the ontology query, which would be selected by the
* parsed sql query.
*/
@Component("OntologyQueryParser") @Component("OntologyQueryParser")
@Slf4j @Slf4j
public class OntologyQueryParser implements QueryParser { public class OntologyQueryParser implements QueryParser {

View File

@@ -2,7 +2,9 @@ package com.tencent.supersonic.headless.core.translator.parser;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
/** A query parser generates physical SQL for the QueryStatement. */ /**
* A query parser generates physical SQL for the QueryStatement.
*/
public interface QueryParser { public interface QueryParser {
boolean accept(QueryStatement queryStatement); boolean accept(QueryStatement queryStatement);

View File

@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite; package com.tencent.supersonic.headless.core.translator.parser;
import lombok.Builder; import lombok.Builder;
import lombok.Data; import lombok.Data;

View File

@@ -1,18 +1,17 @@
package com.tencent.supersonic.headless.core.translator.parser; package com.tencent.supersonic.headless.core.translator.parser;
import com.google.common.collect.Lists; import com.google.common.collect.Sets;
import com.tencent.supersonic.common.jsqlparser.SqlAsHelper;
import com.tencent.supersonic.common.jsqlparser.SqlReplaceHelper; import com.tencent.supersonic.common.jsqlparser.SqlReplaceHelper;
import com.tencent.supersonic.common.jsqlparser.SqlSelectFunctionHelper; import com.tencent.supersonic.common.jsqlparser.SqlSelectFunctionHelper;
import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper; import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.common.pojo.Constants; import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.common.util.ContextUtils; import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.headless.api.pojo.Measure;
import com.tencent.supersonic.headless.api.pojo.SchemaItem; import com.tencent.supersonic.headless.api.pojo.SchemaItem;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption; import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.api.pojo.enums.MetricType; import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.*; import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.core.pojo.Ontology;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery; import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQuery; import com.tencent.supersonic.headless.core.pojo.SqlQuery;
@@ -21,12 +20,15 @@ import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair; import org.apache.commons.lang3.tuple.Pair;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.util.*; import java.util.*;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import java.util.stream.Stream; import java.util.stream.Stream;
/**
* This parser rewrites S2SQL including conversion from metric/dimension name to bizName and build
* ontology query in preparation for generation of physical SQL.
*/
@Component("SqlQueryParser") @Component("SqlQueryParser")
@Slf4j @Slf4j
public class SqlQueryParser implements QueryParser { public class SqlQueryParser implements QueryParser {
@@ -38,69 +40,46 @@ public class SqlQueryParser implements QueryParser {
@Override @Override
public void parse(QueryStatement queryStatement) throws Exception { public void parse(QueryStatement queryStatement) throws Exception {
// build ontologyQuery
SqlQuery sqlQuery = queryStatement.getSqlQuery();
List<String> queryFields = SqlSelectHelper.getAllSelectFields(sqlQuery.getSql());
Ontology ontology = queryStatement.getOntology();
OntologyQuery ontologyQuery = buildOntologyQuery(ontology, queryFields);
// check if there are fields not matched with any metric or dimension
if (queryFields.size() > ontologyQuery.getMetrics().size()
+ ontologyQuery.getDimensions().size()) {
queryStatement
.setErrMsg("There are fields in the SQL not matched with any semantic column.");
queryStatement.setStatus(1);
return;
}
queryStatement.setOntologyQuery(ontologyQuery);
AggOption sqlQueryAggOption = getAggOption(sqlQuery.getSql(), ontologyQuery.getMetrics());
ontologyQuery.setAggOption(sqlQueryAggOption);
convertNameToBizName(queryStatement); convertNameToBizName(queryStatement);
rewriteOrderBy(queryStatement); rewriteOrderBy(queryStatement);
// fill sqlQuery // fill sqlQuery
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchema();
SqlQuery sqlQuery = queryStatement.getSqlQuery();
String tableName = SqlSelectHelper.getTableName(sqlQuery.getSql()); String tableName = SqlSelectHelper.getTableName(sqlQuery.getSql());
if (StringUtils.isEmpty(tableName)) { if (StringUtils.isEmpty(tableName)) {
return; return;
} }
sqlQuery.setTable(tableName.toLowerCase()); sqlQuery.setTable(tableName.toLowerCase());
SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class); SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class);
SemanticSchemaResp semanticSchema = queryStatement.getSemanticSchema();
if (!sqlGenerateUtils.isSupportWith( if (!sqlGenerateUtils.isSupportWith(
EngineType.fromString(semanticSchemaResp.getDatabaseResp().getType().toUpperCase()), EngineType.fromString(semanticSchema.getDatabaseResp().getType().toUpperCase()),
semanticSchemaResp.getDatabaseResp().getVersion())) { semanticSchema.getDatabaseResp().getVersion())) {
sqlQuery.setSupportWith(false); sqlQuery.setSupportWith(false);
sqlQuery.setWithAlias(false); sqlQuery.setWithAlias(false);
} }
// build ontologyQuery
List<String> queryFields = SqlSelectHelper.getAllSelectFields(sqlQuery.getSql());
List<MetricSchemaResp> metricSchemas = getMetrics(semanticSchemaResp, queryFields);
List<String> metrics =
metricSchemas.stream().map(SchemaItem::getBizName).collect(Collectors.toList());
List<DimSchemaResp> dimensionSchemas = getDimensions(semanticSchemaResp, queryFields);
List<String> dimensions =
dimensionSchemas.stream().map(SchemaItem::getBizName).collect(Collectors.toList());
// check if there are fields not matched with any metric or dimension
if (queryFields.size() > metricSchemas.size() + dimensions.size()) {
List<String> semanticFields = Lists.newArrayList();
metricSchemas.forEach(m -> semanticFields.add(m.getBizName()));
dimensionSchemas.forEach(d -> semanticFields.add(d.getBizName()));
String errMsg =
String.format("Querying columns[%s] not matched with semantic fields[%s].",
queryFields, semanticFields);
queryStatement.setErrMsg(errMsg);
queryStatement.setStatus(1);
return;
}
OntologyQuery ontologyQuery = new OntologyQuery();
ontologyQuery.getMetrics().addAll(metrics);
ontologyQuery.getDimensions().addAll(dimensions);
AggOption sqlQueryAggOption = getAggOption(sqlQuery.getSql(), metricSchemas);
// if sql query itself has aggregation, ontology query just returns detail
if (sqlQueryAggOption.equals(AggOption.AGGREGATION)) {
ontologyQuery.setAggOption(AggOption.NATIVE);
} else if (sqlQueryAggOption.equals(AggOption.NATIVE) && !metrics.isEmpty()) {
ontologyQuery.setAggOption(AggOption.DEFAULT);
}
ontologyQuery.setNativeQuery(!AggOption.isAgg(ontologyQuery.getAggOption()));
queryStatement.setOntologyQuery(ontologyQuery);
generateDerivedMetric(sqlGenerateUtils, queryStatement);
queryStatement.setSql(sqlQuery.getSql());
// replace sql fields for db, must called after convertNameToBizName
String sqlRewrite = replaceSqlFieldsForHanaDB(queryStatement, sqlQuery.getSql());
sqlQuery.setSql(sqlRewrite);
log.info("parse sqlQuery [{}] ", sqlQuery); log.info("parse sqlQuery [{}] ", sqlQuery);
} }
private AggOption getAggOption(String sql, List<MetricSchemaResp> metricSchemas) { private AggOption getAggOption(String sql, Set<MetricSchemaResp> metricSchemas) {
if (SqlSelectFunctionHelper.hasAggregateFunction(sql)) { if (SqlSelectFunctionHelper.hasAggregateFunction(sql)) {
return AggOption.AGGREGATION; return AggOption.AGGREGATION;
} }
@@ -133,190 +112,13 @@ public class SqlQueryParser implements QueryParser {
return AggOption.DEFAULT; return AggOption.DEFAULT;
} }
private List<DimSchemaResp> getDimensions(SemanticSchemaResp semanticSchemaResp, private Map<String, String> getNameToBizNameMap(OntologyQuery query) {
List<String> allFields) {
Map<String, DimSchemaResp> dimensionLowerToNameMap =
semanticSchemaResp.getDimensions().stream().collect(Collectors
.toMap(entry -> entry.getBizName().toLowerCase(), entry -> entry));
return allFields.stream()
.filter(entry -> dimensionLowerToNameMap.containsKey(entry.toLowerCase()))
.map(entry -> dimensionLowerToNameMap.get(entry.toLowerCase()))
.collect(Collectors.toList());
}
private List<MetricSchemaResp> getMetrics(SemanticSchemaResp semanticSchemaResp,
List<String> allFields) {
Map<String, MetricSchemaResp> metricLowerToNameMap =
semanticSchemaResp.getMetrics().stream().collect(Collectors
.toMap(entry -> entry.getBizName().toLowerCase(), entry -> entry));
return allFields.stream()
.filter(entry -> metricLowerToNameMap.containsKey(entry.toLowerCase()))
.map(entry -> metricLowerToNameMap.get(entry.toLowerCase()))
.collect(Collectors.toList());
}
private void generateDerivedMetric(SqlGenerateUtils sqlGenerateUtils,
QueryStatement queryStatement) {
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchema();
SqlQuery sqlParam = queryStatement.getSqlQuery();
OntologyQuery ontologyParam = queryStatement.getOntologyQuery();
String sql = sqlParam.getSql();
Set<String> measures = new HashSet<>();
Map<String, String> replaces = generateDerivedMetric(sqlGenerateUtils, semanticSchemaResp,
ontologyParam.getAggOption(), ontologyParam.getMetrics(),
ontologyParam.getDimensions(), measures);
if (!CollectionUtils.isEmpty(replaces)) {
// metricTable sql use measures replace metric
sql = SqlReplaceHelper.replaceSqlByExpression(sql, replaces);
ontologyParam.setAggOption(AggOption.NATIVE);
// metricTable use measures replace metric
if (!CollectionUtils.isEmpty(measures)) {
ontologyParam.getMetrics().addAll(measures);
} else {
// empty measure , fill default
ontologyParam.getMetrics().add(sqlGenerateUtils.generateInternalMetricName(
getDefaultModel(semanticSchemaResp, ontologyParam.getDimensions())));
}
}
sqlParam.setSql(sql);
}
private Map<String, String> generateDerivedMetric(SqlGenerateUtils sqlGenerateUtils,
SemanticSchemaResp semanticSchemaResp, AggOption aggOption, Set<String> metrics,
Set<String> dimensions, Set<String> measures) {
Map<String, String> result = new HashMap<>();
List<MetricSchemaResp> metricResps = semanticSchemaResp.getMetrics();
List<DimSchemaResp> dimensionResps = semanticSchemaResp.getDimensions();
// Check if any metric is derived
boolean hasDerivedMetrics =
metricResps.stream().anyMatch(m -> metrics.contains(m.getBizName()) && MetricType
.isDerived(m.getMetricDefineType(), m.getMetricDefineByMeasureParams()));
if (!hasDerivedMetrics) {
return result;
}
log.debug("begin to generateDerivedMetric {} [{}]", aggOption, metrics);
Set<String> allFields = new HashSet<>();
Map<String, Measure> allMeasures = new HashMap<>();
semanticSchemaResp.getModelResps().forEach(modelResp -> {
allFields.addAll(modelResp.getFieldList());
if (modelResp.getModelDetail().getMeasures() != null) {
modelResp.getModelDetail().getMeasures()
.forEach(measure -> allMeasures.put(measure.getBizName(), measure));
}
});
Set<String> derivedDimensions = new HashSet<>();
Set<String> derivedMetrics = new HashSet<>();
Map<String, String> visitedMetrics = new HashMap<>();
for (MetricResp metricResp : metricResps) {
if (metrics.contains(metricResp.getBizName())) {
boolean isDerived = MetricType.isDerived(metricResp.getMetricDefineType(),
metricResp.getMetricDefineByMeasureParams());
if (isDerived) {
String expr = sqlGenerateUtils.generateDerivedMetric(metricResps, allFields,
allMeasures, dimensionResps, sqlGenerateUtils.getExpr(metricResp),
metricResp.getMetricDefineType(), aggOption, visitedMetrics,
derivedMetrics, derivedDimensions);
result.put(metricResp.getBizName(), expr);
log.debug("derived metric {}->{}", metricResp.getBizName(), expr);
} else {
measures.add(metricResp.getBizName());
}
}
}
measures.addAll(derivedMetrics);
derivedDimensions.stream().filter(dimension -> !dimensions.contains(dimension))
.forEach(dimensions::add);
return result;
}
/**
* special process for hanaDB,the sap hana DB don't support the chinese name as the column name,
* so we need to quote the column name after converting the convertNameToBizName called
*
* sap hana DB will auto translate the colume to upper case letter if not quoted. also we need
* to quote the field name if it is a lower case letter.
*
* @param queryStatement
* @param sql
* @return
*/
private String replaceSqlFieldsForHanaDB(QueryStatement queryStatement, String sql) {
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchema();
if (!semanticSchemaResp.getDatabaseResp().getType()
.equalsIgnoreCase(EngineType.HANADB.getName())) {
return sql;
}
Map<String, String> fieldNameToBizNameMap = getFieldNameToBizNameMap(semanticSchemaResp);
Map<String, String> fieldNameToBizNameMapQuote = new HashMap<>();
fieldNameToBizNameMap.forEach((key, value) -> {
if (!fieldNameToBizNameMapQuote.containsKey(value) && !value.matches("\".*\"")
&& !value.matches("[A-Z0-9_].*?")) {
fieldNameToBizNameMapQuote.put(value, "\"" + value + "\"");
}
});
String sqlNew = sql;
if (fieldNameToBizNameMapQuote.size() > 0) {
sqlNew = SqlReplaceHelper.replaceFields(sql, fieldNameToBizNameMapQuote, true);
}
// replace alias field name
List<String> asFields = SqlAsHelper.getAsFields(sqlNew);
Map<String, String> fieldMapput = new HashMap<>();
for (String asField : asFields) {
String value = asField;
if (!value.matches("\".*?\"") && !value.matches("[A-Z0-9_].*?")) {
value = "\"" + asField + "\"";
fieldMapput.put(asField, value);
}
}
if (fieldMapput.size() > 0) {
sqlNew = SqlReplaceHelper.replaceAliasFieldName(sqlNew, fieldMapput);
}
return sqlNew;
}
private void convertNameToBizName(QueryStatement queryStatement) {
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchema();
Map<String, String> fieldNameToBizNameMap = getFieldNameToBizNameMap(semanticSchemaResp);
String sql = queryStatement.getSqlQuery().getSql();
log.debug("dataSetId:{},convert name to bizName before:{}", queryStatement.getDataSetId(),
sql);
sql = SqlReplaceHelper.replaceFields(sql, fieldNameToBizNameMap, true);
log.debug("dataSetId:{},convert name to bizName after:{}", queryStatement.getDataSetId(),
sql);
sql = SqlReplaceHelper.replaceTable(sql,
Constants.TABLE_PREFIX + queryStatement.getDataSetId());
log.debug("replaceTableName after:{}", sql);
queryStatement.getSqlQuery().setSql(sql);
}
private void rewriteOrderBy(QueryStatement queryStatement) {
// replace order by field with the select sequence number
String sql = queryStatement.getSqlQuery().getSql();
String newSql = SqlReplaceHelper.replaceAggAliasOrderbyField(sql);
log.debug("replaceOrderAggSameAlias {} -> {}", sql, newSql);
queryStatement.getSqlQuery().setSql(newSql);
}
protected Map<String, String> getFieldNameToBizNameMap(SemanticSchemaResp semanticSchemaResp) {
// support fieldName and field alias to bizName // support fieldName and field alias to bizName
Map<String, String> dimensionResults = semanticSchemaResp.getDimensions().stream().flatMap( Map<String, String> dimensionResults = query.getDimensions().stream().flatMap(
entry -> getPairStream(entry.getAlias(), entry.getName(), entry.getBizName())) entry -> getPairStream(entry.getAlias(), entry.getName(), entry.getBizName()))
.collect(Collectors.toMap(Pair::getLeft, Pair::getRight, (k1, k2) -> k1)); .collect(Collectors.toMap(Pair::getLeft, Pair::getRight, (k1, k2) -> k1));
Map<String, String> metricResults = semanticSchemaResp.getMetrics().stream().flatMap( Map<String, String> metricResults = query.getMetrics().stream().flatMap(
entry -> getPairStream(entry.getAlias(), entry.getName(), entry.getBizName())) entry -> getPairStream(entry.getAlias(), entry.getName(), entry.getBizName()))
.collect(Collectors.toMap(Pair::getLeft, Pair::getRight, (k1, k2) -> k1)); .collect(Collectors.toMap(Pair::getLeft, Pair::getRight, (k1, k2) -> k1));
@@ -337,18 +139,93 @@ public class SqlQueryParser implements QueryParser {
return elements.stream(); return elements.stream();
} }
private String getDefaultModel(SemanticSchemaResp semanticSchemaResp, Set<String> dimensions) { private void convertNameToBizName(QueryStatement queryStatement) {
if (!CollectionUtils.isEmpty(dimensions)) { Map<String, String> fieldNameToBizNameMap =
Map<String, Long> modelMatchCnt = new HashMap<>(); getNameToBizNameMap(queryStatement.getOntologyQuery());
for (ModelResp modelResp : semanticSchemaResp.getModelResps()) { String sql = queryStatement.getSqlQuery().getSql();
modelMatchCnt.put(modelResp.getBizName(), modelResp.getModelDetail().getDimensions() log.debug("dataSetId:{},convert name to bizName before:{}", queryStatement.getDataSetId(),
.stream().filter(d -> dimensions.contains(d.getBizName())).count()); sql);
sql = SqlReplaceHelper.replaceFields(sql, fieldNameToBizNameMap, true);
log.debug("dataSetId:{},convert name to bizName after:{}", queryStatement.getDataSetId(),
sql);
sql = SqlReplaceHelper.replaceTable(sql,
Constants.TABLE_PREFIX + queryStatement.getDataSetId());
log.debug("replaceTableName after:{}", sql);
queryStatement.getSqlQuery().setSql(sql);
} }
return modelMatchCnt.entrySet().stream()
.sorted(Map.Entry.comparingByValue(Comparator.reverseOrder())) private void rewriteOrderBy(QueryStatement queryStatement) {
.map(Map.Entry::getKey).findFirst().orElse(""); // replace order by field with the select sequence number
String sql = queryStatement.getSqlQuery().getSql();
String newSql = SqlReplaceHelper.replaceAggAliasOrderbyField(sql);
log.debug("replaceOrderAggSameAlias {} -> {}", sql, newSql);
queryStatement.getSqlQuery().setSql(newSql);
} }
return semanticSchemaResp.getModelResps().get(0).getBizName();
private OntologyQuery buildOntologyQuery(Ontology ontology, List<String> queryFields) {
OntologyQuery ontologyQuery = new OntologyQuery();
Set<String> fields = Sets.newHashSet(queryFields);
// find belonging model for every querying metrics
ontology.getMetricMap().entrySet().forEach(entry -> {
String modelName = entry.getKey();
entry.getValue().forEach(m -> {
if (fields.contains(m.getName()) || fields.contains(m.getBizName())) {
if (!ontologyQuery.getMetricMap().containsKey(modelName)) {
ontologyQuery.getMetricMap().put(modelName, Sets.newHashSet());
}
ontologyQuery.getModelMap().put(modelName,
ontology.getModelMap().get(modelName));
ontologyQuery.getMetricMap().get(modelName).add(m);
fields.remove(m.getName());
fields.remove(m.getBizName());
}
});
});
// first try to find all querying dimensions in the models with querying metrics.
ontology.getDimensionMap().entrySet().stream()
.filter(entry -> ontologyQuery.getMetricMap().containsKey(entry.getKey()))
.forEach(entry -> {
String modelName = entry.getKey();
entry.getValue().forEach(d -> {
if (fields.contains(d.getName()) || fields.contains(d.getBizName())) {
if (!ontologyQuery.getDimensionMap().containsKey(entry.getKey())) {
ontologyQuery.getDimensionMap().put(entry.getKey(),
Sets.newHashSet());
}
ontologyQuery.getModelMap().put(modelName,
ontology.getModelMap().get(modelName));
ontologyQuery.getDimensionMap().get(entry.getKey()).add(d);
fields.remove(d.getName());
fields.remove(d.getBizName());
}
});
});
// if there are still fields not found belonging models, try to find in the models without
// querying metrics.
if (!fields.isEmpty()) {
ontology.getDimensionMap().entrySet().forEach(entry -> {
String modelName = entry.getKey();
if (!ontologyQuery.getDimensionMap().containsKey(modelName)) {
entry.getValue().forEach(d -> {
if (fields.contains(d.getName()) || fields.contains(d.getBizName())) {
if (!ontologyQuery.getDimensionMap().containsKey(modelName)) {
ontologyQuery.getDimensionMap().put(modelName, Sets.newHashSet());
}
ontologyQuery.getModelMap().put(modelName,
ontology.getModelMap().get(modelName));
ontologyQuery.getDimensionMap().get(modelName).add(d);
fields.remove(d.getName());
fields.remove(d.getBizName());
}
});
}
});
}
return ontologyQuery;
} }
} }

View File

@@ -4,7 +4,6 @@ import com.tencent.supersonic.headless.api.pojo.enums.ModelDefineType;
import com.tencent.supersonic.headless.api.pojo.response.ModelResp; import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp; import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.utils.SqlVariableParseUtils; import com.tencent.supersonic.headless.core.utils.SqlVariableParseUtils;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
@@ -36,9 +35,9 @@ public class SqlVariableParser implements QueryParser {
SqlVariableParseUtils.parse(modelResp.getModelDetail().getSqlQuery(), SqlVariableParseUtils.parse(modelResp.getModelDetail().getSqlQuery(),
modelResp.getModelDetail().getSqlVariables(), modelResp.getModelDetail().getSqlVariables(),
queryStatement.getStructQuery().getParams()); queryStatement.getStructQuery().getParams());
DataModel dataModel = ModelResp dataModel =
queryStatement.getOntology().getDataModelMap().get(modelResp.getBizName()); queryStatement.getOntology().getModelMap().get(modelResp.getBizName());
dataModel.setSqlQuery(sqlParsed); dataModel.getModelDetail().setSqlQuery(sqlParsed);
} }
} }
} }

View File

@@ -1,10 +1,6 @@
package com.tencent.supersonic.headless.core.translator.parser; package com.tencent.supersonic.headless.core.translator.parser;
import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.common.util.ContextUtils; import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQuery; import com.tencent.supersonic.headless.core.pojo.SqlQuery;
import com.tencent.supersonic.headless.core.pojo.StructQuery; import com.tencent.supersonic.headless.core.pojo.StructQuery;
@@ -13,8 +9,11 @@ import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
import java.util.Objects; import java.util.Objects;
import java.util.stream.Collectors;
/**
* This parser converts struct semantic query into sql query by generating S2SQL based on structured
* semantic information.
*/
@Component("StructQueryParser") @Component("StructQueryParser")
@Slf4j @Slf4j
public class StructQueryParser implements QueryParser { public class StructQueryParser implements QueryParser {
@@ -29,42 +28,30 @@ public class StructQueryParser implements QueryParser {
SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class); SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class);
StructQuery structQuery = queryStatement.getStructQuery(); StructQuery structQuery = queryStatement.getStructQuery();
String dsTable = "t_1"; String dsTable = queryStatement.getDataSetName();
SqlQuery sqlParam = new SqlQuery(); if (Objects.isNull(dsTable)) {
sqlParam.setTable(dsTable); dsTable = "t_ds_temp";
String sql = String.format("select %s from %s %s %s %s", }
SqlQuery sqlQuery = new SqlQuery();
sqlQuery.setTable(dsTable);
String sql = String.format("select %s from %s %s %s %s %s",
sqlGenerateUtils.getSelect(structQuery), dsTable, sqlGenerateUtils.getSelect(structQuery), dsTable,
sqlGenerateUtils.generateWhere(structQuery, null),
sqlGenerateUtils.getGroupBy(structQuery), sqlGenerateUtils.getOrderBy(structQuery), sqlGenerateUtils.getGroupBy(structQuery), sqlGenerateUtils.getOrderBy(structQuery),
sqlGenerateUtils.getLimit(structQuery)); sqlGenerateUtils.getLimit(structQuery));
if (!sqlGenerateUtils.isSupportWith(queryStatement.getOntology().getDatabaseType(), if (!sqlGenerateUtils.isSupportWith(queryStatement.getOntology().getDatabaseType(),
queryStatement.getOntology().getDatabaseVersion())) { queryStatement.getOntology().getDatabaseVersion())) {
sqlParam.setSupportWith(false); sqlQuery.setSupportWith(false);
sql = String.format("select %s from %s t0 %s %s %s", sql = String.format("select %s from %s t0 %s %s %s",
sqlGenerateUtils.getSelect(structQuery), dsTable, sqlGenerateUtils.getSelect(structQuery), dsTable,
sqlGenerateUtils.getGroupBy(structQuery), sqlGenerateUtils.getGroupBy(structQuery),
sqlGenerateUtils.getOrderBy(structQuery), sqlGenerateUtils.getOrderBy(structQuery),
sqlGenerateUtils.getLimit(structQuery)); sqlGenerateUtils.getLimit(structQuery));
} }
sqlParam.setSql(sql); sqlQuery.setSql(sql);
queryStatement.setSqlQuery(sqlParam); queryStatement.setSqlQuery(sqlQuery);
queryStatement.setIsS2SQL(true);
OntologyQuery ontologyQuery = new OntologyQuery();
ontologyQuery.getDimensions().addAll(structQuery.getGroups());
ontologyQuery.getMetrics().addAll(structQuery.getAggregators().stream()
.map(Aggregator::getColumn).collect(Collectors.toList()));
String where = sqlGenerateUtils.generateWhere(structQuery, null);
ontologyQuery.setWhere(where);
if (ontologyQuery.getMetrics().isEmpty()) {
ontologyQuery.setAggOption(AggOption.NATIVE);
} else {
ontologyQuery.setAggOption(AggOption.DEFAULT);
}
ontologyQuery.setNativeQuery(structQuery.getQueryType().isNativeAggQuery());
ontologyQuery.setOrder(structQuery.getOrders().stream()
.map(order -> new ColumnOrder(order.getColumn(), order.getDirection()))
.collect(Collectors.toList()));
ontologyQuery.setLimit(structQuery.getLimit());
queryStatement.setOntologyQuery(ontologyQuery);
log.info("parse structQuery [{}] ", queryStatement.getSqlQuery()); log.info("parse structQuery [{}] ", queryStatement.getSqlQuery());
} }

View File

@@ -1,17 +1,23 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.node; package com.tencent.supersonic.headless.core.translator.parser.calcite;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.tencent.supersonic.common.calcite.Configuration; import com.tencent.supersonic.common.calcite.Configuration;
import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper; import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.api.pojo.Dimension;
import com.tencent.supersonic.headless.api.pojo.Identify;
import com.tencent.supersonic.headless.api.pojo.Measure;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
import com.tencent.supersonic.headless.core.pojo.JoinRelation; import com.tencent.supersonic.headless.core.pojo.JoinRelation;
import com.tencent.supersonic.headless.core.pojo.Ontology; import com.tencent.supersonic.headless.core.pojo.Ontology;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery; import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema; import com.tencent.supersonic.headless.core.translator.parser.Constants;
import com.tencent.supersonic.headless.core.translator.parser.calcite.SchemaBuilder;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.*;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.*; import org.apache.calcite.sql.SqlDataTypeSpec;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlUserDefinedTypeNameSpec;
import org.apache.calcite.sql.parser.SqlParser; import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.parser.SqlParserPos; import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.validate.SqlValidatorScope; import org.apache.calcite.sql.validate.SqlValidatorScope;
@@ -23,28 +29,32 @@ import java.util.stream.Collectors;
@Slf4j @Slf4j
public class DataModelNode extends SemanticNode { public class DataModelNode extends SemanticNode {
public static SqlNode build(DataModel dataModel, SqlValidatorScope scope) throws Exception { public static SqlNode build(ModelResp dataModel, SqlValidatorScope scope) throws Exception {
String sqlTable = ""; String sqlTable = "";
if (dataModel.getSqlQuery() != null && !dataModel.getSqlQuery().isEmpty()) { if (dataModel.getModelDetail().getSqlQuery() != null
sqlTable = dataModel.getSqlQuery(); && !dataModel.getModelDetail().getSqlQuery().isEmpty()) {
} else if (dataModel.getTableQuery() != null && !dataModel.getTableQuery().isEmpty()) { sqlTable = dataModel.getModelDetail().getSqlQuery();
if (dataModel.getType().equalsIgnoreCase(EngineType.POSTGRESQL.getName())) { } else if (dataModel.getModelDetail().getTableQuery() != null
String fullTableName = && !dataModel.getModelDetail().getTableQuery().isEmpty()) {
String.join(".public.", dataModel.getTableQuery().split("\\.")); if (dataModel.getModelDetail().getDbType()
.equalsIgnoreCase(EngineType.POSTGRESQL.getName())) {
String fullTableName = String.join(".public.",
dataModel.getModelDetail().getTableQuery().split("\\."));
sqlTable = "select * from " + fullTableName; sqlTable = "select * from " + fullTableName;
} else { } else {
sqlTable = "select * from " + dataModel.getTableQuery(); sqlTable = "select * from " + dataModel.getModelDetail().getTableQuery();
} }
} }
if (sqlTable.isEmpty()) { if (sqlTable.isEmpty()) {
throw new Exception("DataModelNode build error [tableSqlNode not found]"); throw new Exception("DataModelNode build error [tableSqlNode not found]");
} }
SqlNode source = getTable(sqlTable, scope, EngineType.fromString(dataModel.getType())); SqlNode source = getTable(sqlTable, scope,
EngineType.fromString(dataModel.getModelDetail().getDbType()));
addSchema(scope, dataModel, sqlTable); addSchema(scope, dataModel, sqlTable);
return buildAs(dataModel.getName(), source); return buildAs(dataModel.getName(), source);
} }
private static void addSchema(SqlValidatorScope scope, DataModel datasource, String table) private static void addSchema(SqlValidatorScope scope, ModelResp datasource, String table)
throws Exception { throws Exception {
Map<String, Set<String>> sqlTable = SqlSelectHelper.getFieldsWithSubQuery(table); Map<String, Set<String>> sqlTable = SqlSelectHelper.getFieldsWithSubQuery(table);
for (Map.Entry<String, Set<String>> entry : sqlTable.entrySet()) { for (Map.Entry<String, Set<String>> entry : sqlTable.entrySet()) {
@@ -58,22 +68,22 @@ public class DataModelNode extends SemanticNode {
} }
} }
private static void addSchemaTable(SqlValidatorScope scope, DataModel datasource, String db, private static void addSchemaTable(SqlValidatorScope scope, ModelResp dataModel, String db,
String tb, Set<String> fields) throws Exception { String tb, Set<String> fields) throws Exception {
Set<String> dateInfo = new HashSet<>(); Set<String> dateInfo = new HashSet<>();
Set<String> dimensions = new HashSet<>(); Set<String> dimensions = new HashSet<>();
Set<String> metrics = new HashSet<>(); Set<String> metrics = new HashSet<>();
EngineType engineType = EngineType.fromString(datasource.getType()); EngineType engineType = EngineType.fromString(dataModel.getModelDetail().getDbType());
for (Dimension d : datasource.getDimensions()) { for (Dimension d : dataModel.getModelDetail().getDimensions()) {
List<SqlNode> identifiers = List<SqlNode> identifiers =
expand(SemanticNode.parse(d.getExpr(), scope, engineType), scope); expand(SemanticNode.parse(d.getExpr(), scope, engineType), scope);
identifiers.forEach(i -> dimensions.add(i.toString())); identifiers.forEach(i -> dimensions.add(i.toString()));
dimensions.add(d.getName()); dimensions.add(d.getName());
} }
for (Identify i : datasource.getIdentifiers()) { for (Identify i : dataModel.getIdentifiers()) {
dimensions.add(i.getName()); dimensions.add(i.getName());
} }
for (Measure m : datasource.getMeasures()) { for (Measure m : dataModel.getMeasures()) {
List<SqlNode> identifiers = List<SqlNode> identifiers =
expand(SemanticNode.parse(m.getExpr(), scope, engineType), scope); expand(SemanticNode.parse(m.getExpr(), scope, engineType), scope);
identifiers.forEach(i -> { identifiers.forEach(i -> {
@@ -88,26 +98,13 @@ public class DataModelNode extends SemanticNode {
for (String field : fields) { for (String field : fields) {
if (!metrics.contains(field) && !dimensions.contains(field)) { if (!metrics.contains(field) && !dimensions.contains(field)) {
dimensions.add(field); dimensions.add(field);
log.info("add column {} {}", datasource.getName(), field); log.info("add column {} {}", dataModel.getName(), field);
} }
} }
SchemaBuilder.addSourceView(scope.getValidator().getCatalogReader().getRootSchema(), db, tb, SchemaBuilder.addSourceView(scope.getValidator().getCatalogReader().getRootSchema(), db, tb,
dateInfo, dimensions, metrics); dateInfo, dimensions, metrics);
} }
public static SqlNode buildExtend(DataModel datasource, Map<String, String> exprList,
SqlValidatorScope scope) throws Exception {
if (CollectionUtils.isEmpty(exprList)) {
return build(datasource, scope);
}
EngineType engineType = EngineType.fromString(datasource.getType());
SqlNode dataSet = new SqlBasicCall(new LateralViewExplodeNode(exprList),
Arrays.asList(build(datasource, scope), new SqlNodeList(
getExtendField(exprList, scope, engineType), SqlParserPos.ZERO)),
SqlParserPos.ZERO);
return buildAs(datasource.getName() + Constants.DIMENSION_ARRAY_SINGLE_SUFFIX, dataSet);
}
public static List<SqlNode> getExtendField(Map<String, String> exprList, public static List<SqlNode> getExtendField(Map<String, String> exprList,
SqlValidatorScope scope, EngineType engineType) throws Exception { SqlValidatorScope scope, EngineType engineType) throws Exception {
List<SqlNode> sqlNodeList = new ArrayList<>(); List<SqlNode> sqlNodeList = new ArrayList<>();
@@ -129,64 +126,15 @@ public class DataModelNode extends SemanticNode {
return sqlNode; return sqlNode;
} }
public static String getNames(List<DataModel> dataModelList) { public static List<ModelResp> getQueryDataModels(Ontology ontology,
return dataModelList.stream().map(DataModel::getName).collect(Collectors.joining("_")); OntologyQuery ontologyQuery) {
}
public static void getQueryDimensionMeasure(Ontology ontology, OntologyQuery queryParam,
Set<String> queryDimensions, Set<String> queryMeasures) {
queryDimensions.addAll(queryParam.getDimensions().stream()
.map(d -> d.contains(Constants.DIMENSION_IDENTIFY)
? d.split(Constants.DIMENSION_IDENTIFY)[1]
: d)
.collect(Collectors.toSet()));
Set<String> schemaMetricName =
ontology.getMetrics().stream().map(Metric::getName).collect(Collectors.toSet());
ontology.getMetrics().stream().filter(m -> queryParam.getMetrics().contains(m.getName()))
.forEach(m -> m.getMetricTypeParams().getMeasures()
.forEach(mm -> queryMeasures.add(mm.getName())));
queryParam.getMetrics().stream().filter(m -> !schemaMetricName.contains(m))
.forEach(queryMeasures::add);
}
public static void mergeQueryFilterDimensionMeasure(Ontology ontology, OntologyQuery queryParam,
Set<String> dimensions, Set<String> measures, SqlValidatorScope scope)
throws Exception {
EngineType engineType = ontology.getDatabaseType();
if (Objects.nonNull(queryParam.getWhere()) && !queryParam.getWhere().isEmpty()) {
Set<String> filterConditions = new HashSet<>();
FilterNode.getFilterField(parse(queryParam.getWhere(), scope, engineType),
filterConditions);
Set<String> queryMeasures = new HashSet<>(measures);
Set<String> schemaMetricName =
ontology.getMetrics().stream().map(Metric::getName).collect(Collectors.toSet());
for (String filterCondition : filterConditions) {
if (schemaMetricName.contains(filterCondition)) {
ontology.getMetrics().stream()
.filter(m -> m.getName().equalsIgnoreCase(filterCondition))
.forEach(m -> m.getMetricTypeParams().getMeasures()
.forEach(mm -> queryMeasures.add(mm.getName())));
continue;
}
dimensions.add(filterCondition);
}
measures.clear();
measures.addAll(queryMeasures);
}
}
public static List<DataModel> getQueryDataModels(SqlValidatorScope scope,
S2CalciteSchema schema, OntologyQuery queryParam) throws Exception {
Ontology ontology = schema.getOntology();
// get query measures and dimensions // get query measures and dimensions
Set<String> queryMeasures = new HashSet<>(); Set<String> queryMeasures = new HashSet<>();
Set<String> queryDimensions = new HashSet<>(); Set<String> queryDimensions = new HashSet<>();
getQueryDimensionMeasure(ontology, queryParam, queryDimensions, queryMeasures); getQueryDimensionMeasure(ontology, ontologyQuery, queryDimensions, queryMeasures);
mergeQueryFilterDimensionMeasure(ontology, queryParam, queryDimensions, queryMeasures,
scope);
// first, find the base model // first, find the base model
DataModel baseDataModel = findBaseModel(ontology, queryMeasures, queryDimensions); ModelResp baseDataModel = findBaseModel(ontology, ontologyQuery);
if (Objects.isNull(baseDataModel)) { if (Objects.isNull(baseDataModel)) {
throw new RuntimeException( throw new RuntimeException(
String.format("could not find matching dataModel, dimensions:%s, measures:%s", String.format("could not find matching dataModel, dimensions:%s, measures:%s",
@@ -199,7 +147,7 @@ public class DataModelNode extends SemanticNode {
} }
// second, traverse the ontology to find other related dataModels // second, traverse the ontology to find other related dataModels
List<DataModel> relatedDataModels = findRelatedModelsByRelation(ontology, queryParam, List<ModelResp> relatedDataModels = findRelatedModelsByRelation(ontology, ontologyQuery,
baseDataModel, queryDimensions, queryMeasures); baseDataModel, queryDimensions, queryMeasures);
if (CollectionUtils.isEmpty(relatedDataModels)) { if (CollectionUtils.isEmpty(relatedDataModels)) {
relatedDataModels = findRelatedModelsByIdentifier(ontology, baseDataModel, relatedDataModels = findRelatedModelsByIdentifier(ontology, baseDataModel,
@@ -213,51 +161,66 @@ public class DataModelNode extends SemanticNode {
return relatedDataModels; return relatedDataModels;
} }
private static DataModel findBaseModel(Ontology ontology, Set<String> queryMeasures, public static void getQueryDimensionMeasure(Ontology ontology, OntologyQuery ontologyQuery,
Set<String> queryDimensions) { Set<String> queryDimensions, Set<String> queryMeasures) {
DataModel dataModel = null; ontologyQuery.getMetrics().forEach(m -> {
// first, try to find the model with the most matching measures if (Objects.nonNull(m.getMetricDefineByMeasureParams())) {
Map<String, Integer> dataModelMeasuresCount = new HashMap<>(); m.getMetricDefineByMeasureParams().getMeasures()
for (Map.Entry<String, DataModel> entry : ontology.getDataModelMap().entrySet()) { .forEach(mm -> queryMeasures.add(mm.getName()));
Set<String> sourceMeasure = entry.getValue().getMeasures().stream() }
.map(Measure::getName).collect(Collectors.toSet()); if (Objects.nonNull(m.getMetricDefineByFieldParams())) {
sourceMeasure.retainAll(queryMeasures); m.getMetricDefineByFieldParams().getFields()
dataModelMeasuresCount.put(entry.getKey(), sourceMeasure.size()); .forEach(mm -> queryMeasures.add(mm.getFieldName()));
}
});
} }
log.info("dataModelMeasureCount: [{}]", dataModelMeasuresCount);
Optional<Map.Entry<String, Integer>> base =
dataModelMeasuresCount.entrySet().stream().filter(e -> e.getValue() > 0)
.sorted(Map.Entry.comparingByValue(Comparator.reverseOrder())).findFirst();
if (base.isPresent()) { private static ModelResp findBaseModel(Ontology ontology, OntologyQuery query) {
dataModel = ontology.getDataModelMap().get(base.get().getKey()); ModelResp dataModel = null;
// first, try to find the model with the most query metrics
Map<String, Integer> modelMetricCount = Maps.newHashMap();
query.getMetrics().forEach(m -> {
if (!modelMetricCount.containsKey(m.getModelBizName())) {
modelMetricCount.put(m.getModelBizName(), 1);
} else { } else {
// second, try to find the model with the most matching dimensions int count = modelMetricCount.get(m.getModelBizName());
Map<String, Integer> dataModelDimCount = new HashMap<>(); modelMetricCount.put(m.getModelBizName(), count + 1);
for (Map.Entry<String, List<Dimension>> entry : ontology.getDimensionMap().entrySet()) {
Set<String> modelDimensions = entry.getValue().stream().map(Dimension::getName)
.collect(Collectors.toSet());
modelDimensions.retainAll(queryDimensions);
dataModelDimCount.put(entry.getKey(), modelDimensions.size());
} }
log.info("dataModelDimCount: [{}]", dataModelDimCount); });
base = dataModelDimCount.entrySet().stream().filter(e -> e.getValue() > 0) Optional<String> baseModelName = modelMetricCount.entrySet().stream()
.sorted(Map.Entry.comparingByValue(Comparator.reverseOrder())).findFirst(); .sorted(Map.Entry.comparingByValue(Comparator.reverseOrder())).map(e -> e.getKey())
if (base.isPresent()) { .findFirst();
dataModel = ontology.getDataModelMap().get(base.get().getKey()); if (baseModelName.isPresent()) {
dataModel = ontology.getModelMap().get(baseModelName.get());
} else {
// second, try to find the model with the most query dimensions
Map<String, Integer> modelDimCount = Maps.newHashMap();
query.getDimensions().forEach(m -> {
if (!modelDimCount.containsKey(m.getModelBizName())) {
modelDimCount.put(m.getModelBizName(), 1);
} else {
int count = modelDimCount.get(m.getModelBizName());
modelDimCount.put(m.getModelBizName(), count + 1);
}
});
baseModelName = modelMetricCount.entrySet().stream()
.sorted(Map.Entry.comparingByValue(Comparator.reverseOrder()))
.map(e -> e.getKey()).findFirst();
if (baseModelName.isPresent()) {
dataModel = ontology.getModelMap().get(baseModelName.get());
} }
} }
return dataModel; return dataModel;
} }
private static boolean checkMatch(DataModel baseDataModel, Set<String> queryMeasures, private static boolean checkMatch(ModelResp baseDataModel, Set<String> queryMeasures,
Set<String> queryDimension) { Set<String> queryDimension) {
boolean isAllMatch = true; boolean isAllMatch = true;
Set<String> baseMeasures = baseDataModel.getMeasures().stream().map(Measure::getName) Set<String> baseMeasures = baseDataModel.getMeasures().stream().map(Measure::getName)
.collect(Collectors.toSet()); .collect(Collectors.toSet());
Set<String> baseDimensions = baseDataModel.getDimensions().stream().map(Dimension::getName) Set<String> baseDimensions = baseDataModel.getModelDetail().getDimensions().stream()
.collect(Collectors.toSet()); .map(Dimension::getName).collect(Collectors.toSet());
baseDataModel.getIdentifiers().forEach(i -> baseDimensions.add(i.getName())); baseDataModel.getIdentifiers().forEach(i -> baseDimensions.add(i.getName()));
baseMeasures.retainAll(queryMeasures); baseMeasures.retainAll(queryMeasures);
@@ -284,11 +247,11 @@ public class DataModelNode extends SemanticNode {
return isAllMatch; return isAllMatch;
} }
private static List<DataModel> findRelatedModelsByRelation(Ontology ontology, private static List<ModelResp> findRelatedModelsByRelation(Ontology ontology,
OntologyQuery queryParam, DataModel baseDataModel, Set<String> queryDimensions, OntologyQuery ontologyQuery, ModelResp baseDataModel, Set<String> queryDimensions,
Set<String> queryMeasures) { Set<String> queryMeasures) {
Set<String> joinDataModelNames = new HashSet<>(); Set<String> joinDataModelNames = new HashSet<>();
List<DataModel> joinDataModels = new ArrayList<>(); List<ModelResp> joinDataModels = new ArrayList<>();
Set<String> before = new HashSet<>(); Set<String> before = new HashSet<>();
before.add(baseDataModel.getName()); before.add(baseDataModel.getName());
@@ -307,30 +270,31 @@ public class DataModelNode extends SemanticNode {
} }
boolean isMatch = false; boolean isMatch = false;
boolean isRight = before.contains(joinRelation.getLeft()); boolean isRight = before.contains(joinRelation.getLeft());
DataModel other = isRight ? ontology.getDataModelMap().get(joinRelation.getRight()) ModelResp other = isRight ? ontology.getModelMap().get(joinRelation.getRight())
: ontology.getDataModelMap().get(joinRelation.getLeft()); : ontology.getModelMap().get(joinRelation.getLeft());
String joinDimName = isRight ? joinRelation.getJoinCondition().get(0).getRight() String joinDimName = isRight ? joinRelation.getJoinCondition().get(0).getRight()
: joinRelation.getJoinCondition().get(0).getLeft(); : joinRelation.getJoinCondition().get(0).getLeft();
if (!queryDimensions.isEmpty()) { if (!queryDimensions.isEmpty()) {
Set<String> linkDimension = other.getDimensions().stream() Set<String> linkDimension = other.getModelDetail().getDimensions().stream()
.map(Dimension::getName).collect(Collectors.toSet()); .map(Dimension::getName).collect(Collectors.toSet());
other.getIdentifiers().forEach(i -> linkDimension.add(i.getName())); other.getModelDetail().getIdentifiers()
.forEach(i -> linkDimension.add(i.getName()));
linkDimension.retainAll(queryDimensions); linkDimension.retainAll(queryDimensions);
if (!linkDimension.isEmpty()) { if (!linkDimension.isEmpty()) {
isMatch = true; isMatch = true;
// joinDim should be added to the query dimension // joinDim should be added to the query dimension
queryParam.getDimensions().add(joinDimName); // ontologyQuery.getDimensions().add(joinDimName);
} }
} }
Set<String> linkMeasure = other.getMeasures().stream().map(Measure::getName) Set<String> linkMeasure = other.getModelDetail().getMeasures().stream()
.collect(Collectors.toSet()); .map(Measure::getName).collect(Collectors.toSet());
linkMeasure.retainAll(queryMeasures); linkMeasure.retainAll(queryMeasures);
if (!linkMeasure.isEmpty()) { if (!linkMeasure.isEmpty()) {
isMatch = true; isMatch = true;
} }
if (!isMatch && ontology.getDimensionMap().containsKey(other.getName())) { if (!isMatch && ontology.getDimensionMap().containsKey(other.getName())) {
Set<String> linkDimension = ontology.getDimensionMap().get(other.getName()) Set<String> linkDimension = ontology.getDimensionMap().get(other.getName())
.stream().map(Dimension::getName).collect(Collectors.toSet()); .stream().map(DimSchemaResp::getName).collect(Collectors.toSet());
linkDimension.retainAll(queryDimensions); linkDimension.retainAll(queryDimensions);
if (!linkDimension.isEmpty()) { if (!linkDimension.isEmpty()) {
isMatch = true; isMatch = true;
@@ -362,7 +326,7 @@ public class DataModelNode extends SemanticNode {
orders.entrySet().stream() orders.entrySet().stream()
.sorted((entry1, entry2) -> entry2.getValue().compareTo(entry1.getValue())) // 倒序排序 .sorted((entry1, entry2) -> entry2.getValue().compareTo(entry1.getValue())) // 倒序排序
.forEach(d -> { .forEach(d -> {
joinDataModels.add(ontology.getDataModelMap().get(d.getKey())); joinDataModels.add(ontology.getModelMap().get(d.getKey()));
}); });
} }
return joinDataModels; return joinDataModels;
@@ -383,36 +347,37 @@ public class DataModelNode extends SemanticNode {
} }
} }
private static List<DataModel> findRelatedModelsByIdentifier(Ontology ontology, private static List<ModelResp> findRelatedModelsByIdentifier(Ontology ontology,
DataModel baseDataModel, Set<String> queryDimension, Set<String> measures) { ModelResp baseDataModel, Set<String> queryDimension, Set<String> measures) {
Set<String> baseIdentifiers = baseDataModel.getIdentifiers().stream().map(Identify::getName) Set<String> baseIdentifiers = baseDataModel.getModelDetail().getIdentifiers().stream()
.collect(Collectors.toSet()); .map(Identify::getName).collect(Collectors.toSet());
if (baseIdentifiers.isEmpty()) { if (baseIdentifiers.isEmpty()) {
return Collections.EMPTY_LIST; return Collections.EMPTY_LIST;
} }
Set<String> linkDataSourceName = new HashSet<>(); Set<String> linkDataSourceName = new HashSet<>();
List<DataModel> linkDataModels = new ArrayList<>(); List<ModelResp> linkDataModels = new ArrayList<>();
for (Map.Entry<String, DataModel> entry : ontology.getDataModelMap().entrySet()) { for (Map.Entry<String, ModelResp> entry : ontology.getModelMap().entrySet()) {
if (entry.getKey().equalsIgnoreCase(baseDataModel.getName())) { if (entry.getKey().equalsIgnoreCase(baseDataModel.getName())) {
continue; continue;
} }
long identifierNum = entry.getValue().getIdentifiers().stream().map(Identify::getName) long identifierNum = entry.getValue().getModelDetail().getIdentifiers().stream()
.filter(baseIdentifiers::contains).count(); .map(Identify::getName).filter(baseIdentifiers::contains).count();
if (identifierNum > 0) { if (identifierNum > 0) {
boolean isMatch = false; boolean isMatch = false;
if (!queryDimension.isEmpty()) { if (!queryDimension.isEmpty()) {
Set<String> linkDimension = entry.getValue().getDimensions().stream() Set<String> linkDimension = entry.getValue().getModelDetail().getDimensions()
.map(Dimension::getName).collect(Collectors.toSet()); .stream().map(Dimension::getName).collect(Collectors.toSet());
entry.getValue().getIdentifiers().forEach(i -> linkDimension.add(i.getName())); entry.getValue().getModelDetail().getIdentifiers()
.forEach(i -> linkDimension.add(i.getName()));
linkDimension.retainAll(queryDimension); linkDimension.retainAll(queryDimension);
if (!linkDimension.isEmpty()) { if (!linkDimension.isEmpty()) {
isMatch = true; isMatch = true;
} }
} }
if (!measures.isEmpty()) { if (!measures.isEmpty()) {
Set<String> linkMeasure = entry.getValue().getMeasures().stream() Set<String> linkMeasure = entry.getValue().getModelDetail().getMeasures()
.map(Measure::getName).collect(Collectors.toSet()); .stream().map(Measure::getName).collect(Collectors.toSet());
linkMeasure.retainAll(measures); linkMeasure.retainAll(measures);
if (!linkMeasure.isEmpty()) { if (!linkMeasure.isEmpty()) {
isMatch = true; isMatch = true;
@@ -423,9 +388,9 @@ public class DataModelNode extends SemanticNode {
} }
} }
} }
for (Map.Entry<String, List<Dimension>> entry : ontology.getDimensionMap().entrySet()) { for (Map.Entry<String, List<DimSchemaResp>> entry : ontology.getDimensionMap().entrySet()) {
if (!queryDimension.isEmpty()) { if (!queryDimension.isEmpty()) {
Set<String> linkDimension = entry.getValue().stream().map(Dimension::getName) Set<String> linkDimension = entry.getValue().stream().map(DimSchemaResp::getName)
.collect(Collectors.toSet()); .collect(Collectors.toSet());
linkDimension.retainAll(queryDimension); linkDimension.retainAll(queryDimension);
if (!linkDimension.isEmpty()) { if (!linkDimension.isEmpty()) {
@@ -434,10 +399,10 @@ public class DataModelNode extends SemanticNode {
} }
} }
for (String linkName : linkDataSourceName) { for (String linkName : linkDataSourceName) {
linkDataModels.add(ontology.getDataModelMap().get(linkName)); linkDataModels.add(ontology.getModelMap().get(linkName));
} }
if (!CollectionUtils.isEmpty(linkDataModels)) { if (!CollectionUtils.isEmpty(linkDataModels)) {
List<DataModel> all = new ArrayList<>(); List<ModelResp> all = new ArrayList<>();
all.add(baseDataModel); all.add(baseDataModel);
all.addAll(linkDataModels); all.addAll(linkDataModels);
return all; return all;

View File

@@ -25,7 +25,9 @@ import java.util.List;
import java.util.Objects; import java.util.Objects;
import java.util.Optional; import java.util.Optional;
/** push down the time filter into group using the RuntimeOptions defined minMaxTime */ /**
* push down the time filter into group using the RuntimeOptions defined minMaxTime
*/
public class FilterToGroupScanRule extends RelRule<Config> implements TransformationRule { public class FilterToGroupScanRule extends RelRule<Config> implements TransformationRule {
public static FilterTableScanRule.Config DEFAULT = public static FilterTableScanRule.Config DEFAULT =

View File

@@ -1,10 +1,11 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite; package com.tencent.supersonic.headless.core.translator.parser.calcite;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
import com.tencent.supersonic.headless.core.pojo.JoinRelation; import com.tencent.supersonic.headless.core.pojo.JoinRelation;
import com.tencent.supersonic.headless.core.pojo.Ontology; import com.tencent.supersonic.headless.core.pojo.Ontology;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel; import com.tencent.supersonic.headless.core.translator.parser.RuntimeOptions;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
import lombok.Builder; import lombok.Builder;
import lombok.Data; import lombok.Data;
import org.apache.calcite.schema.Schema; import org.apache.calcite.schema.Schema;
@@ -29,15 +30,15 @@ public class S2CalciteSchema extends AbstractSchema {
return this; return this;
} }
public Map<String, DataModel> getDataModels() { public Map<String, ModelResp> getDataModels() {
return ontology.getDataModelMap(); return ontology.getModelMap();
} }
public List<Metric> getMetrics() { public List<MetricSchemaResp> getMetrics() {
return ontology.getMetrics(); return ontology.getMetrics();
} }
public Map<String, List<Dimension>> getDimensions() { public Map<String, List<DimSchemaResp>> getDimensions() {
return ontology.getDimensionMap(); return ontology.getDimensionMap();
} }

View File

@@ -26,14 +26,14 @@ public class SchemaBuilder {
public static final String MATERIALIZATION_SYS_FIELD_DATE = "C1"; public static final String MATERIALIZATION_SYS_FIELD_DATE = "C1";
public static final String MATERIALIZATION_SYS_FIELD_DATA = "C2"; public static final String MATERIALIZATION_SYS_FIELD_DATA = "C2";
public static SqlValidatorScope getScope(S2CalciteSchema schema) throws Exception { public static SqlValidatorScope getScope(S2CalciteSchema schema) {
Map<String, RelDataType> nameToTypeMap = new HashMap<>(); Map<String, RelDataType> nameToTypeMap = new HashMap<>();
CalciteSchema rootSchema = CalciteSchema.createRootSchema(true, false); CalciteSchema rootSchema = CalciteSchema.createRootSchema(true, false);
rootSchema.add(schema.getSchemaKey(), schema); rootSchema.add(schema.getSchemaKey(), schema);
Prepare.CatalogReader catalogReader = new CalciteCatalogReader(rootSchema, Prepare.CatalogReader catalogReader = new CalciteCatalogReader(rootSchema,
Collections.singletonList(schema.getSchemaKey()), Configuration.typeFactory, Collections.singletonList(schema.getSchemaKey()), Configuration.typeFactory,
Configuration.config); Configuration.config);
EngineType engineType = schema.getOntology().getDatabaseType(); EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
S2SQLSqlValidatorImpl s2SQLSqlValidator = S2SQLSqlValidatorImpl s2SQLSqlValidator =
new S2SQLSqlValidatorImpl(Configuration.operatorTable, catalogReader, new S2SQLSqlValidatorImpl(Configuration.operatorTable, catalogReader,
Configuration.typeFactory, Configuration.getValidatorConfig(engineType)); Configuration.typeFactory, Configuration.getValidatorConfig(engineType));

View File

@@ -1,12 +1,10 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.node; package com.tencent.supersonic.headless.core.translator.parser.calcite;
import com.tencent.supersonic.common.calcite.Configuration; import com.tencent.supersonic.common.calcite.Configuration;
import com.tencent.supersonic.common.calcite.SemanticSqlDialect; import com.tencent.supersonic.common.calcite.SemanticSqlDialect;
import com.tencent.supersonic.common.calcite.SqlDialectFactory; import com.tencent.supersonic.common.calcite.SqlDialectFactory;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.translator.parser.calcite.FilterToGroupScanRule; import com.tencent.supersonic.headless.core.translator.parser.Constants;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.plan.RelOptPlanner; import org.apache.calcite.plan.RelOptPlanner;
import org.apache.calcite.plan.hep.HepPlanner; import org.apache.calcite.plan.hep.HepPlanner;

View File

@@ -2,107 +2,63 @@ package com.tencent.supersonic.headless.core.translator.parser.calcite;
import com.tencent.supersonic.common.calcite.Configuration; import com.tencent.supersonic.common.calcite.Configuration;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption; import com.tencent.supersonic.headless.api.pojo.Dimension;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery; import com.tencent.supersonic.headless.api.pojo.Identify;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.api.pojo.enums.IdentifyType;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.DataModelNode; import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.SemanticNode; import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.core.translator.parser.calcite.render.FilterRender; import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import com.tencent.supersonic.headless.core.translator.parser.calcite.render.OutputRender; import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
import com.tencent.supersonic.headless.core.translator.parser.calcite.render.Renderer; import com.tencent.supersonic.headless.core.pojo.*;
import com.tencent.supersonic.headless.core.translator.parser.calcite.render.SourceRender; import com.tencent.supersonic.headless.core.translator.parser.Constants;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.*;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParser; import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.validate.SqlValidatorScope; import org.apache.calcite.sql.validate.SqlValidatorScope;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Triple;
import org.springframework.util.CollectionUtils;
import java.util.LinkedList; import java.util.*;
import java.util.List; import java.util.stream.Collectors;
import java.util.ListIterator;
import java.util.Objects;
@Slf4j @Slf4j
public class SqlBuilder { public class SqlBuilder {
private final S2CalciteSchema schema; private final S2CalciteSchema schema;
private OntologyQuery ontologyQuery; private final SqlValidatorScope scope;
private SqlValidatorScope scope;
private SqlNode parserNode;
private boolean isAgg = false;
private AggOption aggOption = AggOption.DEFAULT;
public SqlBuilder(S2CalciteSchema schema) { public SqlBuilder(S2CalciteSchema schema) {
this.schema = schema; this.schema = schema;
this.scope = SchemaBuilder.getScope(schema);
} }
public String buildOntologySql(QueryStatement queryStatement) throws Exception { public String buildOntologySql(QueryStatement queryStatement) throws Exception {
this.ontologyQuery = queryStatement.getOntologyQuery(); OntologyQuery ontologyQuery = queryStatement.getOntologyQuery();
if (ontologyQuery.getLimit() == null) { if (ontologyQuery.getLimit() == null) {
ontologyQuery.setLimit(0L); ontologyQuery.setLimit(0L);
} }
this.aggOption = ontologyQuery.getAggOption();
buildParseNode(); Set<ModelResp> dataModels = ontologyQuery.getModels();
optimizeParseNode(queryStatement.getOntology().getDatabaseType());
return getSql(queryStatement.getOntology().getDatabaseType());
}
private void buildParseNode() throws Exception {
// find relevant data models
scope = SchemaBuilder.getScope(schema);
List<DataModel> dataModels = DataModelNode.getQueryDataModels(scope, schema, ontologyQuery);
if (dataModels == null || dataModels.isEmpty()) { if (dataModels == null || dataModels.isEmpty()) {
throw new Exception("data model not found"); throw new Exception("data model not found");
} }
isAgg = getAgg(dataModels.get(0));
// build level by level TableView tableView = render(ontologyQuery, new ArrayList<>(dataModels), scope, schema);
LinkedList<Renderer> builders = new LinkedList<>(); SqlNode parserNode = tableView.build();
builders.add(new SourceRender()); DatabaseResp database = queryStatement.getOntology().getDatabase();
builders.add(new FilterRender()); EngineType engineType = EngineType.fromString(database.getType());
builders.add(new OutputRender()); parserNode = optimizeParseNode(parserNode, engineType);
ListIterator<Renderer> it = builders.listIterator();
int i = 0;
Renderer previous = null;
while (it.hasNext()) {
Renderer renderer = it.next();
if (previous != null) {
previous.render(ontologyQuery, dataModels, scope, schema, !isAgg);
renderer.setTable(previous
.builderAs(DataModelNode.getNames(dataModels) + "_" + String.valueOf(i)));
i++;
}
previous = renderer;
}
builders.getLast().render(ontologyQuery, dataModels, scope, schema, !isAgg);
parserNode = builders.getLast().builder();
}
private boolean getAgg(DataModel dataModel) {
if (!AggOption.DEFAULT.equals(aggOption)) {
return AggOption.isAgg(aggOption);
}
// default by dataModel time aggregation
if (Objects.nonNull(dataModel.getAggTime()) && !dataModel.getAggTime()
.equalsIgnoreCase(Constants.DIMENSION_TYPE_TIME_GRANULARITY_NONE)) {
if (!ontologyQuery.isNativeQuery()) {
return true;
}
}
return isAgg;
}
public String getSql(EngineType engineType) {
return SemanticNode.getSql(parserNode, engineType); return SemanticNode.getSql(parserNode, engineType);
} }
private void optimizeParseNode(EngineType engineType) { private SqlNode optimizeParseNode(SqlNode parserNode, EngineType engineType) {
if (Objects.isNull(schema.getRuntimeOptions()) if (Objects.isNull(schema.getRuntimeOptions())
|| Objects.isNull(schema.getRuntimeOptions().getEnableOptimize()) || Objects.isNull(schema.getRuntimeOptions().getEnableOptimize())
|| !schema.getRuntimeOptions().getEnableOptimize()) { || !schema.getRuntimeOptions().getEnableOptimize()) {
return; return parserNode;
} }
SqlNode optimizeNode = null; SqlNode optimizeNode = null;
@@ -117,8 +73,237 @@ public class SqlBuilder {
} }
if (Objects.nonNull(optimizeNode)) { if (Objects.nonNull(optimizeNode)) {
parserNode = optimizeNode; return optimizeNode;
} }
return parserNode;
}
private TableView render(OntologyQuery ontologyQuery, List<ModelResp> dataModels,
SqlValidatorScope scope, S2CalciteSchema schema) throws Exception {
SqlNode left = null;
TableView leftTable = null;
TableView outerTable = new TableView();
Map<String, SqlNode> outerSelect = new HashMap<>();
Map<String, String> beforeModels = new HashMap<>();
EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
for (int i = 0; i < dataModels.size(); i++) {
final ModelResp dataModel = dataModels.get(i);
final Set<DimSchemaResp> queryDimensions =
ontologyQuery.getDimensionsByModel(dataModel.getName());
final Set<MetricSchemaResp> queryMetrics =
ontologyQuery.getMetricsByModel(dataModel.getName());
List<String> primary = new ArrayList<>();
for (Identify identify : dataModel.getIdentifiers()) {
primary.add(identify.getName());
}
TableView tableView =
renderOne(queryMetrics, queryDimensions, dataModel, scope, schema);
log.info("tableView {}", StringUtils.normalizeSpace(tableView.getTable().toString()));
String alias = Constants.JOIN_TABLE_PREFIX + dataModel.getName();
tableView.setAlias(alias);
tableView.setPrimary(primary);
tableView.setDataModel(dataModel);
for (String field : tableView.getFields()) {
outerSelect.put(field, SemanticNode.parse(alias + "." + field, scope, engineType));
}
if (left == null) {
left = SemanticNode.buildAs(tableView.getAlias(), getTable(tableView));
} else {
left = buildJoin(left, leftTable, tableView, beforeModels, dataModel, schema,
scope);
}
leftTable = tableView;
beforeModels.put(dataModel.getName(), leftTable.getAlias());
}
for (Map.Entry<String, SqlNode> entry : outerSelect.entrySet()) {
outerTable.getSelect().add(entry.getValue());
}
outerTable.setTable(left);
return outerTable;
}
private SqlNode getTable(TableView tableView) {
return SemanticNode.getTable(tableView.getTable());
}
private SqlNode buildJoin(SqlNode leftNode, TableView leftTable, TableView rightTable,
Map<String, String> before, ModelResp dataModel, S2CalciteSchema schema,
SqlValidatorScope scope) throws Exception {
EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
SqlNode condition =
getCondition(leftTable, rightTable, dataModel, schema, scope, engineType);
SqlLiteral sqlLiteral = SemanticNode.getJoinSqlLiteral("");
JoinRelation matchJoinRelation = getMatchJoinRelation(before, rightTable, schema);
SqlNode joinRelationCondition;
if (!CollectionUtils.isEmpty(matchJoinRelation.getJoinCondition())) {
sqlLiteral = SemanticNode.getJoinSqlLiteral(matchJoinRelation.getJoinType());
joinRelationCondition = getCondition(matchJoinRelation, scope, engineType);
condition = joinRelationCondition;
}
return new SqlJoin(SqlParserPos.ZERO, leftNode,
SqlLiteral.createBoolean(false, SqlParserPos.ZERO), sqlLiteral,
SemanticNode.buildAs(rightTable.getAlias(), getTable(rightTable)),
SqlLiteral.createSymbol(JoinConditionType.ON, SqlParserPos.ZERO), condition);
}
private JoinRelation getMatchJoinRelation(Map<String, String> before, TableView tableView,
S2CalciteSchema schema) {
JoinRelation matchJoinRelation = JoinRelation.builder().build();
if (!CollectionUtils.isEmpty(schema.getJoinRelations())) {
for (JoinRelation joinRelation : schema.getJoinRelations()) {
if (joinRelation.getRight().equalsIgnoreCase(tableView.getDataModel().getName())
&& before.containsKey(joinRelation.getLeft())) {
matchJoinRelation.setJoinCondition(joinRelation.getJoinCondition().stream()
.map(r -> Triple.of(
before.get(joinRelation.getLeft()) + "." + r.getLeft(),
r.getMiddle(), tableView.getAlias() + "." + r.getRight()))
.collect(Collectors.toList()));
matchJoinRelation.setJoinType(joinRelation.getJoinType());
// Added join condition judgment to solve the problem of join condition order
} else if (joinRelation.getLeft()
.equalsIgnoreCase(tableView.getDataModel().getName())
&& before.containsKey(joinRelation.getRight())) {
matchJoinRelation.setJoinCondition(joinRelation.getJoinCondition().stream()
.map(r -> Triple.of(
before.get(joinRelation.getRight()) + "." + r.getRight(),
r.getMiddle(), tableView.getAlias() + "." + r.getLeft()))
.collect(Collectors.toList()));
matchJoinRelation.setJoinType(joinRelation.getJoinType());
}
}
}
return matchJoinRelation;
}
private SqlNode getCondition(JoinRelation joinRelation, SqlValidatorScope scope,
EngineType engineType) throws Exception {
SqlNode condition = null;
for (Triple<String, String, String> con : joinRelation.getJoinCondition()) {
List<SqlNode> ons = new ArrayList<>();
ons.add(SemanticNode.parse(con.getLeft(), scope, engineType));
ons.add(SemanticNode.parse(con.getRight(), scope, engineType));
if (Objects.isNull(condition)) {
condition = new SqlBasicCall(SemanticNode.getBinaryOperator(con.getMiddle()), ons,
SqlParserPos.ZERO, null);
continue;
}
SqlNode addCondition = new SqlBasicCall(SemanticNode.getBinaryOperator(con.getMiddle()),
ons, SqlParserPos.ZERO, null);
condition = new SqlBasicCall(SqlStdOperatorTable.AND,
new ArrayList<>(Arrays.asList(condition, addCondition)), SqlParserPos.ZERO,
null);
}
return condition;
}
private SqlNode getCondition(TableView left, TableView right, ModelResp dataModel,
S2CalciteSchema schema, SqlValidatorScope scope, EngineType engineType)
throws Exception {
Set<String> selectLeft = SemanticNode.getSelect(left.getTable());
Set<String> selectRight = SemanticNode.getSelect(right.getTable());
selectLeft.retainAll(selectRight);
SqlNode condition = null;
for (String on : selectLeft) {
if (!isDimension(on, dataModel, schema)) {
continue;
}
if (isForeign(on, left.getDataModel().getIdentifiers())) {
if (!isPrimary(on, right.getDataModel().getIdentifiers())) {
continue;
}
}
if (isForeign(on, right.getDataModel().getIdentifiers())) {
if (!isPrimary(on, left.getDataModel().getIdentifiers())) {
continue;
}
}
List<SqlNode> ons = new ArrayList<>();
ons.add(SemanticNode.parse(left.getAlias() + "." + on, scope, engineType));
ons.add(SemanticNode.parse(right.getAlias() + "." + on, scope, engineType));
if (condition == null) {
condition =
new SqlBasicCall(SqlStdOperatorTable.EQUALS, ons, SqlParserPos.ZERO, null);
continue;
}
SqlNode addCondition =
new SqlBasicCall(SqlStdOperatorTable.EQUALS, ons, SqlParserPos.ZERO, null);
condition = new SqlBasicCall(SqlStdOperatorTable.AND,
new ArrayList<>(Arrays.asList(condition, addCondition)), SqlParserPos.ZERO,
null);
}
return condition;
}
public static TableView renderOne(Set<MetricSchemaResp> queryMetrics,
Set<DimSchemaResp> queryDimensions, ModelResp dataModel, SqlValidatorScope scope,
S2CalciteSchema schema) {
TableView tableView = new TableView();
EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
Set<String> queryFields = tableView.getFields();
if (Objects.nonNull(queryMetrics)) {
queryMetrics.stream().forEach(m -> queryFields.addAll(m.getFields()));
}
if (Objects.nonNull(queryDimensions)) {
queryDimensions.stream().forEach(d -> queryFields.addAll(d.getFields()));
}
try {
for (String field : queryFields) {
tableView.getSelect().add(SemanticNode.parse(field, scope, engineType));
}
tableView.setTable(DataModelNode.build(dataModel, scope));
} catch (Exception e) {
log.error("Failed to create sqlNode for data model {}", dataModel);
}
return tableView;
}
private static boolean isDimension(String name, ModelResp dataModel, S2CalciteSchema schema) {
Optional<Dimension> dimension = dataModel.getModelDetail().getDimensions().stream()
.filter(d -> d.getName().equalsIgnoreCase(name)).findFirst();
if (dimension.isPresent()) {
return true;
}
Optional<Identify> identify = dataModel.getIdentifiers().stream()
.filter(i -> i.getName().equalsIgnoreCase(name)).findFirst();
if (identify.isPresent()) {
return true;
}
if (schema.getDimensions().containsKey(dataModel.getName())) {
Optional<DimSchemaResp> dataSourceDim = schema.getDimensions().get(dataModel.getName())
.stream().filter(d -> d.getName().equalsIgnoreCase(name)).findFirst();
if (dataSourceDim.isPresent()) {
return true;
}
}
return false;
}
private static boolean isForeign(String name, List<Identify> identifies) {
Optional<Identify> identify =
identifies.stream().filter(i -> i.getName().equalsIgnoreCase(name)).findFirst();
if (identify.isPresent()) {
return IdentifyType.foreign.equals(identify.get().getType());
}
return false;
}
private static boolean isPrimary(String name, List<Identify> identifies) {
Optional<Identify> identify =
identifies.stream().filter(i -> i.getName().equalsIgnoreCase(name)).findFirst();
if (identify.isPresent()) {
return IdentifyType.primary.equals(identify.get().getType());
}
return false;
} }
} }

View File

@@ -1,9 +1,9 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite; package com.tencent.supersonic.headless.core.translator.parser.calcite;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel; import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
import lombok.Data; import lombok.Data;
import org.apache.calcite.sql.SqlBasicCall;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlNodeList; import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.SqlSelect; import org.apache.calcite.sql.SqlSelect;
@@ -11,44 +11,33 @@ import org.apache.calcite.sql.parser.SqlParserPos;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.stream.Collectors; import java.util.Set;
/** basic query project */
@Data @Data
public class TableView { public class TableView {
private List<SqlNode> filter = new ArrayList<>(); private Set<String> fields = Sets.newHashSet();
private List<SqlNode> dimension = new ArrayList<>(); private List<SqlNode> select = Lists.newArrayList();
private List<SqlNode> measure = new ArrayList<>();
private SqlNodeList order; private SqlNodeList order;
private SqlNode fetch; private SqlNode fetch;
private SqlNode offset; private SqlNode offset;
private SqlNode table; private SqlNode table;
private String alias; private String alias;
private List<String> primary; private List<String> primary;
private DataModel dataModel; private ModelResp dataModel;
public SqlNode build() { public SqlNode build() {
measure.addAll(dimension); List<SqlNode> selectNodeList = new ArrayList<>();
SqlNodeList dimensionNodeList = null; if (select.isEmpty()) {
if (dimension.size() > 0) { return new SqlSelect(SqlParserPos.ZERO, null,
dimensionNodeList = new SqlNodeList(getGroup(dimension), SqlParserPos.ZERO); new SqlNodeList(SqlNodeList.SINGLETON_STAR, SqlParserPos.ZERO), table, null,
null, null, null, null, order, offset, fetch, null);
} else {
selectNodeList.addAll(select);
return new SqlSelect(SqlParserPos.ZERO, null,
new SqlNodeList(selectNodeList, SqlParserPos.ZERO), table, null, null, null,
null, null, order, offset, fetch, null);
} }
SqlNodeList filterNodeList = null;
if (filter.size() > 0) {
filterNodeList = new SqlNodeList(filter, SqlParserPos.ZERO);
}
return new SqlSelect(SqlParserPos.ZERO, null, new SqlNodeList(measure, SqlParserPos.ZERO),
table, filterNodeList, dimensionNodeList, null, null, null, order, offset, fetch,
null);
} }
private List<SqlNode> getGroup(List<SqlNode> sqlNodeList) {
return sqlNodeList.stream()
.map(s -> (s.getKind().equals(SqlKind.AS)
? ((SqlBasicCall) s).getOperandList().get(0)
: s))
.collect(Collectors.toList());
}
} }

View File

@@ -1,26 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import java.util.Objects;
public class AggFunctionNode extends SemanticNode {
public static SqlNode build(String agg, String name, SqlValidatorScope scope,
EngineType engineType) throws Exception {
if (Objects.isNull(agg) || agg.isEmpty()) {
return parse(name, scope, engineType);
}
if (AggFunction.COUNT_DISTINCT.name().equalsIgnoreCase(agg)) {
return parse(AggFunction.COUNT.name() + " ( " + AggFunction.DISTINCT.name() + " " + name
+ " ) ", scope, engineType);
}
return parse(agg + " ( " + name + " ) ", scope, engineType);
}
public static enum AggFunction {
AVG, COUNT_DISTINCT, MAX, MIN, SUM, COUNT, DISTINCT
}
}

View File

@@ -1,61 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Dimension;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import java.util.List;
import java.util.Objects;
public class DimensionNode extends SemanticNode {
public static SqlNode build(Dimension dimension, SqlValidatorScope scope, EngineType engineType)
throws Exception {
SqlNode sqlNode = parse(dimension.getExpr(), scope, engineType);
if (!dimension.getName().equals(dimension.getExpr())) {
sqlNode = buildAs(dimension.getName(), sqlNode);
}
return sqlNode;
}
public static List<SqlNode> expand(Dimension dimension, SqlValidatorScope scope,
EngineType engineType) throws Exception {
SqlNode sqlNode = parse(dimension.getExpr(), scope, engineType);
return expand(sqlNode, scope);
}
public static SqlNode buildName(Dimension dimension, SqlValidatorScope scope,
EngineType engineType) throws Exception {
return parse(dimension.getName(), scope, engineType);
}
public static SqlNode buildExp(Dimension dimension, SqlValidatorScope scope,
EngineType engineType) throws Exception {
return parse(dimension.getExpr(), scope, engineType);
}
public static SqlNode buildNameAs(String alias, Dimension dimension, SqlValidatorScope scope,
EngineType engineType) throws Exception {
if ("".equals(alias)) {
return buildName(dimension, scope, engineType);
}
SqlNode sqlNode = parse(dimension.getName(), scope, engineType);
return buildAs(alias, sqlNode);
}
public static SqlNode buildArray(Dimension dimension, SqlValidatorScope scope,
EngineType engineType) throws Exception {
if (Objects.nonNull(dimension.getDataType()) && dimension.getDataType().isArray()) {
SqlNode sqlNode = parse(dimension.getExpr(), scope, engineType);
if (isIdentifier(sqlNode)) {
return buildAs(dimension.getName(),
parse(dimension.getExpr() + Constants.DIMENSION_ARRAY_SINGLE_SUFFIX, scope,
engineType));
}
throw new Exception("array dimension expr should only identify");
}
return build(dimension, scope, engineType);
}
}

View File

@@ -1,35 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
import org.apache.calcite.sql.SqlCall;
import org.apache.calcite.sql.SqlInternalOperator;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.SqlWriter;
import org.apache.calcite.sql.SqlWriter.Frame;
import org.apache.calcite.sql.SqlWriter.FrameTypeEnum;
public class ExtendNode extends SqlInternalOperator {
public ExtendNode() {
super(SqlKind.EXTEND.lowerName, SqlKind.EXTEND);
}
public void unparse(SqlWriter writer, SqlCall call, int leftPrec, int rightPrec) {
SqlOperator operator = call.getOperator();
Frame frame = writer.startList(FrameTypeEnum.SIMPLE);
call.operand(0).unparse(writer, leftPrec, operator.getLeftPrec());
writer.setNeedWhitespace(true);
writer.sep(operator.getName());
SqlNodeList list = (SqlNodeList) call.operand(1);
Frame frameArgs = writer.startList("(", ")");
for (int i = 0; i < list.size(); i++) {
list.get(i).unparse(writer, 0, 0);
if (i < list.size() - 1) {
writer.sep(",");
}
}
writer.endList(frameArgs);
writer.endList(frame);
}
}

View File

@@ -1,28 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
import org.apache.calcite.sql.SqlBasicCall;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlNode;
import java.util.Set;
/** Utilities for extracting the column fields referenced by a filter expression. */
public class FilterNode extends SemanticNode {

    /**
     * Recursively walks the expression tree and collects every identifier's first
     * name component (lower-cased) into {@code fields}.
     *
     * @param sqlNode expression node to inspect
     * @param fields  output set receiving the referenced field names
     */
    public static void getFilterField(SqlNode sqlNode, Set<String> fields) {
        if (sqlNode instanceof SqlIdentifier) {
            fields.add(((SqlIdentifier) sqlNode).names.get(0).toLowerCase());
        } else if (sqlNode instanceof SqlBasicCall) {
            for (SqlNode operand : ((SqlBasicCall) sqlNode).getOperandList()) {
                getFilterField(operand, fields);
            }
        }
    }

    /** Always false: filter fields alone never select a data source. */
    public static boolean isMatchDataSource(Set<String> measures) {
        return false;
    }
}

View File

@@ -1,42 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Identify;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
public class IdentifyNode extends SemanticNode {
public static SqlNode build(Identify identify, SqlValidatorScope scope, EngineType engineType)
throws Exception {
return parse(identify.getName(), scope, engineType);
}
public static Set<String> getIdentifyNames(List<Identify> identifies, Identify.Type type) {
return identifies.stream().filter(i -> type.name().equalsIgnoreCase(i.getType()))
.map(i -> i.getName()).collect(Collectors.toSet());
}
public static boolean isForeign(String name, List<Identify> identifies) {
Optional<Identify> identify =
identifies.stream().filter(i -> i.getName().equalsIgnoreCase(name)).findFirst();
if (identify.isPresent()) {
return Identify.Type.FOREIGN.name().equalsIgnoreCase(identify.get().getType());
}
return false;
}
public static boolean isPrimary(String name, List<Identify> identifies) {
Optional<Identify> identify =
identifies.stream().filter(i -> i.getName().equalsIgnoreCase(name)).findFirst();
if (identify.isPresent()) {
return Identify.Type.PRIMARY.name().equalsIgnoreCase(identify.get().getType());
}
return false;
}
}

View File

@@ -1,79 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
import org.apache.calcite.linq4j.Ord;
import org.apache.calcite.sql.SqlCall;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.SqlWriter;
import java.util.Iterator;
import java.util.Map;
import java.util.Objects;
/** extend node to handle lateral explode dataSet */
public class LateralViewExplodeNode extends ExtendNode {
public final String sqlNameView = "view";
public final String sqlNameExplode = "explode";
public final String sqlNameExplodeSplit = "explode_split";
private Map<String, String> delimiterMap;
public LateralViewExplodeNode(Map<String, String> delimiterMap) {
super();
this.delimiterMap = delimiterMap;
}
public void unparse(SqlWriter writer, SqlCall call, int leftPrec, int rightPrec) {
SqlOperator operator = call.getOperator();
writer.setNeedWhitespace(true);
assert call.operandCount() == 2;
writer.sep(SqlKind.SELECT.lowerName);
writer.sep(SqlIdentifier.STAR.toString());
writer.sep("from");
SqlWriter.Frame frame = writer.startList(SqlWriter.FrameTypeEnum.SIMPLE);
call.operand(0).unparse(writer, leftPrec, operator.getLeftPrec());
writer.setNeedWhitespace(true);
writer.sep(SqlKind.LATERAL.lowerName);
writer.sep(sqlNameView);
SqlNodeList list = (SqlNodeList) call.operand(1);
Ord node;
Iterator var = Ord.zip(list).iterator();
while (var.hasNext()) {
node = (Ord) var.next();
if (node.i > 0 && node.i % 2 > 0) {
writer.sep(SqlKind.AS.lowerName);
((SqlNode) node.e).unparse(writer, 0, 0);
continue;
}
if (node.i > 0 && node.i % 2 == 0) {
writer.sep(SqlKind.LATERAL.lowerName);
writer.sep(sqlNameView);
}
explode(writer, (SqlNode) node.e);
}
writer.endList(frame);
}
public void explode(SqlWriter writer, SqlNode sqlNode) {
String delimiter =
Objects.nonNull(delimiterMap) && delimiterMap.containsKey(sqlNode.toString())
? delimiterMap.get(sqlNode.toString())
: "";
if (delimiter.isEmpty()) {
writer.sep(sqlNameExplode);
} else {
writer.sep(sqlNameExplodeSplit);
}
SqlWriter.Frame frame = writer.startList("(", ")");
sqlNode.unparse(writer, 0, 0);
if (!delimiter.isEmpty()) {
writer.sep(",");
writer.sep(String.format("'%s'", delimiter));
}
writer.endList(frame);
writer.sep("tmp_sgl_" + sqlNode.toString());
}
}

View File

@@ -1,36 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Measure;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
/** Builds SqlNodes for measures, with or without aggregation applied. */
public class MeasureNode extends SemanticNode {

    /**
     * Builds the non-aggregated form of a measure. When the measure has no explicit
     * expr, the bare (possibly alias-qualified) column reference is returned;
     * otherwise the expr is aliased to the measure name.
     */
    public static SqlNode buildNonAgg(String alias, Measure measure, SqlValidatorScope scope,
            EngineType engineType) throws Exception {
        if (measure.getExpr() == null) {
            return getExpr(measure, alias, scope, engineType);
        }
        return buildAs(measure.getName(), getExpr(measure, alias, scope, engineType));
    }

    /**
     * Builds the aggregated form of a measure: the agg function applied to the
     * measure name, aliased back to the name. With no agg configured, or when
     * {@code noAgg} is set, the bare column reference is returned instead.
     */
    public static SqlNode buildAgg(Measure measure, boolean noAgg, SqlValidatorScope scope,
            EngineType engineType) throws Exception {
        if ((measure.getAgg() == null || measure.getAgg().isEmpty()) || noAgg) {
            return parse(measure.getName(), scope, engineType);
        }
        return buildAs(measure.getName(),
                AggFunctionNode.build(measure.getAgg(), measure.getName(), scope, engineType));
    }

    /** Resolves the measure expression: the explicit expr, or "[alias.]name" when absent. */
    private static SqlNode getExpr(Measure measure, String alias, SqlValidatorScope scope,
            EngineType engineType) throws Exception {
        if (measure.getExpr() == null) {
            return parse((alias.isEmpty() ? "" : alias + ".") + measure.getName(), scope,
                    engineType);
        }
        return parse(measure.getExpr(), scope, engineType);
    }
}

View File

@@ -1,42 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
import lombok.Data;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
/** Builds SqlNodes for metrics and caches per-measure render state during translation. */
@Data
public class MetricNode extends SemanticNode {
    private Metric metric;
    // measure name -> aggregated SqlNode
    private Map<String, SqlNode> aggNode = new HashMap<>();
    // measure name -> non-aggregated SqlNode
    private Map<String, SqlNode> nonAggNode = new HashMap<>();
    // measure name -> filter SqlNode applied to that measure
    private Map<String, SqlNode> measureFilter = new HashMap<>();
    // measure name -> aggregation function name
    private Map<String, String> aggFunction = new HashMap<>();

    /**
     * Builds the metric's SqlNode: its type-params expr aliased to the metric name,
     * or the bare metric name when no expr is configured.
     */
    public static SqlNode build(Metric metric, SqlValidatorScope scope, EngineType engineType)
            throws Exception {
        if (metric.getMetricTypeParams() == null || metric.getMetricTypeParams().getExpr() == null
                || metric.getMetricTypeParams().getExpr().isEmpty()) {
            return parse(metric.getName(), scope, engineType);
        }
        SqlNode sqlNode = parse(metric.getMetricTypeParams().getExpr(), scope, engineType);
        return buildAs(metric.getName(), sqlNode);
    }

    /** Whether the named metric exists in the schema and is a field-derived metric. */
    public static Boolean isMetricField(String name, S2CalciteSchema schema) {
        return schema.getMetrics().stream().filter(m -> m.getName().equalsIgnoreCase(name))
                .findFirst().map(MetricNode::isMetricField).orElse(false);
    }

    /** Whether the metric is field-derived; null type-params (allowed by build) mean no. */
    public static Boolean isMetricField(Metric metric) {
        return metric.getMetricTypeParams() != null
                && metric.getMetricTypeParams().isFieldMetric();
    }
}

View File

@@ -1,78 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.render;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.calcite.TableView;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.FilterNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.MetricNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.SemanticNode;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
/**
 * Render stage that applies the query's WHERE filtering on top of the previously
 * rendered table view: projects the queried metrics/dimensions (plus any fields
 * referenced only by the WHERE clause), then wraps the view in an outer
 * {@code SELECT *} carrying the filter when a WHERE clause exists.
 */
public class FilterRender extends Renderer {

    @Override
    public void render(OntologyQuery metricCommand, List<DataModel> dataModels,
            SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
        TableView tableView = super.tableView;
        SqlNode filterNode = null;
        List<String> queryMetrics = new ArrayList<>(metricCommand.getMetrics());
        List<String> queryDimensions = new ArrayList<>(metricCommand.getDimensions());
        EngineType engineType = schema.getOntology().getDatabaseType();
        if (metricCommand.getWhere() != null && !metricCommand.getWhere().isEmpty()) {
            filterNode = SemanticNode.parse(metricCommand.getWhere(), scope, engineType);
            // fields referenced by WHERE may not be in the SELECT list yet; classify them
            // per data model into extra dimensions/metrics to project
            Set<String> whereFields = new HashSet<>();
            FilterNode.getFilterField(filterNode, whereFields);
            List<String> fieldWhere = whereFields.stream().collect(Collectors.toList());
            Set<String> dimensions = new HashSet<>();
            Set<String> metrics = new HashSet<>();
            for (DataModel dataModel : dataModels) {
                SourceRender.whereDimMetric(fieldWhere, metricCommand.getMetrics(),
                        metricCommand.getDimensions(), dataModel, schema, dimensions, metrics);
            }
            queryMetrics.addAll(metrics);
            queryDimensions.addAll(dimensions);
        }
        for (String dimension : queryDimensions) {
            tableView.getMeasure().add(SemanticNode.parse(dimension, scope, engineType));
        }
        for (String metric : queryMetrics) {
            Optional<Metric> optionalMetric = Renderer.getMetricByName(metric, schema);
            if (optionalMetric.isPresent() && MetricNode.isMetricField(optionalMetric.get())) {
                // metric from field ignore
                continue;
            }
            if (optionalMetric.isPresent()) {
                // known metric: render its defining expression
                tableView.getMeasure()
                        .add(MetricNode.build(optionalMetric.get(), scope, engineType));
            } else {
                // unknown name: pass through as a raw expression
                tableView.getMeasure().add(SemanticNode.parse(metric, scope, engineType));
            }
        }
        tableView.setMeasure(SemanticNode.deduplicateNode(tableView.getMeasure()));
        tableView.setDimension(SemanticNode.deduplicateNode(tableView.getDimension()));
        if (filterNode != null) {
            // wrap the projected view so the WHERE condition applies after projection
            TableView filterView = new TableView();
            filterView.setTable(SemanticNode.buildAs(Constants.DATASOURCE_TABLE_FILTER_PREFIX,
                    tableView.build()));
            filterView.getFilter().add(filterNode);
            filterView.getMeasure().add(SqlIdentifier.star(SqlParserPos.ZERO));
            super.tableView = filterView;
        }
    }
}

View File

@@ -1,485 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.render;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.JoinRelation;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.calcite.TableView;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.AggFunctionNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.DataModelNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.FilterNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.IdentifyNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.MetricNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.SemanticNode;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Identify;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Materialization;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.JoinConditionType;
import org.apache.calcite.sql.SqlBasicCall;
import org.apache.calcite.sql.SqlJoin;
import org.apache.calcite.sql.SqlLiteral;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Triple;
import org.springframework.util.CollectionUtils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Queue;
import java.util.Set;
import java.util.stream.Collectors;
/**
 * Render stage that joins multiple data models into a single table view.
 *
 * <p>Each data model is rendered individually, then chained left-to-right into a
 * {@link SqlJoin} tree. Join conditions come from configured {@code JoinRelation}s
 * when available, otherwise from shared identifier columns; zipper (slowly-changing)
 * tables additionally get a start/end-time range condition. The joined result is
 * wrapped in an inner SELECT and an outer filter view.
 */
@Slf4j
public class JoinRender extends Renderer {

    @Override
    public void render(OntologyQuery metricCommand, List<DataModel> dataModels,
            SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
        String queryWhere = metricCommand.getWhere();
        EngineType engineType = schema.getOntology().getDatabaseType();
        // collect the field names referenced by the WHERE clause
        Set<String> whereFields = new HashSet<>();
        List<String> fieldWhere = new ArrayList<>();
        if (queryWhere != null && !queryWhere.isEmpty()) {
            SqlNode sqlNode = SemanticNode.parse(queryWhere, scope, engineType);
            FilterNode.getFilterField(sqlNode, whereFields);
            fieldWhere = whereFields.stream().collect(Collectors.toList());
        }
        Set<String> queryAllDimension = new HashSet<>();
        Set<String> measures = new HashSet<>();
        DataModelNode.getQueryDimensionMeasure(schema.getOntology(), metricCommand,
                queryAllDimension, measures);
        // left: the join tree built so far; leftTable: the view most recently joined
        SqlNode left = null;
        TableView leftTable = null;
        TableView innerView = new TableView();
        TableView filterView = new TableView();
        // projection of the inner (joined) select, keyed by output name
        Map<String, SqlNode> innerSelect = new HashMap<>();
        Set<String> filterDimension = new HashSet<>();
        // data model name -> alias of its rendered view, for join-relation matching
        Map<String, String> beforeSources = new HashMap<>();
        for (int i = 0; i < dataModels.size(); i++) {
            final DataModel dataModel = dataModels.get(i);
            final Set<String> filterDimensions = new HashSet<>();
            final Set<String> filterMetrics = new HashSet<>();
            final Set<String> queryDimension = new HashSet<>();
            final Set<String> queryMetrics = new HashSet<>();
            // split WHERE fields into this model's dimensions/metrics
            SourceRender.whereDimMetric(fieldWhere, queryMetrics, queryDimension, dataModel, schema,
                    filterDimensions, filterMetrics);
            List<String> reqMetric = new ArrayList<>(metricCommand.getMetrics());
            reqMetric.addAll(filterMetrics);
            reqMetric = uniqList(reqMetric);
            List<String> reqDimension = new ArrayList<>(metricCommand.getDimensions());
            reqDimension.addAll(filterDimensions);
            reqDimension = uniqList(reqDimension);
            Set<String> sourceMeasure = dataModel.getMeasures().stream().map(mm -> mm.getName())
                    .collect(Collectors.toSet());
            doMetric(innerSelect, filterView, queryMetrics, reqMetric, dataModel, sourceMeasure,
                    scope, schema, nonAgg);
            Set<String> dimension = dataModel.getDimensions().stream().map(dd -> dd.getName())
                    .collect(Collectors.toSet());
            doDimension(innerSelect, filterDimension, queryDimension, reqDimension, dataModel,
                    dimension, scope, schema);
            // identifier columns are always carried so join conditions can reference them
            List<String> primary = new ArrayList<>();
            for (Identify identify : dataModel.getIdentifiers()) {
                primary.add(identify.getName());
                if (!fieldWhere.contains(identify.getName())) {
                    fieldWhere.add(identify.getName());
                }
            }
            List<String> dataSourceWhere = new ArrayList<>(fieldWhere);
            addZipperField(dataModel, dataSourceWhere);
            TableView tableView =
                    SourceRender.renderOne("", dataSourceWhere, queryMetrics, queryDimension,
                            metricCommand.getWhere(), dataModels.get(i), scope, schema, true);
            log.info("tableView {}", StringUtils.normalizeSpace(tableView.getTable().toString()));
            String alias = Constants.JOIN_TABLE_PREFIX + dataModel.getName();
            tableView.setAlias(alias);
            tableView.setPrimary(primary);
            tableView.setDataModel(dataModel);
            if (left == null) {
                // first model becomes the left anchor of the join chain
                leftTable = tableView;
                left = SemanticNode.buildAs(tableView.getAlias(), getTable(tableView, scope));
                beforeSources.put(dataModel.getName(), leftTable.getAlias());
                continue;
            }
            left = buildJoin(left, leftTable, tableView, beforeSources, dataModel, schema, scope);
            leftTable = tableView;
            beforeSources.put(dataModel.getName(), tableView.getAlias());
        }
        for (Map.Entry<String, SqlNode> entry : innerSelect.entrySet()) {
            innerView.getMeasure().add(entry.getValue());
        }
        innerView.setTable(left);
        filterView
                .setTable(SemanticNode.buildAs(Constants.JOIN_TABLE_OUT_PREFIX, innerView.build()));
        if (!filterDimension.isEmpty()) {
            for (String d : getQueryDimension(filterDimension, queryAllDimension, whereFields)) {
                // under aggregation, dimensions must also appear in GROUP BY
                if (nonAgg) {
                    filterView.getMeasure().add(SemanticNode.parse(d, scope, engineType));
                } else {
                    filterView.getDimension().add(SemanticNode.parse(d, scope, engineType));
                }
            }
        }
        filterView.setMeasure(SemanticNode.deduplicateNode(filterView.getMeasure()));
        filterView.setDimension(SemanticNode.deduplicateNode(filterView.getDimension()));
        super.tableView = filterView;
    }

    /**
     * Adds requested metrics resolvable from this data model to the inner projection
     * and, for aggregated metrics, to the outer filter view (aggregated unless nonAgg).
     */
    private void doMetric(Map<String, SqlNode> innerSelect, TableView filterView,
            Set<String> queryMetrics, List<String> reqMetrics, DataModel dataModel,
            Set<String> sourceMeasure, SqlValidatorScope scope, S2CalciteSchema schema,
            boolean nonAgg) throws Exception {
        String alias = Constants.JOIN_TABLE_PREFIX + dataModel.getName();
        EngineType engineType = schema.getOntology().getDatabaseType();
        for (String m : reqMetrics) {
            if (getMatchMetric(schema, sourceMeasure, m, queryMetrics)) {
                MetricNode metricNode = buildMetricNode(m, dataModel, scope, schema, nonAgg, alias);
                if (!metricNode.getNonAggNode().isEmpty()) {
                    for (String measure : metricNode.getNonAggNode().keySet()) {
                        innerSelect.put(measure, SemanticNode.buildAs(measure,
                                SemanticNode.parse(alias + "." + measure, scope, engineType)));
                    }
                }
                if (metricNode.getAggFunction() != null && !metricNode.getAggFunction().isEmpty()) {
                    for (Map.Entry<String, String> entry : metricNode.getAggFunction().entrySet()) {
                        if (metricNode.getNonAggNode().containsKey(entry.getKey())) {
                            if (nonAgg) {
                                filterView.getMeasure().add(SemanticNode.buildAs(entry.getKey(),
                                        SemanticNode.parse(entry.getKey(), scope, engineType)));
                            } else {
                                filterView.getMeasure()
                                        .add(SemanticNode.buildAs(entry.getKey(),
                                                AggFunctionNode.build(entry.getValue(),
                                                        entry.getKey(), scope, engineType)));
                            }
                        }
                    }
                }
            }
        }
    }

    /**
     * Adds requested dimensions resolvable from this data model to the inner
     * projection and records them in {@code filterDimension}.
     */
    private void doDimension(Map<String, SqlNode> innerSelect, Set<String> filterDimension,
            Set<String> queryDimension, List<String> reqDimensions, DataModel dataModel,
            Set<String> dimension, SqlValidatorScope scope, S2CalciteSchema schema)
            throws Exception {
        String alias = Constants.JOIN_TABLE_PREFIX + dataModel.getName();
        EngineType engineType = schema.getOntology().getDatabaseType();
        for (String d : reqDimensions) {
            if (getMatchDimension(schema, dimension, dataModel, d, queryDimension)) {
                if (d.contains(Constants.DIMENSION_IDENTIFY)) {
                    // "<prefix><sep><column>" form: select the column, alias to the full name
                    String[] identifyDimension = d.split(Constants.DIMENSION_IDENTIFY);
                    innerSelect.put(d, SemanticNode.buildAs(d, SemanticNode
                            .parse(alias + "." + identifyDimension[1], scope, engineType)));
                } else {
                    innerSelect.put(d, SemanticNode.buildAs(d,
                            SemanticNode.parse(alias + "." + d, scope, engineType)));
                }
                filterDimension.add(d);
            }
        }
    }

    /** Keeps only dimensions that are actually queried or referenced by WHERE. */
    private Set<String> getQueryDimension(Set<String> filterDimension,
            Set<String> queryAllDimension, Set<String> whereFields) {
        return filterDimension.stream()
                .filter(d -> queryAllDimension.contains(d) || whereFields.contains(d))
                .collect(Collectors.toSet());
    }

    /**
     * Whether metric {@code m} can be computed from this model's measures (all of its
     * measures present, or it is itself a measure); side effect: adds it to queryMetrics.
     */
    private boolean getMatchMetric(S2CalciteSchema schema, Set<String> sourceMeasure, String m,
            Set<String> queryMetrics) {
        Optional<Metric> metric = schema.getMetrics().stream()
                .filter(mm -> mm.getName().equalsIgnoreCase(m)).findFirst();
        boolean isAdd = false;
        if (metric.isPresent()) {
            Set<String> metricMeasures = metric.get().getMetricTypeParams().getMeasures().stream()
                    .map(me -> me.getName()).collect(Collectors.toSet());
            if (sourceMeasure.containsAll(metricMeasures)) {
                isAdd = true;
            }
        }
        if (sourceMeasure.contains(m)) {
            isAdd = true;
        }
        if (isAdd && !queryMetrics.contains(m)) {
            queryMetrics.add(m);
        }
        return isAdd;
    }

    /**
     * Whether dimension {@code d} belongs to this model (own dimension, identifier,
     * or schema-registered dimension); side effect: adds it to queryDimension.
     */
    private boolean getMatchDimension(S2CalciteSchema schema, Set<String> sourceDimension,
            DataModel dataModel, String d, Set<String> queryDimension) {
        String oriDimension = d;
        boolean isAdd = false;
        if (d.contains(Constants.DIMENSION_IDENTIFY)) {
            oriDimension = d.split(Constants.DIMENSION_IDENTIFY)[1];
        }
        if (sourceDimension.contains(oriDimension)) {
            isAdd = true;
        }
        for (Identify identify : dataModel.getIdentifiers()) {
            if (identify.getName().equalsIgnoreCase(oriDimension)) {
                isAdd = true;
                break;
            }
        }
        if (schema.getDimensions().containsKey(dataModel.getName())) {
            for (Dimension dim : schema.getDimensions().get(dataModel.getName())) {
                if (dim.getName().equalsIgnoreCase(oriDimension)) {
                    isAdd = true;
                }
            }
        }
        if (isAdd && !queryDimension.contains(oriDimension)) {
            queryDimension.add(oriDimension);
        }
        return isAdd;
    }

    /** Unwraps the view's underlying table node. */
    private SqlNode getTable(TableView tableView, SqlValidatorScope scope) throws Exception {
        return SemanticNode.getTable(tableView.getTable());
    }

    /**
     * Joins {@code tableView} onto the accumulated {@code left} tree. Prefers a
     * configured JoinRelation condition; falls back to shared identifier columns.
     * Zipper tables additionally get a start/end-time range condition ANDed in.
     */
    private SqlNode buildJoin(SqlNode left, TableView leftTable, TableView tableView,
            Map<String, String> before, DataModel dataModel, S2CalciteSchema schema,
            SqlValidatorScope scope) throws Exception {
        EngineType engineType = schema.getOntology().getDatabaseType();
        SqlNode condition =
                getCondition(leftTable, tableView, dataModel, schema, scope, engineType);
        SqlLiteral sqlLiteral = SemanticNode.getJoinSqlLiteral("");
        JoinRelation matchJoinRelation = getMatchJoinRelation(before, tableView, schema);
        SqlNode joinRelationCondition = null;
        if (!CollectionUtils.isEmpty(matchJoinRelation.getJoinCondition())) {
            // a configured relation overrides the identifier-derived condition
            sqlLiteral = SemanticNode.getJoinSqlLiteral(matchJoinRelation.getJoinType());
            joinRelationCondition = getCondition(matchJoinRelation, scope, engineType);
            condition = joinRelationCondition;
        }
        if (Materialization.TimePartType.ZIPPER.equals(leftTable.getDataModel().getTimePartType())
                || Materialization.TimePartType.ZIPPER
                        .equals(tableView.getDataModel().getTimePartType())) {
            SqlNode zipperCondition =
                    getZipperCondition(leftTable, tableView, dataModel, schema, scope);
            if (Objects.nonNull(joinRelationCondition)) {
                condition = new SqlBasicCall(SqlStdOperatorTable.AND,
                        new ArrayList<>(Arrays.asList(zipperCondition, joinRelationCondition)),
                        SqlParserPos.ZERO, null);
            } else {
                condition = zipperCondition;
            }
        }
        return new SqlJoin(SqlParserPos.ZERO, left,
                SqlLiteral.createBoolean(false, SqlParserPos.ZERO), sqlLiteral,
                SemanticNode.buildAs(tableView.getAlias(), getTable(tableView, scope)),
                SqlLiteral.createSymbol(JoinConditionType.ON, SqlParserPos.ZERO), condition);
    }

    /**
     * Finds the configured JoinRelation connecting {@code tableView} to any already
     * rendered model, rewriting its condition columns to the rendered aliases.
     * Returns an empty relation (null condition) when none matches.
     */
    private JoinRelation getMatchJoinRelation(Map<String, String> before, TableView tableView,
            S2CalciteSchema schema) {
        JoinRelation matchJoinRelation = JoinRelation.builder().build();
        if (!CollectionUtils.isEmpty(schema.getJoinRelations())) {
            for (JoinRelation joinRelation : schema.getJoinRelations()) {
                if (joinRelation.getRight().equalsIgnoreCase(tableView.getDataModel().getName())
                        && before.containsKey(joinRelation.getLeft())) {
                    matchJoinRelation.setJoinCondition(joinRelation.getJoinCondition().stream()
                            .map(r -> Triple.of(
                                    before.get(joinRelation.getLeft()) + "." + r.getLeft(),
                                    r.getMiddle(), tableView.getAlias() + "." + r.getRight()))
                            .collect(Collectors.toList()));
                    matchJoinRelation.setJoinType(joinRelation.getJoinType());
                    // Added join condition judgment to solve the problem of join condition order
                } else if (joinRelation.getLeft()
                        .equalsIgnoreCase(tableView.getDataModel().getName())
                        && before.containsKey(joinRelation.getRight())) {
                    matchJoinRelation.setJoinCondition(joinRelation.getJoinCondition().stream()
                            .map(r -> Triple.of(
                                    before.get(joinRelation.getRight()) + "." + r.getRight(),
                                    r.getMiddle(), tableView.getAlias() + "." + r.getLeft()))
                            .collect(Collectors.toList()));
                    matchJoinRelation.setJoinType(joinRelation.getJoinType());
                }
            }
        }
        return matchJoinRelation;
    }

    /** Builds the ON condition from a JoinRelation's (left, op, right) triples, ANDed. */
    private SqlNode getCondition(JoinRelation joinRelation, SqlValidatorScope scope,
            EngineType engineType) throws Exception {
        SqlNode condition = null;
        for (Triple<String, String, String> con : joinRelation.getJoinCondition()) {
            List<SqlNode> ons = new ArrayList<>();
            ons.add(SemanticNode.parse(con.getLeft(), scope, engineType));
            ons.add(SemanticNode.parse(con.getRight(), scope, engineType));
            if (Objects.isNull(condition)) {
                condition = new SqlBasicCall(SemanticNode.getBinaryOperator(con.getMiddle()), ons,
                        SqlParserPos.ZERO, null);
                continue;
            }
            SqlNode addCondition = new SqlBasicCall(SemanticNode.getBinaryOperator(con.getMiddle()),
                    ons, SqlParserPos.ZERO, null);
            condition = new SqlBasicCall(SqlStdOperatorTable.AND,
                    new ArrayList<>(Arrays.asList(condition, addCondition)), SqlParserPos.ZERO,
                    null);
        }
        return condition;
    }

    /**
     * Builds an equality ON condition from columns both sides SELECT, restricted to
     * dimensions; a foreign key only joins against the other side's primary key.
     * May return null when no shared join column qualifies.
     */
    private SqlNode getCondition(TableView left, TableView right, DataModel dataModel,
            S2CalciteSchema schema, SqlValidatorScope scope, EngineType engineType)
            throws Exception {
        Set<String> selectLeft = SemanticNode.getSelect(left.getTable());
        Set<String> selectRight = SemanticNode.getSelect(right.getTable());
        selectLeft.retainAll(selectRight);
        SqlNode condition = null;
        for (String on : selectLeft) {
            if (!SourceRender.isDimension(on, dataModel, schema)) {
                continue;
            }
            if (IdentifyNode.isForeign(on, left.getDataModel().getIdentifiers())) {
                if (!IdentifyNode.isPrimary(on, right.getDataModel().getIdentifiers())) {
                    continue;
                }
            }
            if (IdentifyNode.isForeign(on, right.getDataModel().getIdentifiers())) {
                if (!IdentifyNode.isPrimary(on, left.getDataModel().getIdentifiers())) {
                    continue;
                }
            }
            List<SqlNode> ons = new ArrayList<>();
            ons.add(SemanticNode.parse(left.getAlias() + "." + on, scope, engineType));
            ons.add(SemanticNode.parse(right.getAlias() + "." + on, scope, engineType));
            if (condition == null) {
                condition =
                        new SqlBasicCall(SqlStdOperatorTable.EQUALS, ons, SqlParserPos.ZERO, null);
                continue;
            }
            SqlNode addCondition =
                    new SqlBasicCall(SqlStdOperatorTable.EQUALS, ons, SqlParserPos.ZERO, null);
            condition = new SqlBasicCall(SqlStdOperatorTable.AND,
                    new ArrayList<>(Arrays.asList(condition, addCondition)), SqlParserPos.ZERO,
                    null);
        }
        return condition;
    }

    /**
     * Backtracking DFS over the join adjacency map collecting a visit order of
     * {@code cnt} nodes into {@code orders}.
     * NOTE(review): no caller is visible in this file — possibly dead code; confirm
     * before removing.
     */
    private static void joinOrder(int cnt, String id, Map<String, Set<String>> next,
            Queue<String> orders, Map<String, Boolean> visited) {
        visited.put(id, true);
        orders.add(id);
        if (orders.size() >= cnt) {
            return;
        }
        for (String nextId : next.get(id)) {
            if (!visited.get(nextId)) {
                joinOrder(cnt, nextId, next, orders, visited);
                if (orders.size() >= cnt) {
                    return;
                }
            }
        }
        orders.poll();
        visited.put(id, false);
    }

    /** For zipper tables, ensures the start/end time columns are carried in the field list. */
    private void addZipperField(DataModel dataModel, List<String> fields) {
        if (Materialization.TimePartType.ZIPPER.equals(dataModel.getTimePartType())) {
            dataModel.getDimensions().stream()
                    .filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
                    .forEach(t -> {
                        if (t.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_END)
                                && !fields.contains(t.getName())) {
                            fields.add(t.getName());
                        }
                        if (t.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_START)
                                && !fields.contains(t.getName())) {
                            fields.add(t.getName());
                        }
                    });
        }
    }

    /**
     * Builds the range condition joining a zipper table to a partitioned table:
     * zipper.start <= part.date AND zipper.end > part.date. Exactly one side must be
     * a zipper table.
     * NOTE(review): if the start/end time dimensions are missing, startTime/endTime/
     * dateTime stay "" and are passed to parse — likely an error path; confirm.
     */
    private SqlNode getZipperCondition(TableView left, TableView right, DataModel dataModel,
            S2CalciteSchema schema, SqlValidatorScope scope) throws Exception {
        if (Materialization.TimePartType.ZIPPER.equals(left.getDataModel().getTimePartType())
                && Materialization.TimePartType.ZIPPER
                        .equals(right.getDataModel().getTimePartType())) {
            throw new Exception("not support two zipper table");
        }
        SqlNode condition = null;
        Optional<Dimension> leftTime = left.getDataModel().getDimensions().stream()
                .filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
                .findFirst();
        Optional<Dimension> rightTime = right.getDataModel().getDimensions().stream()
                .filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
                .findFirst();
        if (leftTime.isPresent() && rightTime.isPresent()) {
            String startTime = "";
            String endTime = "";
            String dateTime = "";
            Optional<Dimension> startTimeOp = (Materialization.TimePartType.ZIPPER
                    .equals(left.getDataModel().getTimePartType()) ? left : right).getDataModel()
                            .getDimensions().stream()
                            .filter(d -> Constants.DIMENSION_TYPE_TIME
                                    .equalsIgnoreCase(d.getType()))
                            .filter(d -> d.getName()
                                    .startsWith(Constants.MATERIALIZATION_ZIPPER_START))
                            .findFirst();
            Optional<Dimension> endTimeOp = (Materialization.TimePartType.ZIPPER
                    .equals(left.getDataModel().getTimePartType()) ? left : right).getDataModel()
                            .getDimensions().stream()
                            .filter(d -> Constants.DIMENSION_TYPE_TIME
                                    .equalsIgnoreCase(d.getType()))
                            .filter(d -> d.getName()
                                    .startsWith(Constants.MATERIALIZATION_ZIPPER_END))
                            .findFirst();
            if (startTimeOp.isPresent() && endTimeOp.isPresent()) {
                TableView zipper = Materialization.TimePartType.ZIPPER
                        .equals(left.getDataModel().getTimePartType()) ? left : right;
                TableView partMetric = Materialization.TimePartType.ZIPPER
                        .equals(left.getDataModel().getTimePartType()) ? right : left;
                Optional<Dimension> partTime = Materialization.TimePartType.ZIPPER
                        .equals(left.getDataModel().getTimePartType()) ? rightTime : leftTime;
                startTime = zipper.getAlias() + "." + startTimeOp.get().getName();
                endTime = zipper.getAlias() + "." + endTimeOp.get().getName();
                dateTime = partMetric.getAlias() + "." + partTime.get().getName();
            }
            EngineType engineType = schema.getOntology().getDatabaseType();
            ArrayList<SqlNode> operandList =
                    new ArrayList<>(Arrays.asList(SemanticNode.parse(endTime, scope, engineType),
                            SemanticNode.parse(dateTime, scope, engineType)));
            condition = new SqlBasicCall(SqlStdOperatorTable.AND,
                    new ArrayList<SqlNode>(Arrays.asList(
                            new SqlBasicCall(SqlStdOperatorTable.LESS_THAN_OR_EQUAL,
                                    new ArrayList<SqlNode>(Arrays.asList(
                                            SemanticNode.parse(startTime, scope, engineType),
                                            SemanticNode.parse(dateTime, scope, engineType))),
                                    SqlParserPos.ZERO, null),
                            new SqlBasicCall(SqlStdOperatorTable.GREATER_THAN, operandList,
                                    SqlParserPos.ZERO, null))),
                    SqlParserPos.ZERO, null);
        }
        return condition;
    }
}

View File

@@ -1,59 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.render;
import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.calcite.TableView;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.MetricNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.SemanticNode;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import org.springframework.util.CollectionUtils;
import java.util.ArrayList;
import java.util.List;
/**
 * Final render stage: projects the query's result columns onto the current table
 * view and applies row limit and ORDER BY.
 */
public class OutputRender extends Renderer {

    @Override
    public void render(OntologyQuery metricCommand, List<DataModel> dataModels,
            SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
        TableView selectDataSet = super.tableView;
        EngineType engineType = schema.getOntology().getDatabaseType();
        for (String dimension : metricCommand.getDimensions()) {
            selectDataSet.getMeasure().add(SemanticNode.parse(dimension, scope, engineType));
        }
        for (String metric : metricCommand.getMetrics()) {
            if (MetricNode.isMetricField(metric, schema)) {
                // metric from field ignore
                continue;
            }
            selectDataSet.getMeasure().add(SemanticNode.parse(metric, scope, engineType));
        }
        if (metricCommand.getLimit() > 0) {
            // NOTE(review): the query's LIMIT value is stored via setOffset — presumably
            // TableView's "offset" slot is rendered as the row count; confirm intent
            SqlNode offset =
                    SemanticNode.parse(metricCommand.getLimit().toString(), scope, engineType);
            selectDataSet.setOffset(offset);
        }
        if (!CollectionUtils.isEmpty(metricCommand.getOrder())) {
            List<SqlNode> orderList = new ArrayList<>();
            for (ColumnOrder columnOrder : metricCommand.getOrder()) {
                // DESC is rendered as a call wrapping the column; ASC is the bare column
                if (SqlStdOperatorTable.DESC.getName().equalsIgnoreCase(columnOrder.getOrder())) {
                    orderList.add(SqlStdOperatorTable.DESC.createCall(SqlParserPos.ZERO,
                            new SqlNode[] {SemanticNode.parse(columnOrder.getCol(), scope,
                                    engineType)}));
                } else {
                    orderList.add(SemanticNode.parse(columnOrder.getCol(), scope, engineType));
                }
            }
            selectDataSet.setOrder(new SqlNodeList(orderList, SqlParserPos.ZERO));
        }
    }
}

View File

@@ -1,119 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.render;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.calcite.TableView;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.MeasureNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.MetricNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.SemanticNode;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Identify;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Measure;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
import lombok.Data;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
/** Base class for renderers that assemble a {@link TableView} for an ontology query. */
@Data
public abstract class Renderer {

    // The view under construction; subclasses populate it in render().
    protected TableView tableView = new TableView();

    /** Finds a dimension declared on the data model by case-insensitive name. */
    public static Optional<Dimension> getDimensionByName(String name, DataModel datasource) {
        return datasource.getDimensions().stream().filter(d -> d.getName().equalsIgnoreCase(name))
                .findFirst();
    }

    /** Finds a measure declared on the data model by case-insensitive name. */
    public static Optional<Measure> getMeasureByName(String name, DataModel datasource) {
        return datasource.getMeasures().stream().filter(mm -> mm.getName().equalsIgnoreCase(name))
                .findFirst();
    }

    /** Finds a metric in the schema by case-insensitive name. */
    public static Optional<Metric> getMetricByName(String name, S2CalciteSchema schema) {
        return schema.getMetrics().stream().filter(m -> m.getName().equalsIgnoreCase(name))
                .findFirst();
    }

    /** Finds an identifier declared on the data model by case-insensitive name. */
    public static Optional<Identify> getIdentifyByName(String name, DataModel datasource) {
        return datasource.getIdentifiers().stream().filter(i -> i.getName().equalsIgnoreCase(name))
                .findFirst();
    }

    /**
     * Builds the SQL forms of a metric: non-aggregated nodes, aggregated nodes, aggregation
     * functions, and optional measure filters. The name is resolved first as a schema metric
     * composed of measures, then as a bare measure on the data model.
     *
     * @param metric metric or measure name to resolve
     * @param datasource data model supplying the measure definitions
     * @param scope Calcite validator scope used for expression parsing
     * @param schema semantic schema holding metric definitions
     * @param nonAgg when true, aggregated nodes are built without aggregation applied
     * @param alias table alias used to qualify non-aggregated expressions
     */
    public static MetricNode buildMetricNode(String metric, DataModel datasource,
            SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg, String alias)
            throws Exception {
        Optional<Metric> metricOpt = getMetricByName(metric, schema);
        MetricNode metricNode = new MetricNode();
        EngineType engineType = EngineType.fromString(datasource.getType());
        if (metricOpt.isPresent()) {
            metricNode.setMetric(metricOpt.get());
            for (Measure m : metricOpt.get().getMetricTypeParams().getMeasures()) {
                // Prefer the definition on the data model; fall back to the one embedded
                // in the metric. The constraint always comes from the metric's measure.
                Measure resolved = getMeasureByName(m.getName(), datasource).orElse(m);
                metricNode.getNonAggNode().put(resolved.getName(),
                        MeasureNode.buildNonAgg(alias, resolved, scope, engineType));
                metricNode.getAggNode().put(resolved.getName(),
                        MeasureNode.buildAgg(resolved, nonAgg, scope, engineType));
                metricNode.getAggFunction().put(resolved.getName(), resolved.getAgg());
                if (m.getConstraint() != null && !m.getConstraint().isEmpty()) {
                    metricNode.getMeasureFilter().put(m.getName(),
                            SemanticNode.parse(m.getConstraint(), scope, engineType));
                }
            }
            return metricNode;
        }
        Optional<Measure> measureOpt = getMeasureByName(metric, datasource);
        if (measureOpt.isPresent()) {
            Measure measure = measureOpt.get();
            metricNode.getNonAggNode().put(measure.getName(),
                    MeasureNode.buildNonAgg(alias, measure, scope, engineType));
            metricNode.getAggNode().put(measure.getName(),
                    MeasureNode.buildAgg(measure, nonAgg, scope, engineType));
            metricNode.getAggFunction().put(measure.getName(), measure.getAgg());
            if (measure.getConstraint() != null && !measure.getConstraint().isEmpty()) {
                metricNode.getMeasureFilter().put(measure.getName(),
                        SemanticNode.parse(measure.getConstraint(), scope, engineType));
            }
        }
        return metricNode;
    }

    /** Removes duplicates; keeps first-occurrence order so results are deterministic. */
    public static List<String> uniqList(List<String> list) {
        return list.stream().distinct().collect(Collectors.toList());
    }

    public void setTable(SqlNode table) {
        tableView.setTable(table);
    }

    /** Builds the SQL node for the assembled view. */
    public SqlNode builder() {
        return tableView.build();
    }

    /** Builds the SQL node for the assembled view, wrapped under an AS alias. */
    public SqlNode builderAs(String alias) throws Exception {
        return SemanticNode.buildAs(alias, tableView.build());
    }

    /** Populates {@link #tableView} for the given query against the given data models. */
    public abstract void render(OntologyQuery metricCommand, List<DataModel> dataModels,
            SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception;
}

View File

@@ -1,359 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite.render;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.calcite.TableView;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.DataModelNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.DimensionNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.FilterNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.IdentifyNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.MetricNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.SemanticNode;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Identify;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Materialization;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Measure;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import org.springframework.util.CollectionUtils;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
import static com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants.DIMENSION_DELIMITER;
/** Renders the table-level dataSet view from the defined data model schema. */
@Slf4j
public class SourceRender extends Renderer {

    /**
     * Renders a single data model into an output TableView: an inner view selecting the raw
     * columns and an aliased outer view exposing the requested metrics and dimensions.
     *
     * @param alias table alias to qualify non-aggregated expressions with
     * @param fieldWheres fields referenced by the WHERE clause
     * @param reqMetrics requested metric names
     * @param reqDimensions requested dimension names
     * @param queryWhere raw WHERE text (currently unused here — TODO confirm)
     * @param datasource the data model being rendered
     */
    public static TableView renderOne(String alias, List<String> fieldWheres,
            Set<String> reqMetrics, Set<String> reqDimensions, String queryWhere,
            DataModel datasource, SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg)
            throws Exception {
        TableView dataSet = new TableView();
        TableView output = new TableView();
        // Work on copies so the caller's collections are not mutated.
        Set<String> queryMetrics = new HashSet<>(reqMetrics);
        Set<String> queryDimensions = new HashSet<>(reqDimensions);
        List<String> fieldWhere = new ArrayList<>(fieldWheres);
        Map<String, String> extendFields = new HashMap<>();
        if (!fieldWhere.isEmpty()) {
            // Classify filter-only fields into dimensions/metrics so they are also selected.
            Set<String> dimensions = new HashSet<>();
            Set<String> metrics = new HashSet<>();
            whereDimMetric(fieldWhere, queryMetrics, queryDimensions, datasource, schema,
                    dimensions, metrics);
            queryMetrics.addAll(metrics);
            queryDimensions.addAll(dimensions);
            mergeWhere(fieldWhere, dataSet, output, queryMetrics, queryDimensions, extendFields,
                    datasource, scope, schema, nonAgg);
        }
        // Time dimensions are always added (zipper models need both start and end columns).
        addTimeDimension(datasource, queryDimensions);
        for (String metric : queryMetrics) {
            MetricNode metricNode =
                    buildMetricNode(metric, datasource, scope, schema, nonAgg, alias);
            // Aggregated forms go to the outer view; raw forms and filters to the inner view.
            if (!metricNode.getAggNode().isEmpty()) {
                metricNode.getAggNode().entrySet().stream()
                        .forEach(m -> output.getMeasure().add(m.getValue()));
            }
            if (metricNode.getNonAggNode() != null) {
                metricNode.getNonAggNode().entrySet().stream()
                        .forEach(m -> dataSet.getMeasure().add(m.getValue()));
            }
            if (metricNode.getMeasureFilter() != null) {
                metricNode.getMeasureFilter().entrySet().stream()
                        .forEach(m -> dataSet.getFilter().add(m.getValue()));
            }
        }
        for (String dimension : queryDimensions) {
            // Entries have the form "prefix<DIMENSION_IDENTIFY>name"; skip prefixed entries
            // whose plain name is also requested.
            if (dimension.contains(Constants.DIMENSION_IDENTIFY)
                    && queryDimensions.contains(dimension.split(Constants.DIMENSION_IDENTIFY)[1])) {
                continue;
            }
            buildDimension(dimension.contains(Constants.DIMENSION_IDENTIFY) ? dimension : "",
                    dimension.contains(Constants.DIMENSION_IDENTIFY)
                            ? dimension.split(Constants.DIMENSION_IDENTIFY)[1]
                            : dimension,
                    datasource, schema, nonAgg, extendFields, dataSet, output, scope);
        }
        output.setMeasure(SemanticNode.deduplicateNode(output.getMeasure()));
        dataSet.setMeasure(SemanticNode.deduplicateNode(dataSet.getMeasure()));
        SqlNode tableNode = DataModelNode.buildExtend(datasource, extendFields, scope);
        dataSet.setTable(tableNode);
        // Wrap the inner view under a unique alias so the outer view can reference it.
        output.setTable(
                SemanticNode.buildAs(Constants.DATASOURCE_TABLE_OUT_PREFIX + datasource.getName()
                        + "_" + UUID.randomUUID().toString().substring(32), dataSet.build()));
        return output;
    }

    /**
     * Adds one dimension to the inner and outer views, resolving it (in order) against the
     * schema's dimensions, the model's identifiers, then the model's own dimensions.
     */
    private static void buildDimension(String alias, String dimension, DataModel datasource,
            S2CalciteSchema schema, boolean nonAgg, Map<String, String> extendFields,
            TableView dataSet, TableView output, SqlValidatorScope scope) throws Exception {
        List<Dimension> dimensionList = schema.getDimensions().get(datasource.getName());
        EngineType engineType = schema.getOntology().getDatabaseType();
        boolean isAdd = false;
        if (!CollectionUtils.isEmpty(dimensionList)) {
            for (Dimension dim : dimensionList) {
                if (!dim.getName().equalsIgnoreCase(dimension)) {
                    continue;
                }
                dataSet.getMeasure().add(DimensionNode.buildArray(dim, scope, engineType));
                addExtendFields(dim, extendFields);
                if (nonAgg) {
                    output.getMeasure().add(DimensionNode.buildName(dim, scope, engineType));
                    isAdd = true;
                    continue;
                }
                if ("".equals(alias)) {
                    output.getDimension().add(DimensionNode.buildName(dim, scope, engineType));
                } else {
                    output.getDimension()
                            .add(DimensionNode.buildNameAs(alias, dim, scope, engineType));
                }
                isAdd = true;
                break;
            }
        }
        if (!isAdd) {
            // Fall back to the model's identifier columns.
            Optional<Identify> identify = datasource.getIdentifiers().stream()
                    .filter(i -> i.getName().equalsIgnoreCase(dimension)).findFirst();
            if (identify.isPresent()) {
                if (nonAgg) {
                    dataSet.getMeasure()
                            .add(SemanticNode.parse(identify.get().getName(), scope, engineType));
                    output.getMeasure()
                            .add(SemanticNode.parse(identify.get().getName(), scope, engineType));
                } else {
                    dataSet.getMeasure()
                            .add(SemanticNode.parse(identify.get().getName(), scope, engineType));
                    output.getDimension()
                            .add(SemanticNode.parse(identify.get().getName(), scope, engineType));
                }
                isAdd = true;
            }
        }
        if (isAdd) {
            return;
        }
        // Last resort: a dimension declared directly on the data model.
        Optional<Dimension> dimensionOptional = getDimensionByName(dimension, datasource);
        if (dimensionOptional.isPresent()) {
            dataSet.getMeasure()
                    .add(DimensionNode.buildArray(dimensionOptional.get(), scope, engineType));
            addExtendFields(dimensionOptional.get(), extendFields);
            if (nonAgg) {
                output.getMeasure()
                        .add(DimensionNode.buildName(dimensionOptional.get(), scope, engineType));
                return;
            }
            output.getDimension()
                    .add(DimensionNode.buildName(dimensionOptional.get(), scope, engineType));
        }
    }

    /** Records array-typed dimensions (and their delimiter, if configured) for table expansion. */
    private static void addExtendFields(Dimension dimension, Map<String, String> extendFields) {
        if (dimension.getDataType().isArray()) {
            if (Objects.nonNull(dimension.getExt())
                    && dimension.getExt().containsKey(DIMENSION_DELIMITER)) {
                extendFields.put(dimension.getExpr(),
                        (String) dimension.getExt().get(DIMENSION_DELIMITER));
            } else {
                extendFields.put(dimension.getExpr(), "");
            }
        }
    }

    /**
     * Builds select nodes for filter-only fields. Side effect: fields already selected as
     * metrics or dimensions are removed from {@code fields}.
     */
    private static List<SqlNode> getWhereMeasure(List<String> fields, Set<String> queryMetrics,
            Set<String> queryDimensions, Map<String, String> extendFields, DataModel datasource,
            SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
        Iterator<String> iterator = fields.iterator();
        List<SqlNode> whereNode = new ArrayList<>();
        EngineType engineType = schema.getOntology().getDatabaseType();
        while (iterator.hasNext()) {
            String cur = iterator.next();
            if (queryDimensions.contains(cur) || queryMetrics.contains(cur)) {
                iterator.remove();
            }
        }
        for (String where : fields) {
            List<Dimension> dimensionList = schema.getDimensions().get(datasource.getName());
            boolean isAdd = false;
            if (!CollectionUtils.isEmpty(dimensionList)) {
                for (Dimension dim : dimensionList) {
                    if (!dim.getName().equalsIgnoreCase(where)) {
                        continue;
                    }
                    whereNode.addAll(DimensionNode.expand(dim, scope, engineType));
                    isAdd = true;
                }
            }
            Optional<Identify> identify = getIdentifyByName(where, datasource);
            if (identify.isPresent()) {
                whereNode.add(IdentifyNode.build(identify.get(), scope, engineType));
                isAdd = true;
            }
            if (isAdd) {
                continue;
            }
            Optional<Dimension> dimensionOptional = getDimensionByName(where, datasource);
            if (dimensionOptional.isPresent()) {
                whereNode.add(DimensionNode.buildArray(dimensionOptional.get(), scope, engineType));
                addExtendFields(dimensionOptional.get(), extendFields);
            }
        }
        return whereNode;
    }

    /** Adds the nodes for filter-only fields to the inner view's select list. */
    private static void mergeWhere(List<String> fields, TableView dataSet, TableView outputSet,
            Set<String> queryMetrics, Set<String> queryDimensions, Map<String, String> extendFields,
            DataModel datasource, SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg)
            throws Exception {
        List<SqlNode> whereNode = getWhereMeasure(fields, queryMetrics, queryDimensions,
                extendFields, datasource, scope, schema, nonAgg);
        dataSet.getMeasure().addAll(whereNode);
        // getWhere(outputSet,fields,queryMetrics,queryDimensions,datasource,scope,schema);
    }

    /**
     * Classifies WHERE fields not already selected into {@code dimensions} / {@code metrics},
     * stripping any DIMENSION_IDENTIFY prefix before lookup.
     */
    public static void whereDimMetric(List<String> fields, Set<String> queryMetrics,
            Set<String> queryDimensions, DataModel datasource, S2CalciteSchema schema,
            Set<String> dimensions, Set<String> metrics) {
        for (String field : fields) {
            if (queryDimensions.contains(field) || queryMetrics.contains(field)) {
                continue;
            }
            String filterField = field;
            if (field.contains(Constants.DIMENSION_IDENTIFY)) {
                filterField = field.split(Constants.DIMENSION_IDENTIFY)[1];
            }
            addField(filterField, field, datasource, schema, dimensions, metrics);
        }
    }

    /**
     * Resolves a single field name against model dimensions, identifiers, schema dimensions,
     * model measures, then schema metrics; records the original field name in the matching set.
     */
    private static void addField(String field, String oriField, DataModel datasource,
            S2CalciteSchema schema, Set<String> dimensions, Set<String> metrics) {
        Optional<Dimension> dimension = datasource.getDimensions().stream()
                .filter(d -> d.getName().equalsIgnoreCase(field)).findFirst();
        if (dimension.isPresent()) {
            dimensions.add(oriField);
            return;
        }
        Optional<Identify> identify = datasource.getIdentifiers().stream()
                .filter(i -> i.getName().equalsIgnoreCase(field)).findFirst();
        if (identify.isPresent()) {
            dimensions.add(oriField);
            return;
        }
        if (schema.getDimensions().containsKey(datasource.getName())) {
            Optional<Dimension> dataSourceDim = schema.getDimensions().get(datasource.getName())
                    .stream().filter(d -> d.getName().equalsIgnoreCase(field)).findFirst();
            if (dataSourceDim.isPresent()) {
                dimensions.add(oriField);
                return;
            }
        }
        Optional<Measure> metric = datasource.getMeasures().stream()
                .filter(m -> m.getName().equalsIgnoreCase(field)).findFirst();
        if (metric.isPresent()) {
            metrics.add(oriField);
            return;
        }
        Optional<Metric> datasourceMetric = schema.getMetrics().stream()
                .filter(m -> m.getName().equalsIgnoreCase(field)).findFirst();
        if (datasourceMetric.isPresent()) {
            // Only accept a schema metric when this model provides all of its measures.
            Set<String> measures = datasourceMetric.get().getMetricTypeParams().getMeasures()
                    .stream().map(m -> m.getName()).collect(Collectors.toSet());
            if (datasource.getMeasures().stream().map(m -> m.getName()).collect(Collectors.toSet())
                    .containsAll(measures)) {
                metrics.add(oriField);
                return;
            }
        }
    }

    /** True if the name matches a model dimension, a model identifier, or a schema dimension. */
    public static boolean isDimension(String name, DataModel datasource, S2CalciteSchema schema) {
        Optional<Dimension> dimension = datasource.getDimensions().stream()
                .filter(d -> d.getName().equalsIgnoreCase(name)).findFirst();
        if (dimension.isPresent()) {
            return true;
        }
        Optional<Identify> identify = datasource.getIdentifiers().stream()
                .filter(i -> i.getName().equalsIgnoreCase(name)).findFirst();
        if (identify.isPresent()) {
            return true;
        }
        if (schema.getDimensions().containsKey(datasource.getName())) {
            Optional<Dimension> dataSourceDim = schema.getDimensions().get(datasource.getName())
                    .stream().filter(d -> d.getName().equalsIgnoreCase(name)).findFirst();
            if (dataSourceDim.isPresent()) {
                return true;
            }
        }
        return false;
    }

    /**
     * Ensures the model's time dimensions are queried: both zipper start/end columns for
     * ZIPPER-partitioned models, otherwise the first plain time dimension.
     */
    private static void addTimeDimension(DataModel dataModel, Set<String> queryDimension) {
        if (Materialization.TimePartType.ZIPPER.equals(dataModel.getTimePartType())) {
            Optional<Dimension> startTimeOp = dataModel.getDimensions().stream()
                    .filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
                    .filter(d -> d.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_START))
                    .findFirst();
            Optional<Dimension> endTimeOp = dataModel.getDimensions().stream()
                    .filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
                    .filter(d -> d.getName().startsWith(Constants.MATERIALIZATION_ZIPPER_END))
                    .findFirst();
            if (startTimeOp.isPresent() && !queryDimension.contains(startTimeOp.get().getName())) {
                queryDimension.add(startTimeOp.get().getName());
            }
            if (endTimeOp.isPresent() && !queryDimension.contains(endTimeOp.get().getName())) {
                queryDimension.add(endTimeOp.get().getName());
            }
        } else {
            Optional<Dimension> timeOp = dataModel.getDimensions().stream()
                    .filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
                    .findFirst();
            if (timeOp.isPresent() && !queryDimension.contains(timeOp.get().getName())) {
                queryDimension.add(timeOp.get().getName());
            }
        }
    }

    /**
     * Renders the query: a single data model goes through {@link #renderOne}; multiple models
     * are delegated to {@link JoinRender}.
     */
    public void render(OntologyQuery ontologyQuery, List<DataModel> dataModels,
            SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
        String queryWhere = ontologyQuery.getWhere();
        Set<String> whereFields = new HashSet<>();
        List<String> fieldWhere = new ArrayList<>();
        EngineType engineType = schema.getOntology().getDatabaseType();
        if (queryWhere != null && !queryWhere.isEmpty()) {
            // Extract the column names referenced by the WHERE clause.
            SqlNode sqlNode = SemanticNode.parse(queryWhere, scope, engineType);
            FilterNode.getFilterField(sqlNode, whereFields);
            fieldWhere = whereFields.stream().collect(Collectors.toList());
        }
        if (dataModels.size() == 1) {
            DataModel dataModel = dataModels.get(0);
            super.tableView = renderOne("", fieldWhere, ontologyQuery.getMetrics(),
                    ontologyQuery.getDimensions(), ontologyQuery.getWhere(), dataModel, scope,
                    schema, nonAgg);
            return;
        }
        JoinRender joinRender = new JoinRender();
        joinRender.render(ontologyQuery, dataModels, scope, schema, nonAgg);
        super.tableView = joinRender.getTableView();
    }
}

View File

@@ -1,33 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;
import lombok.Builder;
import lombok.Data;
import java.util.List;
/** Logical data model (physical table or SQL view) that ontology queries are rendered against. */
@Data
@Builder
public class DataModel {
    private Long id;
    private String name;
    // Owning semantic model id.
    private Long modelId;
    // Engine/source type string; parsed with EngineType.fromString elsewhere.
    private String type;
    // Backing SQL query, when the model is defined by a query.
    private String sqlQuery;
    // Backing table reference — presumably the physical table name; confirm against usage.
    private String tableQuery;
    private List<Identify> identifiers;
    private List<Dimension> dimensions;
    private List<Measure> measures;
    // Time granularity for aggregation — confirm expected values against callers.
    private String aggTime;
    // Time-partitioning style of the underlying data; defaults to no partitioning.
    private Materialization.TimePartType timePartType = Materialization.TimePartType.None;
}

View File

@@ -1,54 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;
import java.util.Arrays;
/** Column data types recognized by the translator, with a lenient string parser. */
public enum DataType {
    ARRAY("ARRAY"),
    MAP("MAP"),
    JSON("JSON"),
    VARCHAR("VARCHAR"),
    DATE("DATE"),
    BIGINT("BIGINT"),
    INT("INT"),
    DOUBLE("DOUBLE"),
    FLOAT("FLOAT"),
    DECIMAL("DECIMAL"),
    UNKNOWN("unknown");

    private final String type;

    DataType(String type) {
        this.type = type;
    }

    /** Returns the canonical label of this type. */
    public String getType() {
        return type;
    }

    /** Parses a label case-insensitively; unrecognized input maps to {@link #UNKNOWN}. */
    public static DataType of(String type) {
        return Arrays.stream(values())
                .filter(candidate -> candidate.type.equalsIgnoreCase(type))
                .findFirst()
                .orElse(UNKNOWN);
    }

    /** True for composite (non-scalar) types: ARRAY, MAP and JSON. */
    public boolean isObject() {
        return this == ARRAY || this == MAP || this == JSON;
    }

    /** True only for the ARRAY type. */
    public boolean isArray() {
        return this == ARRAY;
    }
}

View File

@@ -1,28 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;
import com.tencent.supersonic.headless.api.pojo.DimensionTimeTypeParams;
import lombok.Builder;
import lombok.Data;
import java.util.List;
import java.util.Map;
/** A dimension defined on a data model (time, categorical, array-typed, ...). */
@Data
@Builder
public class Dimension implements SemanticItem {
    String name;
    private String owners;
    // Dimension kind; "time" marks time dimensions (compared via Constants.DIMENSION_TYPE_TIME).
    private String type;
    // SQL expression the dimension is computed from.
    private String expr;
    // Extra settings for time dimensions — presumably granularity/format; confirm.
    private DimensionTimeTypeParams dimensionTimeTypeParams;
    // Underlying column data type; UNKNOWN when not declared.
    private DataType dataType = DataType.UNKNOWN;
    private String bizName;
    private List<String> defaultValues;
    // Free-form extension properties, e.g. the array delimiter key (DIMENSION_DELIMITER).
    private Map<String, Object> ext;

    @Override
    public String getName() {
        return name;
    }
}

View File

@@ -1,20 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
/** An identifier (key) column of a data model. */
@Data
@AllArgsConstructor
@NoArgsConstructor
public class Identify {
    // NOTE(review): this enum is not referenced by the String-typed `type` field below —
    // confirm whether `type` is intended to hold these values.
    public enum Type {
        PRIMARY, FOREIGN
    }

    private String name;
    // primary or foreign
    private String type;
}

View File

@@ -1,46 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;
import lombok.Builder;
import lombok.Data;
import java.util.ArrayList;
import java.util.List;
/** Describes a materialized dataset and how its data is partitioned over time. */
@Data
@Builder
public class Materialization {
    public enum TimePartType {
        /**
         * partition time type 1 - FULL, not use partition 2 - PARTITION , use time list 3 - ZIPPER,
         * use [startDate, endDate] range time
         */
        FULL("FULL"), PARTITION("PARTITION"), ZIPPER("ZIPPER"), None("");

        private String name;

        TimePartType(String name) {
            this.name = name;
        }

        /** Case-insensitive lookup; unknown names fall back to {@link #None}. */
        public static TimePartType of(String name) {
            for (TimePartType typeEnum : TimePartType.values()) {
                if (typeEnum.name.equalsIgnoreCase(name)) {
                    return typeEnum;
                }
            }
            return TimePartType.None;
        }
    }

    private TimePartType timePartType;
    // Target table the materialized data is written to.
    private String destinationTable;
    // Date/partition column info — presumably serialized config; confirm format.
    private String dateInfo;
    // Entity identifiers covered — presumably serialized config; confirm format.
    private String entities;
    private Long modelId;
    // Id of the database the materialization lives in.
    private Long dataBase;
    private Long materializationId;
    private Integer level;
    private List<MaterializationElement> dimensions = new ArrayList<>();
    private List<MaterializationElement> metrics = new ArrayList<>();
}

View File

@@ -1,13 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;
import lombok.Builder;
import lombok.Data;
import java.util.List;
/** A single dimension/metric entry of a materialization, with the time ranges it covers. */
@Data
@Builder
public class MaterializationElement {
    // Time ranges for which this element is materialized.
    private List<TimeRange> timeRangeList;
    private String name;
}

View File

@@ -1,26 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
/** A measure declared on a data model: a SQL expression plus its aggregation function. */
@Data
@AllArgsConstructor
@NoArgsConstructor
@Builder
public class Measure {
    private String name;
    // sum max min avg count distinct
    private String agg;
    // SQL expression the measure aggregates over.
    private String expr;
    // Optional filter expression applied when the measure is used (parsed into view filters).
    private String constraint;
    private String alias;
    // Presumably a marker for auto-created metrics — confirm expected values.
    private String createMetric;
}

View File

@@ -1,19 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;
import lombok.Data;
import java.util.List;
/** A semantic metric: named, owned, and defined by its type parameters. */
@Data
public class Metric implements SemanticItem {
    private String name;
    private List<String> owners;
    private String type;
    // How the metric is defined (its constituent measures/fields and combining expression).
    private MetricTypeParams metricTypeParams;

    @Override
    public String getName() {
        return name;
    }
}

View File

@@ -1,15 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;
import lombok.Data;
import java.util.List;
/** Definition parameters of a metric: the measures/metrics/fields it is built from. */
@Data
public class MetricTypeParams {
    private List<Measure> measures;
    private List<Measure> metrics;
    private List<Measure> fields;
    // True when the metric is defined directly over raw fields rather than measures.
    private boolean isFieldMetric = false;
    // Combining expression over the constituents.
    private String expr;
}

View File

@@ -1,7 +0,0 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;
/** Common contract for named, typed semantic objects (dimensions, metrics, ...). */
public interface SemanticItem {
    /** The object's name, used for case-insensitive lookup throughout the translator. */
    String getName();

    /** The object's type/category string. */
    String getType();
}

View File

@@ -29,7 +29,7 @@ public class ComponentFactory {
initQueryOptimizer(); initQueryOptimizer();
initQueryExecutors(); initQueryExecutors();
initQueryAccelerators(); initQueryAccelerators();
initQueryParsers(); initQueryParser();
initQueryCache(); initQueryCache();
} }
@@ -55,8 +55,8 @@ public class ComponentFactory {
} }
public static List<QueryParser> getQueryParsers() { public static List<QueryParser> getQueryParsers() {
if (queryParsers.isEmpty()) { if (queryParsers == null) {
initQueryParsers(); initQueryParser();
} }
return queryParsers; return queryParsers;
} }
@@ -82,14 +82,18 @@ public class ComponentFactory {
} }
private static void initQueryExecutors() { private static void initQueryExecutors() {
// queryExecutors.add(ContextUtils.getContext().getBean("JdbcExecutor",
// JdbcExecutor.class));
init(QueryExecutor.class, queryExecutors); init(QueryExecutor.class, queryExecutors);
} }
private static void initQueryAccelerators() { private static void initQueryAccelerators() {
// queryExecutors.add(ContextUtils.getContext().getBean("JdbcExecutor",
// JdbcExecutor.class));
init(QueryAccelerator.class, queryAccelerators); init(QueryAccelerator.class, queryAccelerators);
} }
private static void initQueryParsers() { private static void initQueryParser() {
init(QueryParser.class, queryParsers); init(QueryParser.class, queryParsers);
} }

View File

@@ -1,7 +1,5 @@
package com.tencent.supersonic.headless.core.utils; package com.tencent.supersonic.headless.core.utils;
import com.tencent.supersonic.common.jsqlparser.SqlReplaceHelper;
import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.common.pojo.Aggregator; import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.common.pojo.DateConf; import com.tencent.supersonic.common.pojo.DateConf;
import com.tencent.supersonic.common.pojo.ItemDateResp; import com.tencent.supersonic.common.pojo.ItemDateResp;
@@ -10,13 +8,7 @@ import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.common.util.DateModeUtils; import com.tencent.supersonic.common.util.DateModeUtils;
import com.tencent.supersonic.common.util.SqlFilterUtils; import com.tencent.supersonic.common.util.SqlFilterUtils;
import com.tencent.supersonic.common.util.StringUtil; import com.tencent.supersonic.common.util.StringUtil;
import com.tencent.supersonic.headless.api.pojo.Measure;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.api.pojo.enums.MetricDefineType;
import com.tencent.supersonic.headless.api.pojo.request.QueryStructReq; import com.tencent.supersonic.headless.api.pojo.request.QueryStructReq;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import com.tencent.supersonic.headless.core.config.ExecutorConfig; import com.tencent.supersonic.headless.core.config.ExecutorConfig;
import com.tencent.supersonic.headless.core.pojo.StructQuery; import com.tencent.supersonic.headless.core.pojo.StructQuery;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
@@ -31,7 +23,8 @@ import java.time.format.DateTimeFormatter;
import java.util.*; import java.util.*;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import static com.tencent.supersonic.common.pojo.Constants.*; import static com.tencent.supersonic.common.pojo.Constants.DAY_FORMAT;
import static com.tencent.supersonic.common.pojo.Constants.JOIN_UNDERLINE;
/** tools functions to analyze queryStructReq */ /** tools functions to analyze queryStructReq */
@Component @Component
@@ -148,7 +141,12 @@ public class SqlGenerateUtils {
String whereClauseFromFilter = String whereClauseFromFilter =
sqlFilterUtils.getWhereClause(structQuery.getDimensionFilters()); sqlFilterUtils.getWhereClause(structQuery.getDimensionFilters());
String whereFromDate = getDateWhereClause(structQuery.getDateInfo(), itemDateResp); String whereFromDate = getDateWhereClause(structQuery.getDateInfo(), itemDateResp);
return mergeDateWhereClause(structQuery, whereClauseFromFilter, whereFromDate); String mergedWhere =
mergeDateWhereClause(structQuery, whereClauseFromFilter, whereFromDate);
if (StringUtils.isNotBlank(mergedWhere)) {
mergedWhere = "where " + mergedWhere;
}
return mergedWhere;
} }
private String mergeDateWhereClause(StructQuery structQuery, String whereClauseFromFilter, private String mergeDateWhereClause(StructQuery structQuery, String whereClauseFromFilter,
@@ -255,87 +253,4 @@ public class SqlGenerateUtils {
return true; return true;
} }
public String generateInternalMetricName(String modelBizName) {
return modelBizName + UNDERLINE + executorConfig.getInternalMetricNameSuffix();
}
public String generateDerivedMetric(final List<MetricSchemaResp> metricResps,
final Set<String> allFields, final Map<String, Measure> allMeasures,
final List<DimSchemaResp> dimensionResps, final String expression,
final MetricDefineType metricDefineType, AggOption aggOption,
Map<String, String> visitedMetric, Set<String> measures, Set<String> dimensions) {
Set<String> fields = SqlSelectHelper.getColumnFromExpr(expression);
if (!CollectionUtils.isEmpty(fields)) {
Map<String, String> replace = new HashMap<>();
for (String field : fields) {
switch (metricDefineType) {
case METRIC:
Optional<MetricSchemaResp> metricItem = metricResps.stream()
.filter(m -> m.getBizName().equalsIgnoreCase(field)).findFirst();
if (metricItem.isPresent()) {
if (visitedMetric.keySet().contains(field)) {
replace.put(field, visitedMetric.get(field));
break;
}
replace.put(field,
generateDerivedMetric(metricResps, allFields, allMeasures,
dimensionResps, getExpr(metricItem.get()),
metricItem.get().getMetricDefineType(), aggOption,
visitedMetric, measures, dimensions));
visitedMetric.put(field, replace.get(field));
}
break;
case MEASURE:
if (allMeasures.containsKey(field)) {
measures.add(field);
replace.put(field, getExpr(allMeasures.get(field), aggOption));
}
break;
case FIELD:
if (allFields.contains(field)) {
Optional<DimSchemaResp> dimensionItem = dimensionResps.stream()
.filter(d -> d.getBizName().equals(field)).findFirst();
if (dimensionItem.isPresent()) {
dimensions.add(field);
} else {
measures.add(field);
}
}
break;
default:
break;
}
}
if (!CollectionUtils.isEmpty(replace)) {
String expr = SqlReplaceHelper.replaceExpression(expression, replace);
log.debug("derived measure {}->{}", expression, expr);
return expr;
}
}
return expression;
}
public String getExpr(Measure measure, AggOption aggOption) {
if (AggOperatorEnum.COUNT_DISTINCT.getOperator().equalsIgnoreCase(measure.getAgg())) {
return AggOption.NATIVE.equals(aggOption) ? measure.getExpr()
: AggOperatorEnum.COUNT.getOperator() + " ( " + AggOperatorEnum.DISTINCT + " "
+ measure.getExpr() + " ) ";
}
return AggOption.NATIVE.equals(aggOption) ? measure.getExpr()
: measure.getAgg() + " ( " + measure.getExpr() + " ) ";
}
/**
 * Resolves the defining SQL expression of a metric according to its define type.
 *
 * <p>METRIC-defined and FIELD-defined metrics return the expression from their
 * respective parameter objects. A {@code null} define type, or any other value
 * (i.e. MEASURE), falls through to the measure-based definition; the aggregation
 * function is added elsewhere for that case.
 *
 * @param metricResp the metric whose expression is resolved
 * @return the metric's defining expression
 */
public String getExpr(MetricResp metricResp) {
    MetricDefineType defineType = metricResp.getMetricDefineType();
    if (MetricDefineType.METRIC.equals(defineType)) {
        return metricResp.getMetricDefineByMetricParams().getExpr();
    }
    if (MetricDefineType.FIELD.equals(defineType)) {
        return metricResp.getMetricDefineByFieldParams().getExpr();
    }
    // defineType is null or MEASURE: use the measure-based definition
    // (agg function is applied by the caller).
    return metricResp.getMetricDefineByMeasureParams().getExpr();
}
} }

View File

@@ -220,8 +220,9 @@ public class SqlUtils {
} }
public SqlUtils build() { public SqlUtils build() {
DatabaseResp database = DatabaseResp.builder().name(this.name).type(this.type) DatabaseResp database = DatabaseResp.builder().name(this.name)
.url(this.jdbcUrl).username(this.username).password(this.password).build(); .type(this.type.toUpperCase()).url(this.jdbcUrl).username(this.username)
.password(this.password).build();
SqlUtils sqlUtils = new SqlUtils(database); SqlUtils sqlUtils = new SqlUtils(database);
sqlUtils.jdbcDataSource = this.jdbcDataSource; sqlUtils.jdbcDataSource = this.jdbcDataSource;

View File

@@ -6,6 +6,7 @@ import com.tencent.supersonic.common.pojo.QueryColumn;
import com.tencent.supersonic.common.pojo.User; import com.tencent.supersonic.common.pojo.User;
import com.tencent.supersonic.common.pojo.enums.TaskStatusEnum; import com.tencent.supersonic.common.pojo.enums.TaskStatusEnum;
import com.tencent.supersonic.headless.api.pojo.DataSetSchema; import com.tencent.supersonic.headless.api.pojo.DataSetSchema;
import com.tencent.supersonic.headless.api.pojo.Dimension;
import com.tencent.supersonic.headless.api.pojo.MetaFilter; import com.tencent.supersonic.headless.api.pojo.MetaFilter;
import com.tencent.supersonic.headless.api.pojo.enums.SemanticType; import com.tencent.supersonic.headless.api.pojo.enums.SemanticType;
import com.tencent.supersonic.headless.api.pojo.request.*; import com.tencent.supersonic.headless.api.pojo.request.*;
@@ -30,6 +31,7 @@ import com.tencent.supersonic.headless.server.service.DataSetService;
import com.tencent.supersonic.headless.server.service.DimensionService; import com.tencent.supersonic.headless.server.service.DimensionService;
import com.tencent.supersonic.headless.server.service.MetricService; import com.tencent.supersonic.headless.server.service.MetricService;
import com.tencent.supersonic.headless.server.service.SchemaService; import com.tencent.supersonic.headless.server.service.SchemaService;
import com.tencent.supersonic.headless.server.utils.MetricDrillDownChecker;
import com.tencent.supersonic.headless.server.utils.QueryUtils; import com.tencent.supersonic.headless.server.utils.QueryUtils;
import com.tencent.supersonic.headless.server.utils.StatUtils; import com.tencent.supersonic.headless.server.utils.StatUtils;
import lombok.SneakyThrows; import lombok.SneakyThrows;
@@ -52,6 +54,7 @@ public class S2SemanticLayerService implements SemanticLayerService {
private final DataSetService dataSetService; private final DataSetService dataSetService;
private final SchemaService schemaService; private final SchemaService schemaService;
private final SemanticTranslator semanticTranslator; private final SemanticTranslator semanticTranslator;
private final MetricDrillDownChecker metricDrillDownChecker;
private final KnowledgeBaseService knowledgeBaseService; private final KnowledgeBaseService knowledgeBaseService;
private final MetricService metricService; private final MetricService metricService;
private final DimensionService dimensionService; private final DimensionService dimensionService;
@@ -61,6 +64,7 @@ public class S2SemanticLayerService implements SemanticLayerService {
public S2SemanticLayerService(StatUtils statUtils, QueryUtils queryUtils, public S2SemanticLayerService(StatUtils statUtils, QueryUtils queryUtils,
SemanticSchemaManager semanticSchemaManager, DataSetService dataSetService, SemanticSchemaManager semanticSchemaManager, DataSetService dataSetService,
SchemaService schemaService, SemanticTranslator semanticTranslator, SchemaService schemaService, SemanticTranslator semanticTranslator,
MetricDrillDownChecker metricDrillDownChecker,
KnowledgeBaseService knowledgeBaseService, MetricService metricService, KnowledgeBaseService knowledgeBaseService, MetricService metricService,
DimensionService dimensionService) { DimensionService dimensionService) {
this.statUtils = statUtils; this.statUtils = statUtils;
@@ -69,6 +73,7 @@ public class S2SemanticLayerService implements SemanticLayerService {
this.dataSetService = dataSetService; this.dataSetService = dataSetService;
this.schemaService = schemaService; this.schemaService = schemaService;
this.semanticTranslator = semanticTranslator; this.semanticTranslator = semanticTranslator;
this.metricDrillDownChecker = metricDrillDownChecker;
this.knowledgeBaseService = knowledgeBaseService; this.knowledgeBaseService = knowledgeBaseService;
this.metricService = metricService; this.metricService = metricService;
this.dimensionService = dimensionService; this.dimensionService = dimensionService;
@@ -115,6 +120,10 @@ public class S2SemanticLayerService implements SemanticLayerService {
QueryStatement queryStatement = buildQueryStatement(queryReq, user); QueryStatement queryStatement = buildQueryStatement(queryReq, user);
semanticTranslator.translate(queryStatement); semanticTranslator.translate(queryStatement);
// Check whether the dimensions of the metric drill-down are correct temporarily,
// add the abstraction of a validator later.
metricDrillDownChecker.checkQuery(queryStatement);
// 4.execute query // 4.execute query
SemanticQueryResp queryResp = null; SemanticQueryResp queryResp = null;
for (QueryExecutor queryExecutor : queryExecutors) { for (QueryExecutor queryExecutor : queryExecutors) {
@@ -198,6 +207,14 @@ public class S2SemanticLayerService implements SemanticLayerService {
ModelResp modelResp = modelResps.get(0); ModelResp modelResp = modelResps.get(0);
String sql = String.format("select distinct %s from %s where 1=1", dimensionResp.getName(), String sql = String.format("select distinct %s from %s where 1=1", dimensionResp.getName(),
modelResp.getName()); modelResp.getName());
List<Dimension> timeDims = modelResp.getTimeDimension();
if (CollectionUtils.isNotEmpty(timeDims)) {
sql = String.format("%s and %s >= '%s' and %s <= '%s'", sql,
queryDimValueReq.getDateInfo().getDateField(),
queryDimValueReq.getDateInfo().getStartDate(),
queryDimValueReq.getDateInfo().getDateField(),
queryDimValueReq.getDateInfo().getEndDate());
}
if (StringUtils.isNotBlank(queryDimValueReq.getValue())) { if (StringUtils.isNotBlank(queryDimValueReq.getValue())) {
sql += " AND " + queryDimValueReq.getBizName() + " LIKE '%" sql += " AND " + queryDimValueReq.getBizName() + " LIKE '%"
+ queryDimValueReq.getValue() + "%'"; + queryDimValueReq.getValue() + "%'";
@@ -269,10 +286,10 @@ public class S2SemanticLayerService implements SemanticLayerService {
if (Objects.nonNull(queryStatement) && Objects.nonNull(semanticQueryReq.getSqlInfo()) if (Objects.nonNull(queryStatement) && Objects.nonNull(semanticQueryReq.getSqlInfo())
&& StringUtils.isNotBlank(semanticQueryReq.getSqlInfo().getQuerySQL())) { && StringUtils.isNotBlank(semanticQueryReq.getSqlInfo().getQuerySQL())) {
queryStatement.setSql(semanticQueryReq.getSqlInfo().getQuerySQL()); queryStatement.setSql(semanticQueryReq.getSqlInfo().getQuerySQL());
queryStatement.setDataSetId(semanticQueryReq.getDataSetId());
queryStatement.setDataSetName(semanticQueryReq.getDataSetName());
queryStatement.setIsTranslated(true); queryStatement.setIsTranslated(true);
} }
queryStatement.setDataSetId(semanticQueryReq.getDataSetId());
queryStatement.setDataSetName(semanticQueryReq.getDataSetName());
return queryStatement; return queryStatement;
} }

View File

@@ -4,10 +4,7 @@ import com.tencent.supersonic.headless.api.pojo.*;
import com.tencent.supersonic.headless.api.pojo.enums.ModelDefineType; import com.tencent.supersonic.headless.api.pojo.enums.ModelDefineType;
import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp; import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
import com.tencent.supersonic.headless.api.pojo.response.ModelResp; import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
import com.tencent.supersonic.headless.server.pojo.yaml.DataModelYamlTpl; import com.tencent.supersonic.headless.server.pojo.yaml.*;
import com.tencent.supersonic.headless.server.pojo.yaml.DimensionYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.IdentifyYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.MeasureYamlTpl;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.BeanUtils; import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;

View File

@@ -1,14 +1,16 @@
package com.tencent.supersonic.headless.server.manager; package com.tencent.supersonic.headless.server.manager;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.tencent.supersonic.common.pojo.ModelRela; import com.tencent.supersonic.common.pojo.ModelRela;
import com.tencent.supersonic.common.pojo.enums.FilterOperatorEnum; import com.tencent.supersonic.common.pojo.enums.FilterOperatorEnum;
import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp; import com.tencent.supersonic.headless.api.pojo.*;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp; import com.tencent.supersonic.headless.api.pojo.enums.DimensionType;
import com.tencent.supersonic.headless.api.pojo.enums.MetricDefineType;
import com.tencent.supersonic.headless.api.pojo.response.*;
import com.tencent.supersonic.headless.core.pojo.JoinRelation; import com.tencent.supersonic.headless.core.pojo.JoinRelation;
import com.tencent.supersonic.headless.core.pojo.Ontology; import com.tencent.supersonic.headless.core.pojo.Ontology;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema; import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.*;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Materialization.TimePartType;
import com.tencent.supersonic.headless.server.pojo.yaml.*; import com.tencent.supersonic.headless.server.pojo.yaml.*;
import com.tencent.supersonic.headless.server.service.SchemaService; import com.tencent.supersonic.headless.server.service.SchemaService;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
@@ -32,6 +34,24 @@ public class SemanticSchemaManager {
public Ontology buildOntology(SemanticSchemaResp semanticSchemaResp) { public Ontology buildOntology(SemanticSchemaResp semanticSchemaResp) {
Ontology ontology = new Ontology(); Ontology ontology = new Ontology();
Map<String, List<MetricSchemaResp>> model2Metrics = Maps.newHashMap();
semanticSchemaResp.getMetrics().forEach(dim -> {
if (!model2Metrics.containsKey(dim.getModelBizName())) {
model2Metrics.put(dim.getModelBizName(), Lists.newArrayList());
}
model2Metrics.get(dim.getModelBizName()).add(dim);
});
ontology.setMetricMap(model2Metrics);
Map<String, List<DimSchemaResp>> model2Dimensions = Maps.newHashMap();
semanticSchemaResp.getDimensions().forEach(dim -> {
if (!model2Dimensions.containsKey(dim.getModelBizName())) {
model2Dimensions.put(dim.getModelBizName(), Lists.newArrayList());
}
model2Dimensions.get(dim.getModelBizName()).add(dim);
});
ontology.setDimensionMap(model2Dimensions);
Map<String, List<DimensionYamlTpl>> dimensionYamlTpls = new HashMap<>(); Map<String, List<DimensionYamlTpl>> dimensionYamlTpls = new HashMap<>();
List<DataModelYamlTpl> dataModelYamlTpls = new ArrayList<>(); List<DataModelYamlTpl> dataModelYamlTpls = new ArrayList<>();
List<MetricYamlTpl> metricYamlTpls = new ArrayList<>(); List<MetricYamlTpl> metricYamlTpls = new ArrayList<>();
@@ -45,25 +65,16 @@ public class SemanticSchemaManager {
getJoinRelation(semanticSchemaResp.getModelRelas(), modelIdName)); getJoinRelation(semanticSchemaResp.getModelRelas(), modelIdName));
} }
if (!dataModelYamlTpls.isEmpty()) { if (!dataModelYamlTpls.isEmpty()) {
Map<String, DataModel> dataModelMap = Map<String, ModelResp> dataModelMap =
dataModelYamlTpls.stream().map(SemanticSchemaManager::getDataModel).collect( dataModelYamlTpls.stream().map(SemanticSchemaManager::getDataModel).collect(
Collectors.toMap(DataModel::getName, item -> item, (k1, k2) -> k1)); Collectors.toMap(ModelResp::getName, item -> item, (k1, k2) -> k1));
ontology.setDataModelMap(dataModelMap); ontology.setModelMap(dataModelMap);
}
if (!dimensionYamlTpls.isEmpty()) {
Map<String, List<Dimension>> dimensionMap = new HashMap<>();
for (Map.Entry<String, List<DimensionYamlTpl>> entry : dimensionYamlTpls.entrySet()) {
dimensionMap.put(entry.getKey(), getDimensions(entry.getValue()));
}
ontology.setDimensionMap(dimensionMap);
}
if (!metricYamlTpls.isEmpty()) {
ontology.setMetrics(getMetrics(metricYamlTpls));
} }
return ontology; return ontology;
} }
public static List<Metric> getMetrics(final List<MetricYamlTpl> t) { public static List<MetricSchemaResp> getMetrics(final List<MetricYamlTpl> t) {
return getMetricsByMetricYamlTpl(t); return getMetricsByMetricYamlTpl(t);
} }
@@ -71,36 +82,34 @@ public class SemanticSchemaManager {
return getDimension(t); return getDimension(t);
} }
public static DataModel getDataModel(final DataModelYamlTpl d) { public static ModelResp getDataModel(final DataModelYamlTpl d) {
DataModel dataModel = DataModel.builder().id(d.getId()).modelId(d.getSourceId()) // ModelResp dataModel = ModelResp.builder()(d.getId()).modelId(d.getSourceId())
.type(d.getType()).sqlQuery(d.getSqlQuery()).name(d.getName()) // .type(d.getType()).sqlQuery(d.getSqlQuery()).name(d.getName())
.tableQuery(d.getTableQuery()).identifiers(getIdentify(d.getIdentifiers())) // .tableQuery(d.getTableQuery()).identifiers(getIdentify(d.getIdentifiers()))
.measures(getMeasureParams(d.getMeasures())) // .measures(getMeasureParams(d.getMeasures()))
.dimensions(getDimensions(d.getDimensions())).build(); // .dimensions(getDimensions(d.getDimensions())).build();
dataModel.setAggTime(getDataModelAggTime(dataModel.getDimensions())); ModelResp dataModel = new ModelResp();
if (Objects.nonNull(d.getModelSourceTypeEnum())) { dataModel.setId(d.getId());
dataModel.setTimePartType(TimePartType.of(d.getModelSourceTypeEnum().name())); dataModel.setName(d.getName());
} ModelDetail modelDetail = new ModelDetail();
dataModel.setModelDetail(modelDetail);
modelDetail.setDbType(d.getType());
modelDetail.setSqlQuery(d.getSqlQuery());
modelDetail.setTableQuery(d.getTableQuery());
modelDetail.getIdentifiers().addAll(getIdentify(d.getIdentifiers()));
modelDetail.getMeasures().addAll(getMeasureParams(d.getMeasures()));
modelDetail.getDimensions().addAll(getDimensions(d.getDimensions()));
return dataModel; return dataModel;
} }
private static String getDataModelAggTime(List<Dimension> dimensions) { private static List<MetricSchemaResp> getMetricsByMetricYamlTpl(
Optional<Dimension> timeDimension = dimensions.stream() List<MetricYamlTpl> metricYamlTpls) {
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType())) List<MetricSchemaResp> metrics = new ArrayList<>();
.findFirst();
if (timeDimension.isPresent()
&& Objects.nonNull(timeDimension.get().getDimensionTimeTypeParams())) {
return timeDimension.get().getDimensionTimeTypeParams().getTimeGranularity();
}
return Constants.DIMENSION_TYPE_TIME_GRANULARITY_NONE;
}
private static List<Metric> getMetricsByMetricYamlTpl(List<MetricYamlTpl> metricYamlTpls) {
List<Metric> metrics = new ArrayList<>();
for (MetricYamlTpl metricYamlTpl : metricYamlTpls) { for (MetricYamlTpl metricYamlTpl : metricYamlTpls) {
Metric metric = new Metric(); MetricSchemaResp metric = new MetricSchemaResp();
metric.setMetricTypeParams(getMetricTypeParams(metricYamlTpl.getTypeParams())); fillMetricTypeParams(metric, metricYamlTpl.getTypeParams());
metric.setOwners(metricYamlTpl.getOwners());
metric.setType(metricYamlTpl.getType()); metric.setType(metricYamlTpl.getType());
metric.setName(metricYamlTpl.getName()); metric.setName(metricYamlTpl.getName());
metrics.add(metric); metrics.add(metric);
@@ -108,55 +117,50 @@ public class SemanticSchemaManager {
return metrics; return metrics;
} }
private static MetricTypeParams getMetricTypeParams( private static void fillMetricTypeParams(MetricSchemaResp metric,
MetricTypeParamsYamlTpl metricTypeParamsYamlTpl) { MetricTypeParamsYamlTpl metricTypeParamsYamlTpl) {
MetricTypeParams metricTypeParams = new MetricTypeParams();
metricTypeParams.setExpr(metricTypeParamsYamlTpl.getExpr());
metricTypeParams.setFieldMetric(false);
if (!CollectionUtils.isEmpty(metricTypeParamsYamlTpl.getMeasures())) { if (!CollectionUtils.isEmpty(metricTypeParamsYamlTpl.getMeasures())) {
metricTypeParams.setMeasures(getMeasureParams(metricTypeParamsYamlTpl.getMeasures())); MetricDefineByMeasureParams params = new MetricDefineByMeasureParams();
params.setMeasures(getMeasureParams(metricTypeParamsYamlTpl.getMeasures()));
metric.setMetricDefinition(MetricDefineType.MEASURE, params);
} else if (!CollectionUtils.isEmpty(metricTypeParamsYamlTpl.getMetrics())) {
MetricDefineByMetricParams params = new MetricDefineByMetricParams();
params.setMetrics(getMetricParams(metricTypeParamsYamlTpl.getMetrics()));
params.setExpr(metricTypeParamsYamlTpl.getExpr());
metric.setMetricDefinition(MetricDefineType.METRIC, params);
} else if (!CollectionUtils.isEmpty(metricTypeParamsYamlTpl.getFields())) {
MetricDefineByFieldParams params = new MetricDefineByFieldParams();
params.setExpr(metricTypeParamsYamlTpl.getExpr());
params.setFields(getFieldParams(metricTypeParamsYamlTpl.getFields()));
metric.setMetricDefinition(MetricDefineType.FIELD, params);
} }
if (!CollectionUtils.isEmpty(metricTypeParamsYamlTpl.getMetrics())) {
metricTypeParams.setMeasures(getMetricParams(metricTypeParamsYamlTpl.getMetrics()));
metricTypeParams.setExpr(metricTypeParams.getMeasures().get(0).getExpr());
metricTypeParams.setFieldMetric(true);
}
if (!CollectionUtils.isEmpty(metricTypeParamsYamlTpl.getFields())) {
metricTypeParams.setMeasures(getFieldParams(metricTypeParamsYamlTpl.getFields()));
metricTypeParams.setExpr(metricTypeParams.getMeasures().get(0).getExpr());
metricTypeParams.setFieldMetric(true);
} }
return metricTypeParams; private static List<FieldParam> getFieldParams(List<FieldParamYamlTpl> fieldParamYamlTpls) {
} List<FieldParam> fields = new ArrayList<>();
private static List<Measure> getFieldParams(List<FieldParamYamlTpl> fieldParamYamlTpls) {
List<Measure> measures = new ArrayList<>();
for (FieldParamYamlTpl fieldParamYamlTpl : fieldParamYamlTpls) { for (FieldParamYamlTpl fieldParamYamlTpl : fieldParamYamlTpls) {
Measure measure = new Measure(); FieldParam field = new FieldParam();
measure.setName(fieldParamYamlTpl.getFieldName()); field.setFieldName(fieldParamYamlTpl.getFieldName());
measure.setExpr(fieldParamYamlTpl.getFieldName()); fields.add(field);
measures.add(measure);
} }
return measures; return fields;
} }
private static List<Measure> getMetricParams(List<MetricParamYamlTpl> metricParamYamlTpls) { private static List<MetricParam> getMetricParams(List<MetricParamYamlTpl> metricParamYamlTpls) {
List<Measure> measures = new ArrayList<>(); List<MetricParam> metrics = new ArrayList<>();
for (MetricParamYamlTpl metricParamYamlTpl : metricParamYamlTpls) { for (MetricParamYamlTpl metricParamYamlTpl : metricParamYamlTpls) {
Measure measure = new Measure(); MetricParam metric = new MetricParam();
measure.setName(metricParamYamlTpl.getBizName()); metric.setBizName(metricParamYamlTpl.getBizName());
measure.setExpr(metricParamYamlTpl.getBizName()); metric.setId(metricParamYamlTpl.getId());
measures.add(measure); metrics.add(metric);
} }
return measures; return metrics;
} }
private static List<Measure> getMeasureParams(List<MeasureYamlTpl> measureYamlTpls) { private static List<Measure> getMeasureParams(List<MeasureYamlTpl> measureYamlTpls) {
List<Measure> measures = new ArrayList<>(); List<Measure> measures = new ArrayList<>();
for (MeasureYamlTpl measureYamlTpl : measureYamlTpls) { for (MeasureYamlTpl measureYamlTpl : measureYamlTpls) {
Measure measure = new Measure(); Measure measure = new Measure();
measure.setCreateMetric(measureYamlTpl.getCreateMetric());
measure.setExpr(measureYamlTpl.getExpr()); measure.setExpr(measureYamlTpl.getExpr());
measure.setAgg(measureYamlTpl.getAgg()); measure.setAgg(measureYamlTpl.getAgg());
measure.setName(measureYamlTpl.getName()); measure.setName(measureYamlTpl.getName());
@@ -170,28 +174,30 @@ public class SemanticSchemaManager {
private static List<Dimension> getDimension(List<DimensionYamlTpl> dimensionYamlTpls) { private static List<Dimension> getDimension(List<DimensionYamlTpl> dimensionYamlTpls) {
List<Dimension> dimensions = new ArrayList<>(); List<Dimension> dimensions = new ArrayList<>();
for (DimensionYamlTpl dimensionYamlTpl : dimensionYamlTpls) { for (DimensionYamlTpl dimensionYamlTpl : dimensionYamlTpls) {
Dimension dimension = Dimension.builder().build(); Dimension dimension = new Dimension();
dimension.setType(dimensionYamlTpl.getType()); if (Objects.nonNull(dimensionYamlTpl.getType())) {
dimension.setType(DimensionType.valueOf(dimensionYamlTpl.getType()));
}
dimension.setExpr(dimensionYamlTpl.getExpr()); dimension.setExpr(dimensionYamlTpl.getExpr());
dimension.setName(dimensionYamlTpl.getName()); dimension.setName(dimensionYamlTpl.getName());
dimension.setOwners(dimensionYamlTpl.getOwners());
dimension.setBizName(dimensionYamlTpl.getBizName()); dimension.setBizName(dimensionYamlTpl.getBizName());
dimension.setDefaultValues(dimensionYamlTpl.getDefaultValues()); dimension.setTypeParams(dimensionYamlTpl.getTypeParams());
if (Objects.nonNull(dimensionYamlTpl.getDataType())) {
dimension.setDataType(DataType.of(dimensionYamlTpl.getDataType().getType()));
}
if (Objects.isNull(dimension.getDataType())) {
dimension.setDataType(DataType.UNKNOWN);
}
if (Objects.nonNull(dimensionYamlTpl.getExt())) {
dimension.setExt(dimensionYamlTpl.getExt());
}
dimension.setDimensionTimeTypeParams(dimensionYamlTpl.getTypeParams());
dimensions.add(dimension); dimensions.add(dimension);
} }
return dimensions; return dimensions;
} }
private static DimensionTimeTypeParams getDimensionTimeTypeParams(
DimensionTimeTypeParams dimensionTimeTypeParamsTpl) {
DimensionTimeTypeParams dimensionTimeTypeParams = new DimensionTimeTypeParams();
if (dimensionTimeTypeParamsTpl != null) {
dimensionTimeTypeParams
.setTimeGranularity(dimensionTimeTypeParamsTpl.getTimeGranularity());
dimensionTimeTypeParams.setIsPrimary(dimensionTimeTypeParamsTpl.getIsPrimary());
}
return dimensionTimeTypeParams;
}
private static List<Identify> getIdentify(List<IdentifyYamlTpl> identifyYamlTpls) { private static List<Identify> getIdentify(List<IdentifyYamlTpl> identifyYamlTpls) {
List<Identify> identifies = new ArrayList<>(); List<Identify> identifies = new ArrayList<>();
for (IdentifyYamlTpl identifyYamlTpl : identifyYamlTpls) { for (IdentifyYamlTpl identifyYamlTpl : identifyYamlTpls) {
@@ -227,17 +233,18 @@ public class SemanticSchemaManager {
return joinRelations; return joinRelations;
} }
public static void update(S2CalciteSchema schema, List<Metric> metric) throws Exception { public static void update(S2CalciteSchema schema, List<MetricSchemaResp> metric)
throws Exception {
if (schema != null) { if (schema != null) {
updateMetric(metric, schema.getMetrics()); updateMetric(metric, schema.getMetrics());
} }
} }
public static void update(S2CalciteSchema schema, DataModel datasourceYamlTpl) public static void update(S2CalciteSchema schema, ModelResp datasourceYamlTpl)
throws Exception { throws Exception {
if (schema != null) { if (schema != null) {
String dataSourceName = datasourceYamlTpl.getName(); String dataSourceName = datasourceYamlTpl.getName();
Optional<Entry<String, DataModel>> datasourceYamlTplMap = Optional<Entry<String, ModelResp>> datasourceYamlTplMap =
schema.getDataModels().entrySet().stream() schema.getDataModels().entrySet().stream()
.filter(t -> t.getKey().equalsIgnoreCase(dataSourceName)).findFirst(); .filter(t -> t.getKey().equalsIgnoreCase(dataSourceName)).findFirst();
if (datasourceYamlTplMap.isPresent()) { if (datasourceYamlTplMap.isPresent()) {
@@ -249,31 +256,31 @@ public class SemanticSchemaManager {
} }
public static void update(S2CalciteSchema schema, String datasourceBizName, public static void update(S2CalciteSchema schema, String datasourceBizName,
List<Dimension> dimensionYamlTpls) throws Exception { List<DimSchemaResp> dimensionYamlTpls) throws Exception {
if (schema != null) { if (schema != null) {
Optional<Map.Entry<String, List<Dimension>>> datasourceYamlTplMap = schema Optional<Map.Entry<String, List<DimSchemaResp>>> datasourceYamlTplMap = schema
.getDimensions().entrySet().stream() .getDimensions().entrySet().stream()
.filter(t -> t.getKey().equalsIgnoreCase(datasourceBizName)).findFirst(); .filter(t -> t.getKey().equalsIgnoreCase(datasourceBizName)).findFirst();
if (datasourceYamlTplMap.isPresent()) { if (datasourceYamlTplMap.isPresent()) {
updateDimension(dimensionYamlTpls, datasourceYamlTplMap.get().getValue()); updateDimension(dimensionYamlTpls, datasourceYamlTplMap.get().getValue());
} else { } else {
List<Dimension> dimensions = new ArrayList<>(); List<DimSchemaResp> dimensions = new ArrayList<>();
updateDimension(dimensionYamlTpls, dimensions); updateDimension(dimensionYamlTpls, dimensions);
schema.getDimensions().put(datasourceBizName, dimensions); schema.getDimensions().put(datasourceBizName, dimensions);
} }
} }
} }
private static void updateDimension(List<Dimension> dimensionYamlTpls, private static void updateDimension(List<DimSchemaResp> dimensionYamlTpls,
List<Dimension> dimensions) { List<DimSchemaResp> dimensions) {
if (CollectionUtils.isEmpty(dimensionYamlTpls)) { if (CollectionUtils.isEmpty(dimensionYamlTpls)) {
return; return;
} }
Set<String> toAdd = Set<String> toAdd =
dimensionYamlTpls.stream().map(m -> m.getName()).collect(Collectors.toSet()); dimensionYamlTpls.stream().map(m -> m.getName()).collect(Collectors.toSet());
Iterator<Dimension> iterator = dimensions.iterator(); Iterator<DimSchemaResp> iterator = dimensions.iterator();
while (iterator.hasNext()) { while (iterator.hasNext()) {
Dimension cur = iterator.next(); DimSchemaResp cur = iterator.next();
if (toAdd.contains(cur.getName())) { if (toAdd.contains(cur.getName())) {
iterator.remove(); iterator.remove();
} }
@@ -281,15 +288,16 @@ public class SemanticSchemaManager {
dimensions.addAll(dimensionYamlTpls); dimensions.addAll(dimensionYamlTpls);
} }
private static void updateMetric(List<Metric> metricYamlTpls, List<Metric> metrics) { private static void updateMetric(List<MetricSchemaResp> metricYamlTpls,
List<MetricSchemaResp> metrics) {
if (CollectionUtils.isEmpty(metricYamlTpls)) { if (CollectionUtils.isEmpty(metricYamlTpls)) {
return; return;
} }
Set<String> toAdd = Set<String> toAdd =
metricYamlTpls.stream().map(m -> m.getName()).collect(Collectors.toSet()); metricYamlTpls.stream().map(m -> m.getName()).collect(Collectors.toSet());
Iterator<Metric> iterator = metrics.iterator(); Iterator<MetricSchemaResp> iterator = metrics.iterator();
while (iterator.hasNext()) { while (iterator.hasNext()) {
Metric cur = iterator.next(); MetricSchemaResp cur = iterator.next();
if (toAdd.contains(cur.getName())) { if (toAdd.contains(cur.getName())) {
iterator.remove(); iterator.remove();
} }

View File

@@ -15,6 +15,7 @@ public class QueryStatDO {
private String traceId; private String traceId;
private Long modelId; private Long modelId;
private Long dataSetId; private Long dataSetId;
@TableField("query_user")
private String queryUser; private String queryUser;
private String createdAt; private String createdAt;
/** corresponding type, such as sql, struct, etc */ /** corresponding type, such as sql, struct, etc */
@@ -27,7 +28,8 @@ public class QueryStatDO {
private String queryStructCmd; private String queryStructCmd;
@TableField("struct_cmd_md5") @TableField("struct_cmd_md5")
private String queryStructCmdMd5; private String queryStructCmdMd5;
private String querySql; @TableField("query_sql")
private String sql;
private String sqlMd5; private String sqlMd5;
private String queryEngine; private String queryEngine;
// private Long startTime; // private Long startTime;

View File

@@ -190,7 +190,7 @@ public class DatabaseServiceImpl extends ServiceImpl<DatabaseDOMapper, DatabaseD
private SemanticQueryResp queryWithColumns(String sql, DatabaseResp database) { private SemanticQueryResp queryWithColumns(String sql, DatabaseResp database) {
SemanticQueryResp queryResultWithColumns = new SemanticQueryResp(); SemanticQueryResp queryResultWithColumns = new SemanticQueryResp();
SqlUtils sqlUtils = this.sqlUtils.init(database); SqlUtils sqlUtils = this.sqlUtils.init(database);
log.info("query SQL: {}", sql); log.info("query SQL: {}", StringUtils.normalizeSpace(sql));
sqlUtils.queryInternal(sql, queryResultWithColumns); sqlUtils.queryInternal(sql, queryResultWithColumns);
return queryResultWithColumns; return queryResultWithColumns;
} }

View File

@@ -62,22 +62,19 @@ public class DimensionServiceImpl extends ServiceImpl<DimensionDOMapper, Dimensi
private DataSetService dataSetService; private DataSetService dataSetService;
private TagMetaService tagMetaService;
@Autowired @Autowired
private ApplicationEventPublisher eventPublisher; private ApplicationEventPublisher eventPublisher;
public DimensionServiceImpl(DimensionRepository dimensionRepository, ModelService modelService, public DimensionServiceImpl(DimensionRepository dimensionRepository, ModelService modelService,
AliasGenerateHelper aliasGenerateHelper, DatabaseService databaseService, AliasGenerateHelper aliasGenerateHelper, DatabaseService databaseService,
ModelRelaService modelRelaService, DataSetService dataSetService, ModelRelaService modelRelaService, DataSetService dataSetService) {
TagMetaService tagMetaService) {
this.modelService = modelService; this.modelService = modelService;
this.dimensionRepository = dimensionRepository; this.dimensionRepository = dimensionRepository;
this.aliasGenerateHelper = aliasGenerateHelper; this.aliasGenerateHelper = aliasGenerateHelper;
this.databaseService = databaseService; this.databaseService = databaseService;
this.modelRelaService = modelRelaService; this.modelRelaService = modelRelaService;
this.dataSetService = dataSetService; this.dataSetService = dataSetService;
this.tagMetaService = tagMetaService;
} }
@Override @Override

View File

@@ -240,7 +240,7 @@ public class DownloadServiceImpl implements DownloadService {
QueryStructReq queryStructReq = new QueryStructReq(); QueryStructReq queryStructReq = new QueryStructReq();
queryStructReq.setGroups(dimensionResps.stream().map(DimensionResp::getBizName) queryStructReq.setGroups(dimensionResps.stream().map(DimensionResp::getBizName)
.collect(Collectors.toList())); .collect(Collectors.toList()));
queryStructReq.getGroups().add(0, getTimeDimension(dateConf)); queryStructReq.getGroups().add(0, dateConf.getDateField());
Aggregator aggregator = new Aggregator(); Aggregator aggregator = new Aggregator();
aggregator.setColumn(metricResp.getBizName()); aggregator.setColumn(metricResp.getBizName());
queryStructReq.setAggregators(Lists.newArrayList(aggregator)); queryStructReq.setAggregators(Lists.newArrayList(aggregator));
@@ -250,10 +250,6 @@ public class DownloadServiceImpl implements DownloadService {
return queryStructReq; return queryStructReq;
} }
private String getTimeDimension(DateConf dateConf) {
return dateConf.getDateField();
}
private Map<String, List<MetricResp>> getMetricMap(List<MetricResp> metricResps) { private Map<String, List<MetricResp>> getMetricMap(List<MetricResp> metricResps) {
for (MetricResp metricResp : metricResps) { for (MetricResp metricResp : metricResps) {
List<DrillDownDimension> drillDownDimensions = List<DrillDownDimension> drillDownDimensions =

View File

@@ -8,7 +8,6 @@ import com.github.pagehelper.PageInfo;
import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.google.common.collect.Sets; import com.google.common.collect.Sets;
import com.tencent.supersonic.common.jsqlparser.SqlSelectFunctionHelper;
import com.tencent.supersonic.common.pojo.*; import com.tencent.supersonic.common.pojo.*;
import com.tencent.supersonic.common.pojo.enums.*; import com.tencent.supersonic.common.pojo.enums.*;
import com.tencent.supersonic.common.util.BeanMapper; import com.tencent.supersonic.common.util.BeanMapper;
@@ -59,15 +58,12 @@ public class MetricServiceImpl extends ServiceImpl<MetricDOMapper, MetricDO>
private ApplicationEventPublisher eventPublisher; private ApplicationEventPublisher eventPublisher;
private TagMetaService tagMetaService;
private ChatLayerService chatLayerService; private ChatLayerService chatLayerService;
public MetricServiceImpl(MetricRepository metricRepository, ModelService modelService, public MetricServiceImpl(MetricRepository metricRepository, ModelService modelService,
AliasGenerateHelper aliasGenerateHelper, CollectService collectService, AliasGenerateHelper aliasGenerateHelper, CollectService collectService,
DataSetService dataSetService, ApplicationEventPublisher eventPublisher, DataSetService dataSetService, ApplicationEventPublisher eventPublisher,
DimensionService dimensionService, TagMetaService tagMetaService, DimensionService dimensionService, @Lazy ChatLayerService chatLayerService) {
@Lazy ChatLayerService chatLayerService) {
this.metricRepository = metricRepository; this.metricRepository = metricRepository;
this.modelService = modelService; this.modelService = modelService;
this.aliasGenerateHelper = aliasGenerateHelper; this.aliasGenerateHelper = aliasGenerateHelper;
@@ -75,7 +71,6 @@ public class MetricServiceImpl extends ServiceImpl<MetricDOMapper, MetricDO>
this.collectService = collectService; this.collectService = collectService;
this.dataSetService = dataSetService; this.dataSetService = dataSetService;
this.dimensionService = dimensionService; this.dimensionService = dimensionService;
this.tagMetaService = tagMetaService;
this.chatLayerService = chatLayerService; this.chatLayerService = chatLayerService;
} }
@@ -386,7 +381,7 @@ public class MetricServiceImpl extends ServiceImpl<MetricDOMapper, MetricDO>
} }
} else if (MetricDefineType.MEASURE.equals(metricResp.getMetricDefineType())) { } else if (MetricDefineType.MEASURE.equals(metricResp.getMetricDefineType())) {
List<Measure> measures = metricResp.getMetricDefineByMeasureParams().getMeasures(); List<Measure> measures = metricResp.getMetricDefineByMeasureParams().getMeasures();
List<String> fieldNameDepended = measures.stream().map(Measure::getBizName) List<String> fieldNameDepended = measures.stream().map(Measure::getName)
// measure bizName = model bizName_fieldName // measure bizName = model bizName_fieldName
.map(name -> name.replaceFirst(metricResp.getModelBizName() + "_", "")) .map(name -> name.replaceFirst(metricResp.getModelBizName() + "_", ""))
.collect(Collectors.toList()); .collect(Collectors.toList());
@@ -660,28 +655,12 @@ public class MetricServiceImpl extends ServiceImpl<MetricDOMapper, MetricDO>
&& !metricResp.getDefaultAgg().isEmpty())) { && !metricResp.getDefaultAgg().isEmpty())) {
return metricResp.getDefaultAgg(); return metricResp.getDefaultAgg();
} }
// FIELD define will get from expr
if (MetricDefineType.FIELD.equals(metricResp.getMetricDefineType())) {
return SqlSelectFunctionHelper.getFirstAggregateFunctions(metricResp.getExpr());
}
// METRIC define will get from first metric
if (MetricDefineType.METRIC.equals(metricResp.getMetricDefineType())) {
if (!CollectionUtils.isEmpty(metricResp.getMetricDefineByMetricParams().getMetrics())) {
MetricParam metricParam =
metricResp.getMetricDefineByMetricParams().getMetrics().get(0);
MetricResp firstMetricResp =
getMetric(modelResp.getDomainId(), metricParam.getBizName());
if (Objects.nonNull(firstMetricResp)) {
return getDefaultAgg(firstMetricResp, modelResp);
}
return "";
}
}
// Measure define will get from first measure // Measure define will get from first measure
if (MetricDefineType.MEASURE.equals(metricResp.getMetricDefineType())) {
List<Measure> measures = modelResp.getModelDetail().getMeasures(); List<Measure> measures = modelResp.getModelDetail().getMeasures();
List<Measure> measureParams = metricResp.getMetricDefineByMeasureParams().getMeasures(); List<Measure> measureParams = metricResp.getMetricDefineByMeasureParams().getMeasures();
if (CollectionUtils.isEmpty(measureParams)) { if (CollectionUtils.isEmpty(measureParams)) {
return ""; return null;
} }
Measure firstMeasure = measureParams.get(0); Measure firstMeasure = measureParams.get(0);
@@ -690,7 +669,9 @@ public class MetricServiceImpl extends ServiceImpl<MetricDOMapper, MetricDO>
return measure.getAgg(); return measure.getAgg();
} }
} }
return ""; }
return null;
} }
@Override @Override
@@ -716,31 +697,31 @@ public class MetricServiceImpl extends ServiceImpl<MetricDOMapper, MetricDO>
queryMetricReq.setDateInfo(null); queryMetricReq.setDateInfo(null);
} }
// 4. set groups // 4. set groups
List<String> dimensionBizNames = dimensionResps.stream() List<String> dimensionNames = dimensionResps.stream()
.filter(entry -> modelCluster.getModelIds().contains(entry.getModelId())) .filter(entry -> modelCluster.getModelIds().contains(entry.getModelId()))
.filter(entry -> queryMetricReq.getDimensionNames().contains(entry.getName()) .filter(entry -> queryMetricReq.getDimensionNames().contains(entry.getName())
|| queryMetricReq.getDimensionNames().contains(entry.getBizName()) || queryMetricReq.getDimensionNames().contains(entry.getBizName())
|| queryMetricReq.getDimensionIds().contains(entry.getId())) || queryMetricReq.getDimensionIds().contains(entry.getId()))
.map(SchemaItem::getBizName).collect(Collectors.toList()); .map(SchemaItem::getName).collect(Collectors.toList());
QueryStructReq queryStructReq = new QueryStructReq(); QueryStructReq queryStructReq = new QueryStructReq();
DateConf dateInfo = queryMetricReq.getDateInfo(); DateConf dateInfo = queryMetricReq.getDateInfo();
if (Objects.nonNull(dateInfo) && dateInfo.isGroupByDate()) { if (Objects.nonNull(dateInfo) && dateInfo.isGroupByDate()) {
queryStructReq.getGroups().add(dateInfo.getDateField()); queryStructReq.getGroups().add(dateInfo.getDateField());
} }
if (!CollectionUtils.isEmpty(dimensionBizNames)) { if (!CollectionUtils.isEmpty(dimensionNames)) {
queryStructReq.getGroups().addAll(dimensionBizNames); queryStructReq.getGroups().addAll(dimensionNames);
} }
// 5. set aggregators // 5. set aggregators
List<String> metricBizNames = metricResps.stream() List<String> metricNames = metricResps.stream()
.filter(entry -> modelCluster.getModelIds().contains(entry.getModelId())) .filter(entry -> modelCluster.getModelIds().contains(entry.getModelId()))
.map(SchemaItem::getBizName).collect(Collectors.toList()); .map(SchemaItem::getName).collect(Collectors.toList());
if (CollectionUtils.isEmpty(metricBizNames)) { if (CollectionUtils.isEmpty(metricNames)) {
throw new IllegalArgumentException( throw new IllegalArgumentException(
"Invalid input parameters, unable to obtain valid metrics"); "Invalid input parameters, unable to obtain valid metrics");
} }
List<Aggregator> aggregators = new ArrayList<>(); List<Aggregator> aggregators = new ArrayList<>();
for (String metricBizName : metricBizNames) { for (String metricBizName : metricNames) {
Aggregator aggregator = new Aggregator(); Aggregator aggregator = new Aggregator();
aggregator.setColumn(metricBizName); aggregator.setColumn(metricBizName);
aggregators.add(aggregator); aggregators.add(aggregator);

View File

@@ -2,12 +2,15 @@ package com.tencent.supersonic.headless.server.service.impl;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.tencent.supersonic.auth.api.authentication.service.UserService; import com.tencent.supersonic.auth.api.authentication.service.UserService;
import com.tencent.supersonic.common.pojo.DataEvent;
import com.tencent.supersonic.common.pojo.DataItem;
import com.tencent.supersonic.common.pojo.ItemDateResp; import com.tencent.supersonic.common.pojo.ItemDateResp;
import com.tencent.supersonic.common.pojo.ModelRela; import com.tencent.supersonic.common.pojo.ModelRela;
import com.tencent.supersonic.common.pojo.User; import com.tencent.supersonic.common.pojo.User;
import com.tencent.supersonic.common.pojo.enums.AuthType; import com.tencent.supersonic.common.pojo.enums.AuthType;
import com.tencent.supersonic.common.pojo.enums.EventType; import com.tencent.supersonic.common.pojo.enums.EventType;
import com.tencent.supersonic.common.pojo.enums.StatusEnum; import com.tencent.supersonic.common.pojo.enums.StatusEnum;
import com.tencent.supersonic.common.pojo.enums.TypeEnums;
import com.tencent.supersonic.common.pojo.exception.InvalidArgumentException; import com.tencent.supersonic.common.pojo.exception.InvalidArgumentException;
import com.tencent.supersonic.common.util.JsonUtil; import com.tencent.supersonic.common.util.JsonUtil;
import com.tencent.supersonic.headless.api.pojo.DBColumn; import com.tencent.supersonic.headless.api.pojo.DBColumn;
@@ -51,7 +54,7 @@ import com.tencent.supersonic.headless.server.utils.NameCheckUtils;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.BeanUtils; import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.context.ApplicationEventPublisher;
import org.springframework.context.annotation.Lazy; import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.annotation.Transactional;
@@ -69,7 +72,10 @@ import java.util.Map;
import java.util.Set; import java.util.Set;
import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors; import java.util.stream.Collectors;
@Service @Service
@@ -94,13 +100,16 @@ public class ModelServiceImpl implements ModelService {
private final ModelRelaService modelRelaService; private final ModelRelaService modelRelaService;
private final ThreadPoolExecutor executor; private final ApplicationEventPublisher eventPublisher;
ExecutorService executor =
new ThreadPoolExecutor(0, 5, 5L, TimeUnit.SECONDS, new LinkedBlockingQueue<>());
public ModelServiceImpl(ModelRepository modelRepository, DatabaseService databaseService, public ModelServiceImpl(ModelRepository modelRepository, DatabaseService databaseService,
@Lazy DimensionService dimensionService, @Lazy MetricService metricService, @Lazy DimensionService dimensionService, @Lazy MetricService metricService,
DomainService domainService, UserService userService, DataSetService dataSetService, DomainService domainService, UserService userService, DataSetService dataSetService,
DateInfoRepository dateInfoRepository, ModelRelaService modelRelaService, DateInfoRepository dateInfoRepository, ModelRelaService modelRelaService,
@Qualifier("commonExecutor") ThreadPoolExecutor executor) { ApplicationEventPublisher eventPublisher) {
this.modelRepository = modelRepository; this.modelRepository = modelRepository;
this.databaseService = databaseService; this.databaseService = databaseService;
this.dimensionService = dimensionService; this.dimensionService = dimensionService;
@@ -110,17 +119,18 @@ public class ModelServiceImpl implements ModelService {
this.dataSetService = dataSetService; this.dataSetService = dataSetService;
this.dateInfoRepository = dateInfoRepository; this.dateInfoRepository = dateInfoRepository;
this.modelRelaService = modelRelaService; this.modelRelaService = modelRelaService;
this.executor = executor; this.eventPublisher = eventPublisher;
} }
@Override @Override
@Transactional @Transactional
public ModelResp createModel(ModelReq modelReq, User user) throws Exception { public ModelResp createModel(ModelReq modelReq, User user) throws Exception {
// checkParams(modelReq); checkParams(modelReq);
ModelDO modelDO = ModelConverter.convert(modelReq, user); ModelDO modelDO = ModelConverter.convert(modelReq, user);
modelRepository.createModel(modelDO); modelRepository.createModel(modelDO);
batchCreateDimension(modelDO, user); batchCreateDimension(modelDO, user);
batchCreateMetric(modelDO, user); batchCreateMetric(modelDO, user);
sendEvent(modelDO, EventType.ADD);
return ModelConverter.convert(modelDO); return ModelConverter.convert(modelDO);
} }
@@ -140,13 +150,14 @@ public class ModelServiceImpl implements ModelService {
@Override @Override
@Transactional @Transactional
public ModelResp updateModel(ModelReq modelReq, User user) throws Exception { public ModelResp updateModel(ModelReq modelReq, User user) throws Exception {
// checkParams(modelReq); checkParams(modelReq);
checkRelations(modelReq); checkRelations(modelReq);
ModelDO modelDO = modelRepository.getModelById(modelReq.getId()); ModelDO modelDO = modelRepository.getModelById(modelReq.getId());
ModelConverter.convert(modelDO, modelReq, user); ModelConverter.convert(modelDO, modelReq, user);
modelRepository.updateModel(modelDO); modelRepository.updateModel(modelDO);
batchCreateDimension(modelDO, user); batchCreateDimension(modelDO, user);
batchCreateMetric(modelDO, user); batchCreateMetric(modelDO, user);
sendEvent(modelDO, EventType.UPDATE);
return ModelConverter.convert(modelDO); return ModelConverter.convert(modelDO);
} }
@@ -607,4 +618,16 @@ public class ModelServiceImpl implements ModelService {
} }
return false; return false;
} }
private void sendEvent(ModelDO modelDO, EventType eventType) {
DataItem dataItem = getDataItem(modelDO);
eventPublisher.publishEvent(new DataEvent(this, Lists.newArrayList(dataItem), eventType));
}
private DataItem getDataItem(ModelDO modelDO) {
return DataItem.builder().id(modelDO.getId().toString()).name(modelDO.getName())
.bizName(modelDO.getBizName()).modelId(modelDO.getId().toString())
.domainId(modelDO.getDomainId().toString()).type(TypeEnums.DIMENSION).build();
}
} }

View File

@@ -2,17 +2,7 @@ package com.tencent.supersonic.headless.server.utils;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.tencent.supersonic.common.pojo.DimensionConstants; import com.tencent.supersonic.common.pojo.DimensionConstants;
import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum; import com.tencent.supersonic.headless.api.pojo.*;
import com.tencent.supersonic.common.util.DateUtils;
import com.tencent.supersonic.headless.api.pojo.DataSetSchema;
import com.tencent.supersonic.headless.api.pojo.DimValueMap;
import com.tencent.supersonic.headless.api.pojo.DimensionTimeTypeParams;
import com.tencent.supersonic.headless.api.pojo.RelateDimension;
import com.tencent.supersonic.headless.api.pojo.RelatedSchemaElement;
import com.tencent.supersonic.headless.api.pojo.SchemaElement;
import com.tencent.supersonic.headless.api.pojo.SchemaElementType;
import com.tencent.supersonic.headless.api.pojo.SchemaItem;
import com.tencent.supersonic.headless.api.pojo.SchemaValueMap;
import com.tencent.supersonic.headless.api.pojo.response.DataSetSchemaResp; import com.tencent.supersonic.headless.api.pojo.response.DataSetSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp; import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp; import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
@@ -21,11 +11,7 @@ import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.BeanUtils; import org.springframework.beans.BeanUtils;
import org.springframework.util.CollectionUtils; import org.springframework.util.CollectionUtils;
import java.util.ArrayList; import java.util.*;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors; import java.util.stream.Collectors;
public class DataSetSchemaBuilder { public class DataSetSchemaBuilder {

View File

@@ -29,10 +29,11 @@ public class ModelConverter {
public static ModelDO convert(ModelReq modelReq, User user) { public static ModelDO convert(ModelReq modelReq, User user) {
ModelDO modelDO = new ModelDO(); ModelDO modelDO = new ModelDO();
// ModelDetail modelDetail = createModelDetail(modelReq); // ModelDetail modelDetail = createModelDetail(modelReq);
ModelDetail modelDetail = modelReq.getModelDetail();
modelReq.createdBy(user.getName()); modelReq.createdBy(user.getName());
BeanMapper.mapper(modelReq, modelDO); BeanMapper.mapper(modelReq, modelDO);
modelDO.setStatus(StatusEnum.ONLINE.getCode()); modelDO.setStatus(StatusEnum.ONLINE.getCode());
modelDO.setModelDetail(JSONObject.toJSONString(modelReq.getModelDetail())); modelDO.setModelDetail(JSONObject.toJSONString(modelDetail));
modelDO.setDrillDownDimensions(JSONObject.toJSONString(modelReq.getDrillDownDimensions())); modelDO.setDrillDownDimensions(JSONObject.toJSONString(modelReq.getDrillDownDimensions()));
if (modelReq.getExt() != null) { if (modelReq.getExt() != null) {
modelDO.setExt(JSONObject.toJSONString(modelReq.getExt())); modelDO.setExt(JSONObject.toJSONString(modelReq.getExt()));
@@ -123,9 +124,7 @@ public class ModelConverter {
metricReq.setModelId(modelDO.getId()); metricReq.setModelId(modelDO.getId());
MetricDefineByMeasureParams exprTypeParams = new MetricDefineByMeasureParams(); MetricDefineByMeasureParams exprTypeParams = new MetricDefineByMeasureParams();
exprTypeParams.setExpr(measure.getExpr()); exprTypeParams.setExpr(measure.getExpr());
Measure measureParam = new Measure(); exprTypeParams.setMeasures(Lists.newArrayList(measure));
BeanMapper.mapper(measure, measureParam);
exprTypeParams.setMeasures(Lists.newArrayList(measureParam));
metricReq.setMetricDefineByMeasureParams(exprTypeParams); metricReq.setMetricDefineByMeasureParams(exprTypeParams);
metricReq.setMetricDefineType(MetricDefineType.MEASURE); metricReq.setMetricDefineType(MetricDefineType.MEASURE);
return metricReq; return metricReq;
@@ -165,11 +164,14 @@ public class ModelConverter {
getIdentifyType(fieldType).name(), columnSchema.getColumnName(), 1); getIdentifyType(fieldType).name(), columnSchema.getColumnName(), 1);
modelDetail.getIdentifiers().add(identify); modelDetail.getIdentifiers().add(identify);
} else if (FieldType.measure.equals(fieldType)) { } else if (FieldType.measure.equals(fieldType)) {
Measure measure = new Measure(columnSchema.getName(), columnSchema.getColumnName(), Measure measure = new Measure(columnSchema.getName(),
columnSchema.getAgg().getOperator(), 1); modelReq.getBizName() + "_" + columnSchema.getColumnName(),
columnSchema.getColumnName(), columnSchema.getAgg().getOperator(), 1);
modelDetail.getMeasures().add(measure); modelDetail.getMeasures().add(measure);
} else { } else {
Dimension dim = new Dimension(columnSchema.getName(), columnSchema.getColumnName(), Dimension dim = new Dimension(columnSchema.getName(),
modelReq.getBizName() + "_" + columnSchema.getColumnName(),
columnSchema.getColumnName(),
DimensionType.valueOf(columnSchema.getFiledType().name()), 1); DimensionType.valueOf(columnSchema.getFiledType().name()), 1);
modelDetail.getDimensions().add(dim); modelDetail.getDimensions().add(dim);
} }
@@ -264,14 +266,17 @@ public class ModelConverter {
private static ModelDetail createModelDetail(ModelReq modelReq) { private static ModelDetail createModelDetail(ModelReq modelReq) {
ModelDetail modelDetail = new ModelDetail(); ModelDetail modelDetail = new ModelDetail();
// List<Measure> measures = modelReq.getModelDetail().getMeasures(); List<Measure> measures = modelReq.getModelDetail().getMeasures();
// for (Measure measure : measures) { if (measures == null) {
// if (StringUtils.isBlank(measure.getBizName())) { measures = Lists.newArrayList();
// continue; }
// } for (Measure measure : measures) {
// measure.setExpr(measure.getBizName()); if (StringUtils.isBlank(measure.getBizName())) {
// measure.setBizName(String.format("%s_%", modelReq.getBizName(), measure.getExpr())); continue;
// } }
measure.setExpr(measure.getBizName());
measure.setBizName(String.format("%s_%s", modelReq.getBizName(), measure.getExpr()));
}
BeanMapper.mapper(modelReq.getModelDetail(), modelDetail); BeanMapper.mapper(modelReq.getModelDetail(), modelDetail);
return modelDetail; return modelDetail;
} }

View File

@@ -1,244 +1,230 @@
package com.tencent.supersonic.headless.server.calcite; package com.tencent.supersonic.headless.server.calcite;
import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.api.pojo.DimensionTimeTypeParams;
import com.tencent.supersonic.headless.api.pojo.response.SqlParserResp;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.calcite.SqlBuilder;
import com.tencent.supersonic.headless.server.manager.SemanticSchemaManager;
import com.tencent.supersonic.headless.server.pojo.yaml.*;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
@Slf4j @Slf4j
class HeadlessParserServiceTest { class HeadlessParserServiceTest {
//
public static SqlParserResp parser(S2CalciteSchema semanticSchema, OntologyQuery ontologyQuery, // public static SqlParserResp parser(S2CalciteSchema semanticSchema, OntologyQuery
boolean isAgg) { // ontologyQuery,
SqlParserResp sqlParser = new SqlParserResp(); // boolean isAgg) {
try { // SqlParserResp sqlParser = new SqlParserResp();
if (semanticSchema == null) { // try {
sqlParser.setErrMsg("headlessSchema not found"); // if (semanticSchema == null) {
return sqlParser; // sqlParser.setErrMsg("headlessSchema not found");
} // return sqlParser;
SqlBuilder aggBuilder = new SqlBuilder(semanticSchema); // }
QueryStatement queryStatement = new QueryStatement(); // SqlBuilder aggBuilder = new SqlBuilder(semanticSchema);
queryStatement.setOntologyQuery(ontologyQuery); // QueryStatement queryStatement = new QueryStatement();
String sql = aggBuilder.buildOntologySql(queryStatement); // queryStatement.setOntologyQuery(ontologyQuery);
queryStatement.setSql(sql); // String sql = aggBuilder.buildOntologySql(queryStatement);
EngineType engineType = semanticSchema.getOntology().getDatabaseType(); // queryStatement.setSql(sql);
sqlParser.setSql(aggBuilder.getSql(engineType)); // EngineType engineType = semanticSchema.getOntology().getDatabase().getType();
} catch (Exception e) { // sqlParser.setSql(aggBuilder.getSql(engineType));
sqlParser.setErrMsg(e.getMessage()); // } catch (Exception e) {
log.error("parser error metricQueryReq[{}] error [{}]", ontologyQuery, e); // sqlParser.setErrMsg(e.getMessage());
} // log.error("parser error metricQueryReq[{}] error [{}]", ontologyQuery, e);
return sqlParser; // }
} // return sqlParser;
// }
public void test() throws Exception { //
// public void test() throws Exception {
DataModelYamlTpl datasource = new DataModelYamlTpl(); //
datasource.setName("s2_pv_uv_statis"); // DataModelYamlTpl datasource = new DataModelYamlTpl();
datasource.setSourceId(1L); // datasource.setName("s2_pv_uv_statis");
datasource.setSqlQuery( // datasource.setSourceId(1L);
"SELECT imp_date, user_name,page,1 as pv, user_name as uv FROM s2_pv_uv_statis"); // datasource.setSqlQuery(
// "SELECT imp_date, user_name,page,1 as pv, user_name as uv FROM s2_pv_uv_statis");
MeasureYamlTpl measure = new MeasureYamlTpl(); //
measure.setAgg("sum"); // MeasureYamlTpl measure = new MeasureYamlTpl();
measure.setName("s2_pv_uv_statis_pv"); // measure.setAgg("sum");
measure.setExpr("pv"); // measure.setName("s2_pv_uv_statis_pv");
List<MeasureYamlTpl> measures = new ArrayList<>(); // measure.setExpr("pv");
measures.add(measure); // List<MeasureYamlTpl> measures = new ArrayList<>();
// measures.add(measure);
MeasureYamlTpl measure2 = new MeasureYamlTpl(); //
measure2.setAgg("count"); // MeasureYamlTpl measure2 = new MeasureYamlTpl();
measure2.setName("s2_pv_uv_statis_internal_cnt"); // measure2.setAgg("count");
measure2.setExpr("1"); // measure2.setName("s2_pv_uv_statis_internal_cnt");
measure2.setCreateMetric("true"); // measure2.setExpr("1");
measures.add(measure2); // measure2.setCreateMetric("true");
// measures.add(measure2);
MeasureYamlTpl measure3 = new MeasureYamlTpl(); //
measure3.setAgg("count"); // MeasureYamlTpl measure3 = new MeasureYamlTpl();
measure3.setName("s2_pv_uv_statis_uv"); // measure3.setAgg("count");
measure3.setExpr("uv"); // measure3.setName("s2_pv_uv_statis_uv");
measure3.setCreateMetric("true"); // measure3.setExpr("uv");
measures.add(measure3); // measure3.setCreateMetric("true");
// measures.add(measure3);
datasource.setMeasures(measures); //
// datasource.setMeasures(measures);
DimensionYamlTpl dimension = new DimensionYamlTpl(); //
dimension.setName("imp_date"); // DimensionYamlTpl dimension = new DimensionYamlTpl();
dimension.setExpr("imp_date"); // dimension.setName("imp_date");
dimension.setType("time"); // dimension.setExpr("imp_date");
DimensionTimeTypeParams dimensionTimeTypeParams = new DimensionTimeTypeParams(); // dimension.setType("time");
dimensionTimeTypeParams.setIsPrimary("true"); // DimensionTimeTypeParams dimensionTimeTypeParams = new DimensionTimeTypeParams();
dimensionTimeTypeParams.setTimeGranularity("day"); // dimensionTimeTypeParams.setIsPrimary("true");
dimension.setTypeParams(dimensionTimeTypeParams); // dimensionTimeTypeParams.setTimeGranularity("day");
List<DimensionYamlTpl> dimensions = new ArrayList<>(); // dimension.setTypeParams(dimensionTimeTypeParams);
dimensions.add(dimension); // List<DimensionYamlTpl> dimensions = new ArrayList<>();
// dimensions.add(dimension);
DimensionYamlTpl dimension2 = new DimensionYamlTpl(); //
dimension2.setName("sys_imp_date"); // DimensionYamlTpl dimension2 = new DimensionYamlTpl();
dimension2.setExpr("imp_date"); // dimension2.setName("sys_imp_date");
dimension2.setType("time"); // dimension2.setExpr("imp_date");
DimensionTimeTypeParams dimensionTimeTypeParams2 = new DimensionTimeTypeParams(); // dimension2.setType("time");
dimensionTimeTypeParams2.setIsPrimary("true"); // DimensionTimeTypeParams dimensionTimeTypeParams2 = new DimensionTimeTypeParams();
dimensionTimeTypeParams2.setTimeGranularity("day"); // dimensionTimeTypeParams2.setIsPrimary("true");
dimension2.setTypeParams(dimensionTimeTypeParams2); // dimensionTimeTypeParams2.setTimeGranularity("day");
dimensions.add(dimension2); // dimension2.setTypeParams(dimensionTimeTypeParams2);
// dimensions.add(dimension2);
DimensionYamlTpl dimension3 = new DimensionYamlTpl(); //
dimension3.setName("sys_imp_week"); // DimensionYamlTpl dimension3 = new DimensionYamlTpl();
dimension3.setExpr("to_monday(from_unixtime(unix_timestamp(imp_date), 'yyyy-MM-dd'))"); // dimension3.setName("sys_imp_week");
dimension3.setType("time"); // dimension3.setExpr("to_monday(from_unixtime(unix_timestamp(imp_date), 'yyyy-MM-dd'))");
DimensionTimeTypeParams dimensionTimeTypeParams3 = new DimensionTimeTypeParams(); // dimension3.setType("time");
dimensionTimeTypeParams3.setIsPrimary("true"); // DimensionTimeTypeParams dimensionTimeTypeParams3 = new DimensionTimeTypeParams();
dimensionTimeTypeParams3.setTimeGranularity("day"); // dimensionTimeTypeParams3.setIsPrimary("true");
dimension3.setTypeParams(dimensionTimeTypeParams3); // dimensionTimeTypeParams3.setTimeGranularity("day");
dimensions.add(dimension3); // dimension3.setTypeParams(dimensionTimeTypeParams3);
// dimensions.add(dimension3);
datasource.setDimensions(dimensions); //
// datasource.setDimensions(dimensions);
List<IdentifyYamlTpl> identifies = new ArrayList<>(); //
IdentifyYamlTpl identify = new IdentifyYamlTpl(); // List<IdentifyYamlTpl> identifies = new ArrayList<>();
identify.setName("user_name"); // IdentifyYamlTpl identify = new IdentifyYamlTpl();
identify.setType("primary"); // identify.setName("user_name");
identifies.add(identify); // identify.setType("primary");
datasource.setIdentifiers(identifies); // identifies.add(identify);
S2CalciteSchema semanticSchema = S2CalciteSchema.builder().build(); // datasource.setIdentifiers(identifies);
// S2CalciteSchema semanticSchema = S2CalciteSchema.builder().build();
SemanticSchemaManager.update(semanticSchema, //
SemanticSchemaManager.getDataModel(datasource)); // SemanticSchemaManager.update(semanticSchema,
// SemanticSchemaManager.getDataModel(datasource));
DimensionYamlTpl dimension1 = new DimensionYamlTpl(); //
dimension1.setExpr("page"); // DimensionYamlTpl dimension1 = new DimensionYamlTpl();
dimension1.setName("page"); // dimension1.setExpr("page");
dimension1.setType("categorical"); // dimension1.setName("page");
List<DimensionYamlTpl> dimensionYamlTpls = new ArrayList<>(); // dimension1.setType("categorical");
dimensionYamlTpls.add(dimension1); // List<DimensionYamlTpl> dimensionYamlTpls = new ArrayList<>();
// dimensionYamlTpls.add(dimension1);
SemanticSchemaManager.update(semanticSchema, "s2_pv_uv_statis", //
SemanticSchemaManager.getDimensions(dimensionYamlTpls)); // SemanticSchemaManager.update(semanticSchema, "s2_pv_uv_statis",
// SemanticSchemaManager.getDimensions(dimensionYamlTpls));
MetricYamlTpl metric1 = new MetricYamlTpl(); //
metric1.setName("pv"); // MetricYamlTpl metric1 = new MetricYamlTpl();
metric1.setType("expr"); // metric1.setName("pv");
MetricTypeParamsYamlTpl metricTypeParams = new MetricTypeParamsYamlTpl(); // metric1.setType("expr");
List<MeasureYamlTpl> measures1 = new ArrayList<>(); // MetricTypeParamsYamlTpl metricTypeParams = new MetricTypeParamsYamlTpl();
MeasureYamlTpl measure1 = new MeasureYamlTpl(); // List<MeasureYamlTpl> measures1 = new ArrayList<>();
measure1.setName("s2_pv_uv_statis_pv"); // MeasureYamlTpl measure1 = new MeasureYamlTpl();
measures1.add(measure1); // measure1.setName("s2_pv_uv_statis_pv");
metricTypeParams.setMeasures(measures1); // measures1.add(measure1);
metricTypeParams.setExpr("s2_pv_uv_statis_pv"); // metricTypeParams.setMeasures(measures1);
metric1.setTypeParams(metricTypeParams); // metricTypeParams.setExpr("s2_pv_uv_statis_pv");
List<MetricYamlTpl> metric = new ArrayList<>(); // metric1.setTypeParams(metricTypeParams);
metric.add(metric1); // List<MetricYamlTpl> metric = new ArrayList<>();
// metric.add(metric1);
MetricYamlTpl metric2 = new MetricYamlTpl(); //
metric2.setName("uv"); // MetricYamlTpl metric2 = new MetricYamlTpl();
metric2.setType("expr"); // metric2.setName("uv");
MetricTypeParamsYamlTpl metricTypeParams1 = new MetricTypeParamsYamlTpl(); // metric2.setType("expr");
List<MeasureYamlTpl> measures2 = new ArrayList<>(); // MetricTypeParamsYamlTpl metricTypeParams1 = new MetricTypeParamsYamlTpl();
MeasureYamlTpl measure4 = new MeasureYamlTpl(); // List<MeasureYamlTpl> measures2 = new ArrayList<>();
measure4.setName("s2_pv_uv_statis_uv"); // MeasureYamlTpl measure4 = new MeasureYamlTpl();
measures2.add(measure4); // measure4.setName("s2_pv_uv_statis_uv");
metricTypeParams1.setMeasures(measures2); // measures2.add(measure4);
metricTypeParams1.setExpr("s2_pv_uv_statis_uv"); // metricTypeParams1.setMeasures(measures2);
metric2.setTypeParams(metricTypeParams1); // metricTypeParams1.setExpr("s2_pv_uv_statis_uv");
metric.add(metric2); // metric2.setTypeParams(metricTypeParams1);
// metric.add(metric2);
// HeadlessSchemaManager.update(headlessSchema, HeadlessSchemaManager.getMetrics(metric)); //
// // HeadlessSchemaManager.update(headlessSchema, HeadlessSchemaManager.getMetrics(metric));
OntologyQuery metricCommand = new OntologyQuery(); //
metricCommand.setDimensions(new HashSet<>(Arrays.asList("sys_imp_date"))); // OntologyQuery metricCommand = new OntologyQuery();
metricCommand.setMetrics(new HashSet<>(Arrays.asList("pv"))); // metricCommand.setDimensions(new HashSet<>(Arrays.asList("sys_imp_date")));
metricCommand.setWhere( // metricCommand.setMetrics(new HashSet<>(Arrays.asList("pv")));
"user_name = 'ab' and (sys_imp_date >= '2023-02-28' and sys_imp_date <= '2023-05-28') "); // metricCommand.setWhere(
metricCommand.setLimit(1000L); // "user_name = 'ab' and (sys_imp_date >= '2023-02-28' and sys_imp_date <= '2023-05-28') ");
List<ColumnOrder> orders = new ArrayList<>(); // metricCommand.setLimit(1000L);
orders.add(ColumnOrder.buildDesc("sys_imp_date")); // List<ColumnOrder> orders = new ArrayList<>();
metricCommand.setOrder(orders); // orders.add(ColumnOrder.buildDesc("sys_imp_date"));
System.out.println(parser(semanticSchema, metricCommand, true)); // metricCommand.setOrder(orders);
// System.out.println(parser(semanticSchema, metricCommand, true));
addDepartment(semanticSchema); //
// addDepartment(semanticSchema);
OntologyQuery metricCommand2 = new OntologyQuery(); //
metricCommand2.setDimensions(new HashSet<>(Arrays.asList("sys_imp_date", // OntologyQuery metricCommand2 = new OntologyQuery();
"user_name__department", "user_name", "user_name__page"))); // metricCommand2.setDimensions(new HashSet<>(Arrays.asList("sys_imp_date",
metricCommand2.setMetrics(new HashSet<>(Arrays.asList("pv"))); // "user_name__department", "user_name", "user_name__page")));
metricCommand2.setWhere( // metricCommand2.setMetrics(new HashSet<>(Arrays.asList("pv")));
"user_name = 'ab' and (sys_imp_date >= '2023-02-28' and sys_imp_date <= '2023-05-28') "); // metricCommand2.setWhere(
metricCommand2.setLimit(1000L); // "user_name = 'ab' and (sys_imp_date >= '2023-02-28' and sys_imp_date <= '2023-05-28') ");
List<ColumnOrder> orders2 = new ArrayList<>(); // metricCommand2.setLimit(1000L);
orders2.add(ColumnOrder.buildDesc("sys_imp_date")); // List<ColumnOrder> orders2 = new ArrayList<>();
metricCommand2.setOrder(orders2); // orders2.add(ColumnOrder.buildDesc("sys_imp_date"));
System.out.println(parser(semanticSchema, metricCommand2, true)); // metricCommand2.setOrder(orders2);
} // System.out.println(parser(semanticSchema, metricCommand2, true));
// }
private static void addDepartment(S2CalciteSchema semanticSchema) { //
DataModelYamlTpl datasource = new DataModelYamlTpl(); // private static void addDepartment(S2CalciteSchema semanticSchema) {
datasource.setName("user_department"); // DataModelYamlTpl datasource = new DataModelYamlTpl();
datasource.setSourceId(1L); // datasource.setName("user_department");
datasource.setSqlQuery("SELECT imp_date,user_name,department FROM s2_user_department"); // datasource.setSourceId(1L);
// datasource.setSqlQuery("SELECT imp_date,user_name,department FROM s2_user_department");
MeasureYamlTpl measure = new MeasureYamlTpl(); //
measure.setAgg("count"); // MeasureYamlTpl measure = new MeasureYamlTpl();
measure.setName("user_department_internal_cnt"); // measure.setAgg("count");
measure.setCreateMetric("true"); // measure.setName("user_department_internal_cnt");
measure.setExpr("1"); // measure.setCreateMetric("true");
List<MeasureYamlTpl> measures = new ArrayList<>(); // measure.setExpr("1");
measures.add(measure); // List<MeasureYamlTpl> measures = new ArrayList<>();
// measures.add(measure);
datasource.setMeasures(measures); //
// datasource.setMeasures(measures);
DimensionYamlTpl dimension = new DimensionYamlTpl(); //
dimension.setName("sys_imp_date"); // DimensionYamlTpl dimension = new DimensionYamlTpl();
dimension.setExpr("imp_date"); // dimension.setName("sys_imp_date");
dimension.setType("time"); // dimension.setExpr("imp_date");
DimensionTimeTypeParams dimensionTimeTypeParams = new DimensionTimeTypeParams(); // dimension.setType("time");
dimensionTimeTypeParams.setIsPrimary("true"); // DimensionTimeTypeParamsTpl dimensionTimeTypeParams = new DimensionTimeTypeParamsTpl();
dimensionTimeTypeParams.setTimeGranularity("day"); // dimensionTimeTypeParams.setIsPrimary("true");
dimension.setTypeParams(dimensionTimeTypeParams); // dimensionTimeTypeParams.setTimeGranularity("day");
List<DimensionYamlTpl> dimensions = new ArrayList<>(); // dimension.setTypeParams(dimensionTimeTypeParams);
dimensions.add(dimension); // List<DimensionYamlTpl> dimensions = new ArrayList<>();
// dimensions.add(dimension);
DimensionYamlTpl dimension3 = new DimensionYamlTpl(); //
dimension3.setName("sys_imp_week"); // DimensionYamlTpl dimension3 = new DimensionYamlTpl();
dimension3.setExpr("to_monday(from_unixtime(unix_timestamp(imp_date), 'yyyy-MM-dd'))"); // dimension3.setName("sys_imp_week");
dimension3.setType("time"); // dimension3.setExpr("to_monday(from_unixtime(unix_timestamp(imp_date), 'yyyy-MM-dd'))");
DimensionTimeTypeParams dimensionTimeTypeParams3 = new DimensionTimeTypeParams(); // dimension3.setType("time");
dimensionTimeTypeParams3.setIsPrimary("true"); // DimensionTimeTypeParamsTpl dimensionTimeTypeParams3 = new DimensionTimeTypeParamsTpl();
dimensionTimeTypeParams3.setTimeGranularity("week"); // dimensionTimeTypeParams3.setIsPrimary("true");
dimension3.setTypeParams(dimensionTimeTypeParams3); // dimensionTimeTypeParams3.setTimeGranularity("week");
dimensions.add(dimension3); // dimension3.setTypeParams(dimensionTimeTypeParams3);
// dimensions.add(dimension3);
datasource.setDimensions(dimensions); //
// datasource.setDimensions(dimensions);
List<IdentifyYamlTpl> identifies = new ArrayList<>(); //
IdentifyYamlTpl identify = new IdentifyYamlTpl(); // List<IdentifyYamlTpl> identifies = new ArrayList<>();
identify.setName("user_name"); // IdentifyYamlTpl identify = new IdentifyYamlTpl();
identify.setType("primary"); // identify.setName("user_name");
identifies.add(identify); // identify.setType("primary");
datasource.setIdentifiers(identifies); // identifies.add(identify);
// datasource.setIdentifiers(identifies);
semanticSchema.getDataModels().put("user_department", //
SemanticSchemaManager.getDataModel(datasource)); // semanticSchema.getDataModels().put("user_department",
// SemanticSchemaManager.getDataModel(datasource));
DimensionYamlTpl dimension1 = new DimensionYamlTpl(); //
dimension1.setExpr("department"); // DimensionYamlTpl dimension1 = new DimensionYamlTpl();
dimension1.setName("department"); // dimension1.setExpr("department");
dimension1.setType("categorical"); // dimension1.setName("department");
List<DimensionYamlTpl> dimensionYamlTpls = new ArrayList<>(); // dimension1.setType("categorical");
dimensionYamlTpls.add(dimension1); // List<DimensionYamlTpl> dimensionYamlTpls = new ArrayList<>();
// dimensionYamlTpls.add(dimension1);
semanticSchema.getDimensions().put("user_department", //
SemanticSchemaManager.getDimensions(dimensionYamlTpls)); // semanticSchema.getDimensions().put("user_department",
} // SemanticSchemaManager.getDimensions(dimensionYamlTpls));
// }
} }

View File

@@ -0,0 +1,158 @@
package com.tencent.supersonic.headless.server.service;
import com.google.common.collect.Lists;
import com.tencent.supersonic.common.pojo.DataFormat;
import com.tencent.supersonic.common.pojo.User;
import com.tencent.supersonic.common.pojo.enums.DataFormatTypeEnum;
import com.tencent.supersonic.common.pojo.enums.SensitiveLevelEnum;
import com.tencent.supersonic.common.pojo.enums.StatusEnum;
import com.tencent.supersonic.common.pojo.enums.TypeEnums;
import com.tencent.supersonic.headless.api.pojo.DrillDownDimension;
import com.tencent.supersonic.headless.api.pojo.MeasureParam;
import com.tencent.supersonic.headless.api.pojo.MetricDefineByMeasureParams;
import com.tencent.supersonic.headless.api.pojo.RelateDimension;
import com.tencent.supersonic.headless.api.pojo.enums.MetricDefineType;
import com.tencent.supersonic.headless.api.pojo.enums.MetricType;
import com.tencent.supersonic.headless.api.pojo.request.MetricReq;
import com.tencent.supersonic.headless.api.pojo.response.MetricResp;
import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
import com.tencent.supersonic.headless.server.facade.service.ChatLayerService;
import com.tencent.supersonic.headless.server.persistence.dataobject.MetricDO;
import com.tencent.supersonic.headless.server.persistence.repository.MetricRepository;
import com.tencent.supersonic.headless.server.service.impl.DataSetServiceImpl;
import com.tencent.supersonic.headless.server.service.impl.MetricServiceImpl;
import com.tencent.supersonic.headless.server.utils.AliasGenerateHelper;
import com.tencent.supersonic.headless.server.utils.MetricConverter;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
import org.springframework.context.ApplicationEventPublisher;
import java.util.HashMap;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.when;
public class MetricServiceImplTest {
//
// @Test
// void createMetric() throws Exception {
// MetricRepository metricRepository = Mockito.mock(MetricRepository.class);
// ModelService modelService = Mockito.mock(ModelService.class);
// MetricService metricService = mockMetricService(metricRepository, modelService);
// MetricReq metricReq = buildMetricReq();
// when(modelService.getModel(metricReq.getModelId())).thenReturn(mockModelResp());
// when(modelService.getModelByDomainIds(any())).thenReturn(Lists.newArrayList());
// MetricResp actualMetricResp = metricService.createMetric(metricReq, User.getDefaultUser());
// MetricResp expectedMetricResp = buildExpectedMetricResp();
// Assertions.assertEquals(expectedMetricResp, actualMetricResp);
// }
//
// @Test
// void updateMetric() throws Exception {
// MetricRepository metricRepository = Mockito.mock(MetricRepository.class);
// ModelService modelService = Mockito.mock(ModelService.class);
// MetricService metricService = mockMetricService(metricRepository, modelService);
// MetricReq metricReq = buildMetricUpdateReq();
// when(modelService.getModel(metricReq.getModelId())).thenReturn(mockModelResp());
// when(modelService.getModelByDomainIds(any())).thenReturn(Lists.newArrayList());
// MetricDO metricDO = MetricConverter.convert2MetricDO(buildMetricReq());
// when(metricRepository.getMetricById(metricDO.getId())).thenReturn(metricDO);
// MetricResp actualMetricResp = metricService.updateMetric(metricReq, User.getDefaultUser());
// MetricResp expectedMetricResp = buildExpectedMetricResp();
// Assertions.assertEquals(expectedMetricResp, actualMetricResp);
// }
//
// private MetricService mockMetricService(MetricRepository metricRepository,
// ModelService modelService) {
// AliasGenerateHelper aliasGenerateHelper = Mockito.mock(AliasGenerateHelper.class);
// CollectService collectService = Mockito.mock(CollectService.class);
// ApplicationEventPublisher eventPublisher = Mockito.mock(ApplicationEventPublisher.class);
// DataSetService dataSetService = Mockito.mock(DataSetServiceImpl.class);
// DimensionService dimensionService = Mockito.mock(DimensionService.class);
// ChatLayerService chatLayerService = Mockito.mock(ChatLayerService.class);
// return new MetricServiceImpl(metricRepository, modelService, aliasGenerateHelper,
// collectService, dataSetService, eventPublisher, dimensionService, chatLayerService);
// }
//
// private MetricReq buildMetricReq() {
// MetricReq metricReq = new MetricReq();
// metricReq.setId(1L);
// metricReq.setName("hr部门的访问次数");
// metricReq.setBizName("pv");
// metricReq.setDescription("SuperSonic的访问情况");
// metricReq.setAlias("pv");
// metricReq.setMetricDefineType(MetricDefineType.MEASURE);
// metricReq.setModelId(2L);
// metricReq.setDataFormatType(DataFormatTypeEnum.PERCENT.getName());
// DataFormat dataFormat = new DataFormat();
// dataFormat.setDecimalPlaces(3);
// dataFormat.setNeedMultiply100(false);
// metricReq.setDataFormat(dataFormat);
// MetricDefineByMeasureParams typeParams = new MetricDefineByMeasureParams();
// typeParams.setMeasures(Lists.newArrayList(new MeasureParam("s2_pv", "department='hr'"),
// new MeasureParam("s2_uv", "department='hr'")));
// typeParams.setExpr("s2_pv/s2_uv");
// metricReq.setMetricDefineByMeasureParams(typeParams);
// metricReq.setClassifications(Lists.newArrayList("核心指标"));
// metricReq.setRelateDimension(RelateDimension.builder().drillDownDimensions(
// Lists.newArrayList(new DrillDownDimension(1L), new DrillDownDimension(1L, false)))
// .build());
// metricReq.setSensitiveLevel(SensitiveLevelEnum.LOW.getCode());
// metricReq.setExt(new HashMap<>());
// return metricReq;
// }
//
// private MetricResp buildExpectedMetricResp() {
// MetricResp metricResp = new MetricResp();
// metricResp.setId(1L);
// metricResp.setName("hr部门的访问次数");
// metricResp.setBizName("pv");
// metricResp.setDescription("SuperSonic的访问情况");
// metricResp.setAlias("pv");
// metricResp.setMetricDefineType(MetricDefineType.MEASURE);
// metricResp.setModelId(2L);
// metricResp.setDataFormatType(DataFormatTypeEnum.PERCENT.getName());
// DataFormat dataFormat = new DataFormat();
// dataFormat.setDecimalPlaces(3);
// dataFormat.setNeedMultiply100(false);
// metricResp.setDataFormat(dataFormat);
// MetricDefineByMeasureParams typeParams = new MetricDefineByMeasureParams();
// typeParams.setMeasures(Lists.newArrayList(new MeasureParam("s2_pv", "department='hr'"),
// new MeasureParam("s2_uv", "department='hr'")));
// typeParams.setExpr("s2_pv/s2_uv");
// metricResp.setMetricDefineByMeasureParams(typeParams);
// metricResp.setClassifications("核心指标");
// metricResp.setRelateDimension(RelateDimension.builder().drillDownDimensions(
// Lists.newArrayList(new DrillDownDimension(1L), new DrillDownDimension(1L, false)))
// .build());
// metricResp.setSensitiveLevel(SensitiveLevelEnum.LOW.getCode());
// metricResp.setExt(new HashMap<>());
// metricResp.setTypeEnum(TypeEnums.METRIC);
// metricResp.setIsCollect(false);
// metricResp.setType(MetricType.DERIVED.name());
// metricResp.setStatus(StatusEnum.ONLINE.getCode());
// return metricResp;
// }
//
// private MetricReq buildMetricUpdateReq() {
// MetricReq metricReq = new MetricReq();
// metricReq.setId(1L);
// metricReq.setName("hr部门的访问次数");
// metricReq.setBizName("pv");
// metricReq.setMetricDefineType(MetricDefineType.MEASURE);
// MetricDefineByMeasureParams typeParams = new MetricDefineByMeasureParams();
// typeParams.setMeasures(Lists.newArrayList(new MeasureParam("s2_pv", "department='hr'"),
// new MeasureParam("s2_uv", "department='hr'")));
// typeParams.setExpr("s2_pv/s2_uv");
// metricReq.setMetricDefineByMeasureParams(typeParams);
// return metricReq;
// }
//
// private ModelResp mockModelResp() {
// ModelResp modelResp = new ModelResp();
// modelResp.setId(2L);
// modelResp.setDomainId(1L);
// return modelResp;
// }
}

View File

@@ -22,10 +22,10 @@ import com.tencent.supersonic.headless.server.utils.ModelConverter;
import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.mockito.Mockito; import org.mockito.Mockito;
import org.springframework.context.ApplicationEventPublisher;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.concurrent.ThreadPoolExecutor;
import static org.mockito.Mockito.when; import static org.mockito.Mockito.when;
@@ -77,10 +77,10 @@ class ModelServiceImplTest {
DateInfoRepository dateInfoRepository = Mockito.mock(DateInfoRepository.class); DateInfoRepository dateInfoRepository = Mockito.mock(DateInfoRepository.class);
DataSetService viewService = Mockito.mock(DataSetService.class); DataSetService viewService = Mockito.mock(DataSetService.class);
ModelRelaService modelRelaService = Mockito.mock(ModelRelaService.class); ModelRelaService modelRelaService = Mockito.mock(ModelRelaService.class);
ThreadPoolExecutor threadPoolExecutor = Mockito.mock(ThreadPoolExecutor.class); ApplicationEventPublisher eventPublisher = Mockito.mock(ApplicationEventPublisher.class);
return new ModelServiceImpl(modelRepository, databaseService, dimensionService, return new ModelServiceImpl(modelRepository, databaseService, dimensionService,
metricService, domainService, userService, viewService, dateInfoRepository, metricService, domainService, userService, viewService, dateInfoRepository,
modelRelaService, threadPoolExecutor); modelRelaService, eventPublisher);
} }
private ModelReq mockModelReq() { private ModelReq mockModelReq() {

View File

@@ -25,18 +25,20 @@ com.tencent.supersonic.headless.chat.parser.llm.DataSetResolver=\
### headless-core SPIs ### headless-core SPIs
com.tencent.supersonic.headless.core.translator.parser.QueryParser=\
com.tencent.supersonic.headless.core.translator.parser.DefaultDimValueParser,\
com.tencent.supersonic.headless.core.translator.parser.SqlVariableParser,\
com.tencent.supersonic.headless.core.translator.parser.SqlQueryParser,\
com.tencent.supersonic.headless.core.translator.parser.StructQueryParser,\
com.tencent.supersonic.headless.core.translator.parser.MetricRatioParser,\
com.tencent.supersonic.headless.core.translator.parser.OntologyQueryParser
com.tencent.supersonic.headless.core.translator.optimizer.QueryOptimizer=\ com.tencent.supersonic.headless.core.translator.optimizer.QueryOptimizer=\
com.tencent.supersonic.headless.core.translator.optimizer.DbDialectOptimizer,\ com.tencent.supersonic.headless.core.translator.optimizer.DbDialectOptimizer,\
com.tencent.supersonic.headless.core.translator.optimizer.ResultLimitOptimizer com.tencent.supersonic.headless.core.translator.optimizer.ResultLimitOptimizer
com.tencent.supersonic.headless.core.translator.parser.QueryParser=\
com.tencent.supersonic.headless.core.translator.parser.SqlVariableParser,\
com.tencent.supersonic.headless.core.translator.parser.StructQueryParser,\
com.tencent.supersonic.headless.core.translator.parser.SqlQueryParser,\
com.tencent.supersonic.headless.core.translator.parser.DefaultDimValueParser,\
com.tencent.supersonic.headless.core.translator.parser.DimExpressionParser,\
com.tencent.supersonic.headless.core.translator.parser.MetricExpressionParser,\
com.tencent.supersonic.headless.core.translator.parser.MetricRatioParser,\
com.tencent.supersonic.headless.core.translator.parser.OntologyQueryParser
com.tencent.supersonic.headless.core.executor.QueryExecutor=\ com.tencent.supersonic.headless.core.executor.QueryExecutor=\
com.tencent.supersonic.headless.core.executor.JdbcExecutor com.tencent.supersonic.headless.core.executor.JdbcExecutor

View File

@@ -53,14 +53,14 @@ public class S2VisitsDemo extends S2BaseDemo {
// create metrics and dimensions // create metrics and dimensions
DimensionResp departmentDimension = getDimension("department", userModel); DimensionResp departmentDimension = getDimension("department", userModel);
MetricResp metricPv = getMetric("pv", pvUvModel); MetricResp metricUv = addMetric_uv(pvUvModel, departmentDimension);
DimensionResp pageDimension = getDimension("page", stayTimeModel); DimensionResp pageDimension = getDimension("visits_page", stayTimeModel);
updateDimension(stayTimeModel, pageDimension); updateDimension(stayTimeModel, pageDimension);
DimensionResp userDimension = getDimension("user_name", userModel); DimensionResp userDimension = getDimension("user_name", userModel);
updateMetric(stayTimeModel, departmentDimension, userDimension); MetricResp metricPv = addMetric_pv(pvUvModel, departmentDimension, userDimension);
updateMetric_pv(pvUvModel, departmentDimension, userDimension, metricPv);
addMetric_uv(pvUvModel, departmentDimension); addMetric_pv_avg(metricPv, metricUv, departmentDimension, pvUvModel);
// create dict conf for dimensions // create dict conf for dimensions
enableDimensionValue(departmentDimension); enableDimensionValue(departmentDimension);
@@ -69,7 +69,7 @@ public class S2VisitsDemo extends S2BaseDemo {
// create data set // create data set
DataSetResp s2DataSet = addDataSet(s2Domain); DataSetResp s2DataSet = addDataSet(s2Domain);
addAuthGroup_1(stayTimeModel); addAuthGroup_1(stayTimeModel);
addAuthGroup_2(pvUvModel); addAuthGroup_2(stayTimeModel);
// create terms and plugin // create terms and plugin
addTerm(s2Domain); addTerm(s2Domain);
@@ -162,12 +162,11 @@ public class S2VisitsDemo extends S2BaseDemo {
modelReq.setAdminOrgs(Collections.emptyList()); modelReq.setAdminOrgs(Collections.emptyList());
ModelDetail modelDetail = new ModelDetail(); ModelDetail modelDetail = new ModelDetail();
List<Identify> identifiers = new ArrayList<>(); List<Identify> identifiers = new ArrayList<>();
identifiers.add(new Identify("用户", IdentifyType.primary.name(), "user_name", 1)); identifiers.add(new Identify("用户", IdentifyType.primary.name(), "user_name", 1));
modelDetail.setIdentifiers(identifiers); modelDetail.setIdentifiers(identifiers);
List<Dimension> dimensions = new ArrayList<>(); List<Dimension> dimensions = new ArrayList<>();
dimensions.add(new Dimension("部门", "department", DimensionType.categorical, 1)); dimensions.add(new Dimension("部门", "department", DimensionType.categorical, 1));
// dimensions.add(new Dimension("用户", "user_name", DimensionType.categorical, 1));
modelDetail.setDimensions(dimensions); modelDetail.setDimensions(dimensions);
List<Field> fields = Lists.newArrayList(); List<Field> fields = Lists.newArrayList();
fields.add(Field.builder().fieldName("user_name").dataType("Varchar").build()); fields.add(Field.builder().fieldName("user_name").dataType("Varchar").build());
@@ -175,7 +174,7 @@ public class S2VisitsDemo extends S2BaseDemo {
modelDetail.setFields(fields); modelDetail.setFields(fields);
modelDetail.setMeasures(Collections.emptyList()); modelDetail.setMeasures(Collections.emptyList());
modelDetail.setQueryType("sql_query"); modelDetail.setQueryType("sql_query");
modelDetail.setSqlQuery("select user_name,department from s2_user_department"); modelDetail.setSqlQuery("select * from s2_user_department");
modelReq.setModelDetail(modelDetail); modelReq.setModelDetail(modelDetail);
return modelService.createModel(modelReq, defaultUser); return modelService.createModel(modelReq, defaultUser);
} }
@@ -197,28 +196,19 @@ public class S2VisitsDemo extends S2BaseDemo {
modelDetail.setIdentifiers(identifiers); modelDetail.setIdentifiers(identifiers);
List<Dimension> dimensions = new ArrayList<>(); List<Dimension> dimensions = new ArrayList<>();
Dimension dimension1 = new Dimension("", "imp_date", DimensionType.partition_time, 0); Dimension dimension1 = new Dimension("数据日期", "imp_date", DimensionType.partition_time, 1);
dimension1.setTypeParams(new DimensionTimeTypeParams()); dimension1.setTypeParams(new DimensionTimeTypeParams());
dimensions.add(dimension1); dimensions.add(dimension1);
Dimension dimension2 = new Dimension("", "page", DimensionType.categorical, 0); Dimension dimension2 = new Dimension("", "page", DimensionType.categorical, 0);
dimension2.setExpr("page"); dimension2.setExpr("page");
dimensions.add(dimension2); dimensions.add(dimension2);
modelDetail.setDimensions(dimensions); modelDetail.setDimensions(dimensions);
List<Measure> measures = new ArrayList<>();
Measure measure1 = new Measure("访问次数", "pv", AggOperatorEnum.SUM.name(), 1);
measures.add(measure1);
Measure measure2 = new Measure("访问用户数", "user_id", AggOperatorEnum.SUM.name(), 0);
measures.add(measure2);
modelDetail.setMeasures(measures);
List<Field> fields = Lists.newArrayList(); List<Field> fields = Lists.newArrayList();
fields.add(Field.builder().fieldName("user_name").dataType("Varchar").build()); fields.add(Field.builder().fieldName("user_name").dataType("Varchar").build());
fields.add(Field.builder().fieldName("imp_date").dataType("Date").build()); fields.add(Field.builder().fieldName("imp_date").dataType("Date").build());
fields.add(Field.builder().fieldName("page").dataType("Varchar").build()); fields.add(Field.builder().fieldName("page").dataType("Varchar").build());
fields.add(Field.builder().fieldName("pv").dataType("Long").build());
fields.add(Field.builder().fieldName("user_id").dataType("Varchar").build());
modelDetail.setFields(fields); modelDetail.setFields(fields);
modelDetail.setSqlQuery("SELECT imp_date, user_name, page, 1 as pv, " modelDetail.setSqlQuery("SELECT * FROM s2_pv_uv_statis");
+ "user_name as user_id FROM s2_pv_uv_statis");
modelDetail.setQueryType("sql_query"); modelDetail.setQueryType("sql_query");
modelReq.setModelDetail(modelDetail); modelReq.setModelDetail(modelDetail);
return modelService.createModel(modelReq, defaultUser); return modelService.createModel(modelReq, defaultUser);
@@ -231,20 +221,20 @@ public class S2VisitsDemo extends S2BaseDemo {
modelReq.setDescription("停留时长统计"); modelReq.setDescription("停留时长统计");
modelReq.setDomainId(s2Domain.getId()); modelReq.setDomainId(s2Domain.getId());
modelReq.setDatabaseId(s2Database.getId()); modelReq.setDatabaseId(s2Database.getId());
modelReq.setViewers(Arrays.asList("admin", "tom", "jack")); modelReq.setViewers(Arrays.asList("admin", "jack"));
modelReq.setViewOrgs(Collections.singletonList("1")); modelReq.setViewOrgs(Collections.singletonList("1"));
modelReq.setAdmins(Collections.singletonList("admin")); modelReq.setAdmins(Collections.singletonList("admin"));
modelReq.setAdminOrgs(Collections.emptyList()); modelReq.setAdminOrgs(Collections.emptyList());
List<Identify> identifiers = new ArrayList<>(); List<Identify> identifiers = new ArrayList<>();
ModelDetail modelDetail = new ModelDetail(); ModelDetail modelDetail = new ModelDetail();
identifiers.add(new Identify("用户", IdentifyType.foreign.name(), "user_name", 0)); identifiers.add(new Identify("用户", IdentifyType.foreign.name(), "user_name", 0));
modelDetail.setIdentifiers(identifiers); modelDetail.setIdentifiers(identifiers);
List<Dimension> dimensions = new ArrayList<>(); List<Dimension> dimensions = new ArrayList<>();
Dimension dimension1 = new Dimension("数据日期", "imp_date", DimensionType.partition_time, 1); Dimension dimension1 = new Dimension("数据日期", "imp_date", DimensionType.partition_time, 0);
dimension1.setTypeParams(new DimensionTimeTypeParams()); dimension1.setTypeParams(new DimensionTimeTypeParams());
dimensions.add(dimension1); dimensions.add(dimension1);
Dimension dimension2 = new Dimension("页面", "page", DimensionType.categorical, 1); Dimension dimension2 = new Dimension("页面", "visits_page", DimensionType.categorical, 1);
dimension2.setExpr("page"); dimension2.setExpr("page");
dimensions.add(dimension2); dimensions.add(dimension2);
modelDetail.setDimensions(dimensions); modelDetail.setDimensions(dimensions);
@@ -259,8 +249,7 @@ public class S2VisitsDemo extends S2BaseDemo {
fields.add(Field.builder().fieldName("page").dataType("Varchar").build()); fields.add(Field.builder().fieldName("page").dataType("Varchar").build());
fields.add(Field.builder().fieldName("stay_hours").dataType("Double").build()); fields.add(Field.builder().fieldName("stay_hours").dataType("Double").build());
modelDetail.setFields(fields); modelDetail.setFields(fields);
modelDetail modelDetail.setSqlQuery("select * from s2_stay_time_statis");
.setSqlQuery("select imp_date,user_name,stay_hours,page from s2_stay_time_statis");
modelDetail.setQueryType("sql_query"); modelDetail.setQueryType("sql_query");
modelReq.setModelDetail(modelDetail); modelReq.setModelDetail(modelDetail);
return modelService.createModel(modelReq, defaultUser); return modelService.createModel(modelReq, defaultUser);
@@ -295,49 +284,23 @@ public class S2VisitsDemo extends S2BaseDemo {
dimensionService.updateDimension(dimensionReq, defaultUser); dimensionService.updateDimension(dimensionReq, defaultUser);
} }
private void updateMetric(ModelResp stayTimeModel, DimensionResp departmentDimension, private MetricResp addMetric_pv(ModelResp pvUvModel, DimensionResp departmentDimension,
DimensionResp userDimension) throws Exception { DimensionResp userDimension) throws Exception {
MetricResp stayHoursMetric = metricService.getMetric(stayTimeModel.getId(), "stay_hours");
MetricReq metricReq = new MetricReq();
metricReq.setModelId(stayTimeModel.getId());
metricReq.setId(stayHoursMetric.getId());
metricReq.setName("停留时长");
metricReq.setBizName("stay_hours");
metricReq.setSensitiveLevel(SensitiveLevelEnum.HIGH.getCode());
metricReq.setDescription("停留时长");
metricReq.setClassifications(Collections.singletonList("核心指标"));
MetricDefineByMeasureParams metricTypeParams = new MetricDefineByMeasureParams();
metricTypeParams.setExpr("stay_hours");
List<Measure> measures = new ArrayList<>();
Measure measure = new Measure("停留时长", "stay_hours", AggOperatorEnum.SUM.getOperator(), 0);
measures.add(measure);
metricTypeParams.setMeasures(measures);
metricReq.setMetricDefineByMeasureParams(metricTypeParams);
metricReq.setMetricDefineType(MetricDefineType.MEASURE);
metricReq.setRelateDimension(getRelateDimension(
Lists.newArrayList(departmentDimension.getId(), userDimension.getId())));
metricService.updateMetric(metricReq, defaultUser);
}
private void updateMetric_pv(ModelResp pvUvModel, DimensionResp departmentDimension,
DimensionResp userDimension, MetricResp metricPv) throws Exception {
MetricReq metricReq = new MetricReq(); MetricReq metricReq = new MetricReq();
metricReq.setModelId(pvUvModel.getId()); metricReq.setModelId(pvUvModel.getId());
metricReq.setId(metricPv.getId());
metricReq.setName("访问次数"); metricReq.setName("访问次数");
metricReq.setBizName("pv"); metricReq.setBizName("pv");
metricReq.setDescription("一段时间内用户的访问次数"); metricReq.setDescription("一段时间内用户的访问次数");
MetricDefineByMeasureParams metricTypeParams = new MetricDefineByMeasureParams(); MetricDefineByFieldParams metricTypeParams = new MetricDefineByFieldParams();
metricTypeParams.setExpr("pv"); metricTypeParams.setExpr("count(1)");
List<Measure> measures = new ArrayList<>(); // List<FieldParam> fieldParams = new ArrayList<>();
Measure measure = new Measure("访问次数", "pv", AggOperatorEnum.SUM.getOperator(), 0); // fieldParams.add(new FieldParam("imp_date"));
measures.add(measure); // metricTypeParams.setFields(fieldParams);
metricTypeParams.setMeasures(measures); metricReq.setMetricDefineByFieldParams(metricTypeParams);
metricReq.setMetricDefineByMeasureParams(metricTypeParams); metricReq.setMetricDefineType(MetricDefineType.FIELD);
metricReq.setMetricDefineType(MetricDefineType.MEASURE);
metricReq.setRelateDimension(getRelateDimension( metricReq.setRelateDimension(getRelateDimension(
Lists.newArrayList(departmentDimension.getId(), userDimension.getId()))); Lists.newArrayList(departmentDimension.getId(), userDimension.getId())));
metricService.updateMetric(metricReq, defaultUser); return metricService.createMetric(metricReq, defaultUser);
} }
private MetricResp addMetric_uv(ModelResp uvModel, DimensionResp departmentDimension) private MetricResp addMetric_uv(ModelResp uvModel, DimensionResp departmentDimension)
@@ -361,6 +324,31 @@ public class S2VisitsDemo extends S2BaseDemo {
return metricService.createMetric(metricReq, defaultUser); return metricService.createMetric(metricReq, defaultUser);
} }
private MetricResp addMetric_pv_avg(MetricResp metricPv, MetricResp metricUv,
DimensionResp departmentDimension, ModelResp pvModel) throws Exception {
MetricReq metricReq = new MetricReq();
metricReq.setModelId(pvModel.getId());
metricReq.setName("人均访问次数");
metricReq.setBizName("pv_avg");
metricReq.setSensitiveLevel(SensitiveLevelEnum.HIGH.getCode());
metricReq.setDescription("每个用户平均访问的次数");
metricReq.setClassifications(Collections.singletonList("核心指标"));
metricReq.setAlias("平均访问次数");
MetricDefineByMetricParams metricTypeParams = new MetricDefineByMetricParams();
metricTypeParams.setExpr("pv/uv");
List<MetricParam> metrics = new ArrayList<>();
MetricParam pv = new MetricParam(metricPv.getId(), metricPv.getBizName());
MetricParam uv = new MetricParam(metricUv.getId(), metricUv.getBizName());
metrics.add(pv);
metrics.add(uv);
metricTypeParams.setMetrics(metrics);
metricReq.setMetricDefineByMetricParams(metricTypeParams);
metricReq.setMetricDefineType(MetricDefineType.METRIC);
metricReq.setRelateDimension(
getRelateDimension(Lists.newArrayList(departmentDimension.getId())));
return metricService.createMetric(metricReq, defaultUser);
}
private DataSetResp addDataSet(DomainResp s2Domain) { private DataSetResp addDataSet(DomainResp s2Domain) {
DataSetReq dataSetReq = new DataSetReq(); DataSetReq dataSetReq = new DataSetReq();
dataSetReq.setName("超音数数据集"); dataSetReq.setName("超音数数据集");
@@ -409,15 +397,14 @@ public class S2VisitsDemo extends S2BaseDemo {
authService.addOrUpdateAuthGroup(authGroupReq); authService.addOrUpdateAuthGroup(authGroupReq);
} }
private void addAuthGroup_2(ModelResp pvuvModel) { private void addAuthGroup_2(ModelResp model) {
AuthGroup authGroupReq = new AuthGroup(); AuthGroup authGroupReq = new AuthGroup();
authGroupReq.setModelId(pvuvModel.getId()); authGroupReq.setModelId(model.getId());
authGroupReq.setName("tom_row_permission"); authGroupReq.setName("tom_row_permission");
List<AuthRule> authRules = new ArrayList<>(); List<AuthRule> authRules = new ArrayList<>();
authGroupReq.setAuthRules(authRules); authGroupReq.setAuthRules(authRules);
authGroupReq.setDimensionFilters(Collections.singletonList("user_name = 'tom'")); authGroupReq.setDimensionFilters(Collections.singletonList("user_name = 'tom'"));
authGroupReq.setDimensionFilterDescription("用户名='tom'");
authGroupReq.setAuthorizedUsers(Collections.singletonList("tom")); authGroupReq.setAuthorizedUsers(Collections.singletonList("tom"));
authGroupReq.setAuthorizedDepartmentIds(Collections.emptyList()); authGroupReq.setAuthorizedDepartmentIds(Collections.emptyList());
authService.addOrUpdateAuthGroup(authGroupReq); authService.addOrUpdateAuthGroup(authGroupReq);

View File

@@ -26,18 +26,20 @@ com.tencent.supersonic.headless.chat.parser.llm.DataSetResolver=\
### headless-core SPIs ### headless-core SPIs
com.tencent.supersonic.headless.core.translator.parser.QueryParser=\
com.tencent.supersonic.headless.core.translator.parser.DefaultDimValueParser,\
com.tencent.supersonic.headless.core.translator.parser.SqlVariableParser,\
com.tencent.supersonic.headless.core.translator.parser.SqlQueryParser,\
com.tencent.supersonic.headless.core.translator.parser.StructQueryParser,\
com.tencent.supersonic.headless.core.translator.parser.MetricRatioParser,\
com.tencent.supersonic.headless.core.translator.parser.OntologyQueryParser
com.tencent.supersonic.headless.core.translator.optimizer.QueryOptimizer=\ com.tencent.supersonic.headless.core.translator.optimizer.QueryOptimizer=\
com.tencent.supersonic.headless.core.translator.optimizer.DbDialectOptimizer,\ com.tencent.supersonic.headless.core.translator.optimizer.DbDialectOptimizer,\
com.tencent.supersonic.headless.core.translator.optimizer.ResultLimitOptimizer com.tencent.supersonic.headless.core.translator.optimizer.ResultLimitOptimizer
com.tencent.supersonic.headless.core.translator.parser.QueryParser=\
com.tencent.supersonic.headless.core.translator.parser.SqlVariableParser,\
com.tencent.supersonic.headless.core.translator.parser.StructQueryParser,\
com.tencent.supersonic.headless.core.translator.parser.SqlQueryParser,\
com.tencent.supersonic.headless.core.translator.parser.DefaultDimValueParser,\
com.tencent.supersonic.headless.core.translator.parser.DimExpressionParser,\
com.tencent.supersonic.headless.core.translator.parser.MetricExpressionParser,\
com.tencent.supersonic.headless.core.translator.parser.MetricRatioParser,\
com.tencent.supersonic.headless.core.translator.parser.OntologyQueryParser
com.tencent.supersonic.headless.core.executor.QueryExecutor=\ com.tencent.supersonic.headless.core.executor.QueryExecutor=\
com.tencent.supersonic.headless.core.executor.JdbcExecutor com.tencent.supersonic.headless.core.executor.JdbcExecutor

View File

@@ -41,7 +41,7 @@ public class MetricTest extends BaseTest {
@Test @Test
@SetSystemProperty(key = "s2.test", value = "true") @SetSystemProperty(key = "s2.test", value = "true")
public void testMetricModel() throws Exception { public void testMetricModel() throws Exception {
QueryResult actualResult = submitNewChat("超音数 访问次数", agent.getId()); QueryResult actualResult = submitNewChat("超音数访问次数", agent.getId());
QueryResult expectedResult = new QueryResult(); QueryResult expectedResult = new QueryResult();
SemanticParseInfo expectedParseInfo = new SemanticParseInfo(); SemanticParseInfo expectedParseInfo = new SemanticParseInfo();
@@ -57,6 +57,29 @@ public class MetricTest extends BaseTest {
assertQueryResult(expectedResult, actualResult); assertQueryResult(expectedResult, actualResult);
assert actualResult.getQueryResults().size() == 1; assert actualResult.getQueryResults().size() == 1;
assert actualResult.getQuerySql().contains("s2_pv_uv_statis");
}
@Test
@SetSystemProperty(key = "s2.test", value = "true")
public void testDerivedMetricModel() throws Exception {
QueryResult actualResult = submitNewChat("超音数 人均访问次数", agent.getId());
QueryResult expectedResult = new QueryResult();
SemanticParseInfo expectedParseInfo = new SemanticParseInfo();
expectedResult.setChatContext(expectedParseInfo);
expectedResult.setQueryMode(MetricModelQuery.QUERY_MODE);
expectedParseInfo.setAggType(NONE);
expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("人均访问次数"));
expectedParseInfo.setDateInfo(
DataUtils.getDateConf(DateConf.DateMode.BETWEEN, unit, period, startDay, endDay));
expectedParseInfo.setQueryType(QueryType.AGGREGATE);
assertQueryResult(expectedResult, actualResult);
assert actualResult.getQueryResults().size() == 1;
assert actualResult.getQuerySql().contains("s2_pv_uv_statis");
} }
@Test @Test
@@ -73,9 +96,9 @@ public class MetricTest extends BaseTest {
expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("访问次数")); expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("访问次数"));
SchemaElement userElement = getSchemaElementByName(schema.getDimensions(), "用户"); SchemaElement userElement = getSchemaElementByName(schema.getDimensions(), "用户");
expectedParseInfo.getDimensionFilters().add(DataUtils.getFilter("user_name", expectedParseInfo.getDimensionFilters().add(DataUtils.getFilter("user_name",
FilterOperatorEnum.EQUALS, "alice", "用户", userElement.getId())); FilterOperatorEnum.EQUALS, "alice", "用户", userElement.getId()));
expectedParseInfo.setDateInfo( expectedParseInfo.setDateInfo(
DataUtils.getDateConf(DateConf.DateMode.BETWEEN, unit, period, startDay, endDay)); DataUtils.getDateConf(DateConf.DateMode.BETWEEN, unit, period, startDay, endDay));
@@ -83,11 +106,60 @@ public class MetricTest extends BaseTest {
assertQueryResult(expectedResult, actualResult); assertQueryResult(expectedResult, actualResult);
assert actualResult.getQueryResults().size() == 1; assert actualResult.getQueryResults().size() == 1;
assert actualResult.getQuerySql().contains("s2_pv_uv_statis");
} }
@Test @Test
@SetSystemProperty(key = "s2.test", value = "true") @SetSystemProperty(key = "s2.test", value = "true")
public void testMetricGroupBy() throws Exception { public void testMetricGroupBy() throws Exception {
QueryResult actualResult = submitNewChat("近7天超音数各用户的访问次数", agent.getId());
QueryResult expectedResult = new QueryResult();
SemanticParseInfo expectedParseInfo = new SemanticParseInfo();
expectedResult.setChatContext(expectedParseInfo);
expectedResult.setQueryMode(MetricGroupByQuery.QUERY_MODE);
expectedParseInfo.setAggType(NONE);
expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("访问次数"));
expectedParseInfo.getDimensions().add(DataUtils.getSchemaElement("用户名"));
expectedParseInfo.setDateInfo(DataUtils.getDateConf(DateConf.DateMode.BETWEEN, 7,
DatePeriodEnum.DAY, startDay, endDay));
expectedParseInfo.setQueryType(QueryType.AGGREGATE);
assertQueryResult(expectedResult, actualResult);
assert actualResult.getQueryResults().size() == 6;
assert actualResult.getQuerySql().contains("s2_pv_uv_statis");
}
@Test
@SetSystemProperty(key = "s2.test", value = "true")
public void testMetricGroupByAndJoin() throws Exception {
QueryResult actualResult = submitNewChat("近7天超音数各部门的访问次数", agent.getId());
QueryResult expectedResult = new QueryResult();
SemanticParseInfo expectedParseInfo = new SemanticParseInfo();
expectedResult.setChatContext(expectedParseInfo);
expectedResult.setQueryMode(MetricGroupByQuery.QUERY_MODE);
expectedParseInfo.setAggType(NONE);
expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("访问次数"));
expectedParseInfo.getDimensions().add(DataUtils.getSchemaElement("部门"));
expectedParseInfo.setDateInfo(DataUtils.getDateConf(DateConf.DateMode.BETWEEN, 7,
DatePeriodEnum.DAY, startDay, endDay));
expectedParseInfo.setQueryType(QueryType.AGGREGATE);
assertQueryResult(expectedResult, actualResult);
assert actualResult.getQueryResults().size() == 4;
assert actualResult.getQuerySql().contains("s2_pv_uv_statis");
}
@Test
@SetSystemProperty(key = "s2.test", value = "true")
public void testMetricGroupByAndMultiJoin() throws Exception {
QueryResult actualResult = submitNewChat("近7天超音数各部门的访问次数和停留时长", agent.getId()); QueryResult actualResult = submitNewChat("近7天超音数各部门的访问次数和停留时长", agent.getId());
QueryResult expectedResult = new QueryResult(); QueryResult expectedResult = new QueryResult();
@@ -107,6 +179,9 @@ public class MetricTest extends BaseTest {
assertQueryResult(expectedResult, actualResult); assertQueryResult(expectedResult, actualResult);
assert actualResult.getQueryResults().size() == 4; assert actualResult.getQueryResults().size() == 4;
assert actualResult.getQuerySql().contains("s2_pv_uv_statis");
assert actualResult.getQuerySql().contains("s2_user_department");
assert actualResult.getQuerySql().contains("s2_stay_time_statis");
} }
@Test @Test
@@ -120,14 +195,14 @@ public class MetricTest extends BaseTest {
expectedResult.setQueryMode(MetricFilterQuery.QUERY_MODE); expectedResult.setQueryMode(MetricFilterQuery.QUERY_MODE);
expectedParseInfo.setAggType(NONE); expectedParseInfo.setAggType(NONE);
expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("访问次数")); expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("访问次数"));
expectedParseInfo.getDimensions().add(DataUtils.getSchemaElement("用户")); expectedParseInfo.getDimensions().add(DataUtils.getSchemaElement("用户"));
List<String> list = new ArrayList<>(); List<String> list = new ArrayList<>();
list.add("alice"); list.add("alice");
list.add("lucy"); list.add("lucy");
SchemaElement userElement = getSchemaElementByName(schema.getDimensions(), "用户"); SchemaElement userElement = getSchemaElementByName(schema.getDimensions(), "用户");
QueryFilter dimensionFilter = DataUtils.getFilter("user_name", FilterOperatorEnum.IN, list, QueryFilter dimensionFilter = DataUtils.getFilter("user_name", FilterOperatorEnum.IN, list,
"用户", userElement.getId()); "用户", userElement.getId());
expectedParseInfo.getDimensionFilters().add(dimensionFilter); expectedParseInfo.getDimensionFilters().add(dimensionFilter);
expectedParseInfo.setDateInfo( expectedParseInfo.setDateInfo(
@@ -151,7 +226,7 @@ public class MetricTest extends BaseTest {
expectedParseInfo.setAggType(MAX); expectedParseInfo.setAggType(MAX);
expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("访问次数")); expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("访问次数"));
expectedParseInfo.getDimensions().add(DataUtils.getSchemaElement("用户")); expectedParseInfo.getDimensions().add(DataUtils.getSchemaElement("用户"));
expectedParseInfo.setDateInfo( expectedParseInfo.setDateInfo(
DataUtils.getDateConf(3, DateConf.DateMode.BETWEEN, DatePeriodEnum.DAY)); DataUtils.getDateConf(3, DateConf.DateMode.BETWEEN, DatePeriodEnum.DAY));
@@ -161,6 +236,7 @@ public class MetricTest extends BaseTest {
} }
@Test @Test
@SetSystemProperty(key = "s2.test", value = "true")
public void testMetricGroupBySum() throws Exception { public void testMetricGroupBySum() throws Exception {
QueryResult actualResult = submitNewChat("近7天超音数各部门的访问次数总和", agent.getId()); QueryResult actualResult = submitNewChat("近7天超音数各部门的访问次数总和", agent.getId());
QueryResult expectedResult = new QueryResult(); QueryResult expectedResult = new QueryResult();
@@ -197,10 +273,10 @@ public class MetricTest extends BaseTest {
expectedResult.setQueryMode(MetricFilterQuery.QUERY_MODE); expectedResult.setQueryMode(MetricFilterQuery.QUERY_MODE);
expectedParseInfo.setAggType(NONE); expectedParseInfo.setAggType(NONE);
SchemaElement userElement = getSchemaElementByName(schema.getDimensions(), "用户"); SchemaElement userElement = getSchemaElementByName(schema.getDimensions(), "用户");
expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("访问次数")); expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("访问次数"));
expectedParseInfo.getDimensionFilters().add(DataUtils.getFilter("user_name", expectedParseInfo.getDimensionFilters().add(DataUtils.getFilter("user_name",
FilterOperatorEnum.EQUALS, "alice", "用户", userElement.getId())); FilterOperatorEnum.EQUALS, "alice", "用户", userElement.getId()));
expectedParseInfo.setDateInfo( expectedParseInfo.setDateInfo(
DataUtils.getDateConf(DateConf.DateMode.BETWEEN, 1, period, startDay, startDay)); DataUtils.getDateConf(DateConf.DateMode.BETWEEN, 1, period, startDay, startDay));

View File

@@ -95,7 +95,7 @@ public class BaseTest extends BaseApplication {
queryStructReq.setQueryType(queryType); queryStructReq.setQueryType(queryType);
Aggregator aggregator = new Aggregator(); Aggregator aggregator = new Aggregator();
aggregator.setFunc(AggOperatorEnum.SUM); aggregator.setFunc(AggOperatorEnum.SUM);
aggregator.setColumn("pv"); aggregator.setColumn("stay_hours");
queryStructReq.setAggregators(Arrays.asList(aggregator)); queryStructReq.setAggregators(Arrays.asList(aggregator));
if (CollectionUtils.isNotEmpty(groups)) { if (CollectionUtils.isNotEmpty(groups)) {
@@ -111,7 +111,7 @@ public class BaseTest extends BaseApplication {
List<Order> orders = new ArrayList<>(); List<Order> orders = new ArrayList<>();
Order order = new Order(); Order order = new Order();
order.setColumn("pv"); order.setColumn("stay_hours");
orders.add(order); orders.add(order);
queryStructReq.setOrders(orders); queryStructReq.setOrders(orders);
return queryStructReq; return queryStructReq;

View File

@@ -1,33 +0,0 @@
package com.tencent.supersonic.headless;
import com.google.common.collect.Lists;
import com.tencent.supersonic.headless.api.pojo.request.FieldRemovedReq;
import com.tencent.supersonic.headless.api.pojo.response.MetricResp;
import com.tencent.supersonic.headless.api.pojo.response.UnAvailableItemResp;
import com.tencent.supersonic.headless.server.service.ModelService;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;
public class ModelSchemaTest extends BaseTest {
@Autowired
private ModelService modelService;
@Test
void testGetUnAvailableItem() {
FieldRemovedReq fieldRemovedReq = new FieldRemovedReq();
fieldRemovedReq.setModelId(2L);
fieldRemovedReq.setFields(Lists.newArrayList("pv"));
UnAvailableItemResp unAvailableItemResp = modelService.getUnAvailableItem(fieldRemovedReq);
List<Long> expectedUnAvailableMetricId = Lists.newArrayList(1L);
List<Long> actualUnAvailableMetricId =
unAvailableItemResp.getMetricResps().stream().map(MetricResp::getId)
.sorted(Comparator.naturalOrder()).collect(Collectors.toList());
Assertions.assertEquals(expectedUnAvailableMetricId, actualUnAvailableMetricId);
}
}

View File

@@ -22,25 +22,12 @@ public class QueryByMetricTest extends BaseTest {
@Autowired @Autowired
protected MetricService metricService; protected MetricService metricService;
@Test
public void testWithMetricAndDimensionBizNames() throws Exception {
QueryMetricReq queryMetricReq = new QueryMetricReq();
queryMetricReq.setMetricNames(Arrays.asList("stay_hours", "pv"));
queryMetricReq.setDimensionNames(Arrays.asList("user_name", "department"));
queryMetricReq.getFilters().add(Filter.builder().name("imp_date")
.operator(FilterOperatorEnum.MINOR_THAN_EQUALS).relation(Filter.Relation.FILTER)
.value(LocalDate.now().toString()).build());
SemanticQueryResp queryResp = queryByMetric(queryMetricReq, User.getDefaultUser());
Assert.assertNotNull(queryResp.getResultList());
Assert.assertEquals(6, queryResp.getResultList().size());
}
@Test @Test
@SetSystemProperty(key = "s2.test", value = "true") @SetSystemProperty(key = "s2.test", value = "true")
public void testWithMetricAndDimensionNames() throws Exception { public void testWithMetricAndDimensionNames() throws Exception {
QueryMetricReq queryMetricReq = new QueryMetricReq(); QueryMetricReq queryMetricReq = new QueryMetricReq();
queryMetricReq.setMetricNames(Arrays.asList("停留时长", "访问次数")); queryMetricReq.setMetricNames(Arrays.asList("停留时长", "访问次数"));
queryMetricReq.setDimensionNames(Arrays.asList("用户", "部门")); queryMetricReq.setDimensionNames(Arrays.asList("用户", "部门"));
queryMetricReq.getFilters() queryMetricReq.getFilters()
.add(Filter.builder().name("数据日期").operator(FilterOperatorEnum.MINOR_THAN_EQUALS) .add(Filter.builder().name("数据日期").operator(FilterOperatorEnum.MINOR_THAN_EQUALS)
.relation(Filter.Relation.FILTER).value(LocalDate.now().toString()) .relation(Filter.Relation.FILTER).value(LocalDate.now().toString())
@@ -51,21 +38,23 @@ public class QueryByMetricTest extends BaseTest {
} }
@Test @Test
@SetSystemProperty(key = "s2.test", value = "true")
public void testWithDomainId() throws Exception { public void testWithDomainId() throws Exception {
QueryMetricReq queryMetricReq = new QueryMetricReq(); QueryMetricReq queryMetricReq = new QueryMetricReq();
queryMetricReq.setDomainId(1L); queryMetricReq.setDomainId(1L);
queryMetricReq.setMetricNames(Arrays.asList("stay_hours", "pv")); queryMetricReq.setMetricNames(Arrays.asList("停留时长", "访问次数"));
queryMetricReq.setDimensionNames(Arrays.asList("user_name", "department")); queryMetricReq.setDimensionNames(Arrays.asList("用户名", "部门"));
queryMetricReq.getFilters().add(Filter.builder().name("imp_date") queryMetricReq.getFilters()
.operator(FilterOperatorEnum.MINOR_THAN_EQUALS).relation(Filter.Relation.FILTER) .add(Filter.builder().name("数据日期").operator(FilterOperatorEnum.MINOR_THAN_EQUALS)
.value(LocalDate.now().toString()).build()); .relation(Filter.Relation.FILTER).value(LocalDate.now().toString())
.build());
SemanticQueryResp queryResp = queryByMetric(queryMetricReq, User.getDefaultUser()); SemanticQueryResp queryResp = queryByMetric(queryMetricReq, User.getDefaultUser());
Assert.assertNotNull(queryResp.getResultList()); Assert.assertNotNull(queryResp.getResultList());
Assert.assertEquals(6, queryResp.getResultList().size()); Assert.assertEquals(6, queryResp.getResultList().size());
queryMetricReq.setDomainId(2L); queryMetricReq.setDomainId(2L);
queryMetricReq.setMetricNames(Arrays.asList("stay_hours", "pv")); queryMetricReq.setMetricNames(Arrays.asList("停留时长", "访问次数"));
queryMetricReq.setDimensionNames(Arrays.asList("user_name", "department")); queryMetricReq.setDimensionNames(Arrays.asList("用户名", "部门"));
assertThrows(IllegalArgumentException.class, assertThrows(IllegalArgumentException.class,
() -> queryByMetric(queryMetricReq, User.getDefaultUser())); () -> queryByMetric(queryMetricReq, User.getDefaultUser()));
} }
@@ -76,9 +65,10 @@ public class QueryByMetricTest extends BaseTest {
queryMetricReq.setDomainId(1L); queryMetricReq.setDomainId(1L);
queryMetricReq.setMetricIds(Arrays.asList(1L, 3L)); queryMetricReq.setMetricIds(Arrays.asList(1L, 3L));
queryMetricReq.setDimensionIds(Arrays.asList(1L, 2L)); queryMetricReq.setDimensionIds(Arrays.asList(1L, 2L));
queryMetricReq.getFilters().add(Filter.builder().name("imp_date") queryMetricReq.getFilters()
.operator(FilterOperatorEnum.MINOR_THAN_EQUALS).relation(Filter.Relation.FILTER) .add(Filter.builder().name("数据日期").operator(FilterOperatorEnum.MINOR_THAN_EQUALS)
.value(LocalDate.now().toString()).build()); .relation(Filter.Relation.FILTER).value(LocalDate.now().toString())
.build());
SemanticQueryResp queryResp = queryByMetric(queryMetricReq, User.getDefaultUser()); SemanticQueryResp queryResp = queryByMetric(queryMetricReq, User.getDefaultUser());
Assert.assertNotNull(queryResp.getResultList()); Assert.assertNotNull(queryResp.getResultList());
Assert.assertEquals(6, queryResp.getResultList().size()); Assert.assertEquals(6, queryResp.getResultList().size());

View File

@@ -19,11 +19,11 @@ public class QueryBySqlTest extends BaseTest {
@Test @Test
public void testDetailQuery() throws Exception { public void testDetailQuery() throws Exception {
SemanticQueryResp semanticQueryResp = SemanticQueryResp semanticQueryResp =
queryBySql("SELECT 用户,访问次数 FROM 超音数PVUV统计 WHERE 用户='alice' "); queryBySql("SELECT 用户,访问次数 FROM 超音数PVUV统计 WHERE 用户='alice' ");
assertEquals(2, semanticQueryResp.getColumns().size()); assertEquals(2, semanticQueryResp.getColumns().size());
QueryColumn firstColumn = semanticQueryResp.getColumns().get(0); QueryColumn firstColumn = semanticQueryResp.getColumns().get(0);
assertEquals("用户", firstColumn.getName()); assertEquals("用户", firstColumn.getName());
QueryColumn secondColumn = semanticQueryResp.getColumns().get(1); QueryColumn secondColumn = semanticQueryResp.getColumns().get(1);
assertEquals("访问次数", secondColumn.getName()); assertEquals("访问次数", secondColumn.getName());
assertTrue(semanticQueryResp.getResultList().size() > 0); assertTrue(semanticQueryResp.getResultList().size() > 0);
@@ -87,17 +87,6 @@ public class QueryBySqlTest extends BaseTest {
assertTrue(result2.isUseCache()); assertTrue(result2.isUseCache());
} }
@Test
public void testBizNameQuery() throws Exception {
SemanticQueryResp result1 =
queryBySql("SELECT SUM(pv) FROM 超音数PVUV统计 WHERE department ='HR'");
SemanticQueryResp result2 = queryBySql("SELECT SUM(访问次数) FROM 超音数PVUV统计 WHERE 部门 ='HR'");
assertEquals(1, result1.getColumns().size());
assertEquals(1, result2.getColumns().size());
assertEquals(result1.getColumns().get(0), result2.getColumns().get(0));
assertEquals(result1.getResultList(), result2.getResultList());
}
@Test @Test
public void testAuthorization_model() { public void testAuthorization_model() {
User alice = DataUtils.getUserAlice(); User alice = DataUtils.getUserAlice();
@@ -108,27 +97,16 @@ public class QueryBySqlTest extends BaseTest {
@Test @Test
public void testAuthorization_sensitive_metric() throws Exception { public void testAuthorization_sensitive_metric() throws Exception {
User tom = DataUtils.getUserTom(); User tom = DataUtils.getUserAlice();
assertThrows(InvalidPermissionException.class, assertThrows(InvalidPermissionException.class,
() -> queryBySql("SELECT SUM(stay_hours) FROM 停留时长统计 WHERE department ='HR'", () -> queryBySql("SELECT pv_avg FROM 停留时长统计 WHERE department ='HR'", tom));
tom));
} }
@Test @Test
public void testAuthorization_sensitive_metric_jack() throws Exception { public void testAuthorization_sensitive_metric_jack() throws Exception {
User jack = DataUtils.getUserJack(); User jack = DataUtils.getUserJack();
SemanticQueryResp semanticQueryResp = SemanticQueryResp semanticQueryResp = queryBySql("SELECT SUM(停留时长) FROM 停留时长统计", jack);
queryBySql("SELECT SUM(stay_hours) FROM 停留时长统计", jack);
Assertions.assertTrue(semanticQueryResp.getResultList().size() > 0); Assertions.assertTrue(semanticQueryResp.getResultList().size() > 0);
} }
@Test
public void testAuthorization_row_permission() throws Exception {
User tom = DataUtils.getUserTom();
SemanticQueryResp semanticQueryResp =
queryBySql("SELECT SUM(pv) FROM 超音数PVUV统计 WHERE department ='HR'", tom);
Assertions.assertNotNull(semanticQueryResp.getQueryAuthorization().getMessage());
Assertions.assertTrue(semanticQueryResp.getSql().contains("user_name = 'tom'")
|| semanticQueryResp.getSql().contains("`user_name` = 'tom'"));
}
} }

View File

@@ -14,11 +14,7 @@ import com.tencent.supersonic.headless.core.cache.QueryCache;
import com.tencent.supersonic.headless.core.utils.ComponentFactory; import com.tencent.supersonic.headless.core.utils.ComponentFactory;
import com.tencent.supersonic.util.DataUtils; import com.tencent.supersonic.util.DataUtils;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.*;
import org.junit.jupiter.api.MethodOrderer;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestMethodOrder;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
@@ -32,13 +28,14 @@ import static org.junit.Assert.assertTrue;
@Slf4j @Slf4j
@TestMethodOrder(MethodOrderer.OrderAnnotation.class) @TestMethodOrder(MethodOrderer.OrderAnnotation.class)
@Disabled
public class QueryByStructTest extends BaseTest { public class QueryByStructTest extends BaseTest {
@Test @Test
@Order(0) @Order(0)
public void testCacheQuery() { public void testCacheQuery() {
QueryStructReq queryStructReq1 = buildQueryStructReq(Arrays.asList("department")); QueryStructReq queryStructReq1 = buildQueryStructReq(Arrays.asList("部门"));
QueryStructReq queryStructReq2 = buildQueryStructReq(Arrays.asList("department")); QueryStructReq queryStructReq2 = buildQueryStructReq(Arrays.asList("部门"));
QueryCache queryCache = ComponentFactory.getQueryCache(); QueryCache queryCache = ComponentFactory.getQueryCache();
String cacheKey1 = queryCache.getCacheKey(queryStructReq1); String cacheKey1 = queryCache.getCacheKey(queryStructReq1);
String cacheKey2 = queryCache.getCacheKey(queryStructReq2); String cacheKey2 = queryCache.getCacheKey(queryStructReq2);
@@ -48,16 +45,14 @@ public class QueryByStructTest extends BaseTest {
@Test @Test
public void testDetailQuery() throws Exception { public void testDetailQuery() throws Exception {
QueryStructReq queryStructReq = QueryStructReq queryStructReq =
buildQueryStructReq(Arrays.asList("user_name", "department"), QueryType.DETAIL); buildQueryStructReq(Arrays.asList("用户名", "部门"), QueryType.DETAIL);
SemanticQueryResp semanticQueryResp = SemanticQueryResp semanticQueryResp =
semanticLayerService.queryByReq(queryStructReq, User.getDefaultUser()); semanticLayerService.queryByReq(queryStructReq, User.getDefaultUser());
assertEquals(3, semanticQueryResp.getColumns().size()); assertEquals(3, semanticQueryResp.getColumns().size());
QueryColumn firstColumn = semanticQueryResp.getColumns().get(0); QueryColumn firstColumn = semanticQueryResp.getColumns().get(0);
assertEquals("用户", firstColumn.getName()); assertEquals("用户", firstColumn.getName());
QueryColumn secondColumn = semanticQueryResp.getColumns().get(1); QueryColumn secondColumn = semanticQueryResp.getColumns().get(1);
assertEquals("部门", secondColumn.getName()); assertEquals("部门", secondColumn.getName());
QueryColumn thirdColumn = semanticQueryResp.getColumns().get(2);
assertEquals("访问次数", thirdColumn.getName());
assertTrue(semanticQueryResp.getResultList().size() > 0); assertTrue(semanticQueryResp.getResultList().size() > 0);
} }
@@ -68,26 +63,26 @@ public class QueryByStructTest extends BaseTest {
semanticLayerService.queryByReq(queryStructReq, User.getDefaultUser()); semanticLayerService.queryByReq(queryStructReq, User.getDefaultUser());
assertEquals(1, semanticQueryResp.getColumns().size()); assertEquals(1, semanticQueryResp.getColumns().size());
QueryColumn queryColumn = semanticQueryResp.getColumns().get(0); QueryColumn queryColumn = semanticQueryResp.getColumns().get(0);
assertEquals("访问次数", queryColumn.getName()); assertEquals("停留时长", queryColumn.getName());
assertEquals(1, semanticQueryResp.getResultList().size()); assertEquals(1, semanticQueryResp.getResultList().size());
} }
@Test @Test
public void testGroupByQuery() throws Exception { public void testGroupByQuery() throws Exception {
QueryStructReq queryStructReq = buildQueryStructReq(Arrays.asList("department")); QueryStructReq queryStructReq = buildQueryStructReq(Arrays.asList("部门"));
SemanticQueryResp result = SemanticQueryResp result =
semanticLayerService.queryByReq(queryStructReq, User.getDefaultUser()); semanticLayerService.queryByReq(queryStructReq, User.getDefaultUser());
assertEquals(2, result.getColumns().size()); assertEquals(2, result.getColumns().size());
QueryColumn firstColumn = result.getColumns().get(0); QueryColumn firstColumn = result.getColumns().get(0);
QueryColumn secondColumn = result.getColumns().get(1); QueryColumn secondColumn = result.getColumns().get(1);
assertEquals("部门", firstColumn.getName()); assertEquals("部门", firstColumn.getName());
assertEquals("访问次数", secondColumn.getName()); assertEquals("停留时长", secondColumn.getName());
assertNotNull(result.getResultList().size()); assertNotNull(result.getResultList().size());
} }
@Test @Test
public void testFilterQuery() throws Exception { public void testFilterQuery() throws Exception {
QueryStructReq queryStructReq = buildQueryStructReq(Arrays.asList("department")); QueryStructReq queryStructReq = buildQueryStructReq(Arrays.asList("部门"));
List<Filter> dimensionFilters = new ArrayList<>(); List<Filter> dimensionFilters = new ArrayList<>();
Filter filter = new Filter(); Filter filter = new Filter();
filter.setName("部门"); filter.setName("部门");
@@ -103,16 +98,16 @@ public class QueryByStructTest extends BaseTest {
QueryColumn firstColumn = result.getColumns().get(0); QueryColumn firstColumn = result.getColumns().get(0);
QueryColumn secondColumn = result.getColumns().get(1); QueryColumn secondColumn = result.getColumns().get(1);
assertEquals("部门", firstColumn.getName()); assertEquals("部门", firstColumn.getName());
assertEquals("访问次数", secondColumn.getName()); assertEquals("停留时长", secondColumn.getName());
assertEquals(1, result.getResultList().size()); assertEquals(1, result.getResultList().size());
assertEquals("HR", result.getResultList().get(0).get("department").toString()); assertEquals("HR", result.getResultList().get(0).get("部门").toString());
} }
@Test @Test
public void testAuthorization_model() { public void testAuthorization_model() {
User alice = DataUtils.getUserAlice(); User alice = DataUtils.getUserAlice();
setDomainNotOpenToAll(); setDomainNotOpenToAll();
QueryStructReq queryStructReq1 = buildQueryStructReq(Arrays.asList("department")); QueryStructReq queryStructReq1 = buildQueryStructReq(Arrays.asList("部门"));
assertThrows(InvalidPermissionException.class, assertThrows(InvalidPermissionException.class,
() -> semanticLayerService.queryByReq(queryStructReq1, alice)); () -> semanticLayerService.queryByReq(queryStructReq1, alice));
} }
@@ -122,9 +117,8 @@ public class QueryByStructTest extends BaseTest {
User tom = DataUtils.getUserTom(); User tom = DataUtils.getUserTom();
Aggregator aggregator = new Aggregator(); Aggregator aggregator = new Aggregator();
aggregator.setFunc(AggOperatorEnum.SUM); aggregator.setFunc(AggOperatorEnum.SUM);
aggregator.setColumn("stay_hours"); aggregator.setColumn("人均访问次数");
QueryStructReq queryStructReq = QueryStructReq queryStructReq = buildQueryStructReq(Arrays.asList("部门"), aggregator);
buildQueryStructReq(Arrays.asList("department"), aggregator);
assertThrows(InvalidPermissionException.class, assertThrows(InvalidPermissionException.class,
() -> semanticLayerService.queryByReq(queryStructReq, tom)); () -> semanticLayerService.queryByReq(queryStructReq, tom));
} }
@@ -134,11 +128,11 @@ public class QueryByStructTest extends BaseTest {
User tom = DataUtils.getUserTom(); User tom = DataUtils.getUserTom();
Aggregator aggregator = new Aggregator(); Aggregator aggregator = new Aggregator();
aggregator.setFunc(AggOperatorEnum.SUM); aggregator.setFunc(AggOperatorEnum.SUM);
aggregator.setColumn("pv"); aggregator.setColumn("停留时长");
QueryStructReq queryStructReq1 = QueryStructReq queryStructReq1 =
buildQueryStructReq(Collections.singletonList("department"), aggregator); buildQueryStructReq(Collections.singletonList("部门"), aggregator);
SemanticQueryResp semanticQueryResp = semanticLayerService.queryByReq(queryStructReq1, tom); SemanticQueryResp semanticQueryResp = semanticLayerService.queryByReq(queryStructReq1, tom);
Assertions.assertNotNull(semanticQueryResp.getQueryAuthorization().getMessage()); Assertions.assertNotNull(semanticQueryResp.getQueryAuthorization().getMessage());
Assertions.assertTrue(semanticQueryResp.getSql().contains("user_name = 'tom'")); Assertions.assertTrue(semanticQueryResp.getSql().contains("用户名 = 'tom'"));
} }
} }

View File

@@ -1,51 +0,0 @@
package com.tencent.supersonic.headless;
import com.tencent.supersonic.common.pojo.User;
import com.tencent.supersonic.demo.S2VisitsDemo;
import com.tencent.supersonic.headless.api.pojo.request.QueryStructReq;
import com.tencent.supersonic.headless.api.pojo.response.SemanticTranslateResp;
import com.tencent.supersonic.headless.chat.utils.QueryReqBuilder;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import java.util.Collections;
import java.util.Optional;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
public class TranslateTest extends BaseTest {
private Long dataSetId;
@BeforeEach
public void init() {
agent = getAgentByName(S2VisitsDemo.AGENT_NAME);
schema = schemaService.getSemanticSchema(agent.getDataSetIds());
Optional<Long> id = agent.getDataSetIds().stream().findFirst();
dataSetId = id.orElse(1L);
}
@Test
public void testSqlExplain() throws Exception {
String sql = "SELECT 部门, SUM(访问次数) AS 访问次数 FROM 超音数PVUV统计 GROUP BY 部门 ";
SemanticTranslateResp explain = semanticLayerService
.translate(QueryReqBuilder.buildS2SQLReq(sql, dataSetId), User.getDefaultUser());
assertNotNull(explain);
assertNotNull(explain.getQuerySQL());
assertTrue(explain.getQuerySQL().contains("department"));
assertTrue(explain.getQuerySQL().contains("pv"));
}
@Test
public void testStructExplain() throws Exception {
QueryStructReq queryStructReq =
buildQueryStructReq(Collections.singletonList("department"));
SemanticTranslateResp explain =
semanticLayerService.translate(queryStructReq, User.getDefaultUser());
assertNotNull(explain);
assertNotNull(explain.getQuerySQL());
assertTrue(explain.getQuerySQL().contains("department"));
assertTrue(explain.getQuerySQL().contains("pv"));
}
}

View File

@@ -0,0 +1,114 @@
package com.tencent.supersonic.headless;
import com.tencent.supersonic.common.pojo.User;
import com.tencent.supersonic.demo.S2VisitsDemo;
import com.tencent.supersonic.headless.api.pojo.response.SemanticTranslateResp;
import com.tencent.supersonic.headless.chat.utils.QueryReqBuilder;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junitpioneer.jupiter.SetSystemProperty;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Objects;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
/**
 * Integration tests for the Translator module: each test feeds a S2SQL query
 * (written against logical/Chinese field names) into the semantic layer and
 * asserts that the translated physical SQL references the expected physical
 * columns (e.g. "department", "count(1)") and executes successfully.
 */
public class TranslatorTest extends BaseTest {

    private Long dataSetId;

    @BeforeEach
    public void init() {
        agent = getAgentByName(S2VisitsDemo.AGENT_NAME);
        // Guard BEFORE dereferencing: the original called agent.getDataSetIds()
        // prior to the Objects.nonNull(agent) check, so the check could never
        // prevent the NPE it was written for. Also avoid bare Optional.get();
        // fall back to dataset id 1L (same default as the former TranslateTest).
        if (Objects.nonNull(agent)) {
            schema = schemaService.getSemanticSchema(agent.getDataSetIds());
            dataSetId = agent.getDataSetIds().stream().findFirst().orElse(1L);
        }
    }

    /** Simple aggregate over a date range; UV-style metric translates to count(1). */
    @Test
    public void testSql() throws Exception {
        String sql =
                "SELECT SUM(访问次数) AS _总访问次数_ FROM 超音数数据集 WHERE 数据日期 >= '2024-11-15' AND 数据日期 <= '2024-12-15'";
        SemanticTranslateResp explain = semanticLayerService
                .translate(QueryReqBuilder.buildS2SQLReq(sql, dataSetId), User.getDefaultUser());
        assertNotNull(explain);
        assertNotNull(explain.getQuerySQL());
        assertTrue(explain.getQuerySQL().contains("count(1)"));
        executeSql(explain.getQuerySQL());
    }

    /** GROUP BY on a dimension; logical name 部门 must map to physical "department". */
    @Test
    public void testSql_1() throws Exception {
        String sql = "SELECT 部门, SUM(访问次数) AS 总访问次数 FROM 超音数PVUV统计 GROUP BY 部门 ";
        SemanticTranslateResp explain = semanticLayerService
                .translate(QueryReqBuilder.buildS2SQLReq(sql, dataSetId), User.getDefaultUser());
        assertNotNull(explain);
        assertNotNull(explain.getQuerySQL());
        assertTrue(explain.getQuerySQL().contains("department"));
        assertTrue(explain.getQuerySQL().contains("count(1)"));
        executeSql(explain.getQuerySQL());
    }

    /** CTE (WITH-clause) query with ORDER BY/LIMIT over the CTE's aggregate alias. */
    @Test
    @SetSystemProperty(key = "s2.test", value = "true")
    public void testSql_2() throws Exception {
        String sql =
                "WITH _department_visits_ AS (SELECT 部门, SUM(访问次数) AS _total_visits_ FROM 超音数数据集 WHERE 数据日期 >= '2024-11-15' AND 数据日期 <= '2024-12-15' GROUP BY 部门) SELECT 部门 FROM _department_visits_ ORDER BY _total_visits_ DESC LIMIT 2";
        SemanticTranslateResp explain = semanticLayerService
                .translate(QueryReqBuilder.buildS2SQLReq(sql, dataSetId), User.getDefaultUser());
        assertNotNull(explain);
        assertNotNull(explain.getQuerySQL());
        assertTrue(explain.getQuerySQL().toLowerCase().contains("department"));
        assertTrue(explain.getQuerySQL().toLowerCase().contains("count(1)"));
        executeSql(explain.getQuerySQL());
    }

    /** CTE filtered on a dimension value, selecting a detail field in the outer query. */
    @Test
    @SetSystemProperty(key = "s2.test", value = "true")
    public void testSql_3() throws Exception {
        String sql =
                "WITH recent_data AS (SELECT 用户名, 访问次数 FROM 超音数数据集 WHERE 部门 = 'marketing' AND 数据日期 >= '2024-12-01' AND 数据日期 <= '2024-12-15') SELECT 用户名 FROM recent_data ORDER BY 访问次数 DESC LIMIT 1";
        SemanticTranslateResp explain = semanticLayerService
                .translate(QueryReqBuilder.buildS2SQLReq(sql, dataSetId), User.getDefaultUser());
        assertNotNull(explain);
        assertNotNull(explain.getQuerySQL());
        assertTrue(explain.getQuerySQL().toLowerCase().contains("department"));
        assertTrue(explain.getQuerySQL().toLowerCase().contains("count(1)"));
        executeSql(explain.getQuerySQL());
    }

    /** UNION ALL of two CTEs, loaded from a fixture file (must be read as UTF-8). */
    @Test
    @SetSystemProperty(key = "s2.test", value = "true")
    public void testSql_unionALL() throws Exception {
        String sql = new String(
                Files.readAllBytes(
                        Paths.get(ClassLoader.getSystemResource("sql/testUnion.sql").toURI())),
                StandardCharsets.UTF_8);
        SemanticTranslateResp explain = semanticLayerService
                .translate(QueryReqBuilder.buildS2SQLReq(sql, dataSetId), User.getDefaultUser());
        assertNotNull(explain);
        assertNotNull(explain.getQuerySQL());
        assertTrue(explain.getQuerySQL().contains("department"));
        assertTrue(explain.getQuerySQL().contains("pv"));
        executeSql(explain.getQuerySQL());
    }

    /** WITH-clause query using YEAR()/WEEK() functions, loaded from a fixture file. */
    @Test
    @SetSystemProperty(key = "s2.test", value = "true")
    public void testSql_with() throws Exception {
        String sql = new String(
                Files.readAllBytes(
                        Paths.get(ClassLoader.getSystemResource("sql/testWith.sql").toURI())),
                StandardCharsets.UTF_8);
        SemanticTranslateResp explain = semanticLayerService
                .translate(QueryReqBuilder.buildS2SQLReq(sql, dataSetId), User.getDefaultUser());
        assertNotNull(explain);
        assertNotNull(explain.getQuerySQL());
        executeSql(explain.getQuerySQL());
    }
}

View File

@@ -74,6 +74,7 @@ public class DataUtils {
public static DateConf getDateConf(DateConf.DateMode dateMode, Integer unit, public static DateConf getDateConf(DateConf.DateMode dateMode, Integer unit,
DatePeriodEnum period, String startDate, String endDate) { DatePeriodEnum period, String startDate, String endDate) {
DateConf dateInfo = new DateConf(); DateConf dateInfo = new DateConf();
dateInfo.setDateField("imp_date");
dateInfo.setUnit(unit); dateInfo.setUnit(unit);
dateInfo.setDateMode(dateMode); dateInfo.setDateMode(dateMode);
dateInfo.setPeriod(period); dateInfo.setPeriod(period);

View File

@@ -0,0 +1,34 @@
WITH
recent_week AS (
SELECT
SUM(访问次数) AS _访问次数_,
COUNT(DISTINCT 用户名) AS _访问用户数_
FROM 超音数数据集
WHERE
数据日期 >= '2024-12-20'
AND 数据日期 <= '2024-12-27'
),
first_week_december AS (
SELECT
SUM(访问次数) AS _访问次数_,
COUNT(DISTINCT 用户名) AS _访问用户数_
FROM 超音数数据集
WHERE
数据日期 >= '2024-12-01'
AND 数据日期 <= '2024-12-07'
)
SELECT
'最近7天' AS _时间段_,
_访问次数_,
_访问用户数_
FROM
recent_week
UNION ALL
SELECT
'12月第一个星期' AS _时间段_,
_访问次数_,
_访问用户数_
FROM
first_week_december

View File

@@ -0,0 +1,29 @@
WITH
weekly_visits AS (
SELECT
YEAR (数据日期) AS _year_,
WEEK (数据日期) AS _week_,
SUM(访问次数) AS total_visits
FROM 超音数数据集
WHERE
(
数据日期 >= '2024-11-18'
AND 数据日期 <= '2024-11-25'
)
GROUP BY
YEAR (数据日期),
WEEK (数据日期)
)
SELECT
_year_,
_week_,
total_visits
FROM
weekly_visits
WHERE
(_year_ = YEAR (CURRENT_DATE))
ORDER BY
total_visits DESC
LIMIT
1