Merge branch 'master' into feature/fontend_route_rebuild

# Conflicts:
#	webapp/packages/supersonic-fe/src/pages/SemanticModel/View/components/DataSetTable.tsx
#	webapp/packages/supersonic-fe/src/pages/SemanticModel/View/index.tsx
#	webapp/packages/supersonic-fe/src/pages/SemanticModel/components/DomainManagerTab.tsx
This commit is contained in:
tristanliu
2024-11-26 20:32:34 +08:00
67 changed files with 2166 additions and 2316 deletions

View File

@@ -528,7 +528,7 @@ public class SqlReplaceHelper {
} }
} }
private static Select replaceAggAliasOrderItem(Select selectStatement) { private static Select replaceAggAliasOrderbyField(Select selectStatement) {
if (selectStatement instanceof PlainSelect) { if (selectStatement instanceof PlainSelect) {
PlainSelect plainSelect = (PlainSelect) selectStatement; PlainSelect plainSelect = (PlainSelect) selectStatement;
if (Objects.nonNull(plainSelect.getOrderByElements())) { if (Objects.nonNull(plainSelect.getOrderByElements())) {
@@ -564,15 +564,15 @@ public class SqlReplaceHelper {
if (plainSelect.getFromItem() instanceof ParenthesedSelect) { if (plainSelect.getFromItem() instanceof ParenthesedSelect) {
ParenthesedSelect parenthesedSelect = (ParenthesedSelect) plainSelect.getFromItem(); ParenthesedSelect parenthesedSelect = (ParenthesedSelect) plainSelect.getFromItem();
parenthesedSelect parenthesedSelect
.setSelect(replaceAggAliasOrderItem(parenthesedSelect.getSelect())); .setSelect(replaceAggAliasOrderbyField(parenthesedSelect.getSelect()));
} }
return selectStatement; return selectStatement;
} }
return selectStatement; return selectStatement;
} }
public static String replaceAggAliasOrderItem(String sql) { public static String replaceAggAliasOrderbyField(String sql) {
Select selectStatement = replaceAggAliasOrderItem(SqlSelectHelper.getSelect(sql)); Select selectStatement = replaceAggAliasOrderbyField(SqlSelectHelper.getSelect(sql));
return selectStatement.toString(); return selectStatement.toString();
} }

View File

@@ -4,6 +4,7 @@ import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.common.pojo.DateConf; import com.tencent.supersonic.common.pojo.DateConf;
import com.tencent.supersonic.common.pojo.ItemDateResp; import com.tencent.supersonic.common.pojo.ItemDateResp;
import com.tencent.supersonic.common.pojo.enums.DatePeriodEnum; import com.tencent.supersonic.common.pojo.enums.DatePeriodEnum;
import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
import lombok.Data; import lombok.Data;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
@@ -32,14 +33,9 @@ import static com.tencent.supersonic.common.pojo.Constants.MONTH_FORMAT;
@Data @Data
public class DateModeUtils { public class DateModeUtils {
@Value("${s2.query.parameter.sys.date:sys_imp_date}") private final String sysDateCol = TimeDimensionEnum.DAY.getName();
private String sysDateCol; private final String sysDateMonthCol = TimeDimensionEnum.MONTH.getName();
private final String sysDateWeekCol = TimeDimensionEnum.WEEK.getName();
@Value("${s2.query.parameter.sys.month:sys_imp_month}")
private String sysDateMonthCol;
@Value("${s2.query.parameter.sys.month:sys_imp_week}")
private String sysDateWeekCol;
@Value("${s2.query.parameter.sys.zipper.begin:start_}") @Value("${s2.query.parameter.sys.zipper.begin:start_}")
private String sysZipperDateColBegin; private String sysZipperDateColBegin;

View File

@@ -16,6 +16,7 @@ import java.time.temporal.TemporalAdjuster;
import java.time.temporal.TemporalAdjusters; import java.time.temporal.TemporalAdjusters;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Calendar;
import java.util.Date; import java.util.Date;
import java.util.List; import java.util.List;
import java.util.Objects; import java.util.Objects;
@@ -201,6 +202,13 @@ public class DateUtils {
return false; return false;
} }
public static Long calculateDiffMs(Date createAt) {
Calendar calendar = Calendar.getInstance();
Date now = calendar.getTime();
long milliseconds = now.getTime() - createAt.getTime();
return milliseconds;
}
public static boolean isDateString(String value, String format) { public static boolean isDateString(String value, String format) {
try { try {
DateTimeFormatter formatter = DateTimeFormatter.ofPattern(format); DateTimeFormatter formatter = DateTimeFormatter.ofPattern(format);

View File

@@ -325,10 +325,10 @@ class SqlReplaceHelperTest {
} }
@Test @Test
void testReplaceAggAliasOrderItem() { void testReplaceAggAliasOrderbyField() {
String sql = "SELECT SUM(访问次数) AS top10总播放量 FROM (SELECT 部门, SUM(访问次数) AS 访问次数 FROM 超音数 " String sql = "SELECT SUM(访问次数) AS top10总播放量 FROM (SELECT 部门, SUM(访问次数) AS 访问次数 FROM 超音数 "
+ "GROUP BY 部门 ORDER BY SUM(访问次数) DESC LIMIT 10) AS top10"; + "GROUP BY 部门 ORDER BY SUM(访问次数) DESC LIMIT 10) AS top10";
String replaceSql = SqlReplaceHelper.replaceAggAliasOrderItem(sql); String replaceSql = SqlReplaceHelper.replaceAggAliasOrderbyField(sql);
Assert.assertEquals( Assert.assertEquals(
"SELECT SUM(访问次数) AS top10总播放量 FROM (SELECT 部门, SUM(访问次数) AS 访问次数 FROM 超音数 " "SELECT SUM(访问次数) AS top10总播放量 FROM (SELECT 部门, SUM(访问次数) AS 访问次数 FROM 超音数 "
+ "GROUP BY 部门 ORDER BY 2 DESC LIMIT 10) AS top10", + "GROUP BY 部门 ORDER BY 2 DESC LIMIT 10) AS top10",

View File

@@ -1,17 +0,0 @@
package com.tencent.supersonic.headless.api.pojo;
import com.google.common.collect.Lists;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import lombok.Data;
import java.util.List;
@Data
public class MetricTable {
private String alias;
private List<String> metrics = Lists.newArrayList();
private List<String> dimensions = Lists.newArrayList();
private String where;
private AggOption aggOption = AggOption.DEFAULT;
}

View File

@@ -1,24 +0,0 @@
package com.tencent.supersonic.headless.api.pojo.request;
import com.tencent.supersonic.headless.api.pojo.MetricTable;
import lombok.Data;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Data
public class ParseSqlReq {
private Map<String, String> variables;
private String sql = "";
private List<MetricTable> tables;
private boolean supportWith = true;
private boolean withAlias = true;
public Map<String, String> getVariables() {
if (variables == null) {
variables = new HashMap<>();
}
return variables;
}
}

View File

@@ -34,12 +34,11 @@ public class QueryFilter implements Serializable {
QueryFilter that = (QueryFilter) o; QueryFilter that = (QueryFilter) o;
return Objects.equal(bizName, that.bizName) && Objects.equal(name, that.name) return Objects.equal(bizName, that.bizName) && Objects.equal(name, that.name)
&& operator == that.operator && Objects.equal(value, that.value) && operator == that.operator && Objects.equal(value, that.value)
&& Objects.equal(elementID, that.elementID)
&& Objects.equal(function, that.function); && Objects.equal(function, that.function);
} }
@Override @Override
public int hashCode() { public int hashCode() {
return Objects.hashCode(bizName, name, operator, value, elementID, function); return Objects.hashCode(bizName, name, operator, value, function);
} }
} }

View File

@@ -0,0 +1,23 @@
package com.tencent.supersonic.headless.api.pojo.request;
import javax.validation.constraints.NotNull;
import com.tencent.supersonic.common.pojo.PageBaseReq;
import lombok.Data;
import java.util.List;
/**
* @author: kanedai
* @date: 2024/11/24
*/
@Data
public class ValueTaskQueryReq extends PageBaseReq {
@NotNull
private Long itemId;
private List<String> taskStatusList;
private String key;
}

View File

@@ -1,14 +0,0 @@
package com.tencent.supersonic.headless.core.pojo;
import com.tencent.supersonic.headless.api.pojo.MetricTable;
import lombok.Data;
import java.util.List;
@Data
public class DataSetQueryParam {
private String sql = "";
private List<MetricTable> tables;
private boolean supportWith = true;
private boolean withAlias = true;
}

View File

@@ -2,6 +2,7 @@ package com.tencent.supersonic.headless.core.pojo;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.tencent.supersonic.common.pojo.RecordInfo; import com.tencent.supersonic.common.pojo.RecordInfo;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.common.util.AESEncryptionUtil; import com.tencent.supersonic.common.util.AESEncryptionUtil;
import lombok.AllArgsConstructor; import lombok.AllArgsConstructor;
import lombok.Builder; import lombok.Builder;
@@ -36,7 +37,7 @@ public class Database extends RecordInfo {
private String schema; private String schema;
/** mysql,clickhouse */ /** mysql,clickhouse */
private String type; private EngineType type;
private List<String> admins = Lists.newArrayList(); private List<String> admins = Lists.newArrayList();

View File

@@ -1,17 +0,0 @@
package com.tencent.supersonic.headless.core.pojo;
import com.tencent.supersonic.common.pojo.ColumnOrder;
import lombok.Data;
import java.util.List;
@Data
public class MetricQueryParam {
private List<String> metrics;
private List<String> dimensions;
private String where;
private Long limit;
private List<ColumnOrder> order;
private boolean nativeQuery = false;
}

View File

@@ -1,34 +1,25 @@
package com.tencent.supersonic.headless.core.pojo; package com.tencent.supersonic.headless.core.pojo;
import com.tencent.supersonic.headless.api.pojo.QueryParam;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp; import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.OntologyQueryParam;
import lombok.Data; import lombok.Data;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Triple; import org.apache.commons.lang3.tuple.Triple;
import java.util.List;
@Data @Data
public class QueryStatement { public class QueryStatement {
private Long dataSetId; private Long dataSetId;
private List<Long> modelIds;
private String sql; private String sql;
private String errMsg; private String errMsg;
private QueryParam queryParam; private StructQueryParam structQueryParam;
private MetricQueryParam metricQueryParam; private SqlQueryParam sqlQueryParam;
private DataSetQueryParam dataSetQueryParam; private OntologyQueryParam ontologyQueryParam;
private Integer status = 0; private Integer status = 0;
private Boolean isS2SQL = false; private Boolean isS2SQL = false;
private List<ImmutablePair<String, String>> timeRanges;
private Boolean enableOptimize = true; private Boolean enableOptimize = true;
private Triple<String, String, String> minMaxTime; private Triple<String, String, String> minMaxTime;
private String dataSetSql;
private String dataSetAlias;
private String dataSetSimplifySql;
private Boolean enableLimitWrapper = false;
private Ontology ontology; private Ontology ontology;
private SemanticSchemaResp semanticSchemaResp; private SemanticSchemaResp semanticSchemaResp;
private Integer limit = 1000; private Integer limit = 1000;
@@ -41,9 +32,4 @@ public class QueryStatement {
public boolean isTranslated() { public boolean isTranslated() {
return isTranslated != null && isTranslated && isOk(); return isTranslated != null && isTranslated && isOk();
} }
public QueryStatement error(String msg) {
this.setErrMsg(msg);
return this;
}
} }

View File

@@ -0,0 +1,12 @@
package com.tencent.supersonic.headless.core.pojo;
import lombok.Data;
@Data
public class SqlQueryParam {
private String sql;
private String table;
private boolean supportWith = true;
private boolean withAlias = true;
private String simplifiedSql;
}

View File

@@ -1,21 +1,18 @@
package com.tencent.supersonic.headless.api.pojo; package com.tencent.supersonic.headless.core.pojo;
import com.tencent.supersonic.common.pojo.Aggregator; import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.common.pojo.DateConf; import com.tencent.supersonic.common.pojo.DateConf;
import com.tencent.supersonic.common.pojo.Filter; import com.tencent.supersonic.common.pojo.Filter;
import com.tencent.supersonic.common.pojo.Order; import com.tencent.supersonic.common.pojo.Order;
import com.tencent.supersonic.common.pojo.enums.QueryType; import com.tencent.supersonic.common.pojo.enums.QueryType;
import com.tencent.supersonic.headless.api.pojo.Param;
import lombok.Data; import lombok.Data;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Set;
@Data @Data
public class QueryParam { public class StructQueryParam {
// struct
private List<String> groups = new ArrayList(); private List<String> groups = new ArrayList();
private List<Aggregator> aggregators = new ArrayList(); private List<Aggregator> aggregators = new ArrayList();
private List<Order> orders = new ArrayList(); private List<Order> orders = new ArrayList();
@@ -24,17 +21,5 @@ public class QueryParam {
private DateConf dateInfo; private DateConf dateInfo;
private Long limit = 2000L; private Long limit = 2000L;
private QueryType queryType; private QueryType queryType;
private String s2SQL;
private String correctS2SQL;
private Long dataSetId;
private String dataSetName;
private Set<Long> modelIds = new HashSet<>();
private List<Param> params = new ArrayList<>(); private List<Param> params = new ArrayList<>();
// metric
private List<String> metrics = new ArrayList();
private List<String> dimensions;
private String where;
private List<ColumnOrder> order;
private boolean nativeQuery = false;
} }

View File

@@ -1,507 +1,99 @@
package com.tencent.supersonic.headless.core.translator; package com.tencent.supersonic.headless.core.translator;
import com.tencent.supersonic.common.calcite.SqlMergeWithUtils; import com.tencent.supersonic.common.calcite.SqlMergeWithUtils;
import com.tencent.supersonic.common.jsqlparser.SqlRemoveHelper;
import com.tencent.supersonic.common.jsqlparser.SqlReplaceHelper;
import com.tencent.supersonic.common.jsqlparser.SqlSelectFunctionHelper;
import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper; import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.common.pojo.enums.AggOperatorEnum;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.common.pojo.enums.QueryType;
import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
import com.tencent.supersonic.common.util.StringUtil;
import com.tencent.supersonic.headless.api.pojo.Measure;
import com.tencent.supersonic.headless.api.pojo.MetricTable;
import com.tencent.supersonic.headless.api.pojo.QueryParam;
import com.tencent.supersonic.headless.api.pojo.SchemaItem;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.api.pojo.enums.MetricType;
import com.tencent.supersonic.headless.api.pojo.request.QueryStructReq;
import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.core.adaptor.db.DbAdaptor;
import com.tencent.supersonic.headless.core.adaptor.db.DbAdaptorFactory;
import com.tencent.supersonic.headless.core.pojo.DataSetQueryParam;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology; import com.tencent.supersonic.headless.core.pojo.SqlQueryParam;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.OntologyQueryParam;
import com.tencent.supersonic.headless.core.translator.converter.QueryConverter; import com.tencent.supersonic.headless.core.translator.converter.QueryConverter;
import com.tencent.supersonic.headless.core.utils.ComponentFactory; import com.tencent.supersonic.headless.core.utils.ComponentFactory;
import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair; import org.apache.commons.lang3.tuple.Pair;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import java.util.stream.Stream;
@Component @Component
@Slf4j @Slf4j
public class DefaultSemanticTranslator implements SemanticTranslator { public class DefaultSemanticTranslator implements SemanticTranslator {
@Autowired
private SqlGenerateUtils sqlGenerateUtils;
public void translate(QueryStatement queryStatement) { public void translate(QueryStatement queryStatement) {
if (queryStatement.isTranslated()) { if (queryStatement.isTranslated()) {
return; return;
} }
try { try {
preprocess(queryStatement); for (QueryConverter converter : ComponentFactory.getQueryConverters()) {
parse(queryStatement); if (converter.accept(queryStatement)) {
optimize(queryStatement); log.debug("QueryConverter accept [{}]", converter.getClass().getName());
converter.convert(queryStatement);
}
}
doOntologyParse(queryStatement);
if (StringUtils.isNotBlank(queryStatement.getSqlQueryParam().getSimplifiedSql())) {
queryStatement.setSql(queryStatement.getSqlQueryParam().getSimplifiedSql());
}
if (StringUtils.isBlank(queryStatement.getSql())) {
throw new RuntimeException("parse exception: " + queryStatement.getErrMsg());
}
if (!SqlSelectHelper.hasLimit(queryStatement.getSql())) {
queryStatement
.setSql(queryStatement.getSql() + " limit " + queryStatement.getLimit());
}
for (QueryOptimizer queryOptimizer : ComponentFactory.getQueryOptimizers()) {
queryOptimizer.rewrite(queryStatement);
}
} catch (Exception e) { } catch (Exception e) {
queryStatement.setErrMsg(e.getMessage()); queryStatement.setErrMsg(e.getMessage());
log.error("Failed to translate semantic query [{}]", e.getMessage(), e);
} }
} }
private void parse(QueryStatement queryStatement) throws Exception { private void doOntologyParse(QueryStatement queryStatement) throws Exception {
QueryParam queryParam = queryStatement.getQueryParam(); OntologyQueryParam ontologyQueryParam = queryStatement.getOntologyQueryParam();
if (Objects.isNull(queryStatement.getDataSetQueryParam())) { log.info("parse with ontology: [{}]", ontologyQueryParam);
queryStatement.setDataSetQueryParam(new DataSetQueryParam()); ComponentFactory.getQueryParser().parse(queryStatement);
} if (!queryStatement.isOk()) {
if (Objects.isNull(queryStatement.getMetricQueryParam())) { throw new Exception(String.format("parse ontology table [%s] error [%s]",
queryStatement.setMetricQueryParam(new MetricQueryParam()); queryStatement.getSqlQueryParam().getTable(), queryStatement.getErrMsg()));
} }
log.debug("SemanticConverter before [{}]", queryParam); SqlQueryParam sqlQueryParam = queryStatement.getSqlQueryParam();
for (QueryConverter headlessConverter : ComponentFactory.getQueryConverters()) { String ontologyQuerySql = sqlQueryParam.getSql();
if (headlessConverter.accept(queryStatement)) { String ontologyInnerTable = sqlQueryParam.getTable();
log.debug("SemanticConverter accept [{}]", headlessConverter.getClass().getName()); String ontologyInnerSql = queryStatement.getSql();
headlessConverter.convert(queryStatement);
List<Pair<String, String>> tables = new ArrayList<>();
tables.add(Pair.of(ontologyInnerTable, ontologyInnerSql));
if (sqlQueryParam.isSupportWith()) {
EngineType engineType = queryStatement.getOntology().getDatabase().getType();
if (!SqlMergeWithUtils.hasWith(engineType, ontologyQuerySql)) {
String withSql = "with " + tables.stream()
.map(t -> String.format("%s as (%s)", t.getLeft(), t.getRight()))
.collect(Collectors.joining(",")) + "\n" + ontologyQuerySql;
queryStatement.setSql(withSql);
} else {
List<String> withTableList =
tables.stream().map(Pair::getLeft).collect(Collectors.toList());
List<String> withSqlList =
tables.stream().map(Pair::getRight).collect(Collectors.toList());
String mergeSql = SqlMergeWithUtils.mergeWith(engineType, ontologyQuerySql,
withSqlList, withTableList);
queryStatement.setSql(mergeSql);
} }
}
log.debug("SemanticConverter after {} {} {}", queryParam,
queryStatement.getDataSetQueryParam(), queryStatement.getMetricQueryParam());
if (!queryStatement.getDataSetQueryParam().getSql().isEmpty()) {
doParse(queryStatement.getDataSetQueryParam(), queryStatement);
} else { } else {
queryStatement.getMetricQueryParam() for (Pair<String, String> tb : tables) {
.setNativeQuery(queryParam.getQueryType().isNativeAggQuery()); ontologyQuerySql =
doParse(queryStatement, StringUtils.replace(ontologyQuerySql, tb.getLeft(), "(" + tb.getRight()
AggOption.getAggregation(queryStatement.getMetricQueryParam().isNativeQuery())); + ") " + (sqlQueryParam.isWithAlias() ? "" : tb.getLeft()), -1);
}
if (StringUtils.isEmpty(queryStatement.getSql())) {
throw new RuntimeException("parse Exception: " + queryStatement.getErrMsg());
}
if (StringUtils.isNotBlank(queryStatement.getSql())
&& !SqlSelectHelper.hasLimit(queryStatement.getSql())) {
String querySql =
queryStatement.getSql() + " limit " + queryStatement.getLimit().toString();
queryStatement.setSql(querySql);
}
}
private QueryStatement doParse(DataSetQueryParam dataSetQueryParam,
QueryStatement queryStatement) {
log.info("parse dataSetQuery [{}] ", dataSetQueryParam);
Ontology ontology = queryStatement.getOntology();
EngineType engineType = EngineType.fromString(ontology.getDatabase().getType());
try {
if (!CollectionUtils.isEmpty(dataSetQueryParam.getTables())) {
List<String[]> tables = new ArrayList<>();
boolean isSingleTable = dataSetQueryParam.getTables().size() == 1;
for (MetricTable metricTable : dataSetQueryParam.getTables()) {
QueryStatement tableSql = parserSql(metricTable, isSingleTable,
dataSetQueryParam, queryStatement);
if (isSingleTable && StringUtils.isNotBlank(tableSql.getDataSetSimplifySql())) {
queryStatement.setSql(tableSql.getDataSetSimplifySql());
queryStatement.setDataSetQueryParam(dataSetQueryParam);
return queryStatement;
}
tables.add(new String[] {metricTable.getAlias(), tableSql.getSql()});
}
if (!tables.isEmpty()) {
String sql;
if (dataSetQueryParam.isSupportWith()) {
if (!SqlMergeWithUtils.hasWith(engineType, dataSetQueryParam.getSql())) {
sql = "with "
+ tables.stream()
.map(t -> String.format("%s as (%s)", t[0], t[1]))
.collect(Collectors.joining(","))
+ "\n" + dataSetQueryParam.getSql();
} else {
List<String> parentWithNameList = tables.stream().map(table -> table[0])
.collect(Collectors.toList());
List<String> parentSqlList = tables.stream().map(table -> table[1])
.collect(Collectors.toList());
sql = SqlMergeWithUtils.mergeWith(engineType,
dataSetQueryParam.getSql(), parentSqlList, parentWithNameList);
}
} else {
sql = dataSetQueryParam.getSql();
for (String[] tb : tables) {
sql = StringUtils.replace(sql, tb[0], "(" + tb[1] + ") "
+ (dataSetQueryParam.isWithAlias() ? "" : tb[0]), -1);
}
}
queryStatement.setSql(sql);
queryStatement.setDataSetQueryParam(dataSetQueryParam);
return queryStatement;
}
} }
} catch (Exception e) { queryStatement.setSql(ontologyQuerySql);
log.error("physicalSql error {}", e);
queryStatement.setErrMsg(e.getMessage());
}
return queryStatement;
}
private QueryStatement doParse(QueryStatement queryStatement, AggOption isAgg) {
MetricQueryParam metricQueryParam = queryStatement.getMetricQueryParam();
log.info("parse metricQuery [{}] isAgg [{}]", metricQueryParam, isAgg);
try {
ComponentFactory.getQueryParser().parse(queryStatement, isAgg);
} catch (Exception e) {
queryStatement.setErrMsg(e.getMessage());
log.error("parser error metricQueryReq[{}] error [{}]", metricQueryParam, e);
}
return queryStatement;
}
private QueryStatement parserSql(MetricTable metricTable, Boolean isSingleMetricTable,
DataSetQueryParam dataSetQueryParam, QueryStatement queryStatement) throws Exception {
MetricQueryParam metricQueryParam = new MetricQueryParam();
metricQueryParam.setMetrics(metricTable.getMetrics());
metricQueryParam.setDimensions(metricTable.getDimensions());
metricQueryParam.setWhere(StringUtil.formatSqlQuota(metricTable.getWhere()));
metricQueryParam.setNativeQuery(!AggOption.isAgg(metricTable.getAggOption()));
QueryStatement tableSql = new QueryStatement();
tableSql.setIsS2SQL(false);
tableSql.setMetricQueryParam(metricQueryParam);
tableSql.setMinMaxTime(queryStatement.getMinMaxTime());
tableSql.setEnableOptimize(queryStatement.getEnableOptimize());
tableSql.setDataSetId(queryStatement.getDataSetId());
tableSql.setOntology(queryStatement.getOntology());
if (isSingleMetricTable) {
tableSql.setDataSetSql(dataSetQueryParam.getSql());
tableSql.setDataSetAlias(metricTable.getAlias());
}
tableSql = doParse(tableSql, metricTable.getAggOption());
if (!tableSql.isOk()) {
throw new Exception(String.format("parser table [%s] error [%s]",
metricTable.getAlias(), tableSql.getErrMsg()));
}
return tableSql;
}
private void optimize(QueryStatement queryStatement) {
for (QueryOptimizer queryOptimizer : ComponentFactory.getQueryOptimizers()) {
queryOptimizer.rewrite(queryStatement);
} }
} }
private void preprocess(QueryStatement queryStatement) {
if (StringUtils.isBlank(queryStatement.getSql())) {
return;
}
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp();
convertNameToBizName(queryStatement);
rewriteFunction(queryStatement);
queryStatement.setSql(SqlRemoveHelper.removeUnderscores(queryStatement.getSql()));
String tableName = SqlSelectHelper.getTableName(queryStatement.getSql());
if (StringUtils.isEmpty(tableName)) {
return;
}
// correct order item is same as agg alias
String reqSql = queryStatement.getSql();
queryStatement.setSql(SqlReplaceHelper.replaceAggAliasOrderItem(queryStatement.getSql()));
log.debug("replaceOrderAggSameAlias {} -> {}", reqSql, queryStatement.getSql());
// 5.build MetricTables
List<String> allFields = SqlSelectHelper.getAllSelectFields(queryStatement.getSql());
List<MetricSchemaResp> metricSchemas = getMetrics(semanticSchemaResp, allFields);
List<String> metrics =
metricSchemas.stream().map(SchemaItem::getBizName).collect(Collectors.toList());
Set<String> dimensions = getDimensions(semanticSchemaResp, allFields);
QueryStructReq queryStructReq = new QueryStructReq();
MetricTable metricTable = new MetricTable();
metricTable.getMetrics().addAll(metrics);
metricTable.getDimensions().addAll(dimensions);
metricTable.setAlias(tableName.toLowerCase());
// if metric empty , fill model default
if (CollectionUtils.isEmpty(metricTable.getMetrics())) {
metricTable.getMetrics().add(sqlGenerateUtils.generateInternalMetricName(
getDefaultModel(semanticSchemaResp, metricTable.getDimensions())));
} else {
queryStructReq.getAggregators()
.addAll(metricTable.getMetrics().stream()
.map(m -> new Aggregator(m, AggOperatorEnum.UNKNOWN))
.collect(Collectors.toList()));
}
AggOption aggOption = getAggOption(queryStatement, metricSchemas);
metricTable.setAggOption(aggOption);
List<MetricTable> tables = new ArrayList<>();
tables.add(metricTable);
// 6.build ParseSqlReq
DataSetQueryParam datasetQueryParam = new DataSetQueryParam();
datasetQueryParam.setTables(tables);
datasetQueryParam.setSql(queryStatement.getSql());
DatabaseResp database = semanticSchemaResp.getDatabaseResp();
if (!sqlGenerateUtils.isSupportWith(EngineType.fromString(database.getType().toUpperCase()),
database.getVersion())) {
datasetQueryParam.setSupportWith(false);
datasetQueryParam.setWithAlias(false);
}
// 7. do deriveMetric
generateDerivedMetric(semanticSchemaResp, aggOption, datasetQueryParam);
// 8.physicalSql by ParseSqlReq
// queryStructReq.setDateInfo(queryStructUtils.getDateConfBySql(queryStatement.getSql()));
queryStructReq.setDataSetId(queryStatement.getDataSetId());
queryStructReq.setQueryType(getQueryType(aggOption));
log.debug("QueryReqConverter queryStructReq[{}]", queryStructReq);
QueryParam queryParam = new QueryParam();
BeanUtils.copyProperties(queryStructReq, queryParam);
queryStatement.setQueryParam(queryParam);
queryStatement.setDataSetQueryParam(datasetQueryParam);
// queryStatement.setMinMaxTime(queryStructUtils.getBeginEndTime(queryStructReq));
}
private AggOption getAggOption(QueryStatement queryStatement,
List<MetricSchemaResp> metricSchemas) {
String sql = queryStatement.getSql();
if (!SqlSelectFunctionHelper.hasAggregateFunction(sql) && !SqlSelectHelper.hasGroupBy(sql)
&& !SqlSelectHelper.hasWith(sql) && !SqlSelectHelper.hasSubSelect(sql)) {
log.debug("getAggOption simple sql set to DEFAULT");
return AggOption.DEFAULT;
}
// if there is no group by in S2SQL,set MetricTable's aggOption to "NATIVE"
// if there is count() in S2SQL,set MetricTable's aggOption to "NATIVE"
if (!SqlSelectFunctionHelper.hasAggregateFunction(sql)
|| SqlSelectFunctionHelper.hasFunction(sql, "count")
|| SqlSelectFunctionHelper.hasFunction(sql, "count_distinct")) {
return AggOption.OUTER;
}
// if (queryStatement.isInnerLayerNative()) {
// return AggOption.NATIVE;
// }
if (SqlSelectHelper.hasSubSelect(sql) || SqlSelectHelper.hasWith(sql)
|| SqlSelectHelper.hasGroupBy(sql)) {
return AggOption.OUTER;
}
long defaultAggNullCnt = metricSchemas.stream().filter(
m -> Objects.isNull(m.getDefaultAgg()) || StringUtils.isBlank(m.getDefaultAgg()))
.count();
if (defaultAggNullCnt > 0) {
log.debug("getAggOption find null defaultAgg metric set to NATIVE");
return AggOption.OUTER;
}
return AggOption.DEFAULT;
}
private void convertNameToBizName(QueryStatement queryStatement) {
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp();
Map<String, String> fieldNameToBizNameMap = getFieldNameToBizNameMap(semanticSchemaResp);
String sql = queryStatement.getSql();
log.debug("dataSetId:{},convert name to bizName before:{}", queryStatement.getDataSetId(),
sql);
sql = SqlReplaceHelper.replaceSqlByPositions(sql);
log.debug("replaceSqlByPositions:{}", sql);
sql = SqlReplaceHelper.replaceFields(sql, fieldNameToBizNameMap, true);
log.debug("dataSetId:{},convert name to bizName after:{}", queryStatement.getDataSetId(),
sql);
sql = SqlReplaceHelper.replaceTable(sql,
Constants.TABLE_PREFIX + queryStatement.getDataSetId());
log.debug("replaceTableName after:{}", sql);
queryStatement.setSql(sql);
}
private Set<String> getDimensions(SemanticSchemaResp semanticSchemaResp,
List<String> allFields) {
Map<String, String> dimensionLowerToNameMap = semanticSchemaResp.getDimensions().stream()
.collect(Collectors.toMap(entry -> entry.getBizName().toLowerCase(),
SchemaItem::getBizName, (k1, k2) -> k1));
dimensionLowerToNameMap.put(TimeDimensionEnum.DAY.getName(),
TimeDimensionEnum.DAY.getName());
return allFields.stream()
.filter(entry -> dimensionLowerToNameMap.containsKey(entry.toLowerCase()))
.map(entry -> dimensionLowerToNameMap.get(entry.toLowerCase()))
.collect(Collectors.toSet());
}
/**
 * Returns the metric schema objects among {@code allFields}, matched
 * case-insensitively against the schema's metric bizNames.
 */
private List<MetricSchemaResp> getMetrics(SemanticSchemaResp semanticSchemaResp,
        List<String> allFields) {
    Map<String, MetricSchemaResp> lowerToMetric = semanticSchemaResp.getMetrics().stream()
            .collect(Collectors.toMap(metric -> metric.getBizName().toLowerCase(),
                    metric -> metric));
    List<MetricSchemaResp> matched = new ArrayList<>();
    for (String field : allFields) {
        MetricSchemaResp metric = lowerToMetric.get(field.toLowerCase());
        if (metric != null) {
            matched.add(metric);
        }
    }
    return matched;
}
/**
 * Adapts function names in the SQL to the dialect of the target database, when
 * a matching engine adaptor exists; otherwise leaves the SQL untouched.
 */
private void rewriteFunction(QueryStatement queryStatement) {
    DatabaseResp database = queryStatement.getSemanticSchemaResp().getDatabaseResp();
    // No database or unknown engine type: nothing to rewrite.
    if (Objects.isNull(database) || Objects.isNull(database.getType())) {
        return;
    }
    DbAdaptor adaptor = DbAdaptorFactory.getEngineAdaptor(database.getType().toLowerCase());
    if (Objects.isNull(adaptor)) {
        return;
    }
    queryStatement.setSql(adaptor.functionNameCorrector(queryStatement.getSql()));
}
/**
 * Builds a lookup from display name (and every alias) to bizName, covering both
 * dimensions and metrics; the standard time-dimension names are always included.
 * On key collisions, the first mapping wins; metric mappings override dimension
 * and time-dimension mappings.
 */
protected Map<String, String> getFieldNameToBizNameMap(SemanticSchemaResp semanticSchemaResp) {
    Map<String, String> mapping = semanticSchemaResp.getDimensions().stream()
            .flatMap(dim -> getPairStream(dim.getAlias(), dim.getName(), dim.getBizName()))
            .collect(Collectors.toMap(Pair::getLeft, Pair::getRight,
                    (first, second) -> first));
    Map<String, String> metricMapping = semanticSchemaResp.getMetrics().stream()
            .flatMap(metric -> getPairStream(metric.getAlias(), metric.getName(),
                    metric.getBizName()))
            .collect(Collectors.toMap(Pair::getLeft, Pair::getRight,
                    (first, second) -> first));
    // Order matters: metric names must take precedence over everything else.
    mapping.putAll(TimeDimensionEnum.getChNameToNameMap());
    mapping.putAll(TimeDimensionEnum.getNameToNameMap());
    mapping.putAll(metricMapping);
    return mapping;
}
/**
 * Emits (name, bizName) plus one (alias, bizName) pair per alias parsed from
 * {@code aliasStr}; duplicate pairs are removed.
 */
private Stream<Pair<String, String>> getPairStream(String aliasStr, String name,
        String bizName) {
    Stream<Pair<String, String>> canonical = Stream.of(Pair.of(name, bizName));
    if (StringUtils.isBlank(aliasStr)) {
        return canonical;
    }
    Stream<Pair<String, String>> aliased = SchemaItem.getAliasList(aliasStr).stream()
            .map(alias -> Pair.of(alias, bizName));
    // distinct() collapses an alias that repeats the canonical name.
    return Stream.concat(canonical, aliased).distinct();
}
/** Maps an aggregation option to a query type: aggregated options become
 *  {@code AGGREGATE}, everything else is a {@code DETAIL} query. */
private QueryType getQueryType(AggOption aggOption) {
    return AggOption.isAgg(aggOption) ? QueryType.AGGREGATE : QueryType.DETAIL;
}
/**
 * Rewrites derived metrics in the data-set query, table by table: each derived
 * metric referenced by a table is replaced in the SQL by its measure expression,
 * and the table's metric list is swapped to the underlying measures.
 *
 * @param semanticSchemaResp schema supplying metric/dimension/model definitions
 * @param aggOption aggregation option forwarded to expression generation
 * @param viewQueryParam query whose SQL and tables are mutated in place
 */
private void generateDerivedMetric(SemanticSchemaResp semanticSchemaResp, AggOption aggOption,
        DataSetQueryParam viewQueryParam) {
    String sql = viewQueryParam.getSql();
    for (MetricTable metricTable : viewQueryParam.getTables()) {
        Set<String> measures = new HashSet<>();
        // Overload below returns bizName -> expression for the derived metrics
        // of this table and fills `measures` with the plain (non-derived) ones.
        Map<String, String> replaces = generateDerivedMetric(semanticSchemaResp, aggOption,
                metricTable.getMetrics(), metricTable.getDimensions(), measures);
        if (!CollectionUtils.isEmpty(replaces)) {
            // metricTable sql use measures replace metric
            sql = SqlReplaceHelper.replaceSqlByExpression(sql, replaces);
            // Expressions are already expanded, so the table must not re-aggregate.
            metricTable.setAggOption(AggOption.NATIVE);
            // metricTable use measures replace metric
            if (!CollectionUtils.isEmpty(measures)) {
                metricTable.setMetrics(new ArrayList<>(measures));
            } else {
                // empty measure , fill default
                metricTable.setMetrics(new ArrayList<>());
                metricTable.getMetrics().add(sqlGenerateUtils.generateInternalMetricName(
                        getDefaultModel(semanticSchemaResp, metricTable.getDimensions())));
            }
        }
    }
    viewQueryParam.setSql(sql);
}
/**
 * Expands the derived metrics among {@code metrics} into measure expressions.
 *
 * Returns a map of metric bizName -> generated expression (empty if no queried
 * metric is derived). As side effects, non-derived queried metrics and any
 * measures discovered while expanding are added to {@code measures}, and
 * dimensions required by the expansion are appended to {@code dimensions}.
 *
 * @param semanticSchemaResp schema supplying metrics, dimensions and models
 * @param aggOption aggregation option forwarded to expression generation
 * @param metrics queried metric bizNames
 * @param dimensions queried dimension bizNames; mutated in place
 * @param measures output set of underlying measure names; mutated in place
 */
private Map<String, String> generateDerivedMetric(SemanticSchemaResp semanticSchemaResp,
        AggOption aggOption, List<String> metrics, List<String> dimensions,
        Set<String> measures) {
    Map<String, String> result = new HashMap<>();
    List<MetricSchemaResp> metricResps = semanticSchemaResp.getMetrics();
    List<DimSchemaResp> dimensionResps = semanticSchemaResp.getDimensions();
    // Check if any metric is derived
    boolean hasDerivedMetrics =
            metricResps.stream().anyMatch(m -> metrics.contains(m.getBizName()) && MetricType
                    .isDerived(m.getMetricDefineType(), m.getMetricDefineByMeasureParams()));
    if (!hasDerivedMetrics) {
        return result;
    }
    log.debug("begin to generateDerivedMetric {} [{}]", aggOption, metrics);
    // Collect every field and measure across all models; expressions may span models.
    Set<String> allFields = new HashSet<>();
    Map<String, Measure> allMeasures = new HashMap<>();
    semanticSchemaResp.getModelResps().forEach(modelResp -> {
        allFields.addAll(modelResp.getFieldList());
        if (modelResp.getModelDetail().getMeasures() != null) {
            modelResp.getModelDetail().getMeasures()
                    .forEach(measure -> allMeasures.put(measure.getBizName(), measure));
        }
    });
    // Accumulators filled by the recursive expansion in sqlGenerateUtils.
    Set<String> derivedDimensions = new HashSet<>();
    Set<String> derivedMetrics = new HashSet<>();
    Map<String, String> visitedMetrics = new HashMap<>();
    for (MetricResp metricResp : metricResps) {
        if (metrics.contains(metricResp.getBizName())) {
            boolean isDerived = MetricType.isDerived(metricResp.getMetricDefineType(),
                    metricResp.getMetricDefineByMeasureParams());
            if (isDerived) {
                String expr = sqlGenerateUtils.generateDerivedMetric(metricResps, allFields,
                        allMeasures, dimensionResps, sqlGenerateUtils.getExpr(metricResp),
                        metricResp.getMetricDefineType(), aggOption, visitedMetrics,
                        derivedMetrics, derivedDimensions);
                result.put(metricResp.getBizName(), expr);
                log.debug("derived metric {}->{}", metricResp.getBizName(), expr);
            } else {
                // Plain metric: surfaces directly as a measure.
                measures.add(metricResp.getBizName());
            }
        }
    }
    measures.addAll(derivedMetrics);
    // Only append dimensions the caller did not already request.
    derivedDimensions.stream().filter(dimension -> !dimensions.contains(dimension))
            .forEach(dimensions::add);
    return result;
}
/**
 * Chooses the model whose detail dimensions overlap the queried dimensions the
 * most; with no dimensions to match, falls back to the first model's bizName.
 */
private String getDefaultModel(SemanticSchemaResp semanticSchemaResp, List<String> dimensions) {
    if (CollectionUtils.isEmpty(dimensions)) {
        return semanticSchemaResp.getModelResps().get(0).getBizName();
    }
    Map<String, Long> matchCounts = new HashMap<>();
    for (ModelResp modelResp : semanticSchemaResp.getModelResps()) {
        long hits = modelResp.getModelDetail().getDimensions().stream()
                .filter(dim -> dimensions.contains(dim.getBizName())).count();
        matchCounts.put(modelResp.getBizName(), hits);
    }
    // Highest overlap wins; empty string if the schema has no models at all.
    return matchCounts.entrySet().stream().max(Map.Entry.comparingByValue())
            .map(Map.Entry::getKey).orElse("");
}
} }

View File

@@ -1,14 +1,12 @@
package com.tencent.supersonic.headless.core.translator; package com.tencent.supersonic.headless.core.translator;
import com.tencent.supersonic.headless.api.pojo.QueryParam;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.StructQueryParam;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.util.Objects; import java.util.Objects;
import java.util.stream.Collectors;
/** Remove the default metric added by the system when the query only has dimensions */ /** Remove the default metric added by the system when the query only has dimensions */
@Slf4j @Slf4j
@@ -17,26 +15,26 @@ public class DetailQueryOptimizer implements QueryOptimizer {
@Override @Override
public void rewrite(QueryStatement queryStatement) { public void rewrite(QueryStatement queryStatement) {
QueryParam queryParam = queryStatement.getQueryParam(); StructQueryParam structQueryParam = queryStatement.getStructQueryParam();
String sqlRaw = queryStatement.getSql().trim(); String sqlRaw = queryStatement.getSql().trim();
if (StringUtils.isEmpty(sqlRaw)) { if (StringUtils.isEmpty(sqlRaw)) {
throw new RuntimeException("sql is empty or null"); throw new RuntimeException("sql is empty or null");
} }
log.debug("before handleNoMetric, sql:{}", sqlRaw); log.debug("before handleNoMetric, sql:{}", sqlRaw);
if (isDetailQuery(queryParam)) { // if (isDetailQuery(structQueryParam)) {
if (queryParam.getMetrics().size() == 0 // if (!CollectionUtils.isEmpty(structQueryParam.getGroups())) {
&& !CollectionUtils.isEmpty(queryParam.getGroups())) { // String sqlForm = "select %s from ( %s ) src_no_metric";
String sqlForm = "select %s from ( %s ) src_no_metric"; // String sql = String.format(sqlForm,
String sql = String.format(sqlForm, // structQueryParam.getGroups().stream().collect(Collectors.joining(",")),
queryParam.getGroups().stream().collect(Collectors.joining(",")), sqlRaw); // sqlRaw);
queryStatement.setSql(sql); // queryStatement.setSql(sql);
} // }
} // }
log.debug("after handleNoMetric, sql:{}", queryStatement.getSql()); log.debug("after handleNoMetric, sql:{}", queryStatement.getSql());
} }
public boolean isDetailQuery(QueryParam queryParam) { public boolean isDetailQuery(StructQueryParam structQueryParam) {
return Objects.nonNull(queryParam) && queryParam.getQueryType().isNativeAggQuery() return Objects.nonNull(structQueryParam)
&& CollectionUtils.isEmpty(queryParam.getMetrics()); && structQueryParam.getQueryType().isNativeAggQuery();
} }
} }

View File

@@ -1,9 +1,8 @@
package com.tencent.supersonic.headless.core.translator; package com.tencent.supersonic.headless.core.translator;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
/** A query parser generates physical SQL for the QueryStatement. */ /** A query parser generates physical SQL for the QueryStatement. */
public interface QueryParser { public interface QueryParser {
void parse(QueryStatement queryStatement, AggOption aggOption) throws Exception; void parse(QueryStatement queryStatement) throws Exception;
} }

View File

@@ -1,6 +1,5 @@
package com.tencent.supersonic.headless.core.translator.calcite; package com.tencent.supersonic.headless.core.translator.calcite;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.QueryParser; import com.tencent.supersonic.headless.core.translator.QueryParser;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology;
@@ -16,7 +15,7 @@ import org.springframework.stereotype.Component;
public class CalciteQueryParser implements QueryParser { public class CalciteQueryParser implements QueryParser {
@Override @Override
public void parse(QueryStatement queryStatement, AggOption isAgg) throws Exception { public void parse(QueryStatement queryStatement) throws Exception {
Ontology ontology = queryStatement.getOntology(); Ontology ontology = queryStatement.getOntology();
if (ontology == null) { if (ontology == null) {
queryStatement.setErrMsg("No ontology could be found"); queryStatement.setErrMsg("No ontology could be found");
@@ -29,7 +28,8 @@ public class CalciteQueryParser implements QueryParser {
.enableOptimize(queryStatement.getEnableOptimize()).build()) .enableOptimize(queryStatement.getEnableOptimize()).build())
.build(); .build();
SqlBuilder sqlBuilder = new SqlBuilder(semanticSchema); SqlBuilder sqlBuilder = new SqlBuilder(semanticSchema);
sqlBuilder.build(queryStatement, isAgg); String sql = sqlBuilder.buildOntologySql(queryStatement);
queryStatement.setSql(sql);
} }
} }

View File

@@ -0,0 +1,20 @@
package com.tencent.supersonic.headless.core.translator.calcite.s2sql;
import com.google.common.collect.Sets;
import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import lombok.Data;
import java.util.List;
import java.util.Set;
@Data
public class OntologyQueryParam {
private Set<String> metrics = Sets.newHashSet();
private Set<String> dimensions = Sets.newHashSet();
private String where;
private Long limit;
private List<ColumnOrder> order;
private boolean nativeQuery = false;
private AggOption aggOption = AggOption.DEFAULT;
}

View File

@@ -33,7 +33,7 @@ public class SchemaBuilder {
Prepare.CatalogReader catalogReader = new CalciteCatalogReader(rootSchema, Prepare.CatalogReader catalogReader = new CalciteCatalogReader(rootSchema,
Collections.singletonList(schema.getSchemaKey()), Configuration.typeFactory, Collections.singletonList(schema.getSchemaKey()), Configuration.typeFactory,
Configuration.config); Configuration.config);
EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); EngineType engineType = schema.getOntology().getDatabase().getType();
S2SQLSqlValidatorImpl s2SQLSqlValidator = S2SQLSqlValidatorImpl s2SQLSqlValidator =
new S2SQLSqlValidatorImpl(Configuration.operatorTable, catalogReader, new S2SQLSqlValidatorImpl(Configuration.operatorTable, catalogReader,
Configuration.typeFactory, Configuration.getValidatorConfig(engineType)); Configuration.typeFactory, Configuration.getValidatorConfig(engineType));

View File

@@ -1,14 +1,13 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql; package com.tencent.supersonic.headless.core.translator.calcite.sql;
import com.tencent.supersonic.common.calcite.Configuration; import com.tencent.supersonic.common.calcite.Configuration;
import com.tencent.supersonic.common.calcite.SqlMergeWithUtils;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption; import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.core.pojo.Database; import com.tencent.supersonic.headless.core.pojo.Database;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.OntologyQueryParam;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.DataModelNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.DataModelNode;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.SemanticNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.SemanticNode;
import com.tencent.supersonic.headless.core.translator.calcite.sql.render.FilterRender; import com.tencent.supersonic.headless.core.translator.calcite.sql.render.FilterRender;
@@ -17,23 +16,16 @@ import com.tencent.supersonic.headless.core.translator.calcite.sql.render.Render
import com.tencent.supersonic.headless.core.translator.calcite.sql.render.SourceRender; import com.tencent.supersonic.headless.core.translator.calcite.sql.render.SourceRender;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParseException;
import org.apache.calcite.sql.parser.SqlParser; import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.validate.SqlValidatorScope; import org.apache.calcite.sql.validate.SqlValidatorScope;
import java.util.ArrayList; import java.util.*;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.Objects;
/** parsing from query dimensions and metrics */
@Slf4j @Slf4j
public class SqlBuilder { public class SqlBuilder {
private final S2CalciteSchema schema; private final S2CalciteSchema schema;
private MetricQueryParam metricQueryParam; private OntologyQueryParam ontologyQueryParam;
private SqlValidatorScope scope; private SqlValidatorScope scope;
private SqlNode parserNode; private SqlNode parserNode;
private boolean isAgg = false; private boolean isAgg = false;
@@ -43,45 +35,24 @@ public class SqlBuilder {
this.schema = schema; this.schema = schema;
} }
public void build(QueryStatement queryStatement, AggOption aggOption) throws Exception { public String buildOntologySql(QueryStatement queryStatement) throws Exception {
this.metricQueryParam = queryStatement.getMetricQueryParam(); this.ontologyQueryParam = queryStatement.getOntologyQueryParam();
if (metricQueryParam.getMetrics() == null) { if (ontologyQueryParam.getLimit() == null) {
metricQueryParam.setMetrics(new ArrayList<>()); ontologyQueryParam.setLimit(0L);
} }
if (metricQueryParam.getDimensions() == null) { this.aggOption = ontologyQueryParam.getAggOption();
metricQueryParam.setDimensions(new ArrayList<>());
}
if (metricQueryParam.getLimit() == null) {
metricQueryParam.setLimit(0L);
}
this.aggOption = aggOption;
buildParseNode(); buildParseNode();
Database database = queryStatement.getOntology().getDatabase(); Database database = queryStatement.getOntology().getDatabase();
EngineType engineType = EngineType.fromString(database.getType()); optimizeParseNode(database.getType());
optimizeParseNode(engineType); return getSql(database.getType());
String sql = getSql(engineType);
queryStatement.setSql(sql);
if (Objects.nonNull(queryStatement.getEnableOptimize())
&& queryStatement.getEnableOptimize()
&& Objects.nonNull(queryStatement.getDataSetAlias())
&& !queryStatement.getDataSetAlias().isEmpty()) {
// simplify model sql with query sql
String simplifySql = rewrite(getSqlByDataSet(engineType, sql,
queryStatement.getDataSetSql(), queryStatement.getDataSetAlias()), engineType);
if (Objects.nonNull(simplifySql) && !simplifySql.isEmpty()) {
log.debug("simplifySql [{}]", simplifySql);
queryStatement.setDataSetSimplifySql(simplifySql);
}
}
} }
private void buildParseNode() throws Exception { private void buildParseNode() throws Exception {
// find the match Datasource // find relevant data models
scope = SchemaBuilder.getScope(schema); scope = SchemaBuilder.getScope(schema);
List<DataModel> dataModels = List<DataModel> dataModels =
DataModelNode.getRelatedDataModels(scope, schema, metricQueryParam); DataModelNode.getRelatedDataModels(scope, schema, ontologyQueryParam);
if (dataModels == null || dataModels.isEmpty()) { if (dataModels == null || dataModels.isEmpty()) {
throw new Exception("data model not found"); throw new Exception("data model not found");
} }
@@ -98,14 +69,14 @@ public class SqlBuilder {
while (it.hasNext()) { while (it.hasNext()) {
Renderer renderer = it.next(); Renderer renderer = it.next();
if (previous != null) { if (previous != null) {
previous.render(metricQueryParam, dataModels, scope, schema, !isAgg); previous.render(ontologyQueryParam, dataModels, scope, schema, !isAgg);
renderer.setTable(previous renderer.setTable(previous
.builderAs(DataModelNode.getNames(dataModels) + "_" + String.valueOf(i))); .builderAs(DataModelNode.getNames(dataModels) + "_" + String.valueOf(i)));
i++; i++;
} }
previous = renderer; previous = renderer;
} }
builders.getLast().render(metricQueryParam, dataModels, scope, schema, !isAgg); builders.getLast().render(ontologyQueryParam, dataModels, scope, schema, !isAgg);
parserNode = builders.getLast().builder(); parserNode = builders.getLast().builder();
} }
@@ -116,7 +87,7 @@ public class SqlBuilder {
// default by dataModel time aggregation // default by dataModel time aggregation
if (Objects.nonNull(dataModel.getAggTime()) && !dataModel.getAggTime() if (Objects.nonNull(dataModel.getAggTime()) && !dataModel.getAggTime()
.equalsIgnoreCase(Constants.DIMENSION_TYPE_TIME_GRANULARITY_NONE)) { .equalsIgnoreCase(Constants.DIMENSION_TYPE_TIME_GRANULARITY_NONE)) {
if (!metricQueryParam.isNativeQuery()) { if (!ontologyQueryParam.isNativeQuery()) {
return true; return true;
} }
} }
@@ -164,13 +135,4 @@ public class SqlBuilder {
} }
} }
private String getSqlByDataSet(EngineType engineType, String parentSql, String dataSetSql,
String parentAlias) throws SqlParseException {
if (!SqlMergeWithUtils.hasWith(engineType, dataSetSql)) {
return String.format("with %s as (%s) %s", parentAlias, parentSql, dataSetSql);
}
return SqlMergeWithUtils.mergeWith(engineType, dataSetSql,
Collections.singletonList(parentSql), Collections.singletonList(parentAlias));
}
} }

View File

@@ -4,13 +4,13 @@ import com.google.common.collect.Lists;
import com.tencent.supersonic.common.calcite.Configuration; import com.tencent.supersonic.common.calcite.Configuration;
import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper; import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.JoinRelation; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.JoinRelation;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.OntologyQueryParam;
import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.calcite.sql.SchemaBuilder; import com.tencent.supersonic.headless.core.translator.calcite.sql.SchemaBuilder;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
@@ -150,25 +150,25 @@ public class DataModelNode extends SemanticNode {
} }
public static void getQueryDimensionMeasure(S2CalciteSchema schema, public static void getQueryDimensionMeasure(S2CalciteSchema schema,
MetricQueryParam metricCommand, Set<String> queryDimension, List<String> measures) { OntologyQueryParam queryParam, Set<String> queryDimensions, Set<String> queryMeasures) {
queryDimension.addAll(metricCommand.getDimensions().stream() queryDimensions.addAll(queryParam.getDimensions().stream()
.map(d -> d.contains(Constants.DIMENSION_IDENTIFY) .map(d -> d.contains(Constants.DIMENSION_IDENTIFY)
? d.split(Constants.DIMENSION_IDENTIFY)[1] ? d.split(Constants.DIMENSION_IDENTIFY)[1]
: d) : d)
.collect(Collectors.toSet())); .collect(Collectors.toSet()));
Set<String> schemaMetricName = Set<String> schemaMetricName =
schema.getMetrics().stream().map(m -> m.getName()).collect(Collectors.toSet()); schema.getMetrics().stream().map(m -> m.getName()).collect(Collectors.toSet());
schema.getMetrics().stream().filter(m -> metricCommand.getMetrics().contains(m.getName())) schema.getMetrics().stream().filter(m -> queryParam.getMetrics().contains(m.getName()))
.forEach(m -> m.getMetricTypeParams().getMeasures().stream() .forEach(m -> m.getMetricTypeParams().getMeasures().stream()
.forEach(mm -> measures.add(mm.getName()))); .forEach(mm -> queryMeasures.add(mm.getName())));
metricCommand.getMetrics().stream().filter(m -> !schemaMetricName.contains(m)) queryParam.getMetrics().stream().filter(m -> !schemaMetricName.contains(m))
.forEach(m -> measures.add(m)); .forEach(m -> queryMeasures.add(m));
} }
public static void mergeQueryFilterDimensionMeasure(S2CalciteSchema schema, public static void mergeQueryFilterDimensionMeasure(S2CalciteSchema schema,
MetricQueryParam metricCommand, Set<String> queryDimension, List<String> measures, OntologyQueryParam metricCommand, Set<String> queryDimension, Set<String> measures,
SqlValidatorScope scope) throws Exception { SqlValidatorScope scope) throws Exception {
EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); EngineType engineType = schema.getOntology().getDatabase().getType();
if (Objects.nonNull(metricCommand.getWhere()) && !metricCommand.getWhere().isEmpty()) { if (Objects.nonNull(metricCommand.getWhere()) && !metricCommand.getWhere().isEmpty()) {
Set<String> filterConditions = new HashSet<>(); Set<String> filterConditions = new HashSet<>();
FilterNode.getFilterField(parse(metricCommand.getWhere(), scope, engineType), FilterNode.getFilterField(parse(metricCommand.getWhere(), scope, engineType),
@@ -192,23 +192,23 @@ public class DataModelNode extends SemanticNode {
} }
public static List<DataModel> getRelatedDataModels(SqlValidatorScope scope, public static List<DataModel> getRelatedDataModels(SqlValidatorScope scope,
S2CalciteSchema schema, MetricQueryParam metricCommand) throws Exception { S2CalciteSchema schema, OntologyQueryParam queryParam) throws Exception {
List<DataModel> dataModels = new ArrayList<>(); List<DataModel> dataModels = new ArrayList<>();
// check by metric // check by metric
List<String> measures = new ArrayList<>(); Set<String> queryMeasures = new HashSet<>();
Set<String> queryDimension = new HashSet<>(); Set<String> queryDimensions = new HashSet<>();
getQueryDimensionMeasure(schema, metricCommand, queryDimension, measures); getQueryDimensionMeasure(schema, queryParam, queryDimensions, queryMeasures);
DataModel baseDataModel = null; DataModel baseDataModel = null;
// one , match measure count // one , match measure count
Map<String, Integer> dataSourceMeasures = new HashMap<>(); Map<String, Integer> dataSourceMeasures = new HashMap<>();
for (Map.Entry<String, DataModel> entry : schema.getDataModels().entrySet()) { for (Map.Entry<String, DataModel> entry : schema.getDataModels().entrySet()) {
Set<String> sourceMeasure = entry.getValue().getMeasures().stream() Set<String> sourceMeasure = entry.getValue().getMeasures().stream()
.map(mm -> mm.getName()).collect(Collectors.toSet()); .map(mm -> mm.getName()).collect(Collectors.toSet());
sourceMeasure.retainAll(measures); sourceMeasure.retainAll(queryMeasures);
dataSourceMeasures.put(entry.getKey(), sourceMeasure.size()); dataSourceMeasures.put(entry.getKey(), sourceMeasure.size());
} }
log.info("dataSourceMeasures [{}]", dataSourceMeasures); log.info("metrics: [{}]", dataSourceMeasures);
Optional<Map.Entry<String, Integer>> base = dataSourceMeasures.entrySet().stream() Optional<Map.Entry<String, Integer>> base = dataSourceMeasures.entrySet().stream()
.sorted(Map.Entry.comparingByValue(Comparator.reverseOrder())).findFirst(); .sorted(Map.Entry.comparingByValue(Comparator.reverseOrder())).findFirst();
if (base.isPresent()) { if (base.isPresent()) {
@@ -229,19 +229,18 @@ public class DataModelNode extends SemanticNode {
} }
filterMeasure.addAll(sourceMeasure); filterMeasure.addAll(sourceMeasure);
filterMeasure.addAll(dimension); filterMeasure.addAll(dimension);
EngineType engineType = EngineType engineType = schema.getOntology().getDatabase().getType();
EngineType.fromString(schema.getOntology().getDatabase().getType()); mergeQueryFilterDimensionMeasure(schema, queryParam, queryDimensions, queryMeasures,
mergeQueryFilterDimensionMeasure(schema, metricCommand, queryDimension, measures,
scope); scope);
boolean isAllMatch = checkMatch(sourceMeasure, queryDimension, measures, dimension, boolean isAllMatch = checkMatch(sourceMeasure, queryDimensions, queryMeasures,
metricCommand, scope, engineType); dimension, queryParam, scope, engineType);
if (isAllMatch) { if (isAllMatch) {
log.debug("baseDataModel match all "); log.debug("baseDataModel match all ");
return dataModels; return dataModels;
} }
// find all dataSource has the same identifiers // find all dataSource has the same identifiers
List<DataModel> linkDataModels = getLinkDataSourcesByJoinRelation(queryDimension, List<DataModel> linkDataModels = getLinkDataSourcesByJoinRelation(queryDimensions,
measures, baseDataModel, schema); queryMeasures, baseDataModel, schema);
if (CollectionUtils.isEmpty(linkDataModels)) { if (CollectionUtils.isEmpty(linkDataModels)) {
log.debug("baseDataModel get by identifiers "); log.debug("baseDataModel get by identifiers ");
Set<String> baseIdentifiers = baseDataModel.getIdentifiers().stream() Set<String> baseIdentifiers = baseDataModel.getIdentifiers().stream()
@@ -250,24 +249,23 @@ public class DataModelNode extends SemanticNode {
throw new Exception( throw new Exception(
"datasource error : " + baseDataModel.getName() + " miss identifier"); "datasource error : " + baseDataModel.getName() + " miss identifier");
} }
linkDataModels = getLinkDataSources(baseIdentifiers, queryDimension, measures, linkDataModels = getLinkDataSources(baseIdentifiers, queryDimensions, queryMeasures,
baseDataModel, schema); baseDataModel, schema);
if (linkDataModels.isEmpty()) { if (linkDataModels.isEmpty()) {
throw new Exception(String.format( throw new Exception(String.format(
"not find the match datasource : dimension[%s],measure[%s]", "not find the match datasource : dimension[%s],measure[%s]",
queryDimension, measures)); queryDimensions, queryMeasures));
} }
} }
log.debug("linkDataModels {}", linkDataModels); log.debug("linkDataModels {}", linkDataModels);
return linkDataModels; return linkDataModels;
// dataModels.addAll(linkDataModels);
} }
return dataModels; return dataModels;
} }
private static boolean checkMatch(Set<String> sourceMeasure, Set<String> queryDimension, private static boolean checkMatch(Set<String> sourceMeasure, Set<String> queryDimension,
List<String> measures, Set<String> dimension, MetricQueryParam metricCommand, Set<String> measures, Set<String> dimension, OntologyQueryParam metricCommand,
SqlValidatorScope scope, EngineType engineType) throws Exception { SqlValidatorScope scope, EngineType engineType) throws Exception {
boolean isAllMatch = true; boolean isAllMatch = true;
sourceMeasure.retainAll(measures); sourceMeasure.retainAll(measures);
@@ -300,7 +298,7 @@ public class DataModelNode extends SemanticNode {
} }
private static List<DataModel> getLinkDataSourcesByJoinRelation(Set<String> queryDimension, private static List<DataModel> getLinkDataSourcesByJoinRelation(Set<String> queryDimension,
List<String> measures, DataModel baseDataModel, S2CalciteSchema schema) { Set<String> measures, DataModel baseDataModel, S2CalciteSchema schema) {
Set<String> linkDataSourceName = new HashSet<>(); Set<String> linkDataSourceName = new HashSet<>();
List<DataModel> linkDataModels = new ArrayList<>(); List<DataModel> linkDataModels = new ArrayList<>();
Set<String> before = new HashSet<>(); Set<String> before = new HashSet<>();
@@ -384,7 +382,7 @@ public class DataModelNode extends SemanticNode {
} }
private static List<DataModel> getLinkDataSources(Set<String> baseIdentifiers, private static List<DataModel> getLinkDataSources(Set<String> baseIdentifiers,
Set<String> queryDimension, List<String> measures, DataModel baseDataModel, Set<String> queryDimension, Set<String> measures, DataModel baseDataModel,
S2CalciteSchema schema) { S2CalciteSchema schema) {
Set<String> linkDataSourceName = new HashSet<>(); Set<String> linkDataSourceName = new HashSet<>();
List<DataModel> linkDataModels = new ArrayList<>(); List<DataModel> linkDataModels = new ArrayList<>();

View File

@@ -1,10 +1,10 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql.render; package com.tencent.supersonic.headless.core.translator.calcite.sql.render;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.OntologyQueryParam;
import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView; import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.FilterNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.FilterNode;
@@ -26,13 +26,13 @@ import java.util.stream.Collectors;
public class FilterRender extends Renderer { public class FilterRender extends Renderer {
@Override @Override
public void render(MetricQueryParam metricCommand, List<DataModel> dataModels, public void render(OntologyQueryParam metricCommand, List<DataModel> dataModels,
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception { SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
TableView tableView = super.tableView; TableView tableView = super.tableView;
SqlNode filterNode = null; SqlNode filterNode = null;
List<String> queryMetrics = new ArrayList<>(metricCommand.getMetrics()); List<String> queryMetrics = new ArrayList<>(metricCommand.getMetrics());
List<String> queryDimensions = new ArrayList<>(metricCommand.getDimensions()); List<String> queryDimensions = new ArrayList<>(metricCommand.getDimensions());
EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); EngineType engineType = schema.getOntology().getDatabase().getType();
if (metricCommand.getWhere() != null && !metricCommand.getWhere().isEmpty()) { if (metricCommand.getWhere() != null && !metricCommand.getWhere().isEmpty()) {
filterNode = SemanticNode.parse(metricCommand.getWhere(), scope, engineType); filterNode = SemanticNode.parse(metricCommand.getWhere(), scope, engineType);

View File

@@ -1,7 +1,6 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql.render; package com.tencent.supersonic.headless.core.translator.calcite.sql.render;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension;
@@ -9,6 +8,7 @@ import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.JoinRelation; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.JoinRelation;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Materialization; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Materialization;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.OntologyQueryParam;
import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView; import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.AggFunctionNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.AggFunctionNode;
@@ -47,10 +47,10 @@ import java.util.stream.Collectors;
public class JoinRender extends Renderer { public class JoinRender extends Renderer {
@Override @Override
public void render(MetricQueryParam metricCommand, List<DataModel> dataModels, public void render(OntologyQueryParam metricCommand, List<DataModel> dataModels,
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception { SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
String queryWhere = metricCommand.getWhere(); String queryWhere = metricCommand.getWhere();
EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); EngineType engineType = schema.getOntology().getDatabase().getType();
Set<String> whereFields = new HashSet<>(); Set<String> whereFields = new HashSet<>();
List<String> fieldWhere = new ArrayList<>(); List<String> fieldWhere = new ArrayList<>();
if (queryWhere != null && !queryWhere.isEmpty()) { if (queryWhere != null && !queryWhere.isEmpty()) {
@@ -59,7 +59,7 @@ public class JoinRender extends Renderer {
fieldWhere = whereFields.stream().collect(Collectors.toList()); fieldWhere = whereFields.stream().collect(Collectors.toList());
} }
Set<String> queryAllDimension = new HashSet<>(); Set<String> queryAllDimension = new HashSet<>();
List<String> measures = new ArrayList<>(); Set<String> measures = new HashSet<>();
DataModelNode.getQueryDimensionMeasure(schema, metricCommand, queryAllDimension, measures); DataModelNode.getQueryDimensionMeasure(schema, metricCommand, queryAllDimension, measures);
SqlNode left = null; SqlNode left = null;
TableView leftTable = null; TableView leftTable = null;
@@ -73,8 +73,8 @@ public class JoinRender extends Renderer {
final DataModel dataModel = dataModels.get(i); final DataModel dataModel = dataModels.get(i);
final Set<String> filterDimensions = new HashSet<>(); final Set<String> filterDimensions = new HashSet<>();
final Set<String> filterMetrics = new HashSet<>(); final Set<String> filterMetrics = new HashSet<>();
final List<String> queryDimension = new ArrayList<>(); final Set<String> queryDimension = new HashSet<>();
final List<String> queryMetrics = new ArrayList<>(); final Set<String> queryMetrics = new HashSet<>();
SourceRender.whereDimMetric(fieldWhere, queryMetrics, queryDimension, dataModel, schema, SourceRender.whereDimMetric(fieldWhere, queryMetrics, queryDimension, dataModel, schema,
filterDimensions, filterMetrics); filterDimensions, filterMetrics);
List<String> reqMetric = new ArrayList<>(metricCommand.getMetrics()); List<String> reqMetric = new ArrayList<>(metricCommand.getMetrics());
@@ -142,11 +142,11 @@ public class JoinRender extends Renderer {
} }
private void doMetric(Map<String, SqlNode> innerSelect, TableView filterView, private void doMetric(Map<String, SqlNode> innerSelect, TableView filterView,
List<String> queryMetrics, List<String> reqMetrics, DataModel dataModel, Set<String> queryMetrics, List<String> reqMetrics, DataModel dataModel,
Set<String> sourceMeasure, SqlValidatorScope scope, S2CalciteSchema schema, Set<String> sourceMeasure, SqlValidatorScope scope, S2CalciteSchema schema,
boolean nonAgg) throws Exception { boolean nonAgg) throws Exception {
String alias = Constants.JOIN_TABLE_PREFIX + dataModel.getName(); String alias = Constants.JOIN_TABLE_PREFIX + dataModel.getName();
EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); EngineType engineType = schema.getOntology().getDatabase().getType();
for (String m : reqMetrics) { for (String m : reqMetrics) {
if (getMatchMetric(schema, sourceMeasure, m, queryMetrics)) { if (getMatchMetric(schema, sourceMeasure, m, queryMetrics)) {
MetricNode metricNode = buildMetricNode(m, dataModel, scope, schema, nonAgg, alias); MetricNode metricNode = buildMetricNode(m, dataModel, scope, schema, nonAgg, alias);
@@ -177,11 +177,11 @@ public class JoinRender extends Renderer {
} }
private void doDimension(Map<String, SqlNode> innerSelect, Set<String> filterDimension, private void doDimension(Map<String, SqlNode> innerSelect, Set<String> filterDimension,
List<String> queryDimension, List<String> reqDimensions, DataModel dataModel, Set<String> queryDimension, List<String> reqDimensions, DataModel dataModel,
Set<String> dimension, SqlValidatorScope scope, S2CalciteSchema schema) Set<String> dimension, SqlValidatorScope scope, S2CalciteSchema schema)
throws Exception { throws Exception {
String alias = Constants.JOIN_TABLE_PREFIX + dataModel.getName(); String alias = Constants.JOIN_TABLE_PREFIX + dataModel.getName();
EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); EngineType engineType = schema.getOntology().getDatabase().getType();
for (String d : reqDimensions) { for (String d : reqDimensions) {
if (getMatchDimension(schema, dimension, dataModel, d, queryDimension)) { if (getMatchDimension(schema, dimension, dataModel, d, queryDimension)) {
if (d.contains(Constants.DIMENSION_IDENTIFY)) { if (d.contains(Constants.DIMENSION_IDENTIFY)) {
@@ -205,7 +205,7 @@ public class JoinRender extends Renderer {
} }
private boolean getMatchMetric(S2CalciteSchema schema, Set<String> sourceMeasure, String m, private boolean getMatchMetric(S2CalciteSchema schema, Set<String> sourceMeasure, String m,
List<String> queryMetrics) { Set<String> queryMetrics) {
Optional<Metric> metric = schema.getMetrics().stream() Optional<Metric> metric = schema.getMetrics().stream()
.filter(mm -> mm.getName().equalsIgnoreCase(m)).findFirst(); .filter(mm -> mm.getName().equalsIgnoreCase(m)).findFirst();
boolean isAdd = false; boolean isAdd = false;
@@ -226,7 +226,7 @@ public class JoinRender extends Renderer {
} }
private boolean getMatchDimension(S2CalciteSchema schema, Set<String> sourceDimension, private boolean getMatchDimension(S2CalciteSchema schema, Set<String> sourceDimension,
DataModel dataModel, String d, List<String> queryDimension) { DataModel dataModel, String d, Set<String> queryDimension) {
String oriDimension = d; String oriDimension = d;
boolean isAdd = false; boolean isAdd = false;
if (d.contains(Constants.DIMENSION_IDENTIFY)) { if (d.contains(Constants.DIMENSION_IDENTIFY)) {
@@ -261,7 +261,7 @@ public class JoinRender extends Renderer {
private SqlNode buildJoin(SqlNode left, TableView leftTable, TableView tableView, private SqlNode buildJoin(SqlNode left, TableView leftTable, TableView tableView,
Map<String, String> before, DataModel dataModel, S2CalciteSchema schema, Map<String, String> before, DataModel dataModel, S2CalciteSchema schema,
SqlValidatorScope scope) throws Exception { SqlValidatorScope scope) throws Exception {
EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); EngineType engineType = schema.getOntology().getDatabase().getType();
SqlNode condition = SqlNode condition =
getCondition(leftTable, tableView, dataModel, schema, scope, engineType); getCondition(leftTable, tableView, dataModel, schema, scope, engineType);
SqlLiteral sqlLiteral = SemanticNode.getJoinSqlLiteral(""); SqlLiteral sqlLiteral = SemanticNode.getJoinSqlLiteral("");
@@ -454,8 +454,7 @@ public class JoinRender extends Renderer {
endTime = zipper.getAlias() + "." + endTimeOp.get().getName(); endTime = zipper.getAlias() + "." + endTimeOp.get().getName();
dateTime = partMetric.getAlias() + "." + partTime.get().getName(); dateTime = partMetric.getAlias() + "." + partTime.get().getName();
} }
EngineType engineType = EngineType engineType = schema.getOntology().getDatabase().getType();
EngineType.fromString(schema.getOntology().getDatabase().getType());
ArrayList<SqlNode> operandList = ArrayList<SqlNode> operandList =
new ArrayList<>(Arrays.asList(SemanticNode.parse(endTime, scope, engineType), new ArrayList<>(Arrays.asList(SemanticNode.parse(endTime, scope, engineType),
SemanticNode.parse(dateTime, scope, engineType))); SemanticNode.parse(dateTime, scope, engineType)));

View File

@@ -2,8 +2,8 @@ package com.tencent.supersonic.headless.core.translator.calcite.sql.render;
import com.tencent.supersonic.common.pojo.ColumnOrder; import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.OntologyQueryParam;
import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView; import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.MetricNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.MetricNode;
@@ -22,10 +22,10 @@ import java.util.List;
public class OutputRender extends Renderer { public class OutputRender extends Renderer {
@Override @Override
public void render(MetricQueryParam metricCommand, List<DataModel> dataModels, public void render(OntologyQueryParam metricCommand, List<DataModel> dataModels,
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception { SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
TableView selectDataSet = super.tableView; TableView selectDataSet = super.tableView;
EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); EngineType engineType = schema.getOntology().getDatabase().getType();
for (String dimension : metricCommand.getDimensions()) { for (String dimension : metricCommand.getDimensions()) {
selectDataSet.getMeasure().add(SemanticNode.parse(dimension, scope, engineType)); selectDataSet.getMeasure().add(SemanticNode.parse(dimension, scope, engineType));
} }

View File

@@ -1,12 +1,12 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql.render; package com.tencent.supersonic.headless.core.translator.calcite.sql.render;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.OntologyQueryParam;
import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView; import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.MeasureNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.MeasureNode;
@@ -114,6 +114,6 @@ public abstract class Renderer {
return SemanticNode.buildAs(alias, tableView.build()); return SemanticNode.buildAs(alias, tableView.build());
} }
public abstract void render(MetricQueryParam metricCommand, List<DataModel> dataModels, public abstract void render(OntologyQueryParam metricCommand, List<DataModel> dataModels,
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception; SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception;
} }

View File

@@ -1,7 +1,6 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql.render; package com.tencent.supersonic.headless.core.translator.calcite.sql.render;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension;
@@ -9,6 +8,7 @@ import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Materialization; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Materialization;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.OntologyQueryParam;
import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView; import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.DataModelNode; import com.tencent.supersonic.headless.core.translator.calcite.sql.node.DataModelNode;
@@ -41,14 +41,14 @@ import static com.tencent.supersonic.headless.core.translator.calcite.s2sql.Cons
public class SourceRender extends Renderer { public class SourceRender extends Renderer {
public static TableView renderOne(String alias, List<String> fieldWheres, public static TableView renderOne(String alias, List<String> fieldWheres,
List<String> reqMetrics, List<String> reqDimensions, String queryWhere, Set<String> reqMetrics, Set<String> reqDimensions, String queryWhere,
DataModel datasource, SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) DataModel datasource, SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg)
throws Exception { throws Exception {
TableView dataSet = new TableView(); TableView dataSet = new TableView();
TableView output = new TableView(); TableView output = new TableView();
List<String> queryMetrics = new ArrayList<>(reqMetrics); Set<String> queryMetrics = new HashSet<>(reqMetrics);
List<String> queryDimensions = new ArrayList<>(reqDimensions); Set<String> queryDimensions = new HashSet<>(reqDimensions);
List<String> fieldWhere = new ArrayList<>(fieldWheres); List<String> fieldWhere = new ArrayList<>(fieldWheres);
Map<String, String> extendFields = new HashMap<>(); Map<String, String> extendFields = new HashMap<>();
if (!fieldWhere.isEmpty()) { if (!fieldWhere.isEmpty()) {
@@ -57,9 +57,7 @@ public class SourceRender extends Renderer {
whereDimMetric(fieldWhere, queryMetrics, queryDimensions, datasource, schema, whereDimMetric(fieldWhere, queryMetrics, queryDimensions, datasource, schema,
dimensions, metrics); dimensions, metrics);
queryMetrics.addAll(metrics); queryMetrics.addAll(metrics);
queryMetrics = uniqList(queryMetrics);
queryDimensions.addAll(dimensions); queryDimensions.addAll(dimensions);
queryDimensions = uniqList(queryDimensions);
mergeWhere(fieldWhere, dataSet, output, queryMetrics, queryDimensions, extendFields, mergeWhere(fieldWhere, dataSet, output, queryMetrics, queryDimensions, extendFields,
datasource, scope, schema, nonAgg); datasource, scope, schema, nonAgg);
} }
@@ -109,7 +107,7 @@ public class SourceRender extends Renderer {
S2CalciteSchema schema, boolean nonAgg, Map<String, String> extendFields, S2CalciteSchema schema, boolean nonAgg, Map<String, String> extendFields,
TableView dataSet, TableView output, SqlValidatorScope scope) throws Exception { TableView dataSet, TableView output, SqlValidatorScope scope) throws Exception {
List<Dimension> dimensionList = schema.getDimensions().get(datasource.getName()); List<Dimension> dimensionList = schema.getDimensions().get(datasource.getName());
EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); EngineType engineType = schema.getOntology().getDatabase().getType();
boolean isAdd = false; boolean isAdd = false;
if (!CollectionUtils.isEmpty(dimensionList)) { if (!CollectionUtils.isEmpty(dimensionList)) {
for (Dimension dim : dimensionList) { for (Dimension dim : dimensionList) {
@@ -182,12 +180,12 @@ public class SourceRender extends Renderer {
} }
} }
private static List<SqlNode> getWhereMeasure(List<String> fields, List<String> queryMetrics, private static List<SqlNode> getWhereMeasure(List<String> fields, Set<String> queryMetrics,
List<String> queryDimensions, Map<String, String> extendFields, DataModel datasource, Set<String> queryDimensions, Map<String, String> extendFields, DataModel datasource,
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception { SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
Iterator<String> iterator = fields.iterator(); Iterator<String> iterator = fields.iterator();
List<SqlNode> whereNode = new ArrayList<>(); List<SqlNode> whereNode = new ArrayList<>();
EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); EngineType engineType = schema.getOntology().getDatabase().getType();
while (iterator.hasNext()) { while (iterator.hasNext()) {
String cur = iterator.next(); String cur = iterator.next();
if (queryDimensions.contains(cur) || queryMetrics.contains(cur)) { if (queryDimensions.contains(cur) || queryMetrics.contains(cur)) {
@@ -224,17 +222,17 @@ public class SourceRender extends Renderer {
} }
private static void mergeWhere(List<String> fields, TableView dataSet, TableView outputSet, private static void mergeWhere(List<String> fields, TableView dataSet, TableView outputSet,
List<String> queryMetrics, List<String> queryDimensions, Set<String> queryMetrics, Set<String> queryDimensions, Map<String, String> extendFields,
Map<String, String> extendFields, DataModel datasource, SqlValidatorScope scope, DataModel datasource, SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg)
S2CalciteSchema schema, boolean nonAgg) throws Exception { throws Exception {
List<SqlNode> whereNode = getWhereMeasure(fields, queryMetrics, queryDimensions, List<SqlNode> whereNode = getWhereMeasure(fields, queryMetrics, queryDimensions,
extendFields, datasource, scope, schema, nonAgg); extendFields, datasource, scope, schema, nonAgg);
dataSet.getMeasure().addAll(whereNode); dataSet.getMeasure().addAll(whereNode);
// getWhere(outputSet,fields,queryMetrics,queryDimensions,datasource,scope,schema); // getWhere(outputSet,fields,queryMetrics,queryDimensions,datasource,scope,schema);
} }
public static void whereDimMetric(List<String> fields, List<String> queryMetrics, public static void whereDimMetric(List<String> fields, Set<String> queryMetrics,
List<String> queryDimensions, DataModel datasource, S2CalciteSchema schema, Set<String> queryDimensions, DataModel datasource, S2CalciteSchema schema,
Set<String> dimensions, Set<String> metrics) { Set<String> dimensions, Set<String> metrics) {
for (String field : fields) { for (String field : fields) {
if (queryDimensions.contains(field) || queryMetrics.contains(field)) { if (queryDimensions.contains(field) || queryMetrics.contains(field)) {
@@ -310,7 +308,7 @@ public class SourceRender extends Renderer {
return false; return false;
} }
private static void addTimeDimension(DataModel dataModel, List<String> queryDimension) { private static void addTimeDimension(DataModel dataModel, Set<String> queryDimension) {
if (Materialization.TimePartType.ZIPPER.equals(dataModel.getTimePartType())) { if (Materialization.TimePartType.ZIPPER.equals(dataModel.getTimePartType())) {
Optional<Dimension> startTimeOp = dataModel.getDimensions().stream() Optional<Dimension> startTimeOp = dataModel.getDimensions().stream()
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType())) .filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
@@ -336,12 +334,12 @@ public class SourceRender extends Renderer {
} }
} }
public void render(MetricQueryParam metricQueryParam, List<DataModel> dataModels, public void render(OntologyQueryParam ontologyQueryParam, List<DataModel> dataModels,
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception { SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
String queryWhere = metricQueryParam.getWhere(); String queryWhere = ontologyQueryParam.getWhere();
Set<String> whereFields = new HashSet<>(); Set<String> whereFields = new HashSet<>();
List<String> fieldWhere = new ArrayList<>(); List<String> fieldWhere = new ArrayList<>();
EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType()); EngineType engineType = schema.getOntology().getDatabase().getType();
if (queryWhere != null && !queryWhere.isEmpty()) { if (queryWhere != null && !queryWhere.isEmpty()) {
SqlNode sqlNode = SemanticNode.parse(queryWhere, scope, engineType); SqlNode sqlNode = SemanticNode.parse(queryWhere, scope, engineType);
FilterNode.getFilterField(sqlNode, whereFields); FilterNode.getFilterField(sqlNode, whereFields);
@@ -349,13 +347,13 @@ public class SourceRender extends Renderer {
} }
if (dataModels.size() == 1) { if (dataModels.size() == 1) {
DataModel dataModel = dataModels.get(0); DataModel dataModel = dataModels.get(0);
super.tableView = renderOne("", fieldWhere, metricQueryParam.getMetrics(), super.tableView = renderOne("", fieldWhere, ontologyQueryParam.getMetrics(),
metricQueryParam.getDimensions(), metricQueryParam.getWhere(), dataModel, scope, ontologyQueryParam.getDimensions(), ontologyQueryParam.getWhere(), dataModel,
schema, nonAgg); scope, schema, nonAgg);
return; return;
} }
JoinRender joinRender = new JoinRender(); JoinRender joinRender = new JoinRender();
joinRender.render(metricQueryParam, dataModels, scope, schema, nonAgg); joinRender.render(ontologyQueryParam, dataModels, scope, schema, nonAgg);
super.tableView = joinRender.getTableView(); super.tableView = joinRender.getTableView();
} }
} }

View File

@@ -4,7 +4,6 @@ import com.google.common.collect.Lists;
import com.tencent.supersonic.common.jsqlparser.SqlAddHelper; import com.tencent.supersonic.common.jsqlparser.SqlAddHelper;
import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper; import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum; import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
import com.tencent.supersonic.headless.api.pojo.MetricTable;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
@@ -28,8 +27,8 @@ public class DefaultDimValueConverter implements QueryConverter {
@Override @Override
public boolean accept(QueryStatement queryStatement) { public boolean accept(QueryStatement queryStatement) {
return !Objects.isNull(queryStatement.getDataSetQueryParam()) return Objects.nonNull(queryStatement.getSqlQueryParam())
&& !StringUtils.isBlank(queryStatement.getDataSetQueryParam().getSql()); && StringUtils.isNotBlank(queryStatement.getSqlQueryParam().getSql());
} }
@Override @Override
@@ -40,15 +39,13 @@ public class DefaultDimValueConverter implements QueryConverter {
if (CollectionUtils.isEmpty(dimensions)) { if (CollectionUtils.isEmpty(dimensions)) {
return; return;
} }
String sql = queryStatement.getDataSetQueryParam().getSql(); String sql = queryStatement.getSqlQueryParam().getSql();
List<String> whereFields = SqlSelectHelper.getWhereFields(sql).stream() List<String> whereFields = SqlSelectHelper.getWhereFields(sql).stream()
.filter(field -> !TimeDimensionEnum.containsTimeDimension(field)) .filter(field -> !TimeDimensionEnum.containsTimeDimension(field))
.collect(Collectors.toList()); .collect(Collectors.toList());
if (!CollectionUtils.isEmpty(whereFields)) { if (!CollectionUtils.isEmpty(whereFields)) {
return; return;
} }
MetricTable metricTable =
queryStatement.getDataSetQueryParam().getTables().stream().findFirst().orElse(null);
List<Expression> expressions = Lists.newArrayList(); List<Expression> expressions = Lists.newArrayList();
for (Dimension dimension : dimensions) { for (Dimension dimension : dimensions) {
ExpressionList expressionList = new ExpressionList(); ExpressionList expressionList = new ExpressionList();
@@ -59,11 +56,11 @@ public class DefaultDimValueConverter implements QueryConverter {
inExpression.setLeftExpression(new Column(dimension.getBizName())); inExpression.setLeftExpression(new Column(dimension.getBizName()));
inExpression.setRightExpression(expressionList); inExpression.setRightExpression(expressionList);
expressions.add(inExpression); expressions.add(inExpression);
if (metricTable != null) { if (Objects.nonNull(queryStatement.getSqlQueryParam().getTable())) {
metricTable.getDimensions().add(dimension.getBizName()); queryStatement.getOntologyQueryParam().getDimensions().add(dimension.getBizName());
} }
} }
sql = SqlAddHelper.addWhere(sql, expressions); sql = SqlAddHelper.addWhere(sql, expressions);
queryStatement.getDataSetQueryParam().setSql(sql); queryStatement.getSqlQueryParam().setSql(sql);
} }
} }

View File

@@ -6,82 +6,46 @@ import com.tencent.supersonic.common.pojo.enums.DatePeriodEnum;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.common.util.ContextUtils; import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.common.util.DateModeUtils; import com.tencent.supersonic.common.util.DateModeUtils;
import com.tencent.supersonic.headless.api.pojo.MetricTable;
import com.tencent.supersonic.headless.api.pojo.QueryParam;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption; import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.core.pojo.DataSetQueryParam;
import com.tencent.supersonic.headless.core.pojo.Database; import com.tencent.supersonic.headless.core.pojo.Database;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQueryParam;
import com.tencent.supersonic.headless.core.pojo.StructQueryParam;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.OntologyQueryParam;
import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils; import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils; import org.springframework.util.CollectionUtils;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Objects; import java.util.Objects;
import java.util.stream.Collectors; import java.util.stream.Collectors;
/** supplement the QueryStatement when query with custom aggregation method */
@Component("CalculateAggConverter") @Component("CalculateAggConverter")
@Slf4j @Slf4j
public class CalculateAggConverter implements QueryConverter { public class MetricRatioConverter implements QueryConverter {
public interface EngineSql { public interface EngineSql {
String sql(QueryParam queryParam, boolean isOver, boolean asWith, String metricSql); String sql(StructQueryParam structQueryParam, boolean isOver, boolean asWith,
} String metricSql);
public DataSetQueryParam generateSqlCommend(QueryStatement queryStatement,
EngineType engineTypeEnum, String version) throws Exception {
SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class);
QueryParam queryParam = queryStatement.getQueryParam();
// 同环比
if (isRatioAccept(queryParam)) {
return generateRatioSqlCommand(queryStatement, engineTypeEnum, version);
}
DataSetQueryParam sqlCommand = new DataSetQueryParam();
String metricTableName = "v_metric_tb_tmp";
MetricTable metricTable = new MetricTable();
metricTable.setAlias(metricTableName);
metricTable.setMetrics(queryParam.getMetrics());
metricTable.setDimensions(queryParam.getGroups());
String where = sqlGenerateUtils.generateWhere(queryParam, null);
log.info("in generateSqlCommand, complete where:{}", where);
metricTable.setWhere(where);
metricTable.setAggOption(AggOption.AGGREGATION);
sqlCommand.setTables(new ArrayList<>(Collections.singletonList(metricTable)));
String sql = String.format("select %s from %s %s %s %s",
sqlGenerateUtils.getSelect(queryParam), metricTableName,
sqlGenerateUtils.getGroupBy(queryParam), sqlGenerateUtils.getOrderBy(queryParam),
sqlGenerateUtils.getLimit(queryParam));
if (!sqlGenerateUtils.isSupportWith(engineTypeEnum, version)) {
sqlCommand.setSupportWith(false);
sql = String.format("select %s from %s t0 %s %s %s",
sqlGenerateUtils.getSelect(queryParam), metricTableName,
sqlGenerateUtils.getGroupBy(queryParam),
sqlGenerateUtils.getOrderBy(queryParam), sqlGenerateUtils.getLimit(queryParam));
}
sqlCommand.setSql(sql);
return sqlCommand;
} }
@Override @Override
public boolean accept(QueryStatement queryStatement) { public boolean accept(QueryStatement queryStatement) {
if (Objects.isNull(queryStatement.getQueryParam()) || queryStatement.getIsS2SQL()) { if (Objects.isNull(queryStatement.getStructQueryParam()) || queryStatement.getIsS2SQL()
|| !isRatioAccept(queryStatement.getStructQueryParam())) {
return false; return false;
} }
QueryParam queryParam = queryStatement.getQueryParam(); StructQueryParam structQueryParam = queryStatement.getStructQueryParam();
if (queryParam.getQueryType().isNativeAggQuery()) { if (structQueryParam.getQueryType().isNativeAggQuery()
return false; || CollectionUtils.isEmpty(structQueryParam.getAggregators())) {
}
if (CollectionUtils.isEmpty(queryParam.getAggregators())) {
return false; return false;
} }
int nonSumFunction = 0; int nonSumFunction = 0;
for (Aggregator agg : queryParam.getAggregators()) { for (Aggregator agg : structQueryParam.getAggregators()) {
if (agg.getFunc() == null || "".equals(agg.getFunc())) { if (agg.getFunc() == null || "".equals(agg.getFunc())) {
return false; return false;
} }
@@ -98,14 +62,12 @@ public class CalculateAggConverter implements QueryConverter {
@Override @Override
public void convert(QueryStatement queryStatement) throws Exception { public void convert(QueryStatement queryStatement) throws Exception {
Database database = queryStatement.getOntology().getDatabase(); Database database = queryStatement.getOntology().getDatabase();
DataSetQueryParam dataSetQueryParam = generateSqlCommend(queryStatement, generateRatioSql(queryStatement, database.getType(), database.getVersion());
EngineType.fromString(database.getType().toUpperCase()), database.getVersion());
queryStatement.setDataSetQueryParam(dataSetQueryParam);
} }
/** Ratio */ /** Ratio */
public boolean isRatioAccept(QueryParam queryParam) { public boolean isRatioAccept(StructQueryParam structQueryParam) {
Long ratioFuncNum = queryParam.getAggregators().stream() Long ratioFuncNum = structQueryParam.getAggregators().stream()
.filter(f -> (f.getFunc().equals(AggOperatorEnum.RATIO_ROLL) .filter(f -> (f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)
|| f.getFunc().equals(AggOperatorEnum.RATIO_OVER))) || f.getFunc().equals(AggOperatorEnum.RATIO_OVER)))
.count(); .count();
@@ -115,53 +77,47 @@ public class CalculateAggConverter implements QueryConverter {
return false; return false;
} }
public DataSetQueryParam generateRatioSqlCommand(QueryStatement queryStatement, public void generateRatioSql(QueryStatement queryStatement, EngineType engineTypeEnum,
EngineType engineTypeEnum, String version) throws Exception { String version) throws Exception {
SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class); SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class);
QueryParam queryParam = queryStatement.getQueryParam(); StructQueryParam structQueryParam = queryStatement.getStructQueryParam();
check(queryParam); check(structQueryParam);
queryStatement.setEnableOptimize(false); queryStatement.setEnableOptimize(false);
DataSetQueryParam sqlCommand = new DataSetQueryParam(); OntologyQueryParam ontologyQueryParam = queryStatement.getOntologyQueryParam();
ontologyQueryParam.setAggOption(AggOption.AGGREGATION);
String metricTableName = "v_metric_tb_tmp"; String metricTableName = "v_metric_tb_tmp";
MetricTable metricTable = new MetricTable(); boolean isOver = isOverRatio(structQueryParam);
metricTable.setAlias(metricTableName);
metricTable.setMetrics(queryParam.getMetrics());
metricTable.setDimensions(queryParam.getGroups());
String where = sqlGenerateUtils.generateWhere(queryParam, null);
log.info("in generateSqlCommend, complete where:{}", where);
metricTable.setWhere(where);
metricTable.setAggOption(AggOption.AGGREGATION);
sqlCommand.setTables(new ArrayList<>(Collections.singletonList(metricTable)));
boolean isOver = isOverRatio(queryParam);
String sql = ""; String sql = "";
SqlQueryParam dsParam = queryStatement.getSqlQueryParam();
dsParam.setTable(metricTableName);
switch (engineTypeEnum) { switch (engineTypeEnum) {
case H2: case H2:
sql = new H2EngineSql().sql(queryParam, isOver, true, metricTableName); sql = new H2EngineSql().sql(structQueryParam, isOver, true, metricTableName);
break; break;
case MYSQL: case MYSQL:
case DORIS: case DORIS:
case CLICKHOUSE: case CLICKHOUSE:
if (!sqlGenerateUtils.isSupportWith(engineTypeEnum, version)) { if (!sqlGenerateUtils.isSupportWith(engineTypeEnum, version)) {
sqlCommand.setSupportWith(false); dsParam.setSupportWith(false);
} }
if (!engineTypeEnum.equals(engineTypeEnum.CLICKHOUSE)) { if (!engineTypeEnum.equals(engineTypeEnum.CLICKHOUSE)) {
sql = new MysqlEngineSql().sql(queryParam, isOver, sqlCommand.isSupportWith(), sql = new MysqlEngineSql().sql(structQueryParam, isOver,
metricTableName); dsParam.isSupportWith(), metricTableName);
} else { } else {
sql = new CkEngineSql().sql(queryParam, isOver, sqlCommand.isSupportWith(), sql = new CkEngineSql().sql(structQueryParam, isOver, dsParam.isSupportWith(),
metricTableName); metricTableName);
} }
break; break;
default: default:
} }
sqlCommand.setSql(sql); dsParam.setSql(sql);
return sqlCommand;
} }
public class H2EngineSql implements EngineSql { public class H2EngineSql implements EngineSql {
public String getOverSelect(QueryParam queryParam, boolean isOver) { public String getOverSelect(StructQueryParam structQueryParam, boolean isOver) {
String aggStr = queryParam.getAggregators().stream().map(f -> { String aggStr = structQueryParam.getAggregators().stream().map(f -> {
if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER) if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER)
|| f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) { || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
return String.format("( (%s-%s_roll)/cast(%s_roll as DOUBLE) ) as %s_%s,%s", return String.format("( (%s-%s_roll)/cast(%s_roll as DOUBLE) ) as %s_%s,%s",
@@ -171,43 +127,44 @@ public class CalculateAggConverter implements QueryConverter {
return f.getColumn(); return f.getColumn();
} }
}).collect(Collectors.joining(",")); }).collect(Collectors.joining(","));
return CollectionUtils.isEmpty(queryParam.getGroups()) ? aggStr return CollectionUtils.isEmpty(structQueryParam.getGroups()) ? aggStr
: String.join(",", queryParam.getGroups()) + "," + aggStr; : String.join(",", structQueryParam.getGroups()) + "," + aggStr;
} }
public String getTimeSpan(QueryParam queryParam, boolean isOver, boolean isAdd) { public String getTimeSpan(StructQueryParam structQueryParam, boolean isOver,
if (Objects.nonNull(queryParam.getDateInfo())) { boolean isAdd) {
if (Objects.nonNull(structQueryParam.getDateInfo())) {
String addStr = isAdd ? "" : "-"; String addStr = isAdd ? "" : "-";
if (queryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.DAY)) { if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.DAY)) {
return "day," + (isOver ? addStr + "7" : addStr + "1"); return "day," + (isOver ? addStr + "7" : addStr + "1");
} }
if (queryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) { if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) {
return isOver ? "month," + addStr + "1" : "day," + addStr + "7"; return isOver ? "month," + addStr + "1" : "day," + addStr + "7";
} }
if (queryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH.MONTH)) { if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH.MONTH)) {
return isOver ? "year," + addStr + "1" : "month," + addStr + "1"; return isOver ? "year," + addStr + "1" : "month," + addStr + "1";
} }
} }
return ""; return "";
} }
public String getJoinOn(QueryParam queryParam, boolean isOver, String aliasLeft, public String getJoinOn(StructQueryParam structQueryParam, boolean isOver, String aliasLeft,
String aliasRight) { String aliasRight) {
String timeDim = getTimeDim(queryParam); String timeDim = getTimeDim(structQueryParam);
String timeSpan = getTimeSpan(queryParam, isOver, true); String timeSpan = getTimeSpan(structQueryParam, isOver, true);
String aggStr = queryParam.getAggregators().stream().map(f -> { String aggStr = structQueryParam.getAggregators().stream().map(f -> {
if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER) if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER)
|| f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) { || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
if (queryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) { if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) {
return String.format( return String.format(
"%s is not null and %s = FORMATDATETIME(DATEADD(%s,CONCAT(%s,'-01')),'yyyy-MM') ", "%s is not null and %s = FORMATDATETIME(DATEADD(%s,CONCAT(%s,'-01')),'yyyy-MM') ",
aliasRight + timeDim, aliasLeft + timeDim, timeSpan, aliasRight + timeDim, aliasLeft + timeDim, timeSpan,
aliasRight + timeDim); aliasRight + timeDim);
} }
if (queryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK) if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK)
&& isOver) { && isOver) {
return String.format(" DATE_TRUNC('week',DATEADD(%s,%s) ) = %s ", return String.format(" DATE_TRUNC('week',DATEADD(%s,%s) ) = %s ",
getTimeSpan(queryParam, isOver, false), aliasLeft + timeDim, getTimeSpan(structQueryParam, isOver, false), aliasLeft + timeDim,
aliasRight + timeDim); aliasRight + timeDim);
} }
return String.format("%s = TIMESTAMPADD(%s,%s) ", aliasLeft + timeDim, timeSpan, return String.format("%s = TIMESTAMPADD(%s,%s) ", aliasLeft + timeDim, timeSpan,
@@ -217,7 +174,7 @@ public class CalculateAggConverter implements QueryConverter {
} }
}).collect(Collectors.joining(" and ")); }).collect(Collectors.joining(" and "));
List<String> groups = new ArrayList<>(); List<String> groups = new ArrayList<>();
for (String group : queryParam.getGroups()) { for (String group : structQueryParam.getGroups()) {
if (group.equalsIgnoreCase(timeDim)) { if (group.equalsIgnoreCase(timeDim)) {
continue; continue;
} }
@@ -228,35 +185,36 @@ public class CalculateAggConverter implements QueryConverter {
} }
@Override @Override
public String sql(QueryParam queryParam, boolean isOver, boolean asWith, String metricSql) { public String sql(StructQueryParam structQueryParam, boolean isOver, boolean asWith,
String metricSql) {
String sql = String.format( String sql = String.format(
"select %s from ( select %s , %s from %s t0 left join %s t1 on %s ) metric_tb_src %s %s ", "select %s from ( select %s , %s from %s t0 left join %s t1 on %s ) metric_tb_src %s %s ",
getOverSelect(queryParam, isOver), getAllSelect(queryParam, "t0."), getOverSelect(structQueryParam, isOver), getAllSelect(structQueryParam, "t0."),
getAllJoinSelect(queryParam, "t1."), metricSql, metricSql, getAllJoinSelect(structQueryParam, "t1."), metricSql, metricSql,
getJoinOn(queryParam, isOver, "t0.", "t1."), getOrderBy(queryParam), getJoinOn(structQueryParam, isOver, "t0.", "t1."), getOrderBy(structQueryParam),
getLimit(queryParam)); getLimit(structQueryParam));
return sql; return sql;
} }
} }
public class CkEngineSql extends MysqlEngineSql { public class CkEngineSql extends MysqlEngineSql {
public String getJoinOn(QueryParam queryParam, boolean isOver, String aliasLeft, public String getJoinOn(StructQueryParam structQueryParam, boolean isOver, String aliasLeft,
String aliasRight) { String aliasRight) {
String timeDim = getTimeDim(queryParam); String timeDim = getTimeDim(structQueryParam);
String timeSpan = "INTERVAL " + getTimeSpan(queryParam, isOver, true); String timeSpan = "INTERVAL " + getTimeSpan(structQueryParam, isOver, true);
String aggStr = queryParam.getAggregators().stream().map(f -> { String aggStr = structQueryParam.getAggregators().stream().map(f -> {
if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER) if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER)
|| f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) { || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
if (queryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) { if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) {
return String.format( return String.format(
"toDate(CONCAT(%s,'-01')) = date_add(toDate(CONCAT(%s,'-01')),%s) ", "toDate(CONCAT(%s,'-01')) = date_add(toDate(CONCAT(%s,'-01')),%s) ",
aliasLeft + timeDim, aliasRight + timeDim, timeSpan); aliasLeft + timeDim, aliasRight + timeDim, timeSpan);
} }
if (queryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK) if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK)
&& isOver) { && isOver) {
return String.format("toMonday(date_add(%s ,INTERVAL %s) ) = %s", return String.format("toMonday(date_add(%s ,INTERVAL %s) ) = %s",
aliasLeft + timeDim, getTimeSpan(queryParam, isOver, false), aliasLeft + timeDim, getTimeSpan(structQueryParam, isOver, false),
aliasRight + timeDim); aliasRight + timeDim);
} }
return String.format("%s = date_add(%s,%s) ", aliasLeft + timeDim, return String.format("%s = date_add(%s,%s) ", aliasLeft + timeDim,
@@ -266,7 +224,7 @@ public class CalculateAggConverter implements QueryConverter {
} }
}).collect(Collectors.joining(" and ")); }).collect(Collectors.joining(" and "));
List<String> groups = new ArrayList<>(); List<String> groups = new ArrayList<>();
for (String group : queryParam.getGroups()) { for (String group : structQueryParam.getGroups()) {
if (group.equalsIgnoreCase(timeDim)) { if (group.equalsIgnoreCase(timeDim)) {
continue; continue;
} }
@@ -277,45 +235,49 @@ public class CalculateAggConverter implements QueryConverter {
} }
@Override @Override
public String sql(QueryParam queryParam, boolean isOver, boolean asWith, String metricSql) { public String sql(StructQueryParam structQueryParam, boolean isOver, boolean asWith,
String metricSql) {
if (!asWith) { if (!asWith) {
return String.format( return String.format(
"select %s from ( select %s , %s from %s t0 left join %s t1 on %s ) metric_tb_src %s %s ", "select %s from ( select %s , %s from %s t0 left join %s t1 on %s ) metric_tb_src %s %s ",
getOverSelect(queryParam, isOver), getAllSelect(queryParam, "t0."), getOverSelect(structQueryParam, isOver),
getAllJoinSelect(queryParam, "t1."), metricSql, metricSql, getAllSelect(structQueryParam, "t0."),
getJoinOn(queryParam, isOver, "t0.", "t1."), getOrderBy(queryParam), getAllJoinSelect(structQueryParam, "t1."), metricSql, metricSql,
getLimit(queryParam)); getJoinOn(structQueryParam, isOver, "t0.", "t1."),
getOrderBy(structQueryParam), getLimit(structQueryParam));
} }
return String.format( return String.format(
",t0 as (select * from %s),t1 as (select * from %s) select %s from ( select %s , %s " ",t0 as (select * from %s),t1 as (select * from %s) select %s from ( select %s , %s "
+ "from t0 left join t1 on %s ) metric_tb_src %s %s ", + "from t0 left join t1 on %s ) metric_tb_src %s %s ",
metricSql, metricSql, getOverSelect(queryParam, isOver), metricSql, metricSql, getOverSelect(structQueryParam, isOver),
getAllSelect(queryParam, "t0."), getAllJoinSelect(queryParam, "t1."), getAllSelect(structQueryParam, "t0."),
getJoinOn(queryParam, isOver, "t0.", "t1."), getOrderBy(queryParam), getAllJoinSelect(structQueryParam, "t1."),
getLimit(queryParam)); getJoinOn(structQueryParam, isOver, "t0.", "t1."), getOrderBy(structQueryParam),
getLimit(structQueryParam));
} }
} }
public class MysqlEngineSql implements EngineSql { public class MysqlEngineSql implements EngineSql {
public String getTimeSpan(QueryParam queryParam, boolean isOver, boolean isAdd) { public String getTimeSpan(StructQueryParam structQueryParam, boolean isOver,
if (Objects.nonNull(queryParam.getDateInfo())) { boolean isAdd) {
if (Objects.nonNull(structQueryParam.getDateInfo())) {
String addStr = isAdd ? "" : "-"; String addStr = isAdd ? "" : "-";
if (queryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.DAY)) { if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.DAY)) {
return isOver ? addStr + "7 day" : addStr + "1 day"; return isOver ? addStr + "7 day" : addStr + "1 day";
} }
if (queryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK)) { if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK)) {
return isOver ? addStr + "1 month" : addStr + "7 day"; return isOver ? addStr + "1 month" : addStr + "7 day";
} }
if (queryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) { if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) {
return isOver ? addStr + "1 year" : addStr + "1 month"; return isOver ? addStr + "1 year" : addStr + "1 month";
} }
} }
return ""; return "";
} }
public String getOverSelect(QueryParam queryParam, boolean isOver) { public String getOverSelect(StructQueryParam structQueryParam, boolean isOver) {
String aggStr = queryParam.getAggregators().stream().map(f -> { String aggStr = structQueryParam.getAggregators().stream().map(f -> {
if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER) if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER)
|| f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) { || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
return String.format("if(%s_roll!=0, (%s-%s_roll)/%s_roll , 0) as %s_%s,%s", return String.format("if(%s_roll!=0, (%s-%s_roll)/%s_roll , 0) as %s_%s,%s",
@@ -325,26 +287,26 @@ public class CalculateAggConverter implements QueryConverter {
return f.getColumn(); return f.getColumn();
} }
}).collect(Collectors.joining(",")); }).collect(Collectors.joining(","));
return CollectionUtils.isEmpty(queryParam.getGroups()) ? aggStr return CollectionUtils.isEmpty(structQueryParam.getGroups()) ? aggStr
: String.join(",", queryParam.getGroups()) + "," + aggStr; : String.join(",", structQueryParam.getGroups()) + "," + aggStr;
} }
public String getJoinOn(QueryParam queryParam, boolean isOver, String aliasLeft, public String getJoinOn(StructQueryParam structQueryParam, boolean isOver, String aliasLeft,
String aliasRight) { String aliasRight) {
String timeDim = getTimeDim(queryParam); String timeDim = getTimeDim(structQueryParam);
String timeSpan = "INTERVAL " + getTimeSpan(queryParam, isOver, true); String timeSpan = "INTERVAL " + getTimeSpan(structQueryParam, isOver, true);
String aggStr = queryParam.getAggregators().stream().map(f -> { String aggStr = structQueryParam.getAggregators().stream().map(f -> {
if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER) if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER)
|| f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) { || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
if (queryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) { if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) {
return String.format( return String.format(
"%s = DATE_FORMAT(date_add(CONCAT(%s,'-01'), %s),'%%Y-%%m') ", "%s = DATE_FORMAT(date_add(CONCAT(%s,'-01'), %s),'%%Y-%%m') ",
aliasLeft + timeDim, aliasRight + timeDim, timeSpan); aliasLeft + timeDim, aliasRight + timeDim, timeSpan);
} }
if (queryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK) if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK)
&& isOver) { && isOver) {
return String.format("to_monday(date_add(%s ,INTERVAL %s) ) = %s", return String.format("to_monday(date_add(%s ,INTERVAL %s) ) = %s",
aliasLeft + timeDim, getTimeSpan(queryParam, isOver, false), aliasLeft + timeDim, getTimeSpan(structQueryParam, isOver, false),
aliasRight + timeDim); aliasRight + timeDim);
} }
return String.format("%s = date_add(%s,%s) ", aliasLeft + timeDim, return String.format("%s = date_add(%s,%s) ", aliasLeft + timeDim,
@@ -354,7 +316,7 @@ public class CalculateAggConverter implements QueryConverter {
} }
}).collect(Collectors.joining(" and ")); }).collect(Collectors.joining(" and "));
List<String> groups = new ArrayList<>(); List<String> groups = new ArrayList<>();
for (String group : queryParam.getGroups()) { for (String group : structQueryParam.getGroups()) {
if (group.equalsIgnoreCase(timeDim)) { if (group.equalsIgnoreCase(timeDim)) {
continue; continue;
} }
@@ -365,51 +327,53 @@ public class CalculateAggConverter implements QueryConverter {
} }
@Override @Override
public String sql(QueryParam queryParam, boolean isOver, boolean asWith, String metricSql) { public String sql(StructQueryParam structQueryParam, boolean isOver, boolean asWith,
String metricSql) {
String sql = String.format( String sql = String.format(
"select %s from ( select %s , %s from %s t0 left join %s t1 on %s ) metric_tb_src %s %s ", "select %s from ( select %s , %s from %s t0 left join %s t1 on %s ) metric_tb_src %s %s ",
getOverSelect(queryParam, isOver), getAllSelect(queryParam, "t0."), getOverSelect(structQueryParam, isOver), getAllSelect(structQueryParam, "t0."),
getAllJoinSelect(queryParam, "t1."), metricSql, metricSql, getAllJoinSelect(structQueryParam, "t1."), metricSql, metricSql,
getJoinOn(queryParam, isOver, "t0.", "t1."), getOrderBy(queryParam), getJoinOn(structQueryParam, isOver, "t0.", "t1."), getOrderBy(structQueryParam),
getLimit(queryParam)); getLimit(structQueryParam));
return sql; return sql;
} }
} }
private String getAllJoinSelect(QueryParam queryParam, String alias) { private String getAllJoinSelect(StructQueryParam structQueryParam, String alias) {
String aggStr = queryParam.getAggregators().stream() String aggStr = structQueryParam.getAggregators().stream()
.map(f -> getSelectField(f, alias) + " as " + getSelectField(f, "") + "_roll") .map(f -> getSelectField(f, alias) + " as " + getSelectField(f, "") + "_roll")
.collect(Collectors.joining(",")); .collect(Collectors.joining(","));
List<String> groups = new ArrayList<>(); List<String> groups = new ArrayList<>();
for (String group : queryParam.getGroups()) { for (String group : structQueryParam.getGroups()) {
groups.add(alias + group + " as " + group + "_roll"); groups.add(alias + group + " as " + group + "_roll");
} }
return CollectionUtils.isEmpty(groups) ? aggStr : String.join(",", groups) + "," + aggStr; return CollectionUtils.isEmpty(groups) ? aggStr : String.join(",", groups) + "," + aggStr;
} }
private String getGroupDimWithOutTime(QueryParam queryParam) { private String getGroupDimWithOutTime(StructQueryParam structQueryParam) {
String timeDim = getTimeDim(queryParam); String timeDim = getTimeDim(structQueryParam);
return queryParam.getGroups().stream().filter(f -> !f.equalsIgnoreCase(timeDim)) return structQueryParam.getGroups().stream().filter(f -> !f.equalsIgnoreCase(timeDim))
.collect(Collectors.joining(",")); .collect(Collectors.joining(","));
} }
private static String getTimeDim(QueryParam queryParam) { private static String getTimeDim(StructQueryParam structQueryParam) {
DateModeUtils dateModeUtils = ContextUtils.getContext().getBean(DateModeUtils.class); DateModeUtils dateModeUtils = ContextUtils.getContext().getBean(DateModeUtils.class);
return dateModeUtils.getSysDateCol(queryParam.getDateInfo()); return dateModeUtils.getSysDateCol(structQueryParam.getDateInfo());
} }
private static String getLimit(QueryParam queryParam) { private static String getLimit(StructQueryParam structQueryParam) {
if (queryParam != null && queryParam.getLimit() != null && queryParam.getLimit() > 0) { if (structQueryParam != null && structQueryParam.getLimit() != null
return " limit " + String.valueOf(queryParam.getLimit()); && structQueryParam.getLimit() > 0) {
return " limit " + String.valueOf(structQueryParam.getLimit());
} }
return ""; return "";
} }
private String getAllSelect(QueryParam queryParam, String alias) { private String getAllSelect(StructQueryParam structQueryParam, String alias) {
String aggStr = queryParam.getAggregators().stream().map(f -> getSelectField(f, alias)) String aggStr = structQueryParam.getAggregators().stream()
.collect(Collectors.joining(",")); .map(f -> getSelectField(f, alias)).collect(Collectors.joining(","));
return CollectionUtils.isEmpty(queryParam.getGroups()) ? aggStr return CollectionUtils.isEmpty(structQueryParam.getGroups()) ? aggStr
: alias + String.join("," + alias, queryParam.getGroups()) + "," + aggStr; : alias + String.join("," + alias, structQueryParam.getGroups()) + "," + aggStr;
} }
private String getSelectField(final Aggregator agg, String alias) { private String getSelectField(final Aggregator agg, String alias) {
@@ -421,32 +385,32 @@ public class CalculateAggConverter implements QueryConverter {
return sqlGenerateUtils.getSelectField(agg); return sqlGenerateUtils.getSelectField(agg);
} }
private String getGroupBy(QueryParam queryParam) { private String getGroupBy(StructQueryParam structQueryParam) {
if (CollectionUtils.isEmpty(queryParam.getGroups())) { if (CollectionUtils.isEmpty(structQueryParam.getGroups())) {
return ""; return "";
} }
return "group by " + String.join(",", queryParam.getGroups()); return "group by " + String.join(",", structQueryParam.getGroups());
} }
private static String getOrderBy(QueryParam queryParam) { private static String getOrderBy(StructQueryParam structQueryParam) {
return "order by " + getTimeDim(queryParam) + " desc"; return "order by " + getTimeDim(structQueryParam) + " desc";
} }
private boolean isOverRatio(QueryParam queryParam) { private boolean isOverRatio(StructQueryParam structQueryParam) {
Long overCt = queryParam.getAggregators().stream() Long overCt = structQueryParam.getAggregators().stream()
.filter(f -> f.getFunc().equals(AggOperatorEnum.RATIO_OVER)).count(); .filter(f -> f.getFunc().equals(AggOperatorEnum.RATIO_OVER)).count();
return overCt > 0; return overCt > 0;
} }
private void check(QueryParam queryParam) throws Exception { private void check(StructQueryParam structQueryParam) throws Exception {
Long ratioOverNum = queryParam.getAggregators().stream() Long ratioOverNum = structQueryParam.getAggregators().stream()
.filter(f -> f.getFunc().equals(AggOperatorEnum.RATIO_OVER)).count(); .filter(f -> f.getFunc().equals(AggOperatorEnum.RATIO_OVER)).count();
Long ratioRollNum = queryParam.getAggregators().stream() Long ratioRollNum = structQueryParam.getAggregators().stream()
.filter(f -> f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)).count(); .filter(f -> f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)).count();
if (ratioOverNum > 0 && ratioRollNum > 0) { if (ratioOverNum > 0 && ratioRollNum > 0) {
throw new Exception("not support over ratio and roll ratio together "); throw new Exception("not support over ratio and roll ratio together ");
} }
if (getTimeDim(queryParam).isEmpty()) { if (getTimeDim(structQueryParam).isEmpty()) {
throw new Exception("miss time filter"); throw new Exception("miss time filter");
} }
} }

View File

@@ -1,74 +0,0 @@
package com.tencent.supersonic.headless.core.translator.converter;
import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.headless.api.pojo.QueryParam;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel;
import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
/**
 * Default {@link QueryConverter} implementation: turns a plain {@code QueryParam}
 * into a {@code MetricQueryParam} (metrics, dimensions, where, order, limit).
 * Acts as the fallback when {@link CalculateAggConverter} does not accept the statement.
 */
@Component("ParserDefaultConverter")
@Slf4j
public class ParserDefaultConverter implements QueryConverter {

    /**
     * Accepts non-S2SQL statements carrying a QueryParam, but only when the
     * aggregation-calculation converter declines them (mutually exclusive pair).
     */
    @Override
    public boolean accept(QueryStatement queryStatement) {
        if (Objects.isNull(queryStatement.getQueryParam()) || queryStatement.getIsS2SQL()) {
            return false;
        }
        CalculateAggConverter calculateConverterAgg =
                ContextUtils.getBean(CalculateAggConverter.class);
        return !calculateConverterAgg.accept(queryStatement);
    }

    /**
     * Builds the metric query command from the QueryParam and copies it onto the
     * statement's existing MetricQueryParam; also resolves the query time window.
     */
    @Override
    public void convert(QueryStatement queryStatement) throws Exception {
        SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class);
        QueryParam queryParam = queryStatement.getQueryParam();
        MetricQueryParam metricQueryParam = queryStatement.getMetricQueryParam();
        MetricQueryParam metricReq =
                generateSqlCommand(queryStatement.getQueryParam(), queryStatement);
        queryStatement.setMinMaxTime(sqlGenerateUtils.getBeginEndTime(queryParam, null));
        // copy generated fields onto the statement's pre-existing MetricQueryParam
        BeanUtils.copyProperties(metricReq, metricQueryParam);
    }

    /**
     * Translates the QueryParam into a MetricQueryParam.
     *
     * @param queryParam source request (metrics, groups, filters, orders, limit)
     * @param queryStatement used to reach the ontology model map for detail queries
     * @return the populated MetricQueryParam
     */
    public MetricQueryParam generateSqlCommand(QueryParam queryParam,
            QueryStatement queryStatement) {
        SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class);
        MetricQueryParam metricQueryParam = new MetricQueryParam();
        metricQueryParam.setMetrics(queryParam.getMetrics());
        metricQueryParam.setDimensions(queryParam.getGroups());
        String where = sqlGenerateUtils.generateWhere(queryParam, null);
        log.info("in generateSqlCommand, complete where:{}", where);
        metricQueryParam.setWhere(where);
        metricQueryParam.setOrder(queryParam.getOrders().stream()
                .map(order -> new ColumnOrder(order.getColumn(), order.getDirection()))
                .collect(Collectors.toList()));
        metricQueryParam.setLimit(queryParam.getLimit());
        // support detail query: a native-agg query with no explicit metrics gets one
        // internal metric per model so the generated SQL still has a select target
        if (queryParam.getQueryType().isNativeAggQuery()
                && CollectionUtils.isEmpty(metricQueryParam.getMetrics())) {
            if (Objects.isNull(metricQueryParam.getMetrics())) {
                // CollectionUtils.isEmpty also matches null; materialize a list so
                // the add() below cannot throw a NullPointerException
                metricQueryParam.setMetrics(new java.util.ArrayList<>());
            }
            Map<Long, DataModel> modelMap = queryStatement.getOntology().getModelMap();
            for (Map.Entry<Long, DataModel> entry : modelMap.entrySet()) {
                String modelBizName = entry.getValue().getName();
                String internalMetricName =
                        sqlGenerateUtils.generateInternalMetricName(modelBizName);
                metricQueryParam.getMetrics().add(internalMetricName);
            }
        }
        return metricQueryParam;
    }
}

View File

@@ -0,0 +1,307 @@
package com.tencent.supersonic.headless.core.translator.converter;
import com.tencent.supersonic.common.jsqlparser.SqlReplaceHelper;
import com.tencent.supersonic.common.jsqlparser.SqlSelectFunctionHelper;
import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.headless.api.pojo.Measure;
import com.tencent.supersonic.headless.api.pojo.SchemaItem;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.api.pojo.enums.MetricType;
import com.tencent.supersonic.headless.api.pojo.response.*;
import com.tencent.supersonic.headless.core.adaptor.db.DbAdaptor;
import com.tencent.supersonic.headless.core.adaptor.db.DbAdaptorFactory;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQueryParam;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.OntologyQueryParam;
import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
 * {@link QueryConverter} for S2SQL queries: rewrites the incoming SQL
 * (name-to-bizName mapping, engine-specific function names, order-by aliases),
 * then derives the ontology query (metrics, dimensions, aggregation option)
 * from the rewritten SQL, expanding derived metrics into measure expressions.
 */
@Component("SqlQueryConverter")
@Slf4j
public class SqlQueryConverter implements QueryConverter {

    /** Accepts only statements that carry a SqlQueryParam and originate from S2SQL. */
    @Override
    public boolean accept(QueryStatement queryStatement) {
        return Objects.nonNull(queryStatement.getSqlQueryParam()) && queryStatement.getIsS2SQL();
    }

    /**
     * Full conversion pipeline. Mutates the statement's SqlQueryParam in place and
     * attaches a new OntologyQueryParam. Returns early (no ontology query) when the
     * SQL has no resolvable table name.
     */
    @Override
    public void convert(QueryStatement queryStatement) throws Exception {
        convertNameToBizName(queryStatement);
        rewriteFunction(queryStatement);
        rewriteOrderBy(queryStatement);
        // fill sqlQuery
        SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp();
        SqlQueryParam sqlQueryParam = queryStatement.getSqlQueryParam();
        String tableName = SqlSelectHelper.getTableName(sqlQueryParam.getSql());
        if (StringUtils.isEmpty(tableName)) {
            return;
        }
        sqlQueryParam.setTable(tableName.toLowerCase());
        SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class);
        // engines/versions without CTE support fall back to non-WITH SQL generation
        if (!sqlGenerateUtils.isSupportWith(
                EngineType.fromString(semanticSchemaResp.getDatabaseResp().getType().toUpperCase()),
                semanticSchemaResp.getDatabaseResp().getVersion())) {
            sqlQueryParam.setSupportWith(false);
            sqlQueryParam.setWithAlias(false);
        }
        // build ontologyQuery
        List<String> allFields = SqlSelectHelper.getAllSelectFields(sqlQueryParam.getSql());
        List<MetricSchemaResp> metricSchemas = getMetrics(semanticSchemaResp, allFields);
        List<String> metrics =
                metricSchemas.stream().map(SchemaItem::getBizName).collect(Collectors.toList());
        AggOption aggOption = getAggOption(sqlQueryParam.getSql(), metricSchemas);
        Set<String> dimensions = getDimensions(semanticSchemaResp, allFields);
        OntologyQueryParam ontologyQueryParam = new OntologyQueryParam();
        ontologyQueryParam.getMetrics().addAll(metrics);
        ontologyQueryParam.getDimensions().addAll(dimensions);
        ontologyQueryParam.setAggOption(aggOption);
        ontologyQueryParam.setNativeQuery(!AggOption.isAgg(aggOption));
        queryStatement.setOntologyQueryParam(ontologyQueryParam);
        generateDerivedMetric(sqlGenerateUtils, queryStatement);
        queryStatement.setSql(sqlQueryParam.getSql());
        log.info("parse sqlQuery [{}] ", sqlQueryParam);
    }

    /**
     * Heuristically picks the aggregation option for the metric table.
     * NOTE(review): the first branch returns AGGREGATION whenever the SQL contains
     * an aggregate function, so from the second branch on !hasAggregateFunction(sql)
     * is always true — the third condition always returns OUTER and the checks after
     * it appear unreachable. Confirm whether that ordering is intentional.
     */
    private AggOption getAggOption(String sql, List<MetricSchemaResp> metricSchemas) {
        if (SqlSelectFunctionHelper.hasAggregateFunction(sql)) {
            return AggOption.AGGREGATION;
        }
        if (!SqlSelectFunctionHelper.hasAggregateFunction(sql) && !SqlSelectHelper.hasGroupBy(sql)
                && !SqlSelectHelper.hasWith(sql) && !SqlSelectHelper.hasSubSelect(sql)) {
            log.debug("getAggOption simple sql set to DEFAULT");
            return AggOption.DEFAULT;
        }
        // if there is no group by in S2SQL,set MetricTable's aggOption to "NATIVE"
        // if there is count() in S2SQL,set MetricTable's aggOption to "NATIVE"
        if (!SqlSelectFunctionHelper.hasAggregateFunction(sql)
                || SqlSelectFunctionHelper.hasFunction(sql, "count")
                || SqlSelectFunctionHelper.hasFunction(sql, "count_distinct")) {
            return AggOption.OUTER;
        }
        if (SqlSelectHelper.hasSubSelect(sql) || SqlSelectHelper.hasWith(sql)
                || SqlSelectHelper.hasGroupBy(sql)) {
            return AggOption.OUTER;
        }
        // metrics without a default aggregation cannot be safely auto-aggregated
        long defaultAggNullCnt = metricSchemas.stream().filter(
                m -> Objects.isNull(m.getDefaultAgg()) || StringUtils.isBlank(m.getDefaultAgg()))
                .count();
        if (defaultAggNullCnt > 0) {
            log.debug("getAggOption find null defaultAgg metric set to NATIVE");
            return AggOption.OUTER;
        }
        return AggOption.DEFAULT;
    }

    /**
     * Returns the schema dimensions referenced by the SQL's select fields
     * (case-insensitive match on bizName). The day time-dimension is always
     * considered a known dimension.
     */
    private Set<String> getDimensions(SemanticSchemaResp semanticSchemaResp,
            List<String> allFields) {
        Map<String, String> dimensionLowerToNameMap = semanticSchemaResp.getDimensions().stream()
                .collect(Collectors.toMap(entry -> entry.getBizName().toLowerCase(),
                        SchemaItem::getBizName, (k1, k2) -> k1));
        dimensionLowerToNameMap.put(TimeDimensionEnum.DAY.getName(),
                TimeDimensionEnum.DAY.getName());
        return allFields.stream()
                .filter(entry -> dimensionLowerToNameMap.containsKey(entry.toLowerCase()))
                .map(entry -> dimensionLowerToNameMap.get(entry.toLowerCase()))
                .collect(Collectors.toSet());
    }

    /**
     * Returns the schema metrics referenced by the SQL's select fields
     * (case-insensitive match on bizName).
     */
    private List<MetricSchemaResp> getMetrics(SemanticSchemaResp semanticSchemaResp,
            List<String> allFields) {
        Map<String, MetricSchemaResp> metricLowerToNameMap =
                semanticSchemaResp.getMetrics().stream().collect(Collectors
                        .toMap(entry -> entry.getBizName().toLowerCase(), entry -> entry));
        return allFields.stream()
                .filter(entry -> metricLowerToNameMap.containsKey(entry.toLowerCase()))
                .map(entry -> metricLowerToNameMap.get(entry.toLowerCase()))
                .collect(Collectors.toList());
    }

    /**
     * Rewrites derived metrics in the SQL into their measure expressions and
     * switches the ontology query to NATIVE aggregation when any replacement
     * happened. When no measures remain, falls back to a default internal metric.
     */
    private void generateDerivedMetric(SqlGenerateUtils sqlGenerateUtils,
            QueryStatement queryStatement) {
        SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp();
        SqlQueryParam sqlParam = queryStatement.getSqlQueryParam();
        OntologyQueryParam ontologyParam = queryStatement.getOntologyQueryParam();
        String sql = sqlParam.getSql();
        Set<String> measures = new HashSet<>();
        Map<String, String> replaces = generateDerivedMetric(sqlGenerateUtils, semanticSchemaResp,
                ontologyParam.getAggOption(), ontologyParam.getMetrics(),
                ontologyParam.getDimensions(), measures);
        if (!CollectionUtils.isEmpty(replaces)) {
            // metricTable sql use measures replace metric
            sql = SqlReplaceHelper.replaceSqlByExpression(sql, replaces);
            ontologyParam.setAggOption(AggOption.NATIVE);
            // metricTable use measures replace metric
            if (!CollectionUtils.isEmpty(measures)) {
                ontologyParam.getMetrics().addAll(measures);
            } else {
                // empty measure , fill default
                ontologyParam.getMetrics().add(sqlGenerateUtils.generateInternalMetricName(
                        getDefaultModel(semanticSchemaResp, ontologyParam.getDimensions())));
            }
        }
        sqlParam.setSql(sql);
    }

    /**
     * Computes, for each requested derived metric, the expression that replaces
     * it in the SQL. Side effects: adds plain (non-derived) metrics and any
     * metrics referenced by derived expressions into {@code measures}, and adds
     * dimensions referenced by derived expressions into {@code dimensions}.
     *
     * @return map of metric bizName -> replacement expression; empty when no
     *         requested metric is derived
     */
    private Map<String, String> generateDerivedMetric(SqlGenerateUtils sqlGenerateUtils,
            SemanticSchemaResp semanticSchemaResp, AggOption aggOption, Set<String> metrics,
            Set<String> dimensions, Set<String> measures) {
        Map<String, String> result = new HashMap<>();
        List<MetricSchemaResp> metricResps = semanticSchemaResp.getMetrics();
        List<DimSchemaResp> dimensionResps = semanticSchemaResp.getDimensions();
        // Check if any metric is derived
        boolean hasDerivedMetrics =
                metricResps.stream().anyMatch(m -> metrics.contains(m.getBizName()) && MetricType
                        .isDerived(m.getMetricDefineType(), m.getMetricDefineByMeasureParams()));
        if (!hasDerivedMetrics) {
            return result;
        }
        log.debug("begin to generateDerivedMetric {} [{}]", aggOption, metrics);
        // collect every model field and measure so derived expressions can resolve
        Set<String> allFields = new HashSet<>();
        Map<String, Measure> allMeasures = new HashMap<>();
        semanticSchemaResp.getModelResps().forEach(modelResp -> {
            allFields.addAll(modelResp.getFieldList());
            if (modelResp.getModelDetail().getMeasures() != null) {
                modelResp.getModelDetail().getMeasures()
                        .forEach(measure -> allMeasures.put(measure.getBizName(), measure));
            }
        });
        Set<String> derivedDimensions = new HashSet<>();
        Set<String> derivedMetrics = new HashSet<>();
        // visitedMetrics memoizes already-expanded metrics across recursive expansion
        Map<String, String> visitedMetrics = new HashMap<>();
        for (MetricResp metricResp : metricResps) {
            if (metrics.contains(metricResp.getBizName())) {
                boolean isDerived = MetricType.isDerived(metricResp.getMetricDefineType(),
                        metricResp.getMetricDefineByMeasureParams());
                if (isDerived) {
                    String expr = sqlGenerateUtils.generateDerivedMetric(metricResps, allFields,
                            allMeasures, dimensionResps, sqlGenerateUtils.getExpr(metricResp),
                            metricResp.getMetricDefineType(), aggOption, visitedMetrics,
                            derivedMetrics, derivedDimensions);
                    result.put(metricResp.getBizName(), expr);
                    log.debug("derived metric {}->{}", metricResp.getBizName(), expr);
                } else {
                    measures.add(metricResp.getBizName());
                }
            }
        }
        measures.addAll(derivedMetrics);
        derivedDimensions.stream().filter(dimension -> !dimensions.contains(dimension))
                .forEach(dimensions::add);
        return result;
    }

    /**
     * Replaces display names (and aliases) in the SQL with bizNames, and the
     * table name with the data-set table (TABLE_PREFIX + dataSetId).
     */
    private void convertNameToBizName(QueryStatement queryStatement) {
        SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp();
        Map<String, String> fieldNameToBizNameMap = getFieldNameToBizNameMap(semanticSchemaResp);
        String sql = queryStatement.getSqlQueryParam().getSql();
        log.debug("dataSetId:{},convert name to bizName before:{}", queryStatement.getDataSetId(),
                sql);
        sql = SqlReplaceHelper.replaceFields(sql, fieldNameToBizNameMap, true);
        log.debug("dataSetId:{},convert name to bizName after:{}", queryStatement.getDataSetId(),
                sql);
        sql = SqlReplaceHelper.replaceTable(sql,
                Constants.TABLE_PREFIX + queryStatement.getDataSetId());
        log.debug("replaceTableName after:{}", sql);
        queryStatement.getSqlQueryParam().setSql(sql);
    }

    /** Rewrites order-by items that reference aggregate aliases in the select list. */
    private void rewriteOrderBy(QueryStatement queryStatement) {
        // replace order by field with the select sequence number
        String sql = queryStatement.getSqlQueryParam().getSql();
        String newSql = SqlReplaceHelper.replaceAggAliasOrderbyField(sql);
        log.debug("replaceOrderAggSameAlias {} -> {}", sql, newSql);
        queryStatement.getSqlQueryParam().setSql(newSql);
    }

    /**
     * Rewrites function names to the target database engine's dialect via the
     * engine's DbAdaptor; no-op when the database or its type is unknown.
     */
    private void rewriteFunction(QueryStatement queryStatement) {
        SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp();
        DatabaseResp database = semanticSchemaResp.getDatabaseResp();
        String sql = queryStatement.getSqlQueryParam().getSql();
        if (Objects.isNull(database) || Objects.isNull(database.getType())) {
            return;
        }
        String type = database.getType();
        DbAdaptor engineAdaptor = DbAdaptorFactory.getEngineAdaptor(type.toLowerCase());
        if (Objects.nonNull(engineAdaptor)) {
            String functionNameCorrector = engineAdaptor.functionNameCorrector(sql);
            queryStatement.getSqlQueryParam().setSql(functionNameCorrector);
        }
    }

    /**
     * Builds the name/alias -> bizName lookup for dimensions and metrics.
     * Time-dimension name mappings are included; on key collisions the first
     * mapping wins (metrics are merged in after dimensions).
     */
    protected Map<String, String> getFieldNameToBizNameMap(SemanticSchemaResp semanticSchemaResp) {
        // support fieldName and field alias to bizName
        Map<String, String> dimensionResults = semanticSchemaResp.getDimensions().stream().flatMap(
                entry -> getPairStream(entry.getAlias(), entry.getName(), entry.getBizName()))
                .collect(Collectors.toMap(Pair::getLeft, Pair::getRight, (k1, k2) -> k1));
        Map<String, String> metricResults = semanticSchemaResp.getMetrics().stream().flatMap(
                entry -> getPairStream(entry.getAlias(), entry.getName(), entry.getBizName()))
                .collect(Collectors.toMap(Pair::getLeft, Pair::getRight, (k1, k2) -> k1));
        dimensionResults.putAll(TimeDimensionEnum.getChNameToNameMap());
        dimensionResults.putAll(TimeDimensionEnum.getNameToNameMap());
        dimensionResults.putAll(metricResults);
        return dimensionResults;
    }

    /** Expands one item's name plus all of its aliases into (name, bizName) pairs. */
    private Stream<Pair<String, String>> getPairStream(String aliasStr, String name,
            String bizName) {
        Set<Pair<String, String>> elements = new HashSet<>();
        elements.add(Pair.of(name, bizName));
        if (StringUtils.isNotBlank(aliasStr)) {
            List<String> aliasList = SchemaItem.getAliasList(aliasStr);
            for (String alias : aliasList) {
                elements.add(Pair.of(alias, bizName));
            }
        }
        return elements.stream();
    }

    /**
     * Picks the model whose dimensions best cover the requested dimensions;
     * with no dimensions, falls back to the first model.
     */
    private String getDefaultModel(SemanticSchemaResp semanticSchemaResp, Set<String> dimensions) {
        if (!CollectionUtils.isEmpty(dimensions)) {
            Map<String, Long> modelMatchCnt = new HashMap<>();
            for (ModelResp modelResp : semanticSchemaResp.getModelResps()) {
                modelMatchCnt.put(modelResp.getBizName(), modelResp.getModelDetail().getDimensions()
                        .stream().filter(d -> dimensions.contains(d.getBizName())).count());
            }
            return modelMatchCnt.entrySet().stream()
                    .sorted(Map.Entry.comparingByValue(Comparator.reverseOrder()))
                    .map(Map.Entry::getKey).findFirst().orElse("");
        }
        return semanticSchemaResp.getModelResps().get(0).getBizName();
    }
}

View File

@@ -14,15 +14,13 @@ import java.util.List;
import java.util.Objects; import java.util.Objects;
@Slf4j @Slf4j
@Component("SqlVariableParseConverter") @Component("SqlVariableConverter")
public class SqlVariableParseConverter implements QueryConverter { public class SqlVariableConverter implements QueryConverter {
@Override @Override
public boolean accept(QueryStatement queryStatement) { public boolean accept(QueryStatement queryStatement) {
if (Objects.isNull(queryStatement.getQueryParam())) { return Objects.nonNull(queryStatement.getStructQueryParam())
return false; && !queryStatement.getIsS2SQL();
}
return true;
} }
@Override @Override
@@ -38,7 +36,7 @@ public class SqlVariableParseConverter implements QueryConverter {
String sqlParsed = String sqlParsed =
SqlVariableParseUtils.parse(modelResp.getModelDetail().getSqlQuery(), SqlVariableParseUtils.parse(modelResp.getModelDetail().getSqlQuery(),
modelResp.getModelDetail().getSqlVariables(), modelResp.getModelDetail().getSqlVariables(),
queryStatement.getQueryParam().getParams()); queryStatement.getStructQueryParam().getParams());
DataModel dataModel = DataModel dataModel =
queryStatement.getOntology().getDataModelMap().get(modelResp.getBizName()); queryStatement.getOntology().getDataModelMap().get(modelResp.getBizName());
dataModel.setSqlQuery(sqlParsed); dataModel.setSqlQuery(sqlParsed);

View File

@@ -0,0 +1,70 @@
package com.tencent.supersonic.headless.core.translator.converter;
import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.core.pojo.Database;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQueryParam;
import com.tencent.supersonic.headless.core.pojo.StructQueryParam;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.OntologyQueryParam;
import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import java.util.Objects;
import java.util.stream.Collectors;
/**
 * {@link QueryConverter} for struct queries: renders the {@code StructQueryParam}
 * into an equivalent SQL query over a synthetic table, and builds the matching
 * ontology query (dimensions, metrics, where, order, limit).
 */
@Component("StructQueryConverter")
// FIX: bean name was "ParserDefaultConverter", colliding with the ParserDefaultConverter
// component and causing a ConflictingBeanDefinitionException at context startup.
@Slf4j
public class StructQueryConverter implements QueryConverter {

    /** Accepts struct queries that did not originate from S2SQL. */
    @Override
    public boolean accept(QueryStatement queryStatement) {
        return Objects.nonNull(queryStatement.getStructQueryParam())
                && !queryStatement.getIsS2SQL();
    }

    /**
     * Generates the SQL and ontology query params from the struct request and
     * attaches both to the statement.
     */
    @Override
    public void convert(QueryStatement queryStatement) throws Exception {
        SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class);
        StructQueryParam structQueryParam = queryStatement.getStructQueryParam();
        // synthetic table name the downstream translator binds to the metric table
        String dsTable = "t_1";
        SqlQueryParam sqlParam = new SqlQueryParam();
        sqlParam.setTable(dsTable);
        String sql = String.format("select %s from %s %s %s %s",
                sqlGenerateUtils.getSelect(structQueryParam), dsTable,
                sqlGenerateUtils.getGroupBy(structQueryParam),
                sqlGenerateUtils.getOrderBy(structQueryParam),
                sqlGenerateUtils.getLimit(structQueryParam));
        Database database = queryStatement.getOntology().getDatabase();
        if (!sqlGenerateUtils.isSupportWith(database.getType(), database.getVersion())) {
            // engines without CTE support need an explicit table alias instead
            sqlParam.setSupportWith(false);
            sql = String.format("select %s from %s t0 %s %s %s",
                    sqlGenerateUtils.getSelect(structQueryParam), dsTable,
                    sqlGenerateUtils.getGroupBy(structQueryParam),
                    sqlGenerateUtils.getOrderBy(structQueryParam),
                    sqlGenerateUtils.getLimit(structQueryParam));
        }
        sqlParam.setSql(sql);
        queryStatement.setSqlQueryParam(sqlParam);
        OntologyQueryParam ontologyQueryParam = new OntologyQueryParam();
        ontologyQueryParam.getDimensions().addAll(structQueryParam.getGroups());
        ontologyQueryParam.getMetrics().addAll(structQueryParam.getAggregators().stream()
                .map(Aggregator::getColumn).collect(Collectors.toList()));
        String where = sqlGenerateUtils.generateWhere(structQueryParam, null);
        ontologyQueryParam.setWhere(where);
        ontologyQueryParam.setAggOption(AggOption.AGGREGATION);
        ontologyQueryParam.setNativeQuery(structQueryParam.getQueryType().isNativeAggQuery());
        ontologyQueryParam.setOrder(structQueryParam.getOrders().stream()
                .map(order -> new ColumnOrder(order.getColumn(), order.getDirection()))
                .collect(Collectors.toList()));
        ontologyQueryParam.setLimit(structQueryParam.getLimit());
        queryStatement.setOntologyQueryParam(ontologyQueryParam);
        log.info("parse structQuery [{}] ", queryStatement.getSqlQueryParam());
    }
}

View File

@@ -12,7 +12,6 @@ import com.tencent.supersonic.common.util.DateModeUtils;
import com.tencent.supersonic.common.util.SqlFilterUtils; import com.tencent.supersonic.common.util.SqlFilterUtils;
import com.tencent.supersonic.common.util.StringUtil; import com.tencent.supersonic.common.util.StringUtil;
import com.tencent.supersonic.headless.api.pojo.Measure; import com.tencent.supersonic.headless.api.pojo.Measure;
import com.tencent.supersonic.headless.api.pojo.QueryParam;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption; import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.api.pojo.enums.MetricDefineType; import com.tencent.supersonic.headless.api.pojo.enums.MetricDefineType;
import com.tencent.supersonic.headless.api.pojo.request.QueryStructReq; import com.tencent.supersonic.headless.api.pojo.request.QueryStructReq;
@@ -20,6 +19,7 @@ import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricResp; import com.tencent.supersonic.headless.api.pojo.response.MetricResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp; import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import com.tencent.supersonic.headless.core.config.ExecutorConfig; import com.tencent.supersonic.headless.core.config.ExecutorConfig;
import com.tencent.supersonic.headless.core.pojo.StructQueryParam;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.commons.lang3.tuple.ImmutablePair;
@@ -85,25 +85,26 @@ public class SqlGenerateUtils {
return selectSql; return selectSql;
} }
public String getLimit(QueryParam queryParam) { public String getLimit(StructQueryParam structQueryParam) {
if (queryParam != null && queryParam.getLimit() != null && queryParam.getLimit() > 0) { if (structQueryParam != null && structQueryParam.getLimit() != null
return " limit " + queryParam.getLimit(); && structQueryParam.getLimit() > 0) {
return " limit " + structQueryParam.getLimit();
} }
return ""; return "";
} }
public String getSelect(QueryParam queryParam) { public String getSelect(StructQueryParam structQueryParam) {
String aggStr = queryParam.getAggregators().stream().map(this::getSelectField) String aggStr = structQueryParam.getAggregators().stream().map(this::getSelectField)
.collect(Collectors.joining(",")); .collect(Collectors.joining(","));
return CollectionUtils.isEmpty(queryParam.getGroups()) ? aggStr return CollectionUtils.isEmpty(structQueryParam.getGroups()) ? aggStr
: String.join(",", queryParam.getGroups()) + "," + aggStr; : String.join(",", structQueryParam.getGroups()) + "," + aggStr;
} }
public String getSelect(QueryParam queryParam, Map<String, String> deriveMetrics) { public String getSelect(StructQueryParam structQueryParam, Map<String, String> deriveMetrics) {
String aggStr = queryParam.getAggregators().stream() String aggStr = structQueryParam.getAggregators().stream()
.map(a -> getSelectField(a, deriveMetrics)).collect(Collectors.joining(",")); .map(a -> getSelectField(a, deriveMetrics)).collect(Collectors.joining(","));
return CollectionUtils.isEmpty(queryParam.getGroups()) ? aggStr return CollectionUtils.isEmpty(structQueryParam.getGroups()) ? aggStr
: String.join(",", queryParam.getGroups()) + "," + aggStr; : String.join(",", structQueryParam.getGroups()) + "," + aggStr;
} }
public String getSelectField(final Aggregator agg) { public String getSelectField(final Aggregator agg) {
@@ -128,46 +129,46 @@ public class SqlGenerateUtils {
return deriveMetrics.get(agg.getColumn()); return deriveMetrics.get(agg.getColumn());
} }
public String getGroupBy(QueryParam queryParam) { public String getGroupBy(StructQueryParam structQueryParam) {
if (CollectionUtils.isEmpty(queryParam.getGroups())) { if (CollectionUtils.isEmpty(structQueryParam.getGroups())) {
return ""; return "";
} }
return "group by " + String.join(",", queryParam.getGroups()); return "group by " + String.join(",", structQueryParam.getGroups());
} }
public String getOrderBy(QueryParam queryParam) { public String getOrderBy(StructQueryParam structQueryParam) {
if (CollectionUtils.isEmpty(queryParam.getOrders())) { if (CollectionUtils.isEmpty(structQueryParam.getOrders())) {
return ""; return "";
} }
return "order by " + queryParam.getOrders().stream() return "order by " + structQueryParam.getOrders().stream()
.map(order -> " " + order.getColumn() + " " + order.getDirection() + " ") .map(order -> " " + order.getColumn() + " " + order.getDirection() + " ")
.collect(Collectors.joining(",")); .collect(Collectors.joining(","));
} }
public String getOrderBy(QueryParam queryParam, Map<String, String> deriveMetrics) { public String getOrderBy(StructQueryParam structQueryParam, Map<String, String> deriveMetrics) {
if (CollectionUtils.isEmpty(queryParam.getOrders())) { if (CollectionUtils.isEmpty(structQueryParam.getOrders())) {
return ""; return "";
} }
if (!queryParam.getOrders().stream() if (!structQueryParam.getOrders().stream()
.anyMatch(o -> deriveMetrics.containsKey(o.getColumn()))) { .anyMatch(o -> deriveMetrics.containsKey(o.getColumn()))) {
return getOrderBy(queryParam); return getOrderBy(structQueryParam);
} }
return "order by " + queryParam.getOrders().stream() return "order by " + structQueryParam.getOrders().stream()
.map(order -> " " + (deriveMetrics.containsKey(order.getColumn()) .map(order -> " " + (deriveMetrics.containsKey(order.getColumn())
? deriveMetrics.get(order.getColumn()) ? deriveMetrics.get(order.getColumn())
: order.getColumn()) + " " + order.getDirection() + " ") : order.getColumn()) + " " + order.getDirection() + " ")
.collect(Collectors.joining(",")); .collect(Collectors.joining(","));
} }
public String generateWhere(QueryParam queryParam, ItemDateResp itemDateResp) { public String generateWhere(StructQueryParam structQueryParam, ItemDateResp itemDateResp) {
String whereClauseFromFilter = String whereClauseFromFilter =
sqlFilterUtils.getWhereClause(queryParam.getDimensionFilters()); sqlFilterUtils.getWhereClause(structQueryParam.getDimensionFilters());
String whereFromDate = getDateWhereClause(queryParam.getDateInfo(), itemDateResp); String whereFromDate = getDateWhereClause(structQueryParam.getDateInfo(), itemDateResp);
return mergeDateWhereClause(queryParam, whereClauseFromFilter, whereFromDate); return mergeDateWhereClause(structQueryParam, whereClauseFromFilter, whereFromDate);
} }
private String mergeDateWhereClause(QueryParam queryParam, String whereClauseFromFilter, private String mergeDateWhereClause(StructQueryParam structQueryParam,
String whereFromDate) { String whereClauseFromFilter, String whereFromDate) {
if (StringUtils.isNotEmpty(whereFromDate) if (StringUtils.isNotEmpty(whereFromDate)
&& StringUtils.isNotEmpty(whereClauseFromFilter)) { && StringUtils.isNotEmpty(whereClauseFromFilter)) {
return String.format("%s AND (%s)", whereFromDate, whereClauseFromFilter); return String.format("%s AND (%s)", whereFromDate, whereClauseFromFilter);
@@ -179,7 +180,7 @@ public class SqlGenerateUtils {
return whereFromDate; return whereFromDate;
} else if (Objects.isNull(whereFromDate) && StringUtils.isEmpty(whereClauseFromFilter)) { } else if (Objects.isNull(whereFromDate) && StringUtils.isEmpty(whereClauseFromFilter)) {
log.debug("the current date information is empty, enter the date initialization logic"); log.debug("the current date information is empty, enter the date initialization logic");
return dateModeUtils.defaultRecentDateInfo(queryParam.getDateInfo()); return dateModeUtils.defaultRecentDateInfo(structQueryParam.getDateInfo());
} }
return whereClauseFromFilter; return whereClauseFromFilter;
} }
@@ -203,12 +204,12 @@ public class SqlGenerateUtils {
return dateModeUtils.getDateWhereStr(dateInfo, dateDate); return dateModeUtils.getDateWhereStr(dateInfo, dateDate);
} }
public Triple<String, String, String> getBeginEndTime(QueryParam queryParam, public Triple<String, String, String> getBeginEndTime(StructQueryParam structQueryParam,
ItemDateResp dataDate) { ItemDateResp dataDate) {
if (Objects.isNull(queryParam.getDateInfo())) { if (Objects.isNull(structQueryParam.getDateInfo())) {
return Triple.of("", "", ""); return Triple.of("", "", "");
} }
DateConf dateConf = queryParam.getDateInfo(); DateConf dateConf = structQueryParam.getDateInfo();
String dateInfo = dateModeUtils.getSysDateCol(dateConf); String dateInfo = dateModeUtils.getSysDateCol(dateConf);
if (dateInfo.isEmpty()) { if (dateInfo.isEmpty()) {
return Triple.of("", "", ""); return Triple.of("", "", "");

View File

@@ -3,6 +3,7 @@ package com.tencent.supersonic.headless.core.utils;
import javax.sql.DataSource; import javax.sql.DataSource;
import com.tencent.supersonic.common.pojo.QueryColumn; import com.tencent.supersonic.common.pojo.QueryColumn;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.common.util.DateUtils; import com.tencent.supersonic.common.util.DateUtils;
import com.tencent.supersonic.headless.api.pojo.enums.DataType; import com.tencent.supersonic.headless.api.pojo.enums.DataType;
import com.tencent.supersonic.headless.api.pojo.response.SemanticQueryResp; import com.tencent.supersonic.headless.api.pojo.response.SemanticQueryResp;
@@ -64,7 +65,7 @@ public class SqlUtils {
public SqlUtils init(Database database) { public SqlUtils init(Database database) {
return SqlUtilsBuilder.getBuilder() return SqlUtilsBuilder.getBuilder()
.withName(database.getId() + AT_SYMBOL + database.getName()) .withName(database.getId() + AT_SYMBOL + database.getName())
.withType(database.getType()).withJdbcUrl(database.getUrl()) .withType(database.getType().getName()).withJdbcUrl(database.getUrl())
.withUsername(database.getUsername()).withPassword(database.getPassword()) .withUsername(database.getUsername()).withPassword(database.getPassword())
.withJdbcDataSource(this.jdbcDataSource).withResultLimit(this.resultLimit) .withJdbcDataSource(this.jdbcDataSource).withResultLimit(this.resultLimit)
.withIsQueryLogEnable(this.isQueryLogEnable).build(); .withIsQueryLogEnable(this.isQueryLogEnable).build();
@@ -224,7 +225,8 @@ public class SqlUtils {
} }
public SqlUtils build() { public SqlUtils build() {
Database database = Database.builder().name(this.name).type(this.type).url(this.jdbcUrl) Database database = Database.builder().name(this.name)
.type(EngineType.fromString(this.type.toUpperCase())).url(this.jdbcUrl)
.username(this.username).password(this.password).build(); .username(this.username).password(this.password).build();
SqlUtils sqlUtils = new SqlUtils(database); SqlUtils sqlUtils = new SqlUtils(database);

View File

@@ -1,7 +1,6 @@
package com.tencent.supersonic.chat.core.parser.aggregate; package com.tencent.supersonic.chat.core.parser.aggregate;
import com.alibaba.fastjson.JSON; import com.alibaba.fastjson.JSON;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.calcite.CalciteQueryParser; import com.tencent.supersonic.headless.core.translator.calcite.CalciteQueryParser;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
@@ -318,7 +317,7 @@ public class CalciteSqlParserTest {
+ " \"updatedAt\": 1711367511146\n" + " }\n" + " }\n" + "}"; + " \"updatedAt\": 1711367511146\n" + " }\n" + " }\n" + "}";
QueryStatement queryStatement = JSON.parseObject(json, QueryStatement.class); QueryStatement queryStatement = JSON.parseObject(json, QueryStatement.class);
CalciteQueryParser calciteSqlParser = new CalciteQueryParser(); CalciteQueryParser calciteSqlParser = new CalciteQueryParser();
calciteSqlParser.parse(queryStatement, AggOption.DEFAULT); calciteSqlParser.parse(queryStatement);
Assert.assertEquals(queryStatement.getSql().trim().replaceAll("\\s+", ""), Assert.assertEquals(queryStatement.getSql().trim().replaceAll("\\s+", ""),
"SELECT`imp_date`AS`sys_imp_date`,SUM(1)AS`pv`" + "FROM" + "`s2_pv_uv_statis`" "SELECT`imp_date`AS`sys_imp_date`,SUM(1)AS`pv`" + "FROM" + "`s2_pv_uv_statis`"
+ "GROUPBY`imp_date`,`imp_date`"); + "GROUPBY`imp_date`,`imp_date`");

View File

@@ -6,24 +6,10 @@ import com.tencent.supersonic.common.pojo.QueryColumn;
import com.tencent.supersonic.common.pojo.User; import com.tencent.supersonic.common.pojo.User;
import com.tencent.supersonic.common.pojo.enums.TaskStatusEnum; import com.tencent.supersonic.common.pojo.enums.TaskStatusEnum;
import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum; import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
import com.tencent.supersonic.headless.api.pojo.DataSetSchema; import com.tencent.supersonic.headless.api.pojo.*;
import com.tencent.supersonic.headless.api.pojo.Dim;
import com.tencent.supersonic.headless.api.pojo.MetaFilter;
import com.tencent.supersonic.headless.api.pojo.QueryParam;
import com.tencent.supersonic.headless.api.pojo.enums.SemanticType; import com.tencent.supersonic.headless.api.pojo.enums.SemanticType;
import com.tencent.supersonic.headless.api.pojo.request.DimensionValueReq; import com.tencent.supersonic.headless.api.pojo.request.*;
import com.tencent.supersonic.headless.api.pojo.request.QueryMultiStructReq; import com.tencent.supersonic.headless.api.pojo.response.*;
import com.tencent.supersonic.headless.api.pojo.request.QuerySqlReq;
import com.tencent.supersonic.headless.api.pojo.request.QueryStructReq;
import com.tencent.supersonic.headless.api.pojo.request.SchemaFilterReq;
import com.tencent.supersonic.headless.api.pojo.request.SemanticQueryReq;
import com.tencent.supersonic.headless.api.pojo.response.DimensionResp;
import com.tencent.supersonic.headless.api.pojo.response.ItemResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricResp;
import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticQueryResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticTranslateResp;
import com.tencent.supersonic.headless.chat.knowledge.HanlpMapResult; import com.tencent.supersonic.headless.chat.knowledge.HanlpMapResult;
import com.tencent.supersonic.headless.chat.knowledge.KnowledgeBaseService; import com.tencent.supersonic.headless.chat.knowledge.KnowledgeBaseService;
import com.tencent.supersonic.headless.chat.knowledge.MapResult; import com.tencent.supersonic.headless.chat.knowledge.MapResult;
@@ -33,6 +19,8 @@ import com.tencent.supersonic.headless.chat.knowledge.helper.NatureHelper;
import com.tencent.supersonic.headless.core.cache.QueryCache; import com.tencent.supersonic.headless.core.cache.QueryCache;
import com.tencent.supersonic.headless.core.executor.QueryExecutor; import com.tencent.supersonic.headless.core.executor.QueryExecutor;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQueryParam;
import com.tencent.supersonic.headless.core.pojo.StructQueryParam;
import com.tencent.supersonic.headless.core.translator.SemanticTranslator; import com.tencent.supersonic.headless.core.translator.SemanticTranslator;
import com.tencent.supersonic.headless.core.utils.ComponentFactory; import com.tencent.supersonic.headless.core.utils.ComponentFactory;
import com.tencent.supersonic.headless.server.annotation.S2DataPermission; import com.tencent.supersonic.headless.server.annotation.S2DataPermission;
@@ -52,12 +40,7 @@ import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.BeanUtils; import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import java.util.ArrayList; import java.util.*;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors; import java.util.stream.Collectors;
@Service @Service
@@ -307,30 +290,13 @@ public class S2SemanticLayerService implements SemanticLayerService {
return queryStatement; return queryStatement;
} }
private QueryStatement buildSqlQueryStatement(QuerySqlReq querySqlReq, User user) { private QueryStatement buildQueryStatement(SemanticQueryReq queryReq) {
// If dataSetId or DataSetName is empty, parse dataSetId from the SQL
if (querySqlReq.needGetDataSetId()) {
Long dataSetId = dataSetService.getDataSetIdFromSql(querySqlReq.getSql(), user);
querySqlReq.setDataSetId(dataSetId);
}
QueryStatement queryStatement = buildStructQueryStatement(querySqlReq);
queryStatement.setIsS2SQL(true);
queryStatement.setSql(querySqlReq.getSql());
return queryStatement;
}
private QueryStatement buildStructQueryStatement(SemanticQueryReq queryReq) {
SchemaFilterReq schemaFilterReq = new SchemaFilterReq(); SchemaFilterReq schemaFilterReq = new SchemaFilterReq();
schemaFilterReq.setDataSetId(queryReq.getDataSetId()); schemaFilterReq.setDataSetId(queryReq.getDataSetId());
schemaFilterReq.setModelIds(queryReq.getModelIds()); schemaFilterReq.setModelIds(queryReq.getModelIds());
SemanticSchemaResp semanticSchemaResp = schemaService.fetchSemanticSchema(schemaFilterReq); SemanticSchemaResp semanticSchemaResp = schemaService.fetchSemanticSchema(schemaFilterReq);
QueryStatement queryStatement = new QueryStatement(); QueryStatement queryStatement = new QueryStatement();
QueryParam queryParam = new QueryParam();
BeanUtils.copyProperties(queryReq, queryParam);
queryStatement.setQueryParam(queryParam);
queryStatement.setModelIds(queryReq.getModelIds());
queryStatement.setEnableOptimize(queryUtils.enableOptimize()); queryStatement.setEnableOptimize(queryUtils.enableOptimize());
queryStatement.setDataSetId(queryReq.getDataSetId()); queryStatement.setDataSetId(queryReq.getDataSetId());
queryStatement.setSemanticSchemaResp(semanticSchemaResp); queryStatement.setSemanticSchemaResp(semanticSchemaResp);
@@ -338,6 +304,31 @@ public class S2SemanticLayerService implements SemanticLayerService {
return queryStatement; return queryStatement;
} }
private QueryStatement buildSqlQueryStatement(QuerySqlReq querySqlReq, User user) {
QueryStatement queryStatement = buildQueryStatement(querySqlReq);
queryStatement.setIsS2SQL(true);
SqlQueryParam sqlQueryParam = new SqlQueryParam();
sqlQueryParam.setSql(querySqlReq.getSql());
queryStatement.setSqlQueryParam(sqlQueryParam);
// If dataSetId or DataSetName is empty, parse dataSetId from the SQL
if (querySqlReq.needGetDataSetId()) {
Long dataSetId = dataSetService.getDataSetIdFromSql(querySqlReq.getSql(), user);
querySqlReq.setDataSetId(dataSetId);
}
return queryStatement;
}
private QueryStatement buildStructQueryStatement(SemanticQueryReq queryReq) {
QueryStatement queryStatement = buildQueryStatement(queryReq);
StructQueryParam structQueryParam = new StructQueryParam();
BeanUtils.copyProperties(queryReq, structQueryParam);
queryStatement.setStructQueryParam(structQueryParam);
queryStatement.setIsS2SQL(false);
return queryStatement;
}
private QueryStatement buildMultiStructQueryStatement(QueryMultiStructReq queryMultiStructReq) { private QueryStatement buildMultiStructQueryStatement(QueryMultiStructReq queryMultiStructReq) {
List<QueryStatement> queryStatements = new ArrayList<>(); List<QueryStatement> queryStatements = new ArrayList<>();
for (QueryStructReq queryStructReq : queryMultiStructReq.getQueryStructReqs()) { for (QueryStructReq queryStructReq : queryMultiStructReq.getQueryStructReqs()) {

View File

@@ -2,33 +2,14 @@ package com.tencent.supersonic.headless.server.manager;
import com.tencent.supersonic.common.pojo.ModelRela; import com.tencent.supersonic.common.pojo.ModelRela;
import com.tencent.supersonic.common.pojo.enums.FilterOperatorEnum; import com.tencent.supersonic.common.pojo.enums.FilterOperatorEnum;
import com.tencent.supersonic.headless.api.pojo.Field;
import com.tencent.supersonic.headless.api.pojo.enums.TagDefineType; import com.tencent.supersonic.headless.api.pojo.enums.TagDefineType;
import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp; import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp; import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.TagResp; import com.tencent.supersonic.headless.api.pojo.response.TagResp;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.*;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataType;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DimensionTimeTypeParams;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.JoinRelation;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Materialization.TimePartType; import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Materialization.TimePartType;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.MetricTypeParams;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology;
import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema;
import com.tencent.supersonic.headless.server.pojo.yaml.DataModelYamlTpl; import com.tencent.supersonic.headless.server.pojo.yaml.*;
import com.tencent.supersonic.headless.server.pojo.yaml.DimensionTimeTypeParamsTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.DimensionYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.FieldParamYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.IdentifyYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.MeasureYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.MetricParamYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.MetricTypeParamsYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.MetricYamlTpl;
import com.tencent.supersonic.headless.server.service.SchemaService; import com.tencent.supersonic.headless.server.service.SchemaService;
import com.tencent.supersonic.headless.server.utils.DatabaseConverter; import com.tencent.supersonic.headless.server.utils.DatabaseConverter;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
@@ -36,15 +17,8 @@ import org.apache.commons.lang3.tuple.Triple;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils; import org.springframework.util.CollectionUtils;
import java.util.ArrayList; import java.util.*;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry; import java.util.Map.Entry;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors; import java.util.stream.Collectors;
@Slf4j @Slf4j
@@ -184,16 +158,6 @@ public class SemanticSchemaManager {
if (Objects.nonNull(d.getModelSourceTypeEnum())) { if (Objects.nonNull(d.getModelSourceTypeEnum())) {
dataModel.setTimePartType(TimePartType.of(d.getModelSourceTypeEnum().name())); dataModel.setTimePartType(TimePartType.of(d.getModelSourceTypeEnum().name()));
} }
if (Objects.nonNull(d.getFields()) && !CollectionUtils.isEmpty(d.getFields())) {
Set<String> measures = dataModel.getMeasures().stream().map(mm -> mm.getName())
.collect(Collectors.toSet());
for (Field f : d.getFields()) {
if (!measures.contains(f.getFieldName())) {
dataModel.getMeasures().add(Measure.builder().expr(f.getFieldName())
.name(f.getFieldName()).agg("").build());
}
}
}
return dataModel; return dataModel;
} }

View File

@@ -2,6 +2,7 @@ package com.tencent.supersonic.headless.server.persistence.repository;
import com.tencent.supersonic.headless.api.pojo.request.DictItemFilter; import com.tencent.supersonic.headless.api.pojo.request.DictItemFilter;
import com.tencent.supersonic.headless.api.pojo.request.DictSingleTaskReq; import com.tencent.supersonic.headless.api.pojo.request.DictSingleTaskReq;
import com.tencent.supersonic.headless.api.pojo.request.ValueTaskQueryReq;
import com.tencent.supersonic.headless.api.pojo.response.DictItemResp; import com.tencent.supersonic.headless.api.pojo.response.DictItemResp;
import com.tencent.supersonic.headless.api.pojo.response.DictTaskResp; import com.tencent.supersonic.headless.api.pojo.response.DictTaskResp;
import com.tencent.supersonic.headless.server.persistence.dataobject.DictConfDO; import com.tencent.supersonic.headless.server.persistence.dataobject.DictConfDO;
@@ -26,4 +27,6 @@ public interface DictRepository {
DictTaskDO queryDictTaskById(Long id); DictTaskDO queryDictTaskById(Long id);
DictTaskResp queryLatestDictTask(DictSingleTaskReq taskReq); DictTaskResp queryLatestDictTask(DictSingleTaskReq taskReq);
List<DictTaskDO> queryAllDictTask(ValueTaskQueryReq taskQueryReq);
} }

View File

@@ -4,6 +4,7 @@ import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.tencent.supersonic.common.pojo.enums.TypeEnums; import com.tencent.supersonic.common.pojo.enums.TypeEnums;
import com.tencent.supersonic.headless.api.pojo.request.DictItemFilter; import com.tencent.supersonic.headless.api.pojo.request.DictItemFilter;
import com.tencent.supersonic.headless.api.pojo.request.DictSingleTaskReq; import com.tencent.supersonic.headless.api.pojo.request.DictSingleTaskReq;
import com.tencent.supersonic.headless.api.pojo.request.ValueTaskQueryReq;
import com.tencent.supersonic.headless.api.pojo.response.DictItemResp; import com.tencent.supersonic.headless.api.pojo.response.DictItemResp;
import com.tencent.supersonic.headless.api.pojo.response.DictTaskResp; import com.tencent.supersonic.headless.api.pojo.response.DictTaskResp;
import com.tencent.supersonic.headless.api.pojo.response.DimensionResp; import com.tencent.supersonic.headless.api.pojo.response.DimensionResp;
@@ -14,11 +15,14 @@ import com.tencent.supersonic.headless.server.persistence.mapper.DictTaskMapper;
import com.tencent.supersonic.headless.server.persistence.repository.DictRepository; import com.tencent.supersonic.headless.server.persistence.repository.DictRepository;
import com.tencent.supersonic.headless.server.service.DimensionService; import com.tencent.supersonic.headless.server.service.DimensionService;
import com.tencent.supersonic.headless.server.utils.DictUtils; import com.tencent.supersonic.headless.server.utils.DictUtils;
import com.xkzhangsan.time.utils.CollectionUtil;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.codehaus.plexus.util.StringUtils;
import org.springframework.beans.factory.annotation.Value; import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Repository; import org.springframework.stereotype.Repository;
import org.springframework.util.CollectionUtils; import org.springframework.util.CollectionUtils;
import java.util.Collections;
import java.util.Comparator; import java.util.Comparator;
import java.util.List; import java.util.List;
import java.util.Objects; import java.util.Objects;
@@ -100,6 +104,23 @@ public class DictRepositoryImpl implements DictRepository {
return taskResp; return taskResp;
} }
@Override
public List<DictTaskDO> queryAllDictTask(ValueTaskQueryReq taskQueryReq) {
QueryWrapper<DictTaskDO> wrapper = new QueryWrapper<>();
if (Objects.nonNull(taskQueryReq.getItemId())) {
wrapper.lambda().eq(DictTaskDO::getItemId, taskQueryReq.getItemId());
}
if (CollectionUtil.isNotEmpty(taskQueryReq.getTaskStatusList())) {
wrapper.lambda().in(DictTaskDO::getStatus, taskQueryReq.getTaskStatusList());
}
if (StringUtils.isNotEmpty(taskQueryReq.getKey())) {
String key = taskQueryReq.getKey();
wrapper.lambda().and(qw -> qw.like(DictTaskDO::getName, key).or()
.like(DictTaskDO::getDescription, key).or().like(DictTaskDO::getConfig, key));
}
return dictTaskMapper.selectList(wrapper);
}
@Override @Override
public Long addDictConf(DictConfDO dictConfDO) { public Long addDictConf(DictConfDO dictConfDO) {
dictConfMapper.insert(dictConfDO); dictConfMapper.insert(dictConfDO);

View File

@@ -13,6 +13,7 @@ import com.tencent.supersonic.headless.api.pojo.request.DictItemFilter;
import com.tencent.supersonic.headless.api.pojo.request.DictItemReq; import com.tencent.supersonic.headless.api.pojo.request.DictItemReq;
import com.tencent.supersonic.headless.api.pojo.request.DictSingleTaskReq; import com.tencent.supersonic.headless.api.pojo.request.DictSingleTaskReq;
import com.tencent.supersonic.headless.api.pojo.request.DictValueReq; import com.tencent.supersonic.headless.api.pojo.request.DictValueReq;
import com.tencent.supersonic.headless.api.pojo.request.ValueTaskQueryReq;
import com.tencent.supersonic.headless.api.pojo.response.DictItemResp; import com.tencent.supersonic.headless.api.pojo.response.DictItemResp;
import com.tencent.supersonic.headless.api.pojo.response.DictTaskResp; import com.tencent.supersonic.headless.api.pojo.response.DictTaskResp;
import com.tencent.supersonic.headless.api.pojo.response.DictValueDimResp; import com.tencent.supersonic.headless.api.pojo.response.DictValueDimResp;
@@ -132,6 +133,18 @@ public class KnowledgeController {
return taskService.queryLatestDictTask(taskReq, user); return taskService.queryLatestDictTask(taskReq, user);
} }
/**
* queryDictTask-分页返回维度的字典任务列表
*
* @param taskQueryReq
*/
@PostMapping("/task/search/page")
public PageInfo<DictTaskResp> queryDictTask(@RequestBody ValueTaskQueryReq taskQueryReq,
HttpServletRequest request, HttpServletResponse response) {
User user = UserHolder.findUser(request, response);
return taskService.queryDictTask(taskQueryReq, user);
}
@GetMapping("/embedding/reload") @GetMapping("/embedding/reload")
public Object reloadEmbedding() { public Object reloadEmbedding() {
metaEmbeddingTask.reloadMetaEmbedding(); metaEmbeddingTask.reloadMetaEmbedding();

View File

@@ -4,6 +4,7 @@ import com.github.pagehelper.PageInfo;
import com.tencent.supersonic.common.pojo.User; import com.tencent.supersonic.common.pojo.User;
import com.tencent.supersonic.headless.api.pojo.request.DictSingleTaskReq; import com.tencent.supersonic.headless.api.pojo.request.DictSingleTaskReq;
import com.tencent.supersonic.headless.api.pojo.request.DictValueReq; import com.tencent.supersonic.headless.api.pojo.request.DictValueReq;
import com.tencent.supersonic.headless.api.pojo.request.ValueTaskQueryReq;
import com.tencent.supersonic.headless.api.pojo.response.DictTaskResp; import com.tencent.supersonic.headless.api.pojo.response.DictTaskResp;
import com.tencent.supersonic.headless.api.pojo.response.DictValueDimResp; import com.tencent.supersonic.headless.api.pojo.response.DictValueDimResp;
@@ -17,6 +18,8 @@ public interface DictTaskService {
DictTaskResp queryLatestDictTask(DictSingleTaskReq taskReq, User user); DictTaskResp queryLatestDictTask(DictSingleTaskReq taskReq, User user);
PageInfo<DictTaskResp> queryDictTask(ValueTaskQueryReq taskQueryReq, User user);
PageInfo<DictValueDimResp> queryDictValue(DictValueReq dictValueReq, User user); PageInfo<DictValueDimResp> queryDictValue(DictValueReq dictValueReq, User user);
String queryDictFilePath(DictValueReq dictValueReq, User user); String queryDictFilePath(DictValueReq dictValueReq, User user);

View File

@@ -9,15 +9,7 @@ import com.tencent.supersonic.headless.api.pojo.MetaFilter;
import com.tencent.supersonic.headless.api.pojo.SemanticSchema; import com.tencent.supersonic.headless.api.pojo.SemanticSchema;
import com.tencent.supersonic.headless.api.pojo.request.ItemUseReq; import com.tencent.supersonic.headless.api.pojo.request.ItemUseReq;
import com.tencent.supersonic.headless.api.pojo.request.SchemaFilterReq; import com.tencent.supersonic.headless.api.pojo.request.SchemaFilterReq;
import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp; import com.tencent.supersonic.headless.api.pojo.response.*;
import com.tencent.supersonic.headless.api.pojo.response.DimensionResp;
import com.tencent.supersonic.headless.api.pojo.response.DomainResp;
import com.tencent.supersonic.headless.api.pojo.response.ItemResp;
import com.tencent.supersonic.headless.api.pojo.response.ItemUseResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricResp;
import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
import com.tencent.supersonic.headless.api.pojo.response.ModelSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.server.pojo.yaml.DataModelYamlTpl; import com.tencent.supersonic.headless.server.pojo.yaml.DataModelYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.DimensionYamlTpl; import com.tencent.supersonic.headless.server.pojo.yaml.DimensionYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.MetricYamlTpl; import com.tencent.supersonic.headless.server.pojo.yaml.MetricYamlTpl;
@@ -64,5 +56,4 @@ public interface SchemaService {
ItemDateResp getItemDate(ItemDateFilter dimension, ItemDateFilter metric); ItemDateResp getItemDate(ItemDateFilter dimension, ItemDateFilter metric);
DatabaseResp getDatabase(Long id);
} }

View File

@@ -1,15 +1,18 @@
package com.tencent.supersonic.headless.server.service.impl; package com.tencent.supersonic.headless.server.service.impl;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo; import com.github.pagehelper.PageInfo;
import com.tencent.supersonic.common.pojo.Constants; import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.common.pojo.User; import com.tencent.supersonic.common.pojo.User;
import com.tencent.supersonic.common.pojo.enums.StatusEnum; import com.tencent.supersonic.common.pojo.enums.StatusEnum;
import com.tencent.supersonic.common.pojo.enums.TaskStatusEnum; import com.tencent.supersonic.common.pojo.enums.TaskStatusEnum;
import com.tencent.supersonic.common.util.BeanMapper; import com.tencent.supersonic.common.util.BeanMapper;
import com.tencent.supersonic.common.util.DateUtils;
import com.tencent.supersonic.headless.api.pojo.DimValueMap; import com.tencent.supersonic.headless.api.pojo.DimValueMap;
import com.tencent.supersonic.headless.api.pojo.request.DictItemFilter; import com.tencent.supersonic.headless.api.pojo.request.DictItemFilter;
import com.tencent.supersonic.headless.api.pojo.request.DictSingleTaskReq; import com.tencent.supersonic.headless.api.pojo.request.DictSingleTaskReq;
import com.tencent.supersonic.headless.api.pojo.request.DictValueReq; import com.tencent.supersonic.headless.api.pojo.request.DictValueReq;
import com.tencent.supersonic.headless.api.pojo.request.ValueTaskQueryReq;
import com.tencent.supersonic.headless.api.pojo.response.DictItemResp; import com.tencent.supersonic.headless.api.pojo.response.DictItemResp;
import com.tencent.supersonic.headless.api.pojo.response.DictTaskResp; import com.tencent.supersonic.headless.api.pojo.response.DictTaskResp;
import com.tencent.supersonic.headless.api.pojo.response.DictValueDimResp; import com.tencent.supersonic.headless.api.pojo.response.DictValueDimResp;
@@ -116,14 +119,17 @@ public class DictTaskServiceImpl implements DictTaskService {
fileHandler.writeFile(data, fileName, false); fileHandler.writeFile(data, fileName, false);
// 3.Change in-memory dictionary data in real time // 3.Change in-memory dictionary data in real time
String status = TaskStatusEnum.SUCCESS.getStatus();
try { try {
dictWordService.loadDictWord(); dictWordService.loadDictWord();
dictTaskDO.setStatus(TaskStatusEnum.SUCCESS.getStatus());
dictRepository.editDictTask(dictTaskDO);
} catch (Exception e) { } catch (Exception e) {
log.error("reloadCustomDictionary error", e); log.error("reloadCustomDictionary error", e);
status = TaskStatusEnum.ERROR.getStatus();
dictTaskDO.setDescription(e.toString());
} }
dictTaskDO.setStatus(status);
dictTaskDO.setElapsedMs(DateUtils.calculateDiffMs(dictTaskDO.getCreatedAt()));
dictRepository.editDictTask(dictTaskDO);
} }
@Override @Override
@@ -164,6 +170,17 @@ public class DictTaskServiceImpl implements DictTaskService {
return dictRepository.queryLatestDictTask(taskReq); return dictRepository.queryLatestDictTask(taskReq);
} }
@Override
public PageInfo<DictTaskResp> queryDictTask(ValueTaskQueryReq taskQueryReq, User user) {
PageInfo<DictTaskDO> dictTaskDOPageInfo =
PageHelper.startPage(taskQueryReq.getCurrent(), taskQueryReq.getPageSize())
.doSelectPageInfo(() -> dictRepository.queryAllDictTask(taskQueryReq));
PageInfo<DictTaskResp> dictTaskRespPageInfo = new PageInfo<>();
BeanMapper.mapper(dictTaskDOPageInfo, dictTaskRespPageInfo);
dictTaskRespPageInfo.setList(dictConverter.taskDO2Resp(dictTaskDOPageInfo.getList()));
return dictTaskRespPageInfo;
}
@Override @Override
public PageInfo<DictValueDimResp> queryDictValue(DictValueReq dictValueReq, User user) { public PageInfo<DictValueDimResp> queryDictValue(DictValueReq dictValueReq, User user) {
// todo 优化读取内存结构 // todo 优化读取内存结构

View File

@@ -536,8 +536,4 @@ public class SchemaServiceImpl implements SchemaService {
return modelService.getItemDate(dimension, metric); return modelService.getItemDate(dimension, metric);
} }
@Override
public DatabaseResp getDatabase(Long id) {
return databaseService.getDatabase(id);
}
} }

View File

@@ -1,6 +1,7 @@
package com.tencent.supersonic.headless.server.utils; package com.tencent.supersonic.headless.server.utils;
import com.alibaba.fastjson.JSONObject; import com.alibaba.fastjson.JSONObject;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.api.pojo.request.DatabaseReq; import com.tencent.supersonic.headless.api.pojo.request.DatabaseReq;
import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp; import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
import com.tencent.supersonic.headless.core.pojo.ConnectInfo; import com.tencent.supersonic.headless.core.pojo.ConnectInfo;
@@ -16,6 +17,7 @@ public class DatabaseConverter {
public static Database convert(DatabaseResp databaseResp) { public static Database convert(DatabaseResp databaseResp) {
Database database = new Database(); Database database = new Database();
BeanUtils.copyProperties(databaseResp, database); BeanUtils.copyProperties(databaseResp, database);
database.setType(EngineType.fromString(databaseResp.getType().toUpperCase()));
return database; return database;
} }

View File

@@ -34,6 +34,7 @@ import com.tencent.supersonic.headless.server.service.DimensionService;
import com.tencent.supersonic.headless.server.service.MetricService; import com.tencent.supersonic.headless.server.service.MetricService;
import com.tencent.supersonic.headless.server.service.ModelService; import com.tencent.supersonic.headless.server.service.ModelService;
import com.tencent.supersonic.headless.server.service.TagMetaService; import com.tencent.supersonic.headless.server.service.TagMetaService;
import com.xkzhangsan.time.utils.CollectionUtil;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.BeanUtils; import org.springframework.beans.BeanUtils;
@@ -533,4 +534,12 @@ public class DictUtils {
resp.setConfig(JsonUtil.toObject(dictTaskDO.getConfig(), ItemValueConfig.class)); resp.setConfig(JsonUtil.toObject(dictTaskDO.getConfig(), ItemValueConfig.class));
return resp; return resp;
} }
public List<DictTaskResp> taskDO2Resp(List<DictTaskDO> dictTaskDOList) {
List<DictTaskResp> dictTaskRespList = new ArrayList<>();
if (CollectionUtil.isNotEmpty(dictTaskDOList)) {
dictTaskDOList.stream().forEach(taskDO -> dictTaskRespList.add(taskDO2Resp(taskDO)));
}
return dictTaskRespList;
}
} }

View File

@@ -67,7 +67,8 @@ public class MetricDrillDownChecker {
List<MetricResp> metricResps = getMetrics(metricFields, semanticSchemaResp); List<MetricResp> metricResps = getMetrics(metricFields, semanticSchemaResp);
if (!checkDrillDownDimension(dimensionBizName, metricResps, semanticSchemaResp)) { if (!checkDrillDownDimension(dimensionBizName, metricResps, semanticSchemaResp)) {
DimSchemaResp dimSchemaResp = semanticSchemaResp.getDimension(dimensionBizName); DimSchemaResp dimSchemaResp = semanticSchemaResp.getDimension(dimensionBizName);
if (Objects.nonNull(dimSchemaResp) && dimSchemaResp.isPartitionTime()) { if (Objects.isNull(dimSchemaResp)
|| (Objects.nonNull(dimSchemaResp) && dimSchemaResp.isPartitionTime())) {
continue; continue;
} }
String errMsg = String errMsg =

View File

@@ -2,31 +2,25 @@ package com.tencent.supersonic.headless.server.calcite;
import com.tencent.supersonic.common.pojo.ColumnOrder; import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.common.pojo.enums.EngineType; import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.api.pojo.response.SqlParserResp; import com.tencent.supersonic.headless.api.pojo.response.SqlParserResp;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.pojo.QueryStatement; import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.OntologyQueryParam;
import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema; import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.calcite.sql.SqlBuilder; import com.tencent.supersonic.headless.core.translator.calcite.sql.SqlBuilder;
import com.tencent.supersonic.headless.server.manager.SemanticSchemaManager; import com.tencent.supersonic.headless.server.manager.SemanticSchemaManager;
import com.tencent.supersonic.headless.server.pojo.yaml.DataModelYamlTpl; import com.tencent.supersonic.headless.server.pojo.yaml.*;
import com.tencent.supersonic.headless.server.pojo.yaml.DimensionTimeTypeParamsTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.DimensionYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.IdentifyYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.MeasureYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.MetricTypeParamsYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.MetricYamlTpl;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.HashSet;
import java.util.List; import java.util.List;
@Slf4j @Slf4j
class HeadlessParserServiceTest { class HeadlessParserServiceTest {
public static SqlParserResp parser(S2CalciteSchema semanticSchema, public static SqlParserResp parser(S2CalciteSchema semanticSchema,
MetricQueryParam metricQueryParam, boolean isAgg) { OntologyQueryParam ontologyQueryParam, boolean isAgg) {
SqlParserResp sqlParser = new SqlParserResp(); SqlParserResp sqlParser = new SqlParserResp();
try { try {
if (semanticSchema == null) { if (semanticSchema == null) {
@@ -35,14 +29,14 @@ class HeadlessParserServiceTest {
} }
SqlBuilder aggBuilder = new SqlBuilder(semanticSchema); SqlBuilder aggBuilder = new SqlBuilder(semanticSchema);
QueryStatement queryStatement = new QueryStatement(); QueryStatement queryStatement = new QueryStatement();
queryStatement.setMetricQueryParam(metricQueryParam); queryStatement.setOntologyQueryParam(ontologyQueryParam);
aggBuilder.build(queryStatement, AggOption.getAggregation(!isAgg)); String sql = aggBuilder.buildOntologySql(queryStatement);
EngineType engineType = queryStatement.setSql(sql);
EngineType.fromString(semanticSchema.getOntology().getDatabase().getType()); EngineType engineType = semanticSchema.getOntology().getDatabase().getType();
sqlParser.setSql(aggBuilder.getSql(engineType)); sqlParser.setSql(aggBuilder.getSql(engineType));
} catch (Exception e) { } catch (Exception e) {
sqlParser.setErrMsg(e.getMessage()); sqlParser.setErrMsg(e.getMessage());
log.error("parser error metricQueryReq[{}] error [{}]", metricQueryParam, e); log.error("parser error metricQueryReq[{}] error [{}]", ontologyQueryParam, e);
} }
return sqlParser; return sqlParser;
} }
@@ -161,9 +155,9 @@ class HeadlessParserServiceTest {
// HeadlessSchemaManager.update(headlessSchema, HeadlessSchemaManager.getMetrics(metric)); // HeadlessSchemaManager.update(headlessSchema, HeadlessSchemaManager.getMetrics(metric));
MetricQueryParam metricCommand = new MetricQueryParam(); OntologyQueryParam metricCommand = new OntologyQueryParam();
metricCommand.setDimensions(new ArrayList<>(Arrays.asList("sys_imp_date"))); metricCommand.setDimensions(new HashSet<>(Arrays.asList("sys_imp_date")));
metricCommand.setMetrics(new ArrayList<>(Arrays.asList("pv"))); metricCommand.setMetrics(new HashSet<>(Arrays.asList("pv")));
metricCommand.setWhere( metricCommand.setWhere(
"user_name = 'ab' and (sys_imp_date >= '2023-02-28' and sys_imp_date <= '2023-05-28') "); "user_name = 'ab' and (sys_imp_date >= '2023-02-28' and sys_imp_date <= '2023-05-28') ");
metricCommand.setLimit(1000L); metricCommand.setLimit(1000L);
@@ -174,10 +168,10 @@ class HeadlessParserServiceTest {
addDepartment(semanticSchema); addDepartment(semanticSchema);
MetricQueryParam metricCommand2 = new MetricQueryParam(); OntologyQueryParam metricCommand2 = new OntologyQueryParam();
metricCommand2.setDimensions(new ArrayList<>(Arrays.asList("sys_imp_date", metricCommand2.setDimensions(new HashSet<>(Arrays.asList("sys_imp_date",
"user_name__department", "user_name", "user_name__page"))); "user_name__department", "user_name", "user_name__page")));
metricCommand2.setMetrics(new ArrayList<>(Arrays.asList("pv"))); metricCommand2.setMetrics(new HashSet<>(Arrays.asList("pv")));
metricCommand2.setWhere( metricCommand2.setWhere(
"user_name = 'ab' and (sys_imp_date >= '2023-02-28' and sys_imp_date <= '2023-05-28') "); "user_name = 'ab' and (sys_imp_date >= '2023-02-28' and sys_imp_date <= '2023-05-28') ");
metricCommand2.setLimit(1000L); metricCommand2.setLimit(1000L);

View File

@@ -68,8 +68,5 @@ class QueryNLReqBuilderTest {
DateModeUtils dateModeUtils = new DateModeUtils(); DateModeUtils dateModeUtils = new DateModeUtils();
mockContextUtils.when(() -> ContextUtils.getBean(DateModeUtils.class)) mockContextUtils.when(() -> ContextUtils.getBean(DateModeUtils.class))
.thenReturn(dateModeUtils); .thenReturn(dateModeUtils);
dateModeUtils.setSysDateCol("sys_imp_date");
dateModeUtils.setSysDateWeekCol("sys_imp_week");
dateModeUtils.setSysDateMonthCol("sys_imp_month");
} }
} }

View File

@@ -26,9 +26,10 @@ com.tencent.supersonic.headless.chat.parser.llm.DataSetResolver=\
com.tencent.supersonic.headless.core.translator.converter.QueryConverter=\ com.tencent.supersonic.headless.core.translator.converter.QueryConverter=\
com.tencent.supersonic.headless.core.translator.converter.DefaultDimValueConverter,\ com.tencent.supersonic.headless.core.translator.converter.DefaultDimValueConverter,\
com.tencent.supersonic.headless.core.translator.converter.SqlVariableParseConverter,\ com.tencent.supersonic.headless.core.translator.converter.SqlVariableConverter,\
com.tencent.supersonic.headless.core.translator.converter.CalculateAggConverter,\ com.tencent.supersonic.headless.core.translator.converter.MetricRatioConverter,\
com.tencent.supersonic.headless.core.translator.converter.ParserDefaultConverter com.tencent.supersonic.headless.core.translator.converter.SqlQueryConverter,\
com.tencent.supersonic.headless.core.translator.converter.StructQueryConverter
com.tencent.supersonic.headless.core.translator.QueryOptimizer=\ com.tencent.supersonic.headless.core.translator.QueryOptimizer=\
com.tencent.supersonic.headless.core.translator.DetailQueryOptimizer com.tencent.supersonic.headless.core.translator.DetailQueryOptimizer
@@ -46,4 +47,5 @@ com.tencent.supersonic.headless.core.cache.QueryCache=\
### headless-server SPIs ### headless-server SPIs
com.tencent.supersonic.headless.server.modeller.SemanticModeller=\ com.tencent.supersonic.headless.server.modeller.SemanticModeller=\
com.tencent.supersonic.headless.server.modeller.RuleSemanticModeller com.tencent.supersonic.headless.server.modeller.RuleSemanticModeller, \
com.tencent.supersonic.headless.server.modeller.LLMSemanticModeller

View File

@@ -7,26 +7,12 @@ import com.tencent.supersonic.chat.server.agent.Agent;
import com.tencent.supersonic.chat.server.agent.AgentToolType; import com.tencent.supersonic.chat.server.agent.AgentToolType;
import com.tencent.supersonic.chat.server.agent.DatasetTool; import com.tencent.supersonic.chat.server.agent.DatasetTool;
import com.tencent.supersonic.chat.server.agent.ToolConfig; import com.tencent.supersonic.chat.server.agent.ToolConfig;
import com.tencent.supersonic.chat.server.processor.execute.DataInterpretProcessor;
import com.tencent.supersonic.common.pojo.ChatApp; import com.tencent.supersonic.common.pojo.ChatApp;
import com.tencent.supersonic.common.pojo.JoinCondition; import com.tencent.supersonic.common.pojo.JoinCondition;
import com.tencent.supersonic.common.pojo.ModelRela; import com.tencent.supersonic.common.pojo.ModelRela;
import com.tencent.supersonic.common.pojo.enums.AggOperatorEnum; import com.tencent.supersonic.common.pojo.enums.*;
import com.tencent.supersonic.common.pojo.enums.AppModule;
import com.tencent.supersonic.common.pojo.enums.FilterOperatorEnum;
import com.tencent.supersonic.common.pojo.enums.TimeMode;
import com.tencent.supersonic.common.pojo.enums.TypeEnums;
import com.tencent.supersonic.common.util.ChatAppManager; import com.tencent.supersonic.common.util.ChatAppManager;
import com.tencent.supersonic.headless.api.pojo.AggregateTypeDefaultConfig; import com.tencent.supersonic.headless.api.pojo.*;
import com.tencent.supersonic.headless.api.pojo.DataSetDetail;
import com.tencent.supersonic.headless.api.pojo.DataSetModelConfig;
import com.tencent.supersonic.headless.api.pojo.Dim;
import com.tencent.supersonic.headless.api.pojo.DimensionTimeTypeParams;
import com.tencent.supersonic.headless.api.pojo.Identify;
import com.tencent.supersonic.headless.api.pojo.Measure;
import com.tencent.supersonic.headless.api.pojo.ModelDetail;
import com.tencent.supersonic.headless.api.pojo.QueryConfig;
import com.tencent.supersonic.headless.api.pojo.TimeDefaultConfig;
import com.tencent.supersonic.headless.api.pojo.enums.DimensionType; import com.tencent.supersonic.headless.api.pojo.enums.DimensionType;
import com.tencent.supersonic.headless.api.pojo.enums.IdentifyType; import com.tencent.supersonic.headless.api.pojo.enums.IdentifyType;
import com.tencent.supersonic.headless.api.pojo.request.DataSetReq; import com.tencent.supersonic.headless.api.pojo.request.DataSetReq;
@@ -40,11 +26,7 @@ import lombok.extern.slf4j.Slf4j;
import org.springframework.core.annotation.Order; import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
import java.util.ArrayList; import java.util.*;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
@Component @Component
@Slf4j @Slf4j
@@ -272,7 +254,6 @@ public class S2CompanyDemo extends S2BaseDemo {
Map<String, ChatApp> chatAppConfig = Map<String, ChatApp> chatAppConfig =
Maps.newHashMap(ChatAppManager.getAllApps(AppModule.CHAT)); Maps.newHashMap(ChatAppManager.getAllApps(AppModule.CHAT));
chatAppConfig.values().forEach(app -> app.setChatModelId(demoChatModel.getId())); chatAppConfig.values().forEach(app -> app.setChatModelId(demoChatModel.getId()));
chatAppConfig.get(DataInterpretProcessor.APP_KEY).setEnable(true);
agent.setChatAppConfig(chatAppConfig); agent.setChatAppConfig(chatAppConfig);
agentService.createAgent(agent, defaultUser); agentService.createAgent(agent, defaultUser);

View File

@@ -146,7 +146,8 @@ public class S2VisitsDemo extends S2BaseDemo {
agent.setStatus(1); agent.setStatus(1);
agent.setEnableSearch(1); agent.setEnableSearch(1);
agent.setExamples(Lists.newArrayList("近15天超音数访问次数汇总", "按部门统计超音数的访问人数", "对比alice和lucy的停留时长", agent.setExamples(Lists.newArrayList("近15天超音数访问次数汇总", "按部门统计超音数的访问人数", "对比alice和lucy的停留时长",
"过去30天访问次数最高的部门top3", "近1个月总访问次数超过100次的部门有几个", "过去半个月每个核心用户的总停留时长")); "过去30天访问次数最高的部门top3", "近1个月总访问次数超过100次的部门有几个", "过去半个月每个核心用户的总停留时长",
"今年以来访问次数最高的一天是哪一天"));
// configure tools // configure tools
ToolConfig toolConfig = new ToolConfig(); ToolConfig toolConfig = new ToolConfig();
@@ -382,9 +383,9 @@ public class S2VisitsDemo extends S2BaseDemo {
metricReq.setDescription("访问的用户个数"); metricReq.setDescription("访问的用户个数");
metricReq.setAlias("UV,访问人数"); metricReq.setAlias("UV,访问人数");
MetricDefineByFieldParams metricTypeParams = new MetricDefineByFieldParams(); MetricDefineByFieldParams metricTypeParams = new MetricDefineByFieldParams();
metricTypeParams.setExpr("count(distinct user_id)"); metricTypeParams.setExpr("count(distinct user_name)");
List<FieldParam> fieldParams = new ArrayList<>(); List<FieldParam> fieldParams = new ArrayList<>();
fieldParams.add(new FieldParam("user_id")); fieldParams.add(new FieldParam("user_name"));
metricTypeParams.setFields(fieldParams); metricTypeParams.setFields(fieldParams);
metricReq.setMetricDefineByFieldParams(metricTypeParams); metricReq.setMetricDefineByFieldParams(metricTypeParams);
metricReq.setMetricDefineType(MetricDefineType.FIELD); metricReq.setMetricDefineType(MetricDefineType.FIELD);

View File

@@ -26,9 +26,10 @@ com.tencent.supersonic.headless.chat.parser.llm.DataSetResolver=\
com.tencent.supersonic.headless.core.translator.converter.QueryConverter=\ com.tencent.supersonic.headless.core.translator.converter.QueryConverter=\
com.tencent.supersonic.headless.core.translator.converter.DefaultDimValueConverter,\ com.tencent.supersonic.headless.core.translator.converter.DefaultDimValueConverter,\
com.tencent.supersonic.headless.core.translator.converter.SqlVariableParseConverter,\ com.tencent.supersonic.headless.core.translator.converter.SqlVariableConverter,\
com.tencent.supersonic.headless.core.translator.converter.CalculateAggConverter,\ com.tencent.supersonic.headless.core.translator.converter.MetricRatioConverter,\
com.tencent.supersonic.headless.core.translator.converter.ParserDefaultConverter com.tencent.supersonic.headless.core.translator.converter.SqlQueryConverter,\
com.tencent.supersonic.headless.core.translator.converter.StructQueryConverter
com.tencent.supersonic.headless.core.translator.QueryOptimizer=\ com.tencent.supersonic.headless.core.translator.QueryOptimizer=\
com.tencent.supersonic.headless.core.translator.DetailQueryOptimizer com.tencent.supersonic.headless.core.translator.DetailQueryOptimizer

View File

@@ -12,7 +12,6 @@ import com.tencent.supersonic.common.pojo.enums.DatePeriodEnum;
import com.tencent.supersonic.common.service.ChatModelService; import com.tencent.supersonic.common.service.ChatModelService;
import com.tencent.supersonic.headless.api.pojo.SchemaElement; import com.tencent.supersonic.headless.api.pojo.SchemaElement;
import com.tencent.supersonic.headless.api.pojo.SemanticParseInfo; import com.tencent.supersonic.headless.api.pojo.SemanticParseInfo;
import com.tencent.supersonic.headless.api.pojo.response.ParseResp;
import com.tencent.supersonic.headless.api.pojo.response.QueryState; import com.tencent.supersonic.headless.api.pojo.response.QueryState;
import com.tencent.supersonic.util.DataUtils; import com.tencent.supersonic.util.DataUtils;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;

View File

@@ -20,7 +20,7 @@ public class DetailTest extends BaseTest {
@Test @Test
public void test_detail_dimension() throws Exception { public void test_detail_dimension() throws Exception {
QueryResult actualResult = submitNewChat("周杰伦流派和代表作", DataUtils.tagAgentId); QueryResult actualResult = submitNewChat("周杰伦流派和代表作", DataUtils.singerAgentId);
QueryResult expectedResult = new QueryResult(); QueryResult expectedResult = new QueryResult();
SemanticParseInfo expectedParseInfo = new SemanticParseInfo(); SemanticParseInfo expectedParseInfo = new SemanticParseInfo();
@@ -31,7 +31,7 @@ public class DetailTest extends BaseTest {
expectedParseInfo.setAggType(AggregateTypeEnum.NONE); expectedParseInfo.setAggType(AggregateTypeEnum.NONE);
QueryFilter dimensionFilter = QueryFilter dimensionFilter =
DataUtils.getFilter("singer_name", FilterOperatorEnum.EQUALS, "周杰伦", "歌手名", 8L); DataUtils.getFilter("singer_name", FilterOperatorEnum.EQUALS, "周杰伦", "歌手名", 17L);
expectedParseInfo.getDimensionFilters().add(dimensionFilter); expectedParseInfo.getDimensionFilters().add(dimensionFilter);
expectedParseInfo.getDimensions() expectedParseInfo.getDimensions()
@@ -43,7 +43,7 @@ public class DetailTest extends BaseTest {
@Test @Test
public void test_detail_filter() throws Exception { public void test_detail_filter() throws Exception {
QueryResult actualResult = submitNewChat("国风歌手", DataUtils.tagAgentId); QueryResult actualResult = submitNewChat("国风歌手", DataUtils.singerAgentId);
QueryResult expectedResult = new QueryResult(); QueryResult expectedResult = new QueryResult();
SemanticParseInfo expectedParseInfo = new SemanticParseInfo(); SemanticParseInfo expectedParseInfo = new SemanticParseInfo();

View File

@@ -9,6 +9,7 @@ import com.tencent.supersonic.headless.api.pojo.SemanticParseInfo;
import com.tencent.supersonic.headless.api.pojo.request.QueryFilter; import com.tencent.supersonic.headless.api.pojo.request.QueryFilter;
import com.tencent.supersonic.headless.chat.query.rule.metric.MetricFilterQuery; import com.tencent.supersonic.headless.chat.query.rule.metric.MetricFilterQuery;
import com.tencent.supersonic.headless.chat.query.rule.metric.MetricGroupByQuery; import com.tencent.supersonic.headless.chat.query.rule.metric.MetricGroupByQuery;
import com.tencent.supersonic.headless.chat.query.rule.metric.MetricModelQuery;
import com.tencent.supersonic.headless.chat.query.rule.metric.MetricTopNQuery; import com.tencent.supersonic.headless.chat.query.rule.metric.MetricTopNQuery;
import com.tencent.supersonic.util.DataUtils; import com.tencent.supersonic.util.DataUtils;
import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Order;
@@ -28,13 +29,28 @@ import static com.tencent.supersonic.common.pojo.enums.AggregateTypeEnum.SUM;
public class MetricTest extends BaseTest { public class MetricTest extends BaseTest {
@Test @Test
public void testMetric() throws Exception { public void testMetricModel() throws Exception {
QueryResult actualResult = submitNewChat("超音数 访问次数", DataUtils.metricAgentId); QueryResult actualResult = submitNewChat("超音数 访问次数", DataUtils.productAgentId);
QueryResult expectedResult = new QueryResult();
SemanticParseInfo expectedParseInfo = new SemanticParseInfo();
expectedResult.setChatContext(expectedParseInfo);
expectedResult.setQueryMode(MetricModelQuery.QUERY_MODE);
expectedParseInfo.setAggType(NONE);
expectedParseInfo.getMetrics().add(DataUtils.getSchemaElement("访问次数"));
expectedParseInfo.setDateInfo(
DataUtils.getDateConf(DateConf.DateMode.BETWEEN, unit, period, startDay, endDay));
expectedParseInfo.setQueryType(QueryType.AGGREGATE);
assertQueryResult(expectedResult, actualResult);
assert actualResult.getQueryResults().size() == 1;
} }
@Test @Test
public void testMetricFilter() throws Exception { public void testMetricFilter() throws Exception {
QueryResult actualResult = submitNewChat("alice的访问次数", DataUtils.metricAgentId); QueryResult actualResult = submitNewChat("alice的访问次数", DataUtils.productAgentId);
QueryResult expectedResult = new QueryResult(); QueryResult expectedResult = new QueryResult();
SemanticParseInfo expectedParseInfo = new SemanticParseInfo(); SemanticParseInfo expectedParseInfo = new SemanticParseInfo();
@@ -57,7 +73,8 @@ public class MetricTest extends BaseTest {
@Test @Test
public void testMetricGroupBy() throws Exception { public void testMetricGroupBy() throws Exception {
QueryResult actualResult = submitNewChat("近7天超音数各部门的访问次数", DataUtils.metricAgentId); System.setProperty("s2.test", "true");
QueryResult actualResult = submitNewChat("近7天超音数各部门的访问次数", DataUtils.productAgentId);
QueryResult expectedResult = new QueryResult(); QueryResult expectedResult = new QueryResult();
SemanticParseInfo expectedParseInfo = new SemanticParseInfo(); SemanticParseInfo expectedParseInfo = new SemanticParseInfo();
@@ -79,7 +96,7 @@ public class MetricTest extends BaseTest {
@Test @Test
public void testMetricFilterCompare() throws Exception { public void testMetricFilterCompare() throws Exception {
QueryResult actualResult = submitNewChat("对比alice和lucy的访问次数", DataUtils.metricAgentId); QueryResult actualResult = submitNewChat("对比alice和lucy的访问次数", DataUtils.productAgentId);
QueryResult expectedResult = new QueryResult(); QueryResult expectedResult = new QueryResult();
SemanticParseInfo expectedParseInfo = new SemanticParseInfo(); SemanticParseInfo expectedParseInfo = new SemanticParseInfo();
@@ -107,7 +124,7 @@ public class MetricTest extends BaseTest {
@Test @Test
@Order(3) @Order(3)
public void testMetricTopN() throws Exception { public void testMetricTopN() throws Exception {
QueryResult actualResult = submitNewChat("近3天访问次数最多的用户", DataUtils.metricAgentId); QueryResult actualResult = submitNewChat("近3天访问次数最多的用户", DataUtils.productAgentId);
QueryResult expectedResult = new QueryResult(); QueryResult expectedResult = new QueryResult();
SemanticParseInfo expectedParseInfo = new SemanticParseInfo(); SemanticParseInfo expectedParseInfo = new SemanticParseInfo();
@@ -128,7 +145,7 @@ public class MetricTest extends BaseTest {
@Test @Test
public void testMetricGroupBySum() throws Exception { public void testMetricGroupBySum() throws Exception {
QueryResult actualResult = submitNewChat("近7天超音数各部门的访问次数总和", DataUtils.metricAgentId); QueryResult actualResult = submitNewChat("近7天超音数各部门的访问次数总和", DataUtils.productAgentId);
QueryResult expectedResult = new QueryResult(); QueryResult expectedResult = new QueryResult();
SemanticParseInfo expectedParseInfo = new SemanticParseInfo(); SemanticParseInfo expectedParseInfo = new SemanticParseInfo();
expectedResult.setChatContext(expectedParseInfo); expectedResult.setChatContext(expectedParseInfo);
@@ -154,7 +171,7 @@ public class MetricTest extends BaseTest {
String dateStr = textFormat.format(format.parse(startDay)); String dateStr = textFormat.format(format.parse(startDay));
QueryResult actualResult = QueryResult actualResult =
submitNewChat(String.format("alice在%s的访问次数", dateStr), DataUtils.metricAgentId); submitNewChat(String.format("alice在%s的访问次数", dateStr), DataUtils.productAgentId);
QueryResult expectedResult = new QueryResult(); QueryResult expectedResult = new QueryResult();
SemanticParseInfo expectedParseInfo = new SemanticParseInfo(); SemanticParseInfo expectedParseInfo = new SemanticParseInfo();

View File

@@ -20,6 +20,7 @@ public class QueryByMetricTest extends BaseTest {
@Test @Test
public void testWithMetricAndDimensionBizNames() throws Exception { public void testWithMetricAndDimensionBizNames() throws Exception {
System.setProperty("s2.test", "true");
QueryMetricReq queryMetricReq = new QueryMetricReq(); QueryMetricReq queryMetricReq = new QueryMetricReq();
queryMetricReq.setMetricNames(Arrays.asList("stay_hours", "pv")); queryMetricReq.setMetricNames(Arrays.asList("stay_hours", "pv"));
queryMetricReq.setDimensionNames(Arrays.asList("user_name", "department")); queryMetricReq.setDimensionNames(Arrays.asList("user_name", "department"));
@@ -57,6 +58,7 @@ public class QueryByMetricTest extends BaseTest {
@Test @Test
public void testWithMetricAndDimensionIds() throws Exception { public void testWithMetricAndDimensionIds() throws Exception {
System.setProperty("s2.test", "true");
QueryMetricReq queryMetricReq = new QueryMetricReq(); QueryMetricReq queryMetricReq = new QueryMetricReq();
queryMetricReq.setDomainId(1L); queryMetricReq.setDomainId(1L);
queryMetricReq.setMetricIds(Arrays.asList(1L, 3L)); queryMetricReq.setMetricIds(Arrays.asList(1L, 3L));

View File

@@ -46,6 +46,7 @@ public class QueryByStructTest extends BaseTest {
@Test @Test
public void testDetailQuery() throws Exception { public void testDetailQuery() throws Exception {
System.setProperty("s2.test", "true");
QueryStructReq queryStructReq = QueryStructReq queryStructReq =
buildQueryStructReq(Arrays.asList("user_name", "department"), QueryType.DETAIL); buildQueryStructReq(Arrays.asList("user_name", "department"), QueryType.DETAIL);
SemanticQueryResp semanticQueryResp = SemanticQueryResp semanticQueryResp =
@@ -86,6 +87,7 @@ public class QueryByStructTest extends BaseTest {
@Test @Test
public void testFilterQuery() throws Exception { public void testFilterQuery() throws Exception {
System.setProperty("s2.test", "true");
QueryStructReq queryStructReq = buildQueryStructReq(Arrays.asList("department")); QueryStructReq queryStructReq = buildQueryStructReq(Arrays.asList("department"));
List<Filter> dimensionFilters = new ArrayList<>(); List<Filter> dimensionFilters = new ArrayList<>();
Filter filter = new Filter(); Filter filter = new Filter();

View File

@@ -18,7 +18,7 @@ public class TranslateTest extends BaseTest {
public void testSqlExplain() throws Exception { public void testSqlExplain() throws Exception {
String sql = "SELECT 部门, SUM(访问次数) AS 访问次数 FROM 超音数PVUV统计 GROUP BY 部门 "; String sql = "SELECT 部门, SUM(访问次数) AS 访问次数 FROM 超音数PVUV统计 GROUP BY 部门 ";
SemanticTranslateResp explain = semanticLayerService.translate( SemanticTranslateResp explain = semanticLayerService.translate(
QueryReqBuilder.buildS2SQLReq(sql, DataUtils.getMetricAgentView()), QueryReqBuilder.buildS2SQLReq(sql, DataUtils.productDatasetId),
User.getDefaultUser()); User.getDefaultUser());
assertNotNull(explain); assertNotNull(explain);
assertNotNull(explain.getQuerySQL()); assertNotNull(explain.getQuerySQL());

View File

@@ -15,10 +15,15 @@ import static java.time.LocalDate.now;
public class DataUtils { public class DataUtils {
public static final Integer metricAgentId = 1; public static final Integer productAgentId = 1;
public static final Integer tagAgentId = 2; public static final Integer companyAgentId = 2;
public static final Integer singerAgentId = 3;
public static final Long productDatasetId = 1L;
public static final Long companyDatasetId = 2L;
public static final Long singerDatasettId = 3L;
public static final Integer ONE_TURNS_CHAT_ID = 10; public static final Integer ONE_TURNS_CHAT_ID = 10;
public static final Integer MULTI_TURNS_CHAT_ID = 11;
private static final User user_test = User.getDefaultUser(); private static final User user_test = User.getDefaultUser();
public static User getUser() { public static User getUser() {
@@ -40,7 +45,7 @@ public class DataUtils {
public static ChatParseReq getChatParseReq(Integer id, String query, boolean enableLLM) { public static ChatParseReq getChatParseReq(Integer id, String query, boolean enableLLM) {
ChatParseReq chatParseReq = new ChatParseReq(); ChatParseReq chatParseReq = new ChatParseReq();
chatParseReq.setQueryText(query); chatParseReq.setQueryText(query);
chatParseReq.setAgentId(metricAgentId); chatParseReq.setAgentId(productAgentId);
chatParseReq.setChatId(id); chatParseReq.setChatId(id);
chatParseReq.setUser(user_test); chatParseReq.setUser(user_test);
chatParseReq.setDisableLLM(!enableLLM); chatParseReq.setDisableLLM(!enableLLM);
@@ -92,7 +97,4 @@ public class DataUtils {
return result; return result;
} }
public static Long getMetricAgentView() {
return 1L;
}
} }

View File

@@ -6,8 +6,8 @@ spring:
password: semantic password: semantic
sql: sql:
init: init:
schema-locations: classpath:db/schema-h2.sql schema-locations: classpath:db/schema-h2.sql,classpath:db/schema-h2-demo.sql
data-locations: classpath:db/data-h2.sql data-locations: classpath:db/data-h2.sql,classpath:db/data-h2-demo.sql
h2: h2:
console: console:
path: /h2-console/semantic path: /h2-console/semantic

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,85 @@
-------S2VisitsDemo
CREATE TABLE IF NOT EXISTS `s2_user_department` (
`user_name` varchar(200) NOT NULL,
`department` varchar(200) NOT NULL, -- department of user
PRIMARY KEY (`user_name`,`department`)
);
COMMENT ON TABLE s2_user_department IS 'user_department_info';
CREATE TABLE IF NOT EXISTS `s2_pv_uv_statis` (
`imp_date` varchar(200) NOT NULL,
`user_name` varchar(200) NOT NULL,
`page` varchar(200) NOT NULL
);
COMMENT ON TABLE s2_pv_uv_statis IS 's2_pv_uv_statis';
CREATE TABLE IF NOT EXISTS `s2_stay_time_statis` (
`imp_date` varchar(200) NOT NULL,
`user_name` varchar(200) NOT NULL,
`stay_hours` DOUBLE NOT NULL,
`page` varchar(200) NOT NULL
);
COMMENT ON TABLE s2_stay_time_statis IS 's2_stay_time_statis_info';
-------S2ArtistDemo
CREATE TABLE IF NOT EXISTS `singer` (
`singer_name` varchar(200) NOT NULL,
`act_area` varchar(200) NOT NULL,
`song_name` varchar(200) NOT NULL,
`genre` varchar(200) NOT NULL,
`js_play_cnt` bigINT DEFAULT NULL,
`down_cnt` bigINT DEFAULT NULL,
`favor_cnt` bigINT DEFAULT NULL,
PRIMARY KEY (`singer_name`)
);
COMMENT ON TABLE singer IS 'singer_info';
CREATE TABLE IF NOT EXISTS `genre` (
`g_name` varchar(20) NOT NULL , -- genre name
`rating` INT ,
`most_popular_in` varchar(50) ,
PRIMARY KEY (`g_name`)
);
COMMENT ON TABLE genre IS 'genre';
CREATE TABLE IF NOT EXISTS `artist` (
`artist_name` varchar(50) NOT NULL , -- genre name
`citizenship` varchar(20) ,
`gender` varchar(20) ,
`g_name` varchar(50),
PRIMARY KEY (`artist_name`,`citizenship`)
);
COMMENT ON TABLE artist IS 'artist';
-------S2CompanyDemo
CREATE TABLE IF NOT EXISTS `company` (
`company_id` varchar(50) NOT NULL ,
`company_name` varchar(50) NOT NULL ,
`headquarter_address` varchar(50) NOT NULL ,
`company_established_time` varchar(20) NOT NULL ,
`founder` varchar(20) NOT NULL ,
`ceo` varchar(20) NOT NULL ,
`annual_turnover` bigint(15) ,
`employee_count` int(7) ,
PRIMARY KEY (`company_id`)
);
CREATE TABLE IF NOT EXISTS `brand` (
`brand_id` varchar(50) NOT NULL ,
`brand_name` varchar(50) NOT NULL ,
`brand_established_time` varchar(20) NOT NULL ,
`company_id` varchar(50) NOT NULL ,
`legal_representative` varchar(20) NOT NULL ,
`registered_capital` bigint(15) ,
PRIMARY KEY (`brand_id`)
);
CREATE TABLE IF NOT EXISTS `brand_revenue` (
`year_time` varchar(10) NOT NULL ,
`brand_id` varchar(50) NOT NULL ,
`revenue` bigint(15) NOT NULL,
`profit` bigint(15) NOT NULL ,
`revenue_growth_year_on_year` double NOT NULL ,
`profit_growth_year_on_year` double NOT NULL
);

View File

@@ -21,7 +21,7 @@ s2:
date: true date: true
demo: demo:
names: S2VisitsDemo,S2SingerDemo names: S2VisitsDemo,S2SingerDemo,S2CompanyDemo
enableLLM: false enableLLM: false
authentication: authentication:

View File

@@ -1,19 +1,19 @@
import React, { useState } from 'react'; import React, { useState } from 'react';
import { Table, Select, Checkbox, Input, Space, Tooltip, Form, Switch, Row, Col } from 'antd'; import { Checkbox, Form, Input, Select, Space, Switch, Table, Tooltip } from 'antd';
import TableTitleTooltips from '../../components/TableTitleTooltips'; import TableTitleTooltips from '../../components/TableTitleTooltips';
import { isUndefined } from 'lodash'; import { isUndefined } from 'lodash';
import { ExclamationCircleOutlined } from '@ant-design/icons'; import { ExclamationCircleOutlined } from '@ant-design/icons';
import SqlEditor from '@/components/SqlEditor'; import SqlEditor from '@/components/SqlEditor';
import { ISemantic } from '../../data'; import { ISemantic } from '../../data';
import { import {
TYPE_OPTIONS,
DATE_FORMATTER,
AGG_OPTIONS, AGG_OPTIONS,
EnumDataSourceType, DATE_FORMATTER,
DATE_OPTIONS, DATE_OPTIONS,
DIM_OPTIONS, DIM_OPTIONS,
EnumDataSourceType,
EnumModelDataType, EnumModelDataType,
PARTITION_TIME_FORMATTER, PARTITION_TIME_FORMATTER,
TYPE_OPTIONS,
} from '../constants'; } from '../constants';
import styles from '../style.less'; import styles from '../style.less';
@@ -90,8 +90,7 @@ const ModelFieldForm: React.FC<Props> = ({
dataIndex: 'type', dataIndex: 'type',
width: 250, width: 250,
render: (_: any, record: FieldItem) => { render: (_: any, record: FieldItem) => {
const type = fields.find((field) => field.bizName === record.bizName)?.type; const { type, classType } = record;
const classType = fields.find((field) => field.bizName === record.bizName)?.classType;
const selectTypeValue = [EnumModelDataType.DIMENSION].includes(classType) const selectTypeValue = [EnumModelDataType.DIMENSION].includes(classType)
? classType ? classType
: type; : type;
@@ -229,13 +228,10 @@ const ModelFieldForm: React.FC<Props> = ({
// ); // );
// } // }
if (type === EnumDataSourceType.MEASURES) { if (type === EnumDataSourceType.MEASURES) {
const agg = record.expr
? fields.find((field) => field.expr === record.expr)?.agg
: undefined;
return ( return (
<Select <Select
placeholder="度量算子" placeholder="度量算子"
value={agg} value={record.agg}
onChange={(value) => { onChange={(value) => {
handleFieldChange(record, 'agg', value); handleFieldChange(record, 'agg', value);
}} }}
@@ -251,7 +247,6 @@ const ModelFieldForm: React.FC<Props> = ({
</Select> </Select>
); );
} }
if (process.env.SHOW_TAG) { if (process.env.SHOW_TAG) {
if (type === EnumDataSourceType.CATEGORICAL) { if (type === EnumDataSourceType.CATEGORICAL) {
const isTag = fields.find((field) => field.bizName === record.bizName)?.isTag; const isTag = fields.find((field) => field.bizName === record.bizName)?.isTag;
@@ -299,10 +294,7 @@ const ModelFieldForm: React.FC<Props> = ({
} }
} }
if ([EnumDataSourceType.TIME, EnumDataSourceType.PARTITION_TIME].includes(type)) { if ([EnumDataSourceType.TIME, EnumDataSourceType.PARTITION_TIME].includes(type)) {
const dateFormat = fields.find((field) => field.bizName === record.bizName)?.dateFormat; const { dateFormat, timeGranularity } = record;
const timeGranularity = fields.find(
(field) => field.bizName === record.bizName,
)?.timeGranularity;
const dateFormatterOptions = const dateFormatterOptions =
type === EnumDataSourceType.PARTITION_TIME ? PARTITION_TIME_FORMATTER : DATE_FORMATTER; type === EnumDataSourceType.PARTITION_TIME ? PARTITION_TIME_FORMATTER : DATE_FORMATTER;