Merge remote-tracking branch 'origin/master'

This commit is contained in:
jerryjzhang
2024-12-20 12:43:36 +08:00
47 changed files with 487 additions and 553 deletions

18
LICENSE
View File

@@ -1,19 +1,11 @@
SuperSonic is licensed under the MIT License, with the following additional conditions:
1. You may provide SuperSonic to third parties as commercial software or a service. However,
when the following conditions are met, you must contact the producer to obtain a commercial license:
a. Multi-tenant SaaS service: Unless explicitly authorized by SuperSonic in writing, you may not use the
SuperSonic source code to operate a multi-tenant SaaS service.
b. LOGO and copyright information: In the process of using SuperSonic, you may not remove or modify
the LOGO or copyright information on the SuperSonic UI. This restriction is inapplicable to uses of
SuperSonic that do not involve its frontend components.
SuperSonic is licensed under the MIT License, you can freely use or integrate SuperSonic within
your organization. However, if you want to provide or integrate SuperSonic to third parties
as commercial software or a service, you must contact the producer to obtain a commercial license.
Please contact jerryjzhang@tencent.com by email to inquire about licensing matters.
2. As a contributor, you should agree that:
As a SuperSonic contributor, you should agree that:
a. The producer can adjust the open-source agreement to be more strict or relaxed as deemed necessary.
a. The producer can adjust the open-source agreement to be stricter or more relaxed as deemed necessary.
b. Your contributed code may be used for commercial purposes, including but not limited to its business operations.
Terms of the MIT License:

View File

@@ -81,7 +81,7 @@ public class Configuration {
.setUnquotedCasing(Casing.TO_UPPER).setConformance(sqlDialect.getConformance())
.setLex(Lex.BIG_QUERY);
if (EngineType.HANADB.equals(engineType)) {
parserConfig = parserConfig.setQuoting(Quoting.DOUBLE_QUOTE);
parserConfig = parserConfig.setQuoting(Quoting.DOUBLE_QUOTE);
}
parserConfig = parserConfig.setQuotedCasing(Casing.UNCHANGED);
parserConfig = parserConfig.setUnquotedCasing(Casing.UNCHANGED);

View File

@@ -21,10 +21,11 @@ public class SqlDialectFactory {
.withDatabaseProduct(DatabaseProduct.BIG_QUERY).withLiteralQuoteString("'")
.withLiteralEscapedQuoteString("''").withUnquotedCasing(Casing.UNCHANGED)
.withQuotedCasing(Casing.UNCHANGED).withCaseSensitive(false);
public static final Context HANADB_CONTEXT = SqlDialect.EMPTY_CONTEXT
.withDatabaseProduct(DatabaseProduct.BIG_QUERY).withLiteralQuoteString("'")
.withIdentifierQuoteString("\"").withLiteralEscapedQuoteString("''").withUnquotedCasing(Casing.UNCHANGED)
.withQuotedCasing(Casing.UNCHANGED).withCaseSensitive(true);
public static final Context HANADB_CONTEXT =
SqlDialect.EMPTY_CONTEXT.withDatabaseProduct(DatabaseProduct.BIG_QUERY)
.withLiteralQuoteString("'").withIdentifierQuoteString("\"")
.withLiteralEscapedQuoteString("''").withUnquotedCasing(Casing.UNCHANGED)
.withQuotedCasing(Casing.UNCHANGED).withCaseSensitive(true);
private static Map<EngineType, SemanticSqlDialect> sqlDialectMap;
static {

View File

@@ -1,15 +1,13 @@
package com.tencent.supersonic.common.jsqlparser;
import net.sf.jsqlparser.expression.Alias;
import net.sf.jsqlparser.statement.select.SelectItem;
import net.sf.jsqlparser.statement.select.SelectItemVisitorAdapter;
import org.apache.commons.lang3.StringUtils;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import net.sf.jsqlparser.expression.Alias;
public class FieldAliasReplaceNameVisitor extends SelectItemVisitorAdapter {
private Map<String, String> fieldNameMap;

View File

@@ -465,6 +465,7 @@ public class SqlReplaceHelper {
}
return selectStatement.toString();
}
public static String replaceAlias(String sql) {
Select selectStatement = SqlSelectHelper.getSelect(sql);
if (!(selectStatement instanceof PlainSelect)) {

View File

@@ -9,6 +9,7 @@ import dev.langchain4j.model.zhipu.ZhipuAiChatModel;
import dev.langchain4j.model.zhipu.ZhipuAiEmbeddingModel;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.stereotype.Service;
import static java.time.Duration.ofSeconds;
@Service
@@ -32,8 +33,8 @@ public class ZhipuModelFactory implements ModelFactory, InitializingBean {
return ZhipuAiEmbeddingModel.builder().baseUrl(embeddingModelConfig.getBaseUrl())
.apiKey(embeddingModelConfig.getApiKey()).model(embeddingModelConfig.getModelName())
.maxRetries(embeddingModelConfig.getMaxRetries()).callTimeout(ofSeconds(60))
.connectTimeout(ofSeconds(60)).writeTimeout(ofSeconds(60)).readTimeout(ofSeconds(60))
.logRequests(embeddingModelConfig.getLogRequests())
.connectTimeout(ofSeconds(60)).writeTimeout(ofSeconds(60))
.readTimeout(ofSeconds(60)).logRequests(embeddingModelConfig.getLogRequests())
.logResponses(embeddingModelConfig.getLogResponses()).build();
}

View File

@@ -326,34 +326,35 @@ class SqlReplaceHelperTest {
@Test
void testReplaceAliasFieldName() {
Map<String, String> map = new HashMap<>();
map.put("总访问次数", "\"总访问次数\"");
map.put("访问次数", "\"访问次数\"");
String sql = "select 部门, sum(访问次数) as 总访问次数 from 超音数 where "
+ "datediff('day', 数据日期, '2023-09-05') <= 3 group by 部门 order by 总访问次数 desc limit 10";
String replaceSql = SqlReplaceHelper.replaceAliasFieldName(sql, map);
System.out.println(replaceSql);
Assert.assertEquals("SELECT 部门, sum(访问次数) AS \"总访问次数\" FROM 超音数 WHERE "
+ "datediff('day', 数据日期, '2023-09-05') <= 3 GROUP BY 部门 ORDER BY \"总访问次数\" DESC LIMIT 10",
replaceSql);
Map<String, String> map = new HashMap<>();
map.put("总访问次数", "\"总访问次数\"");
map.put("访问次数", "\"访问次数\"");
String sql = "select 部门, sum(访问次数) as 总访问次数 from 超音数 where "
+ "datediff('day', 数据日期, '2023-09-05') <= 3 group by 部门 order by 总访问次数 desc limit 10";
String replaceSql = SqlReplaceHelper.replaceAliasFieldName(sql, map);
System.out.println(replaceSql);
Assert.assertEquals("SELECT 部门, sum(访问次数) AS \"总访问次数\" FROM 超音数 WHERE "
+ "datediff('day', 数据日期, '2023-09-05') <= 3 GROUP BY 部门 ORDER BY \"总访问次数\" DESC LIMIT 10",
replaceSql);
sql = "select 部门, sum(访问次数) as 总访问次数 from 超音数 where "
+ "(datediff('day', 数据日期, '2023-09-05') <= 3) and 数据日期 = '2023-10-10' "
+ "group by 部门 order by 总访问次数 desc limit 10";
replaceSql = SqlReplaceHelper.replaceAliasFieldName(sql, map);
System.out.println(replaceSql);
Assert.assertEquals("SELECT 部门, sum(访问次数) AS \"总访问次数\" FROM 超音数 WHERE "
+ "(datediff('day', 数据日期, '2023-09-05') <= 3) AND 数据日期 = '2023-10-10' "
+ "GROUP BY 部门 ORDER BY \"总访问次数\" DESC LIMIT 10", replaceSql);
sql = "select 部门, sum(访问次数) as 总访问次数 from 超音数 where "
+ "(datediff('day', 数据日期, '2023-09-05') <= 3) and 数据日期 = '2023-10-10' "
+ "group by 部门 order by 总访问次数 desc limit 10";
replaceSql = SqlReplaceHelper.replaceAliasFieldName(sql, map);
System.out.println(replaceSql);
Assert.assertEquals("SELECT 部门, sum(访问次数) AS \"总访问次数\" FROM 超音数 WHERE "
+ "(datediff('day', 数据日期, '2023-09-05') <= 3) AND 数据日期 = '2023-10-10' "
+ "GROUP BY 部门 ORDER BY \"总访问次数\" DESC LIMIT 10", replaceSql);
sql = "select 部门, sum(访问次数) as 访问次数 from 超音数 where "
+ "(datediff('day', 数据日期, '2023-09-05') <= 3) and 数据日期 = '2023-10-10' "
+ "group by 部门 order by 访问次数 desc limit 10";
replaceSql = SqlReplaceHelper.replaceAliasFieldName(sql, map);
System.out.println(replaceSql);
Assert.assertEquals("SELECT 部门, sum(\"访问次数\") AS \"访问次数\" FROM 超音数 WHERE (datediff('day', 数据日期, "
+ "'2023-09-05') <= 3) AND 数据日期 = '2023-10-10' GROUP BY 部门 ORDER BY \"访问次数\" DESC LIMIT 10",
replaceSql);
sql = "select 部门, sum(访问次数) as 访问次数 from 超音数 where "
+ "(datediff('day', 数据日期, '2023-09-05') <= 3) and 数据日期 = '2023-10-10' "
+ "group by 部门 order by 访问次数 desc limit 10";
replaceSql = SqlReplaceHelper.replaceAliasFieldName(sql, map);
System.out.println(replaceSql);
Assert.assertEquals(
"SELECT 部门, sum(\"访问次数\") AS \"访问次数\" FROM 超音数 WHERE (datediff('day', 数据日期, "
+ "'2023-09-05') <= 3) AND 数据日期 = '2023-10-10' GROUP BY 部门 ORDER BY \"访问次数\" DESC LIMIT 10",
replaceSql);
}
@Test

View File

@@ -65,7 +65,7 @@ public class LLMSqlCorrector extends BaseSemanticCorrector {
return;
}
Text2SQLExemplar exemplar = (Text2SQLExemplar)semanticParseInfo.getProperties()
Text2SQLExemplar exemplar = (Text2SQLExemplar) semanticParseInfo.getProperties()
.get(Text2SQLExemplar.PROPERTY_KEY);
ChatLanguageModel chatLanguageModel =

View File

@@ -27,12 +27,12 @@ import java.util.stream.Collectors;
@Slf4j
public class DatabaseMatchStrategy extends SingleMatchStrategy<DatabaseMapResult> {
private List<SchemaElement> allElements;
private ThreadLocal<List<SchemaElement>> allElements = ThreadLocal.withInitial(ArrayList::new);
@Override
public Map<MatchText, List<DatabaseMapResult>> match(ChatQueryContext chatQueryContext,
List<S2Term> terms, Set<Long> detectDataSetIds) {
this.allElements = getSchemaElements(chatQueryContext);
allElements.set(getSchemaElements(chatQueryContext));
return super.match(chatQueryContext, terms, detectDataSetIds);
}
@@ -43,7 +43,7 @@ public class DatabaseMatchStrategy extends SingleMatchStrategy<DatabaseMapResult
}
Double metricDimensionThresholdConfig = getThreshold(chatQueryContext);
Map<String, Set<SchemaElement>> nameToItems = getNameToItems(allElements);
Map<String, Set<SchemaElement>> nameToItems = getNameToItems(allElements.get());
List<DatabaseMapResult> results = new ArrayList<>();
for (Entry<String, Set<SchemaElement>> entry : nameToItems.entrySet()) {
String name = entry.getKey();

View File

@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;
package com.tencent.supersonic.headless.core.pojo;
import lombok.Builder;
import lombok.Data;

View File

@@ -1,6 +1,9 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;
package com.tencent.supersonic.headless.core.pojo;
import com.tencent.supersonic.headless.core.pojo.Database;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Materialization;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
import lombok.Data;
import java.util.ArrayList;

View File

@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;
package com.tencent.supersonic.headless.core.pojo;
import com.google.common.collect.Sets;
import com.tencent.supersonic.common.pojo.ColumnOrder;
@@ -9,7 +9,7 @@ import java.util.List;
import java.util.Set;
@Data
public class OntologyQueryParam {
public class OntologyQuery {
private Set<String> metrics = Sets.newHashSet();
private Set<String> dimensions = Sets.newHashSet();
private String where;

View File

@@ -1,8 +1,6 @@
package com.tencent.supersonic.headless.core.pojo;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Ontology;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
import lombok.Data;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Triple;
@@ -13,15 +11,15 @@ public class QueryStatement {
private Long dataSetId;
private String sql;
private String errMsg;
private StructQueryParam structQueryParam;
private SqlQueryParam sqlQueryParam;
private OntologyQueryParam ontologyQueryParam;
private StructQuery structQuery;
private SqlQuery sqlQuery;
private OntologyQuery ontologyQuery;
private Integer status = 0;
private Boolean isS2SQL = false;
private Boolean enableOptimize = true;
private Triple<String, String, String> minMaxTime;
private Ontology ontology;
private SemanticSchemaResp semanticSchemaResp;
private SemanticSchemaResp semanticSchema;
private Integer limit = 1000;
private Boolean isTranslated = false;

View File

@@ -3,7 +3,7 @@ package com.tencent.supersonic.headless.core.pojo;
import lombok.Data;
@Data
public class SqlQueryParam {
public class SqlQuery {
private String sql;
private String table;
private boolean supportWith = true;

View File

@@ -12,7 +12,7 @@ import java.util.ArrayList;
import java.util.List;
@Data
public class StructQueryParam {
public class StructQuery {
private List<String> groups = new ArrayList();
private List<Aggregator> aggregators = new ArrayList();
private List<Order> orders = new ArrayList();

View File

@@ -3,10 +3,10 @@ package com.tencent.supersonic.headless.core.translator;
import com.tencent.supersonic.common.calcite.SqlMergeWithUtils;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQueryParam;
import com.tencent.supersonic.headless.core.translator.converter.QueryConverter;
import com.tencent.supersonic.headless.core.pojo.SqlQuery;
import com.tencent.supersonic.headless.core.translator.optimizer.QueryOptimizer;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
import com.tencent.supersonic.headless.core.translator.parser.QueryParser;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.utils.ComponentFactory;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
@@ -26,23 +26,30 @@ public class DefaultSemanticTranslator implements SemanticTranslator {
return;
}
try {
for (QueryConverter converter : ComponentFactory.getQueryConverters()) {
if (converter.accept(queryStatement)) {
log.debug("QueryConverter accept [{}]", converter.getClass().getName());
converter.convert(queryStatement);
for (QueryParser parser : ComponentFactory.getQueryParser()) {
if (parser.accept(queryStatement)) {
log.debug("QueryConverter accept [{}]", parser.getClass().getName());
parser.parse(queryStatement);
}
}
doOntologyParse(queryStatement);
if (!queryStatement.isOk()) {
throw new Exception(String.format("parse ontology table [%s] error [%s]",
queryStatement.getSqlQuery().getTable(), queryStatement.getErrMsg()));
}
if (StringUtils.isNotBlank(queryStatement.getSqlQueryParam().getSimplifiedSql())) {
queryStatement.setSql(queryStatement.getSqlQueryParam().getSimplifiedSql());
mergeOntologyQuery(queryStatement);
if (StringUtils.isNotBlank(queryStatement.getSqlQuery().getSimplifiedSql())) {
queryStatement.setSql(queryStatement.getSqlQuery().getSimplifiedSql());
}
if (StringUtils.isBlank(queryStatement.getSql())) {
throw new RuntimeException("parse exception: " + queryStatement.getErrMsg());
}
for (QueryOptimizer queryOptimizer : ComponentFactory.getQueryOptimizers()) {
queryOptimizer.rewrite(queryStatement);
for (QueryOptimizer optimizer : ComponentFactory.getQueryOptimizers()) {
if (optimizer.accept(queryStatement)) {
optimizer.rewrite(queryStatement);
}
}
log.info("translated query SQL: [{}]",
StringUtils.normalizeSpace(queryStatement.getSql()));
@@ -52,23 +59,18 @@ public class DefaultSemanticTranslator implements SemanticTranslator {
}
}
private void doOntologyParse(QueryStatement queryStatement) throws Exception {
OntologyQueryParam ontologyQueryParam = queryStatement.getOntologyQueryParam();
log.info("parse with ontology: [{}]", ontologyQueryParam);
ComponentFactory.getQueryParser().parse(queryStatement);
if (!queryStatement.isOk()) {
throw new Exception(String.format("parse ontology table [%s] error [%s]",
queryStatement.getSqlQueryParam().getTable(), queryStatement.getErrMsg()));
}
private void mergeOntologyQuery(QueryStatement queryStatement) throws Exception {
OntologyQuery ontologyQuery = queryStatement.getOntologyQuery();
log.info("parse with ontology: [{}]", ontologyQuery);
SqlQueryParam sqlQueryParam = queryStatement.getSqlQueryParam();
String ontologyQuerySql = sqlQueryParam.getSql();
String ontologyInnerTable = sqlQueryParam.getTable();
SqlQuery sqlQuery = queryStatement.getSqlQuery();
String ontologyQuerySql = sqlQuery.getSql();
String ontologyInnerTable = sqlQuery.getTable();
String ontologyInnerSql = queryStatement.getSql();
List<Pair<String, String>> tables = new ArrayList<>();
tables.add(Pair.of(ontologyInnerTable, ontologyInnerSql));
if (sqlQueryParam.isSupportWith()) {
if (sqlQuery.isSupportWith()) {
EngineType engineType = queryStatement.getOntology().getDatabase().getType();
if (!SqlMergeWithUtils.hasWith(engineType, ontologyQuerySql)) {
String withSql = "with " + tables.stream()
@@ -86,9 +88,9 @@ public class DefaultSemanticTranslator implements SemanticTranslator {
}
} else {
for (Pair<String, String> tb : tables) {
ontologyQuerySql =
StringUtils.replace(ontologyQuerySql, tb.getLeft(), "(" + tb.getRight()
+ ") " + (sqlQueryParam.isWithAlias() ? "" : tb.getLeft()), -1);
ontologyQuerySql = StringUtils.replace(ontologyQuerySql, tb.getLeft(),
"(" + tb.getRight() + ") " + (sqlQuery.isWithAlias() ? "" : tb.getLeft()),
-1);
}
queryStatement.setSql(ontologyQuerySql);
}

View File

@@ -1,11 +0,0 @@
package com.tencent.supersonic.headless.core.translator.converter;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
/**
 * Supplements and translates the request body of a {@code QueryStatement}
 * before it is handed to the SQL translation pipeline.
 */
public interface QueryConverter {
/** Returns true if this converter applies to the given statement; only then is convert invoked. */
boolean accept(QueryStatement queryStatement);
/** Applies the conversion, mutating the statement in place; may fail with a conversion error. */
void convert(QueryStatement queryStatement) throws Exception;
}

View File

@@ -1,74 +0,0 @@
package com.tencent.supersonic.headless.core.translator.converter;
import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.core.pojo.Database;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQueryParam;
import com.tencent.supersonic.headless.core.pojo.StructQueryParam;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import java.util.Objects;
import java.util.stream.Collectors;
/**
 * Converts a structured query ({@code StructQueryParam}) into an equivalent SQL
 * query over a temporary table plus an ontology query, and attaches both to the
 * {@code QueryStatement} for downstream parsing.
 */
@Component("ParserDefaultConverter")
@Slf4j
public class StructQueryConverter implements QueryConverter {
@Override
public boolean accept(QueryStatement queryStatement) {
// Handles only struct queries; S2SQL statements are converted elsewhere.
return Objects.nonNull(queryStatement.getStructQueryParam())
&& !queryStatement.getIsS2SQL();
}
@Override
public void convert(QueryStatement queryStatement) throws Exception {
SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class);
StructQueryParam structQueryParam = queryStatement.getStructQueryParam();
// Placeholder table name the generated SQL selects from; later replaced by
// the ontology inner SQL (via WITH clause or inline subquery).
String dsTable = "t_1";
SqlQueryParam sqlParam = new SqlQueryParam();
sqlParam.setTable(dsTable);
String sql = String.format("select %s from %s %s %s %s",
sqlGenerateUtils.getSelect(structQueryParam), dsTable,
sqlGenerateUtils.getGroupBy(structQueryParam),
sqlGenerateUtils.getOrderBy(structQueryParam),
sqlGenerateUtils.getLimit(structQueryParam));
Database database = queryStatement.getOntology().getDatabase();
// Engines without WITH support get an aliased inline form ("t0") instead.
if (!sqlGenerateUtils.isSupportWith(database.getType(), database.getVersion())) {
sqlParam.setSupportWith(false);
sql = String.format("select %s from %s t0 %s %s %s",
sqlGenerateUtils.getSelect(structQueryParam), dsTable,
sqlGenerateUtils.getGroupBy(structQueryParam),
sqlGenerateUtils.getOrderBy(structQueryParam),
sqlGenerateUtils.getLimit(structQueryParam));
}
sqlParam.setSql(sql);
queryStatement.setSqlQueryParam(sqlParam);
// Mirror the struct query into an ontology query: groups become dimensions,
// aggregator columns become metrics.
OntologyQueryParam ontologyQueryParam = new OntologyQueryParam();
ontologyQueryParam.getDimensions().addAll(structQueryParam.getGroups());
ontologyQueryParam.getMetrics().addAll(structQueryParam.getAggregators().stream()
.map(Aggregator::getColumn).collect(Collectors.toList()));
String where = sqlGenerateUtils.generateWhere(structQueryParam, null);
ontologyQueryParam.setWhere(where);
// No metrics means a plain detail query (NATIVE); otherwise default aggregation.
if (ontologyQueryParam.getMetrics().isEmpty()) {
ontologyQueryParam.setAggOption(AggOption.NATIVE);
} else {
ontologyQueryParam.setAggOption(AggOption.DEFAULT);
}
ontologyQueryParam.setNativeQuery(structQueryParam.getQueryType().isNativeAggQuery());
ontologyQueryParam.setOrder(structQueryParam.getOrders().stream()
.map(order -> new ColumnOrder(order.getColumn(), order.getDirection()))
.collect(Collectors.toList()));
ontologyQueryParam.setLimit(structQueryParam.getLimit());
queryStatement.setOntologyQueryParam(ontologyQueryParam);
log.info("parse structQuery [{}] ", queryStatement.getSqlQueryParam());
}
}

View File

@@ -14,9 +14,16 @@ import java.util.Objects;
@Component("DbDialectOptimizer")
public class DbDialectOptimizer implements QueryOptimizer {
@Override
public boolean accept(QueryStatement queryStatement) {
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchema();
DatabaseResp database = semanticSchemaResp.getDatabaseResp();
return Objects.nonNull(database) && Objects.nonNull(database.getType());
}
@Override
public void rewrite(QueryStatement queryStatement) {
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp();
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchema();
DatabaseResp database = semanticSchemaResp.getDatabaseResp();
String sql = queryStatement.getSql();
if (Objects.isNull(database) || Objects.isNull(database.getType())) {

View File

@@ -1,40 +0,0 @@
package com.tencent.supersonic.headless.core.translator.optimizer;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.StructQueryParam;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;
import java.util.Objects;
/** Remove the default metric added by the system when the query only has dimensions */
@Slf4j
@Component("DetailQueryOptimizer")
public class DetailQueryOptimizer implements QueryOptimizer {

    /**
     * Validates the generated SQL. The no-metric rewrite itself is currently
     * disabled; this pass only guards against an empty statement.
     *
     * @param queryStatement statement whose SQL has already been generated
     * @throws RuntimeException if the statement carries no SQL
     */
    @Override
    public void rewrite(QueryStatement queryStatement) {
        // Null-check BEFORE trimming: the original trimmed first, so a null SQL
        // raised an NPE instead of the intended descriptive exception.
        String sqlRaw = queryStatement.getSql();
        if (StringUtils.isBlank(sqlRaw)) {
            throw new RuntimeException("sql is empty or null");
        }
        sqlRaw = sqlRaw.trim();
        log.debug("before handleNoMetric, sql:{}", sqlRaw);
        // NOTE(review): the dimension-only rewrite (wrapping the SQL in a
        // "select <groups> from (...) src_no_metric" projection) is intentionally
        // disabled; re-enable via isDetailQuery(...) if the default metric must
        // be stripped again.
        log.debug("after handleNoMetric, sql:{}", queryStatement.getSql());
    }

    /** Returns true when the struct query is a native (non-aggregated) detail query. */
    public boolean isDetailQuery(StructQueryParam structQueryParam) {
        return Objects.nonNull(structQueryParam)
                && structQueryParam.getQueryType().isNativeAggQuery();
    }
}

View File

@@ -7,5 +7,9 @@ import com.tencent.supersonic.headless.core.pojo.QueryStatement;
* derive the most efficient query.
*/
public interface QueryOptimizer {
/** Returns true if this optimizer applies to the given statement; only then is rewrite invoked. */
boolean accept(QueryStatement queryStatement);
/** Rewrites the statement's SQL in place into a more efficient form. */
void rewrite(QueryStatement queryStatement);
}

View File

@@ -9,10 +9,13 @@ import org.springframework.stereotype.Component;
@Component("ResultLimitOptimizer")
public class ResultLimitOptimizer implements QueryOptimizer {
@Override
public boolean accept(QueryStatement queryStatement) {
return !SqlSelectHelper.hasLimit(queryStatement.getSql());
}
@Override
public void rewrite(QueryStatement queryStatement) {
if (!SqlSelectHelper.hasLimit(queryStatement.getSql())) {
queryStatement.setSql(queryStatement.getSql() + " limit " + queryStatement.getLimit());
}
queryStatement.setSql(queryStatement.getSql() + " limit " + queryStatement.getLimit());
}
}

View File

@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.converter;
package com.tencent.supersonic.headless.core.translator.parser;
import com.google.common.collect.Lists;
import com.tencent.supersonic.common.jsqlparser.SqlAddHelper;
@@ -22,24 +22,24 @@ import java.util.Objects;
import java.util.stream.Collectors;
@Slf4j
@Component("DefaultDimValueConverter")
public class DefaultDimValueConverter implements QueryConverter {
@Component("DefaultDimValueParser")
public class DefaultDimValueParser implements QueryParser {
@Override
public boolean accept(QueryStatement queryStatement) {
return Objects.nonNull(queryStatement.getSqlQueryParam())
&& StringUtils.isNotBlank(queryStatement.getSqlQueryParam().getSql());
return Objects.nonNull(queryStatement.getSqlQuery())
&& StringUtils.isNotBlank(queryStatement.getSqlQuery().getSql());
}
@Override
public void convert(QueryStatement queryStatement) {
public void parse(QueryStatement queryStatement) {
List<Dimension> dimensions = queryStatement.getOntology().getDimensions().stream()
.filter(dimension -> !CollectionUtils.isEmpty(dimension.getDefaultValues()))
.collect(Collectors.toList());
if (CollectionUtils.isEmpty(dimensions)) {
return;
}
String sql = queryStatement.getSqlQueryParam().getSql();
String sql = queryStatement.getSqlQuery().getSql();
List<String> whereFields = SqlSelectHelper.getWhereFields(sql).stream()
.filter(field -> !TimeDimensionEnum.containsTimeDimension(field))
.collect(Collectors.toList());
@@ -56,11 +56,11 @@ public class DefaultDimValueConverter implements QueryConverter {
inExpression.setLeftExpression(new Column(dimension.getBizName()));
inExpression.setRightExpression(expressionList);
expressions.add(inExpression);
if (Objects.nonNull(queryStatement.getSqlQueryParam().getTable())) {
queryStatement.getOntologyQueryParam().getDimensions().add(dimension.getBizName());
if (Objects.nonNull(queryStatement.getSqlQuery().getTable())) {
queryStatement.getOntologyQuery().getDimensions().add(dimension.getBizName());
}
}
sql = SqlAddHelper.addWhere(sql, expressions);
queryStatement.getSqlQueryParam().setSql(sql);
queryStatement.getSqlQuery().setSql(sql);
}
}

View File

@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.converter;
package com.tencent.supersonic.headless.core.translator.parser;
import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.common.pojo.enums.AggOperatorEnum;
@@ -9,9 +9,9 @@ import com.tencent.supersonic.common.util.DateModeUtils;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.core.pojo.Database;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQueryParam;
import com.tencent.supersonic.headless.core.pojo.StructQueryParam;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
import com.tencent.supersonic.headless.core.pojo.SqlQuery;
import com.tencent.supersonic.headless.core.pojo.StructQuery;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
@@ -22,30 +22,29 @@ import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
@Component("CalculateAggConverter")
@Component("MetricRatioParser")
@Slf4j
public class MetricRatioConverter implements QueryConverter {
public class MetricRatioParser implements QueryParser {
public interface EngineSql {
String sql(StructQueryParam structQueryParam, boolean isOver, boolean asWith,
String metricSql);
String sql(StructQuery structQuery, boolean isOver, boolean asWith, String metricSql);
}
@Override
public boolean accept(QueryStatement queryStatement) {
if (Objects.isNull(queryStatement.getStructQueryParam()) || queryStatement.getIsS2SQL()
|| !isRatioAccept(queryStatement.getStructQueryParam())) {
if (Objects.isNull(queryStatement.getStructQuery()) || queryStatement.getIsS2SQL()
|| !isRatioAccept(queryStatement.getStructQuery())) {
return false;
}
StructQueryParam structQueryParam = queryStatement.getStructQueryParam();
if (structQueryParam.getQueryType().isNativeAggQuery()
|| CollectionUtils.isEmpty(structQueryParam.getAggregators())) {
StructQuery structQuery = queryStatement.getStructQuery();
if (structQuery.getQueryType().isNativeAggQuery()
|| CollectionUtils.isEmpty(structQuery.getAggregators())) {
return false;
}
int nonSumFunction = 0;
for (Aggregator agg : structQueryParam.getAggregators()) {
for (Aggregator agg : structQuery.getAggregators()) {
if (agg.getFunc() == null || "".equals(agg.getFunc())) {
return false;
}
@@ -60,14 +59,14 @@ public class MetricRatioConverter implements QueryConverter {
}
@Override
public void convert(QueryStatement queryStatement) throws Exception {
public void parse(QueryStatement queryStatement) throws Exception {
Database database = queryStatement.getOntology().getDatabase();
generateRatioSql(queryStatement, database.getType(), database.getVersion());
}
/** Ratio */
public boolean isRatioAccept(StructQueryParam structQueryParam) {
Long ratioFuncNum = structQueryParam.getAggregators().stream()
public boolean isRatioAccept(StructQuery structQuery) {
Long ratioFuncNum = structQuery.getAggregators().stream()
.filter(f -> (f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)
|| f.getFunc().equals(AggOperatorEnum.RATIO_OVER)))
.count();
@@ -80,20 +79,20 @@ public class MetricRatioConverter implements QueryConverter {
public void generateRatioSql(QueryStatement queryStatement, EngineType engineTypeEnum,
String version) throws Exception {
SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class);
StructQueryParam structQueryParam = queryStatement.getStructQueryParam();
check(structQueryParam);
StructQuery structQuery = queryStatement.getStructQuery();
check(structQuery);
queryStatement.setEnableOptimize(false);
OntologyQueryParam ontologyQueryParam = queryStatement.getOntologyQueryParam();
ontologyQueryParam.setAggOption(AggOption.AGGREGATION);
OntologyQuery ontologyQuery = queryStatement.getOntologyQuery();
ontologyQuery.setAggOption(AggOption.AGGREGATION);
String metricTableName = "v_metric_tb_tmp";
boolean isOver = isOverRatio(structQueryParam);
boolean isOver = isOverRatio(structQuery);
String sql = "";
SqlQueryParam dsParam = queryStatement.getSqlQueryParam();
SqlQuery dsParam = queryStatement.getSqlQuery();
dsParam.setTable(metricTableName);
switch (engineTypeEnum) {
case H2:
sql = new H2EngineSql().sql(structQueryParam, isOver, true, metricTableName);
sql = new H2EngineSql().sql(structQuery, isOver, true, metricTableName);
break;
case MYSQL:
case DORIS:
@@ -102,10 +101,10 @@ public class MetricRatioConverter implements QueryConverter {
dsParam.setSupportWith(false);
}
if (!engineTypeEnum.equals(engineTypeEnum.CLICKHOUSE)) {
sql = new MysqlEngineSql().sql(structQueryParam, isOver,
dsParam.isSupportWith(), metricTableName);
sql = new MysqlEngineSql().sql(structQuery, isOver, dsParam.isSupportWith(),
metricTableName);
} else {
sql = new CkEngineSql().sql(structQueryParam, isOver, dsParam.isSupportWith(),
sql = new CkEngineSql().sql(structQuery, isOver, dsParam.isSupportWith(),
metricTableName);
}
break;
@@ -116,8 +115,8 @@ public class MetricRatioConverter implements QueryConverter {
public class H2EngineSql implements EngineSql {
public String getOverSelect(StructQueryParam structQueryParam, boolean isOver) {
String aggStr = structQueryParam.getAggregators().stream().map(f -> {
public String getOverSelect(StructQuery structQuery, boolean isOver) {
String aggStr = structQuery.getAggregators().stream().map(f -> {
if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER)
|| f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
return String.format("( (%s-%s_roll)/cast(%s_roll as DOUBLE) ) as %s_%s,%s",
@@ -127,44 +126,43 @@ public class MetricRatioConverter implements QueryConverter {
return f.getColumn();
}
}).collect(Collectors.joining(","));
return CollectionUtils.isEmpty(structQueryParam.getGroups()) ? aggStr
: String.join(",", structQueryParam.getGroups()) + "," + aggStr;
return CollectionUtils.isEmpty(structQuery.getGroups()) ? aggStr
: String.join(",", structQuery.getGroups()) + "," + aggStr;
}
public String getTimeSpan(StructQueryParam structQueryParam, boolean isOver,
boolean isAdd) {
if (Objects.nonNull(structQueryParam.getDateInfo())) {
public String getTimeSpan(StructQuery structQuery, boolean isOver, boolean isAdd) {
if (Objects.nonNull(structQuery.getDateInfo())) {
String addStr = isAdd ? "" : "-";
if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.DAY)) {
if (structQuery.getDateInfo().getPeriod().equals(DatePeriodEnum.DAY)) {
return "day," + (isOver ? addStr + "7" : addStr + "1");
}
if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) {
if (structQuery.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) {
return isOver ? "month," + addStr + "1" : "day," + addStr + "7";
}
if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH.MONTH)) {
if (structQuery.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH.MONTH)) {
return isOver ? "year," + addStr + "1" : "month," + addStr + "1";
}
}
return "";
}
public String getJoinOn(StructQueryParam structQueryParam, boolean isOver, String aliasLeft,
public String getJoinOn(StructQuery structQuery, boolean isOver, String aliasLeft,
String aliasRight) {
String timeDim = getTimeDim(structQueryParam);
String timeSpan = getTimeSpan(structQueryParam, isOver, true);
String aggStr = structQueryParam.getAggregators().stream().map(f -> {
String timeDim = getTimeDim(structQuery);
String timeSpan = getTimeSpan(structQuery, isOver, true);
String aggStr = structQuery.getAggregators().stream().map(f -> {
if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER)
|| f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) {
if (structQuery.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) {
return String.format(
"%s is not null and %s = FORMATDATETIME(DATEADD(%s,CONCAT(%s,'-01')),'yyyy-MM') ",
aliasRight + timeDim, aliasLeft + timeDim, timeSpan,
aliasRight + timeDim);
}
if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK)
if (structQuery.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK)
&& isOver) {
return String.format(" DATE_TRUNC('week',DATEADD(%s,%s) ) = %s ",
getTimeSpan(structQueryParam, isOver, false), aliasLeft + timeDim,
getTimeSpan(structQuery, isOver, false), aliasLeft + timeDim,
aliasRight + timeDim);
}
return String.format("%s = TIMESTAMPADD(%s,%s) ", aliasLeft + timeDim, timeSpan,
@@ -174,7 +172,7 @@ public class MetricRatioConverter implements QueryConverter {
}
}).collect(Collectors.joining(" and "));
List<String> groups = new ArrayList<>();
for (String group : structQueryParam.getGroups()) {
for (String group : structQuery.getGroups()) {
if (group.equalsIgnoreCase(timeDim)) {
continue;
}
@@ -185,36 +183,36 @@ public class MetricRatioConverter implements QueryConverter {
}
@Override
public String sql(StructQueryParam structQueryParam, boolean isOver, boolean asWith,
public String sql(StructQuery structQuery, boolean isOver, boolean asWith,
String metricSql) {
String sql = String.format(
"select %s from ( select %s , %s from %s t0 left join %s t1 on %s ) metric_tb_src %s %s ",
getOverSelect(structQueryParam, isOver), getAllSelect(structQueryParam, "t0."),
getAllJoinSelect(structQueryParam, "t1."), metricSql, metricSql,
getJoinOn(structQueryParam, isOver, "t0.", "t1."), getOrderBy(structQueryParam),
getLimit(structQueryParam));
getOverSelect(structQuery, isOver), getAllSelect(structQuery, "t0."),
getAllJoinSelect(structQuery, "t1."), metricSql, metricSql,
getJoinOn(structQuery, isOver, "t0.", "t1."), getOrderBy(structQuery),
getLimit(structQuery));
return sql;
}
}
public class CkEngineSql extends MysqlEngineSql {
public String getJoinOn(StructQueryParam structQueryParam, boolean isOver, String aliasLeft,
public String getJoinOn(StructQuery structQuery, boolean isOver, String aliasLeft,
String aliasRight) {
String timeDim = getTimeDim(structQueryParam);
String timeSpan = "INTERVAL " + getTimeSpan(structQueryParam, isOver, true);
String aggStr = structQueryParam.getAggregators().stream().map(f -> {
String timeDim = getTimeDim(structQuery);
String timeSpan = "INTERVAL " + getTimeSpan(structQuery, isOver, true);
String aggStr = structQuery.getAggregators().stream().map(f -> {
if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER)
|| f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) {
if (structQuery.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) {
return String.format(
"toDate(CONCAT(%s,'-01')) = date_add(toDate(CONCAT(%s,'-01')),%s) ",
aliasLeft + timeDim, aliasRight + timeDim, timeSpan);
}
if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK)
if (structQuery.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK)
&& isOver) {
return String.format("toMonday(date_add(%s ,INTERVAL %s) ) = %s",
aliasLeft + timeDim, getTimeSpan(structQueryParam, isOver, false),
aliasLeft + timeDim, getTimeSpan(structQuery, isOver, false),
aliasRight + timeDim);
}
return String.format("%s = date_add(%s,%s) ", aliasLeft + timeDim,
@@ -224,7 +222,7 @@ public class MetricRatioConverter implements QueryConverter {
}
}).collect(Collectors.joining(" and "));
List<String> groups = new ArrayList<>();
for (String group : structQueryParam.getGroups()) {
for (String group : structQuery.getGroups()) {
if (group.equalsIgnoreCase(timeDim)) {
continue;
}
@@ -235,49 +233,46 @@ public class MetricRatioConverter implements QueryConverter {
}
@Override
public String sql(StructQueryParam structQueryParam, boolean isOver, boolean asWith,
public String sql(StructQuery structQuery, boolean isOver, boolean asWith,
String metricSql) {
if (!asWith) {
return String.format(
"select %s from ( select %s , %s from %s t0 left join %s t1 on %s ) metric_tb_src %s %s ",
getOverSelect(structQueryParam, isOver),
getAllSelect(structQueryParam, "t0."),
getAllJoinSelect(structQueryParam, "t1."), metricSql, metricSql,
getJoinOn(structQueryParam, isOver, "t0.", "t1."),
getOrderBy(structQueryParam), getLimit(structQueryParam));
getOverSelect(structQuery, isOver), getAllSelect(structQuery, "t0."),
getAllJoinSelect(structQuery, "t1."), metricSql, metricSql,
getJoinOn(structQuery, isOver, "t0.", "t1."), getOrderBy(structQuery),
getLimit(structQuery));
}
return String.format(
",t0 as (select * from %s),t1 as (select * from %s) select %s from ( select %s , %s "
+ "from t0 left join t1 on %s ) metric_tb_src %s %s ",
metricSql, metricSql, getOverSelect(structQueryParam, isOver),
getAllSelect(structQueryParam, "t0."),
getAllJoinSelect(structQueryParam, "t1."),
getJoinOn(structQueryParam, isOver, "t0.", "t1."), getOrderBy(structQueryParam),
getLimit(structQueryParam));
metricSql, metricSql, getOverSelect(structQuery, isOver),
getAllSelect(structQuery, "t0."), getAllJoinSelect(structQuery, "t1."),
getJoinOn(structQuery, isOver, "t0.", "t1."), getOrderBy(structQuery),
getLimit(structQuery));
}
}
public class MysqlEngineSql implements EngineSql {
public String getTimeSpan(StructQueryParam structQueryParam, boolean isOver,
boolean isAdd) {
if (Objects.nonNull(structQueryParam.getDateInfo())) {
public String getTimeSpan(StructQuery structQuery, boolean isOver, boolean isAdd) {
if (Objects.nonNull(structQuery.getDateInfo())) {
String addStr = isAdd ? "" : "-";
if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.DAY)) {
if (structQuery.getDateInfo().getPeriod().equals(DatePeriodEnum.DAY)) {
return isOver ? addStr + "7 day" : addStr + "1 day";
}
if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK)) {
if (structQuery.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK)) {
return isOver ? addStr + "1 month" : addStr + "7 day";
}
if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) {
if (structQuery.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) {
return isOver ? addStr + "1 year" : addStr + "1 month";
}
}
return "";
}
public String getOverSelect(StructQueryParam structQueryParam, boolean isOver) {
String aggStr = structQueryParam.getAggregators().stream().map(f -> {
public String getOverSelect(StructQuery structQuery, boolean isOver) {
String aggStr = structQuery.getAggregators().stream().map(f -> {
if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER)
|| f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
return String.format("if(%s_roll!=0, (%s-%s_roll)/%s_roll , 0) as %s_%s,%s",
@@ -287,26 +282,26 @@ public class MetricRatioConverter implements QueryConverter {
return f.getColumn();
}
}).collect(Collectors.joining(","));
return CollectionUtils.isEmpty(structQueryParam.getGroups()) ? aggStr
: String.join(",", structQueryParam.getGroups()) + "," + aggStr;
return CollectionUtils.isEmpty(structQuery.getGroups()) ? aggStr
: String.join(",", structQuery.getGroups()) + "," + aggStr;
}
public String getJoinOn(StructQueryParam structQueryParam, boolean isOver, String aliasLeft,
public String getJoinOn(StructQuery structQuery, boolean isOver, String aliasLeft,
String aliasRight) {
String timeDim = getTimeDim(structQueryParam);
String timeSpan = "INTERVAL " + getTimeSpan(structQueryParam, isOver, true);
String aggStr = structQueryParam.getAggregators().stream().map(f -> {
String timeDim = getTimeDim(structQuery);
String timeSpan = "INTERVAL " + getTimeSpan(structQuery, isOver, true);
String aggStr = structQuery.getAggregators().stream().map(f -> {
if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER)
|| f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) {
if (structQuery.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) {
return String.format(
"%s = DATE_FORMAT(date_add(CONCAT(%s,'-01'), %s),'%%Y-%%m') ",
aliasLeft + timeDim, aliasRight + timeDim, timeSpan);
}
if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK)
if (structQuery.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK)
&& isOver) {
return String.format("to_monday(date_add(%s ,INTERVAL %s) ) = %s",
aliasLeft + timeDim, getTimeSpan(structQueryParam, isOver, false),
aliasLeft + timeDim, getTimeSpan(structQuery, isOver, false),
aliasRight + timeDim);
}
return String.format("%s = date_add(%s,%s) ", aliasLeft + timeDim,
@@ -316,7 +311,7 @@ public class MetricRatioConverter implements QueryConverter {
}
}).collect(Collectors.joining(" and "));
List<String> groups = new ArrayList<>();
for (String group : structQueryParam.getGroups()) {
for (String group : structQuery.getGroups()) {
if (group.equalsIgnoreCase(timeDim)) {
continue;
}
@@ -327,53 +322,52 @@ public class MetricRatioConverter implements QueryConverter {
}
@Override
public String sql(StructQueryParam structQueryParam, boolean isOver, boolean asWith,
public String sql(StructQuery structQuery, boolean isOver, boolean asWith,
String metricSql) {
String sql = String.format(
"select %s from ( select %s , %s from %s t0 left join %s t1 on %s ) metric_tb_src %s %s ",
getOverSelect(structQueryParam, isOver), getAllSelect(structQueryParam, "t0."),
getAllJoinSelect(structQueryParam, "t1."), metricSql, metricSql,
getJoinOn(structQueryParam, isOver, "t0.", "t1."), getOrderBy(structQueryParam),
getLimit(structQueryParam));
getOverSelect(structQuery, isOver), getAllSelect(structQuery, "t0."),
getAllJoinSelect(structQuery, "t1."), metricSql, metricSql,
getJoinOn(structQuery, isOver, "t0.", "t1."), getOrderBy(structQuery),
getLimit(structQuery));
return sql;
}
}
private String getAllJoinSelect(StructQueryParam structQueryParam, String alias) {
String aggStr = structQueryParam.getAggregators().stream()
private String getAllJoinSelect(StructQuery structQuery, String alias) {
String aggStr = structQuery.getAggregators().stream()
.map(f -> getSelectField(f, alias) + " as " + getSelectField(f, "") + "_roll")
.collect(Collectors.joining(","));
List<String> groups = new ArrayList<>();
for (String group : structQueryParam.getGroups()) {
for (String group : structQuery.getGroups()) {
groups.add(alias + group + " as " + group + "_roll");
}
return CollectionUtils.isEmpty(groups) ? aggStr : String.join(",", groups) + "," + aggStr;
}
private String getGroupDimWithOutTime(StructQueryParam structQueryParam) {
String timeDim = getTimeDim(structQueryParam);
return structQueryParam.getGroups().stream().filter(f -> !f.equalsIgnoreCase(timeDim))
private String getGroupDimWithOutTime(StructQuery structQuery) {
String timeDim = getTimeDim(structQuery);
return structQuery.getGroups().stream().filter(f -> !f.equalsIgnoreCase(timeDim))
.collect(Collectors.joining(","));
}
private static String getTimeDim(StructQueryParam structQueryParam) {
private static String getTimeDim(StructQuery structQuery) {
DateModeUtils dateModeUtils = ContextUtils.getContext().getBean(DateModeUtils.class);
return dateModeUtils.getSysDateCol(structQueryParam.getDateInfo());
return dateModeUtils.getSysDateCol(structQuery.getDateInfo());
}
private static String getLimit(StructQueryParam structQueryParam) {
if (structQueryParam != null && structQueryParam.getLimit() != null
&& structQueryParam.getLimit() > 0) {
return " limit " + String.valueOf(structQueryParam.getLimit());
private static String getLimit(StructQuery structQuery) {
if (structQuery != null && structQuery.getLimit() != null && structQuery.getLimit() > 0) {
return " limit " + String.valueOf(structQuery.getLimit());
}
return "";
}
private String getAllSelect(StructQueryParam structQueryParam, String alias) {
String aggStr = structQueryParam.getAggregators().stream()
.map(f -> getSelectField(f, alias)).collect(Collectors.joining(","));
return CollectionUtils.isEmpty(structQueryParam.getGroups()) ? aggStr
: alias + String.join("," + alias, structQueryParam.getGroups()) + "," + aggStr;
private String getAllSelect(StructQuery structQuery, String alias) {
String aggStr = structQuery.getAggregators().stream().map(f -> getSelectField(f, alias))
.collect(Collectors.joining(","));
return CollectionUtils.isEmpty(structQuery.getGroups()) ? aggStr
: alias + String.join("," + alias, structQuery.getGroups()) + "," + aggStr;
}
private String getSelectField(final Aggregator agg, String alias) {
@@ -385,32 +379,32 @@ public class MetricRatioConverter implements QueryConverter {
return sqlGenerateUtils.getSelectField(agg);
}
private String getGroupBy(StructQueryParam structQueryParam) {
if (CollectionUtils.isEmpty(structQueryParam.getGroups())) {
private String getGroupBy(StructQuery structQuery) {
if (CollectionUtils.isEmpty(structQuery.getGroups())) {
return "";
}
return "group by " + String.join(",", structQueryParam.getGroups());
return "group by " + String.join(",", structQuery.getGroups());
}
private static String getOrderBy(StructQueryParam structQueryParam) {
return "order by " + getTimeDim(structQueryParam) + " desc";
private static String getOrderBy(StructQuery structQuery) {
return "order by " + getTimeDim(structQuery) + " desc";
}
private boolean isOverRatio(StructQueryParam structQueryParam) {
Long overCt = structQueryParam.getAggregators().stream()
private boolean isOverRatio(StructQuery structQuery) {
Long overCt = structQuery.getAggregators().stream()
.filter(f -> f.getFunc().equals(AggOperatorEnum.RATIO_OVER)).count();
return overCt > 0;
}
private void check(StructQueryParam structQueryParam) throws Exception {
Long ratioOverNum = structQueryParam.getAggregators().stream()
private void check(StructQuery structQuery) throws Exception {
Long ratioOverNum = structQuery.getAggregators().stream()
.filter(f -> f.getFunc().equals(AggOperatorEnum.RATIO_OVER)).count();
Long ratioRollNum = structQueryParam.getAggregators().stream()
Long ratioRollNum = structQuery.getAggregators().stream()
.filter(f -> f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)).count();
if (ratioOverNum > 0 && ratioRollNum > 0) {
throw new Exception("not support over ratio and roll ratio together ");
}
if (getTimeDim(structQueryParam).isEmpty()) {
if (getTimeDim(structQuery).isEmpty()) {
throw new Exception("miss time filter");
}
}

View File

@@ -1,24 +1,28 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite;
package com.tencent.supersonic.headless.core.translator.parser;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.parser.QueryParser;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Ontology;
import com.tencent.supersonic.headless.core.translator.parser.calcite.RuntimeOptions;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.calcite.SqlBuilder;
import com.tencent.supersonic.headless.core.pojo.Ontology;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import java.util.Objects;
/** the calcite parse implements */
@Component("CalciteQueryParser")
@Component("OntologyQueryParser")
@Slf4j
public class CalciteQueryParser implements QueryParser {
public class OntologyQueryParser implements QueryParser {
@Override
public boolean accept(QueryStatement queryStatement) {
return Objects.nonNull(queryStatement.getOntologyQuery());
}
@Override
public void parse(QueryStatement queryStatement) throws Exception {
Ontology ontology = queryStatement.getOntology();
if (ontology == null) {
queryStatement.setErrMsg("No ontology could be found");
return;
}
S2CalciteSchema semanticSchema = S2CalciteSchema.builder()
.schemaKey("DATASET_" + queryStatement.getDataSetId()).ontology(ontology)
.runtimeOptions(RuntimeOptions.builder().minMaxTime(queryStatement.getMinMaxTime())

View File

@@ -4,5 +4,8 @@ import com.tencent.supersonic.headless.core.pojo.QueryStatement;
/** A query parser generates physical SQL for the QueryStatement. */
public interface QueryParser {
// Returns true if this parser knows how to handle the given statement;
// implementations test which query object the statement carries
// (e.g. struct/sql/ontology query) and whether it is S2SQL.
boolean accept(QueryStatement queryStatement);
// Parses the statement in place: implementations read the carried query
// object and populate the statement (e.g. generated SQL / ontology query).
// Failures are signalled via exception or an error message on the statement.
void parse(QueryStatement queryStatement) throws Exception;
}

View File

@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.converter;
package com.tencent.supersonic.headless.core.translator.parser;
import com.tencent.supersonic.common.jsqlparser.SqlAsHelper;
import com.tencent.supersonic.common.jsqlparser.SqlReplaceHelper;
@@ -14,8 +14,8 @@ import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.api.pojo.enums.MetricType;
import com.tencent.supersonic.headless.api.pojo.response.*;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQueryParam;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
import com.tencent.supersonic.headless.core.pojo.SqlQuery;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
@@ -27,62 +27,62 @@ import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;
@Component("SqlQueryConverter")
@Component("SqlQueryParser")
@Slf4j
public class SqlQueryConverter implements QueryConverter {
public class SqlQueryParser implements QueryParser {
@Override
public boolean accept(QueryStatement queryStatement) {
return Objects.nonNull(queryStatement.getSqlQueryParam()) && queryStatement.getIsS2SQL();
return Objects.nonNull(queryStatement.getSqlQuery()) && queryStatement.getIsS2SQL();
}
@Override
public void convert(QueryStatement queryStatement) throws Exception {
public void parse(QueryStatement queryStatement) throws Exception {
convertNameToBizName(queryStatement);
rewriteOrderBy(queryStatement);
// fill sqlQuery
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp();
SqlQueryParam sqlQueryParam = queryStatement.getSqlQueryParam();
String tableName = SqlSelectHelper.getTableName(sqlQueryParam.getSql());
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchema();
SqlQuery sqlQuery = queryStatement.getSqlQuery();
String tableName = SqlSelectHelper.getTableName(sqlQuery.getSql());
if (StringUtils.isEmpty(tableName)) {
return;
}
sqlQueryParam.setTable(tableName.toLowerCase());
sqlQuery.setTable(tableName.toLowerCase());
SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class);
if (!sqlGenerateUtils.isSupportWith(
EngineType.fromString(semanticSchemaResp.getDatabaseResp().getType().toUpperCase()),
semanticSchemaResp.getDatabaseResp().getVersion())) {
sqlQueryParam.setSupportWith(false);
sqlQueryParam.setWithAlias(false);
sqlQuery.setSupportWith(false);
sqlQuery.setWithAlias(false);
}
// build ontologyQuery
List<String> allFields = SqlSelectHelper.getAllSelectFields(sqlQueryParam.getSql());
List<String> allFields = SqlSelectHelper.getAllSelectFields(sqlQuery.getSql());
List<MetricSchemaResp> metricSchemas = getMetrics(semanticSchemaResp, allFields);
List<String> metrics =
metricSchemas.stream().map(SchemaItem::getBizName).collect(Collectors.toList());
Set<String> dimensions = getDimensions(semanticSchemaResp, allFields);
OntologyQueryParam ontologyQueryParam = new OntologyQueryParam();
ontologyQueryParam.getMetrics().addAll(metrics);
ontologyQueryParam.getDimensions().addAll(dimensions);
AggOption sqlQueryAggOption = getAggOption(sqlQueryParam.getSql(), metricSchemas);
OntologyQuery ontologyQuery = new OntologyQuery();
ontologyQuery.getMetrics().addAll(metrics);
ontologyQuery.getDimensions().addAll(dimensions);
AggOption sqlQueryAggOption = getAggOption(sqlQuery.getSql(), metricSchemas);
// if sql query itself has aggregation, ontology query just returns detail
if (sqlQueryAggOption.equals(AggOption.AGGREGATION)) {
ontologyQueryParam.setAggOption(AggOption.NATIVE);
ontologyQuery.setAggOption(AggOption.NATIVE);
} else if (sqlQueryAggOption.equals(AggOption.NATIVE) && !metrics.isEmpty()) {
ontologyQueryParam.setAggOption(AggOption.DEFAULT);
ontologyQuery.setAggOption(AggOption.DEFAULT);
}
ontologyQueryParam.setNativeQuery(!AggOption.isAgg(ontologyQueryParam.getAggOption()));
queryStatement.setOntologyQueryParam(ontologyQueryParam);
ontologyQuery.setNativeQuery(!AggOption.isAgg(ontologyQuery.getAggOption()));
queryStatement.setOntologyQuery(ontologyQuery);
generateDerivedMetric(sqlGenerateUtils, queryStatement);
queryStatement.setSql(sqlQueryParam.getSql());
queryStatement.setSql(sqlQuery.getSql());
// replace sql fields for db, must called after convertNameToBizName
String sqlRewrite = replaceSqlFieldsForHanaDB(queryStatement, sqlQueryParam.getSql());
sqlQueryParam.setSql(sqlRewrite);
log.info("parse sqlQuery [{}] ", sqlQueryParam);
String sqlRewrite = replaceSqlFieldsForHanaDB(queryStatement, sqlQuery.getSql());
sqlQuery.setSql(sqlRewrite);
log.info("parse sqlQuery [{}] ", sqlQuery);
}
private AggOption getAggOption(String sql, List<MetricSchemaResp> metricSchemas) {
@@ -145,9 +145,9 @@ public class SqlQueryConverter implements QueryConverter {
private void generateDerivedMetric(SqlGenerateUtils sqlGenerateUtils,
QueryStatement queryStatement) {
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp();
SqlQueryParam sqlParam = queryStatement.getSqlQueryParam();
OntologyQueryParam ontologyParam = queryStatement.getOntologyQueryParam();
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchema();
SqlQuery sqlParam = queryStatement.getSqlQuery();
OntologyQuery ontologyParam = queryStatement.getOntologyQuery();
String sql = sqlParam.getSql();
Set<String> measures = new HashSet<>();
@@ -229,22 +229,20 @@ public class SqlQueryConverter implements QueryConverter {
/**
* special process for hanaDB,the sap hana DB don't support the chinese name as
* the column name,
* so we need to quote the column name after converting the convertNameToBizName
* called
* special process for hanaDB,the sap hana DB don't support the chinese name as the column name,
* so we need to quote the column name after converting the convertNameToBizName called
*
* sap hana DB will auto translate the colume to upper case letter if not
* quoted.
* also we need to quote the field name if it is a lower case letter.
* sap hana DB will auto translate the colume to upper case letter if not quoted. also we need
* to quote the field name if it is a lower case letter.
*
* @param queryStatement
* @param sql
* @return
*/
private String replaceSqlFieldsForHanaDB(QueryStatement queryStatement, String sql) {
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp();
if (!semanticSchemaResp.getDatabaseResp().getType().equalsIgnoreCase(EngineType.HANADB.getName())) {
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchema();
if (!semanticSchemaResp.getDatabaseResp().getType()
.equalsIgnoreCase(EngineType.HANADB.getName())) {
return sql;
}
Map<String, String> fieldNameToBizNameMap = getFieldNameToBizNameMap(semanticSchemaResp);
@@ -277,9 +275,9 @@ public class SqlQueryConverter implements QueryConverter {
}
private void convertNameToBizName(QueryStatement queryStatement) {
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp();
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchema();
Map<String, String> fieldNameToBizNameMap = getFieldNameToBizNameMap(semanticSchemaResp);
String sql = queryStatement.getSqlQueryParam().getSql();
String sql = queryStatement.getSqlQuery().getSql();
log.debug("dataSetId:{},convert name to bizName before:{}", queryStatement.getDataSetId(),
sql);
sql = SqlReplaceHelper.replaceFields(sql, fieldNameToBizNameMap, true);
@@ -288,15 +286,15 @@ public class SqlQueryConverter implements QueryConverter {
sql = SqlReplaceHelper.replaceTable(sql,
Constants.TABLE_PREFIX + queryStatement.getDataSetId());
log.debug("replaceTableName after:{}", sql);
queryStatement.getSqlQueryParam().setSql(sql);
queryStatement.getSqlQuery().setSql(sql);
}
private void rewriteOrderBy(QueryStatement queryStatement) {
// replace order by field with the select sequence number
String sql = queryStatement.getSqlQueryParam().getSql();
String sql = queryStatement.getSqlQuery().getSql();
String newSql = SqlReplaceHelper.replaceAggAliasOrderbyField(sql);
log.debug("replaceOrderAggSameAlias {} -> {}", sql, newSql);
queryStatement.getSqlQueryParam().setSql(newSql);
queryStatement.getSqlQuery().setSql(newSql);
}
protected Map<String, String> getFieldNameToBizNameMap(SemanticSchemaResp semanticSchemaResp) {

View File

@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.converter;
package com.tencent.supersonic.headless.core.translator.parser;
import com.tencent.supersonic.headless.api.pojo.enums.ModelDefineType;
import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
@@ -14,18 +14,17 @@ import java.util.List;
import java.util.Objects;
@Slf4j
@Component("SqlVariableConverter")
public class SqlVariableConverter implements QueryConverter {
@Component("SqlVariableParser")
public class SqlVariableParser implements QueryParser {
@Override
public boolean accept(QueryStatement queryStatement) {
return Objects.nonNull(queryStatement.getStructQueryParam())
&& !queryStatement.getIsS2SQL();
return Objects.nonNull(queryStatement.getStructQuery()) && !queryStatement.getIsS2SQL();
}
@Override
public void convert(QueryStatement queryStatement) {
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp();
public void parse(QueryStatement queryStatement) {
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchema();
List<ModelResp> modelResps = semanticSchemaResp.getModelResps();
if (CollectionUtils.isEmpty(modelResps)) {
return;
@@ -36,7 +35,7 @@ public class SqlVariableConverter implements QueryConverter {
String sqlParsed =
SqlVariableParseUtils.parse(modelResp.getModelDetail().getSqlQuery(),
modelResp.getModelDetail().getSqlVariables(),
queryStatement.getStructQueryParam().getParams());
queryStatement.getStructQuery().getParams());
DataModel dataModel =
queryStatement.getOntology().getDataModelMap().get(modelResp.getBizName());
dataModel.setSqlQuery(sqlParsed);

View File

@@ -0,0 +1,72 @@
package com.tencent.supersonic.headless.core.translator.parser;
import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.core.pojo.Database;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQuery;
import com.tencent.supersonic.headless.core.pojo.StructQuery;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import java.util.Objects;
import java.util.stream.Collectors;
// Translates a StructQuery (structured query: groups/aggregators/orders/limit)
// into (1) a physical SqlQuery over a placeholder table and (2) an OntologyQuery
// describing the dimensions/metrics/filters, both attached to the QueryStatement.
@Component("StructQueryParser")
@Slf4j
public class StructQueryParser implements QueryParser {
@Override
public boolean accept(QueryStatement queryStatement) {
// Only handles structured (non-S2SQL) queries.
return Objects.nonNull(queryStatement.getStructQuery()) && !queryStatement.getIsS2SQL();
}
@Override
public void parse(QueryStatement queryStatement) throws Exception {
SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class);
StructQuery structQuery = queryStatement.getStructQuery();
// Placeholder table name; presumably resolved to the real sub-query later
// in the translation pipeline — TODO confirm against downstream converters.
String dsTable = "t_1";
SqlQuery sqlParam = new SqlQuery();
sqlParam.setTable(dsTable);
// Default SQL assumes the engine supports WITH (CTE) clauses.
String sql = String.format("select %s from %s %s %s %s",
sqlGenerateUtils.getSelect(structQuery), dsTable,
sqlGenerateUtils.getGroupBy(structQuery), sqlGenerateUtils.getOrderBy(structQuery),
sqlGenerateUtils.getLimit(structQuery));
Database database = queryStatement.getOntology().getDatabase();
if (!sqlGenerateUtils.isSupportWith(database.getType(), database.getVersion())) {
// Engine lacks WITH support: mark the query accordingly and alias the
// table ("t0") so the same statement works without a CTE.
sqlParam.setSupportWith(false);
sql = String.format("select %s from %s t0 %s %s %s",
sqlGenerateUtils.getSelect(structQuery), dsTable,
sqlGenerateUtils.getGroupBy(structQuery),
sqlGenerateUtils.getOrderBy(structQuery),
sqlGenerateUtils.getLimit(structQuery));
}
sqlParam.setSql(sql);
queryStatement.setSqlQuery(sqlParam);
// Build the ontology-level view of the query: groups become dimensions,
// aggregator columns become metrics.
OntologyQuery ontologyQuery = new OntologyQuery();
ontologyQuery.getDimensions().addAll(structQuery.getGroups());
ontologyQuery.getMetrics().addAll(structQuery.getAggregators().stream()
.map(Aggregator::getColumn).collect(Collectors.toList()));
String where = sqlGenerateUtils.generateWhere(structQuery, null);
ontologyQuery.setWhere(where);
// No metrics => detail (NATIVE) query; otherwise use the default agg option.
if (ontologyQuery.getMetrics().isEmpty()) {
ontologyQuery.setAggOption(AggOption.NATIVE);
} else {
ontologyQuery.setAggOption(AggOption.DEFAULT);
}
ontologyQuery.setNativeQuery(structQuery.getQueryType().isNativeAggQuery());
// Copy ordering and limit through to the ontology query.
ontologyQuery.setOrder(structQuery.getOrders().stream()
.map(order -> new ColumnOrder(order.getColumn(), order.getDirection()))
.collect(Collectors.toList()));
ontologyQuery.setLimit(structQuery.getLimit());
queryStatement.setOntologyQuery(ontologyQuery);
log.info("parse structQuery [{}] ", queryStatement.getSqlQuery());
}
}

View File

@@ -2,9 +2,9 @@ package com.tencent.supersonic.headless.core.translator.parser.calcite;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.JoinRelation;
import com.tencent.supersonic.headless.core.pojo.JoinRelation;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Ontology;
import com.tencent.supersonic.headless.core.pojo.Ontology;
import lombok.Builder;
import lombok.Data;
import org.apache.calcite.schema.Schema;

View File

@@ -13,7 +13,7 @@ import com.tencent.supersonic.headless.core.translator.parser.calcite.render.Ren
import com.tencent.supersonic.headless.core.translator.parser.calcite.render.SourceRender;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParser;
@@ -25,7 +25,7 @@ import java.util.*;
public class SqlBuilder {
private final S2CalciteSchema schema;
private OntologyQueryParam ontologyQueryParam;
private OntologyQuery ontologyQuery;
private SqlValidatorScope scope;
private SqlNode parserNode;
private boolean isAgg = false;
@@ -36,11 +36,11 @@ public class SqlBuilder {
}
public String buildOntologySql(QueryStatement queryStatement) throws Exception {
this.ontologyQueryParam = queryStatement.getOntologyQueryParam();
if (ontologyQueryParam.getLimit() == null) {
ontologyQueryParam.setLimit(0L);
this.ontologyQuery = queryStatement.getOntologyQuery();
if (ontologyQuery.getLimit() == null) {
ontologyQuery.setLimit(0L);
}
this.aggOption = ontologyQueryParam.getAggOption();
this.aggOption = ontologyQuery.getAggOption();
buildParseNode();
Database database = queryStatement.getOntology().getDatabase();
@@ -51,8 +51,7 @@ public class SqlBuilder {
private void buildParseNode() throws Exception {
// find relevant data models
scope = SchemaBuilder.getScope(schema);
List<DataModel> dataModels =
DataModelNode.getQueryDataModels(scope, schema, ontologyQueryParam);
List<DataModel> dataModels = DataModelNode.getQueryDataModels(scope, schema, ontologyQuery);
if (dataModels == null || dataModels.isEmpty()) {
throw new Exception("data model not found");
}
@@ -69,14 +68,14 @@ public class SqlBuilder {
while (it.hasNext()) {
Renderer renderer = it.next();
if (previous != null) {
previous.render(ontologyQueryParam, dataModels, scope, schema, !isAgg);
previous.render(ontologyQuery, dataModels, scope, schema, !isAgg);
renderer.setTable(previous
.builderAs(DataModelNode.getNames(dataModels) + "_" + String.valueOf(i)));
i++;
}
previous = renderer;
}
builders.getLast().render(ontologyQueryParam, dataModels, scope, schema, !isAgg);
builders.getLast().render(ontologyQuery, dataModels, scope, schema, !isAgg);
parserNode = builders.getLast().builder();
}
@@ -87,7 +86,7 @@ public class SqlBuilder {
// default by dataModel time aggregation
if (Objects.nonNull(dataModel.getAggTime()) && !dataModel.getAggTime()
.equalsIgnoreCase(Constants.DIMENSION_TYPE_TIME_GRANULARITY_NONE)) {
if (!ontologyQueryParam.isNativeQuery()) {
if (!ontologyQuery.isNativeQuery()) {
return true;
}
}

View File

@@ -4,6 +4,9 @@ import com.google.common.collect.Lists;
import com.tencent.supersonic.common.calcite.Configuration;
import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.JoinRelation;
import com.tencent.supersonic.headless.core.pojo.Ontology;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.calcite.SchemaBuilder;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.*;
@@ -130,8 +133,8 @@ public class DataModelNode extends SemanticNode {
return dataModelList.stream().map(DataModel::getName).collect(Collectors.joining("_"));
}
public static void getQueryDimensionMeasure(Ontology ontology, OntologyQueryParam queryParam,
Set<String> queryDimensions, Set<String> queryMeasures) {
public static void getQueryDimensionMeasure(Ontology ontology, OntologyQuery queryParam,
Set<String> queryDimensions, Set<String> queryMeasures) {
queryDimensions.addAll(queryParam.getDimensions().stream()
.map(d -> d.contains(Constants.DIMENSION_IDENTIFY)
? d.split(Constants.DIMENSION_IDENTIFY)[1]
@@ -146,9 +149,9 @@ public class DataModelNode extends SemanticNode {
.forEach(queryMeasures::add);
}
public static void mergeQueryFilterDimensionMeasure(Ontology ontology,
OntologyQueryParam queryParam, Set<String> dimensions, Set<String> measures,
SqlValidatorScope scope) throws Exception {
public static void mergeQueryFilterDimensionMeasure(Ontology ontology, OntologyQuery queryParam,
Set<String> dimensions, Set<String> measures, SqlValidatorScope scope)
throws Exception {
EngineType engineType = ontology.getDatabase().getType();
if (Objects.nonNull(queryParam.getWhere()) && !queryParam.getWhere().isEmpty()) {
Set<String> filterConditions = new HashSet<>();
@@ -173,7 +176,7 @@ public class DataModelNode extends SemanticNode {
}
public static List<DataModel> getQueryDataModels(SqlValidatorScope scope,
S2CalciteSchema schema, OntologyQueryParam queryParam) throws Exception {
S2CalciteSchema schema, OntologyQuery queryParam) throws Exception {
Ontology ontology = schema.getOntology();
// get query measures and dimensions
Set<String> queryMeasures = new HashSet<>();
@@ -282,7 +285,7 @@ public class DataModelNode extends SemanticNode {
}
private static List<DataModel> findRelatedModelsByRelation(Ontology ontology,
OntologyQueryParam queryParam, DataModel baseDataModel, Set<String> queryDimensions,
OntologyQuery queryParam, DataModel baseDataModel, Set<String> queryDimensions,
Set<String> queryMeasures) {
Set<String> joinDataModelNames = new HashSet<>();
List<DataModel> joinDataModels = new ArrayList<>();

View File

@@ -86,7 +86,7 @@ public abstract class SemanticNode {
expression = String.format("`%s`", expression);
}
}
SqlParser sqlParser =
SqlParser sqlParser =
SqlParser.create(expression, Configuration.getParserConfig(engineType));
SqlNode sqlNode = sqlParser.parseExpression();
scope.validateExpr(sqlNode);

View File

@@ -9,7 +9,7 @@ import com.tencent.supersonic.headless.core.translator.parser.calcite.node.Seman
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParserPos;
@@ -26,7 +26,7 @@ import java.util.stream.Collectors;
public class FilterRender extends Renderer {
@Override
public void render(OntologyQueryParam metricCommand, List<DataModel> dataModels,
public void render(OntologyQuery metricCommand, List<DataModel> dataModels,
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
TableView tableView = super.tableView;
SqlNode filterNode = null;

View File

@@ -13,10 +13,10 @@ import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Identify;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.JoinRelation;
import com.tencent.supersonic.headless.core.pojo.JoinRelation;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Materialization;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.JoinConditionType;
import org.apache.calcite.sql.SqlBasicCall;
@@ -47,7 +47,7 @@ import java.util.stream.Collectors;
public class JoinRender extends Renderer {
@Override
public void render(OntologyQueryParam metricCommand, List<DataModel> dataModels,
public void render(OntologyQuery metricCommand, List<DataModel> dataModels,
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
String queryWhere = metricCommand.getWhere();
EngineType engineType = schema.getOntology().getDatabase().getType();

View File

@@ -7,7 +7,7 @@ import com.tencent.supersonic.headless.core.translator.parser.calcite.TableView;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.MetricNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.SemanticNode;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
@@ -22,7 +22,7 @@ import java.util.List;
public class OutputRender extends Renderer {
@Override
public void render(OntologyQueryParam metricCommand, List<DataModel> dataModels,
public void render(OntologyQuery metricCommand, List<DataModel> dataModels,
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
TableView selectDataSet = super.tableView;
EngineType engineType = schema.getOntology().getDatabase().getType();

View File

@@ -11,7 +11,7 @@ import com.tencent.supersonic.headless.core.translator.parser.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Identify;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Measure;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import lombok.Data;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
@@ -114,6 +114,6 @@ public abstract class Renderer {
return SemanticNode.buildAs(alias, tableView.build());
}
public abstract void render(OntologyQueryParam metricCommand, List<DataModel> dataModels,
public abstract void render(OntologyQuery metricCommand, List<DataModel> dataModels,
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception;
}

View File

@@ -16,7 +16,7 @@ import com.tencent.supersonic.headless.core.translator.parser.s2sql.Identify;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Materialization;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Measure;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
@@ -334,9 +334,9 @@ public class SourceRender extends Renderer {
}
}
public void render(OntologyQueryParam ontologyQueryParam, List<DataModel> dataModels,
public void render(OntologyQuery ontologyQuery, List<DataModel> dataModels,
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
String queryWhere = ontologyQueryParam.getWhere();
String queryWhere = ontologyQuery.getWhere();
Set<String> whereFields = new HashSet<>();
List<String> fieldWhere = new ArrayList<>();
EngineType engineType = schema.getOntology().getDatabase().getType();
@@ -347,13 +347,13 @@ public class SourceRender extends Renderer {
}
if (dataModels.size() == 1) {
DataModel dataModel = dataModels.get(0);
super.tableView = renderOne("", fieldWhere, ontologyQueryParam.getMetrics(),
ontologyQueryParam.getDimensions(), ontologyQueryParam.getWhere(), dataModel,
scope, schema, nonAgg);
super.tableView = renderOne("", fieldWhere, ontologyQuery.getMetrics(),
ontologyQuery.getDimensions(), ontologyQuery.getWhere(), dataModel, scope,
schema, nonAgg);
return;
}
JoinRender joinRender = new JoinRender();
joinRender.render(ontologyQueryParam, dataModels, scope, schema, nonAgg);
joinRender.render(ontologyQuery, dataModels, scope, schema, nonAgg);
super.tableView = joinRender.getTableView();
}
}

View File

@@ -4,7 +4,6 @@ import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.headless.core.cache.QueryCache;
import com.tencent.supersonic.headless.core.executor.QueryAccelerator;
import com.tencent.supersonic.headless.core.executor.QueryExecutor;
import com.tencent.supersonic.headless.core.translator.converter.QueryConverter;
import com.tencent.supersonic.headless.core.translator.optimizer.QueryOptimizer;
import com.tencent.supersonic.headless.core.translator.parser.QueryParser;
import lombok.extern.slf4j.Slf4j;
@@ -20,29 +19,20 @@ import java.util.stream.Collectors;
@Slf4j
public class ComponentFactory {
private static List<QueryConverter> queryConverters = new ArrayList<>();
private static Map<String, QueryOptimizer> queryOptimizers = new HashMap<>();
private static List<QueryExecutor> queryExecutors = new ArrayList<>();
private static List<QueryAccelerator> queryAccelerators = new ArrayList<>();
private static QueryParser queryParser;
private static List<QueryParser> queryParsers = new ArrayList<>();
private static QueryCache queryCache;
static {
initQueryConverter();
initQueryOptimizer();
initQueryExecutors();
initQueryAccelerators();
initQueryParser();
initQueryParsers();
initQueryCache();
}
public static List<QueryConverter> getQueryConverters() {
if (queryConverters.isEmpty()) {
initQueryConverter();
}
return queryConverters;
}
public static List<QueryOptimizer> getQueryOptimizers() {
if (queryOptimizers.isEmpty()) {
initQueryOptimizer();
@@ -64,11 +54,11 @@ public class ComponentFactory {
return queryAccelerators;
}
public static QueryParser getQueryParser() {
if (queryParser == null) {
initQueryParser();
public static List<QueryParser> getQueryParser() {
if (queryParsers.isEmpty()) {
initQueryParsers();
}
return queryParser;
return queryParsers;
}
public static QueryCache getQueryCache() {
@@ -92,23 +82,15 @@ public class ComponentFactory {
}
private static void initQueryExecutors() {
// queryExecutors.add(ContextUtils.getContext().getBean("JdbcExecutor",
// JdbcExecutor.class));
init(QueryExecutor.class, queryExecutors);
}
private static void initQueryAccelerators() {
// queryExecutors.add(ContextUtils.getContext().getBean("JdbcExecutor",
// JdbcExecutor.class));
init(QueryAccelerator.class, queryAccelerators);
}
private static void initQueryConverter() {
init(QueryConverter.class, queryConverters);
}
private static void initQueryParser() {
queryParser = init(QueryParser.class);
private static void initQueryParsers() {
init(QueryParser.class, queryParsers);
}
private static void initQueryCache() {

View File

@@ -19,7 +19,7 @@ import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import com.tencent.supersonic.headless.core.config.ExecutorConfig;
import com.tencent.supersonic.headless.core.pojo.StructQueryParam;
import com.tencent.supersonic.headless.core.pojo.StructQuery;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.ImmutablePair;
@@ -85,26 +85,25 @@ public class SqlGenerateUtils {
return selectSql;
}
public String getLimit(StructQueryParam structQueryParam) {
if (structQueryParam != null && structQueryParam.getLimit() != null
&& structQueryParam.getLimit() > 0) {
return " limit " + structQueryParam.getLimit();
public String getLimit(StructQuery structQuery) {
if (structQuery != null && structQuery.getLimit() != null && structQuery.getLimit() > 0) {
return " limit " + structQuery.getLimit();
}
return "";
}
public String getSelect(StructQueryParam structQueryParam) {
String aggStr = structQueryParam.getAggregators().stream().map(this::getSelectField)
public String getSelect(StructQuery structQuery) {
String aggStr = structQuery.getAggregators().stream().map(this::getSelectField)
.collect(Collectors.joining(","));
return CollectionUtils.isEmpty(structQueryParam.getGroups()) ? aggStr
: String.join(",", structQueryParam.getGroups()) + "," + aggStr;
return CollectionUtils.isEmpty(structQuery.getGroups()) ? aggStr
: String.join(",", structQuery.getGroups()) + "," + aggStr;
}
public String getSelect(StructQueryParam structQueryParam, Map<String, String> deriveMetrics) {
String aggStr = structQueryParam.getAggregators().stream()
public String getSelect(StructQuery structQuery, Map<String, String> deriveMetrics) {
String aggStr = structQuery.getAggregators().stream()
.map(a -> getSelectField(a, deriveMetrics)).collect(Collectors.joining(","));
return CollectionUtils.isEmpty(structQueryParam.getGroups()) ? aggStr
: String.join(",", structQueryParam.getGroups()) + "," + aggStr;
return CollectionUtils.isEmpty(structQuery.getGroups()) ? aggStr
: String.join(",", structQuery.getGroups()) + "," + aggStr;
}
public String getSelectField(final Aggregator agg) {
@@ -129,46 +128,46 @@ public class SqlGenerateUtils {
return deriveMetrics.get(agg.getColumn());
}
public String getGroupBy(StructQueryParam structQueryParam) {
if (CollectionUtils.isEmpty(structQueryParam.getGroups())) {
public String getGroupBy(StructQuery structQuery) {
if (CollectionUtils.isEmpty(structQuery.getGroups())) {
return "";
}
return "group by " + String.join(",", structQueryParam.getGroups());
return "group by " + String.join(",", structQuery.getGroups());
}
public String getOrderBy(StructQueryParam structQueryParam) {
if (CollectionUtils.isEmpty(structQueryParam.getOrders())) {
public String getOrderBy(StructQuery structQuery) {
if (CollectionUtils.isEmpty(structQuery.getOrders())) {
return "";
}
return "order by " + structQueryParam.getOrders().stream()
return "order by " + structQuery.getOrders().stream()
.map(order -> " " + order.getColumn() + " " + order.getDirection() + " ")
.collect(Collectors.joining(","));
}
public String getOrderBy(StructQueryParam structQueryParam, Map<String, String> deriveMetrics) {
if (CollectionUtils.isEmpty(structQueryParam.getOrders())) {
public String getOrderBy(StructQuery structQuery, Map<String, String> deriveMetrics) {
if (CollectionUtils.isEmpty(structQuery.getOrders())) {
return "";
}
if (!structQueryParam.getOrders().stream()
if (!structQuery.getOrders().stream()
.anyMatch(o -> deriveMetrics.containsKey(o.getColumn()))) {
return getOrderBy(structQueryParam);
return getOrderBy(structQuery);
}
return "order by " + structQueryParam.getOrders().stream()
return "order by " + structQuery.getOrders().stream()
.map(order -> " " + (deriveMetrics.containsKey(order.getColumn())
? deriveMetrics.get(order.getColumn())
: order.getColumn()) + " " + order.getDirection() + " ")
.collect(Collectors.joining(","));
}
public String generateWhere(StructQueryParam structQueryParam, ItemDateResp itemDateResp) {
public String generateWhere(StructQuery structQuery, ItemDateResp itemDateResp) {
String whereClauseFromFilter =
sqlFilterUtils.getWhereClause(structQueryParam.getDimensionFilters());
String whereFromDate = getDateWhereClause(structQueryParam.getDateInfo(), itemDateResp);
return mergeDateWhereClause(structQueryParam, whereClauseFromFilter, whereFromDate);
sqlFilterUtils.getWhereClause(structQuery.getDimensionFilters());
String whereFromDate = getDateWhereClause(structQuery.getDateInfo(), itemDateResp);
return mergeDateWhereClause(structQuery, whereClauseFromFilter, whereFromDate);
}
private String mergeDateWhereClause(StructQueryParam structQueryParam,
String whereClauseFromFilter, String whereFromDate) {
private String mergeDateWhereClause(StructQuery structQuery, String whereClauseFromFilter,
String whereFromDate) {
if (StringUtils.isNotEmpty(whereFromDate)
&& StringUtils.isNotEmpty(whereClauseFromFilter)) {
return String.format("%s AND (%s)", whereFromDate, whereClauseFromFilter);
@@ -180,7 +179,7 @@ public class SqlGenerateUtils {
return whereFromDate;
} else if (Objects.isNull(whereFromDate) && StringUtils.isEmpty(whereClauseFromFilter)) {
log.debug("the current date information is empty, enter the date initialization logic");
return dateModeUtils.defaultRecentDateInfo(structQueryParam.getDateInfo());
return dateModeUtils.defaultRecentDateInfo(structQuery.getDateInfo());
}
return whereClauseFromFilter;
}
@@ -204,12 +203,12 @@ public class SqlGenerateUtils {
return dateModeUtils.getDateWhereStr(dateInfo, dateDate);
}
public Triple<String, String, String> getBeginEndTime(StructQueryParam structQueryParam,
public Triple<String, String, String> getBeginEndTime(StructQuery structQuery,
ItemDateResp dataDate) {
if (Objects.isNull(structQueryParam.getDateInfo())) {
if (Objects.isNull(structQuery.getDateInfo())) {
return Triple.of("", "", "");
}
DateConf dateConf = structQueryParam.getDateInfo();
DateConf dateConf = structQuery.getDateInfo();
String dateInfo = dateModeUtils.getSysDateCol(dateConf);
if (dateInfo.isEmpty()) {
return Triple.of("", "", "");

View File

@@ -2,7 +2,7 @@ package com.tencent.supersonic.chat.core.parser.aggregate;
import com.alibaba.fastjson.JSON;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.parser.calcite.CalciteQueryParser;
import com.tencent.supersonic.headless.core.translator.parser.OntologyQueryParser;
import org.junit.jupiter.api.Test;
import org.testng.Assert;
@@ -316,7 +316,7 @@ public class CalciteSqlParserTest {
+ " \"createdAt\": 1711367511146,\n"
+ " \"updatedAt\": 1711367511146\n" + " }\n" + " }\n" + "}";
QueryStatement queryStatement = JSON.parseObject(json, QueryStatement.class);
CalciteQueryParser calciteSqlParser = new CalciteQueryParser();
OntologyQueryParser calciteSqlParser = new OntologyQueryParser();
calciteSqlParser.parse(queryStatement);
Assert.assertEquals(queryStatement.getSql().trim().replaceAll("\\s+", ""),
"SELECT`imp_date`AS`sys_imp_date`,SUM(1)AS`pv`" + "FROM" + "`s2_pv_uv_statis`"

View File

@@ -19,8 +19,8 @@ import com.tencent.supersonic.headless.chat.knowledge.helper.NatureHelper;
import com.tencent.supersonic.headless.core.cache.QueryCache;
import com.tencent.supersonic.headless.core.executor.QueryExecutor;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQueryParam;
import com.tencent.supersonic.headless.core.pojo.StructQueryParam;
import com.tencent.supersonic.headless.core.pojo.SqlQuery;
import com.tencent.supersonic.headless.core.pojo.StructQuery;
import com.tencent.supersonic.headless.core.translator.SemanticTranslator;
import com.tencent.supersonic.headless.core.utils.ComponentFactory;
import com.tencent.supersonic.headless.server.annotation.S2DataPermission;
@@ -128,8 +128,7 @@ public class S2SemanticLayerService implements SemanticLayerService {
for (QueryExecutor queryExecutor : queryExecutors) {
if (queryExecutor.accept(queryStatement)) {
queryResp = queryExecutor.execute(queryStatement);
queryUtils.populateQueryColumns(queryResp,
queryStatement.getSemanticSchemaResp());
queryUtils.populateQueryColumns(queryResp, queryStatement.getSemanticSchema());
}
}
@@ -299,7 +298,7 @@ public class S2SemanticLayerService implements SemanticLayerService {
QueryStatement queryStatement = new QueryStatement();
queryStatement.setEnableOptimize(queryUtils.enableOptimize());
queryStatement.setDataSetId(queryReq.getDataSetId());
queryStatement.setSemanticSchemaResp(semanticSchemaResp);
queryStatement.setSemanticSchema(semanticSchemaResp);
queryStatement.setOntology(semanticSchemaManager.buildOntology(semanticSchemaResp));
return queryStatement;
}
@@ -308,9 +307,9 @@ public class S2SemanticLayerService implements SemanticLayerService {
QueryStatement queryStatement = buildQueryStatement(querySqlReq);
queryStatement.setIsS2SQL(true);
SqlQueryParam sqlQueryParam = new SqlQueryParam();
sqlQueryParam.setSql(querySqlReq.getSql());
queryStatement.setSqlQueryParam(sqlQueryParam);
SqlQuery sqlQuery = new SqlQuery();
sqlQuery.setSql(querySqlReq.getSql());
queryStatement.setSqlQuery(sqlQuery);
// If dataSetId or DataSetName is empty, parse dataSetId from the SQL
if (querySqlReq.needGetDataSetId()) {
@@ -322,9 +321,9 @@ public class S2SemanticLayerService implements SemanticLayerService {
private QueryStatement buildStructQueryStatement(SemanticQueryReq queryReq) {
QueryStatement queryStatement = buildQueryStatement(queryReq);
StructQueryParam structQueryParam = new StructQueryParam();
BeanUtils.copyProperties(queryReq, structQueryParam);
queryStatement.setStructQueryParam(structQueryParam);
StructQuery structQuery = new StructQuery();
BeanUtils.copyProperties(queryReq, structQuery);
queryStatement.setStructQuery(structQuery);
queryStatement.setIsS2SQL(false);
return queryStatement;
}

View File

@@ -4,6 +4,8 @@ import com.tencent.supersonic.common.pojo.ModelRela;
import com.tencent.supersonic.common.pojo.enums.FilterOperatorEnum;
import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.core.pojo.JoinRelation;
import com.tencent.supersonic.headless.core.pojo.Ontology;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.*;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Materialization.TimePartType;
@@ -215,7 +217,7 @@ public class SemanticSchemaManager {
}
private static List<JoinRelation> getJoinRelation(List<ModelRela> modelRelas,
Map<Long, String> modelIdName) {
Map<Long, String> modelIdName) {
List<JoinRelation> joinRelations = new ArrayList<>();
modelRelas.stream().forEach(r -> {
if (modelIdName.containsKey(r.getFromModelId())

View File

@@ -31,7 +31,7 @@ public class MetricDrillDownChecker {
private MetricService metricService;
public void checkQuery(QueryStatement queryStatement) {
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp();
SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchema();
String sql = queryStatement.getSql();
if (StringUtils.isBlank(sql)) {
return;

View File

@@ -6,7 +6,7 @@ import com.tencent.supersonic.headless.api.pojo.response.SqlParserResp;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.calcite.SqlBuilder;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
import com.tencent.supersonic.headless.server.manager.SemanticSchemaManager;
import com.tencent.supersonic.headless.server.pojo.yaml.*;
import lombok.extern.slf4j.Slf4j;
@@ -19,8 +19,8 @@ import java.util.List;
@Slf4j
class HeadlessParserServiceTest {
public static SqlParserResp parser(S2CalciteSchema semanticSchema,
OntologyQueryParam ontologyQueryParam, boolean isAgg) {
public static SqlParserResp parser(S2CalciteSchema semanticSchema, OntologyQuery ontologyQuery,
boolean isAgg) {
SqlParserResp sqlParser = new SqlParserResp();
try {
if (semanticSchema == null) {
@@ -29,14 +29,14 @@ class HeadlessParserServiceTest {
}
SqlBuilder aggBuilder = new SqlBuilder(semanticSchema);
QueryStatement queryStatement = new QueryStatement();
queryStatement.setOntologyQueryParam(ontologyQueryParam);
queryStatement.setOntologyQuery(ontologyQuery);
String sql = aggBuilder.buildOntologySql(queryStatement);
queryStatement.setSql(sql);
EngineType engineType = semanticSchema.getOntology().getDatabase().getType();
sqlParser.setSql(aggBuilder.getSql(engineType));
} catch (Exception e) {
sqlParser.setErrMsg(e.getMessage());
log.error("parser error metricQueryReq[{}] error [{}]", ontologyQueryParam, e);
log.error("parser error metricQueryReq[{}] error [{}]", ontologyQuery, e);
}
return sqlParser;
}
@@ -155,7 +155,7 @@ class HeadlessParserServiceTest {
// HeadlessSchemaManager.update(headlessSchema, HeadlessSchemaManager.getMetrics(metric));
OntologyQueryParam metricCommand = new OntologyQueryParam();
OntologyQuery metricCommand = new OntologyQuery();
metricCommand.setDimensions(new HashSet<>(Arrays.asList("sys_imp_date")));
metricCommand.setMetrics(new HashSet<>(Arrays.asList("pv")));
metricCommand.setWhere(
@@ -168,7 +168,7 @@ class HeadlessParserServiceTest {
addDepartment(semanticSchema);
OntologyQueryParam metricCommand2 = new OntologyQueryParam();
OntologyQuery metricCommand2 = new OntologyQuery();
metricCommand2.setDimensions(new HashSet<>(Arrays.asList("sys_imp_date",
"user_name__department", "user_name", "user_name__page")));
metricCommand2.setMetrics(new HashSet<>(Arrays.asList("pv")));

View File

@@ -25,21 +25,18 @@ com.tencent.supersonic.headless.chat.parser.llm.DataSetResolver=\
### headless-core SPIs
com.tencent.supersonic.headless.core.translator.converter.QueryConverter=\
com.tencent.supersonic.headless.core.translator.converter.DefaultDimValueConverter,\
com.tencent.supersonic.headless.core.translator.converter.SqlVariableConverter,\
com.tencent.supersonic.headless.core.translator.converter.SqlQueryConverter,\
com.tencent.supersonic.headless.core.translator.converter.StructQueryConverter,\
com.tencent.supersonic.headless.core.translator.converter.MetricRatioConverter
com.tencent.supersonic.headless.core.translator.parser.QueryParser=\
com.tencent.supersonic.headless.core.translator.parser.DefaultDimValueParser,\
com.tencent.supersonic.headless.core.translator.parser.SqlVariableParser,\
com.tencent.supersonic.headless.core.translator.parser.SqlQueryParser,\
com.tencent.supersonic.headless.core.translator.parser.StructQueryParser,\
com.tencent.supersonic.headless.core.translator.parser.MetricRatioParser,\
com.tencent.supersonic.headless.core.translator.parser.OntologyQueryParser
com.tencent.supersonic.headless.core.translator.optimizer.QueryOptimizer=\
com.tencent.supersonic.headless.core.translator.optimizer.DetailQueryOptimizer,\
com.tencent.supersonic.headless.core.translator.optimizer.DbDialectOptimizer,\
com.tencent.supersonic.headless.core.translator.optimizer.ResultLimitOptimizer
com.tencent.supersonic.headless.core.translator.parser.QueryParser=\
com.tencent.supersonic.headless.core.translator.parser.calcite.CalciteQueryParser
com.tencent.supersonic.headless.core.executor.QueryExecutor=\
com.tencent.supersonic.headless.core.executor.JdbcExecutor

View File

@@ -25,21 +25,18 @@ com.tencent.supersonic.headless.chat.parser.llm.DataSetResolver=\
### headless-core SPIs
com.tencent.supersonic.headless.core.translator.converter.QueryConverter=\
com.tencent.supersonic.headless.core.translator.converter.DefaultDimValueConverter,\
com.tencent.supersonic.headless.core.translator.converter.SqlVariableConverter,\
com.tencent.supersonic.headless.core.translator.converter.SqlQueryConverter,\
com.tencent.supersonic.headless.core.translator.converter.StructQueryConverter,\
com.tencent.supersonic.headless.core.translator.converter.MetricRatioConverter
com.tencent.supersonic.headless.core.translator.parser.QueryParser=\
com.tencent.supersonic.headless.core.translator.parser.DefaultDimValueParser,\
com.tencent.supersonic.headless.core.translator.parser.SqlVariableParser,\
com.tencent.supersonic.headless.core.translator.parser.SqlQueryParser,\
com.tencent.supersonic.headless.core.translator.parser.StructQueryParser,\
com.tencent.supersonic.headless.core.translator.parser.MetricRatioParser,\
com.tencent.supersonic.headless.core.translator.parser.OntologyQueryParser
com.tencent.supersonic.headless.core.translator.optimizer.QueryOptimizer=\
com.tencent.supersonic.headless.core.translator.optimizer.DetailQueryOptimizer,\
com.tencent.supersonic.headless.core.translator.optimizer.DbDialectOptimizer,\
com.tencent.supersonic.headless.core.translator.optimizer.ResultLimitOptimizer
com.tencent.supersonic.headless.core.translator.parser.QueryParser=\
com.tencent.supersonic.headless.core.translator.parser.calcite.CalciteQueryParser
com.tencent.supersonic.headless.core.executor.QueryExecutor=\
com.tencent.supersonic.headless.core.executor.JdbcExecutor