Mirror of https://github.com/tencentmusic/supersonic.git (synced 2025-12-10 19:38:13 +00:00)
Refactor translator module (#1932)
* [improvement][chat] Support agent permission management #1143
* [improvement][chat] Iterate on the LLM prompts for parsing and correction.
* [improvement][headless] Clean up code logic of the headless core.
* [fix][chat] Memory management updates do not take effect (#1912)
* [improvement][headless-fe] Added null-check conditions to the data formatting function.
* [improvement][headless] Clean up code logic of the headless translator.
* [improvement][headless-fe] Added permission management for agents.
* [improvement][headless-fe] Unified the assistant's permission settings interaction to match the system style.
* [improvement][dict] Support returning the dimension dict task list by page.
* [improvement][headless-fe] Revised the interaction for semantic modeling routing and implemented the initial version of metric management switching.
* [improvement][launcher] Set system property `s2.test` in JUnit tests to facilitate conditional breakpoints.
* [improvement][headless] Add a validateAndQuery interface in SqlQueryApiController.
* [improvement][launcher] Use the API to get element IDs, avoiding hard-coded values.
* [improvement][launcher] Support the DuckDB database and refactor the translator code structure.
---------
Co-authored-by: lxwcodemonkey <jolunoluo@tencent.com>
Co-authored-by: tristanliu <tristanliu@tencent.com>
Co-authored-by: daikon12 <1059907724@qq.com>
Co-authored-by: lexluo09 <39718951+lexluo09@users.noreply.github.com>
@@ -528,7 +528,7 @@ public class SqlReplaceHelper {
         }
     }

-    private static Select replaceAggAliasOrderItem(Select selectStatement) {
+    private static Select replaceAggAliasOrderbyField(Select selectStatement) {
         if (selectStatement instanceof PlainSelect) {
             PlainSelect plainSelect = (PlainSelect) selectStatement;
             if (Objects.nonNull(plainSelect.getOrderByElements())) {
@@ -564,15 +564,15 @@ public class SqlReplaceHelper {
         if (plainSelect.getFromItem() instanceof ParenthesedSelect) {
             ParenthesedSelect parenthesedSelect = (ParenthesedSelect) plainSelect.getFromItem();
             parenthesedSelect
-                    .setSelect(replaceAggAliasOrderItem(parenthesedSelect.getSelect()));
+                    .setSelect(replaceAggAliasOrderbyField(parenthesedSelect.getSelect()));
         }
             return selectStatement;
         }
         return selectStatement;
     }

-    public static String replaceAggAliasOrderItem(String sql) {
-        Select selectStatement = replaceAggAliasOrderItem(SqlSelectHelper.getSelect(sql));
+    public static String replaceAggAliasOrderbyField(String sql) {
+        Select selectStatement = replaceAggAliasOrderbyField(SqlSelectHelper.getSelect(sql));
         return selectStatement.toString();
     }

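The renamed helper's behavior is easiest to see from the test updated later in this diff: when an inner query orders by the same aggregate it also selects, the ORDER BY is rewritten to the ordinal position of that select item. A minimal usage sketch (assuming supersonic's `SqlReplaceHelper` is on the classpath; the SQL is the test's own input):

```java
import com.tencent.supersonic.common.jsqlparser.SqlReplaceHelper;

public class ReplaceAggAliasExample {
    public static void main(String[] args) {
        // The inner query orders by SUM(访问次数), which is also selected under an alias.
        String sql = "SELECT SUM(访问次数) AS top10总播放量 FROM (SELECT 部门, SUM(访问次数) AS 访问次数 "
                + "FROM 超音数 GROUP BY 部门 ORDER BY SUM(访问次数) DESC LIMIT 10) AS top10";
        // Per the test expectation, the inner ORDER BY becomes "ORDER BY 2 DESC".
        System.out.println(SqlReplaceHelper.replaceAggAliasOrderbyField(sql));
    }
}
```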
@@ -2,6 +2,7 @@ package com.tencent.supersonic.common.pojo;

 import com.tencent.supersonic.common.pojo.enums.FilterOperatorEnum;
 import lombok.AllArgsConstructor;
+import lombok.Builder;
 import lombok.Data;
 import lombok.NoArgsConstructor;

@@ -10,6 +11,7 @@ import java.util.List;
 @Data
 @AllArgsConstructor
 @NoArgsConstructor
+@Builder
 public class Filter {

     private Relation relation = Relation.FILTER;

@@ -8,7 +8,8 @@ public enum EngineType {
     KAFKA(4, "kafka"),
     H2(5, "h2"),
     POSTGRESQL(6, "postgresql"),
-    OTHER(7, "other");
+    OTHER(7, "other"),
+    DUCKDB(8, "duckdb");

     private Integer code;

@@ -4,6 +4,7 @@ import com.tencent.supersonic.common.pojo.Constants;
 import com.tencent.supersonic.common.pojo.DateConf;
 import com.tencent.supersonic.common.pojo.ItemDateResp;
 import com.tencent.supersonic.common.pojo.enums.DatePeriodEnum;
+import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
 import lombok.Data;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.lang3.StringUtils;
@@ -32,14 +33,9 @@ import static com.tencent.supersonic.common.pojo.Constants.MONTH_FORMAT;
 @Data
 public class DateModeUtils {

-    @Value("${s2.query.parameter.sys.date:sys_imp_date}")
-    private String sysDateCol;
-
-    @Value("${s2.query.parameter.sys.month:sys_imp_month}")
-    private String sysDateMonthCol;
-
-    @Value("${s2.query.parameter.sys.month:sys_imp_week}")
-    private String sysDateWeekCol;
+    private final String sysDateCol = TimeDimensionEnum.DAY.getName();
+    private final String sysDateMonthCol = TimeDimensionEnum.MONTH.getName();
+    private final String sysDateWeekCol = TimeDimensionEnum.WEEK.getName();

     @Value("${s2.query.parameter.sys.zipper.begin:start_}")
     private String sysZipperDateColBegin;

@@ -16,6 +16,7 @@ import java.time.temporal.TemporalAdjuster;
 import java.time.temporal.TemporalAdjusters;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Calendar;
 import java.util.Date;
 import java.util.List;
 import java.util.Objects;
@@ -201,6 +202,13 @@ public class DateUtils {
         return false;
     }

+    public static Long calculateDiffMs(Date createAt) {
+        Calendar calendar = Calendar.getInstance();
+        Date now = calendar.getTime();
+        long milliseconds = now.getTime() - createAt.getTime();
+        return milliseconds;
+    }
+
     public static boolean isDateString(String value, String format) {
         try {
             DateTimeFormatter formatter = DateTimeFormatter.ofPattern(format);

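A side note on the new helper: `Calendar.getInstance().getTime()` is just an indirect way of obtaining the current time, so the added method reduces to a one-line current-time delta. An equivalent sketch (illustrative only, not the committed code):

```java
// Same result without the Calendar detour: milliseconds elapsed since createAt.
public static long calculateDiffMs(java.util.Date createAt) {
    return System.currentTimeMillis() - createAt.getTime();
}
```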
@@ -325,10 +325,10 @@ class SqlReplaceHelperTest {
     }

     @Test
-    void testReplaceAggAliasOrderItem() {
+    void testReplaceAggAliasOrderbyField() {
         String sql = "SELECT SUM(访问次数) AS top10总播放量 FROM (SELECT 部门, SUM(访问次数) AS 访问次数 FROM 超音数 "
                 + "GROUP BY 部门 ORDER BY SUM(访问次数) DESC LIMIT 10) AS top10";
-        String replaceSql = SqlReplaceHelper.replaceAggAliasOrderItem(sql);
+        String replaceSql = SqlReplaceHelper.replaceAggAliasOrderbyField(sql);
         Assert.assertEquals(
                 "SELECT SUM(访问次数) AS top10总播放量 FROM (SELECT 部门, SUM(访问次数) AS 访问次数 FROM 超音数 "
                         + "GROUP BY 部门 ORDER BY 2 DESC LIMIT 10) AS top10",

@@ -1,17 +0,0 @@
-package com.tencent.supersonic.headless.api.pojo;
-
-import com.google.common.collect.Lists;
-import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
-import lombok.Data;
-
-import java.util.List;
-
-@Data
-public class MetricTable {
-
-    private String alias;
-    private List<String> metrics = Lists.newArrayList();
-    private List<String> dimensions = Lists.newArrayList();
-    private String where;
-    private AggOption aggOption = AggOption.DEFAULT;
-}
@@ -45,7 +45,8 @@ public enum DataType {

     TDENGINE("TAOS", "TAOS", "com.taosdata.jdbc.TSDBDriver", "'", "'", "\"", "\""),

-    POSTGRESQL("postgresql", "postgresql", "org.postgresql.Driver", "'", "'", "\"", "\"");
+    POSTGRESQL("postgresql", "postgresql", "org.postgresql.Driver", "'", "'", "\"", "\""),
+    DUCKDB("duckdb", "duckdb", "org.duckdb.DuckDBDriver", "'", "'", "\"", "\"");

     private String feature;
     private String desc;

@@ -1,24 +0,0 @@
-package com.tencent.supersonic.headless.api.pojo.request;
-
-import com.tencent.supersonic.headless.api.pojo.MetricTable;
-import lombok.Data;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-@Data
-public class ParseSqlReq {
-    private Map<String, String> variables;
-    private String sql = "";
-    private List<MetricTable> tables;
-    private boolean supportWith = true;
-    private boolean withAlias = true;
-
-    public Map<String, String> getVariables() {
-        if (variables == null) {
-            variables = new HashMap<>();
-        }
-        return variables;
-    }
-}
@@ -34,12 +34,11 @@ public class QueryFilter implements Serializable {
         QueryFilter that = (QueryFilter) o;
         return Objects.equal(bizName, that.bizName) && Objects.equal(name, that.name)
                 && operator == that.operator && Objects.equal(value, that.value)
-                && Objects.equal(elementID, that.elementID)
                 && Objects.equal(function, that.function);
     }

     @Override
     public int hashCode() {
-        return Objects.hashCode(bizName, name, operator, value, elementID, function);
+        return Objects.hashCode(bizName, name, operator, value, function);
     }
 }

@@ -0,0 +1,23 @@
+package com.tencent.supersonic.headless.api.pojo.request;
+
+import javax.validation.constraints.NotNull;
+
+import com.tencent.supersonic.common.pojo.PageBaseReq;
+import lombok.Data;
+
+import java.util.List;
+
+/**
+ * @author: kanedai
+ * @date: 2024/11/24
+ */
+@Data
+public class ValueTaskQueryReq extends PageBaseReq {
+
+    @NotNull
+    private Long itemId;
+
+    private List<String> taskStatusList;
+
+    private String key;
+}
@@ -37,8 +37,8 @@ public class OnePassSCSqlGenStrategy extends SqlGenStrategy {
             + "please convert it to a SQL query so that relevant data could be returned "
             + "by executing the SQL query against underlying database." + "\n#Rules:"
             + "\n1.SQL columns and values must be mentioned in the `Schema`, DO NOT hallucinate."
-            + "\n2.ALWAYS specify date filter using `>`,`<`,`>=`,`<=` operator."
-            + "\n3.DO NOT include date filter in the where clause if not explicitly expressed in the `Question`."
+            + "\n2.ALWAYS specify time range using `>`,`<`,`>=`,`<=` operator."
+            + "\n3.DO NOT include time range in the where clause if not explicitly expressed in the `Question`."
             + "\n4.DO NOT calculate date range using functions."
             + "\n5.ALWAYS use `with` statement if nested aggregation is needed."
             + "\n6.ALWAYS enclose alias declared by `AS` command in underscores."

@@ -34,7 +34,7 @@ public class ClickHouseAdaptor extends BaseDbAdaptor {
     }

     @Override
-    public String functionNameCorrector(String sql) {
+    public String rewriteSql(String sql) {
         Map<String, String> functionMap = new HashMap<>();
         functionMap.put("MONTH".toLowerCase(), "toMonth");
         functionMap.put("DAY".toLowerCase(), "toDayOfMonth");

@@ -11,7 +11,7 @@ public interface DbAdaptor {

     String getDateFormat(String dateType, String dateFormat, String column);

-    String functionNameCorrector(String sql);
+    String rewriteSql(String sql);

     List<String> getDBs(ConnectInfo connectInfo) throws SQLException;

@@ -16,6 +16,7 @@ public class DbAdaptorFactory {
         dbAdaptorMap.put(EngineType.H2.getName(), new H2Adaptor());
         dbAdaptorMap.put(EngineType.POSTGRESQL.getName(), new PostgresqlAdaptor());
         dbAdaptorMap.put(EngineType.OTHER.getName(), new DefaultDbAdaptor());
+        dbAdaptorMap.put(EngineType.DUCKDB.getName(), new DuckdbAdaptor());
     }

     public static DbAdaptor getEngineAdaptor(String engineType) {
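With the registration above, DuckDB resolves through the same factory path as the other engines. A minimal lookup sketch using only names that appear in this diff (the printed SQL is illustrative):

```java
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.adaptor.db.DbAdaptor;
import com.tencent.supersonic.headless.core.adaptor.db.DbAdaptorFactory;

public class DuckdbLookupExample {
    public static void main(String[] args) {
        // Engine names are the lower-case strings declared on EngineType, e.g. "duckdb".
        DbAdaptor adaptor = DbAdaptorFactory.getEngineAdaptor(EngineType.DUCKDB.getName());
        // Callers now go through rewriteSql(), the renamed functionNameCorrector().
        System.out.println(adaptor.rewriteSql("SELECT `部门` FROM `超音数`"));
    }
}
```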
@@ -8,7 +8,7 @@ public class DefaultDbAdaptor extends BaseDbAdaptor {
     }

     @Override
-    public String functionNameCorrector(String sql) {
+    public String rewriteSql(String sql) {
         return sql;
     }
 }

@@ -0,0 +1,42 @@
+package com.tencent.supersonic.headless.core.adaptor.db;
+
+import com.google.common.collect.Lists;
+import com.tencent.supersonic.headless.api.pojo.DBColumn;
+import com.tencent.supersonic.headless.api.pojo.enums.FieldType;
+import com.tencent.supersonic.headless.core.pojo.ConnectInfo;
+import lombok.extern.slf4j.Slf4j;
+
+import java.sql.DatabaseMetaData;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.List;
+
+@Slf4j
+public class DuckdbAdaptor extends DefaultDbAdaptor {
+
+    protected ResultSet getResultSet(String schemaName, DatabaseMetaData metaData)
+            throws SQLException {
+        return metaData.getTables(schemaName, null, null, new String[] {"TABLE", "VIEW"});
+    }
+
+    public List<DBColumn> getColumns(ConnectInfo connectInfo, String schemaName, String tableName)
+            throws SQLException {
+        List<DBColumn> dbColumns = Lists.newArrayList();
+        DatabaseMetaData metaData = getDatabaseMetaData(connectInfo);
+        ResultSet columns = metaData.getColumns(schemaName, null, tableName, null);
+        while (columns.next()) {
+            String columnName = columns.getString("COLUMN_NAME");
+            String dataType = columns.getString("TYPE_NAME");
+            String remarks = columns.getString("REMARKS");
+            FieldType fieldType = classifyColumnType(dataType);
+            dbColumns.add(new DBColumn(columnName, dataType, remarks, fieldType));
+        }
+        return dbColumns;
+    }
+
+    @Override
+    public String rewriteSql(String sql) {
+        return sql.replaceAll("`", "");
+    }
+
+}
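The override at the end is the whole DuckDB-specific SQL rewrite: presumably because DuckDB expects ANSI-style (double-quoted) identifiers rather than MySQL-style backticks, every backtick is dropped. The effect is a plain `String.replaceAll`, as this standalone sketch shows:

```java
public class BacktickStripDemo {
    public static void main(String[] args) {
        String sql = "SELECT `部门`, SUM(`访问次数`) FROM `超音数` GROUP BY `部门`";
        // Same expression as DuckdbAdaptor.rewriteSql: remove every backtick.
        System.out.println(sql.replaceAll("`", ""));
        // -> SELECT 部门, SUM(访问次数) FROM 超音数 GROUP BY 部门
    }
}
```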
@@ -62,7 +62,7 @@ public class H2Adaptor extends BaseDbAdaptor {
     }

     @Override
-    public String functionNameCorrector(String sql) {
+    public String rewriteSql(String sql) {
         return sql;
     }
 }

@@ -31,7 +31,7 @@ public class MysqlAdaptor extends BaseDbAdaptor {
     }

     @Override
-    public String functionNameCorrector(String sql) {
+    public String rewriteSql(String sql) {
         return sql;
     }
 }

@@ -48,7 +48,7 @@ public class PostgresqlAdaptor extends BaseDbAdaptor {
     }

     @Override
-    public String functionNameCorrector(String sql) {
+    public String rewriteSql(String sql) {
         Map<String, String> functionMap = new HashMap<>();
         functionMap.put("MONTH".toLowerCase(), "TO_CHAR");
         functionMap.put("DAY".toLowerCase(), "TO_CHAR");

@@ -3,10 +3,10 @@ package com.tencent.supersonic.headless.core.executor;
 import com.tencent.supersonic.common.calcite.Configuration;
 import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
 import com.tencent.supersonic.headless.core.pojo.Materialization;
-import com.tencent.supersonic.headless.core.translator.calcite.s2sql.TimeRange;
-import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteTable;
-import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteTable.Builder;
-import com.tencent.supersonic.headless.core.translator.calcite.sql.SchemaBuilder;
+import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteTable;
+import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteTable.Builder;
+import com.tencent.supersonic.headless.core.translator.parser.calcite.SchemaBuilder;
+import com.tencent.supersonic.headless.core.translator.parser.s2sql.TimeRange;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.calcite.adapter.enumerable.EnumerableRules;
 import org.apache.calcite.config.CalciteConnectionConfigImpl;

@@ -1,14 +0,0 @@
-package com.tencent.supersonic.headless.core.pojo;
-
-import com.tencent.supersonic.headless.api.pojo.MetricTable;
-import lombok.Data;
-
-import java.util.List;
-
-@Data
-public class DataSetQueryParam {
-    private String sql = "";
-    private List<MetricTable> tables;
-    private boolean supportWith = true;
-    private boolean withAlias = true;
-}
@@ -2,6 +2,7 @@ package com.tencent.supersonic.headless.core.pojo;

 import com.google.common.collect.Lists;
 import com.tencent.supersonic.common.pojo.RecordInfo;
+import com.tencent.supersonic.common.pojo.enums.EngineType;
 import com.tencent.supersonic.common.util.AESEncryptionUtil;
 import lombok.AllArgsConstructor;
 import lombok.Builder;
@@ -36,7 +37,7 @@ public class Database extends RecordInfo {

     private String schema;

     /** mysql,clickhouse */
-    private String type;
+    private EngineType type;

     private List<String> admins = Lists.newArrayList();

@@ -1,17 +0,0 @@
-package com.tencent.supersonic.headless.core.pojo;
-
-import com.tencent.supersonic.common.pojo.ColumnOrder;
-import lombok.Data;
-
-import java.util.List;
-
-@Data
-public class MetricQueryParam {
-
-    private List<String> metrics;
-    private List<String> dimensions;
-    private String where;
-    private Long limit;
-    private List<ColumnOrder> order;
-    private boolean nativeQuery = false;
-}
@@ -1,34 +1,25 @@
 package com.tencent.supersonic.headless.core.pojo;

-import com.tencent.supersonic.headless.api.pojo.QueryParam;
 import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
-import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology;
+import com.tencent.supersonic.headless.core.translator.parser.s2sql.Ontology;
+import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
 import lombok.Data;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.tuple.ImmutablePair;
 import org.apache.commons.lang3.tuple.Triple;

 import java.util.List;

 @Data
 public class QueryStatement {

     private Long dataSetId;
     private List<Long> modelIds;
     private String sql;
     private String errMsg;
-    private QueryParam queryParam;
-    private MetricQueryParam metricQueryParam;
-    private DataSetQueryParam dataSetQueryParam;
+    private StructQueryParam structQueryParam;
+    private SqlQueryParam sqlQueryParam;
+    private OntologyQueryParam ontologyQueryParam;
     private Integer status = 0;
     private Boolean isS2SQL = false;
     private List<ImmutablePair<String, String>> timeRanges;
     private Boolean enableOptimize = true;
     private Triple<String, String, String> minMaxTime;
-    private String dataSetSql;
-    private String dataSetAlias;
-    private String dataSetSimplifySql;
     private Boolean enableLimitWrapper = false;
     private Ontology ontology;
     private SemanticSchemaResp semanticSchemaResp;
     private Integer limit = 1000;
@@ -41,9 +32,4 @@ public class QueryStatement {

     public boolean isTranslated() {
         return isTranslated != null && isTranslated && isOk();
     }
-
-    public QueryStatement error(String msg) {
-        this.setErrMsg(msg);
-        return this;
-    }
 }

@@ -0,0 +1,12 @@
+package com.tencent.supersonic.headless.core.pojo;
+
+import lombok.Data;
+
+@Data
+public class SqlQueryParam {
+    private String sql;
+    private String table;
+    private boolean supportWith = true;
+    private boolean withAlias = true;
+    private String simplifiedSql;
+}
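For orientation, this new POJO takes over the sql/table/supportWith/withAlias fields that previously lived on ParseSqlReq and DataSetQueryParam, and it is wired into QueryStatement via the `sqlQueryParam` field added above. With Lombok's `@Data`, usage is plain setters and getters; a minimal sketch (values illustrative):

```java
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQueryParam;

public class SqlQueryParamExample {
    public static void main(String[] args) {
        // Lombok generates these accessors from @Data.
        SqlQueryParam sqlQueryParam = new SqlQueryParam();
        sqlQueryParam.setSql("SELECT 部门, SUM(访问次数) FROM t_1 GROUP BY 部门");
        sqlQueryParam.setTable("t_1");

        QueryStatement queryStatement = new QueryStatement();
        queryStatement.setSqlQueryParam(sqlQueryParam);
        System.out.println(queryStatement.getSqlQueryParam().getSql());
    }
}
```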
@@ -1,21 +1,18 @@
-package com.tencent.supersonic.headless.api.pojo;
+package com.tencent.supersonic.headless.core.pojo;

 import com.tencent.supersonic.common.pojo.Aggregator;
-import com.tencent.supersonic.common.pojo.ColumnOrder;
 import com.tencent.supersonic.common.pojo.DateConf;
 import com.tencent.supersonic.common.pojo.Filter;
 import com.tencent.supersonic.common.pojo.Order;
 import com.tencent.supersonic.common.pojo.enums.QueryType;
+import com.tencent.supersonic.headless.api.pojo.Param;
 import lombok.Data;

 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;

 @Data
-public class QueryParam {
-    // struct
+public class StructQueryParam {
     private List<String> groups = new ArrayList();
     private List<Aggregator> aggregators = new ArrayList();
     private List<Order> orders = new ArrayList();
@@ -24,17 +21,5 @@ public class QueryParam {
     private DateConf dateInfo;
     private Long limit = 2000L;
     private QueryType queryType;
-    private String s2SQL;
-    private String correctS2SQL;
-    private Long dataSetId;
-    private String dataSetName;
-    private Set<Long> modelIds = new HashSet<>();
     private List<Param> params = new ArrayList<>();
-
-    // metric
-    private List<String> metrics = new ArrayList();
-    private List<String> dimensions;
-    private String where;
-    private List<ColumnOrder> order;
-    private boolean nativeQuery = false;
 }
@@ -1,507 +1,95 @@
 package com.tencent.supersonic.headless.core.translator;

 import com.tencent.supersonic.common.calcite.SqlMergeWithUtils;
-import com.tencent.supersonic.common.jsqlparser.SqlRemoveHelper;
-import com.tencent.supersonic.common.jsqlparser.SqlReplaceHelper;
-import com.tencent.supersonic.common.jsqlparser.SqlSelectFunctionHelper;
-import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
-import com.tencent.supersonic.common.pojo.Aggregator;
-import com.tencent.supersonic.common.pojo.Constants;
-import com.tencent.supersonic.common.pojo.enums.AggOperatorEnum;
 import com.tencent.supersonic.common.pojo.enums.EngineType;
-import com.tencent.supersonic.common.pojo.enums.QueryType;
-import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
-import com.tencent.supersonic.common.util.StringUtil;
-import com.tencent.supersonic.headless.api.pojo.Measure;
-import com.tencent.supersonic.headless.api.pojo.MetricTable;
-import com.tencent.supersonic.headless.api.pojo.QueryParam;
-import com.tencent.supersonic.headless.api.pojo.SchemaItem;
-import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
-import com.tencent.supersonic.headless.api.pojo.enums.MetricType;
-import com.tencent.supersonic.headless.api.pojo.request.QueryStructReq;
-import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
-import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
-import com.tencent.supersonic.headless.api.pojo.response.MetricResp;
-import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
-import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
-import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
-import com.tencent.supersonic.headless.core.adaptor.db.DbAdaptor;
-import com.tencent.supersonic.headless.core.adaptor.db.DbAdaptorFactory;
-import com.tencent.supersonic.headless.core.pojo.DataSetQueryParam;
-import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
 import com.tencent.supersonic.headless.core.pojo.QueryStatement;
-import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology;
+import com.tencent.supersonic.headless.core.pojo.SqlQueryParam;
 import com.tencent.supersonic.headless.core.translator.converter.QueryConverter;
 import com.tencent.supersonic.headless.core.translator.optimizer.QueryOptimizer;
+import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
 import com.tencent.supersonic.headless.core.utils.ComponentFactory;
-import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.tuple.Pair;
-import org.springframework.beans.BeanUtils;
-import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Component;
-import org.springframework.util.CollectionUtils;

 import java.util.ArrayList;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.HashSet;
 import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.Set;
 import java.util.stream.Collectors;
-import java.util.stream.Stream;

 @Component
 @Slf4j
 public class DefaultSemanticTranslator implements SemanticTranslator {

-    @Autowired
-    private SqlGenerateUtils sqlGenerateUtils;
-
     public void translate(QueryStatement queryStatement) {
         if (queryStatement.isTranslated()) {
             return;
         }

         try {
-            preprocess(queryStatement);
-            parse(queryStatement);
-            optimize(queryStatement);
+            for (QueryConverter converter : ComponentFactory.getQueryConverters()) {
+                if (converter.accept(queryStatement)) {
+                    log.debug("QueryConverter accept [{}]", converter.getClass().getName());
+                    converter.convert(queryStatement);
+                }
+            }
+            doOntologyParse(queryStatement);
+
+            if (StringUtils.isNotBlank(queryStatement.getSqlQueryParam().getSimplifiedSql())) {
+                queryStatement.setSql(queryStatement.getSqlQueryParam().getSimplifiedSql());
+            }
+            if (StringUtils.isBlank(queryStatement.getSql())) {
+                throw new RuntimeException("parse exception: " + queryStatement.getErrMsg());
+            }
+
+            for (QueryOptimizer queryOptimizer : ComponentFactory.getQueryOptimizers()) {
+                queryOptimizer.rewrite(queryStatement);
+            }
         } catch (Exception e) {
             queryStatement.setErrMsg(e.getMessage());
             log.error("Failed to translate query [{}]", e.getMessage(), e);
         }
     }

-    private void parse(QueryStatement queryStatement) throws Exception {
-        QueryParam queryParam = queryStatement.getQueryParam();
-        if (Objects.isNull(queryStatement.getDataSetQueryParam())) {
-            queryStatement.setDataSetQueryParam(new DataSetQueryParam());
-        }
-        if (Objects.isNull(queryStatement.getMetricQueryParam())) {
-            queryStatement.setMetricQueryParam(new MetricQueryParam());
+    private void doOntologyParse(QueryStatement queryStatement) throws Exception {
+        OntologyQueryParam ontologyQueryParam = queryStatement.getOntologyQueryParam();
+        log.info("parse with ontology: [{}]", ontologyQueryParam);
+        ComponentFactory.getQueryParser().parse(queryStatement);
+        if (!queryStatement.isOk()) {
+            throw new Exception(String.format("parse ontology table [%s] error [%s]",
+                    queryStatement.getSqlQueryParam().getTable(), queryStatement.getErrMsg()));
         }

-        log.debug("SemanticConverter before [{}]", queryParam);
-        for (QueryConverter headlessConverter : ComponentFactory.getQueryConverters()) {
-            if (headlessConverter.accept(queryStatement)) {
-                log.debug("SemanticConverter accept [{}]", headlessConverter.getClass().getName());
-                headlessConverter.convert(queryStatement);
+        SqlQueryParam sqlQueryParam = queryStatement.getSqlQueryParam();
+        String ontologyQuerySql = sqlQueryParam.getSql();
+        String ontologyInnerTable = sqlQueryParam.getTable();
+        String ontologyInnerSql = queryStatement.getSql();
+
+        List<Pair<String, String>> tables = new ArrayList<>();
+        tables.add(Pair.of(ontologyInnerTable, ontologyInnerSql));
+        if (sqlQueryParam.isSupportWith()) {
+            EngineType engineType = queryStatement.getOntology().getDatabase().getType();
+            if (!SqlMergeWithUtils.hasWith(engineType, ontologyQuerySql)) {
+                String withSql = "with " + tables.stream()
+                        .map(t -> String.format("%s as (%s)", t.getLeft(), t.getRight()))
+                        .collect(Collectors.joining(",")) + "\n" + ontologyQuerySql;
+                queryStatement.setSql(withSql);
+            } else {
+                List<String> withTableList =
+                        tables.stream().map(Pair::getLeft).collect(Collectors.toList());
+                List<String> withSqlList =
+                        tables.stream().map(Pair::getRight).collect(Collectors.toList());
+                String mergeSql = SqlMergeWithUtils.mergeWith(engineType, ontologyQuerySql,
+                        withSqlList, withTableList);
+                queryStatement.setSql(mergeSql);
             }
         }
-        log.debug("SemanticConverter after {} {} {}", queryParam,
-                queryStatement.getDataSetQueryParam(), queryStatement.getMetricQueryParam());
-
-        if (!queryStatement.getDataSetQueryParam().getSql().isEmpty()) {
-            doParse(queryStatement.getDataSetQueryParam(), queryStatement);
-        } else {
-            queryStatement.getMetricQueryParam()
-                    .setNativeQuery(queryParam.getQueryType().isNativeAggQuery());
-            doParse(queryStatement,
-                    AggOption.getAggregation(queryStatement.getMetricQueryParam().isNativeQuery()));
-        }
-
-        if (StringUtils.isEmpty(queryStatement.getSql())) {
-            throw new RuntimeException("parse Exception: " + queryStatement.getErrMsg());
-        }
-        if (StringUtils.isNotBlank(queryStatement.getSql())
-                && !SqlSelectHelper.hasLimit(queryStatement.getSql())) {
-            String querySql =
-                    queryStatement.getSql() + " limit " + queryStatement.getLimit().toString();
-            queryStatement.setSql(querySql);
-        }
-    }
-
-    private QueryStatement doParse(DataSetQueryParam dataSetQueryParam,
-            QueryStatement queryStatement) {
-        log.info("parse dataSetQuery [{}] ", dataSetQueryParam);
-        Ontology ontology = queryStatement.getOntology();
-        EngineType engineType = EngineType.fromString(ontology.getDatabase().getType());
-        try {
-            if (!CollectionUtils.isEmpty(dataSetQueryParam.getTables())) {
-                List<String[]> tables = new ArrayList<>();
-                boolean isSingleTable = dataSetQueryParam.getTables().size() == 1;
-                for (MetricTable metricTable : dataSetQueryParam.getTables()) {
-                    QueryStatement tableSql = parserSql(metricTable, isSingleTable,
-                            dataSetQueryParam, queryStatement);
-                    if (isSingleTable && StringUtils.isNotBlank(tableSql.getDataSetSimplifySql())) {
-                        queryStatement.setSql(tableSql.getDataSetSimplifySql());
-                        queryStatement.setDataSetQueryParam(dataSetQueryParam);
-                        return queryStatement;
-                    }
-                    tables.add(new String[] {metricTable.getAlias(), tableSql.getSql()});
-                }
-                if (!tables.isEmpty()) {
-                    String sql;
-                    if (dataSetQueryParam.isSupportWith()) {
-                        if (!SqlMergeWithUtils.hasWith(engineType, dataSetQueryParam.getSql())) {
-                            sql = "with "
-                                    + tables.stream()
-                                            .map(t -> String.format("%s as (%s)", t[0], t[1]))
-                                            .collect(Collectors.joining(","))
-                                    + "\n" + dataSetQueryParam.getSql();
-                        } else {
-                            List<String> parentWithNameList = tables.stream().map(table -> table[0])
-                                    .collect(Collectors.toList());
-                            List<String> parentSqlList = tables.stream().map(table -> table[1])
-                                    .collect(Collectors.toList());
-                            sql = SqlMergeWithUtils.mergeWith(engineType,
-                                    dataSetQueryParam.getSql(), parentSqlList, parentWithNameList);
-                        }
-                    } else {
-                        sql = dataSetQueryParam.getSql();
-                        for (String[] tb : tables) {
-                            sql = StringUtils.replace(sql, tb[0], "(" + tb[1] + ") "
-                                    + (dataSetQueryParam.isWithAlias() ? "" : tb[0]), -1);
-                        }
-                    }
-                    queryStatement.setSql(sql);
-                    queryStatement.setDataSetQueryParam(dataSetQueryParam);
-                    return queryStatement;
-                }
+        } else {
+            for (Pair<String, String> tb : tables) {
+                ontologyQuerySql =
+                        StringUtils.replace(ontologyQuerySql, tb.getLeft(), "(" + tb.getRight()
+                                + ") " + (sqlQueryParam.isWithAlias() ? "" : tb.getLeft()), -1);
             }
-        } catch (Exception e) {
-            log.error("physicalSql error {}", e);
-            queryStatement.setErrMsg(e.getMessage());
-        }
-        return queryStatement;
-    }
-
-    private QueryStatement doParse(QueryStatement queryStatement, AggOption isAgg) {
-        MetricQueryParam metricQueryParam = queryStatement.getMetricQueryParam();
-        log.info("parse metricQuery [{}] isAgg [{}]", metricQueryParam, isAgg);
-        try {
-            ComponentFactory.getQueryParser().parse(queryStatement, isAgg);
-        } catch (Exception e) {
-            queryStatement.setErrMsg(e.getMessage());
-            log.error("parser error metricQueryReq[{}] error [{}]", metricQueryParam, e);
-        }
-        return queryStatement;
-    }
-
-    private QueryStatement parserSql(MetricTable metricTable, Boolean isSingleMetricTable,
-            DataSetQueryParam dataSetQueryParam, QueryStatement queryStatement) throws Exception {
-        MetricQueryParam metricQueryParam = new MetricQueryParam();
-        metricQueryParam.setMetrics(metricTable.getMetrics());
-        metricQueryParam.setDimensions(metricTable.getDimensions());
-        metricQueryParam.setWhere(StringUtil.formatSqlQuota(metricTable.getWhere()));
-        metricQueryParam.setNativeQuery(!AggOption.isAgg(metricTable.getAggOption()));
-
-        QueryStatement tableSql = new QueryStatement();
-        tableSql.setIsS2SQL(false);
-        tableSql.setMetricQueryParam(metricQueryParam);
-        tableSql.setMinMaxTime(queryStatement.getMinMaxTime());
-        tableSql.setEnableOptimize(queryStatement.getEnableOptimize());
-        tableSql.setDataSetId(queryStatement.getDataSetId());
-        tableSql.setOntology(queryStatement.getOntology());
-        if (isSingleMetricTable) {
-            tableSql.setDataSetSql(dataSetQueryParam.getSql());
-            tableSql.setDataSetAlias(metricTable.getAlias());
-        }
-        tableSql = doParse(tableSql, metricTable.getAggOption());
-        if (!tableSql.isOk()) {
-            throw new Exception(String.format("parser table [%s] error [%s]",
-                    metricTable.getAlias(), tableSql.getErrMsg()));
-        }
-        return tableSql;
-    }
-
-    private void optimize(QueryStatement queryStatement) {
-        for (QueryOptimizer queryOptimizer : ComponentFactory.getQueryOptimizers()) {
-            queryOptimizer.rewrite(queryStatement);
+            queryStatement.setSql(ontologyQuerySql);
         }
     }
-
-    private void preprocess(QueryStatement queryStatement) {
-        if (StringUtils.isBlank(queryStatement.getSql())) {
-            return;
-        }
-        SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp();
-
-        convertNameToBizName(queryStatement);
-        rewriteFunction(queryStatement);
-        queryStatement.setSql(SqlRemoveHelper.removeUnderscores(queryStatement.getSql()));
-
-        String tableName = SqlSelectHelper.getTableName(queryStatement.getSql());
-        if (StringUtils.isEmpty(tableName)) {
-            return;
-        }
-        // correct order item is same as agg alias
-        String reqSql = queryStatement.getSql();
-        queryStatement.setSql(SqlReplaceHelper.replaceAggAliasOrderItem(queryStatement.getSql()));
-        log.debug("replaceOrderAggSameAlias {} -> {}", reqSql, queryStatement.getSql());
-        // 5.build MetricTables
-        List<String> allFields = SqlSelectHelper.getAllSelectFields(queryStatement.getSql());
-        List<MetricSchemaResp> metricSchemas = getMetrics(semanticSchemaResp, allFields);
-        List<String> metrics =
-                metricSchemas.stream().map(SchemaItem::getBizName).collect(Collectors.toList());
-        Set<String> dimensions = getDimensions(semanticSchemaResp, allFields);
-        QueryStructReq queryStructReq = new QueryStructReq();
-
-        MetricTable metricTable = new MetricTable();
-        metricTable.getMetrics().addAll(metrics);
-        metricTable.getDimensions().addAll(dimensions);
-        metricTable.setAlias(tableName.toLowerCase());
-        // if metric empty , fill model default
-        if (CollectionUtils.isEmpty(metricTable.getMetrics())) {
-            metricTable.getMetrics().add(sqlGenerateUtils.generateInternalMetricName(
-                    getDefaultModel(semanticSchemaResp, metricTable.getDimensions())));
-        } else {
-            queryStructReq.getAggregators()
-                    .addAll(metricTable.getMetrics().stream()
-                            .map(m -> new Aggregator(m, AggOperatorEnum.UNKNOWN))
-                            .collect(Collectors.toList()));
-        }
-        AggOption aggOption = getAggOption(queryStatement, metricSchemas);
-        metricTable.setAggOption(aggOption);
-        List<MetricTable> tables = new ArrayList<>();
-        tables.add(metricTable);
-
-        // 6.build ParseSqlReq
-        DataSetQueryParam datasetQueryParam = new DataSetQueryParam();
-        datasetQueryParam.setTables(tables);
-        datasetQueryParam.setSql(queryStatement.getSql());
-        DatabaseResp database = semanticSchemaResp.getDatabaseResp();
-        if (!sqlGenerateUtils.isSupportWith(EngineType.fromString(database.getType().toUpperCase()),
-                database.getVersion())) {
-            datasetQueryParam.setSupportWith(false);
-            datasetQueryParam.setWithAlias(false);
-        }
-
-        // 7. do deriveMetric
-        generateDerivedMetric(semanticSchemaResp, aggOption, datasetQueryParam);
-
-        // 8.physicalSql by ParseSqlReq
-        // queryStructReq.setDateInfo(queryStructUtils.getDateConfBySql(queryStatement.getSql()));
-        queryStructReq.setDataSetId(queryStatement.getDataSetId());
-        queryStructReq.setQueryType(getQueryType(aggOption));
-        log.debug("QueryReqConverter queryStructReq[{}]", queryStructReq);
-        QueryParam queryParam = new QueryParam();
-        BeanUtils.copyProperties(queryStructReq, queryParam);
-        queryStatement.setQueryParam(queryParam);
-        queryStatement.setDataSetQueryParam(datasetQueryParam);
-        // queryStatement.setMinMaxTime(queryStructUtils.getBeginEndTime(queryStructReq));
-    }
-
-    private AggOption getAggOption(QueryStatement queryStatement,
-            List<MetricSchemaResp> metricSchemas) {
-        String sql = queryStatement.getSql();
-        if (!SqlSelectFunctionHelper.hasAggregateFunction(sql) && !SqlSelectHelper.hasGroupBy(sql)
-                && !SqlSelectHelper.hasWith(sql) && !SqlSelectHelper.hasSubSelect(sql)) {
-            log.debug("getAggOption simple sql set to DEFAULT");
-            return AggOption.DEFAULT;
-        }
-        // if there is no group by in S2SQL,set MetricTable's aggOption to "NATIVE"
-        // if there is count() in S2SQL,set MetricTable's aggOption to "NATIVE"
-        if (!SqlSelectFunctionHelper.hasAggregateFunction(sql)
-                || SqlSelectFunctionHelper.hasFunction(sql, "count")
-                || SqlSelectFunctionHelper.hasFunction(sql, "count_distinct")) {
-            return AggOption.OUTER;
-        }
-        // if (queryStatement.isInnerLayerNative()) {
-        // return AggOption.NATIVE;
-        // }
-        if (SqlSelectHelper.hasSubSelect(sql) || SqlSelectHelper.hasWith(sql)
-                || SqlSelectHelper.hasGroupBy(sql)) {
-            return AggOption.OUTER;
-        }
-        long defaultAggNullCnt = metricSchemas.stream().filter(
-                m -> Objects.isNull(m.getDefaultAgg()) || StringUtils.isBlank(m.getDefaultAgg()))
-                .count();
-        if (defaultAggNullCnt > 0) {
-            log.debug("getAggOption find null defaultAgg metric set to NATIVE");
-            return AggOption.OUTER;
-        }
-        return AggOption.DEFAULT;
-    }
-
-    private void convertNameToBizName(QueryStatement queryStatement) {
-        SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp();
-        Map<String, String> fieldNameToBizNameMap = getFieldNameToBizNameMap(semanticSchemaResp);
-        String sql = queryStatement.getSql();
-        log.debug("dataSetId:{},convert name to bizName before:{}", queryStatement.getDataSetId(),
-                sql);
-        sql = SqlReplaceHelper.replaceSqlByPositions(sql);
-        log.debug("replaceSqlByPositions:{}", sql);
-        sql = SqlReplaceHelper.replaceFields(sql, fieldNameToBizNameMap, true);
-        log.debug("dataSetId:{},convert name to bizName after:{}", queryStatement.getDataSetId(),
-                sql);
-        sql = SqlReplaceHelper.replaceTable(sql,
-                Constants.TABLE_PREFIX + queryStatement.getDataSetId());
-        log.debug("replaceTableName after:{}", sql);
-        queryStatement.setSql(sql);
-    }
-
-    private Set<String> getDimensions(SemanticSchemaResp semanticSchemaResp,
-            List<String> allFields) {
-        Map<String, String> dimensionLowerToNameMap = semanticSchemaResp.getDimensions().stream()
-                .collect(Collectors.toMap(entry -> entry.getBizName().toLowerCase(),
-                        SchemaItem::getBizName, (k1, k2) -> k1));
-        dimensionLowerToNameMap.put(TimeDimensionEnum.DAY.getName(),
-                TimeDimensionEnum.DAY.getName());
-        return allFields.stream()
-                .filter(entry -> dimensionLowerToNameMap.containsKey(entry.toLowerCase()))
-                .map(entry -> dimensionLowerToNameMap.get(entry.toLowerCase()))
-                .collect(Collectors.toSet());
-    }
-
-    private List<MetricSchemaResp> getMetrics(SemanticSchemaResp semanticSchemaResp,
-            List<String> allFields) {
-        Map<String, MetricSchemaResp> metricLowerToNameMap =
-                semanticSchemaResp.getMetrics().stream().collect(Collectors
-                        .toMap(entry -> entry.getBizName().toLowerCase(), entry -> entry));
-        return allFields.stream()
-                .filter(entry -> metricLowerToNameMap.containsKey(entry.toLowerCase()))
-                .map(entry -> metricLowerToNameMap.get(entry.toLowerCase()))
-                .collect(Collectors.toList());
-    }
-
-    private void rewriteFunction(QueryStatement queryStatement) {
-        SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp();
-        DatabaseResp database = semanticSchemaResp.getDatabaseResp();
-        if (Objects.isNull(database) || Objects.isNull(database.getType())) {
-            return;
-        }
-        String type = database.getType();
-        DbAdaptor engineAdaptor = DbAdaptorFactory.getEngineAdaptor(type.toLowerCase());
-        if (Objects.nonNull(engineAdaptor)) {
-            String functionNameCorrector =
-                    engineAdaptor.functionNameCorrector(queryStatement.getSql());
-            queryStatement.setSql(functionNameCorrector);
-        }
-    }
-
-    protected Map<String, String> getFieldNameToBizNameMap(SemanticSchemaResp semanticSchemaResp) {
-        // support fieldName and field alias to bizName
-        Map<String, String> dimensionResults = semanticSchemaResp.getDimensions().stream().flatMap(
-                entry -> getPairStream(entry.getAlias(), entry.getName(), entry.getBizName()))
-                .collect(Collectors.toMap(Pair::getLeft, Pair::getRight, (k1, k2) -> k1));
-
-        Map<String, String> metricResults = semanticSchemaResp.getMetrics().stream().flatMap(
-                entry -> getPairStream(entry.getAlias(), entry.getName(), entry.getBizName()))
-                .collect(Collectors.toMap(Pair::getLeft, Pair::getRight, (k1, k2) -> k1));
-
-        dimensionResults.putAll(TimeDimensionEnum.getChNameToNameMap());
-        dimensionResults.putAll(TimeDimensionEnum.getNameToNameMap());
-        dimensionResults.putAll(metricResults);
-        return dimensionResults;
-    }
-
-    private Stream<Pair<String, String>> getPairStream(String aliasStr, String name,
-            String bizName) {
-        Set<Pair<String, String>> elements = new HashSet<>();
-        elements.add(Pair.of(name, bizName));
-        if (StringUtils.isNotBlank(aliasStr)) {
-            List<String> aliasList = SchemaItem.getAliasList(aliasStr);
-            for (String alias : aliasList) {
-                elements.add(Pair.of(alias, bizName));
-            }
-        }
-        return elements.stream();
-    }
-
-    private QueryType getQueryType(AggOption aggOption) {
-        boolean isAgg = AggOption.isAgg(aggOption);
-        QueryType queryType = QueryType.DETAIL;
-        if (isAgg) {
-            queryType = QueryType.AGGREGATE;
-        }
-        return queryType;
-    }
-
-    private void generateDerivedMetric(SemanticSchemaResp semanticSchemaResp, AggOption aggOption,
-            DataSetQueryParam viewQueryParam) {
-        String sql = viewQueryParam.getSql();
-        for (MetricTable metricTable : viewQueryParam.getTables()) {
-            Set<String> measures = new HashSet<>();
-            Map<String, String> replaces = generateDerivedMetric(semanticSchemaResp, aggOption,
-                    metricTable.getMetrics(), metricTable.getDimensions(), measures);
-
-            if (!CollectionUtils.isEmpty(replaces)) {
-                // metricTable sql use measures replace metric
-                sql = SqlReplaceHelper.replaceSqlByExpression(sql, replaces);
-                metricTable.setAggOption(AggOption.NATIVE);
-                // metricTable use measures replace metric
-                if (!CollectionUtils.isEmpty(measures)) {
-                    metricTable.setMetrics(new ArrayList<>(measures));
-                } else {
-                    // empty measure , fill default
-                    metricTable.setMetrics(new ArrayList<>());
-                    metricTable.getMetrics().add(sqlGenerateUtils.generateInternalMetricName(
-                            getDefaultModel(semanticSchemaResp, metricTable.getDimensions())));
-                }
-            }
-        }
-        viewQueryParam.setSql(sql);
-    }
-
-    private Map<String, String> generateDerivedMetric(SemanticSchemaResp semanticSchemaResp,
-            AggOption aggOption, List<String> metrics, List<String> dimensions,
-            Set<String> measures) {
-        Map<String, String> result = new HashMap<>();
-        List<MetricSchemaResp> metricResps = semanticSchemaResp.getMetrics();
-        List<DimSchemaResp> dimensionResps = semanticSchemaResp.getDimensions();
-
-        // Check if any metric is derived
-        boolean hasDerivedMetrics =
-                metricResps.stream().anyMatch(m -> metrics.contains(m.getBizName()) && MetricType
-                        .isDerived(m.getMetricDefineType(), m.getMetricDefineByMeasureParams()));
-        if (!hasDerivedMetrics) {
-            return result;
-        }
-
-        log.debug("begin to generateDerivedMetric {} [{}]", aggOption, metrics);
-
-        Set<String> allFields = new HashSet<>();
-        Map<String, Measure> allMeasures = new HashMap<>();
-        semanticSchemaResp.getModelResps().forEach(modelResp -> {
-            allFields.addAll(modelResp.getFieldList());
-            if (modelResp.getModelDetail().getMeasures() != null) {
-                modelResp.getModelDetail().getMeasures()
-                        .forEach(measure -> allMeasures.put(measure.getBizName(), measure));
-            }
-        });
-
-        Set<String> derivedDimensions = new HashSet<>();
-        Set<String> derivedMetrics = new HashSet<>();
-        Map<String, String> visitedMetrics = new HashMap<>();
-
-        for (MetricResp metricResp : metricResps) {
-            if (metrics.contains(metricResp.getBizName())) {
-                boolean isDerived = MetricType.isDerived(metricResp.getMetricDefineType(),
-                        metricResp.getMetricDefineByMeasureParams());
-                if (isDerived) {
-                    String expr = sqlGenerateUtils.generateDerivedMetric(metricResps, allFields,
-                            allMeasures, dimensionResps, sqlGenerateUtils.getExpr(metricResp),
-                            metricResp.getMetricDefineType(), aggOption, visitedMetrics,
-                            derivedMetrics, derivedDimensions);
-                    result.put(metricResp.getBizName(), expr);
-                    log.debug("derived metric {}->{}", metricResp.getBizName(), expr);
-                } else {
-                    measures.add(metricResp.getBizName());
-                }
-            }
-        }
-
-        measures.addAll(derivedMetrics);
-        derivedDimensions.stream().filter(dimension -> !dimensions.contains(dimension))
-                .forEach(dimensions::add);
-
-        return result;
-    }
-
-    private String getDefaultModel(SemanticSchemaResp semanticSchemaResp, List<String> dimensions) {
-        if (!CollectionUtils.isEmpty(dimensions)) {
-            Map<String, Long> modelMatchCnt = new HashMap<>();
-            for (ModelResp modelResp : semanticSchemaResp.getModelResps()) {
-                modelMatchCnt.put(modelResp.getBizName(), modelResp.getModelDetail().getDimensions()
-                        .stream().filter(d -> dimensions.contains(d.getBizName())).count());
-            }
-            return modelMatchCnt.entrySet().stream()
-                    .sorted(Map.Entry.comparingByValue(Comparator.reverseOrder()))
-                    .map(m -> m.getKey()).findFirst().orElse("");
-        }
-        return semanticSchemaResp.getModelResps().get(0).getBizName();
-    }
 }

@@ -1,42 +0,0 @@
-package com.tencent.supersonic.headless.core.translator;
-
-import com.tencent.supersonic.headless.api.pojo.QueryParam;
-import com.tencent.supersonic.headless.core.pojo.QueryStatement;
-import lombok.extern.slf4j.Slf4j;
-import org.apache.commons.lang3.StringUtils;
-import org.springframework.stereotype.Component;
-import org.springframework.util.CollectionUtils;
-
-import java.util.Objects;
-import java.util.stream.Collectors;
-
-/** Remove the default metric added by the system when the query only has dimensions */
-@Slf4j
-@Component("DetailQueryOptimizer")
-public class DetailQueryOptimizer implements QueryOptimizer {
-
-    @Override
-    public void rewrite(QueryStatement queryStatement) {
-        QueryParam queryParam = queryStatement.getQueryParam();
-        String sqlRaw = queryStatement.getSql().trim();
-        if (StringUtils.isEmpty(sqlRaw)) {
-            throw new RuntimeException("sql is empty or null");
-        }
-        log.debug("before handleNoMetric, sql:{}", sqlRaw);
-        if (isDetailQuery(queryParam)) {
-            if (queryParam.getMetrics().size() == 0
-                    && !CollectionUtils.isEmpty(queryParam.getGroups())) {
-                String sqlForm = "select %s from ( %s ) src_no_metric";
-                String sql = String.format(sqlForm,
-                        queryParam.getGroups().stream().collect(Collectors.joining(",")), sqlRaw);
-                queryStatement.setSql(sql);
-            }
-        }
-        log.debug("after handleNoMetric, sql:{}", queryStatement.getSql());
-    }
-
-    public boolean isDetailQuery(QueryParam queryParam) {
-        return Objects.nonNull(queryParam) && queryParam.getQueryType().isNativeAggQuery()
-                && CollectionUtils.isEmpty(queryParam.getMetrics());
-    }
-}
@@ -1,9 +0,0 @@
-package com.tencent.supersonic.headless.core.translator;
-
-import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
-import com.tencent.supersonic.headless.core.pojo.QueryStatement;
-
-/** A query parser generates physical SQL for the QueryStatement. */
-public interface QueryParser {
-    void parse(QueryStatement queryStatement, AggOption aggOption) throws Exception;
-}
@@ -1,176 +0,0 @@
|
||||
package com.tencent.supersonic.headless.core.translator.calcite.sql;
|
||||
|
||||
import com.tencent.supersonic.common.calcite.Configuration;
|
||||
import com.tencent.supersonic.common.calcite.SqlMergeWithUtils;
|
||||
import com.tencent.supersonic.common.pojo.enums.EngineType;
|
||||
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
|
||||
import com.tencent.supersonic.headless.core.pojo.Database;
|
||||
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
|
||||
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.DataModelNode;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.SemanticNode;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.sql.render.FilterRender;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.sql.render.OutputRender;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.sql.render.Renderer;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.sql.render.SourceRender;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.apache.calcite.sql.SqlNode;
|
||||
import org.apache.calcite.sql.parser.SqlParseException;
|
||||
import org.apache.calcite.sql.parser.SqlParser;
|
||||
import org.apache.calcite.sql.validate.SqlValidatorScope;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import java.util.ListIterator;
|
||||
import java.util.Objects;
|
||||
|
||||
/** parsing from query dimensions and metrics */
|
||||
@Slf4j
|
||||
public class SqlBuilder {
|
||||
|
||||
private final S2CalciteSchema schema;
|
||||
private MetricQueryParam metricQueryParam;
|
||||
private SqlValidatorScope scope;
|
||||
private SqlNode parserNode;
|
||||
private boolean isAgg = false;
|
||||
private AggOption aggOption = AggOption.DEFAULT;
|
||||
|
||||
public SqlBuilder(S2CalciteSchema schema) {
|
||||
this.schema = schema;
|
||||
}
|
||||
|
||||
public void build(QueryStatement queryStatement, AggOption aggOption) throws Exception {
|
||||
this.metricQueryParam = queryStatement.getMetricQueryParam();
|
||||
if (metricQueryParam.getMetrics() == null) {
|
||||
metricQueryParam.setMetrics(new ArrayList<>());
|
||||
}
|
||||
if (metricQueryParam.getDimensions() == null) {
|
||||
metricQueryParam.setDimensions(new ArrayList<>());
|
||||
}
|
||||
if (metricQueryParam.getLimit() == null) {
|
||||
metricQueryParam.setLimit(0L);
|
||||
}
|
||||
this.aggOption = aggOption;
|
||||
|
||||
buildParseNode();
|
||||
Database database = queryStatement.getOntology().getDatabase();
|
||||
EngineType engineType = EngineType.fromString(database.getType());
|
||||
optimizeParseNode(engineType);
|
||||
String sql = getSql(engineType);
|
||||
|
||||
queryStatement.setSql(sql);
|
||||
if (Objects.nonNull(queryStatement.getEnableOptimize())
|
||||
&& queryStatement.getEnableOptimize()
|
||||
&& Objects.nonNull(queryStatement.getDataSetAlias())
|
||||
&& !queryStatement.getDataSetAlias().isEmpty()) {
|
||||
// simplify model sql with query sql
|
||||
String simplifySql = rewrite(getSqlByDataSet(engineType, sql,
|
||||
queryStatement.getDataSetSql(), queryStatement.getDataSetAlias()), engineType);
|
||||
if (Objects.nonNull(simplifySql) && !simplifySql.isEmpty()) {
|
||||
log.debug("simplifySql [{}]", simplifySql);
|
||||
queryStatement.setDataSetSimplifySql(simplifySql);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void buildParseNode() throws Exception {
|
||||
// find the match Datasource
|
||||
scope = SchemaBuilder.getScope(schema);
|
||||
List<DataModel> dataModels =
|
||||
DataModelNode.getRelatedDataModels(scope, schema, metricQueryParam);
|
||||
if (dataModels == null || dataModels.isEmpty()) {
|
||||
throw new Exception("data model not found");
|
||||
}
|
||||
isAgg = getAgg(dataModels.get(0));
|
||||
|
||||
// build level by level
|
||||
LinkedList<Renderer> builders = new LinkedList<>();
|
||||
builders.add(new SourceRender());
|
||||
builders.add(new FilterRender());
|
||||
builders.add(new OutputRender());
|
||||
ListIterator<Renderer> it = builders.listIterator();
|
||||
int i = 0;
|
||||
Renderer previous = null;
|
||||
while (it.hasNext()) {
|
||||
Renderer renderer = it.next();
|
||||
if (previous != null) {
|
||||
previous.render(metricQueryParam, dataModels, scope, schema, !isAgg);
|
||||
renderer.setTable(previous
|
||||
.builderAs(DataModelNode.getNames(dataModels) + "_" + String.valueOf(i)));
|
||||
i++;
|
||||
}
|
||||
previous = renderer;
|
||||
}
|
||||
builders.getLast().render(metricQueryParam, dataModels, scope, schema, !isAgg);
|
||||
parserNode = builders.getLast().builder();
|
||||
}

    private boolean getAgg(DataModel dataModel) {
        if (!AggOption.DEFAULT.equals(aggOption)) {
            return AggOption.isAgg(aggOption);
        }
        // default by dataModel time aggregation
        if (Objects.nonNull(dataModel.getAggTime()) && !dataModel.getAggTime()
                .equalsIgnoreCase(Constants.DIMENSION_TYPE_TIME_GRANULARITY_NONE)) {
            if (!metricQueryParam.isNativeQuery()) {
                return true;
            }
        }
        return isAgg;
    }

    public String getSql(EngineType engineType) {
        return SemanticNode.getSql(parserNode, engineType);
    }

    private String rewrite(String sql, EngineType engineType) {
        try {
            SqlNode sqlNode =
                    SqlParser.create(sql, Configuration.getParserConfig(engineType)).parseStmt();
            if (Objects.nonNull(sqlNode)) {
                return SemanticNode.getSql(
                        SemanticNode.optimize(scope, schema, sqlNode, engineType), engineType);
            }
        } catch (Exception e) {
            log.error("optimize error {}", e.toString());
        }
        return "";
    }

    private void optimizeParseNode(EngineType engineType) {
        if (Objects.isNull(schema.getRuntimeOptions())
                || Objects.isNull(schema.getRuntimeOptions().getEnableOptimize())
                || !schema.getRuntimeOptions().getEnableOptimize()) {
            return;
        }

        SqlNode optimizeNode = null;
        try {
            SqlNode sqlNode = SqlParser.create(SemanticNode.getSql(parserNode, engineType),
                    Configuration.getParserConfig(engineType)).parseStmt();
            if (Objects.nonNull(sqlNode)) {
                optimizeNode = SemanticNode.optimize(scope, schema, sqlNode, engineType);
            }
        } catch (Exception e) {
            log.error("optimize error {}", e);
        }

        if (Objects.nonNull(optimizeNode)) {
            parserNode = optimizeNode;
        }
    }

    private String getSqlByDataSet(EngineType engineType, String parentSql, String dataSetSql,
            String parentAlias) throws SqlParseException {
        if (!SqlMergeWithUtils.hasWith(engineType, dataSetSql)) {
            return String.format("with %s as (%s) %s", parentAlias, parentSql, dataSetSql);
        }
        return SqlMergeWithUtils.mergeWith(engineType, dataSetSql,
                Collections.singletonList(parentSql), Collections.singletonList(parentAlias));
    }
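    // Example (illustrative, hypothetical SQL): with parentAlias "t_1",
    // parentSql "select day, pv from model_tbl" and dataSetSql "select day, pv from t_1",
    // the no-WITH branch produces:
    //   with t_1 as (select day, pv from model_tbl) select day, pv from t_1
    // If dataSetSql already carries a WITH clause, SqlMergeWithUtils merges the parent
    // query into the existing WITH list instead of nesting a second one.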

}
@@ -4,9 +4,8 @@ import com.google.common.collect.Lists;
import com.tencent.supersonic.common.jsqlparser.SqlAddHelper;
import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
import com.tencent.supersonic.headless.api.pojo.MetricTable;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Dimension;
import lombok.extern.slf4j.Slf4j;
import net.sf.jsqlparser.expression.Expression;
import net.sf.jsqlparser.expression.StringValue;
@@ -28,8 +27,8 @@ public class DefaultDimValueConverter implements QueryConverter {

    @Override
    public boolean accept(QueryStatement queryStatement) {
        return !Objects.isNull(queryStatement.getDataSetQueryParam())
                && !StringUtils.isBlank(queryStatement.getDataSetQueryParam().getSql());
        return Objects.nonNull(queryStatement.getSqlQueryParam())
                && StringUtils.isNotBlank(queryStatement.getSqlQueryParam().getSql());
    }

    @Override
@@ -40,15 +39,13 @@ public class DefaultDimValueConverter implements QueryConverter {
        if (CollectionUtils.isEmpty(dimensions)) {
            return;
        }
        String sql = queryStatement.getDataSetQueryParam().getSql();
        String sql = queryStatement.getSqlQueryParam().getSql();
        List<String> whereFields = SqlSelectHelper.getWhereFields(sql).stream()
                .filter(field -> !TimeDimensionEnum.containsTimeDimension(field))
                .collect(Collectors.toList());
        if (!CollectionUtils.isEmpty(whereFields)) {
            return;
        }
        MetricTable metricTable =
                queryStatement.getDataSetQueryParam().getTables().stream().findFirst().orElse(null);
        List<Expression> expressions = Lists.newArrayList();
        for (Dimension dimension : dimensions) {
            ExpressionList expressionList = new ExpressionList();
@@ -59,11 +56,11 @@ public class DefaultDimValueConverter implements QueryConverter {
            inExpression.setLeftExpression(new Column(dimension.getBizName()));
            inExpression.setRightExpression(expressionList);
            expressions.add(inExpression);
            if (metricTable != null) {
                metricTable.getDimensions().add(dimension.getBizName());
            if (Objects.nonNull(queryStatement.getSqlQueryParam().getTable())) {
                queryStatement.getOntologyQueryParam().getDimensions().add(dimension.getBizName());
            }
        }
        sql = SqlAddHelper.addWhere(sql, expressions);
        queryStatement.getDataSetQueryParam().setSql(sql);
        queryStatement.getSqlQueryParam().setSql(sql);
    }
}
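// Note: this converter only fires when the query carries no non-time where-condition;
// for each dimension with default values (gathered in the elided context above) it
// appends an IN (...) filter and registers the dimension on the ontology query.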

@@ -6,82 +6,46 @@ import com.tencent.supersonic.common.pojo.enums.DatePeriodEnum;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.common.util.DateModeUtils;
import com.tencent.supersonic.headless.api.pojo.MetricTable;
import com.tencent.supersonic.headless.api.pojo.QueryParam;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.core.pojo.DataSetQueryParam;
import com.tencent.supersonic.headless.core.pojo.Database;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQueryParam;
import com.tencent.supersonic.headless.core.pojo.StructQueryParam;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;

/** supplement the QueryStatement when query with custom aggregation method */
@Component("CalculateAggConverter")
@Slf4j
public class CalculateAggConverter implements QueryConverter {
public class MetricRatioConverter implements QueryConverter {

    public interface EngineSql {

        String sql(QueryParam queryParam, boolean isOver, boolean asWith, String metricSql);
    }

    public DataSetQueryParam generateSqlCommend(QueryStatement queryStatement,
            EngineType engineTypeEnum, String version) throws Exception {
        SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class);
        QueryParam queryParam = queryStatement.getQueryParam();
        // ratio query (over-period / roll-period comparison)
        if (isRatioAccept(queryParam)) {
            return generateRatioSqlCommand(queryStatement, engineTypeEnum, version);
        }
        DataSetQueryParam sqlCommand = new DataSetQueryParam();
        String metricTableName = "v_metric_tb_tmp";
        MetricTable metricTable = new MetricTable();
        metricTable.setAlias(metricTableName);
        metricTable.setMetrics(queryParam.getMetrics());
        metricTable.setDimensions(queryParam.getGroups());
        String where = sqlGenerateUtils.generateWhere(queryParam, null);
        log.info("in generateSqlCommand, complete where:{}", where);
        metricTable.setWhere(where);
        metricTable.setAggOption(AggOption.AGGREGATION);
        sqlCommand.setTables(new ArrayList<>(Collections.singletonList(metricTable)));
        String sql = String.format("select %s from %s %s %s %s",
                sqlGenerateUtils.getSelect(queryParam), metricTableName,
                sqlGenerateUtils.getGroupBy(queryParam), sqlGenerateUtils.getOrderBy(queryParam),
                sqlGenerateUtils.getLimit(queryParam));
        if (!sqlGenerateUtils.isSupportWith(engineTypeEnum, version)) {
            sqlCommand.setSupportWith(false);
            sql = String.format("select %s from %s t0 %s %s %s",
                    sqlGenerateUtils.getSelect(queryParam), metricTableName,
                    sqlGenerateUtils.getGroupBy(queryParam),
                    sqlGenerateUtils.getOrderBy(queryParam), sqlGenerateUtils.getLimit(queryParam));
        }
        sqlCommand.setSql(sql);
        return sqlCommand;
        String sql(StructQueryParam structQueryParam, boolean isOver, boolean asWith,
                String metricSql);
    }
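    // Semantics, as implemented by the getTimeSpan methods below: RATIO_OVER compares
    // with the same point one cycle up (day -> same day last week, week -> last month,
    // month -> last year), while RATIO_ROLL compares with the immediately preceding
    // period (day -> previous day, week -> previous week, month -> previous month).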

    @Override
    public boolean accept(QueryStatement queryStatement) {
        if (Objects.isNull(queryStatement.getQueryParam()) || queryStatement.getIsS2SQL()) {
        if (Objects.isNull(queryStatement.getStructQueryParam()) || queryStatement.getIsS2SQL()
                || !isRatioAccept(queryStatement.getStructQueryParam())) {
            return false;
        }
        QueryParam queryParam = queryStatement.getQueryParam();
        if (queryParam.getQueryType().isNativeAggQuery()) {
            return false;
        }
        if (CollectionUtils.isEmpty(queryParam.getAggregators())) {
        StructQueryParam structQueryParam = queryStatement.getStructQueryParam();
        if (structQueryParam.getQueryType().isNativeAggQuery()
                || CollectionUtils.isEmpty(structQueryParam.getAggregators())) {
            return false;
        }

        int nonSumFunction = 0;
        for (Aggregator agg : queryParam.getAggregators()) {
        for (Aggregator agg : structQueryParam.getAggregators()) {
            if (agg.getFunc() == null || "".equals(agg.getFunc())) {
                return false;
            }
@@ -98,14 +62,12 @@ public class CalculateAggConverter implements QueryConverter {
    @Override
    public void convert(QueryStatement queryStatement) throws Exception {
        Database database = queryStatement.getOntology().getDatabase();
        DataSetQueryParam dataSetQueryParam = generateSqlCommend(queryStatement,
                EngineType.fromString(database.getType().toUpperCase()), database.getVersion());
        queryStatement.setDataSetQueryParam(dataSetQueryParam);
        generateRatioSql(queryStatement, database.getType(), database.getVersion());
    }

    /** Ratio */
    public boolean isRatioAccept(QueryParam queryParam) {
        Long ratioFuncNum = queryParam.getAggregators().stream()
    public boolean isRatioAccept(StructQueryParam structQueryParam) {
        Long ratioFuncNum = structQueryParam.getAggregators().stream()
                .filter(f -> (f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)
                        || f.getFunc().equals(AggOperatorEnum.RATIO_OVER)))
                .count();
@@ -115,53 +77,47 @@ public class CalculateAggConverter implements QueryConverter {
        return false;
    }

    public DataSetQueryParam generateRatioSqlCommand(QueryStatement queryStatement,
            EngineType engineTypeEnum, String version) throws Exception {
    public void generateRatioSql(QueryStatement queryStatement, EngineType engineTypeEnum,
            String version) throws Exception {
        SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class);
        QueryParam queryParam = queryStatement.getQueryParam();
        check(queryParam);
        StructQueryParam structQueryParam = queryStatement.getStructQueryParam();
        check(structQueryParam);
        queryStatement.setEnableOptimize(false);
        DataSetQueryParam sqlCommand = new DataSetQueryParam();
        OntologyQueryParam ontologyQueryParam = queryStatement.getOntologyQueryParam();
        ontologyQueryParam.setAggOption(AggOption.AGGREGATION);
        String metricTableName = "v_metric_tb_tmp";
        MetricTable metricTable = new MetricTable();
        metricTable.setAlias(metricTableName);
        metricTable.setMetrics(queryParam.getMetrics());
        metricTable.setDimensions(queryParam.getGroups());
        String where = sqlGenerateUtils.generateWhere(queryParam, null);
        log.info("in generateSqlCommend, complete where:{}", where);
        metricTable.setWhere(where);
        metricTable.setAggOption(AggOption.AGGREGATION);
        sqlCommand.setTables(new ArrayList<>(Collections.singletonList(metricTable)));
        boolean isOver = isOverRatio(queryParam);
        boolean isOver = isOverRatio(structQueryParam);
        String sql = "";

        SqlQueryParam dsParam = queryStatement.getSqlQueryParam();
        dsParam.setTable(metricTableName);
        switch (engineTypeEnum) {
            case H2:
                sql = new H2EngineSql().sql(queryParam, isOver, true, metricTableName);
                sql = new H2EngineSql().sql(structQueryParam, isOver, true, metricTableName);
                break;
            case MYSQL:
            case DORIS:
            case CLICKHOUSE:
                if (!sqlGenerateUtils.isSupportWith(engineTypeEnum, version)) {
                    sqlCommand.setSupportWith(false);
                    dsParam.setSupportWith(false);
                }
                if (!engineTypeEnum.equals(EngineType.CLICKHOUSE)) {
                    sql = new MysqlEngineSql().sql(queryParam, isOver, sqlCommand.isSupportWith(),
                            metricTableName);
                    sql = new MysqlEngineSql().sql(structQueryParam, isOver,
                            dsParam.isSupportWith(), metricTableName);
                } else {
                    sql = new CkEngineSql().sql(queryParam, isOver, sqlCommand.isSupportWith(),
                    sql = new CkEngineSql().sql(structQueryParam, isOver, dsParam.isSupportWith(),
                            metricTableName);
                }
                break;
            default:
        }
        sqlCommand.setSql(sql);
        return sqlCommand;
        dsParam.setSql(sql);
    }
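    // Shape of the generated ratio SQL (illustrative, hypothetical metric "pv" grouped
    // by "sys_imp_date"): the metric table is left-joined with a time-shifted copy of
    // itself, e.g.
    //   select sys_imp_date, ((pv - pv_roll) / pv_roll) as pv_ratio
    //   from (select t0.sys_imp_date, t0.pv, t1.sys_imp_date as sys_imp_date_roll,
    //         t1.pv as pv_roll from v_metric_tb_tmp t0 left join v_metric_tb_tmp t1
    //         on <time-shifted match>) metric_tb_src
    //   order by sys_imp_date desc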

    public class H2EngineSql implements EngineSql {

        public String getOverSelect(QueryParam queryParam, boolean isOver) {
            String aggStr = queryParam.getAggregators().stream().map(f -> {
        public String getOverSelect(StructQueryParam structQueryParam, boolean isOver) {
            String aggStr = structQueryParam.getAggregators().stream().map(f -> {
                if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER)
                        || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
                    return String.format("( (%s-%s_roll)/cast(%s_roll as DOUBLE) ) as %s_%s,%s",
@@ -171,43 +127,44 @@ public class CalculateAggConverter implements QueryConverter {
                    return f.getColumn();
                }
            }).collect(Collectors.joining(","));
            return CollectionUtils.isEmpty(queryParam.getGroups()) ? aggStr
                    : String.join(",", queryParam.getGroups()) + "," + aggStr;
            return CollectionUtils.isEmpty(structQueryParam.getGroups()) ? aggStr
                    : String.join(",", structQueryParam.getGroups()) + "," + aggStr;
        }

        public String getTimeSpan(QueryParam queryParam, boolean isOver, boolean isAdd) {
            if (Objects.nonNull(queryParam.getDateInfo())) {
        public String getTimeSpan(StructQueryParam structQueryParam, boolean isOver,
                boolean isAdd) {
            if (Objects.nonNull(structQueryParam.getDateInfo())) {
                String addStr = isAdd ? "" : "-";
                if (queryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.DAY)) {
                if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.DAY)) {
                    return "day," + (isOver ? addStr + "7" : addStr + "1");
                }
                if (queryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK)) {
                if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK)) {
                    return isOver ? "month," + addStr + "1" : "day," + addStr + "7";
                }
                if (queryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) {
                if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) {
                    return isOver ? "year," + addStr + "1" : "month," + addStr + "1";
                }
            }
            return "";
        }

        public String getJoinOn(QueryParam queryParam, boolean isOver, String aliasLeft,
        public String getJoinOn(StructQueryParam structQueryParam, boolean isOver, String aliasLeft,
                String aliasRight) {
            String timeDim = getTimeDim(queryParam);
            String timeSpan = getTimeSpan(queryParam, isOver, true);
            String aggStr = queryParam.getAggregators().stream().map(f -> {
            String timeDim = getTimeDim(structQueryParam);
            String timeSpan = getTimeSpan(structQueryParam, isOver, true);
            String aggStr = structQueryParam.getAggregators().stream().map(f -> {
                if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER)
                        || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
                    if (queryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) {
                    if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) {
                        return String.format(
                                "%s is not null and %s = FORMATDATETIME(DATEADD(%s,CONCAT(%s,'-01')),'yyyy-MM') ",
                                aliasRight + timeDim, aliasLeft + timeDim, timeSpan,
                                aliasRight + timeDim);
                    }
                    if (queryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK)
                    if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK)
                            && isOver) {
                        return String.format(" DATE_TRUNC('week',DATEADD(%s,%s) ) = %s ",
                                getTimeSpan(queryParam, isOver, false), aliasLeft + timeDim,
                                getTimeSpan(structQueryParam, isOver, false), aliasLeft + timeDim,
                                aliasRight + timeDim);
                    }
                    return String.format("%s = TIMESTAMPADD(%s,%s) ", aliasLeft + timeDim, timeSpan,
@@ -217,7 +174,7 @@ public class CalculateAggConverter implements QueryConverter {
                }
            }).collect(Collectors.joining(" and "));
            List<String> groups = new ArrayList<>();
            for (String group : queryParam.getGroups()) {
            for (String group : structQueryParam.getGroups()) {
                if (group.equalsIgnoreCase(timeDim)) {
                    continue;
                }
@@ -228,35 +185,36 @@ public class CalculateAggConverter implements QueryConverter {
        }

        @Override
        public String sql(QueryParam queryParam, boolean isOver, boolean asWith, String metricSql) {
        public String sql(StructQueryParam structQueryParam, boolean isOver, boolean asWith,
                String metricSql) {
            String sql = String.format(
                    "select %s from ( select %s , %s from %s t0 left join %s t1 on %s ) metric_tb_src %s %s ",
                    getOverSelect(queryParam, isOver), getAllSelect(queryParam, "t0."),
                    getAllJoinSelect(queryParam, "t1."), metricSql, metricSql,
                    getJoinOn(queryParam, isOver, "t0.", "t1."), getOrderBy(queryParam),
                    getLimit(queryParam));
                    getOverSelect(structQueryParam, isOver), getAllSelect(structQueryParam, "t0."),
                    getAllJoinSelect(structQueryParam, "t1."), metricSql, metricSql,
                    getJoinOn(structQueryParam, isOver, "t0.", "t1."), getOrderBy(structQueryParam),
                    getLimit(structQueryParam));
            return sql;
        }
    }

    public class CkEngineSql extends MysqlEngineSql {

        public String getJoinOn(QueryParam queryParam, boolean isOver, String aliasLeft,
        public String getJoinOn(StructQueryParam structQueryParam, boolean isOver, String aliasLeft,
                String aliasRight) {
            String timeDim = getTimeDim(queryParam);
            String timeSpan = "INTERVAL " + getTimeSpan(queryParam, isOver, true);
            String aggStr = queryParam.getAggregators().stream().map(f -> {
            String timeDim = getTimeDim(structQueryParam);
            String timeSpan = "INTERVAL " + getTimeSpan(structQueryParam, isOver, true);
            String aggStr = structQueryParam.getAggregators().stream().map(f -> {
                if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER)
                        || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
                    if (queryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) {
                    if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) {
                        return String.format(
                                "toDate(CONCAT(%s,'-01')) = date_add(toDate(CONCAT(%s,'-01')),%s) ",
                                aliasLeft + timeDim, aliasRight + timeDim, timeSpan);
                    }
                    if (queryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK)
                    if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK)
                            && isOver) {
                        return String.format("toMonday(date_add(%s ,INTERVAL %s) ) = %s",
                                aliasLeft + timeDim, getTimeSpan(queryParam, isOver, false),
                                aliasLeft + timeDim, getTimeSpan(structQueryParam, isOver, false),
                                aliasRight + timeDim);
                    }
                    return String.format("%s = date_add(%s,%s) ", aliasLeft + timeDim,
@@ -266,7 +224,7 @@ public class CalculateAggConverter implements QueryConverter {
                }
            }).collect(Collectors.joining(" and "));
            List<String> groups = new ArrayList<>();
            for (String group : queryParam.getGroups()) {
            for (String group : structQueryParam.getGroups()) {
                if (group.equalsIgnoreCase(timeDim)) {
                    continue;
                }
@@ -277,45 +235,49 @@ public class CalculateAggConverter implements QueryConverter {
        }

        @Override
        public String sql(QueryParam queryParam, boolean isOver, boolean asWith, String metricSql) {
        public String sql(StructQueryParam structQueryParam, boolean isOver, boolean asWith,
                String metricSql) {
            if (!asWith) {
                return String.format(
                        "select %s from ( select %s , %s from %s t0 left join %s t1 on %s ) metric_tb_src %s %s ",
                        getOverSelect(queryParam, isOver), getAllSelect(queryParam, "t0."),
                        getAllJoinSelect(queryParam, "t1."), metricSql, metricSql,
                        getJoinOn(queryParam, isOver, "t0.", "t1."), getOrderBy(queryParam),
                        getLimit(queryParam));
                        getOverSelect(structQueryParam, isOver),
                        getAllSelect(structQueryParam, "t0."),
                        getAllJoinSelect(structQueryParam, "t1."), metricSql, metricSql,
                        getJoinOn(structQueryParam, isOver, "t0.", "t1."),
                        getOrderBy(structQueryParam), getLimit(structQueryParam));
            }
            return String.format(
                    ",t0 as (select * from %s),t1 as (select * from %s) select %s from ( select %s , %s "
                            + "from t0 left join t1 on %s ) metric_tb_src %s %s ",
                    metricSql, metricSql, getOverSelect(queryParam, isOver),
                    getAllSelect(queryParam, "t0."), getAllJoinSelect(queryParam, "t1."),
                    getJoinOn(queryParam, isOver, "t0.", "t1."), getOrderBy(queryParam),
                    getLimit(queryParam));
                    metricSql, metricSql, getOverSelect(structQueryParam, isOver),
                    getAllSelect(structQueryParam, "t0."),
                    getAllJoinSelect(structQueryParam, "t1."),
                    getJoinOn(structQueryParam, isOver, "t0.", "t1."), getOrderBy(structQueryParam),
                    getLimit(structQueryParam));
        }
    }

    public class MysqlEngineSql implements EngineSql {

        public String getTimeSpan(QueryParam queryParam, boolean isOver, boolean isAdd) {
            if (Objects.nonNull(queryParam.getDateInfo())) {
        public String getTimeSpan(StructQueryParam structQueryParam, boolean isOver,
                boolean isAdd) {
            if (Objects.nonNull(structQueryParam.getDateInfo())) {
                String addStr = isAdd ? "" : "-";
                if (queryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.DAY)) {
                if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.DAY)) {
                    return isOver ? addStr + "7 day" : addStr + "1 day";
                }
                if (queryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK)) {
                if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK)) {
                    return isOver ? addStr + "1 month" : addStr + "7 day";
                }
                if (queryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) {
                if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) {
                    return isOver ? addStr + "1 year" : addStr + "1 month";
                }
            }
            return "";
        }

        public String getOverSelect(QueryParam queryParam, boolean isOver) {
            String aggStr = queryParam.getAggregators().stream().map(f -> {
        public String getOverSelect(StructQueryParam structQueryParam, boolean isOver) {
            String aggStr = structQueryParam.getAggregators().stream().map(f -> {
                if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER)
                        || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
                    return String.format("if(%s_roll!=0, (%s-%s_roll)/%s_roll , 0) as %s_%s,%s",
@@ -325,26 +287,26 @@ public class CalculateAggConverter implements QueryConverter {
                    return f.getColumn();
                }
            }).collect(Collectors.joining(","));
            return CollectionUtils.isEmpty(queryParam.getGroups()) ? aggStr
                    : String.join(",", queryParam.getGroups()) + "," + aggStr;
            return CollectionUtils.isEmpty(structQueryParam.getGroups()) ? aggStr
                    : String.join(",", structQueryParam.getGroups()) + "," + aggStr;
        }

        public String getJoinOn(QueryParam queryParam, boolean isOver, String aliasLeft,
        public String getJoinOn(StructQueryParam structQueryParam, boolean isOver, String aliasLeft,
                String aliasRight) {
            String timeDim = getTimeDim(queryParam);
            String timeSpan = "INTERVAL " + getTimeSpan(queryParam, isOver, true);
            String aggStr = queryParam.getAggregators().stream().map(f -> {
            String timeDim = getTimeDim(structQueryParam);
            String timeSpan = "INTERVAL " + getTimeSpan(structQueryParam, isOver, true);
            String aggStr = structQueryParam.getAggregators().stream().map(f -> {
                if (f.getFunc().equals(AggOperatorEnum.RATIO_OVER)
                        || f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)) {
                    if (queryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) {
                    if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.MONTH)) {
                        return String.format(
                                "%s = DATE_FORMAT(date_add(CONCAT(%s,'-01'), %s),'%%Y-%%m') ",
                                aliasLeft + timeDim, aliasRight + timeDim, timeSpan);
                    }
                    if (queryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK)
                    if (structQueryParam.getDateInfo().getPeriod().equals(DatePeriodEnum.WEEK)
                            && isOver) {
                        return String.format("to_monday(date_add(%s ,INTERVAL %s) ) = %s",
                                aliasLeft + timeDim, getTimeSpan(queryParam, isOver, false),
                                aliasLeft + timeDim, getTimeSpan(structQueryParam, isOver, false),
                                aliasRight + timeDim);
                    }
                    return String.format("%s = date_add(%s,%s) ", aliasLeft + timeDim,
@@ -354,7 +316,7 @@ public class CalculateAggConverter implements QueryConverter {
                }
            }).collect(Collectors.joining(" and "));
            List<String> groups = new ArrayList<>();
            for (String group : queryParam.getGroups()) {
            for (String group : structQueryParam.getGroups()) {
                if (group.equalsIgnoreCase(timeDim)) {
                    continue;
                }
@@ -365,51 +327,53 @@ public class CalculateAggConverter implements QueryConverter {
        }
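        // Example (illustrative, assuming the time dimension is "sys_imp_date" and a
        // DAY-period roll ratio): the join condition becomes
        //   t0.sys_imp_date = date_add(t1.sys_imp_date, INTERVAL 1 day)
        // so each t0 row is matched with the previous day's row from t1.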

        @Override
        public String sql(QueryParam queryParam, boolean isOver, boolean asWith, String metricSql) {
        public String sql(StructQueryParam structQueryParam, boolean isOver, boolean asWith,
                String metricSql) {
            String sql = String.format(
                    "select %s from ( select %s , %s from %s t0 left join %s t1 on %s ) metric_tb_src %s %s ",
                    getOverSelect(queryParam, isOver), getAllSelect(queryParam, "t0."),
                    getAllJoinSelect(queryParam, "t1."), metricSql, metricSql,
                    getJoinOn(queryParam, isOver, "t0.", "t1."), getOrderBy(queryParam),
                    getLimit(queryParam));
                    getOverSelect(structQueryParam, isOver), getAllSelect(structQueryParam, "t0."),
                    getAllJoinSelect(structQueryParam, "t1."), metricSql, metricSql,
                    getJoinOn(structQueryParam, isOver, "t0.", "t1."), getOrderBy(structQueryParam),
                    getLimit(structQueryParam));
            return sql;
        }
    }

    private String getAllJoinSelect(QueryParam queryParam, String alias) {
        String aggStr = queryParam.getAggregators().stream()
    private String getAllJoinSelect(StructQueryParam structQueryParam, String alias) {
        String aggStr = structQueryParam.getAggregators().stream()
                .map(f -> getSelectField(f, alias) + " as " + getSelectField(f, "") + "_roll")
                .collect(Collectors.joining(","));
        List<String> groups = new ArrayList<>();
        for (String group : queryParam.getGroups()) {
        for (String group : structQueryParam.getGroups()) {
            groups.add(alias + group + " as " + group + "_roll");
        }
        return CollectionUtils.isEmpty(groups) ? aggStr : String.join(",", groups) + "," + aggStr;
    }

    private String getGroupDimWithOutTime(QueryParam queryParam) {
        String timeDim = getTimeDim(queryParam);
        return queryParam.getGroups().stream().filter(f -> !f.equalsIgnoreCase(timeDim))
    private String getGroupDimWithOutTime(StructQueryParam structQueryParam) {
        String timeDim = getTimeDim(structQueryParam);
        return structQueryParam.getGroups().stream().filter(f -> !f.equalsIgnoreCase(timeDim))
                .collect(Collectors.joining(","));
    }

    private static String getTimeDim(QueryParam queryParam) {
    private static String getTimeDim(StructQueryParam structQueryParam) {
        DateModeUtils dateModeUtils = ContextUtils.getContext().getBean(DateModeUtils.class);
        return dateModeUtils.getSysDateCol(queryParam.getDateInfo());
        return dateModeUtils.getSysDateCol(structQueryParam.getDateInfo());
    }

    private static String getLimit(QueryParam queryParam) {
        if (queryParam != null && queryParam.getLimit() != null && queryParam.getLimit() > 0) {
            return " limit " + String.valueOf(queryParam.getLimit());
    private static String getLimit(StructQueryParam structQueryParam) {
        if (structQueryParam != null && structQueryParam.getLimit() != null
                && structQueryParam.getLimit() > 0) {
            return " limit " + String.valueOf(structQueryParam.getLimit());
        }
        return "";
    }

    private String getAllSelect(QueryParam queryParam, String alias) {
        String aggStr = queryParam.getAggregators().stream().map(f -> getSelectField(f, alias))
                .collect(Collectors.joining(","));
        return CollectionUtils.isEmpty(queryParam.getGroups()) ? aggStr
                : alias + String.join("," + alias, queryParam.getGroups()) + "," + aggStr;
    private String getAllSelect(StructQueryParam structQueryParam, String alias) {
        String aggStr = structQueryParam.getAggregators().stream()
                .map(f -> getSelectField(f, alias)).collect(Collectors.joining(","));
        return CollectionUtils.isEmpty(structQueryParam.getGroups()) ? aggStr
                : alias + String.join("," + alias, structQueryParam.getGroups()) + "," + aggStr;
    }

    private String getSelectField(final Aggregator agg, String alias) {
@@ -421,32 +385,32 @@ public class CalculateAggConverter implements QueryConverter {
        return sqlGenerateUtils.getSelectField(agg);
    }

    private String getGroupBy(QueryParam queryParam) {
        if (CollectionUtils.isEmpty(queryParam.getGroups())) {
    private String getGroupBy(StructQueryParam structQueryParam) {
        if (CollectionUtils.isEmpty(structQueryParam.getGroups())) {
            return "";
        }
        return "group by " + String.join(",", queryParam.getGroups());
        return "group by " + String.join(",", structQueryParam.getGroups());
    }

    private static String getOrderBy(QueryParam queryParam) {
        return "order by " + getTimeDim(queryParam) + " desc";
    private static String getOrderBy(StructQueryParam structQueryParam) {
        return "order by " + getTimeDim(structQueryParam) + " desc";
    }

    private boolean isOverRatio(QueryParam queryParam) {
        Long overCt = queryParam.getAggregators().stream()
    private boolean isOverRatio(StructQueryParam structQueryParam) {
        Long overCt = structQueryParam.getAggregators().stream()
                .filter(f -> f.getFunc().equals(AggOperatorEnum.RATIO_OVER)).count();
        return overCt > 0;
    }

    private void check(QueryParam queryParam) throws Exception {
        Long ratioOverNum = queryParam.getAggregators().stream()
    private void check(StructQueryParam structQueryParam) throws Exception {
        Long ratioOverNum = structQueryParam.getAggregators().stream()
                .filter(f -> f.getFunc().equals(AggOperatorEnum.RATIO_OVER)).count();
        Long ratioRollNum = queryParam.getAggregators().stream()
        Long ratioRollNum = structQueryParam.getAggregators().stream()
                .filter(f -> f.getFunc().equals(AggOperatorEnum.RATIO_ROLL)).count();
        if (ratioOverNum > 0 && ratioRollNum > 0) {
            throw new Exception("not support over ratio and roll ratio together ");
        }
        if (getTimeDim(queryParam).isEmpty()) {
        if (getTimeDim(structQueryParam).isEmpty()) {
            throw new Exception("miss time filter");
        }
    }
@@ -1,74 +0,0 @@
package com.tencent.supersonic.headless.core.translator.converter;

import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.headless.api.pojo.QueryParam;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel;
import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;

import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;

/** QueryConverter default implement */
@Component("ParserDefaultConverter")
@Slf4j
public class ParserDefaultConverter implements QueryConverter {

    @Override
    public boolean accept(QueryStatement queryStatement) {
        if (Objects.isNull(queryStatement.getQueryParam()) || queryStatement.getIsS2SQL()) {
            return false;
        }
        CalculateAggConverter calculateConverterAgg =
                ContextUtils.getBean(CalculateAggConverter.class);
        return !calculateConverterAgg.accept(queryStatement);
    }

    @Override
    public void convert(QueryStatement queryStatement) throws Exception {
        SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class);
        QueryParam queryParam = queryStatement.getQueryParam();
        MetricQueryParam metricQueryParam = queryStatement.getMetricQueryParam();
        MetricQueryParam metricReq =
                generateSqlCommand(queryStatement.getQueryParam(), queryStatement);
        queryStatement.setMinMaxTime(sqlGenerateUtils.getBeginEndTime(queryParam, null));
        BeanUtils.copyProperties(metricReq, metricQueryParam);
    }

    public MetricQueryParam generateSqlCommand(QueryParam queryParam,
            QueryStatement queryStatement) {
        SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class);
        MetricQueryParam metricQueryParam = new MetricQueryParam();
        metricQueryParam.setMetrics(queryParam.getMetrics());
        metricQueryParam.setDimensions(queryParam.getGroups());
        String where = sqlGenerateUtils.generateWhere(queryParam, null);
        log.info("in generateSqlCommend, complete where:{}", where);

        metricQueryParam.setWhere(where);
        metricQueryParam.setOrder(queryParam.getOrders().stream()
                .map(order -> new ColumnOrder(order.getColumn(), order.getDirection()))
                .collect(Collectors.toList()));
        metricQueryParam.setLimit(queryParam.getLimit());

        // support detail query
        if (queryParam.getQueryType().isNativeAggQuery()
                && CollectionUtils.isEmpty(metricQueryParam.getMetrics())) {
            Map<Long, DataModel> modelMap = queryStatement.getOntology().getModelMap();
            for (Long modelId : modelMap.keySet()) {
                String modelBizName = modelMap.get(modelId).getName();
                String internalMetricName =
                        sqlGenerateUtils.generateInternalMetricName(modelBizName);
                metricQueryParam.getMetrics().add(internalMetricName);
            }
        }

        return metricQueryParam;
    }
}
@@ -0,0 +1,288 @@
package com.tencent.supersonic.headless.core.translator.converter;

import com.tencent.supersonic.common.jsqlparser.SqlReplaceHelper;
import com.tencent.supersonic.common.jsqlparser.SqlSelectFunctionHelper;
import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.headless.api.pojo.Measure;
import com.tencent.supersonic.headless.api.pojo.SchemaItem;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.api.pojo.enums.MetricType;
import com.tencent.supersonic.headless.api.pojo.response.*;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQueryParam;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;

import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;

@Component("SqlQueryConverter")
@Slf4j
public class SqlQueryConverter implements QueryConverter {

    @Override
    public boolean accept(QueryStatement queryStatement) {
        return Objects.nonNull(queryStatement.getSqlQueryParam()) && queryStatement.getIsS2SQL();
    }

    @Override
    public void convert(QueryStatement queryStatement) throws Exception {
        convertNameToBizName(queryStatement);
        rewriteOrderBy(queryStatement);

        // fill sqlQuery
        SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp();
        SqlQueryParam sqlQueryParam = queryStatement.getSqlQueryParam();
        String tableName = SqlSelectHelper.getTableName(sqlQueryParam.getSql());
        if (StringUtils.isEmpty(tableName)) {
            return;
        }
        sqlQueryParam.setTable(tableName.toLowerCase());
        SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class);
        if (!sqlGenerateUtils.isSupportWith(
                EngineType.fromString(semanticSchemaResp.getDatabaseResp().getType().toUpperCase()),
                semanticSchemaResp.getDatabaseResp().getVersion())) {
            sqlQueryParam.setSupportWith(false);
            sqlQueryParam.setWithAlias(false);
        }

        // build ontologyQuery
        List<String> allFields = SqlSelectHelper.getAllSelectFields(sqlQueryParam.getSql());
        List<MetricSchemaResp> metricSchemas = getMetrics(semanticSchemaResp, allFields);
        List<String> metrics =
                metricSchemas.stream().map(SchemaItem::getBizName).collect(Collectors.toList());
        AggOption aggOption = getAggOption(sqlQueryParam.getSql(), metricSchemas);
        Set<String> dimensions = getDimensions(semanticSchemaResp, allFields);
        OntologyQueryParam ontologyQueryParam = new OntologyQueryParam();
        ontologyQueryParam.getMetrics().addAll(metrics);
        ontologyQueryParam.getDimensions().addAll(dimensions);
        ontologyQueryParam.setAggOption(aggOption);
        ontologyQueryParam.setNativeQuery(!AggOption.isAgg(aggOption));
        queryStatement.setOntologyQueryParam(ontologyQueryParam);

        generateDerivedMetric(sqlGenerateUtils, queryStatement);

        queryStatement.setSql(sqlQueryParam.getSql());
        log.info("parse sqlQuery [{}] ", sqlQueryParam);
    }

    private AggOption getAggOption(String sql, List<MetricSchemaResp> metricSchemas) {
        if (SqlSelectFunctionHelper.hasAggregateFunction(sql)) {
            return AggOption.AGGREGATION;
        }

        if (!SqlSelectFunctionHelper.hasAggregateFunction(sql) && !SqlSelectHelper.hasGroupBy(sql)
                && !SqlSelectHelper.hasWith(sql) && !SqlSelectHelper.hasSubSelect(sql)) {
            log.debug("getAggOption simple sql set to DEFAULT");
            return AggOption.NATIVE;
        }

        // if there is no group by in S2SQL, set MetricTable's aggOption to "NATIVE"
        // if there is count() in S2SQL, set MetricTable's aggOption to "NATIVE"
        if (!SqlSelectFunctionHelper.hasAggregateFunction(sql)
                || SqlSelectFunctionHelper.hasFunction(sql, "count")
                || SqlSelectFunctionHelper.hasFunction(sql, "count_distinct")) {
            return AggOption.OUTER;
        }

        if (SqlSelectHelper.hasSubSelect(sql) || SqlSelectHelper.hasWith(sql)
                || SqlSelectHelper.hasGroupBy(sql)) {
            return AggOption.OUTER;
        }
        long defaultAggNullCnt = metricSchemas.stream().filter(
                m -> Objects.isNull(m.getDefaultAgg()) || StringUtils.isBlank(m.getDefaultAgg()))
                .count();
        if (defaultAggNullCnt > 0) {
            log.debug("getAggOption find null defaultAgg metric set to NATIVE");
            return AggOption.DEFAULT;
        }
        return AggOption.DEFAULT;
    }
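    // Note on reachability: the first branch already returns AGGREGATION whenever the
    // SQL contains an aggregate function, so any statement reaching the count()/OUTER
    // check has no aggregate function at all and that check always returns OUTER;
    // the trailing sub-select/group-by check and the defaultAgg inspection are
    // effectively dead code, and both tail branches return DEFAULT.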

    private Set<String> getDimensions(SemanticSchemaResp semanticSchemaResp,
            List<String> allFields) {
        Map<String, String> dimensionLowerToNameMap = semanticSchemaResp.getDimensions().stream()
                .collect(Collectors.toMap(entry -> entry.getBizName().toLowerCase(),
                        SchemaItem::getBizName, (k1, k2) -> k1));
        dimensionLowerToNameMap.put(TimeDimensionEnum.DAY.getName(),
                TimeDimensionEnum.DAY.getName());
        return allFields.stream()
                .filter(entry -> dimensionLowerToNameMap.containsKey(entry.toLowerCase()))
                .map(entry -> dimensionLowerToNameMap.get(entry.toLowerCase()))
                .collect(Collectors.toSet());
    }

    private List<MetricSchemaResp> getMetrics(SemanticSchemaResp semanticSchemaResp,
            List<String> allFields) {
        Map<String, MetricSchemaResp> metricLowerToNameMap =
                semanticSchemaResp.getMetrics().stream().collect(Collectors
                        .toMap(entry -> entry.getBizName().toLowerCase(), entry -> entry));
        return allFields.stream()
                .filter(entry -> metricLowerToNameMap.containsKey(entry.toLowerCase()))
                .map(entry -> metricLowerToNameMap.get(entry.toLowerCase()))
                .collect(Collectors.toList());
    }

    private void generateDerivedMetric(SqlGenerateUtils sqlGenerateUtils,
            QueryStatement queryStatement) {
        SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp();
        SqlQueryParam sqlParam = queryStatement.getSqlQueryParam();
        OntologyQueryParam ontologyParam = queryStatement.getOntologyQueryParam();
        String sql = sqlParam.getSql();

        Set<String> measures = new HashSet<>();
        Map<String, String> replaces = generateDerivedMetric(sqlGenerateUtils, semanticSchemaResp,
                ontologyParam.getAggOption(), ontologyParam.getMetrics(),
                ontologyParam.getDimensions(), measures);

        if (!CollectionUtils.isEmpty(replaces)) {
            // metricTable sql use measures replace metric
            sql = SqlReplaceHelper.replaceSqlByExpression(sql, replaces);
            ontologyParam.setAggOption(AggOption.NATIVE);
            // metricTable use measures replace metric
            if (!CollectionUtils.isEmpty(measures)) {
                ontologyParam.getMetrics().addAll(measures);
            } else {
                // empty measure, fill default
                ontologyParam.getMetrics().add(sqlGenerateUtils.generateInternalMetricName(
                        getDefaultModel(semanticSchemaResp, ontologyParam.getDimensions())));
            }
        }

        sqlParam.setSql(sql);
    }

    private Map<String, String> generateDerivedMetric(SqlGenerateUtils sqlGenerateUtils,
            SemanticSchemaResp semanticSchemaResp, AggOption aggOption, Set<String> metrics,
            Set<String> dimensions, Set<String> measures) {
        Map<String, String> result = new HashMap<>();
        List<MetricSchemaResp> metricResps = semanticSchemaResp.getMetrics();
        List<DimSchemaResp> dimensionResps = semanticSchemaResp.getDimensions();

        // Check if any metric is derived
        boolean hasDerivedMetrics =
                metricResps.stream().anyMatch(m -> metrics.contains(m.getBizName()) && MetricType
                        .isDerived(m.getMetricDefineType(), m.getMetricDefineByMeasureParams()));
        if (!hasDerivedMetrics) {
            return result;
        }

        log.debug("begin to generateDerivedMetric {} [{}]", aggOption, metrics);

        Set<String> allFields = new HashSet<>();
        Map<String, Measure> allMeasures = new HashMap<>();
        semanticSchemaResp.getModelResps().forEach(modelResp -> {
            allFields.addAll(modelResp.getFieldList());
            if (modelResp.getModelDetail().getMeasures() != null) {
                modelResp.getModelDetail().getMeasures()
                        .forEach(measure -> allMeasures.put(measure.getBizName(), measure));
            }
        });

        Set<String> derivedDimensions = new HashSet<>();
        Set<String> derivedMetrics = new HashSet<>();
        Map<String, String> visitedMetrics = new HashMap<>();

        for (MetricResp metricResp : metricResps) {
            if (metrics.contains(metricResp.getBizName())) {
                boolean isDerived = MetricType.isDerived(metricResp.getMetricDefineType(),
                        metricResp.getMetricDefineByMeasureParams());
                if (isDerived) {
                    String expr = sqlGenerateUtils.generateDerivedMetric(metricResps, allFields,
                            allMeasures, dimensionResps, sqlGenerateUtils.getExpr(metricResp),
                            metricResp.getMetricDefineType(), aggOption, visitedMetrics,
                            derivedMetrics, derivedDimensions);
                    result.put(metricResp.getBizName(), expr);
                    log.debug("derived metric {}->{}", metricResp.getBizName(), expr);
                } else {
                    measures.add(metricResp.getBizName());
                }
            }
        }

        measures.addAll(derivedMetrics);
        derivedDimensions.stream().filter(dimension -> !dimensions.contains(dimension))
                .forEach(dimensions::add);

        return result;
    }
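    // Example (illustrative, hypothetical metrics): if "ctr" is a derived metric
    // defined as "clicks/views", the returned map contains ctr -> clicks/views,
    // the S2SQL is rewritten with that expression, and the underlying measures
    // "clicks" and "views" are queried instead, with aggOption forced to NATIVE.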

    private void convertNameToBizName(QueryStatement queryStatement) {
        SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp();
        Map<String, String> fieldNameToBizNameMap = getFieldNameToBizNameMap(semanticSchemaResp);
        String sql = queryStatement.getSqlQueryParam().getSql();
        log.debug("dataSetId:{},convert name to bizName before:{}", queryStatement.getDataSetId(),
                sql);
        sql = SqlReplaceHelper.replaceFields(sql, fieldNameToBizNameMap, true);
        log.debug("dataSetId:{},convert name to bizName after:{}", queryStatement.getDataSetId(),
                sql);
        sql = SqlReplaceHelper.replaceTable(sql,
                Constants.TABLE_PREFIX + queryStatement.getDataSetId());
        log.debug("replaceTableName after:{}", sql);
        queryStatement.getSqlQueryParam().setSql(sql);
    }

    private void rewriteOrderBy(QueryStatement queryStatement) {
        // replace order by field with the select sequence number
        String sql = queryStatement.getSqlQueryParam().getSql();
        String newSql = SqlReplaceHelper.replaceAggAliasOrderbyField(sql);
        log.debug("replaceOrderAggSameAlias {} -> {}", sql, newSql);
        queryStatement.getSqlQueryParam().setSql(newSql);
    }

    protected Map<String, String> getFieldNameToBizNameMap(SemanticSchemaResp semanticSchemaResp) {
        // support fieldName and field alias to bizName
        Map<String, String> dimensionResults = semanticSchemaResp.getDimensions().stream().flatMap(
                entry -> getPairStream(entry.getAlias(), entry.getName(), entry.getBizName()))
                .collect(Collectors.toMap(Pair::getLeft, Pair::getRight, (k1, k2) -> k1));

        Map<String, String> metricResults = semanticSchemaResp.getMetrics().stream().flatMap(
                entry -> getPairStream(entry.getAlias(), entry.getName(), entry.getBizName()))
                .collect(Collectors.toMap(Pair::getLeft, Pair::getRight, (k1, k2) -> k1));

        dimensionResults.putAll(TimeDimensionEnum.getChNameToNameMap());
        dimensionResults.putAll(TimeDimensionEnum.getNameToNameMap());
        dimensionResults.putAll(metricResults);
        return dimensionResults;
    }

    private Stream<Pair<String, String>> getPairStream(String aliasStr, String name,
            String bizName) {
        Set<Pair<String, String>> elements = new HashSet<>();
        elements.add(Pair.of(name, bizName));
        if (StringUtils.isNotBlank(aliasStr)) {
            List<String> aliasList = SchemaItem.getAliasList(aliasStr);
            for (String alias : aliasList) {
                elements.add(Pair.of(alias, bizName));
            }
        }
        return elements.stream();
    }

    private String getDefaultModel(SemanticSchemaResp semanticSchemaResp, Set<String> dimensions) {
        if (!CollectionUtils.isEmpty(dimensions)) {
            Map<String, Long> modelMatchCnt = new HashMap<>();
            for (ModelResp modelResp : semanticSchemaResp.getModelResps()) {
                modelMatchCnt.put(modelResp.getBizName(), modelResp.getModelDetail().getDimensions()
                        .stream().filter(d -> dimensions.contains(d.getBizName())).count());
            }
            return modelMatchCnt.entrySet().stream()
                    .sorted(Map.Entry.comparingByValue(Comparator.reverseOrder()))
                    .map(Map.Entry::getKey).findFirst().orElse("");
        }
        return semanticSchemaResp.getModelResps().get(0).getBizName();
    }

}
@@ -4,7 +4,7 @@ import com.tencent.supersonic.headless.api.pojo.enums.ModelDefineType;
import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.utils.SqlVariableParseUtils;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
@@ -14,15 +14,13 @@ import java.util.List;
import java.util.Objects;

@Slf4j
@Component("SqlVariableParseConverter")
public class SqlVariableParseConverter implements QueryConverter {
@Component("SqlVariableConverter")
public class SqlVariableConverter implements QueryConverter {

    @Override
    public boolean accept(QueryStatement queryStatement) {
        if (Objects.isNull(queryStatement.getQueryParam())) {
            return false;
        }
        return true;
        return Objects.nonNull(queryStatement.getStructQueryParam())
                && !queryStatement.getIsS2SQL();
    }

    @Override
@@ -38,7 +36,7 @@ public class SqlVariableParseConverter implements QueryConverter {
        String sqlParsed =
                SqlVariableParseUtils.parse(modelResp.getModelDetail().getSqlQuery(),
                        modelResp.getModelDetail().getSqlVariables(),
                        queryStatement.getQueryParam().getParams());
                        queryStatement.getStructQueryParam().getParams());
        DataModel dataModel =
                queryStatement.getOntology().getDataModelMap().get(modelResp.getBizName());
        dataModel.setSqlQuery(sqlParsed);
@@ -0,0 +1,70 @@
package com.tencent.supersonic.headless.core.translator.converter;

import com.tencent.supersonic.common.pojo.Aggregator;
import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.core.pojo.Database;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQueryParam;
import com.tencent.supersonic.headless.core.pojo.StructQueryParam;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
import com.tencent.supersonic.headless.core.utils.SqlGenerateUtils;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;

import java.util.Objects;
import java.util.stream.Collectors;

@Component("ParserDefaultConverter")
@Slf4j
public class StructQueryConverter implements QueryConverter {

    @Override
    public boolean accept(QueryStatement queryStatement) {
        return Objects.nonNull(queryStatement.getStructQueryParam())
                && !queryStatement.getIsS2SQL();
    }

    @Override
    public void convert(QueryStatement queryStatement) throws Exception {
        SqlGenerateUtils sqlGenerateUtils = ContextUtils.getBean(SqlGenerateUtils.class);
        StructQueryParam structQueryParam = queryStatement.getStructQueryParam();

        String dsTable = "t_1";
        SqlQueryParam sqlParam = new SqlQueryParam();
        sqlParam.setTable(dsTable);
        String sql = String.format("select %s from %s %s %s %s",
                sqlGenerateUtils.getSelect(structQueryParam), dsTable,
                sqlGenerateUtils.getGroupBy(structQueryParam),
                sqlGenerateUtils.getOrderBy(structQueryParam),
                sqlGenerateUtils.getLimit(structQueryParam));
        Database database = queryStatement.getOntology().getDatabase();
        if (!sqlGenerateUtils.isSupportWith(database.getType(), database.getVersion())) {
            sqlParam.setSupportWith(false);
            sql = String.format("select %s from %s t0 %s %s %s",
                    sqlGenerateUtils.getSelect(structQueryParam), dsTable,
                    sqlGenerateUtils.getGroupBy(structQueryParam),
                    sqlGenerateUtils.getOrderBy(structQueryParam),
                    sqlGenerateUtils.getLimit(structQueryParam));
        }
        sqlParam.setSql(sql);
        queryStatement.setSqlQueryParam(sqlParam);

        OntologyQueryParam ontologyQueryParam = new OntologyQueryParam();
        ontologyQueryParam.getDimensions().addAll(structQueryParam.getGroups());
        ontologyQueryParam.getMetrics().addAll(structQueryParam.getAggregators().stream()
                .map(Aggregator::getColumn).collect(Collectors.toList()));
        String where = sqlGenerateUtils.generateWhere(structQueryParam, null);
        ontologyQueryParam.setWhere(where);
        ontologyQueryParam.setAggOption(AggOption.AGGREGATION);
        ontologyQueryParam.setNativeQuery(structQueryParam.getQueryType().isNativeAggQuery());
        ontologyQueryParam.setOrder(structQueryParam.getOrders().stream()
                .map(order -> new ColumnOrder(order.getColumn(), order.getDirection()))
                .collect(Collectors.toList()));
        ontologyQueryParam.setLimit(structQueryParam.getLimit());
        queryStatement.setOntologyQueryParam(ontologyQueryParam);
        log.info("parse structQuery [{}] ", queryStatement.getSqlQueryParam());
    }

}
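// Example (illustrative, hypothetical struct query grouped by "department" with a
// sum aggregator over "pv" and limit 10): the generated dataset SQL is roughly
//   select department, sum(pv) from t_1 group by department limit 10
// with the placeholder table "t_1" presumably resolved against the ontology later
// in the translation pipeline.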
@@ -0,0 +1,32 @@
package com.tencent.supersonic.headless.core.translator.optimizer;

import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.core.adaptor.db.DbAdaptor;
import com.tencent.supersonic.headless.core.adaptor.db.DbAdaptorFactory;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;

import java.util.Objects;

@Slf4j
@Component("DbDialectOptimizer")
public class DbDialectOptimizer implements QueryOptimizer {

    @Override
    public void rewrite(QueryStatement queryStatement) {
        SemanticSchemaResp semanticSchemaResp = queryStatement.getSemanticSchemaResp();
        DatabaseResp database = semanticSchemaResp.getDatabaseResp();
        String sql = queryStatement.getSql();
        if (Objects.isNull(database) || Objects.isNull(database.getType())) {
            return;
        }
        String type = database.getType();
        DbAdaptor engineAdaptor = DbAdaptorFactory.getEngineAdaptor(type.toLowerCase());
        if (Objects.nonNull(engineAdaptor)) {
            String adaptedSql = engineAdaptor.rewriteSql(sql);
            queryStatement.setSql(adaptedSql);
        }
    }
}
@@ -0,0 +1,40 @@
package com.tencent.supersonic.headless.core.translator.optimizer;

import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.StructQueryParam;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;

import java.util.Objects;

/** Remove the default metric added by the system when the query only has dimensions */
@Slf4j
@Component("DetailQueryOptimizer")
public class DetailQueryOptimizer implements QueryOptimizer {

    @Override
    public void rewrite(QueryStatement queryStatement) {
        StructQueryParam structQueryParam = queryStatement.getStructQueryParam();
        String sqlRaw = queryStatement.getSql().trim();
        if (StringUtils.isEmpty(sqlRaw)) {
            throw new RuntimeException("sql is empty or null");
        }
        log.debug("before handleNoMetric, sql:{}", sqlRaw);
        // if (isDetailQuery(structQueryParam)) {
        //     if (!CollectionUtils.isEmpty(structQueryParam.getGroups())) {
        //         String sqlForm = "select %s from ( %s ) src_no_metric";
        //         String sql = String.format(sqlForm,
        //                 structQueryParam.getGroups().stream().collect(Collectors.joining(",")),
        //                 sqlRaw);
        //         queryStatement.setSql(sql);
        //     }
        // }
        log.debug("after handleNoMetric, sql:{}", queryStatement.getSql());
    }

    public boolean isDetailQuery(StructQueryParam structQueryParam) {
        return Objects.nonNull(structQueryParam)
                && structQueryParam.getQueryType().isNativeAggQuery();
    }
}
|
||||
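The commented-out block above records the intended rewrite. Restated as a runnable helper for clarity (the class and method names here are illustrative, not part of the codebase):

import java.util.Arrays;
import java.util.List;

public final class NoMetricRewriteSketch {

    private NoMetricRewriteSketch() {}

    // Wrap the raw SQL and project only the group-by dimensions, which drops
    // the default metric the system added for a detail (dimension-only) query.
    public static String keepDimensionsOnly(String sqlRaw, List<String> groups) {
        return String.format("select %s from ( %s ) src_no_metric",
                String.join(",", groups), sqlRaw);
    }

    public static void main(String[] args) {
        // prints: select dept from ( select dept, 1 as cnt from t ) src_no_metric
        System.out.println(keepDimensionsOnly("select dept, 1 as cnt from t",
                Arrays.asList("dept")));
    }
}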
@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator;
package com.tencent.supersonic.headless.core.translator.optimizer;

import com.tencent.supersonic.headless.core.pojo.QueryStatement;

@@ -0,0 +1,18 @@
package com.tencent.supersonic.headless.core.translator.optimizer;

import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;

@Slf4j
@Component("ResultLimitOptimizer")
public class ResultLimitOptimizer implements QueryOptimizer {

    @Override
    public void rewrite(QueryStatement queryStatement) {
        if (!SqlSelectHelper.hasLimit(queryStatement.getSql())) {
            queryStatement.setSql(queryStatement.getSql() + " limit " + queryStatement.getLimit());
        }
    }
}
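Behaviorally, this optimizer appends a trailing limit only when the statement has none; the real check is delegated to SqlSelectHelper.hasLimit, which parses the SQL. A dependency-free approximation of the same idea:

public final class ResultLimitSketch {

    private ResultLimitSketch() {}

    public static String ensureLimit(String sql, long limit) {
        // Crude textual check; the optimizer above uses a SQL parser instead.
        boolean hasLimit = sql.toLowerCase().matches("(?s).*\\blimit\\s+\\d+\\s*$");
        return hasLimit ? sql : sql + " limit " + limit;
    }

    public static void main(String[] args) {
        System.out.println(ensureLimit("select id from users", 1000));          // limit appended
        System.out.println(ensureLimit("select id from users limit 10", 1000)); // left untouched
    }
}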
@@ -0,0 +1,8 @@
package com.tencent.supersonic.headless.core.translator.parser;

import com.tencent.supersonic.headless.core.pojo.QueryStatement;

/** A query parser generates physical SQL for the QueryStatement. */
public interface QueryParser {
    void parse(QueryStatement queryStatement) throws Exception;
}
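A minimal sketch of what an implementation of this contract can look like, assuming only the QueryStatement accessors visible elsewhere in this diff (getSql); the class itself is hypothetical and not part of the commit:

public class NoOpQueryParser implements QueryParser {

    @Override
    public void parse(QueryStatement queryStatement) throws Exception {
        // Hypothetical behavior: accept statements that already carry SQL and
        // reject the rest; real parsers (e.g. CalciteQueryParser below)
        // generate the SQL here instead.
        if (queryStatement.getSql() == null || queryStatement.getSql().isEmpty()) {
            throw new Exception("no SQL available for QueryStatement");
        }
    }
}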
@@ -1,12 +1,8 @@
package com.tencent.supersonic.headless.core.translator.calcite;
package com.tencent.supersonic.headless.core.translator.parser.calcite;

import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.QueryParser;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology;
import com.tencent.supersonic.headless.core.translator.calcite.sql.RuntimeOptions;
import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.calcite.sql.SqlBuilder;
import com.tencent.supersonic.headless.core.translator.parser.QueryParser;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Ontology;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;

@@ -16,7 +12,7 @@ import org.springframework.stereotype.Component;
public class CalciteQueryParser implements QueryParser {

    @Override
    public void parse(QueryStatement queryStatement, AggOption isAgg) throws Exception {
    public void parse(QueryStatement queryStatement) throws Exception {
        Ontology ontology = queryStatement.getOntology();
        if (ontology == null) {
            queryStatement.setErrMsg("No ontology could be found");
@@ -29,7 +25,8 @@ public class CalciteQueryParser implements QueryParser {
                        .enableOptimize(queryStatement.getEnableOptimize()).build())
                .build();
        SqlBuilder sqlBuilder = new SqlBuilder(semanticSchema);
        sqlBuilder.build(queryStatement, isAgg);
        String sql = sqlBuilder.buildOntologySql(queryStatement);
        queryStatement.setSql(sql);
    }

}
@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql;
package com.tencent.supersonic.headless.core.translator.parser.calcite;

import org.apache.calcite.plan.RelOptRuleCall;
import org.apache.calcite.plan.RelRule;
@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql;
package com.tencent.supersonic.headless.core.translator.parser.calcite;

import lombok.Builder;
import lombok.Data;
@@ -1,10 +1,10 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql;
package com.tencent.supersonic.headless.core.translator.parser.calcite;

import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.JoinRelation;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.JoinRelation;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Ontology;
import lombok.Builder;
import lombok.Data;
import org.apache.calcite.schema.Schema;
@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql;
package com.tencent.supersonic.headless.core.translator.parser.calcite;

import org.apache.calcite.DataContext;
import org.apache.calcite.linq4j.Enumerable;
@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql;
package com.tencent.supersonic.headless.core.translator.parser.calcite;

import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.sql.SqlOperatorTable;
@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql;
package com.tencent.supersonic.headless.core.translator.parser.calcite;

import com.tencent.supersonic.common.calcite.Configuration;
import com.tencent.supersonic.common.pojo.enums.EngineType;
@@ -33,7 +33,7 @@ public class SchemaBuilder {
        Prepare.CatalogReader catalogReader = new CalciteCatalogReader(rootSchema,
                Collections.singletonList(schema.getSchemaKey()), Configuration.typeFactory,
                Configuration.config);
        EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
        EngineType engineType = schema.getOntology().getDatabase().getType();
        S2SQLSqlValidatorImpl s2SQLSqlValidator =
                new S2SQLSqlValidatorImpl(Configuration.operatorTable, catalogReader,
                        Configuration.typeFactory, Configuration.getValidatorConfig(engineType));
@@ -0,0 +1,124 @@
package com.tencent.supersonic.headless.core.translator.parser.calcite;

import com.tencent.supersonic.common.calcite.Configuration;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.core.pojo.Database;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.DataModelNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.SemanticNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.render.FilterRender;
import com.tencent.supersonic.headless.core.translator.parser.calcite.render.OutputRender;
import com.tencent.supersonic.headless.core.translator.parser.calcite.render.Renderer;
import com.tencent.supersonic.headless.core.translator.parser.calcite.render.SourceRender;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.validate.SqlValidatorScope;

import java.util.*;

@Slf4j
public class SqlBuilder {

    private final S2CalciteSchema schema;
    private OntologyQueryParam ontologyQueryParam;
    private SqlValidatorScope scope;
    private SqlNode parserNode;
    private boolean isAgg = false;
    private AggOption aggOption = AggOption.DEFAULT;

    public SqlBuilder(S2CalciteSchema schema) {
        this.schema = schema;
    }

    public String buildOntologySql(QueryStatement queryStatement) throws Exception {
        this.ontologyQueryParam = queryStatement.getOntologyQueryParam();
        if (ontologyQueryParam.getLimit() == null) {
            ontologyQueryParam.setLimit(0L);
        }
        this.aggOption = ontologyQueryParam.getAggOption();

        buildParseNode();
        Database database = queryStatement.getOntology().getDatabase();
        optimizeParseNode(database.getType());
        return getSql(database.getType());
    }

    private void buildParseNode() throws Exception {
        // find relevant data models
        scope = SchemaBuilder.getScope(schema);
        List<DataModel> dataModels =
                DataModelNode.getQueryDataModels(scope, schema, ontologyQueryParam);
        if (dataModels == null || dataModels.isEmpty()) {
            throw new Exception("data model not found");
        }
        isAgg = getAgg(dataModels.get(0));

        // build level by level
        LinkedList<Renderer> builders = new LinkedList<>();
        builders.add(new SourceRender());
        builders.add(new FilterRender());
        builders.add(new OutputRender());
        ListIterator<Renderer> it = builders.listIterator();
        int i = 0;
        Renderer previous = null;
        while (it.hasNext()) {
            Renderer renderer = it.next();
            if (previous != null) {
                previous.render(ontologyQueryParam, dataModels, scope, schema, !isAgg);
                renderer.setTable(previous
                        .builderAs(DataModelNode.getNames(dataModels) + "_" + String.valueOf(i)));
                i++;
            }
            previous = renderer;
        }
        builders.getLast().render(ontologyQueryParam, dataModels, scope, schema, !isAgg);
        parserNode = builders.getLast().builder();
    }

    private boolean getAgg(DataModel dataModel) {
        if (!AggOption.DEFAULT.equals(aggOption)) {
            return AggOption.isAgg(aggOption);
        }
        // default by dataModel time aggregation
        if (Objects.nonNull(dataModel.getAggTime()) && !dataModel.getAggTime()
                .equalsIgnoreCase(Constants.DIMENSION_TYPE_TIME_GRANULARITY_NONE)) {
            if (!ontologyQueryParam.isNativeQuery()) {
                return true;
            }
        }
        return isAgg;
    }

    public String getSql(EngineType engineType) {
        return SemanticNode.getSql(parserNode, engineType);
    }

    private void optimizeParseNode(EngineType engineType) {
        if (Objects.isNull(schema.getRuntimeOptions())
                || Objects.isNull(schema.getRuntimeOptions().getEnableOptimize())
                || !schema.getRuntimeOptions().getEnableOptimize()) {
            return;
        }

        SqlNode optimizeNode = null;
        try {
            SqlNode sqlNode = SqlParser.create(SemanticNode.getSql(parserNode, engineType),
                    Configuration.getParserConfig(engineType)).parseStmt();
            if (Objects.nonNull(sqlNode)) {
                optimizeNode = SemanticNode.optimize(scope, schema, sqlNode, engineType);
            }
        } catch (Exception e) {
            log.error("optimize error", e);
        }

        if (Objects.nonNull(optimizeNode)) {
            parserNode = optimizeNode;
        }
    }

}
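The renderer loop in buildParseNode() effectively nests each stage's output as an aliased subquery of the next stage (SourceRender, then FilterRender, then OutputRender), with aliases built from the data model names plus a counter. A plain-string sketch of that chaining, with illustrative names:

public final class RendererChainSketch {

    private RendererChainSketch() {}

    public static void main(String[] args) {
        String[] stages = {"source", "filter", "output"};
        String sql = "orders";
        for (int i = 0; i < stages.length; i++) {
            // Each stage wraps the previous result as "<names>_<i>", mirroring
            // builderAs(DataModelNode.getNames(dataModels) + "_" + i).
            sql = String.format("(select /* %s */ * from %s) orders_%d", stages[i], sql, i);
        }
        System.out.println("select * from " + sql);
    }
}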
@@ -1,6 +1,6 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql;
package com.tencent.supersonic.headless.core.translator.parser.calcite;

import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import lombok.Data;
import org.apache.calcite.sql.SqlBasicCall;
import org.apache.calcite.sql.SqlKind;
@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql.node;
package com.tencent.supersonic.headless.core.translator.parser.calcite.node;

import com.tencent.supersonic.common.pojo.enums.EngineType;
import org.apache.calcite.sql.SqlNode;
@@ -1,39 +1,20 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql.node;
package com.tencent.supersonic.headless.core.translator.parser.calcite.node;

import com.google.common.collect.Lists;
import com.tencent.supersonic.common.calcite.Configuration;
import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.JoinRelation;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure;
import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.calcite.sql.SchemaBuilder;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.calcite.SchemaBuilder;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.*;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.SqlBasicCall;
import org.apache.calcite.sql.SqlDataTypeSpec;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.SqlUserDefinedTypeNameSpec;
import org.apache.calcite.sql.*;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.validate.SqlValidatorScope;
import org.springframework.util.CollectionUtils;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.*;
import java.util.stream.Collectors;

@Slf4j
@@ -53,7 +34,7 @@ public class DataModelNode extends SemanticNode {
            }
        }
        if (sqlTable.isEmpty()) {
            throw new Exception("DatasourceNode build error [tableSqlNode not found]");
            throw new Exception("DataModelNode build error [tableSqlNode not found]");
        }
        SqlNode source = getTable(sqlTable, scope, EngineType.fromString(dataModel.getType()));
        addSchema(scope, dataModel, sqlTable);
@@ -149,168 +130,171 @@ public class DataModelNode extends SemanticNode {
        return dataModelList.stream().map(d -> d.getName()).collect(Collectors.joining("_"));
    }

    public static void getQueryDimensionMeasure(S2CalciteSchema schema,
            MetricQueryParam metricCommand, Set<String> queryDimension, List<String> measures) {
        queryDimension.addAll(metricCommand.getDimensions().stream()
    public static void getQueryDimensionMeasure(Ontology ontology, OntologyQueryParam queryParam,
            Set<String> queryDimensions, Set<String> queryMeasures) {
        queryDimensions.addAll(queryParam.getDimensions().stream()
                .map(d -> d.contains(Constants.DIMENSION_IDENTIFY)
                        ? d.split(Constants.DIMENSION_IDENTIFY)[1]
                        : d)
                .collect(Collectors.toSet()));
        Set<String> schemaMetricName =
                schema.getMetrics().stream().map(m -> m.getName()).collect(Collectors.toSet());
        schema.getMetrics().stream().filter(m -> metricCommand.getMetrics().contains(m.getName()))
                ontology.getMetrics().stream().map(m -> m.getName()).collect(Collectors.toSet());
        ontology.getMetrics().stream().filter(m -> queryParam.getMetrics().contains(m.getName()))
                .forEach(m -> m.getMetricTypeParams().getMeasures().stream()
                        .forEach(mm -> measures.add(mm.getName())));
        metricCommand.getMetrics().stream().filter(m -> !schemaMetricName.contains(m))
                .forEach(m -> measures.add(m));
                        .forEach(mm -> queryMeasures.add(mm.getName())));
        queryParam.getMetrics().stream().filter(m -> !schemaMetricName.contains(m))
                .forEach(m -> queryMeasures.add(m));
    }

    public static void mergeQueryFilterDimensionMeasure(S2CalciteSchema schema,
            MetricQueryParam metricCommand, Set<String> queryDimension, List<String> measures,
    public static void mergeQueryFilterDimensionMeasure(Ontology ontology,
            OntologyQueryParam queryParam, Set<String> dimensions, Set<String> measures,
            SqlValidatorScope scope) throws Exception {
        EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
        if (Objects.nonNull(metricCommand.getWhere()) && !metricCommand.getWhere().isEmpty()) {
        EngineType engineType = ontology.getDatabase().getType();
        if (Objects.nonNull(queryParam.getWhere()) && !queryParam.getWhere().isEmpty()) {
            Set<String> filterConditions = new HashSet<>();
            FilterNode.getFilterField(parse(metricCommand.getWhere(), scope, engineType),
            FilterNode.getFilterField(parse(queryParam.getWhere(), scope, engineType),
                    filterConditions);
            Set<String> queryMeasures = new HashSet<>(measures);
            Set<String> schemaMetricName =
                    schema.getMetrics().stream().map(m -> m.getName()).collect(Collectors.toSet());
            Set<String> schemaMetricName = ontology.getMetrics().stream().map(m -> m.getName())
                    .collect(Collectors.toSet());
            for (String filterCondition : filterConditions) {
                if (schemaMetricName.contains(filterCondition)) {
                    schema.getMetrics().stream()
                    ontology.getMetrics().stream()
                            .filter(m -> m.getName().equalsIgnoreCase(filterCondition))
                            .forEach(m -> m.getMetricTypeParams().getMeasures().stream()
                                    .forEach(mm -> queryMeasures.add(mm.getName())));
                    continue;
                }
                queryDimension.add(filterCondition);
                dimensions.add(filterCondition);
            }
            measures.clear();
            measures.addAll(queryMeasures);
        }
    }

    public static List<DataModel> getRelatedDataModels(SqlValidatorScope scope,
            S2CalciteSchema schema, MetricQueryParam metricCommand) throws Exception {
        List<DataModel> dataModels = new ArrayList<>();
    public static List<DataModel> getQueryDataModels(SqlValidatorScope scope,
            S2CalciteSchema schema, OntologyQueryParam queryParam) throws Exception {
        Ontology ontology = schema.getOntology();
        // get query measures and dimensions
        Set<String> queryMeasures = new HashSet<>();
        Set<String> queryDimensions = new HashSet<>();
        getQueryDimensionMeasure(ontology, queryParam, queryDimensions, queryMeasures);
        mergeQueryFilterDimensionMeasure(ontology, queryParam, queryDimensions, queryMeasures,
                scope);

        // check by metric
        List<String> measures = new ArrayList<>();
        Set<String> queryDimension = new HashSet<>();
        getQueryDimensionMeasure(schema, metricCommand, queryDimension, measures);
        DataModel baseDataModel = null;
        // one , match measure count
        Map<String, Integer> dataSourceMeasures = new HashMap<>();
        for (Map.Entry<String, DataModel> entry : schema.getDataModels().entrySet()) {
            Set<String> sourceMeasure = entry.getValue().getMeasures().stream()
                    .map(mm -> mm.getName()).collect(Collectors.toSet());
            sourceMeasure.retainAll(measures);
            dataSourceMeasures.put(entry.getKey(), sourceMeasure.size());
        // first, find the base model
        DataModel baseDataModel = findBaseModel(ontology, queryMeasures, queryDimensions);
        if (Objects.isNull(baseDataModel)) {
            throw new RuntimeException(
                    String.format("could not find matching dataModel, dimensions:%s, measures:%s",
                            queryDimensions, queryMeasures));
        }
        log.info("dataSourceMeasures [{}]", dataSourceMeasures);
        Optional<Map.Entry<String, Integer>> base = dataSourceMeasures.entrySet().stream()
                .sorted(Map.Entry.comparingByValue(Comparator.reverseOrder())).findFirst();
        if (base.isPresent()) {
            baseDataModel = schema.getDataModels().get(base.get().getKey());
            dataModels.add(baseDataModel);
        }
        // second , check match all dimension and metric
        if (baseDataModel != null) {
            Set<String> filterMeasure = new HashSet<>();
            Set<String> sourceMeasure = baseDataModel.getMeasures().stream().map(mm -> mm.getName())
                    .collect(Collectors.toSet());
            Set<String> dimension = baseDataModel.getDimensions().stream().map(dd -> dd.getName())
                    .collect(Collectors.toSet());
            baseDataModel.getIdentifiers().stream().forEach(i -> dimension.add(i.getName()));
            if (schema.getDimensions().containsKey(baseDataModel.getName())) {
                schema.getDimensions().get(baseDataModel.getName()).stream()
                        .forEach(d -> dimension.add(d.getName()));
            }
            filterMeasure.addAll(sourceMeasure);
            filterMeasure.addAll(dimension);
            EngineType engineType =
                    EngineType.fromString(schema.getOntology().getDatabase().getType());
            mergeQueryFilterDimensionMeasure(schema, metricCommand, queryDimension, measures,
                    scope);
            boolean isAllMatch = checkMatch(sourceMeasure, queryDimension, measures, dimension,
                    metricCommand, scope, engineType);
            if (isAllMatch) {
                log.debug("baseDataModel match all ");
                return dataModels;
            }
            // find all dataSource has the same identifiers
            List<DataModel> linkDataModels = getLinkDataSourcesByJoinRelation(queryDimension,
                    measures, baseDataModel, schema);
            if (CollectionUtils.isEmpty(linkDataModels)) {
                log.debug("baseDataModel get by identifiers ");
                Set<String> baseIdentifiers = baseDataModel.getIdentifiers().stream()
                        .map(i -> i.getName()).collect(Collectors.toSet());
                if (baseIdentifiers.isEmpty()) {
                    throw new Exception(
                            "datasource error : " + baseDataModel.getName() + " miss identifier");
                }
                linkDataModels = getLinkDataSources(baseIdentifiers, queryDimension, measures,
                        baseDataModel, schema);
                if (linkDataModels.isEmpty()) {
                    throw new Exception(String.format(
                            "not find the match datasource : dimension[%s],measure[%s]",
                            queryDimension, measures));
                }
            }
            log.debug("linkDataModels {}", linkDataModels);
            return linkDataModels;
            // dataModels.addAll(linkDataModels);
        // if the base model matches all queried measures and dimensions, just return
        if (checkMatch(baseDataModel, queryMeasures, queryDimensions)) {
            log.debug("baseDataModel match all measures and dimensions");
            return Collections.singletonList(baseDataModel);
        }

        return dataModels;
        // second, traverse the ontology to find other related dataModels
        List<DataModel> relatedDataModels = findRelatedModelsByRelation(ontology, baseDataModel,
                queryDimensions, queryMeasures);
        if (CollectionUtils.isEmpty(relatedDataModels)) {
            relatedDataModels = findRelatedModelsByIdentifier(ontology, baseDataModel,
                    queryDimensions, queryMeasures);
        }
        if (CollectionUtils.isEmpty(relatedDataModels)) {
            relatedDataModels = Collections.singletonList(baseDataModel);
        }

        log.debug("relatedDataModels {}", relatedDataModels);
        return relatedDataModels;
    }

    private static boolean checkMatch(Set<String> sourceMeasure, Set<String> queryDimension,
            List<String> measures, Set<String> dimension, MetricQueryParam metricCommand,
            SqlValidatorScope scope, EngineType engineType) throws Exception {
        boolean isAllMatch = true;
        sourceMeasure.retainAll(measures);
        if (sourceMeasure.size() < measures.size()) {
            log.info("baseDataSource measures not match all measure");
            // check dimension again
            Set<String> dimensionMeasures = new HashSet<>();
            dimensionMeasures.addAll(dimension);
            dimensionMeasures.retainAll(measures);
            if (sourceMeasure.size() + dimensionMeasures.size() < measures.size()) {
                log.info("baseDataSource not match all measure");
                isAllMatch = false;
    private static DataModel findBaseModel(Ontology ontology, Set<String> queryMeasures,
            Set<String> queryDimensions) {
        DataModel dataModel = null;
        // first, try to find the model with the most matching measures
        Map<String, Integer> dataModelMeasuresCount = new HashMap<>();
        for (Map.Entry<String, DataModel> entry : ontology.getDataModelMap().entrySet()) {
            Set<String> sourceMeasure = entry.getValue().getMeasures().stream()
                    .map(Measure::getName).collect(Collectors.toSet());
            sourceMeasure.retainAll(queryMeasures);
            dataModelMeasuresCount.put(entry.getKey(), sourceMeasure.size());
        }
        log.info("dataModelMeasureCount: [{}]", dataModelMeasuresCount);
        Optional<Map.Entry<String, Integer>> base =
                dataModelMeasuresCount.entrySet().stream().filter(e -> e.getValue() > 0)
                        .sorted(Map.Entry.comparingByValue(Comparator.reverseOrder())).findFirst();

        if (base.isPresent()) {
            dataModel = ontology.getDataModelMap().get(base.get().getKey());
        } else {
            // second, try to find the model with the most matching dimensions
            Map<String, Integer> dataModelDimCount = new HashMap<>();
            for (Map.Entry<String, List<Dimension>> entry : ontology.getDimensionMap().entrySet()) {
                Set<String> modelDimensions = entry.getValue().stream().map(Dimension::getName)
                        .collect(Collectors.toSet());
                modelDimensions.retainAll(queryDimensions);
                dataModelDimCount.put(entry.getKey(), modelDimensions.size());
            }
            log.info("dataModelDimCount: [{}]", dataModelDimCount);
            base = dataModelDimCount.entrySet().stream().filter(e -> e.getValue() > 0)
                    .sorted(Map.Entry.comparingByValue(Comparator.reverseOrder())).findFirst();
            if (base.isPresent()) {
                dataModel = ontology.getDataModelMap().get(base.get().getKey());
            }
        }
        measures.removeAll(sourceMeasure);

        dimension.retainAll(queryDimension);
        if (dimension.size() < queryDimension.size()) {
            log.debug("baseDataSource not match all dimension");
        return dataModel;
    }

    private static boolean checkMatch(DataModel baseDataModel, Set<String> queryMeasures,
            Set<String> queryDimension) {
        boolean isAllMatch = true;
        Set<String> baseMeasures = baseDataModel.getMeasures().stream().map(Measure::getName)
                .collect(Collectors.toSet());
        Set<String> baseDimensions = baseDataModel.getDimensions().stream().map(Dimension::getName)
                .collect(Collectors.toSet());
        baseDataModel.getIdentifiers().stream().forEach(i -> baseDimensions.add(i.getName()));

        baseMeasures.retainAll(queryMeasures);
        if (baseMeasures.size() < queryMeasures.size()) {
            // check dimension again
            Set<String> dimensionMeasures = new HashSet<>();
            dimensionMeasures.addAll(baseDimensions);
            dimensionMeasures.retainAll(queryMeasures);
            if (baseMeasures.size() + dimensionMeasures.size() < queryMeasures.size()) {
                log.info("baseDataModel not match all measures");
                isAllMatch = false;
            }
            queryMeasures.removeAll(dimensionMeasures);
        }
        queryMeasures.removeAll(baseMeasures);

        baseDimensions.retainAll(queryDimension);
        if (baseDimensions.size() < queryDimension.size()) {
            log.debug("baseDataModel not match all dimensions");
            isAllMatch = false;
        }
        queryDimension.removeAll(dimension);
        queryDimension.removeAll(baseDimensions);

        if (metricCommand.getWhere() != null && !metricCommand.getWhere().isEmpty()) {
            Set<String> whereFields = new HashSet<>();
            SqlNode sqlNode = parse(metricCommand.getWhere(), scope, engineType);
            FilterNode.getFilterField(sqlNode, whereFields);
        }
        return isAllMatch;
    }

    private static List<DataModel> getLinkDataSourcesByJoinRelation(Set<String> queryDimension,
            List<String> measures, DataModel baseDataModel, S2CalciteSchema schema) {
        Set<String> linkDataSourceName = new HashSet<>();
        List<DataModel> linkDataModels = new ArrayList<>();
    private static List<DataModel> findRelatedModelsByRelation(Ontology ontology,
            DataModel baseDataModel, Set<String> queryDimensions, Set<String> queryMeasures) {
        Set<String> joinDataModelNames = new HashSet<>();
        List<DataModel> joinDataModels = new ArrayList<>();
        Set<String> before = new HashSet<>();
        before.add(baseDataModel.getName());
        if (!CollectionUtils.isEmpty(schema.getJoinRelations())) {

        if (!CollectionUtils.isEmpty(ontology.getJoinRelations())) {
            Set<Long> visitJoinRelations = new HashSet<>();
            List<JoinRelation> sortedJoinRelation = new ArrayList<>();
            sortJoinRelation(schema.getJoinRelations(), baseDataModel.getName(), visitJoinRelations,
                    sortedJoinRelation);
            schema.getJoinRelations().stream().filter(j -> !visitJoinRelations.contains(j.getId()))
            sortJoinRelation(ontology.getJoinRelations(), baseDataModel.getName(),
                    visitJoinRelations, sortedJoinRelation);
            ontology.getJoinRelations().stream()
                    .filter(j -> !visitJoinRelations.contains(j.getId()))
                    .forEach(j -> sortedJoinRelation.add(j));
            for (JoinRelation joinRelation : sortedJoinRelation) {
                if (!before.contains(joinRelation.getLeft())
@@ -319,53 +303,54 @@ public class DataModelNode extends SemanticNode {
                }
                boolean isMatch = false;
                boolean isRight = before.contains(joinRelation.getLeft());
                DataModel other = isRight ? schema.getDataModels().get(joinRelation.getRight())
                        : schema.getDataModels().get(joinRelation.getLeft());
                if (!queryDimension.isEmpty()) {
                DataModel other = isRight ? ontology.getDataModelMap().get(joinRelation.getRight())
                        : ontology.getDataModelMap().get(joinRelation.getLeft());
                if (!queryDimensions.isEmpty()) {
                    Set<String> linkDimension = other.getDimensions().stream()
                            .map(dd -> dd.getName()).collect(Collectors.toSet());
                    other.getIdentifiers().stream().forEach(i -> linkDimension.add(i.getName()));
                    linkDimension.retainAll(queryDimension);
                    linkDimension.retainAll(queryDimensions);
                    if (!linkDimension.isEmpty()) {
                        isMatch = true;
                    }
                }
                Set<String> linkMeasure = other.getMeasures().stream().map(mm -> mm.getName())
                Set<String> linkMeasure = other.getMeasures().stream().map(Measure::getName)
                        .collect(Collectors.toSet());
                linkMeasure.retainAll(measures);
                linkMeasure.retainAll(queryMeasures);
                if (!linkMeasure.isEmpty()) {
                    isMatch = true;
                }
                if (!isMatch && schema.getDimensions().containsKey(other.getName())) {
                    Set<String> linkDimension = schema.getDimensions().get(other.getName()).stream()
                            .map(dd -> dd.getName()).collect(Collectors.toSet());
                    linkDimension.retainAll(queryDimension);
                if (!isMatch && ontology.getDimensionMap().containsKey(other.getName())) {
                    Set<String> linkDimension = ontology.getDimensionMap().get(other.getName())
                            .stream().map(dd -> dd.getName()).collect(Collectors.toSet());
                    linkDimension.retainAll(queryDimensions);
                    if (!linkDimension.isEmpty()) {
                        isMatch = true;
                    }
                }
                if (isMatch) {
                    linkDataSourceName.add(other.getName());
                    joinDataModelNames.add(other.getName());
                    before.add(other.getName());
                }
            }
        }
        if (!CollectionUtils.isEmpty(linkDataSourceName)) {
        if (!CollectionUtils.isEmpty(joinDataModelNames)) {
            Map<String, Long> orders = new HashMap<>();
            linkDataSourceName.add(baseDataModel.getName());
            joinDataModelNames.add(baseDataModel.getName());
            orders.put(baseDataModel.getName(), 0L);
            for (JoinRelation joinRelation : schema.getJoinRelations()) {
                if (linkDataSourceName.contains(joinRelation.getLeft())
                        && linkDataSourceName.contains(joinRelation.getRight())) {
            for (JoinRelation joinRelation : ontology.getJoinRelations()) {
                if (joinDataModelNames.contains(joinRelation.getLeft())
                        && joinDataModelNames.contains(joinRelation.getRight())) {
                    orders.put(joinRelation.getLeft(), 0L);
                    orders.put(joinRelation.getRight(), 1L);
                }
            }
            orders.entrySet().stream().sorted(Map.Entry.comparingByValue()).forEach(d -> {
                linkDataModels.add(schema.getDataModels().get(d.getKey()));
                joinDataModels.add(ontology.getDataModelMap().get(d.getKey()));
            });
        }
        return linkDataModels;

        return joinDataModels;
    }

    private static void sortJoinRelation(List<JoinRelation> joinRelations, String next,
@@ -383,12 +368,17 @@ public class DataModelNode extends SemanticNode {
        }
    }

    private static List<DataModel> getLinkDataSources(Set<String> baseIdentifiers,
            Set<String> queryDimension, List<String> measures, DataModel baseDataModel,
            S2CalciteSchema schema) {
    private static List<DataModel> findRelatedModelsByIdentifier(Ontology ontology,
            DataModel baseDataModel, Set<String> queryDimension, Set<String> measures) {
        Set<String> baseIdentifiers = baseDataModel.getIdentifiers().stream().map(Identify::getName)
                .collect(Collectors.toSet());
        if (baseIdentifiers.isEmpty()) {
            return Collections.EMPTY_LIST;
        }

        Set<String> linkDataSourceName = new HashSet<>();
        List<DataModel> linkDataModels = new ArrayList<>();
        for (Map.Entry<String, DataModel> entry : schema.getDataModels().entrySet()) {
        for (Map.Entry<String, DataModel> entry : ontology.getDataModelMap().entrySet()) {
            if (entry.getKey().equalsIgnoreCase(baseDataModel.getName())) {
                continue;
            }
@@ -419,9 +409,9 @@ public class DataModelNode extends SemanticNode {
                }
            }
        }
        for (Map.Entry<String, List<Dimension>> entry : schema.getDimensions().entrySet()) {
        for (Map.Entry<String, List<Dimension>> entry : ontology.getDimensionMap().entrySet()) {
            if (!queryDimension.isEmpty()) {
                Set<String> linkDimension = entry.getValue().stream().map(dd -> dd.getName())
                Set<String> linkDimension = entry.getValue().stream().map(Dimension::getName)
                        .collect(Collectors.toSet());
                linkDimension.retainAll(queryDimension);
                if (!linkDimension.isEmpty()) {
@@ -430,7 +420,7 @@ public class DataModelNode extends SemanticNode {
            }
        }
        for (String linkName : linkDataSourceName) {
            linkDataModels.add(schema.getDataModels().get(linkName));
            linkDataModels.add(ontology.getDataModelMap().get(linkName));
        }
        if (!CollectionUtils.isEmpty(linkDataModels)) {
            List<DataModel> all = new ArrayList<>();
@@ -440,4 +430,5 @@ public class DataModelNode extends SemanticNode {
        }
        return Lists.newArrayList();
    }

}
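To summarize the selection logic introduced above: findBaseModel prefers the data model that matches the most query measures and falls back to dimension matches, and getQueryDataModels widens the result via join relations or shared identifiers only when the base model cannot cover the query alone. A standalone sketch of the scoring step (all names illustrative):

import java.util.Map;
import java.util.Optional;
import java.util.Set;

public final class BaseModelSelectionSketch {

    private BaseModelSelectionSketch() {}

    // Prefer the model with the most matching measures; if none matches any
    // measure, fall back to the model with the most matching dimensions.
    public static Optional<String> pick(Map<String, Set<String>> modelMeasures,
            Map<String, Set<String>> modelDimensions,
            Set<String> queryMeasures, Set<String> queryDimensions) {
        Optional<String> byMeasure = bestMatch(modelMeasures, queryMeasures);
        return byMeasure.isPresent() ? byMeasure
                : bestMatch(modelDimensions, queryDimensions);
    }

    private static Optional<String> bestMatch(Map<String, Set<String>> candidates,
            Set<String> queried) {
        String best = null;
        long bestCount = 0;
        for (Map.Entry<String, Set<String>> e : candidates.entrySet()) {
            long count = e.getValue().stream().filter(queried::contains).count();
            if (count > bestCount) {
                bestCount = count;
                best = e.getKey();
            }
        }
        return Optional.ofNullable(best);
    }
}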
@@ -1,8 +1,8 @@
|
||||
package com.tencent.supersonic.headless.core.translator.calcite.sql.node;
|
||||
package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
|
||||
|
||||
import com.tencent.supersonic.common.pojo.enums.EngineType;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension;
|
||||
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants;
|
||||
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Dimension;
|
||||
import org.apache.calcite.sql.SqlNode;
|
||||
import org.apache.calcite.sql.validate.SqlValidatorScope;
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
package com.tencent.supersonic.headless.core.translator.calcite.sql.node;
|
||||
package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
|
||||
|
||||
import org.apache.calcite.sql.SqlCall;
|
||||
import org.apache.calcite.sql.SqlInternalOperator;
|
||||
@@ -1,4 +1,4 @@
|
||||
package com.tencent.supersonic.headless.core.translator.calcite.sql.node;
|
||||
package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
|
||||
|
||||
import org.apache.calcite.sql.SqlBasicCall;
|
||||
import org.apache.calcite.sql.SqlIdentifier;
|
||||
@@ -1,7 +1,7 @@
|
||||
package com.tencent.supersonic.headless.core.translator.calcite.sql.node;
|
||||
package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
|
||||
|
||||
import com.tencent.supersonic.common.pojo.enums.EngineType;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify;
|
||||
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Identify;
|
||||
import org.apache.calcite.sql.SqlNode;
|
||||
import org.apache.calcite.sql.validate.SqlValidatorScope;
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
package com.tencent.supersonic.headless.core.translator.calcite.sql.node;
|
||||
package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
|
||||
|
||||
import org.apache.calcite.linq4j.Ord;
|
||||
import org.apache.calcite.sql.SqlCall;
|
||||
@@ -1,7 +1,7 @@
|
||||
package com.tencent.supersonic.headless.core.translator.calcite.sql.node;
|
||||
package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
|
||||
|
||||
import com.tencent.supersonic.common.pojo.enums.EngineType;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure;
|
||||
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Measure;
|
||||
import org.apache.calcite.sql.SqlNode;
|
||||
import org.apache.calcite.sql.validate.SqlValidatorScope;
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
package com.tencent.supersonic.headless.core.translator.calcite.sql.node;
|
||||
package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
|
||||
|
||||
import com.tencent.supersonic.common.pojo.enums.EngineType;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema;
|
||||
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
|
||||
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
|
||||
import lombok.Data;
|
||||
import org.apache.calcite.sql.SqlNode;
|
||||
import org.apache.calcite.sql.validate.SqlValidatorScope;
|
||||
@@ -1,12 +1,12 @@
|
||||
package com.tencent.supersonic.headless.core.translator.calcite.sql.node;
|
||||
package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
|
||||
|
||||
import com.tencent.supersonic.common.calcite.Configuration;
|
||||
import com.tencent.supersonic.common.calcite.SemanticSqlDialect;
|
||||
import com.tencent.supersonic.common.calcite.SqlDialectFactory;
|
||||
import com.tencent.supersonic.common.pojo.enums.EngineType;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.sql.FilterToGroupScanRule;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema;
|
||||
import com.tencent.supersonic.headless.core.translator.parser.calcite.FilterToGroupScanRule;
|
||||
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
|
||||
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.apache.calcite.plan.RelOptPlanner;
|
||||
import org.apache.calcite.plan.hep.HepPlanner;
|
||||
@@ -1,15 +1,15 @@
|
||||
package com.tencent.supersonic.headless.core.translator.calcite.sql.render;
|
||||
package com.tencent.supersonic.headless.core.translator.parser.calcite.render;
|
||||
|
||||
import com.tencent.supersonic.common.pojo.enums.EngineType;
|
||||
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.FilterNode;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.MetricNode;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.SemanticNode;
|
||||
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
|
||||
import com.tencent.supersonic.headless.core.translator.parser.calcite.TableView;
|
||||
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.FilterNode;
|
||||
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.MetricNode;
|
||||
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.SemanticNode;
|
||||
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants;
|
||||
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
|
||||
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
|
||||
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
|
||||
import org.apache.calcite.sql.SqlIdentifier;
|
||||
import org.apache.calcite.sql.SqlNode;
|
||||
import org.apache.calcite.sql.parser.SqlParserPos;
|
||||
@@ -26,13 +26,13 @@ import java.util.stream.Collectors;
|
||||
public class FilterRender extends Renderer {
|
||||
|
||||
@Override
|
||||
public void render(MetricQueryParam metricCommand, List<DataModel> dataModels,
|
||||
public void render(OntologyQueryParam metricCommand, List<DataModel> dataModels,
|
||||
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
|
||||
TableView tableView = super.tableView;
|
||||
SqlNode filterNode = null;
|
||||
List<String> queryMetrics = new ArrayList<>(metricCommand.getMetrics());
|
||||
List<String> queryDimensions = new ArrayList<>(metricCommand.getDimensions());
|
||||
EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
|
||||
EngineType engineType = schema.getOntology().getDatabase().getType();
|
||||
|
||||
if (metricCommand.getWhere() != null && !metricCommand.getWhere().isEmpty()) {
|
||||
filterNode = SemanticNode.parse(metricCommand.getWhere(), scope, engineType);
|
||||
@@ -1,22 +1,22 @@
|
||||
package com.tencent.supersonic.headless.core.translator.calcite.sql.render;
|
||||
package com.tencent.supersonic.headless.core.translator.parser.calcite.render;
|
||||
|
||||
import com.tencent.supersonic.common.pojo.enums.EngineType;
|
||||
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.JoinRelation;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Materialization;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.AggFunctionNode;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.DataModelNode;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.FilterNode;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.IdentifyNode;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.MetricNode;
|
||||
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.SemanticNode;
|
||||
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
|
||||
import com.tencent.supersonic.headless.core.translator.parser.calcite.TableView;
|
||||
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.AggFunctionNode;
|
||||
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.DataModelNode;
|
||||
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.FilterNode;
|
||||
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.IdentifyNode;
|
||||
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.MetricNode;
|
||||
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.SemanticNode;
|
||||
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants;
|
||||
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
|
||||
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Dimension;
|
||||
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Identify;
|
||||
import com.tencent.supersonic.headless.core.translator.parser.s2sql.JoinRelation;
|
||||
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Materialization;
|
||||
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
|
||||
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.apache.calcite.sql.JoinConditionType;
|
||||
import org.apache.calcite.sql.SqlBasicCall;
|
||||
@@ -47,10 +47,10 @@ import java.util.stream.Collectors;
|
||||
public class JoinRender extends Renderer {
|
||||
|
||||
@Override
|
||||
public void render(MetricQueryParam metricCommand, List<DataModel> dataModels,
|
||||
public void render(OntologyQueryParam metricCommand, List<DataModel> dataModels,
|
||||
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
|
||||
String queryWhere = metricCommand.getWhere();
|
||||
EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
|
||||
EngineType engineType = schema.getOntology().getDatabase().getType();
|
||||
Set<String> whereFields = new HashSet<>();
|
||||
List<String> fieldWhere = new ArrayList<>();
|
||||
if (queryWhere != null && !queryWhere.isEmpty()) {
|
||||
@@ -59,8 +59,9 @@ public class JoinRender extends Renderer {
|
||||
fieldWhere = whereFields.stream().collect(Collectors.toList());
|
||||
}
|
||||
Set<String> queryAllDimension = new HashSet<>();
|
||||
List<String> measures = new ArrayList<>();
|
||||
DataModelNode.getQueryDimensionMeasure(schema, metricCommand, queryAllDimension, measures);
|
||||
Set<String> measures = new HashSet<>();
|
||||
DataModelNode.getQueryDimensionMeasure(schema.getOntology(), metricCommand,
|
||||
queryAllDimension, measures);
|
||||
SqlNode left = null;
|
||||
TableView leftTable = null;
|
||||
TableView innerView = new TableView();
|
||||
@@ -73,8 +74,8 @@ public class JoinRender extends Renderer {
|
||||
final DataModel dataModel = dataModels.get(i);
|
||||
final Set<String> filterDimensions = new HashSet<>();
|
||||
final Set<String> filterMetrics = new HashSet<>();
|
||||
final List<String> queryDimension = new ArrayList<>();
|
||||
final List<String> queryMetrics = new ArrayList<>();
|
||||
final Set<String> queryDimension = new HashSet<>();
|
||||
final Set<String> queryMetrics = new HashSet<>();
|
||||
SourceRender.whereDimMetric(fieldWhere, queryMetrics, queryDimension, dataModel, schema,
|
||||
filterDimensions, filterMetrics);
|
||||
List<String> reqMetric = new ArrayList<>(metricCommand.getMetrics());
|
||||
@@ -142,11 +143,11 @@ public class JoinRender extends Renderer {
|
||||
}
|
||||
|
||||
private void doMetric(Map<String, SqlNode> innerSelect, TableView filterView,
|
||||
List<String> queryMetrics, List<String> reqMetrics, DataModel dataModel,
|
||||
Set<String> queryMetrics, List<String> reqMetrics, DataModel dataModel,
|
||||
Set<String> sourceMeasure, SqlValidatorScope scope, S2CalciteSchema schema,
|
||||
boolean nonAgg) throws Exception {
|
||||
String alias = Constants.JOIN_TABLE_PREFIX + dataModel.getName();
|
||||
EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
|
||||
EngineType engineType = schema.getOntology().getDatabase().getType();
|
||||
for (String m : reqMetrics) {
|
||||
if (getMatchMetric(schema, sourceMeasure, m, queryMetrics)) {
|
||||
MetricNode metricNode = buildMetricNode(m, dataModel, scope, schema, nonAgg, alias);
|
||||
@@ -177,11 +178,11 @@ public class JoinRender extends Renderer {
|
||||
}
|
||||
|
||||
private void doDimension(Map<String, SqlNode> innerSelect, Set<String> filterDimension,
|
||||
List<String> queryDimension, List<String> reqDimensions, DataModel dataModel,
|
||||
Set<String> queryDimension, List<String> reqDimensions, DataModel dataModel,
|
||||
Set<String> dimension, SqlValidatorScope scope, S2CalciteSchema schema)
|
||||
throws Exception {
|
||||
String alias = Constants.JOIN_TABLE_PREFIX + dataModel.getName();
|
||||
EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
|
||||
EngineType engineType = schema.getOntology().getDatabase().getType();
|
||||
for (String d : reqDimensions) {
|
||||
if (getMatchDimension(schema, dimension, dataModel, d, queryDimension)) {
|
||||
if (d.contains(Constants.DIMENSION_IDENTIFY)) {
|
||||
@@ -205,7 +206,7 @@ public class JoinRender extends Renderer {
|
||||
}
|
||||
|
||||
private boolean getMatchMetric(S2CalciteSchema schema, Set<String> sourceMeasure, String m,
|
||||
List<String> queryMetrics) {
|
||||
Set<String> queryMetrics) {
|
||||
Optional<Metric> metric = schema.getMetrics().stream()
|
||||
.filter(mm -> mm.getName().equalsIgnoreCase(m)).findFirst();
|
||||
boolean isAdd = false;
|
||||
@@ -226,7 +227,7 @@ public class JoinRender extends Renderer {
|
||||
}
|
||||
|
||||
private boolean getMatchDimension(S2CalciteSchema schema, Set<String> sourceDimension,
|
||||
DataModel dataModel, String d, List<String> queryDimension) {
|
||||
DataModel dataModel, String d, Set<String> queryDimension) {
|
||||
String oriDimension = d;
|
||||
boolean isAdd = false;
|
||||
if (d.contains(Constants.DIMENSION_IDENTIFY)) {
|
||||
@@ -261,7 +262,7 @@ public class JoinRender extends Renderer {
|
||||
private SqlNode buildJoin(SqlNode left, TableView leftTable, TableView tableView,
|
||||
Map<String, String> before, DataModel dataModel, S2CalciteSchema schema,
|
||||
SqlValidatorScope scope) throws Exception {
|
||||
EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
|
||||
EngineType engineType = schema.getOntology().getDatabase().getType();
|
||||
SqlNode condition =
|
||||
getCondition(leftTable, tableView, dataModel, schema, scope, engineType);
|
||||
SqlLiteral sqlLiteral = SemanticNode.getJoinSqlLiteral("");
|
||||
@@ -454,8 +455,7 @@ public class JoinRender extends Renderer {
|
||||
endTime = zipper.getAlias() + "." + endTimeOp.get().getName();
|
||||
dateTime = partMetric.getAlias() + "." + partTime.get().getName();
|
||||
}
|
||||
EngineType engineType =
|
||||
EngineType.fromString(schema.getOntology().getDatabase().getType());
|
||||
EngineType engineType = schema.getOntology().getDatabase().getType();
|
||||
ArrayList<SqlNode> operandList =
|
||||
new ArrayList<>(Arrays.asList(SemanticNode.parse(endTime, scope, engineType),
|
||||
SemanticNode.parse(dateTime, scope, engineType)));
|
||||
@@ -1,13 +1,13 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql.render;
package com.tencent.supersonic.headless.core.translator.parser.calcite.render;

import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.MetricNode;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.SemanticNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.calcite.TableView;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.MetricNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.SemanticNode;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlNodeList;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
@@ -22,10 +22,10 @@ import java.util.List;
public class OutputRender extends Renderer {

@Override
public void render(MetricQueryParam metricCommand, List<DataModel> dataModels,
public void render(OntologyQueryParam metricCommand, List<DataModel> dataModels,
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
TableView selectDataSet = super.tableView;
EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
EngineType engineType = schema.getOntology().getDatabase().getType();
for (String dimension : metricCommand.getDimensions()) {
selectDataSet.getMeasure().add(SemanticNode.parse(dimension, scope, engineType));
}
@@ -1,17 +1,17 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql.render;
package com.tencent.supersonic.headless.core.translator.parser.calcite.render;

import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.MeasureNode;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.MetricNode;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.SemanticNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.calcite.TableView;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.MeasureNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.MetricNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.SemanticNode;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Identify;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Measure;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
import lombok.Data;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
@@ -114,6 +114,6 @@ public abstract class Renderer {
return SemanticNode.buildAs(alias, tableView.build());
}

public abstract void render(MetricQueryParam metricCommand, List<DataModel> dataModels,
public abstract void render(OntologyQueryParam metricCommand, List<DataModel> dataModels,
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception;
}
@@ -1,22 +1,22 @@
package com.tencent.supersonic.headless.core.translator.calcite.sql.render;
package com.tencent.supersonic.headless.core.translator.parser.calcite.render;

import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.core.pojo.MetricQueryParam;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Materialization;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.calcite.sql.TableView;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.DataModelNode;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.DimensionNode;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.FilterNode;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.IdentifyNode;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.MetricNode;
import com.tencent.supersonic.headless.core.translator.calcite.sql.node.SemanticNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.calcite.TableView;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.DataModelNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.DimensionNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.FilterNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.IdentifyNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.MetricNode;
import com.tencent.supersonic.headless.core.translator.parser.calcite.node.SemanticNode;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Identify;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Materialization;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Measure;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.OntologyQueryParam;
import lombok.extern.slf4j.Slf4j;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.validate.SqlValidatorScope;
@@ -34,21 +34,21 @@ import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;

import static com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants.DIMENSION_DELIMITER;
import static com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants.DIMENSION_DELIMITER;

/** process the table dataSet from the defined data model schema */
@Slf4j
public class SourceRender extends Renderer {

public static TableView renderOne(String alias, List<String> fieldWheres,
List<String> reqMetrics, List<String> reqDimensions, String queryWhere,
Set<String> reqMetrics, Set<String> reqDimensions, String queryWhere,
DataModel datasource, SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg)
throws Exception {

TableView dataSet = new TableView();
TableView output = new TableView();
List<String> queryMetrics = new ArrayList<>(reqMetrics);
List<String> queryDimensions = new ArrayList<>(reqDimensions);
Set<String> queryMetrics = new HashSet<>(reqMetrics);
Set<String> queryDimensions = new HashSet<>(reqDimensions);
List<String> fieldWhere = new ArrayList<>(fieldWheres);
Map<String, String> extendFields = new HashMap<>();
if (!fieldWhere.isEmpty()) {
@@ -57,9 +57,7 @@ public class SourceRender extends Renderer {
whereDimMetric(fieldWhere, queryMetrics, queryDimensions, datasource, schema,
dimensions, metrics);
queryMetrics.addAll(metrics);
queryMetrics = uniqList(queryMetrics);
queryDimensions.addAll(dimensions);
queryDimensions = uniqList(queryDimensions);
mergeWhere(fieldWhere, dataSet, output, queryMetrics, queryDimensions, extendFields,
datasource, scope, schema, nonAgg);
}
@@ -109,7 +107,7 @@ public class SourceRender extends Renderer {
S2CalciteSchema schema, boolean nonAgg, Map<String, String> extendFields,
TableView dataSet, TableView output, SqlValidatorScope scope) throws Exception {
List<Dimension> dimensionList = schema.getDimensions().get(datasource.getName());
EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
EngineType engineType = schema.getOntology().getDatabase().getType();
boolean isAdd = false;
if (!CollectionUtils.isEmpty(dimensionList)) {
for (Dimension dim : dimensionList) {
@@ -182,12 +180,12 @@ public class SourceRender extends Renderer {
}
}

private static List<SqlNode> getWhereMeasure(List<String> fields, List<String> queryMetrics,
List<String> queryDimensions, Map<String, String> extendFields, DataModel datasource,
private static List<SqlNode> getWhereMeasure(List<String> fields, Set<String> queryMetrics,
Set<String> queryDimensions, Map<String, String> extendFields, DataModel datasource,
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
Iterator<String> iterator = fields.iterator();
List<SqlNode> whereNode = new ArrayList<>();
EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
EngineType engineType = schema.getOntology().getDatabase().getType();
while (iterator.hasNext()) {
String cur = iterator.next();
if (queryDimensions.contains(cur) || queryMetrics.contains(cur)) {
@@ -224,17 +222,17 @@ public class SourceRender extends Renderer {
}

private static void mergeWhere(List<String> fields, TableView dataSet, TableView outputSet,
List<String> queryMetrics, List<String> queryDimensions,
Map<String, String> extendFields, DataModel datasource, SqlValidatorScope scope,
S2CalciteSchema schema, boolean nonAgg) throws Exception {
Set<String> queryMetrics, Set<String> queryDimensions, Map<String, String> extendFields,
DataModel datasource, SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg)
throws Exception {
List<SqlNode> whereNode = getWhereMeasure(fields, queryMetrics, queryDimensions,
extendFields, datasource, scope, schema, nonAgg);
dataSet.getMeasure().addAll(whereNode);
// getWhere(outputSet,fields,queryMetrics,queryDimensions,datasource,scope,schema);
}

public static void whereDimMetric(List<String> fields, List<String> queryMetrics,
List<String> queryDimensions, DataModel datasource, S2CalciteSchema schema,
public static void whereDimMetric(List<String> fields, Set<String> queryMetrics,
Set<String> queryDimensions, DataModel datasource, S2CalciteSchema schema,
Set<String> dimensions, Set<String> metrics) {
for (String field : fields) {
if (queryDimensions.contains(field) || queryMetrics.contains(field)) {
@@ -310,7 +308,7 @@ public class SourceRender extends Renderer {
return false;
}

private static void addTimeDimension(DataModel dataModel, List<String> queryDimension) {
private static void addTimeDimension(DataModel dataModel, Set<String> queryDimension) {
if (Materialization.TimePartType.ZIPPER.equals(dataModel.getTimePartType())) {
Optional<Dimension> startTimeOp = dataModel.getDimensions().stream()
.filter(d -> Constants.DIMENSION_TYPE_TIME.equalsIgnoreCase(d.getType()))
@@ -336,12 +334,12 @@ public class SourceRender extends Renderer {
}
}

public void render(MetricQueryParam metricQueryParam, List<DataModel> dataModels,
public void render(OntologyQueryParam ontologyQueryParam, List<DataModel> dataModels,
SqlValidatorScope scope, S2CalciteSchema schema, boolean nonAgg) throws Exception {
String queryWhere = metricQueryParam.getWhere();
String queryWhere = ontologyQueryParam.getWhere();
Set<String> whereFields = new HashSet<>();
List<String> fieldWhere = new ArrayList<>();
EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
EngineType engineType = schema.getOntology().getDatabase().getType();
if (queryWhere != null && !queryWhere.isEmpty()) {
SqlNode sqlNode = SemanticNode.parse(queryWhere, scope, engineType);
FilterNode.getFilterField(sqlNode, whereFields);
@@ -349,13 +347,13 @@ public class SourceRender extends Renderer {
}
if (dataModels.size() == 1) {
DataModel dataModel = dataModels.get(0);
super.tableView = renderOne("", fieldWhere, metricQueryParam.getMetrics(),
metricQueryParam.getDimensions(), metricQueryParam.getWhere(), dataModel, scope,
schema, nonAgg);
super.tableView = renderOne("", fieldWhere, ontologyQueryParam.getMetrics(),
ontologyQueryParam.getDimensions(), ontologyQueryParam.getWhere(), dataModel,
scope, schema, nonAgg);
return;
}
JoinRender joinRender = new JoinRender();
joinRender.render(metricQueryParam, dataModels, scope, schema, nonAgg);
joinRender.render(ontologyQueryParam, dataModels, scope, schema, nonAgg);
super.tableView = joinRender.getTableView();
}
}
@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.calcite.s2sql;
package com.tencent.supersonic.headless.core.translator.parser.s2sql;

public class Constants {

@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.calcite.s2sql;
package com.tencent.supersonic.headless.core.translator.parser.s2sql;

import lombok.Builder;
import lombok.Data;
@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.calcite.s2sql;
package com.tencent.supersonic.headless.core.translator.parser.s2sql;

import java.util.Arrays;

@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.calcite.s2sql;
package com.tencent.supersonic.headless.core.translator.parser.s2sql;

import lombok.Builder;
import lombok.Data;
@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.calcite.s2sql;
package com.tencent.supersonic.headless.core.translator.parser.s2sql;

import lombok.Data;

@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.calcite.s2sql;
package com.tencent.supersonic.headless.core.translator.parser.s2sql;

import lombok.AllArgsConstructor;
import lombok.Data;
@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.calcite.s2sql;
package com.tencent.supersonic.headless.core.translator.parser.s2sql;

import lombok.Builder;
import lombok.Data;
@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.calcite.s2sql;
package com.tencent.supersonic.headless.core.translator.parser.s2sql;

import lombok.Builder;
import lombok.Data;
@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.calcite.s2sql;
package com.tencent.supersonic.headless.core.translator.parser.s2sql;

import lombok.Builder;
import lombok.Data;
@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.calcite.s2sql;
package com.tencent.supersonic.headless.core.translator.parser.s2sql;

import lombok.AllArgsConstructor;
import lombok.Builder;
@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.calcite.s2sql;
package com.tencent.supersonic.headless.core.translator.parser.s2sql;

import lombok.Data;

@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.calcite.s2sql;
package com.tencent.supersonic.headless.core.translator.parser.s2sql;

import lombok.Data;

@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.calcite.s2sql;
package com.tencent.supersonic.headless.core.translator.parser.s2sql;

import com.tencent.supersonic.headless.core.pojo.Database;
import lombok.Data;
@@ -25,8 +25,4 @@ public class Ontology {
.collect(Collectors.toList());
}

public Map<Long, DataModel> getModelMap() {
return dataModelMap.values().stream()
.collect(Collectors.toMap(DataModel::getId, dataSource -> dataSource));
}
}
@@ -0,0 +1,20 @@
package com.tencent.supersonic.headless.core.translator.parser.s2sql;

import com.google.common.collect.Sets;
import com.tencent.supersonic.common.pojo.ColumnOrder;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import lombok.Data;

import java.util.List;
import java.util.Set;

@Data
public class OntologyQueryParam {
private Set<String> metrics = Sets.newHashSet();
private Set<String> dimensions = Sets.newHashSet();
private String where;
private Long limit;
private List<ColumnOrder> order;
private boolean nativeQuery = false;
private AggOption aggOption = AggOption.DEFAULT;
}
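A hedged usage sketch of the new parameter object (Lombok @Data generates the accessors; the values are illustrative):

OntologyQueryParam param = new OntologyQueryParam();
param.getMetrics().add("pv");
param.getDimensions().add("sys_imp_date");
param.setWhere("sys_imp_date >= '2024-01-01'");
param.setLimit(100L);
// metrics/dimensions are Sets rather than the Lists MetricQueryParam used,
// which is why SourceRender above could drop its uniqList() de-duplication passes.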
@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.calcite.s2sql;
package com.tencent.supersonic.headless.core.translator.parser.s2sql;

public interface SemanticItem {
String getName();
@@ -1,4 +1,4 @@
package com.tencent.supersonic.headless.core.translator.calcite.s2sql;
package com.tencent.supersonic.headless.core.translator.parser.s2sql;

import lombok.Builder;
import lombok.Data;
@@ -4,9 +4,9 @@ import com.tencent.supersonic.common.util.ContextUtils;
import com.tencent.supersonic.headless.core.cache.QueryCache;
import com.tencent.supersonic.headless.core.executor.QueryAccelerator;
import com.tencent.supersonic.headless.core.executor.QueryExecutor;
import com.tencent.supersonic.headless.core.translator.QueryOptimizer;
import com.tencent.supersonic.headless.core.translator.QueryParser;
import com.tencent.supersonic.headless.core.translator.converter.QueryConverter;
import com.tencent.supersonic.headless.core.translator.optimizer.QueryOptimizer;
import com.tencent.supersonic.headless.core.translator.parser.QueryParser;
import lombok.extern.slf4j.Slf4j;
import org.springframework.core.io.support.SpringFactoriesLoader;


@@ -12,7 +12,6 @@ import com.tencent.supersonic.common.util.DateModeUtils;
import com.tencent.supersonic.common.util.SqlFilterUtils;
import com.tencent.supersonic.common.util.StringUtil;
import com.tencent.supersonic.headless.api.pojo.Measure;
import com.tencent.supersonic.headless.api.pojo.QueryParam;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.api.pojo.enums.MetricDefineType;
import com.tencent.supersonic.headless.api.pojo.request.QueryStructReq;
@@ -20,6 +19,7 @@ import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
import com.tencent.supersonic.headless.core.config.ExecutorConfig;
import com.tencent.supersonic.headless.core.pojo.StructQueryParam;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.ImmutablePair;
@@ -85,25 +85,26 @@ public class SqlGenerateUtils {
return selectSql;
}

public String getLimit(QueryParam queryParam) {
if (queryParam != null && queryParam.getLimit() != null && queryParam.getLimit() > 0) {
return " limit " + queryParam.getLimit();
public String getLimit(StructQueryParam structQueryParam) {
if (structQueryParam != null && structQueryParam.getLimit() != null
&& structQueryParam.getLimit() > 0) {
return " limit " + structQueryParam.getLimit();
}
return "";
}

public String getSelect(QueryParam queryParam) {
String aggStr = queryParam.getAggregators().stream().map(this::getSelectField)
public String getSelect(StructQueryParam structQueryParam) {
String aggStr = structQueryParam.getAggregators().stream().map(this::getSelectField)
.collect(Collectors.joining(","));
return CollectionUtils.isEmpty(queryParam.getGroups()) ? aggStr
: String.join(",", queryParam.getGroups()) + "," + aggStr;
return CollectionUtils.isEmpty(structQueryParam.getGroups()) ? aggStr
: String.join(",", structQueryParam.getGroups()) + "," + aggStr;
}

public String getSelect(QueryParam queryParam, Map<String, String> deriveMetrics) {
String aggStr = queryParam.getAggregators().stream()
public String getSelect(StructQueryParam structQueryParam, Map<String, String> deriveMetrics) {
String aggStr = structQueryParam.getAggregators().stream()
.map(a -> getSelectField(a, deriveMetrics)).collect(Collectors.joining(","));
return CollectionUtils.isEmpty(queryParam.getGroups()) ? aggStr
: String.join(",", queryParam.getGroups()) + "," + aggStr;
return CollectionUtils.isEmpty(structQueryParam.getGroups()) ? aggStr
: String.join(",", structQueryParam.getGroups()) + "," + aggStr;
}

public String getSelectField(final Aggregator agg) {
@@ -128,46 +129,46 @@ public class SqlGenerateUtils {
return deriveMetrics.get(agg.getColumn());
}

public String getGroupBy(QueryParam queryParam) {
if (CollectionUtils.isEmpty(queryParam.getGroups())) {
public String getGroupBy(StructQueryParam structQueryParam) {
if (CollectionUtils.isEmpty(structQueryParam.getGroups())) {
return "";
}
return "group by " + String.join(",", queryParam.getGroups());
return "group by " + String.join(",", structQueryParam.getGroups());
}

public String getOrderBy(QueryParam queryParam) {
if (CollectionUtils.isEmpty(queryParam.getOrders())) {
public String getOrderBy(StructQueryParam structQueryParam) {
if (CollectionUtils.isEmpty(structQueryParam.getOrders())) {
return "";
}
return "order by " + queryParam.getOrders().stream()
return "order by " + structQueryParam.getOrders().stream()
.map(order -> " " + order.getColumn() + " " + order.getDirection() + " ")
.collect(Collectors.joining(","));
}

public String getOrderBy(QueryParam queryParam, Map<String, String> deriveMetrics) {
if (CollectionUtils.isEmpty(queryParam.getOrders())) {
public String getOrderBy(StructQueryParam structQueryParam, Map<String, String> deriveMetrics) {
if (CollectionUtils.isEmpty(structQueryParam.getOrders())) {
return "";
}
if (!queryParam.getOrders().stream()
if (!structQueryParam.getOrders().stream()
.anyMatch(o -> deriveMetrics.containsKey(o.getColumn()))) {
return getOrderBy(queryParam);
return getOrderBy(structQueryParam);
}
return "order by " + queryParam.getOrders().stream()
return "order by " + structQueryParam.getOrders().stream()
.map(order -> " " + (deriveMetrics.containsKey(order.getColumn())
? deriveMetrics.get(order.getColumn())
: order.getColumn()) + " " + order.getDirection() + " ")
.collect(Collectors.joining(","));
}

public String generateWhere(QueryParam queryParam, ItemDateResp itemDateResp) {
public String generateWhere(StructQueryParam structQueryParam, ItemDateResp itemDateResp) {
String whereClauseFromFilter =
sqlFilterUtils.getWhereClause(queryParam.getDimensionFilters());
String whereFromDate = getDateWhereClause(queryParam.getDateInfo(), itemDateResp);
return mergeDateWhereClause(queryParam, whereClauseFromFilter, whereFromDate);
sqlFilterUtils.getWhereClause(structQueryParam.getDimensionFilters());
String whereFromDate = getDateWhereClause(structQueryParam.getDateInfo(), itemDateResp);
return mergeDateWhereClause(structQueryParam, whereClauseFromFilter, whereFromDate);
}

private String mergeDateWhereClause(QueryParam queryParam, String whereClauseFromFilter,
String whereFromDate) {
private String mergeDateWhereClause(StructQueryParam structQueryParam,
String whereClauseFromFilter, String whereFromDate) {
if (StringUtils.isNotEmpty(whereFromDate)
&& StringUtils.isNotEmpty(whereClauseFromFilter)) {
return String.format("%s AND (%s)", whereFromDate, whereClauseFromFilter);
@@ -179,7 +180,7 @@ public class SqlGenerateUtils {
return whereFromDate;
} else if (Objects.isNull(whereFromDate) && StringUtils.isEmpty(whereClauseFromFilter)) {
log.debug("the current date information is empty, enter the date initialization logic");
return dateModeUtils.defaultRecentDateInfo(queryParam.getDateInfo());
return dateModeUtils.defaultRecentDateInfo(structQueryParam.getDateInfo());
}
return whereClauseFromFilter;
}
@@ -203,12 +204,12 @@ public class SqlGenerateUtils {
return dateModeUtils.getDateWhereStr(dateInfo, dateDate);
}

public Triple<String, String, String> getBeginEndTime(QueryParam queryParam,
public Triple<String, String, String> getBeginEndTime(StructQueryParam structQueryParam,
ItemDateResp dataDate) {
if (Objects.isNull(queryParam.getDateInfo())) {
if (Objects.isNull(structQueryParam.getDateInfo())) {
return Triple.of("", "", "");
}
DateConf dateConf = queryParam.getDateInfo();
DateConf dateConf = structQueryParam.getDateInfo();
String dateInfo = dateModeUtils.getSysDateCol(dateConf);
if (dateInfo.isEmpty()) {
return Triple.of("", "", "");
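For reference, the renamed helpers keep their output shape. A sketch of the clauses they produce, assuming StructQueryParam mirrors the old QueryParam fields (groups, orders, limit) as the diff suggests:

StructQueryParam p = new StructQueryParam();
p.setGroups(Arrays.asList("department", "city")); // hypothetical setter via Lombok
p.setLimit(100L);
// getGroupBy(p) -> "group by department,city"
// getLimit(p)   -> " limit 100"
// getOrderBy(p) -> "" when no orders are set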
@@ -3,6 +3,7 @@ package com.tencent.supersonic.headless.core.utils;
import javax.sql.DataSource;

import com.tencent.supersonic.common.pojo.QueryColumn;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.common.util.DateUtils;
import com.tencent.supersonic.headless.api.pojo.enums.DataType;
import com.tencent.supersonic.headless.api.pojo.response.SemanticQueryResp;
@@ -64,7 +65,7 @@ public class SqlUtils {
public SqlUtils init(Database database) {
return SqlUtilsBuilder.getBuilder()
.withName(database.getId() + AT_SYMBOL + database.getName())
.withType(database.getType()).withJdbcUrl(database.getUrl())
.withType(database.getType().getName()).withJdbcUrl(database.getUrl())
.withUsername(database.getUsername()).withPassword(database.getPassword())
.withJdbcDataSource(this.jdbcDataSource).withResultLimit(this.resultLimit)
.withIsQueryLogEnable(this.isQueryLogEnable).build();
@@ -224,7 +225,8 @@ public class SqlUtils {
}

public SqlUtils build() {
Database database = Database.builder().name(this.name).type(this.type).url(this.jdbcUrl)
Database database = Database.builder().name(this.name)
.type(EngineType.fromString(this.type.toUpperCase())).url(this.jdbcUrl)
.username(this.username).password(this.password).build();

SqlUtils sqlUtils = new SqlUtils(database);
@@ -1,9 +1,8 @@
package com.tencent.supersonic.chat.core.parser.aggregate;

import com.alibaba.fastjson.JSON;
import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.translator.calcite.CalciteQueryParser;
import com.tencent.supersonic.headless.core.translator.parser.calcite.CalciteQueryParser;
import org.junit.jupiter.api.Test;
import org.testng.Assert;

@@ -318,7 +317,7 @@ public class CalciteSqlParserTest {
+ " \"updatedAt\": 1711367511146\n" + " }\n" + " }\n" + "}";
QueryStatement queryStatement = JSON.parseObject(json, QueryStatement.class);
CalciteQueryParser calciteSqlParser = new CalciteQueryParser();
calciteSqlParser.parse(queryStatement, AggOption.DEFAULT);
calciteSqlParser.parse(queryStatement);
Assert.assertEquals(queryStatement.getSql().trim().replaceAll("\\s+", ""),
"SELECT`imp_date`AS`sys_imp_date`,SUM(1)AS`pv`" + "FROM" + "`s2_pv_uv_statis`"
+ "GROUPBY`imp_date`,`imp_date`");
@@ -6,6 +6,7 @@ import javax.servlet.http.HttpServletResponse;
import com.tencent.supersonic.auth.api.authentication.utils.UserHolder;
import com.tencent.supersonic.common.pojo.User;
import com.tencent.supersonic.common.util.StringUtil;
import com.tencent.supersonic.headless.api.pojo.SqlEvaluation;
import com.tencent.supersonic.headless.api.pojo.request.QuerySqlReq;
import com.tencent.supersonic.headless.api.pojo.request.QuerySqlsReq;
import com.tencent.supersonic.headless.api.pojo.request.SemanticQueryReq;
@@ -102,4 +103,28 @@ public class SqlQueryApiController {
querySqlReq.setSql(StringUtil.replaceBackticks(sql));
return chatLayerService.validate(querySqlReq, user);
}

@PostMapping("/validateAndQuery")
public Object validateAndQuery(@RequestBody QuerySqlsReq querySqlsReq,
HttpServletRequest request, HttpServletResponse response) throws Exception {
User user = UserHolder.findUser(request, response);
List<QuerySqlReq> convert = convert(querySqlsReq);
for (QuerySqlReq querySqlReq : convert) {
SqlEvaluation validate = chatLayerService.validate(querySqlReq, user);
if (!validate.getIsValidated()) {
throw new Exception(validate.getValidateMsg());
}
}
return queryBySqls(querySqlsReq, request, response);
}

private List<QuerySqlReq> convert(QuerySqlsReq querySqlsReq) {
return querySqlsReq.getSqls().stream().map(sql -> {
QuerySqlReq querySqlReq = new QuerySqlReq();
BeanUtils.copyProperties(querySqlsReq, querySqlReq);
querySqlReq.setSql(StringUtil.replaceBackticks(sql));
return querySqlReq;
}).collect(Collectors.toList());
}

}
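A hedged sketch of driving the new endpoint from client code (assuming QuerySqlsReq exposes a Lombok-style setter for its sqls list, matching the getSqls() used above):

QuerySqlsReq req = new QuerySqlsReq();
req.setSqls(Collections.singletonList(
        "SELECT department, SUM(pv) FROM t GROUP BY department")); // illustrative SQL
// POST /validateAndQuery: every statement must pass chatLayerService.validate()
// before queryBySqls() executes anything; the first failing statement aborts the
// whole request with its validation message.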
@@ -6,24 +6,10 @@ import com.tencent.supersonic.common.pojo.QueryColumn;
import com.tencent.supersonic.common.pojo.User;
import com.tencent.supersonic.common.pojo.enums.TaskStatusEnum;
import com.tencent.supersonic.common.pojo.enums.TimeDimensionEnum;
import com.tencent.supersonic.headless.api.pojo.DataSetSchema;
import com.tencent.supersonic.headless.api.pojo.Dim;
import com.tencent.supersonic.headless.api.pojo.MetaFilter;
import com.tencent.supersonic.headless.api.pojo.QueryParam;
import com.tencent.supersonic.headless.api.pojo.*;
import com.tencent.supersonic.headless.api.pojo.enums.SemanticType;
import com.tencent.supersonic.headless.api.pojo.request.DimensionValueReq;
import com.tencent.supersonic.headless.api.pojo.request.QueryMultiStructReq;
import com.tencent.supersonic.headless.api.pojo.request.QuerySqlReq;
import com.tencent.supersonic.headless.api.pojo.request.QueryStructReq;
import com.tencent.supersonic.headless.api.pojo.request.SchemaFilterReq;
import com.tencent.supersonic.headless.api.pojo.request.SemanticQueryReq;
import com.tencent.supersonic.headless.api.pojo.response.DimensionResp;
import com.tencent.supersonic.headless.api.pojo.response.ItemResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricResp;
import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticQueryResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticTranslateResp;
import com.tencent.supersonic.headless.api.pojo.request.*;
import com.tencent.supersonic.headless.api.pojo.response.*;
import com.tencent.supersonic.headless.chat.knowledge.HanlpMapResult;
import com.tencent.supersonic.headless.chat.knowledge.KnowledgeBaseService;
import com.tencent.supersonic.headless.chat.knowledge.MapResult;
@@ -33,6 +19,8 @@ import com.tencent.supersonic.headless.chat.knowledge.helper.NatureHelper;
import com.tencent.supersonic.headless.core.cache.QueryCache;
import com.tencent.supersonic.headless.core.executor.QueryExecutor;
import com.tencent.supersonic.headless.core.pojo.QueryStatement;
import com.tencent.supersonic.headless.core.pojo.SqlQueryParam;
import com.tencent.supersonic.headless.core.pojo.StructQueryParam;
import com.tencent.supersonic.headless.core.translator.SemanticTranslator;
import com.tencent.supersonic.headless.core.utils.ComponentFactory;
import com.tencent.supersonic.headless.server.annotation.S2DataPermission;
@@ -52,12 +40,7 @@ import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.*;
import java.util.stream.Collectors;

@Service
@@ -307,30 +290,13 @@ public class S2SemanticLayerService implements SemanticLayerService {
return queryStatement;
}

private QueryStatement buildSqlQueryStatement(QuerySqlReq querySqlReq, User user) {
// If dataSetId or DataSetName is empty, parse dataSetId from the SQL
if (querySqlReq.needGetDataSetId()) {
Long dataSetId = dataSetService.getDataSetIdFromSql(querySqlReq.getSql(), user);
querySqlReq.setDataSetId(dataSetId);
}

QueryStatement queryStatement = buildStructQueryStatement(querySqlReq);
queryStatement.setIsS2SQL(true);
queryStatement.setSql(querySqlReq.getSql());
return queryStatement;
}

private QueryStatement buildStructQueryStatement(SemanticQueryReq queryReq) {
private QueryStatement buildQueryStatement(SemanticQueryReq queryReq) {
SchemaFilterReq schemaFilterReq = new SchemaFilterReq();
schemaFilterReq.setDataSetId(queryReq.getDataSetId());
schemaFilterReq.setModelIds(queryReq.getModelIds());
SemanticSchemaResp semanticSchemaResp = schemaService.fetchSemanticSchema(schemaFilterReq);

QueryStatement queryStatement = new QueryStatement();
QueryParam queryParam = new QueryParam();
BeanUtils.copyProperties(queryReq, queryParam);
queryStatement.setQueryParam(queryParam);
queryStatement.setModelIds(queryReq.getModelIds());
queryStatement.setEnableOptimize(queryUtils.enableOptimize());
queryStatement.setDataSetId(queryReq.getDataSetId());
queryStatement.setSemanticSchemaResp(semanticSchemaResp);
@@ -338,6 +304,31 @@ public class S2SemanticLayerService implements SemanticLayerService {
return queryStatement;
}

private QueryStatement buildSqlQueryStatement(QuerySqlReq querySqlReq, User user) {
QueryStatement queryStatement = buildQueryStatement(querySqlReq);
queryStatement.setIsS2SQL(true);

SqlQueryParam sqlQueryParam = new SqlQueryParam();
sqlQueryParam.setSql(querySqlReq.getSql());
queryStatement.setSqlQueryParam(sqlQueryParam);

// If dataSetId or DataSetName is empty, parse dataSetId from the SQL
if (querySqlReq.needGetDataSetId()) {
Long dataSetId = dataSetService.getDataSetIdFromSql(querySqlReq.getSql(), user);
querySqlReq.setDataSetId(dataSetId);
}
return queryStatement;
}

private QueryStatement buildStructQueryStatement(SemanticQueryReq queryReq) {
QueryStatement queryStatement = buildQueryStatement(queryReq);
StructQueryParam structQueryParam = new StructQueryParam();
BeanUtils.copyProperties(queryReq, structQueryParam);
queryStatement.setStructQueryParam(structQueryParam);
queryStatement.setIsS2SQL(false);
return queryStatement;
}

private QueryStatement buildMultiStructQueryStatement(QueryMultiStructReq queryMultiStructReq) {
List<QueryStatement> queryStatements = new ArrayList<>();
for (QueryStructReq queryStructReq : queryMultiStructReq.getQueryStructReqs()) {
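The refactor above splits statement building into a shared base plus a per-flavor parameter object; a condensed sketch of the resulting flow, using only names taken from the diff:

// S2SQL flavor
QueryStatement stmt = buildQueryStatement(querySqlReq); // schema lookup + common fields
SqlQueryParam sqlParam = new SqlQueryParam();
sqlParam.setSql(querySqlReq.getSql());
stmt.setSqlQueryParam(sqlParam);
stmt.setIsS2SQL(true);
// The struct flavor reuses the same base and attaches a StructQueryParam instead.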
@@ -33,7 +33,6 @@ public class ModelYamlManager {
ModelDetail modelDetail = modelResp.getModelDetail();
DbAdaptor engineAdaptor = DbAdaptorFactory.getEngineAdaptor(databaseResp.getType());
SysTimeDimensionBuilder.addSysTimeDimension(modelDetail.getDimensions(), engineAdaptor);
addInterCntMetric(modelResp.getBizName(), modelDetail);
DataModelYamlTpl dataModelYamlTpl = new DataModelYamlTpl();
dataModelYamlTpl.setType(databaseResp.getType());
BeanUtils.copyProperties(modelDetail, dataModelYamlTpl);
@@ -2,33 +2,12 @@ package com.tencent.supersonic.headless.server.manager;

import com.tencent.supersonic.common.pojo.ModelRela;
import com.tencent.supersonic.common.pojo.enums.FilterOperatorEnum;
import com.tencent.supersonic.headless.api.pojo.Field;
import com.tencent.supersonic.headless.api.pojo.enums.TagDefineType;
import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.TagResp;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Constants;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataModel;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DataType;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Dimension;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.DimensionTimeTypeParams;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Identify;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.JoinRelation;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Materialization.TimePartType;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Measure;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Metric;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.MetricTypeParams;
import com.tencent.supersonic.headless.core.translator.calcite.s2sql.Ontology;
import com.tencent.supersonic.headless.core.translator.calcite.sql.S2CalciteSchema;
import com.tencent.supersonic.headless.server.pojo.yaml.DataModelYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.DimensionTimeTypeParamsTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.DimensionYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.FieldParamYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.IdentifyYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.MeasureYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.MetricParamYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.MetricTypeParamsYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.MetricYamlTpl;
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.*;
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Materialization.TimePartType;
import com.tencent.supersonic.headless.server.pojo.yaml.*;
import com.tencent.supersonic.headless.server.service.SchemaService;
import com.tencent.supersonic.headless.server.utils.DatabaseConverter;
import lombok.extern.slf4j.Slf4j;
@@ -36,15 +15,8 @@ import org.apache.commons.lang3.tuple.Triple;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.*;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;

@Slf4j
@@ -90,82 +62,6 @@ public class SemanticSchemaManager {
return ontology;
}

public Ontology getTagSemanticModel(SemanticSchemaResp semanticSchemaResp) throws Exception {
if (CollectionUtils.isEmpty(semanticSchemaResp.getTags())) {
throw new Exception("semanticSchemaResp tag is empty");
}
Ontology ontology = buildOntology(semanticSchemaResp);
// Map<String, List<Dimension>> dimensions = new HashMap<>();
Map<Long, List<TagResp>> tagMap = new HashMap<>();
for (TagResp tagResp : semanticSchemaResp.getTags()) {
if (!tagMap.containsKey(tagResp.getModelId())) {
tagMap.put(tagResp.getModelId(), new ArrayList<>());
}
tagMap.get(tagResp.getModelId()).add(tagResp);
}
if (Objects.nonNull(ontology.getDataModelMap()) && !ontology.getDataModelMap().isEmpty()) {
for (Map.Entry<String, DataModel> entry : ontology.getDataModelMap().entrySet()) {
List<Dimension> modelDimensions = new ArrayList<>();
if (!ontology.getDimensionMap().containsKey(entry.getKey())) {
ontology.getDimensionMap().put(entry.getKey(), modelDimensions);
} else {
modelDimensions = ontology.getDimensionMap().get(entry.getKey());
}
if (tagMap.containsKey(entry.getValue().getId())) {
for (TagResp tagResp : tagMap.get(entry.getValue().getId())) {
addTagModel(tagResp, modelDimensions, ontology.getMetrics());
}
}
}
}

return ontology;
}

private void addTagModel(TagResp tagResp, List<Dimension> modelDimensions,
List<Metric> modelMetrics) throws Exception {
TagDefineType tagDefineType = TagDefineType.valueOf(tagResp.getTagDefineType());
switch (tagDefineType) {
case FIELD:
case DIMENSION:
if (TagDefineType.DIMENSION.equals(tagResp.getTagDefineType())) {
Optional<Dimension> modelDimension = modelDimensions.stream()
// .filter(d -> d.getBizName().equals(tagResp.getExpr()))
.findFirst();
if (modelDimension.isPresent()) {
modelDimension.get().setName(tagResp.getBizName());
return;
}
}
Dimension dimension = Dimension.builder().build();
dimension.setType("");
// dimension.setExpr(tagResp.getExpr());
dimension.setName(tagResp.getBizName());
dimension.setOwners("");
dimension.setBizName(tagResp.getBizName());
if (Objects.isNull(dimension.getDataType())) {
dimension.setDataType(DataType.UNKNOWN);
}

DimensionTimeTypeParams dimensionTimeTypeParams = new DimensionTimeTypeParams();
dimension.setDimensionTimeTypeParams(dimensionTimeTypeParams);
modelDimensions.add(dimension);
return;
case METRIC:
Optional<Metric> modelMetric = modelMetrics.stream()
// .filter(m -> m.getName().equalsIgnoreCase(tagResp.getExpr()))
.findFirst();
if (modelMetric.isPresent()) {
modelMetric.get().setName(tagResp.getBizName());
} else {
throw new Exception(
String.format("tag [{}] cant find the metric", tagResp.getBizName()));
}
return;
default:
}
}

public static List<Metric> getMetrics(final List<MetricYamlTpl> t) {
return getMetricsByMetricYamlTpl(t);
}
@@ -184,16 +80,6 @@ public class SemanticSchemaManager {
if (Objects.nonNull(d.getModelSourceTypeEnum())) {
dataModel.setTimePartType(TimePartType.of(d.getModelSourceTypeEnum().name()));
}
if (Objects.nonNull(d.getFields()) && !CollectionUtils.isEmpty(d.getFields())) {
Set<String> measures = dataModel.getMeasures().stream().map(mm -> mm.getName())
.collect(Collectors.toSet());
for (Field f : d.getFields()) {
if (!measures.contains(f.getFieldName())) {
dataModel.getMeasures().add(Measure.builder().expr(f.getFieldName())
.name(f.getFieldName()).agg("").build());
}
}
}
return dataModel;
}
@@ -2,6 +2,7 @@ package com.tencent.supersonic.headless.server.persistence.repository;

import com.tencent.supersonic.headless.api.pojo.request.DictItemFilter;
import com.tencent.supersonic.headless.api.pojo.request.DictSingleTaskReq;
import com.tencent.supersonic.headless.api.pojo.request.ValueTaskQueryReq;
import com.tencent.supersonic.headless.api.pojo.response.DictItemResp;
import com.tencent.supersonic.headless.api.pojo.response.DictTaskResp;
import com.tencent.supersonic.headless.server.persistence.dataobject.DictConfDO;
@@ -26,4 +27,6 @@ public interface DictRepository {
DictTaskDO queryDictTaskById(Long id);

DictTaskResp queryLatestDictTask(DictSingleTaskReq taskReq);

List<DictTaskDO> queryAllDictTask(ValueTaskQueryReq taskQueryReq);
}
@@ -4,6 +4,7 @@ import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.tencent.supersonic.common.pojo.enums.TypeEnums;
import com.tencent.supersonic.headless.api.pojo.request.DictItemFilter;
import com.tencent.supersonic.headless.api.pojo.request.DictSingleTaskReq;
import com.tencent.supersonic.headless.api.pojo.request.ValueTaskQueryReq;
import com.tencent.supersonic.headless.api.pojo.response.DictItemResp;
import com.tencent.supersonic.headless.api.pojo.response.DictTaskResp;
import com.tencent.supersonic.headless.api.pojo.response.DimensionResp;
@@ -14,11 +15,14 @@ import com.tencent.supersonic.headless.server.persistence.mapper.DictTaskMapper;
import com.tencent.supersonic.headless.server.persistence.repository.DictRepository;
import com.tencent.supersonic.headless.server.service.DimensionService;
import com.tencent.supersonic.headless.server.utils.DictUtils;
import com.xkzhangsan.time.utils.CollectionUtil;
import lombok.extern.slf4j.Slf4j;
import org.codehaus.plexus.util.StringUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Repository;
import org.springframework.util.CollectionUtils;

import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Objects;
@@ -100,6 +104,23 @@ public class DictRepositoryImpl implements DictRepository {
return taskResp;
}

@Override
public List<DictTaskDO> queryAllDictTask(ValueTaskQueryReq taskQueryReq) {
QueryWrapper<DictTaskDO> wrapper = new QueryWrapper<>();
if (Objects.nonNull(taskQueryReq.getItemId())) {
wrapper.lambda().eq(DictTaskDO::getItemId, taskQueryReq.getItemId());
}
if (CollectionUtil.isNotEmpty(taskQueryReq.getTaskStatusList())) {
wrapper.lambda().in(DictTaskDO::getStatus, taskQueryReq.getTaskStatusList());
}
if (StringUtils.isNotEmpty(taskQueryReq.getKey())) {
String key = taskQueryReq.getKey();
wrapper.lambda().and(qw -> qw.like(DictTaskDO::getName, key).or()
.like(DictTaskDO::getDescription, key).or().like(DictTaskDO::getConfig, key));
}
return dictTaskMapper.selectList(wrapper);
}

@Override
public Long addDictConf(DictConfDO dictConfDO) {
dictConfMapper.insert(dictConfDO);
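For orientation, the wrapper built in queryAllDictTask composes each filter only when the corresponding request field is present; the resulting WHERE clause is roughly the following (a sketch, not MyBatis-Plus generated output):

// WHERE item_id = ?                                              -- when itemId is set
//   AND status IN (?, ...)                                       -- when taskStatusList is non-empty
//   AND (name LIKE ? OR description LIKE ? OR config LIKE ?)     -- when key is set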
@@ -13,6 +13,7 @@ import com.tencent.supersonic.headless.api.pojo.request.DictItemFilter;
import com.tencent.supersonic.headless.api.pojo.request.DictItemReq;
import com.tencent.supersonic.headless.api.pojo.request.DictSingleTaskReq;
import com.tencent.supersonic.headless.api.pojo.request.DictValueReq;
import com.tencent.supersonic.headless.api.pojo.request.ValueTaskQueryReq;
import com.tencent.supersonic.headless.api.pojo.response.DictItemResp;
import com.tencent.supersonic.headless.api.pojo.response.DictTaskResp;
import com.tencent.supersonic.headless.api.pojo.response.DictValueDimResp;
@@ -132,6 +133,18 @@ public class KnowledgeController {
return taskService.queryLatestDictTask(taskReq, user);
}

/**
* queryDictTask - returns the dict task list of dimensions by page
*
* @param taskQueryReq
*/
@PostMapping("/task/search/page")
public PageInfo<DictTaskResp> queryDictTask(@RequestBody ValueTaskQueryReq taskQueryReq,
HttpServletRequest request, HttpServletResponse response) {
User user = UserHolder.findUser(request, response);
return taskService.queryDictTask(taskQueryReq, user);
}

@GetMapping("/embedding/reload")
public Object reloadEmbedding() {
metaEmbeddingTask.reloadMetaEmbedding();
@@ -4,6 +4,7 @@ import com.github.pagehelper.PageInfo;
import com.tencent.supersonic.common.pojo.User;
import com.tencent.supersonic.headless.api.pojo.request.DictSingleTaskReq;
import com.tencent.supersonic.headless.api.pojo.request.DictValueReq;
import com.tencent.supersonic.headless.api.pojo.request.ValueTaskQueryReq;
import com.tencent.supersonic.headless.api.pojo.response.DictTaskResp;
import com.tencent.supersonic.headless.api.pojo.response.DictValueDimResp;

@@ -17,6 +18,8 @@ public interface DictTaskService {

DictTaskResp queryLatestDictTask(DictSingleTaskReq taskReq, User user);

PageInfo<DictTaskResp> queryDictTask(ValueTaskQueryReq taskQueryReq, User user);

PageInfo<DictValueDimResp> queryDictValue(DictValueReq dictValueReq, User user);

String queryDictFilePath(DictValueReq dictValueReq, User user);
@@ -9,15 +9,7 @@ import com.tencent.supersonic.headless.api.pojo.MetaFilter;
import com.tencent.supersonic.headless.api.pojo.SemanticSchema;
import com.tencent.supersonic.headless.api.pojo.request.ItemUseReq;
import com.tencent.supersonic.headless.api.pojo.request.SchemaFilterReq;
import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
import com.tencent.supersonic.headless.api.pojo.response.DimensionResp;
import com.tencent.supersonic.headless.api.pojo.response.DomainResp;
import com.tencent.supersonic.headless.api.pojo.response.ItemResp;
import com.tencent.supersonic.headless.api.pojo.response.ItemUseResp;
import com.tencent.supersonic.headless.api.pojo.response.MetricResp;
import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
import com.tencent.supersonic.headless.api.pojo.response.ModelSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
import com.tencent.supersonic.headless.api.pojo.response.*;
import com.tencent.supersonic.headless.server.pojo.yaml.DataModelYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.DimensionYamlTpl;
import com.tencent.supersonic.headless.server.pojo.yaml.MetricYamlTpl;
@@ -64,5 +56,4 @@ public interface SchemaService {

ItemDateResp getItemDate(ItemDateFilter dimension, ItemDateFilter metric);

DatabaseResp getDatabase(Long id);
}
@@ -1,15 +1,18 @@
package com.tencent.supersonic.headless.server.service.impl;

import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import com.tencent.supersonic.common.pojo.Constants;
import com.tencent.supersonic.common.pojo.User;
import com.tencent.supersonic.common.pojo.enums.StatusEnum;
import com.tencent.supersonic.common.pojo.enums.TaskStatusEnum;
import com.tencent.supersonic.common.util.BeanMapper;
import com.tencent.supersonic.common.util.DateUtils;
import com.tencent.supersonic.headless.api.pojo.DimValueMap;
import com.tencent.supersonic.headless.api.pojo.request.DictItemFilter;
import com.tencent.supersonic.headless.api.pojo.request.DictSingleTaskReq;
import com.tencent.supersonic.headless.api.pojo.request.DictValueReq;
import com.tencent.supersonic.headless.api.pojo.request.ValueTaskQueryReq;
import com.tencent.supersonic.headless.api.pojo.response.DictItemResp;
import com.tencent.supersonic.headless.api.pojo.response.DictTaskResp;
import com.tencent.supersonic.headless.api.pojo.response.DictValueDimResp;
@@ -116,14 +119,17 @@ public class DictTaskServiceImpl implements DictTaskService {
fileHandler.writeFile(data, fileName, false);

// 3.Change in-memory dictionary data in real time
String status = TaskStatusEnum.SUCCESS.getStatus();
try {
dictWordService.loadDictWord();

dictTaskDO.setStatus(TaskStatusEnum.SUCCESS.getStatus());
dictRepository.editDictTask(dictTaskDO);
} catch (Exception e) {
log.error("reloadCustomDictionary error", e);
status = TaskStatusEnum.ERROR.getStatus();
dictTaskDO.setDescription(e.toString());
}
dictTaskDO.setStatus(status);
dictTaskDO.setElapsedMs(DateUtils.calculateDiffMs(dictTaskDO.getCreatedAt()));
dictRepository.editDictTask(dictTaskDO);
}

@Override
@@ -164,6 +170,17 @@ public class DictTaskServiceImpl implements DictTaskService {
return dictRepository.queryLatestDictTask(taskReq);
}

@Override
public PageInfo<DictTaskResp> queryDictTask(ValueTaskQueryReq taskQueryReq, User user) {
PageInfo<DictTaskDO> dictTaskDOPageInfo =
PageHelper.startPage(taskQueryReq.getCurrent(), taskQueryReq.getPageSize())
.doSelectPageInfo(() -> dictRepository.queryAllDictTask(taskQueryReq));
PageInfo<DictTaskResp> dictTaskRespPageInfo = new PageInfo<>();
BeanMapper.mapper(dictTaskDOPageInfo, dictTaskRespPageInfo);
dictTaskRespPageInfo.setList(dictConverter.taskDO2Resp(dictTaskDOPageInfo.getList()));
return dictTaskRespPageInfo;
}

@Override
public PageInfo<DictValueDimResp> queryDictValue(DictValueReq dictValueReq, User user) {
// todo: optimize the in-memory read structure
@@ -536,8 +536,4 @@ public class SchemaServiceImpl implements SchemaService {
return modelService.getItemDate(dimension, metric);
}

@Override
public DatabaseResp getDatabase(Long id) {
return databaseService.getDatabase(id);
}
}
@@ -1,6 +1,7 @@
package com.tencent.supersonic.headless.server.utils;

import com.alibaba.fastjson.JSONObject;
import com.tencent.supersonic.common.pojo.enums.EngineType;
import com.tencent.supersonic.headless.api.pojo.request.DatabaseReq;
import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
import com.tencent.supersonic.headless.core.pojo.ConnectInfo;
@@ -16,6 +17,7 @@ public class DatabaseConverter {
public static Database convert(DatabaseResp databaseResp) {
Database database = new Database();
BeanUtils.copyProperties(databaseResp, database);
database.setType(EngineType.fromString(databaseResp.getType().toUpperCase()));
return database;
}
@@ -34,6 +34,7 @@ import com.tencent.supersonic.headless.server.service.DimensionService;
import com.tencent.supersonic.headless.server.service.MetricService;
import com.tencent.supersonic.headless.server.service.ModelService;
import com.tencent.supersonic.headless.server.service.TagMetaService;
import com.xkzhangsan.time.utils.CollectionUtil;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.BeanUtils;
@@ -533,4 +534,12 @@ public class DictUtils {
resp.setConfig(JsonUtil.toObject(dictTaskDO.getConfig(), ItemValueConfig.class));
return resp;
}

public List<DictTaskResp> taskDO2Resp(List<DictTaskDO> dictTaskDOList) {
List<DictTaskResp> dictTaskRespList = new ArrayList<>();
if (CollectionUtil.isNotEmpty(dictTaskDOList)) {
dictTaskDOList.stream().forEach(taskDO -> dictTaskRespList.add(taskDO2Resp(taskDO)));
}
return dictTaskRespList;
}
}
@@ -67,7 +67,8 @@ public class MetricDrillDownChecker {
List<MetricResp> metricResps = getMetrics(metricFields, semanticSchemaResp);
if (!checkDrillDownDimension(dimensionBizName, metricResps, semanticSchemaResp)) {
DimSchemaResp dimSchemaResp = semanticSchemaResp.getDimension(dimensionBizName);
if (Objects.nonNull(dimSchemaResp) && dimSchemaResp.isPartitionTime()) {
if (Objects.isNull(dimSchemaResp)
|| (Objects.nonNull(dimSchemaResp) && dimSchemaResp.isPartitionTime())) {
continue;
}
String errMsg =
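In the new condition, the Objects.nonNull guard inside the second operand is redundant once the Objects.isNull check short-circuits; the test is equivalent to the simpler form:

if (Objects.isNull(dimSchemaResp) || dimSchemaResp.isPartitionTime()) {
    continue; // skip dimensions that are missing from the schema or are partition-time
}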