Mirror of https://github.com/tencentmusic/supersonic.git (synced 2025-12-13 13:07:32 +00:00)
[improvement][headless]Refactor translator module to make code logic cleaner and more readable.
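The refactor visible in this diff: the translator-internal DataModel and Database pojos are removed, the translator layer consumes the API-layer ModelResp and DatabaseResp directly, Ontology is keyed by ModelResp (getModelMap/getMetrics), and engine types are resolved via EngineType.fromString(database.getType()). The sketch below illustrates the resulting access pattern; it is assembled only from accessors visible in this diff and is not an excerpt from the repository (local variable names such as bizName are hypothetical).

    // Hypothetical usage sketch, assembled only from accessors visible in this diff.
    Ontology ontology = queryStatement.getOntology();
    // Connection info now comes from the API-layer DatabaseResp instead of core.pojo.Database.
    DatabaseResp database = ontology.getDatabase();
    EngineType engineType = EngineType.fromString(database.getType());
    // Data models are now API-layer ModelResp objects keyed by bizName.
    ModelResp model = ontology.getModelMap().get(bizName);
    String modelSql = model.getModelDetail().getSqlQuery();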
@@ -81,7 +81,7 @@ public class Configuration {
                 .setUnquotedCasing(Casing.TO_UPPER).setConformance(sqlDialect.getConformance())
                 .setLex(Lex.BIG_QUERY);
         if (EngineType.HANADB.equals(engineType)) {
             parserConfig = parserConfig.setQuoting(Quoting.DOUBLE_QUOTE);
         }
         parserConfig = parserConfig.setQuotedCasing(Casing.UNCHANGED);
         parserConfig = parserConfig.setUnquotedCasing(Casing.UNCHANGED);
@@ -21,10 +21,11 @@ public class SqlDialectFactory {
             .withDatabaseProduct(DatabaseProduct.BIG_QUERY).withLiteralQuoteString("'")
             .withLiteralEscapedQuoteString("''").withUnquotedCasing(Casing.UNCHANGED)
             .withQuotedCasing(Casing.UNCHANGED).withCaseSensitive(false);
-    public static final Context HANADB_CONTEXT = SqlDialect.EMPTY_CONTEXT
-            .withDatabaseProduct(DatabaseProduct.BIG_QUERY).withLiteralQuoteString("'")
-            .withIdentifierQuoteString("\"").withLiteralEscapedQuoteString("''").withUnquotedCasing(Casing.UNCHANGED)
-            .withQuotedCasing(Casing.UNCHANGED).withCaseSensitive(true);
+    public static final Context HANADB_CONTEXT =
+            SqlDialect.EMPTY_CONTEXT.withDatabaseProduct(DatabaseProduct.BIG_QUERY)
+                    .withLiteralQuoteString("'").withIdentifierQuoteString("\"")
+                    .withLiteralEscapedQuoteString("''").withUnquotedCasing(Casing.UNCHANGED)
+                    .withQuotedCasing(Casing.UNCHANGED).withCaseSensitive(true);
     private static Map<EngineType, SemanticSqlDialect> sqlDialectMap;

     static {
@@ -18,6 +18,8 @@ public class ModelDetail {

     private String queryType;

+    private String dbType;
+
     private String sqlQuery;

     private String tableQuery;
@@ -4,8 +4,7 @@ import com.google.common.base.Objects;
 import com.tencent.supersonic.common.pojo.RecordInfo;
 import com.tencent.supersonic.common.pojo.enums.SensitiveLevelEnum;
 import com.tencent.supersonic.common.pojo.enums.TypeEnums;
-import lombok.Data;
-import lombok.ToString;
+import lombok.*;
 import org.apache.commons.lang3.StringUtils;

 import java.util.ArrayList;
@@ -17,19 +16,20 @@ import java.util.List;
 public class SchemaItem extends RecordInfo {

     private static String aliasSplit = ",";
-    private Long id;

-    private String name;
+    protected Long id;

-    private String bizName;
+    protected String name;

-    private String description;
+    protected String bizName;

-    private Integer status;
+    protected String description;

-    private TypeEnums typeEnum;
+    protected Integer status;

-    private Integer sensitiveLevel = SensitiveLevelEnum.LOW.getCode();
+    protected TypeEnums typeEnum;
+
+    protected Integer sensitiveLevel = SensitiveLevelEnum.LOW.getCode();

     @Override
     public boolean equals(Object o) {
@@ -2,6 +2,7 @@ package com.tencent.supersonic.headless.api.pojo.response;

 import com.google.common.collect.Lists;
 import com.tencent.supersonic.common.pojo.RecordInfo;
+import com.tencent.supersonic.common.util.AESEncryptionUtil;
 import lombok.AllArgsConstructor;
 import lombok.Builder;
 import lombok.Data;
@@ -64,4 +65,8 @@ public class DatabaseResp extends RecordInfo {
         }
         return "";
     }
+
+    public String passwordDecrypt() {
+        return AESEncryptionUtil.aesDecryptECB(password);
+    }
 }
@@ -1,17 +1,9 @@
 package com.tencent.supersonic.headless.api.pojo.response;

 import com.google.common.collect.Lists;
-import com.tencent.supersonic.headless.api.pojo.Dimension;
-import com.tencent.supersonic.headless.api.pojo.DrillDownDimension;
-import com.tencent.supersonic.headless.api.pojo.Field;
-import com.tencent.supersonic.headless.api.pojo.Identify;
-import com.tencent.supersonic.headless.api.pojo.ModelDetail;
-import com.tencent.supersonic.headless.api.pojo.SchemaItem;
+import com.tencent.supersonic.headless.api.pojo.*;
 import com.tencent.supersonic.headless.api.pojo.enums.IdentifyType;
-import lombok.AllArgsConstructor;
-import lombok.Data;
-import lombok.NoArgsConstructor;
-import lombok.ToString;
+import lombok.*;
 import org.springframework.util.CollectionUtils;

 import java.util.ArrayList;
@@ -26,6 +18,7 @@ import java.util.stream.Collectors;
 @ToString(callSuper = true)
 @AllArgsConstructor
 @NoArgsConstructor
+@Builder
 public class ModelResp extends SchemaItem {

     private Long domainId;
@@ -62,6 +55,14 @@ public class ModelResp extends SchemaItem {
         return isOpen != null && isOpen == 1;
     }

+    public List<Measure> getMeasures() {
+        return modelDetail.getMeasures();
+    }
+
+    public List<Identify> getIdentifiers() {
+        return modelDetail.getIdentifiers();
+    }
+
     public List<Dimension> getTimeDimension() {
         if (modelDetail == null) {
             return Lists.newArrayList();
@@ -3,10 +3,10 @@ package com.tencent.supersonic.headless.core.executor;
 import com.tencent.supersonic.common.calcite.Configuration;
 import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
 import com.tencent.supersonic.headless.core.pojo.Materialization;
+import com.tencent.supersonic.headless.core.translator.parser.TimeRange;
 import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteTable;
 import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteTable.Builder;
 import com.tencent.supersonic.headless.core.translator.parser.calcite.SchemaBuilder;
-import com.tencent.supersonic.headless.core.translator.parser.s2sql.TimeRange;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.calcite.adapter.enumerable.EnumerableRules;
 import org.apache.calcite.config.CalciteConnectionConfigImpl;
@@ -1,8 +1,8 @@
 package com.tencent.supersonic.headless.core.executor;

 import com.tencent.supersonic.common.util.ContextUtils;
+import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
 import com.tencent.supersonic.headless.api.pojo.response.SemanticQueryResp;
-import com.tencent.supersonic.headless.core.pojo.Database;
 import com.tencent.supersonic.headless.core.pojo.QueryStatement;
 import com.tencent.supersonic.headless.core.utils.ComponentFactory;
 import com.tencent.supersonic.headless.core.utils.SqlUtils;
@@ -38,7 +38,7 @@ public class JdbcExecutor implements QueryExecutor {
         SqlUtils sqlUtils = ContextUtils.getBean(SqlUtils.class);
         String sql = StringUtils.normalizeSpace(queryStatement.getSql());
         log.info("executing SQL: {}", sql);
-        Database database = queryStatement.getOntology().getDatabase();
+        DatabaseResp database = queryStatement.getOntology().getDatabase();
         SemanticQueryResp queryResultWithColumns = new SemanticQueryResp();
         try {
             SqlUtils sqlUtil = sqlUtils.init(database);
@@ -1,38 +0,0 @@
-package com.tencent.supersonic.headless.core.pojo;
-
-import com.tencent.supersonic.headless.api.pojo.Identify;
-import com.tencent.supersonic.headless.api.pojo.Measure;
-import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
-import com.tencent.supersonic.headless.core.translator.parser.s2sql.Materialization;
-import lombok.Builder;
-import lombok.Data;
-
-import java.util.List;
-
-@Data
-@Builder
-public class DataModel {
-
-    private Long id;
-
-    private String name;
-
-    private Long modelId;
-
-    private String type;
-
-    private String sqlQuery;
-
-    private String tableQuery;
-
-    private List<Identify> identifiers;
-
-    private List<DimSchemaResp> dimensions;
-
-    private List<Measure> measures;
-
-    private String aggTime;
-
-    private com.tencent.supersonic.headless.core.translator.parser.s2sql.Materialization.TimePartType timePartType =
-            Materialization.TimePartType.None;
-}
@@ -1,49 +0,0 @@
-package com.tencent.supersonic.headless.core.pojo;
-
-import com.google.common.collect.Lists;
-import com.tencent.supersonic.common.pojo.RecordInfo;
-import com.tencent.supersonic.common.pojo.enums.EngineType;
-import com.tencent.supersonic.common.util.AESEncryptionUtil;
-import lombok.AllArgsConstructor;
-import lombok.Builder;
-import lombok.Data;
-import lombok.NoArgsConstructor;
-
-import java.util.List;
-
-@Data
-@AllArgsConstructor
-@NoArgsConstructor
-@Builder
-public class Database extends RecordInfo {
-
-    private Long id;
-
-    private Long domainId;
-
-    private String name;
-
-    private String description;
-
-    private String version;
-
-    private String url;
-
-    private String username;
-
-    private String password;
-
-    private String database;
-
-    private String schema;
-    /** mysql,clickhouse */
-    private EngineType type;
-
-    private List<String> admins = Lists.newArrayList();
-
-    private List<String> viewers = Lists.newArrayList();
-
-    public String passwordDecrypt() {
-        return AESEncryptionUtil.aesDecryptECB(password);
-    }
-}
@@ -2,6 +2,7 @@ package com.tencent.supersonic.headless.core.pojo;

 import com.alibaba.druid.pool.DruidDataSource;
 import com.tencent.supersonic.headless.api.pojo.enums.DataType;
+import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
 import com.tencent.supersonic.headless.core.utils.JdbcDataSourceUtils;
 import lombok.Getter;
 import lombok.extern.slf4j.Slf4j;
@@ -106,7 +107,7 @@ public class JdbcDataSource {
         }
     }

-    public void removeDatasource(Database database) {
+    public void removeDatasource(DatabaseResp database) {

         String key = getDataSourceKey(database);

@@ -128,7 +129,7 @@
         }
     }

-    public DruidDataSource getDataSource(Database database) throws RuntimeException {
+    public DruidDataSource getDataSource(DatabaseResp database) throws RuntimeException {

         String name = database.getName();
         String jdbcUrl = database.getUrl();
@@ -239,7 +240,7 @@
         return druidDataSource;
     }

-    private String getDataSourceKey(Database database) {
+    private String getDataSourceKey(DatabaseResp database) {
         return JdbcDataSourceUtils.getKey(database.getName(), database.getUrl(),
                 database.getUsername(), database.passwordDecrypt(), "", false);
     }
@@ -1,7 +1,9 @@
 package com.tencent.supersonic.headless.core.pojo;

+import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
 import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
 import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
+import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
 import lombok.Data;

 import java.util.*;
@@ -14,12 +16,16 @@ import java.util.stream.Collectors;
 @Data
 public class Ontology {

-    private Database database;
-    private Map<String, DataModel> dataModelMap = new HashMap<>();
-    private List<MetricSchemaResp> metrics = new ArrayList<>();
+    private DatabaseResp database;
+    private Map<String, ModelResp> modelMap = new HashMap<>();
+    private Map<String, List<MetricSchemaResp>> metricMap = new HashMap<>();
     private Map<String, List<DimSchemaResp>> dimensionMap = new HashMap<>();
     private List<JoinRelation> joinRelations;

+    public List<MetricSchemaResp> getMetrics() {
+        return metricMap.values().stream().flatMap(Collection::stream).collect(Collectors.toList());
+    }
+
     public List<DimSchemaResp> getDimensions() {
         return dimensionMap.values().stream().flatMap(Collection::stream)
                 .collect(Collectors.toList());
@@ -68,7 +68,8 @@ public class DefaultSemanticTranslator implements SemanticTranslator {
             List<Pair<String, String>> tables = new ArrayList<>();
             tables.add(Pair.of(ontologyInnerTable, ontologyInnerSql));
             if (sqlQuery.isSupportWith()) {
-                EngineType engineType = queryStatement.getOntology().getDatabase().getType();
+                EngineType engineType =
+                        EngineType.fromString(queryStatement.getOntology().getDatabase().getType());
                 if (!SqlMergeWithUtils.hasWith(engineType, ontologyQuerySql)) {
                     String withSql = "with " + tables.stream()
                             .map(t -> String.format("%s as (%s)", t.getLeft(), t.getRight()))
@@ -6,7 +6,7 @@ import com.tencent.supersonic.common.pojo.enums.DatePeriodEnum;
 import com.tencent.supersonic.common.pojo.enums.EngineType;
 import com.tencent.supersonic.common.util.ContextUtils;
 import com.tencent.supersonic.headless.api.pojo.enums.AggOption;
-import com.tencent.supersonic.headless.core.pojo.Database;
+import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
 import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
 import com.tencent.supersonic.headless.core.pojo.QueryStatement;
 import com.tencent.supersonic.headless.core.pojo.SqlQuery;
@@ -59,8 +59,9 @@ public class MetricRatioConverter implements QueryConverter {

     @Override
     public void convert(QueryStatement queryStatement) throws Exception {
-        Database database = queryStatement.getOntology().getDatabase();
-        generateRatioSql(queryStatement, database.getType(), database.getVersion());
+        DatabaseResp database = queryStatement.getOntology().getDatabase();
+        generateRatioSql(queryStatement, EngineType.fromString(database.getType()),
+                database.getVersion());
     }

     /** Ratio */
@@ -3,7 +3,6 @@ package com.tencent.supersonic.headless.core.translator.converter;
 import com.tencent.supersonic.headless.api.pojo.enums.ModelDefineType;
 import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
 import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
-import com.tencent.supersonic.headless.core.pojo.DataModel;
 import com.tencent.supersonic.headless.core.pojo.QueryStatement;
 import com.tencent.supersonic.headless.core.utils.SqlVariableParseUtils;
 import lombok.extern.slf4j.Slf4j;
@@ -36,9 +35,9 @@ public class SqlVariableConverter implements QueryConverter {
                     SqlVariableParseUtils.parse(modelResp.getModelDetail().getSqlQuery(),
                             modelResp.getModelDetail().getSqlVariables(),
                             queryStatement.getStructQuery().getParams());
-            DataModel dataModel =
-                    queryStatement.getOntology().getDataModelMap().get(modelResp.getBizName());
-            dataModel.setSqlQuery(sqlParsed);
+            ModelResp dataModel =
+                    queryStatement.getOntology().getModelMap().get(modelResp.getBizName());
+            dataModel.getModelDetail().setSqlQuery(sqlParsed);
         }
     }
 }
@@ -1,7 +1,8 @@
 package com.tencent.supersonic.headless.core.translator.converter;

+import com.tencent.supersonic.common.pojo.enums.EngineType;
 import com.tencent.supersonic.common.util.ContextUtils;
-import com.tencent.supersonic.headless.core.pojo.Database;
+import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
 import com.tencent.supersonic.headless.core.pojo.QueryStatement;
 import com.tencent.supersonic.headless.core.pojo.SqlQuery;
 import com.tencent.supersonic.headless.core.pojo.StructQuery;
@@ -40,8 +41,9 @@ public class StructQueryConverter implements QueryConverter {
                 sqlGenerateUtils.generateWhere(structQuery, null),
                 sqlGenerateUtils.getGroupBy(structQuery), sqlGenerateUtils.getOrderBy(structQuery),
                 sqlGenerateUtils.getLimit(structQuery));
-        Database database = queryStatement.getOntology().getDatabase();
-        if (!sqlGenerateUtils.isSupportWith(database.getType(), database.getVersion())) {
+        DatabaseResp database = queryStatement.getOntology().getDatabase();
+        if (!sqlGenerateUtils.isSupportWith(EngineType.fromString(database.getType()),
+                database.getVersion())) {
             sqlQuery.setSupportWith(false);
             sql = String.format("select %s from %s t0 %s %s %s",
                     sqlGenerateUtils.getSelect(structQuery), dsTable,
@@ -1,4 +1,4 @@
-package com.tencent.supersonic.headless.core.translator.parser.s2sql;
+package com.tencent.supersonic.headless.core.translator.parser;

 public class Constants {

@@ -1,4 +1,4 @@
-package com.tencent.supersonic.headless.core.translator.parser.s2sql;
+package com.tencent.supersonic.headless.core.translator.parser;

 import lombok.Builder;
 import lombok.Data;
@@ -1,4 +1,4 @@
-package com.tencent.supersonic.headless.core.translator.parser.s2sql;
+package com.tencent.supersonic.headless.core.translator.parser;

 import lombok.Builder;
 import lombok.Data;
@@ -1,4 +1,4 @@
-package com.tencent.supersonic.headless.core.translator.parser.s2sql;
+package com.tencent.supersonic.headless.core.translator.parser;

 import lombok.Builder;
 import lombok.Data;
@@ -6,7 +6,6 @@ import com.tencent.supersonic.headless.core.translator.parser.QueryParser;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.stereotype.Component;

-/** the calcite parse implements */
 @Component("CalciteQueryParser")
 @Slf4j
 public class CalciteQueryParser implements QueryParser {
@@ -1,4 +1,4 @@
-package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
+package com.tencent.supersonic.headless.core.translator.parser.calcite;

 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
@@ -6,17 +6,19 @@ import com.google.common.collect.Sets;
 import com.tencent.supersonic.common.calcite.Configuration;
 import com.tencent.supersonic.common.jsqlparser.SqlSelectHelper;
 import com.tencent.supersonic.common.pojo.enums.EngineType;
+import com.tencent.supersonic.headless.api.pojo.Dimension;
 import com.tencent.supersonic.headless.api.pojo.Identify;
 import com.tencent.supersonic.headless.api.pojo.Measure;
 import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
-import com.tencent.supersonic.headless.core.pojo.DataModel;
+import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
 import com.tencent.supersonic.headless.core.pojo.JoinRelation;
 import com.tencent.supersonic.headless.core.pojo.Ontology;
 import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
-import com.tencent.supersonic.headless.core.translator.parser.calcite.SchemaBuilder;
-import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants;
+import com.tencent.supersonic.headless.core.translator.parser.Constants;
 import lombok.extern.slf4j.Slf4j;
-import org.apache.calcite.sql.*;
+import org.apache.calcite.sql.SqlDataTypeSpec;
+import org.apache.calcite.sql.SqlNode;
+import org.apache.calcite.sql.SqlUserDefinedTypeNameSpec;
 import org.apache.calcite.sql.parser.SqlParser;
 import org.apache.calcite.sql.parser.SqlParserPos;
 import org.apache.calcite.sql.validate.SqlValidatorScope;
@@ -28,28 +30,32 @@ import java.util.stream.Collectors;
 @Slf4j
 public class DataModelNode extends SemanticNode {

-    public static SqlNode build(DataModel dataModel, SqlValidatorScope scope) throws Exception {
+    public static SqlNode build(ModelResp dataModel, SqlValidatorScope scope) throws Exception {
         String sqlTable = "";
-        if (dataModel.getSqlQuery() != null && !dataModel.getSqlQuery().isEmpty()) {
-            sqlTable = dataModel.getSqlQuery();
-        } else if (dataModel.getTableQuery() != null && !dataModel.getTableQuery().isEmpty()) {
-            if (dataModel.getType().equalsIgnoreCase(EngineType.POSTGRESQL.getName())) {
-                String fullTableName =
-                        String.join(".public.", dataModel.getTableQuery().split("\\."));
+        if (dataModel.getModelDetail().getSqlQuery() != null
+                && !dataModel.getModelDetail().getSqlQuery().isEmpty()) {
+            sqlTable = dataModel.getModelDetail().getSqlQuery();
+        } else if (dataModel.getModelDetail().getTableQuery() != null
+                && !dataModel.getModelDetail().getTableQuery().isEmpty()) {
+            if (dataModel.getModelDetail().getDbType()
+                    .equalsIgnoreCase(EngineType.POSTGRESQL.getName())) {
+                String fullTableName = String.join(".public.",
+                        dataModel.getModelDetail().getTableQuery().split("\\."));
                 sqlTable = "select * from " + fullTableName;
             } else {
-                sqlTable = "select * from " + dataModel.getTableQuery();
+                sqlTable = "select * from " + dataModel.getModelDetail().getTableQuery();
             }
         }
         if (sqlTable.isEmpty()) {
             throw new Exception("DataModelNode build error [tableSqlNode not found]");
         }
-        SqlNode source = getTable(sqlTable, scope, EngineType.fromString(dataModel.getType()));
+        SqlNode source = getTable(sqlTable, scope,
+                EngineType.fromString(dataModel.getModelDetail().getDbType()));
         addSchema(scope, dataModel, sqlTable);
         return buildAs(dataModel.getName(), source);
     }

-    private static void addSchema(SqlValidatorScope scope, DataModel datasource, String table)
+    private static void addSchema(SqlValidatorScope scope, ModelResp datasource, String table)
             throws Exception {
         Map<String, Set<String>> sqlTable = SqlSelectHelper.getFieldsWithSubQuery(table);
         for (Map.Entry<String, Set<String>> entry : sqlTable.entrySet()) {
@@ -63,22 +69,22 @@ public class DataModelNode extends SemanticNode {
         }
     }

-    private static void addSchemaTable(SqlValidatorScope scope, DataModel datasource, String db,
+    private static void addSchemaTable(SqlValidatorScope scope, ModelResp dataModel, String db,
             String tb, Set<String> fields) throws Exception {
         Set<String> dateInfo = new HashSet<>();
         Set<String> dimensions = new HashSet<>();
         Set<String> metrics = new HashSet<>();
-        EngineType engineType = EngineType.fromString(datasource.getType());
-        for (DimSchemaResp d : datasource.getDimensions()) {
+        EngineType engineType = EngineType.fromString(dataModel.getModelDetail().getDbType());
+        for (Dimension d : dataModel.getModelDetail().getDimensions()) {
             List<SqlNode> identifiers =
                     expand(SemanticNode.parse(d.getExpr(), scope, engineType), scope);
             identifiers.forEach(i -> dimensions.add(i.toString()));
             dimensions.add(d.getName());
         }
-        for (Identify i : datasource.getIdentifiers()) {
+        for (Identify i : dataModel.getIdentifiers()) {
             dimensions.add(i.getName());
         }
-        for (Measure m : datasource.getMeasures()) {
+        for (Measure m : dataModel.getMeasures()) {
             List<SqlNode> identifiers =
                     expand(SemanticNode.parse(m.getExpr(), scope, engineType), scope);
             identifiers.forEach(i -> {
@@ -93,7 +99,7 @@ public class DataModelNode extends SemanticNode {
         for (String field : fields) {
             if (!metrics.contains(field) && !dimensions.contains(field)) {
                 dimensions.add(field);
-                log.info("add column {} {}", datasource.getName(), field);
+                log.info("add column {} {}", dataModel.getName(), field);
             }
         }
         SchemaBuilder.addSourceView(scope.getValidator().getCatalogReader().getRootSchema(), db, tb,
@@ -121,10 +127,6 @@ public class DataModelNode extends SemanticNode {
         return sqlNode;
     }

-    public static String getNames(List<DataModel> dataModelList) {
-        return dataModelList.stream().map(DataModel::getName).collect(Collectors.joining("_"));
-    }
-
     public static void getQueryDimensionMeasure(Ontology ontology, OntologyQuery ontologyQuery,
             Set<String> queryDimensions, Set<String> queryMeasures) {
         ontologyQuery.getMetrics().forEach(m -> {
@@ -139,7 +141,7 @@ public class DataModelNode extends SemanticNode {
         });
     }

-    public static List<DataModel> getQueryDataModelsV2(Ontology ontology, OntologyQuery query) {
+    public static List<ModelResp> getQueryDataModelsV2(Ontology ontology, OntologyQuery query) {
         // first, sort models based on the number of query metrics
         Map<String, Integer> modelMetricCount = Maps.newHashMap();
         query.getMetrics().forEach(m -> {
@@ -171,11 +173,11 @@ public class DataModelNode extends SemanticNode {
         Set<String> dataModelNames = Sets.newLinkedHashSet();
         dataModelNames.addAll(dimDataModels);
         dataModelNames.addAll(metricsDataModels);
-        return dataModelNames.stream().map(bizName -> ontology.getDataModelMap().get(bizName))
+        return dataModelNames.stream().map(bizName -> ontology.getModelMap().get(bizName))
                 .collect(Collectors.toList());
     }

-    public static List<DataModel> getQueryDataModels(Ontology ontology,
+    public static List<ModelResp> getQueryDataModels(Ontology ontology,
             OntologyQuery ontologyQuery) {
         // get query measures and dimensions
         Set<String> queryMeasures = new HashSet<>();
@@ -183,7 +185,7 @@ public class DataModelNode extends SemanticNode {
         getQueryDimensionMeasure(ontology, ontologyQuery, queryDimensions, queryMeasures);

         // first, find the base model
-        DataModel baseDataModel = findBaseModel(ontology, ontologyQuery);
+        ModelResp baseDataModel = findBaseModel(ontology, ontologyQuery);
         if (Objects.isNull(baseDataModel)) {
             throw new RuntimeException(
                     String.format("could not find matching dataModel, dimensions:%s, measures:%s",
@@ -196,7 +198,7 @@ public class DataModelNode extends SemanticNode {
         }

         // second, traverse the ontology to find other related dataModels
-        List<DataModel> relatedDataModels = findRelatedModelsByRelation(ontology, ontologyQuery,
+        List<ModelResp> relatedDataModels = findRelatedModelsByRelation(ontology, ontologyQuery,
                 baseDataModel, queryDimensions, queryMeasures);
         if (CollectionUtils.isEmpty(relatedDataModels)) {
             relatedDataModels = findRelatedModelsByIdentifier(ontology, baseDataModel,
@@ -210,8 +212,8 @@ public class DataModelNode extends SemanticNode {
         return relatedDataModels;
     }

-    private static DataModel findBaseModel(Ontology ontology, OntologyQuery query) {
-        DataModel dataModel = null;
+    private static ModelResp findBaseModel(Ontology ontology, OntologyQuery query) {
+        ModelResp dataModel = null;
         // first, try to find the model with the most query metrics
         Map<String, Integer> modelMetricCount = Maps.newHashMap();
         query.getMetrics().forEach(m -> {
@@ -226,7 +228,7 @@ public class DataModelNode extends SemanticNode {
                 .sorted(Map.Entry.comparingByValue(Comparator.reverseOrder())).map(e -> e.getKey())
                 .findFirst();
         if (baseModelName.isPresent()) {
-            dataModel = ontology.getDataModelMap().get(baseModelName.get());
+            dataModel = ontology.getModelMap().get(baseModelName.get());
         } else {
             // second, try to find the model with the most query dimensions
             Map<String, Integer> modelDimCount = Maps.newHashMap();
@@ -242,20 +244,20 @@ public class DataModelNode extends SemanticNode {
                     .sorted(Map.Entry.comparingByValue(Comparator.reverseOrder()))
                     .map(e -> e.getKey()).findFirst();
             if (baseModelName.isPresent()) {
-                dataModel = ontology.getDataModelMap().get(baseModelName.get());
+                dataModel = ontology.getModelMap().get(baseModelName.get());
             }
         }

         return dataModel;
     }

-    private static boolean checkMatch(DataModel baseDataModel, Set<String> queryMeasures,
+    private static boolean checkMatch(ModelResp baseDataModel, Set<String> queryMeasures,
             Set<String> queryDimension) {
         boolean isAllMatch = true;
         Set<String> baseMeasures = baseDataModel.getMeasures().stream().map(Measure::getName)
                 .collect(Collectors.toSet());
-        Set<String> baseDimensions = baseDataModel.getDimensions().stream()
-                .map(DimSchemaResp::getName).collect(Collectors.toSet());
+        Set<String> baseDimensions = baseDataModel.getModelDetail().getDimensions().stream()
+                .map(Dimension::getName).collect(Collectors.toSet());
         baseDataModel.getIdentifiers().forEach(i -> baseDimensions.add(i.getName()));

         baseMeasures.retainAll(queryMeasures);
@@ -282,11 +284,11 @@ public class DataModelNode extends SemanticNode {
         return isAllMatch;
     }

-    private static List<DataModel> findRelatedModelsByRelation(Ontology ontology,
-            OntologyQuery ontologyQuery, DataModel baseDataModel, Set<String> queryDimensions,
+    private static List<ModelResp> findRelatedModelsByRelation(Ontology ontology,
+            OntologyQuery ontologyQuery, ModelResp baseDataModel, Set<String> queryDimensions,
             Set<String> queryMeasures) {
         Set<String> joinDataModelNames = new HashSet<>();
-        List<DataModel> joinDataModels = new ArrayList<>();
+        List<ModelResp> joinDataModels = new ArrayList<>();
         Set<String> before = new HashSet<>();
         before.add(baseDataModel.getName());

@@ -305,14 +307,15 @@ public class DataModelNode extends SemanticNode {
             }
             boolean isMatch = false;
             boolean isRight = before.contains(joinRelation.getLeft());
-            DataModel other = isRight ? ontology.getDataModelMap().get(joinRelation.getRight())
-                    : ontology.getDataModelMap().get(joinRelation.getLeft());
+            ModelResp other = isRight ? ontology.getModelMap().get(joinRelation.getRight())
+                    : ontology.getModelMap().get(joinRelation.getLeft());
             String joinDimName = isRight ? joinRelation.getJoinCondition().get(0).getRight()
                     : joinRelation.getJoinCondition().get(0).getLeft();
             if (!queryDimensions.isEmpty()) {
-                Set<String> linkDimension = other.getDimensions().stream()
-                        .map(DimSchemaResp::getName).collect(Collectors.toSet());
-                other.getIdentifiers().forEach(i -> linkDimension.add(i.getName()));
+                Set<String> linkDimension = other.getModelDetail().getDimensions().stream()
+                        .map(Dimension::getName).collect(Collectors.toSet());
+                other.getModelDetail().getIdentifiers()
+                        .forEach(i -> linkDimension.add(i.getName()));
                 linkDimension.retainAll(queryDimensions);
                 if (!linkDimension.isEmpty()) {
                     isMatch = true;
@@ -320,8 +323,8 @@ public class DataModelNode extends SemanticNode {
                     // ontologyQuery.getDimensions().add(joinDimName);
                 }
             }
-            Set<String> linkMeasure = other.getMeasures().stream().map(Measure::getName)
-                    .collect(Collectors.toSet());
+            Set<String> linkMeasure = other.getModelDetail().getMeasures().stream()
+                    .map(Measure::getName).collect(Collectors.toSet());
             linkMeasure.retainAll(queryMeasures);
             if (!linkMeasure.isEmpty()) {
                 isMatch = true;
@@ -360,7 +363,7 @@ public class DataModelNode extends SemanticNode {
             orders.entrySet().stream()
                     .sorted((entry1, entry2) -> entry2.getValue().compareTo(entry1.getValue())) // 倒序排序
                     .forEach(d -> {
-                        joinDataModels.add(ontology.getDataModelMap().get(d.getKey()));
+                        joinDataModels.add(ontology.getModelMap().get(d.getKey()));
                     });
         }
         return joinDataModels;
@@ -381,36 +384,37 @@ public class DataModelNode extends SemanticNode {
         }
     }

-    private static List<DataModel> findRelatedModelsByIdentifier(Ontology ontology,
-            DataModel baseDataModel, Set<String> queryDimension, Set<String> measures) {
-        Set<String> baseIdentifiers = baseDataModel.getIdentifiers().stream().map(Identify::getName)
-                .collect(Collectors.toSet());
+    private static List<ModelResp> findRelatedModelsByIdentifier(Ontology ontology,
+            ModelResp baseDataModel, Set<String> queryDimension, Set<String> measures) {
+        Set<String> baseIdentifiers = baseDataModel.getModelDetail().getIdentifiers().stream()
+                .map(Identify::getName).collect(Collectors.toSet());
         if (baseIdentifiers.isEmpty()) {
             return Collections.EMPTY_LIST;
         }

         Set<String> linkDataSourceName = new HashSet<>();
-        List<DataModel> linkDataModels = new ArrayList<>();
-        for (Map.Entry<String, DataModel> entry : ontology.getDataModelMap().entrySet()) {
+        List<ModelResp> linkDataModels = new ArrayList<>();
+        for (Map.Entry<String, ModelResp> entry : ontology.getModelMap().entrySet()) {
             if (entry.getKey().equalsIgnoreCase(baseDataModel.getName())) {
                 continue;
             }
-            long identifierNum = entry.getValue().getIdentifiers().stream().map(Identify::getName)
-                    .filter(baseIdentifiers::contains).count();
+            long identifierNum = entry.getValue().getModelDetail().getIdentifiers().stream()
+                    .map(Identify::getName).filter(baseIdentifiers::contains).count();
             if (identifierNum > 0) {
                 boolean isMatch = false;
                 if (!queryDimension.isEmpty()) {
-                    Set<String> linkDimension = entry.getValue().getDimensions().stream()
-                            .map(DimSchemaResp::getName).collect(Collectors.toSet());
-                    entry.getValue().getIdentifiers().forEach(i -> linkDimension.add(i.getName()));
+                    Set<String> linkDimension = entry.getValue().getModelDetail().getDimensions()
+                            .stream().map(Dimension::getName).collect(Collectors.toSet());
+                    entry.getValue().getModelDetail().getIdentifiers()
+                            .forEach(i -> linkDimension.add(i.getName()));
                     linkDimension.retainAll(queryDimension);
                     if (!linkDimension.isEmpty()) {
                         isMatch = true;
                     }
                 }
                 if (!measures.isEmpty()) {
-                    Set<String> linkMeasure = entry.getValue().getMeasures().stream()
-                            .map(Measure::getName).collect(Collectors.toSet());
+                    Set<String> linkMeasure = entry.getValue().getModelDetail().getMeasures()
+                            .stream().map(Measure::getName).collect(Collectors.toSet());
                     linkMeasure.retainAll(measures);
                     if (!linkMeasure.isEmpty()) {
                         isMatch = true;
@@ -432,10 +436,10 @@ public class DataModelNode extends SemanticNode {
             }
         }
         for (String linkName : linkDataSourceName) {
-            linkDataModels.add(ontology.getDataModelMap().get(linkName));
+            linkDataModels.add(ontology.getModelMap().get(linkName));
         }
         if (!CollectionUtils.isEmpty(linkDataModels)) {
-            List<DataModel> all = new ArrayList<>();
+            List<ModelResp> all = new ArrayList<>();
             all.add(baseDataModel);
             all.addAll(linkDataModels);
             return all;
@@ -25,7 +25,9 @@ import java.util.List;
 import java.util.Objects;
 import java.util.Optional;

-/** push down the time filter into group using the RuntimeOptions defined minMaxTime */
+/**
+ * push down the time filter into group using the RuntimeOptions defined minMaxTime
+ */
 public class FilterToGroupScanRule extends RelRule<Config> implements TransformationRule {

     public static FilterTableScanRule.Config DEFAULT =
@@ -2,7 +2,7 @@ package com.tencent.supersonic.headless.core.translator.parser.calcite;

 import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
 import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
-import com.tencent.supersonic.headless.core.pojo.DataModel;
+import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
 import com.tencent.supersonic.headless.core.pojo.JoinRelation;
 import com.tencent.supersonic.headless.core.pojo.Ontology;
 import lombok.Builder;
@@ -29,8 +29,8 @@ public class S2CalciteSchema extends AbstractSchema {
         return this;
     }

-    public Map<String, DataModel> getDataModels() {
-        return ontology.getDataModelMap();
+    public Map<String, ModelResp> getDataModels() {
+        return ontology.getModelMap();
     }

     public List<MetricSchemaResp> getMetrics() {
@@ -26,14 +26,14 @@ public class SchemaBuilder {
     public static final String MATERIALIZATION_SYS_FIELD_DATE = "C1";
     public static final String MATERIALIZATION_SYS_FIELD_DATA = "C2";

-    public static SqlValidatorScope getScope(S2CalciteSchema schema) throws Exception {
+    public static SqlValidatorScope getScope(S2CalciteSchema schema) {
         Map<String, RelDataType> nameToTypeMap = new HashMap<>();
         CalciteSchema rootSchema = CalciteSchema.createRootSchema(true, false);
         rootSchema.add(schema.getSchemaKey(), schema);
         Prepare.CatalogReader catalogReader = new CalciteCatalogReader(rootSchema,
                 Collections.singletonList(schema.getSchemaKey()), Configuration.typeFactory,
                 Configuration.config);
-        EngineType engineType = schema.getOntology().getDatabase().getType();
+        EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
         S2SQLSqlValidatorImpl s2SQLSqlValidator =
                 new S2SQLSqlValidatorImpl(Configuration.operatorTable, catalogReader,
                         Configuration.typeFactory, Configuration.getValidatorConfig(engineType));
@@ -1,12 +1,10 @@
-package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
+package com.tencent.supersonic.headless.core.translator.parser.calcite;

 import com.tencent.supersonic.common.calcite.Configuration;
 import com.tencent.supersonic.common.calcite.SemanticSqlDialect;
 import com.tencent.supersonic.common.calcite.SqlDialectFactory;
 import com.tencent.supersonic.common.pojo.enums.EngineType;
-import com.tencent.supersonic.headless.core.translator.parser.calcite.FilterToGroupScanRule;
-import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
-import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants;
+import com.tencent.supersonic.headless.core.translator.parser.Constants;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.calcite.plan.RelOptPlanner;
 import org.apache.calcite.plan.hep.HepPlanner;
@@ -2,84 +2,65 @@ package com.tencent.supersonic.headless.core.translator.parser.calcite;
 
 import com.tencent.supersonic.common.calcite.Configuration;
 import com.tencent.supersonic.common.pojo.enums.EngineType;
-import com.tencent.supersonic.headless.core.pojo.DataModel;
+import com.tencent.supersonic.headless.api.pojo.Dimension;
-import com.tencent.supersonic.headless.core.pojo.Database;
+import com.tencent.supersonic.headless.api.pojo.Identify;
-import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
+import com.tencent.supersonic.headless.api.pojo.enums.IdentifyType;
-import com.tencent.supersonic.headless.core.pojo.QueryStatement;
+import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
-import com.tencent.supersonic.headless.core.translator.parser.calcite.node.DataModelNode;
+import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
-import com.tencent.supersonic.headless.core.translator.parser.calcite.node.SemanticNode;
+import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
-import com.tencent.supersonic.headless.core.translator.parser.calcite.render.JoinRender;
+import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
-import com.tencent.supersonic.headless.core.translator.parser.calcite.render.Renderer;
+import com.tencent.supersonic.headless.core.pojo.*;
+import com.tencent.supersonic.headless.core.translator.parser.Constants;
 import lombok.extern.slf4j.Slf4j;
-import org.apache.calcite.sql.SqlNode;
+import org.apache.calcite.sql.*;
+import org.apache.calcite.sql.fun.SqlStdOperatorTable;
 import org.apache.calcite.sql.parser.SqlParser;
+import org.apache.calcite.sql.parser.SqlParserPos;
 import org.apache.calcite.sql.validate.SqlValidatorScope;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang3.tuple.Triple;
+import org.springframework.util.CollectionUtils;
 
-import java.util.LinkedList;
+import java.util.*;
-import java.util.List;
+import java.util.stream.Collectors;
-import java.util.ListIterator;
-import java.util.Objects;
 
 @Slf4j
 public class SqlBuilder {
 
     private final S2CalciteSchema schema;
-    private OntologyQuery ontologyQuery;
+    private final SqlValidatorScope scope;
-    private SqlValidatorScope scope;
-    private SqlNode parserNode;
 
     public SqlBuilder(S2CalciteSchema schema) {
         this.schema = schema;
+        this.scope = SchemaBuilder.getScope(schema);
     }
 
     public String buildOntologySql(QueryStatement queryStatement) throws Exception {
-        this.ontologyQuery = queryStatement.getOntologyQuery();
+        OntologyQuery ontologyQuery = queryStatement.getOntologyQuery();
         if (ontologyQuery.getLimit() == null) {
             ontologyQuery.setLimit(0L);
         }
 
-        buildParseNode();
-        Database database = queryStatement.getOntology().getDatabase();
-        optimizeParseNode(database.getType());
-        return getSql(database.getType());
-    }
 
-    private void buildParseNode() throws Exception {
         // find relevant data models
-        scope = SchemaBuilder.getScope(schema);
+        List<ModelResp> dataModels =
-        List<DataModel> dataModels =
                 DataModelNode.getQueryDataModelsV2(schema.getOntology(), ontologyQuery);
         if (dataModels == null || dataModels.isEmpty()) {
             throw new Exception("data model not found");
         }
 
-        LinkedList<Renderer> builders = new LinkedList<>();
+        TableView tableView = render(ontologyQuery, dataModels, scope, schema);
-        builders.add(new JoinRender());
+        SqlNode parserNode = tableView.build();
-        ListIterator<Renderer> it = builders.listIterator();
+        DatabaseResp database = queryStatement.getOntology().getDatabase();
-        int i = 0;
+        EngineType engineType = EngineType.fromString(database.getType());
-        Renderer previous = null;
+        parserNode = optimizeParseNode(parserNode, engineType);
-        while (it.hasNext()) {
-            Renderer renderer = it.next();
-            if (previous != null) {
-                previous.render(ontologyQuery, dataModels, scope, schema);
-                renderer.setTable(previous.builderAs(DataModelNode.getNames(dataModels) + "_" + i));
-                i++;
-            }
-            previous = renderer;
-        }
-        builders.getLast().render(ontologyQuery, dataModels, scope, schema);
-        parserNode = builders.getLast().build();
-    }
 
-    public String getSql(EngineType engineType) {
         return SemanticNode.getSql(parserNode, engineType);
     }
 
-    private void optimizeParseNode(EngineType engineType) {
+    private SqlNode optimizeParseNode(SqlNode parserNode, EngineType engineType) {
         if (Objects.isNull(schema.getRuntimeOptions())
                 || Objects.isNull(schema.getRuntimeOptions().getEnableOptimize())
                 || !schema.getRuntimeOptions().getEnableOptimize()) {
-            return;
+            return parserNode;
         }
 
         SqlNode optimizeNode = null;
@@ -94,8 +75,233 @@ public class SqlBuilder {
         }
 
         if (Objects.nonNull(optimizeNode)) {
-            parserNode = optimizeNode;
+            return optimizeNode;
         }
 
+        return parserNode;
+    }
 
+    private TableView render(OntologyQuery ontologyQuery, List<ModelResp> dataModels,
+            SqlValidatorScope scope, S2CalciteSchema schema) throws Exception {
+        SqlNode left = null;
+        TableView leftTable = null;
+        TableView outerTable = new TableView();
+        Map<String, SqlNode> outerSelect = new HashMap<>();
+        Map<String, String> beforeModels = new HashMap<>();
+        EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
+
+        for (int i = 0; i < dataModels.size(); i++) {
+            final ModelResp dataModel = dataModels.get(i);
+            final Set<DimSchemaResp> queryDimensions =
+                    ontologyQuery.getDimensionsByModel(dataModel.getId());
+            final Set<MetricSchemaResp> queryMetrics =
+                    ontologyQuery.getMetricsByModel(dataModel.getId());
+
+            List<String> primary = new ArrayList<>();
+            for (Identify identify : dataModel.getIdentifiers()) {
+                primary.add(identify.getName());
+            }
+
+            TableView tableView =
+                    renderOne(queryMetrics, queryDimensions, dataModel, scope, schema);
+            log.info("tableView {}", StringUtils.normalizeSpace(tableView.getTable().toString()));
+            String alias = Constants.JOIN_TABLE_PREFIX + dataModel.getName();
+            tableView.setAlias(alias);
+            tableView.setPrimary(primary);
+            tableView.setDataModel(dataModel);
+            for (String field : tableView.getFields()) {
+                outerSelect.put(field, SemanticNode.parse(alias + "." + field, scope, engineType));
+            }
+            if (left == null) {
+                left = SemanticNode.buildAs(tableView.getAlias(), getTable(tableView));
+            } else {
+                left = buildJoin(left, leftTable, tableView, beforeModels, dataModel, schema,
+                        scope);
+            }
+            leftTable = tableView;
+            beforeModels.put(dataModel.getName(), leftTable.getAlias());
+        }
+
+        for (Map.Entry<String, SqlNode> entry : outerSelect.entrySet()) {
+            outerTable.getSelect().add(entry.getValue());
+        }
+        outerTable.setTable(left);
+
+        return outerTable;
+    }
+
+    private SqlNode getTable(TableView tableView) {
+        return SemanticNode.getTable(tableView.getTable());
+    }
+
+    private SqlNode buildJoin(SqlNode leftNode, TableView leftTable, TableView rightTable,
+            Map<String, String> before, ModelResp dataModel, S2CalciteSchema schema,
+            SqlValidatorScope scope) throws Exception {
+        EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
+        SqlNode condition =
+                getCondition(leftTable, rightTable, dataModel, schema, scope, engineType);
+        SqlLiteral sqlLiteral = SemanticNode.getJoinSqlLiteral("");
+        JoinRelation matchJoinRelation = getMatchJoinRelation(before, rightTable, schema);
+        SqlNode joinRelationCondition;
+        if (!CollectionUtils.isEmpty(matchJoinRelation.getJoinCondition())) {
+            sqlLiteral = SemanticNode.getJoinSqlLiteral(matchJoinRelation.getJoinType());
+            joinRelationCondition = getCondition(matchJoinRelation, scope, engineType);
+            condition = joinRelationCondition;
+        }
+
+        return new SqlJoin(SqlParserPos.ZERO, leftNode,
+                SqlLiteral.createBoolean(false, SqlParserPos.ZERO), sqlLiteral,
+                SemanticNode.buildAs(rightTable.getAlias(), getTable(rightTable)),
+                SqlLiteral.createSymbol(JoinConditionType.ON, SqlParserPos.ZERO), condition);
+    }
+
+    private JoinRelation getMatchJoinRelation(Map<String, String> before, TableView tableView,
+            S2CalciteSchema schema) {
+        JoinRelation matchJoinRelation = JoinRelation.builder().build();
+        if (!CollectionUtils.isEmpty(schema.getJoinRelations())) {
+            for (JoinRelation joinRelation : schema.getJoinRelations()) {
+                if (joinRelation.getRight().equalsIgnoreCase(tableView.getDataModel().getName())
+                        && before.containsKey(joinRelation.getLeft())) {
+                    matchJoinRelation.setJoinCondition(joinRelation.getJoinCondition().stream()
+                            .map(r -> Triple.of(
+                                    before.get(joinRelation.getLeft()) + "." + r.getLeft(),
+                                    r.getMiddle(), tableView.getAlias() + "." + r.getRight()))
+                            .collect(Collectors.toList()));
+                    matchJoinRelation.setJoinType(joinRelation.getJoinType());
+                    // Added join condition judgment to solve the problem of join condition order
+                } else if (joinRelation.getLeft()
+                        .equalsIgnoreCase(tableView.getDataModel().getName())
+                        && before.containsKey(joinRelation.getRight())) {
+                    matchJoinRelation.setJoinCondition(joinRelation.getJoinCondition().stream()
+                            .map(r -> Triple.of(
+                                    before.get(joinRelation.getRight()) + "." + r.getRight(),
+                                    r.getMiddle(), tableView.getAlias() + "." + r.getLeft()))
+                            .collect(Collectors.toList()));
+                    matchJoinRelation.setJoinType(joinRelation.getJoinType());
+                }
+            }
+        }
+        return matchJoinRelation;
+    }
+
+    private SqlNode getCondition(JoinRelation joinRelation, SqlValidatorScope scope,
+            EngineType engineType) throws Exception {
+        SqlNode condition = null;
+        for (Triple<String, String, String> con : joinRelation.getJoinCondition()) {
+            List<SqlNode> ons = new ArrayList<>();
+            ons.add(SemanticNode.parse(con.getLeft(), scope, engineType));
+            ons.add(SemanticNode.parse(con.getRight(), scope, engineType));
+            if (Objects.isNull(condition)) {
+                condition = new SqlBasicCall(SemanticNode.getBinaryOperator(con.getMiddle()), ons,
+                        SqlParserPos.ZERO, null);
+                continue;
+            }
+            SqlNode addCondition = new SqlBasicCall(SemanticNode.getBinaryOperator(con.getMiddle()),
+                    ons, SqlParserPos.ZERO, null);
+            condition = new SqlBasicCall(SqlStdOperatorTable.AND,
+                    new ArrayList<>(Arrays.asList(condition, addCondition)), SqlParserPos.ZERO,
+                    null);
+        }
+        return condition;
+    }
+
+    private SqlNode getCondition(TableView left, TableView right, ModelResp dataModel,
+            S2CalciteSchema schema, SqlValidatorScope scope, EngineType engineType)
+            throws Exception {
+
+        Set<String> selectLeft = SemanticNode.getSelect(left.getTable());
+        Set<String> selectRight = SemanticNode.getSelect(right.getTable());
+        selectLeft.retainAll(selectRight);
+        SqlNode condition = null;
+        for (String on : selectLeft) {
+            if (!isDimension(on, dataModel, schema)) {
+                continue;
+            }
+            if (isForeign(on, left.getDataModel().getIdentifiers())) {
+                if (!isPrimary(on, right.getDataModel().getIdentifiers())) {
+                    continue;
+                }
+            }
+            if (isForeign(on, right.getDataModel().getIdentifiers())) {
+                if (!isPrimary(on, left.getDataModel().getIdentifiers())) {
+                    continue;
+                }
+            }
+            List<SqlNode> ons = new ArrayList<>();
+            ons.add(SemanticNode.parse(left.getAlias() + "." + on, scope, engineType));
+            ons.add(SemanticNode.parse(right.getAlias() + "." + on, scope, engineType));
+            if (condition == null) {
+                condition =
+                        new SqlBasicCall(SqlStdOperatorTable.EQUALS, ons, SqlParserPos.ZERO, null);
+                continue;
+            }
+            SqlNode addCondition =
+                    new SqlBasicCall(SqlStdOperatorTable.EQUALS, ons, SqlParserPos.ZERO, null);
+            condition = new SqlBasicCall(SqlStdOperatorTable.AND,
+                    new ArrayList<>(Arrays.asList(condition, addCondition)), SqlParserPos.ZERO,
+                    null);
+        }
+        return condition;
+    }
+
+    public static TableView renderOne(Set<MetricSchemaResp> queryMetrics,
+            Set<DimSchemaResp> queryDimensions, ModelResp dataModel, SqlValidatorScope scope,
+            S2CalciteSchema schema) {
+        TableView tableView = new TableView();
+        EngineType engineType = EngineType.fromString(schema.getOntology().getDatabase().getType());
+        Set<String> queryFields = tableView.getFields();
+        queryMetrics.stream().forEach(m -> queryFields.addAll(m.getFields()));
+        queryDimensions.stream().forEach(m -> queryFields.add(m.getBizName()));
+
+        try {
+            for (String field : queryFields) {
+                tableView.getSelect().add(SemanticNode.parse(field, scope, engineType));
+            }
+            tableView.setTable(DataModelNode.build(dataModel, scope));
+        } catch (Exception e) {
+            log.error("Failed to create sqlNode for data model {}", dataModel);
+        }
+
+        return tableView;
+    }
+
+    private static boolean isDimension(String name, ModelResp dataModel, S2CalciteSchema schema) {
+        Optional<Dimension> dimension = dataModel.getModelDetail().getDimensions().stream()
+                .filter(d -> d.getName().equalsIgnoreCase(name)).findFirst();
+        if (dimension.isPresent()) {
+            return true;
+        }
+        Optional<Identify> identify = dataModel.getIdentifiers().stream()
+                .filter(i -> i.getName().equalsIgnoreCase(name)).findFirst();
+        if (identify.isPresent()) {
+            return true;
+        }
+        if (schema.getDimensions().containsKey(dataModel.getName())) {
+            Optional<DimSchemaResp> dataSourceDim = schema.getDimensions().get(dataModel.getName())
+                    .stream().filter(d -> d.getName().equalsIgnoreCase(name)).findFirst();
+            if (dataSourceDim.isPresent()) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    private static boolean isForeign(String name, List<Identify> identifies) {
+        Optional<Identify> identify =
+                identifies.stream().filter(i -> i.getName().equalsIgnoreCase(name)).findFirst();
+        if (identify.isPresent()) {
+            return IdentifyType.foreign.equals(identify.get().getType());
+        }
+        return false;
+    }
+
+    private static boolean isPrimary(String name, List<Identify> identifies) {
+        Optional<Identify> identify =
+                identifies.stream().filter(i -> i.getName().equalsIgnoreCase(name)).findFirst();
+        if (identify.isPresent()) {
+            return IdentifyType.primary.equals(identify.get().getType());
+        }
+        return false;
     }
 
 }
@@ -2,7 +2,7 @@ package com.tencent.supersonic.headless.core.translator.parser.calcite;
 
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
-import com.tencent.supersonic.headless.core.pojo.DataModel;
+import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
 import lombok.Data;
 import org.apache.calcite.sql.SqlNode;
 import org.apache.calcite.sql.SqlNodeList;
@@ -13,7 +13,6 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Set;
 
-/** basic query project */
 @Data
 public class TableView {
 
@@ -28,7 +27,7 @@ public class TableView {
 
     private String alias;
     private List<String> primary;
-    private DataModel dataModel;
+    private ModelResp dataModel;
 
     public SqlNode build() {
         List<SqlNode> selectNodeList = new ArrayList<>();
@@ -1,26 +0,0 @@
-package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
-
-import com.tencent.supersonic.common.pojo.enums.EngineType;
-import org.apache.calcite.sql.SqlNode;
-import org.apache.calcite.sql.validate.SqlValidatorScope;
-
-import java.util.Objects;
-
-public class AggFunctionNode extends SemanticNode {
-
-    public static SqlNode build(String agg, String name, SqlValidatorScope scope,
-            EngineType engineType) throws Exception {
-        if (Objects.isNull(agg) || agg.isEmpty()) {
-            return parse(name, scope, engineType);
-        }
-        if (AggFunction.COUNT_DISTINCT.name().equalsIgnoreCase(agg)) {
-            return parse(AggFunction.COUNT.name() + " ( " + AggFunction.DISTINCT.name() + " " + name
-                    + " ) ", scope, engineType);
-        }
-        return parse(agg + " ( " + name + " ) ", scope, engineType);
-    }
-
-    public static enum AggFunction {
-        AVG, COUNT_DISTINCT, MAX, MIN, SUM, COUNT, DISTINCT
-    }
-}
@@ -1,54 +0,0 @@
-package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
-
-import com.tencent.supersonic.common.pojo.enums.DataTypeEnums;
-import com.tencent.supersonic.common.pojo.enums.EngineType;
-import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
-import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants;
-import org.apache.calcite.sql.SqlNode;
-import org.apache.calcite.sql.validate.SqlValidatorScope;
-
-import java.util.List;
-import java.util.Objects;
-
-public class DimensionNode extends SemanticNode {
-
-    public static SqlNode build(DimSchemaResp dimension, SqlValidatorScope scope,
-            EngineType engineType) throws Exception {
-        return parse(dimension.getExpr(), scope, engineType);
-    }
-
-    public static List<SqlNode> expand(DimSchemaResp dimension, SqlValidatorScope scope,
-            EngineType engineType) throws Exception {
-        SqlNode sqlNode = parse(dimension.getExpr(), scope, engineType);
-        return expand(sqlNode, scope);
-    }
-
-    public static SqlNode buildName(DimSchemaResp dimension, SqlValidatorScope scope,
-            EngineType engineType) throws Exception {
-        return parse(dimension.getExpr(), scope, engineType);
-    }
-
-    public static SqlNode buildNameAs(String alias, DimSchemaResp dimension,
-            SqlValidatorScope scope, EngineType engineType) throws Exception {
-        if ("".equals(alias)) {
-            return buildName(dimension, scope, engineType);
-        }
-        SqlNode sqlNode = parse(dimension.getExpr(), scope, engineType);
-        return buildAs(alias, sqlNode);
-    }
-
-    public static SqlNode buildArray(DimSchemaResp dimension, SqlValidatorScope scope,
-            EngineType engineType) throws Exception {
-        if (Objects.nonNull(dimension.getDataType())
-                && dimension.getDataType().equals(DataTypeEnums.ARRAY)) {
-            SqlNode sqlNode = parse(dimension.getExpr(), scope, engineType);
-            if (isIdentifier(sqlNode)) {
-                return buildAs(dimension.getName(),
-                        parse(dimension.getExpr() + Constants.DIMENSION_ARRAY_SINGLE_SUFFIX, scope,
-                                engineType));
-            }
-            throw new Exception("array dimension expr should only identify");
-        }
-        return build(dimension, scope, engineType);
-    }
-}
@@ -1,35 +0,0 @@
-package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
-
-import org.apache.calcite.sql.SqlCall;
-import org.apache.calcite.sql.SqlInternalOperator;
-import org.apache.calcite.sql.SqlKind;
-import org.apache.calcite.sql.SqlNodeList;
-import org.apache.calcite.sql.SqlOperator;
-import org.apache.calcite.sql.SqlWriter;
-import org.apache.calcite.sql.SqlWriter.Frame;
-import org.apache.calcite.sql.SqlWriter.FrameTypeEnum;
-
-public class ExtendNode extends SqlInternalOperator {
-
-    public ExtendNode() {
-        super(SqlKind.EXTEND.lowerName, SqlKind.EXTEND);
-    }
-
-    public void unparse(SqlWriter writer, SqlCall call, int leftPrec, int rightPrec) {
-        SqlOperator operator = call.getOperator();
-        Frame frame = writer.startList(FrameTypeEnum.SIMPLE);
-        call.operand(0).unparse(writer, leftPrec, operator.getLeftPrec());
-        writer.setNeedWhitespace(true);
-        writer.sep(operator.getName());
-        SqlNodeList list = (SqlNodeList) call.operand(1);
-        Frame frameArgs = writer.startList("(", ")");
-        for (int i = 0; i < list.size(); i++) {
-            list.get(i).unparse(writer, 0, 0);
-            if (i < list.size() - 1) {
-                writer.sep(",");
-            }
-        }
-        writer.endList(frameArgs);
-        writer.endList(frame);
-    }
-}
@@ -1,25 +0,0 @@
-package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
-
-import org.apache.calcite.sql.SqlBasicCall;
-import org.apache.calcite.sql.SqlIdentifier;
-import org.apache.calcite.sql.SqlNode;
-
-import java.util.Set;
-
-public class FilterNode extends SemanticNode {
-
-    public static void getFilterField(SqlNode sqlNode, Set<String> fields) {
-        if (sqlNode instanceof SqlIdentifier) {
-            SqlIdentifier sqlIdentifier = (SqlIdentifier) sqlNode;
-            fields.add(sqlIdentifier.names.get(0).toLowerCase());
-            return;
-        }
-        if (sqlNode instanceof SqlBasicCall) {
-            SqlBasicCall sqlBasicCall = (SqlBasicCall) sqlNode;
-            for (SqlNode operand : sqlBasicCall.getOperandList()) {
-                getFilterField(operand, fields);
-            }
-        }
-    }
-
-}
@@ -1,36 +0,0 @@
-package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
-
-import com.tencent.supersonic.common.pojo.enums.EngineType;
-import com.tencent.supersonic.headless.api.pojo.Identify;
-import com.tencent.supersonic.headless.api.pojo.enums.IdentifyType;
-import org.apache.calcite.sql.SqlNode;
-import org.apache.calcite.sql.validate.SqlValidatorScope;
-
-import java.util.List;
-import java.util.Optional;
-
-public class IdentifyNode extends SemanticNode {
-
-    public static SqlNode build(Identify identify, SqlValidatorScope scope, EngineType engineType)
-            throws Exception {
-        return parse(identify.getName(), scope, engineType);
-    }
-
-    public static boolean isForeign(String name, List<Identify> identifies) {
-        Optional<Identify> identify =
-                identifies.stream().filter(i -> i.getName().equalsIgnoreCase(name)).findFirst();
-        if (identify.isPresent()) {
-            return IdentifyType.foreign.equals(identify.get().getType());
-        }
-        return false;
-    }
-
-    public static boolean isPrimary(String name, List<Identify> identifies) {
-        Optional<Identify> identify =
-                identifies.stream().filter(i -> i.getName().equalsIgnoreCase(name)).findFirst();
-        if (identify.isPresent()) {
-            return IdentifyType.primary.equals(identify.get().getType());
-        }
-        return false;
-    }
-}
@@ -1,79 +0,0 @@
-package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
-
-import org.apache.calcite.linq4j.Ord;
-import org.apache.calcite.sql.SqlCall;
-import org.apache.calcite.sql.SqlIdentifier;
-import org.apache.calcite.sql.SqlKind;
-import org.apache.calcite.sql.SqlNode;
-import org.apache.calcite.sql.SqlNodeList;
-import org.apache.calcite.sql.SqlOperator;
-import org.apache.calcite.sql.SqlWriter;
-
-import java.util.Iterator;
-import java.util.Map;
-import java.util.Objects;
-
-/** extend node to handle lateral explode dataSet */
-public class LateralViewExplodeNode extends ExtendNode {
-
-    public final String sqlNameView = "view";
-    public final String sqlNameExplode = "explode";
-    public final String sqlNameExplodeSplit = "explode_split";
-    private Map<String, String> delimiterMap;
-
-    public LateralViewExplodeNode(Map<String, String> delimiterMap) {
-        super();
-        this.delimiterMap = delimiterMap;
-    }
-
-    public void unparse(SqlWriter writer, SqlCall call, int leftPrec, int rightPrec) {
-        SqlOperator operator = call.getOperator();
-        writer.setNeedWhitespace(true);
-        assert call.operandCount() == 2;
-        writer.sep(SqlKind.SELECT.lowerName);
-        writer.sep(SqlIdentifier.STAR.toString());
-        writer.sep("from");
-        SqlWriter.Frame frame = writer.startList(SqlWriter.FrameTypeEnum.SIMPLE);
-        call.operand(0).unparse(writer, leftPrec, operator.getLeftPrec());
-        writer.setNeedWhitespace(true);
-        writer.sep(SqlKind.LATERAL.lowerName);
-        writer.sep(sqlNameView);
-        SqlNodeList list = (SqlNodeList) call.operand(1);
-        Ord node;
-        Iterator var = Ord.zip(list).iterator();
-        while (var.hasNext()) {
-            node = (Ord) var.next();
-            if (node.i > 0 && node.i % 2 > 0) {
-                writer.sep(SqlKind.AS.lowerName);
-                ((SqlNode) node.e).unparse(writer, 0, 0);
-                continue;
-            }
-            if (node.i > 0 && node.i % 2 == 0) {
-                writer.sep(SqlKind.LATERAL.lowerName);
-                writer.sep(sqlNameView);
-            }
-            explode(writer, (SqlNode) node.e);
-        }
-        writer.endList(frame);
-    }
-
-    public void explode(SqlWriter writer, SqlNode sqlNode) {
-        String delimiter =
-                Objects.nonNull(delimiterMap) && delimiterMap.containsKey(sqlNode.toString())
-                        ? delimiterMap.get(sqlNode.toString())
-                        : "";
-        if (delimiter.isEmpty()) {
-            writer.sep(sqlNameExplode);
-        } else {
-            writer.sep(sqlNameExplodeSplit);
-        }
-        SqlWriter.Frame frame = writer.startList("(", ")");
-        sqlNode.unparse(writer, 0, 0);
-        if (!delimiter.isEmpty()) {
-            writer.sep(",");
-            writer.sep(String.format("'%s'", delimiter));
-        }
-        writer.endList(frame);
-        writer.sep("tmp_sgl_" + sqlNode.toString());
-    }
-}
@@ -1,23 +0,0 @@
-package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
-
-import com.tencent.supersonic.common.pojo.enums.EngineType;
-import com.tencent.supersonic.headless.api.pojo.Measure;
-import org.apache.calcite.sql.SqlNode;
-import org.apache.calcite.sql.validate.SqlValidatorScope;
-
-public class MeasureNode extends SemanticNode {
-
-    public static SqlNode buildNonAgg(String alias, Measure measure, SqlValidatorScope scope,
-            EngineType engineType) throws Exception {
-        return getExpr(measure, alias, scope, engineType);
-    }
-
-    private static SqlNode getExpr(Measure measure, String alias, SqlValidatorScope scope,
-            EngineType enginType) throws Exception {
-        if (measure.getExpr() == null) {
-            return parse((alias.isEmpty() ? "" : alias + ".") + measure.getName(), scope,
-                    enginType);
-        }
-        return parse(measure.getExpr(), scope, enginType);
-    }
-}
@@ -1,36 +0,0 @@
-package com.tencent.supersonic.headless.core.translator.parser.calcite.node;
-
-import com.tencent.supersonic.common.pojo.enums.EngineType;
-import com.tencent.supersonic.headless.api.pojo.enums.MetricDefineType;
-import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
-import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
-import lombok.Data;
-import org.apache.calcite.sql.SqlNode;
-import org.apache.calcite.sql.validate.SqlValidatorScope;
-
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Optional;
-
-@Data
-public class MetricNode extends SemanticNode {
-
-    private MetricSchemaResp metric;
-    private Map<String, SqlNode> aggNode = new HashMap<>();
-    private Map<String, SqlNode> nonAggNode = new HashMap<>();
-    private Map<String, SqlNode> measureFilter = new HashMap<>();
-    private Map<String, String> aggFunction = new HashMap<>();
-
-    public static SqlNode build(MetricSchemaResp metric, SqlValidatorScope scope,
-            EngineType engineType) throws Exception {
-        return parse(metric.getExpr(), scope, engineType);
-    }
-
-    public static Boolean isMetricField(String name, S2CalciteSchema schema) {
-        Optional<MetricSchemaResp> metric = schema.getMetrics().stream()
-                .filter(m -> m.getName().equalsIgnoreCase(name)).findFirst();
-        return metric.isPresent()
-                && metric.get().getMetricDefineType().equals(MetricDefineType.FIELD);
-    }
-
-}
@@ -1,236 +0,0 @@
-package com.tencent.supersonic.headless.core.translator.parser.calcite.render;
-
-import com.tencent.supersonic.common.pojo.enums.EngineType;
-import com.tencent.supersonic.headless.api.pojo.Identify;
-import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
-import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
-import com.tencent.supersonic.headless.core.pojo.DataModel;
-import com.tencent.supersonic.headless.core.pojo.JoinRelation;
-import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
-import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
-import com.tencent.supersonic.headless.core.translator.parser.calcite.TableView;
-import com.tencent.supersonic.headless.core.translator.parser.calcite.node.DataModelNode;
-import com.tencent.supersonic.headless.core.translator.parser.calcite.node.IdentifyNode;
-import com.tencent.supersonic.headless.core.translator.parser.calcite.node.SemanticNode;
-import com.tencent.supersonic.headless.core.translator.parser.s2sql.Constants;
-import lombok.extern.slf4j.Slf4j;
-import org.apache.calcite.sql.*;
-import org.apache.calcite.sql.fun.SqlStdOperatorTable;
-import org.apache.calcite.sql.parser.SqlParserPos;
-import org.apache.calcite.sql.validate.SqlValidatorScope;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.lang3.tuple.Triple;
-import org.springframework.util.CollectionUtils;
-
-import java.util.*;
-import java.util.stream.Collectors;
-
-/** process the join conditions when the source number is greater than 1 */
-@Slf4j
-public class JoinRender extends Renderer {
-
-    @Override
-    public void render(OntologyQuery ontologyQuery, List<DataModel> dataModels,
-            SqlValidatorScope scope, S2CalciteSchema schema) throws Exception {
-        SqlNode left = null;
-        TableView leftTable = null;
-        Map<String, SqlNode> outerSelect = new HashMap<>();
-        Map<String, String> beforeModels = new HashMap<>();
-        EngineType engineType = schema.getOntology().getDatabase().getType();
-
-        for (int i = 0; i < dataModels.size(); i++) {
-            final DataModel dataModel = dataModels.get(i);
-            final Set<DimSchemaResp> queryDimensions =
-                    ontologyQuery.getDimensionsByModel(dataModel.getId());
-            final Set<MetricSchemaResp> queryMetrics =
-                    ontologyQuery.getMetricsByModel(dataModel.getId());
-
-            List<String> primary = new ArrayList<>();
-            for (Identify identify : dataModel.getIdentifiers()) {
-                primary.add(identify.getName());
-            }
-
-            TableView tableView =
-                    renderOne(queryMetrics, queryDimensions, dataModel, scope, schema);
-            log.info("tableView {}", StringUtils.normalizeSpace(tableView.getTable().toString()));
-            String alias = Constants.JOIN_TABLE_PREFIX + dataModel.getName();
-            tableView.setAlias(alias);
-            tableView.setPrimary(primary);
-            tableView.setDataModel(dataModel);
-            for (String field : tableView.getFields()) {
-                outerSelect.put(field, SemanticNode.parse(alias + "." + field, scope, engineType));
-            }
-            if (left == null) {
-                left = SemanticNode.buildAs(tableView.getAlias(), getTable(tableView));
-            } else {
-                left = buildJoin(left, leftTable, tableView, beforeModels, dataModel, schema,
-                        scope);
-            }
-            leftTable = tableView;
-            beforeModels.put(dataModel.getName(), leftTable.getAlias());
-        }
-
-        for (Map.Entry<String, SqlNode> entry : outerSelect.entrySet()) {
-            tableView.getSelect().add(entry.getValue());
-        }
-        tableView.setTable(left);
-    }
-
-    private SqlNode getTable(TableView tableView) {
-        return SemanticNode.getTable(tableView.getTable());
-    }
-
-    private SqlNode buildJoin(SqlNode leftNode, TableView leftTable, TableView rightTable,
-            Map<String, String> before, DataModel dataModel, S2CalciteSchema schema,
-            SqlValidatorScope scope) throws Exception {
-        EngineType engineType = schema.getOntology().getDatabase().getType();
-        SqlNode condition =
-                getCondition(leftTable, rightTable, dataModel, schema, scope, engineType);
-        SqlLiteral sqlLiteral = SemanticNode.getJoinSqlLiteral("");
-        JoinRelation matchJoinRelation = getMatchJoinRelation(before, rightTable, schema);
-        SqlNode joinRelationCondition;
-        if (!CollectionUtils.isEmpty(matchJoinRelation.getJoinCondition())) {
-            sqlLiteral = SemanticNode.getJoinSqlLiteral(matchJoinRelation.getJoinType());
-            joinRelationCondition = getCondition(matchJoinRelation, scope, engineType);
-            condition = joinRelationCondition;
-        }
-
-        return new SqlJoin(SqlParserPos.ZERO, leftNode,
-                SqlLiteral.createBoolean(false, SqlParserPos.ZERO), sqlLiteral,
-                SemanticNode.buildAs(rightTable.getAlias(), getTable(rightTable)),
-                SqlLiteral.createSymbol(JoinConditionType.ON, SqlParserPos.ZERO), condition);
-    }
-
-    private JoinRelation getMatchJoinRelation(Map<String, String> before, TableView tableView,
-            S2CalciteSchema schema) {
-        JoinRelation matchJoinRelation = JoinRelation.builder().build();
-        if (!CollectionUtils.isEmpty(schema.getJoinRelations())) {
-            for (JoinRelation joinRelation : schema.getJoinRelations()) {
-                if (joinRelation.getRight().equalsIgnoreCase(tableView.getDataModel().getName())
-                        && before.containsKey(joinRelation.getLeft())) {
-                    matchJoinRelation.setJoinCondition(joinRelation.getJoinCondition().stream()
-                            .map(r -> Triple.of(
-                                    before.get(joinRelation.getLeft()) + "." + r.getLeft(),
-                                    r.getMiddle(), tableView.getAlias() + "." + r.getRight()))
-                            .collect(Collectors.toList()));
-                    matchJoinRelation.setJoinType(joinRelation.getJoinType());
-                    // Added join condition judgment to solve the problem of join condition order
-                } else if (joinRelation.getLeft()
-                        .equalsIgnoreCase(tableView.getDataModel().getName())
-                        && before.containsKey(joinRelation.getRight())) {
-                    matchJoinRelation.setJoinCondition(joinRelation.getJoinCondition().stream()
-                            .map(r -> Triple.of(
-                                    before.get(joinRelation.getRight()) + "." + r.getRight(),
-                                    r.getMiddle(), tableView.getAlias() + "." + r.getLeft()))
-                            .collect(Collectors.toList()));
-                    matchJoinRelation.setJoinType(joinRelation.getJoinType());
-                }
-            }
-        }
-        return matchJoinRelation;
-    }
-
-    private SqlNode getCondition(JoinRelation joinRelation, SqlValidatorScope scope,
-            EngineType engineType) throws Exception {
-        SqlNode condition = null;
-        for (Triple<String, String, String> con : joinRelation.getJoinCondition()) {
-            List<SqlNode> ons = new ArrayList<>();
-            ons.add(SemanticNode.parse(con.getLeft(), scope, engineType));
-            ons.add(SemanticNode.parse(con.getRight(), scope, engineType));
-            if (Objects.isNull(condition)) {
-                condition = new SqlBasicCall(SemanticNode.getBinaryOperator(con.getMiddle()), ons,
-                        SqlParserPos.ZERO, null);
-                continue;
-            }
-            SqlNode addCondition = new SqlBasicCall(SemanticNode.getBinaryOperator(con.getMiddle()),
-                    ons, SqlParserPos.ZERO, null);
-            condition = new SqlBasicCall(SqlStdOperatorTable.AND,
-                    new ArrayList<>(Arrays.asList(condition, addCondition)), SqlParserPos.ZERO,
-                    null);
-        }
-        return condition;
-    }
-
-    private SqlNode getCondition(TableView left, TableView right, DataModel dataModel,
-            S2CalciteSchema schema, SqlValidatorScope scope, EngineType engineType)
-            throws Exception {
-
-        Set<String> selectLeft = SemanticNode.getSelect(left.getTable());
-        Set<String> selectRight = SemanticNode.getSelect(right.getTable());
-        selectLeft.retainAll(selectRight);
-        SqlNode condition = null;
-        for (String on : selectLeft) {
-            if (!isDimension(on, dataModel, schema)) {
-                continue;
-            }
-            if (IdentifyNode.isForeign(on, left.getDataModel().getIdentifiers())) {
-                if (!IdentifyNode.isPrimary(on, right.getDataModel().getIdentifiers())) {
-                    continue;
-                }
-            }
-            if (IdentifyNode.isForeign(on, right.getDataModel().getIdentifiers())) {
-                if (!IdentifyNode.isPrimary(on, left.getDataModel().getIdentifiers())) {
-                    continue;
-                }
-            }
-            List<SqlNode> ons = new ArrayList<>();
-            ons.add(SemanticNode.parse(left.getAlias() + "." + on, scope, engineType));
-            ons.add(SemanticNode.parse(right.getAlias() + "." + on, scope, engineType));
-            if (condition == null) {
-                condition =
-                        new SqlBasicCall(SqlStdOperatorTable.EQUALS, ons, SqlParserPos.ZERO, null);
-                continue;
-            }
-            SqlNode addCondition =
-                    new SqlBasicCall(SqlStdOperatorTable.EQUALS, ons, SqlParserPos.ZERO, null);
-            condition = new SqlBasicCall(SqlStdOperatorTable.AND,
-                    new ArrayList<>(Arrays.asList(condition, addCondition)), SqlParserPos.ZERO,
-                    null);
-        }
-        return condition;
-    }
-
-
-    public static TableView renderOne(Set<MetricSchemaResp> queryMetrics,
-            Set<DimSchemaResp> queryDimensions, DataModel dataModel, SqlValidatorScope scope,
-            S2CalciteSchema schema) {
-        TableView tableView = new TableView();
-        EngineType engineType = schema.getOntology().getDatabase().getType();
-        Set<String> queryFields = tableView.getFields();
-        queryMetrics.stream().forEach(m -> queryFields.addAll(m.getFields()));
-        queryDimensions.stream().forEach(m -> queryFields.add(m.getBizName()));
-
-        try {
-            for (String field : queryFields) {
-                tableView.getSelect().add(SemanticNode.parse(field, scope, engineType));
-            }
-            tableView.setTable(DataModelNode.build(dataModel, scope));
-        } catch (Exception e) {
-            log.error("Failed to create sqlNode for data model {}", dataModel);
-        }
-
-        return tableView;
-    }
-
-    public static boolean isDimension(String name, DataModel dataModel, S2CalciteSchema schema) {
-        Optional<DimSchemaResp> dimension = dataModel.getDimensions().stream()
-                .filter(d -> d.getName().equalsIgnoreCase(name)).findFirst();
-        if (dimension.isPresent()) {
-            return true;
-        }
-        Optional<Identify> identify = dataModel.getIdentifiers().stream()
-                .filter(i -> i.getName().equalsIgnoreCase(name)).findFirst();
-        if (identify.isPresent()) {
-            return true;
-        }
-        if (schema.getDimensions().containsKey(dataModel.getName())) {
-            Optional<DimSchemaResp> dataSourceDim = schema.getDimensions().get(dataModel.getName())
-                    .stream().filter(d -> d.getName().equalsIgnoreCase(name)).findFirst();
-            if (dataSourceDim.isPresent()) {
-                return true;
-            }
-        }
-        return false;
-    }
-
-}
@@ -1,34 +0,0 @@
-package com.tencent.supersonic.headless.core.translator.parser.calcite.render;
-
-import com.tencent.supersonic.headless.core.pojo.DataModel;
-import com.tencent.supersonic.headless.core.pojo.OntologyQuery;
-import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
-import com.tencent.supersonic.headless.core.translator.parser.calcite.TableView;
-import com.tencent.supersonic.headless.core.translator.parser.calcite.node.SemanticNode;
-import lombok.Data;
-import org.apache.calcite.sql.SqlNode;
-import org.apache.calcite.sql.validate.SqlValidatorScope;
-
-import java.util.List;
-
-/** process TableView */
-@Data
-public abstract class Renderer {
-
-    protected TableView tableView = new TableView();
-
-    public void setTable(SqlNode table) {
-        tableView.setTable(table);
-    }
-
-    public SqlNode build() {
-        return tableView.build();
-    }
-
-    public SqlNode builderAs(String alias) throws Exception {
-        return SemanticNode.buildAs(alias, tableView.build());
-    }
-
-    public abstract void render(OntologyQuery ontologyQuery, List<DataModel> dataModels,
-            SqlValidatorScope scope, S2CalciteSchema schema) throws Exception;
-}
@@ -5,7 +5,7 @@ import javax.sql.DataSource;
 import com.alibaba.druid.util.StringUtils;
 import com.tencent.supersonic.common.util.MD5Util;
 import com.tencent.supersonic.headless.api.pojo.enums.DataType;
-import com.tencent.supersonic.headless.core.pojo.Database;
+import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
 import com.tencent.supersonic.headless.core.pojo.JdbcDataSource;
 import lombok.Getter;
 import lombok.extern.slf4j.Slf4j;
@@ -18,14 +18,7 @@ import java.util.HashSet;
 import java.util.Set;
 import java.util.regex.Matcher;
 
-import static com.tencent.supersonic.common.pojo.Constants.AT_SYMBOL;
+import static com.tencent.supersonic.common.pojo.Constants.*;
-import static com.tencent.supersonic.common.pojo.Constants.COLON;
-import static com.tencent.supersonic.common.pojo.Constants.DOUBLE_SLASH;
-import static com.tencent.supersonic.common.pojo.Constants.EMPTY;
-import static com.tencent.supersonic.common.pojo.Constants.JDBC_PREFIX_FORMATTER;
-import static com.tencent.supersonic.common.pojo.Constants.NEW_LINE_CHAR;
-import static com.tencent.supersonic.common.pojo.Constants.PATTERN_JDBC_TYPE;
-import static com.tencent.supersonic.common.pojo.Constants.SPACE;
 
 /** tools functions about jdbc */
 @Slf4j
@@ -39,7 +32,7 @@ public class JdbcDataSourceUtils {
         this.jdbcDataSource = jdbcDataSource;
     }
 
-    public static boolean testDatabase(Database database) {
+    public static boolean testDatabase(DatabaseResp database) {
 
         try {
             Class.forName(getDriverClassName(database.getUrl()));
@@ -146,11 +139,11 @@ public class JdbcDataSourceUtils {
         return MD5Util.getMD5(sb.toString(), true, 64);
     }
 
-    public DataSource getDataSource(Database database) throws RuntimeException {
+    public DataSource getDataSource(DatabaseResp database) throws RuntimeException {
         return jdbcDataSource.getDataSource(database);
     }
 
-    public Connection getConnection(Database database) throws RuntimeException {
+    public Connection getConnection(DatabaseResp database) throws RuntimeException {
         Connection conn = getConnectionWithRetry(database);
         if (conn == null) {
             try {
@@ -166,7 +159,7 @@ public class JdbcDataSourceUtils {
         return conn;
     }
 
-    private Connection getConnectionWithRetry(Database database) {
+    private Connection getConnectionWithRetry(DatabaseResp database) {
         int rc = 1;
         for (;;) {
 
@@ -193,7 +186,7 @@ public class JdbcDataSourceUtils {
         }
     }
 
-    public void releaseDataSource(Database database) {
+    public void releaseDataSource(DatabaseResp database) {
         jdbcDataSource.removeDatasource(database);
     }
 }
@@ -3,11 +3,10 @@ package com.tencent.supersonic.headless.core.utils;
 import javax.sql.DataSource;
 
 import com.tencent.supersonic.common.pojo.QueryColumn;
-import com.tencent.supersonic.common.pojo.enums.EngineType;
 import com.tencent.supersonic.common.util.DateUtils;
 import com.tencent.supersonic.headless.api.pojo.enums.DataType;
+import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
 import com.tencent.supersonic.headless.api.pojo.response.SemanticQueryResp;
-import com.tencent.supersonic.headless.core.pojo.Database;
 import com.tencent.supersonic.headless.core.pojo.JdbcDataSource;
 import lombok.Getter;
 import lombok.extern.slf4j.Slf4j;
@@ -24,11 +23,7 @@ import java.sql.SQLException;
 import java.time.LocalDate;
 import java.time.LocalDateTime;
 import java.time.format.DateTimeFormatter;
-import java.util.ArrayList;
+import java.util.*;
-import java.util.Date;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
 
 import static com.tencent.supersonic.common.pojo.Constants.AT_SYMBOL;
 
@@ -38,7 +33,7 @@ import static com.tencent.supersonic.common.pojo.Constants.AT_SYMBOL;
 public class SqlUtils {
 
     @Getter
-    private Database database;
+    private DatabaseResp database;
 
     @Autowired
     private JdbcDataSource jdbcDataSource;
@@ -57,15 +52,15 @@ public class SqlUtils {
 
     public SqlUtils() {}
 
-    public SqlUtils(Database database) {
+    public SqlUtils(DatabaseResp database) {
         this.database = database;
         this.dataTypeEnum = DataType.urlOf(database.getUrl());
     }
 
-    public SqlUtils init(Database database) {
+    public SqlUtils init(DatabaseResp database) {
         return SqlUtilsBuilder.getBuilder()
                 .withName(database.getId() + AT_SYMBOL + database.getName())
-                .withType(database.getType().getName()).withJdbcUrl(database.getUrl())
+                .withType(database.getType()).withJdbcUrl(database.getUrl())
                 .withUsername(database.getUsername()).withPassword(database.getPassword())
                 .withJdbcDataSource(this.jdbcDataSource).withResultLimit(this.resultLimit)
                 .withIsQueryLogEnable(this.isQueryLogEnable).build();
@@ -225,9 +220,9 @@ public class SqlUtils {
         }
 
         public SqlUtils build() {
-            Database database = Database.builder().name(this.name)
+            DatabaseResp database = DatabaseResp.builder().name(this.name)
-                    .type(EngineType.fromString(this.type.toUpperCase())).url(this.jdbcUrl)
+                    .type(this.type.toUpperCase()).url(this.jdbcUrl).username(this.username)
-                    .username(this.username).password(this.password).build();
+                    .password(this.password).build();
 
             SqlUtils sqlUtils = new SqlUtils(database);
             sqlUtils.jdbcDataSource = this.jdbcDataSource;
@@ -3,23 +3,16 @@ package com.tencent.supersonic.headless.server.manager;
|
|||||||
import com.google.common.collect.Lists;
|
import com.google.common.collect.Lists;
|
||||||
import com.google.common.collect.Maps;
|
import com.google.common.collect.Maps;
|
||||||
import com.tencent.supersonic.common.pojo.ModelRela;
|
import com.tencent.supersonic.common.pojo.ModelRela;
|
||||||
import com.tencent.supersonic.common.pojo.enums.DataTypeEnums;
|
|
||||||
import com.tencent.supersonic.common.pojo.enums.FilterOperatorEnum;
|
import com.tencent.supersonic.common.pojo.enums.FilterOperatorEnum;
|
||||||
import com.tencent.supersonic.headless.api.pojo.*;
|
import com.tencent.supersonic.headless.api.pojo.*;
|
||||||
|
import com.tencent.supersonic.headless.api.pojo.enums.DimensionType;
|
||||||
import com.tencent.supersonic.headless.api.pojo.enums.MetricDefineType;
|
import com.tencent.supersonic.headless.api.pojo.enums.MetricDefineType;
|
||||||
import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
|
import com.tencent.supersonic.headless.api.pojo.response.*;
|
||||||
import com.tencent.supersonic.headless.api.pojo.response.DimSchemaResp;
|
|
||||||
import com.tencent.supersonic.headless.api.pojo.response.MetricSchemaResp;
|
|
||||||
import com.tencent.supersonic.headless.api.pojo.response.SemanticSchemaResp;
|
|
||||||
import com.tencent.supersonic.headless.core.pojo.DataModel;
|
|
||||||
import com.tencent.supersonic.headless.core.pojo.JoinRelation;
|
import com.tencent.supersonic.headless.core.pojo.JoinRelation;
|
||||||
import com.tencent.supersonic.headless.core.pojo.Ontology;
|
import com.tencent.supersonic.headless.core.pojo.Ontology;
|
||||||
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
|
import com.tencent.supersonic.headless.core.translator.parser.calcite.S2CalciteSchema;
|
||||||
import com.tencent.supersonic.headless.core.translator.parser.s2sql.*;
|
|
||||||
import com.tencent.supersonic.headless.core.translator.parser.s2sql.Materialization.TimePartType;
|
|
||||||
import com.tencent.supersonic.headless.server.pojo.yaml.*;
|
import com.tencent.supersonic.headless.server.pojo.yaml.*;
|
||||||
import com.tencent.supersonic.headless.server.service.SchemaService;
|
import com.tencent.supersonic.headless.server.service.SchemaService;
|
||||||
import com.tencent.supersonic.headless.server.utils.DatabaseConverter;
|
|
||||||
import lombok.extern.slf4j.Slf4j;
|
import lombok.extern.slf4j.Slf4j;
|
||||||
import org.apache.commons.lang3.tuple.Triple;
|
import org.apache.commons.lang3.tuple.Triple;
|
||||||
import org.springframework.stereotype.Service;
|
import org.springframework.stereotype.Service;
|
||||||
@@ -41,7 +34,15 @@ public class SemanticSchemaManager {
 
     public Ontology buildOntology(SemanticSchemaResp semanticSchemaResp) {
         Ontology ontology = new Ontology();
-        ontology.setMetrics(semanticSchemaResp.getMetrics());
+        Map<String, List<MetricSchemaResp>> model2Metrics = Maps.newHashMap();
+        semanticSchemaResp.getMetrics().forEach(dim -> {
+            if (!model2Metrics.containsKey(dim.getModelBizName())) {
+                model2Metrics.put(dim.getModelBizName(), Lists.newArrayList());
+            }
+            model2Metrics.get(dim.getModelBizName()).add(dim);
+        });
+        ontology.setMetricMap(model2Metrics);
+
         Map<String, List<DimSchemaResp>> model2Dimensions = Maps.newHashMap();
         semanticSchemaResp.getDimensions().forEach(dim -> {
             if (!model2Dimensions.containsKey(dim.getModelBizName())) {
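Note: the forEach-based bucketing of metrics by model bizName above is equivalent to a groupingBy collector. A minimal sketch of the same grouping, assuming only the accessors shown in this diff (Collectors imported as elsewhere in the class):

    // Equivalent formulation with java.util.stream.Collectors.groupingBy
    Map<String, List<MetricSchemaResp>> model2Metrics = semanticSchemaResp.getMetrics().stream()
            .collect(Collectors.groupingBy(MetricSchemaResp::getModelBizName));
    ontology.setMetricMap(model2Metrics);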
@@ -58,16 +59,16 @@ public class SemanticSchemaManager {
         schemaService.getSchemaYamlTpl(semanticSchemaResp, dimensionYamlTpls, dataModelYamlTpls,
                 metricYamlTpls, modelIdName);
         DatabaseResp databaseResp = semanticSchemaResp.getDatabaseResp();
-        ontology.setDatabase(DatabaseConverter.convert(databaseResp));
+        ontology.setDatabase(databaseResp);
         if (!CollectionUtils.isEmpty(semanticSchemaResp.getModelRelas())) {
             ontology.setJoinRelations(
                     getJoinRelation(semanticSchemaResp.getModelRelas(), modelIdName));
         }
         if (!dataModelYamlTpls.isEmpty()) {
-            Map<String, DataModel> dataModelMap =
+            Map<String, ModelResp> dataModelMap =
                     dataModelYamlTpls.stream().map(SemanticSchemaManager::getDataModel).collect(
-                            Collectors.toMap(DataModel::getName, item -> item, (k1, k2) -> k1));
-            ontology.setDataModelMap(dataModelMap);
+                            Collectors.toMap(ModelResp::getName, item -> item, (k1, k2) -> k1));
+            ontology.setModelMap(dataModelMap);
         }
 
         return ontology;
@@ -77,30 +78,30 @@ public class SemanticSchemaManager {
         return getMetricsByMetricYamlTpl(t);
     }
 
-    public static List<DimSchemaResp> getDimensions(final List<DimensionYamlTpl> t) {
+    public static List<Dimension> getDimensions(final List<DimensionYamlTpl> t) {
         return getDimension(t);
     }
 
-    public static DataModel getDataModel(final DataModelYamlTpl d) {
-        DataModel dataModel = DataModel.builder().id(d.getId()).modelId(d.getSourceId())
-                .type(d.getType()).sqlQuery(d.getSqlQuery()).name(d.getName())
-                .tableQuery(d.getTableQuery()).identifiers(getIdentify(d.getIdentifiers()))
-                .measures(getMeasureParams(d.getMeasures()))
-                .dimensions(getDimensions(d.getDimensions())).build();
-        dataModel.setAggTime(getDataModelAggTime(dataModel.getDimensions()));
-        if (Objects.nonNull(d.getModelSourceTypeEnum())) {
-            dataModel.setTimePartType(TimePartType.of(d.getModelSourceTypeEnum().name()));
-        }
-        return dataModel;
-    }
-
-    private static String getDataModelAggTime(List<DimSchemaResp> dimensions) {
-        Optional<DimSchemaResp> timeDimension =
-                dimensions.stream().filter(DimSchemaResp::isTimeDimension).findFirst();
-        if (timeDimension.isPresent() && Objects.nonNull(timeDimension.get().getTypeParams())) {
-            return timeDimension.get().getTypeParams().getTimeGranularity();
-        }
-        return Constants.DIMENSION_TYPE_TIME_GRANULARITY_NONE;
+    public static ModelResp getDataModel(final DataModelYamlTpl d) {
+        // ModelResp dataModel = ModelResp.builder()(d.getId()).modelId(d.getSourceId())
+        // .type(d.getType()).sqlQuery(d.getSqlQuery()).name(d.getName())
+        // .tableQuery(d.getTableQuery()).identifiers(getIdentify(d.getIdentifiers()))
+        // .measures(getMeasureParams(d.getMeasures()))
+        // .dimensions(getDimensions(d.getDimensions())).build();
+        ModelResp dataModel = new ModelResp();
+        dataModel.setId(d.getId());
+        dataModel.setName(d.getName());
+        ModelDetail modelDetail = new ModelDetail();
+        dataModel.setModelDetail(modelDetail);
+
+        modelDetail.setDbType(d.getType());
+        modelDetail.setSqlQuery(d.getSqlQuery());
+        modelDetail.setTableQuery(d.getTableQuery());
+        modelDetail.getIdentifiers().addAll(getIdentify(d.getIdentifiers()));
+        modelDetail.getMeasures().addAll(getMeasureParams(d.getMeasures()));
+        modelDetail.getDimensions().addAll(getDimensions(d.getDimensions()));
+
+        return dataModel;
     }
 
     private static List<MetricSchemaResp> getMetricsByMetricYamlTpl(
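For orientation, a minimal sketch of how the refactored getDataModel is consumed, mirroring the buildOntology hunk above (the surrounding YAML-template setup is assumed):

    // Each DataModelYamlTpl becomes a ModelResp keyed by name; on duplicate names the first entry wins.
    Map<String, ModelResp> dataModelMap = dataModelYamlTpls.stream()
            .map(SemanticSchemaManager::getDataModel)
            .collect(Collectors.toMap(ModelResp::getName, item -> item, (k1, k2) -> k1));
    ontology.setModelMap(dataModelMap);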
@@ -170,24 +171,16 @@ public class SemanticSchemaManager {
         return measures;
     }
 
-    private static List<DimSchemaResp> getDimension(List<DimensionYamlTpl> dimensionYamlTpls) {
-        List<DimSchemaResp> dimensions = new ArrayList<>();
+    private static List<Dimension> getDimension(List<DimensionYamlTpl> dimensionYamlTpls) {
+        List<Dimension> dimensions = new ArrayList<>();
         for (DimensionYamlTpl dimensionYamlTpl : dimensionYamlTpls) {
-            DimSchemaResp dimension = new DimSchemaResp();
-            // dimension.setType(dimensionYamlTpl.getType());
+            Dimension dimension = new Dimension();
+            if (Objects.nonNull(dimensionYamlTpl.getType())) {
+                dimension.setType(DimensionType.valueOf(dimensionYamlTpl.getType()));
+            }
             dimension.setExpr(dimensionYamlTpl.getExpr());
             dimension.setName(dimensionYamlTpl.getName());
             dimension.setBizName(dimensionYamlTpl.getBizName());
-            dimension.setDefaultValues(dimensionYamlTpl.getDefaultValues());
-            if (Objects.nonNull(dimensionYamlTpl.getDataType())) {
-                dimension.setDataType(dimensionYamlTpl.getDataType());
-            }
-            if (Objects.isNull(dimension.getDataType())) {
-                dimension.setDataType(DataTypeEnums.UNKNOWN);
-            }
-            if (Objects.nonNull(dimensionYamlTpl.getExt())) {
-                dimension.setExt(dimensionYamlTpl.getExt());
-            }
             dimension.setTypeParams(dimensionYamlTpl.getTypeParams());
             dimensions.add(dimension);
         }
@@ -247,11 +240,11 @@ public class SemanticSchemaManager {
         }
     }
 
-    public static void update(S2CalciteSchema schema, DataModel datasourceYamlTpl)
+    public static void update(S2CalciteSchema schema, ModelResp datasourceYamlTpl)
             throws Exception {
         if (schema != null) {
             String dataSourceName = datasourceYamlTpl.getName();
-            Optional<Entry<String, DataModel>> datasourceYamlTplMap =
+            Optional<Entry<String, ModelResp>> datasourceYamlTplMap =
                     schema.getDataModels().entrySet().stream()
                             .filter(t -> t.getKey().equalsIgnoreCase(dataSourceName)).findFirst();
             if (datasourceYamlTplMap.isPresent()) {
@@ -16,17 +16,12 @@ import com.tencent.supersonic.headless.api.pojo.response.ModelResp;
 import com.tencent.supersonic.headless.api.pojo.response.SemanticQueryResp;
 import com.tencent.supersonic.headless.core.adaptor.db.DbAdaptor;
 import com.tencent.supersonic.headless.core.adaptor.db.DbAdaptorFactory;
-import com.tencent.supersonic.headless.core.pojo.Database;
 import com.tencent.supersonic.headless.core.utils.JdbcDataSourceUtils;
 import com.tencent.supersonic.headless.core.utils.SqlUtils;
 import com.tencent.supersonic.headless.core.utils.SqlVariableParseUtils;
 import com.tencent.supersonic.headless.server.persistence.dataobject.DatabaseDO;
 import com.tencent.supersonic.headless.server.persistence.mapper.DatabaseDOMapper;
-import com.tencent.supersonic.headless.server.pojo.DatabaseParameter;
-import com.tencent.supersonic.headless.server.pojo.DbParameterFactory;
-import com.tencent.supersonic.headless.server.pojo.DbParametersBuilder;
-import com.tencent.supersonic.headless.server.pojo.DefaultParametersBuilder;
-import com.tencent.supersonic.headless.server.pojo.ModelFilter;
+import com.tencent.supersonic.headless.server.pojo.*;
 import com.tencent.supersonic.headless.server.service.DatabaseService;
 import com.tencent.supersonic.headless.server.service.ModelService;
 import com.tencent.supersonic.headless.server.utils.DatabaseConverter;
@@ -58,7 +53,7 @@ public class DatabaseServiceImpl extends ServiceImpl<DatabaseDOMapper, DatabaseD
 
     @Override
     public boolean testConnect(DatabaseReq databaseReq, User user) {
-        Database database = DatabaseConverter.convert(databaseReq);
+        DatabaseResp database = DatabaseConverter.convert(databaseReq);
         return JdbcDataSourceUtils.testDatabase(database);
     }
 
@@ -157,7 +152,7 @@ public class DatabaseServiceImpl extends ServiceImpl<DatabaseDOMapper, DatabaseD
 
     @Override
     public SemanticQueryResp executeSql(String sql, DatabaseResp databaseResp) {
-        return queryWithColumns(sql, DatabaseConverter.convert(databaseResp));
+        return queryWithColumns(sql, databaseResp);
     }
 
     @Override
@@ -192,7 +187,7 @@ public class DatabaseServiceImpl extends ServiceImpl<DatabaseDOMapper, DatabaseD
         return result;
     }
 
-    private SemanticQueryResp queryWithColumns(String sql, Database database) {
+    private SemanticQueryResp queryWithColumns(String sql, DatabaseResp database) {
         SemanticQueryResp queryResultWithColumns = new SemanticQueryResp();
         SqlUtils sqlUtils = this.sqlUtils.init(database);
         log.info("query SQL: {}", sql);
@@ -1,11 +1,9 @@
 package com.tencent.supersonic.headless.server.utils;
 
 import com.alibaba.fastjson.JSONObject;
-import com.tencent.supersonic.common.pojo.enums.EngineType;
 import com.tencent.supersonic.headless.api.pojo.request.DatabaseReq;
 import com.tencent.supersonic.headless.api.pojo.response.DatabaseResp;
 import com.tencent.supersonic.headless.core.pojo.ConnectInfo;
-import com.tencent.supersonic.headless.core.pojo.Database;
 import com.tencent.supersonic.headless.server.persistence.dataobject.DatabaseDO;
 import org.apache.commons.lang3.StringUtils;
 import org.springframework.beans.BeanUtils;
@@ -14,15 +12,8 @@ import java.util.Arrays;
 
 public class DatabaseConverter {
 
-    public static Database convert(DatabaseResp databaseResp) {
-        Database database = new Database();
-        BeanUtils.copyProperties(databaseResp, database);
-        database.setType(EngineType.fromString(databaseResp.getType().toUpperCase()));
-        return database;
-    }
-
-    public static Database convert(DatabaseReq databaseReq) {
-        Database database = new Database();
+    public static DatabaseResp convert(DatabaseReq databaseReq) {
+        DatabaseResp database = new DatabaseResp();
         BeanUtils.copyProperties(databaseReq, database);
         return database;
     }
@@ -69,8 +60,7 @@ public class DatabaseConverter {
         return databaseResp;
     }
 
-    public static ConnectInfo getConnectInfo(DatabaseResp databaseResp) {
-        Database database = convert(databaseResp);
+    public static ConnectInfo getConnectInfo(DatabaseResp database) {
         ConnectInfo connectInfo = new ConnectInfo();
         connectInfo.setUserName(database.getUsername());
         connectInfo.setPassword(database.passwordDecrypt());
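Taken together, a hedged sketch of the simplified conversion path after this change (method names as in the diff; DatabaseReq construction and error handling are assumed):

    // DatabaseReq is converted straight to DatabaseResp; the intermediate core Database object is gone.
    DatabaseResp database = DatabaseConverter.convert(databaseReq);
    ConnectInfo connectInfo = DatabaseConverter.getConnectInfo(database);
    boolean reachable = JdbcDataSourceUtils.testDatabase(database);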